package seedu.task.ui;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.layout.AnchorPane;
import javafx.scene.web.WebView;
import javafx.stage.Stage;
import seedu.task.commons.core.LogsCenter;
import seedu.task.commons.util.FxViewUtil;
import java.util.logging.Logger;
/**
* Controller for a help page
*/
public class HelpWindow extends UiPart {
private static final Logger logger = LogsCenter.getLogger(HelpWindow.class);
private static final String ICON = "/images/help_icon.png";
private static final String FXML = "HelpWindow.fxml";
private static final String TITLE = "Help";
//@@author A0153467Y
private static final String COMMANDSUMMARY_URL = HelpWindow.class.getClassLoader().getResource("command-summary/CommandSummary.html").toExternalForm();
//@@author
private AnchorPane mainPane;
private Stage dialogStage;
public static HelpWindow load(Stage primaryStage) {
logger.fine("Showing help page about the application.");
HelpWindow helpWindow = UiPartLoader.loadUiPart(primaryStage, new HelpWindow());
helpWindow.configure();
return helpWindow;
}
@Override
public void setNode(Node node) {
mainPane = (AnchorPane) node;
}
@Override
public String getFxmlPath() {
return FXML;
}
private void configure() {
Scene scene = new Scene(mainPane, 700, 600);
//Null passed as the parent stage to make it non-modal.
dialogStage = createDialogStage(TITLE, null, scene);
setIcon(dialogStage, ICON);
WebView browser = new WebView();
browser.getEngine().load(COMMANDSUMMARY_URL);
FxViewUtil.applyAnchorBoundaryParameters(browser, 0.0, 0.0, 0.0, 0.0);
mainPane.getChildren().add(browser);
}
public void show() {
dialogStage.showAndWait();
}
}
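A minimal caller sketch for the class above (primaryStage is an assumed JavaFX Stage supplied by the main window, matching the load(...) signature):
HelpWindow helpWindow = HelpWindow.load(primaryStage); // builds the scene and embeds the command summary WebView
helpWindow.show(); // showAndWait() returns only after the dialog is closed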
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package service;
import be.luckycode.projetawebservice.Client;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import org.codehaus.jackson.map.ObjectMapper;
/**
*
* @author michael
*/
@Stateless
@Path("clients")
public class ClientFacadeREST extends AbstractFacade<Client> {
@PersistenceContext(unitName = "be.luckycode_projeta-webservice_war_1.0-SNAPSHOTPU")
private EntityManager em;
public ClientFacadeREST() {
super(Client.class);
}
@POST
@Override
@Consumes({"application/xml", "application/json"})
public void create(Client entity) {
super.create(entity);
}
@PUT
@Override
@Consumes({"application/xml", "application/json"})
public void edit(Client entity) {
super.edit(entity);
}
@DELETE
@Path("{id}")
public void remove(@PathParam("id") Integer id) {
super.remove(super.find(id));
}
@GET
@Path("{id}")
@Produces({"application/xml", "application/json"})
public Client find(@PathParam("id") Integer id) {
return super.find(id);
}
@GET
@Override
@Path("all")
@Produces("application/json")
public List<Client> findAll() {
return super.findAll();
}
@GET
@Path("{from}/{to}")
@Produces({"application/xml", "application/json"})
public List<Client> findRange(@PathParam("from") Integer from, @PathParam("to") Integer to) {
return super.findRange(new int[]{from, to});
}
@GET
@Path("count")
@Produces("text/plain")
public String countREST() {
return String.valueOf(super.count());
}
@java.lang.Override
protected EntityManager getEntityManager() {
return em;
}
@GET
@Path("names")
@Produces("application/json")
public String getClientNames() {
List<Client> clients = super.findAll();
String retVal = "";
ObjectMapper mapper = new ObjectMapper();
List<Map<String, Object>> clientList = new ArrayList<Map<String, Object>>();
for (Client cli : clients) {
Map<String, Object> clientData = generateClientJSON(cli);
clientList.add(clientData);
}
HashMap<String, Object> retClients = new HashMap<String, Object>();
retClients.put("clients", clientList);
try {
retVal = mapper.writeValueAsString(retClients);
} catch (IOException ex) {
Logger.getLogger(ClientFacadeREST.class.getName()).log(Level.SEVERE, null, ex);
}
return retVal;
}
private Map<String, Object> generateClientJSON(Client client) {
Map<String, Object> clientData = new HashMap<String, Object>();
clientData.put("clientId", client.getClientId().toString());
clientData.put("clientName", client.getClientName());
/*if (client.getFirstName() != null)
clientData.put("firstName", client.getFirstName());
if (client.getLastName() != null)
clientData.put("lastName", client.getLastName());
if (client.getEmailAddress() != null)
clientData.put("emailAddress", client.getEmailAddress());*/
return clientData;
}
}
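A hedged consumer sketch for the resource above, assuming a JAX-RS 2.0 client is on the classpath; the base URI is hypothetical, while the "clients/names" path matches the @Path/@GET mapping of getClientNames():
javax.ws.rs.client.Client rest = javax.ws.rs.client.ClientBuilder.newClient();
String json = rest.target("http://localhost:8080/projeta-webservice/resources") // base URI is an assumption
.path("clients/names")
.request("application/json")
.get(String.class); // returns {"clients":[{"clientId":"...","clientName":"..."}, ...]}
rest.close();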
|
package stormpot.qpool;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import stormpot.Completion;
import stormpot.Config;
import stormpot.LifecycledPool;
import stormpot.PoolException;
import stormpot.Poolable;
/**
* QueuePool is a fairly simple {@link LifecycledPool} implementation that
* basically consists of a queue of Poolable instances, and a Thread to
* allocate them.
* <p>
* Object allocation always happens in a dedicated thread, so no thread that
* calls any of the claim methods will incur the overhead of allocating
* Poolables. This should lead to reduced deviation in the time it takes a
* claim call to complete, provided the pool is not depleted.
* @author Chris Vest <mr.chrisvest@gmail.com>
*
* @param <T>
*/
public final class QueuePool<T extends Poolable> implements LifecycledPool<T> {
static final QSlot KILL_PILL = new QSlot(null);
private final BlockingQueue<QSlot<T>> live;
private final BlockingQueue<QSlot<T>> dead;
private final QAllocThread allocThread;
private volatile boolean shutdown = false;
public QueuePool(Config config) {
live = new LinkedBlockingQueue<QSlot<T>>();
dead = new LinkedBlockingQueue<QSlot<T>>();
allocThread = new QAllocThread(live, dead, config);
allocThread.start();
}
public T claim() throws PoolException, InterruptedException {
QSlot<T> slot;
do {
slot = live.take();
} while (invalid(slot));
return slot.obj;
}
private boolean invalid(QSlot<T> slot) {
if (slot == null) {
return false;
}
if (slot == KILL_PILL) {
live.offer(KILL_PILL);
throw new IllegalStateException("pool is shut down");
}
if (slot.poison != null) {
dead.offer(slot);
throw new PoolException("allocation failed", slot.poison);
}
if (slot.expired()) {
dead.offer(slot);
return true;
}
if (shutdown) {
dead.offer(slot);
throw new IllegalStateException("pool is shut down");
}
slot.claim();
return false;
}
public T claim(long timeout, TimeUnit unit) throws PoolException,
InterruptedException {
QSlot<T> slot;
do {
// TODO timeout-reset bug
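// (each retry after an invalid slot re-applies the full timeout instead of the
// time remaining, so the total wait can exceed the requested timeout)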
slot = live.poll(timeout, unit);
} while (invalid(slot));
return slot == null? null : slot.obj;
}
public Completion shutdown() {
shutdown = true;
live.offer(KILL_PILL);
allocThread.interrupt();
return new QPoolShutdownCompletion(allocThread);
}
}
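A minimal usage sketch for the pool above; MyPoolable and MyAllocator are hypothetical implementations of the stormpot Poolable and Allocator interfaces, and the Config mutator name is an assumption about this version of the API:
Config config = new Config();
config.setAllocator(new MyAllocator()); // assumed mutator; the allocator supplies Poolable instances to QAllocThread
QueuePool<MyPoolable> pool = new QueuePool<MyPoolable>(config);
MyPoolable obj = pool.claim(); // blocks until the allocator thread has filled the live queue
try {
// use obj
} finally {
obj.release(); // hands the slot back so other threads can claim it
}
pool.shutdown(); // posts the KILL_PILL and interrupts the allocator thread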
|
package water.api;
import hex.GridSearch;
import java.util.*;
import water.H2O;
import water.util.RString;
/**
*
* @author peta
*/
public class RequestQueries extends RequestArguments {
/** Override this method to change or disable the values of other arguments
* when a given argument changes.
*
* This is done for both query checking and request checking.
*/
protected void queryArgumentValueSet(Argument arg, Properties inputArgs) {
}
/** Checks the given arguments.
*
* When the first invalid argument is found, the matching error response is
* generated and returned. Otherwise returns null, or the query form when
* running in query mode.
*
* @param args
* @param type
* @return the response to send back if a problem was found, null otherwise
*/
protected final String checkArguments(Properties args, RequestType type) {
// Why the following lines duplicate lines from Request#92 - handling query?
// reset all arguments
for (Argument arg: _arguments)
arg.reset();
// return query if in query mode
if (type == RequestType.query)
return buildQuery(args,type);
// check the remaining arguments; on failure fall back to the query form
// (minimal completion of a method body truncated in this dump - it only uses
// calls already made elsewhere in this class)
for (Argument arg: _arguments) {
try {
arg.check(this, args.getProperty(arg._name,""));
queryArgumentValueSet(arg, args);
} catch (IllegalArgumentException e) {
return buildQuery(args,type);
}
}
return null;
}
/** Returns the request query form produced from the given input arguments.
*/
protected String buildQuery(Properties parms, RequestType type) {
if (parms.isEmpty())
type = RequestType.query;
RString result = new RString(_queryHtml);
result.replace("REQ_NAME", this.getClass().getSimpleName());
StringBuilder query = new StringBuilder();
query.append("<form onsubmit='return false;'>");
RString script = new RString(_queryJs);
script.replace("REQUEST_NAME", getClass().getSimpleName());
for (Argument arg: _arguments) {
try {
arg.check(RequestQueries.this, parms.getProperty(arg._name,""));
queryArgumentValueSet(arg, parms);
} catch (IllegalArgumentException e) {
// in query mode only display error for arguments present
if ((type != RequestType.query) || !parms.getProperty(arg._name,"").isEmpty())
query.append("<div class='alert alert-error'>"+e.getMessage()+"</div>");
}
if (arg._hideInQuery)
continue;
if (!arg.disabled()) {
RString x = script.restartGroup("REQUEST_ELEMENT");
x.replace("ELEMENT_NAME",arg._name);
// If some Argument has prerequisites, and those pre-reqs changed on
// this very page load then we do not assign the arg here: the values
// passed will be something valid from the PRIOR page - based on the
// old pre-req - and won't be correct. Not assigning them here means
// we'll act "as if" the field was never filled in.
if( arg._prerequisites != null ) {
StringBuilder sb = new StringBuilder("if( ");
ArrayList<RequestArguments.Argument> preqs = arg._prerequisites;
for( RequestArguments.Argument dep : preqs )
sb.append("specArg!=='").append(dep._name).append("' && ");
sb.append("true ) ");
x.replace("ELEMENT_PREQ",sb);
}
x.append();
x = script.restartGroup("ELEMENT_VALUE");
x.replace("ELEMENT_NAME",arg._name);
x.replace("BODY","function query_value_"+arg._name+"() { "+arg.jsValue()+"} ");
x.append();
}
if (arg.refreshOnChange()) {
RString x = script.restartGroup("ELEMENT_ONCHANGE");
x.replace("BODY",arg.jsRefresh("query_refresh"));
x.append();
}
RString x = script.restartGroup("ELEMENT_ADDONS");
x.replace("BODY", arg.jsAddons());
x.append();
}
for(Argument arg:_arguments){
if (arg._hideInQuery)
continue;
query.append(arg.query());
}
query.append("</form>");
result.replace("QUERY",query.toString());
result.replace("SCRIPT",script.toString());
return result.toString();
}
}
|
package com.malhartech.stram;
import com.malhartech.api.Operator.InputPort;
import com.malhartech.api.Operator.OutputPort;
import com.malhartech.api.Operator.Unifier;
import com.malhartech.api.*;
import com.malhartech.bufferserver.Server;
import com.malhartech.bufferserver.storage.DiskStorage;
import com.malhartech.bufferserver.util.Codec;
import com.malhartech.engine.Operators.PortMappingDescriptor;
import com.malhartech.engine.*;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.ContainerHeartbeat;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.ContainerHeartbeatResponse;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StramToNodeRequest;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingContainerContext;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingNodeHeartbeat;
import com.malhartech.stram.StreamingContainerUmbilicalProtocol.StreamingNodeHeartbeat.DNodeState;
import com.malhartech.stram.TupleRecorder.RecorderSink;
import com.malhartech.stream.*;
import com.malhartech.util.AttributeMap;
import com.malhartech.util.ScheduledThreadPoolExecutor;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.Map.Entry;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSError;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.log4j.LogManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The main() for streaming container processes launched by {@link com.malhartech.stram.StramAppMaster}.
*/
public class StramChild
{
private static final Logger logger = LoggerFactory.getLogger(StramChild.class);
private static final String NODE_PORT_SPLIT_SEPARATOR = "\\.";
public static final String NODE_PORT_CONCAT_SEPARATOR = ".";
private static final int SPIN_MILLIS = 20;
private final String containerId;
private final Configuration conf;
private final StreamingContainerUmbilicalProtocol umbilical;
protected final Map<Integer, Node<?>> nodes = new ConcurrentHashMap<Integer, Node<?>>();
private final Map<String, ComponentContextPair<Stream<Object>, StreamContext>> streams = new ConcurrentHashMap<String, ComponentContextPair<Stream<Object>, StreamContext>>();
protected final Map<Integer, WindowGenerator> generators = new ConcurrentHashMap<Integer, WindowGenerator>();
protected final Map<Integer, OperatorContext> activeNodes = new ConcurrentHashMap<Integer, OperatorContext>();
private final Map<Stream<?>, StreamContext> activeStreams = new ConcurrentHashMap<Stream<?>, StreamContext>();
private final Map<WindowGenerator, Object> activeGenerators = new ConcurrentHashMap<WindowGenerator, Object>();
private int heartbeatIntervalMillis = 1000;
private volatile boolean exitHeartbeatLoop = false;
private final Object heartbeatTrigger = new Object();
private String checkpointFsPath;
/**
* Map of last backup window id that is used to communicate checkpoint state back to Stram. TODO: Consider adding this to the node context instead.
*/
private final Map<Integer, Long> backupInfo = new ConcurrentHashMap<Integer, Long>();
private long firstWindowMillis;
private int windowWidthMillis;
private InetSocketAddress bufferServerAddress;
private com.malhartech.bufferserver.Server bufferServer;
private AttributeMap<DAGContext> applicationAttributes;
protected HashMap<Integer, TupleRecorder> tupleRecorders = new HashMap<Integer, TupleRecorder>();
protected StramChild(String containerId, Configuration conf, StreamingContainerUmbilicalProtocol umbilical)
{
logger.debug("instantiated StramChild {}", containerId);
this.umbilical = umbilical;
this.containerId = containerId;
this.conf = conf;
}
public void setup(StreamingContainerContext ctx)
{
this.applicationAttributes = ctx.applicationAttributes;
heartbeatIntervalMillis = ctx.applicationAttributes.attrValue(DAG.STRAM_HEARTBEAT_INTERVAL_MILLIS, 1000);
firstWindowMillis = ctx.startWindowMillis;
windowWidthMillis = ctx.applicationAttributes.attrValue(DAG.STRAM_WINDOW_SIZE_MILLIS, 500);
this.checkpointFsPath = ctx.applicationAttributes.attrValue(DAG.STRAM_CHECKPOINT_DIR, "checkpoint-dfs-path-not-configured");
try {
if (ctx.deployBufferServer) {
// start buffer server, if it was not set externally
bufferServer = new Server(0, 64 * 1024 * 1024, 8);
bufferServer.setSpoolStorage(new DiskStorage());
SocketAddress bindAddr = bufferServer.run();
logger.info("Buffer server started: {}", bindAddr);
this.bufferServerAddress = NetUtils.getConnectAddress(((InetSocketAddress)bindAddr));
}
}
catch (Exception ex) {
logger.warn("deploy request failed due to {}", ex);
}
}
public String getContainerId()
{
return this.containerId;
}
/**
* Initialize container. Establishes heartbeat connection to the master
* process through the callback address provided on the command line. Deploys
* initial modules, then enters the heartbeat loop, which will only terminate
* once container receives shutdown request from the master. On shutdown,
* after exiting heartbeat loop, deactivate all modules and terminate
* processing threads.
*
* @param args
* @throws Throwable
*/
public static void main(String[] args) throws Throwable
{
logger.info("Child starting with classpath: {}", System.getProperty("java.class.path"));
final Configuration defaultConf = new Configuration();
//defaultConf.addResource(MRJobConfig.JOB_CONF_FILE);
UserGroupInformation.setConfiguration(defaultConf);
String host = args[0];
int port = Integer.parseInt(args[1]);
final InetSocketAddress address =
NetUtils.createSocketAddrForHost(host, port);
final String childId = System.getProperty("stram.cid");
//Token<JobTokenIdentifier> jt = loadCredentials(defaultConf, address);
// Communicate with parent as actual task owner.
UserGroupInformation taskOwner =
UserGroupInformation.createRemoteUser(StramChild.class.getName());
//taskOwner.addToken(jt);
final StreamingContainerUmbilicalProtocol umbilical =
taskOwner.doAs(new PrivilegedExceptionAction<StreamingContainerUmbilicalProtocol>()
{
@Override
public StreamingContainerUmbilicalProtocol run() throws Exception
{
return RPC.getProxy(StreamingContainerUmbilicalProtocol.class,
StreamingContainerUmbilicalProtocol.versionID, address, defaultConf);
}
});
logger.debug("PID: " + System.getenv().get("JVM_PID"));
UserGroupInformation childUGI;
try {
childUGI = UserGroupInformation.createRemoteUser(System.getenv(ApplicationConstants.Environment.USER.toString()));
// Add tokens to new user so that it may execute its task correctly.
for (Token<?> token: UserGroupInformation.getCurrentUser().getTokens()) {
childUGI.addToken(token);
}
childUGI.doAs(new PrivilegedExceptionAction<Object>()
{
@Override
public Object run() throws Exception
{
StreamingContainerContext ctx = umbilical.getInitContext(childId);
StramChild stramChild = new StramChild(childId, defaultConf, umbilical);
logger.debug("Got context: " + ctx);
stramChild.setup(ctx);
try {
// main thread enters heartbeat loop
stramChild.monitorHeartbeat();
}
finally {
// teardown
stramChild.teardown();
}
return null;
}
});
}
catch (FSError e) {
logger.error("FSError from child", e);
umbilical.log(childId, e.getMessage());
}
catch (Exception exception) {
logger.warn("Exception running child : "
+ StringUtils.stringifyException(exception));
// Report back any failures, for diagnostic purposes
ByteArrayOutputStream baos = new ByteArrayOutputStream();
exception.printStackTrace(new PrintStream(baos));
umbilical.log(childId, "FATAL: " + baos.toString());
}
catch (Throwable throwable) {
logger.error("Error running child : "
+ StringUtils.stringifyException(throwable));
Throwable tCause = throwable.getCause();
String cause = tCause == null
? throwable.getMessage()
: StringUtils.stringifyException(tCause);
umbilical.log(childId, cause);
}
finally {
RPC.stopProxy(umbilical);
DefaultMetricsSystem.shutdown();
// Shutting down log4j of the child-vm...
// This assumes that on return from Task.activate()
// there is no more logging done.
LogManager.shutdown();
}
}
public synchronized void deactivate()
{
ArrayList<Thread> activeThreads = new ArrayList<Thread>();
ArrayList<Integer> activeOperators = new ArrayList<Integer>();
for (Entry<Integer, Node<?>> e: nodes.entrySet()) {
OperatorContext oc = activeNodes.get(e.getKey());
if (oc == null) {
disconnectNode(e.getKey());
}
else {
activeThreads.add(oc.getThread());
activeOperators.add(e.getKey());
e.getValue().deactivate();
}
}
try {
Iterator<Integer> iterator = activeOperators.iterator();
for (Thread t: activeThreads) {
t.join();
disconnectNode(iterator.next());
}
assert (activeNodes.isEmpty());
}
catch (InterruptedException ex) {
logger.info("Aborting wait for for operators to get deactivated as got interrupted with {}", ex);
}
for (WindowGenerator wg: activeGenerators.keySet()) {
wg.deactivate();
}
activeGenerators.clear();
for (Stream<?> stream: activeStreams.keySet()) {
stream.deactivate();
}
activeStreams.clear();
}
private void disconnectNode(int nodeid)
{
Node<?> node = nodes.get(nodeid);
disconnectWindowGenerator(nodeid, node);
Set<String> removableStreams = new HashSet<String>(); // temporary fix - find out why List does not work.
// With the logic in here, the list should not contain repeated streams, but it does and that causes a problem.
for (Entry<String, ComponentContextPair<Stream<Object>, StreamContext>> entry: streams.entrySet()) {
String indexingKey = entry.getKey();
Stream<?> stream = entry.getValue().component;
StreamContext context = entry.getValue().context;
String sourceIdentifier = context.getSourceId();
String sinkIdentifier = context.getSinkId();
logger.debug("considering stream {} against id {}", stream, indexingKey);
if (nodeid == Integer.parseInt(sourceIdentifier.split(NODE_PORT_SPLIT_SEPARATOR)[0])) {
/*
* the stream originates at the output port of one of the operators that are going to vanish.
*/
if (activeStreams.containsKey(stream)) {
logger.debug("deactivating {}", stream);
stream.deactivate();
activeStreams.remove(stream);
}
removableStreams.add(sourceIdentifier);
String[] sinkIds = sinkIdentifier.split(", ");
for (String sinkId: sinkIds) {
if (!sinkId.startsWith("tcp:
String[] nodeport = sinkId.split(NODE_PORT_SPLIT_SEPARATOR);
Node<?> n = nodes.get(Integer.parseInt(nodeport[0]));
if (n instanceof UnifierNode) {
n.connectInputPort(nodeport[1] + "(" + sourceIdentifier + ")", null);
}
else if (n != null) {
// check why null pointer exception gets thrown here during shutdown! - chetan
n.connectInputPort(nodeport[1], null);
}
}
else if (stream.isMultiSinkCapable()) {
ComponentContextPair<Stream<Object>, StreamContext> spair = streams.get(sinkId);
logger.debug("found stream {} against {}", spair == null ? null : spair.component, sinkId);
if (spair == null) {
assert (!sinkId.startsWith("tcp:
}
else {
assert (sinkId.startsWith("tcp:
if (activeStreams.containsKey(spair.component)) {
logger.debug("deactivating {} for sink {}", spair.component, sinkId);
spair.component.deactivate();
activeStreams.remove(spair.component);
}
removableStreams.add(sinkId);
}
}
}
}
else {
/**
* the stream may or may not feed into one of the operators which are being undeployed.
*/
String[] sinkIds = sinkIdentifier.split(", ");
for (int i = sinkIds.length; i-- > 0;) {
String[] nodeport = sinkIds[i].split(NODE_PORT_SPLIT_SEPARATOR);
if (Integer.toString(nodeid).equals(nodeport[0])) {
stream.setSink(sinkIds[i], null);
if (node instanceof UnifierNode) {
node.connectInputPort(nodeport[1] + "(" + sourceIdentifier + ")", null);
}
else {
node.connectInputPort(nodeport[1], null);
}
sinkIds[i] = null;
}
}
String sinkId = null;
for (int i = sinkIds.length; i-- > 0;) {
if (sinkIds[i] != null) {
if (sinkId == null) {
sinkId = sinkIds[i];
}
else {
sinkId = sinkId.concat(", ").concat(sinkIds[i]);
}
}
}
if (sinkId == null) {
if (activeStreams.containsKey(stream)) {
logger.debug("deactivating {}", stream);
stream.deactivate();
activeStreams.remove(stream);
}
removableStreams.add(indexingKey);
}
else {
// maybe we should also check if the count has changed from something to 1
// and replace the mux with a 1:1 sink. it's not necessary though.
context.setSinkId(sinkId);
}
}
}
for (String streamId: removableStreams) {
logger.debug("removing stream {}", streamId);
// need to check why control comes here twice to remove the stream which was deleted before.
// is it because of multiSinkCapableStream ?
ComponentContextPair<Stream<Object>, StreamContext> pair = streams.remove(streamId);
pair.component.teardown();
}
}
private void disconnectWindowGenerator(int nodeid, Node<?> node)
{
WindowGenerator chosen1 = generators.remove(nodeid);
if (chosen1 != null) {
chosen1.setSink(Integer.toString(nodeid).concat(NODE_PORT_CONCAT_SEPARATOR).concat(Node.INPUT), null);
node.connectInputPort(Node.INPUT, null);
int count = 0;
for (WindowGenerator wg: generators.values()) {
if (chosen1 == wg) {
count++;
}
}
if (count == 0) {
activeGenerators.remove(chosen1);
chosen1.deactivate();
chosen1.teardown();
}
}
}
private synchronized void undeploy(List<OperatorDeployInfo> nodeList)
{
logger.info("got undeploy request {}", nodeList);
/**
* make sure that all the operators which we are asked to undeploy are in this container.
*/
HashMap<Integer, Node<?>> toUndeploy = new HashMap<Integer, Node<?>>();
for (OperatorDeployInfo ndi: nodeList) {
Node<?> node = nodes.get(ndi.id);
if (node == null) {
throw new IllegalArgumentException("Node " + ndi.id + " is not hosted in this container!");
}
else if (toUndeploy.containsKey(ndi.id)) {
throw new IllegalArgumentException("Node " + ndi.id + " is requested to be undeployed more than once");
}
else {
toUndeploy.put(ndi.id, node);
}
}
// track all the ids to undeploy
// track the ones which are active
ArrayList<Thread> joinList = new ArrayList<Thread>();
ArrayList<Integer> discoList = new ArrayList<Integer>();
for (OperatorDeployInfo ndi: nodeList) {
OperatorContext oc = activeNodes.get(ndi.id);
if (oc == null) {
disconnectNode(ndi.id);
}
else {
joinList.add(oc.getThread());
discoList.add(ndi.id);
nodes.get(ndi.id).deactivate();
}
}
try {
Iterator<Integer> iterator = discoList.iterator();
for (Thread t: joinList) {
t.join();
disconnectNode(iterator.next());
}
logger.info("undeploy complete");
}
catch (InterruptedException ex) {
logger.warn("Aborted waiting for the deactivate to finish!");
}
for (OperatorDeployInfo ndi: nodeList) {
nodes.remove(ndi.id);
}
}
public void teardown()
{
deactivate();
assert (streams.isEmpty());
nodes.clear();
HashSet<WindowGenerator> gens = new HashSet<WindowGenerator>();
gens.addAll(generators.values());
generators.clear();
for (WindowGenerator wg: gens) {
wg.teardown();
}
if (bufferServer != null) {
bufferServer.shutdown();
}
gens.clear();
}
protected void triggerHeartbeat()
{
synchronized (heartbeatTrigger) {
heartbeatTrigger.notifyAll();
}
}
protected void monitorHeartbeat() throws IOException
{
umbilical.log(containerId, "[" + containerId + "] Entering heartbeat loop..");
logger.debug("Entering heartbeat loop (interval is {} ms)", this.heartbeatIntervalMillis);
while (!exitHeartbeatLoop) {
synchronized (this.heartbeatTrigger) {
try {
this.heartbeatTrigger.wait(heartbeatIntervalMillis);
}
catch (InterruptedException e1) {
logger.warn("Interrupted in heartbeat loop, exiting..");
break;
}
}
long currentTime = System.currentTimeMillis();
ContainerHeartbeat msg = new ContainerHeartbeat();
msg.setContainerId(this.containerId);
if (this.bufferServerAddress != null) {
msg.bufferServerHost = this.bufferServerAddress.getHostName();
msg.bufferServerPort = this.bufferServerAddress.getPort();
}
List<StreamingNodeHeartbeat> heartbeats = new ArrayList<StreamingNodeHeartbeat>(nodes.size());
// gather heartbeat info for all operators
for (Map.Entry<Integer, Node<?>> e: nodes.entrySet()) {
StreamingNodeHeartbeat hb = new StreamingNodeHeartbeat();
hb.setNodeId(e.getKey());
hb.setGeneratedTms(currentTime);
hb.setIntervalMs(heartbeatIntervalMillis);
if (activeNodes.containsKey(e.getKey())) {
activeNodes.get(e.getKey()).drainHeartbeatCounters(hb.getWindowStats());
hb.setState(DNodeState.ACTIVE.toString());
}
else {
hb.setState(e.getValue().isAlive() ? DNodeState.FAILED.toString() : DNodeState.IDLE.toString());
}
// propagate the backup window, if any
Long backupWindowId = backupInfo.get(e.getKey());
if (backupWindowId != null) {
hb.setLastBackupWindowId(backupWindowId);
}
heartbeats.add(hb);
}
msg.setDnodeEntries(heartbeats);
// heartbeat call and follow-up processing
//logger.debug("Sending heartbeat for {} operators.", msg.getDnodeEntries().size());
try {
ContainerHeartbeatResponse rsp = umbilical.processHeartbeat(msg);
if (rsp != null) {
processHeartbeatResponse(rsp);
// keep polling at smaller interval if work is pending
while (rsp != null && rsp.hasPendingRequests) {
logger.info("Waiting for pending request.");
synchronized (this.heartbeatTrigger) {
try {
this.heartbeatTrigger.wait(500);
}
catch (InterruptedException e1) {
logger.warn("Interrupted in heartbeat loop, exiting..");
break;
}
}
rsp = umbilical.pollRequest(this.containerId);
if (rsp != null) {
processHeartbeatResponse(rsp);
}
}
}
}
catch (Exception e) {
logger.warn("Exception received (may be during shutdown?)", e);
}
}
logger.debug("Exiting hearbeat loop");
umbilical.log(containerId, "[" + containerId + "] Exiting heartbeat loop..");
}
protected void processHeartbeatResponse(ContainerHeartbeatResponse rsp)
{
if (rsp.shutdown) {
logger.info("Received shutdown request");
this.exitHeartbeatLoop = true;
return;
}
if (rsp.undeployRequest != null) {
logger.info("Undeploy request: {}", rsp.undeployRequest);
undeploy(rsp.undeployRequest);
}
if (rsp.deployRequest != null) {
logger.info("Deploy request: {}", rsp.deployRequest);
try {
deploy(rsp.deployRequest);
}
catch (Exception e) {
logger.error("deploy request failed due to {}", e);
// report it to stram
}
}
if (rsp.nodeRequests != null) {
// extended processing per node
for (StramToNodeRequest req: rsp.nodeRequests) {
OperatorContext nc = activeNodes.get(req.getNodeId());
if (nc == null) {
logger.warn("Received request with invalid node id {} ({})", req.getNodeId(), req);
}
else {
logger.debug("Stram request: {}", req);
processStramRequest(nc, req);
}
}
}
}
/**
* Process request from stram for further communication through the protocol. Extended reporting is on a per node basis (won't occur under regular operation)
*
* @param n
* @param snr
*/
private void processStramRequest(OperatorContext context, final StramToNodeRequest snr)
{
int operatorId = snr.getNodeId();
final Node<?> node = nodes.get(operatorId);
final String name = snr.getName();
switch (snr.getRequestType()) {
case REPORT_PARTION_STATS:
logger.warn("Ignoring stram request {}", snr);
break;
case CHECKPOINT:
context.request(new OperatorContext.NodeRequest()
{
@Override
public void execute(Operator operator, int id, long windowId) throws IOException
{
new HdfsBackupAgent(StramChild.this.conf, StramChild.this.checkpointFsPath).backup(id, windowId, operator, StramUtils.getNodeSerDe(null));
// record last backup window id for heartbeat
StramChild.this.backupInfo.put(id, windowId);
node.emitCheckpoint(windowId);
if (operator instanceof CheckpointListener) {
((CheckpointListener)operator).checkpointed(windowId);
((CheckpointListener)operator).committed(snr.getRecoveryCheckpoint());
}
}
});
break;
case START_RECORDING:
logger.debug("Received start recording request for " + operatorId);
if (!tupleRecorders.containsKey(operatorId)) {
context.request(new OperatorContext.NodeRequest()
{
@Override
public void execute(Operator operator, int operatorId, long windowId) throws IOException
{
logger.debug("Executing start recording request for " + operatorId);
TupleRecorder tupleRecorder = tupleRecorders.get(operatorId);
if (tupleRecorder == null) {
tupleRecorder = new TupleRecorder();
if (name != null && !name.isEmpty()) {
tupleRecorder.setRecordingName(name);
}
String basePath = "recordings/" + operatorId + "/" + containerId + "/" + tupleRecorder.getStartTime();
tupleRecorder.setBasePath(basePath);
HashMap<String, RecorderSink> sinkMap = new HashMap<String, RecorderSink>();
PortMappingDescriptor descriptor = node.getPortMappingDescriptor();
for (Map.Entry<String, InputPort<?>> entry: descriptor.inputPorts.entrySet()) {
String streamId = getDeclaredStreamId(operatorId, entry.getKey());
if (streamId != null) {
tupleRecorder.addInputPortInfo(entry.getKey(), streamId);
sinkMap.put(entry.getKey(), tupleRecorder.newSink(entry.getKey()));
}
}
for (Map.Entry<String, OutputPort<?>> entry: descriptor.outputPorts.entrySet()) {
String streamId = getDeclaredStreamId(operatorId, entry.getKey());
if (streamId != null) {
tupleRecorder.addOutputPortInfo(entry.getKey(), streamId);
sinkMap.put(entry.getKey(), tupleRecorder.newSink(entry.getKey()));
}
}
logger.debug("Started recording to base path " + basePath);
node.addSinks(sinkMap);
tupleRecorder.setup(null);
tupleRecorders.put(operatorId, tupleRecorder);
}
}
});
}
else {
logger.error("(START_RECORDING) Operator id " + operatorId + " is already being recorded.");
}
break;
case STOP_RECORDING:
logger.debug("Received stop recording request for " + operatorId);
if (!tupleRecorders.containsKey(operatorId)) {
context.request(new OperatorContext.NodeRequest()
{
@Override
public void execute(Operator operator, int operatorId, long windowId) throws IOException
{
logger.debug("Executing stop recording request for " + operatorId);
TupleRecorder tupleRecorder = tupleRecorders.get(operatorId);
if (tupleRecorder != null) {
node.removeSinks(tupleRecorder.getSinkMap());
tupleRecorder.teardown();
logger.debug("Stopped recording for operator id " + operatorId);
}
}
});
}
else {
logger.error("(STOP_RECORDING) Operator id " + operatorId + " is not being recorded.");
}
break;
default:
logger.error("Unknown request from stram {}", snr);
}
}
private synchronized void deploy(List<OperatorDeployInfo> nodeList) throws Exception
{
/*
* A little bit of up front sanity check would reduce the percentage of deploy failures later.
*/
for (OperatorDeployInfo ndi: nodeList) {
if (nodes.containsKey(ndi.id)) {
throw new IllegalStateException("Node with id: " + ndi.id + " already present in the container");
}
}
deployNodes(nodeList);
HashMap<String, ArrayList<String>> groupedInputStreams = new HashMap<String, ArrayList<String>>();
for (OperatorDeployInfo ndi: nodeList) {
groupInputStreams(groupedInputStreams, ndi);
}
deployOutputStreams(nodeList, groupedInputStreams);
deployInputStreams(nodeList);
activate(nodeList);
}
private void massageUnifierDeployInfo(OperatorDeployInfo odi)
{
for (OperatorDeployInfo.InputDeployInfo idi: odi.inputs) {
idi.portName += "(" + idi.sourceNodeId + NODE_PORT_CONCAT_SEPARATOR + idi.sourcePortName + ")";
}
}
@SuppressWarnings("unchecked")
private void deployNodes(List<OperatorDeployInfo> nodeList) throws Exception
{
OperatorCodec operatorSerDe = StramUtils.getNodeSerDe(null);
BackupAgent backupAgent = new HdfsBackupAgent(this.conf, this.checkpointFsPath);
for (OperatorDeployInfo ndi: nodeList) {
try {
final Object foreignObject;
if (ndi.checkpointWindowId > 0) {
logger.debug("Restoring node {} to checkpoint {}", ndi.id, Codec.getStringWindowId(ndi.checkpointWindowId));
foreignObject = backupAgent.restore(ndi.id, ndi.checkpointWindowId, operatorSerDe);
}
else {
foreignObject = operatorSerDe.read(new ByteArrayInputStream(ndi.serializedNode));
}
String nodeid = Integer.toString(ndi.id).concat("/").concat(ndi.declaredId).concat(":").concat(foreignObject.getClass().getSimpleName());
if (foreignObject instanceof InputOperator && ndi.type == OperatorDeployInfo.OperatorType.INPUT) {
nodes.put(ndi.id, new InputNode(nodeid, (InputOperator)foreignObject));
}
else if (foreignObject instanceof Unifier && ndi.type == OperatorDeployInfo.OperatorType.UNIFIER) {
nodes.put(ndi.id, new UnifierNode(nodeid, (Unifier<Object>)foreignObject));
massageUnifierDeployInfo(ndi);
}
else {
nodes.put(ndi.id, new GenericNode(nodeid, (Operator)foreignObject));
}
}
catch (Exception e) {
logger.error(e.getLocalizedMessage());
throw e;
}
}
}
private void deployOutputStreams(List<OperatorDeployInfo> nodeList, HashMap<String, ArrayList<String>> groupedInputStreams) throws Exception
{
/*
* We proceed to deploy all the output streams. At the end of this block, our streams collection
* will contain all the streams which originate at the output port of the operators. The streams
* are generally mapped against the "nodename.portname" string. But the BufferOutputStreams which
* share the output port with other inline streams are mapped against the Buffer Server port to
* avoid collision and at the same time keep track of these buffer streams.
*/
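/*
* Illustrative key shapes (hypothetical values, not taken from the source): a
* purely local output would be stored against "2.outputPort" (operator id +
* port name), while a stream feeding the Buffer Server is stored against
* "tcp://host:8080/2.outputPort", so inline and remote sinks never collide.
*/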
for (OperatorDeployInfo ndi: nodeList) {
Node<?> node = nodes.get(ndi.id);
for (OperatorDeployInfo.OutputDeployInfo nodi: ndi.outputs) {
String sourceIdentifier = Integer.toString(ndi.id).concat(NODE_PORT_CONCAT_SEPARATOR).concat(nodi.portName);
String sinkIdentifier;
StreamContext context = new StreamContext(nodi.declaredStreamId);
Stream<Object> stream;
ArrayList<String> collection = groupedInputStreams.get(sourceIdentifier);
if (collection == null) {
/*
* Let's create a stream to carry the data to the Buffer Server.
* Nobody in this container is interested in the output placed on this stream, but
* this stream exists. That means someone outside of this container must be interested.
*/
assert (nodi.isInline() == false);
context.setBufferServerAddress(InetSocketAddress.createUnresolved(nodi.bufferServerHost, nodi.bufferServerPort));
stream = new BufferServerOutputStream(StramUtils.getSerdeInstance(nodi.serDeClassName));
stream.setup(context);
logger.debug("deployed a buffer stream {}", stream);
sinkIdentifier = "tcp://".concat(nodi.bufferServerHost).concat(":").concat(String.valueOf(nodi.bufferServerPort)).concat("/").concat(sourceIdentifier);
}
else if (collection.size() == 1) {
if (nodi.isInline()) {
/**
* Let's create an inline stream to carry data from output port to input port of some other node.
* There is only one node interested in output placed on this stream, and that node is in this container.
*/
stream = new InlineStream();
stream.setup(context);
sinkIdentifier = null;
}
else {
/**
* Let's create 2 streams: 1 inline and 1 going to the Buffer Server.
* Although there is a node in this container interested in output placed on this stream,
* there seems to be at least one more interested party located in a container other than this one.
*/
sinkIdentifier = "tcp://".concat(nodi.bufferServerHost).concat(":").concat(String.valueOf(nodi.bufferServerPort)).concat("/").concat(sourceIdentifier);
StreamContext bssc = new StreamContext(nodi.declaredStreamId);
bssc.setSourceId(sourceIdentifier);
bssc.setSinkId(sinkIdentifier);
bssc.setStartingWindowId(ndi.checkpointWindowId > 0 ? ndi.checkpointWindowId + 1 : 0); // TODO: next window after checkpoint
bssc.setBufferServerAddress(InetSocketAddress.createUnresolved(nodi.bufferServerHost, nodi.bufferServerPort));
BufferServerOutputStream bsos = new BufferServerOutputStream(StramUtils.getSerdeInstance(nodi.serDeClassName));
bsos.setup(bssc);
logger.debug("deployed a buffer stream {}", bsos);
streams.put(sinkIdentifier, new ComponentContextPair<Stream<Object>, StreamContext>(bsos, bssc));
// should we create inline stream here or wait for the input deployments to create the inline streams?
stream = new MuxStream();
stream.setup(context);
stream.setSink(sinkIdentifier, bsos);
logger.debug("stored stream {} against key {}", bsos, sinkIdentifier);
}
}
else {
/**
* Since there are multiple parties interested in this node itself, we are going to come
* to this block multiple times. The actions we take subsequent times are going to be different
* than the first time. We create the MuxStream only the first time.
*/
ComponentContextPair<Stream<Object>, StreamContext> pair = streams.get(sourceIdentifier);
if (pair == null) {
/**
* Let's multiplex the output placed on this stream.
* This container itself contains more than one interested party.
*/
stream = new MuxStream();
stream.setup(context);
}
else {
stream = pair.component;
}
if (nodi.isInline()) {
sinkIdentifier = null;
}
else {
sinkIdentifier = "tcp://".concat(nodi.bufferServerHost).concat(":").concat(String.valueOf(nodi.bufferServerPort)).concat("/").concat(sourceIdentifier);
StreamContext bssc = new StreamContext(nodi.declaredStreamId);
bssc.setSourceId(sourceIdentifier);
bssc.setSinkId(sinkIdentifier);
bssc.setStartingWindowId(ndi.checkpointWindowId > 0 ? ndi.checkpointWindowId + 1 : 0); // TODO: next window after checkpoint
bssc.setBufferServerAddress(InetSocketAddress.createUnresolved(nodi.bufferServerHost, nodi.bufferServerPort));
BufferServerOutputStream bsos = new BufferServerOutputStream(StramUtils.getSerdeInstance(nodi.serDeClassName));
bsos.setup(bssc);
logger.debug("deployed a buffer stream {}", bsos);
streams.put(sinkIdentifier, new ComponentContextPair<Stream<Object>, StreamContext>(bsos, bssc));
logger.debug("stored stream {} against key {}", bsos, sinkIdentifier);
stream.setup(context);
stream.setSink(sinkIdentifier, bsos);
}
}
if (!streams.containsKey(sourceIdentifier)) {
node.connectOutputPort(nodi.portName, stream);
context.setSourceId(sourceIdentifier);
context.setSinkId(sinkIdentifier);
context.setStartingWindowId(ndi.checkpointWindowId > 0 ? ndi.checkpointWindowId + 1 : 0); // TODO: next window after checkpoint
streams.put(sourceIdentifier, new ComponentContextPair<Stream<Object>, StreamContext>(stream, context));
logger.debug("stored stream {} against key {}", stream, sourceIdentifier);
}
}
}
}
/**
* If the port is connected, return the declared stream Id.
*
* @param operatorId id of the operator to which the port belongs.
* @param portname name of port to which the stream is connected.
* @return Stream Id if connected, null otherwise.
*/
public final String getDeclaredStreamId(int operatorId, String portname)
{
String identifier = String.valueOf(operatorId).concat(NODE_PORT_CONCAT_SEPARATOR).concat(portname);
ComponentContextPair<Stream<Object>, StreamContext> spair = streams.get(identifier);
if (spair == null) {
return null;
}
return spair.context.getId();
}
private void deployInputStreams(List<OperatorDeployInfo> nodeList) throws Exception
{
// collect any input operators along with their smallest window id,
// those are subsequently used to setup the window generator
ArrayList<OperatorDeployInfo> inputNodes = new ArrayList<OperatorDeployInfo>();
long smallestCheckpointedWindowId = Long.MAX_VALUE;
/*
* Hook up all the downstream ports. There are 2 places where we deal with more than 1
* downstream ports. The first one follows immediately for WindowGenerator. The second
* case is when source for the input port of some node in this container is in another
* container. So we need to create the stream. We need to track this stream along with
* other streams,and many such streams may exist, we hash them against buffer server
* info as we did for outputs but throw in the sinkid in the mix as well.
*/
for (OperatorDeployInfo ndi: nodeList) {
if (ndi.inputs == null || ndi.inputs.isEmpty()) {
/**
* This has to be InputNode, so let's hook the WindowGenerator to it.
* A node which does not take any input cannot exist in the DAG since it would be completely
* unaware of the windows. So for that reason, AbstractInputNode allows Component.INPUT port.
*/
inputNodes.add(ndi);
/*
* When we activate the window Generator, we plan to activate it only from required windowId.
*/
if (ndi.checkpointWindowId < smallestCheckpointedWindowId) {
smallestCheckpointedWindowId = ndi.checkpointWindowId;
}
}
else {
Node<?> node = nodes.get(ndi.id);
for (OperatorDeployInfo.InputDeployInfo nidi: ndi.inputs) {
String sourceIdentifier = Integer.toString(nidi.sourceNodeId).concat(NODE_PORT_CONCAT_SEPARATOR).concat(nidi.sourcePortName);
String sinkIdentifier = Integer.toString(ndi.id).concat(NODE_PORT_CONCAT_SEPARATOR).concat(nidi.portName);
ComponentContextPair<Stream<Object>, StreamContext> pair = streams.get(sourceIdentifier);
if (pair == null) {
/*
* We connect to the buffer server for the input on this port.
* We have already placed all the output streams for all the operators in this container.
* Yet, there is no stream which can source this port so it has to come from the buffer
* server, so let's make a connection to it.
*/
assert (nidi.isInline() == false);
StreamContext context = new StreamContext(nidi.declaredStreamId);
context.setPartitions(nidi.partitionMask, nidi.partitionKeys);
context.setSourceId(sourceIdentifier);
context.setSinkId(sinkIdentifier);
context.setStartingWindowId(ndi.checkpointWindowId > 0 ? ndi.checkpointWindowId + 1 : 0); // TODO: next window after checkpoint
context.setBufferServerAddress(InetSocketAddress.createUnresolved(nidi.bufferServerHost, nidi.bufferServerPort));
@SuppressWarnings("unchecked")
Stream<Object> stream = (Stream)new BufferServerInputStream(StramUtils.getSerdeInstance(nidi.serDeClassName));
stream.setup(context);
logger.debug("deployed buffer input stream {}", stream);
Sink<Object> s = node.connectInputPort(nidi.portName, stream);
stream.setSink(sinkIdentifier,
ndi.checkpointWindowId > 0 ? new WindowIdActivatedSink<Object>(stream, sinkIdentifier, s, ndi.checkpointWindowId) : s);
streams.put(sinkIdentifier,
new ComponentContextPair<Stream<Object>, StreamContext>(stream, context));
logger.debug("put input stream {} against key {}", stream, sinkIdentifier);
}
else {
String streamSinkId = pair.context.getSinkId();
Sink<Object> s;
if (streamSinkId == null) {
s = node.connectInputPort(nidi.portName, pair.component);
pair.context.setSinkId(sinkIdentifier);
}
else if (pair.component.isMultiSinkCapable()) {
s = node.connectInputPort(nidi.portName, pair.component);
pair.context.setSinkId(streamSinkId.concat(", ").concat(sinkIdentifier));
}
else {
/**
* we are trying to tap into existing InlineStream or BufferServerOutputStream.
* Since none of those streams are MultiSinkCapable, we need to replace them with Mux.
*/
StreamContext context = new StreamContext(nidi.declaredStreamId);
context.setSourceId(sourceIdentifier);
context.setSinkId(streamSinkId.concat(", ").concat(sinkIdentifier));
context.setStartingWindowId(ndi.checkpointWindowId > 0 ? ndi.checkpointWindowId + 1 : 0); // TODO: next window after checkpoint
Stream<Object> stream = new MuxStream();
stream.setup(context);
logger.debug("deployed input mux stream {}", stream);
s = node.connectInputPort(nidi.portName, stream);
streams.put(sourceIdentifier, new ComponentContextPair<Stream<Object>, StreamContext>(stream, context));
logger.debug("stored input stream {} against key {}", stream, sourceIdentifier);
/**
* Lets wire the MuxStream to upstream node.
*/
String[] nodeport = sourceIdentifier.split(NODE_PORT_SPLIT_SEPARATOR);
Node<?> upstreamNode = nodes.get(Integer.parseInt(nodeport[0]));
upstreamNode.connectOutputPort(nodeport[1], stream);
Sink<Object> existingSink;
if (pair.component instanceof InlineStream) {
String[] np = streamSinkId.split(NODE_PORT_SPLIT_SEPARATOR);
Node<?> anotherNode = nodes.get(Integer.parseInt(np[0]));
existingSink = anotherNode.connectInputPort(np[1], stream);
/*
* we do not need to do this, but it looks bad if we leave it in limbo.
*/
pair.component.deactivate();
pair.component.teardown();
}
else {
existingSink = pair.component;
/*
* we got this stream since it was mapped against sourceId, but since
* we took that place for MuxStream, we give this a new place of its own.
*/
streams.put(pair.context.getSinkId(), pair);
logger.debug("relocated stream {} against key {}", pair.context.getSinkId());
}
stream.setSink(streamSinkId, existingSink);
}
if (nidi.partitionKeys == null || nidi.partitionKeys.isEmpty()) {
logger.debug("got simple inline stream from {} to {} - {}", new Object[] {sourceIdentifier, sinkIdentifier, nidi});
pair.component.setSink(sinkIdentifier,
ndi.checkpointWindowId > 0 ? new WindowIdActivatedSink<Object>(pair.component, sinkIdentifier, s, ndi.checkpointWindowId) : s);
}
else {
/*
* generally speaking we do not have partitions on inline streams, so control should not
* come here. but if it does, we are ready to handle it using the partition-aware sinks.
*/
logger.debug("got partitions on the inline stream from {} to {} - {}", new Object[] {sourceIdentifier, sinkIdentifier, nidi});
PartitionAwareSink<Object> pas = new PartitionAwareSink<Object>(StramUtils.getSerdeInstance(nidi.serDeClassName), nidi.partitionKeys, nidi.partitionMask, s);
pair.component.setSink(sinkIdentifier,
ndi.checkpointWindowId > 0 ? new WindowIdActivatedSink<Object>(pair.component, sinkIdentifier, pas, ndi.checkpointWindowId) : pas);
}
}
}
}
}
if (!inputNodes.isEmpty()) {
WindowGenerator windowGenerator = setupWindowGenerator(smallestCheckpointedWindowId);
for (OperatorDeployInfo ndi: inputNodes) {
generators.put(ndi.id, windowGenerator);
Node<?> node = nodes.get(ndi.id);
Sink<Object> s = node.connectInputPort(Node.INPUT, windowGenerator);
windowGenerator.setSink(Integer.toString(ndi.id).concat(NODE_PORT_CONCAT_SEPARATOR).concat(Node.INPUT),
ndi.checkpointWindowId > 0 ? new WindowIdActivatedSink<Object>(windowGenerator, Integer.toString(ndi.id).concat(NODE_PORT_CONCAT_SEPARATOR).concat(Node.INPUT), s, ndi.checkpointWindowId) : s);
}
}
}
/**
* Create the window generator for the given start window id.
* This is a hook for tests to control the window generation.
*
* @param smallestWindowId
* @return WindowGenerator
*/
protected WindowGenerator setupWindowGenerator(long smallestWindowId)
{
WindowGenerator windowGenerator = new WindowGenerator(new ScheduledThreadPoolExecutor(1, "WindowGenerator"));
/**
* let's make sure that we send the same window Ids with the same reset windows.
*/
windowGenerator.setResetWindow(firstWindowMillis);
long millisAtFirstWindow = (smallestWindowId >> 32) * 1000 + windowWidthMillis * (smallestWindowId & WindowGenerator.MAX_WINDOW_ID);
windowGenerator.setFirstWindow(millisAtFirstWindow > firstWindowMillis ? millisAtFirstWindow : firstWindowMillis);
windowGenerator.setWindowWidth(windowWidthMillis);
return windowGenerator;
}
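/*
* Worked example of the arithmetic above (a sketch assuming the usual encoding of
* epoch seconds in the high 32 bits and a window sequence number in the low bits):
* with smallestWindowId = (1357000000L << 32) | 3 and windowWidthMillis = 500,
* millisAtFirstWindow = 1357000000 * 1000 + 500 * 3 = 1357000001500, i.e. the
* generator resumes three windows after that epoch second.
*/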
@SuppressWarnings({"SleepWhileInLoop", "SleepWhileHoldingLock"})
public synchronized void activate(List<OperatorDeployInfo> nodeList)
{
for (ComponentContextPair<Stream<Object>, StreamContext> pair: streams.values()) {
if (!(pair.component instanceof SocketInputStream || activeStreams.containsKey(pair.component))) {
activeStreams.put(pair.component, pair.context);
pair.component.activate(pair.context);
}
}
final AtomicInteger activatedNodeCount = new AtomicInteger(activeNodes.size());
for (final OperatorDeployInfo ndi: nodeList) {
final Node<?> node = nodes.get(ndi.id);
assert (!activeNodes.containsKey(ndi.id));
new Thread(node.id)
{
@Override
public void run()
{
try {
OperatorContext context = new OperatorContext(new Integer(ndi.id), this, ndi.contextAttributes, applicationAttributes);
node.getOperator().setup(context);
activeNodes.put(ndi.id, context);
activatedNodeCount.incrementAndGet();
logger.info("activating {} in container {}", node, containerId);
node.activate(context);
}
catch (Throwable ex) {
logger.error("Node stopped abnormally because of exception", ex);
}
finally {
activeNodes.remove(ndi.id);
node.getOperator().teardown();
logger.info("deactivated {}", node.id);
}
}
}.start();
}
/**
* we need to make sure that before any of the operators gets the first message, it is activated.
*/
try {
do {
Thread.sleep(SPIN_MILLIS);
}
while (activatedNodeCount.get() < nodes.size());
}
catch (InterruptedException ex) {
logger.debug(ex.getLocalizedMessage());
}
for (ComponentContextPair<Stream<Object>, StreamContext> pair: streams.values()) {
if (pair.component instanceof SocketInputStream && !activeStreams.containsKey(pair.component)) {
activeStreams.put(pair.component, pair.context);
pair.component.activate(pair.context);
}
}
for (WindowGenerator wg: generators.values()) {
if (!activeGenerators.containsKey(wg)) {
activeGenerators.put(wg, generators);
wg.activate(null);
}
}
}
private void groupInputStreams(HashMap<String, ArrayList<String>> groupedInputStreams, OperatorDeployInfo ndi)
{
for (OperatorDeployInfo.InputDeployInfo nidi: ndi.inputs) {
String source = Integer.toString(nidi.sourceNodeId).concat(NODE_PORT_CONCAT_SEPARATOR).concat(nidi.sourcePortName);
/*
* if we do not want to combine multiple streams with different partitions from the
* same upstream node, we could also use the partition to group the streams together.
* This logic comes with the danger that the performance of the group which shares
* the same stream is bounded on the higher side by the performance of the lowest
* performer upstream port. Maybe combining the streams is not such a good thing,
* but if we expose this as an option to the user, let's see what they end up
* choosing most.
*/
ArrayList<String> collection = groupedInputStreams.get(source);
if (collection == null) {
collection = new ArrayList<String>();
groupedInputStreams.put(source, collection);
}
collection.add(Integer.toString(ndi.id).concat(NODE_PORT_CONCAT_SEPARATOR).concat(nidi.portName));
}
}
}
|
package demo;
import com.jme3.app.Application;
import com.jme3.app.state.BaseAppState;
import com.simsilica.lemur.*;
import com.simsilica.lemur.component.SpringGridLayout;
import com.simsilica.lemur.event.PopupState;
import com.simsilica.lemur.style.ElementId;
import com.simsilica.lemur.text.DocumentModel;
/**
* A demo of a TextField that allows direct entry as well as providing
* some buttons for manipulating the DocumentModel separately.
*
* @author Paul Speed
*/
public class TextEntryDemoState extends BaseAppState {
private Container window;
/**
* A command we'll pass to the label pop-up to let
* us know when the user clicks away.
*/
private CloseCommand closeCommand = new CloseCommand();
private TextField textField;
private DocumentModel document;
public TextEntryDemoState() {
}
@Override
protected void initialize( Application app ) {
}
@Override
protected void cleanup( Application app ) {
}
@Override
protected void onEnable() {
// We'll wrap the text in a window to make sure the layout is working
window = new Container();
window.addChild(new Label("Word Wrapped Text", new ElementId("window.title.label")));
// Create a multiline text field with our document model
textField = window.addChild(new TextField("Initial text."));
textField.setSingleLine(false);
document = textField.getDocumentModel();
// Setup some preferred sizing since this will be the primary
// element in our GUI
textField.setPreferredWidth(500);
textField.setPreferredLineCount(10);
// Add some actions that will manipulate the document model independently
// of the text field
Container buttons = window.addChild(new Container(new SpringGridLayout(Axis.X, Axis.Y)));
buttons.addChild(new ActionButton(new CallMethodAction(this, "home")));
buttons.addChild(new ActionButton(new CallMethodAction(this, "end")));
buttons.addChild(new ActionButton(new CallMethodAction(this, "forward")));
buttons.addChild(new ActionButton(new CallMethodAction(this, "back")));
buttons.addChild(new ActionButton(new CallMethodAction(this, "insert")));
buttons.addChild(new ActionButton(new CallMethodAction(this, "delete")));
// Add a close button to both show that the layout is working and
// also because it's better UX... even if the popup will close if
// you click outside of it.
//window.addChild(new ActionButton(new CallMethodAction("Close",
// window, "removeFromParent")));
// Creating a modified close button to more easily test that we really
// do lose focus and don't keep accepting 'space' to click this button.
window.addChild(new ActionButton(new CallMethodAction(this, "close")));
// Position the window and pop it up
window.setLocalTranslation(400, 400, 100);
getState(PopupState.class).showPopup(window, closeCommand);
}
@Override
protected void onDisable() {
window.removeFromParent();
}
protected void home() {
document.home(false);
}
protected void end() {
document.end(false);
}
protected void forward() {
document.right();
}
protected void back() {
document.left();
}
protected void insert() {
document.insert('a');
document.insert('d');
document.insert('d');
}
protected void delete() {
document.delete();
}
/**
* Added this to test the bug where elements removed from the
* scene graph would still retain focus... thus their focus actions
* like 'space' to activate buttons would still be active.
*/
protected void close() {
System.out.println("close");
getState(MainMenuState.class).closeChild(this);
}
private class CloseCommand implements Command<Object> {
public void execute( Object src ) {
getState(MainMenuState.class).closeChild(TextEntryDemoState.this);
}
}
}
|
package xquery.xquery3;
import org.exist.test.runner.XSuite;
import org.junit.runner.RunWith;
@RunWith(XSuite.class)
@XSuite.XSuiteFiles({
"src/test/xquery/xquery3"
})
public class XQuery3Tests {
}
|
package org.yamcs.web.rest.archive;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yamcs.YamcsServer;
import org.yamcs.api.EventProducer;
import org.yamcs.api.EventProducerFactory;
import org.yamcs.api.MediaType;
import org.yamcs.archive.EventRecorder;
import org.yamcs.protobuf.Archive.EventSourceInfo;
import org.yamcs.protobuf.Rest.CreateEventRequest;
import org.yamcs.protobuf.Rest.ListEventsResponse;
import org.yamcs.protobuf.Yamcs.Event;
import org.yamcs.protobuf.Yamcs.Event.EventSeverity;
import org.yamcs.security.SystemPrivilege;
import org.yamcs.utils.TimeEncoding;
import org.yamcs.web.BadRequestException;
import org.yamcs.web.HttpException;
import org.yamcs.web.HttpServer;
import org.yamcs.web.InternalServerErrorException;
import org.yamcs.web.rest.RestHandler;
import org.yamcs.web.rest.RestRequest;
import org.yamcs.web.rest.RestRequest.IntervalResult;
import org.yamcs.web.rest.RestStreamSubscriber;
import org.yamcs.web.rest.RestStreams;
import org.yamcs.web.rest.Route;
import org.yamcs.web.rest.SqlBuilder;
import org.yamcs.yarch.Stream;
import org.yamcs.yarch.TableDefinition;
import org.yamcs.yarch.Tuple;
import org.yamcs.yarch.YarchDatabase;
import org.yamcs.yarch.YarchDatabaseInstance;
import com.csvreader.CsvWriter;
import com.google.common.collect.BiMap;
import com.google.protobuf.ExtensionRegistry;
import com.google.protobuf.InvalidProtocolBufferException;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufOutputStream;
public class ArchiveEventRestHandler extends RestHandler {
private static final Logger log = LoggerFactory.getLogger(ArchiveEventRestHandler.class);
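    // One EventProducer is lazily created and cached per Yamcs instance; the sequence number below is
    // assigned to events created via the POST /events2 endpoint.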
private ConcurrentMap<String, EventProducer> eventProducerMap = new ConcurrentHashMap<>();
private AtomicInteger eventSequenceNumber = new AtomicInteger();
private ExtensionRegistry gpbExtensionRegistry;
@Route(path = "/api/archive/:instance/events", method = "GET")
public void listEvents(RestRequest req) throws HttpException {
String instance = verifyInstance(req, req.getRouteParam("instance"));
verifyEventArchiveSupport(instance);
verifyAuthorization(req.getAuthToken(), SystemPrivilege.MayReadEvents);
long pos = req.getQueryParameterAsLong("pos", 0);
int limit = req.getQueryParameterAsInt("limit", 100);
String severity = req.getQueryParameter("severity", "INFO").toUpperCase();
Set<String> sourceSet = new HashSet<>();
for (String names : req.getQueryParameterList("source", Collections.emptyList())) {
for (String name : names.split(",")) {
sourceSet.add(name);
}
}
SqlBuilder sqlb = new SqlBuilder(EventRecorder.TABLE_NAME);
IntervalResult ir = req.scanForInterval();
if (ir.hasInterval()) {
sqlb.where(ir.asSqlCondition("gentime"));
}
if (!sourceSet.isEmpty()) {
sqlb.whereColIn("source", sourceSet);
}
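        // The requested severity acts as a minimum level: each case below matches that severity and
        // everything more severe; INFO applies no filter at all.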
switch (severity) {
case "INFO":
break;
case "WATCH":
sqlb.where("body.severity != 'INFO'");
break;
case "WARNING":
sqlb.whereColIn("body.severity", Arrays.asList("WARNING", "DISTRESS", "CRITICAL", "SEVERE", "ERROR"));
break;
case "DISTRESS":
sqlb.whereColIn("body.severity", Arrays.asList("DISTRESS", "CRITICAL", "SEVERE", "ERROR"));
break;
case "CRITICAL":
sqlb.whereColIn("body.severity", Arrays.asList("CRITICAL", "SEVERE", "ERROR"));
break;
case "SEVERE":
sqlb.whereColIn("body.severity", Arrays.asList("SEVERE", "ERROR"));
break;
default:
            sqlb.whereColIn("body.severity", Arrays.asList(severity));
}
if (req.hasQueryParameter("q")) {
sqlb.where("body.message like ?", "%" + req.getQueryParameter("q") + "%");
}
sqlb.descend(req.asksDescending(true));
String sql = sqlb.toString();
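        // Stream the results either as tab-separated CSV or as a ListEventsResponse protobuf,
        // depending on the media type requested by the client.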
if (req.asksFor(MediaType.CSV)) {
ByteBuf buf = req.getChannelHandlerContext().alloc().buffer();
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new ByteBufOutputStream(buf)));
CsvWriter w = new CsvWriter(bw, '\t');
try {
w.writeRecord(ArchiveHelper.EVENT_CSV_HEADER);
} catch (IOException e) {
throw new InternalServerErrorException(e);
}
RestStreams.stream(instance, sql, sqlb.getQueryArguments(), new RestStreamSubscriber(pos, limit) {
@Override
public void processTuple(Stream stream, Tuple tuple) {
try {
w.writeRecord(ArchiveHelper.tupleToCSVEvent(tuple));
} catch (IOException e) {
// TODO maybe support passing up as rest exception using custom listeners
log.error("Could not write csv record ", e);
}
}
@Override
public void streamClosed(Stream stream) {
w.close();
completeOK(req, MediaType.CSV, buf);
}
});
} else {
ListEventsResponse.Builder responseb = ListEventsResponse.newBuilder();
RestStreams.stream(instance, sql, sqlb.getQueryArguments(), new RestStreamSubscriber(pos, limit) {
@Override
public void processTuple(Stream stream, Tuple tuple) {
try {
Event incoming = (Event) tuple.getColumn("body");
Event event = Event.parseFrom(incoming.toByteArray(), getExtensionRegistry());
Event.Builder eventb = Event.newBuilder(event);
eventb.setGenerationTimeUTC(TimeEncoding.toString(eventb.getGenerationTime()));
eventb.setReceptionTimeUTC(TimeEncoding.toString(eventb.getReceptionTime()));
responseb.addEvent(eventb.build());
} catch (InvalidProtocolBufferException e) {
log.error("Invalid GPB message", e);
}
}
@Override
public void streamClosed(Stream stream) {
completeOK(req, responseb.build());
}
});
}
}
    @Deprecated // To be removed once all official clients use the postEvent2 logic
@Route(path = "/api/archive/:instance/events", method = "POST")
public void postEvent(RestRequest req) throws HttpException {
log.warn("Deprecated use of legacy API. "
+ "Use new API at /api/archive/:instance/events2 instead of /api/archive/:instance/events");
verifyAuthorization(req.getAuthToken(), SystemPrivilege.MayWriteEvents);
// get event from request
String instance = verifyInstance(req, req.getRouteParam("instance"));
Event event = req.bodyAsMessage(Event.newBuilder()).build();
// get event producer for this instance
EventProducer eventProducer = eventProducerMap.computeIfAbsent(instance, x -> {
return EventProducerFactory.getEventProducer(x);
});
// update event reception time
event = event.toBuilder().setReceptionTime(YamcsServer.getTimeService(instance).getMissionTime()).build();
// send event
log.debug("Adding event from REST API: {}", event.toString());
eventProducer.sendEvent(event);
completeOK(req);
}
// TODO rename the path to /api/archive/:instance/events once all official clients are migrated to this new API.
@Route(path = "/api/archive/:instance/events2", method = "POST")
public void postEvent2(RestRequest req) throws HttpException {
verifyAuthorization(req.getAuthToken(), SystemPrivilege.MayWriteEvents);
String instance = verifyInstance(req, req.getRouteParam("instance"));
CreateEventRequest request = req.bodyAsMessage(CreateEventRequest.newBuilder()).build();
if (!request.hasMessage()) {
throw new BadRequestException("Message is required");
}
Event.Builder eventb = Event.newBuilder();
eventb.setSource("User");
eventb.setCreatedBy(req.getUsername());
eventb.setSeqNumber(eventSequenceNumber.getAndIncrement());
eventb.setMessage(request.getMessage());
if (request.hasType()) {
eventb.setType(request.getType());
}
long missionTime = YamcsServer.getTimeService(instance).getMissionTime();
if (request.hasTime()) {
long eventTime = TimeEncoding.parse(request.getTime());
eventb.setGenerationTime(eventTime);
eventb.setReceptionTime(missionTime);
} else {
eventb.setGenerationTime(missionTime);
eventb.setReceptionTime(missionTime);
}
if (request.hasSeverity()) {
            EventSeverity severity;
            try {
                severity = EventSeverity.valueOf(request.getSeverity().toUpperCase());
            } catch (IllegalArgumentException e) {
                // valueOf throws for unknown names rather than returning null
                throw new BadRequestException("Unsupported severity: " + request.getSeverity());
            }
            eventb.setSeverity(severity);
} else {
eventb.setSeverity(EventSeverity.INFO);
}
EventProducer eventProducer = eventProducerMap.computeIfAbsent(instance, x -> {
return EventProducerFactory.getEventProducer(x);
});
// Distribute event (without augmented fields, or they'll get stored)
Event event = eventb.build();
log.debug("Adding event: {}", event.toString());
eventProducer.sendEvent(event);
// Send back the (augmented) event in response
eventb = Event.newBuilder(event);
eventb.setGenerationTimeUTC(TimeEncoding.toString(eventb.getGenerationTime()));
eventb.setReceptionTimeUTC(TimeEncoding.toString(eventb.getReceptionTime()));
completeOK(req, eventb.build());
}
/**
     * Shows the distinct sources that occur in the events table. Theoretically the user could also retrieve this
     * information via the table-related API, but then users without the MayReadTables privilege would not be able
     * to call it.
*/
@Route(path = "/api/archive/:instance/events/sources", method = "GET")
public void listSources(RestRequest req) throws HttpException {
String instance = verifyInstance(req, req.getRouteParam("instance"));
verifyEventArchiveSupport(instance);
verifyAuthorization(req.getAuthToken(), SystemPrivilege.MayReadEvents);
YarchDatabaseInstance ydb = YarchDatabase.getInstance(instance);
EventSourceInfo.Builder responseb = EventSourceInfo.newBuilder();
TableDefinition tableDefinition = ydb.getTable(EventRecorder.TABLE_NAME);
BiMap<String, Short> enumValues = tableDefinition.getEnumValues("source");
if (enumValues != null) {
List<String> unsortedSources = new ArrayList<>();
for (Entry<String, Short> entry : enumValues.entrySet()) {
unsortedSources.add(entry.getKey());
}
Collections.sort(unsortedSources);
responseb.addAllSource(unsortedSources);
}
completeOK(req, responseb.build());
}
/**
* Checks if events are supported for the specified instance. This will succeed in two cases:
* <ol>
* <li>EventRecorder is currently enabled
* <li>EventRecorder has been enabled in the past, but may not be any longer
* </ol>
*/
public static void verifyEventArchiveSupport(String instance) throws BadRequestException {
YarchDatabaseInstance ydb = YarchDatabase.getInstance(instance);
TableDefinition table = ydb.getTable(EventRecorder.TABLE_NAME);
if (table == null) {
throw new BadRequestException("No event archive support for instance '" + instance + "'");
}
}
private ExtensionRegistry getExtensionRegistry() {
if (gpbExtensionRegistry == null) {
HttpServer httpServer = YamcsServer.getGlobalService(HttpServer.class);
gpbExtensionRegistry = httpServer.getGpbExtensionRegistry();
}
return gpbExtensionRegistry;
}
}
|
package cc.softwarefactory.lokki.android.fragments;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.drawable.Drawable;
import android.location.Location;
import android.location.LocationManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v7.app.AlertDialog;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.androidquery.AQuery;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.Circle;
import com.google.android.gms.maps.model.CircleOptions;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import cc.softwarefactory.lokki.android.MainApplication;
import cc.softwarefactory.lokki.android.R;
import cc.softwarefactory.lokki.android.models.Place;
import cc.softwarefactory.lokki.android.models.User;
import cc.softwarefactory.lokki.android.utilities.AnalyticsUtils;
import cc.softwarefactory.lokki.android.utilities.DialogUtils;
import cc.softwarefactory.lokki.android.utilities.Utils;
import cc.softwarefactory.lokki.android.utilities.map.MapUserTypes;
import cc.softwarefactory.lokki.android.utilities.map.MapUtils;
public class MapViewFragment extends Fragment {
private static final String TAG = "MapViewFragment";
public static final String BROADCAST_GO_TO = "GO_TO_LOCATION";
public static final String GO_TO_COORDS = "GO_TO_COORDS";
private static final int DEFAULT_ZOOM = 16;
private SupportMapFragment fragment;
private GoogleMap map;
private HashMap<String, Marker> markerMap;
private AQuery aq;
private static Boolean cancelAsyncTasks = false;
private Context context;
private ArrayList<Circle> placesOverlay;
    private double radiusMultiplier = 0.9; // Don't want to fill the screen from edge to edge...
private TextView placeAddingTip;
private final static String BUNDLE_KEY_MAP_STATE ="mapdata";
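    // Camera target applied on the next resume; set from the saved map state or a GO_TO broadcast and
    // cleared once the camera has been moved.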
private LatLng startLocation = null;
public MapViewFragment() {
markerMap = new HashMap<>();
placesOverlay = new ArrayList<>();
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.activity_map, container, false);
placeAddingTip = (TextView) rootView.findViewById(R.id.place_adding_tip);
placeAddingTip.setText(R.string.place_adding_tip);
updatePlaceAddingTipVisibility();
aq = new AQuery(getActivity(), rootView);
context = getActivity().getApplicationContext();
LocalBroadcastManager.getInstance(context).registerReceiver(goToReceiver, new IntentFilter(BROADCAST_GO_TO));
return rootView;
}
public void updatePlaceAddingTipVisibility() {
boolean placeIsBeingAdded = getView() != null && getView().findViewById(R.id.addPlaceCircle) != null &&
((ImageView) getView().findViewById(R.id.addPlaceCircle)).getDrawable() != null;
        boolean hasPlaces = MainApplication.places != null && MainApplication.places.size() > 0;
        placeAddingTip.setAlpha(hasPlaces && !placeIsBeingAdded ? 1 : 0);
}
@Override
public void onDestroyView() {
        // Clear references so we don't hold on to anything coming from the map (and avoid memory leaks).
fragment = null;
map = null;
aq = null;
LocalBroadcastManager.getInstance(context).unregisterReceiver(goToReceiver);
super.onDestroyView();
}
@Override
public void onActivityCreated(Bundle savedInstanceState) { // This method guarantees that the fragment is loaded in the parent activity!
Log.d(TAG, "onActivityCreated");
super.onActivityCreated(savedInstanceState);
FragmentManager fm = getChildFragmentManager();
fragment = (SupportMapFragment) fm.findFragmentById(R.id.map);
if (fragment == null) {
fragment = SupportMapFragment.newInstance();
fm.beginTransaction().replace(R.id.map, fragment).commit();
}
}
//store current map state on SharedPreferences
public void storeMapState(){
if (map == null){
Log.w(TAG, "No map, can't save current location");
return;
}
Double lat = map.getCameraPosition().target.latitude;
Double lon = map.getCameraPosition().target.longitude;
SharedPreferences prefs = context.getSharedPreferences(BUNDLE_KEY_MAP_STATE, Activity.MODE_PRIVATE);
SharedPreferences.Editor editor = prefs.edit();
editor.putString("lat", Double.toString(lat));
editor.putString("lon", Double.toString(lon));
editor.commit();
}
//load current map state from SharedPreferences
public void loadMapState(){
SharedPreferences prefs = context.getSharedPreferences(BUNDLE_KEY_MAP_STATE, Activity.MODE_PRIVATE);
Double lat, lon;
try {
lat = Double.parseDouble(prefs.getString("lat", "0.0"));
lon = Double.parseDouble(prefs.getString("lon", "0.0"));
} catch(Exception e){
            Log.d(TAG, "Error parsing saved coordinates: " + e);
lat = 0.0;
lon = 0.0;
}
startLocation = new LatLng(lat, lon);
}
@Override
public void onResume() { // onResume is called after onActivityCreated, when the fragment is loaded 100%
Log.d(TAG, "onResume");
super.onResume();
if (map == null) {
Log.w(TAG, "Map null. creating it.");
setUpMap();
setupAddPlacesOverlay();
} else {
Log.d(TAG, "Map already exists. Nothing to do.");
}
LocalBroadcastManager.getInstance(context).registerReceiver(mMessageReceiver, new IntentFilter("LOCATION-UPDATE"));
LocalBroadcastManager.getInstance(context).registerReceiver(placesUpdateReceiver, new IntentFilter("PLACES-UPDATE"));
checkLocationServiceStatus();
new UpdateMap().execute(MapUserTypes.All);
cancelAsyncTasks = false;
if (MainApplication.places != null) {
updatePlaces();
}
AnalyticsUtils.screenHit(getString(R.string.analytics_screen_map));
if(MainApplication.emailBeingTracked == null && map != null){
if(startLocation == null){
loadMapState();
}
map.moveCamera(CameraUpdateFactory.newLatLngZoom(startLocation, DEFAULT_ZOOM));
//Don't move again on the next resume
startLocation = null;
}
}
private void checkLocationServiceStatus() {
LocationManager lm = (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
boolean gps = lm.isProviderEnabled(LocationManager.GPS_PROVIDER);
boolean network = lm.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
if (!gps && !network && !MainApplication.locationDisabledPromptShown) {
promptLocationService();
MainApplication.locationDisabledPromptShown = true;
}
}
private void promptLocationService() {
new AlertDialog.Builder(getActivity())
.setTitle(R.string.location_services_disabled)
.setMessage(R.string.gps_disabled)
.setCancelable(true)
.setPositiveButton(R.string.settings, new DialogInterface.OnClickListener() {
public void onClick(final DialogInterface dialog, final int id) {
AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
getString(R.string.analytics_action_click),
getString(R.string.analytics_label_open_settings_from_location_disabled_dialog));
startActivity(new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS));
}
})
.setNegativeButton(R.string.ignore, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
getString(R.string.analytics_action_click),
getString(R.string.analytics_label_ignore_location_disabled_dialog));
}
})
.show();
}
private void setUpMap() {
map = fragment.getMap();
if (map == null) {
Log.e(TAG, "Could not create map!");
return;
}
removeMarkers();
map.setMapType(MainApplication.mapTypes[MainApplication.mapType]);
map.setInfoWindowAdapter(new MyInfoWindowAdapter()); // Set the windowInfo view for each marker
map.setMyLocationEnabled(true);
map.setIndoorEnabled(true);
map.setBuildingsEnabled(true);
map.getUiSettings().setZoomControlsEnabled(false);
map.setOnMarkerClickListener(new GoogleMap.OnMarkerClickListener() {
@Override
public boolean onMarkerClick(Marker marker) {
if (!marker.isInfoWindowShown()) {
marker.showInfoWindow();
MainApplication.emailBeingTracked = marker.getTitle();
}
return true;
}
});
map.setOnMapClickListener(new GoogleMap.OnMapClickListener() {
@Override
public void onMapClick(LatLng latLng) {
MainApplication.emailBeingTracked = null;
}
});
// Set long click to add a place
map.setOnMapLongClickListener(new GoogleMap.OnMapLongClickListener() {
@Override
public void onMapLongClick(LatLng latLng) {
AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
getString(R.string.analytics_action_long_click),
getString(R.string.analytics_label_add_place_overlay_activated));
setAddPlacesVisible(true);
}
});
map.setOnMyLocationButtonClickListener(new GoogleMap.OnMyLocationButtonClickListener() {
@Override
public boolean onMyLocationButtonClick() {
AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
getString(R.string.analytics_action_click),
getString(R.string.analytics_label_my_location_button));
MainApplication.locationDisabledPromptShown = false;
MainApplication.emailBeingTracked = null;
checkLocationServiceStatus();
return false;
}
});
}
private void setupAddPlacesOverlay() {
// todo these should probably be initialized once...
Button cancelButton = (Button) getView().findViewById(R.id.cancel_add_place_button);
cancelButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
getString(R.string.analytics_action_click),
getString(R.string.analytics_label_cancel_add_place_button));
setAddPlacesVisible(false);
}
});
Button addPlaceButton = (Button) getView().findViewById(R.id.add_place_button);
addPlaceButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
AnalyticsUtils.eventHit(getString(R.string.analytics_category_ux),
getString(R.string.analytics_action_click),
getString(R.string.analytics_label_confirm_add_place_button));
int mapWidth = fragment.getView().getWidth();
int mapHeight = fragment.getView().getHeight() - getView().findViewById(R.id.add_place_buttons).getHeight();
Location middleSideLocation;
if (mapWidth > mapHeight) {
middleSideLocation = MapUtils.convertToLocation(map.getProjection().fromScreenLocation(new Point(mapWidth / 2, 0)), "middleSide");
} else {
middleSideLocation = MapUtils.convertToLocation(map.getProjection().fromScreenLocation(new Point(0, mapHeight / 2)), "middleSide");
}
LatLng centerLatLng = map.getProjection().fromScreenLocation(getAddPlaceCircleCenter());
int radius = (int) middleSideLocation.distanceTo(MapUtils.convertToLocation(centerLatLng, "center"));
DialogUtils.addPlace(getActivity(), centerLatLng, (int) (radius * radiusMultiplier));
}
});
}
private void setAddPlacesVisible(boolean visible) {
if (visible) {
((ImageView) getView().findViewById(R.id.addPlaceCircle)).setImageDrawable(new AddPlaceCircleDrawable());
showAddPlaceButtons();
} else {
((ImageView) getView().findViewById(R.id.addPlaceCircle)).setImageDrawable(null);
hideAddPlaceButtons();
}
updatePlaceAddingTipVisibility();
}
private void showAddPlaceButtons() {
Animation slideUp = AnimationUtils.loadAnimation(this.getActivity().getApplicationContext(), R.anim.add_place_buttons_show);
getView().findViewById(R.id.add_place_buttons).startAnimation(slideUp);
getView().findViewById(R.id.add_place_overlay).setVisibility(View.VISIBLE);
}
private void hideAddPlaceButtons() {
Animation slideDown = AnimationUtils.loadAnimation(this.getActivity().getApplicationContext(), R.anim.add_place_buttons_hide);
slideDown.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationStart(Animation animation) {
}
@Override
public void onAnimationEnd(Animation animation) {
getView().findViewById(R.id.add_place_overlay).setVisibility(View.INVISIBLE);
}
@Override
public void onAnimationRepeat(Animation animation) {
}
});
getView().findViewById(R.id.add_place_buttons).startAnimation(slideDown);
}
private Point getAddPlaceCircleCenter() {
int mapCenterX = fragment.getView().getWidth() / 2;
int mapCenterY = (fragment.getView().getHeight() - getView().findViewById(R.id.add_place_buttons).getHeight()) / 2;
return new Point(mapCenterX, mapCenterY);
}
private void removeMarkers() {
Log.d(TAG, "removeMarkers");
for (Iterator<Marker> it = markerMap.values().iterator(); it.hasNext();) {
Marker m = it.next();
m.remove();
}
markerMap.clear();
}
@Override
public void onPause() {
super.onPause();
storeMapState();
LocalBroadcastManager.getInstance(context).unregisterReceiver(mMessageReceiver);
LocalBroadcastManager.getInstance(context).unregisterReceiver(placesUpdateReceiver);
}
private BroadcastReceiver mMessageReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Log.d(TAG, "BroadcastReceiver onReceive");
Bundle extras = intent.getExtras();
if (extras == null || !extras.containsKey("current-location")) {
new UpdateMap().execute(MapUserTypes.All);
}
}
};
private BroadcastReceiver placesUpdateReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Log.d(TAG, "placesUpdateReceiver onReceive");
updatePlaces();
}
};
/**
* Receives intents that cause the map to move to a specific location
*/
private BroadcastReceiver goToReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (map == null){
Log.w(TAG, "null map, not moving camera");
return;
}
Log.d(TAG, "goToReceiver onReceive");
//Parse coordinates from extra data
String coords = intent.getStringExtra(GO_TO_COORDS);
int separator = coords.indexOf(',');
if (separator == -1){
Log.e(TAG, "Invalid coordinates, no separator");
return;
}
double lat, lon;
try {
lat = Double.parseDouble(coords.substring(0 , separator));
lon = Double.parseDouble(coords.substring(separator +1));
}
catch (NumberFormatException e){
Log.e(TAG, "Could not parse coordinates");
return;
}
//If we're tracking a contact, untrack them to prevent the camera from focusing on them
MainApplication.emailBeingTracked = null;
if (MapViewFragment.this.isVisible()){
//If the map is already visible, just move the map
map.moveCamera(CameraUpdateFactory.newLatLngZoom(new LatLng(lat,lon), DEFAULT_ZOOM));
}
            // Also remember the location so the camera is positioned there the next time the map resumes
            startLocation = new LatLng(lat, lon);
}
};
private void updatePlaces() {
Log.d(TAG, "updatePlaces");
if (map == null) {
return;
}
removePlaces();
for (Place place : MainApplication.places) {
Circle circle = map.addCircle(new CircleOptions()
.center(new LatLng(place.getLat(), place.getLon()))
.radius(place.getRad())
.strokeWidth(0)
.fillColor(getResources().getColor(R.color.place_circle)));
placesOverlay.add(circle);
}
updatePlaceAddingTipVisibility();
}
private void removePlaces() {
Log.d(TAG, "removePlaces");
for (Iterator<Circle> it = placesOverlay.iterator(); it.hasNext();) {
Circle circle = it.next();
circle.remove();
}
placesOverlay.clear();
}
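    // Collects the latest known location of the user and/or visible contacts in the background and
    // starts a LoadMarkerAsync task for every location that could be resolved.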
private class UpdateMap extends AsyncTask<MapUserTypes, Void, HashMap<String, Location>> {
@Override
protected HashMap<String, Location> doInBackground(MapUserTypes... params) {
MainApplication.Dashboard dashboard = MainApplication.dashboard;
if (dashboard == null) {
return null;
}
MapUserTypes who = params[0];
Log.d(TAG, "UpdateMap update for all users: " + who);
HashMap<String, Location> markerData = new HashMap<>();
if (who == MapUserTypes.User || who == MapUserTypes.All) {
markerData.put(MainApplication.userAccount, dashboard.getLocation().convertToAndroidLocation()); // User himself
}
if (who == MapUserTypes.Others || who == MapUserTypes.All) {
for (String userId : dashboard.getUserIdsICanSee()) {
User user = dashboard.getUserICanSeeByUserId(userId);
User.Location location = user.getLocation();
String email = dashboard.getEmailByUserId(userId);
Log.d(TAG, "I can see: " + email + " => " + user);
if (MainApplication.iDontWantToSee != null && MainApplication.iDontWantToSee.has(email)) {
Log.d(TAG, "I dont want to see: " + email);
} else {
Location loc = location.convertToAndroidLocation();
if (loc == null) {
Log.w(TAG, "No location could be parsed for: " + email);
}
markerData.put(email, loc);
}
}
}
return markerData;
}
@Override
protected void onPostExecute(HashMap<String, Location> markerDataResult) {
Log.d(TAG, "cancelAsyncTasks: " + cancelAsyncTasks);
super.onPostExecute(markerDataResult);
if (markerDataResult != null && !cancelAsyncTasks && isAdded()) {
for (String email : markerDataResult.keySet()) {
Log.d(TAG, "marker to update: " + email);
if (markerDataResult.get(email) != null) {
new LoadMarkerAsync(markerDataResult.get(email), email).execute();
}
}
}
}
}
private class MyInfoWindowAdapter implements GoogleMap.InfoWindowAdapter {
@Override
public View getInfoWindow(Marker marker) {
return null;
}
@Override
public View getInfoContents(Marker marker) {
if (!aq.isExist() || cancelAsyncTasks || !isAdded()) {
return null;
}
View myContentsView = getActivity().getLayoutInflater().inflate(R.layout.map_info_window, null);
AQuery aq = new AQuery(myContentsView);
String name = Utils.getNameFromEmail(context, marker.getTitle());
aq.id(R.id.contact_name).text(name);
aq.id(R.id.timestamp).text(Utils.timestampText(marker.getSnippet()));
return myContentsView;
}
}
private Bitmap getMarkerBitmap(String email, Boolean accurate, Boolean recent) {
Log.d(TAG, "getMarkerBitmap");
        // Check the avatar cache first; the key encodes the email plus the accuracy/recency flags.
Bitmap markerImage = MainApplication.avatarCache.get(email + ":" + accurate + ":" + recent);
if (markerImage != null) {
Log.d(TAG, "Marker IN cache: " + email + ":" + accurate + ":" + recent);
return markerImage;
} else {
Log.d(TAG, "Marker NOT in cache. Processing: " + email + ":" + accurate + ":" + recent);
}
Log.d(TAG, "AvatarLoader not in cache. Fetching it. Email: " + email);
// Get avatars
Bitmap userImage = Utils.getPhotoFromEmail(context, email);
if (userImage == null) {
userImage = BitmapFactory.decodeResource(getResources(), R.drawable.default_avatar);
} else {
userImage = Utils.getRoundedCornerBitmap(userImage, 50);
}
// Marker colors, etc.
Log.d(TAG, "userImage size: " + userImage);
View markerView = ((LayoutInflater) getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE)).inflate(R.layout.map_marker, null);
aq = new AQuery(markerView);
aq.id(R.id.user_image).image(userImage);
Log.d(TAG, "aq in place");
if (email.equals(MainApplication.userAccount)) {
aq.id(R.id.marker_frame).image(R.drawable.pointers_android_pointer_green);
} else if (!recent || !accurate) {
aq.id(R.id.marker_frame).image(R.drawable.pointers_android_pointer_orange);
}
Log.d(TAG, "Image set. Calling createDrawableFromView");
markerImage = createDrawableFromView(markerView);
MainApplication.avatarCache.put(email + ":" + accurate + ":" + recent, markerImage);
return markerImage;
}
// Convert a view to bitmap
private Bitmap createDrawableFromView(View view) {
Log.d(TAG, "createDrawableFromView");
DisplayMetrics displayMetrics = new DisplayMetrics();
getActivity().getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
view.setLayoutParams(new WindowManager.LayoutParams(WindowManager.LayoutParams.WRAP_CONTENT, WindowManager.LayoutParams.WRAP_CONTENT));
view.measure(displayMetrics.widthPixels, displayMetrics.heightPixels);
view.layout(0, 0, displayMetrics.widthPixels, displayMetrics.heightPixels);
view.buildDrawingCache();
Bitmap bitmap = Bitmap.createBitmap(view.getMeasuredWidth(), view.getMeasuredHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap);
view.draw(canvas);
return bitmap;
}
class LoadMarkerAsync extends AsyncTask<Void, Void, Bitmap> {
Location position;
LatLng latLng;
String email;
String time;
Boolean accurate;
Boolean recent;
public LoadMarkerAsync(Location position, String email) {
this.email = email;
this.position = position;
}
@Override
protected Bitmap doInBackground(Void... params) {
if (position == null || email == null) {
return null;
}
Log.d(TAG, "LoadMarkerAsync - Email: " + email + ", Position: " + position);
latLng = new LatLng(position.getLatitude(), position.getLongitude());
time = String.valueOf(position.getTime());
accurate = Math.round(position.getAccuracy()) < 100;
recent = (System.currentTimeMillis() - position.getTime()) < 60 * 60 * 1000;
try {
return getMarkerBitmap(email, accurate, recent);
} catch (Exception ex) {
ex.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Bitmap bitmapResult) {
super.onPostExecute(bitmapResult);
if (bitmapResult == null || cancelAsyncTasks || !isAdded() || map == null) {
return;
}
Marker marker = markerMap.get(email);
Boolean isNew = false;
if (marker != null) {
Log.d(TAG, "onPostExecute - updating marker: " + email);
marker.setPosition(latLng);
marker.setSnippet(time);
marker.setIcon(BitmapDescriptorFactory.fromBitmap(bitmapResult));
} else {
Log.d(TAG, "onPostExecute - creating marker: " + email);
marker = map.addMarker(new MarkerOptions().position(latLng).title(email).snippet(time).icon(BitmapDescriptorFactory.fromBitmap(bitmapResult)));
Log.d(TAG, "onPostExecute - marker created");
markerMap.put(email, marker);
Log.d(TAG, "onPostExecute - marker in map stored. markerMap: " + markerMap.size());
isNew = true;
}
if (marker.getTitle().equals(MainApplication.emailBeingTracked)) {
marker.showInfoWindow();
Log.d(TAG, "onPostExecute - showInfoWindow open");
if (isNew) {
map.moveCamera(CameraUpdateFactory.newLatLngZoom(marker.getPosition(), DEFAULT_ZOOM));
} else {
map.moveCamera(CameraUpdateFactory.newLatLng(marker.getPosition()));
}
} else if (MainApplication.firstTimeZoom && MainApplication.emailBeingTracked == null && MainApplication.userAccount != null && marker.getTitle().equals(MainApplication.userAccount)) {
MainApplication.firstTimeZoom = false;
map.moveCamera(CameraUpdateFactory.newLatLngZoom(marker.getPosition(), DEFAULT_ZOOM));
}
}
}
@Override
public void onDestroy() {
// TODO: Cancel ALL Async tasks
cancelAsyncTasks = true;
super.onDestroy();
}
private class AddPlaceCircleDrawable extends Drawable {
public static final int STROKE_WIDTH = 12;
public final float[] DASH_INTERVALS = new float[]{49, 36};
@Override
public void draw(Canvas canvas) {
Point mapCenter = getAddPlaceCircleCenter();
int radius = Math.min(mapCenter.x, mapCenter.y);
Paint circlePaint = new Paint();
circlePaint.setColor(getResources().getColor(R.color.add_place_circle));
circlePaint.setAntiAlias(true);
circlePaint.setStrokeWidth(STROKE_WIDTH);
DashPathEffect dashPath = new DashPathEffect(DASH_INTERVALS, 1.0f);
circlePaint.setPathEffect(dashPath);
circlePaint.setStyle(Paint.Style.STROKE);
canvas.drawCircle(mapCenter.x, mapCenter.y, (int) (radius * radiusMultiplier - STROKE_WIDTH), circlePaint);
}
@Override
public void setAlpha(int alpha) {
}
@Override
public void setColorFilter(ColorFilter cf) {
}
@Override
public int getOpacity() {
return 0;
}
}
}
|
package com.badlogic.cubocy;
import org.lwjgl.opengl.Display;
import com.badlogic.gdx.Application.ApplicationType;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
public class Cube {
static final int FOLLOW = 0;
static final int FIXED = 1;
static final int CONTROLLED = 2;
static final int DEAD = 3;
static final float ACCELERATION = 10;
static final float MAX_VELOCITY = 5;
static final float DAMP = 0.90f;
Map map;
Vector2 pos = new Vector2();
Vector2 accel = new Vector2();
Vector2 vel = new Vector2();
Rectangle bounds = new Rectangle();
int state = FOLLOW;
float stateTime = 0;
Rectangle cubeButtonRect = new Rectangle(480-64, 320-64, 64, 64);
Rectangle dpadRect = new Rectangle(0, 0, 128, 128);
public Cube(Map map, float x, float y) {
this.map = map;
this.pos.x = x;
this.pos.y = y;
this.bounds.x = pos.x + 0.2f;
this.bounds.y = pos.y + 0.2f;
this.bounds.width = this.bounds.height = 0.60f;
}
Vector2 target = new Vector2();
public void update(float deltaTime) {
processKeys();
if(state == FOLLOW) {
target.set(map.bob.pos);
            if(map.bob.dir == Bob.RIGHT) target.x--;
if(map.bob.dir == Bob.LEFT) target.x++;
target.y += 0.2f;
vel.set(target).sub(pos).mul(Math.min(4, pos.dst(target)) * deltaTime);
tryMove();
}
if(state == CONTROLLED) {
accel.mul(deltaTime);
vel.add(accel.x, accel.y);
if(accel.x == 0) vel.x *= DAMP;
if(accel.y == 0) vel.y *= DAMP;
if (vel.x > MAX_VELOCITY) vel.x = MAX_VELOCITY;
if (vel.x < -MAX_VELOCITY) vel.x = -MAX_VELOCITY;
if (vel.y > MAX_VELOCITY) vel.y = MAX_VELOCITY;
if (vel.y < -MAX_VELOCITY) vel.y = -MAX_VELOCITY;
vel.mul(deltaTime);
tryMove();
vel.mul(1.0f / deltaTime);
}
        if(state == FIXED) {
            // if(stateTime > 5.0f) {
            //     stateTime = 0;
            //     state = FOLLOW;
            // }
        }
stateTime += deltaTime;
}
private void processKeys () {
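        // Map raw touch coordinates onto the game's virtual 480x320 screen; y is flipped so 0 is at the bottom.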
float x0 = (Gdx.input.getX(0) / (float)Gdx.graphics.getWidth()) * 480;
float x1 = (Gdx.input.getX(1) / (float)Gdx.graphics.getWidth()) * 480;
float y0 = 320 - (Gdx.input.getY(0) / (float)Gdx.graphics.getHeight()) * 320;
float y1 = 320 - (Gdx.input.getY(1) / (float)Gdx.graphics.getHeight()) * 320;
boolean cubeButton = (Gdx.input.isTouched(0) && cubeButtonRect.contains(x0, y0)) ||
(Gdx.input.isTouched(1) && cubeButtonRect.contains(x1, y1));
if((Gdx.input.isKeyPressed(Keys.SPACE) || cubeButton) && state == FOLLOW && stateTime > 0.5f) {
stateTime = 0;
state = CONTROLLED;
return;
}
if((Gdx.input.isKeyPressed(Keys.SPACE) || cubeButton) && state == CONTROLLED && stateTime > 0.5f) {
stateTime = 0;
state = FIXED;
return;
}
if((Gdx.input.isKeyPressed(Keys.SPACE) || cubeButton) && state == FIXED && stateTime > 0.5f) {
stateTime = 0;
state = FOLLOW;
return;
}
boolean touch0 = Gdx.input.isTouched(0);
boolean touch1 = Gdx.input.isTouched(1);
boolean left = (touch0 && x0 < 60) || (touch1 && x1 < 60);
boolean right = (touch0 && (x0 > 80 && x0 < 128)) || (touch1 && (x1 > 80 && x1 < 128));
boolean down = (touch0 && (y0 < 60)) || (touch1 && (y1 < 60));
        boolean up = (touch0 && (y0 > 80 && y0 < 128)) || (touch1 && (y1 > 80 && y1 < 128));
if(state == CONTROLLED) {
if (Gdx.input.isKeyPressed(Keys.A)) {
accel.x = -ACCELERATION;
} else if (Gdx.input.isKeyPressed(Keys.D) || right ) {
accel.x = ACCELERATION;
} else {
accel.x = 0;
}
if (Gdx.input.isKeyPressed(Keys.W) || up) {
accel.y = ACCELERATION;
} else if (Gdx.input.isKeyPressed(Keys.S) || down) {
accel.y = -ACCELERATION;
} else {
accel.y = 0;
}
if(touch0) {
if(dpadRect.contains(x0, y0)) {
accel.x = (x0 - 64) / 64 * ACCELERATION / 2;
accel.y = (y0 - 64) / 64 * ACCELERATION / 2;
} else {
accel.x = 0;
accel.y = 0;
}
}
}
}
Rectangle[] r = { new Rectangle(), new Rectangle(), new Rectangle(), new Rectangle() };
private void tryMove () {
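        // Resolve collisions one axis at a time: move along x, push the bounds out of any overlapping tile,
        // then repeat the same for y before syncing pos with bounds.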
bounds.x += vel.x;
fetchCollidableRects();
for(int i = 0; i < r.length; i++) {
Rectangle rect = r[i];
if(bounds.overlaps(rect)) {
if(vel.x < 0) bounds.x = rect.x + rect.width + 0.01f;
else bounds.x = rect.x - bounds.width - 0.01f;
vel.x = 0;
}
}
bounds.y += vel.y;
fetchCollidableRects();
for(int i = 0; i < r.length; i++) {
Rectangle rect = r[i];
if(bounds.overlaps(rect)) {
if(vel.y < 0) { bounds.y = rect.y + rect.height + 0.01f; }
else bounds.y = rect.y - bounds.height - 0.01f;
vel.y = 0;
}
}
pos.x = bounds.x - 0.2f;
pos.y = bounds.y - 0.2f;
}
private void fetchCollidableRects() {
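        // Sample the map tile under each corner of the bounding box; tile rows are stored top-down,
        // hence the "length - 1 - y" flip below.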
int p1x = (int)bounds.x;
int p1y = (int)Math.floor(bounds.y);
int p2x = (int)(bounds.x + bounds.width);
int p2y = (int)Math.floor(bounds.y);
int p3x = (int)(bounds.x + bounds.width);
int p3y = (int)(bounds.y + bounds.height);
int p4x = (int)bounds.x;
int p4y = (int)(bounds.y + bounds.height);
int[][] tiles = map.tiles;
int tile1 = tiles[p1x][map.tiles[0].length - 1 - p1y];
int tile2 = tiles[p2x][map.tiles[0].length - 1 - p2y];
int tile3 = tiles[p3x][map.tiles[0].length - 1 - p3y];
int tile4 = tiles[p4x][map.tiles[0].length - 1 - p4y];
if (tile1 != Map.EMPTY)
r[0].set(p1x, p1y, 1, 1);
else
r[0].set(-1, -1, 0, 0);
if (tile2 != Map.EMPTY)
r[1].set(p2x, p2y, 1, 1);
else
r[1].set(-1, -1, 0, 0);
if (tile3 != Map.EMPTY)
r[2].set(p3x, p3y, 1, 1);
else
r[2].set(-1, -1, 0, 0);
if (tile4 != Map.EMPTY)
r[3].set(p4x, p4y, 1, 1);
else
r[3].set(-1, -1, 0, 0);
}
public void setControlled() {
if(state == FOLLOW) {
state = CONTROLLED;
stateTime = 0;
}
}
}
|
package ecologylab.bigsemantics.metametadata;
import java.util.ArrayList;
import java.util.HashMap;
import org.junit.Assert;
import org.junit.Test;
import ecologylab.net.ParsedURL;
/**
*
* @author quyin
*/
public class TestFilterLocation
{
@Test
public void testOverrideParams()
{
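    // The fragment of this URL carries q=3d+movies, which is expected to override the q=avatar query parameter.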
String url = "https://www.google.com/search?tbm=isch&source=hp&biw=1147&bih=1218&q=avatar&oq=avatar#newwindow=1&tbm=isch&q=3d+movies";
ParsedURL orig = ParsedURL.getAbsolute(url);
OverrideParams overrideParams = new OverrideParams();
FilterLocation filter = new FilterLocation();
filter.setOverrideParams(overrideParams);
ArrayList<ParsedURL> otherLocs = new ArrayList<ParsedURL>();
ParsedURL filtered = filter.filter(orig, otherLocs);
HashMap<String, String> params = filtered.extractParams(true);
Assert.assertEquals("3d movies", params.get("q"));
}
@Test
public void testExtractParam()
{
    // The original test URL was truncated in this copy of the source ("https:"). The value below is a
    // reconstruction (hypothetical redirector host) whose "url" parameter URL-decodes to the string
    // asserted at the end of this test.
    String url = "https://redirector.example.org/redirect?url=http%3A%2F%2Fexample.com%2Fresult.html%3Fpage%3D2023%26id%3D__id__%23foo";
FilterLocation filter = new FilterLocation();
filter.setExtractParam("url");
filter.setDecodeUrl(true);
String result = filter.operateOn(url);
Assert.assertEquals("http://example.com/result.html?page=2023&id=__id__#foo", result);
}
}
|
package de.rwth.dbis.layers.lapps.resource;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Logger;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.nimbusds.oauth2.sdk.ParseException;
import com.nimbusds.oauth2.sdk.http.HTTPRequest;
import com.nimbusds.oauth2.sdk.http.HTTPRequest.Method;
import com.nimbusds.oauth2.sdk.http.HTTPResponse;
import com.nimbusds.openid.connect.sdk.UserInfoErrorResponse;
import com.nimbusds.openid.connect.sdk.UserInfoResponse;
import com.nimbusds.openid.connect.sdk.UserInfoSuccessResponse;
import com.nimbusds.openid.connect.sdk.claims.UserInfo;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
import de.rwth.dbis.layers.lapps.domain.UserFacade;
import de.rwth.dbis.layers.lapps.entity.UserEntity;
import de.rwth.dbis.layers.lapps.exception.OIDCException;
/**
* Users resource (exposed at "users" path).
*/
@Path("/users")
@Api(value = "/users", description = "User ressource")
public class UsersResource {
private static final String OPEN_ID_PROVIDER = "http://api.learning-layers.eu/o/oauth2";
private static final String OPEN_ID_PROVIDER_CONFIGURATION_URI = OPEN_ID_PROVIDER.trim()
+ "/.well-known/openid-configuration";
private static final Logger LOGGER = Logger.getLogger(UsersResource.class.getName());
// only for testing, will always be valid
public static final String OPEN_ID_TEST_TOKEN = "test_token";
public static final int OPEN_ID_USER_ID = -1;
private static UserFacade userFacade = new UserFacade();
/**
* Provides a list of user Ids known to this server.
*
* @return Response with all users as a JSON array.
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Get list of all users")
@ApiResponses(value = {
@ApiResponse(code = HttpStatusCode.UNAUTHORIZED, message = "Invalid authentication"),
@ApiResponse(code = HttpStatusCode.INTERNAL_SERVER_ERROR,
message = "Internal server problems"),
@ApiResponse(code = HttpStatusCode.OK, message = "Default return message")})
public Response getAllUsers(@HeaderParam("access_token") String accessToken) {
try {
authenticate(accessToken);
// TODO: Check for admin rights (not part of the open id authentication process)
} catch (OIDCException e) {
LOGGER.warning(e.getMessage());
return Response.status(HttpStatusCode.UNAUTHORIZED).build();
}
List<UserEntity> entities = (List<UserEntity>) userFacade.findAll();
ArrayList<Integer> userIds = new ArrayList<Integer>();
Iterator<UserEntity> userIt = entities.iterator();
while (userIt.hasNext()) {
userIds.add(userIt.next().getId());
}
try {
ObjectMapper mapper = new ObjectMapper();
return Response.status(HttpStatusCode.OK).entity(mapper.writeValueAsBytes(userIds)).build();
} catch (JsonProcessingException e) {
LOGGER.warning(e.getMessage());
return Response.status(HttpStatusCode.INTERNAL_SERVER_ERROR).build();
}
}
/**
*
* Gets the user for a given id.
*
* @param id
*
* @return Response with user as a JSON object.
*
*/
@GET
@Path("/{id}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Get user by ID", response = UserEntity.class)
@ApiResponses(value = {
@ApiResponse(code = HttpStatusCode.NOT_FOUND, message = "User not found"),
@ApiResponse(code = HttpStatusCode.INTERNAL_SERVER_ERROR,
message = "Internal server problems"),
@ApiResponse(code = HttpStatusCode.OK, message = "Default return message")})
public Response getUser(@PathParam("id") int id) {
UserEntity user = userFacade.find(id);
if (user == null) {
return Response.status(HttpStatusCode.NOT_FOUND).build();
}
try {
ObjectMapper mapper = new ObjectMapper();
return Response.status(HttpStatusCode.OK).entity(mapper.writeValueAsBytes(user)).build();
} catch (JsonProcessingException e) {
LOGGER.warning(e.getMessage());
return Response.status(HttpStatusCode.INTERNAL_SERVER_ERROR).build();
}
}
/**
*
* Delete the user with the given id.
*
* @param id
*
* @return Response
*/
// TODO: Think about success token (instead of only a 200 response)
@DELETE
@Path("/{id}")
@ApiOperation(value = "Delete user by ID")
@ApiResponses(value = {
@ApiResponse(code = HttpStatusCode.UNAUTHORIZED, message = "Invalid authentication"),
@ApiResponse(code = HttpStatusCode.NOT_FOUND, message = "User not found"),
@ApiResponse(code = HttpStatusCode.NOT_IMPLEMENTED,
message = "Currently, this method is not implemented")})
public Response deleteUser(@HeaderParam("access_token") String accessToken,
@PathParam("id") int id) {
try {
// TODO: Check for admin or user himself rights (not part of the open id authentication
// process)
authenticate(accessToken);
} catch (OIDCException e) {
LOGGER.warning(e.getMessage());
return Response.status(HttpStatusCode.UNAUTHORIZED).build();
}
UserEntity user = userFacade.find(id);
if (user == null) {
return Response.status(HttpStatusCode.NOT_FOUND).build();
}
// TODO: delete user with help of userFacade
return Response.status(HttpStatusCode.NOT_IMPLEMENTED).build();
}
/**
*
* Update the user with the given id.
*
* @param id
* @param UserEntity as JSON
*
* @return Response
*/
// TODO: Think about success token (instead of only a 200 response)
// TODO: Write test case
@PUT
@Path("/{id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Update user by ID", response = UserEntity.class)
@ApiResponses(value = {
@ApiResponse(code = HttpStatusCode.INTERNAL_SERVER_ERROR,
message = "Internal server problems"),
@ApiResponse(code = HttpStatusCode.UNAUTHORIZED, message = "Invalid authentication"),
@ApiResponse(code = HttpStatusCode.NOT_FOUND, message = "User not found"),
@ApiResponse(code = HttpStatusCode.OK, message = "Default response message")})
public Response updateUser(@HeaderParam("access_token") String accessToken,
@PathParam("id") int id,
@ApiParam(value = "User entity as JSON", required = true) UserEntity updatedUser) {
try {
// TODO: Check for admin or user himself rights (not part of the open id authentication
// process)
authenticate(accessToken);
} catch (OIDCException e) {
LOGGER.warning(e.getMessage());
return Response.status(HttpStatusCode.UNAUTHORIZED).build();
}
UserEntity user = userFacade.find(id);
if (user == null) {
return Response.status(HttpStatusCode.NOT_FOUND).build();
}
// TODO: update user with help of userFacade
try {
ObjectMapper mapper = new ObjectMapper();
return Response.status(HttpStatusCode.OK).entity(mapper.writeValueAsBytes(updatedUser))
.build();
} catch (JsonProcessingException e) {
LOGGER.warning(e.getMessage());
return Response.status(HttpStatusCode.INTERNAL_SERVER_ERROR).build();
}
}
/**
   * Tries to authenticate a user for a given OpenID Connect token. If the user is not yet registered, it
   * will be registered with the LAPPS backend.
   *
   * @param openIdToken the OpenID Connect access token of the user
   * @return the (LAPPS) id of the user
   * @throws OIDCException an exception thrown for all OpenID Connect issues
*/
public static int authenticate(String openIdToken) throws OIDCException {
// return value
int userId = OPEN_ID_USER_ID;
// no token provided
if (openIdToken == null) {
throw new OIDCException("No token was provided");
}
// default testing token returns default testing id
if (openIdToken.equals(OPEN_ID_TEST_TOKEN)) {
return userId;
}
// JSON initialization stuff
ObjectMapper mapper = new ObjectMapper();
JsonNode serverConfig;
// get provider configuration
URL providerConfigurationUri;
try {
providerConfigurationUri = new URL(OPEN_ID_PROVIDER_CONFIGURATION_URI);
} catch (MalformedURLException e) {
throw new OIDCException("Exception during Open Id Connect authentication occured: "
+ e.getMessage());
}
HTTPRequest providerConfigRequest = new HTTPRequest(Method.GET, providerConfigurationUri);
// parse JSON result from configuration request
try {
String configStr = providerConfigRequest.send().getContent();
serverConfig = mapper.readTree(configStr);
} catch (Exception e) {
throw new OIDCException("Exception during Open Id Connect authentication occured: "
+ e.getMessage());
}
// send access token, get user info
HTTPRequest httpRequest;
HTTPResponse httpResponse;
try {
URI userinfoEndpointUri = new URI(serverConfig.get("userinfo_endpoint").textValue());
httpRequest = new HTTPRequest(Method.GET, userinfoEndpointUri.toURL());
httpRequest.setAuthorization("Bearer " + openIdToken);
httpResponse = httpRequest.send();
} catch (IOException | URISyntaxException e) {
throw new OIDCException("Exception during Open Id Connect authentication occured: "
+ e.getMessage());
}
// ..and process the response
UserInfoResponse userInfoResponse;
try {
userInfoResponse = UserInfoResponse.parse(httpResponse);
} catch (ParseException e) {
throw new OIDCException("Exception during Open Id Authentication occured: " + e.getMessage());
}
// request failed (unauthorized)
if (userInfoResponse instanceof UserInfoErrorResponse) {
UserInfoErrorResponse uier = (UserInfoErrorResponse) userInfoResponse;
throw new OIDCException("Exception during Open Id Authentication occured: "
+ uier.getClass().toString());
}
// successful, now get the user info and start extracting content
UserInfo userInfo = ((UserInfoSuccessResponse) userInfoResponse).getUserInfo();
String sub = userInfo.getSubject().toString();
String mail = userInfo.getEmail().toString();
// search for existing user
List<UserEntity> entities = userFacade.findByParameter("oidcId", sub);
    // more than one match means something bad happened; exactly one means the user is already known
if (entities.size() > 1)
throw new OIDCException("Exception during Open Id Authentication occured.");
else if (entities.size() == 1) {
// quick check, if mail of OIDC server account differs (has changed) to our database entry; if
// so, update our user
if (!entities.get(0).getEmail().equals(mail)) {
UserEntity user = entities.get(0);
userId = user.getId();
user.setEmail(mail);
userFacade.save(user);
}
return userId;
}
// user is unknown, has to be created
userId = createNewUser(sub, mail);
return userId;
}
/**
* Creates a new user for a given oidc_id and mail.
*
* @param oidc_id the "subject" identifier of the open id connect authentication
* @param mail a user email
*
* @return the (LAPPS) id of the user
*/
private static int createNewUser(String oidc_id, String mail) {
UserEntity user = new UserEntity(oidc_id, mail);
user = userFacade.save(user);
return user.getId();
}
}
|
package org.openspaces.pu.service;
import java.io.Serializable;
import java.util.Map;
/**
* A generic service that exists within a processing unit.
*
* @author kimchy
* @see org.openspaces.pu.service.PlainServiceDetails
*/
public interface ServiceDetails extends Serializable {
/**
* Returns the id of the processing unit (usually the bean id).
*/
String getId();
/**
* Returns the service type. For example, space, dotnet, jee.
*/
String getServiceType();
/**
     * Returns the sub-type of the service details. For example, in the case of a
     * space it can be localcache, proxy, ... .
*/
String getServiceSubType();
/**
* Returns a short description of the service.
*/
String getDescription();
/**
     * Returns the long description of the service.
*/
String getLongDescription();
/**
* Returns extra attributes the service details wishes to expose.
*/
Map<String, Object> getAttributes();
// /**
// * Aggregates an array of service details into an aggregated view of it. All service details are of the same
// * service type. Can return <code>null</code> if no aggregation can be performed.
// */
// AggregatedServiceDetails aggregateByServiceType(ServiceDetails[] servicesDetails);
// /**
// * Aggregates an array of service details into an aggregated view of it. All service details are of the same
// * service type and service sub type. Can return <code>null</code> if no aggregation can be performed.
// */
// AggregatedServiceDetails aggregateByServiceSubType(ServiceDetails[] servicesDetails);
// /**
// * Aggregates an array of service details into an aggregated view of it. All service details are of the same
// * id (and service type and service sub type). Can return <code>null</code> if no aggregation can be performed.
// */
// AggregatedServiceDetails aggregateById(ServiceDetails[] servicesDetails);
}
|
package facadePattern;
public class Statistics {
private DataFacade facade;
public Statistics(DataFacade facade) {
this.facade = facade;
}
public void getData(String id) {
ClientData client = new ClientData();
String[] allData = this.facade.retrieveData(id);
client.setPassengerDetails(allData);
client.setFlightDetails(allData);
client.setBaggageDetails(allData);
}
}
public class Passenger {
private String id;
private String passengerName = "Billy Bob";
private String passengerAddress = "New York";
private String passengerAge = "23";
public Passenger(String id) {
this.id = id;
}
public String getPassengerName() {
return this.passengerName;
}
public String getPassengerAddress() {
return this.passengerAddress;
}
public String getPassengerAge() {
return this.passengerAge;
}
}
public class Flight {
private String id;
private String flightNo = "AM676798";
private String airLine = "JavaAir";
private String departureTime = "12-11-14 10:43";
public Flight(String id) {
this.id = id;
}
public String getFlightNo() {
return this.flightNo;
}
public String getAirline() {
return this.airLine;
}
public String getDepartureTime() {
return this.departureTime;
}
}
public class Baggage {
private String id;
private String amount = "4";
private String totalWeight = "3KG";
private String allAccountedFor = "true";
public Baggage(String id) {
this.id = id;
}
public String getAmount() {
return this.amount;
}
public String getTotalWeight() {
return this.totalWeight;
}
public String getAllAccountedFor() {
return this.allAccountedFor;
}
}
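// Facade: hides the Passenger, Flight and Baggage subsystems behind a single retrieveData(id) call.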
public class DataFacade {
private String[] data = new String[3];
public DataFacade() {
}
public String[] retrieveData(String id) {
this.data[0] = getPassengerDetails(id);
this.data[1] = getFlightDetails(id);
this.data[2] = getBaggageDetails(id);
return data;
}
public String getPassengerDetails(String id) {
Passenger passenger = new Passenger(id);
String passengerData = passenger.getPassengerName();
passengerData += ", " + passenger.getPassengerAddress();
passengerData += ", " + passenger.getPassengerAge();
return passengerData;
}
public String getFlightDetails(String id) {
Flight flight = new Flight(id);
String flightData = flight.getFlightNo();
flightData += ", " + flight.getAirline();
flightData += ", " + flight.getDepartureTime();
return flightData;
}
public String getBaggageDetails(String id) {
Baggage baggage = new Baggage(id);
String baggageData = baggage.getAmount();
baggageData += ", " + baggage.getTotalWeight();
baggageData += ", " + baggage.getAllAccountedFor();
return baggageData;
}
}
public class ClientData {
public void setPassengerDetails(String[] data) {
System.out.println("Passenger: " + data[0]);
}
public void setFlightDetails(String[] data) {
System.out.println("Flight: " + data[1]);
}
public void setBaggageDetails(String[] data) {
System.out.println("Baggage: " + data[2]);
}
}
public class FacadeDriver {
public static void main (String[] args){
DataFacade data = new DataFacade();
Statistics statistics = new Statistics(data);
statistics.getData("WX4983489");
}
}
|
package integration;
import hello.Application;
import org.fluentlenium.adapter.FluentTest;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.phantomjs.PhantomJSDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.*;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = Application.class)
@WebIntegrationTest("server.port:0")
public class GreetingTest extends FluentTest {
@Value("${local.server.port}")
private int port;
private DesiredCapabilities capabilities = DesiredCapabilities.phantomjs();
private WebDriver driver = new PhantomJSDriver(capabilities);
@Override
public WebDriver getDefaultDriver() {
return driver;
}
@Test
public void testThymeleaf() {
goTo("http://127.0.0.1:" + port + "/greeting?name=Sam");
assertThat(pageSource()).contains("Hello, Sam!");
goTo("http://127.0.0.1:" + port + "/greeting?name=Bob");
assertThat(pageSource()).contains("Hello, Bob!");
}
@Test
public void testJavascript() throws InterruptedException {
goTo("http://127.0.0.1:" + port + "/greeting?name=Sam");
Thread.sleep(1000); // There's probably a better way to wait for $(document).ready
assertThat(pageSource()).contains("Hello from javascript!");
}
}
|
package edisyn.synth.oberheimmatrix1000;
import edisyn.*;
import edisyn.gui.*;
import java.awt.*;
import java.awt.geom.*;
import javax.swing.border.*;
import javax.swing.*;
import java.awt.event.*;
import java.util.*;
import java.io.*;
import javax.sound.midi.*;
/**
A patch editor for the Oberheim Matrix 1000.
@author Sean Luke
*/
public class OberheimMatrix1000 extends Synth
{
/// Various collections of parameter names for pop-up menus
public static final String[] KEYBOARD_MODES = new String[] { "Reassign", "Rotate", "Unison", "Reassign w/Rob" };
public static final String[] SYNC = new String[] { "Off", "Soft", "Medium", "Hard" };
// Note that there are actually 4 lag modes; the fourth one is also exponential!
public static final String[] PORTAMENTO_MODES = new String[] { "Constant Speed", "Constant Time", "Exponential" };
public static final String[] LFO_SHAPES = new String[] { "Triangle", "Up Saw", "Down Saw", "Square", "Random", "Noise", "S&H" };
public static final String[] LFO_TRIGGERS = new String[] { "None", "Single", "Multi", "External" };
public static final String[] MODULATION_SOURCES = new String[] { "None", "Env 1", "Env 2", "Env 3", "LFO 1", "LFO 2", "Vibrato", "Ramp 1", "Ramp 2", "Keyboard", "Portamento", "Tracking Generator", "Keyboard Gate", "Velocity", "Release Velocity", "Pressure", "Pedal 1", "Pedal 2", "Bend", "Mod Wheel", "Lever 3" };
public static final String[] TRACKING_GENERATOR_SOURCES = new String[] { "Env 1", "Env 2", "Env 3", "LFO 1", "LFO 2", "Vibrato", "Ramp 1", "Ramp 2", "Keyboard", "Portamento", "Tracking Generator", "Keyboard Gate", "Velocity", "Release Velocity", "Pressure", "Pedal 1", "Pedal 2", "Bend", "Mod Wheel", "Lever 3" };
public static final String[] ENV_TRIGGER_MODES = new String[] { "Single", "Single Reset", "Multi", "Multi Reset", "External Single", "External Single Reset", "External Multi", "External Multi Reset" };
public static final String[] ENV_MODES = new String[] { "Normal", "DADR", "Free Run", "DADR + Free Run" };
// There are actually 2 bits here, so we're missing one
public static final String[] ENV_LFO_TRIGGER_MODES = new String[] { "Normal", "LFO 1", "Gated LFO 1" };
public static final String[] RAMP_TRIGGER_MODES = new String[] { "Single", "Multi", "External", "External Gated" };
// VCA2 is not mentioned anywhere else
// I may need to say VCA rather than Amplifier elsewhere
public static final String[] MODULATION_DESTINATIONS = new String[] { "None", "DCO 1 Frequency", "DCO 1 Pulsewidth", "DCO 1 Wave Shape", "DCO 2 Frequency", "DCO 2 Pulsewidth", "DCO 2 Wave Shape", "Mix Level", "Filter FM", "Filter Frequency", "Filter Resonance", "VCA 1 Level", "VCA 2 Level", "Env 1 Delay", "Env 1 Attack", "Env 1 Decay", "Env 1 Release", "Env 1 Amplitude", "Env 2 Delay", "Env 2 Attack", "Env 2 Decay", "Env 2 Release", "Env 2 Amplitude", "Env 3 Delay", "Env 3 Attack", "Env 3 Decay", "Env 3 Release", "Env 3 Amplitude", "LFO 1 Speed", "LFO 1 Amplitude", "LFO 2 Speed", "LFO 2 Amplitude", "Portamento Time" };
public OberheimMatrix1000()
{
for(int i = 0; i < allParameters.length; i++)
{
allParametersToIndex.put(allParameters[i], Integer.valueOf(i));
}
for(int i = 0; i < internalParameters.length; i++)
{
internalParametersToIndex.put(internalParameters[i], Integer.valueOf(i));
}
/// SOUND PANEL
SynthPanel soundPanel = new SynthPanel(this);
VBox vbox = new VBox();
HBox hbox = new HBox();
hbox.add(addNameGlobal(Style.COLOR_GLOBAL()));
hbox.addLast(addOscillatorGlobal(Style.COLOR_A()));
vbox.add(hbox);
vbox.add(addOscillator(1, Style.COLOR_A()));
vbox.add(addOscillator(2, Style.COLOR_A()));
hbox = new HBox();
hbox.add(addFilter(Style.COLOR_C()));
hbox.addLast(addAmplifier(Style.COLOR_C()));
vbox.add(hbox);
soundPanel.add(vbox, BorderLayout.CENTER);
addTab("Oscillators and Filters", soundPanel);
// ENVELOPE PANEL
SynthPanel envelopePanel = new SynthPanel(this);
vbox = new VBox();
hbox = new HBox();
hbox.add(addLFO(1, Style.COLOR_A()));
hbox.addLast(addRamp(1, Style.COLOR_A()));
vbox.add(hbox);
hbox = new HBox();
hbox.add(addLFO(2, Style.COLOR_A()));
hbox.addLast(addRamp(2, Style.COLOR_A()));
vbox.add(hbox);
vbox.add(addEnvelope(1,Style.COLOR_B()));
vbox.add(addEnvelope(2,Style.COLOR_B()));
vbox.add(addEnvelope(3,Style.COLOR_B()));
envelopePanel.add(vbox, BorderLayout.CENTER);
addTab("LFOs and Envelopes", envelopePanel);
// MODULATION PANEL
SynthPanel modulationPanel = new SynthPanel(this);
vbox = new VBox();
vbox.add(addModulation(Style.COLOR_A()));
vbox.add(addTracking(Style.COLOR_B()));
modulationPanel.add(vbox, BorderLayout.CENTER);
addTab("Modulation", modulationPanel);
model.set("name", "UNTITLED");
model.set("bank", 0);
model.set("number", 0);
loadDefaults();
}
public String getDefaultResourceFileName() { return "OberheimMatrix1000.init"; }
public String getHTMLResourceFileName() { return "OberheimMatrix1000.html"; }
public boolean gatherPatchInfo(String title, Model change, boolean writing)
{
JTextField val = new JTextField("" + model.get("bank") + (model.get("number") < 10 ? "0" : "") + model.get("number"), 3);
while(true)
{
boolean result = showMultiOption(this, new String[] { "Patch Number"},
new JComponent[] { val }, title, "Enter the 3-digit Patch Number");
if (result == false)
return false;
int n;
try { n = Integer.parseInt(val.getText()); }
catch (NumberFormatException e)
{
showSimpleError(title, "The 3-digit Patch Number must be an integer 0...999");
continue;
}
if (n < 0 || n > 999)
{
showSimpleError(title, "The 3-digit Patch Number must be an integer 0...999");
continue;
}
change.set("bank", n / 100);
change.set("number", n % 100);
return true;
}
}
public JFrame sprout()
{
JFrame frame = super.sprout();
transmitTo.setEnabled(false);
addOberheimMenu();
return frame;
}
// I believe (hope?) that Matrix 6/6R names are probably the char values 32...95,
// which represent all-caps letters, all numbers, and most punctuation and space.
// What the documentation says is that names are stored with the "lower 6 bits of their ASCII representation".
// If the bytes start at 32, then this makes sense: bytes 32...63 are stored as-is,
// and bytes 64..95 get 64 subtracted from them, so they become 0...31. Clever.
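// A rough worked example of the scheme described above (illustrative only):
// 'A' is ASCII 65; since 65 >= 64 it would be packed as 65 - 64 = 1,
// while '0' is ASCII 48 and would be stored as-is. Unpacking reverses this:
// packed values below 32 get 64 added back, so 1 -> 65 ('A') and 48 -> 48 ('0').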
byte packNameByte(byte n)
{
/*
if (n < 32 || n > 95)
n = (byte)32;
if (n >= 64)
n -= 64;
*/
return n;
}
byte unpackNameByte(byte n)
{
/*
n = (byte)(n & 63);
if (n < 32)
n = (byte)(n + 64);
*/
return n;
}
/** Add the global patch category (name, id, number, etc.) */
public JComponent addNameGlobal(Color color)
{
Category globalCategory = new Category(this, getSynthName(), color);
globalCategory.makeUnresettable();
JComponent comp;
String[] params;
HBox hbox = new HBox();
VBox vbox = new VBox();
comp = new PatchDisplay(this, 4);
vbox.add(comp);
comp = new StringComponent("Patch Name", this, "name", 8, "Name must be up to 8 letters, numbers, spaces, or punctuation.")
{
public String replace(String val)
{
return revisePatchName(val);
}
public void update(String key, Model model)
{
super.update(key, model);
updateTitle();
}
};
vbox.add(comp);
hbox.add(vbox);
hbox.addLast(Strut.makeHorizontalStrut(100));
globalCategory.add(hbox, BorderLayout.WEST);
return globalCategory;
}
public JComponent addOscillatorGlobal(Color color)
{
Category category = new Category(this, "Keyboard", color);
JComponent comp;
String[] params;
HBox hbox = new HBox();
VBox vbox = new VBox();
params = KEYBOARD_MODES;
comp = new Chooser("Keyboard Mode", this, "keyboardmode", params);
vbox.add(comp);
// Maybe this should go with the LFOs?
params = PORTAMENTO_MODES;
comp = new Chooser("Portamento Mode", this, "portamentomode", params);
vbox.add(comp);
hbox.add(vbox);
comp = new LabelledDial("Mix", this, "mix", color, 0, 63, 31) // yes, there are *64* values, but 31 is center. It's not quite symmetric.
{
public double getStartAngle()
{
return (270 / 2) * (31.5 / 64.0) * 2 + 90;
}
public int getDefaultValue() { return 31; }
public String map(int val)
{
if (val == 31) return "--";
else if (val < 31) return "< " + (31 - val);
else return "" + (val - 31) + " >";
}
};
((LabelledDial)comp).addAdditionalLabel("DCO 2 <> 1");
hbox.add(comp);
comp = new LabelledDial("Portamento", this, "portamento", color, 0, 63);
((LabelledDial)comp).addAdditionalLabel("Rate");
hbox.add(comp);
comp = new LabelledDial("Portamento", this, "portamentomod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Vel Mod");
hbox.add(comp);
vbox = new VBox();
comp = new CheckBox("Legato Portamento", this, "portamentolegato");
vbox.add(comp);
hbox.add(vbox);
category.add(hbox, BorderLayout.CENTER);
return category;
}
/** Add an Oscillator category */
public JComponent addOscillator(int osc, Color color)
{
Category category = new Category(this, "Oscillator " + osc, color);
category.makePasteable("dco");
JComponent comp;
String[] params;
HBox hbox = new HBox();
VBox vbox = new VBox();
comp = new CheckBox("Bend", this, "dco" + osc + "bend");
vbox.add(comp);
comp = new CheckBox("Vibrato", this, "dco" + osc + "vibrato");
vbox.add(comp);
comp = new CheckBox("Portamento ", this, "dco" + osc + "portamento");
vbox.add(comp);
if (osc==2)
{
comp = new CheckBox("Key Tracking", this, "dco" + osc + "keytracking");
vbox.add(comp);
}
hbox.add(vbox);
vbox = new VBox();
comp = new CheckBox("Pulse ", this, "dco" + osc + "pulse"); // add some spaces so this is longer than Wave
vbox.add(comp);
comp = new CheckBox("Wave", this, "dco" + osc + "wave");
vbox.add(comp);
if (osc==2)
{
comp = new CheckBox("Noise", this, "dco" + osc + "noise");
vbox.add(comp);
}
comp = new CheckBox("Click", this, "dco" + osc + "click");
vbox.add(comp);
hbox.add(vbox);
//// Sysex documentation is inconsistent here, it's not clear if it's 5-bit or 6-bit.
//// But the Matrix 1000 is providing 6-bit values, so we're going with that (0...63)
comp = new LabelledDial("Wave Shape", this, "dco" + osc + "shape", color, 0, 63)
{
public boolean isSymmetric() { return true; }
};
((LabelledDial)comp).addAdditionalLabel("Saw <> Tri");
hbox.add(comp);
comp = new LabelledDial("Frequency", this, "dco" + osc + "frequency", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Frequency", this, "dco" + osc + "frequencymod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("LFO 1 Mod");
hbox.add(comp);
comp = new LabelledDial("Pulse Width", this, "dco" + osc + "pulsewidth", color, 0, 63)
{
public boolean isSymmetric() { return true; }
public int getDefaultValue() { return 31; }
};
hbox.add(comp);
comp = new LabelledDial("Pulse Width", this, "dco" + osc + "pulsewidthmod", color, -63, 63)
{
public void update(String key, Model model)
{
super.update(key, model);
}
};
((LabelledDial)comp).addAdditionalLabel("LFO 2 Mod");
hbox.add(comp);
if (osc==2)
{
comp = new LabelledDial("Detune", this, "dco" + osc + "detune", color, -31, 31)
{
public boolean isSymmetric() { return true; }
};
hbox.add(comp);
}
if (osc==1)
{
vbox = new VBox();
params = SYNC;
comp = new Chooser("Sync", this, "dco" + osc + "sync", params);
vbox.add(comp);
hbox.add(vbox);
}
category.add(hbox, BorderLayout.CENTER);
return category;
}
public JComponent addFilter(Color color)
{
Category category = new Category(this, "Filter", color);
JComponent comp;
String[] params;
final HBox hbox = new HBox();
VBox vbox = new VBox();
comp = new CheckBox("Bend", this, "vcfbend");
vbox.add(comp);
comp = new CheckBox("Vibrato", this, "vcfvibrato");
vbox.add(comp);
comp = new CheckBox("Portamento", this, "vcfportamento");
vbox.add(comp);
comp = new CheckBox("Key Tracking", this, "vcfkeytracking");
vbox.add(comp);
hbox.add(vbox);
comp = new LabelledDial("Frequency", this, "vcffrequency", color, 0, 127);
hbox.add(comp);
comp = new LabelledDial("Frequency", this, "vcffrequencyenv1mod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Env 1 Mod");
hbox.add(comp);
comp = new LabelledDial("Frequency", this, "vcffrequencypressuremod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Press Mod");
hbox.add(comp);
comp = new LabelledDial("Resonance", this, "vcfresonance", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("FM", this, "vcffm", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("FM", this, "vcffmenv3mod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Env 3 Mod");
hbox.add(comp);
comp = new LabelledDial("FM", this, "vcffmpressuremod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Press Mod");
hbox.add(comp);
category.add(hbox, BorderLayout.WEST);
return category;
}
/** Add Amplifier and Pan category */
public JComponent addAmplifier(Color color)
{
Category category = new Category(this, "Amplifier", color);
JComponent comp;
String[] params;
HBox hbox = new HBox();
comp = new LabelledDial("Volume", this, "vca1", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Volume", this, "vca1velmod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Vel Mod");
hbox.add(comp);
comp = new LabelledDial("Volume", this, "vca2env2mod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Env 2 Mod");
hbox.add(comp);
category.add(hbox, BorderLayout.WEST);
return category;
}
/** Add an LFO category */
public JComponent addLFO(final int lfo, Color color)
{
Category category = new Category(this, "LFO " + lfo +
(lfo == 1 ? " (Oscillator Frequency)" : " (Oscillator Pulsewidth)"), color);
category.makePasteable("lfo");
JComponent comp;
String[] params;
HBox hbox = new HBox();
VBox vbox = new VBox();
params = LFO_SHAPES;
comp = new Chooser("Shape", this, "lfo" + lfo + "shape", params);
vbox.add(comp);
params = LFO_TRIGGERS;
comp = new Chooser("Trigger", this, "lfo" + lfo + "trigger", params);
vbox.add(comp);
hbox.add(vbox);
vbox = new VBox();
params = MODULATION_SOURCES;
comp = new Chooser("Sampled Source", this, "lfo" + lfo + "source", params);
vbox.add(comp);
comp = new CheckBox("Lag", this, "lfo" + lfo + "lag");
vbox.add(comp);
hbox.add(vbox);
// The manual says this is 0...63, but the sysex website says this is 5-bit.
// The Matrix 1000 is providing 6-bit values so we're going with that.
comp = new LabelledDial("Retrigger", this, "lfo" + lfo + "retrigger", color, 0, 63);
((LabelledDial)comp).addAdditionalLabel("Point");
hbox.add(comp);
comp = new LabelledDial("Amplitude", this, "lfo" + lfo + "amplitude", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Amplitude", this, "lfo" + lfo + "amplitudemod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Ramp " + lfo + " Mod");
hbox.add(comp);
comp = new LabelledDial("Speed", this, "lfo" + lfo + "speed", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Speed", this, "lfo" + lfo + "speedmod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel(lfo == 1 ? "Press Mod" : "Key Mod");
hbox.add(comp);
category.add(hbox, BorderLayout.WEST);
return category;
}
EnvelopeDisplay[] dadr = new EnvelopeDisplay[3];
EnvelopeDisplay[] dadsr = new EnvelopeDisplay[3];
HBox[] envelopeBox = new HBox[3];
/** Add a "standard" envelope category */
public JComponent addEnvelope(final int env, Color color)
{
Category category = new Category(this, "Envelope " + env +
(env == 1 ? " (Filter Frequency)" :
(env == 2 ? " (Amplitude)" : " (Filter FM)")), color);
category.makePasteable("env");
JComponent comp;
String[] params;
HBox hbox = new HBox();
// separate CheckBoxes maybe?
VBox vbox = new VBox();
params = ENV_TRIGGER_MODES;
comp = new Chooser("Trigger Mode", this, "env" + env + "triggermode", params);
vbox.add(comp);
envelopeBox[env - 1] = new HBox();
// DADR
dadr[env - 1] = new EnvelopeDisplay(this, Style.ENVELOPE_COLOR(),
new String[] { null, "env" + env + "delay", "env" + env + "attack", "env" + env + "decay", "env" + env + "release" },
new String[] { null, null, null, "env" + env + "sustain", null },
new double[] { 0, 0.25/63.0, 0.25/63.0, 0.25 / 63.0, 0.25/63.0},
new double[] { 0, 0, 1.0, 1.0 / 63.0, 0 });
// DADSR
dadsr[env - 1] = new EnvelopeDisplay(this, Style.ENVELOPE_COLOR(),
new String[] { null, "env" + env + "delay", "env" + env + "attack", "env" + env + "decay", null, "env" + env + "release" },
new String[] { null, null, null, "env" + env + "sustain", "env" + env + "sustain", null },
new double[] { 0, 0.2/63.0, 0.2/63.0, 0.2 / 63.0, 0.2, 0.2/63.0},
new double[] { 0, 0, 1.0, 1.0 / 63.0, 1.0/63.0, 0 });
envelopeBox[env - 1].addLast(dadsr[env - 1]);
params = ENV_MODES;
comp = new Chooser("Envelope Mode", this, "env" + env + "mode", params)
{
public void update(String key, Model model)
{
super.update(key, model);
envelopeBox[env - 1].removeLast();
int val = model.get(key);
if (val == 0 || val == 2)
envelopeBox[env - 1].addLast(dadsr[env - 1]);
else
envelopeBox[env - 1].addLast(dadr[env - 1]);
envelopeBox[env - 1].revalidate();
envelopeBox[env - 1].repaint();
}
};
vbox.add(comp);
hbox.add(vbox);
vbox = new VBox();
params = ENV_LFO_TRIGGER_MODES;
comp = new Chooser("LFO Trigger Mode", this, "env" + env + "lfotriggermode", params);
vbox.add(comp);
hbox.add(vbox);
comp = new LabelledDial("Delay", this, "env" + env + "delay", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Attack", this, "env" + env + "attack", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Decay", this, "env" + env + "decay", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Sustain", this, "env" + env + "sustain", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Release", this, "env" + env + "release", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Amplitude", this, "env" + env + "amplitude", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Amplitude", this, "env" + env + "amplitudemod", color, -63, 63);
((LabelledDial)comp).addAdditionalLabel("Vel Mod");
hbox.add(comp);
hbox.addLast(envelopeBox[env - 1]);
category.add(hbox, BorderLayout.CENTER);
return category;
}
/** Add free envelope category */
public JComponent addTracking(Color color)
{
Category category = new Category(this, "Tracking Generator", color);
category.makeDistributable("trackingpoint");
JComponent comp;
String[] params;
HBox hbox = new HBox();
VBox vbox = new VBox();
params = TRACKING_GENERATOR_SOURCES;
comp = new Chooser("Input Source", this, "trackingsource", params);
vbox.add(comp);
hbox.add(vbox);
comp = new LabelledDial("Point 1", this, "trackingpoint1", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Point 2", this, "trackingpoint2", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Point 3", this, "trackingpoint3", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Point 4", this, "trackingpoint4", color, 0, 63);
hbox.add(comp);
comp = new LabelledDial("Point 5", this, "trackingpoint5", color, 0, 63);
hbox.add(comp);
comp = new EnvelopeDisplay(this, Style.ENVELOPE_COLOR(),
new String[] { null, null, null, null, null },
new String[] { "trackingpoint1", "trackingpoint2", "trackingpoint3", "trackingpoint4", "trackingpoint5" },
new double[] { 0, 0.25, 0.25, 0.25, 0.25},
new double[] { 1.0/63, 1.0/63, 1.0/63, 1.0/63, 1.0/63 });
hbox.add(comp);
category.add(hbox, BorderLayout.CENTER);
return category;
}
/** Add free envelope category */
public JComponent addRamp(int ramp, Color color)
{
Category category = new Category(this, "Ramp " + ramp +
(ramp == 1 ? " (LFO 1 Amplitude)" : " (LFO 2 Amplitude)"), color);
category.makePasteable("ramp");
JComponent comp;
String[] params;
HBox hbox = new HBox();
VBox vbox = new VBox();
params = RAMP_TRIGGER_MODES;
comp = new Chooser("Trigger Mode", this, "ramp" + ramp + "mode", params);
vbox.add(comp);
hbox.add(vbox);
comp = new LabelledDial("Rate", this, "ramp" + ramp + "rate", color, 0, 63);
hbox.add(comp);
hbox.addLast(Strut.makeHorizontalStrut(70));
category.add(hbox, BorderLayout.CENTER);
return category;
}
/** Add the Modulation category */
public JComponent addModulation(Color color)
{
Category category = new Category(this, "Modulation", color);
category.makeDistributable("mod");
JComponent comp;
String[] params;
VBox vbox = new VBox();
for(int row = 0; row < 2; row++)
{
if (row == 1)
{
vbox.add(Strut.makeVerticalStrut(30));
}
HBox hbox = new HBox();
for(int j = 1; j < 6; j++)
{
int i = row * 5 + j;
VBox vbox2 = new VBox();
params = MODULATION_SOURCES;
comp = new Chooser("" + i + " Source", this, "mod" + i + "source", params);
vbox2.add(comp);
params = MODULATION_DESTINATIONS;
comp = new Chooser("" + i + " Destination", this, "mod" + i + "destination", params);
vbox2.add(comp);
comp = new LabelledDial("" + i + " Amount", this, "mod" + i + "amount", color, -63, 63); // it's Level, not Amount, so we save some horizontal space
vbox2.add(comp);
hbox.add(vbox2);
}
vbox.add(hbox);
}
category.add(vbox, BorderLayout.WEST);
return category;
}
//// MATRIX PARAMETERS
//// These are the (roughly 100) Oberheim Matrix parameters by parameter number.
//// Note that this parameter number is not the same as the one in the sysex dump
//// (that one is specified in allParametersToIndex).
/** Map of parameter -> index in the allParameters array. */
HashMap internalParametersToIndex = new HashMap();
/** List of all 100 internal Oberheim numerical parameters in order. Note that this is DIFFERENT, ugh,
from the order of parameters in the sysex list, and is missing stuff like modulation and name. */
final static String[] internalParameters = new String[]
{
"dco1frequency",
"dco1frequencymod",
"dco1sync",
"dco1pulsewidth",
"dco1pulsewidthmod",
"dco1shape",
"dco1waveenable",
"dco1fixedmods1",
"dco1fixedmods2",
"dco1click",
"dco2frequency",
"dco2frequencymod",
"dco2detune",
"dco2pulsewidth",
"dco2pulsewidthmod",
"dco2shape",
"dco2waveenable",
"dco2fixedmods1",
"dco2fixedmods2",
"dco2click",
"mix",
"vcffrequency",
"vcffrequencyenv1mod",
"vcffrequencypressuremod",
"vcfresonance",
"vcffixedmods1",
"vcffixedmods2",
"vca1",
"vca1velmod",
"vca2env2mod",
"vcffm",
"vcffmenv3mod",
"vcffmpressuremod",
"trackingsource",
"trackingpoint1",
"trackingpoint2",
"trackingpoint3",
"trackingpoint4",
"trackingpoint5",
"-",
"ramp1rate",
"ramp1mode",
"ramp2rate",
"ramp2mode",
"portamento",
"portamentomod",
"portamentomode",
"portamentolegato",
"keyboardmode",
"-",
"env1delay",
"env1attack",
"env1decay",
"env1sustain",
"env1release",
"env1amplitude",
"env1amplitudemod",
"env1triggermode",
"env1mode",
"env1lfotriggermode",
"env2delay",
"env2attack",
"env2decay",
"env2sustain",
"env2release",
"env2amplitude",
"env2amplitudemod",
"env2triggermode",
"env2mode",
"env2lfotriggermode",
"env3delay",
"env3attack",
"env3decay",
"env3sustain",
"env3release",
"env3amplitude",
"env3amplitudemod",
"env3triggermode",
"env3mode",
"env3lfotriggermode",
"lfo1speed",
"lfo1speedmod",
"lfo1shape",
"lfo1retrigger",
"lfo1amplitude",
"lfo1amplitudemod",
"lfo1trigger",
"lfo1lag",
"lfo1source",
"-",
"lfo2speed",
"lfo2speedmod",
"lfo2shape",
"lfo2retrigger",
"lfo2amplitude",
"lfo2amplitudemod",
"lfo2trigger",
"lfo2lag",
"lfo2source",
"-"
};
/** Map of parameter -> index in the allParameters array. */
HashMap allParametersToIndex = new HashMap();
/** The Matrix 1000 sign-extends into its 7th bit. Basically this means
that if the value is N bits, then remaining high bits should have the
value of the high (Nth) bit. For example, the value 1011 should be
converted to (0) 1111011. We also may need to know signed or unsigned
values. So to do this, we have two arrays, bitmasks and signed. We
compute them from the initial values stored in bitmasks (which are then
changed to actual bitmasks). If the value below is negative, then the
parameter is expected to be signed. */
final static int[] bitmasks = new int[/*100 or so*/]
{
7,
7,
7,
7,
7,
7,
7,
7,
2,
6,
6,
6,
2,
2,
6,
6,
6,
2,
3,
-6,
6,
2,
1,
2,
1,
2,
7,
6,
2,
2,
6,
6,
6,
2,
1,
6,
2,
1,
3,
5,
5,
6,
6,
2,
1,
3,
5,
5,
6,
3,
6,
6,
6,
6,
6,
6,
2,
2,
3,
6,
6,
6,
6,
6,
6,
2,
2,
3,
6,
6,
6,
6,
6,
6,
2,
2,
5,
6,
6,
6,
6,
6,
6,
2,
6,
2,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
-7,
5,
-7,
5,
5,
-7,
5,
5,
-7,
5,
5,
-7,
5,
5,
-7,
5,
5,
-7,
5,
5,
-7,
5,
5,
-7,
5,
5,
-7,
5,
5,
-7,
5
};
final static boolean signed[] = new boolean[bitmasks.length];
static
{
for(int i = 0; i < bitmasks.length; i++)
{
if (bitmasks[i] < 0)
{
bitmasks[i] = -bitmasks[i];
signed[i] = true;
}
bitmasks[i] = (1 << bitmasks[i]) - 1;
}
}
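// For example (a sketch of the table above, not part of the original code): an entry of 7
// ends up as the bitmask (1 << 7) - 1 = 127 with signed[i] == false, while an entry of -6
// is flipped to 6 and marked signed[i] == true, yielding the 6-bit mask (1 << 6) - 1 = 63.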
final static String[] allParameters = new String[/*100 or so*/]
{
"-", // this is the name, but the Matrix 1000 doesn't recognize names
"-",
"-",
"-",
"-",
"-",
"-",
"-",
"keyboardmode",
"dco1frequency",
"dco1shape",
"dco1pulsewidth",
"dco1fixedmods1",
"dco1waveenable",
"dco2frequency",
"dco2shape",
"dco2pulsewidth",
"dco2fixedmods1",
"dco2waveenable",
"dco2detune",
"mix",
"dco1fixedmods2",
"dco1click",
"dco2fixedmods2",
"dco2click",
"dco1sync",
"vcffrequency",
"vcfresonance",
"vcffixedmods1",
"vcffixedmods2",
"vcffm",
"vca1",
"portamento",
"portamentomode",
"portamentolegato",
"lfo1speed",
"lfo1trigger",
"lfo1lag",
"lfo1shape",
"lfo1retrigger",
"lfo1source",
"lfo1amplitude",
"lfo2speed",
"lfo2trigger",
"lfo2lag",
"lfo2shape",
"lfo2retrigger",
"lfo2source",
"lfo2amplitude",
"env1triggermode",
"env1delay",
"env1attack",
"env1decay",
"env1sustain",
"env1release",
"env1amplitude",
"env1lfotriggermode",
"env1mode",
"env2triggermode",
"env2delay",
"env2attack",
"env2decay",
"env2sustain",
"env2release",
"env2amplitude",
"env2lfotriggermode",
"env2mode",
"env3triggermode",
"env3delay",
"env3attack",
"env3decay",
"env3sustain",
"env3release",
"env3amplitude",
"env3lfotriggermode",
"env3mode",
"trackingsource",
"trackingpoint1",
"trackingpoint2",
"trackingpoint3",
"trackingpoint4",
"trackingpoint5",
"ramp1rate",
"ramp1mode",
"ramp2rate",
"ramp2mode",
"dco1frequencymod",
"dco1pulsewidthmod",
"dco2frequencymod",
"dco2pulsewidthmod",
"vcffrequencyenv1mod",
"vcffrequencypressuremod",
"vca1velmod",
"vca2env2mod",
"env1amplitudemod",
"env2amplitudemod",
"env3amplitudemod",
"lfo1amplitudemod",
"lfo2amplitudemod",
"portamentomod",
"vcffmenv3mod",
"vcffmpressuremod",
"lfo1speedmod",
"lfo2speedmod",
"mod1source",
"mod1amount",
"mod1destination",
"mod2source",
"mod2amount",
"mod2destination",
"mod3source",
"mod3amount",
"mod3destination",
"mod4source",
"mod4amount",
"mod4destination",
"mod5source",
"mod5amount",
"mod5destination",
"mod6source",
"mod6amount",
"mod6destination",
"mod7source",
"mod7amount",
"mod7destination",
"mod8source",
"mod8amount",
"mod8destination",
"mod9source",
"mod9amount",
"mod9destination",
"mod10source",
"mod10amount",
"mod10destination"
};
public byte[] emit(String key)
{
if (key.equals("bank")) return new byte[0]; // this is not emittable
if (key.equals("number")) return new byte[0]; // this is not emittable
int index;
int value;
if (key.equals("name"))
{
return new byte[0]; // ignore
}
else if (key.equals("env1lfotriggermode") || key.equals("env2lfotriggermode") || key.equals("env3lfotriggermode"))
{
index = ((Integer)(internalParametersToIndex.get(key))).intValue();
value = model.get(key);
// convert
if (value >= 1) value = value + 1; // there is no value = 1, that's the same as value = 0
}
else if (key.equals("dco1bend") || key.equals("dco1vibrato"))
{
index = ((Integer)(internalParametersToIndex.get("dco1fixedmods1"))).intValue();
value = model.get("dco1bend") | (model.get("dco1vibrato") << 1);
}
else if (key.equals("dco1portamento"))
{
index = ((Integer)(internalParametersToIndex.get("dco1fixedmods2"))).intValue();
value = model.get("dco1portamento");
}
else if (key.equals("dco2bend") || key.equals("dco2vibrato"))
{
index = ((Integer)(internalParametersToIndex.get("dco2fixedmods1"))).intValue();
value = model.get("dco2bend") | (model.get("dco2vibrato") << 1);
}
else if (key.equals("dco2portamento") || key.equals("dco2keytracking"))
{
index = ((Integer)(internalParametersToIndex.get("dco2fixedmods2"))).intValue();
value = model.get("dco2portamento") | (model.get("dco2keytracking") << 1);
}
else if (key.equals("dco1wave") || key.equals("dco1pulse"))
{
index = ((Integer)(internalParametersToIndex.get("dco1waveenable"))).intValue();
value = model.get("dco1pulse") | (model.get("dco1wave") << 1);
}
else if (key.equals("dco2wave") || key.equals("dco2pulse") || key.equals("dco2noise"))
{
index = ((Integer)(internalParametersToIndex.get("dco2waveenable"))).intValue();
value = model.get("dco2pulse") | (model.get("dco2wave") << 1) | (model.get("dco2noise") << 2);
}
else if (key.equals("vcfbend") || key.equals("vcfvibrato"))
{
index = ((Integer)(internalParametersToIndex.get("vcffixedmods1"))).intValue();
value = model.get("vcfbend") | (model.get("vcfvibrato") << 1);
}
else if (key.equals("vcfportamento") || key.equals("vcfkeytracking"))
{
index = ((Integer)(internalParametersToIndex.get("vcffixedmods2"))).intValue();
value = model.get("vcfportamento") | (model.get("vcfkeytracking") << 1);
}
else if (key.equals("dco2detune"))
{
index = ((Integer)(internalParametersToIndex.get(key))).intValue();
value = model.get(key) & 127; // sign-extend to 7th bit only
}
else if (key.startsWith("mod"))
{
int modnumber = (int)(key.charAt(3) - '0');
if (key.charAt(4) == '0') // it's 10
modnumber = 10;
int modsource = model.get("mod" + modnumber + "source");
int moddestination = model.get("mod" + modnumber + "destination");
int modamount = model.get("mod" + modnumber + "amount") & 127;
// if one is "None", then the other must be as well
if (modsource == 0) moddestination = 0;
else if (moddestination == 0) modsource = 0;
modnumber--; // convert to the 0-based mod bus number used in the sysex message
return new byte[] { (byte)0xF0, 0x10, 0x06, 0x0B, (byte)modnumber, (byte)modsource, (byte) modamount, (byte)moddestination, (byte)0xF7 };
}
else if (key.equals("trackingsource"))
{
index = ((Integer)(internalParametersToIndex.get(key))).intValue();
value = model.get(key) + 1; // tracking source has no "none"
}
// don't need to customize portamentomode though we'll have to do it on parse
//else if (key.equals("portamentomode"))
// // two things are both exponential
else
{
index = ((Integer)(internalParametersToIndex.get(key))).intValue();
value = model.get(key);
}
byte VV = (byte)(value & 127);
byte PP = (byte)(index & 127);
return new byte[] { (byte)0xF0, 0x10, 0x06, 0x06, PP, VV, (byte)0xF7 };
}
/// ERRORS IN MIDI SYSEX DESCRIPTION
/// Though they're listed as "six bit (signed)" or "seven bit (signed)", all signed values
/// are actually stored as signed 8-bit. Six-bit signed values are just plain signed bytes
/// which range from -32 to +31. Similarly, 7-bit signed values are just plain signed bytes
/// which range from -64 to +63. When emitting or parsing a patch, the nybblization just breaks
/// the byte into two nybbles and that's all.
/// Note however that when sending INDIVIDUAL PARAMETERS, the sysex value is first masked to
/// 7 bits (& 127). And in NRPN, all values, even unsigned ones, have 64 added to them to
/// push them to 0...127.
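/// As a rough illustration of the nybblization described above (not part of the original code):
/// a stored byte of -5 (0xFB) is split into a low nybble of 0xB and a high nybble of 0xF,
/// and reassembled on parse as (0xF << 4) | 0xB, which the cast to byte sign-extends back to -5.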
public int parse(byte[] data, boolean ignorePatch, boolean fromFile)
{
byte[] name = new byte[8];
// we don't know the bank, just the number. :-(
int number = data[4];
if (!ignorePatch)
model.set("number", number);
for(int i = 0; i < 134; i++)
{
String key = allParameters[i];
// unpack from nybbles
byte lonybble = data[i * 2 + 5];
byte hinybble = data[i * 2 + 5 + 1];
byte value = (byte)(((hinybble << 4) | (lonybble & 15)));
if (i < 8) // it's the name
name[i] = unpackNameByte(value);
else if (key.equals("env1lfotriggermode") || key.equals("env2lfotriggermode") || key.equals("env3lfotriggermode"))
{
// there is no value = 1, that's the same as value = 0
if (value >= 1) value = (byte)(value - 1);
}
else if (key.equals("dco1fixedmods1"))
{
model.set("dco1bend", value & 1);
model.set("dco1vibrato", (value >>> 1) & 1);
}
else if (key.equals("dco1fixedmods2"))
{
model.set("dco1portamento", value & 1);
}
else if (key.equals("dco2fixedmods1"))
{
model.set("dco2bend", value & 1);
model.set("dco2vibrato", (value >>> 1) & 1);
}
else if (key.equals("dco2fixedmods2"))
{
model.set("dco2portamento", value & 1);
model.set("dco2keytracking", (value >>> 1) & 1);
}
else if (key.equals("dco1waveenable"))
{
model.set("dco1pulse", value & 1);
model.set("dco1wave", (value >>> 1) & 1);
}
else if (key.equals("dco2waveenable"))
{
model.set("dco2pulse", value & 1);
model.set("dco2wave", (value >>> 1) & 1);
model.set("dco2noise", (value >>> 2) & 1);
}
else if (key.equals("vcffixedmods1"))
{
model.set("vcfbend", value & 1);
model.set("vcfvibrato", (value >>> 1) & 1);
}
else if (key.equals("vcffixedmods2"))
{
model.set("vcfportamento", value & 1);
model.set("vcfkeytracking", (value >>> 1) & 1);
}
else if (key.equals("portamentomode"))
{
if (value == 4)
value = (byte)3; // get rid of extra exponential
model.set(key, value);
}
else if (key.equals("trackingsource"))
{
if (value > 0) // Some Matrix 1000 patches have the source set to 0 even though it's not supposed to be!
model.set(key, (value - 1)); // tracking source has no "none"
else
System.err.println("Warning: Tracking Source was incorrectly 0. Setting to 1.");
}
else
{
model.set(key, value);
}
}
// to get the bank, we'll extract it from the name. It appears to be the fourth character
int bank = name[3] - '0';
if (bank < 0 || bank > 9)
bank = 0;
model.set("bank", bank);
if (!fromFile && useClassicPatchNames)
{
model.set("name", PATCH_NAMES[bank * 100 + number]);
}
else
{
// update name just for fun, it may be gibberish
try
{
model.set("name", new String(name, "US-ASCII"));
}
catch (UnsupportedEncodingException e)
{
e.printStackTrace();
}
}
revise();
return PARSE_SUCCEEDED;
}
public byte[] emit(Model tempModel, boolean toWorkingMemory, boolean toFile)
{
if (tempModel == null)
tempModel = getModel();
byte[] data = new byte[268];
String nm = model.get("name", "UNTITLED") + " ";
byte[] name = null;
try { name = nm.getBytes("US-ASCII"); } catch (Exception e ) { }
int value;
byte check = 0;
for(int i = 0; i < 134; i++)
{
String key = allParameters[i];
if (i < 8) // it's the name
value = packNameByte(name[i]);
else if (key.equals("env1lfotriggermode") || key.equals("env2lfotriggermode") || key.equals("env3lfotriggermode"))
{
value = model.get(key);
// convert
if (value >= 1) value = value + 1; // there is no value = 1, that's the same as value = 0
}
else if (key.equals("dco1fixedmods1"))
{
value = (model.get("dco1vibrato") << 1) |
(model.get("dco1bend"));
}
else if (key.equals("dco1fixedmods2"))
{
value = (model.get("dco1portamento"));
}
else if (key.equals("dco2fixedmods1"))
{
value = (model.get("dco2vibrato") << 1) |
(model.get("dco2bend"));
}
else if (key.equals("dco2fixedmods2"))
{
value = (model.get("dco2keytracking") << 1) |
(model.get("dco2portamento"));
}
else if (key.equals("dco1waveenable"))
{
value = (model.get("dco1wave") << 1) |
(model.get("dco1pulse"));
}
else if (key.equals("dco2waveenable"))
{
value = (model.get("dco2noise") << 2) |
(model.get("dco2wave") << 1) |
(model.get("dco2pulse"));
}
else if (key.equals("vcffixedmods1"))
{
value = (model.get("vcfvibrato") << 1) |
(model.get("vcfbend"));
}
else if (key.equals("vcffixedmods2"))
{
value = (model.get("vcfkeytracking") << 1) |
(model.get("vcfportamento"));
}
// Note: no need to handle portamentomode specially, but we DO have to parse it specially
// Ugh, all this below is to deal with the source=destination=0 requirement. Yuck.
else if (key.equals("mod1source") || key.equals("mod1destination"))
{
value = model.get(key);
if (model.get("mod1source") == 0 || model.get("mod1destination") == 0)
value = 0;
}
else if (key.equals("mod2source") || key.equals("mod2destination"))
{
value = model.get(key);
if (model.get("mod2source") == 0 || model.get("mod2destination") == 0)
value = 0;
}
else if (key.equals("mod3source") || key.equals("mod3destination"))
{
value = model.get(key);
if (model.get("mod3source") == 0 || model.get("mod3destination") == 0)
value = 0;
}
else if (key.equals("mod4source") || key.equals("mod4destination"))
{
value = model.get(key);
if (model.get("mod4source") == 0 || model.get("mod4destination") == 0)
value = 0;
}
else if (key.equals("mod5source") || key.equals("mod5destination"))
{
value = model.get(key);
if (model.get("mod5source") == 0 || model.get("mod5destination") == 0)
value = 0;
}
else if (key.equals("mod6source") || key.equals("mod6destination"))
{
value = model.get(key);
if (model.get("mod6source") == 0 || model.get("mod6destination") == 0)
value = 0;
}
else if (key.equals("mod7source") || key.equals("mod7destination"))
{
value = model.get(key);
if (model.get("mod7source") == 0 || model.get("mod7destination") == 0)
value = 0;
}
else if (key.equals("mod8source") || key.equals("mod8destination"))
{
value = model.get(key);
if (model.get("mod8source") == 0 || model.get("mod8destination") == 0)
value = 0;
}
else if (key.equals("mod9source") || key.equals("mod9destination"))
{
value = model.get(key);
if (model.get("mod9source") == 0 || model.get("mod9destination") == 0)
value = 0;
}
else if (key.equals("mod10source") || key.equals("mod10destination"))
{
value = model.get(key);
if (model.get("mod10source") == 0 || model.get("mod10destination") == 0)
value = 0;
}
else if (key.equals("trackingsource"))
{
value = model.get(key) + 1; // tracking source has no "none"
}
else
{
value = model.get(key);
}
// pack to nybbles
if (value < 0) value += 256; // so we're positive.
byte lonybble = (byte)(value & 15);
byte hinybble = (byte)((value >>> 4) & 15);
// it says this about the checksum:
// Checksum.
// The original (not transmitted) data is summed in seven bits ignoring overflows
// I think this means to add into a byte, and then mask to 127.
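// Illustrative note (my reading, not from the spec): adding into a byte wraps modulo 256,
// and the final & 127 below keeps only the low seven bits, so the checksum is effectively
// the sum of all 134 value bytes modulo 128.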
check += value;
// write
data[i * 2] = lonybble;
data[i * 2 + 1] = hinybble;
}
byte checksum = (byte)(check & 127);
byte[] d = new byte[275];
d[0] = (byte)0xF0;
d[1] = (byte)0x10;
d[2] = (byte)0x06;
if (toWorkingMemory)
{
// 0DH - SINGLE PATCH DATA TO EDIT BUFFER
d[3] = (byte)0x0D;
d[4] = (byte)0x00;
}
else
{
// 01H-SINGLE PATCH DATA
d[3] = (byte)0x01;
d[4] = (byte)model.get("number");
}
/*
///// A bug in the Matrix 1000 means that SINGLE PATCH DATA TO EDIT BUFFER apparently sends
///// corrupted data. So we can't use it. But we still need to send! So we do this by
///// writing to slot 199 when toWorkingMemory is true
if (toWorkingMemory)
{
// 01H-SINGLE PATCH DATA
d[3] = (byte)0x01;
d[4] = (byte)SEND_MATRIX_NUMBER;
}
else
{
// 01H-SINGLE PATCH DATA
d[3] = (byte)0x01;
d[4] = (byte)model.get("number");
}
*/
System.arraycopy(data, 0, d, 5, 268);
d[273] = checksum;
d[274] = (byte)0xF7;
return d;
}
public void changePatch(Model tempModel)
{
changePatch(tempModel.get("bank"), tempModel.get("number"));
}
public void changePatch(int bank, int number)
{
// first change the bank
// 0AH - SET BANK
// we write this store-command as a sysex command
// so it gets stripped when we do a save to file
byte[] data2 = new byte[6];
data2[0] = (byte)0xF0;
data2[1] = (byte)0x10;
data2[2] = (byte)0x06;
data2[3] = (byte)0x0A;
data2[4] = (byte)(bank);
data2[5] = (byte)0xF7;
tryToSendSysex(data2);
// 0CH - UNLOCK BANK
// we write this store-command as a sysex command
// so it gets stripped when we do a save to file
// annoying that this gets re-locked by SET BANK
byte[] data = new byte[5];
data[0] = (byte)0xF0;
data[1] = (byte)0x10;
data[2] = (byte)0x06;
data[3] = (byte)0x0C;
data[4] = (byte)0xF7;
tryToSendSysex(data);
// Next do a program change
byte NN = (byte)number;
tryToSendMIDI(buildPC(getChannelOut(), NN));
}
public byte[] requestCurrentDump()
{
byte[] data = new byte[7];
data[0] = (byte)0xF0;
data[1] = (byte)0x10;
data[2] = (byte)0x06;
data[3] = (byte)0x04;
data[4] = (byte)0x04; // request edit buffer
data[5] = (byte)0x00;
data[6] = (byte)0xF7;
return data;
}
public byte[] requestDump(Model tempModel)
{
// Next do a dump request
byte[] data = new byte[7];
data[0] = (byte)0xF0;
data[1] = (byte)0x10;
data[2] = (byte)0x06;
data[3] = (byte)0x04;
data[4] = (byte)0x01; // request single patch
data[5] = (byte)(tempModel.get("number"));
data[6] = (byte)0xF7;
return data;
}
public static final int EXPECTED_SYSEX_LENGTH = 275;
public static boolean recognize(byte[] data)
{
boolean v = (
// The Matrix 1000 doesn't transmit the checksum!
// So it could be one of two lengths:
(data.length == EXPECTED_SYSEX_LENGTH ||
data.length == EXPECTED_SYSEX_LENGTH - 1) &&
data[0] == (byte)0xF0 &&
data[1] == (byte)0x10 &&
data[2] == (byte)0x06 &&
(data[3] == (byte)0x01 || data[3] == (byte)0x0d));
return v;
}
public static final int MAXIMUM_NAME_LENGTH = 8;
public String revisePatchName(String name)
{
name = super.revisePatchName(name); // trim first time
if (name.length() > MAXIMUM_NAME_LENGTH)
name = name.substring(0, MAXIMUM_NAME_LENGTH);
StringBuffer nameb = new StringBuffer(name);
for(int i = 0 ; i < nameb.length(); i++)
{
char c = nameb.charAt(i);
if (c < 32 || c > 127)
nameb.setCharAt(i, ' ');
}
name = nameb.toString();
return super.revisePatchName(name); // trim again
}
/** Verify that all the parameters are within valid values, and tweak them if not. */
public void revise()
{
// check the easy stuff -- out of range parameters
super.revise();
String nm = model.get("name", "UNTITLED");
String newnm = revisePatchName(nm);
if (!nm.equals(newnm))
model.set("name", newnm);
}
boolean sendingMatrix100Parameters = false;
public double getPauseBetweenMIDISends() { if (sendingMatrix100Parameters) return 75; else return 0; }
public static String getSynthName() { return "Oberheim Matrix 1000"; }
public String getPatchName(Model model) { return model.get("name", "UNTITLED"); }
public int getPauseAfterSendAllParameters() { return 200; }
public Model getNextPatchLocation(Model model)
{
int bank = model.get("bank");
int number = model.get("number");
number++;
if (number >= 100)
{
bank++;
number = 0;
if (bank >= 10)
bank = 0;
}
Model newModel = buildModel();
newModel.set("bank", bank);
newModel.set("number", number);
return newModel;
}
public String getPatchLocationName(Model model)
{
// getPatchLocationName() is called from sprout() as a test to see if we should enable
// batch downloading. If we haven't yet created an .init file, then parameters won't exist
// yet and this method will bomb badly. So we return null in this case.
if (!model.exists("number")) return null;
if (!model.exists("bank")) return null;
int number = model.get("number");
return ("" + model.get("bank")) +
(number > 9 ? "" : "0") +
(model.get("number"));
}
///// A bug in the Matrix 1000 means that SINGLE PATCH DATA TO EDIT BUFFER apparently sends
///// corrupted data. So we can't use it. But we still need to send! So we do this by
///// writing to slot 199 when sending in bulk. We have to modify sendAllParameters so that if
///// we're sending in bulk, we change the patch to 199 first so that we always have the
///// Matrix 1000 set up right.
public static final int SEND_MATRIX_NUMBER = 99;
public static final int SEND_MATRIX_BANK = 1;
public boolean sendMatrixParametersInBulk = true; // always for now
public boolean getSendsAllParametersInBulk() { return sendMatrixParametersInBulk; }
public void sendAllParameters()
{
// in case we send parameters individually, we'll add a pause between sending parameters here.
sendingMatrix100Parameters = true;
/*
if (sendMatrixParametersInBulk)
{
// we need to ensure a changepatch to SEND_MATRIX_SLOT here
changePatch(SEND_MATRIX_BANK, SEND_MATRIX_NUMBER);
}
*/
super.sendAllParameters();
// now we turn off the pause
sendingMatrix100Parameters = false;
}
public boolean useClassicPatchNames = true;
public void addOberheimMenu()
{
JMenu menu = new JMenu("Matrix 1000");
menubar.add(menu);
// classic patch names
JCheckBoxMenuItem useClassicPatchNamesMenu = new JCheckBoxMenuItem("Use Classic Patch Names");
menu.add(useClassicPatchNamesMenu);
useClassicPatchNamesMenu.addActionListener(new ActionListener()
{
public void actionPerformed(ActionEvent evt)
{
useClassicPatchNames = useClassicPatchNamesMenu.isSelected();
setLastX("" + useClassicPatchNames, "UseClassicPatchNames", getSynthName(), true);
}
});
String str = getLastX("UseClassicPatchNames", getSynthName(), true);
if (str == null)
useClassicPatchNames = true;
else if (str.equalsIgnoreCase("true"))
useClassicPatchNames = true;
else useClassicPatchNames = false;
useClassicPatchNamesMenu.setSelected(useClassicPatchNames);
menu.addSeparator();
// load patch
for(int i = 0; i < 1000; i += 50)
{
JMenu patchgroup = new JMenu("" + "Request Patch " + (i < 100 ? (i < 10 ? "00" : "0") : "" ) + i + "..." + (i < 100 ? "0" : "") + (i + 49));
menu.add(patchgroup);
for(int j = i; j < i + 50; j++)
{
final int _j = j;
JMenuItem patch = new JMenuItem("" +
(j < 100 ? (j < 10 ? "00" + j : "0" + j) : "" + j) + ": " +
PATCH_NAMES[j]);
patch.addActionListener(new ActionListener()
{
public void actionPerformed(ActionEvent evt)
{
Model tempModel = buildModel();
tempModel.set("number", _j % 100);
tempModel.set("bank", _j / 100);
performRequestDump(tempModel, true);
}
});
patchgroup.add(patch);
}
}
/*
// we don't call this for the time being -- sending individual parameters is slow and fraught with problems
JMenu sendParameters = new JMenu("Send Parameters...");
menu.add(sendParameters);
String str = getLastX("SendParameters", getSynthName(), true);
if (str == null)
sendMatrixParametersInBulk = true;
else if (str.equalsIgnoreCase("BULK"))
sendMatrixParametersInBulk = true;
else if (str.equalsIgnoreCase("INDIVIDUALLY"))
sendMatrixParametersInBulk = false;
else sendMatrixParametersInBulk = true;
ButtonGroup bg = new ButtonGroup();
JRadioButtonMenuItem bulk = new JRadioButtonMenuItem("In Bulk, using Patch 199");
bulk.addActionListener(new ActionListener()
{
public void actionPerformed(ActionEvent evt)
{
sendMatrixParametersInBulk = true;
setLastX("BULK", "SendParameters", getSynthName(), true);
}
});
sendParameters.add(bulk);
bg.add(bulk);
if (sendMatrixParametersInBulk == true) bulk.setSelected(true);
JRadioButtonMenuItem separately = new JRadioButtonMenuItem("As Individual Parameters");
separately.addActionListener(new ActionListener()
{
public void actionPerformed(ActionEvent evt)
{
sendMatrixParametersInBulk = false;
setLastX("INDIVIDUALLY", "SendParameters", getSynthName(), true);
}
});
sendParameters.add(separately);
bg.add(separately);
if (sendMatrixParametersInBulk == false) separately.setSelected(true);
*/
}
// These are drawn from the "Matrix 1000 Patchbook"
public static final String[] PATCH_NAMES =
{
"TOTOHORN",
"1000STRG",
"MOOOG_B",
"EZYBRASS",
"SYNTH",
"MIBES",
"CHUNK",
"MINDSEAR",
"CASTILLO",
"DESTROY+",
"BIG PIK",
"M-CHOIR",
"STRINGME",
")LIQUID(",
"PNO-ELEC",
"BED TRAK",
"STELLAR",
"SYNCAGE",
"SHIVERS",
"+ ZETA +",
"STEELDR.",
"TAURUS",
"POWRSOLO",
"INTERSTL",
"REZTFUL",
"WATRLNG",
"BEELS",
"LIKETHIS",
"NTHENEWS",
"SOFT MIX",
"OBXA-A7",
"BREATH",
"MUTRONO",
"SLOWATER",
"HAUNTING",
"FLANGED",
"TENSION",
"ECHOTRON",
"PIRATES!",
"EP SWEP",
"DEJAVUE'",
"DRAMA",
"VIOLINCE",
"BOUNCE",
"SAGAN'Z",
"OB LEAD",
"FEEDGIT",
"SAMPLE",
"TINYPIAN",
"GALACTIC",
"DOU CIEL",
"WA CLAV",
"DREAMER",
"XA STR",
"CHURCH",
"KIDDING?",
"THUNDER",
"ECHOWURL",
"BLABINET",
"STRUNGS",
"AFRICAN",
"B3+LSLIE",
"CHIMES",
"DIPIAN",
"LAZ HARP",
"SMTHSQ2",
"TRUMPETS",
"PAPANO 4",
"WIPBASS",
"LYLE-8VA",
"SITAR",
"VIOGITAR",
"GOLIATH",
"ANAXYLO",
"FURY0",
"SYNLUTH",
"CHAMBER",
"SPATBRS",
"ETHEREE",
"TBRAZZ",
"NOBLE",
"FLEXTONE",
"GREEZY2",
"ARPPEGT",
"JUMP IES",
"HARDVARK",
"SWEETSKY",
"SHIMRING",
"TIMBOWS",
"GALLOP",
"PRELUDE1",
"GROWLBRZ",
"CLICKORG",
"PRESLEZ1",
"LYLE",
"ARCANGEL",
"BENSHIMR",
"LUSHNESS",
"NOISTRNG",
"SOPIPES",
"PAPANO 4",
"WURLY 2",
"TOTOHORN",
"AGRESORN",
"STRING S",
"ODX 7",
"JUMP IES",
"PROFIT",
"VOICES",
"SAGAN'Z",
"PA ANO 5",
"MONSTER",
"GENVIV",
"CLAVINET",
"STRINGER",
"SIMONISK",
"HOTBODOM",
"XTASY",
")LIQUID(",
"BED TRAK",
"ROADS",
"ECHOWURL",
"POLYPHON",
"DREAMER",
"HISTRUNG",
"DEJAVUE'",
"WET BAZ",
"PIPE",
"OCTAVIA",
"OB-STRGS",
"KAWHY",
"JOCKO",
"FLUTES",
"ATYPICAL",
"BOW VIOL",
"VERTABRA",
"SLAP 1",
"P.ORGAN5",
"OCTAHORN",
"LUSHNESS",
"12\"GITAR",
"STAND UP",
"BERT'S B",
"BRASS-11",
"STRINGME",
"LEED-2",
"OW BASS",
"ORGAN-1",
"HORNISK",
"STRING-1",
"CARIPSO",
"FAZ BASS",
"ORGAN-2",
"PHASEPAD",
"STRING 2",
"BATA",
"BASS SYN",
"SYNCAGE*",
"DIGIHORN",
"CELLO",
"GLASSVOX",
"FRET NOT",
"ORGANISM",
"LIMUIDZ",
"STRANGER",
"PAPANO 7",
"WOW BASS",
"P.ORGAN4",
"MELOHORN",
"LEED-1",
"TINYPIAN",
"SLAP 2",
"FORESTS",
"HONOCLAB",
"STRING 7",
"VELSYNC*",
"RAINECHO",
"ORGAN 9",
"BRAZZ",
"ACCORD",
"DIGPIANO",
"TENOR",
"STORTGAN",
"VIOGITAR",
"STRUNGET",
"BIRDLAND",
"METAL-1",
"METAL-8",
"FUNK ART",
"METAL-13",
"VIBES",
"STRING 6",
"STRNGREZ",
"SINGS",
"TIMBOWS",
"WHISTLE",
"OBERHORN",
"TOOTS ?",
"FIREBALL",
"SMPLTHIS",
"OBXA-11",
"OBXA-12",
"OBXA-A2",
"OBXA-A7",
"OBXA-B7",
"OBXA-B8",
"OBXA-C2",
"OBXA-C4",
"OBXA-C6",
"OBXA-C7",
"OBXA-C8",
"OBXA-D2",
"OBXA-D3",
"OBXA-D4",
"OBXA-D5",
"OBXA-D6",
"OBXA-D7",
"OBXA-D8",
"OBXA-9\"",
"OBCA-RE",
"OBXJMP",
"*'ANGEL",
"+ ZETA +",
"1984SWP6",
"WAVES",
"80MS DDL",
"SYNTH",
"AERIAL",
"ALIENSWP",
"AMBIANCE",
"ANAFTST*",
"ANAHARP",
"ANALOG B",
"ANAXYLO*",
"ANGELS",
"APOLLO",
"ARCANGEL",
"ARGEX-1",
"ARGON7",
"ATYPICAL",
"AW WHY ?",
"BENSHIMR",
"BEOWCOMP",
"BILLY",
"BLASZZ",
"BLOCKOUT",
"BOEPTYN*",
"BOTTLES",
"BOUNCE*",
"BRASSVOX",
"BRILLANT",
"BROADWAY",
"BS ETAK*",
"BURNHOUS",
"CAMERA 1",
"CHIME 1",
"CHIME 2",
"CHUNK",
"CMI HIGH",
"COEAUR 1",
"COLONY 9",
"CRYSLAKE",
"CS-80",
"DEACON",
"DEJAVUE'",
"DIDIER",
"DISTANCE",
"DMACHINE",
"DREAMER",
"DREEMER",
"DUCKTIME",
"DUNK IT",
"E N O 1",
"ECHOSYN",
"ECHOTRON",
"EGYPT",
"EP SWEP*",
"EPCH+BRZ",
"EPDSTRT*",
"ESQ-1",
"ETHEREE",
"FAKE DDL",
"FIFTHS",
"FLOATONG",
"FLPFLOP*",
"FLY TO",
"FM BASS",
"FUNDO",
"*FUNK ART",
"FUNKAY",
"FURYO",
"FWEEP",
"S.1",
"GALACTIC",
"GALLOP *",
"GENIVEEV",
"GENVIV",
"GENVIV*",
"GIRLSWEP",
"GOOD BED",
"GOODTIME",
"GROTTO",
"HACKETT",
"HALO",
"HARMOVOX",
"HARPOON",
"HELI-IN",
"HOMETOWN",
"INTERSTL",
"ITSONICE",
"JAZZQUIT",
"JM JARRE",
"JOHN B'S",
"KCEPMAX*",
"KCEPSAW*",
"KCHSYNC*",
"KIRKLAND",
"LDSUBHRM",
"LIKETHIS",
"LSTLAUGH",
"LUN'AIR",
"M-CHOIR",
"MAGICAL",
"MARIN",
"MATMODUL",
"MATRIX 1",
"MATRIX 2",
"METABOAD",
"METABRD",
"MINDSEAR",
"MONSTER",
"MR KYRIE",
"MUSICBOX",
"NAUTILUS",
"NEW VOX",
"NEWSOUND",
"NIGHTPAD",
"OB SWEEP",
"OB VOX*",
"OB-INTRO",
"OBXA-B2",
"OBXA-B4",
"OCTAVIA",
"OPEN AIR",
"ORDINARY",
"P CHORD",
"P-PLUCK",
"PAD",
"PERC S",
"PHASE 5",
"PHASECHO",
"PICKY",
"PIPESTR",
"PN/FMSWP",
"POLCHOIR",
"POWER",
"PROPHET1",
"PROPHET5",
"PROPHETV",
"PSYLITIS",
"REZ*PULS",
"REZTFUL",
"SAMPLE",
"SATURN",
"SCIENCE",
"SCRITTI+",
"SECRETS",
"SENSIT 2",
"SENSITIV",
"SEQUINOX",
"SHANKAR",
"SHIMMER",
"SHIMRING",
"SHIVERS",
"SKRCHTN*",
"SKTSOKY*",
"SKY HIGH",
"SKYVOICE",
"SLAPBACH",
"SLIDSTG",
"SLOW CRY",
"SLOWATER",
"SMTHSQ2*",
"SOFT MIX",
"SOUNDPAD",
"SOUNDTR[",
"SPACE",
"SPACE/CO",
"SPARKLES",
"SPLASH 1",
"STELLAR*",
"STUGROWL",
"STYX",
"SUBMARIN",
"SUNDAY",
"SUSSUDIO",
"SWEPCORD",
"SWRLEKO*",
"SYN BOX*",
"SYNCAGE*",
"TOTOHORN",
"OBXA-13",
"OBXA-A1",
"OBXA-B1",
"OBXA-C1",
"OBXA-D1",
"6R BRASS",
"AGRESORN",
"ALASKA",
"ANA HIT*",
"ANASUTL*",
"B'ARI/S2",
"B/D-ANA*",
"BAGPIPES",
"BARISAX",
"BASCLRNT",
"BASSCLAR",
"BASSOON",
"BENDHORN",
"BIGBRA$$",
"BONES",
"BRASRAMP",
"BTRASSVOX",
"BRASSY",
"BRAZEN",
"BRECHER",
"BRTH FLT",
"BRUTUS",
"BRZIVIV*",
"BUCHANN*",
"BUZREED",
"CHROMA-S",
"CLARINET",
"CRAZHORN",
"CS-80",
"CUIVRE((",
"DBLREED*",
"EASTREED",
"EDGY",
"ENSEMBL*",
"EUROPE",
"EWF HORN",
"EZYBRASS",
"FACTORY",
"FIFTHS",
"FLGLHORN",
"FLOOT",
"FLUGELHN",
"FLUGLE",
"FLUTE",
"FLUTE TR",
"FLUTE.",
"FLUTES",
"FLUTEY",
"FLUX",
"FM BRAZ",
"FM DELAY",
"FR.HORN",
"FNRCHRN*",
"FTHWEEL*",
"FUE.JAPN",
"FUSION",
"FWEEP",
"GABRIEL",
"GO BED",
"GOLIATH",
"HORN'EM",
"HORN-1",
"HORNENS",
"HORNFALL",
"HORNSAS",
"HORNY",
"HRNSHAKE",
"J HAMMER",
"JTULLFLT",
"JUBILEE",
"KLARYNET",
"KORGHORN",
"LYRICON",
"MATRONE",
"MELFAZE*",
"MELOHORN",
"METHENY5",
"MUTETRPT",
"OB BRASS",
"OB-8",
"OBERHORN",
"OBOE",
"OCT.BRS",
"OCTAFLUT",
"OCTAHORN",
"ORIENT",
"PEDSWP*",
"PEG-BRS",
"PYRMFLT*",
"RAHOOOL*",
"RECORDER",
"RELVELHO",
"RICHCORD",
"ROMAN",
"SEXAFOAM",
"SLO HRN",
"SOPIPES",
"SPATBRS*",
"SQUARDOU",
"STAB",
"STAB-BRS",
"STEPS 2.",
"STUFLUTE",
"SWRLYBRD",
"SYN SAX*",
"SYNBASS",
"SYNBONE",
"SYNBRSS*",
"SYNHORN",
"TBRAZZ",
"TENOR",
"TOTOAL",
"TOUCH+GO",
"TRILLFLT",
"TRMBONE*",
"TROMBONE",
"TRUMPETS",
"TRUPT-EU",
"* 99 *",
"TUBA 2",
"OBXA-A8",
"ELEAD*",
"BDTH-2",
"BIRDY",
"BRECKERL",
"CASTILLO",
"CHICK",
"DESTROY+",
"DIGRUNGE",
"DRAGON-3",
"DXINDIAN",
"FEEDBAK6",
"FEEDBAK8",
"FEEDGIT",
"FIFTH I%",
"FIFTHLIX",
"GLASLEED",
"GROWLBRS",
"H-LEAD",
"HILEED 6",
"J HAMMER",
"JAKOLEED",
"JAN LEAD",
"JAZZ",
"JIMY'SRG",
"KC LEAD*",
"KIDDING?",
"LEAD+PRT",
"LEAD-1",
"LEAD-3",
"LEED-1",
"LEED-2",
"LYLE 2",
"LYLE 3 M",
"METHENEY",
"METLSOLO",
"MILESCOM",
"MINIMOGG",
"MINIMOOG",
"MONOSTRG",
"NASTY",
"OB LEAD*",
"OSC SYNC",
"PANFLOET",
"PINKLEAD",
"POWRSOLO",
"PRSSLIDE",
"QUINCY",
"RECORDER",
"REZLEAD*",
"SAWLEAD*",
"SITAR",
"SMOOTH",
"SMUTHSQ*",
"SOLO",
"SOLODARM",
"SOLOPROF",
"SOLOSYNC",
"SOLOW*",
"SOPIPES",
"SPITLEED",
"SQARELED",
"STUVIB",
"SUSGUIT",
"SNTHE 5",
"UKSOLO",
"UNIBASS",
"UNIWAVE",
"WAKEMANS",
"WEIRDPRC",
"WHISTLER",
"WINAND 1",
"XA'SOLO",
"ZAW'QART",
"OBXA-10",
"OBXA-14",
"OBXA-A3",
"OBXA-B3",
"OBXA-B6",
"OBXA-C3",
"OBXA",
"OBXA-6",
"(ARCO)01",
"*'CANOPY",
"1000STRG",
"TOP",
"2000STRG",
"AGITATO*",
"ALL LOVE",
"ALT84TOP",
"BED TRAK",
"BLACSEAM",
"BOW IT",
"BOW VIOL",
"CELLO",
"CHAMBER",
"CHILLO",
"CLASSIKA",
"CONCERT",
"DEEPCAVE",
"DEPTHS",
"DLAYSTR*",
"DONSTRIG",
"DOU'CIEL",
"DUNGEON",
"DYNASTY",
"E.VIOLIN",
"FAMUS*OB",
"FORESTS",
"GRANULES",
"GREAT\"OB",
"HARMONIC",
"ICY-CHRD",
"INDNSTRG",
"LOWSTRNG",
"LOYAL",
"LUSHNESS",
"LYLE-8VA",
"MELLO=14",
"MKSINGS",
"MONEY $$",
"MUTEDSTR",
"MZSTRING",
"NOBLE",
"NOISTGS",
"OB A3PD*",
"OB-STR1N",
"OB-STRGS",
"OBSTRING",
"OBXA-A6",
"OCARINA",
"OCHESTRY",
"OPENSTRG",
"ORCH*",
"ORIENT",
"PITZ STR",
"PIZZ^+P2",
"PLANET P",
"POLSTRG2",
"PROHET-5",
"PROPHET5",
"RID ZEP",
"ROYAL PH",
"RP STRG5",
"SECRETS'",
"SHARPBOW",
"SHIFT",
"SHRTSTRG",
"SINGS",
"SLIDSTG",
"SLOW BOW",
"SLOW CRY",
"SMASH",
"SOLEMN",
"SOLEMNIS",
"SOLO",
"SOUNDTR",
"SOUNDTRK",
"SOUNDTR[",
"SRTRONGS",
"STAND UP",
"STR END*",
"STR-8VA",
"STRANGER",
"STREENG",
"STREENGS",
"STRING 2",
"STRING 6",
"STRING 7",
"STRING 8",
"STRING S",
"STRING\"8",
"STRING-1",
"STRINGER",
"**A!A!**",
"2600-2",
"AGREBASS",
"ANTEATER",
"ARP-2",
"ATYPBASS",
"AXXE",
"BARISAX",
"BASS PAD",
"BASS SYN",
"BASS ZZT",
"BASS-11",
"BASSA",
"BASSCLAR",
"TUBULAR",
"BASSE OA",
"BASSGTAR",
"BASSHIPO",
"BASSHORN",
"BASSVIOL",
"BASSVOX",
"BIG PIK",
"BIRDLAND",
"BOLUBASS",
"BOTBASS",
"BOWBASS",
"BRAAS",
"BS/STRG*",
"UNI BASS",
"CLAVBASS",
"BOUBLEBS",
"DUCK 2",
"DUCKBASS",
"EARTHESS",
"ELC BASS",
"ELEC BS*",
"FANKNBAZ",
"FAZ BASS",
"FLOORIT",
"FRET EKO",
"FRET NOT",
"FUNK BAZ",
"HARMBAS5",
"HISBASS",
"HOTBODOM",
"JAN BASS",
"JOCKO",
"JOCKO 2",
"LEEDBASS",
"LUMPBASS",
"MINIBASS",
"MONO BS*",
"MOOGER",
"MOOOG_B",
"MUFFEL",
"NOISBASS",
"OCTABASS",
"ORBASS",
"OW BASS",
"PABASS*",
"PLUCK-BS",
"POLBASS1",
"POLYBASS",
"PUKBASS",
"R + B",
"RAGABASS",
"VELBASS",
"RUBBER",
"SEQUBASS",
"VELGROWL",
"SINCBASS",
"WAPBASS",
"SLAP 1",
"SLAP 2",
"SLIDER",
"SLOWBASS",
"SNTHBS1*",
"SOFTBASS",
"SPITBASS",
"SQUISBAZ",
"STAND UP",
"STBASS",
"STR.BASS",
"STRANGTK",
"STRIBASS",
"WET BAZ",
"STRINGBZ",
"SUGITA\"",
"SUPPORT",
"SWELLBAZ",
"SWP.BASS",
"SYBASS 2",
"SYN BS2*",
"SYN BS3*",
"SYN BS4*",
"SYNCBASS",
"TAURUS",
"TENU'OB2",
"WIPBASS",
"TIKBASS",
"AK-48",
"APORT",
"BALLGAME",
"BANJO",
"BASSDRUM",
"BDTH-1",
"BELL 1",
"BELLIKE",
"BELLS",
"BELLS-GS",
"BI-PLANE",
"BOTTLES",
"BTMEHRDR",
"BURST 1",
"CASCAD'4",
"CHIMES",
"CHIMES*",
"CHOPPERZ",
"COINOP 3",
"COPOLIPS",
"CRAZYMAN",
"CRICKET",
"CROZTALK",
"DB BELL",
"DIDIER",
"DREAMING",
"DRIFTER*",
"DRUMPOP",
"DUNDERZ",
"DX-PLUCK",
"TURBO",
"FALLCHYM",
"FIREBALL",
"FLAME ON",
"FLEXTONE",
"FMPLUKS",
"FURYO 2",
"G.S.2",
"G.S.3",
"GLOCK",
"WINDS",
"HAUNTING",
"HEART",
"ZAP",
"HORRORS",
"HOWITZER",
"HVN+HELL",
"INDIAN",
"TOP-GUN*",
"INSIDES",
"JETTZ 3",
"JUNKANOO",
"WETFEET",
"KINGONG",
"KONTAKTE",
"LCTRCUTE",
"WHIZZ",
"LFO ART",
"LFOMALET",
"LIFTOFF",
"LOOPBELL",
"LYLE 3 P",
"LYLES'",
"THUNDRUS",
"MACHINSM",
"MANIAC*",
"MARIMA",
"MOFO",
"MEMORIES",
"MOUNTAIN",
"MRIMBAH*",
"NASTEEZ",
"WARNINGS",
"NOISE-DN",
"NOISSWEP*",
"NOIZGATE",
"NTHENEWS",
"NUKE EM'",
"OCIEAN",
"OCEANWAV",
"OOZES 3",
"PHASES*",
"PINWHEEL",
"PLUCK",
"POLBELS2",
"POLNOISE",
"PORTAL",
"PSYCHYM",
"RAINECHO",
")RAPIST(",
"WATER",
"RUBRTOMS",
"SATURDAY",
"SCRATCH",
"SEQEUNCE",
"SGUSTING",
"SHRNKRAY",
"SIMONISK",
"SMASH*",
"SMPLTHIS",
"PAPANO 4",
"MIKPIANO",
"HONOCLAB",
"MR.ROGRS",
"MTL PNO*",
"MUSETTE",
"MUTDCLV*",
"MUTRONO",
"NYLNPIK*",
"NYLNPK2*",
"NYLON 12",
"OB8 JUMP",
"OBNOXVOX",
"OBXA-B5",
"ODX 7",
"OORGAN",
"LAZ HARP",
"ORGAN 9",
"ORGAN-1",
"ORGAN-1P",
"ORGAN-2",
"ORGANISM",
"AKOUSTIK",
"ORGNIZE*",
"P.ORGAN",
"P.ORGAN4",
"P.ORGAN5",
"PA ANO 5",
"HARPO",
"PAPANO 7",
"LULLABOX",
"PERCCLAV",
"PERCPNO",
"PIANITAR",
"PIANO",
"PIANO BO",
"PIANOLA",
"B-3.1",
"PINPIANO",
"PIPEORG.",
"PIPEORG:",
"PIPES",
"PIPSTRNG",
"PIRATES!",
"PNO-ELEC",
"POLPIANP",
"PRELUDE1",
"PRESLEZ1",
"PROFIT",
"PROPH W",
"PROPHET",
"RESPIANO",
"RMIPIANO",
"ROADS",
"SAL00N 5",
"SALOON 3",
"B-3.2",
"SALOON 7",
"SAMPLORG",
"SAMSGRND",
"SITAR I",
"SMTHSQ2*",
"SPANIEL",
"SPRPRTS*",
"B-3.3",
"B3+LSLIE",
"STRGTR2*",
"SYN CLAV",
"BELLS",
"SYNLUTH",
"BLABINET",
"SYNPIANO",
"CELESTE",
"CHIMES",
"TINEOUT",
"TONYPIAN",
"TOYPIANO",
"TWINSTRG",
"CHURCH",
"VIBECHOES",
"VIBES",
"CLAV B6",
"CLAVI 2",
"WA CLAB*",
"CLAVINET",
"WHAANO",
"WHY FM",
"CLICKORG",
"WURLI8",
"CLUBS",
"WURLY 2",
"WURLY 3",
"X-GRAND",
"XA'ORGAN",
"YOUREYES",
"ZITHER",
"CORDINE1",
"D\"AMMOND",
"GREEZY1",
"GRNDR 6*"
};
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.SimpleRobot;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the SimpleRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
//Alec Pierce and Derrick Lockwood
public class RobotTemplate extends SimpleRobot {
/**
* This function is called once each time the robot enters autonomous mode.
*/
public void autonomous() {
}
/**
* This function is called once each time the robot enters operator control.
*/
public void operatorControl() {
}
}
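/*
 * Illustrative usage sketch (not part of the original template): teams fill in the
 * mode methods above by overriding them. The example below is an assumption that
 * touches no robot hardware and only uses helpers inherited from RobotBase.
 *
 * public void autonomous() {
 *     System.out.println("Autonomous mode started");
 * }
 *
 * public void operatorControl() {
 *     while (isOperatorControl() && isEnabled()) {
 *         // read joysticks and command the drivetrain here
 *     }
 * }
 */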
|
package edu.wustl.catissuecore.util.listener;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;
import edu.wustl.catissuecore.util.global.Constants;
import edu.wustl.common.beans.SessionDataBean;
import edu.wustl.common.dao.DAOFactory;
import edu.wustl.common.dao.JDBCDAO;
import edu.wustl.common.security.PrivilegeManager;
import edu.wustl.common.util.dbManager.DAOException;
import edu.wustl.common.util.logger.Logger;
/**
* @author poornima_govindrao
*
* Listener for cleanup after session invalidates.
*/
public class CatissueCoreSessionListener implements HttpSessionListener{
public void sessionCreated(HttpSessionEvent arg0) {
}
//Cleanup after session invalidates.
public void sessionDestroyed(HttpSessionEvent arg0) {
HttpSession session = arg0.getSession();
SessionDataBean sessionData= (SessionDataBean)session.getAttribute(Constants.SESSION_DATA);
if(sessionData!=null)
cleanUp(sessionData,(String)session.getAttribute(Constants.RANDOM_NUMBER));
// To remove PrivilegeCache from the session, requires user LoginName
// Singleton instance of PrivilegeManager
if(sessionData != null)
{
PrivilegeManager privilegeManager = PrivilegeManager.getInstance();
privilegeManager.removePrivilegeCache(sessionData.getUserName());
}
}
private void cleanUp(SessionDataBean sessionData,String randomNumber)
{
//Delete Advance Query table if exists
//Advance Query table name with userID attached
String tempTableName = Constants.QUERY_RESULTS_TABLE+"_"+sessionData.getUserId();
try
{
JDBCDAO jdbcDao = (JDBCDAO)DAOFactory.getInstance().getDAO(Constants.JDBC_DAO);
jdbcDao.openSession(sessionData);
jdbcDao.delete(tempTableName);
jdbcDao.closeSession();
}
catch(DAOException ex)
{
Logger.out.error("Could not delete the Advance Search temporary table."+ex.getMessage(),ex);
}
String tempTableNameForQuery = Constants.TEMP_OUPUT_TREE_TABLE_NAME + sessionData.getUserId()+randomNumber;
try
{
JDBCDAO jdbcDao = (JDBCDAO)DAOFactory.getInstance().getDAO(Constants.JDBC_DAO);
jdbcDao.openSession(sessionData);
jdbcDao.delete(tempTableNameForQuery);
jdbcDao.closeSession();
}
catch(DAOException ex)
{
Logger.out.error("Could not delete the Query Module Search temporary table."+ex.getMessage(),ex);
}
}
}
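/*
 * Deployment sketch (an assumption, not taken from this project): an HttpSessionListener
 * like the one above only runs if it is registered in the web application's web.xml:
 *
 * <listener>
 *     <listener-class>edu.wustl.catissuecore.util.listener.CatissueCoreSessionListener</listener-class>
 * </listener>
 *
 * With that entry in place, sessionDestroyed() fires on logout or session timeout and the
 * temporary query tables created for that user are dropped.
 */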
|
package org.springframework.roo.addon.email;
import java.io.InputStream;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import org.springframework.roo.classpath.PhysicalTypeDetails;
import org.springframework.roo.classpath.PhysicalTypeIdentifier;
import org.springframework.roo.classpath.PhysicalTypeMetadata;
import org.springframework.roo.classpath.details.DefaultFieldMetadata;
import org.springframework.roo.classpath.details.DefaultMethodMetadata;
import org.springframework.roo.classpath.details.FieldMetadata;
import org.springframework.roo.classpath.details.MutableClassOrInterfaceTypeDetails;
import org.springframework.roo.classpath.details.annotations.AnnotatedJavaType;
import org.springframework.roo.classpath.details.annotations.AnnotationAttributeValue;
import org.springframework.roo.classpath.details.annotations.AnnotationMetadata;
import org.springframework.roo.classpath.details.annotations.DefaultAnnotationMetadata;
import org.springframework.roo.classpath.itd.InvocableMemberBodyBuilder;
import org.springframework.roo.metadata.MetadataService;
import org.springframework.roo.model.JavaSymbolName;
import org.springframework.roo.model.JavaType;
import org.springframework.roo.process.manager.FileManager;
import org.springframework.roo.process.manager.MutableFile;
import org.springframework.roo.project.Dependency;
import org.springframework.roo.project.Path;
import org.springframework.roo.project.PathResolver;
import org.springframework.roo.project.ProjectMetadata;
import org.springframework.roo.project.ProjectOperations;
import org.springframework.roo.support.lifecycle.ScopeDevelopment;
import org.springframework.roo.support.util.Assert;
import org.springframework.roo.support.util.TemplateUtils;
import org.springframework.roo.support.util.XmlUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* Provides email configuration operations.
*
* @author Stefan Schmidt
* @since 1.0
*/
@ScopeDevelopment
public class MailOperations {
Logger logger = Logger.getLogger(MailOperations.class.getName());
private FileManager fileManager;
private PathResolver pathResolver;
private MetadataService metadataService;
private ProjectOperations projectOperations;
public MailOperations(FileManager fileManager, PathResolver pathResolver, MetadataService metadataService, ProjectOperations projectOperations) {
Assert.notNull(fileManager, "File manager required");
Assert.notNull(pathResolver, "Path resolver required");
Assert.notNull(metadataService, "Metadata service required");
Assert.notNull(projectOperations, "Project operations required");
this.fileManager = fileManager;
this.pathResolver = pathResolver;
this.metadataService = metadataService;
this.projectOperations = projectOperations;
}
public boolean isInstallEmailAvailable() {
return getPathResolver() != null;
}
public boolean isManageEmailAvailable() {
return fileManager.exists(getPathResolver().getIdentifier(Path.SRC_MAIN_RESOURCES, "applicationContext.xml"));
}
public void installEmail(String hostServer, MailProtocol protocol, String port, String encoding, String username, String password) {
Assert.hasText(hostServer, "Host server name required");
String contextPath = pathResolver.getIdentifier(Path.SRC_MAIN_RESOURCES, "applicationContext.xml");
MutableFile contextMutableFile = null;
Document appCtx = null;
try {
if (fileManager.exists(contextPath)) {
contextMutableFile = fileManager.updateFile(contextPath);
appCtx = XmlUtils.getDocumentBuilder().parse(contextMutableFile.getInputStream());
} else {
				throw new IllegalStateException("Could not acquire the Spring applicationContext.xml file");
}
} catch (Exception e) {
throw new IllegalStateException(e);
}
Element root = (Element) appCtx.getFirstChild();
boolean installDependencies = true;
Element mailBean = XmlUtils.findFirstElement("//bean[@class='org.springframework.mail.javamail.JavaMailSenderImpl']", root);
if (mailBean != null) {
root.removeChild(mailBean);
installDependencies = false;
}
mailBean = appCtx.createElement("bean");
mailBean.setAttribute("class", "org.springframework.mail.javamail.JavaMailSenderImpl");
mailBean.setAttribute("id", "mailSender");
Element property = appCtx.createElement("property");
property.setAttribute("name", "host");
property.setAttribute("value", hostServer);
mailBean.appendChild(property);
root.appendChild(mailBean);
if (protocol != null) {
Element pElement = appCtx.createElement("property");
pElement.setAttribute("name", "protocol");
pElement.setAttribute("value", protocol.getProtocol());
mailBean.appendChild(pElement);
}
if (port != null && port.length() > 0) {
Element pElement = appCtx.createElement("property");
pElement.setAttribute("name", "port");
pElement.setAttribute("value", port);
mailBean.appendChild(pElement);
}
if (encoding != null && encoding.length() > 0) {
Element pElement = appCtx.createElement("property");
pElement.setAttribute("name", "encoding");
pElement.setAttribute("value", encoding);
mailBean.appendChild(pElement);
}
if (username != null && username.length() > 0) {
Element pElement = appCtx.createElement("property");
pElement.setAttribute("name", "username");
pElement.setAttribute("value", username);
mailBean.appendChild(pElement);
}
if (password != null && password.length() > 0) {
Element pElement = appCtx.createElement("property");
pElement.setAttribute("name", "password");
pElement.setAttribute("value", password);
mailBean.appendChild(pElement);
if(MailProtocol.SMTP.equals(protocol)) {
Element javaMailProperties = appCtx.createElement("property");
javaMailProperties.setAttribute("name", "javaMailProperties");
Element props = appCtx.createElement("props");
javaMailProperties.appendChild(props);
Element prop = appCtx.createElement("prop");
prop.setAttribute("key", "mail.smtp.auth");
prop.setTextContent("true");
props.appendChild(prop);
Element prop2 = appCtx.createElement("prop");
prop2.setAttribute("key", "mail.smtp.starttls.enable");
prop2.setTextContent("true");
props.appendChild(prop2);
mailBean.appendChild(javaMailProperties);
}
}
XmlUtils.writeXml(contextMutableFile.getOutputStream(), appCtx);
if (installDependencies) {
updateDependencies();
}
}
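	/*
	 * Sketch of the bean definition installEmail(..) writes into applicationContext.xml.
	 * The ${...} values are placeholders for the method arguments; protocol, port, encoding,
	 * username and password are only added when supplied, and the javaMailProperties block
	 * is only emitted for SMTP with a password:
	 *
	 * <bean id="mailSender" class="org.springframework.mail.javamail.JavaMailSenderImpl">
	 *     <property name="host" value="${hostServer}"/>
	 *     <property name="protocol" value="smtp"/>
	 *     <property name="port" value="${port}"/>
	 *     <property name="username" value="${username}"/>
	 *     <property name="password" value="${password}"/>
	 *     <property name="javaMailProperties">
	 *         <props>
	 *             <prop key="mail.smtp.auth">true</prop>
	 *             <prop key="mail.smtp.starttls.enable">true</prop>
	 *         </props>
	 *     </property>
	 * </bean>
	 */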
public void configureTemplateMessage(String from, String subject) {
String contextPath = pathResolver.getIdentifier(Path.SRC_MAIN_RESOURCES, "applicationContext.xml");
MutableFile contextMutableFile = null;
Document appCtx = null;
try {
if (fileManager.exists(contextPath)) {
contextMutableFile = fileManager.updateFile(contextPath);
appCtx = XmlUtils.getDocumentBuilder().parse(contextMutableFile.getInputStream());
} else {
				throw new IllegalStateException("Could not acquire the Spring applicationContext.xml file");
}
} catch (Exception e) {
throw new IllegalStateException(e);
}
Element root = (Element) appCtx.getFirstChild();
if ((null != from && from.length() > 0) || (null != subject && subject.length() > 0)) {
Element smmBean = XmlUtils.findFirstElement("//bean[@class='org.springframework.mail.SimpleMailMessage']", root);
if (smmBean == null) {
smmBean = appCtx.createElement("bean");
smmBean.setAttribute("class", "org.springframework.mail.SimpleMailMessage");
smmBean.setAttribute("id", "templateMessage");
}
if (null != from && from.length() > 0) {
Element smmProperty = XmlUtils.findFirstElement("//property[@name='from']", root);
if (smmProperty != null) {
smmBean.removeChild(smmProperty);
}
smmProperty = appCtx.createElement("property");
smmProperty.setAttribute("value", from);
smmProperty.setAttribute("name", "from");
smmBean.appendChild(smmProperty);
}
if (null != subject && subject.length() > 0) {
Element smmProperty = XmlUtils.findFirstElement("//property[@name='subject']", root);
if (smmProperty != null) {
smmBean.removeChild(smmProperty);
}
smmProperty = appCtx.createElement("property");
smmProperty.setAttribute("value", subject);
smmProperty.setAttribute("name", "subject");
smmBean.appendChild(smmProperty);
}
root.appendChild(smmBean);
XmlUtils.writeXml(contextMutableFile.getOutputStream(), appCtx);
}
}
public void injectEmailTemplate(JavaType targetType, JavaSymbolName fieldName) {
Assert.notNull(targetType, "Java type required");
Assert.notNull(fieldName, "Field name required");
int modifier = Modifier.PRIVATE;
modifier |= Modifier.TRANSIENT;
List<AnnotationMetadata> annotations = new ArrayList<AnnotationMetadata>();
annotations.add(new DefaultAnnotationMetadata(new JavaType("org.springframework.beans.factory.annotation.Autowired"), new ArrayList<AnnotationAttributeValue<?>>()));
FieldMetadata fieldMetadata = new DefaultFieldMetadata(PhysicalTypeIdentifier.createIdentifier(targetType, Path.SRC_MAIN_JAVA), modifier, fieldName, new JavaType("org.springframework.mail.MailSender"), null, annotations);
// Obtain the physical type and itd mutable details
PhysicalTypeMetadata ptm = (PhysicalTypeMetadata) metadataService.get(fieldMetadata.getDeclaredByMetadataId());
Assert.notNull(ptm, "Java source code unavailable for type " + PhysicalTypeIdentifier.getFriendlyName(fieldMetadata.getDeclaredByMetadataId()));
PhysicalTypeDetails ptd = ptm.getPhysicalTypeDetails();
Assert.notNull(ptd, "Java source code details unavailable for type " + PhysicalTypeIdentifier.getFriendlyName(fieldMetadata.getDeclaredByMetadataId()));
Assert.isInstanceOf(MutableClassOrInterfaceTypeDetails.class, ptd, "Java source code is immutable for type " + PhysicalTypeIdentifier.getFriendlyName(fieldMetadata.getDeclaredByMetadataId()));
MutableClassOrInterfaceTypeDetails mutableTypeDetails = (MutableClassOrInterfaceTypeDetails) ptd;
mutableTypeDetails.addField(fieldMetadata);
String contextPath = pathResolver.getIdentifier(Path.SRC_MAIN_RESOURCES, "applicationContext.xml");
MutableFile contextMutableFile = null;
Document appCtx = null;
try {
if (fileManager.exists(contextPath)) {
contextMutableFile = fileManager.updateFile(contextPath);
appCtx = XmlUtils.getDocumentBuilder().parse(contextMutableFile.getInputStream());
} else {
				throw new IllegalStateException("Could not acquire the Spring applicationContext.xml file");
}
} catch (Exception e) {
throw new IllegalStateException(e);
}
Element root = (Element) appCtx.getFirstChild();
Element smmBean = XmlUtils.findFirstElement("//bean[@class='org.springframework.mail.SimpleMailMessage']", root);
//create some method content to get the user started
InvocableMemberBodyBuilder bodyBuilder = new InvocableMemberBodyBuilder();
String declaredByMetadataId = PhysicalTypeIdentifier.createIdentifier(targetType, Path.SRC_MAIN_JAVA);
List<AnnotationMetadata> smmAnnotations = new ArrayList<AnnotationMetadata>();
List<AnnotatedJavaType> paramTypes = new ArrayList<AnnotatedJavaType>();
List<JavaSymbolName> paramNames = new ArrayList<JavaSymbolName>();
if (smmBean != null) {
smmAnnotations.add(new DefaultAnnotationMetadata(new JavaType("org.springframework.beans.factory.annotation.Autowired"), new ArrayList<AnnotationAttributeValue<?>>()));
FieldMetadata smmFieldMetadata = new DefaultFieldMetadata(PhysicalTypeIdentifier.createIdentifier(targetType, Path.SRC_MAIN_JAVA), modifier, new JavaSymbolName("message"), new JavaType("org.springframework.mail.SimpleMailMessage"), null, smmAnnotations);
mutableTypeDetails.addField(smmFieldMetadata);
} else {
bodyBuilder.appendFormalLine("org.springframework.mail.SimpleMailMessage simpleMailMessage = new org.springframework.mail.SimpleMailMessage();");
paramTypes.add(new AnnotatedJavaType(new JavaType(String.class.getName()), new ArrayList<AnnotationMetadata>()));
paramNames.add(new JavaSymbolName("mailFrom"));
bodyBuilder.appendFormalLine("simpleMailMessage.setFrom(mailFrom);");
paramTypes.add(new AnnotatedJavaType(new JavaType(String.class.getName()), new ArrayList<AnnotationMetadata>()));
paramNames.add(new JavaSymbolName("subject"));
bodyBuilder.appendFormalLine("simpleMailMessage.setSubject(subject);");
}
paramTypes.add(new AnnotatedJavaType(new JavaType(String.class.getName()), new ArrayList<AnnotationMetadata>()));
paramNames.add(new JavaSymbolName("mailTo"));
bodyBuilder.appendFormalLine("simpleMailMessage.setTo(mailTo);");
paramTypes.add(new AnnotatedJavaType(new JavaType(String.class.getName()), new ArrayList<AnnotationMetadata>()));
paramNames.add(new JavaSymbolName("message"));
bodyBuilder.appendFormalLine("simpleMailMessage.setText(message);");
bodyBuilder.newLine();
		bodyBuilder.appendFormalLine(fieldName + ".send(simpleMailMessage);"); // MailSender exposes send(..), not sendMessage(..)
mutableTypeDetails.addMethod(new DefaultMethodMetadata(declaredByMetadataId, Modifier.PUBLIC, new JavaSymbolName("sendMessage"), JavaType.VOID_PRIMITIVE, paramTypes, paramNames, new ArrayList<AnnotationMetadata>(), bodyBuilder.getOutput()));
}
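	/*
	 * Sketch of what injectEmailTemplate(..) adds to the target class when no
	 * templateMessage bean exists (it mirrors the bodyBuilder lines above; "mailSender"
	 * stands in for the field name passed by the caller):
	 *
	 * @Autowired
	 * private transient org.springframework.mail.MailSender mailSender;
	 *
	 * public void sendMessage(String mailFrom, String subject, String mailTo, String message) {
	 *     org.springframework.mail.SimpleMailMessage simpleMailMessage = new org.springframework.mail.SimpleMailMessage();
	 *     simpleMailMessage.setFrom(mailFrom);
	 *     simpleMailMessage.setSubject(subject);
	 *     simpleMailMessage.setTo(mailTo);
	 *     simpleMailMessage.setText(message);
	 *     mailSender.send(simpleMailMessage);
	 * }
	 */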
private void updateDependencies() {
InputStream templateInputStream = TemplateUtils.getTemplate(getClass(), "dependencies.xml");
Assert.notNull(templateInputStream, "Could not acquire dependencies.xml file");
Document dependencyDoc;
try {
dependencyDoc = XmlUtils.getDocumentBuilder().parse(templateInputStream);
} catch (Exception e) {
throw new IllegalStateException(e);
}
Element dependenciesElement = (Element) dependencyDoc.getFirstChild();
List<Element> dependencies = XmlUtils.findElements("/dependencies/email/dependency", dependenciesElement);
for(Element dependency : dependencies) {
projectOperations.dependencyUpdate(new Dependency(dependency));
}
}
private PathResolver getPathResolver() {
ProjectMetadata projectMetadata = (ProjectMetadata) metadataService.get(ProjectMetadata.getProjectIdentifier());
if (projectMetadata == null) {
return null;
}
return projectMetadata.getPathResolver();
}
}
|
package sendable;
import java.io.Serializable;
import java.net.Socket;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
public class Client implements Serializable {
private static final long serialVersionUID = 296589332172289191L;
Date registrationDate = null;
Date lastOnline = null;
Date lastMessageSent = null;
private int localPort;
private int version;
String text = null;
String name = null;
String middlename = null;
String lastname = null;
String membertype = null;
String login = null;
String password = null;
String cryptoPassword = null;
String email = null;
String lastIp = null;
String sex = null;
String college = null;
String startTrimester = null;
String city = null;
String state = null;
String country = null;
String course = null;
String infnetMail = null;
String whatsapp = null;
String facebook = null;
String twitter = null;
String instagram = null;
String googleplus = null;
String youtube = null;
String msn = null;
Message lastMessage = null;
List<Message> unSentMsgs = null;
Long id = null;
Long msgCount = null;
Long onlinetime = null;
Long messagesSent = null;
Socket sock = null;
String ip = null;
Integer port = null;
String aux1 = null;
String aux2 = null;
String aux3 = null;
String aux4 = null;
public Client(Socket sock) {
this.sock = sock;
}
public Client() {
}
public Client (Socket sock, Message m) {
this.sock = sock;
this.text = m.getText();
this.name = m.getOwnerLogin();
this.membertype = m.getType();
this.lastMessage = m;
}
public Client(String string) {
this.name = string;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Long getMsgCount() {
return msgCount;
}
public String getLogin() {
return login;
}
public void setLogin(String login) {
this.login = login;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getCryptoPassword() {
return cryptoPassword;
}
public void setCryptoPassword(String cryptoPassword) {
this.cryptoPassword = cryptoPassword;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public void setMsgCount(Long msgCount) {
this.msgCount = msgCount + 1L;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getMiddlename() {
return middlename;
}
public void setMiddlename(String middlename) {
this.middlename = middlename;
}
public String getLastname() {
return lastname;
}
public void setLastname(String lastname) {
this.lastname = lastname;
}
public String getMembertype() {
return membertype;
}
public void setMembertype(String membertype) {
this.membertype = membertype;
}
public Date getRegistrationDate() {
return registrationDate;
}
public void setRegistrationDate(Date membersince) {
this.registrationDate = membersince;
}
public String getTargetIp() {
return ip;
}
public void setTargetIp(String ip) {
this.ip = ip;
}
public Integer getTargetPort() {
return port;
}
public void setTargetPort(Integer port) {
this.port = port;
}
public Long getOnlinetime() {
return onlinetime;
}
public void setOnlinetime(Long onlinetime) {
this.onlinetime = onlinetime;
}
public Date getLastOnline() {
return lastOnline;
}
public void setLastOnline() {
this.lastOnline = Calendar.getInstance().getTime();
}
public Socket getSock() {
return sock;
}
public void setSock(Socket sock) {
this.sock = sock;
}
public Message getLastMessage() {
return lastMessage;
}
public void setLastMessage(Message lastMessage) {
this.lastMessage = lastMessage;
}
public Date getLastMessageSent() {
return lastMessageSent;
}
public void setLastMessageTime(Date lastMessageTime) {
this.lastMessageSent = lastMessageTime;
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
@Override
public String toString() {
if (this.getCollege().equalsIgnoreCase("infnet")) {
return this.name + " / " + this.getCourse() + this.getStartTrimester().substring(0, 4);
} else {
return this.name + " / " + this.getCollege();
}
}
public int getLocalPort() {
return localPort;
}
public void setLocalPort(int localPort) {
this.localPort = localPort;
}
public String getLastIp() {
return lastIp;
}
public void setLastIp(String lastIp) {
this.lastIp = lastIp;
}
public String getSex() {
return sex;
}
public void setSex(String sex) {
this.sex = sex;
}
public List<Message> getUnSentMsgs() {
return unSentMsgs;
}
public void setUnSentMsgs(List<Message> unSentMsgs) {
this.unSentMsgs = unSentMsgs;
}
public Long getMessagesSent() {
return messagesSent;
}
public void setMessagesSent(Long messagesSent) {
this.messagesSent = messagesSent;
}
public String getIp() {
return ip;
}
public void setIp(String ip) {
this.ip = ip;
}
public void setLastOnline(Date lastOnline) {
this.lastOnline = lastOnline;
}
public void setLastMessageSent(Date lastMessageSent) {
this.lastMessageSent = lastMessageSent;
}
public String getCollege() {
return college;
}
public void setCollege(String college) {
this.college = college;
}
public String getStartTrimester() {
return startTrimester;
}
public void setStartTrimester(String startTrimester) {
this.startTrimester = startTrimester;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getCountry() {
return country;
}
public void setCountry(String country) {
this.country = country;
}
public String getCourse() {
return course;
}
public void setCourse(String course) {
this.course = course;
}
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
public String getAux1() {
return aux1;
}
public void setAux1(String aux1) {
this.aux1 = aux1;
}
public String getAux2() {
return aux2;
}
public void setAux2(String aux2) {
this.aux2 = aux2;
}
public String getAux3() {
return aux3;
}
public void setAux3(String aux3) {
this.aux3 = aux3;
}
public String getAux4() {
return aux4;
}
public void setAux4(String aux4) {
this.aux4 = aux4;
}
public String getInfnetMail() {
return infnetMail;
}
public void setInfnetMail(String infnetMail) {
this.infnetMail = infnetMail;
}
public String getWhatsapp() {
return whatsapp;
}
public void setWhatsapp(String whatsapp) {
this.whatsapp = whatsapp;
}
public String getFacebook() {
return facebook;
}
public void setFacebook(String facebook) {
this.facebook = facebook;
}
public String getTwitter() {
return twitter;
}
public void setTwitter(String twitter) {
this.twitter = twitter;
}
public String getInstagram() {
return instagram;
}
public void setInstagram(String instagram) {
this.instagram = instagram;
}
public String getGoogleplus() {
return googleplus;
}
public void setGoogleplus(String googleplus) {
this.googleplus = googleplus;
}
public String getYoutube() {
return youtube;
}
public void setYoutube(String youtube) {
this.youtube = youtube;
}
public String getMsn() {
return msn;
}
public void setMsn(String msn) {
this.msn = msn;
}
}
|
package ti.modules.titanium.ui.widget;
import java.util.ArrayList;
import java.lang.Math;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollProxy;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.titanium.TiC;
import org.appcelerator.titanium.proxy.TiViewProxy;
import org.appcelerator.titanium.util.TiConvert;
import org.appcelerator.titanium.util.TiEventHelper;
import org.appcelerator.titanium.view.TiCompositeLayout;
import org.appcelerator.titanium.view.TiCompositeLayout.LayoutParams;
import org.appcelerator.titanium.view.TiUIView;
import ti.modules.titanium.ui.ScrollableViewProxy;
import android.app.Activity;
import android.content.Context;
import android.os.Parcelable;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
public class TiUIScrollableView extends TiUIView
{
private static final String TAG = "TiUIScrollableView";
private static final int PAGE_LEFT = 200;
private static final int PAGE_RIGHT = 201;
private final ViewPager mPager;
private final ArrayList<TiViewProxy> mViews;
private final ViewPagerAdapter mAdapter;
private final TiCompositeLayout mContainer;
private final RelativeLayout mPagingControl;
private int mCurIndex = -1;
public TiUIScrollableView(ScrollableViewProxy proxy)
{
super(proxy);
Activity activity = proxy.getActivity();
mViews = new ArrayList<TiViewProxy>();
mAdapter = new ViewPagerAdapter(activity, mViews);
mPager = buildViewPager(activity, mAdapter);
mContainer = new TiViewPagerLayout(activity);
mContainer.addView(mPager, buildFillLayoutParams());
mPagingControl = buildPagingControl(activity);
mContainer.addView(mPagingControl, buildFillLayoutParams());
setNativeView(mContainer);
}
private ViewPager buildViewPager(Context context, ViewPagerAdapter adapter)
{
ViewPager pager = new ViewPager(context);
pager.setAdapter(adapter);
pager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener()
{
private boolean isValidScroll = false;
private boolean justFiredDragEnd = false;
@Override
public void onPageScrollStateChanged(int scrollState)
{
if ((scrollState == ViewPager.SCROLL_STATE_IDLE) && isValidScroll) {
int oldIndex = mCurIndex;
if (mCurIndex >= 0) {
if (oldIndex >=0 && oldIndex != mCurIndex && oldIndex < mViews.size()) {
// Don't know what these focused and unfocused
// events are good for, but they were in our previous
// scrollable implementation.
// cf. https://github.com/appcelerator/titanium_mobile/blob/20335d8603e2708b59a18bafbb91b7292278de8e/android/modules/ui/src/ti/modules/titanium/ui/widget/TiScrollableView.java#L260
TiEventHelper.fireFocused(mViews.get(oldIndex));
}
TiEventHelper.fireUnfocused(mViews.get(mCurIndex));
if (oldIndex >= 0) {
// oldIndex will be -1 if the view has just
// been created and is setting currentPage
// to something other than 0. In that case we
// don't want a scrollEnd to fire.
((ScrollableViewProxy)proxy).fireScrollEnd(mCurIndex, mViews.get(mCurIndex));
}
if (shouldShowPager()) {
showPager();
}
}
// If we don't use this state variable to check if it's a valid
// scroll, this event will fire when the view is first created
// because on creation, the scroll state is initialized to
// `idle` and this handler is called.
isValidScroll = false;
} else if (scrollState == ViewPager.SCROLL_STATE_SETTLING) {
((ScrollableViewProxy)proxy).fireDragEnd(mCurIndex, mViews.get(mCurIndex));
// Note that we just fired a dragEnd so the `onPageSelected`
// handler below doesn't fire a `scrollEnd`. Read below comment.
justFiredDragEnd = true;
}
}
@Override
public void onPageSelected(int page)
{
// If we didn't just fire a `dragEnd` event then this is the case
// where a user drags the view and settles it on a different view.
// Since the OS settling logic is never run, the
// `onPageScrollStateChanged` handler is never run, and therefore
// we forgot to inform the Javascripters that the user just scrolled
// their thing.
if (!justFiredDragEnd && mCurIndex != -1) {
((ScrollableViewProxy)proxy).fireScrollEnd(mCurIndex, mViews.get(mCurIndex));
if (shouldShowPager()) {
showPager();
}
}
}
@Override
public void onPageScrolled(int positionRoundedDown, float positionOffset, int positionOffsetPixels)
{
isValidScroll = true;
// When we touch and drag the view and hold it inbetween the second
// and third sub-view, this function will have been called with values
// similar to:
// positionRoundedDown: 1
// positionOffset: 0.5
// ie, the first parameter is always rounded down; the second parameter
// is always just an offset between the current and next view, it does
// not take into account the current view.
// If we add positionRoundedDown to positionOffset, positionOffset will
// have the 'correct' value; ie, will be a natural number when we're on
// one particular view, something.5 when inbetween views, etc.
float positionFloat = positionOffset + positionRoundedDown;
// `positionFloat` can now be used to calculate the correct value for
// the current index. We add 0.5 so that positionFloat will be rounded
// half up; ie, if it has a value of 1.5, it will be rounded up to 2; if
// it has a value of 1.4, it will be rounded down to 1.
mCurIndex = (int) Math.floor(positionFloat + 0.5);
((ScrollableViewProxy)proxy).fireScroll(mCurIndex, positionFloat, mViews.get(mCurIndex));
// Note that we didn't just fire a dragEnd. See the above comment
// in `onPageSelected`.
justFiredDragEnd = false;
}
});
return pager;
}
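	// Illustrative helper (a sketch only, not called anywhere in this class): how the
	// current index is derived from the onPageScrolled parameters described above.
	// For example, positionRoundedDown = 1 with positionOffset = 0.5f yields 2
	// (rounded half up), while an offset of 0.4f yields 1.
	private static int computeCurrentIndexSketch(int positionRoundedDown, float positionOffset)
	{
		float positionFloat = positionOffset + positionRoundedDown;
		return (int) Math.floor(positionFloat + 0.5);
	}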
private boolean shouldShowPager()
{
Object showPagingControl = proxy.getProperty(TiC.PROPERTY_SHOW_PAGING_CONTROL);
if (showPagingControl != null) {
return TiConvert.toBoolean(showPagingControl);
} else {
return false;
}
}
private TiCompositeLayout.LayoutParams buildFillLayoutParams()
{
TiCompositeLayout.LayoutParams params = new TiCompositeLayout.LayoutParams();
params.autoFillsHeight = true;
params.autoFillsWidth = true;
return params;
}
private RelativeLayout buildPagingControl(Context context)
{
RelativeLayout layout = new RelativeLayout(context);
layout.setFocusable(false);
layout.setFocusableInTouchMode(false);
TiArrowView left = new TiArrowView(context);
left.setVisibility(View.INVISIBLE);
left.setId(PAGE_LEFT);
left.setMinimumWidth(80); // TODO density?
left.setMinimumHeight(80);
left.setOnClickListener(new OnClickListener(){
public void onClick(View v)
{
movePrevious();
}});
RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
params.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
params.addRule(RelativeLayout.CENTER_VERTICAL);
layout.addView(left, params);
TiArrowView right = new TiArrowView(context);
right.setLeft(false);
right.setVisibility(View.INVISIBLE);
right.setId(PAGE_RIGHT);
right.setMinimumWidth(80); // TODO density?
right.setMinimumHeight(80);
right.setOnClickListener(new OnClickListener(){
public void onClick(View v)
{
moveNext();
}});
params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT);
params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
params.addRule(RelativeLayout.CENTER_VERTICAL);
layout.addView(right, params);
layout.setVisibility(View.GONE);
return layout;
}
@Override
public void processProperties(KrollDict d)
{
if (d.containsKey(TiC.PROPERTY_VIEWS)) {
setViews(d.get(TiC.PROPERTY_VIEWS));
}
if (d.containsKey(TiC.PROPERTY_CURRENT_PAGE)) {
int page = TiConvert.toInt(d, TiC.PROPERTY_CURRENT_PAGE);
if (page > 0) {
setCurrentPage(page);
} else {
mCurIndex = 0;
}
} else {
mCurIndex = 0;
}
if (d.containsKey(TiC.PROPERTY_SHOW_PAGING_CONTROL)) {
if (TiConvert.toBoolean(d, TiC.PROPERTY_SHOW_PAGING_CONTROL)) {
showPager();
}
}
super.processProperties(d);
}
@Override
public void propertyChanged(String key, Object oldValue, Object newValue,
KrollProxy proxy)
{
if (TiC.PROPERTY_CURRENT_PAGE.equals(key)) {
setCurrentPage(TiConvert.toInt(newValue));
} else if (TiC.PROPERTY_SHOW_PAGING_CONTROL.equals(key)) {
boolean show = TiConvert.toBoolean(newValue);
if (show) {
showPager();
} else {
hidePager();
}
} else {
super.propertyChanged(key, oldValue, newValue, proxy);
}
}
public void addView(TiViewProxy proxy)
{
mViews.add(proxy);
mAdapter.notifyDataSetChanged();
}
public void removeView(TiViewProxy proxy)
{
if (mViews.contains(proxy)) {
mViews.remove(proxy);
mAdapter.notifyDataSetChanged();
}
}
public void showPager()
{
View v = null;
v = mContainer.findViewById(PAGE_LEFT);
if (v != null) {
v.setVisibility(mCurIndex > 0 ? View.VISIBLE : View.INVISIBLE);
}
v = mContainer.findViewById(PAGE_RIGHT);
if (v != null) {
v.setVisibility(mCurIndex < (mViews.size() - 1) ? View.VISIBLE : View.INVISIBLE);
}
mPagingControl.setVisibility(View.VISIBLE);
((ScrollableViewProxy) proxy).setPagerTimeout();
}
public void hidePager()
{
mPagingControl.setVisibility(View.INVISIBLE);
}
public void moveNext()
{
move(mCurIndex + 1);
}
public void movePrevious()
{
move(mCurIndex - 1);
}
private void move(int index)
{
if (index < 0 || index >= mViews.size()) {
Log.w(TAG, "Request to move to index " + index+ " ignored, as it is out-of-bounds.");
return;
}
mPager.setCurrentItem(index);
}
public void scrollTo(Object view)
{
if (view instanceof Number) {
move(((Number) view).intValue());
} else if (view instanceof TiViewProxy) {
move(mViews.indexOf(view));
}
}
public int getCurrentPage()
{
return mCurIndex;
}
public void setCurrentPage(Object view)
{
scrollTo(view);
}
private void clearViewsList()
{
if (mViews == null || mViews.size() == 0) {
return;
}
for (TiViewProxy viewProxy : mViews) {
viewProxy.releaseViews();
}
mViews.clear();
}
public void setViews(Object viewsObject)
{
boolean changed = false;
clearViewsList();
if (viewsObject instanceof Object[]) {
Object[] views = (Object[])viewsObject;
for (int i = 0; i < views.length; i++) {
if (views[i] instanceof TiViewProxy) {
TiViewProxy tv = (TiViewProxy)views[i];
mViews.add(tv);
changed = true;
}
}
}
if (changed) {
mAdapter.notifyDataSetChanged();
}
}
public ArrayList<TiViewProxy> getViews()
{
return mViews;
}
@Override
public void release()
{
if (mPager != null) {
			for (int i = mPager.getChildCount() - 1; i >= 0; i--) {
				mPager.removeViewAt(i);
}
}
if (mViews != null) {
for (TiViewProxy viewProxy : mViews) {
viewProxy.releaseViews();
}
mViews.clear();
}
super.release();
}
public static class ViewPagerAdapter extends PagerAdapter
{
private final ArrayList<TiViewProxy> mViewProxies;
public ViewPagerAdapter(Activity activity, ArrayList<TiViewProxy> viewProxies)
{
mViewProxies = viewProxies;
}
@Override
public void destroyItem(View container, int position, Object object)
{
((ViewPager) container).removeView((View) object);
if (position < mViewProxies.size()) {
TiViewProxy proxy = mViewProxies.get(position);
proxy.releaseViews();
}
}
@Override
public void finishUpdate(View container) {}
@Override
public int getCount()
{
return mViewProxies.size();
}
@Override
public Object instantiateItem(View container, int position)
{
ViewPager pager = (ViewPager) container;
TiViewProxy tiProxy = mViewProxies.get(position);
TiUIView tiView = tiProxy.getOrCreateView();
View view = tiView.getNativeView();
if (view.getParent() != null) {
pager.removeView(view);
}
if (position < pager.getChildCount()) {
pager.addView(view, position);
} else {
pager.addView(view);
}
return view;
}
@Override
public boolean isViewFromObject(View view, Object obj)
{
return (obj instanceof View && view.equals(obj));
}
@Override
public void restoreState(Parcelable state, ClassLoader loader) {}
@Override
public Parcelable saveState() {return null;}
@Override
public void startUpdate(View container) {}
@Override
public int getItemPosition(Object object)
{
if (!mViewProxies.contains(object)) {
return POSITION_NONE;
} else {
return POSITION_UNCHANGED;
}
}
}
public class TiViewPagerLayout extends TiCompositeLayout
{
public TiViewPagerLayout(Context context)
{
super(context, proxy);
setFocusable(true);
setFocusableInTouchMode(true);
setDescendantFocusability(ViewGroup.FOCUS_AFTER_DESCENDANTS);
}
@Override
public boolean onTrackballEvent(MotionEvent event)
{
// Any trackball activity should show the pager.
if (shouldShowPager() && mPagingControl.getVisibility() != View.VISIBLE) {
showPager();
}
return super.onTrackballEvent(event);
}
@Override
public boolean dispatchKeyEvent(KeyEvent event)
{
boolean handled = false;
if (event.getAction() == KeyEvent.ACTION_DOWN) {
switch (event.getKeyCode()) {
case KeyEvent.KEYCODE_DPAD_LEFT: {
movePrevious();
handled = true;
break;
}
case KeyEvent.KEYCODE_DPAD_RIGHT: {
moveNext();
handled = true;
break;
}
}
}
return handled || super.dispatchKeyEvent(event);
}
}
}
|
package liv;
import java.util.Scanner;
/**
* class EingabeEAN
*
* Project: Liv - Lebensmittelinhaltverifizierer
*
* @author team equal-IT, team@equal-it.de, Annes Baustelle
* @version 00.00.02 2016/05/11
*
*/
public class EingabeEAN {
// HEAD
    // Methods that read an EAN entered via the console belong here
public static Scanner einlesen(){
Scanner ean = new Scanner(System.in);
System.out.println("Eingabe: ");
String eingabe = ean.nextLine();
System.out.println("Ausgabe: " + eingabe);
return ean;
    } // einlesen method
} //class EingabeEAN
// or, alternatively:
//public Scanner einlesen2(){
//Scanner ean2 = new Scanner(System.in);
//System.out.print("Bitte EAN eingeben: " + ean2);
//String eingabe = ean2.nextLine();
//System.out.println("Eingegebene EAN: " + eingabe);
//return ean2;
// } // einlesen2 method
// does not work:
// String ean;
// BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
// System.out.println("Eingabe :");
// ean = input.readLine();
// System.out.println("Ausgabe: "+text);
// Methods that read an EAN entered via the console (as a String) belong here
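// A working sketch of the BufferedReader variant above (an assumption: it returns the
// entered EAN as a String and handles the IOException that readLine() declares; it
// also needs java.io.BufferedReader, java.io.InputStreamReader and java.io.IOException imports):
//
// public static String einlesenBufferedReader() {
//     try {
//         BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
//         System.out.println("Eingabe: ");
//         String ean = input.readLine();
//         System.out.println("Ausgabe: " + ean);
//         return ean;
//     } catch (IOException e) {
//         e.printStackTrace();
//         return null;
//     }
// }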
|
package com.capitalone.dashboard.webhook.github;
import com.capitalone.dashboard.repository.CollectorItemRepository;
import com.capitalone.dashboard.settings.ApiSettings;
import com.capitalone.dashboard.client.RestClient;
import com.capitalone.dashboard.model.webhook.github.GitHubParsed;
import com.capitalone.dashboard.misc.HygieiaException;
import com.capitalone.dashboard.model.Collector;
import com.capitalone.dashboard.model.CollectorItem;
import com.capitalone.dashboard.model.Commit;
import com.capitalone.dashboard.model.CommitType;
import com.capitalone.dashboard.model.GitRequest;
import com.capitalone.dashboard.model.webhook.github.GitHubRepo;
import com.capitalone.dashboard.repository.CommitRepository;
import com.capitalone.dashboard.repository.GitRequestRepository;
import com.capitalone.dashboard.service.CollectorService;
import com.capitalone.dashboard.util.Supplier;
import com.capitalone.dashboard.webhook.settings.GitHubWebHookSettings;
import com.capitalone.dashboard.webhook.settings.WebHookSettings;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.bson.types.ObjectId;
import org.joda.time.DateTime;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.ParseException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.web.client.RestOperations;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.regex.Pattern;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class GitHubCommitV3Test {
private static final Log LOG = LogFactory.getLog(GitHubCommitV3Test.class);
@Mock
private CollectorService collectorService;
@Mock
private CommitRepository commitRepository;
@Mock
private GitRequestRepository gitRequestRepository;
@Mock
private CollectorItemRepository collectorItemRepository;
@Mock
private ApiSettings apiSettings;
@Mock
private Supplier<RestOperations> restOperationsSupplier;
private GitHubCommitV3 gitHubCommitV3;
private RestClient restClient;
@Before
public void init() {
RestClient restClientTemp = new RestClient(restOperationsSupplier);
restClient = Mockito.spy(restClientTemp);
gitHubCommitV3 = new GitHubCommitV3 (collectorService, restClient, commitRepository, gitRequestRepository, collectorItemRepository, apiSettings);
}
@Test
public void getCommitsTest() throws HygieiaException, ParseException {
GitHubCommitV3 gitHubCommitV3 = Mockito.spy(this.gitHubCommitV3);
String repoUrl = "http://hostName/OrgName/OwnerName/RepoName";
String branch = "master";
Collector collector = gitHubCommitV3.getCollector();
String collectorId = createGuid("0123456789abcdef");
collector.setId(new ObjectId(collectorId));
CollectorItem collectorItem = gitHubCommitV3.buildCollectorItem(new ObjectId(collectorId), repoUrl, branch);
String collectorItemId = createGuid("0123456789abcdee");
collectorItem.setId(new ObjectId(collectorItemId));
List<Map> commitsMapList = makeCommitsList();
when(collectorService.createCollector(anyObject())).thenReturn(collector);
when(gitHubCommitV3.buildCollectorItem(anyObject(), anyString(), anyString())).thenReturn(collectorItem);
when(collectorService.createCollectorItem(anyObject())).thenReturn(collectorItem);
try {
when(gitHubCommitV3.getCollectorItem(anyString(), anyString())).thenReturn(collectorItem);
} catch (HygieiaException e) {
LOG.info(e.getMessage());
}
when(apiSettings.getWebHook()).thenReturn(makeWebHookSettings());
try {
when(gitHubCommitV3.getCommitNode(anyObject(), anyString(), anyString(), anyObject(), anyString())).thenReturn(null);
} catch (Exception e) {
LOG.error(e.getMessage());
}
List<Commit> commitsList = null;
try {
commitsList = gitHubCommitV3.getCommits(commitsMapList, repoUrl, branch, "senderLogin", "authorLDAPDN");
} catch (Exception e) {
LOG.error(e.getMessage());
}
int size = commitsList.size();
Assert.assertEquals(2, size);
Commit commit1 = commitsList.get(0);
Assert.assertEquals(repoUrl, commit1.getScmUrl());
Assert.assertEquals("master", commit1.getScmBranch());
Assert.assertEquals("commit1ID", commit1.getScmRevisionNumber());
Assert.assertEquals("GitHub WebHook Commit 1", commit1.getScmCommitLog());
Assert.assertEquals("author1Name", commit1.getScmAuthor());
Assert.assertEquals(7, commit1.getNumberOfChanges());
Assert.assertEquals(collectorItemId, commit1.getCollectorItemId().toString());
verify(gitHubCommitV3, times(3)).getCommitNode(anyObject(), anyString(), anyString(), anyObject(), anyString());
Commit commit2 = commitsList.get(1);
Assert.assertEquals(repoUrl, commit2.getScmUrl());
Assert.assertEquals("master", commit2.getScmBranch());
Assert.assertEquals("commit2ID", commit2.getScmRevisionNumber());
Assert.assertEquals("GitHub WebHook Commit 2", commit2.getScmCommitLog());
Assert.assertEquals("author2Name", commit2.getScmAuthor());
Assert.assertEquals(3, commit2.getNumberOfChanges());
Assert.assertEquals(collectorItemId, commit2.getCollectorItemId().toString());
}
@Test
public void setCollectorItemIdExistingCommitTest() throws MalformedURLException, HygieiaException {
GitHubCommitV3 gitHubCommitV3 = Mockito.spy(this.gitHubCommitV3);
List<Commit> commitList = new ArrayList<>();
Commit existingCommit = new Commit();
commitList.add(existingCommit);
String id = createGuid("0123456789abcdef");
existingCommit.setId(new ObjectId(id));
String collectorItemId = createGuid("0123456789abcdee");
existingCommit.setCollectorItemId(new ObjectId(collectorItemId));
CollectorItem collectorItem = new CollectorItem();
collectorItem.setId(new ObjectId(collectorItemId));
Commit newCommit = new Commit();
when(commitRepository.findAllByScmRevisionNumberAndScmUrlIgnoreCaseAndScmBranchIgnoreCaseOrderByTimestampAsc(anyString(), anyString(), anyString())).thenReturn(commitList);
when(collectorService.getCollectorItem(existingCommit.getCollectorItemId())).thenReturn(collectorItem);
gitHubCommitV3.setCollectorItemId(newCommit);
Assert.assertEquals(new ObjectId(id), newCommit.getId());
Assert.assertEquals(new ObjectId(collectorItemId), newCommit.getCollectorItemId());
Assert.assertTrue(collectorItem.isPushed());
}
@Test
public void setCollectorItemIdNewCommitTest() throws MalformedURLException, HygieiaException {
GitHubCommitV3 gitHubCommitV3 = Mockito.spy(this.gitHubCommitV3);
Commit newCommit = new Commit();
String repoUrl = "http://hostName/orgName/repoName";
String branch = "master";
newCommit.setScmUrl(repoUrl);
newCommit.setScmBranch(branch);
Collector collector = gitHubCommitV3.getCollector();
String collectorId = createGuid("0123456789abcdef");
collector.setId(new ObjectId(collectorId));
CollectorItem collectorItem = gitHubCommitV3.buildCollectorItem(new ObjectId(collectorId), repoUrl, branch);
String collectorItemId = createGuid("0123456789abcdee");
collectorItem.setId(new ObjectId(collectorItemId));
when(commitRepository.findAllByScmRevisionNumberAndScmUrlIgnoreCaseAndScmBranchIgnoreCaseOrderByTimestampAsc(anyString(), anyString(), anyString())).thenReturn(null);
when(collectorService.createCollector(anyObject())).thenReturn(collector);
when(gitHubCommitV3.buildCollectorItem(anyObject(), anyString(), anyString())).thenReturn(collectorItem);
when(collectorService.createCollectorItem(anyObject())).thenReturn(collectorItem);
try {
when(gitHubCommitV3.getCollectorItem(anyString(), anyString())).thenReturn(collectorItem);
} catch (HygieiaException e) {
LOG.info(e.getMessage());
}
gitHubCommitV3.setCollectorItemId(newCommit);
Assert.assertEquals(new ObjectId(collectorItemId), newCommit.getCollectorItemId());
}
@Test
public void checkForErrors() {
JSONObject objectWithErrors = new JSONObject();
JSONArray errors = new JSONArray();
objectWithErrors.put("errors", errors);
JSONObject error = new JSONObject();
errors.add(error);
Exception exception = null;
try {
gitHubCommitV3.checkForErrors(objectWithErrors);
} catch (Exception e) {
exception = e;
}
Assert.assertNotNull(exception);
}
@Test
public void getCommitNodeTest() {
JSONObject responseJsonObject = makeRepositoryResponseObject();
GitHubParsed gitHubParsed = null;
try {
gitHubParsed = new GitHubParsed("http://hostName/ownerName/orgName/repoName");
} catch (Exception e) {
LOG.error(e.getMessage());
}
try {
when(restClient.parseAsObject(anyObject())).thenReturn(responseJsonObject);
} catch (Exception e) {
LOG.error(e.getMessage());
}
Object node = null;
try {
node = gitHubCommitV3.getCommitNode(gitHubParsed, "branch", "oid1", new DateTime(), "token");
} catch (Exception e){
LOG.error(e.getMessage());
}
String sha = restClient.getString(node, "oid");
Assert.assertEquals(sha, "oid1");
}
@Test
public void getCommitTypeTest() {
List<Pattern> commitExclusionPatterns = new ArrayList<>();
List<String> notBuiltCommits = new ArrayList<>();
notBuiltCommits.add("test1");
notBuiltCommits.add("test2");
notBuiltCommits.stream().map(regExStr -> Pattern.compile(regExStr, Pattern.CASE_INSENSITIVE)).forEach(commitExclusionPatterns::add);
CommitType commitType = gitHubCommitV3.getCommitType(2, "commit message", new GitHubWebHookSettings(), commitExclusionPatterns);
Assert.assertEquals(CommitType.Merge, commitType);
commitType = gitHubCommitV3.getCommitType(1, "commit message", new GitHubWebHookSettings(), commitExclusionPatterns);
Assert.assertEquals(CommitType.New, commitType);
GitHubWebHookSettings gitHubWebHookSettings = new GitHubWebHookSettings();
List<String> notBuiltCommitsList = new ArrayList<>();
notBuiltCommitsList.add("some value");
gitHubWebHookSettings.setNotBuiltCommits(notBuiltCommitsList);
commitType = gitHubCommitV3.getCommitType(1, "test1", gitHubWebHookSettings, commitExclusionPatterns);
Assert.assertEquals(CommitType.NotBuilt, commitType);
}
@Test
public void setCommitPullNumbersForRebaseAndMergeCommitTest() {
List<Commit> commitList = new ArrayList<>();
Commit commit1 = new Commit();
commitList.add(commit1);
Commit commit2 = new Commit();
commitList.add(commit2);
commit2.setPullNumber("2");
gitHubCommitV3.setCommitPullNumbersForRebaseAndMergeCommit(commitList);
Assert.assertEquals("2", commit1.getPullNumber());
commit1.setPullNumber(null);
Commit commit3 = new Commit();
commitList.add(commit3);
commit2.setPullNumber("3");
Assert.assertNull(commit1.getPullNumber());
}
@Test
public void setCommitPullNumberTest() {
Commit commit = new Commit();
commit.setScmRevisionNumber("1");
GitRequest pr = new GitRequest();
pr.setNumber("2");
when(gitRequestRepository.findByScmRevisionNumberOrScmMergeEventRevisionNumber(commit.getScmRevisionNumber())).thenReturn(pr);
when(gitRequestRepository.findByCommitScmRevisionNumber(commit.getScmRevisionNumber())).thenReturn(null);
gitHubCommitV3.setCommitPullNumber(commit);
Assert.assertEquals("2", commit.getPullNumber());
commit.setPullNumber(null);
when(gitRequestRepository.findByScmRevisionNumberOrScmMergeEventRevisionNumber(commit.getScmRevisionNumber())).thenReturn(null);
when(gitRequestRepository.findByCommitScmRevisionNumber(commit.getScmRevisionNumber())).thenReturn(pr);
gitHubCommitV3.setCommitPullNumber(commit);
Assert.assertEquals("2", commit.getPullNumber());
commit.setPullNumber(null);
when(gitRequestRepository.findByScmRevisionNumberOrScmMergeEventRevisionNumber(commit.getScmRevisionNumber())).thenReturn(null);
when(gitRequestRepository.findByCommitScmRevisionNumber(commit.getScmRevisionNumber())).thenReturn(null);
gitHubCommitV3.setCommitPullNumber(commit);
Assert.assertNull(commit.getPullNumber());
}
@Test
public void getRepositoryTokenTest() {
GitHubCommitV3 gitHubCommitV3 = Mockito.spy(this.gitHubCommitV3);
String scmUrl = "http://hostName/ownerName/orgName/repoName";
Collector collector = gitHubCommitV3.getCollector();
String collectorId = createGuid("0123456789abcdef");
collector.setId(new ObjectId(collectorId));
List<CollectorItem> gitHubRepoList = new ArrayList<>();
GitHubRepo repo1 = new GitHubRepo();
gitHubRepoList.add(repo1);
repo1.setPersonalAccessToken("1");
GitHubRepo repo2 = new GitHubRepo();
gitHubRepoList.add(repo2);
repo1.setPersonalAccessToken("1");
when(collectorService.createCollector(anyObject())).thenReturn(collector);
when(gitHubCommitV3.getCollectorItemRepository().findAllByOptionNameValueAndCollectorIdsIn(anyString(), anyString(), anyObject())).thenReturn(gitHubRepoList);
String result = gitHubCommitV3.getRepositoryToken(scmUrl);
Assert.assertEquals("1", result);
}
@Test
public void getParentShasTest() {
JSONObject commitObject = new JSONObject();
JSONObject parents = new JSONObject();
commitObject.put("parents", parents);
JSONArray nodes = new JSONArray();
parents.put("nodes", nodes);
JSONObject node1 = new JSONObject();
nodes.add(node1);
node1.put("oid", "node1Oid");
JSONObject node2 = new JSONObject();
nodes.add(node2);
node2.put("oid", "node2Oid");
List<String> parentShas = gitHubCommitV3.getParentShas(commitObject);
Assert.assertEquals(2, parentShas.size());
}
private static String createGuid(String hex) {
byte[] bytes = new byte[12];
new Random().nextBytes(bytes);
char[] hexArray = hex.toCharArray();
char[] hexChars = new char[bytes.length * 2];
for ( int j = 0; j < bytes.length; j++ ) {
int v = bytes[j] & 0xFF;
hexChars[j * 2] = hexArray[v >>> 4];
hexChars[j * 2 + 1] = hexArray[v & 0x0F];
}
return new String(hexChars);
}
private JSONObject makeRepositoryResponseObject() {
JSONObject responseJsonObject = new JSONObject();
JSONObject data = new JSONObject();
responseJsonObject.put("data", data);
JSONObject repository = new JSONObject();
data.put("repository", repository);
JSONObject ref = new JSONObject();
repository.put("ref", ref);
JSONObject target = new JSONObject();
ref.put("target", target);
JSONObject history = new JSONObject();
target.put("history", history);
JSONArray edges = new JSONArray();
history.put("edges", edges);
JSONObject edge1 = new JSONObject();
edges.add(edge1);
JSONObject node1 = new JSONObject();
node1.put("oid", "oid1");
edge1.put("node", node1);
JSONObject edge2 = new JSONObject();
edges.add(edge2);
JSONObject node2 = new JSONObject();
node2.put("oid", "oid2");
edge2.put("node", node2);
return responseJsonObject;
}
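    /*
     * For reference, the JSON built above has this shape, which is what getCommitNode(..)
     * is expected to parse in getCommitNodeTest:
     *
     * {
     *   "data": {
     *     "repository": {
     *       "ref": {
     *         "target": {
     *           "history": {
     *             "edges": [
     *               { "node": { "oid": "oid1" } },
     *               { "node": { "oid": "oid2" } }
     *             ]
     *           }
     *         }
     *       }
     *     }
     *   }
     * }
     */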
private List<Map> makeCommitsList() {
List<Map> commitsList = new ArrayList<>();
Map commitsMap1 = new HashMap();
commitsList.add(commitsMap1);
commitsMap1.put("id", "commit1ID");
commitsMap1.put("message", "GitHub WebHook Commit 1");
commitsMap1.put("timestamp", "2018-09-22T11:18:56-05:00");
commitsMap1.put("url", "https://host/commit/commit1ID");
List<Integer> modifiedList1 = new ArrayList<>();
modifiedList1.add(1);
commitsMap1.put("modified", modifiedList1);
Map author1 = new HashMap();
commitsMap1.put("author", author1);
author1.put("name", "author1Name");
author1.put("login", "senderLogin");
commitsMap1.put("added",Arrays.asList("pom.xml", "cucumber.json", "Test.java"));
commitsMap1.put("removed",Arrays.asList(".gitignore", "style.css"));
commitsMap1.put("modified",Arrays.asList("Readme.md", "gulp.js"));
Map commitsMap2 = new HashMap();
commitsList.add(commitsMap2);
commitsMap2.put("id", "commit2ID");
commitsMap2.put("message", "GitHub WebHook Commit 2");
commitsMap2.put("timestamp", "2018-09-22T11:18:56-05:00");
commitsMap2.put("url", "https://host/commit/commit2ID");
List<Integer> modifiedList2 = new ArrayList<>();
modifiedList2.add(1);
commitsMap2.put("modified", modifiedList2);
Map author2 = new HashMap();
commitsMap2.put("author", author2);
author2.put("name", "author2Name");
author2.put("login", "senderLogin");
commitsMap2.put("added",null);
commitsMap2.put("removed",Arrays.asList(".gitignore", "style.css"));
commitsMap2.put("modified",Arrays.asList(""));
return commitsList;
}
private WebHookSettings makeWebHookSettings() {
WebHookSettings webHookSettings = new WebHookSettings();
GitHubWebHookSettings gitHubWebHookSettings = new GitHubWebHookSettings();
webHookSettings.setGitHub(gitHubWebHookSettings);
gitHubWebHookSettings.setToken("c74782b3ca2b57a5230ae7812a");
gitHubWebHookSettings.setCommitTimestampOffset(5);
gitHubWebHookSettings.setUserAgent("GitHub-Hookshot");
List<String> githubEnterpriseHosts = new ArrayList<>();
gitHubWebHookSettings.setGithubEnterpriseHosts(githubEnterpriseHosts);
githubEnterpriseHosts.add("github.com");
return webHookSettings;
}
}
|
package com.spiddekauga.android.ui.list;
import android.app.Fragment;
import android.app.FragmentManager;
import android.support.annotation.NonNull;
import android.util.SparseArray;
/**
* Improved Fragment Pager adapter that can return an instance of the fragment
*/
public abstract class FragmentPagerAdapter<FragmentType extends Fragment> extends android.support.v13.app.FragmentPagerAdapter {
private SparseArray<FragmentType> mFragments = new SparseArray<>();
protected FragmentPagerAdapter(@NonNull FragmentManager fragmentManager) {
super(fragmentManager);
}
/**
* Get the instance of the item at position. Creates a new instance if none exists
* @param position position of item to get
* @return Fragment instance of the item at position.
*/
@Override
public FragmentType getItem(int position) {
FragmentType fragment = mFragments.get(position);
if (fragment == null) {
fragment = instantiateItem(position);
mFragments.append(position, fragment);
}
return fragment;
}
/**
* Create a new instance of the item at the specified position
* @return new fragment instance for this position
*/
protected abstract FragmentType instantiateItem(int position);
@Override
public void notifyDataSetChanged() {
mFragments.clear();
super.notifyDataSetChanged();
}
}
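/*
 * Usage sketch (hypothetical, not part of this file): a minimal concrete adapter.
 * "PageFragment" is an assumed Fragment subclass with a static newInstance(int) factory.
 *
 * class PageAdapter extends FragmentPagerAdapter<PageFragment> {
 *     PageAdapter(FragmentManager fragmentManager) {
 *         super(fragmentManager);
 *     }
 *
 *     @Override
 *     protected PageFragment instantiateItem(int position) {
 *         // Only called when getItem(position) finds no cached instance
 *         return PageFragment.newInstance(position);
 *     }
 *
 *     @Override
 *     public int getCount() {
 *         return 3;
 *     }
 * }
 */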
|
package com.scg.util;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
/**
* JUnit tests for Address.java
* @author Brian Stamm
*/
public class AddressTest {
//variables for testing
private String streetNumber;
private String city;
private StateCode state;
private String postalCode;
private String testString;
/**
* Initialize all the variables for testing
*/
@Before
public void initialize() {
streetNumber = "416 Sid Snyder Avenue";
city = "Olympia";
state = StateCode.WA;
postalCode = "98504";
testString = String.format("%s%n%s, %s %s", streetNumber, city, state, postalCode);
}
/**
* Test the constructor & getters
*/
@Test
public void testConstructor() {
Address testAddress = new Address(streetNumber, city, state, postalCode);
assertEquals(testAddress.getStreetNumber(),streetNumber);
assertEquals(testAddress.getCity(),city);
assertEquals(testAddress.getState(),state);
assertEquals(testAddress.getPostalCode(),postalCode);
}
/**
* Test the toString method
*/
@Test
public void testToString() {
Address testAddress = new Address(streetNumber, city, state, postalCode);
assertEquals(testAddress.toString(),testString);
}
}
|
package com.barbapapateam.barbapapa;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.TextView;
import java.util.Iterator;
import java.util.LinkedList;
public class AdvancedRecommandationActivity extends Activity implements View.OnClickListener {
private ImageButton noB = null;
private ImageButton yesB = null;
private ImageButton goBackB = null;
//Text view that displays the current question (initialized in onCreate, after setContentView)
TextView t1;
//Questions are split into main questions (question1, 2 and 3) and sub-questions (the entries of question1, 2 and 3)
String[] question1 = {"Voulez vous une bière en Pression ?", "Donc plutôt en bouteille ?"};
String[] question2 = {"Voulez vous une bière Blonde ?", "Plutôt blanche ?", "Plutôt ambrée", "Plutôt brune ?"};
String[] question3 = {"Voulez vous une bière forte ?", "Plutôt douce ?"};
private String[][] questions = {question1, question2, question3};
private boolean[] bottle = {true, false};
private String[] color = {"Blonde", "Blanche", "Ambrée", "Brune"};
private String[] degre = {"forte", "douce"};
private String[][] attributes = {{}, color, degre};
private int indice1 = 0;
private int indice2 = 0;
String question = questions[0][0];
//Loaded in onCreate(); asset access requires the Activity to be created first
LinkedList<Beer> beers;
private void getResult(){
}
private void getNextQuestion(){
if(indice1 == questions.length - 1){
// last main question answered: compute the recommendation
getResult();
} else {
indice1++;
indice2 = 0;
question = questions[indice1][indice2];
t1.setText(question);
}
}
private void getPreviousQuestion(){
if (indice1 > 0) {
indice1--;
}
indice2 = 0;
question = questions[indice1][indice2];
t1.setText(question);
}
private void getNextNuance(){
indice2++;
if(indice2 >= questions[indice1].length){
// no more sub-questions for this main question: wrap around to the first one
indice2 = 0;
}
question = questions[indice1][indice2];
t1.setText(question);
}
private void yes(){
if (indice1 == 2){
// the last main question filters on alcohol content (ABV)
boolean wantsStrong = attributes[indice1][indice2].equals("forte");
Iterator<Beer> it = beers.iterator();
while (it.hasNext()) {
Beer beer = it.next();
if (wantsStrong) {
if (beer.getABV() <= 6) it.remove();
} else {
if (beer.getABV() > 6) it.remove();
}
}
} else if (indice1 == 0) {
//for the other questions, the lists in attributes match the values of the Beer attributes
Iterator<Beer> it = beers.iterator();
while (it.hasNext()) {
if (it.next().getBottle() != bottle[indice2]) {
it.remove();
}
}
} else if (indice1 == 1) { //for the other questions, the lists in attributes match the values of the Beer attributes
Iterator<Beer> it = beers.iterator();
while (it.hasNext()) {
if (!attributes[indice1][indice2].equals(it.next().getColor())) {
it.remove();
}
}
}
getNextQuestion();
}
private void no(){
getNextNuance();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.advanced_recommandation);
//Add an action bar with an Up button
ActionBar actionBar = getActionBar();
actionBar.setDisplayHomeAsUpEnabled(true);
//Question text view and beer list, initialized here because the layout and assets are now available
t1 = (TextView) findViewById(R.id.ARtextView);
t1.setText(question);
beers = Utils.getBeersFromJSON("beers.json", this);
//Wire the buttons between model and view
noB = (ImageButton) findViewById(R.id.imageButtonCross);
yesB = (ImageButton) findViewById(R.id.imageButtonValid);
goBackB = (ImageButton) findViewById(R.id.imageButtonBack);
noB.setOnClickListener(this);
yesB.setOnClickListener(this);
goBackB.setOnClickListener(this);
}
@Override
public void onClick(View v) {
switch(v.getId()){
case R.id.imageButtonCross:
no();
break;
case R.id.imageButtonValid:
yes();
break;
case R.id.imageButtonBack:
getPreviousQuestion();
break;
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
switch(item.getItemId()){
//Display back button on action bar
case android.R.id.home:
// app icon in action bar clicked; go home
Intent intent = new Intent(this, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
return true;
}
return super.onOptionsItemSelected(item);
}
}
|
package com.github.mobile.android.core.repo;
import static org.eclipse.egit.github.core.event.Event.TYPE_CREATE;
import static org.eclipse.egit.github.core.event.Event.TYPE_FORK;
import org.eclipse.egit.github.core.Repository;
import org.eclipse.egit.github.core.User;
import org.eclipse.egit.github.core.event.Event;
import org.eclipse.egit.github.core.event.EventPayload;
import org.eclipse.egit.github.core.event.EventRepository;
import org.eclipse.egit.github.core.event.ForkPayload;
/**
* Helper to find the {@link Repository} to open for an event
*/
public class RepositoryEventMatcher {
/**
* Get {@link Repository} from event
*
* @param event
* @return repository or null if event doesn't apply
*/
public Repository getRepository(final Event event) {
if (event == null)
return null;
EventPayload payload = event.getPayload();
if (payload == null)
return null;
String type = event.getType();
if (TYPE_FORK.equals(type))
return ((ForkPayload) payload).getForkee();
else if (TYPE_CREATE.equals(type)) {
EventRepository repo = event.getRepo();
if (repo != null) {
String id = repo.getName();
int slash = id.indexOf('/');
if (slash > 0 && slash + 1 < id.length()) {
Repository full = new Repository();
full.setName(id.substring(slash + 1));
String login = id.substring(0, slash);
// Use actor if it matches login parsed from repository id
if (event.getActor() != null && login.equals(event.getActor().getLogin()))
full.setOwner(event.getActor());
else
full.setOwner(new User().setLogin(id.substring(0, slash)));
return full;
}
}
}
return null;
}
}
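/*
 * Hedged usage sketch (not part of the original file): resolving the repository
 * behind a received event before opening it; the surrounding view/intent code is
 * assumed, not taken from this project.
 *
 *   Repository repo = new RepositoryEventMatcher().getRepository(event);
 *   if (repo != null) {
 *       // open the repository, e.g. start the repository view activity
 *   }
 */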
|
package com.melodies.bandup.MainScreenActivity;
import android.Manifest;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.Typeface;
import android.location.Location;
import android.location.LocationManager;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.android.volley.VolleyError;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdView;
import com.melodies.bandup.DatabaseSingleton;
import com.melodies.bandup.LocaleSingleton;
import com.melodies.bandup.R;
import com.melodies.bandup.SoundCloudFragments.SoundCloudPlayerFragment;
import com.melodies.bandup.helper_classes.User;
import com.melodies.bandup.listeners.BandUpErrorListener;
import com.melodies.bandup.listeners.BandUpResponseListener;
import com.melodies.bandup.locale.LocaleRules;
import com.squareup.picasso.Picasso;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Locale;
import static android.content.Context.LOCATION_SERVICE;
/**
* A simple {@link Fragment} subclass.
* Activities that contain this fragment must implement the
* {@link UserDetailsFragment.OnFragmentInteractionListener} interface
* to handle interaction events.
* Use the {@link UserDetailsFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class UserDetailsFragment extends Fragment {
private User currentUser;
private OnFragmentInteractionListener mListener;
public UserDetailsFragment() {
// Required empty public constructor
}
/**
* Use this factory method to create a new instance of
* this fragment using the provided parameters.
*
* @return A new instance of fragment UserDetailsFragment.
*/
// TODO: Rename and change types and number of parameters
public static UserDetailsFragment newInstance() {
UserDetailsFragment fragment = new UserDetailsFragment();
Bundle args = new Bundle();
fragment.setArguments(args);
return fragment;
}
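/*
 * Note (not from the original source): onCreateView() below reads a "user_id"
 * String from the fragment arguments, but newInstance() does not put one.
 * A minimal sketch of how a caller presumably supplies it (container id and
 * variable names are assumptions):
 *
 *   UserDetailsFragment fragment = UserDetailsFragment.newInstance();
 *   fragment.getArguments().putString("user_id", selectedUserId);
 *   getSupportFragmentManager().beginTransaction()
 *           .replace(R.id.fragment_container, fragment)
 *           .commit();
 */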
private TextView txtName;
private TextView txtAge;
private TextView txtFavorite;
private TextView txtPercentage;
private TextView txtDistance;
private TextView txtAboutMe;
private TextView txtInstrumentsTitle;
private TextView txtGenresTitle;
private TextView txtInstrumentsList;
private TextView txtGenresList;
private TextView txtSoundCloudExample;
private TextView txtNoSoundCloudExample;
private ImageView ivUserProfileImage;
private Button btnLike;
private AdView mAdView;
private LinearLayout mSoundcloudArea;
private void initializeViews(View rootView) {
txtName = (TextView) rootView.findViewById(R.id.txtName);
txtDistance = (TextView) rootView.findViewById(R.id.txtDistance);
txtPercentage = (TextView) rootView.findViewById(R.id.txtPercentage);
txtAge = (TextView) rootView.findViewById(R.id.txtAge);
txtFavorite = (TextView) rootView.findViewById(R.id.txtFavorite);
txtAboutMe = (TextView) rootView.findViewById(R.id.txtAboutMe);
txtInstrumentsTitle = (TextView) rootView.findViewById(R.id.txtInstrumentTitle);
txtGenresTitle = (TextView) rootView.findViewById(R.id.txtGenresTitle);
txtInstrumentsList = (TextView) rootView.findViewById(R.id.txtInstrumentsList);
txtGenresList = (TextView) rootView.findViewById(R.id.txtGenresList);
txtSoundCloudExample = (TextView) rootView.findViewById(R.id.txt_audio_example);
ivUserProfileImage = (ImageView) rootView.findViewById(R.id.imgProfile);
btnLike = (Button) rootView.findViewById(R.id.btnLike);
mAdView = (AdView) rootView.findViewById(R.id.adView);
mSoundcloudArea = (LinearLayout) rootView.findViewById(R.id.soundcloud_player_area);
txtFetchError = (TextView) rootView.findViewById(R.id.txtFetchError);
txtNoSoundCloudExample = (TextView) rootView.findViewById(R.id.no_soundcloud_example);
btnLike.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
((MainScreenActivity) getActivity()).onClickLike(currentUser, v);
}
});
}
private void setFonts() {
Typeface caviarDreams = Typeface.createFromAsset(getActivity().getAssets(), "fonts/caviar_dreams.ttf");
Typeface caviarDreamsBold = Typeface.createFromAsset(getActivity().getAssets(), "fonts/caviar_dreams_bold.ttf");
Typeface masterOfBreak = Typeface.createFromAsset(getActivity().getAssets(), "fonts/master_of_break.ttf");
txtName.setTypeface(caviarDreams);
txtDistance.setTypeface(caviarDreams);
txtPercentage.setTypeface(caviarDreams);
txtAge.setTypeface(caviarDreams);
txtFavorite.setTypeface(caviarDreams);
txtAboutMe.setTypeface(caviarDreams);
txtInstrumentsList.setTypeface(caviarDreams);
txtGenresList.setTypeface(caviarDreams);
txtInstrumentsTitle.setTypeface(caviarDreamsBold);
txtGenresTitle.setTypeface(caviarDreamsBold);
btnLike.setTypeface(masterOfBreak);
txtNoSoundCloudExample.setTypeface(caviarDreamsBold);
txtSoundCloudExample.setTypeface(caviarDreamsBold);
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
}
// Gets the user_id from userListFragment
}
private TextView txtFetchError;
private ProgressBar progressBar;
private LinearLayout llProfile;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
MainScreenActivity mainScreenActivity = (MainScreenActivity)getActivity();
mainScreenActivity.currentFragment = mainScreenActivity.USER_DETAILS_FRAGMENT;
mainScreenActivity.invalidateOptionsMenu();
// Inflate the layout for this fragment
final View rootView = inflater.inflate(R.layout.fragment_user_details, container, false);
initializeViews(rootView);
setFonts();
progressBar = (ProgressBar) rootView.findViewById(R.id.userListProgressBar);
llProfile = (LinearLayout) rootView.findViewById(R.id.ll_profile);
String argumentUserID = getArguments().getString("user_id");
if (currentUser == null || !currentUser.id.equals(argumentUserID)) {
fetchCurrentUser(getArguments().getString("user_id"));
} else {
populateUser(currentUser);
}
return rootView;
}
private void populateUser(User u) {
// Adding ad Banner
AdRequest adRequest = new AdRequest.Builder().build();
mAdView.loadAd(adRequest);
LocaleRules localeRules = LocaleSingleton.getInstance(getActivity()).getLocaleRules();
if (u.imgURL != null) {
Picasso.with(getActivity()).load(u.imgURL).into(ivUserProfileImage);
}
txtName.setText(u.name);
if (localeRules != null) {
Integer age = u.ageCalc();
if (age != null) {
if (localeRules.ageIsPlural(age)) {
String ageString = String.format("%s %s", age, getString((R.string.age_year_plural)));
txtAge.setText(ageString);
} else {
String ageString = String.format("%s %s", age, getString((R.string.age_year_singular)));
txtAge.setText(ageString);
}
} else {
txtAge.setText(getString(R.string.age_not_available));
}
}
if (u.favoriteinstrument != null && !u.favoriteinstrument.equals("")) {
txtFavorite.setText(u.favoriteinstrument);
} else {
if (u.instruments.size() != 0) {
txtFavorite.setText(u.instruments.get(0));
}
}
if (u.isLiked) {
btnLike.setText(getString(R.string.user_list_liked));
btnLike.setEnabled(false);
btnLike.setBackgroundResource(R.drawable.button_user_list_like_disabled);
}
txtPercentage.setText(u.percentage + "%");
txtAboutMe.setText(u.aboutme);
if (u.location != null) {
Float distanceBetweenUsers = getDistanceToUser(u);
if (distanceBetweenUsers != null) {
SharedPreferences sharedPreferences = getActivity().getSharedPreferences("SettingsFileSwitch", Context.MODE_PRIVATE);
Boolean usesImperial = sharedPreferences.getBoolean("switchUnit", false);
if (usesImperial) {
String distanceString = String.format("%s %s", (int) Math.ceil(kilometersToMiles(distanceBetweenUsers / 1000)), getString(R.string.mi_distance));
txtDistance.setText(distanceString);
} else {
String distanceString = String.format("%s %s", (int) Math.ceil(distanceBetweenUsers / 1000), getString(R.string.km_distance));
txtDistance.setText(distanceString);
}
} else {
txtDistance.setText(R.string.no_distance_available);
}
} else {
txtDistance.setText(R.string.no_distance_available);
}
for (int i = 0; i < u.genres.size(); i++) {
txtGenresList.append(u.genres.get(i) + "\n");
}
for (int i = 0; i < u.instruments.size(); i++) {
txtInstrumentsList.append(u.instruments.get(i) + "\n");
}
createSoundCloudPlayer(u);
llProfile.setVisibility(View.VISIBLE);
txtNoSoundCloudExample.setText(String.format("%s %s", u.name, getString(R.string.no_soundcloud_example)));
if (u.id.equals(currentUser.id)) {
}
}
private Float getDistanceToUser(User u) {
LocationManager locationManager = (LocationManager) getActivity().getSystemService(LOCATION_SERVICE);
String locationProvider = ((MainScreenActivity) getActivity()).bestProvider;
Boolean hasLocationPermission = ((MainScreenActivity) getActivity()).hasLocationPermission();
if (hasLocationPermission) {
Location myLocation = locationManager.getLastKnownLocation(locationProvider);
Location userLocation = new Location("");
if (u.location.getValid()) {
userLocation.setLatitude(u.location.getLatitude());
userLocation.setLongitude(u.location.getLongitude());
} else {
return null;
}
if (myLocation != null) {
return myLocation.distanceTo(userLocation);
} else {
return null;
}
} else {
return null;
}
}
private void createSoundCloudPlayer(User user) {
if (user.soundCloudURL == null || user.soundCloudURL.equals("")){
txtNoSoundCloudExample.setVisibility(View.VISIBLE);
} else {
txtNoSoundCloudExample.setVisibility(View.GONE);
FragmentManager fragmentManager = getChildFragmentManager();
FragmentTransaction ft = fragmentManager.beginTransaction();
mSoundcloudArea.setId(Integer.valueOf(1234));
Fragment soundCloudFragment;
soundCloudFragment = SoundCloudPlayerFragment.newInstance(user.soundCloudURL);
ft.replace(mSoundcloudArea.getId(), soundCloudFragment, "soundCloudFragment");
ft.commit();
}
}
// Request REAL user info from server
public void fetchCurrentUser(String userid) {
JSONObject user = new JSONObject();
try {
user.put("userId", userid);
} catch (JSONException e) {
e.printStackTrace();
}
llProfile.setVisibility(View.INVISIBLE);
progressBar.setVisibility(View.VISIBLE);
DatabaseSingleton.getInstance(getActivity()).getBandUpDatabase().getUserProfile(user, new BandUpResponseListener() {
@Override
public void onBandUpResponse(Object response) {
if (getActivity() == null){
return;
}
progressBar.setVisibility(View.INVISIBLE);
JSONObject responseObj = null;
if (response instanceof JSONObject) {
responseObj = (JSONObject) response;
} else {
txtFetchError.setVisibility(View.VISIBLE);
}
if (responseObj != null) {
// Binding View to real data
currentUser = new User(responseObj);
populateUser(currentUser);
}
}
}, new BandUpErrorListener() {
@Override
public void onBandUpErrorResponse(VolleyError error) {
if (getActivity() == null){
return;
}
txtFetchError.setVisibility(View.VISIBLE);
progressBar.setVisibility(View.INVISIBLE);
}
});
}
private int kilometersToMiles(double kilometers) {
// 1 mile = 1.609344 km
return (int) Math.round(kilometers / 1.609344);
}
// TODO: Rename method, update argument and hook method into UI event
public void onButtonPressed(Uri uri) {
if (mListener != null) {
mListener.onFragmentInteraction(uri);
}
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnFragmentInteractionListener) {
mListener = (OnFragmentInteractionListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnFragmentInteractionListener");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
public interface OnFragmentInteractionListener {
// TODO: Update argument type and name
void onFragmentInteraction(Uri uri);
}
}
|
package de.fosd.jdime.strategy;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import org.apache.log4j.Logger;
import de.fosd.jdime.common.ASTNodeArtifact;
import de.fosd.jdime.common.FileArtifact;
import de.fosd.jdime.common.MergeContext;
import de.fosd.jdime.common.MergeTriple;
import de.fosd.jdime.common.NotYetImplementedException;
import de.fosd.jdime.common.operations.MergeOperation;
import de.fosd.jdime.stats.ScenarioStats;
import de.fosd.jdime.stats.Stats;
import de.fosd.jdime.stats.StatsElement;
/**
* Performs a structured merge.
*
* @author Olaf Lessenich
*
*/
public class StructuredStrategy extends MergeStrategy<FileArtifact> {
/**
* Logger.
*/
private static final Logger LOG = Logger
.getLogger(StructuredStrategy.class);
/*
* (non-Javadoc)
*
* @see de.fosd.jdime.strategy.MergeStrategy#merge(
* de.fosd.jdime.common.operations.MergeOperation,
* de.fosd.jdime.common.MergeContext)
*/
@Override
public final void merge(final MergeOperation<FileArtifact> operation,
final MergeContext context) throws IOException,
InterruptedException {
assert (operation != null);
assert (context != null);
MergeTriple<FileArtifact> triple = operation.getMergeTriple();
assert (triple != null);
assert (triple.isValid()) : "The merge triple is not valid!";
assert (triple.getLeft() instanceof FileArtifact);
assert (triple.getBase() instanceof FileArtifact);
assert (triple.getRight() instanceof FileArtifact);
assert (triple.getLeft().exists() && !triple.getLeft().isDirectory());
assert ((triple.getBase().exists() && !triple.getBase().isDirectory()) || triple
.getBase().isEmptyDummy());
assert (triple.getRight().exists() && !triple.getRight().isDirectory());
context.resetStreams();
FileArtifact target = null;
if (operation.getTarget() != null) {
assert (operation.getTarget() instanceof FileArtifact);
target = (FileArtifact) operation.getTarget();
assert (!target.exists() || target.isEmpty()) : "Would be overwritten: "
+ target;
}
// ASTNodeArtifacts are created from the input files.
// Then, a ASTNodeStrategy can be applied.
// The Result is pretty printed and can be written into the output file.
ASTNodeArtifact left, base, right;
if (LOG.isDebugEnabled()) {
LOG.debug("Merging: " + triple.getLeft().getPath() + " "
+ triple.getBase().getPath() + " "
+ triple.getRight().getPath());
}
long cmdStart = System.currentTimeMillis();
left = new ASTNodeArtifact(triple.getLeft());
base = new ASTNodeArtifact(triple.getBase());
right = new ASTNodeArtifact(triple.getRight());
// Output tree
// Program program = new Program();
// program.state().reset();
// ASTNodeArtifact targetNode = new ASTNodeArtifact(program);
ASTNodeArtifact targetNode = ASTNodeArtifact.createProgram(left);
targetNode.setRevision(left.getRevision());
targetNode.forceRenumbering();
if (LOG.isTraceEnabled()) {
LOG.trace("target.dumpTree(:");
System.out.println(targetNode.dumpTree());
}
MergeTriple<ASTNodeArtifact> nodeTriple = new MergeTriple<ASTNodeArtifact>(
triple.getMergeType(), left, base, right);
MergeOperation<ASTNodeArtifact> astMergeOp = new MergeOperation<ASTNodeArtifact>(
nodeTriple, targetNode);
if (LOG.isTraceEnabled()) {
LOG.trace("ASTMOperation.apply(context)");
}
try {
astMergeOp.apply(context);
if (LOG.isTraceEnabled()) {
LOG.trace("Structured merge finished.");
LOG.trace("target.dumpTree():");
System.out.println(targetNode.dumpTree());
LOG.trace("Pretty-printing left:");
System.out.println(left.prettyPrint());
LOG.trace("Pretty-printing right:");
System.out.println(right.prettyPrint());
LOG.trace("Pretty-printing merge:");
if (context.isQuiet()) {
System.out.println(targetNode.prettyPrint());
}
}
// process input stream
BufferedReader buf = new BufferedReader(new StringReader(
targetNode.prettyPrint()));
boolean conflict = false;
boolean afterconflict = false;
boolean inleft = false;
boolean inright = false;
int conflicts = 0;
int loc = 0;
int cloc = 0;
int tmp = 0;
String line = "";
StringBuffer leftlines = null;
StringBuffer rightlines = null;
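// The loop below scans the pretty-printed merge result line by line and tracks
// conflict-marker state: 'conflict' is true between "<<<<<<<" and ">>>>>>>",
// 'inleft'/'inright' select which side of the conflict the current line belongs to,
// and 'afterconflict' allows directly adjacent conflicts to be merged into one.
// 'loc' counts all non-empty, non-marker lines; 'cloc' only those inside conflicts.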
while ((line = buf.readLine()) != null) {
if (line.matches("^$") || line.matches("^\\s*$")) {
// skip empty lines
if (!conflict && !afterconflict) {
context.appendLine(line);
}
continue;
}
if (line.matches("^\\s*<<<<<<<.*")) {
conflict = true;
tmp = cloc;
conflicts++;
inleft = true;
if (!afterconflict) {
// new conflict or new chain of conflicts
leftlines = new StringBuffer();
rightlines = new StringBuffer();
} else {
// directly follows a previous conflict:
// merge them and count both as a single conflict
conflicts--;
}
} else if (line.matches("^\\s*=======.*")) {
inleft = false;
inright = true;
} else if (line.matches("^\\s*>>>>>>>.*")) {
conflict = false;
afterconflict = true;
if (tmp == cloc) {
// the conflict contains only empty lines; do not count it
conflicts--;
}
inright = false;
} else {
loc++;
if (conflict) {
cloc++;
if (inleft) {
leftlines.append(line + System.lineSeparator());
} else if (inright) {
rightlines.append(line + System.lineSeparator());
}
} else {
if (afterconflict) {
// need to print the previous conflict(s)
context.appendLine("<<<<<<< ");
context.append(leftlines.toString());
context.appendLine("======= ");
context.append(rightlines.toString());
context.appendLine(">>>>>>> ");
}
afterconflict = false;
context.appendLine(line);
}
}
}
long cmdStop = System.currentTimeMillis();
long runtime = cmdStop - cmdStart;
LOG.debug("Structured merge finished after " + runtime + " ms.");
if (context.hasErrors()) {
System.err.println(context.getStdErr());
}
// write output
if (target != null) {
assert (target.exists());
target.write(context.getStdIn());
}
// add statistical data to context
if (context.hasStats()) {
assert (cloc <= loc);
Stats stats = context.getStats();
StatsElement linesElement = stats.getElement("lines");
assert (linesElement != null);
StatsElement newElement = new StatsElement();
newElement.setMerged(loc);
newElement.setConflicting(cloc);
linesElement.addStatsElement(newElement);
if (conflicts > 0) {
assert (cloc > 0);
stats.addConflicts(conflicts);
StatsElement filesElement = stats.getElement("files");
assert (filesElement != null);
filesElement.incrementConflicting();
} else {
assert (cloc == 0);
}
stats.increaseRuntime(runtime);
ScenarioStats scenariostats = new ScenarioStats(triple,
conflicts, cloc, loc, runtime);
stats.addScenarioStats(scenariostats);
}
} catch (Throwable t) {
if (!context.isKeepGoing()) {
throw new Error(t);
} else {
LOG.fatal(t + " while merging "
+ triple.getLeft().getPath() + " "
+ triple.getBase().getPath() + " "
+ triple.getRight().getPath());
if (context.hasStats()) {
ScenarioStats scenariostats
= new ScenarioStats(t.toString());
context.getStats().addScenarioStats(scenariostats);
}
}
}
}
/*
* (non-Javadoc)
*
* @see de.fosd.jdime.strategy.MergeStrategy#toString()
*/
@Override
public final String toString() {
return "structured";
}
/*
* (non-Javadoc)
*
* @see de.fosd.jdime.strategy.StatsInterface#createStats()
*/
@Override
public final Stats createStats() {
return new Stats(new String[] { "directories", "files", "lines",
"nodes" });
}
@Override
public final String getStatsKey(final FileArtifact artifact) {
// FIXME: remove me when implementation is complete!
throw new NotYetImplementedException(
"StructuredStrategy: Implement me!");
}
@Override
public final void dump(final FileArtifact artifact, final boolean graphical)
throws IOException {
new ASTNodeStrategy().dump(new ASTNodeArtifact(artifact), graphical);
}
}
|
package application;
import java.io.File;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
public class SelectedPath
{
private FileChooser fc;
private String path;
public SelectedPath(Stage stage)
{
fc = new FileChooser();
fc.setTitle("Seleccion de pyhton");
this.path = fc.showOpenDialog(stage).getAbsolutePath();
}
public String getPath()
{
return this.path;
}
}
|
// RMG - Reaction Mechanism Generator
// RMG Team (rmg_dev@mit.edu)
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
package jing.rxnSys;
import java.io.*;
import jing.rxnSys.ReactionSystem;
import jing.rxn.*;
import jing.chem.*;
import java.util.*;
import jing.mathTool.UncertainDouble;
import jing.param.*;
import jing.chemUtil.*;
import jing.chemParser.*;
//## package jing::rxnSys
// jing\rxnSys\ReactionModelGenerator.java
//## class ReactionModelGenerator
public class ReactionModelGenerator {
protected LinkedList timeStep; //## attribute timeStep
protected ReactionModel reactionModel; //gmagoon 9/24/07
protected String workingDirectory; //## attribute workingDirectory
// protected ReactionSystem reactionSystem;
protected LinkedList reactionSystemList; //10/24/07 gmagoon: changed from reactionSystem to reactionSystemList
protected int paraInfor;//svp
protected boolean error;//svp
protected boolean sensitivity;//svp
protected LinkedList species;//svp
// protected InitialStatus initialStatus;//svp
protected LinkedList initialStatusList; //10/23/07 gmagoon: changed from initialStatus to initialStatusList
protected double rtol;//svp
protected static double atol;
protected PrimaryReactionLibrary primaryReactionLibrary;//9/24/07 gmagoon
protected ReactionModelEnlarger reactionModelEnlarger;//9/24/07 gmagoon
protected LinkedHashSet speciesSeed;//9/24/07 gmagoon;
protected ReactionGenerator reactionGenerator;//9/24/07 gmagoon
protected LibraryReactionGenerator lrg;// = new LibraryReactionGenerator();//9/24/07 gmagoon: moved from ReactionSystem.java;10/4/07 gmagoon: postponed initialization of lrg til later
//10/23/07 gmagoon: added additional variables
protected LinkedList tempList;
protected LinkedList presList;
protected LinkedList validList;//10/24/07 gmagoon: added
//10/25/07 gmagoon: moved variables from modelGeneration()
protected LinkedList initList = new LinkedList();
protected LinkedList beginList = new LinkedList();
protected LinkedList endList = new LinkedList();
protected LinkedList lastTList = new LinkedList();
protected LinkedList currentTList = new LinkedList();
protected LinkedList lastPList = new LinkedList();
protected LinkedList currentPList = new LinkedList();
protected LinkedList conditionChangedList = new LinkedList();
protected LinkedList reactionChangedList = new LinkedList();
protected int numConversions;//5/6/08 gmagoon: moved from initializeReactionSystem() to be an attribute so it can be accessed by modelGenerator()
protected String equationOfState;
// 24Jun2009 MRH: variable stores the first temperature encountered in the condition.txt file
// This temperature is used to select the "best" kinetics from the rxn library
protected static Temperature temp4BestKinetics;
// This is the new "PrimaryReactionLibrary"
protected SeedMechanism seedMechanism;
protected PrimaryThermoLibrary primaryThermoLibrary;
protected PrimaryTransportLibrary primaryTransportLibrary;
protected boolean restart = false;
protected boolean readrestart = false;
protected boolean writerestart = false;
protected LinkedHashSet restartCoreSpcs = new LinkedHashSet();
protected LinkedHashSet restartEdgeSpcs = new LinkedHashSet();
protected LinkedHashSet restartCoreRxns = new LinkedHashSet();
protected LinkedHashSet restartEdgeRxns = new LinkedHashSet();
// Constructors
private HashSet specs = new HashSet();
//public static native long getCpuTime();
//static {System.loadLibrary("cpuTime");}
public static boolean rerunFame = false;
protected static double tolerance;//can be interpreted as "coreTol" (vs. edgeTol)
protected static double termTol;
protected static double edgeTol;
protected static int minSpeciesForPruning;
protected static int maxEdgeSpeciesAfterPruning;
public int limitingReactantID = 1;
//## operation ReactionModelGenerator()
public ReactionModelGenerator() {
workingDirectory = System.getProperty("RMG.workingDirectory");
}
//## operation initializeReactionSystem()
//10/24/07 gmagoon: changed name to initializeReactionSystems
public void initializeReactionSystems() throws InvalidSymbolException, IOException {
//#[ operation initializeReactionSystem()
try {
String initialConditionFile = System.getProperty("jing.rxnSys.ReactionModelGenerator.conditionFile");
if (initialConditionFile == null) {
System.out.println("undefined system property: jing.rxnSys.ReactionModelGenerator.conditionFile");
System.exit(0);
}
//double sandeep = getCpuTime();
//System.out.println(getCpuTime()/1e9/60);
FileReader in = new FileReader(initialConditionFile);
BufferedReader reader = new BufferedReader(in);
//TemperatureModel temperatureModel = null;//10/27/07 gmagoon: commented out
//PressureModel pressureModel = null;//10/27/07 gmagoon: commented out
// ReactionModelEnlarger reactionModelEnlarger = null;//10/9/07 gmagoon: commented out: unneeded now and causes scope problems
FinishController finishController = null;
//DynamicSimulator dynamicSimulator = null;//10/27/07 gmagoon: commented out and replaced with following line
LinkedList dynamicSimulatorList = new LinkedList();
//PrimaryReactionLibrary primaryReactionLibrary = null;//10/14/07 gmagoon: see below
setPrimaryReactionLibrary(null);//10/14/07 gmagoon: changed to use setPrimaryReactionLibrary
double [] conversionSet = new double[50];
String line = ChemParser.readMeaningfulLine(reader);
/*if (line.startsWith("Restart")){
StringTokenizer st = new StringTokenizer(line);
String token = st.nextToken();
token = st.nextToken();
if (token.equalsIgnoreCase("true")) {
//Runtime.getRuntime().exec("cp Restart/allSpecies.txt Restart/allSpecies1.txt");
//Runtime.getRuntime().exec("echo >> allSpecies.txt");
restart = true;
}
else if (token.equalsIgnoreCase("false")) {
Runtime.getRuntime().exec("rm Restart/allSpecies.txt");
restart = false;
}
else throw new InvalidSymbolException("UnIdentified Symbol "+token+" after Restart:");
}
else throw new InvalidSymbolException("Can't find Restart!");*/
//line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Database")){//svp
line = ChemParser.readMeaningfulLine(reader);
}
else throw new InvalidSymbolException("Can't find database!");
// if (line.startsWith("PrimaryThermoLibrary")){//svp
// line = ChemParser.readMeaningfulLine(reader);
// else throw new InvalidSymbolException("Can't find primary thermo library!");
/*
* Added by MRH on 15-Jun-2009
* Give user the option to change the maximum carbon, oxygen,
* and/or radical number for all species. These lines will be
* optional in the condition.txt file. Values are hard-
* coded into RMG (in ChemGraph.java), but any user-
* defined input will override these values.
*/
/*
* Moved from before InitialStatus to before PrimaryThermoLibary
* by MRH on 27-Oct-2009
* Overriding default values of maximum number of "X" per
* chemgraph should come before RMG attempts to make any
* chemgraph. The first instance RMG will attempt to make a
* chemgraph is in reading the primary thermo library.
*/
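// Illustrative condition.txt override lines for the optional limits described above
// (field names are taken from the commented-out parser below; values are assumptions):
//     MaxCarbonNumberPerSpecies: 20
//     MaxRadicalNumberPerSpecies: 3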
line = readMaxAtomTypes(line,reader);
// if (line.startsWith("MaxCarbonNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxCarbonNumberPerSpecies:"
// int maxCNum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxCarbonNumber(maxCNum);
// System.out.println("Note: Overriding RMG-defined MAX_CARBON_NUM with user-defined value: " + maxCNum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxOxygenNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxOxygenNumberPerSpecies:"
// int maxONum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxOxygenNumber(maxONum);
// System.out.println("Note: Overriding RMG-defined MAX_OXYGEN_NUM with user-defined value: " + maxONum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxRadicalNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxRadicalNumberPerSpecies:"
// int maxRadNum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxRadicalNumber(maxRadNum);
// System.out.println("Note: Overriding RMG-defined MAX_RADICAL_NUM with user-defined value: " + maxRadNum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxSulfurNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxSulfurNumberPerSpecies:"
// int maxSNum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxSulfurNumber(maxSNum);
// System.out.println("Note: Overriding RMG-defined MAX_SULFUR_NUM with user-defined value: " + maxSNum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxSiliconNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxSiliconNumberPerSpecies:"
// int maxSiNum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxSiliconNumber(maxSiNum);
// System.out.println("Note: Overriding RMG-defined MAX_SILICON_NUM with user-defined value: " + maxSiNum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxHeavyAtom")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxHeavyAtomPerSpecies:"
// int maxHANum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxHeavyAtomNumber(maxHANum);
// System.out.println("Note: Overriding RMG-defined MAX_HEAVYATOM_NUM with user-defined value: " + maxHANum);
// line = ChemParser.readMeaningfulLine(reader);
/*
* Read in the Primary Thermo Library
* MRH 7-Jul-2009
*/
if (line.startsWith("PrimaryThermoLibrary:")) {
/*
* MRH 27Feb2010:
* Changing the "read in Primary Thermo Library information" code
* into it's own method.
*
* Other modules (e.g. PopulateReactions) will be utilizing the exact code.
* Rather than copying and pasting code into other modules, just have
* everything call this new method: readAndMakePTL
*/
readAndMakePTL(reader);
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate PrimaryThermoLibrary field");
line = ChemParser.readMeaningfulLine(reader);
/*
* MRH 17-May-2010:
* Added primary transport library field
*/
if (line.toLowerCase().startsWith("primarytransportlibrary")) {
readAndMakePTransL(reader);
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate PrimaryTransportLibrary field.");
line = ChemParser.readMeaningfulLine(reader);
// Extra forbidden structures may be specified after the Primary Thermo Library
if (line.startsWith("ForbiddenStructures:")) {
readExtraForbiddenStructures(reader);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.toLowerCase().startsWith("readrestart")) {
StringTokenizer st = new StringTokenizer(line);
String tempString = st.nextToken(); // "ReadRestart:"
tempString = st.nextToken();
if (tempString.toLowerCase().equals("yes")) {
readrestart = true;
readRestartSpecies();
} else readrestart = false;
line = ChemParser.readMeaningfulLine(reader);
} else throw new InvalidSymbolException("Cannot locate ReadRestart field");
if (line.toLowerCase().startsWith("writerestart")) {
StringTokenizer st = new StringTokenizer(line);
String tempString = st.nextToken(); // "WriteRestart:"
tempString = st.nextToken();
if (tempString.toLowerCase().equals("yes"))
writerestart = true;
else writerestart = false;
line = ChemParser.readMeaningfulLine(reader);
} else throw new InvalidSymbolException("Cannot locate WriteRestart field");
// read temperature model
//gmagoon 10/23/07: modified to handle multiple temperatures; note that this requires different formatting of units in condition.txt
if (line.startsWith("TemperatureModel:")) {
createTModel(line);
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken();
// String modelType = st.nextToken();
// //String t = st.nextToken();
// String unit = st.nextToken();
// unit = ChemParser.removeBrace(unit);
// if (modelType.equals("Constant")) {
// tempList = new LinkedList();
// //read first temperature
// double t = Double.parseDouble(st.nextToken());
// tempList.add(new ConstantTM(t, unit));
// Temperature temp = new Temperature(t, unit);//10/29/07 gmagoon: added this line and next two lines to set Global.lowTemperature and Global.highTemperature
// Global.lowTemperature = (Temperature)temp.clone();
// Global.highTemperature = (Temperature)temp.clone();
// //read remaining temperatures
// while (st.hasMoreTokens()) {
// t = Double.parseDouble(st.nextToken());
// tempList.add(new ConstantTM(t, unit));
// temp = new Temperature(t,unit);//10/29/07 gmagoon: added this line and next two "if" statements to set Global.lowTemperature and Global.highTemperature
// if(temp.getK() < Global.lowTemperature.getK())
// Global.lowTemperature = (Temperature)temp.clone();
// if(temp.getK() > Global.highTemperature.getK())
// Global.highTemperature = (Temperature)temp.clone();
// // Global.temperature = new Temperature(t,unit);
//10/23/07 gmagoon: commenting out; further updates needed to get this to work
//else if (modelType.equals("Curved")) {
// String t = st.nextToken();
// // add reading curved temperature function here
// temperatureModel = new CurvedTM(new LinkedList());
// else {
// throw new InvalidSymbolException("condition.txt: Unknown TemperatureModel = " + modelType);
}
else throw new InvalidSymbolException("condition.txt: can't find TemperatureModel!");
// read in pressure model
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("PressureModel:")) {
createPModel(line);
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken();
// String modelType = st.nextToken();
// //String p = st.nextToken();
// String unit = st.nextToken();
// unit = ChemParser.removeBrace(unit);
// if (modelType.equals("Constant")) {
// presList = new LinkedList();
// //read first pressure
// double p = Double.parseDouble(st.nextToken());
// Pressure pres = new Pressure(p, unit);
// Global.lowPressure = (Pressure)pres.clone();
// Global.highPressure = (Pressure)pres.clone();
// presList.add(new ConstantPM(p, unit));
// //read remaining temperatures
// while (st.hasMoreTokens()) {
// p = Double.parseDouble(st.nextToken());
// presList.add(new ConstantPM(p, unit));
// pres = new Pressure(p, unit);
// if(pres.getBar() < Global.lowPressure.getBar())
// Global.lowPressure = (Pressure)pres.clone();
// if(pres.getBar() > Global.lowPressure.getBar())
// Global.highPressure = (Pressure)pres.clone();
// //Global.pressure = new Pressure(p, unit);
// //10/23/07 gmagoon: commenting out; further updates needed to get this to work
// //else if (modelType.equals("Curved")) {
// // // add reading curved pressure function here
// // pressureModel = new CurvedPM(new LinkedList());
// else {
// throw new InvalidSymbolException("condition.txt: Unknown PressureModel = " + modelType);
}
else throw new InvalidSymbolException("condition.txt: can't find PressureModel!");
// after PressureModel comes an optional line EquationOfState
// if "EquationOfState: Liquid" is found then initial concentrations are assumed to be correct
// if it is ommited, then initial concentrations are normalised to ensure PV=NRT (ideal gas law)
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("EquationOfState")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String eosType = st.nextToken();
if (eosType.equals("Liquid")) {
equationOfState="Liquid";
System.out.println("Equation of state: Liquid. Relying on concentrations in input file to get density correct; not checking PV=NRT");
}
line = ChemParser.readMeaningfulLine(reader);
}
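// Illustrative condition.txt line for the optional field handled above
// (only the "Liquid" value is recognized; omit the line for ideal-gas normalization):
//     EquationOfState: Liquid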
// Read in InChI generation
if (line.startsWith("InChIGeneration:")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String inchiOnOff = st.nextToken().toLowerCase();
if (inchiOnOff.equals("on")) {
Species.useInChI = true;
} else if (inchiOnOff.equals("off")) {
Species.useInChI = false;
}
else throw new InvalidSymbolException("condition.txt: Unknown InChIGeneration flag: " + inchiOnOff);
line = ChemParser.readMeaningfulLine(reader);
}
// Read in Solvation effects
if (line.startsWith("Solvation:")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String solvationOnOff = st.nextToken().toLowerCase();
if (solvationOnOff.equals("on")) {
Species.useSolvation = true;
} else if (solvationOnOff.equals("off")) {
Species.useSolvation = false;
}
else throw new InvalidSymbolException("condition.txt: Unknown solvation flag: " + solvationOnOff);
line = ChemParser.readMeaningfulLine(reader);
}
//line = ChemParser.readMeaningfulLine(reader);//read in reactants or thermo line
// Read in optional QM thermo generation
if (line.startsWith("ThermoMethod:")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String thermoMethod = st.nextToken().toLowerCase();
if (thermoMethod.equals("qm")) {
ChemGraph.useQM = true;
if(st.hasMoreTokens()){//override the default qmprogram ("both") if there are more; current options: "gaussian03" and "mopac" and of course, "both"
QMTP.qmprogram = st.nextToken().toLowerCase();
}
line=ChemParser.readMeaningfulLine(reader);
if(line.startsWith("QMForCyclicsOnly:")){
StringTokenizer st2 = new StringTokenizer(line);
String nameCyc = st2.nextToken();
String option = st2.nextToken().toLowerCase();
if (option.equals("on")) {
ChemGraph.useQMonCyclicsOnly = true;
}
}
else{
System.out.println("condition.txt: Can't find 'QMForCyclicsOnly:' field");
System.exit(0);
}
line=ChemParser.readMeaningfulLine(reader);
if(line.startsWith("MaxRadNumForQM:")){
StringTokenizer st3 = new StringTokenizer(line);
String nameRadNum = st3.nextToken();
Global.maxRadNumForQM = Integer.parseInt(st3.nextToken());
}
else{
System.out.println("condition.txt: Can't find 'MaxRadNumForQM:' field");
System.exit(0);
}
}//otherwise, the flag useQM will remain false by default and the traditional group additivity approach will be used
line = ChemParser.readMeaningfulLine(reader);//read in reactants
}
// // Read in Solvation effects
// if (line.startsWith("Solvation:")) {
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken();
// String solvationOnOff = st.nextToken().toLowerCase();
// if (solvationOnOff.equals("on")) {
// Species.useSolvation = true;
// } else if (solvationOnOff.equals("off")) {
// Species.useSolvation = false;
// else throw new InvalidSymbolException("condition.txt: Unknown solvation flag: " + solvationOnOff);
// else throw new InvalidSymbolException("condition.txt: Cannot find solvation flag.");
// read in reactants
//10/4/07 gmagoon: moved to initializeCoreEdgeReactionModel
//LinkedHashSet p_speciesSeed = new LinkedHashSet();//gmagoon 10/4/07: changed to p_speciesSeed
//setSpeciesSeed(p_speciesSeed);//gmagoon 10/4/07: added
LinkedHashMap speciesSet = new LinkedHashMap();
/*
* 7/Apr/2010: MRH
* Neither of these variables are utilized
*/
// LinkedHashMap speciesStatus = new LinkedHashMap();
// int speciesnum = 1;
//System.out.println(line);
if (line.startsWith("InitialStatus")) {
speciesSet = populateInitialStatusListWithReactiveSpecies(reader);
// line = ChemParser.readMeaningfulLine(reader);
// while (!line.equals("END")) {
// StringTokenizer st = new StringTokenizer(line);
// String index = st.nextToken();
// String name = null;
// if (!index.startsWith("(")) name = index;
// else name = st.nextToken();
// //if (restart) name += "("+speciesnum+")";
// // 24Jun2009: MRH
// // Check if the species name begins with a number.
// // If so, terminate the program and inform the user to choose
// // a different name. This is implemented so that the chem.inp
// // file generated will be valid when run in Chemkin
// try {
// int doesNameBeginWithNumber = Integer.parseInt(name.substring(0,1));
// System.out.println("\nA species name should not begin with a number." +
// " Please rename species: " + name + "\n");
// System.exit(0);
// } catch (NumberFormatException e) {
// // We're good
// speciesnum ++;
// if (!(st.hasMoreTokens())) throw new InvalidSymbolException("Couldn't find concentration of species: "+name);
// String conc = st.nextToken();
// double concentration = Double.parseDouble(conc);
// String unit = st.nextToken();
// unit = ChemParser.removeBrace(unit);
// if (unit.equals("mole/l") || unit.equals("mol/l") || unit.equals("mole/liter") || unit.equals("mol/liter")) {
// concentration /= 1000;
// unit = "mol/cm3";
// else if (unit.equals("mole/m3") || unit.equals("mol/m3")) {
// concentration /= 1000000;
// unit = "mol/cm3";
// else if (unit.equals("molecule/cm3") || unit.equals("molecules/cm3")) {
// concentration /= 6.022e23;
// else if (!unit.equals("mole/cm3") && !unit.equals("mol/cm3")) {
// throw new InvalidUnitException("Species Concentration in condition.txt!");
// //GJB to allow "unreactive" species that only follow user-defined library reactions.
// // They will not react according to RMG reaction families
// boolean IsReactive = true;
// boolean IsConstantConcentration = false;
// while (st.hasMoreTokens()) {
// String reactive = st.nextToken().trim();
// if (reactive.equalsIgnoreCase("unreactive"))
// IsReactive = false;
// if (reactive.equalsIgnoreCase("constantconcentration"))
// IsConstantConcentration=true;
// Graph g = ChemParser.readChemGraph(reader);
// ChemGraph cg = null;
// try {
// cg = ChemGraph.make(g);
// catch (ForbiddenStructureException e) {
// System.out.println("Forbidden Structure:\n" + e.getMessage());
// throw new InvalidSymbolException("A species in the input file has a forbidden structure.");
// //System.out.println(name);
// Species species = Species.make(name,cg);
// species.setReactivity(IsReactive); // GJB
// species.setConstantConcentration(IsConstantConcentration);
// speciesSet.put(name, species);
// getSpeciesSeed().add(species);
// double flux = 0;
// int species_type = 1; // reacted species
// SpeciesStatus ss = new SpeciesStatus(species,species_type,concentration,flux);
// speciesStatus.put(species, ss);
// line = ChemParser.readMeaningfulLine(reader);
// ReactionTime initial = new ReactionTime(0,"S");
// //10/23/07 gmagoon: modified for handling multiple temperature, pressure conditions; note: concentration within speciesStatus (and list of conversion values) should not need to be modified for each T,P since this is done within isTPCconsistent in ReactionSystem
// initialStatusList = new LinkedList();
// for (Iterator iter = tempList.iterator(); iter.hasNext(); ) {
// TemperatureModel tm = (TemperatureModel)iter.next();
// for (Iterator iter2 = presList.iterator(); iter2.hasNext(); ){
// PressureModel pm = (PressureModel)iter2.next();
// // LinkedHashMap speStat = (LinkedHashMap)speciesStatus.clone();//10/31/07 gmagoon: trying creating multiple instances of speciesStatus to address issues with concentration normalization (last normalization seems to apply to all)
// Set ks = speciesStatus.keySet();
// LinkedHashMap speStat = new LinkedHashMap();
// for (Iterator iter3 = ks.iterator(); iter3.hasNext();){//11/1/07 gmagoon: perform deep copy; (is there an easier or more elegant way to do this?)
// SpeciesStatus ssCopy = (SpeciesStatus)speciesStatus.get(iter3.next());
// speStat.put(ssCopy.getSpecies(),new SpeciesStatus(ssCopy.getSpecies(),ssCopy.getSpeciesType(),ssCopy.getConcentration(),ssCopy.getFlux()));
// initialStatusList.add(new InitialStatus(speStat,tm.getTemperature(initial),pm.getPressure(initial)));
}
else throw new InvalidSymbolException("condition.txt: can't find InitialStatus!");
// read in inert gas concentration
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("InertGas:")) {
populateInitialStatusListWithInertSpecies(reader);
// line = ChemParser.readMeaningfulLine(reader);
// while (!line.equals("END")) {
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken().trim();
// String conc = st.nextToken();
// double inertConc = Double.parseDouble(conc);
// String unit = st.nextToken();
// unit = ChemParser.removeBrace(unit);
// if (unit.equals("mole/l") || unit.equals("mol/l") || unit.equals("mole/liter") || unit.equals("mol/liter")) {
// inertConc /= 1000;
// unit = "mol/cm3";
// else if (unit.equals("mole/m3") || unit.equals("mol/m3")) {
// inertConc /= 1000000;
// unit = "mol/cm3";
// else if (unit.equals("molecule/cm3") || unit.equals("molecules/cm3")) {
// inertConc /= 6.022e23;
// unit = "mol/cm3";
// else if (!unit.equals("mole/cm3") && !unit.equals("mol/cm3")) {
// throw new InvalidUnitException("Inert Gas Concentration not recognized: " + unit);
// //SystemSnapshot.putInertGas(name,inertConc);
// for(Iterator iter=initialStatusList.iterator();iter.hasNext(); ){//6/23/09 gmagoon: needed to change this to accommodate non-static inertConc
// ((InitialStatus)iter.next()).putInertGas(name,inertConc);
// line = ChemParser.readMeaningfulLine(reader);
}
else throw new InvalidSymbolException("condition.txt: can't find Inert gas concentration!");
// read in spectroscopic data estimator
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("SpectroscopicDataEstimator:")) {
setSpectroscopicDataMode(line);
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken();
// String sdeType = st.nextToken().toLowerCase();
// if (sdeType.equals("frequencygroups") || sdeType.equals("default")) {
// SpectroscopicData.mode = SpectroscopicData.Mode.FREQUENCYGROUPS;
// else if (sdeType.equals("therfit") || sdeType.equals("threefrequencymodel")) {
// SpectroscopicData.mode = SpectroscopicData.Mode.THREEFREQUENCY;
// else if (sdeType.equals("off") || sdeType.equals("none")) {
// SpectroscopicData.mode = SpectroscopicData.Mode.OFF;
// else throw new InvalidSymbolException("condition.txt: Unknown SpectroscopicDataEstimator = " + sdeType);
}
else throw new InvalidSymbolException("condition.txt: can't find SpectroscopicDataEstimator!");
// pressure dependence and related flags
line = ChemParser.readMeaningfulLine(reader);
if (line.toLowerCase().startsWith("pressuredependence:"))
line = setPressureDependenceOptions(line,reader);
else
throw new InvalidSymbolException("condition.txt: can't find PressureDependence flag!");
// include species (optional)
/*
*
* MRH 3-APR-2010:
* This if statement is no longer necessary and was causing an error
* when the PressureDependence field was set to "off"
*/
// if (!PDepRateConstant.getMode().name().equals("CHEBYSHEV") &&
// !PDepRateConstant.getMode().name().equals("PDEPARRHENIUS"))
// line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("IncludeSpecies")) {
StringTokenizer st = new StringTokenizer(line);
String iS = st.nextToken();
String fileName = st.nextToken();
HashSet includeSpecies = readIncludeSpecies(fileName);
((RateBasedRME)reactionModelEnlarger).addIncludeSpecies(includeSpecies);
line = ChemParser.readMeaningfulLine(reader);
}
// read in finish controller
if (line.startsWith("FinishController")) {
line = ChemParser.readMeaningfulLine(reader);
StringTokenizer st = new StringTokenizer(line);
String index = st.nextToken();
String goal = st.nextToken();
String type = st.nextToken();
TerminationTester tt;
if (type.startsWith("Conversion")) {
LinkedList spc = new LinkedList();
while (st.hasMoreTokens()) {
String name = st.nextToken();
Species spe = (Species)speciesSet.get(name);
if (spe == null) throw new InvalidConversionException("Unknown reactant: " + name);
setLimitingReactantID(spe.getID());
String conv = st.nextToken();
double conversion;
try {
if (conv.endsWith("%")) {
conversion = Double.parseDouble(conv.substring(0,conv.length()-1))/100;
}
else {
conversion = Double.parseDouble(conv);
}
conversionSet[49] = conversion;
}
catch (NumberFormatException e) {
throw new NumberFormatException("wrong number format for conversion in initial condition file!");
}
SpeciesConversion sc = new SpeciesConversion(spe, conversion);
spc.add(sc);
}
tt = new ConversionTT(spc);
}
else if (type.startsWith("ReactionTime")) {
double time = Double.parseDouble(st.nextToken());
String unit = ChemParser.removeBrace(st.nextToken());
ReactionTime rt = new ReactionTime(time, unit);
tt = new ReactionTimeTT(rt);
}
else {
throw new InvalidSymbolException("condition.txt: Unknown FinishController = " + type);
}
line = ChemParser.readMeaningfulLine(reader);
st = new StringTokenizer(line, ":");
String temp = st.nextToken();
String tol = st.nextToken();
try {
if (tol.endsWith("%")) {
tolerance = Double.parseDouble(tol.substring(0,tol.length()-1))/100;
}
else {
tolerance = Double.parseDouble(tol);
}
}
catch (NumberFormatException e) {
throw new NumberFormatException("wrong number format for conversion in initial condition file!");
}
ValidityTester vt = null;
if (reactionModelEnlarger instanceof RateBasedRME) vt = new RateBasedVT(tolerance);
else if (reactionModelEnlarger instanceof RateBasedPDepRME) vt = new RateBasedPDepVT(tolerance);
else throw new InvalidReactionModelEnlargerException();
finishController = new FinishController(tt, vt);
}
else throw new InvalidSymbolException("condition.txt: can't find FinishController!");
// read in dynamic simulator
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("DynamicSimulator")) {
StringTokenizer st = new StringTokenizer(line,":");
String temp = st.nextToken();
String simulator = st.nextToken().trim();
//read in non-negative option if it exists: syntax would be something like this: "DynamicSimulator: DASSL: non-negative"
if (st.hasMoreTokens()){
if (st.nextToken().trim().toLowerCase().equals("non-negative")){
if(simulator.toLowerCase().equals("dassl")) JDAS.nonnegative = true;
else{
System.err.println("Non-negative option is currently only supported for DASSL. Switch to DASSL solver or remove non-negative option.");
System.exit(0);
}
}
}
numConversions = 0;//5/6/08 gmagoon: moved declaration from initializeReactionSystem() to be an attribute so it can be accessed by modelGenerator()
//int numConversions = 0;
boolean autoflag = false;//5/2/08 gmagoon: updating the following if/else-if block to consider input where we want to check model validity within the ODE solver at each time step; this will be indicated by the use of a string beginning with "AUTO" after the "TimeStep" or "Conversions" line
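// Illustrative condition.txt fragments for the branches below (values are
// assumptions; a trailing token beginning with "AUTO" switches on autoflag):
//     TimeStep: AUTO
//     Conversions: AUTO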
// read in time step
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("TimeStep:") && finishController.terminationTester instanceof ReactionTimeTT) {
st = new StringTokenizer(line);
temp = st.nextToken();
while (st.hasMoreTokens()) {
temp = st.nextToken();
if (temp.startsWith("AUTO")){//note potential opportunity for making case insensitive by using: temp.toUpperCase().startsWith("AUTO")
autoflag=true;
}
else if (!autoflag){//use "else if" to make sure additional numbers are not read in case numbers are erroneously used following AUTO; note that there could still be a problem if numbers come before "AUTO"
double tStep = Double.parseDouble(temp);
String unit = "sec";
setTimeStep(new ReactionTime(tStep, unit));
}
}
((ReactionTimeTT)finishController.terminationTester).setTimeSteps(timeStep);
}
else if (line.startsWith("Conversions:") && finishController.terminationTester instanceof ConversionTT){
st = new StringTokenizer(line);
temp = st.nextToken();
int i=0;
SpeciesConversion sc = (SpeciesConversion)((ConversionTT)finishController.terminationTester).speciesGoalConversionSet.get(0);
Species convSpecies = sc.species;
Iterator iter = ((InitialStatus)(initialStatusList.get(0))).getSpeciesStatus();//10/23/07 gmagoon: changed to use first element of initialStatusList, as subsequent operations should not be affected by which one is chosen
double initialConc = 0;
while (iter.hasNext()){
SpeciesStatus sps = (SpeciesStatus)iter.next();
if (sps.species.equals(convSpecies)) initialConc = sps.concentration;
}
while (st.hasMoreTokens()){
temp=st.nextToken();
if (temp.startsWith("AUTO")){
autoflag=true;
}
else if (!autoflag){
double conv = Double.parseDouble(temp);
conversionSet[i] = (1-conv) * initialConc;
i++;
}
}
conversionSet[i] = (1 - conversionSet[49])* initialConc;
numConversions = i+1;
}
else throw new InvalidSymbolException("condition.txt: can't find time step for dynamic simulator!");
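/*
 * Illustrative TimeStep / Conversions lines for the block above (format inferred
 * from the parsing logic; all numbers are hypothetical). For a ReactionTime-based
 * finish controller one of these forms is expected:
 *
 * TimeStep: 1e-4 1e-3 1e-2 1e-1
 * TimeStep: AUTO
 *
 * For a Conversion-based finish controller:
 *
 * Conversions: 0.1 0.2 0.5
 * Conversions: AUTO
 * Conversions: AUTOPRUNE
 *
 * "AUTO"/"AUTOPRUNE" sets autoflag so that model validity is checked inside the ODE
 * solver; explicit numbers define intermediate time steps (in sec) or conversions.
 */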
if (temp.startsWith("AUTOPRUNE")){//for the AUTOPRUNE case, read in additional lines for termTol and edgeTol
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("TerminationTolerance:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
termTol = Double.parseDouble(st.nextToken());
}
else {
System.out.println("Cannot find TerminationTolerance in condition.txt");
System.exit(0);
}
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("PruningTolerance:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
edgeTol = Double.parseDouble(st.nextToken());
}
else {
System.out.println("Cannot find PruningTolerance in condition.txt");
System.exit(0);
}
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("MinSpeciesForPruning:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
minSpeciesForPruning = Integer.parseInt(st.nextToken());
}
else {
System.out.println("Cannot find MinSpeciesForPruning in condition.txt");
System.exit(0);
}
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("MaxEdgeSpeciesAfterPruning:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
maxEdgeSpeciesAfterPruning = Integer.parseInt(st.nextToken());
}
else {
System.out.println("Cannot find MaxEdgeSpeciesAfterPruning in condition.txt");
System.exit(0);
}
//print header for pruning log (based on restart format)
BufferedWriter bw = null;
try {
File f = new File("Pruning/edgeReactions.txt");
bw = new BufferedWriter(new FileWriter(f, true));
String EaUnits = ArrheniusKinetics.getEaUnits();
bw.write("UnitsOfEa: " + EaUnits);
bw.newLine();
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
else if (temp.startsWith("AUTO")){//in the non-autoprune case (i.e. original AUTO functionality), we set the new parameters to values that should reproduce original functionality
termTol = tolerance;
edgeTol = 0;
minSpeciesForPruning = 999999;//arbitrary high number (actually, the value here should not matter, since pruning should not be done)
maxEdgeSpeciesAfterPruning = 999999;
}
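/*
 * When AUTOPRUNE is requested, four additional lines are read immediately after
 * the TimeStep/Conversions line. The keywords are exactly those checked above;
 * the values shown here are hypothetical:
 *
 * TerminationTolerance: 1e4
 * PruningTolerance: 1e-15
 * MinSpeciesForPruning: 1000
 * MaxEdgeSpeciesAfterPruning: 1000
 */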
// read in atol
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Atol:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
atol = Double.parseDouble(st.nextToken());
}
else throw new InvalidSymbolException("condition.txt: can't find Atol for dynamic simulator!");
// read in rtol
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Rtol:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
String rel_tol = st.nextToken();
if (rel_tol.endsWith("%"))
rtol = Double.parseDouble(rel_tol.substring(0,rel_tol.length()-1));
else
rtol = Double.parseDouble(rel_tol);
}
else throw new InvalidSymbolException("condition.txt: can't find Rtol for dynamic simulator!");
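/*
 * Illustrative absolute/relative tolerance lines for the ODE solver, as parsed
 * above (values are hypothetical). A trailing '%' on the Rtol value is stripped
 * before parsing.
 *
 * Atol: 1e-18
 * Rtol: 1e-4
 */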
if (simulator.equals("DASPK")) {
paraInfor = 0;//svp
// read in SA
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Error bars")) {//svp
st = new StringTokenizer(line,":");
temp = st.nextToken();
String sa = st.nextToken().trim();
if (sa.compareToIgnoreCase("on")==0) {
paraInfor = 1;
error = true;
}
else if (sa.compareToIgnoreCase("off")==0) {
paraInfor = 0;
error = false;
}
else throw new InvalidSymbolException("condition.txt: can't find error on/off information!");
}
else throw new InvalidSymbolException("condition.txt: can't find SA information!");
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Display sensitivity coefficients")){//svp
st = new StringTokenizer(line,":");
temp = st.nextToken();
String sa = st.nextToken().trim();
if (sa.compareToIgnoreCase("on")==0){
paraInfor = 1;
sensitivity = true;
}
else if (sa.compareToIgnoreCase("off")==0){
if (paraInfor != 1){
paraInfor = 0;
}
sensitivity = false;
}
else throw new InvalidSymbolException("condition.txt: can't find SA on/off information!");
//10/23/07 gmagoon: changed below from dynamicSimulator to dynamicSimulatorList
//6/25/08 gmagoon: changed loop to use i index, and updated DASPK constructor to pass i (mirroring changes to DASSL)
//6/25/08 gmagoon: updated to pass autoflag and validity tester; this requires FinishController block of input file to be present before DynamicSimulator block, but this requirement may have already existed anyway, particularly in construction of conversion/time step lists; *perhaps we should formalize this requirement by checking to make sure validityTester is not null?
for (int i = 0;i < initialStatusList.size();i++) {
dynamicSimulatorList.add(new JDASPK(rtol, atol, 0, (InitialStatus)initialStatusList.get(i), i,finishController.getValidityTester(), autoflag, termTol, tolerance));
}
}
species = new LinkedList();
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Display sensitivity information") ){
line = ChemParser.readMeaningfulLine(reader);
System.out.println(line);
while (!line.equals("END")){
st = new StringTokenizer(line);
String name = st.nextToken();
if (name.toUpperCase().equals("ALL")) ReactionSystem.printAllSens = true; //gmagoon 12/22/09: if the line contains the word "all", turn on the flag to print out sensitivity information for everything
species.add(name);
line = ChemParser.readMeaningfulLine(reader);
}
}
}
else if (simulator.equals("DASSL")) {
//10/23/07 gmagoon: changed below from dynamicSimulator to dynamicSimulatorList
// for (Iterator iter = initialStatusList.iterator(); iter.hasNext(); ) {
// dynamicSimulatorList.add(new JDASSL(rtol, atol, 0, (InitialStatus)iter.next()));
//11/1/07 gmagoon: changed loop to use i index, and updated DASSL constructor to pass i
//5/5/08 gmagoon: updated to pass autoflag and validity tester; this requires FinishController block of input file to be present before DynamicSimulator block, but this requirement may have already existed anyway, particularly in construction of conversion/time step lists; *perhaps we should formalize this requirement by checking to make sure validityTester is not null?
for (int i = 0;i < initialStatusList.size();i++) {
dynamicSimulatorList.add(new JDASSL(rtol, atol, 0, (InitialStatus)initialStatusList.get(i), i, finishController.getValidityTester(), autoflag, termTol, tolerance));
}
}
else if (simulator.equals("Chemkin")) {
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("ReactorType")) {
st = new StringTokenizer(line, ":");
temp = st.nextToken();
String reactorType = st.nextToken().trim();
//10/23/07 gmagoon: changed below from dynamicSimulator to dynamicSimulatorList
for (Iterator iter = initialStatusList.iterator(); iter.hasNext(); ) {
//dynamicSimulatorList.add(new JDASPK(rtol, atol, 0, (InitialStatus)iter.next()));
dynamicSimulatorList.add(new Chemkin(rtol, atol, reactorType));//11/4/07 gmagoon: fixing apparent cut/paste error
}
}
}
else throw new InvalidSymbolException("condition.txt: Unknown DynamicSimulator = " + simulator);
//10/23/07 gmagoon: changed below from dynamicSimulator to dynamicSimulatorList; note: although conversionSet should actually be different for each T,P condition, it will be modified in isTPCconsistent within ReactionSystem
for (Iterator iter = dynamicSimulatorList.iterator(); iter.hasNext(); ) {
double [] cs = conversionSet.clone();//11/1/07 gmagoon: trying to make sure multiple instances of conversionSet are used
((DynamicSimulator)(iter.next())).addConversion(cs, numConversions);
}
}
else throw new InvalidSymbolException("condition.txt: can't find DynamicSimulator!");
// read in reaction model enlarger
/* Read in the Primary Reaction Library
* The user can specify as many PRLs,
* including none, as they like.
*/
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("PrimaryReactionLibrary:")) {
readAndMakePRL(reader);
} else throw new InvalidSymbolException("condition.txt: can't find PrimaryReactionLibrary");
/*
* Added by MRH 12-Jun-2009
*
* The SeedMechanism acts almost exactly as the old
* PrimaryReactionLibrary did. Whatever is in the SeedMechanism
* will be placed in the core at the beginning of the simulation.
* The user can specify as many seed mechanisms as they like, with
* the priority (in the case of duplicates) given to the first
* instance. There is no on/off flag.
*/
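/*
 * Illustrative SeedMechanism section matching the parsing below (the name and
 * location are hypothetical placeholders; the section is closed by END):
 *
 * SeedMechanism:
 * Name: MySeedMech
 * Location: mySeedMechDirectory
 * GenerateReactions: yes
 * END
 */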
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("SeedMechanism:")) {
int numMechs = 0;
line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
String[] tempString = line.split("Name: ");
String name = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("Location: ");
String location = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("GenerateReactions: ");
String generateStr = tempString[tempString.length-1].trim();
boolean generate = true;
if (generateStr.equalsIgnoreCase("yes") ||
generateStr.equalsIgnoreCase("on") ||
generateStr.equalsIgnoreCase("true")){
generate = true;
System.out.println("Will generate cross-reactions between species in seed mechanism " + name);
} else if(generateStr.equalsIgnoreCase("no") ||
generateStr.equalsIgnoreCase("off") ||
generateStr.equalsIgnoreCase("false")) {
generate = false;
System.out.println("Will NOT initially generate cross-reactions between species in seed mechanism "+ name);
System.out.println("This may have unintended consequences");
}
else {
System.err.println("Input file invalid");
System.err.println("Please include a 'GenerateReactions: yes/no' line for seed mechanism "+name);
System.exit(0);
}
String path = System.getProperty("jing.rxn.ReactionLibrary.pathName");
path += "/" + location;
if (numMechs==0) {
setSeedMechanism(new SeedMechanism(name, path, generate));
++numMechs;
}
else {
getSeedMechanism().appendSeedMechanism(name, path, generate);
++numMechs;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (numMechs != 0) System.out.println("Seed Mechanisms in use: " + getSeedMechanism().getName());
else setSeedMechanism(null);
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate SeedMechanism field");
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("ChemkinUnits")) {
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Verbose:")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken();
String OnOff = st.nextToken().toLowerCase();
if (OnOff.equals("off")) {
ArrheniusKinetics.setVerbose(false);
} else if (OnOff.equals("on")) {
ArrheniusKinetics.setVerbose(true);
}
line = ChemParser.readMeaningfulLine(reader);
}
/*
* MRH 3MAR2010:
* Adding user option regarding chemkin file
*
* New field: If user would like the empty SMILES string
* printed with each species in the thermochemistry portion
* of the generated chem.inp file
*/
if (line.toUpperCase().startsWith("SMILES")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // Should be "SMILES:"
String OnOff = st.nextToken().toLowerCase();
if (OnOff.equals("off")) {
Chemkin.setSMILES(false);
} else if (OnOff.equals("on")) {
Chemkin.setSMILES(true);
/*
* MRH 9MAR2010:
* MRH decided not to generate an InChI for every new species
* during an RMG simulation (especially since it is not used
* for anything). Instead, they will only be generated in the
* post-processing, if the user asked for InChIs.
*/
//Species.useInChI = true;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("A")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // Should be "A:"
String units = st.nextToken();
if (units.equals("moles") || units.equals("molecules"))
ArrheniusKinetics.setAUnits(units);
else {
System.err.println("Units for A were not recognized: " + units);
System.exit(0);
}
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate Chemkin units A field.");
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Ea")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // Should be "Ea:"
String units = st.nextToken();
if (units.equals("kcal/mol") || units.equals("cal/mol") ||
units.equals("kJ/mol") || units.equals("J/mol") || units.equals("Kelvins"))
ArrheniusKinetics.setEaUnits(units);
else {
System.err.println("Units for Ea were not recognized: " + units);
System.exit(0);
}
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate Chemkin units Ea field.");
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate ChemkinUnits field.");
in.close();
// LinkedList temperatureArray = new LinkedList();
// LinkedList pressureArray = new LinkedList();
// Iterator iterIS = initialStatusList.iterator();
// for (Iterator iter = tempList.iterator(); iter.hasNext(); ) {
// TemperatureModel tm = (TemperatureModel)iter.next();
// for (Iterator iter2 = presList.iterator(); iter2.hasNext(); ){
// PressureModel pm = (PressureModel)iter2.next();
// InitialStatus is = (InitialStatus)iterIS.next();
// temperatureArray.add(tm.getTemperature(is.getTime()));
// pressureArray.add(pm.getPressure(is.getTime()));
// PDepNetwork.setTemperatureArray(temperatureArray);
// PDepNetwork.setPressureArray(pressureArray);
//10/4/07 gmagoon: moved to modelGeneration()
//ReactionGenerator p_reactionGenerator = new TemplateReactionGenerator();//10/4/07 gmagoon: changed to p_reactionGenerator from reactionGenerator
// setReactionGenerator(p_reactionGenerator);//10/4/07 gmagoon: added
/*
* MRH 12-Jun-2009
* A TemplateReactionGenerator now requires a Temperature be passed to it.
* This allows RMG to determine the "best" kinetic parameters to use
* in the mechanism generation. For now, I choose to pass the first
* temperature in the list of temperatures. RMG only outputs one mechanism,
* even for multiple temperature/pressure systems, so we can only have one
* set of kinetics.
*/
Temperature t = new Temperature(300,"K");
for (Iterator iter = tempList.iterator(); iter.hasNext();) {
TemperatureModel tm = (TemperatureModel)iter.next();
t = tm.getTemperature(new ReactionTime(0,"sec"));
setTemp4BestKinetics(t);
break;
}
setReactionGenerator(new TemplateReactionGenerator()); //11/4/07 gmagoon: moved from modelGeneration; mysteriously, moving this later moves "Father" lines up in output at runtime, immediately after condition file (as in original code); previously, these Father lines were just before "Can't read primary reaction library files!"
lrg = new LibraryReactionGenerator();//10/10/07 gmagoon: moved from modelGeneration (sequence lrg increases species id, and the different sequence was causing problems as main species id was 6 instead of 1); //10/31/07 gmagoon: restored this line from 10/10/07 backup: somehow it got lost along the way; 11/5/07 gmagoon: changed to use "lrg =" instead of setLibraryReactionGenerator
//10/24/07 gmagoon: updated to use multiple reactionSystem variables
reactionSystemList = new LinkedList();
// LinkedList temperatureArray = new LinkedList();//10/30/07 gmagoon: added temperatureArray variable for passing to PDepNetwork; 11/6/07 gmagoon: moved before initialization of lrg;
Iterator iter3 = initialStatusList.iterator();
Iterator iter4 = dynamicSimulatorList.iterator();
int i = 0;//10/30/07 gmagoon: added
for (Iterator iter = tempList.iterator(); iter.hasNext(); ) {
TemperatureModel tm = (TemperatureModel)iter.next();
//InitialStatus is = (InitialStatus)iter3.next();//10/31/07 gmagoon: fixing apparent bug by moving these inside inner "for loop"
//DynamicSimulator ds = (DynamicSimulator)iter4.next();
for (Iterator iter2 = presList.iterator(); iter2.hasNext(); ){
PressureModel pm = (PressureModel)iter2.next();
InitialStatus is = (InitialStatus)iter3.next();//10/31/07 gmagoon: moved from outer "for loop""
DynamicSimulator ds = (DynamicSimulator)iter4.next();
// temperatureArray.add(tm.getTemperature(is.getTime()));//10/30/07 gmagoon: added; //10/31/07 added .getTemperature(is.getTime()); 11/6/07 gmagoon: moved before initialization of lrg;
//11/1/07 gmagoon: trying to make a deep copy of terminationTester when it is instance of ConversionTT
// TerminationTester termTestCopy;
// if (finishController.getTerminationTester() instanceof ConversionTT){
// ConversionTT termTest = (ConversionTT)finishController.getTerminationTester();
// LinkedList spcCopy = (LinkedList)(termTest.getSpeciesGoalConversionSetList().clone());
// termTestCopy = new ConversionTT(spcCopy);
// else{
// termTestCopy = finishController.getTerminationTester();
FinishController fc = new FinishController(finishController.getTerminationTester(), finishController.getValidityTester());//10/31/07 gmagoon: changed to create new finishController instance in each case (apparently, the finish controller becomes associated with reactionSystem in setFinishController within ReactionSystem); alternatively, could use clone, but might need to change FinishController to be "cloneable"
// FinishController fc = new FinishController(termTestCopy, finishController.getValidityTester());
reactionSystemList.add(new ReactionSystem(tm, pm, reactionModelEnlarger, fc, ds, getPrimaryReactionLibrary(), getReactionGenerator(), getSpeciesSeed(), is, getReactionModel(),lrg, i, equationOfState));
i++;//10/30/07 gmagoon: added
System.out.println("Created reaction system "+i+"\n");
}
}
// PDepNetwork.setTemperatureArray(temperatureArray);//10/30/07 gmagoon: passing temperatureArray to PDepNetwork; 11/6/07 gmagoon: moved before initialization of lrg;
}
catch (IOException e) {
System.err.println("Error reading reaction system initialization file.");
throw new IOException("Input file error: " + e.getMessage());
}
}
public void setReactionModel(ReactionModel p_ReactionModel) {
reactionModel = p_ReactionModel;
}
public void modelGeneration() {
//long begin_t = System.currentTimeMillis();
try{
ChemGraph.readForbiddenStructure();
setSpeciesSeed(new LinkedHashSet());//10/4/07 gmagoon moved from initializeCoreEdgeReactionModel
// setReactionGenerator(new TemplateReactionGenerator());//10/4/07 gmagoon: moved inside initializeReactionSystem; 11/3-4/07 gmagoon: probably reverted on or before 10/10/07 (although I have not investigated this change in detail); //11/4/07 gmagoon: moved inside initializeReactionSystems
// setLibraryReactionGenerator(new LibraryReactionGenerator());//10/10/07 gmagoon: moved after initializeReactionSystem
// initializeCoreEdgeReactionModel();//10/4/07 gmagoon moved from below to run initializeCoreEdgeReactionModel before initializeReactionSystem; 11/3-4/07 gmagoon: probably reverted on or before 10/10/07
initializeReactionSystems();
}
catch (IOException e) {
System.err.println(e.getMessage());
System.exit(0);
}
catch (InvalidSymbolException e) {
System.err.println(e.getMessage());
System.exit(0);
}
//10/31/07 gmagoon: initialize validList (to false) before initializeCoreEdgeReactionModel is called
validList = new LinkedList();
for (Integer i = 0; i<reactionSystemList.size();i++) {
validList.add(false);
}
initializeCoreEdgeReactionModel();//10/4/07 gmagoon: moved before initializeReactionSystem; 11/3-4/07 gmagoon: probably reverted on or before 10/10/07
//10/24/07 gmagoon: changed to use reactionSystemList
// LinkedList initList = new LinkedList();//10/25/07 gmagoon: moved these variables to apply to entire class
// LinkedList beginList = new LinkedList();
// LinkedList endList = new LinkedList();
// LinkedList lastTList = new LinkedList();
// LinkedList currentTList = new LinkedList();
// LinkedList lastPList = new LinkedList();
// LinkedList currentPList = new LinkedList();
// LinkedList conditionChangedList = new LinkedList();
// LinkedList reactionChangedList = new LinkedList();
//5/6/08 gmagoon: determine whether there are intermediate time/conversion steps, type of termination tester is based on characteristics of 1st reaction system (it is assumed that they are all identical in terms of type of termination tester)
boolean intermediateSteps = true;
ReactionSystem rs0 = (ReactionSystem)reactionSystemList.get(0);
if (rs0.finishController.terminationTester instanceof ReactionTimeTT){
if (timeStep == null){
intermediateSteps = false;
}
}
else if (numConversions==1){ //if we get to this block, we presumably have a conversion terminationTester; this required moving numConversions to be an attribute...alternative to using numConversions is to access one of the DynamicSimulators and determine conversion length
intermediateSteps=false;
}
//10/24/07 gmagoon: note: each element of for loop could be done in parallel if desired; some modifications would be needed
for (Iterator iter = reactionSystemList.iterator(); iter.hasNext(); ) {
ReactionSystem rs = (ReactionSystem)iter.next();
if ((reactionModelEnlarger instanceof RateBasedPDepRME)) {//1/2/09 gmagoon and rwest: only call initializePDepNetwork for P-dep cases
rs.initializePDepNetwork();
}
ReactionTime init = rs.getInitialReactionTime();
initList.add(init);
ReactionTime begin = init;
beginList.add(begin);
ReactionTime end;
if (rs.finishController.terminationTester instanceof ReactionTimeTT){
//5/5/08 gmagoon: added below if statement to avoid null pointer exception in cases where there are no intermediate time steps specified
if (!(timeStep==null)){
end = (ReactionTime)timeStep.get(0);
}
else{
end= ((ReactionTimeTT)rs.finishController.terminationTester).finalTime;
}
//end = (ReactionTime)timeStep.get(0);
endList.add(end);
}
else{
end = new ReactionTime(1e6,"sec");
endList.add(end);
}
// int iterationNumber = 1;
lastTList.add(rs.getTemperature(init));
currentTList.add(rs.getTemperature(init));
lastPList.add(rs.getPressure(init));
currentPList.add(rs.getPressure(init));
conditionChangedList.add(false);
reactionChangedList.add(false);//10/31/07 gmagoon: added
//Chemkin.writeChemkinInputFile(reactionSystem.getReactionModel(),reactionSystem.getPresentStatus());
}
int iterationNumber = 1;
LinkedList terminatedList = new LinkedList();//10/24/07 gmagoon: this may not be necessary, as if one reactionSystem is terminated, I think all should be terminated
//validList = new LinkedList();//10/31/07 gmagoon: moved before initializeCoreEdgeReactionModel
//10/24/07 gmagoon: initialize allTerminated and allValid to true; these variables keep track of whether all the reactionSystem variables satisfy termination and validity, respectively
boolean allTerminated = true;
boolean allValid = true;
// IF RESTART IS TURNED ON
// Update the systemSnapshot for each ReactionSystem in the reactionSystemList
if (readrestart) {
for (Integer i=0; i<reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
InitialStatus is = rs.getInitialStatus();
putRestartSpeciesInInitialStatus(is,i);
rs.appendUnreactedSpeciesStatus((InitialStatus)initialStatusList.get(i), rs.getPresentTemperature());
}
}
//10/24/07 gmagoon: note: each element of for loop could be done in parallel if desired; some modifications would be needed
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
ReactionTime begin = (ReactionTime)beginList.get(i);
ReactionTime end = (ReactionTime)endList.get(i);
endList.set(i,rs.solveReactionSystem(begin, end, true, true, true, iterationNumber-1));
Chemkin.writeChemkinInputFile(rs);
boolean terminated = rs.isReactionTerminated();
terminatedList.add(terminated);
if(!terminated)
allTerminated = false;
boolean valid = rs.isModelValid();
//validList.add(valid);
validList.set(i, valid);//10/31/07 gmagoon: validList initialization moved before initializeCoreEdgeReactionModel
if(!valid)
allValid = false;
reactionChangedList.set(i,false);
}
//9/1/09 gmagoon: if we are using QM, output a file with the CHEMKIN name, the RMG name, the (modified) InChI, and the (modified) InChIKey
if (ChemGraph.useQM){
writeInChIs(getReactionModel());
}
writeDictionary(getReactionModel());
//System.exit(0);
System.out.println("The model core has " + ((CoreEdgeReactionModel)getReactionModel()).getReactedReactionSet().size() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
System.out.println("The model edge has " + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().size() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().size() + " species.");
StringBuilder print_info = Global.diagnosticInfo;
print_info.append("\nMolecule \t Flux\t\tTime\t \t\t \t Core \t \t Edge \t \t memory\n");
print_info.append(" \t moleular \t characteristic \t findspecies \t moveUnreactedToReacted \t enlarger \t restart1 \t totalEnlarger \t resetSystem \t readSolverFile\t writeSolverFile \t justSolver \t SolverIterations \t solverSpeciesStatus \t Totalsolver \t gc \t restart+diagnosis \t chemkin thermo \t chemkin reactions \t validitytester \t Species \t Reactions\t Species\t Reactions \t memory used \t allSpecies \t TotalTime \t findRateConstant\t identifyReactedSites \t reactChemGraph \t makespecies\t CheckReverseReaction \t makeTemplateReaction \t getReactionfromStruc \t genReverseFromReac");
print_info.append("\t\t\t\t\t\t\t" + ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size()+ "\t" + ((CoreEdgeReactionModel)getReactionModel()).getReactedReactionSet().size() + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().size() + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSetIncludingReverseSize() + "\t"+Global.makeSpecies+"\n");
double solverMin = 0;
double vTester = 0;
/*if (!restart){
writeRestartFile();
writeCoreReactions();
writeAllReactions();
}*/
//System.exit(0);
SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
System.out.println("Species dictionary size: "+dictionary.size());
double tAtInitialization = Global.tAtInitialization;
//10/24/07: changed to use allTerminated and allValid
// step 2: iteratively grow reaction system
while (!allTerminated || !allValid) {
while (!allValid) {
//writeCoreSpecies();
double pt = System.currentTimeMillis();
//prune the reaction model (this will only do something in the AUTO case)
pruneReactionModel();
garbageCollect();
// ENLARGE THE MODEL!!! (this is where the good stuff happens)
enlargeReactionModel();
double totalEnlarger = (System.currentTimeMillis() - pt)/1000/60;
//PDepNetwork.completeNetwork(reactionSystem.reactionModel.getSpeciesSet());
//10/24/07 gmagoon: changed to use reactionSystemList
if ((reactionModelEnlarger instanceof RateBasedPDepRME)) {//1/2/09 gmagoon and rwest: only call initializePDepNetwork for P-dep cases
for (Iterator iter = reactionSystemList.iterator(); iter.hasNext(); ) {
ReactionSystem rs = (ReactionSystem)iter.next();
rs.initializePDepNetwork();
}
//reactionSystem.initializePDepNetwork();
}
pt = System.currentTimeMillis();
//10/24/07 gmagoon: changed to use reactionSystemList
for (Iterator iter = reactionSystemList.iterator(); iter.hasNext(); ) {
ReactionSystem rs = (ReactionSystem)iter.next();
rs.resetSystemSnapshot();
}
//reactionSystem.resetSystemSnapshot();
double resetSystem = (System.currentTimeMillis() - pt)/1000/60;
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
//reactionChanged = true;
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
reactionChangedList.set(i,true);
// begin = init;
beginList.set(i, (ReactionTime)initList.get(i));
if (rs.finishController.terminationTester instanceof ReactionTimeTT){
//5/5/08 gmagoon: added below if statement to avoid null pointer exception in cases where there are no intermediate time steps specified
if (!(timeStep==null)){
endList.set(i,(ReactionTime)timeStep.get(0));
}
else{
endList.set(i, ((ReactionTimeTT)rs.finishController.terminationTester).finalTime);
}
// endList.set(i, (ReactionTime)timeStep.get(0));
//end = (ReactionTime)timeStep.get(0);
}
else
endList.set(i, new ReactionTime(1e6,"sec"));
//end = new ReactionTime(1e6,"sec");
// iterationNumber = 1;//10/24/07 gmagoon: moved outside of loop
currentTList.set(i,rs.getTemperature((ReactionTime)beginList.get(i)));
currentPList.set(i,rs.getPressure((ReactionTime)beginList.get(i)));
conditionChangedList.set(i,!(((Temperature)currentTList.get(i)).equals((Temperature)lastTList.get(i))) || !(((Pressure)currentPList.get(i)).equals((Pressure)lastPList.get(i))));
//currentT = reactionSystem.getTemperature(begin);
//currentP = reactionSystem.getPressure(begin);
//conditionChanged = (!currentT.equals(lastT) || !currentP.equals(lastP));
}
iterationNumber = 1;
double startTime = System.currentTimeMillis();
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean reactionChanged = (Boolean)reactionChangedList.get(i);
boolean conditionChanged = (Boolean)conditionChangedList.get(i);
ReactionTime begin = (ReactionTime)beginList.get(i);
ReactionTime end = (ReactionTime)endList.get(i);
endList.set(i,rs.solveReactionSystem(begin, end, false, reactionChanged, conditionChanged, iterationNumber-1));
//end = reactionSystem.solveReactionSystem(begin, end, false, reactionChanged, conditionChanged, iterationNumber-1);
}
solverMin = solverMin + (System.currentTimeMillis()-startTime)/1000/60;
startTime = System.currentTimeMillis();
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
Chemkin.writeChemkinInputFile(rs);
//Chemkin.writeChemkinInputFile(reactionSystem);
}
//9/1/09 gmagoon: if we are using QM, output a file with the CHEMKIN name, the RMG name, the (modified) InChI, and the (modified) InChIKey
if (ChemGraph.useQM){
writeInChIs(getReactionModel());
}
writeDictionary(getReactionModel());
double chemkint = (System.currentTimeMillis()-startTime)/1000/60;
if (writerestart) {
/*
* Rename current restart files:
* In the event RMG fails while writing the restart files,
* user won't lose any information
*/
String[] restartFiles = {"Restart/coreReactions.txt", "Restart/coreSpecies.txt",
"Restart/edgeReactions.txt", "Restart/edgeSpecies.txt", "Restart/lindemannReactions.txt",
"Restart/pdepnetworks.txt", "Restart/thirdBodyReactions.txt", "Restart/troeReactions.txt"};
writeBackupRestartFiles(restartFiles);
writeCoreSpecies();
writeCoreReactions();
writeEdgeSpecies();
writeEdgeReactions();
if (PDepNetwork.generateNetworks == true) writePDepNetworks();
/*
* Remove backup restart files from Restart folder
*/
removeBackupRestartFiles(restartFiles);
}
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
System.out.println("For reaction system: "+(i+1)+" out of "+reactionSystemList.size());
System.out.println("At this time: " + ((ReactionTime)endList.get(i)).toString());
Species spe = SpeciesDictionary.getSpeciesFromID(getLimitingReactantID());
double conv = rs.getPresentConversion(spe);
System.out.print("Conversion of " + spe.getName() + " is:");
System.out.println(conv);
}
System.out.println("Running Time is: " + String.valueOf((System.currentTimeMillis()-tAtInitialization)/1000/60) + " minutes.");
System.out.println("The model edge has " + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().size() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().size() + " species.");
//10/24/07 gmagoon: note: all reaction systems should use the same core, but I will display for each reactionSystem for testing purposes:
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
System.out.println("For reaction system: "+(i+1)+" out of "+reactionSystemList.size());
if (rs.getDynamicSimulator() instanceof JDASPK){
JDASPK solver = (JDASPK)rs.getDynamicSimulator();
System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
}
else{
JDASSL solver = (JDASSL)rs.getDynamicSimulator();
System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
}
}
// if (reactionSystem.getDynamicSimulator() instanceof JDASPK){
// JDASPK solver = (JDASPK)reactionSystem.getDynamicSimulator();
// System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
//else{
// JDASSL solver = (JDASSL)reactionSystem.getDynamicSimulator();
// System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
startTime = System.currentTimeMillis();
double mU = memoryUsed();
double gc = (System.currentTimeMillis()-startTime)/1000/60;
startTime = System.currentTimeMillis();
//10/24/07 gmagoon: updating to use reactionSystemList
allValid = true;
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean valid = rs.isModelValid();
if(!valid)
allValid = false;
validList.set(i,valid);
//valid = reactionSystem.isModelValid();
}
vTester = vTester + (System.currentTimeMillis()-startTime)/1000/60;
startTime = System.currentTimeMillis();
writeDiagnosticInfo();
writeEnlargerInfo();
double restart2 = (System.currentTimeMillis()-startTime)/1000/60;
int allSpecies, allReactions;
allSpecies = SpeciesDictionary.getInstance().size();
print_info.append(totalEnlarger + "\t" + resetSystem + "\t" + Global.readSolverFile + "\t" + Global.writeSolverFile + "\t" + Global.solvertime + "\t" + Global.solverIterations + "\t" + Global.speciesStatusGenerator + "\t" + solverMin + "\t" + gc + "\t" + restart2 + "\t" + Global.chemkinThermo + '\t' + Global.chemkinReaction + "\t" + vTester + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size()+ "\t" + ((CoreEdgeReactionModel)getReactionModel()).getReactedReactionSet().size() + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().size() + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSetIncludingReverseSize() + "\t" + mU + "\t" + allSpecies + "\t" + (System.currentTimeMillis()-Global.tAtInitialization)/1000/60 + "\t"+ String.valueOf(Global.RT_findRateConstant)+"\t"+Global.RT_identifyReactedSites+"\t"+Global.RT_reactChemGraph+"\t"+Global.makeSpecies+"\t"+Global.checkReactionReverse+"\t"+Global.makeTR+ "\t" + Global.getReacFromStruc + "\t" + Global.generateReverse+"\n");
}
//5/6/08 gmagoon: in order to handle cases where no intermediate time/conversion steps are used, only evaluate the next block of code when there are intermediate time/conversion steps
double startTime = System.currentTimeMillis();
if(intermediateSteps){
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
reactionChangedList.set(i, false);
//reactionChanged = false;
Temperature currentT = (Temperature)currentTList.get(i);
Pressure currentP = (Pressure)currentPList.get(i);
lastTList.set(i,(Temperature)currentT.clone()) ;
lastPList.set(i,(Pressure)currentP.clone());
//lastT = (Temperature)currentT.clone();
//lastP = (Pressure)currentP.clone();
currentTList.set(i,rs.getTemperature((ReactionTime)beginList.get(i)));
currentPList.set(i,rs.getPressure((ReactionTime)beginList.get(i)));
conditionChangedList.set(i,!(((Temperature)currentTList.get(i)).equals((Temperature)lastTList.get(i))) || !(((Pressure)currentPList.get(i)).equals((Pressure)lastPList.get(i))));
//currentP = reactionSystem.getPressure(begin);
//conditionChanged = (!currentT.equals(lastT) || !currentP.equals(lastP));
beginList.set(i,((SystemSnapshot)(rs.getSystemSnapshotEnd().next())).time);
// begin=((SystemSnapshot)(reactionSystem.getSystemSnapshotEnd().next())).time;
if (rs.finishController.terminationTester instanceof ReactionTimeTT){
if (iterationNumber < timeStep.size()){
endList.set(i,(ReactionTime)timeStep.get(iterationNumber));
//end = (ReactionTime)timeStep.get(iterationNumber);
}
else
endList.set(i, ((ReactionTimeTT)rs.finishController.terminationTester).finalTime);
//end = ((ReactionTimeTT)reactionSystem.finishController.terminationTester).finalTime;
}
else
endList.set(i,new ReactionTime(1e6,"sec"));
//end = new ReactionTime(1e6,"sec");
}
iterationNumber++;
startTime = System.currentTimeMillis();//5/6/08 gmagoon: moved declaration outside of if statement so it can be accessed in subsequent vTester line; previous steps are probably so fast that I could eliminate this line without much effect on normal operation with intermediate steps
//double startTime = System.currentTimeMillis();
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean reactionChanged = (Boolean)reactionChangedList.get(i);
boolean conditionChanged = (Boolean)conditionChangedList.get(i);
ReactionTime begin = (ReactionTime)beginList.get(i);
ReactionTime end = (ReactionTime)endList.get(i);
endList.set(i,rs.solveReactionSystem(begin, end, false, reactionChanged, false, iterationNumber-1));
// end = reactionSystem.solveReactionSystem(begin, end, false, reactionChanged, false, iterationNumber-1);
}
solverMin = solverMin + (System.currentTimeMillis()-startTime)/1000/60;
startTime = System.currentTimeMillis();
//5/6/08 gmagoon: changed to separate validity and termination testing, and termination testing is done last...termination testing should be done even if there are no intermediate conversions; however, validity is guaranteed if there are no intermediate conversions based on previous conditional if statement
allValid = true;
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean valid = rs.isModelValid();
validList.set(i,valid);
if(!valid)
allValid = false;
}
}//5/6/08 gmagoon: end of block for intermediateSteps
allTerminated = true;
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean terminated = rs.isReactionTerminated();
terminatedList.set(i,terminated);
if(!terminated){
allTerminated = false;
System.out.println("Reaction System "+(i+1)+" has not reached its termination criterion");
if (rs.isModelValid()&& runKillableToPreventInfiniteLoop(intermediateSteps, iterationNumber)) {
System.out.println("although it seems to be valid (complete), so it was not interrupted for being invalid.");
System.out.println("This probably means there was an error with the ODE solver, and we risk entering an endless loop.");
System.out.println("Stopping.");
throw new Error();
}
}
}
// //10/24/07 gmagoon: changed to use reactionSystemList
// allTerminated = true;
// allValid = true;
// for (Integer i = 0; i<reactionSystemList.size();i++) {
// ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
// boolean terminated = rs.isReactionTerminated();
// terminatedList.set(i,terminated);
// if(!terminated)
// allTerminated = false;
// boolean valid = rs.isModelValid();
// validList.set(i,valid);
// if(!valid)
// allValid = false;
// //terminated = reactionSystem.isReactionTerminated();
// //valid = reactionSystem.isModelValid();
//10/24/07 gmagoon: changed to use reactionSystemList, allValid
if (allValid) {
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
System.out.println("For reaction system: "+(i+1)+" out of "+reactionSystemList.size());
System.out.println("At this reaction time: " + ((ReactionTime)endList.get(i)).toString());
Species spe = SpeciesDictionary.getSpeciesFromID(getLimitingReactantID());
double conv = rs.getPresentConversion(spe);
System.out.print("Conversion of " + spe.getName() + " is:");
System.out.println(conv);
}
//System.out.println("At this time: " + end.toString());
//Species spe = SpeciesDictionary.getSpeciesFromID(1);
//double conv = reactionSystem.getPresentConversion(spe);
//System.out.print("current conversion = ");
//System.out.println(conv);
Runtime runTime = Runtime.getRuntime();
System.out.print("Memory used: ");
System.out.println(runTime.totalMemory());
System.out.print("Free memory: ");
System.out.println(runTime.freeMemory());
//runTime.gc();
/* if we're not calling runTime.gc() then don't bother printing this:
System.out.println("After garbage collection:");
System.out.print("Memory used: ");
System.out.println(runTime.totalMemory());
System.out.print("Free memory: ");
System.out.println(runTime.freeMemory());
*/
//10/24/07 gmagoon: note: all reaction systems should use the same core, but I will display for each reactionSystem for testing purposes:
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
System.out.println("For reaction system: "+(i+1)+" out of "+reactionSystemList.size());
if (rs.getDynamicSimulator() instanceof JDASPK){
JDASPK solver = (JDASPK)rs.getDynamicSimulator();
System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
}
else{
JDASSL solver = (JDASSL)rs.getDynamicSimulator();
System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
System.out.println("(although rs.getReactionModel().getReactionNumber() returns "+rs.getReactionModel().getReactionNumber()+")");
}
}
// if (reactionSystem.getDynamicSimulator() instanceof JDASPK){
// JDASPK solver = (JDASPK)reactionSystem.getDynamicSimulator();
// System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
// else{
// JDASSL solver = (JDASSL)reactionSystem.getDynamicSimulator();
// System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
}
vTester = vTester + (System.currentTimeMillis()-startTime)/1000/60;//5/6/08 gmagoon: for case where intermediateSteps = false, this will use startTime declared just before intermediateSteps loop, and will only include termination testing, but no validity testing
}
//System.out.println("Performing model reduction");
if (paraInfor != 0){
System.out.println("Model Generation performed. Now generating sensitivity data.");
//10/24/07 gmagoon: updated to use reactionSystemList
LinkedList dynamicSimulator2List = new LinkedList();
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
//6/25/08 gmagoon: updated to pass index i
//6/25/08 gmagoon: updated to pass (dummy) finishController and autoflag (set to false here);
dynamicSimulator2List.add(new JDASPK(rtol, atol, paraInfor, (InitialStatus)initialStatusList.get(i),i));
//DynamicSimulator dynamicSimulator2 = new JDASPK(rtol, atol, paraInfor, initialStatus);
((DynamicSimulator)dynamicSimulator2List.get(i)).addConversion(((JDASPK)rs.dynamicSimulator).conversionSet, ((JDASPK)rs.dynamicSimulator).conversionSet.length);
//dynamicSimulator2.addConversion(((JDASPK)reactionSystem.dynamicSimulator).conversionSet, ((JDASPK)reactionSystem.dynamicSimulator).conversionSet.length);
rs.setDynamicSimulator((DynamicSimulator)dynamicSimulator2List.get(i));
//reactionSystem.setDynamicSimulator(dynamicSimulator2);
int numSteps = rs.systemSnapshot.size() -1;
rs.resetSystemSnapshot();
beginList.set(i, (ReactionTime)initList.get(i));
//begin = init;
if (rs.finishController.terminationTester instanceof ReactionTimeTT){
endList.set(i,((ReactionTimeTT)rs.finishController.terminationTester).finalTime);
//end = ((ReactionTimeTT)reactionSystem.finishController.terminationTester).finalTime;
}
else{
ReactionTime end = (ReactionTime)endList.get(i);
endList.set(i, end.add(end));
//end = end.add(end);
}
terminatedList.set(i, false);
//terminated = false;
ReactionTime begin = (ReactionTime)beginList.get(i);
ReactionTime end = (ReactionTime)endList.get(i);
rs.solveReactionSystemwithSEN(begin, end, true, false, false);
//reactionSystem.solveReactionSystemwithSEN(begin, end, true, false, false);
}
}
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
Chemkin.writeChemkinInputFile(getReactionModel(),rs.getPresentStatus());
}
//9/1/09 gmagoon: if we are using QM, output a file with the CHEMKIN name, the RMG name, the (modified) InChI, and the (modified) InChIKey
if (ChemGraph.useQM){
writeInChIs(getReactionModel());
}
writeDictionary(getReactionModel());
System.out.println("Model Generation Completed");
return;
}
//9/1/09 gmagoon: this function writes a "dictionary" with Chemkin name, RMG name, (modified) InChI, and InChIKey
//this is based off of writeChemkinFile in ChemkinInputFile.java
private void writeInChIs(ReactionModel p_reactionModel) {
StringBuilder result=new StringBuilder();
for (Iterator iter = ((CoreEdgeReactionModel)p_reactionModel).core.getSpecies(); iter.hasNext(); ) {
Species species = (Species) iter.next();
result.append(species.getChemkinName() + "\t"+species.getName() + "\t" + species.getChemGraph().getModifiedInChIAnew() + "\t" + species.getChemGraph().getModifiedInChIKeyAnew()+ "\n");
}
String file = "inchiDictionary.txt";
try {
FileWriter fw = new FileWriter(file);
fw.write(result.toString());
fw.close();
}
catch (Exception e) {
System.out.println("Error in writing InChI file inchiDictionary.txt!");
System.out.println(e.getMessage());
System.exit(0);
}
}
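/*
 * Each line of inchiDictionary.txt written above is tab-separated:
 * ChemkinName <tab> RMG name <tab> modified InChI <tab> modified InChIKey
 */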
//9/14/09 gmagoon: function to write dictionary, based on code copied from RMG.java
private void writeDictionary(ReactionModel rm){
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)rm;
//Write core species to RMG_Dictionary.txt
String coreSpecies ="";
Iterator iter = cerm.getSpecies();
if (Species.useInChI) {
while (iter.hasNext()){
int i=1;
Species spe = (Species) iter.next();
coreSpecies = coreSpecies + spe.getChemkinName() + " " + spe.getInChI() + "\n"+spe.getChemGraph().toString(i)+"\n\n";
}
} else {
while (iter.hasNext()){
int i=1;
Species spe = (Species) iter.next();
coreSpecies = coreSpecies + spe.getChemkinName() + "\n"+spe.getChemGraph().toString(i)+"\n\n";
}
}
try{
File rmgDictionary = new File("RMG_Dictionary.txt");
FileWriter fw = new FileWriter(rmgDictionary);
fw.write(coreSpecies);
fw.close();
}
catch (IOException e) {
System.out.println("Could not write RMG_Dictionary.txt");
System.exit(0);
}
// If we have solvation on, then every time we write the dictionary, also write the solvation properties
if (Species.useSolvation) {
writeSolvationProperties(rm);
}
}
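/*
 * RMG_Dictionary.txt written above contains one entry per core species: the
 * Chemkin name (followed by the InChI when Species.useInChI is on) on the first
 * line, then the adjacency-list representation from ChemGraph.toString(), then a
 * blank line.
 */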
private void writeSolvationProperties(ReactionModel rm){
//Write core species to RMG_Solvation_Properties.txt
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)rm;
StringBuilder result = new StringBuilder();
result.append("ChemkinName\tChemicalFormula\tMolecularWeight\tRadius\tDiffusivity\tAbrahamS\tAbrahamB\tAbrahamE\tAbrahamL\tAbrahamA\tChemkinName\n\n");
Iterator iter = cerm.getSpecies();
while (iter.hasNext()){
Species spe = (Species)iter.next();
result.append(spe.getChemkinName() + "\t");
result.append(spe.getChemGraph().getChemicalFormula()+ "\t");
result.append(spe.getMolecularWeight() + "\t");
result.append(spe.getChemGraph().getRadius()+ "\t");
result.append(spe.getChemGraph().getDiffusivity()+ "\t");
result.append(spe.getChemGraph().getAbramData().toString()+ "\t");
result.append(spe.getChemkinName() + "\n");
}
try{
File rmgSolvationProperties = new File("RMG_Solvation_Properties.txt");
FileWriter fw = new FileWriter(rmgSolvationProperties);
fw.write(result.toString() );
fw.close();
}
catch (IOException e) {
System.out.println("Could not write RMG_Solvation_Properties.txt");
System.exit(0);
}
}
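/*
 * RMG_Solvation_Properties.txt written above is tab-separated with the header
 * given in the code; each row repeats the Chemkin name at both ends and lists the
 * chemical formula, molecular weight, radius, diffusivity, and the Abraham solute
 * parameters (S, B, E, L, A) as serialized by ChemGraph.getAbramData().toString().
 */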
/*
* MRH 23MAR2010:
* Commenting out deprecated parseRestartFiles method
*/
// private void parseRestartFiles() {
// parseAllSpecies();
// parseCoreSpecies();
// parseEdgeSpecies();
// parseAllReactions();
// parseCoreReactions();
/*
* MRH 23MAR2010:
* Commenting out deprecated parseEdgeReactions method
*/
// private void parseEdgeReactions() {
// SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
// //HasMap speciesMap = dictionary.dictionary;
// try{
// File coreReactions = new File("Restart/edgeReactions.txt");
// FileReader fr = new FileReader(coreReactions);
// BufferedReader reader = new BufferedReader(fr);
// String line = ChemParser.readMeaningfulLine(reader);
// boolean found = false;
// LinkedHashSet reactionSet = new LinkedHashSet();
// while (line != null){
// Reaction reaction = ChemParser.parseEdgeArrheniusReaction(dictionary,line,1,1);
// boolean added = reactionSet.add(reaction);
// if (!added){
// if (reaction.hasResonanceIsomerAsReactant()){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"reactants", reactionSet);
// if (reaction.hasResonanceIsomerAsProduct() && !found){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"products", reactionSet);
// if (!found){
// System.out.println("Cannot add reaction "+line+" to the Reaction Edge. All resonance isomers have already been added");
// System.exit(0);
// else found = false;
// //Reaction reverse = reaction.getReverseReaction();
// //if (reverse != null) reactionSet.add(reverse);
// line = ChemParser.readMeaningfulLine(reader);
// ((CoreEdgeReactionModel)getReactionModel()).addReactionSet(reactionSet);
// catch (IOException e){
// System.out.println("Could not read the corespecies restart file");
// System.exit(0);
/*
* MRH 23MAR2010:
* Commenting out deprecated parseAllSpecies method
*/
// public void parseCoreReactions() {
// SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
// int i=1;
// //HasMap speciesMap = dictionary.dictionary;
// try{
// File coreReactions = new File("Restart/coreReactions.txt");
// FileReader fr = new FileReader(coreReactions);
// BufferedReader reader = new BufferedReader(fr);
// String line = ChemParser.readMeaningfulLine(reader);
// boolean found = false;
// LinkedHashSet reactionSet = new LinkedHashSet();
// while (line != null){
// Reaction reaction = ChemParser.parseCoreArrheniusReaction(dictionary,line,1,1);//,((CoreEdgeReactionModel)reactionSystem.reactionModel));
// boolean added = reactionSet.add(reaction);
// if (!added){
// if (reaction.hasResonanceIsomerAsReactant()){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"reactants", reactionSet);
// if (reaction.hasResonanceIsomerAsProduct() && !found){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"products", reactionSet);
// if (!found){
// System.out.println("Cannot add reaction "+line+" to the Reaction Core. All resonance isomers have already been added");
// //System.exit(0);
// else found = false;
// Reaction reverse = reaction.getReverseReaction();
// if (reverse != null) {
// reactionSet.add(reverse);
// //System.out.println(2 + "\t " + line);
// //else System.out.println(1 + "\t" + line);
// line = ChemParser.readMeaningfulLine(reader);
// i=i+1;
// ((CoreEdgeReactionModel)getReactionModel()).addReactedReactionSet(reactionSet);
// catch (IOException e){
// System.out.println("Could not read the coreReactions restart file");
// System.exit(0);
/*
* MRH 23MAR2010:
* Commenting out deprecated parseAllSpecies method
*/
// private void parseAllReactions() {
// SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
// int i=1;
// //HasMap speciesMap = dictionary.dictionary;
// try{
// File allReactions = new File("Restart/allReactions.txt");
// FileReader fr = new FileReader(allReactions);
// BufferedReader reader = new BufferedReader(fr);
// String line = ChemParser.readMeaningfulLine(reader);
// boolean found = false;
// LinkedHashSet reactionSet = new LinkedHashSet();
// OuterLoop:
// while (line != null){
// Reaction reaction = ChemParser.parseArrheniusReaction(dictionary,line,1,1,((CoreEdgeReactionModel)getReactionModel()));
// if (((CoreEdgeReactionModel)getReactionModel()).categorizeReaction(reaction)==-1){
// boolean added = reactionSet.add(reaction);
// if (!added){
// found = false;
// if (reaction.hasResonanceIsomerAsReactant()){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"reactants", reactionSet);
// if (reaction.hasResonanceIsomerAsProduct() && !found){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"products", reactionSet);
// if (!found){
// Iterator iter = reactionSet.iterator();
// while (iter.hasNext()){
// Reaction reacTemp = (Reaction)iter.next();
// if (reacTemp.equals(reaction)){
// reactionSet.remove(reacTemp);
// reactionSet.add(reaction);
// break;
// //System.out.println("Cannot add reaction "+line+" to the Reaction Core. All resonance isomers have already been added");
// //System.exit(0);
// //else found = false;
// /*Reaction reverse = reaction.getReverseReaction();
// if (reverse != null && ((CoreEdgeReactionModel)reactionSystem.reactionModel).isReactedReaction(reaction)) {
// reactionSet.add(reverse);
// //System.out.println(2 + "\t " + line);
// }*/
// //else System.out.println(1 + "\t" + line);
// i=i+1;
// line = ChemParser.readMeaningfulLine(reader);
// ((CoreEdgeReactionModel)getReactionModel()).addReactionSet(reactionSet);
// catch (IOException e){
// System.out.println("Could not read the corespecies restart file");
// System.exit(0);
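/**
 * Attempts to add p_Reaction to reactionSet by substituting resonance isomers for
 * its reactants (rOrP equals "reactants") or products (otherwise), one at a time,
 * mutating the reaction's Structure in place until the modified reaction can be
 * added to the set. Returns true if a substitution made the addition succeed,
 * false otherwise.
 */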
private boolean getResonanceStructure(Reaction p_Reaction, String rOrP, LinkedHashSet reactionSet) {
Structure reactionStructure = p_Reaction.getStructure();
//Structure tempreactionStructure = new Structure(reactionStructure.getReactantList(),reactionStructure.getProductList());
boolean found = false;
if (rOrP.equals("reactants")){
Iterator originalreactants = reactionStructure.getReactants();
HashSet tempHashSet = new HashSet();
while(originalreactants.hasNext()){
tempHashSet.add(originalreactants.next());
}
Iterator reactants = tempHashSet.iterator();
while(reactants.hasNext() && !found){
ChemGraph reactant = (ChemGraph)reactants.next();
if (reactant.getSpecies().hasResonanceIsomers()){
Iterator chemGraphIterator = reactant.getSpecies().getResonanceIsomers();
ChemGraph newChemGraph; //= (ChemGraph)chemGraphIterator.next();
while(chemGraphIterator.hasNext() && !found){
newChemGraph = (ChemGraph)chemGraphIterator.next();
reactionStructure.removeReactants(reactant);
reactionStructure.addReactants(newChemGraph);
reactant = newChemGraph;
if (reactionSet.add(p_Reaction)){
found = true;
}
}
}
}
}
else{
Iterator originalproducts = reactionStructure.getProducts();
HashSet tempHashSet = new HashSet();
while(originalproducts.hasNext()){
tempHashSet.add(originalproducts.next());
}
Iterator products = tempHashSet.iterator();
while(products.hasNext() && !found){
ChemGraph product = (ChemGraph)products.next();
if (product.getSpecies().hasResonanceIsomers()){
Iterator chemGraphIterator = product.getSpecies().getResonanceIsomers();
ChemGraph newChemGraph ;//= (ChemGraph)chemGraphIterator.next();
while(chemGraphIterator.hasNext() && !found){
newChemGraph = (ChemGraph)chemGraphIterator.next();
reactionStructure.removeProducts(product);
reactionStructure.addProducts(newChemGraph);
product = newChemGraph;
if (reactionSet.add(p_Reaction)){
found = true;
}
}
}
}
}
return found;
}
public void parseCoreSpecies() {
// String restartFileContent ="";
//int speciesCount = 0;
//boolean added;
SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
try{
File coreSpecies = new File ("Restart/coreSpecies.txt");
FileReader fr = new FileReader(coreSpecies);
BufferedReader reader = new BufferedReader(fr);
String line = ChemParser.readMeaningfulLine(reader);
//HashSet speciesSet = new HashSet();
// if (reactionSystem == null){//10/24/07 gmagoon: commenting out since contents of if was already commented out anyway
// //ReactionSystem reactionSystem = new ReactionSystem();
setReactionModel(new CoreEdgeReactionModel());//10/4/07 gmagoon:changed to setReactionModel
while (line!=null) {
StringTokenizer st = new StringTokenizer(line);
String index = st.nextToken();
int ID = Integer.parseInt(index);
Species spe = dictionary.getSpeciesFromID(ID);
if (spe == null)
System.out.println("There was no species with ID "+ID +" in the species dictionary");
((CoreEdgeReactionModel)getReactionModel()).addReactedSpecies(spe);
line = ChemParser.readMeaningfulLine(reader);
}
}
catch (IOException e){
System.out.println("Could not read the corespecies restart file");
System.exit(0);
}
}
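/*
 * Note on parseCoreSpecies(): each meaningful line of Restart/coreSpecies.txt is
 * tokenized and only its first token is used, parsed as an integer species ID that is
 * looked up in the SpeciesDictionary (unresolved IDs are reported but still passed to
 * addReactedSpecies). This differs from the "name(ID)" + adjacency-list layout that
 * writeCoreSpecies() and readRestartSpecies() below use, so this parser appears to
 * target an older restart file format.
 */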
public static void garbageCollect(){
System.gc();
}
public static long memoryUsed(){
garbageCollect();
Runtime rT = Runtime.getRuntime();
long uM, tM, fM;
tM = rT.totalMemory();
fM = rT.freeMemory();
uM = tM - fM;
System.out.println("After garbage collection:");
System.out.print("Memory used: ");
System.out.println(tM);
System.out.print("Free memory: ");
System.out.println(fM);
return uM;
}
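// Note: the figures above are raw byte counts from Runtime.totalMemory()/freeMemory(),
// and the System.gc() call in garbageCollect() is only a hint to the JVM, so the
// "after garbage collection" numbers are approximate rather than exact.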
private HashSet readIncludeSpecies(String fileName) {
HashSet speciesSet = new HashSet();
try {
File includeSpecies = new File (fileName);
FileReader fr = new FileReader(includeSpecies);
BufferedReader reader = new BufferedReader(fr);
String line = ChemParser.readMeaningfulLine(reader);
while (line!=null) {
StringTokenizer st = new StringTokenizer(line);
String index = st.nextToken();
String name = null;
if (!index.startsWith("(")) name = index;
else name = st.nextToken().trim();
Graph g = ChemParser.readChemGraph(reader);
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
}
catch (ForbiddenStructureException e) {
System.out.println("Forbidden Structure:\n" + e.getMessage());
System.out.println("Included species file "+fileName+" contains a forbidden structure.");
System.exit(0);
}
Species species = Species.make(name,cg);
//speciesSet.put(name, species);
speciesSet.add(species);
line = ChemParser.readMeaningfulLine(reader);
System.out.println(line);
}
}
catch (IOException e){
System.out.println("Could not read the included species file" + fileName);
System.exit(0);
}
return speciesSet;
}
/*
* MRH 23MAR2010:
* Commenting out deprecated parseAllSpecies method
*/
// public LinkedHashSet parseAllSpecies() {
// // String restartFileContent ="";
// int speciesCount = 0;
// LinkedHashSet speciesSet = new LinkedHashSet();
// boolean added;
// try{
// long initialTime = System.currentTimeMillis();
// File coreSpecies = new File ("allSpecies.txt");
// BufferedReader reader = new BufferedReader(new FileReader(coreSpecies));
// String line = ChemParser.readMeaningfulLine(reader);
// int i=0;
// while (line!=null) {
// StringTokenizer st = new StringTokenizer(line);
// String index = st.nextToken();
// String name = null;
// if (!index.startsWith("(")) name = index;
// else name = st.nextToken().trim();
// int ID = getID(name);
// name = getName(name);
// Graph g = ChemParser.readChemGraph(reader);
// ChemGraph cg = null;
// try {
// cg = ChemGraph.make(g);
// catch (ForbiddenStructureException e) {
// System.out.println("Forbidden Structure:\n" + e.getMessage());
// System.exit(0);
// Species species;
// if (ID == 0)
// species = Species.make(name,cg);
// else
// species = Species.make(name,cg,ID);
// speciesSet.add(species);
// double flux = 0;
// int species_type = 1;
// line = ChemParser.readMeaningfulLine(reader);
// System.out.println(line);
// catch (IOException e){
// System.out.println("Could not read the allSpecies restart file");
// System.exit(0);
// return speciesSet;
private String getName(String name) {
// Strip a trailing "(ID)" suffix, if present, and return the bare species name
if (!name.endsWith(")")) return name;
int index = 0;
int i = name.length()-2;
while (i > 0) {
if (name.charAt(i) == '(') {
index = i;
i = 0;
}
else i = i-1;
}
return name.substring(0,index);
}
private int getID(String name) {
// Extract the numeric ID from a trailing "(ID)" suffix; return 0 if there is none
if (!name.endsWith(")")) return 0;
String number = "";
int i = name.length()-2;
while (i > 0) {
if (name.charAt(i) == '(') i = 0;
else {
number = name.charAt(i) + number;
i = i-1;
}
}
return Integer.parseInt(number);
}
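/*
 * Illustrative sketch (not part of the original code): getName() and getID() above
 * together split a label of the form "name(ID)" into its two pieces. The hypothetical
 * helper below shows the same parsing in one place using lastIndexOf; it is not called
 * anywhere in this class.
 */
private static String[] splitNameAndId(String label) {
// Returns {bare name, ID as a string}; ID defaults to "0" when no "(ID)" suffix exists
if (!label.endsWith(")")) return new String[] { label, "0" };
int open = label.lastIndexOf('(');
String bareName = label.substring(0, open);
String id = label.substring(open + 1, label.length() - 1);
return new String[] { bareName, id };
}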
/*
* MRH 23MAR2010:
* Commenting out deprecated parseEdgeSpecies method
*/
// private void parseEdgeSpecies() {
// // String restartFileContent ="";
// SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
// try{
// File edgeSpecies = new File ("Restart/edgeSpecies.txt");
// FileReader fr = new FileReader(edgeSpecies);
// BufferedReader reader = new BufferedReader(fr);
// String line = ChemParser.readMeaningfulLine(reader);
// //HashSet speciesSet = new HashSet();
// while (line!=null) {
// StringTokenizer st = new StringTokenizer(line);
// String index = st.nextToken();
// int ID = Integer.parseInt(index);
// Species spe = dictionary.getSpeciesFromID(ID);
// if (spe == null)
// System.out.println("There was no species with ID "+ID +" in the species dictionary");
// //reactionSystem.reactionModel = new CoreEdgeReactionModel();
// ((CoreEdgeReactionModel)getReactionModel()).addUnreactedSpecies(spe);
// line = ChemParser.readMeaningfulLine(reader);
// catch (IOException e){
// System.out.println("Could not read the edgepecies restart file");
// System.exit(0);
/*private int calculateAllReactionsinReactionTemplate() {
int totalnum = 0;
TemplateReactionGenerator trg = (TemplateReactionGenerator)reactionSystem.reactionGenerator;
Iterator iter = trg.getReactionTemplateLibrary().getReactionTemplate();
while (iter.hasNext()){
ReactionTemplate rt = (ReactionTemplate)iter.next();
totalnum += rt.getNumberOfReactions();
}
return totalnum;
}*/
private void writeEnlargerInfo() {
try {
File diagnosis = new File("enlarger.xls");
FileWriter fw = new FileWriter(diagnosis);
fw.write(Global.enlargerInfo.toString());
fw.close();
}
catch (IOException e) {
System.out.println("Cannot write enlarger file");
System.exit(0);
}
}
private void writeDiagnosticInfo() {
try {
File diagnosis = new File("diagnosis.xls");
FileWriter fw = new FileWriter(diagnosis);
fw.write(Global.diagnosticInfo.toString());
fw.close();
}
catch (IOException e) {
System.out.println("Cannot write diagnosis file");
System.exit(0);
}
}
//10/25/07 gmagoon: I don't think this is used, but I will update to use reactionSystem and reactionTime as parameter to access temperature; commented-out usage of writeRestartFile will need to be modified
//Is still incomplete.
public void writeRestartFile(ReactionSystem p_rs, ReactionTime p_time ) {
//writeCoreSpecies(p_rs);
//writeCoreReactions(p_rs, p_time);
//writeEdgeSpecies();
//writeAllReactions(p_rs, p_time);
//writeEdgeReactions(p_rs, p_time);
//String restartFileName;
//String restartFileContent="";
}
/*
* MRH 25MAR2010
* This method is no longer used
*/
/*Only write the forward reactions in the model core.
The reverse reactions are generated from the forward reactions.*/
//10/25/07 gmagoon: added reaction system and reaction time as parameters and eliminated use of Global.temperature
// private void writeEdgeReactions(ReactionSystem p_rs, ReactionTime p_time) {
// StringBuilder restartFileContent =new StringBuilder();
// int reactionCount = 1;
// try{
// File coreSpecies = new File ("Restart/edgeReactions.txt");
// FileWriter fw = new FileWriter(coreSpecies);
// for(Iterator iter=((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().iterator();iter.hasNext();){
// Reaction reaction = (Reaction) iter.next();
// //if (reaction.getDirection()==1){
// //restartFileContent = restartFileContent + "("+ reactionCount + ") "+species.getChemkinName() + " " + reactionSystem.getPresentConcentration(species) + " (mol/cm3) \n";
// restartFileContent = restartFileContent.append(reaction.toRestartString(p_rs.getTemperature(p_time)) + "\n");
// reactionCount = reactionCount + 1;
// //restartFileContent += "\nEND";
// fw.write(restartFileContent.toString());
// fw.close();
// catch (IOException e){
// System.out.println("Could not write the restart edgereactions file");
// System.exit(0);
/*
* MRH 25MAR2010:
* This method is no longer used
*/
//10/25/07 gmagoon: added reaction system and reaction time as parameters and eliminated use of Global.temperature
// private void writeAllReactions(ReactionSystem p_rs, ReactionTime p_time) {
// StringBuilder restartFileContent = new StringBuilder();
// int reactionCount = 1;
// try{
// File allReactions = new File ("Restart/allReactions.txt");
// FileWriter fw = new FileWriter(allReactions);
// for(Iterator iter=getReactionModel().getReaction();iter.hasNext();){
// Reaction reaction = (Reaction) iter.next();
// //restartFileContent = restartFileContent + "("+ reactionCount + ") "+species.getChemkinName() + " " + reactionSystem.getPresentConcentration(species) + " (mol/cm3) \n";
// restartFileContent = restartFileContent.append(reaction.toRestartString(p_rs.getTemperature(p_time)) + "\n");
// for(Iterator iter=((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().iterator();iter.hasNext();){
// Reaction reaction = (Reaction) iter.next();
// //if (reaction.getDirection()==1){
// //restartFileContent = restartFileContent + "("+ reactionCount + ") "+species.getChemkinName() + " " + reactionSystem.getPresentConcentration(species) + " (mol/cm3) \n";
// restartFileContent = restartFileContent.append(reaction.toRestartString(p_rs.getTemperature(p_time)) + "\n");
// //restartFileContent += "\nEND";
// fw.write(restartFileContent.toString());
// fw.close();
// catch (IOException e){
// System.out.println("Could not write the restart edgereactions file");
// System.exit(0);
private void writeEdgeSpecies() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Restart/edgeSpecies.txt"));
for(Iterator iter=((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().iterator();iter.hasNext();){
Species species = (Species) iter.next();
bw.write(species.getName()+"("+species.getID()+")");
bw.newLine();
int dummyInt = 0;
bw.write(species.getChemGraph().toStringWithoutH(dummyInt));
bw.newLine();
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
private void writePrunedEdgeSpecies(Species species) {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Pruning/edgeSpecies.txt", true));
bw.write(species.getChemkinName());
bw.newLine();
int dummyInt = 0;
bw.write(species.getChemGraph().toString(dummyInt));
bw.newLine();
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
/*
* MRH 25MAR2010:
* This method is no longer used
*/
//10/25/07 gmagoon: added reaction system and reaction time as parameters and eliminated use of Global.temperature
// private void writeCoreReactions(ReactionSystem p_rs, ReactionTime p_time) {
// StringBuilder restartFileContent = new StringBuilder();
// int reactionCount = 0;
// try{
// File coreSpecies = new File ("Restart/coreReactions.txt");
// FileWriter fw = new FileWriter(coreSpecies);
// for(Iterator iter=getReactionModel().getReaction();iter.hasNext();){
// Reaction reaction = (Reaction) iter.next();
// if (reaction.getDirection()==1){
// //restartFileContent = restartFileContent + "("+ reactionCount + ") "+species.getChemkinName() + " " + reactionSystem.getPresentConcentration(species) + " (mol/cm3) \n";
// restartFileContent = restartFileContent.append(reaction.toRestartString(p_rs.getTemperature(p_time)) + "\n");
// reactionCount = reactionCount + 1;
// //restartFileContent += "\nEND";
// fw.write(restartFileContent.toString());
// fw.close();
// catch (IOException e){
// System.out.println("Could not write the restart corereactions file");
// System.exit(0);
private void writeCoreSpecies() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Restart/coreSpecies.txt"));
for(Iterator iter=getReactionModel().getSpecies();iter.hasNext();){
Species species = (Species) iter.next();
bw.write(species.getName()+"("+species.getID()+")");
bw.newLine();
int dummyInt = 0;
bw.write(species.getChemGraph().toStringWithoutH(dummyInt));
bw.newLine();
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
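/*
 * Layout written by writeCoreSpecies() and writeEdgeSpecies(): for each species, one
 * header line of the form "name(ID)" followed by the adjacency list produced by
 * ChemGraph.toStringWithoutH(). readRestartSpecies() below parses this same layout by
 * splitting the header on "(" and ")" and then calling ChemParser.readChemGraph().
 */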
private void writeCoreReactions() {
BufferedWriter bw_rxns = null;
BufferedWriter bw_troe = null;
BufferedWriter bw_lindemann = null;
BufferedWriter bw_thirdbody = null;
try {
bw_rxns = new BufferedWriter(new FileWriter("Restart/coreReactions.txt"));
bw_troe = new BufferedWriter(new FileWriter("Restart/troeReactions.txt"));
bw_lindemann = new BufferedWriter(new FileWriter("Restart/lindemannReactions.txt"));
bw_thirdbody = new BufferedWriter(new FileWriter("Restart/thirdBodyReactions.txt"));
String EaUnits = ArrheniusKinetics.getEaUnits();
String AUnits = ArrheniusKinetics.getAUnits();
bw_rxns.write("UnitsOfEa: " + EaUnits);
bw_rxns.newLine();
bw_troe.write("Unit:\nA: mol/cm3/s\nE: " + EaUnits + "\n\nReactions:");
bw_troe.newLine();
bw_lindemann.write("Unit:\nA: mol/cm3/s\nE: " + EaUnits + "\n\nReactions:");
bw_lindemann.newLine();
bw_thirdbody.write("Unit:\nA: mol/cm3/s\nE: " + EaUnits + "\n\nReactions :");
bw_thirdbody.newLine();
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)getReactionModel();
LinkedHashSet allcoreRxns = cerm.core.reaction;
for(Iterator iter=allcoreRxns.iterator(); iter.hasNext();){
Reaction reaction = (Reaction) iter.next();
if (reaction.isForward()) {
if (reaction instanceof TROEReaction) {
TROEReaction troeRxn = (TROEReaction) reaction;
bw_troe.write(troeRxn.toRestartString(new Temperature(298,"K")));
bw_troe.newLine();
}
else if (reaction instanceof LindemannReaction) {
LindemannReaction lindeRxn = (LindemannReaction) reaction;
bw_lindemann.write(lindeRxn.toRestartString(new Temperature(298,"K")));
bw_lindemann.newLine();
}
else if (reaction instanceof ThirdBodyReaction) {
ThirdBodyReaction tbRxn = (ThirdBodyReaction) reaction;
bw_thirdbody.write(tbRxn.toRestartString(new Temperature(298,"K")));
bw_thirdbody.newLine();
}
else {
//bw.write(reaction.toChemkinString(new Temperature(298,"K")));
bw_rxns.write(reaction.toRestartString(new Temperature(298,"K"),false));
bw_rxns.newLine();
}
}
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw_rxns != null) {
bw_rxns.flush();
bw_rxns.close();
}
if (bw_troe != null) {
bw_troe.flush();
bw_troe.close();
}
if (bw_lindemann != null) {
bw_lindemann.flush();
bw_lindemann.close();
}
if (bw_thirdbody != null) {
bw_thirdbody.flush();
bw_thirdbody.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
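/*
 * writeCoreReactions() splits the core reactions across four restart files: plain
 * forward reactions go to Restart/coreReactions.txt (after a "UnitsOfEa:" header),
 * while TROE, Lindemann and third-body reactions go to troeReactions.txt,
 * lindemannReactions.txt and thirdBodyReactions.txt respectively, each entry written
 * via toRestartString() evaluated at 298 K. readRestartReactions() below reads these
 * same files back.
 */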
private void writeEdgeReactions() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Restart/edgeReactions.txt"));
String EaUnits = ArrheniusKinetics.getEaUnits();
bw.write("UnitsOfEa: " + EaUnits);
bw.newLine();
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)getReactionModel();
LinkedHashSet alledgeRxns = cerm.edge.reaction;
for(Iterator iter=alledgeRxns.iterator(); iter.hasNext();){
Reaction reaction = (Reaction) iter.next();
if (reaction.isForward()) {
//bw.write(reaction.toChemkinString(new Temperature(298,"K")));
bw.write(reaction.toRestartString(new Temperature(298,"K"),false));
bw.newLine();
} else if (reaction.getReverseReaction().isForward()) {
//bw.write(reaction.getReverseReaction().toChemkinString(new Temperature(298,"K")));
bw.write(reaction.getReverseReaction().toRestartString(new Temperature(298,"K"),false));
bw.newLine();
} else
System.out.println("Could not determine forward direction for following rxn: " + reaction.toString());
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
//gmagoon 4/5/10: based on Mike's writeEdgeReactions
private void writePrunedEdgeReaction(Reaction reaction) {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Pruning/edgeReactions.txt", true));
if (reaction.isForward()) {
bw.write(reaction.toChemkinString(new Temperature(298,"K")));
// bw.write(reaction.toRestartString(new Temperature(298,"K")));
bw.newLine();
} else if (reaction.getReverseReaction().isForward()) {
bw.write(reaction.getReverseReaction().toChemkinString(new Temperature(298,"K")));
//bw.write(reaction.getReverseReaction().toRestartString(new Temperature(298,"K")));
bw.newLine();
} else
System.out.println("Could not determine forward direction for following rxn: " + reaction.toString());
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
private void writePDepNetworks() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Restart/pdepnetworks.txt"));
int numFameTemps = PDepRateConstant.getTemperatures().length;
int numFamePress = PDepRateConstant.getPressures().length;
int numChebyTemps = ChebyshevPolynomials.getNT();
int numChebyPress = ChebyshevPolynomials.getNP();
int numPlog = PDepArrheniusKinetics.getNumPressures();
String EaUnits = ArrheniusKinetics.getEaUnits();
bw.write("UnitsOfEa: " + EaUnits);
bw.newLine();
bw.write("NumberOfFameTemps: " + numFameTemps);
bw.newLine();
bw.write("NumberOfFamePress: " + numFamePress);
bw.newLine();
bw.write("NumberOfChebyTemps: " + numChebyTemps);
bw.newLine();
bw.write("NumberOfChebyPress: " + numChebyPress);
bw.newLine();
bw.write("NumberOfPLogs: " + numPlog);
bw.newLine();
bw.newLine();
LinkedList allNets = PDepNetwork.getNetworks();
int netCounter = 0;
for(Iterator iter=allNets.iterator(); iter.hasNext();){
PDepNetwork pdepnet = (PDepNetwork) iter.next();
++netCounter;
bw.write("PDepNetwork #" + netCounter);
bw.newLine();
// Write netReactionList
LinkedList netRxns = pdepnet.getNetReactions();
bw.write("netReactionList:");
bw.newLine();
for (Iterator iter2=netRxns.iterator(); iter2.hasNext();) {
PDepReaction currentPDepRxn = (PDepReaction)iter2.next();
bw.write(currentPDepRxn.toString());
bw.newLine();
bw.write(writeRatesAndParameters(currentPDepRxn,numFameTemps,
numFamePress,numChebyTemps,numChebyPress,numPlog));
PDepReaction currentPDepReverseRxn = currentPDepRxn.getReverseReaction();
bw.write(currentPDepReverseRxn.toString());
bw.newLine();
bw.write(writeRatesAndParameters(currentPDepReverseRxn,numFameTemps,
numFamePress,numChebyTemps,numChebyPress,numPlog));
}
// Write nonincludedReactionList
LinkedList nonIncludeRxns = pdepnet.getNonincludedReactions();
bw.write("nonIncludedReactionList:");
bw.newLine();
for (Iterator iter2=nonIncludeRxns.iterator(); iter2.hasNext();) {
PDepReaction currentPDepRxn = (PDepReaction)iter2.next();
bw.write(currentPDepRxn.toString());
bw.newLine();
bw.write(writeRatesAndParameters(currentPDepRxn,numFameTemps,
numFamePress,numChebyTemps,numChebyPress,numPlog));
PDepReaction currentPDepReverseRxn = currentPDepRxn.getReverseReaction();
bw.write(currentPDepReverseRxn.toString());
bw.newLine();
bw.write(writeRatesAndParameters(currentPDepReverseRxn,numFameTemps,
numFamePress,numChebyTemps,numChebyPress,numPlog));
}
// Write pathReactionList
LinkedList pathRxns = pdepnet.getPathReactions();
bw.write("pathReactionList:");
bw.newLine();
for (Iterator iter2=pathRxns.iterator(); iter2.hasNext();) {
PDepReaction currentPDepRxn = (PDepReaction)iter2.next();
bw.write(currentPDepRxn.getDirection() + "\t" + currentPDepRxn.toRestartString(new Temperature(298,"K")));
bw.newLine();
}
bw.newLine();
bw.newLine();
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
public String writeRatesAndParameters(PDepReaction pdeprxn, int numFameTemps,
int numFamePress, int numChebyTemps, int numChebyPress, int numPlog) {
StringBuilder sb = new StringBuilder();
// Write the rate coefficients
double[][] rateConstants = pdeprxn.getPDepRate().getRateConstants();
for (int i=0; i<numFameTemps; i++) {
for (int j=0; j<numFamePress; j++) {
sb.append(rateConstants[i][j] + "\t");
}
sb.append("\n");
}
sb.append("\n");
// If chebyshev polynomials are present, write them
if (numChebyTemps != 0) {
ChebyshevPolynomials chebyPolys = pdeprxn.getPDepRate().getChebyshev();
for (int i=0; i<numChebyTemps; i++) {
for (int j=0; j<numChebyPress; j++) {
sb.append(chebyPolys.getAlpha(i,j) + "\t");
}
sb.append("\n");
}
sb.append("\n");
}
// If plog parameters are present, write them
else if (numPlog != 0) {
PDepArrheniusKinetics kinetics = pdeprxn.getPDepRate().getPDepArrheniusKinetics();
for (int i=0; i<numPlog; i++) {
double Hrxn = pdeprxn.calculateHrxn(new Temperature(298,"K"));
sb.append(kinetics.pressures[i].getPa() + "\t" + kinetics.getKinetics(i).toChemkinString(Hrxn,new Temperature(298,"K"),false) + "\n");
}
sb.append("\n");
}
return sb.toString();
}
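/*
 * Block layout produced by writeRatesAndParameters() for each pressure-dependent
 * reaction: first a numFameTemps x numFamePress table of k(T,P) values (tab-separated,
 * one temperature per row), then either a numChebyTemps x numChebyPress table of
 * Chebyshev coefficients or, when PLOG kinetics are used instead, one
 * "pressure<TAB>Chemkin-style Arrhenius line" per pressure. readPDepNetworks() below
 * consumes the blocks in exactly this order.
 */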
public LinkedList getTimeStep() {
return timeStep;
}
public void setTimeStep(ReactionTime p_timeStep) {
if (timeStep == null)
timeStep = new LinkedList();
timeStep.add(p_timeStep);
}
public String getWorkingDirectory() {
return workingDirectory;
}
public void setWorkingDirectory(String p_workingDirectory) {
workingDirectory = p_workingDirectory;
}
//svp
public boolean getError(){
return error;
}
//svp
public boolean getSensitivity(){
return sensitivity;
}
public LinkedList getSpeciesList() {
return species;
}
//gmagoon 10/24/07: commented out getReactionSystem and setReactionSystem
// public ReactionSystem getReactionSystem() {
// return reactionSystem;
//11/2/07 gmagoon: adding accessor method for reactionSystemList
public LinkedList getReactionSystemList(){
return reactionSystemList;
}
//added by gmagoon 9/24/07
// public void setReactionSystem(ReactionSystem p_ReactionSystem) {
// reactionSystem = p_ReactionSystem;
//copied from ReactionSystem.java by gmagoon 9/24/07
public ReactionModel getReactionModel() {
return reactionModel;
}
public void readRestartSpecies() {
System.out.println("Reading in species from Restart folder");
// Read in core species -- NOTE: this code is almost duplicated in the "Read in edge species" block below
try {
FileReader in = new FileReader("Restart/coreSpecies.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
// The first line of a new species is the user-defined name
String totalSpeciesName = line;
String[] splitString1 = totalSpeciesName.split("[(]");
String[] splitString2 = splitString1[splitString1.length-1].split("[)]");
// The remaining lines are the graph
Graph g = ChemParser.readChemGraph(reader);
// Make the ChemGraph, assuming it does not contain a forbidden structure
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
} catch (ForbiddenStructureException e) {
System.out.println("Error reading graph: Graph contains a forbidden structure.\n" + g.toString());
System.exit(0);
}
// Make the species
int intLocation = totalSpeciesName.indexOf("(" + splitString2[0] + ")");
Species species = Species.make(totalSpeciesName.substring(0,intLocation),cg,Integer.parseInt(splitString2[0]));
// Add the new species to the set of species
restartCoreSpcs.add(species);
/*int species_type = 1; // reacted species
for (int i=0; i<numRxnSystems; i++) {
SpeciesStatus ss = new SpeciesStatus(species,species_type,y[i],yprime[i]);
speciesStatus[i].put(species, ss);
}*/
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
// Read in edge species
try {
FileReader in = new FileReader("Restart/edgeSpecies.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
// The first line of a new species is the user-defined name
String totalSpeciesName = line;
String[] splitString1 = totalSpeciesName.split("[(]");
String[] splitString2 = splitString1[splitString1.length-1].split("[)]"); // Change JDM to reflect MRH 2-11-2010
// The remaining lines are the graph
Graph g = ChemParser.readChemGraph(reader);
// Make the ChemGraph, assuming it does not contain a forbidden structure
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
} catch (ForbiddenStructureException e) {
System.out.println("Error reading graph: Graph contains a forbidden structure.\n" + g.toString());
System.exit(0);
}
// Make the species
Species species = Species.make(splitString1[0],cg,Integer.parseInt(splitString2[0]));
// Add the new species to the set of species
restartEdgeSpcs.add(species);
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
public void readRestartReactions() {
// Grab the IDs from the core species
int[] coreSpcsIds = new int[restartCoreSpcs.size()];
int i = 0;
for (Iterator iter = restartCoreSpcs.iterator(); iter.hasNext();) {
Species spcs = (Species)iter.next();
coreSpcsIds[i] = spcs.getID();
++i;
}
System.out.println("Reading reactions from Restart folder");
// Read in core reactions
try {
FileReader in = new FileReader("Restart/coreReactions.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
// Determine units of Ea
StringTokenizer st = new StringTokenizer(line);
String tempString = st.nextToken();
String EaUnits = st.nextToken();
line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
if (!line.trim().equals("DUP")) {
Reaction r = ChemParser.parseRestartReaction(line,coreSpcsIds,"core",EaUnits);
Iterator rxnIter = restartCoreRxns.iterator();
boolean foundRxn = false;
while (rxnIter.hasNext()) {
Reaction old = (Reaction)rxnIter.next();
if (old.equals(r)) {
old.addAdditionalKinetics(r.getKinetics()[0],1);
foundRxn = true;
break;
}
}
if (!foundRxn) {
if (r.hasReverseReaction()) r.generateReverseReaction();
restartCoreRxns.add(r);
}
}
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
try {
SeedMechanism.readThirdBodyReactions("Restart/thirdBodyReactions.txt");
} catch (IOException e1) {
e1.printStackTrace();
}
try {
SeedMechanism.readLindemannReactions("Restart/lindemannReactions.txt");
} catch (IOException e1) {
e1.printStackTrace();
}
try {
SeedMechanism.readTroeReactions("Restart/troeReactions.txt");
} catch (IOException e1) {
e1.printStackTrace();
}
restartCoreRxns.addAll(SeedMechanism.reactionSet);
// Read in edge reactions
try {
FileReader in = new FileReader("Restart/edgeReactions.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
// Determine units of Ea
StringTokenizer st = new StringTokenizer(line);
String tempString = st.nextToken();
String EaUnits = st.nextToken();
line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
if (!line.trim().equals("DUP")) {
Reaction r = ChemParser.parseRestartReaction(line,coreSpcsIds,"edge",EaUnits);
Iterator rxnIter = restartEdgeRxns.iterator();
boolean foundRxn = false;
while (rxnIter.hasNext()) {
Reaction old = (Reaction)rxnIter.next();
if (old.equals(r)) {
old.addAdditionalKinetics(r.getKinetics()[0],1);
foundRxn = true;
break;
}
}
if (!foundRxn) {
r.generateReverseReaction();
restartEdgeRxns.add(r);
}
}
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
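/*
 * Duplicate handling in readRestartReactions(): lines consisting only of "DUP" are
 * skipped, and a parsed reaction that equals one already in restartCoreRxns or
 * restartEdgeRxns is merged into it via addAdditionalKinetics(), so duplicated
 * reactions end up as a single Reaction object carrying multiple kinetics.
 */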
public LinkedHashMap getRestartSpeciesStatus(int i) {
LinkedHashMap speciesStatus = new LinkedHashMap();
try {
FileReader in = new FileReader("Restart/coreSpecies.txt");
BufferedReader reader = new BufferedReader(in);
Integer numRxnSystems = Integer.parseInt(ChemParser.readMeaningfulLine(reader));
String line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
// The first line of a new species is the user-defined name
String totalSpeciesName = line;
String[] splitString1 = totalSpeciesName.split("[(]");
String[] splitString2 = splitString1[1].split("[)]");
double y = 0.0;
double yprime = 0.0;
for (int j=0; j<numRxnSystems; j++) {
StringTokenizer st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
if (j == i) {
y = Double.parseDouble(st.nextToken());
yprime = Double.parseDouble(st.nextToken());
}
}
// The remaining lines are the graph
Graph g = ChemParser.readChemGraph(reader);
// Make the ChemGraph, assuming it does not contain a forbidden structure
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
} catch (ForbiddenStructureException e) {
System.out.println("Error reading graph: Graph contains a forbidden structure.\n" + g.toString());
System.exit(0);
}
// Make the species
Species species = Species.make(splitString1[0],cg);
// Add the new species to the set of species
//restartCoreSpcs.add(species);
int species_type = 1; // reacted species
SpeciesStatus ss = new SpeciesStatus(species,species_type,y,yprime);
speciesStatus.put(species, ss);
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return speciesStatus;
}
public void putRestartSpeciesInInitialStatus(InitialStatus is, int i) {
try {
FileReader in = new FileReader("Restart/coreSpecies.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
// The first line of a new species is the user-defined name
String totalSpeciesName = line;
String[] splitString1 = totalSpeciesName.split("[(]");
String[] splitString2 = splitString1[1].split("[)]");
// The remaining lines are the graph
Graph g = ChemParser.readChemGraph(reader);
// Make the ChemGraph, assuming it does not contain a forbidden structure
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
} catch (ForbiddenStructureException e) {
System.out.println("Error reading graph: Graph contains a forbidden structure.\n" + g.toString());
System.exit(0);
}
// Make the species
Species species = Species.make(splitString1[0],cg);
// Add the new species to the set of species
//restartCoreSpcs.add(species);
if (is.getSpeciesStatus(species) == null) {
SpeciesStatus ss = new SpeciesStatus(species,1,0.0,0.0);
is.putSpeciesStatus(ss);
}
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
public void readPDepNetworks() {
SpeciesDictionary sd = SpeciesDictionary.getInstance();
LinkedList allNetworks = PDepNetwork.getNetworks();
try {
FileReader in = new FileReader("Restart/pdepnetworks.txt");
BufferedReader reader = new BufferedReader(in);
StringTokenizer st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
String tempString = st.nextToken();
String EaUnits = st.nextToken();
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numFameTs = Integer.parseInt(st.nextToken());
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numFamePs = Integer.parseInt(st.nextToken());
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numChebyTs = Integer.parseInt(st.nextToken());
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numChebyPs = Integer.parseInt(st.nextToken());
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numPlogs = Integer.parseInt(st.nextToken());
double[][] rateCoefficients = new double[numFameTs][numFamePs];
double[][] chebyPolys = new double[numChebyTs][numChebyPs];
Kinetics[] plogKinetics = new Kinetics[numPlogs];
String line = ChemParser.readMeaningfulLine(reader); // line should be "PDepNetwork #_"
while (line != null) {
line = ChemParser.readMeaningfulLine(reader); // line should now be "netReactionList:"
PDepNetwork newNetwork = new PDepNetwork();
LinkedList netRxns = newNetwork.getNetReactions();
LinkedList nonincludeRxns = newNetwork.getNonincludedReactions();
line = ChemParser.readMeaningfulLine(reader); // line is either data or "nonIncludedReactionList"
// If line is "nonincludedreactionlist", we need to skip over this while loop
if (!line.toLowerCase().startsWith("nonincludedreactionlist")) {
while (!line.toLowerCase().startsWith("nonincludedreactionlist")) {
// Read in the forward rxn
String[] reactsANDprods = line.split("\\-->"); // NOTE: delimiter reconstructed (the literal was truncated); assumed to be "-->", the separator PDepReaction.toString() appears to place between reactants and products
PDepIsomer Reactants = null;
String reacts = reactsANDprods[0].trim();
if (reacts.contains("+")) {
String[] indivReacts = reacts.split("[+]");
String name = indivReacts[0].trim();
Species spc1 = sd.getSpeciesFromChemkinName(name);
if (spc1 == null) {
spc1 = getSpeciesBySPCName(name,sd);
}
name = indivReacts[1].trim();
Species spc2 = sd.getSpeciesFromChemkinName(name);
if (spc2 == null) {
spc2 = getSpeciesBySPCName(name,sd);
}
Reactants = new PDepIsomer(spc1,spc2);
} else {
String name = reacts.trim();
Species spc = sd.getSpeciesFromChemkinName(name);
if (spc == null) {
spc = getSpeciesBySPCName(name,sd);
}
Reactants = new PDepIsomer(spc);
}
PDepIsomer Products = null;
String prods = reactsANDprods[1].trim();
if (prods.contains("+")) {
String[] indivProds = prods.split("[+]");
String name = indivProds[0].trim();
Species spc1 = sd.getSpeciesFromChemkinName(name);
if (spc1 == null) {
spc1 = getSpeciesBySPCName(name,sd);
}
name = indivProds[1].trim();
Species spc2 = sd.getSpeciesFromChemkinName(name);
if (spc2 == null) {
spc2 = getSpeciesBySPCName(name,sd);
}
Products = new PDepIsomer(spc1,spc2);
} else {
String name = prods.trim();
Species spc = sd.getSpeciesFromChemkinName(name);
if (spc == null) {
spc = getSpeciesBySPCName(name,sd);
}
Products = new PDepIsomer(spc);
}
newNetwork.addIsomer(Reactants);
newNetwork.addIsomer(Products);
for (int i=0; i<numFameTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numFamePs; j++) {
rateCoefficients[i][j] = Double.parseDouble(st.nextToken());
}
}
PDepRateConstant pdepk = null;
if (numChebyTs > 0) {
for (int i=0; i<numChebyTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numChebyPs; j++) {
chebyPolys[i][j] = Double.parseDouble(st.nextToken());
}
}
ChebyshevPolynomials chebyshev = new ChebyshevPolynomials(numChebyTs,
ChebyshevPolynomials.getTlow(), ChebyshevPolynomials.getTup(),
numChebyPs, ChebyshevPolynomials.getPlow(), ChebyshevPolynomials.getPup(),
chebyPolys);
pdepk = new PDepRateConstant(rateCoefficients,chebyshev);
} else if (numPlogs > 0) {
for (int i=0; i<numPlogs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
Pressure p = new Pressure(Double.parseDouble(st.nextToken()),"Pa");
UncertainDouble dA = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
UncertainDouble dn = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
double Ea = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
Ea = Ea / 1000;
else if (EaUnits.equals("J/mol"))
Ea = Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
Ea = Ea / 4.184;
else if (EaUnits.equals("Kelvins"))
Ea = Ea * 1.987;
UncertainDouble dE = new UncertainDouble(Ea,0.0,"A");
ArrheniusKinetics k = new ArrheniusKinetics(dA, dn, dE, "", 1, "", "");
PDepArrheniusKinetics pdepAK = new PDepArrheniusKinetics(i);
pdepAK.setKinetics(i, p, k);
pdepk = new PDepRateConstant(rateCoefficients,pdepAK);
}
}
PDepReaction forward = new PDepReaction(Reactants, Products, pdepk);
// Read in the reverse reaction
line = ChemParser.readMeaningfulLine(reader);
for (int i=0; i<numFameTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numFamePs; j++) {
rateCoefficients[i][j] = Double.parseDouble(st.nextToken());
}
}
pdepk = null;
if (numChebyTs > 0) {
for (int i=0; i<numChebyTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numChebyPs; j++) {
chebyPolys[i][j] = Double.parseDouble(st.nextToken());
}
}
ChebyshevPolynomials chebyshev = new ChebyshevPolynomials(numChebyTs,
ChebyshevPolynomials.getTlow(), ChebyshevPolynomials.getTup(),
numChebyPs, ChebyshevPolynomials.getPlow(), ChebyshevPolynomials.getPup(),
chebyPolys);
pdepk = new PDepRateConstant(rateCoefficients,chebyshev);
} else if (numPlogs > 0) {
for (int i=0; i<numPlogs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
Pressure p = new Pressure(Double.parseDouble(st.nextToken()),"Pa");
UncertainDouble dA = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
UncertainDouble dn = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
double Ea = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
Ea = Ea / 1000;
else if (EaUnits.equals("J/mol"))
Ea = Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
Ea = Ea / 4.184;
else if (EaUnits.equals("Kelvins"))
Ea = Ea * 1.987;
UncertainDouble dE = new UncertainDouble(Ea,0.0,"A");
ArrheniusKinetics k = new ArrheniusKinetics(dA, dn, dE, "", 1, "", "");
PDepArrheniusKinetics pdepAK = new PDepArrheniusKinetics(i);
pdepAK.setKinetics(i, p, k);
pdepk = new PDepRateConstant(rateCoefficients,pdepAK);
}
}
PDepReaction reverse = new PDepReaction(Products, Reactants, pdepk);
reverse.setReverseReaction(forward);
forward.setReverseReaction(reverse);
netRxns.add(forward);
line = ChemParser.readMeaningfulLine(reader);
}
}
// This loop ends once line == "nonIncludedReactionList"
line = ChemParser.readMeaningfulLine(reader); // line is either data or "pathReactionList"
if (!line.toLowerCase().startsWith("pathreactionList")) {
while (!line.toLowerCase().startsWith("pathreactionlist")) {
// Read in the forward rxn
String[] reactsANDprods = line.split("\\-->"); // NOTE: delimiter reconstructed (the literal was truncated); assumed to be "-->", as in the netReactionList parsing above
PDepIsomer Reactants = null;
String reacts = reactsANDprods[0].trim();
if (reacts.contains("+")) {
String[] indivReacts = reacts.split("[+]");
String name = indivReacts[0].trim();
Species spc1 = sd.getSpeciesFromChemkinName(name);
if (spc1 == null) {
spc1 = getSpeciesBySPCName(name,sd);
}
name = indivReacts[1].trim();
Species spc2 = sd.getSpeciesFromChemkinName(name);
if (spc2 == null) {
spc2 = getSpeciesBySPCName(name,sd);
}
Reactants = new PDepIsomer(spc1,spc2);
} else {
String name = reacts.trim();
Species spc = sd.getSpeciesFromChemkinName(name);
if (spc == null) {
spc = getSpeciesBySPCName(name,sd);
}
Reactants = new PDepIsomer(spc);
}
PDepIsomer Products = null;
String prods = reactsANDprods[1].trim();
if (prods.contains("+")) {
String[] indivProds = prods.split("[+]");
String name = indivProds[0].trim();
Species spc1 = sd.getSpeciesFromChemkinName(name);
if (spc1 == null) {
spc1 = getSpeciesBySPCName(name,sd);
}
name = indivProds[1].trim();
Species spc2 = sd.getSpeciesFromChemkinName(name);
if (spc2 == null) {
spc2 = getSpeciesBySPCName(name,sd);
}
Products = new PDepIsomer(spc1,spc2);
} else {
String name = prods.trim();
Species spc = sd.getSpeciesFromChemkinName(name);
if (spc == null) {
spc = getSpeciesBySPCName(name,sd);
}
Products = new PDepIsomer(spc);
}
newNetwork.addIsomer(Reactants);
newNetwork.addIsomer(Products);
for (int i=0; i<numFameTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numFamePs; j++) {
rateCoefficients[i][j] = Double.parseDouble(st.nextToken());
}
}
PDepRateConstant pdepk = null;
if (numChebyTs > 0) {
for (int i=0; i<numChebyTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numChebyPs; j++) {
chebyPolys[i][j] = Double.parseDouble(st.nextToken());
}
}
ChebyshevPolynomials chebyshev = new ChebyshevPolynomials(numChebyTs,
ChebyshevPolynomials.getTlow(), ChebyshevPolynomials.getTup(),
numChebyPs, ChebyshevPolynomials.getPlow(), ChebyshevPolynomials.getPup(),
chebyPolys);
pdepk = new PDepRateConstant(rateCoefficients,chebyshev);
} else if (numPlogs > 0) {
for (int i=0; i<numPlogs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
Pressure p = new Pressure(Double.parseDouble(st.nextToken()),"Pa");
UncertainDouble dA = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
UncertainDouble dn = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
double Ea = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
Ea = Ea / 1000;
else if (EaUnits.equals("J/mol"))
Ea = Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
Ea = Ea / 4.184;
else if (EaUnits.equals("Kelvins"))
Ea = Ea * 1.987;
UncertainDouble dE = new UncertainDouble(Ea,0.0,"A");
ArrheniusKinetics k = new ArrheniusKinetics(dA, dn, dE, "", 1, "", "");
PDepArrheniusKinetics pdepAK = new PDepArrheniusKinetics(i);
pdepAK.setKinetics(i, p, k);
pdepk = new PDepRateConstant(rateCoefficients,pdepAK);
}
}
PDepReaction forward = new PDepReaction(Reactants, Products, pdepk);
// Read in the reverse reaction
line = ChemParser.readMeaningfulLine(reader);
for (int i=0; i<numFameTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numFamePs; j++) {
rateCoefficients[i][j] = Double.parseDouble(st.nextToken());
}
}
pdepk = null;
if (numChebyTs > 0) {
for (int i=0; i<numChebyTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numChebyPs; j++) {
chebyPolys[i][j] = Double.parseDouble(st.nextToken());
}
}
ChebyshevPolynomials chebyshev = new ChebyshevPolynomials(numChebyTs,
ChebyshevPolynomials.getTlow(), ChebyshevPolynomials.getTup(),
numChebyPs, ChebyshevPolynomials.getPlow(), ChebyshevPolynomials.getPup(),
chebyPolys);
pdepk = new PDepRateConstant(rateCoefficients,chebyshev);
} else if (numPlogs > 0) {
for (int i=0; i<numPlogs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
Pressure p = new Pressure(Double.parseDouble(st.nextToken()),"Pa");
UncertainDouble dA = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
UncertainDouble dn = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
double Ea = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
Ea = Ea / 1000;
else if (EaUnits.equals("J/mol"))
Ea = Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
Ea = Ea / 4.184;
else if (EaUnits.equals("Kelvins"))
Ea = Ea * 1.987;
UncertainDouble dE = new UncertainDouble(Ea,0.0,"A");
ArrheniusKinetics k = new ArrheniusKinetics(dA, dn, dE, "", 1, "", "");
PDepArrheniusKinetics pdepAK = new PDepArrheniusKinetics(i);
pdepAK.setKinetics(i, p, k);
pdepk = new PDepRateConstant(rateCoefficients,pdepAK);
}
}
PDepReaction reverse = new PDepReaction(Products, Reactants, pdepk);
reverse.setReverseReaction(forward);
forward.setReverseReaction(reverse);
nonincludeRxns.add(forward);
line = ChemParser.readMeaningfulLine(reader);
}
}
// This loop ends once line == "pathReactionList"
line = ChemParser.readMeaningfulLine(reader); // line is either data or "PDepNetwork #_" or null (end of file)
while (line != null && !line.toLowerCase().startsWith("pdepnetwork")) {
st = new StringTokenizer(line);
int direction = Integer.parseInt(st.nextToken());
// First token is the rxn structure: A+B=C+D
// Note: Up to 3 reactants/products allowed
// : Either "=" or "=>" will separate reactants and products
String structure = st.nextToken();
// Separate the reactants from the products
boolean generateReverse = false;
String[] reactsANDprods = structure.split("\\=>");
if (reactsANDprods.length == 1) {
reactsANDprods = structure.split("[=]");
generateReverse = true;
}
sd = SpeciesDictionary.getInstance();
LinkedList r = ChemParser.parseReactionSpecies(sd, reactsANDprods[0]);
LinkedList p = ChemParser.parseReactionSpecies(sd, reactsANDprods[1]);
Structure s = new Structure(r,p);
s.setDirection(direction);
// Next three tokens are the modified Arrhenius parameters
double rxn_A = Double.parseDouble(st.nextToken());
double rxn_n = Double.parseDouble(st.nextToken());
double rxn_E = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
rxn_E = rxn_E / 1000;
else if (EaUnits.equals("J/mol"))
rxn_E = rxn_E / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
rxn_E = rxn_E / 4.184;
else if (EaUnits.equals("Kelvins"))
rxn_E = rxn_E * 1.987;
UncertainDouble uA = new UncertainDouble(rxn_A,0.0,"A");
UncertainDouble un = new UncertainDouble(rxn_n,0.0,"A");
UncertainDouble uE = new UncertainDouble(rxn_E,0.0,"A");
// The remaining tokens are comments
String comments = "";
if (st.hasMoreTokens()) {
String beginningOfComments = st.nextToken();
int startIndex = line.indexOf(beginningOfComments);
comments = line.substring(startIndex);
}
if (comments.startsWith("!")) comments = comments.substring(1);
// while (st.hasMoreTokens()) {
// comments += st.nextToken();
ArrheniusKinetics[] k = new ArrheniusKinetics[1];
k[0] = new ArrheniusKinetics(uA,un,uE,"",1,"",comments);
Reaction pathRxn = new Reaction();
// if (direction == 1)
// pathRxn = Reaction.makeReaction(s,k,generateReverse);
// else
// pathRxn = Reaction.makeReaction(s.generateReverseStructure(),k,generateReverse);
pathRxn = Reaction.makeReaction(s,k,generateReverse);
PDepIsomer Reactants = new PDepIsomer(r);
PDepIsomer Products = new PDepIsomer(p);
PDepReaction pdeppathrxn = new PDepReaction(Reactants,Products,pathRxn);
newNetwork.addReaction(pdeppathrxn,true);
line = ChemParser.readMeaningfulLine(reader);
}
PDepNetwork.getNetworks().add(newNetwork);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
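/*
 * Parse order in readPDepNetworks(): the header counts (UnitsOfEa, NumberOfFameTemps,
 * NumberOfFamePress, NumberOfChebyTemps, NumberOfChebyPress, NumberOfPLogs) are read
 * once, then for every "PDepNetwork #" block the netReactionList,
 * nonIncludedReactionList and pathReactionList sections are consumed in that order,
 * mirroring what writePDepNetworks() emits above.
 */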
/**
* MRH 14Jan2010
*
* getSpeciesBySPCName
*
* Input: String name - Name of species, normally chemical formula followed
* by "J"s for radicals, and then (#)
* SpeciesDictionary sd
*
* This method was originally written as a complement to the method readPDepNetworks.
* jdmo found a bug with the readrestart option. The bug was that the method was
* attempting to add a null species to the Isomer list. The null species resulted
* from searching the SpeciesDictionary by chemkinName (e.g. C4H8OJJ(48)), when the
* chemkinName present in the dictionary was SPC(48).
*
*/
public Species getSpeciesBySPCName(String name, SpeciesDictionary sd) {
String[] nameFromNumber = name.split("\\(");
String newName = "SPC(" + nameFromNumber[1];
return sd.getSpeciesFromChemkinName(newName);
}
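/*
 * Illustrative sketch (not part of the original code): the Ea unit conversion repeated
 * throughout readPDepNetworks() above applies the factors below (1000 for cal/mol,
 * 4.184*1000 for J/mol, 4.184 for kJ/mol, and R = 1.987 for Kelvins). The hypothetical
 * helper collects those branches in one place; it is not called anywhere in this class.
 */
private static double convertEa(double Ea, String EaUnits) {
if (EaUnits.equals("cal/mol")) return Ea / 1000;
else if (EaUnits.equals("J/mol")) return Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol")) return Ea / 4.184;
else if (EaUnits.equals("Kelvins")) return Ea * 1.987;
return Ea; // assumed to already be in the internal unit
}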
/**
* MRH 12-Jun-2009
*
* Function initializes the model's core and edge.
* The initial core species always consists of the species contained
* in the condition.txt file. If seed mechanisms exist, those species
* (and the reactions given in the seed mechanism) are also added to
* the core.
* The initial edge species/reactions are determined by reacting the core
* species by one full iteration.
*/
public void initializeCoreEdgeModel() {
LinkedHashSet allInitialCoreSpecies = new LinkedHashSet();
LinkedHashSet allInitialCoreRxns = new LinkedHashSet();
if (readrestart) {
readRestartReactions();
if (PDepNetwork.generateNetworks) readPDepNetworks();
allInitialCoreSpecies.addAll(restartCoreSpcs);
allInitialCoreRxns.addAll(restartCoreRxns);
}
// Add the species from the condition.txt (input) file
allInitialCoreSpecies.addAll(getSpeciesSeed());
// Add the species from the seed mechanisms, if they exist
if (hasSeedMechanisms()) {
allInitialCoreSpecies.addAll(getSeedMechanism().getSpeciesSet());
allInitialCoreRxns.addAll(getSeedMechanism().getReactionSet());
}
CoreEdgeReactionModel cerm = new CoreEdgeReactionModel(allInitialCoreSpecies, allInitialCoreRxns);
if (readrestart) {
cerm.addUnreactedSpeciesSet(restartEdgeSpcs);
cerm.addUnreactedReactionSet(restartEdgeRxns);
}
setReactionModel(cerm);
PDepNetwork.reactionModel = getReactionModel();
PDepNetwork.reactionSystem = (ReactionSystem) getReactionSystemList().get(0);
// Determine initial set of reactions and edge species using only the
// species enumerated in the input file and the seed mechanisms as the core
if (!readrestart) {
LinkedHashSet reactionSet;
if (hasSeedMechanisms() && getSeedMechanism().shouldGenerateReactions()) {
reactionSet = getReactionGenerator().react(allInitialCoreSpecies);
}
else {
reactionSet = new LinkedHashSet();
for (Iterator iter = speciesSeed.iterator(); iter.hasNext(); ) {
Species spec = (Species) iter.next();
reactionSet.addAll(getReactionGenerator().react(allInitialCoreSpecies, spec));
}
}
reactionSet.addAll(getLibraryReactionGenerator().react(allInitialCoreSpecies));
// Set initial core-edge reaction model based on above results
if (reactionModelEnlarger instanceof RateBasedRME) {
Iterator iter = reactionSet.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
cerm.addReaction(r);
}
}
else {
// Only keep the reactions involving bimolecular reactants and bimolecular products
Iterator iter = reactionSet.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
if (r.getReactantNumber() > 1 && r.getProductNumber() > 1){
cerm.addReaction(r);
}
else {
cerm.categorizeReaction(r.getStructure());
PDepNetwork.addReactionToNetworks(r);
}
}
}
}
for (Integer i = 0; i < reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem) reactionSystemList.get(i);
rs.setReactionModel(getReactionModel());
}
// We cannot return a system with no core reactions, so if this is the case we must keep enlarging until the core is non-empty
while (getReactionModel().isEmpty() && !PDepNetwork.hasCoreReactions((CoreEdgeReactionModel) getReactionModel())) {
for (Integer i = 0; i < reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem) reactionSystemList.get(i);
if (reactionModelEnlarger instanceof RateBasedPDepRME)
rs.initializePDepNetwork();
rs.appendUnreactedSpeciesStatus((InitialStatus)initialStatusList.get(i), rs.getPresentTemperature());
}
enlargeReactionModel();
}
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
rs.setReactionModel(getReactionModel());
}
return;
}
//## operation initializeCoreEdgeModelWithPRL()
//9/24/07 gmagoon: moved from ReactionSystem.java
public void initializeCoreEdgeModelWithPRL() {
//#[ operation initializeCoreEdgeModelWithPRL()
initializeCoreEdgeModelWithoutPRL();
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)getReactionModel();
LinkedHashSet primarySpeciesSet = getPrimaryReactionLibrary().getSpeciesSet(); //10/14/07 gmagoon: changed to use getPrimaryReactionLibrary
LinkedHashSet primaryReactionSet = getPrimaryReactionLibrary().getReactionSet();
cerm.addReactedSpeciesSet(primarySpeciesSet);
cerm.addPrimaryReactionSet(primaryReactionSet);
LinkedHashSet newReactions = getReactionGenerator().react(cerm.getReactedSpeciesSet());
if (reactionModelEnlarger instanceof RateBasedRME)
cerm.addReactionSet(newReactions);
else {
Iterator iter = newReactions.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
if (r.getReactantNumber() == 2 && r.getProductNumber() == 2){
cerm.addReaction(r);
}
}
}
return;
//
}
//## operation initializeCoreEdgeModelWithoutPRL()
//9/24/07 gmagoon: moved from ReactionSystem.java
protected void initializeCoreEdgeModelWithoutPRL() {
//#[ operation initializeCoreEdgeModelWithoutPRL()
CoreEdgeReactionModel cerm = new CoreEdgeReactionModel(new LinkedHashSet(getSpeciesSeed()));
setReactionModel(cerm);
PDepNetwork.reactionModel = getReactionModel();
PDepNetwork.reactionSystem = (ReactionSystem) getReactionSystemList().get(0);
// Determine initial set of reactions and edge species using only the
// species enumerated in the input file as the core
LinkedHashSet reactionSet = getReactionGenerator().react(getSpeciesSeed());
reactionSet.addAll(getLibraryReactionGenerator().react(getSpeciesSeed()));
// Set initial core-edge reaction model based on above results
if (reactionModelEnlarger instanceof RateBasedRME) {
// For the rate-based enlarger, every generated reaction is added to the core-edge model without filtering
Iterator iter = reactionSet.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
cerm.addReaction(r);
}
}
else {
// Only keep the reactions involving bimolecular reactants and bimolecular products
Iterator iter = reactionSet.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
if (r.getReactantNumber() > 1 && r.getProductNumber() > 1){
cerm.addReaction(r);
}
else {
cerm.categorizeReaction(r.getStructure());
PDepNetwork.addReactionToNetworks(r);
}
}
}
//10/9/07 gmagoon: copy reactionModel to reactionSystem; there may still be scope problems, particularly in above elseif statement
//10/24/07 gmagoon: want to copy same reaction model to all reactionSystem variables; should probably also make similar modifications elsewhere; may or may not need to copy in ...WithPRL function
for (Integer i = 0; i < reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem) reactionSystemList.get(i);
rs.setReactionModel(getReactionModel());
}
//reactionSystem.setReactionModel(getReactionModel());
// We cannot return a system with no core reactions, so if this is the case we must keep enlarging until the core is non-empty
while (getReactionModel().isEmpty() && !PDepNetwork.hasCoreReactions((CoreEdgeReactionModel) getReactionModel())) {
for (Integer i = 0; i < reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem) reactionSystemList.get(i);
if (reactionModelEnlarger instanceof RateBasedPDepRME)
rs.initializePDepNetwork();
rs.appendUnreactedSpeciesStatus((InitialStatus)initialStatusList.get(i), rs.getPresentTemperature());
}
enlargeReactionModel();
}
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
rs.setReactionModel(getReactionModel());
}
return;
//
}
//## operation initializeCoreEdgeReactionModel()
//9/24/07 gmagoon: moved from ReactionSystem.java
public void initializeCoreEdgeReactionModel() {
System.out.println("\nInitializing core-edge reaction model");
// setSpeciesSeed(new LinkedHashSet());//10/4/07 gmagoon:moved from initializeReactionSystem; later moved to modelGeneration()
//#[ operation initializeCoreEdgeReactionModel()
// if (hasPrimaryReactionLibrary()) initializeCoreEdgeModelWithPRL();
// else initializeCoreEdgeModelWithoutPRL();
/*
* MRH 12-Jun-2009
*
* I've lumped the initializeCoreEdgeModel w/ and w/o a seed mechanism
* (which used to be the PRL) into one function. Before, RMG would
* complete one iteration (construct the edge species/rxns) before adding
* the seed mechanism to the rxn, thereby possibly estimating kinetic
* parameters for a rxn that exists in a seed mechanism
*/
initializeCoreEdgeModel();
//
}
//9/24/07 gmagoon: copied from ReactionSystem.java
public ReactionGenerator getReactionGenerator() {
return reactionGenerator;
}
//10/4/07 gmagoon: moved from ReactionSystem.java
public void setReactionGenerator(ReactionGenerator p_ReactionGenerator) {
reactionGenerator = p_ReactionGenerator;
}
//9/25/07 gmagoon: moved from ReactionSystem.java
//10/24/07 gmagoon: changed to use reactionSystemList
//## operation enlargeReactionModel()
public void enlargeReactionModel() {
//#[ operation enlargeReactionModel()
if (reactionModelEnlarger == null) throw new NullPointerException("ReactionModelEnlarger");
System.out.println("\nEnlarging reaction model");
reactionModelEnlarger.enlargeReactionModel(reactionSystemList, reactionModel, validList);
return;
//
}
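    /**
     * Prunes low-flux species from the edge of the core-edge reaction model.
     * Pruning only occurs when the AUTO solver flag is set, every reaction system has
     * reached its target time/conversion, edgeTol is positive, and the total number of
     * core plus edge species is at least minSpeciesForPruning. Only species flagged as
     * prunable are considered: those whose maximum edge flux ratio (over all reaction
     * systems) falls below edgeTol are pruned, and further low-flux species are pruned
     * until the edge contains no more than maxEdgeSpeciesAfterPruning species. Edge
     * reactions and pressure-dependent network entries involving pruned species are
     * removed as well.
     */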
public void pruneReactionModel() {
HashMap prunableSpeciesMap = new HashMap();
//check whether all the reaction systems reached target conversion/time
boolean allReachedTarget = true;
for (Integer i = 0; i < reactionSystemList.size(); i++) {
JDAS ds = (JDAS)((ReactionSystem) reactionSystemList.get(i)).getDynamicSimulator();
if (!ds.targetReached) allReachedTarget = false;
}
JDAS ds0 = (JDAS)((ReactionSystem) reactionSystemList.get(0)).getDynamicSimulator(); //get the first reactionSystem dynamic simulator
//prune the reaction model if AUTO is being used, and all reaction systems have reached target time/conversion, and edgeTol is non-zero (and positive, obviously), and if there are a sufficient number of species in the reaction model (edge + core)
if ( JDAS.autoflag &&
allReachedTarget &&
edgeTol>0 &&
(((CoreEdgeReactionModel)reactionModel).getEdge().getSpeciesNumber()+reactionModel.getSpeciesNumber())>= minSpeciesForPruning){
int numberToBePruned = ((CoreEdgeReactionModel)reactionModel).getEdge().getSpeciesNumber() - maxEdgeSpeciesAfterPruning;
Iterator iter = JDAS.edgeID.keySet().iterator();//determine the maximum edge flux ratio for each edge species
while(iter.hasNext()){
Species spe = (Species)iter.next();
Integer id = (Integer)JDAS.edgeID.get(spe);
double maxmaxRatio = ds0.maxEdgeFluxRatio[id-1];
boolean prunable = ds0.prunableSpecies[id-1];
for (Integer i = 1; i < reactionSystemList.size(); i++) {//go through the rest of the reaction systems to see if there are higher max flux ratios
JDAS ds = (JDAS)((ReactionSystem) reactionSystemList.get(i)).getDynamicSimulator();
if(ds.maxEdgeFluxRatio[id-1] > maxmaxRatio) maxmaxRatio = ds.maxEdgeFluxRatio[id-1];
if(prunable && !ds.prunableSpecies[id-1]) prunable = false;//I can't imagine a case where this would occur (if the conc. is zero at one condition, it should be zero at all conditions), but it is included for completeness
}
                //if the maximum max edge flux ratio is less than the edge inclusion threshold and the species is "prunable" (i.e. it doesn't have any reactions producing it with zero flux), schedule the species for pruning
if( prunable){ // && maxmaxRatio < edgeTol
prunableSpeciesMap.put(spe, maxmaxRatio);
// at this point prunableSpecies includes ALL prunable species, no matter how large their flux
}
}
// sort the prunableSpecies by maxmaxRatio
// i.e. sort the map by values
List prunableSpeciesList = new LinkedList(prunableSpeciesMap.entrySet());
Collections.sort(prunableSpeciesList, new Comparator() {
public int compare(Object o1, Object o2) {
return ((Comparable) ((Map.Entry) (o1)).getValue())
.compareTo(((Map.Entry) (o2)).getValue());
}
});
List speciesToPrune = new LinkedList();
for (Iterator it = prunableSpeciesList.iterator(); it.hasNext();) {
Map.Entry entry = (Map.Entry)it.next();
Species spe = (Species)entry.getKey();
double maxmaxRatio = (Double)entry.getValue();
if (maxmaxRatio < edgeTol)
{
System.out.println("Edge species "+spe.getChemkinName() +" has a maximum flux ratio ("+maxmaxRatio+") lower than edge inclusion threshhold and will be pruned.");
speciesToPrune.add(spe);
}
else if ( numberToBePruned - speciesToPrune.size() > 0 ) {
System.out.println("Edge species "+spe.getChemkinName() +" has a low maximum flux ratio ("+maxmaxRatio+") and will be pruned to reduce the edge size to the maximum ("+maxEdgeSpeciesAfterPruning+").");
speciesToPrune.add(spe);
}
else break; // no more to be pruned
}
//now, speciesToPrune has been filled with species that should be pruned from the edge
System.out.println("Pruning...");
//prune species from the edge
//remove species from the edge and from the species dictionary and from edgeID
iter = speciesToPrune.iterator();
while(iter.hasNext()){
Species spe = (Species)iter.next();
writePrunedEdgeSpecies(spe);
((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().remove(spe);
//SpeciesDictionary.getInstance().getSpeciesSet().remove(spe);
SpeciesDictionary.getInstance().remove(spe);
JDAS.edgeID.remove(spe);
}
//remove reactions from the edge involving pruned species
iter = ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().iterator();
HashSet toRemove = new HashSet();
while(iter.hasNext()){
Reaction reaction = (Reaction)iter.next();
if (reactionPrunableQ(reaction, speciesToPrune)) toRemove.add(reaction);
}
iter = toRemove.iterator();
while(iter.hasNext()){
Reaction reaction = (Reaction)iter.next();
writePrunedEdgeReaction(reaction);
ReactionTemplate rt = reaction.getReactionTemplate();
reaction.setReactionTemplate(null);//remove from ReactionTemplate's reactionDictionaryByStructure
reaction.setReverseReaction(null);
rt.removeFromReactionDictionaryByStructure(reaction.getStructure());//remove from ReactionTemplate's reactionDictionaryByStructure
reaction.getStructure().clearProducts();
reaction.getStructure().clearReactants();
((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().remove(reaction);
}
//remove reactions from PDepNetworks in PDep cases
if (reactionModelEnlarger instanceof RateBasedPDepRME) {
iter = PDepNetwork.getNetworks().iterator();
HashSet pdnToRemove = new HashSet();
HashSet toRemovePath;
HashSet toRemoveNet;
HashSet toRemoveNonincluded;
HashSet toRemoveIsomer;
while (iter.hasNext()){
PDepNetwork pdn = (PDepNetwork)iter.next();
//identify path reactions to remove
Iterator rIter = pdn.getPathReactions().iterator();
toRemovePath = new HashSet();
while(rIter.hasNext()){
Reaction reaction = (Reaction)rIter.next();
if (reactionPrunableQ(reaction, speciesToPrune)) toRemovePath.add(reaction);
}
//identify net reactions to remove
rIter = pdn.getNetReactions().iterator();
toRemoveNet = new HashSet();
while(rIter.hasNext()){
Reaction reaction = (Reaction)rIter.next();
if (reactionPrunableQ(reaction, speciesToPrune)) toRemoveNet.add(reaction);
}
//identify nonincluded reactions to remove
rIter = pdn.getNonincludedReactions().iterator();
toRemoveNonincluded = new HashSet();
while(rIter.hasNext()){
Reaction reaction = (Reaction)rIter.next();
if (reactionPrunableQ(reaction, speciesToPrune)) toRemoveNonincluded.add(reaction);
}
//identify isomers to remove
Iterator iIter = pdn.getIsomers().iterator();
toRemoveIsomer = new HashSet();
while(iIter.hasNext()){
PDepIsomer pdi = (PDepIsomer)iIter.next();
Iterator isIter = pdi.getSpeciesListIterator();
while(isIter.hasNext()){
Species spe = (Species)isIter.next();
                        if (speciesToPrune.contains(spe)&&!toRemoveIsomer.contains(pdi)) toRemoveIsomer.add(pdi);
                    }
                    if(pdi.getSpeciesList().size()==0 && !toRemoveIsomer.contains(pdi)) toRemoveIsomer.add(pdi);//if the pdi doesn't contain any species, schedule it for removal
}
//remove path reactions
Iterator iterRem = toRemovePath.iterator();
while(iterRem.hasNext()){
Reaction reaction = (Reaction)iterRem.next();
ReactionTemplate rt = reaction.getReactionTemplate();
reaction.setReactionTemplate(null);//remove from ReactionTemplate's reactionDictionaryByStructure
if(reaction.getStructure() != null){
if(rt!=null){
rt.removeFromReactionDictionaryByStructure(reaction.getStructure());//remove from ReactionTemplate's reactionDictionaryByStructure
}
reaction.getStructure().clearProducts();
reaction.getStructure().clearReactants();
}
pdn.removeFromPathReactionList((PDepReaction)reaction);
}
//remove net reactions
iterRem = toRemoveNet.iterator();
while(iterRem.hasNext()){
Reaction reaction = (Reaction)iterRem.next();
ReactionTemplate rt = reaction.getReactionTemplate();
reaction.setReactionTemplate(null);//remove from ReactionTemplate's reactionDictionaryByStructure
if(reaction.getStructure() != null){
if(rt!=null){
rt.removeFromReactionDictionaryByStructure(reaction.getStructure());//remove from ReactionTemplate's reactionDictionaryByStructure
}
//reaction.getStructure().clearProducts();
//reaction.getStructure().clearReactants();
}
pdn.removeFromNetReactionList((PDepReaction)reaction);
}
//remove nonincluded reactions
iterRem = toRemoveNonincluded.iterator();
while(iterRem.hasNext()){
Reaction reaction = (Reaction)iterRem.next();
ReactionTemplate rt = reaction.getReactionTemplate();
reaction.setReactionTemplate(null);//remove from ReactionTemplate's reactionDictionaryByStructure
if(reaction.getStructure() != null){
if(rt!=null){
rt.removeFromReactionDictionaryByStructure(reaction.getStructure());//remove from ReactionTemplate's reactionDictionaryByStructure
}
// reaction.getStructure().clearProducts();
// reaction.getStructure().clearReactants();
}
pdn.removeFromNonincludedReactionList((PDepReaction)reaction);
}
//remove isomers
iterRem = toRemoveIsomer.iterator();
while(iterRem.hasNext()){
PDepIsomer pdi = (PDepIsomer)iterRem.next();
pdn.removeFromIsomerList(pdi);
}
//remove the entire network if the network has no path or net reactions
if(pdn.getPathReactions().size()==0&&pdn.getNetReactions().size()==0) pdnToRemove.add(pdn);
}
iter = pdnToRemove.iterator();
while (iter.hasNext()){
PDepNetwork pdn = (PDepNetwork)iter.next();
PDepNetwork.getNetworks().remove(pdn);
}
}
}
return;
}
    //determines whether a reaction can be removed; cf. categorizeReaction() in CoreEdgeReactionModel
    //returns true if the reaction involves reactants or products that are in p_prunableSpecies; otherwise returns false
public boolean reactionPrunableQ(Reaction p_reaction, Collection p_prunableSpecies){
Iterator iter = p_reaction.getReactants();
while (iter.hasNext()) {
Species spe = (Species)iter.next();
if (p_prunableSpecies.contains(spe))
return true;
}
iter = p_reaction.getProducts();
while (iter.hasNext()) {
Species spe = (Species)iter.next();
if (p_prunableSpecies.contains(spe))
return true;
}
return false;
}
public boolean hasPrimaryReactionLibrary() {
if (primaryReactionLibrary == null) return false;
return (primaryReactionLibrary.size() > 0);
}
public boolean hasSeedMechanisms() {
if (getSeedMechanism() == null) return false;
return (seedMechanism.size() > 0);
}
//9/25/07 gmagoon: moved from ReactionSystem.java
public PrimaryReactionLibrary getPrimaryReactionLibrary() {
return primaryReactionLibrary;
}
//9/25/07 gmagoon: moved from ReactionSystem.java
public void setPrimaryReactionLibrary(PrimaryReactionLibrary p_PrimaryReactionLibrary) {
primaryReactionLibrary = p_PrimaryReactionLibrary;
}
//10/4/07 gmagoon: added
public LinkedHashSet getSpeciesSeed() {
return speciesSeed;
}
//10/4/07 gmagoon: added
public void setSpeciesSeed(LinkedHashSet p_speciesSeed) {
speciesSeed = p_speciesSeed;
}
//10/4/07 gmagoon: added
public LibraryReactionGenerator getLibraryReactionGenerator() {
return lrg;
}
//10/4/07 gmagoon: added
public void setLibraryReactionGenerator(LibraryReactionGenerator p_lrg) {
lrg = p_lrg;
}
public static Temperature getTemp4BestKinetics() {
return temp4BestKinetics;
}
public static void setTemp4BestKinetics(Temperature firstSysTemp) {
temp4BestKinetics = firstSysTemp;
}
public SeedMechanism getSeedMechanism() {
return seedMechanism;
}
public void setSeedMechanism(SeedMechanism p_seedMechanism) {
seedMechanism = p_seedMechanism;
}
public PrimaryThermoLibrary getPrimaryThermoLibrary() {
return primaryThermoLibrary;
}
public void setPrimaryThermoLibrary(PrimaryThermoLibrary p_primaryThermoLibrary) {
primaryThermoLibrary = p_primaryThermoLibrary;
}
public static double getAtol(){
return atol;
}
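    /**
     * Determines whether the current run may be killed to prevent an infinite loop:
     * returns true if there are no intermediate steps, or if the iteration number
     * (minus one) exceeds the number of specified time steps (time-based termination)
     * or the number of conversions; otherwise returns false.
     */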
public boolean runKillableToPreventInfiniteLoop(boolean intermediateSteps, int iterationNumber) {
ReactionSystem rs0 = (ReactionSystem)reactionSystemList.get(0);
if (!intermediateSteps)//if there are no intermediate steps (for example when using AUTO method), return true;
return true;
//if there are intermediate steps, the run is killable if the iteration number exceeds the number of time steps / conversions
else if (rs0.finishController.terminationTester instanceof ReactionTimeTT){
if (iterationNumber - 1 > timeStep.size()){ //-1 correction needed since when this is called, iteration number has been incremented
return true;
}
}
else //the case where intermediate conversions are specified
if (iterationNumber - 1 > numConversions){ //see above; it is possible there is an off-by-one error here, so further testing will be needed
return true;
}
return false; //return false if none of the above criteria are met
}
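    /**
     * Reads the primary reaction library section of the input file: "Name: ..." /
     * "Location: ..." line pairs terminated by "END". Each location is resolved
     * relative to the jing.rxn.ReactionLibrary.pathName system property. If no
     * libraries are listed, the primary reaction library is set to null.
     */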
public void readAndMakePRL(BufferedReader reader) throws IOException {
int Ilib = 0;
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
String[] tempString = line.split("Name: ");
String name = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("Location: ");
String location = tempString[tempString.length-1].trim();
String path = System.getProperty("jing.rxn.ReactionLibrary.pathName");
path += "/" + location;
if (Ilib==0) {
setPrimaryReactionLibrary(new PrimaryReactionLibrary(name, path));
Ilib++;
}
else {
getPrimaryReactionLibrary().appendPrimaryReactionLibrary(name, path);
Ilib++;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (Ilib==0) {
setPrimaryReactionLibrary(null);
}
else System.out.println("Primary Reaction Libraries in use: " + getPrimaryReactionLibrary().getName());
}
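    /**
     * Reads the primary thermo library section of the input file, consisting of
     * "Name: ..." / "Location: ..." line pairs terminated by "END". If no libraries
     * are listed, the primary thermo library is set to null.
     */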
public void readAndMakePTL(BufferedReader reader) {
int numPTLs = 0;
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
String[] tempString = line.split("Name: ");
String name = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("Location: ");
String path = tempString[tempString.length-1].trim();
if (numPTLs==0) {
setPrimaryThermoLibrary(new PrimaryThermoLibrary(name,path));
++numPTLs;
}
else {
getPrimaryThermoLibrary().appendPrimaryThermoLibrary(name,path);
++numPTLs;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (numPTLs == 0) setPrimaryThermoLibrary(null);
}
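    /**
     * Reads user-defined forbidden structures from the input file: each entry is a
     * functional group name followed by its graph definition (parsed by
     * ChemParser.readFGGraph), terminated by "END". Each group is registered with
     * ChemGraph as a forbidden structure.
     */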
public void readExtraForbiddenStructures(BufferedReader reader) throws IOException {
System.out.println("Reading extra forbidden structures from input file.");
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
StringTokenizer token = new StringTokenizer(line);
String fgname = token.nextToken();
Graph fgGraph = null;
try {
fgGraph = ChemParser.readFGGraph(reader);
}
catch (InvalidGraphFormatException e) {
System.out.println("Invalid functional group in "+fgname);
throw new InvalidFunctionalGroupException(fgname + ": " + e.getMessage());
}
if (fgGraph == null) throw new InvalidFunctionalGroupException(fgname);
FunctionalGroup fg = FunctionalGroup.makeForbiddenStructureFG(fgname, fgGraph);
ChemGraph.addForbiddenStructure(fg);
line = ChemParser.readMeaningfulLine(reader);
System.out.println(" Forbidden structure: "+fgname);
}
}
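    /**
     * Sets the spectroscopic data estimation mode from the input line. Accepted values
     * are "frequencygroups"/"default", "therfit"/"threefrequencymodel", and "off"/"none";
     * anything else raises an InvalidSymbolException.
     */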
public void setSpectroscopicDataMode(String line) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String sdeType = st.nextToken().toLowerCase();
if (sdeType.equals("frequencygroups") || sdeType.equals("default")) {
SpectroscopicData.mode = SpectroscopicData.Mode.FREQUENCYGROUPS;
}
else if (sdeType.equals("therfit") || sdeType.equals("threefrequencymodel")) {
SpectroscopicData.mode = SpectroscopicData.Mode.THREEFREQUENCY;
}
else if (sdeType.equals("off") || sdeType.equals("none")) {
SpectroscopicData.mode = SpectroscopicData.Mode.OFF;
}
else throw new InvalidSymbolException("condition.txt: Unknown SpectroscopicDataEstimator = " + sdeType);
}
/**
* Sets the pressure dependence options to on or off. If on, checks for
* more options and sets them as well.
* @param line The current line in the condition file; should start with "PressureDependence:"
* @param reader The reader currently being used to parse the condition file
*/
public String setPressureDependenceOptions(String line, BufferedReader reader) throws InvalidSymbolException {
// Determine pressure dependence mode
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken(); // Should be "PressureDependence:"
String pDepType = st.nextToken();
if (pDepType.toLowerCase().equals("off")) {
// No pressure dependence
reactionModelEnlarger = new RateBasedRME();
PDepNetwork.generateNetworks = false;
line = ChemParser.readMeaningfulLine(reader);
}
else if (pDepType.toLowerCase().equals("modifiedstrongcollision") ||
pDepType.toLowerCase().equals("reservoirstate") ||
pDepType.toLowerCase().equals("chemdis")) {
reactionModelEnlarger = new RateBasedPDepRME();
PDepNetwork.generateNetworks = true;
// Set pressure dependence method
if (pDepType.toLowerCase().equals("reservoirstate"))
((RateBasedPDepRME) reactionModelEnlarger).setPDepKineticsEstimator(new FastMasterEqn(FastMasterEqn.Mode.RESERVOIRSTATE));
else if (pDepType.toLowerCase().equals("modifiedstrongcollision"))
((RateBasedPDepRME) reactionModelEnlarger).setPDepKineticsEstimator(new FastMasterEqn(FastMasterEqn.Mode.STRONGCOLLISION));
//else if (pDepType.toLowerCase().equals("chemdis"))
// ((RateBasedPDepRME) reactionModelEnlarger).setPDepKineticsEstimator(new Chemdis());
else
throw new InvalidSymbolException("condition.txt: Unknown PressureDependence mode = " + pDepType);
RateBasedPDepRME pdepModelEnlarger = (RateBasedPDepRME) reactionModelEnlarger;
// Turn on spectroscopic data estimation if not already on
if (pdepModelEnlarger.getPDepKineticsEstimator() instanceof FastMasterEqn && SpectroscopicData.mode == SpectroscopicData.Mode.OFF) {
System.out.println("Warning: Spectroscopic data needed for pressure dependence; switching SpectroscopicDataEstimator to FrequencyGroups.");
SpectroscopicData.mode = SpectroscopicData.Mode.FREQUENCYGROUPS;
}
else if (pdepModelEnlarger.getPDepKineticsEstimator() instanceof Chemdis && SpectroscopicData.mode != SpectroscopicData.Mode.THREEFREQUENCY) {
System.out.println("Warning: Switching SpectroscopicDataEstimator to three-frequency model.");
SpectroscopicData.mode = SpectroscopicData.Mode.THREEFREQUENCY;
}
// Next line must be PDepKineticsModel
line = ChemParser.readMeaningfulLine(reader);
if (line.toLowerCase().startsWith("pdepkineticsmodel:")) {
st = new StringTokenizer(line);
name = st.nextToken();
String pDepKinType = st.nextToken();
if (pDepKinType.toLowerCase().equals("chebyshev")) {
PDepRateConstant.setMode(PDepRateConstant.Mode.CHEBYSHEV);
                    // Default is four basis functions in each of T and P (i.e. up to cubic order)
FastMasterEqn.setNumTBasisFuncs(4);
FastMasterEqn.setNumPBasisFuncs(4);
}
else if (pDepKinType.toLowerCase().equals("pdeparrhenius"))
PDepRateConstant.setMode(PDepRateConstant.Mode.PDEPARRHENIUS);
else if (pDepKinType.toLowerCase().equals("rate"))
PDepRateConstant.setMode(PDepRateConstant.Mode.RATE);
else
throw new InvalidSymbolException("condition.txt: Unknown PDepKineticsModel = " + pDepKinType);
// For Chebyshev polynomials, optionally specify the number of
// temperature and pressure basis functions
// Such a line would read, e.g.: "PDepKineticsModel: Chebyshev 4 4"
if (st.hasMoreTokens() && PDepRateConstant.getMode() == PDepRateConstant.Mode.CHEBYSHEV) {
try {
int numTBasisFuncs = Integer.parseInt(st.nextToken());
int numPBasisFuncs = Integer.parseInt(st.nextToken());
FastMasterEqn.setNumTBasisFuncs(numTBasisFuncs);
FastMasterEqn.setNumPBasisFuncs(numPBasisFuncs);
}
catch (NoSuchElementException e) {
throw new InvalidSymbolException("condition.txt: Missing number of pressure basis functions for Chebyshev polynomials.");
}
}
}
else
throw new InvalidSymbolException("condition.txt: Missing PDepKineticsModel after PressureDependence line.");
// Determine temperatures and pressures to use
// These can be specified automatically using TRange and PRange or
// manually using Temperatures and Pressures
Temperature[] temperatures = null;
Pressure[] pressures = null;
String Tunits = "K";
Temperature Tmin = new Temperature(300.0, "K");
Temperature Tmax = new Temperature(2000.0, "K");
int Tnumber = 8;
String Punits = "bar";
Pressure Pmin = new Pressure(0.01, "bar");
Pressure Pmax = new Pressure(100.0, "bar");
int Pnumber = 5;
// Read next line of input
line = ChemParser.readMeaningfulLine(reader);
boolean done = !(line.toLowerCase().startsWith("trange:") ||
line.toLowerCase().startsWith("prange:") ||
line.toLowerCase().startsWith("temperatures:") ||
line.toLowerCase().startsWith("pressures:"));
// Parse lines containing pressure dependence options
// Possible options are "TRange:", "PRange:", "Temperatures:", and "Pressures:"
// You must specify either TRange or Temperatures and either PRange or Pressures
// The order does not matter
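            // Example lines (units in parentheses, as parsed via ChemParser.removeBrace): "TRange: (K) 300 2000 8", "PRange: (bar) 0.01 100 5"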
while (!done) {
st = new StringTokenizer(line);
name = st.nextToken();
if (line.toLowerCase().startsWith("trange:")) {
Tunits = ChemParser.removeBrace(st.nextToken());
Tmin = new Temperature(Double.parseDouble(st.nextToken()), Tunits);
Tmax = new Temperature(Double.parseDouble(st.nextToken()), Tunits);
Tnumber = Integer.parseInt(st.nextToken());
}
else if (line.toLowerCase().startsWith("prange:")) {
Punits = ChemParser.removeBrace(st.nextToken());
Pmin = new Pressure(Double.parseDouble(st.nextToken()), Punits);
Pmax = new Pressure(Double.parseDouble(st.nextToken()), Punits);
Pnumber = Integer.parseInt(st.nextToken());
}
else if (line.toLowerCase().startsWith("temperatures:")) {
Tnumber = Integer.parseInt(st.nextToken());
Tunits = ChemParser.removeBrace(st.nextToken());
temperatures = new Temperature[Tnumber];
for (int i = 0; i < Tnumber; i++) {
temperatures[i] = new Temperature(Double.parseDouble(st.nextToken()), Tunits);
}
Tmin = temperatures[0];
Tmax = temperatures[Tnumber-1];
}
else if (line.toLowerCase().startsWith("pressures:")) {
Pnumber = Integer.parseInt(st.nextToken());
Punits = ChemParser.removeBrace(st.nextToken());
pressures = new Pressure[Pnumber];
for (int i = 0; i < Pnumber; i++) {
pressures[i] = new Pressure(Double.parseDouble(st.nextToken()), Punits);
}
Pmin = pressures[0];
Pmax = pressures[Pnumber-1];
}
// Read next line of input
line = ChemParser.readMeaningfulLine(reader);
done = !(line.toLowerCase().startsWith("trange:") ||
line.toLowerCase().startsWith("prange:") ||
line.toLowerCase().startsWith("temperatures:") ||
line.toLowerCase().startsWith("pressures:"));
}
// Set temperatures and pressures (if not already set manually)
if (temperatures == null) {
temperatures = new Temperature[Tnumber];
if (PDepRateConstant.getMode() == PDepRateConstant.Mode.CHEBYSHEV) {
// Use the Gauss-Chebyshev points
// The formula for the Gauss-Chebyshev points was taken from
// the Chemkin theory manual
for (int i = 1; i <= Tnumber; i++) {
double T = -Math.cos((2 * i - 1) * Math.PI / (2 * Tnumber));
T = 2.0 / ((1.0/Tmax.getK() - 1.0/Tmin.getK()) * T + 1.0/Tmax.getK() + 1.0/Tmin.getK());
temperatures[i-1] = new Temperature(T, "K");
}
}
else {
// Distribute equally on a 1/T basis
double slope = (1.0/Tmax.getK() - 1.0/Tmin.getK()) / (Tnumber - 1);
for (int i = 0; i < Tnumber; i++) {
double T = 1.0/(slope * i + 1.0/Tmin.getK());
temperatures[i] = new Temperature(T, "K");
}
}
}
if (pressures == null) {
pressures = new Pressure[Pnumber];
if (PDepRateConstant.getMode() == PDepRateConstant.Mode.CHEBYSHEV) {
// Use the Gauss-Chebyshev points
// The formula for the Gauss-Chebyshev points was taken from
// the Chemkin theory manual
for (int i = 1; i <= Pnumber; i++) {
double P = -Math.cos((2 * i - 1) * Math.PI / (2 * Pnumber));
P = Math.pow(10, 0.5 * ((Math.log10(Pmax.getBar()) - Math.log10(Pmin.getBar())) * P + Math.log10(Pmax.getBar()) + Math.log10(Pmin.getBar())));
pressures[i-1] = new Pressure(P, "bar");
}
}
else {
// Distribute equally on a log P basis
double slope = (Math.log10(Pmax.getBar()) - Math.log10(Pmin.getBar())) / (Pnumber - 1);
for (int i = 0; i < Pnumber; i++) {
double P = Math.pow(10, slope * i + Math.log10(Pmin.getBar()));
pressures[i] = new Pressure(P, "bar");
}
}
}
FastMasterEqn.setTemperatures(temperatures);
PDepRateConstant.setTemperatures(temperatures);
PDepRateConstant.setTMin(Tmin);
PDepRateConstant.setTMax(Tmax);
ChebyshevPolynomials.setTlow(Tmin);
ChebyshevPolynomials.setTup(Tmax);
FastMasterEqn.setPressures(pressures);
PDepRateConstant.setPressures(pressures);
PDepRateConstant.setPMin(Pmin);
PDepRateConstant.setPMax(Pmax);
ChebyshevPolynomials.setPlow(Pmin);
ChebyshevPolynomials.setPup(Pmax);
/*
* New option for input file: DecreaseGrainSize
* User now has the option to re-run fame with additional grains
* (smaller grain size) when the p-dep rate exceeds the
* high-P-limit rate.
* Default value: off
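             * Example input line: "DecreaseGrainSize: yes"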
*/
if (line.toLowerCase().startsWith("decreasegrainsize")) {
st = new StringTokenizer(line);
String tempString = st.nextToken(); // "DecreaseGrainSize:"
tempString = st.nextToken().trim().toLowerCase();
if (tempString.equals("on") || tempString.equals("yes") ||
tempString.equals("true")) {
rerunFame = true;
} else rerunFame = false;
line = ChemParser.readMeaningfulLine(reader);
}
}
else {
throw new InvalidSymbolException("condition.txt: Unknown PressureDependence = " + pDepType);
}
return line;
}
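    /**
     * Creates the temperature model from the input line. Only the "Constant" model type
     * is supported; the line supplies the units token (stripped with ChemParser.removeBrace)
     * followed by one or more temperature values, which populate tempList and also set
     * Global.lowTemperature and Global.highTemperature.
     */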
public void createTModel(String line) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String modelType = st.nextToken();
String unit = st.nextToken();
unit = ChemParser.removeBrace(unit);
if (modelType.equals("Constant")) {
tempList = new LinkedList();
//read first temperature
double t = Double.parseDouble(st.nextToken());
tempList.add(new ConstantTM(t, unit));
Temperature temp = new Temperature(t, unit);//10/29/07 gmagoon: added this line and next two lines to set Global.lowTemperature and Global.highTemperature
Global.lowTemperature = (Temperature)temp.clone();
Global.highTemperature = (Temperature)temp.clone();
//read remaining temperatures
while (st.hasMoreTokens()) {
t = Double.parseDouble(st.nextToken());
tempList.add(new ConstantTM(t, unit));
temp = new Temperature(t,unit);//10/29/07 gmagoon: added this line and next two "if" statements to set Global.lowTemperature and Global.highTemperature
if(temp.getK() < Global.lowTemperature.getK())
Global.lowTemperature = (Temperature)temp.clone();
if(temp.getK() > Global.highTemperature.getK())
Global.highTemperature = (Temperature)temp.clone();
}
}
else {
throw new InvalidSymbolException("condition.txt: Unknown TemperatureModel = " + modelType);
}
}
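    /**
     * Creates the pressure model from the input line. Only the "Constant" model type is
     * supported; the line supplies the units token (stripped with ChemParser.removeBrace)
     * followed by one or more pressure values, which populate presList and also set
     * Global.lowPressure and Global.highPressure.
     */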
public void createPModel(String line) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String modelType = st.nextToken();
String unit = st.nextToken();
unit = ChemParser.removeBrace(unit);
if (modelType.equals("Constant")) {
presList = new LinkedList();
//read first pressure
double p = Double.parseDouble(st.nextToken());
Pressure pres = new Pressure(p, unit);
Global.lowPressure = (Pressure)pres.clone();
Global.highPressure = (Pressure)pres.clone();
presList.add(new ConstantPM(p, unit));
            //read remaining pressures
while (st.hasMoreTokens()) {
p = Double.parseDouble(st.nextToken());
presList.add(new ConstantPM(p, unit));
pres = new Pressure(p, unit);
if(pres.getBar() < Global.lowPressure.getBar())
Global.lowPressure = (Pressure)pres.clone();
                if(pres.getBar() > Global.highPressure.getBar())
Global.highPressure = (Pressure)pres.clone();
}
}
else {
throw new InvalidSymbolException("condition.txt: Unknown PressureModel = " + modelType);
}
}
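    /**
     * Reads the reactive species section of the input file (terminated by "END"). Each
     * entry gives a species name (which must not begin with a digit), its concentration
     * and units (converted to mol/cm3), optional "unreactive"/"constantconcentration"
     * flags, and a chemical graph. The species are added to the species seed, and an
     * InitialStatus is created for every combination of temperature and pressure model.
     * Returns the map of species names to Species objects.
     */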
public LinkedHashMap populateInitialStatusListWithReactiveSpecies(BufferedReader reader) throws IOException {
LinkedHashMap speciesSet = new LinkedHashMap();
LinkedHashMap speciesStatus = new LinkedHashMap();
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
StringTokenizer st = new StringTokenizer(line);
String index = st.nextToken();
String name = null;
if (!index.startsWith("(")) name = index;
else name = st.nextToken();
//if (restart) name += "("+speciesnum+")";
// 24Jun2009: MRH
// Check if the species name begins with a number.
// If so, terminate the program and inform the user to choose
// a different name. This is implemented so that the chem.inp
// file generated will be valid when run in Chemkin
try {
int doesNameBeginWithNumber = Integer.parseInt(name.substring(0,1));
System.out.println("\nA species name should not begin with a number." +
" Please rename species: " + name + "\n");
System.exit(0);
} catch (NumberFormatException e) {
// We're good
}
if (!(st.hasMoreTokens())) throw new InvalidSymbolException("Couldn't find concentration of species: "+name);
String conc = st.nextToken();
double concentration = Double.parseDouble(conc);
String unit = st.nextToken();
unit = ChemParser.removeBrace(unit);
if (unit.equals("mole/l") || unit.equals("mol/l") || unit.equals("mole/liter") || unit.equals("mol/liter")) {
concentration /= 1000;
unit = "mol/cm3";
}
else if (unit.equals("mole/m3") || unit.equals("mol/m3")) {
concentration /= 1000000;
unit = "mol/cm3";
}
else if (unit.equals("molecule/cm3") || unit.equals("molecules/cm3")) {
                concentration /= 6.022e23;
                unit = "mol/cm3";
            }
else if (!unit.equals("mole/cm3") && !unit.equals("mol/cm3")) {
throw new InvalidUnitException("Species Concentration in condition.txt!");
}
//GJB to allow "unreactive" species that only follow user-defined library reactions.
// They will not react according to RMG reaction families
boolean IsReactive = true;
boolean IsConstantConcentration = false;
while (st.hasMoreTokens()) {
String reactive = st.nextToken().trim();
if (reactive.equalsIgnoreCase("unreactive"))
IsReactive = false;
if (reactive.equalsIgnoreCase("constantconcentration"))
IsConstantConcentration=true;
}
Graph g = ChemParser.readChemGraph(reader);
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
}
catch (ForbiddenStructureException e) {
System.out.println("Forbidden Structure:\n" + e.getMessage());
throw new InvalidSymbolException("A species in the input file has a forbidden structure.");
}
//System.out.println(name);
Species species = Species.make(name,cg);
species.setReactivity(IsReactive); // GJB
species.setConstantConcentration(IsConstantConcentration);
speciesSet.put(name, species);
getSpeciesSeed().add(species);
double flux = 0;
int species_type = 1; // reacted species
SpeciesStatus ss = new SpeciesStatus(species,species_type,concentration,flux);
speciesStatus.put(species, ss);
line = ChemParser.readMeaningfulLine(reader);
}
ReactionTime initial = new ReactionTime(0,"S");
//10/23/07 gmagoon: modified for handling multiple temperature, pressure conditions; note: concentration within speciesStatus (and list of conversion values) should not need to be modified for each T,P since this is done within isTPCconsistent in ReactionSystem
initialStatusList = new LinkedList();
for (Iterator iter = tempList.iterator(); iter.hasNext(); ) {
TemperatureModel tm = (TemperatureModel)iter.next();
for (Iterator iter2 = presList.iterator(); iter2.hasNext(); ){
PressureModel pm = (PressureModel)iter2.next();
// LinkedHashMap speStat = (LinkedHashMap)speciesStatus.clone();//10/31/07 gmagoon: trying creating multiple instances of speciesStatus to address issues with concentration normalization (last normalization seems to apply to all)
Set ks = speciesStatus.keySet();
LinkedHashMap speStat = new LinkedHashMap();
for (Iterator iter3 = ks.iterator(); iter3.hasNext();){//11/1/07 gmagoon: perform deep copy; (is there an easier or more elegant way to do this?)
SpeciesStatus ssCopy = (SpeciesStatus)speciesStatus.get(iter3.next());
speStat.put(ssCopy.getSpecies(),new SpeciesStatus(ssCopy.getSpecies(),ssCopy.getSpeciesType(),ssCopy.getConcentration(),ssCopy.getFlux()));
}
initialStatusList.add(new InitialStatus(speStat,tm.getTemperature(initial),pm.getPressure(initial)));
}
}
return speciesSet;
}
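    /**
     * Reads the inert gas section of the input file (terminated by "END"): each line
     * gives an inert species name, its concentration and units (converted to mol/cm3),
     * and the concentration is registered with every InitialStatus in initialStatusList.
     */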
public void populateInitialStatusListWithInertSpecies(BufferedReader reader) {
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken().trim();
String conc = st.nextToken();
double inertConc = Double.parseDouble(conc);
String unit = st.nextToken();
unit = ChemParser.removeBrace(unit);
if (unit.equals("mole/l") || unit.equals("mol/l") || unit.equals("mole/liter") || unit.equals("mol/liter")) {
inertConc /= 1000;
unit = "mol/cm3";
}
else if (unit.equals("mole/m3") || unit.equals("mol/m3")) {
inertConc /= 1000000;
unit = "mol/cm3";
}
else if (unit.equals("molecule/cm3") || unit.equals("molecules/cm3")) {
inertConc /= 6.022e23;
unit = "mol/cm3";
}
else if (!unit.equals("mole/cm3") && !unit.equals("mol/cm3")) {
throw new InvalidUnitException("Inert Gas Concentration not recognized: " + unit);
}
//SystemSnapshot.putInertGas(name,inertConc);
for(Iterator iter=initialStatusList.iterator();iter.hasNext(); ){//6/23/09 gmagoon: needed to change this to accommodate non-static inertConc
((InitialStatus)iter.next()).putInertGas(name,inertConc);
}
line = ChemParser.readMeaningfulLine(reader);
}
}
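    /**
     * Reads optional MaxCarbonNumber/MaxOxygenNumber/MaxRadicalNumber/MaxSulfurNumber/
     * MaxSiliconNumber/MaxHeavyAtom lines, overriding the corresponding ChemGraph limits,
     * and returns the next meaningful line once these optional settings have been consumed.
     */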
public String readMaxAtomTypes(String line, BufferedReader reader) {
if (line.startsWith("MaxCarbonNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxCarbonNumberPerSpecies:"
int maxCNum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxCarbonNumber(maxCNum);
System.out.println("Note: Overriding RMG-defined MAX_CARBON_NUM with user-defined value: " + maxCNum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxOxygenNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxOxygenNumberPerSpecies:"
int maxONum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxOxygenNumber(maxONum);
System.out.println("Note: Overriding RMG-defined MAX_OXYGEN_NUM with user-defined value: " + maxONum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxRadicalNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxRadicalNumberPerSpecies:"
int maxRadNum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxRadicalNumber(maxRadNum);
System.out.println("Note: Overriding RMG-defined MAX_RADICAL_NUM with user-defined value: " + maxRadNum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxSulfurNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxSulfurNumberPerSpecies:"
int maxSNum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxSulfurNumber(maxSNum);
System.out.println("Note: Overriding RMG-defined MAX_SULFUR_NUM with user-defined value: " + maxSNum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxSiliconNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxSiliconNumberPerSpecies:"
int maxSiNum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxSiliconNumber(maxSiNum);
System.out.println("Note: Overriding RMG-defined MAX_SILICON_NUM with user-defined value: " + maxSiNum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxHeavyAtom")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxHeavyAtomPerSpecies:"
int maxHANum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxHeavyAtomNumber(maxHANum);
System.out.println("Note: Overriding RMG-defined MAX_HEAVYATOM_NUM with user-defined value: " + maxHANum);
line = ChemParser.readMeaningfulLine(reader);
}
return line;
}
public ReactionModelEnlarger getReactionModelEnlarger() {
return reactionModelEnlarger;
}
public LinkedList getTempList() {
return tempList;
}
public LinkedList getPressList() {
return presList;
}
public LinkedList getInitialStatusList() {
return initialStatusList;
}
public void writeBackupRestartFiles(String[] listOfFiles) {
for (int i=0; i<listOfFiles.length; i++) {
File temporaryRestartFile = new File(listOfFiles[i]);
if (temporaryRestartFile.exists()) temporaryRestartFile.renameTo(new File(listOfFiles[i]+"~"));
}
}
public void removeBackupRestartFiles(String[] listOfFiles) {
for (int i=0; i<listOfFiles.length; i++) {
File temporaryRestartFile = new File(listOfFiles[i]+"~");
temporaryRestartFile.delete();
}
}
public static boolean rerunFameWithAdditionalGrains() {
return rerunFame;
}
public void setLimitingReactantID(int id) {
limitingReactantID = id;
}
public int getLimitingReactantID() {
return limitingReactantID;
}
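    /**
     * Reads the primary transport library section of the input file, consisting of
     * "Name: ..." / "Location: ..." line pairs terminated by "END". If no libraries are
     * listed, the primary transport library is set to null.
     */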
public void readAndMakePTransL(BufferedReader reader) {
int numPTLs = 0;
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
String[] tempString = line.split("Name: ");
String name = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("Location: ");
String path = tempString[tempString.length-1].trim();
if (numPTLs==0) {
setPrimaryTransportLibrary(new PrimaryTransportLibrary(name,path));
++numPTLs;
}
else {
getPrimaryTransportLibrary().appendPrimaryTransportLibrary(name,path);
++numPTLs;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (numPTLs == 0) setPrimaryTransportLibrary(null);
}
public PrimaryTransportLibrary getPrimaryTransportLibrary() {
return primaryTransportLibrary;
}
public void setPrimaryTransportLibrary(PrimaryTransportLibrary p_primaryTransportLibrary) {
primaryTransportLibrary = p_primaryTransportLibrary;
}
}
|
package se.chalmers.dat255.sleepfighter.utils.collect;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import se.chalmers.dat255.sleepfighter.utils.message.Message;
import se.chalmers.dat255.sleepfighter.utils.message.MessageBus;
import com.google.common.collect.ForwardingIterator;
import com.google.common.collect.ForwardingList;
import com.google.common.collect.ForwardingListIterator;
/**
 * ObservableList is a list that notifies observers of changes.<br/>
* Written originally for aTetria (github).
*
* @author Centril<twingoow@gmail.com> / Mazdak Farrokhzad.
* @version 1.0
* @since Jun 5, 2013
*/
public class ObservableList<E> extends ForwardingList<E> {
/**
* Operation enumerates the type of change operation in list that was observed.
*
* @author Centril<twingoow@gmail.com> / Mazdak Farrokhzad.
* @version 1.0
* @since Jun 5, 2013
*/
public enum Operation {
ADD, REMOVE, CLEAR, UPDATE;
public boolean isRemove() {
return this == REMOVE || this == CLEAR;
}
}
/**
* Event models events for changes in list.
*
* @author Centril<twingoow@gmail.com> / Mazdak Farrokhzad.
* @version 1.0
* @since Jun 5, 2013
*/
public class Event implements Message {
private Operation operation;
private int index;
private Collection<?> elements;
protected Event( Operation op, int index, Collection<?> elements ) {
this.operation = op;
this.index = index;
this.elements = elements;
}
/**
* The index that the {@link #operation()} was run for,<br/>
		 * or -1 if the index is unknown (e.g. adding to the end of the list).
*
* @return the index.
*/
public int index() {
return this.index;
}
/**
* The affected elements, or null.
*
* @return affected elements.
*/
public Collection<?> elements() {
return this.elements;
}
/**
* The type of change operation.
*
* @return the operation.
*/
public Operation operation() {
return this.operation;
}
/**
* The source ObservableList.
*
* @return the source.
*/
public ObservableList<E> source() {
return ObservableList.this;
}
public String toString() {
return "ObservableList.Event[operation: " + this.operation() + ", index: " + this.index() + ", element(s): " + this.elements() + "]";
}
}
private List<E> delegate;
private MessageBus<Message> bus;
@Override
protected List<E> delegate() {
return this.delegate;
}
protected void setDelegate( List<E> delegate ) {
this.delegate = delegate;
}
/**
	 * Sets the message bus; if no bus is set, no events will be published.
	 *
	 * @param bus the bus that receives events.
*/
public void setMessageBus( MessageBus<Message> bus ) {
this.bus = bus;
}
/**
* Returns the message bus, or null if not set.
*
* @return the message bus.
*/
public MessageBus<Message> getMessageBus() {
return this.bus;
}
/**
* Constructs the observable list, the delegate and bus must be set after.
*/
public ObservableList() {
}
/**
* Constructs the observable list.
*
* @param delegate the list to delegate to.
* @param bus the message bus that receives events.
*/
public ObservableList( List<E> delegate, MessageBus<Message> bus ) {
this.delegate = delegate;
this.bus = bus;
}
@Override
public boolean add( E element ) {
boolean retr = super.add( element );
this.fireEvent( new Event( Operation.ADD, -1, Collections.singleton( element ) ) );
return retr;
}
@Override
public boolean addAll( Collection<? extends E> collection ) {
boolean retr = super.addAll( collection );
this.fireEvent( new Event( Operation.ADD, -1, collection ) );
return retr;
}
@Override
public void add( int index, E element ) {
super.add( index, element );
this.fireEvent( new Event( Operation.ADD, index, Collections.singleton( element ) ) );
}
@Override
public boolean addAll( int index, Collection<? extends E> elements ) {
boolean retr = super.addAll( index, elements );
this.fireEvent( new Event( Operation.ADD, index, elements ) );
return retr;
}
@Override
public void clear() {
super.clear();
this.fireEvent( new Event( Operation.CLEAR, -1, null ) );
}
@Override
public boolean remove( Object object ) {
if ( super.remove( object ) ) {
this.fireEvent( new Event( Operation.REMOVE, -1, Collections.singleton( object ) ) );
return true;
} else {
return false;
}
}
@Override
public boolean removeAll( Collection<?> collection ) {
		if ( super.removeAll( collection ) ) {
this.fireEvent( new Event( Operation.REMOVE, -1, collection ) );
return true;
} else {
return false;
}
}
@Override
public boolean retainAll( Collection<?> collection ) {
if ( super.retainAll( collection ) ) {
this.fireEvent( new Event( Operation.REMOVE, -1, collection ) );
return true;
} else {
return false;
}
}
protected void fireEvent( Event e ) {
if ( this.bus == null ) {
return;
}
this.bus.publish( e );
}
@Override
public E remove( int index ) {
E elem = super.remove( index );
this.fireEvent( new Event( Operation.REMOVE, index, Collections.singleton( elem ) ) );
return elem;
}
@Override
public E set( int index, E element ) {
E old = super.set( index, element );
this.fireEvent( new Event( Operation.UPDATE, index, Collections.singleton( element ) ) );
return old;
}
@Override
public List<E> subList( int fromIndex, int toIndex ) {
return new ObservableList<E>( super.subList( fromIndex, toIndex ), this.bus );
}
@Override
public Iterator<E> iterator() {
return new ForwardingIterator<E>() {
private E curr;
private final Iterator<E> delegate = ObservableList.this.delegate().iterator();
@Override
protected Iterator<E> delegate() {
return this.delegate;
}
@Override
public E next() {
this.curr = super.next();
return this.curr;
}
@Override
public void remove() {
super.remove();
fireEvent( new Event( Operation.REMOVE, -1, Collections.singleton( this.curr ) ) );
}
};
}
private class ListIter extends ForwardingListIterator<E> {
private int currIndex;
private E curr;
private final ListIterator<E> delegate;
@Override
protected ListIterator<E> delegate() {
return this.delegate;
}
public ListIter( ListIterator<E> delegate ) {
this.delegate = delegate;
}
@Override
public E next() {
this.currIndex = this.nextIndex();
this.curr = super.next();
return this.curr;
}
@Override
public E previous() {
this.currIndex = this.previousIndex();
this.curr = super.previous();
return this.curr;
}
@Override
public void remove() {
super.remove();
fireEvent( new Event( Operation.REMOVE, this.currIndex, Collections.singleton( this.curr ) ) );
}
@Override
public void add( E element ) {
super.add( element );
fireEvent( new Event( Operation.ADD, this.currIndex, Collections.singleton( element ) ) );
}
@Override
public void set( E element ) {
super.set( element );
fireEvent( new Event( Operation.UPDATE, this.currIndex, Collections.singleton( element ) ) );
this.curr = element;
}
}
@Override
public ListIterator<E> listIterator() {
return new ListIter( super.listIterator() );
}
@Override
public ListIterator<E> listIterator( int index ) {
return new ListIter( super.listIterator( index ) );
}
}
|
package net.java.sip.communicator.impl.gui.main;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map;
import javax.swing.AbstractAction;
import javax.swing.DefaultListModel;
import javax.swing.ImageIcon;
import javax.swing.InputMap;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.KeyStroke;
import net.java.sip.communicator.impl.gui.main.configforms.ConfigurationFrame;
import net.java.sip.communicator.impl.gui.main.contactlist.CListKeySearchListener;
import net.java.sip.communicator.impl.gui.main.contactlist.ContactList;
import net.java.sip.communicator.impl.gui.main.contactlist.ContactListModel;
import net.java.sip.communicator.impl.gui.main.contactlist.ContactListPanel;
import net.java.sip.communicator.impl.gui.main.contactlist.ContactNode;
import net.java.sip.communicator.impl.gui.main.contactlist.MetaContactNode;
import net.java.sip.communicator.impl.gui.main.i18n.Messages;
import net.java.sip.communicator.impl.gui.main.message.ChatWindow;
import net.java.sip.communicator.impl.gui.main.utils.Constants;
import net.java.sip.communicator.impl.gui.main.utils.ImageLoader;
import net.java.sip.communicator.service.contactlist.MetaContact;
import net.java.sip.communicator.service.contactlist.MetaContactGroup;
import net.java.sip.communicator.service.contactlist.MetaContactListService;
import net.java.sip.communicator.service.protocol.Contact;
import net.java.sip.communicator.service.protocol.OperationFailedException;
import net.java.sip.communicator.service.protocol.OperationSetPersistentPresence;
import net.java.sip.communicator.service.protocol.OperationSetPresence;
import net.java.sip.communicator.service.protocol.PresenceStatus;
import net.java.sip.communicator.service.protocol.ProtocolProviderService;
import net.java.sip.communicator.service.protocol.event.ContactPresenceStatusChangeEvent;
import net.java.sip.communicator.service.protocol.event.ContactPresenceStatusListener;
import net.java.sip.communicator.service.protocol.event.ProviderPresenceStatusChangeEvent;
import net.java.sip.communicator.service.protocol.event.ProviderPresenceStatusListener;
import net.java.sip.communicator.service.protocol.icqconstants.IcqStatusEnum;
/**
* The main application frame.
*
* @author Yana Stamcheva
*/
public class MainFrame extends JFrame {
private JPanel contactListPanel = new JPanel(new BorderLayout());
private JPanel menusPanel = new JPanel(new BorderLayout());
private Menu menu = new Menu();
private ConfigurationFrame configFrame = new ConfigurationFrame();
private CallPanel callPanel;
private StatusPanel statusPanel;
private MainTabbedPane tabbedPane;
private QuickMenu quickMenu;
private Hashtable protocolSupportedOperationSets = new Hashtable();
private Hashtable protocolPresenceSets = new Hashtable();
private Dimension minimumFrameSize = new Dimension(
Constants.MAINFRAME_MIN_WIDTH, Constants.MAINFRAME_MIN_HEIGHT);
private Hashtable protocolProviders = new Hashtable();
private MetaContactListService contactList;
private ArrayList accounts = new ArrayList();
public MainFrame() {
callPanel = new CallPanel(this);
tabbedPane = new MainTabbedPane(this);
quickMenu = new QuickMenu(this);
statusPanel = new StatusPanel(this);
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
this.setInitialBounds();
this.setTitle(Messages.getString("sipCommunicator"));
this.setIconImage(ImageLoader.getImage(ImageLoader.SIP_LOGO));
this.init();
}
private void init() {
this.menusPanel.add(menu, BorderLayout.NORTH);
this.menusPanel.add(quickMenu, BorderLayout.CENTER);
this.contactListPanel.add(tabbedPane, BorderLayout.CENTER);
this.contactListPanel.add(callPanel, BorderLayout.SOUTH);
this.getContentPane().add(menusPanel, BorderLayout.NORTH);
this.getContentPane().add(contactListPanel, BorderLayout.CENTER);
this.getContentPane().add(statusPanel, BorderLayout.SOUTH);
}
private void setInitialBounds() {
this.setSize(155, 400);
this.contactListPanel.setPreferredSize(new Dimension(140, 350));
this.contactListPanel.setMinimumSize(new Dimension(80, 200));
this.setLocation(Toolkit.getDefaultToolkit().getScreenSize().width
- this.getWidth(), 50);
}
public CallPanel getCallPanel() {
return callPanel;
}
public MetaContactListService getContactList() {
return this.contactList;
}
public void setContactList(MetaContactListService contactList) {
this.contactList = contactList;
ContactListPanel clistPanel = this.tabbedPane.getContactListPanel();
clistPanel.initTree(contactList);
//add a key listener to the tabbed pane, when the contactlist is
//initialized
this.tabbedPane.addKeyListener(new CListKeySearchListener
(clistPanel.getContactList()));
}
public ConfigurationFrame getConfigFrame() {
return configFrame;
}
public void setConfigFrame(ConfigurationFrame configFrame) {
this.configFrame = configFrame;
}
public Map getSupportedOperationSets
(ProtocolProviderService protocolProvider) {
return (Map)this.protocolSupportedOperationSets.get(protocolProvider);
}
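    /**
     * Stores the operation sets supported by the given protocol provider and, for the
     * presence operation set, registers provider and contact presence listeners and
     * publishes an initial ONLINE status, updating the status panel accordingly.
     */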
public void addProtocolSupportedOperationSets
(ProtocolProviderService protocolProvider,
Map supportedOperationSets) {
this.protocolSupportedOperationSets.put(protocolProvider,
supportedOperationSets);
Iterator entrySetIter = supportedOperationSets.entrySet().iterator();
for (int i = 0; i < supportedOperationSets.size(); i++)
{
Map.Entry entry = (Map.Entry) entrySetIter.next();
Object key = entry.getKey();
Object value = entry.getValue();
if(key.equals(OperationSetPersistentPresence.class.getName())
|| key.equals(OperationSetPresence.class.getName())){
OperationSetPresence presence
= (OperationSetPresence)value;
this.protocolPresenceSets.put( protocolProvider,
presence);
presence
.addProviderPresenceStatusListener
(new ProviderPresenceStatusAdapter());
presence
.addContactPresenceStatusListener
(new ContactPresenceStatusAdapter());
try {
presence
.publishPresenceStatus(IcqStatusEnum.ONLINE, "");
this.getStatusPanel().stopConnecting(
protocolProvider.getProtocolName());
this.statusPanel.setSelectedStatus
(protocolProvider.getProtocolName(),
Constants.ONLINE_STATUS);
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalStateException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (OperationFailedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
private class ProviderPresenceStatusAdapter
implements ProviderPresenceStatusListener {
public void providerStatusChanged
(ProviderPresenceStatusChangeEvent evt) {
}
public void providerStatusMessageChanged
(PropertyChangeEvent evt) {
}
}
private class ContactPresenceStatusAdapter
implements ContactPresenceStatusListener {
public void contactPresenceStatusChanged
(ContactPresenceStatusChangeEvent evt) {
Contact sourceContact = evt.getSourceContact();
PresenceStatus newStatus = evt.getNewStatus();
MetaContact metaContact
= contactList.findMetaContactByContact(sourceContact);
if (metaContact != null){
ContactListModel model
= (ContactListModel)tabbedPane.getContactListPanel()
.getContactList().getModel();
MetaContactNode node
= model.getContactNodeByContact(metaContact);
if(node != null){
node.setStatusIcon
(new ImageIcon(Constants.getStatusIcon
(newStatus)));
node.changeProtocolContactStatus(
sourceContact.getProtocolProvider()
.getProtocolName(),
newStatus);
//Refresh the node status icon.
model.contactStatusChanged(model.indexOf(node));
}
}
}
}
public StatusPanel getStatusPanel() {
return statusPanel;
}
public Map getProtocolProviders() {
return this.protocolProviders;
}
public void addProtocolProvider(
ProtocolProviderService protocolProvider) {
this.protocolProviders.put( protocolProvider.getProtocolName(),
protocolProvider);
}
public void addAccount(Account account){
this.accounts.add(account);
}
public Account getAccount(){
return (Account)this.accounts.get(0);
}
public OperationSetPresence getProtocolPresence
(ProtocolProviderService protocolProvider) {
return (OperationSetPresence)
this.protocolPresenceSets.get(protocolProvider);
}
}
|
package com.paritytrading.parity.client;
import com.paritytrading.nassau.soupbintcp.SoupBinTCP;
import com.paritytrading.nassau.soupbintcp.SoupBinTCPClient;
import com.paritytrading.nassau.soupbintcp.SoupBinTCPClientStatusListener;
import com.paritytrading.parity.net.poe.POE;
import com.paritytrading.parity.net.poe.POEClientListener;
import com.paritytrading.parity.net.poe.POEClientParser;
import java.io.Closeable;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
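/**
 * Order entry connection for the parity client: wraps a non-blocking SoupBinTCP session,
 * serializes outbound POE messages into a shared transmit buffer, and runs a background
 * receiver thread that polls the selector, dispatches inbound messages to the supplied
 * POEClientListener, and sends keep-alives until the connection is closed.
 */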
class OrderEntry implements Closeable {
private ByteBuffer txBuffer;
private Selector selector;
private SoupBinTCPClient transport;
private volatile boolean closed;
private Object txLock;
private OrderEntry(Selector selector, SocketChannel channel, POEClientListener listener) {
this.txBuffer = ByteBuffer.allocateDirect(POE.MAX_INBOUND_MESSAGE_LENGTH);
this.selector = selector;
this.transport = new SoupBinTCPClient(channel, POE.MAX_OUTBOUND_MESSAGE_LENGTH,
new POEClientParser(listener), new StatusListener());
this.closed = false;
this.txLock = new Object();
new Thread(new Receiver()).start();
}
static OrderEntry open(InetSocketAddress address, POEClientListener listener) throws IOException {
SocketChannel channel = SocketChannel.open();
channel.connect(address);
channel.configureBlocking(false);
Selector selector = Selector.open();
channel.register(selector, SelectionKey.OP_READ);
return new OrderEntry(selector, channel, listener);
}
@Override
public void close() {
closed = true;
}
SoupBinTCPClient getTransport() {
return transport;
}
void send(POE.InboundMessage message) throws IOException {
txBuffer.clear();
message.put(txBuffer);
txBuffer.flip();
synchronized (txLock) {
transport.send(txBuffer);
}
}
private class StatusListener implements SoupBinTCPClientStatusListener {
@Override
public void heartbeatTimeout(SoupBinTCPClient session) {
close();
}
@Override
public void loginAccepted(SoupBinTCPClient session, SoupBinTCP.LoginAccepted payload) {
}
@Override
public void loginRejected(SoupBinTCPClient session, SoupBinTCP.LoginRejected payload) {
close();
}
@Override
public void endOfSession(SoupBinTCPClient session) {
}
}
private class Receiver implements Runnable {
private static final long TIMEOUT_MILLIS = 100;
@Override
public void run() {
try {
while (!closed) {
int numKeys = selector.select(TIMEOUT_MILLIS);
if (numKeys > 0) {
if (transport.receive() < 0)
break;
selector.selectedKeys().clear();
}
synchronized (txLock) {
transport.keepAlive();
}
}
} catch (IOException e) {
}
try {
transport.close();
} catch (IOException e) {
}
try {
selector.close();
} catch (IOException e) {
}
}
}
}
|
package dr.evomodel.antigenic;
import dr.evolution.util.*;
import dr.inference.model.*;
import dr.math.MathUtils;
import dr.math.distributions.NormalDistribution;
import dr.util.*;
import dr.xml.*;
import java.io.*;
import java.util.*;
import java.util.logging.Logger;
/**
* @author Andrew Rambaut
* @author Trevor Bedford
* @author Marc Suchard
* @version $Id$
*/
public class AntigenicLikelihood extends AbstractModelLikelihood implements Citable {
private static final boolean CHECK_INFINITE = false;
private static final boolean USE_THRESHOLDS = true;
private static final boolean USE_INTERVALS = true;
public final static String ANTIGENIC_LIKELIHOOD = "antigenicLikelihood";
// column indices in table
private static final int COLUMN_LABEL = 0;
private static final int SERUM_STRAIN = 2;
private static final int ROW_LABEL = 1;
private static final int VIRUS_STRAIN = 3;
private static final int SERUM_DATE = 4;
private static final int VIRUS_DATE = 5;
private static final int TITRE = 6;
public enum MeasurementType {
INTERVAL,
POINT,
THRESHOLD,
MISSING
}
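    /**
     * Builds the antigenic likelihood from a titre data table. Each row supplies a
     * column (serum) label, a row (virus) label, the corresponding strain names and
     * dates, and a titre; titres prefixed with "&lt;" are treated as threshold
     * measurements, while "&gt;" thresholds are not supported. Strains are collected
     * into a taxon list unless one is supplied, and the MDS locations and dates
     * parameters are set up from the strain list.
     */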
public AntigenicLikelihood(
int mdsDimension,
Parameter mdsPrecisionParameter,
TaxonList strainTaxa,
MatrixParameter locationsParameter,
Parameter datesParameter,
Parameter columnParameter,
Parameter rowParameter,
DataTable<String[]> dataTable,
double intervalWidth,
List<String> virusLocationStatisticList) {
super(ANTIGENIC_LIKELIHOOD);
List<String> strainNames = new ArrayList<String>();
Map<String, Double> strainDateMap = new HashMap<String, Double>();
this.intervalWidth = intervalWidth;
boolean useIntervals = USE_INTERVALS && intervalWidth > 0.0;
int thresholdCount = 0;
for (int i = 0; i < dataTable.getRowCount(); i++) {
String[] values = dataTable.getRow(i);
int column = columnLabels.indexOf(values[COLUMN_LABEL]);
if (column == -1) {
                columnLabels.add(values[COLUMN_LABEL]);
column = columnLabels.size() - 1;
}
int columnStrain = -1;
if (strainTaxa != null) {
columnStrain = strainTaxa.getTaxonIndex(values[SERUM_STRAIN]);
} else {
columnStrain = strainNames.indexOf(values[SERUM_STRAIN]);
if (columnStrain == -1) {
strainNames.add(values[SERUM_STRAIN]);
Double date = Double.parseDouble(values[SERUM_DATE]);
strainDateMap.put(values[SERUM_STRAIN], date);
columnStrain = strainNames.size() - 1;
}
}
if (columnStrain == -1) {
throw new IllegalArgumentException("Error reading data table: Unrecognized serum strain name, " + values[SERUM_STRAIN] + ", in row " + (i+1));
}
int row = rowLabels.indexOf(values[ROW_LABEL]);
if (row == -1) {
rowLabels.add(values[ROW_LABEL]);
row = rowLabels.size() - 1;
}
int rowStrain = -1;
if (strainTaxa != null) {
rowStrain = strainTaxa.getTaxonIndex(values[VIRUS_STRAIN]);
} else {
rowStrain = strainNames.indexOf(values[VIRUS_STRAIN]);
if (rowStrain == -1) {
strainNames.add(values[VIRUS_STRAIN]);
Double date = Double.parseDouble(values[VIRUS_DATE]);
strainDateMap.put(values[VIRUS_STRAIN], date);
rowStrain = strainNames.size() - 1;
}
}
if (rowStrain == -1) {
throw new IllegalArgumentException("Error reading data table: Unrecognized virus strain name, " + values[VIRUS_STRAIN] + ", in row " + (i+1));
}
boolean isThreshold = false;
double rawTitre = Double.NaN;
if (values[TITRE].length() > 0) {
try {
rawTitre = Double.parseDouble(values[TITRE]);
} catch (NumberFormatException nfe) {
// check if threshold below
if (values[TITRE].contains("<")) {
rawTitre = Double.parseDouble(values[TITRE].replace("<",""));
isThreshold = true;
thresholdCount++;
}
// check if threshold above
if (values[TITRE].contains(">")) {
throw new IllegalArgumentException("Error in measurement: unsupported greater than threshold at row " + (i+1));
}
}
}
MeasurementType type = (isThreshold ? MeasurementType.THRESHOLD : (useIntervals ? MeasurementType.INTERVAL : MeasurementType.POINT));
Measurement measurement = new Measurement(column, columnStrain, row, rowStrain, type, rawTitre);
if (USE_THRESHOLDS || !isThreshold) {
measurements.add(measurement);
}
}
double[] maxColumnTitre = new double[columnLabels.size()];
double[] maxRowTitre = new double[rowLabels.size()];
for (Measurement measurement : measurements) {
    double titre = measurement.log2Titre;
    // NOTE: this guard is a no-op as written; a NaN log2 titre (missing measurement)
    // is left unchanged and, since NaN comparisons are false, never updates the maxima below.
    if (Double.isNaN(titre)) {
        titre = measurement.log2Titre;
    }
if (titre > maxColumnTitre[measurement.column]) {
maxColumnTitre[measurement.column] = titre;
}
if (titre > maxRowTitre[measurement.row]) {
maxRowTitre[measurement.row] = titre;
}
}
if (strainTaxa != null) {
this.strains = strainTaxa;
// fill in the strain name array for local use
for (int i = 0; i < strains.getTaxonCount(); i++) {
strainNames.add(strains.getTaxon(i).getId());
}
} else {
Taxa taxa = new Taxa();
for (String strain : strainNames) {
taxa.addTaxon(new Taxon(strain));
}
this.strains = taxa;
}
this.mdsDimension = mdsDimension;
this.mdsPrecisionParameter = mdsPrecisionParameter;
addVariable(mdsPrecisionParameter);
this.locationsParameter = locationsParameter;
setupLocationsParameter(this.locationsParameter, strainNames);
addVariable(this.locationsParameter);
if (datesParameter != null) {
// this parameter is not used in this class but is set up here for use in other classes
datesParameter.setDimension(strainNames.size());
String[] labelArray = new String[strainNames.size()];
strainNames.toArray(labelArray);
datesParameter.setDimensionNames(labelArray);
for (int i = 0; i < strainNames.size(); i++) {
Double date = strainDateMap.get(strainNames.get(i));
if (date == null) {
throw new IllegalArgumentException("Date missing for strain: " + strainNames.get(i));
}
datesParameter.setParameterValue(i, date);
}
}
// If no column parameter is given, make one to hold maximum values for scaling titres...
if (columnParameter == null) {
this.columnEffectsParameter = new Parameter.Default("columnEffects");
} else {
this.columnEffectsParameter = columnParameter;
this.columnEffectsParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));
addVariable(this.columnEffectsParameter);
}
this.columnEffectsParameter.setDimension(columnLabels.size());
String[] labelArray = new String[columnLabels.size()];
columnLabels.toArray(labelArray);
this.columnEffectsParameter.setDimensionNames(labelArray);
for (int i = 0; i < maxColumnTitre.length; i++) {
this.columnEffectsParameter.setParameterValueQuietly(i, maxColumnTitre[i]);
}
// If no row parameter is given, then we will only use the column effects
this.rowEffectsParameter = rowParameter;
if (this.rowEffectsParameter != null) {
this.rowEffectsParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));
this.rowEffectsParameter.setDimension(rowLabels.size());
addVariable(this.rowEffectsParameter);
labelArray = new String[rowLabels.size()];
rowLabels.toArray(labelArray);
this.rowEffectsParameter.setDimensionNames(labelArray);
for (int i = 0; i < maxRowTitre.length; i++) {
this.rowEffectsParameter.setParameterValueQuietly(i, maxRowTitre[i]);
}
}
StringBuilder sb = new StringBuilder();
sb.append("\tAntigenicLikelihood:\n");
sb.append("\t\t" + this.strains.getTaxonCount() + " strains\n");
sb.append("\t\t" + columnLabels.size() + " unique columns\n");
sb.append("\t\t" + rowLabels.size() + " unique rows\n");
sb.append("\t\t" + measurements.size() + " assay measurements\n");
if (USE_THRESHOLDS) {
sb.append("\t\t" + thresholdCount + " thresholded measurements\n");
}
if (useIntervals) {
sb.append("\n\t\tAssuming a log 2 measurement interval width of " + intervalWidth + "\n");
}
Logger.getLogger("dr.evomodel").info(sb.toString());
// initial locations
double earliestDate = datesParameter.getParameterValue(0);
for (int i=0; i<datesParameter.getDimension(); i++) {
double date = datesParameter.getParameterValue(i);
if (earliestDate > date) {
earliestDate = date;
}
}
for (int i = 0; i < locationsParameter.getParameterCount(); i++) {
String name = strainNames.get(i);
double date = (double) strainDateMap.get(strainNames.get(i));
double diff = (date-earliestDate);
locationsParameter.getParameter(i).setParameterValueQuietly(0, diff + MathUtils.nextGaussian());
for (int j = 1; j < mdsDimension; j++) {
double r = MathUtils.nextGaussian();
locationsParameter.getParameter(i).setParameterValueQuietly(j, r);
}
}
locationChanged = new boolean[this.locationsParameter.getParameterCount()];
logLikelihoods = new double[measurements.size()];
storedLogLikelihoods = new double[measurements.size()];
makeDirty();
}
protected void setupLocationsParameter(MatrixParameter locationsParameter, List<String> strains) {
locationsParameter.setColumnDimension(mdsDimension);
locationsParameter.setRowDimension(strains.size());
for (int i = 0; i < strains.size(); i++) {
locationsParameter.getParameter(i).setId(strains.get(i));
}
}
@Override
protected void handleModelChangedEvent(Model model, Object object, int index) {
}
@Override
protected void handleVariableChangedEvent(Variable variable, int index, Variable.ChangeType type) {
if (variable == locationsParameter) {
locationChanged[index / mdsDimension] = true;
} else if (variable == mdsPrecisionParameter) {
setLocationChangedFlags(true);
} else if (variable == columnEffectsParameter) {
setLocationChangedFlags(true);
} else if (variable == rowEffectsParameter) {
setLocationChangedFlags(true);
} else {
// could be a derived class's parameter
}
likelihoodKnown = false;
}
@Override
protected void storeState() {
System.arraycopy(logLikelihoods, 0, storedLogLikelihoods, 0, logLikelihoods.length);
}
@Override
protected void restoreState() {
double[] tmp = logLikelihoods;
logLikelihoods = storedLogLikelihoods;
storedLogLikelihoods = tmp;
likelihoodKnown = false;
}
@Override
protected void acceptState() {
}
@Override
public Model getModel() {
return this;
}
@Override
public double getLogLikelihood() {
if (!likelihoodKnown) {
logLikelihood = computeLogLikelihood();
}
return logLikelihood;
}
// This method can be overridden to implement other sampling densities, e.g. discrete ranks
private double computeLogLikelihood() {
double precision = mdsPrecisionParameter.getParameterValue(0);
double sd = 1.0 / Math.sqrt(precision);
logLikelihood = 0.0;
int i = 0;
for (Measurement measurement : measurements) {
if (locationChanged[measurement.rowStrain] || locationChanged[measurement.columnStrain]) {
double distance = computeDistance(measurement.rowStrain, measurement.columnStrain);
double logNormalization = calculateTruncationNormalization(distance, sd);
// double logNormalization = 0.0;
switch (measurement.type) {
case INTERVAL: {
double minTitre = transformTitre(measurement.log2Titre, measurement.column, measurement.row, distance, sd);
double maxTitre = transformTitre(measurement.log2Titre + 1.0, measurement.column, measurement.row, distance, sd);
logLikelihoods[i] = computeMeasurementIntervalLikelihood(minTitre, maxTitre) - logNormalization;
} break;
case POINT: {
double titre = transformTitre(measurement.log2Titre, measurement.column, measurement.row, distance, sd);
logLikelihoods[i] = computeMeasurementLikelihood(titre) - logNormalization;
} break;
case THRESHOLD: {
double maxTitre = transformTitre(measurement.log2Titre, measurement.column, measurement.row, distance, sd);
logLikelihoods[i] = computeMeasurementThresholdLikelihood(maxTitre) - logNormalization;
} break;
case MISSING:
break;
}
}
logLikelihood += logLikelihoods[i];
i++;
}
likelihoodKnown = true;
setLocationChangedFlags(false);
return logLikelihood;
}
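    // Only measurements whose row or column strain location changed since the last
    // evaluation have their term recomputed above; for unchanged pairs the cached
    // value in logLikelihoods[] is simply re-summed, so the expensive distance and
    // normal-CDF work scales with the number of affected measurements per move.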
private void setLocationChangedFlags(boolean flag) {
for (int i = 0; i < locationChanged.length; i++) {
locationChanged[i] = flag;
}
}
protected double computeDistance(int rowStrain, int columnStrain) {
if (rowStrain == columnStrain) {
return 0.0;
}
Parameter X = locationsParameter.getParameter(rowStrain);
Parameter Y = locationsParameter.getParameter(columnStrain);
double sum = 0.0;
for (int i = 0; i < mdsDimension; i++) {
double difference = X.getParameterValue(i) - Y.getParameterValue(i);
sum += difference * difference;
}
return Math.sqrt(sum);
}
/**
 * Converts an observed log2 titre into a standardized residual: the map distance
 * implied by the row and column effects minus the titre, centred on the expected
 * map distance and scaled by the measurement standard deviation.
 * @param titre the observed titre, already in log2 units
 * @param column index of the serum (column) effect
 * @param row index of the virus (row) effect
 * @param mean the expected map distance
 * @param sd the measurement standard deviation
 * @return the standardized residual (t - mean) / sd
 */
private double transformTitre(double titre, int column, int row, double mean, double sd) {
double t;
double columnEffect = columnEffectsParameter.getParameterValue(column);
if (rowEffectsParameter != null) {
double rowEffect = rowEffectsParameter.getParameterValue(row);
t = ((rowEffect + columnEffect) * 0.5) - titre;
} else {
t = columnEffect - titre;
}
return (t - mean) / sd;
}
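    // Worked example for transformTitre() above (illustrative numbers only, not from
    // any data set): with a column (serum) effect of 10, a row (virus) effect of 8
    // and an observed log2 titre of 6, the implied map distance is (10 + 8) / 2 - 6 = 3;
    // with mean = 2 (the current map distance) and sd = 1 this returns (3 - 2) / 1 = 1,
    // i.e. one standard deviation above the distance implied by the current locations.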
private static double computeMeasurementIntervalLikelihood_CDF(double minTitre, double maxTitre) {
// once transformed, the minTitre will be the greater value
double cdf1 = NormalDistribution.standardCDF(minTitre, false);
double cdf2 = NormalDistribution.standardCDF(maxTitre, false);
double lnL = Math.log(cdf1 - cdf2);
if (cdf1 == cdf2) {
lnL = Math.log(cdf1);
}
if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
private static double computeMeasurementIntervalLikelihood(double minTitre, double maxTitre) {
// once transformed, the minTitre will be the greater value
double cdf1 = NormalDistribution.standardTail(minTitre, true);
double cdf2 = NormalDistribution.standardTail(maxTitre, true);
double lnL = Math.log(cdf2 - cdf1);
if (cdf1 == cdf2) {
lnL = Math.log(cdf1);
}
if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
private static double computeMeasurementLikelihood(double titre) {
double lnL = NormalDistribution.logPdf(titre, 0.0, 1.0);
if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
//    private static double computeMeasurementLowerBoundLikelihood(double transformedMinTitre) {
//        // a lower bound in non-transformed titre, so the bottom tail of the distribution
//        double cdf = NormalDistribution.standardTail(transformedMinTitre, false);
//        double lnL = Math.log(cdf);
//        if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
//            throw new RuntimeException("infinite");
//        }
//        return lnL;
//    }
private static double computeMeasurementThresholdLikelihood(double transformedMaxTitre) {
// an upper bound in non-transformed titre, so the upper tail of the distribution
// using special tail function of NormalDistribution (see main() in NormalDistribution for test)
double tail = NormalDistribution.standardTail(transformedMaxTitre, true);
double lnL = Math.log(tail);
if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
private static double calculateTruncationNormalization(double distance, double sd) {
return NormalDistribution.cdf(distance, 0.0, sd, true);
}
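    // The value returned by calculateTruncationNormalization() above is log Phi(distance / sd),
    // the log probability that a N(distance, sd) draw is positive; subtracting it in
    // computeLogLikelihood() renormalizes each measurement density for the truncation
    // of predicted distances at zero.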
@Override
public void makeDirty() {
likelihoodKnown = false;
setLocationChangedFlags(true);
}
private class Measurement {
private Measurement(final int column, final int columnStrain, final int row, final int rowStrain, final MeasurementType type, final double titre) {
this.column = column;
this.columnStrain = columnStrain;
this.row = row;
this.rowStrain = rowStrain;
this.type = type;
this.log2Titre = Math.log(titre) / Math.log(2);
}
final int column;
final int row;
final int columnStrain;
final int rowStrain;
final MeasurementType type;
final double log2Titre;
};
private final List<Measurement> measurements = new ArrayList<Measurement>();
private final List<String> columnLabels = new ArrayList<String>();
private final List<String> rowLabels = new ArrayList<String>();
private final int mdsDimension;
private final double intervalWidth;
private final Parameter mdsPrecisionParameter;
private final MatrixParameter locationsParameter;
private final TaxonList strains;
// private final CompoundParameter tipTraitParameter;
private final Parameter columnEffectsParameter;
private final Parameter rowEffectsParameter;
private double logLikelihood = 0.0;
private boolean likelihoodKnown = false;
private final boolean[] locationChanged;
private double[] logLikelihoods;
private double[] storedLogLikelihoods;
// XMLObjectParser
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public final static String FILE_NAME = "fileName";
public final static String TIP_TRAIT = "tipTrait";
public final static String LOCATIONS = "locations";
public final static String DATES = "dates";
public static final String MDS_DIMENSION = "mdsDimension";
public static final String INTERVAL_WIDTH = "intervalWidth";
public static final String MDS_PRECISION = "mdsPrecision";
public static final String COLUMN_EFFECTS = "columnEffects";
public static final String ROW_EFFECTS = "rowEffects";
public static final String STRAINS = "strains";
public String getParserName() {
return ANTIGENIC_LIKELIHOOD;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
String fileName = xo.getStringAttribute(FILE_NAME);
DataTable<String[]> assayTable;
try {
assayTable = DataTable.Text.parse(new FileReader(fileName), true, false);
} catch (IOException e) {
throw new XMLParseException("Unable to read assay data from file: " + e.getMessage());
}
System.out.println("Loaded HI table file: " + fileName);
int mdsDimension = xo.getIntegerAttribute(MDS_DIMENSION);
double intervalWidth = 0.0;
if (xo.hasAttribute(INTERVAL_WIDTH)) {
intervalWidth = xo.getDoubleAttribute(INTERVAL_WIDTH);
}
// CompoundParameter tipTraitParameter = null;
// if (xo.hasChildNamed(TIP_TRAIT)) {
// tipTraitParameter = (CompoundParameter) xo.getElementFirstChild(TIP_TRAIT);
TaxonList strains = null;
if (xo.hasChildNamed(STRAINS)) {
strains = (TaxonList) xo.getElementFirstChild(STRAINS);
}
MatrixParameter locationsParameter = (MatrixParameter) xo.getElementFirstChild(LOCATIONS);
Parameter datesParameter = null;
if (xo.hasChildNamed(DATES)) {
datesParameter = (Parameter) xo.getElementFirstChild(DATES);
}
Parameter mdsPrecision = (Parameter) xo.getElementFirstChild(MDS_PRECISION);
Parameter columnEffectsParameter = null;
if (xo.hasChildNamed(COLUMN_EFFECTS)) {
columnEffectsParameter = (Parameter) xo.getElementFirstChild(COLUMN_EFFECTS);
}
Parameter rowEffectsParameter = null;
if (xo.hasChildNamed(ROW_EFFECTS)) {
rowEffectsParameter = (Parameter) xo.getElementFirstChild(ROW_EFFECTS);
}
AntigenicLikelihood AGL = new AntigenicLikelihood(
mdsDimension,
mdsPrecision,
strains,
locationsParameter,
datesParameter,
columnEffectsParameter,
rowEffectsParameter,
assayTable,
intervalWidth,
null);
Logger.getLogger("dr.evomodel").info("Using EvolutionaryCartography model. Please cite:\n" + Utils.getCitationString(AGL));
return AGL;
}
|
package dr.evomodel.antigenic;
import dr.evolution.util.*;
import dr.inference.model.*;
import dr.math.MathUtils;
import dr.math.distributions.NormalDistribution;
import dr.util.*;
import dr.xml.*;
import java.io.*;
import java.util.*;
import java.util.logging.Logger;
/**
* @author Andrew Rambaut
* @author Trevor Bedford
* @author Marc Suchard
* @version $Id$
*/
public class AntigenicLikelihood extends AbstractModelLikelihood implements Citable {
private static final boolean CHECK_INFINITE = false;
private static final boolean USE_THRESHOLDS = true;
private static final boolean USE_INTERVALS = true;
public final static String ANTIGENIC_LIKELIHOOD = "antigenicLikelihood";
// column indices in table
private static final int COLUMN_LABEL = 0;
private static final int SERUM_STRAIN = 2;
private static final int ROW_LABEL = 1;
private static final int VIRUS_STRAIN = 3;
private static final int SERUM_DATE = 4;
private static final int VIRUS_DATE = 5;
private static final int TITRE = 6;
public enum MeasurementType {
INTERVAL,
POINT,
THRESHOLD,
MISSING
}
public AntigenicLikelihood(
int mdsDimension,
Parameter mdsPrecisionParameter,
TaxonList strainTaxa,
MatrixParameter locationsParameter,
CompoundParameter tipTraitParameter,
Parameter datesParameter,
Parameter columnParameter,
Parameter rowParameter,
DataTable<String[]> dataTable,
double intervalWidth,
List<String> virusLocationStatisticList) {
super(ANTIGENIC_LIKELIHOOD);
List<String> strainNames = new ArrayList<String>();
Map<String, Double> strainDateMap = new HashMap<String, Double>();
this.intervalWidth = intervalWidth;
boolean useIntervals = USE_INTERVALS && intervalWidth > 0.0;
int thresholdCount = 0;
for (int i = 0; i < dataTable.getRowCount(); i++) {
String[] values = dataTable.getRow(i);
int column = columnLabels.indexOf(values[COLUMN_LABEL]);
if (column == -1) {
columnLabels.add(values[COLUMN_LABEL]);
column = columnLabels.size() - 1;
}
int columnStrain = -1;
if (strainTaxa != null) {
columnStrain = strainTaxa.getTaxonIndex(values[SERUM_STRAIN]);
} else {
columnStrain = strainNames.indexOf(values[SERUM_STRAIN]);
if (columnStrain == -1) {
strainNames.add(values[SERUM_STRAIN]);
Double date = Double.parseDouble(values[SERUM_DATE]);
strainDateMap.put(values[SERUM_STRAIN], date);
columnStrain = strainNames.size() - 1;
}
}
if (columnStrain == -1) {
throw new IllegalArgumentException("Error reading data table: Unrecognized serum strain name, " + values[SERUM_STRAIN] + ", in row " + (i+1));
}
int row = rowLabels.indexOf(values[ROW_LABEL]);
if (row == -1) {
rowLabels.add(values[ROW_LABEL]);
row = rowLabels.size() - 1;
}
int rowStrain = -1;
if (strainTaxa != null) {
rowStrain = strainTaxa.getTaxonIndex(values[VIRUS_STRAIN]);
} else {
rowStrain = strainNames.indexOf(values[VIRUS_STRAIN]);
if (rowStrain == -1) {
strainNames.add(values[VIRUS_STRAIN]);
Double date = Double.parseDouble(values[VIRUS_DATE]);
strainDateMap.put(values[VIRUS_STRAIN], date);
rowStrain = strainNames.size() - 1;
}
}
if (rowStrain == -1) {
throw new IllegalArgumentException("Error reading data table: Unrecognized virus strain name, " + values[VIRUS_STRAIN] + ", in row " + (i+1));
}
boolean isThreshold = false;
double rawTitre = Double.NaN;
if (values[TITRE].length() > 0) {
try {
rawTitre = Double.parseDouble(values[TITRE]);
} catch (NumberFormatException nfe) {
// check if threshold below
if (values[TITRE].contains("<")) {
rawTitre = Double.parseDouble(values[TITRE].replace("<",""));
isThreshold = true;
thresholdCount++;
}
// check if threshold above
if (values[TITRE].contains(">")) {
throw new IllegalArgumentException("Error in measurement: unsupported greater than threshold at row " + (i+1));
}
}
}
MeasurementType type = (isThreshold ? MeasurementType.THRESHOLD : (useIntervals ? MeasurementType.INTERVAL : MeasurementType.POINT));
Measurement measurement = new Measurement(column, columnStrain, row, rowStrain, type, rawTitre);
if (USE_THRESHOLDS || !isThreshold) {
measurements.add(measurement);
}
}
double[] maxColumnTitre = new double[columnLabels.size()];
double[] maxRowTitre = new double[rowLabels.size()];
for (Measurement measurement : measurements) {
double titre = measurement.log2Titre;
if (Double.isNaN(titre)) {
titre = measurement.log2Titre;
}
if (titre > maxColumnTitre[measurement.column]) {
maxColumnTitre[measurement.column] = titre;
}
if (titre > maxRowTitre[measurement.row]) {
maxRowTitre[measurement.row] = titre;
}
}
if (strainTaxa != null) {
this.strains = strainTaxa;
// fill in the strain name array for local use
for (int i = 0; i < strains.getTaxonCount(); i++) {
strainNames.add(strains.getTaxon(i).getId());
}
} else {
Taxa taxa = new Taxa();
for (String strain : strainNames) {
taxa.addTaxon(new Taxon(strain));
}
this.strains = taxa;
}
this.mdsDimension = mdsDimension;
this.mdsPrecisionParameter = mdsPrecisionParameter;
addVariable(mdsPrecisionParameter);
this.locationsParameter = locationsParameter;
setupLocationsParameter(this.locationsParameter, strainNames);
this.tipTraitParameter = tipTraitParameter;
if (tipTraitParameter != null) {
setupTipTraitsParameter(this.tipTraitParameter, strainNames);
}
if (datesParameter != null) {
// this parameter is not used in this class but is set up here for use in other classes
setupDatesParameter(datesParameter, strainNames, strainDateMap);
}
this.columnEffectsParameter = setupColumnEffectsParameter(columnParameter, maxColumnTitre);
this.rowEffectsParameter = setupRowEffectsParameter(rowParameter, maxRowTitre);
setupInitialLocations(strainNames, strainDateMap);
StringBuilder sb = new StringBuilder();
sb.append("\tAntigenicLikelihood:\n");
sb.append("\t\t" + this.strains.getTaxonCount() + " strains\n");
sb.append("\t\t" + columnLabels.size() + " unique columns\n");
sb.append("\t\t" + rowLabels.size() + " unique rows\n");
sb.append("\t\t" + measurements.size() + " assay measurements\n");
if (USE_THRESHOLDS) {
sb.append("\t\t" + thresholdCount + " thresholded measurements\n");
}
if (useIntervals) {
sb.append("\n\t\tAssuming a log 2 measurement interval width of " + intervalWidth + "\n");
}
Logger.getLogger("dr.evomodel").info(sb.toString());
locationChanged = new boolean[this.locationsParameter.getParameterCount()];
logLikelihoods = new double[measurements.size()];
storedLogLikelihoods = new double[measurements.size()];
makeDirty();
}
private Parameter setupRowEffectsParameter(Parameter rowParameter, double[] maxRowTitre) {
// If no row parameter is given, then we will only use the column effects
if (rowParameter != null) {
rowParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));
rowParameter.setDimension(rowLabels.size());
addVariable(rowParameter);
String[] labelArray = new String[rowLabels.size()];
rowLabels.toArray(labelArray);
rowParameter.setDimensionNames(labelArray);
for (int i = 0; i < maxRowTitre.length; i++) {
rowParameter.setParameterValueQuietly(i, maxRowTitre[i]);
}
}
return rowParameter;
}
private Parameter setupColumnEffectsParameter(Parameter columnParameter, double[] maxColumnTitre) {
// If no column parameter is given, make one to hold maximum values for scaling titres...
if (columnParameter == null) {
columnParameter = new Parameter.Default("columnEffects");
} else {
columnParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));
addVariable(columnParameter);
}
columnParameter.setDimension(columnLabels.size());
String[] labelArray = new String[columnLabels.size()];
columnLabels.toArray(labelArray);
columnParameter.setDimensionNames(labelArray);
for (int i = 0; i < maxColumnTitre.length; i++) {
columnParameter.setParameterValueQuietly(i, maxColumnTitre[i]);
}
return columnParameter;
}
protected void setupLocationsParameter(MatrixParameter locationsParameter, List<String> strains) {
locationsParameter.setColumnDimension(mdsDimension);
locationsParameter.setRowDimension(strains.size());
for (int i = 0; i < strains.size(); i++) {
locationsParameter.getParameter(i).setId(strains.get(i));
}
addVariable(this.locationsParameter);
}
private void setupDatesParameter(Parameter datesParameter, List<String> strainNames, Map<String, Double> strainDateMap) {
datesParameter.setDimension(strainNames.size());
String[] labelArray = new String[strainNames.size()];
strainNames.toArray(labelArray);
datesParameter.setDimensionNames(labelArray);
for (int i = 0; i < strainNames.size(); i++) {
Double date = strainDateMap.get(strainNames.get(i));
if (date == null) {
throw new IllegalArgumentException("Date missing for strain: " + strainNames.get(i));
}
datesParameter.setParameterValue(i, date);
}
}
private void setupTipTraitsParameter(CompoundParameter tipTraitsParameter, List<String> strainNames) {
tipIndices = new int[strainNames.size()];
for (int i = 0; i < strainNames.size(); i++) {
tipIndices[i] = -1;
}
for (int i = 0; i < tipTraitsParameter.getParameterCount(); i++) {
String label = tipTraitsParameter.getParameter(i).getParameterName();
if (label.endsWith(".antigenic")) {
label = label.substring(0, label.indexOf(".antigenic"));
}
int index = strainNames.indexOf(label);
if (index != -1) {
if (tipIndices[index] != -1) {
throw new IllegalArgumentException("Duplicated tip name: " + label);
}
tipIndices[index] = i;
} else {
throw new IllegalArgumentException("Unmatched tip name in assay data: " + label);
}
}
// we are only setting this parameter not listening to it:
// addVariable(this.tipTraitParameter);
}
private void setupInitialLocations(List<String> strainNames, Map<String,Double> strainDateMap) {
double earliestDate = Double.POSITIVE_INFINITY;
for (double date : strainDateMap.values()) {
if (earliestDate > date) {
earliestDate = date;
}
}
for (int i = 0; i < locationsParameter.getParameterCount(); i++) {
double date = (double) strainDateMap.get(strainNames.get(i));
double diff = (date-earliestDate);
locationsParameter.getParameter(i).setParameterValueQuietly(0, diff + MathUtils.nextGaussian());
for (int j = 1; j < mdsDimension; j++) {
double r = MathUtils.nextGaussian();
locationsParameter.getParameter(i).setParameterValueQuietly(j, r);
}
}
}
@Override
protected void handleModelChangedEvent(Model model, Object object, int index) {
}
@Override
protected void handleVariableChangedEvent(Variable variable, int index, Variable.ChangeType type) {
if (variable == locationsParameter) {
int loc = index / mdsDimension;
locationChanged[loc] = true;
if (tipTraitParameter != null && tipIndices[loc] != -1) {
Parameter location = locationsParameter.getParameter(loc);
Parameter tip = tipTraitParameter.getParameter(tipIndices[loc]);
int dim = index % mdsDimension;
tip.setParameterValue(dim, location.getParameterValue(dim));
}
} else if (variable == mdsPrecisionParameter) {
setLocationChangedFlags(true);
} else if (variable == columnEffectsParameter) {
setLocationChangedFlags(true);
} else if (variable == rowEffectsParameter) {
setLocationChangedFlags(true);
} else {
// could be a derived class's parameter
}
likelihoodKnown = false;
}
@Override
protected void storeState() {
System.arraycopy(logLikelihoods, 0, storedLogLikelihoods, 0, logLikelihoods.length);
}
@Override
protected void restoreState() {
double[] tmp = logLikelihoods;
logLikelihoods = storedLogLikelihoods;
storedLogLikelihoods = tmp;
likelihoodKnown = false;
}
@Override
protected void acceptState() {
}
public Model getModel() {
return this;
}
public double getLogLikelihood() {
if (!likelihoodKnown) {
logLikelihood = computeLogLikelihood();
}
return logLikelihood;
}
// This method can be overridden to implement other sampling densities, e.g. discrete ranks
private double computeLogLikelihood() {
double precision = mdsPrecisionParameter.getParameterValue(0);
double sd = 1.0 / Math.sqrt(precision);
logLikelihood = 0.0;
int i = 0;
for (Measurement measurement : measurements) {
if (locationChanged[measurement.rowStrain] || locationChanged[measurement.columnStrain]) {
double mapDistance = computeDistance(measurement.rowStrain, measurement.columnStrain);
double logNormalization = calculateTruncationNormalization(mapDistance, sd);
switch (measurement.type) {
case INTERVAL: {
// once transformed the lower titre becomes the higher distance
double minHiDistance = transformTitre(measurement.log2Titre + 1.0, measurement.column, measurement.row);
double maxHiDistance = transformTitre(measurement.log2Titre, measurement.column, measurement.row);
logLikelihoods[i] = computeMeasurementIntervalLikelihood(minHiDistance, maxHiDistance, mapDistance, sd) - logNormalization;
} break;
case POINT: {
double hiDistance = transformTitre(measurement.log2Titre, measurement.column, measurement.row);
logLikelihoods[i] = computeMeasurementLikelihood(hiDistance, mapDistance, sd) - logNormalization;
} break;
case THRESHOLD: {
double hiDistance = transformTitre(measurement.log2Titre, measurement.column, measurement.row);
logLikelihoods[i] = computeMeasurementThresholdLikelihood(hiDistance, mapDistance, sd) - logNormalization;
} break;
case MISSING:
break;
}
}
logLikelihood += logLikelihoods[i];
i++;
}
likelihoodKnown = true;
setLocationChangedFlags(false);
return logLikelihood;
}
private void setLocationChangedFlags(boolean flag) {
for (int i = 0; i < locationChanged.length; i++) {
locationChanged[i] = flag;
}
}
protected double computeDistance(int rowStrain, int columnStrain) {
if (rowStrain == columnStrain) {
return 0.0;
}
Parameter X = locationsParameter.getParameter(rowStrain);
Parameter Y = locationsParameter.getParameter(columnStrain);
double sum = 0.0;
for (int i = 0; i < mdsDimension; i++) {
double difference = X.getParameterValue(i) - Y.getParameterValue(i);
sum += difference * difference;
}
return Math.sqrt(sum);
}
/**
 * Converts an observed log2 titre into the map distance implied by the row and
 * column effects; no standardization is done here.
 * @param titre the observed titre, already in log2 units
 * @param column index of the serum (column) effect
 * @param row index of the virus (row) effect
 * @return the implied map distance
 */
private double transformTitre(double titre, int column, int row) {
double t;
double columnEffect = columnEffectsParameter.getParameterValue(column);
if (rowEffectsParameter != null) {
double rowEffect = rowEffectsParameter.getParameterValue(row);
t = ((rowEffect + columnEffect) * 0.5) - titre;
} else {
t = columnEffect - titre;
}
return t;
}
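    // Worked example for transformTitre() above (illustrative numbers only, not from
    // any data set): with a column (serum) effect of 10, a row (virus) effect of 8 and
    // an observed log2 titre of 6 this returns (10 + 8) / 2 - 6 = 3, the map distance
    // implied by the measurement. Unlike the earlier version of this class,
    // standardization against the current map distance and sd now happens inside the
    // compute*Likelihood methods below, which take the mean and sd directly.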
private static double computeMeasurementIntervalLikelihood(double minDistance, double maxDistance, double mean, double sd) {
double cdf1 = NormalDistribution.cdf(minDistance, mean, sd, false);
double cdf2 = NormalDistribution.cdf(maxDistance, mean, sd, false);
double lnL = Math.log(cdf2 - cdf1);
if (cdf1 == cdf2) {
lnL = Math.log(cdf1);
}
if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
private static double computeMeasurementIntervalLikelihood_CDF(double minDistance, double maxDistance, double mean, double sd) {
double cdf1 = NormalDistribution.cdf(minDistance, mean, sd, false);
double cdf2 = NormalDistribution.cdf(maxDistance, mean, sd, false);
double lnL = Math.log(cdf1 - cdf2);
if (cdf1 == cdf2) {
lnL = Math.log(cdf1);
}
if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
private static double computeMeasurementLikelihood(double distance, double mean, double sd) {
double lnL = NormalDistribution.logPdf(distance, mean, sd);
if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
//    private static double computeMeasurementLowerBoundLikelihood(double transformedMinTitre) {
//        // a lower bound in non-transformed titre, so the bottom tail of the distribution
//        double cdf = NormalDistribution.standardTail(transformedMinTitre, false);
//        double lnL = Math.log(cdf);
//        if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
//            throw new RuntimeException("infinite");
//        }
//        return lnL;
//    }
private static double computeMeasurementThresholdLikelihood(double distance, double mean, double sd) {
// an upper bound in non-transformed titre, so the upper tail of the distribution
// using special tail function of NormalDistribution (see main() in NormalDistribution for test)
double tail = NormalDistribution.tailCDF(distance, mean, sd);
double lnL = Math.log(tail);
if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
private static double calculateTruncationNormalization(double distance, double sd) {
return NormalDistribution.cdf(distance, 0.0, sd, true);
}
public void makeDirty() {
likelihoodKnown = false;
setLocationChangedFlags(true);
}
private class Measurement {
private Measurement(final int column, final int columnStrain, final int row, final int rowStrain, final MeasurementType type, final double titre) {
this.column = column;
this.columnStrain = columnStrain;
this.row = row;
this.rowStrain = rowStrain;
this.type = type;
this.log2Titre = Math.log(titre) / Math.log(2);
}
final int column;
final int row;
final int columnStrain;
final int rowStrain;
final MeasurementType type;
final double log2Titre;
};
private final List<Measurement> measurements = new ArrayList<Measurement>();
private final List<String> columnLabels = new ArrayList<String>();
private final List<String> rowLabels = new ArrayList<String>();
private final int mdsDimension;
private final double intervalWidth;
private final Parameter mdsPrecisionParameter;
private final MatrixParameter locationsParameter;
private final CompoundParameter tipTraitParameter;
private final TaxonList strains;
private int[] tipIndices;
private final Parameter columnEffectsParameter;
private final Parameter rowEffectsParameter;
private double logLikelihood = 0.0;
private boolean likelihoodKnown = false;
private final boolean[] locationChanged;
private double[] logLikelihoods;
private double[] storedLogLikelihoods;
// XMLObjectParser
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public final static String FILE_NAME = "fileName";
public final static String TIP_TRAIT = "tipTrait";
public final static String LOCATIONS = "locations";
public final static String DATES = "dates";
public static final String MDS_DIMENSION = "mdsDimension";
public static final String INTERVAL_WIDTH = "intervalWidth";
public static final String MDS_PRECISION = "mdsPrecision";
public static final String COLUMN_EFFECTS = "columnEffects";
public static final String ROW_EFFECTS = "rowEffects";
public static final String STRAINS = "strains";
public String getParserName() {
return ANTIGENIC_LIKELIHOOD;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
String fileName = xo.getStringAttribute(FILE_NAME);
DataTable<String[]> assayTable;
try {
assayTable = DataTable.Text.parse(new FileReader(fileName), true, false);
} catch (IOException e) {
throw new XMLParseException("Unable to read assay data from file: " + e.getMessage());
}
System.out.println("Loaded HI table file: " + fileName);
int mdsDimension = xo.getIntegerAttribute(MDS_DIMENSION);
double intervalWidth = 0.0;
if (xo.hasAttribute(INTERVAL_WIDTH)) {
intervalWidth = xo.getDoubleAttribute(INTERVAL_WIDTH);
}
CompoundParameter tipTraitParameter = null;
if (xo.hasChildNamed(TIP_TRAIT)) {
tipTraitParameter = (CompoundParameter) xo.getElementFirstChild(TIP_TRAIT);
}
TaxonList strains = null;
if (xo.hasChildNamed(STRAINS)) {
strains = (TaxonList) xo.getElementFirstChild(STRAINS);
}
MatrixParameter locationsParameter = (MatrixParameter) xo.getElementFirstChild(LOCATIONS);
Parameter datesParameter = null;
if (xo.hasChildNamed(DATES)) {
datesParameter = (Parameter) xo.getElementFirstChild(DATES);
}
Parameter mdsPrecision = (Parameter) xo.getElementFirstChild(MDS_PRECISION);
Parameter columnEffectsParameter = null;
if (xo.hasChildNamed(COLUMN_EFFECTS)) {
columnEffectsParameter = (Parameter) xo.getElementFirstChild(COLUMN_EFFECTS);
}
Parameter rowEffectsParameter = null;
if (xo.hasChildNamed(ROW_EFFECTS)) {
rowEffectsParameter = (Parameter) xo.getElementFirstChild(ROW_EFFECTS);
}
AntigenicLikelihood AGL = new AntigenicLikelihood(
mdsDimension,
mdsPrecision,
strains,
locationsParameter,
tipTraitParameter,
datesParameter,
columnEffectsParameter,
rowEffectsParameter,
assayTable,
intervalWidth,
null);
Logger.getLogger("dr.evomodel").info("Using EvolutionaryCartography model. Please cite:\n" + Utils.getCitationString(AGL));
return AGL;
}
|
package dr.evomodel.antigenic;
import dr.inference.model.*;
import dr.util.Author;
import dr.util.Citable;
import dr.util.Citation;
import dr.xml.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author Andrew Rambaut
* @author Trevor Bedford
* @author Marc Suchard
* @version $Id$
*/
public class AntigenicSplitPrior extends AbstractModelLikelihood implements Citable {
public final static String ANTIGENIC_SPLIT_PRIOR = "antigenicSplitPrior";
public AntigenicSplitPrior(
MatrixParameter locationsParameter,
Parameter datesParameter,
Parameter regressionSlopeParameter,
Parameter regressionPrecisionParameter,
Parameter splitTimeParameter,
Parameter splitAngleParameter,
Parameter splitProportionParameter
) {
super(ANTIGENIC_SPLIT_PRIOR);
this.locationsParameter = locationsParameter;
addVariable(this.locationsParameter);
this.datesParameter = datesParameter;
addVariable(this.datesParameter);
dimension = locationsParameter.getParameter(0).getDimension();
count = locationsParameter.getParameterCount();
this.regressionSlopeParameter = regressionSlopeParameter;
addVariable(regressionSlopeParameter);
regressionSlopeParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));
this.regressionPrecisionParameter = regressionPrecisionParameter;
addVariable(regressionPrecisionParameter);
regressionPrecisionParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));
this.splitTimeParameter = splitTimeParameter;
addVariable(splitTimeParameter);
splitTimeParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1));
this.splitAngleParameter = splitAngleParameter;
addVariable(splitAngleParameter);
splitAngleParameter.addBounds(new Parameter.DefaultBounds(0.5*Math.PI, 0.0, 1));
this.splitProportionParameter = splitProportionParameter;
addVariable(splitProportionParameter);
splitProportionParameter.addBounds(new Parameter.DefaultBounds(1.0, 0.0, 1));
likelihoodKnown = false;
earliestDate = datesParameter.getParameterValue(0);
for (int i=0; i<count; i++) {
double date = datesParameter.getParameterValue(i);
if (earliestDate > date) {
earliestDate = date;
}
}
}
@Override
protected void handleModelChangedEvent(Model model, Object object, int index) {
}
@Override
protected void handleVariableChangedEvent(Variable variable, int index, Variable.ChangeType type) {
if (variable == locationsParameter || variable == datesParameter
|| variable == regressionSlopeParameter || variable == regressionPrecisionParameter
|| variable == splitTimeParameter || variable == splitAngleParameter
|| variable == splitProportionParameter) {
likelihoodKnown = false;
}
}
@Override
protected void storeState() {
storedLogLikelihood = logLikelihood;
}
@Override
protected void restoreState() {
logLikelihood = storedLogLikelihood;
likelihoodKnown = false;
}
@Override
protected void acceptState() {
}
@Override
public Model getModel() {
return this;
}
@Override
public double getLogLikelihood() {
if (!likelihoodKnown) {
logLikelihood = computeLogLikelihood();
}
return logLikelihood;
}
private double computeLogLikelihood() {
double precision = regressionPrecisionParameter.getParameterValue(0);
double logLikelihood = (0.5 * Math.log(precision) * count) - (0.5 * precision * sumOfSquaredResiduals());
likelihoodKnown = true;
return logLikelihood;
}
// go through each location and compute sum of squared residuals from regression line
protected double sumOfSquaredResiduals() {
double ssr = 0.0;
for (int i=0; i < count; i++) {
Parameter loc = locationsParameter.getParameter(i);
double date = datesParameter.getParameterValue(i);
double beta = regressionSlopeParameter.getParameterValue(0);
double x = loc.getParameterValue(0);
double y = (date-earliestDate) * beta;
ssr += (x - y) * (x - y);
for (int j=1; j < dimension; j++) {
x = loc.getParameterValue(j);
ssr += x*x;
}
}
return ssr;
}
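    // computeLogLikelihood() above evaluates
    //     log L = (count / 2) * log(precision) - (precision / 2) * SSR,
    // where the SSR computed here treats the first coordinate of each location as a
    // residual about the drift line beta * (date - earliestDate) and every remaining
    // coordinate as a residual about zero; normalizing constants of the underlying
    // normal density are omitted.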
protected double computeDistance(int rowStrain, int columnStrain) {
if (rowStrain == columnStrain) {
return 0.0;
}
Parameter X = locationsParameter.getParameter(rowStrain);
Parameter Y = locationsParameter.getParameter(columnStrain);
double sum = 0.0;
for (int i = 0; i < dimension; i++) {
double difference = X.getParameterValue(i) - Y.getParameterValue(i);
sum += difference * difference;
}
return Math.sqrt(sum);
}
@Override
public void makeDirty() {
likelihoodKnown = false;
}
private final int dimension;
private final int count;
private final Parameter datesParameter;
private final MatrixParameter locationsParameter;
private final Parameter regressionSlopeParameter;
private final Parameter regressionPrecisionParameter;
private final Parameter splitTimeParameter;
private final Parameter splitAngleParameter;
private final Parameter splitProportionParameter;
private double earliestDate;
private double logLikelihood = 0.0;
private double storedLogLikelihood = 0.0;
private boolean likelihoodKnown = false;
// XMLObjectParser
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public final static String LOCATIONS = "locations";
public final static String DATES = "dates";
public final static String REGRESSIONSLOPE = "regressionSlope";
public final static String REGRESSIONPRECISION = "regressionPrecision";
public final static String SPLITTIME = "splitTime";
public final static String SPLITANGLE = "splitAngle";
public final static String SPLITPROPORTION = "splitProportion";
public String getParserName() {
return ANTIGENIC_SPLIT_PRIOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
MatrixParameter locationsParameter = (MatrixParameter) xo.getElementFirstChild(LOCATIONS);
Parameter datesParameter = (Parameter) xo.getElementFirstChild(DATES);
Parameter regressionSlopeParameter = (Parameter) xo.getElementFirstChild(REGRESSIONSLOPE);
Parameter regressionPrecisionParameter = (Parameter) xo.getElementFirstChild(REGRESSIONPRECISION);
Parameter splitTimeParameter = (Parameter) xo.getElementFirstChild(SPLITTIME);
Parameter splitAngleParameter = (Parameter) xo.getElementFirstChild(SPLITANGLE);
Parameter splitProportionParameter = (Parameter) xo.getElementFirstChild(SPLITPROPORTION);
AntigenicSplitPrior AGDP = new AntigenicSplitPrior(
locationsParameter,
datesParameter,
regressionSlopeParameter,
regressionPrecisionParameter,
splitTimeParameter,
splitAngleParameter,
splitProportionParameter);
// Logger.getLogger("dr.evomodel").info("Using EvolutionaryCartography model. Please cite:\n" + Utils.getCitationString(AGL));
return AGDP;
}
|
package edu.cuny.qc.speech.AuToBI;
import edu.cuny.qc.speech.AuToBI.classifier.WekaClassifier;
import edu.cuny.qc.speech.AuToBI.core.*;
import edu.cuny.qc.speech.AuToBI.featureextractor.FeatureExtractorException;
import edu.cuny.qc.speech.AuToBI.featureset.*;
import edu.cuny.qc.speech.AuToBI.io.*;
import edu.cuny.qc.speech.AuToBI.util.AuToBIReaderUtils;
import edu.cuny.qc.speech.AuToBI.util.AuToBIUtils;
import edu.cuny.qc.speech.AuToBI.util.ClassifierUtils;
import weka.classifiers.functions.Logistic;
import javax.sound.sampled.UnsupportedAudioFileException;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.*;
public class AuToBITrainTest {
public static void main(String[] args) {
AuToBI autobi = new AuToBI();
autobi.init(args);
try {
List<FormattedFile> training_files = AuToBIReaderUtils.globFormattedFiles(autobi.getParameter("training_files"));
List<FormattedFile> testing_files = AuToBIReaderUtils.globFormattedFiles(autobi.getParameter("testing_files"));
String task = autobi.getParameter("task");
String model_file = autobi.getParameter("model_file");
// Added here to comply with the new feature registration changes to FeatureSetPropagator
autobi.registerAllFeatureExtractors();
autobi.registerNullFeatureExtractor("speaker_id");
// This line tells autobi to ignore any deaccented words.
autobi.getParameters().setParameter("attribute_omit", "nominal_PitchAccentType:NOACCENT");
// implement getFeatureSet() so it returns an appropriate feature
// set.
FeatureSet training_fs = getFeatureSet(task);
autobi.propagateFeatureSet(training_files, training_fs);
FeatureSet testing_fs = getFeatureSet(task);
autobi.propagateFeatureSet(testing_files, testing_fs);
// any other classifier can be slotted in here.
WekaClassifier classifier = new WekaClassifier(new Logistic());
classifier.train(training_fs);
// writing model file
AuToBIUtils.log("writing model to: " + model_file);
FileOutputStream fos;
ObjectOutputStream out;
try {
fos = new FileOutputStream(model_file);
out = new ObjectOutputStream(fos);
out.writeObject(classifier);
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
// prediction on test set
ClassifierUtils.generatePredictions(classifier, "hyp", "DEFAULT", testing_fs);
EvaluationResults er = ClassifierUtils.generateEvaluationResults("hyp", testing_fs.getClassAttribute(), testing_fs);
EvaluationSummary es = new EvaluationSummary(er);
System.out.print("Test Results on test set\n" + es.toString());
AuToBIUtils.log("Test Results on test set\n" + es.toString());
} catch (FeatureExtractorException e) {
e.printStackTrace();
}
catch (AuToBIException e) {
e.printStackTrace();
} catch (UnsupportedAudioFileException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Maps a task name to the corresponding AuToBI feature set.
 *
 * @param task the task identifier, e.g. "pitch_accent_detection"
 * @return the feature set for the given task
 * @throws AuToBIException if no feature set is defined for the task
 */
private static FeatureSet getFeatureSet(String task) throws AuToBIException {
if (task.equals("pitch_accent_detection"))
return new PitchAccentDetectionFeatureSet();
if (task.equals("pitch_accent_classification"))
return new PitchAccentClassificationFeatureSet();
if (task.equals("intonational_phrase_boundary_detection"))
return new IntonationalPhraseBoundaryDetectionFeatureSet();
if (task.equals("intermediate_phrase_boundary_detection"))
return new IntermediatePhraseBoundaryDetectionFeatureSet();
if (task.equals("boundary_tone_classification"))
return new PhraseAccentBoundaryToneClassificationFeatureSet();
if (task.equals("phrase_accent_classification"))
return new PhraseAccentClassificationFeatureSet();
throw new AuToBIException("No defined feature set for task: " + task);
}
}
|
package edu.oldenburg.it.bluemoep;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.net.ProtocolException;
import oauth.signpost.OAuthConsumer;
import oauth.signpost.basic.DefaultOAuthConsumer;
import oauth.signpost.exception.OAuthCommunicationException;
import oauth.signpost.exception.OAuthExpectationFailedException;
import oauth.signpost.exception.OAuthMessageSignerException;
import oauth.signpost.http.HttpRequest;
@WebServlet("/MessageReceiver")
public class MessageReceiver extends HttpServlet {
private static final long serialVersionUID = 3383004164508555005L;
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String message = request.getParameter("message");
String lat = request.getParameter("lat");
String lng = request.getParameter("lng");
System.out.println("Message: " + message);
System.out.println("Lat: " + lat);
System.out.println("Lng: " + lng);
String encoded = "status=" + URLEncoder.encode(message, "UTF-8")+"&lat="+URLEncoder.encode(lat, "UTF-8")+"&long="+URLEncoder.encode(lng, "UTF-8");
System.out.println(encoded);
// Authentication
String ConsumerKey = "DHo3qG8QHm0kFqmLTu2p7sHiv" ;
String ConsumerSecret = "xSEzKKryxE3ENwEWlCMgkP5oTV73fPW90INS5TbaB833vUc713";
String AccessToken = "2558944136-b8AhCt9CLr2jWk6u6mZo19C3QRg7Ht64P2Fom0A";
String AccessTokenSecret = "Ro9CHzMpywB8cmj0ZJTFukVypHRqanFBF4pZdhIAK58VT";
//HttpURLConnection connection = null;
OutputStreamWriter wr = null;
BufferedReader rd = null;
StringBuilder sb = null;
String line = null;
// create a consumer object and configure it with the access
// token and token secret obtained from the service provider
OAuthConsumer consumer = new DefaultOAuthConsumer(ConsumerKey, ConsumerSecret);
consumer.setTokenWithSecret(AccessToken, AccessTokenSecret);
// create an HTTP request to a protected resource
URL url = new URL("https://api.twitter.com/1.1/statuses/update.json");
HttpURLConnection httpUrlConn = (HttpURLConnection) url.openConnection();
try {
consumer.sign(httpUrlConn);
} catch (OAuthMessageSignerException e1) {
    System.out.println("MessageSignerException");
    e1.printStackTrace();
} catch (OAuthExpectationFailedException e1) {
    System.out.println("ExpectationFailedException");
    e1.printStackTrace();
} catch (OAuthCommunicationException e1) {
    System.out.println("CommunicationException");
    e1.printStackTrace();
}
// send the request
httpUrlConn.setDoOutput(true);
// httpUrlConn.setDoInput(true);
httpUrlConn.connect();
// ServerMessage sending
try {
//get the output stream writer and write the output to the server
wr = new OutputStreamWriter(httpUrlConn.getOutputStream(), "UTF-8");
wr.write(encoded);
wr.flush();
//read the result from the server
rd = new BufferedReader(new InputStreamReader(httpUrlConn.getInputStream(), "UTF-8"));
sb = new StringBuilder();
while ((line = rd.readLine()) != null)
{
sb.append(line + '\n');
}
System.out.println(sb.toString());
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (ProtocolException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
finally
{
//close the connection, set all objects to null
httpUrlConn.disconnect();
rd = null;
sb = null;
wr = null;
//connection = null;
}
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
}
}
|
package edu.umn.cs.spatial.mapReduce;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.spatial.GridInfo;
import org.apache.hadoop.util.LineReader;
import edu.umn.edu.spatial.Point;
import edu.umn.edu.spatial.PointWithDistance;
import edu.umn.edu.spatial.PointWithK;
import edu.umn.edu.spatial.Rectangle;
/**
 * This performs a k-nearest-neighbor (KNN) query map reduce job with text file input.
 * @author aseldawy
 *
 */
public class KNNMapReduce {
public static final Log LOG = LogFactory.getLog(KNNMapReduce.class);
public static class Map extends MapReduceBase
implements
Mapper<PointWithK, Point, PointWithK, PointWithDistance> {
public void map(
PointWithK queryPoint,
Point inputPoint,
OutputCollector<PointWithK, PointWithDistance> output,
Reporter reporter) throws IOException {
output.collect(queryPoint, new PointWithDistance(inputPoint, inputPoint.distanceTo(queryPoint)));
}
}
public static class Reduce extends MapReduceBase implements
Reducer<PointWithK, PointWithDistance, Point, PointWithDistance> {
@Override
public void reduce(PointWithK key, Iterator<PointWithDistance> values,
OutputCollector<Point, PointWithDistance> output, Reporter reporter)
throws IOException {
PointWithDistance[] knn = new PointWithDistance[key.k];
int neighborsFound = 0;
int maxi = 0;
while (values.hasNext()) {
PointWithDistance p = values.next();
if (neighborsFound < knn.length) {
// Append to list if found less than required neighbors
knn[neighborsFound] = (PointWithDistance) p.clone();
// Update point with maximum index if required
if (p.getDistance() > knn[maxi].getDistance())
maxi = neighborsFound;
// Increment total neighbors found
neighborsFound++;
} else {
// Check whether the new point is closer than the current farthest neighbor
if (p.getDistance() < knn[maxi].getDistance())
knn[maxi] = (PointWithDistance) p.clone();
// Update point with maximum index
for (int i = 0; i < knn.length;i++) {
if (knn[i].getDistance() > knn[maxi].getDistance())
maxi = i;
}
}
}
for (int i = 0; i < neighborsFound; i++) {
output.collect(key, knn[i]);
}
}
}
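    /*
     * Illustrative sketch only (not part of the original job): the bounded selection
     * idea used in Reduce.reduce() above, restated with plain doubles so it does not
     * depend on the PointWithDistance API. It keeps the k smallest distances by
     * tracking the index of the current worst (largest) kept entry, exactly as the
     * reducer does with knn[] and maxi.
     */
    private static double[] kSmallestSketch(Iterator<Double> distances, int k) {
        double[] best = new double[k];
        int found = 0;
        int worst = 0; // index of the largest distance kept so far
        while (distances.hasNext()) {
            double d = distances.next();
            if (found < k) {
                best[found] = d;
                if (d > best[worst])
                    worst = found;
                found++;
            } else if (d < best[worst]) {
                best[worst] = d;
                // rescan for the new worst entry among the k kept values
                for (int i = 0; i < k; i++)
                    if (best[i] > best[worst])
                        worst = i;
            }
        }
        return java.util.Arrays.copyOf(best, found);
    }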
/**
 * Entry point to the program.
 * Params: <query point> <input filenames> <output filename>
 * query point: in the form x,y,k (e.g. 10,20,5 queries the 5 nearest neighbors of (10,20))
 * input filenames: a list of paths to input files in HDFS
 * output filename: a base path for the output in HDFS; a suffix _<round> is appended each round
 * @param args command line arguments as described above
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
JobConf conf = new JobConf(KNNMapReduce.class);
conf.setJobName("KNN");
// Retrieve query rectangle and store it to an HDFS file
PointWithK queryPoint = new PointWithK();
String[] parts = args[0].split(",");
queryPoint.x = Integer.parseInt(parts[0]);
queryPoint.y = Integer.parseInt(parts[1]);
queryPoint.k = Integer.parseInt(parts[2]);
// Get the HDFS file system
FileSystem fs = FileSystem.get(conf);
Path queryFilepath = new Path("/knn_query");
// Open an output stream for the file
FSDataOutputStream out = fs.create(queryFilepath, true);
PrintStream ps = new PrintStream(out);
ps.print(0+","+queryPoint.x +","+ queryPoint.y +","+
queryPoint.k);
ps.close();
// add this query file as the first input path to the job
RQInputFormat.addInputPath(conf, queryFilepath);
conf.setOutputKeyClass(PointWithK.class);
conf.setOutputValueClass(PointWithDistance.class);
conf.setMapperClass(Map.class);
conf.setReducerClass(Reduce.class);
conf.setCombinerClass(Reduce.class);
conf.setInputFormat(KNNInputFormat.class);
conf.setOutputFormat(TextOutputFormat.class);
// All files except first and last ones are input files
Path[] inputPaths = new Path[args.length - 2];
for (int i = 1; i < args.length - 1; i++)
RQInputFormat.addInputPath(conf, inputPaths[i-1] = new Path(args[i]));
boolean jobFinished = false;
// Get grid info of the file to be processed
GridInfo gridInfo = fs.getFileStatus(inputPaths[0]).getGridInfo();
if (gridInfo == null) {
JobClient.runJob(conf);
return;
}
// Calculate initial rectangle to be processed (the one that contains the query point)
int column = (int) ((queryPoint.x - gridInfo.xOrigin) / gridInfo.cellWidth);
int row = (int) ((queryPoint.y - gridInfo.yOrigin) / gridInfo.cellHeight);
Rectangle rectProcessed = new Rectangle(
0,
(int)(column * gridInfo.cellWidth + gridInfo.xOrigin),
(int)(row * gridInfo.cellHeight + gridInfo.yOrigin),
(int)((column + 1)* gridInfo.cellWidth + gridInfo.xOrigin),
(int)((row + 1) * gridInfo.cellHeight + gridInfo.yOrigin)
);
LOG.info("Going to process this rectangle next round: "+rectProcessed);
conf.set(SplitCalculator.QUERY_RANGE, rectProcessed.x1 + ","+rectProcessed.y1+","+
rectProcessed.x2 +","+rectProcessed.y2);
int round = 0;
while (!jobFinished) {
// Last argument is the base name output file
Path outputPath = new Path(args[args.length - 1]+"_"+round);
FileOutputFormat.setOutputPath(conf, outputPath);
JobClient.runJob(conf);
// Check that results are correct
FileStatus[] resultFiles = fs.listStatus(outputPath);
// Maximum distance of neighbors
double farthestNeighbor = 0.0;
for (FileStatus resultFile : resultFiles) {
if (resultFile.getLen() > 0) {
LineReader in = new LineReader(fs.open(resultFile.getPath()));
Text line = new Text();
while (in.readLine(line) > 0) {
int i = 0;
// Skip all characters till the -
while (line.charAt(i++) != '-');
// Parse the rest of the line to get the distance
double distance = Double.parseDouble(new String(line.getBytes(), i, line.getLength() - i));
if (distance > farthestNeighbor)
farthestNeighbor = distance;
}
in.close();
}
}
jobFinished = true;
LOG.info("Farthest neighbor: "+farthestNeighbor);
// Ensure that maximum distance cannot go outside current cell
for (int i = 0; i < inputPaths.length; i++) {
// Find cell that contains query point; the one that was actually processed
if (gridInfo == null)
continue;
LOG.info("The cell that was processed: "+rectProcessed);
double minDistance = rectProcessed.minDistance(queryPoint);
LOG.info("Min distance within processed cell: "+minDistance);
if (minDistance < farthestNeighbor) {
// TODO ensure that there is another grid cell at that distance
// This indicates that there might be a nearer neighbor in
// an adjacent cell
LOG.warn("Result is incorrect! farthestNeighbor: "+farthestNeighbor+", maxDistance: "+minDistance);
// Add all grid cells that need to be processed
for (double x = gridInfo.xOrigin; (x + gridInfo.cellWidth / 2) < gridInfo.xOrigin + gridInfo.gridWidth; x += gridInfo.cellWidth) {
for (double y = gridInfo.yOrigin; (y + gridInfo.cellHeight / 2) < gridInfo.yOrigin + gridInfo.gridHeight; y += gridInfo.cellHeight) {
Rectangle rect = new Rectangle(0, (int)x, (int)y, (int)x + (int)gridInfo.cellWidth, (int)y + (int)gridInfo.cellHeight);
if (rect.minDistance(queryPoint) < farthestNeighbor && !rectProcessed.union(rect).equals(rectProcessed)) {
// Add this rectangle to the next processed items
rectProcessed = rectProcessed.union(rect);
jobFinished = false;
}
}
}
}
}
++round;
}
}
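// A minimal invocation sketch (the jar name and HDFS paths below are hypothetical, not part of
// this listing). The first argument is parsed above as "x,y,k", the middle arguments are input
// files in HDFS, and the last argument is the base name of the per-round output paths:
//
//   hadoop jar spatial-queries.jar KNNMapReduce 100,250,10 /data/points1 /data/points2 /results/knn
//
// This would search the two input files for the 10 nearest neighbors of the point (100, 250) and
// write results to /results/knn_0, /results/knn_1, ... (one output per round).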
}
|
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Servo;
import edu.wpi.first.wpilibj.SimpleRobot;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.smartdashboard.*;
import edu.wpi.first.wpilibj.camera.AxisCameraException;
import edu.wpi.first.wpilibj.Watchdog;
public class RobotMain extends SimpleRobot {
Compressor compressor = new Compressor(1, 1);
RobotDrive chassis;
Joystick leftStick = new Joystick(1);
Joystick rightStick = new Joystick(2);
AxisCamera camera;
Servo servoTest;
DriverStation driverStation;
Watchdog watchdogTimer;
Talon fl;
Talon bl;
Talon fr;
Talon br;
private VisionProcessing visionProcessing;
public RobotMain() {
}
public void robotInit() {
watchdogTimer = Watchdog.getInstance();
camera = AxisCamera.getInstance("10.14.92.11");
visionProcessing = new VisionProcessing();
visionProcessing.init(camera);
driverStation = DriverStation.getInstance();
servoTest = new Servo(5);
fl = new Talon(1);
bl = new Talon(2);
br = new Talon(3);
fr = new Talon(4);
compressor.start();
chassis = new RobotDrive(fl, bl, fr, br);
chassis.setExpiration(2000);
chassis.setInvertedMotor(RobotDrive.MotorType.kFrontRight, true);
chassis.setInvertedMotor(RobotDrive.MotorType.kRearRight, true);
}
public void autonomous() {
watchdogTimer.setExpiration(2);
watchdogTimer.setEnabled(true);
visionProcessing.autonomous();
while (this.isAutonomous() && this.isEnabled()) {
driveNowhere();
visionProcessing.autonomousUpdate();
SmartDashboard.putBoolean("Target Hot", visionProcessing.target.Hot);
}
watchdogTimer.feed();
}
public void operatorControl() {
watchdogTimer.setExpiration(2);
watchdogTimer.setEnabled(true);
chassis.setSafetyEnabled(false);
SmartDashboard.putString("Alliance", driverStation.getAlliance().name);
while (this.isOperatorControl() && this.isEnabled()) {
SmartDashboard.putNumber("Mecanum X", getMecX());
SmartDashboard.putNumber("Mecanum Y", getMecY());
SmartDashboard.putNumber("Mecanum Rotation", getMecRot());
SmartDashboard.putNumber("Front Left", fl.getSpeed());
SmartDashboard.putNumber("Front Right", fr.getSpeed());
SmartDashboard.putNumber("Back Left", bl.getSpeed());
SmartDashboard.putNumber("Back Right", br.getSpeed());
//System.out.println("Axies: "+getMecX()+", "+getMecY()+", "+getMecRot());
//System.out.println("Drive: fl:"+fl.getSpeed()+" bl:"+bl.getSpeed()+" fr:"+fr.getSpeed()+" br:"+br.getSpeed());
mecanumDrive(getMecX(), getMecY(), getMecRot());
//chassis.mecanumDrive_Cartesian(getMecX(), getMecY(), getMecRot(), 0);
if (rightStick.getRawButton(3)) {
servoTest.setAngle(-360);
} else if (rightStick.getRawButton(2)) { //down
servoTest.setAngle(360);
}
watchdogTimer.feed();
Timer.delay(.01);
}
}
public void disabled() {
watchdogTimer.setEnabled(false);
}
public void test() {
}
private double getMecX() {
return deadZone(rightStick.getAxis(Joystick.AxisType.kX));
}
private double getMecY() {
return deadZone(rightStick.getAxis(Joystick.AxisType.kY));
}
private double getMecRot() {
return deadZone(leftStick.getAxis(Joystick.AxisType.kX));
}
private double deadZone(double value) {
return (abs(value) < .1) ? 0 : value;
}
private double abs(double value) {
return value < 0 ? -value : value;
}
private void mecanumDrive(double x, double y, double r) {
y = -y;
// Standard mecanum mixing: each wheel gets a combination of the
// forward (y), strafe (x) and rotation (r) inputs.
double fln = y + x + r;
double frn = y - x - r;
double bln = y - x + r;
double brn = y + x - r;
fr.set(-maxAt1(frn));
fl.set(maxAt1(fln));
br.set(-maxAt1(brn));
bl.set(maxAt1(bln));
}
private double maxAt1(double n) {
return n < -1 ? -1 : (n > 1 ? 1 : n);
}
private void driveNowhere() {
chassis.tankDrive(0, 0);
}
}
|
package org.bouncycastle.cms;
import org.bouncycastle.asn1.ASN1Object;
import org.bouncycastle.asn1.ASN1OctetString;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.DERObjectIdentifier;
import org.bouncycastle.asn1.cms.IssuerAndSerialNumber;
import org.bouncycastle.asn1.cms.KeyAgreeRecipientIdentifier;
import org.bouncycastle.asn1.cms.KeyAgreeRecipientInfo;
import org.bouncycastle.asn1.cms.OriginatorIdentifierOrKey;
import org.bouncycastle.asn1.cms.OriginatorPublicKey;
import org.bouncycastle.asn1.cms.RecipientEncryptedKey;
import org.bouncycastle.asn1.cms.RecipientKeyIdentifier;
import org.bouncycastle.asn1.cms.ecc.MQVuserKeyingMaterial;
import org.bouncycastle.asn1.pkcs.PrivateKeyInfo;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x509.SubjectKeyIdentifier;
import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
import org.bouncycastle.jce.spec.MQVPrivateKeySpec;
import org.bouncycastle.jce.spec.MQVPublicKeySpec;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.X509EncodedKeySpec;
import javax.crypto.Cipher;
import javax.crypto.KeyAgreement;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
/**
* the RecipientInfo class for a recipient who has been sent a message
* encrypted using key agreement.
*/
public class KeyAgreeRecipientInformation
extends RecipientInformation
{
private KeyAgreeRecipientInfo info;
private ASN1OctetString _encryptedKey;
public KeyAgreeRecipientInformation(
KeyAgreeRecipientInfo info,
AlgorithmIdentifier encAlg,
InputStream data)
{
this(info, encAlg, null, data);
}
public KeyAgreeRecipientInformation(
KeyAgreeRecipientInfo info,
AlgorithmIdentifier encAlg,
AlgorithmIdentifier macAlg,
InputStream data)
{
super(encAlg, macAlg, AlgorithmIdentifier.getInstance(info.getKeyEncryptionAlgorithm()), data);
this.info = info;
this.rid = new RecipientId();
try
{
ASN1Sequence s = this.info.getRecipientEncryptedKeys();
// TODO Handle the case of more than one encrypted key
RecipientEncryptedKey id = RecipientEncryptedKey.getInstance(
s.getObjectAt(0));
KeyAgreeRecipientIdentifier karid = id.getIdentifier();
IssuerAndSerialNumber iAndSN = karid.getIssuerAndSerialNumber();
if (iAndSN != null)
{
rid.setIssuer(iAndSN.getName().getEncoded());
rid.setSerialNumber(iAndSN.getSerialNumber().getValue());
}
else
{
RecipientKeyIdentifier rKeyID = karid.getRKeyID();
// Note: 'date' and 'other' fields of RecipientKeyIdentifier appear to be only informational
rid.setSubjectKeyIdentifier(rKeyID.getSubjectKeyIdentifier().getOctets());
}
_encryptedKey = id.getEncryptedKey();
}
catch (IOException e)
{
throw new IllegalArgumentException("invalid rid in KeyAgreeRecipientInformation");
}
}
private PublicKey getSenderPublicKey(Key receiverPrivateKey,
OriginatorIdentifierOrKey originator, Provider prov)
throws CMSException, GeneralSecurityException, IOException
{
IssuerAndSerialNumber iAndSN = originator.getIssuerAndSerialNumber();
if (iAndSN != null)
{
// TODO Support all alternatives for OriginatorIdentifierOrKey
// see RFC 3852 6.2.2
throw new CMSException("No support for 'originator' as IssuerAndSerialNumber");
}
SubjectKeyIdentifier ski = originator.getSubjectKeyIdentifier();
if (ski != null)
{
// TODO Support all alternatives for OriginatorIdentifierOrKey
// see RFC 3852 6.2.2
throw new CMSException("No support for 'originator' as SubjectKeyIdentifier");
}
// Must be OriginatorPublicKey then
return getPublicKeyFromOriginatorPublicKey(receiverPrivateKey, originator.getOriginatorKey(), prov);
}
private PublicKey getPublicKeyFromOriginatorPublicKey(Key receiverPrivateKey,
OriginatorPublicKey originatorPublicKey, Provider prov)
throws CMSException, GeneralSecurityException, IOException
{
PrivateKeyInfo privInfo = PrivateKeyInfo.getInstance(
ASN1Object.fromByteArray(receiverPrivateKey.getEncoded()));
SubjectPublicKeyInfo pubInfo = new SubjectPublicKeyInfo(
privInfo.getAlgorithmId(),
originatorPublicKey.getPublicKey().getBytes());
X509EncodedKeySpec pubSpec = new X509EncodedKeySpec(pubInfo.getEncoded());
KeyFactory fact = KeyFactory.getInstance(keyEncAlg.getObjectId().getId(), prov);
return fact.generatePublic(pubSpec);
}
private SecretKey calculateAgreedWrapKey(String wrapAlg,
PublicKey senderPublicKey, PrivateKey receiverPrivateKey, Provider prov)
throws CMSException, GeneralSecurityException, IOException
{
String agreeAlg = keyEncAlg.getObjectId().getId();
if (agreeAlg.equals(CMSEnvelopedGenerator.ECMQV_SHA1KDF))
{
byte[] ukmEncoding = info.getUserKeyingMaterial().getOctets();
MQVuserKeyingMaterial ukm = MQVuserKeyingMaterial.getInstance(
ASN1Object.fromByteArray(ukmEncoding));
PublicKey ephemeralKey = getPublicKeyFromOriginatorPublicKey(receiverPrivateKey,
ukm.getEphemeralPublicKey(), prov);
senderPublicKey = new MQVPublicKeySpec(senderPublicKey, ephemeralKey);
receiverPrivateKey = new MQVPrivateKeySpec(receiverPrivateKey, receiverPrivateKey);
}
KeyAgreement agreement = KeyAgreement.getInstance(agreeAlg, prov);
agreement.init(receiverPrivateKey);
agreement.doPhase(senderPublicKey, true);
return agreement.generateSecret(wrapAlg);
}
private Key unwrapSessionKey(String wrapAlg, SecretKey agreedKey,
Provider prov)
throws GeneralSecurityException
{
AlgorithmIdentifier aid = encAlg;
if (aid == null)
{
aid = macAlg;
}
String alg = aid.getObjectId().getId();
byte[] encryptedKey = _encryptedKey.getOctets();
// TODO Should we try alternate ways of unwrapping?
// (see KeyTransRecipientInformation.getSessionKey)
Cipher keyCipher = Cipher.getInstance(wrapAlg, prov);
keyCipher.init(Cipher.UNWRAP_MODE, agreedKey);
return keyCipher.unwrap(encryptedKey, alg, Cipher.SECRET_KEY);
}
protected Key getSessionKey(Key receiverPrivateKey, Provider prov)
throws CMSException
{
try
{
String wrapAlg = DERObjectIdentifier.getInstance(
ASN1Sequence.getInstance(keyEncAlg.getParameters()).getObjectAt(0)).getId();
PublicKey senderPublicKey = getSenderPublicKey(receiverPrivateKey,
info.getOriginator(), prov);
SecretKey agreedWrapKey = calculateAgreedWrapKey(wrapAlg,
senderPublicKey, (PrivateKey)receiverPrivateKey, prov);
return unwrapSessionKey(wrapAlg, agreedWrapKey, prov);
}
catch (NoSuchAlgorithmException e)
{
throw new CMSException("can't find algorithm.", e);
}
catch (InvalidKeyException e)
{
throw new CMSException("key invalid in message.", e);
}
catch (InvalidKeySpecException e)
{
throw new CMSException("originator key spec invalid.", e);
}
catch (NoSuchPaddingException e)
{
throw new CMSException("required padding not supported.", e);
}
catch (Exception e)
{
throw new CMSException("originator key invalid.", e);
}
}
/**
* decrypt the content and return it
*/
public CMSTypedStream getContentStream(
Key key,
String prov)
throws CMSException, NoSuchProviderException
{
return getContentStream(key, CMSUtils.getProvider(prov));
}
public CMSTypedStream getContentStream(
Key key,
Provider prov)
throws CMSException
{
Key sKey = getSessionKey(key, prov);
return getContentFromSessionKey(sKey, prov);
}
}
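// A minimal usage sketch for this class, assuming the older provider-string based BC CMS API that
// this code belongs to; "privateKey" and "encryptedStream" are placeholders supplied by the caller:
//
//   CMSEnvelopedDataParser parser = new CMSEnvelopedDataParser(encryptedStream);
//   RecipientInformationStore recipients = parser.getRecipientInfos();
//   for (Object o : recipients.getRecipients()) {
//       RecipientInformation recipient = (RecipientInformation) o;
//       if (recipient instanceof KeyAgreeRecipientInformation) {
//           CMSTypedStream typedStream = recipient.getContentStream(privateKey, "BC");
//           // consume typedStream.getContentStream() to obtain the decrypted content
//       }
//   }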
|
package water.api;
import hex.*;
import hex.genmodel.utils.DistributionFamily;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.NotImplementedException;
import water.*;
import water.api.schemas3.*;
import water.exceptions.H2OIllegalArgumentException;
import water.exceptions.H2OKeyNotFoundArgumentException;
import water.fvec.Frame;
import water.fvec.Vec;
import water.udf.CFuncRef;
import water.util.Log;
class ModelMetricsHandler extends Handler {
/** Class which contains the internal representation of the ModelMetrics list and params. */
public static final class ModelMetricsList extends Iced {
public Model _model;
public Frame _frame;
public ModelMetrics[] _model_metrics;
public String _predictions_name;
public String _deviances_name;
public boolean _deviances;
public boolean _reconstruction_error;
public boolean _reconstruction_error_per_feature;
public int _deep_features_hidden_layer = -1;
public String _deep_features_hidden_layer_name = null;
public boolean _reconstruct_train;
public boolean _project_archetypes;
public boolean _reverse_transform;
public boolean _leaf_node_assignment;
public int _exemplar_index = -1;
public String _custom_metric_func;
public String _auc_type;
public int _top_n;
public int _top_bottom_n;
public boolean _abs;
// Fetch all metrics that match model and/or frame
ModelMetricsList fetch() {
final Key[] modelMetricsKeys = KeySnapshot.globalSnapshot().filter(new KeySnapshot.KVFilter() {
@Override public boolean filter(KeySnapshot.KeyInfo k) {
try {
if( !Value.isSubclassOf(k._type, ModelMetrics.class) ) return false; // Fast-path cutout
ModelMetrics mm = DKV.getGet(k._key);
// If we're filtering by model filter by Model. :-)
if( _model != null && !mm.isForModel((Model)DKV.getGet(_model._key)) ) return false;
// If we're filtering by frame filter by Frame. :-)
if( _frame != null && !mm.isForFrame((Frame)DKV.getGet(_frame._key)) ) return false;
} catch( NullPointerException | ClassCastException ex ) {
return false; // Handle all kinds of broken racey key updates
}
return true;
}
}).keys();
_model_metrics = new ModelMetrics[modelMetricsKeys.length];
for (int i = 0; i < modelMetricsKeys.length; i++)
_model_metrics[i] = DKV.getGet(modelMetricsKeys[i]);
return this; // Flow coding
}
// Delete the metrics that match model and/or frame
ModelMetricsList delete() {
ModelMetricsList matches = fetch();
for (ModelMetrics mm : matches._model_metrics)
DKV.remove(mm._key);
return matches;
}
/** Return all the models matching the model&frame filters */
public Schema list(int version, ModelMetricsList m) {
return this.schema(version).fillFromImpl(m.fetch());
}
protected ModelMetricsListSchemaV3 schema(int version) {
switch (version) {
case 3: return new ModelMetricsListSchemaV3();
default: throw H2O.fail("Bad version for ModelMetrics schema: " + version);
}
}
} // class ModelMetricsList
/** Schema for a list of ModelMetricsBaseV3.
* This should be common across all versions of ModelMetrics schemas, so it lives here.
* TODO: move to water.api.schemas3
* */
public static final class ModelMetricsListSchemaV3 extends RequestSchemaV3<ModelMetricsList, ModelMetricsListSchemaV3> {
// Input fields
@API(help = "Key of Model of interest (optional)")
public KeyV3.ModelKeyV3<Model> model;
@API(help = "Key of Frame of interest (optional)")
public KeyV3.FrameKeyV3 frame;
@API(help = "Key of predictions frame, if predictions are requested (optional)", direction = API.Direction.INOUT)
public KeyV3.FrameKeyV3 predictions_frame;
@API(help = "Key for the frame containing per-observation deviances (optional)", direction = API.Direction.INOUT)
public KeyV3.FrameKeyV3 deviances_frame;
@API(help = "Compute reconstruction error (optional, only for Deep Learning AutoEncoder models)", json = false)
public boolean reconstruction_error;
@API(help = "Compute reconstruction error per feature (optional, only for Deep Learning AutoEncoder models)", json = false)
public boolean reconstruction_error_per_feature;
@API(help = "Extract Deep Features for given hidden layer (optional, only for Deep Learning models)", json = false)
public int deep_features_hidden_layer;
@API(help = "Extract Deep Features for given hidden layer by name (optional, only for Deep Water models)", json = false)
public String deep_features_hidden_layer_name;
@API(help = "Reconstruct original training frame (optional, only for GLRM models)", json = false)
public boolean reconstruct_train;
@API(help = "Project GLRM archetypes back into original feature space (optional, only for GLRM models)", json = false)
public boolean project_archetypes;
@API(help = "Reverse transformation applied during training to model output (optional, only for GLRM models)", json = false)
public boolean reverse_transform;
@API(help = "Return the leaf node assignment (optional, only for DRF/GBM models)", json = false)
public boolean leaf_node_assignment;
@API(help = "Type of the leaf node assignment (optional, only for DRF/GBM models)", values = {"Path", "Node_ID"}, json = false)
public Model.LeafNodeAssignment.LeafNodeAssignmentType leaf_node_assignment_type;
@API(help = "Predict the class probabilities at each stage (optional, only for GBM models)", json = false)
public boolean predict_staged_proba;
@API(help = "Predict the feature contributions - Shapley values (optional, only for DRF, GBM and XGBoost models)", json = false)
public boolean predict_contributions;
@API(help = "Specify how to output feature contributions in XGBoost - XGBoost by default outputs contributions for 1-hot encoded features, " +
"specifying a Compact output format will produce a per-feature contribution", values = {"Original", "Compact"}, json = false)
public Model.Contributions.ContributionsOutputFormat predict_contributions_output_format;
@API(help = "Only for predict_contributions function - sort Shapley values and return top_n highest (optional)", json = false)
public int top_n;
@API(help = "Only for predict_contributions function - sort Shapley values and return top_bottom_n lowest (optional)", json = false)
public int top_bottom_n;
@API(help = "Only for predict_contributions function - sort absolute Shapley values (optional)", json = false)
public boolean abs;
@API(help = "Retrieve the feature frequencies on paths in trees in tree-based models (optional, only for GBM, DRF and Isolation Forest)", json = false)
public boolean feature_frequencies;
@API(help = "Retrieve all members for a given exemplar (optional, only for Aggregator models)", json = false)
public int exemplar_index;
@API(help = "Compute the deviances per row (optional, only for classification or regression models)", json = false)
public boolean deviances;
@API(help = "Reference to custom evaluation function, format: `language:keyName=funcName`", json=false)
public String custom_metric_func;
@API(help = "Set default multinomial AUC type. Must be one of: \"AUTO\", \"NONE\", \"MACRO_OVR\", \"WEIGHTED_OVR\", \"MACRO_OVO\", \"WEIGHTED_OVO\". Default is \"NONE\" (optional, only for multinomial classification).", json=false, direction = API.Direction.INPUT)
public String auc_type;
// Output fields
@API(help = "ModelMetrics", direction = API.Direction.OUTPUT)
public ModelMetricsBaseV3[] model_metrics;
@Override public ModelMetricsHandler.ModelMetricsList fillImpl(ModelMetricsList mml) {
// TODO: check for type!
mml._model = (this.model == null || this.model.key() == null ? null : this.model.key().get());
mml._frame = (this.frame == null || this.frame.key() == null ? null : this.frame.key().get());
mml._predictions_name = (null == this.predictions_frame || null == this.predictions_frame.key() ? null : this.predictions_frame.key().toString());
mml._reconstruction_error = this.reconstruction_error;
mml._reconstruction_error_per_feature = this.reconstruction_error_per_feature;
mml._deep_features_hidden_layer = this.deep_features_hidden_layer;
mml._deep_features_hidden_layer_name = this.deep_features_hidden_layer_name;
mml._reconstruct_train = this.reconstruct_train;
mml._project_archetypes = this.project_archetypes;
mml._reverse_transform = this.reverse_transform;
mml._leaf_node_assignment = this.leaf_node_assignment;
mml._exemplar_index = this.exemplar_index;
mml._deviances = this.deviances;
mml._auc_type = this.auc_type;
mml._top_n = this.top_n;
mml._top_bottom_n = this.top_bottom_n;
mml._abs = this.abs;
if (model_metrics != null) {
mml._model_metrics = new ModelMetrics[model_metrics.length];
for( int i=0; i<model_metrics.length; i++ )
mml._model_metrics[i] = (ModelMetrics) model_metrics[i].createImpl();
}
return mml;
}
@Override public ModelMetricsListSchemaV3 fillFromImpl(ModelMetricsList mml) {
// PojoUtils.copyProperties(this, m, PojoUtils.FieldNaming.CONSISTENT);
// Shouldn't need to do this manually. . .
this.model = (mml._model == null ? null : new KeyV3.ModelKeyV3(mml._model._key));
this.frame = (mml._frame == null ? null : new KeyV3.FrameKeyV3(mml._frame._key));
this.predictions_frame = (mml._predictions_name == null ? null : new KeyV3.FrameKeyV3(Key.<Frame>make(mml._predictions_name)));
this.deviances_frame = (mml._deviances_name == null ? null : new KeyV3.FrameKeyV3(Key.<Frame>make(mml._deviances_name)));
this.reconstruction_error = mml._reconstruction_error;
this.reconstruction_error_per_feature = mml._reconstruction_error_per_feature;
this.deep_features_hidden_layer = mml._deep_features_hidden_layer;
this.deep_features_hidden_layer_name = mml._deep_features_hidden_layer_name;
this.reconstruct_train = mml._reconstruct_train;
this.project_archetypes = mml._project_archetypes;
this.reverse_transform = mml._reverse_transform;
this.leaf_node_assignment = mml._leaf_node_assignment;
this.exemplar_index = mml._exemplar_index;
this.deviances = mml._deviances;
this.auc_type = mml._auc_type;
this.top_n = mml._top_n;
this.top_bottom_n = mml._top_bottom_n;
this.abs = mml._abs;
if (null != mml._model_metrics) {
this.model_metrics = new ModelMetricsBaseV3[mml._model_metrics.length];
for( int i=0; i<model_metrics.length; i++ ) {
ModelMetrics mm = mml._model_metrics[i];
this.model_metrics[i] = (ModelMetricsBaseV3) SchemaServer.schema(3, mm.getClass()).fillFromImpl(mm);
}
} else {
this.model_metrics = new ModelMetricsBaseV3[0];
}
return this;
}
} // ModelMetricsListSchemaV3
// TODO: almost identical to ModelsHandler; refactor
public static ModelMetrics getFromDKV(Key key) {
if (null == key)
throw new IllegalArgumentException("Got null key.");
Value v = DKV.get(key);
if (null == v)
throw new IllegalArgumentException("Did not find key: " + key.toString());
Iced ice = v.get();
if (! (ice instanceof ModelMetrics))
throw new IllegalArgumentException("Expected a Model for key: " + key.toString() + "; got a: " + ice.getClass());
return (ModelMetrics)ice;
}
/** Return a single ModelMetrics. */
@SuppressWarnings("unused") // called through reflection by RequestServer
public ModelMetricsListSchemaV3 fetch(int version, ModelMetricsListSchemaV3 s) {
ModelMetricsList m = s.createAndFillImpl();
s.fillFromImpl(m.fetch());
return s;
}
/** Delete one or more ModelMetrics. */
@SuppressWarnings("unused") // called through reflection by RequestServer
public ModelMetricsListSchemaV3 delete(int version, ModelMetricsListSchemaV3 s) {
ModelMetricsList m = s.createAndFillImpl();
s.fillFromImpl(m.delete());
return s;
}
@SuppressWarnings("unused") // called through reflection by RequestServer
public ModelMetricsListSchemaV3 score(int version, ModelMetricsListSchemaV3 s) {
// parameters checking:
if (null == s.model) throw new H2OIllegalArgumentException("model", "predict", s.model);
if (null == DKV.get(s.model.name)) throw new H2OKeyNotFoundArgumentException("model", "predict", s.model.name);
if (null == s.frame) throw new H2OIllegalArgumentException("frame", "predict", s.frame);
if (null == DKV.get(s.frame.name)) throw new H2OKeyNotFoundArgumentException("frame", "predict", s.frame.name);
ModelMetricsList parms = s.createAndFillImpl();
String customMetricFunc = s.custom_metric_func;
if (customMetricFunc == null) {
customMetricFunc = parms._model._parms._custom_metric_func;
}
// set the user-given AUC type, used when scoring test data, e.g. from the h2o.performance function
MultinomialAucType at = parms._model._parms._auc_type;
if(s.auc_type != null) {
parms._model._parms._auc_type = MultinomialAucType.valueOf(s.auc_type.toUpperCase());
}
parms._model.score(parms._frame, parms._predictions_name, null, true, CFuncRef.from(customMetricFunc)).remove(); // throw away predictions, keep metrics as a side-effect
ModelMetricsListSchemaV3 mm = this.fetch(version, s);
// TODO: for now only binary predictors write an MM object.
// For the others cons one up here to return the predictions frame.
if (null == mm)
mm = new ModelMetricsListSchemaV3();
if (null == mm.model_metrics || 0 == mm.model_metrics.length) {
Log.warn("Score() did not return a ModelMetrics for model: " + s.model + " on frame: " + s.frame);
}
// set original auc type back
parms._model._parms._auc_type = at;
return mm;
}
public static final class ModelMetricsMaker extends Iced {
public String _predictions_frame;
public String _actuals_frame;
public String[] _domain;
public DistributionFamily _distribution;
public MultinomialAucType _auc_type;
public ModelMetrics _model_metrics;
}
public static final class ModelMetricsMakerSchemaV3 extends SchemaV3<ModelMetricsMaker, ModelMetricsMakerSchemaV3> {
@API(help="Predictions Frame.", direction=API.Direction.INOUT)
public String predictions_frame;
@API(help="Actuals Frame.", direction=API.Direction.INOUT)
public String actuals_frame;
@API(help="Weights Frame.", direction=API.Direction.INOUT)
public String weights_frame;
@API(help="Domain (for classification).", direction=API.Direction.INOUT)
public String[] domain;
@API(help="Distribution (for regression).", direction=API.Direction.INOUT, values = { "gaussian", "poisson", "gamma", "laplace" })
public DistributionFamily distribution;
@API(help = "Default AUC type (for multinomial classification).",
valuesProvider = ModelParamsValuesProviders.MultinomialAucTypeSchemeValuesProvider.class,
level = API.Level.secondary, direction = API.Direction.INOUT, gridable = true)
public MultinomialAucType auc_type;
@API(help="Model Metrics.", direction=API.Direction.OUTPUT)
public ModelMetricsBaseV3 model_metrics;
}
/**
* Make a model metrics object from actual and predicted values
*/
@SuppressWarnings("unused") // called through reflection by RequestServer
public ModelMetricsMakerSchemaV3 make(int version, ModelMetricsMakerSchemaV3 s) {
// parameters checking:
if (null == s.predictions_frame) throw new H2OIllegalArgumentException("predictions_frame", "make", s.predictions_frame);
Frame pred = DKV.getGet(s.predictions_frame);
if (null == pred) throw new H2OKeyNotFoundArgumentException("predictions_frame", "make", s.predictions_frame);
if (null == s.actuals_frame) throw new H2OIllegalArgumentException("actuals_frame", "make", s.actuals_frame);
Frame act = DKV.getGet(s.actuals_frame);
if (null == act) throw new H2OKeyNotFoundArgumentException("actuals_frame", "make", s.actuals_frame);
Vec weights = null;
if (null != s.weights_frame) {
Frame weightsFrame = DKV.getGet(s.weights_frame);
if (null == weightsFrame) throw new H2OKeyNotFoundArgumentException("weights_frame", "make", s.actuals_frame);
weights = weightsFrame.anyVec();
}
if (s.domain ==null) {
if (pred.numCols()!=1) {
throw new H2OIllegalArgumentException("predictions_frame", "make", "For regression problems (domain=null), the predictions_frame must have exactly 1 column.");
}
ModelMetricsRegression mm = ModelMetricsRegression.make(pred.anyVec(), act.anyVec(), weights, s.distribution);
s.model_metrics = new ModelMetricsRegressionV3().fillFromImpl(mm);
} else if (s.domain.length==2) {
if (pred.numCols()!=1) {
throw new H2OIllegalArgumentException("predictions_frame", "make", "For domains with 2 class labels, the predictions_frame must have exactly one column containing the class-1 probabilities.");
}
ModelMetricsBinomial mm = ModelMetricsBinomial.make(pred.anyVec(), act.anyVec(), weights, s.domain);
s.model_metrics = new ModelMetricsBinomialV3().fillFromImpl(mm);
} else if (s.domain.length>2){
if (pred.numCols()!=s.domain.length) {
throw new H2OIllegalArgumentException("predictions_frame", "make", "For domains with " + s.domain.length + " class labels, the predictions_frame must have exactly " + s.domain.length + " columns containing the class-probabilities.");
}
if (s.distribution == DistributionFamily.ordinal) {
ModelMetricsOrdinal mm = ModelMetricsOrdinal.make(pred, act.anyVec(), s.domain);
s.model_metrics = new ModelMetricsOrdinalV3().fillFromImpl(mm);
} else {
ModelMetricsMultinomial mm = ModelMetricsMultinomial.make(pred, act.anyVec(), weights, s.domain, s.auc_type);
s.model_metrics = new ModelMetricsMultinomialV3().fillFromImpl(mm);
}
} else {
throw H2O.unimpl();
}
return s;
}
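// Note on the expected prediction layouts, derived from the branches above: regression
// (domain == null) requires a single-column predictions frame; binomial (domain.length == 2)
// requires a single column of class-1 probabilities; multinomial/ordinal (domain.length > 2)
// requires one probability column per class label.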
/**
* Score a frame with the given model and return a Job that outputs a frame with predictions.
* Do *not* calculate ModelMetrics.
*/
@SuppressWarnings("unused") // called through reflection by RequestServer
public JobV3 predictAsync(int version, final ModelMetricsListSchemaV3 s) {
// parameters checking:
if (null == s.model) throw new H2OIllegalArgumentException("model", "predict", s.model);
if (null == DKV.get(s.model.name)) throw new H2OKeyNotFoundArgumentException("model", "predict", s.model.name);
if (null == s.frame) throw new H2OIllegalArgumentException("frame", "predict", s.frame);
if (null == DKV.get(s.frame.name)) throw new H2OKeyNotFoundArgumentException("frame", "predict", s.frame.name);
if (s.deviances || null != s.deviances_frame)
throw new H2OIllegalArgumentException("deviances", "not supported for async", s.deviances_frame);
final ModelMetricsList parms = s.createAndFillImpl();
long workAmount = parms._frame.anyVec().nChunks();
if (s.predict_contributions) {
workAmount = parms._frame.anyVec().length();
if (null == parms._predictions_name)
parms._predictions_name = "contributions_" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
} else if (s.deep_features_hidden_layer > 0 || s.deep_features_hidden_layer_name != null) {
if (null == parms._predictions_name)
parms._predictions_name = "deep_features" + Key.make().toString().substring(0, 5) + "_" +
parms._model._key.toString() + "_on_" + parms._frame._key.toString();
} else if (null == parms._predictions_name) {
parms._predictions_name = "transformation" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
}
final Job<Frame> j = new Job<>(Key.make(parms._predictions_name), Frame.class.getName(), "transformation");
H2O.H2OCountedCompleter work = new H2O.H2OCountedCompleter() {
@Override
public void compute2() {
if (s.predict_contributions) {
if (! (parms._model instanceof Model.Contributions)) {
throw new H2OIllegalArgumentException("Model type " + parms._model._parms.algoName() + " doesn't support calculating Feature Contributions.");
}
Model.Contributions mc = (Model.Contributions) parms._model;
if (parms._top_n == 0 && parms._top_bottom_n == 0 && !parms._abs) {
Model.Contributions.ContributionsOutputFormat outputFormat = null == s.predict_contributions_output_format ?
Model.Contributions.ContributionsOutputFormat.Original : s.predict_contributions_output_format;
Model.Contributions.ContributionsOptions options = new Model.Contributions.ContributionsOptions().setOutputFormat(outputFormat);
mc.scoreContributions(parms._frame, Key.make(parms._predictions_name), j, options);
} else {
mc.scoreContributions(parms._frame, Key.make(parms._predictions_name), parms._top_n, parms._top_bottom_n, parms._abs, j);
}
} else if (s.deep_features_hidden_layer < 0 && s.deep_features_hidden_layer_name == null) {
parms._model.score(parms._frame, parms._predictions_name, j, false, CFuncRef.from(s.custom_metric_func));
} else if (s.deep_features_hidden_layer_name != null){
Frame predictions;
try {
predictions = ((Model.DeepFeatures) parms._model).scoreDeepFeatures(parms._frame, s.deep_features_hidden_layer_name, j);
} catch(IllegalArgumentException e) {
Log.warn(e.getMessage());
throw e;
}
if (predictions!=null) {
predictions = new Frame(Key.make(parms._predictions_name), predictions.names(), predictions.vecs());
DKV.put(predictions._key, predictions);
}
} else {
Frame predictions = ((Model.DeepFeatures) parms._model).scoreDeepFeatures(parms._frame, s.deep_features_hidden_layer, j);
predictions = new Frame(Key.make(parms._predictions_name), predictions.names(), predictions.vecs());
DKV.put(predictions._key, predictions);
}
if ((parms._model._warningsP != null) && (parms._model._warningsP.length > 0)) { // add prediction warning here only
String[] allWarnings = (String[]) ArrayUtils.addAll(j.warns(), parms._model._warningsP); // copy both over
j.setWarnings(allWarnings);
}
tryComplete();
}
};
j.start(work, workAmount);
return new JobV3().fillFromImpl(j);
}
/**
* Score a frame with the given model and return the metrics AND the prediction frame.
*/
@SuppressWarnings("unused") // called through reflection by RequestServer
public ModelMetricsListSchemaV3 predict(int version, ModelMetricsListSchemaV3 s) {
// parameters checking:
if (s.model == null) throw new H2OIllegalArgumentException("model", "predict", null);
if (DKV.get(s.model.name) == null) throw new H2OKeyNotFoundArgumentException("model", "predict", s.model.name);
// Aggregator doesn't need a Frame to 'predict'
if (s.exemplar_index < 0) {
if (s.frame == null) throw new H2OIllegalArgumentException("frame", "predict", null);
if (DKV.get(s.frame.name) == null) throw new H2OKeyNotFoundArgumentException("frame", "predict", s.frame.name);
}
ModelMetricsList parms = s.createAndFillImpl();
Frame predictions;
Frame deviances = null;
if (!s.reconstruction_error && !s.reconstruction_error_per_feature && s.deep_features_hidden_layer < 0 &&
!s.project_archetypes && !s.reconstruct_train && !s.leaf_node_assignment && !s.predict_staged_proba && !s.predict_contributions && !s.feature_frequencies && s.exemplar_index < 0) {
if (null == parms._predictions_name)
parms._predictions_name = "predictions" + Key.make().toString().substring(0,5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
String customMetricFunc = s.custom_metric_func;
if (customMetricFunc == null) {
customMetricFunc = parms._model._parms._custom_metric_func;
}
predictions = parms._model.score(parms._frame, parms._predictions_name, null, true, CFuncRef.from(customMetricFunc));
if (s.deviances) {
if (!parms._model.isSupervised())
throw new H2OIllegalArgumentException("Deviances can only be computed for supervised models.");
if (null == parms._deviances_name)
parms._deviances_name = "deviances" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
deviances = parms._model.computeDeviances(parms._frame, predictions, parms._deviances_name);
}
} else {
if (s.deviances)
throw new H2OIllegalArgumentException("Cannot compute deviances in combination with other special predictions.");
if (Model.DeepFeatures.class.isAssignableFrom(parms._model.getClass())) {
if (s.reconstruction_error || s.reconstruction_error_per_feature) {
if (s.deep_features_hidden_layer >= 0)
throw new H2OIllegalArgumentException("Can only compute either reconstruction error OR deep features.", "");
if (null == parms._predictions_name)
parms._predictions_name = "reconstruction_error" + Key.make().toString().substring(0,5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
predictions = ((Model.DeepFeatures) parms._model).scoreAutoEncoder(parms._frame, Key.make(parms._predictions_name), parms._reconstruction_error_per_feature);
} else {
if (s.deep_features_hidden_layer < 0)
throw new H2OIllegalArgumentException("Deep features hidden layer index must be >= 0.", "");
if (null == parms._predictions_name)
parms._predictions_name = "deep_features" + Key.make().toString().substring(0,5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
predictions = ((Model.DeepFeatures) parms._model).scoreDeepFeatures(parms._frame, s.deep_features_hidden_layer);
}
predictions = new Frame(Key.<Frame>make(parms._predictions_name), predictions.names(), predictions.vecs());
DKV.put(predictions._key, predictions);
} else if(Model.GLRMArchetypes.class.isAssignableFrom(parms._model.getClass())) {
if(s.project_archetypes) {
if (parms._predictions_name == null)
parms._predictions_name = "reconstructed_archetypes_" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_of_" + parms._frame._key.toString();
predictions = ((Model.GLRMArchetypes) parms._model).scoreArchetypes(parms._frame, Key.<Frame>make(parms._predictions_name), s.reverse_transform);
} else {
assert s.reconstruct_train;
if (parms._predictions_name == null)
parms._predictions_name = "reconstruction_" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_of_" + parms._frame._key.toString();
predictions = ((Model.GLRMArchetypes) parms._model).scoreReconstruction(parms._frame, Key.<Frame>make(parms._predictions_name), s.reverse_transform);
}
} else if(s.leaf_node_assignment) {
assert(Model.LeafNodeAssignment.class.isAssignableFrom(parms._model.getClass()));
if (null == parms._predictions_name)
parms._predictions_name = "leaf_node_assignment" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
Model.LeafNodeAssignment.LeafNodeAssignmentType type = null == s.leaf_node_assignment_type ? Model.LeafNodeAssignment.LeafNodeAssignmentType.Path : s.leaf_node_assignment_type;
predictions = ((Model.LeafNodeAssignment) parms._model).scoreLeafNodeAssignment(parms._frame, type, Key.<Frame>make(parms._predictions_name));
} else if(s.feature_frequencies) {
assert(Model.FeatureFrequencies.class.isAssignableFrom(parms._model.getClass()));
if (null == parms._predictions_name)
parms._predictions_name = "feature_frequencies" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
predictions = ((Model.FeatureFrequencies) parms._model).scoreFeatureFrequencies(parms._frame, Key.<Frame>make(parms._predictions_name));
} else if(s.predict_staged_proba) {
if (! (parms._model instanceof Model.StagedPredictions)) {
throw new H2OIllegalArgumentException("Model type " + parms._model._parms.algoName() + " doesn't support Staged Predictions.");
}
if (null == parms._predictions_name)
parms._predictions_name = "staged_proba_" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
predictions = ((Model.StagedPredictions) parms._model).scoreStagedPredictions(parms._frame, Key.<Frame>make(parms._predictions_name));
} else if(s.predict_contributions) {
if (! (parms._model instanceof Model.Contributions)) {
throw new H2OIllegalArgumentException("Model type " + parms._model._parms.algoName() + " doesn't support calculating Feature Contributions.");
}
Model.Contributions mc = (Model.Contributions) parms._model;
if (null == parms._predictions_name)
parms._predictions_name = "contributions_" + Key.make().toString().substring(0, 5) + "_" + parms._model._key.toString() + "_on_" + parms._frame._key.toString();
Model.Contributions.ContributionsOutputFormat outputFormat = null == s.predict_contributions_output_format ?
Model.Contributions.ContributionsOutputFormat.Original : s.predict_contributions_output_format;
Model.Contributions.ContributionsOptions options = new Model.Contributions.ContributionsOptions().setOutputFormat(outputFormat);
predictions = mc.scoreContributions(parms._frame, Key.make(parms._predictions_name), null, options);
} else if(s.exemplar_index >= 0) {
assert(Model.ExemplarMembers.class.isAssignableFrom(parms._model.getClass()));
if (null == parms._predictions_name)
parms._predictions_name = "members_" + parms._model._key.toString() + "_for_exemplar_" + parms._exemplar_index;
predictions = ((Model.ExemplarMembers) parms._model).scoreExemplarMembers(Key.<Frame>make(parms._predictions_name), parms._exemplar_index);
}
else throw new H2OIllegalArgumentException("Requires a Deep Learning, GLRM, DRF or GBM model.", "Model must implement specific methods.");
}
ModelMetricsListSchemaV3 mm = this.fetch(version, s);
// TODO: for now only binary predictors write an MM object.
// For the others cons one up here to return the predictions frame.
if (null == mm)
mm = new ModelMetricsListSchemaV3();
mm.predictions_frame = new KeyV3.FrameKeyV3(predictions._key);
if (parms._leaf_node_assignment) // don't show metrics if leaf node assignments are made
mm.model_metrics = null;
if (deviances !=null)
mm.deviances_frame = new KeyV3.FrameKeyV3(deviances._key);
if (null == mm.model_metrics || 0 == mm.model_metrics.length) {
// There was no response in the test set -> cannot make a model_metrics object
} else {
mm.model_metrics[0].predictions = new FrameV3(predictions, 0, 100); // TODO: Should call schema(version)
}
return mm;
}
}
|
package jolie.process;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import jolie.ExecutionThread;
import jolie.Interpreter;
import jolie.lang.Constants;
import jolie.net.CommChannel;
import jolie.net.CommMessage;
import jolie.net.OutputPort;
import jolie.runtime.ExitingException;
import jolie.runtime.Expression;
import jolie.runtime.FaultException;
import jolie.runtime.Value;
import jolie.runtime.VariablePath;
import jolie.runtime.typing.RequestResponseTypeDescription;
import jolie.runtime.typing.Type;
import jolie.runtime.typing.TypeCheckingException;
import jolie.util.LocationParser;
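/**
 * Implements the solicit-response output primitive: it builds a request from the output
 * expression (if any), sends it through the output port, waits for the matching response,
 * copies the response value into the input variable path (if any), type-checks the response
 * or fault against the declared types and, on a successful response, runs the install process.
 */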
public class SolicitResponseProcess implements Process
{
final private String operationId;
final private OutputPort outputPort;
final private VariablePath inputVarPath; // may be null
final private Expression outputExpression; // may be null
final private Process installProcess; // may be null
final private RequestResponseTypeDescription types;
public SolicitResponseProcess(
String operationId,
OutputPort outputPort,
Expression outputExpression,
VariablePath inputVarPath,
Process installProcess,
RequestResponseTypeDescription types
) {
this.operationId = operationId;
this.outputPort = outputPort;
this.outputExpression = outputExpression;
this.inputVarPath = inputVarPath;
this.installProcess = installProcess;
this.types = types;
}
public Process clone( TransformationReason reason )
{
return new SolicitResponseProcess(
operationId,
outputPort,
( outputExpression == null ) ? null : outputExpression.cloneExpression( reason ),
( inputVarPath == null ) ? null : (VariablePath)inputVarPath.cloneExpression( reason ),
( installProcess == null ) ? null : installProcess.clone( reason ),
types
);
}
private void log( String message )
{
if ( Interpreter.getInstance().verbose() ) {
Interpreter.getInstance().logInfo( "[SolicitResponse operation " + operationId + "@" + outputPort.id() + "]: " + message );
}
}
public void run()
throws FaultException
{
if ( ExecutionThread.currentThread().isKilled() ) {
return;
}
CommChannel channel = null;
try {
URI uri = outputPort.getLocation();
CommMessage message =
CommMessage.createRequest(
operationId,
LocationParser.getResourcePath( uri ),
( outputExpression == null ) ? Value.create() : outputExpression.evaluate()
);
if ( types.requestType() != null ) {
types.requestType().check( message.value() );
}
channel = outputPort.getCommChannel();
log( "sending request " + message.id() );
channel.send( message );
log( "request " + message.id() + " sent" );
do {
message = channel.recvResponseFor( message );
} while( message == null );
log( "received response for request " + message.id() );
if ( inputVarPath != null ) {
inputVarPath.getValue().refCopy( message.value() );
}
if ( message.isFault() ) {
Type faultType = types.getFaultType( message.fault().faultName() );
if ( faultType != null ) {
try {
faultType.check( message.fault().value() );
} catch( TypeCheckingException e ) {
throw new FaultException( Constants.TYPE_MISMATCH_FAULT_NAME, "Received fault TypeMismatch (" + operationId + "@" + outputPort.id() + "): " + e.getMessage() );
}
}
throw message.fault();
} else {
if ( types.responseType() != null ) {
try {
types.responseType().check( message.value() );
} catch( TypeCheckingException e ) {
throw new FaultException( Constants.TYPE_MISMATCH_FAULT_NAME, "Received message TypeMismatch (" + operationId + "@" + outputPort.id() + "): " + e.getMessage() );
}
}
}
try {
installProcess.run();
} catch( ExitingException e ) { assert false; }
} catch( IOException e ) {
throw new FaultException( Constants.IO_EXCEPTION_FAULT_NAME, e );
} catch( URISyntaxException e ) {
Interpreter.getInstance().logSevere( e );
} catch( TypeCheckingException e ) {
throw new FaultException( Constants.TYPE_MISMATCH_FAULT_NAME, "Output message TypeMismatch (" + operationId + "@" + outputPort.id() + "): " + e.getMessage() );
} finally {
if ( channel != null ) {
try {
channel.release();
} catch( IOException e ) {
Interpreter.getInstance().logWarning( e );
}
}
}
}
public boolean isKillable()
{
return true;
}
}
|
import com.google.gson.Gson;
import java.sql.*;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map;
import org.json.JSONObject;
import java.net.URI;
import java.net.URISyntaxException;
import static spark.Spark.*;
import spark.template.freemarker.FreeMarkerEngine;
import spark.ModelAndView;
import static spark.Spark.get;
import static javax.measure.unit.SI.KILOGRAM;
import javax.measure.quantity.Mass;
import org.jscience.physics.model.RelativisticModel;
import org.jscience.physics.amount.Amount;
import com.heroku.sdk.jdbc.DatabaseUrl;
public class Main {
public static void main(String[] args) {
Gson gson = new Gson();
port(Integer.valueOf(System.getenv("PORT")));
staticFileLocation("/public");
get("/hello", (req, res) -> "Hello World");
/*
get("/user", (request, response) -> {
Map<String, Object> attributes = new HashMap<>();
attributes.put("message", "Hello World!");
return new ModelAndView(attributes, "user.ftl");
}, new FreeMarkerEngine());
*/
get("/db", (req, res) -> {
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try {
connection = DatabaseUrl.extract().getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks");
ArrayList<String> output = new ArrayList<String>();
while (rs.next()) {
output.add( "Read from DB: " + rs.getTimestamp("tick"));
}
attributes.put("results", output);
return new ModelAndView(attributes, "db2.ftl");
} catch (Exception e) {
attributes.put("message", "There was an error: " + e);
return new ModelAndView(attributes, "error.ftl");
} finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}
}, new FreeMarkerEngine());
get("/user_info", (req, res) ->
{
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try{
connection = DatabaseUrl.extract().getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate("CREATE TABLE IF NOT EXISTS user_info (user_email varchar(100), user_password varchar(30), user_name varchar(30) )");
// stmt.executeUpdate("INSERT INTO users_info VALUES ('user_email','user_password','user_name')");
ResultSet rs = stmt.executeQuery("SELECT user_email, user_password FROM user_info");
ArrayList<String> output = new ArrayList<String>();
while(rs.next())
{
output.add("read user " + "email: " + rs.getString("user_email") + " password: " + rs.getString("user_password") );
}
attributes.put("results",output);
return new ModelAndView(attributes, "user_info.ftl");
} catch (Exception e) {
attributes.put("message", "There was an error: " + e);
return new ModelAndView(attributes, "error.ftl");
} finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}}, new FreeMarkerEngine());
get("/user_image", (req, res) ->
{
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try{
connection = DatabaseUrl.extract().getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate("CREATE TABLE IF NOT EXISTS user_info_image (user_name varchar(100), user_image varchar(10000000) )");
//stmt.executeUpdate("INSERT INTO user_info_image VALUES ('Smith','it should be dataurl data')");
ResultSet rs = stmt.executeQuery("SELECT user_name, user_image FROM user_info_image");
ArrayList<String> output = new ArrayList<String>();
while(rs.next())
{
output.add("read user " + "name: " + rs.getString("user_name") + " photo: " + rs.getString("user_image") );
}
attributes.put("results",output);
return new ModelAndView(attributes, "image.ftl");
} catch (Exception e) {
attributes.put("message", "There was an error: " + e);
return new ModelAndView(attributes, "error.ftl");
} finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}}, new FreeMarkerEngine());
// add user image Database
post("/user_info_image", (req, res) ->
{
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try{
connection = DatabaseUrl.extract().getConnection();
System.out.println(req.body());
JSONObject obj=new JSONObject(req.body());
String username_image=obj.getString("edit_username");
String image=obj.getString("edit_userimage");
Statement stmt = connection.createStatement();
stmt.executeUpdate("INSERT INTO user_info_image (user_name, user_image )"+ "VALUES('" + username_image + "','" + image + "')");
// stmt.executeUpdate("INSERT INTO users_info_image VALUES ('user_email','user_password','user_name')");
return req.body();
}catch(Exception e){
System.err.println("Exception: "+e);
return e.getMessage();
} finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}});
post("/adduser",(req,res)->
{
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try{
connection = DatabaseUrl.extract().getConnection();
System.out.println(req.body());
JSONObject obj = new JSONObject(req.body());
String email = obj.getString("signup-email");
String password = obj.getString("signup-password");
Statement stmt = connection.createStatement();
stmt.executeUpdate("INSERT INTO user_info(user_email, user_password, user_name)" +
"VALUES('" + email + "', '" + password + "', 'null')");
return req.body();
} catch (Exception e) {
System.err.println("Exception: "+ e);
return e.getMessage();
} finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}});
get("/get_user_image", (req, res) ->
{
ArrayList<Object> data=new ArrayList<>();
Connection connection = null;
try{
connection = DatabaseUrl.extract().getConnection();
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("SELECT user_name FROM user_info_image WHERE user_name='John'");
while(rs.next())
{
Map<String,Object> photo=new HashMap<>();
photo.put("user_name", rs.getString("user_name"));
data.add(photo);
}
} catch (Exception e) {
data.add("There was an error: " + e);
} finally {
if (connection != null)
try{connection.close();
} catch(SQLException e){}
}
return data;
},gson::toJson);
/*
get("/user", (req, res) -> {
ArrayList<String> users = new ArrayList<String>();
users.add("John Doe");
users.add("Tony Doe");
users.add("test one");
Map<String, Object> attributes = new HashMap<>();
attributes.put("users", users);
return new ModelAndView(attributes, "user.ftl");
}, new FreeMarkerEngine());
*/
get("/api/timeline_info", (req, res) -> {
Map<String, Object> data = new HashMap<>();
data.put("header_username","Smith");
data.put("title1", "sport");
data.put("content1", "today night, gym");
data.put("image1", "background: #FFC1C1;");
data.put("title2","sport");
data.put("content2", "monday moring, swimming with John");
data.put("image2", "background: #BFEFFF;");
data.put("title3","sport");
data.put("content3", "friday, a basketball competition");
data.put("image3", "background: #FFC1C1;");
return data;
}, gson::toJson);
/*
get("/getuser", (req, res) -> {
Connection connection = null;
List<Object> data = new ArrayList<>();
connection = DatabaseUrl.extract().getConnection();
// JSONObject obj = new JSONObject(req.body());
// String email = obj.getString("loginin-email");
// String password = obj.getString("loginin-password");
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeUpdate("SELECT user_name FROM user_info" +
"WHERE user_email='john2@gmail.com' ");
Map<String, Object> output = new HashMap<>();
while (rs.next()) {
output.put("user_email" , rs.getString("user_name"));
}
data.add(output);
return data;
}, gson::toJson);
*/
get("/recommendation", (req, res) -> {
ArrayList<String> users = new ArrayList<String>();
users.add("John Doe");
users.add("Smith");
users.add("Daniel");
users.add("Mark");
users.add("Ellen");
users.add("Lily");
users.add("Julio");
users.add("Chela");
users.add("Bells");
ArrayList<String> images = new ArrayList<String>();
images.add("picture/image1.jpg");
images.add("picture/image2.jpg");
images.add("picture/image3.jpg");
images.add("picture/image4.jpg");
images.add("picture/image5.jpg");
images.add("picture/image6.jpg");
images.add("picture/image7.jpg");
images.add("picture/image8.jpg");
images.add("picture/image9.jpg");
Map<String, Object> attributes = new HashMap<>();
attributes.put("users", users);
attributes.put("images", images);
return new ModelAndView(attributes, "recommendation.ftl");
}, new FreeMarkerEngine());
get("/api/contact.xml", (req, res ) ->
{
Map<String, Object> data = new HashMap<>();
data.put("username","Smith");
String xml= "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
"<user_profile>" +
"<user_name> Smith </user_name>"+
"<num_timeline> 10 </num_timeline>" +
"</user_profile>" ;
res.type("text/xml");
return xml;
});
}
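// A minimal sketch of a parameterized insert (a hypothetical helper, not wired into the routes
// above). The handlers above build SQL by string concatenation, which is open to SQL injection;
// a PreparedStatement binds the user-supplied values instead.
private static void insertUser(Connection connection, String email, String password) throws SQLException {
try (PreparedStatement ps = connection.prepareStatement(
"INSERT INTO user_info(user_email, user_password, user_name) VALUES (?, ?, 'null')")) {
ps.setString(1, email);
ps.setString(2, password);
ps.executeUpdate();
}
}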
}
|
package org.objectweb.proactive.core.xml;
import java.io.FileInputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.objectweb.proactive.core.descriptor.xml.VariablesHandler;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
/**
* This class provides a Variable Contract between the deployment descriptor and the application program.
* Variables can be defined with different types, thus enforcing different requirements on the contract.
* @author The ProActive Team
*/
public class VariableContract implements Serializable {
static Logger logger = ProActiveLogger.getLogger(Loggers.DEPLOYMENT);
public static VariableContract xmlproperties = null;
public static final Lock lock = new Lock();
private boolean closed;
private static final Pattern variablePattern = Pattern.compile("(\\$\\{(.*?)\\})");
private static final Pattern legalPattern = Pattern.compile("^\\$\\{[\\w\\.]+\\}$");
private class PropertiesDatas {
public String value;
public VariableContractType type;
public String setFrom; //Descriptor, Program
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append(value).append(" type=").append(type).append(" setFrom=")
.append(setFrom);
return sb.toString();
}
}
private HashMap list;
/**
* Constructor of the class. Creates a new instance.
*
*/
public VariableContract() {
list = new HashMap();
closed = false;
}
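/*
 * A minimal usage sketch (the variable name and values are illustrative only; which sources may
 * set a variable of a given type is governed by the ability/priority checks below):
 *
 *   VariableContract contract = new VariableContract();
 *   contract.setDescriptorVariable("HOST_NAME", "localhost", VariableContractType.DescriptorVariable);
 *   contract.close();
 *   String host = contract.getValue("HOST_NAME");
 */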
/**
* Marks the contract as closed. No more variables can be defined or set.
*/
public void close() {
//before closing we set the JavaProperties values
java.util.Iterator it = list.keySet().iterator();
while (it.hasNext()) {
String name = (String) it.next();
PropertiesDatas data = (PropertiesDatas) list.get(name);
if (data.type.equals(VariableContractType.JavaPropertyVariable)
|| data.type
.equals(VariableContractType.JavaPropertyDescriptorDefault)
|| data.type
.equals(VariableContractType.JavaPropertyProgramDefault)) {
try {
String value = System.getProperty(name);
if (value == null) value="";
setVariableFrom(name, value, data.type, "JavaProperty");
} catch (Exception ex) {
if (logger.isDebugEnabled())
logger.debug("Unable to get java property: " + name);
}
}
}// while
closed = true;
}
/**
* Tells whether this contract is closed or not.
*
* @return True if it is closed, false otherwise.
*/
public boolean isClosed() {
return closed;
}
public void setVariableFromProgram(String name, String value,
VariableContractType type) {
setVariableFrom(name, value, type, "Program");
}
private void setVariableFrom(String name, String value,
VariableContractType type, String from) {
if (logger.isDebugEnabled()) {
logger.debug("Setting from " + from + ": " + type + " " + name +
"=" + value);
}
if (closed) {
throw new IllegalArgumentException(
"Variable Contract is Closed. Variables can no longer be set");
}
checkGenericLogic(name, value, type);
if ((value.length() > 0) && !type.hasSetAbility(from)) {
throw new IllegalArgumentException("Variable " + name +
" can not be set from " + from + " for type: " + type);
}
if ((value.length() <= 0) && !type.hasSetEmptyAbility(from)) {
throw new IllegalArgumentException("Variable " + name +
" can not be set empty from " + from + " for type: " + type);
}
if (list.containsKey(name)) {
PropertiesDatas var = (PropertiesDatas) list.get(name);
if (!type.hasPriority(var.setFrom, from)) {
if (logger.isDebugEnabled()) {
logger.debug("Skipping, lower priority (" + from + " < " +
var.setFrom + ") for type: " + type);
}
return;
}
}
unsafeAdd(name, value, type, from);
}
public void setVariableFromProgram(HashMap map, VariableContractType type)
throws NullPointerException {
if ((map == null) || (type == null)) {
throw new NullPointerException("Null arguments");
}
String name;
java.util.Iterator it = map.keySet().iterator();
while (it.hasNext()) {
name = (String) it.next();
setVariableFromProgram(name, (String) map.get(name), type);
}
}
public void setDescriptorVariable(String name, String value,
VariableContractType type) {
setVariableFrom(name, value, type, "Descriptor");
}
/**
* Loads the variable contract from a Java Properties file format
* @param file The file location.
* @throws org.xml.sax.SAXException
*/
public void load(String file) throws org.xml.sax.SAXException {
Properties properties = new Properties();
if (logger.isDebugEnabled()) {
logger.debug("Loading propeties file:" + file);
}
// Open the file
try {
FileInputStream stream = new FileInputStream(file);
properties.load(stream);
} catch (Exception ex) {
if (logger.isDebugEnabled()) {
logger.debug("Curret Working Directory: " +
System.getProperty("user.dir"));
}
throw new org.xml.sax.SAXException(
"Tag property cannot open file : [" + file + "]");
}
String name;
String value;
Iterator it = properties.keySet().iterator();
while (it.hasNext()) {
name = (String) it.next();
value = properties.getProperty(name);
setDescriptorVariable(name, value,
VariableContractType.DescriptorVariable);
}
}
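/*
 * Illustrative sketch: load(...) expects a standard Java properties file, and every entry
 * is registered as a DescriptorVariable. A minimal example (names and values below are
 * hypothetical) could look like:
 *
 *   # deployment.properties
 *   PROACTIVE_HOME=/home/user/ProActive
 *   NUM_NODES=4
 *
 *   contract.load("deployment.properties");
 */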
/**
* Loads a file with Variable Contract tags into this instance.
* @param file the file location
*/
public void loadXML(String file) {
if (logger.isDebugEnabled()) {
logger.debug("Loading XML variable file:" + file);
}
VariablesHandler.createVariablesHandler(file, this);
}
/**
* Returns the value of the variable name passed as parameter.
* @param name The name of the variable.
* @return The value of the variable.
*/
public String getValue(String name) {
if (list.containsKey(name)) {
PropertiesDatas var = (PropertiesDatas) list.get(name);
return var.value;
}
return null;
}
/**
 * Replaces the variables inside a text with their values.
 *
 * @param text Text with variables inside.
 * @return The text with the variables replaced by their values.
 */
public String transform(String text) {
if(text==null) return null;
Matcher m=variablePattern.matcher(text);
StringBuffer sb=new StringBuffer();
while(m.find()){
if(!isLegalName(m.group(1)))
throw new IllegalArgumentException("Error, malformed variable:"+m.group(1));
String name=m.group(2);
String value=getValue(name);
if(value==null || value.length()<=0)
throw new IllegalArgumentException("Error, variable value not found: "+name+"=?");
if(logger.isDebugEnabled()){
logger.debug("Matched:"+name+" = "+value);
//logger.debug(m);
}
m.appendReplacement(sb, Matcher.quoteReplacement(value)); //substitute the variable with its value
}
m.appendTail(sb);
return sb.toString();
}
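/*
 * Illustrative sketch of transform(...): variables written as ${NAME} are replaced by their
 * registered values (the names and values below are hypothetical).
 *
 *   // given a contract where VIRTUAL_NODE has the value "workers"
 *   String s = contract.transform("Deploying on ${VIRTUAL_NODE} nodes");
 *   // s is now "Deploying on workers nodes"
 *
 * Malformed variables (anything not matching ${word.characters}) and variables without a
 * value cause an IllegalArgumentException.
 */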
private void checkGenericLogic(String name, String value,
VariableContractType type) {
/*
* Generic Logical checks
*/
if (name == null) {
throw new NullPointerException("Variable Name is null.");
}
if (name.length() <= 0) {
throw new IllegalArgumentException("Variable Name is empty.");
}
if (!isLegalName("${"+name+"}")) {
throw new IllegalArgumentException("Illegal variable name:"+name);
}
if (value == null) {
throw new NullPointerException("Variable Value is null.");
}
if (type == null) {
throw new NullPointerException("Variable Type is null.");
}
if (list.containsKey(name) &&
!((PropertiesDatas) list.get(name)).type.equals(type)) {
throw new IllegalArgumentException("Variable " + name +
" is already defined with type: " +
((PropertiesDatas) list.get(name)).type);
}
}
private void unsafeAdd(String name, String value,
VariableContractType type, String setFrom)
throws NullPointerException, IllegalArgumentException {
if (name == null) {
throw new NullPointerException("XML Variable Name is null.");
}
if (name.length() <= 0) {
throw new IllegalArgumentException("XML Variable Name is empty.");
}
if (value == null) {
throw new NullPointerException("XML Variable Value is null.");
}
PropertiesDatas data;
if (list.containsKey(name)) {
data = (PropertiesDatas) list.get(name);
if (logger.isDebugEnabled()) {
logger.debug("...Modifying variable registry: "+name+"="+value);
}
} else {
data = new PropertiesDatas();
if (logger.isDebugEnabled()) {
logger.debug("...Creating new registry for variable: "+name+"="+value);
}
}
data.type = type;
data.value = value;
data.setFrom = setFrom;
list.put(name, data);
}
/**
* Checks if there are empty values in the contract. All errors are printed through
* the logger.
* @return True if the contract has no empty values.
*/
public boolean checkContract() {
boolean retval = true;
String name;
java.util.Iterator it = list.keySet().iterator();
while (it.hasNext()) {
name = (String) it.next();
PropertiesDatas data = (PropertiesDatas) list.get(name);
if(data.value.length()<=0){
logger.error(data.type.getEmptyErrorMessage(name));
retval=false;
}
}
return retval;
}
public String toString() {
StringBuffer sb = new StringBuffer();
PropertiesDatas var;
String name;
java.util.Iterator it = list.keySet().iterator();
while (it.hasNext()) {
name = (String) it.next();
var = (PropertiesDatas) list.get(name);
sb.append(name).append("=").append(var).append("\n");
}
return sb.toString();
}
public boolean isLegalName(String var){
Matcher m = legalPattern.matcher(var);
return m.matches();
}
/**
* Class used for exclusive access to global static variable:
* org.objectweb.proactive.core.xml.XMLProperties.xmlproperties
*
* @author The ProActive Team
*/
static public class Lock {
private boolean locked;
private Lock() {
locked = false;
}
/**
* Call this method to release the lock on the XMLProperty variable.
* This method will also clean the variable contents.
*/
public synchronized void release() {
locked = false;
notify();
}
/**
* Call this method to get the lock on the XMLProperty object instance.
*/
public synchronized void aquire() {
while (locked) {
try {
wait();
} catch (InterruptedException e) {
}
}
locked = true;
}
}
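/*
 * Illustrative sketch of using the Lock (hypothetical caller code): the static lock guards
 * access to the shared VariableContract.xmlproperties instance.
 *
 *   VariableContract.lock.aquire();
 *   try {
 *       VariableContract.xmlproperties = new VariableContract();
 *       // ... fill and use the contract ...
 *   } finally {
 *       VariableContract.lock.release();
 *   }
 */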
}
|
package gov.nih.nci.calab.ui.security;
import gov.nih.nci.calab.dto.common.UserBean;
import gov.nih.nci.calab.exception.CaNanoLabSecurityException;
import gov.nih.nci.calab.service.security.LoginService;
import gov.nih.nci.calab.service.security.UserService;
import gov.nih.nci.calab.service.util.CaNanoLabConstants;
import gov.nih.nci.calab.ui.core.AbstractBaseAction;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
import org.apache.struts.validator.DynaValidatorForm;
/**
* The LoginAction authenticates a user into the caLAB system.
*
* @author doswellj, pansu
*/
public class LoginAction extends AbstractBaseAction {
public ActionForward executeTask(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
ActionForward forward = null;
// logout first
HttpSession session = request.getSession();
if (!session.isNew()) {
// invalidate the old one
session.invalidate();
}
DynaValidatorForm theForm = (DynaValidatorForm) form;
String strLoginId = (String) theForm.get("loginId");
String strPassword = (String) theForm.get("password");
// Encrypt the password.
// String strEncryptedPass = PasswordService.getInstance().encrypt(
// strPassword);
// Call CSM to authenticate the user.
LoginService loginservice = new LoginService(
CaNanoLabConstants.CSM_APP_NAME);
Boolean blnAuthenticated = loginservice.login(strLoginId, strPassword);
// strEncryptedPass);
if (blnAuthenticated) {
// check if the password is the initial password
// redirect to change password page
if (strLoginId.equals(strPassword)) {
ActionMessages msgs = new ActionMessages();
ActionMessage msg = new ActionMessage(
"message.login.changepassword");
msgs.add("message", msg);
saveMessages(request, msgs);
return mapping.findForward("changePassword");
}
session = request.getSession();
setUserSessionInfo(session, strLoginId);
forward = mapping.findForward("success");
} else {
throw new CaNanoLabSecurityException("Invalid Credentials.");
}
return forward;
}
private void setUserSessionInfo(HttpSession session, String loginName)
throws Exception {
UserService userService = new UserService(
CaNanoLabConstants.CSM_APP_NAME);
UserBean user = userService.getUserBean(loginName);
session.setAttribute("user", user);
session.setAttribute("userService", userService);
Boolean createSample = userService.checkCreatePermission(user,
CaNanoLabConstants.CSM_PG_SAMPLE);
session.setAttribute("canCreateSample", createSample);
Boolean createProtocol = userService.checkCreatePermission(user,
CaNanoLabConstants.CSM_PG_PROTOCOL);
session.setAttribute("canCreateProtocol", createProtocol);
Boolean createReport = userService.checkCreatePermission(user,
CaNanoLabConstants.CSM_PG_REPORT);
session.setAttribute("canCreateReport", createReport);
Boolean createParticle = userService.checkCreatePermission(user,
CaNanoLabConstants.CSM_PG_PARTICLE);
session.setAttribute("canCreateNanoparticle", createParticle);
boolean isAdmin = userService.isAdmin(user.getLoginName());
session.setAttribute("isAdmin", isAdmin);
boolean canDelete = userService.checkDeletePermission(user,
CaNanoLabConstants.CSM_PG_PARTICLE);
if (canDelete && isAdmin) {
session.setAttribute("canUserDeleteChars", "true");
} else {
session.setAttribute("canUserDeleteChars", "false");
}
}
public boolean loginRequired() {
return false;
}
public boolean canUserExecute(UserBean user) {
return true;
}
}
|
import java.io.File;
import java.util.Scanner;
import java.io.IOException;
import java.io.PrintWriter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.CommandLine;
public class Main {
public static void main(String[] args) throws Exception {
CommandLineParser parser = new GnuParser();
Options options = new Options();
options.addOption( OptionBuilder.withLongOpt("input")
.hasArg()
.withDescription("input file")
.create("i") );
options.addOption( OptionBuilder.withLongOpt("output")
.hasArg()
.withDescription("output file")
.create("o") );
CommandLine line = parser.parse(options, args);
String input, output;
if (line.hasOption("input"))
input = line.getOptionValue("input");
else
input = null;
output = perform(input);
if (line.hasOption("output")) {
try( PrintWriter out = new PrintWriter(line.getOptionValue("output")) ) {
out.println(output);
}
} else {
System.out.println(output);
}
}
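/*
 * Illustrative command line usage (file names are hypothetical):
 *
 *   java Main -i problem.txt -o solution.txt   // read problem from file, write solution to file
 *   java Main                                  // interactive: read from stdin, print to stdout
 */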
public static String perform(String inputFileName) {
try {
SimplexTable simplexTable = new SimplexTable(createSimplexTable(inputFileName));
return simplexTable.problemString() + "\nSolution:\n\n" + solve(simplexTable);
} catch (IOException e) {
return "File not found: " + inputFileName;
}
}
private static String solve(SimplexTable simplexTable) {
int cols = simplexTable.cols();
int rows = simplexTable.rows();
int resCol, resRow;
int [] rowNames = new int[rows];
double [] solution = new double[rows];
boolean solved = false;
String answer = "";
for (int j = 0; j < rows; j++)
rowNames[j] = cols + j + 1;
while (!solved) {
resCol = simplexTable.findResCol();
if (resCol == -1) {
return "Objective function is unlimited\n";
} else if (resCol == 0) {
solved = true;
} else {
resRow = simplexTable.findResRow(resCol);
rowNames[resRow] = resCol;
simplexTable.step(resRow, resCol);
}
}
for (int i = 1; i <= cols; i++) {
int j = 0;
for (; j < rows && rowNames[j] != i; j++);
answer += "x" + i + " = ";
if (j == rows) {
answer += "0\n";
}
else {
answer += simplexTable.getElement(j, 0) + "\n";
}
}
answer += "max{ F(x) } = " + simplexTable.getElement(rows, 0) + '\n';
return answer;
}
private static double[][] createSimplexTable(String fileName) throws IOException {
Scanner scanner;
if (fileName == null) {
scanner = new Scanner(System.in);
} else {
scanner = new Scanner(new File(fileName));
}
if (fileName == null) System.out.print("Number of variables: ");
int cols = scanner.nextInt();
if (fileName == null) System.out.print("Number of limitations: ");
int rows = scanner.nextInt();
double [][] simplexTable = new double[rows + 1][cols + 1];
simplexTable[rows][0] = 0;
if (fileName == null) System.out.println("\nCOST FUNCTION");
for (int i = 1; i <= cols; ++i) {
if (fileName == null) System.out.print("    Multiplier #" + i + ": ");
simplexTable[rows][i] = -scanner.nextDouble();
}
if (fileName == null) System.out.println("\nLIMITATIONS: MULTIPLIERS");
for (int i = 0; i < rows; ++i) {
if (fileName == null) System.out.println(" Limitation #" + (i + 1) + ":");
for (int j = 1; j <= cols; ++j) {
if (fileName == null) System.out.print("        Multiplier #" + j + ": ");
simplexTable[i][j] = scanner.nextDouble();
}
}
if (fileName == null) System.out.println("\nLIMITATIONS: FREE TERMS");
for (int i = 0; i < rows; ++i) {
if (fileName == null) System.out.print("    Limitation #" + (i + 1) + " free term: ");
simplexTable[i][0] = scanner.nextDouble();
}
if (fileName == null) System.out.println("");
return simplexTable;
}
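/*
 * Illustrative sketch of the input consumed by createSimplexTable(...) (values are
 * hypothetical). The numbers are read in this order: number of variables, number of
 * limitations, the cost-function multipliers, the limitation multipliers row by row,
 * and finally the free terms of the limitations:
 *
 *   2            number of variables
 *   2            number of limitations
 *   3 5          cost function: F(x) = 3*x1 + 5*x2
 *   1 0          limitation #1 multipliers
 *   0 2          limitation #2 multipliers
 *   4 12         free terms
 */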
}
|
package org.commcare.api.json;
import org.commcare.api.util.ApiConstants;
import org.commcare.modern.util.Pair;
import org.javarosa.core.model.Constants;
import org.javarosa.core.model.FormDef;
import org.javarosa.core.model.FormIndex;
import org.javarosa.core.model.SelectChoice;
import org.javarosa.core.model.data.*;
import org.javarosa.core.model.data.helper.Selection;
import org.javarosa.core.model.instance.TreeReference;
import org.javarosa.form.api.FormEntryController;
import org.javarosa.form.api.FormEntryModel;
import org.javarosa.form.api.FormEntryPrompt;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.*;
/**
* Utility functions for performing some action on a Form and receiving a JSON response
*/
public class JsonActionUtils {
/**
 * Delete a repeat at the specified index, return the JSON response
 *
 * @param controller the FormEntryController under consideration
 * @param model the FormEntryModel under consideration
 * @param repeatIndexString the index of the repeat item to be deleted
 * @param formIndexString the form index of the repeat group containing the item
 * @return The JSON representation of the updated form tree
 */
public static JSONObject deleteRepeatToJson(FormEntryController controller,
FormEntryModel model, String repeatIndexString, String formIndexString) {
FormIndex formIndex = indexFromString(formIndexString, model.getForm());
controller.jumpToIndex(formIndex);
controller.deleteRepeat(Integer.parseInt(repeatIndexString));
return getCurrentJson(controller, model);
}
/**
* Expand (i.e. create) the repeat at the specified form index
*
* @param controller the FormEntryController under consideration
* @param model the FormEntryModel under consideration
* @param formIndexString the form index of the repeat group to be expanded
* @return The JSON representation of the updated question tree
*/
public static JSONObject descendRepeatToJson(FormEntryController controller,
FormEntryModel model, String formIndexString) {
FormIndex formIndex = indexFromString(formIndexString, model.getForm());
controller.jumpToIndex(formIndex);
controller.descendIntoNewRepeat();
return getCurrentJson(controller, model);
}
/**
* Get the JSON representation of the question tree of this controller/model pair
*
* @param controller the FormEntryController under consideration
* @param model the FormEntryModel under consideration
* @return The JSON representation of the question tree
*/
public static JSONObject getCurrentJson(FormEntryController controller,
FormEntryModel model) {
JSONObject ret = new JSONObject();
ret.put(ApiConstants.QUESTION_TREE_KEY, getFullFormJSON(model, controller));
return ret;
}
// Similar to above, but get the questions for only one formIndex (OQPS)
public static JSONObject getCurrentJson(FormEntryController controller,
FormEntryModel model,
String formIndex) {
JSONObject ret = new JSONObject();
ret.put(ApiConstants.QUESTION_TREE_KEY, getOneQuestionPerScreenJSON(model, controller,
JsonActionUtils.indexFromString("" + formIndex, model.getForm())));
return ret;
}
/**
* Answer the question, return the updated JSON representation of the question tree
*
* @param controller the FormEntryController under consideration
* @param model the FormEntryModel under consideration
* @param answer the answer to enter
* @param prompt the question to be answered
* @return The JSON representation of the updated question tree
*/
public static JSONObject questionAnswerToJson(FormEntryController controller,
FormEntryModel model, String answer, FormEntryPrompt prompt) {
JSONObject ret = new JSONObject();
IAnswerData answerData;
if (answer == null || answer.equals("None")) {
answerData = null;
} else {
try {
answerData = getAnswerData(prompt, answer);
} catch (IllegalArgumentException e) {
ret.put(ApiConstants.RESPONSE_STATUS_KEY, "error");
ret.put(ApiConstants.ERROR_TYPE_KEY, "illegal-argument");
ret.put(ApiConstants.ERROR_REASON_KEY, e.getMessage());
return ret;
}
}
int result = controller.answerQuestion(prompt.getIndex(), answerData);
if (result == FormEntryController.ANSWER_REQUIRED_BUT_EMPTY) {
ret.put(ApiConstants.RESPONSE_STATUS_KEY, "validation-error");
ret.put(ApiConstants.ERROR_TYPE_KEY, "required");
} else if (result == FormEntryController.ANSWER_CONSTRAINT_VIOLATED) {
ret.put(ApiConstants.RESPONSE_STATUS_KEY, "validation-error");
ret.put(ApiConstants.ERROR_TYPE_KEY, "constraint");
ret.put(ApiConstants.ERROR_REASON_KEY, prompt.getConstraintText());
} else if (result == FormEntryController.ANSWER_OK) {
if (controller.getOneQuestionPerScreen()) {
ret.put(ApiConstants.QUESTION_TREE_KEY, getOneQuestionPerScreenJSON(
model, controller, controller.getCurrentIndex()));
} else {
ret.put(ApiConstants.QUESTION_TREE_KEY, getFullFormJSON(model, controller));
}
ret.put(ApiConstants.RESPONSE_STATUS_KEY, "accepted");
}
return ret;
}
/**
* Answer the question, return the updated JSON representation of the question tree
*
* @param controller the FormEntryController under consideration
* @param model the FormEntryModel under consideration
* @param answer the answer to enter
* @param index the form index of the question to be answered
* @return The JSON representation of the updated question tree
*/
public static JSONObject questionAnswerToJson(FormEntryController controller,
FormEntryModel model, String answer, String index) {
FormIndex formIndex = indexFromString(index, model.getForm());
FormEntryPrompt prompt = model.getQuestionPrompt(formIndex);
return questionAnswerToJson(controller, model, answer, prompt);
}
/**
* Return the IAnswerData version of the string data input
*
* @param formEntryPrompt the FormEntryPrompt for this question
* @param data the String answer
* @return the IAnswerData version of @data above
*/
public static IAnswerData getAnswerData(FormEntryPrompt formEntryPrompt, String data) {
int index;
switch(formEntryPrompt.getDataType()){
case Constants.DATATYPE_CHOICE:
index = Integer.parseInt(data);
SelectChoice selectChoiceAnswer = formEntryPrompt.getSelectChoices().get(index - 1);
return new SelectOneData(selectChoiceAnswer.selection());
case Constants.DATATYPE_CHOICE_LIST:
String[] split = parseMultiSelectString(data);
Vector<Selection> ret = new Vector<>();
for (String s : split) {
index = Integer.parseInt(s);
Selection selection = formEntryPrompt.getSelectChoices().get(index - 1).selection();
ret.add(selection);
}
return new SelectMultiData(ret);
case Constants.DATATYPE_GEOPOINT:
return AnswerDataFactory.template(formEntryPrompt.getControlType(), formEntryPrompt.getDataType()).cast(
new UncastData(convertTouchFormsGeoPointString(data)));
}
return data.equals("") ? null : AnswerDataFactory.template(formEntryPrompt.getControlType(), formEntryPrompt.getDataType()).cast(new UncastData(data));
}
// we need to remove the brackets Touchforms includes and replace the commas with spaces
private static String convertTouchFormsGeoPointString(String touchformsString) {
return touchformsString.replace(",", " ").replace("[", "").replace("]", "");
}
/**
 * Given a FormDef and a String representation of the form index,
 * reconstruct the corresponding fully-fledged FormIndex object.
 */
public static FormIndex indexFromString(String stringIndex, FormDef form) {
if (stringIndex == null || stringIndex.equals("None")) {
return null;
} else if (stringIndex.equals("<")) {
return FormIndex.createBeginningOfFormIndex();
} else if (stringIndex.equals(">")) {
return FormIndex.createEndOfFormIndex();
}
List<Pair<Integer, Integer>> list = stepToList(stringIndex);
FormIndex ret = reduceFormIndex(list, null);
ret.assignRefs(form);
return ret;
}
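/*
 * Illustrative sketch of the string form indices accepted by indexFromString(...)
 * (the example values are hypothetical):
 *
 *   "None"   -> null
 *   "<"      -> beginning-of-form index
 *   ">"      -> end-of-form index
 *   "1_0,2"  -> a nested index composed of the steps (index 1, multiplicity 0)
 *               and (index 2, no explicit multiplicity)
 */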
public static int getQuestionType(FormEntryModel model, String stringIndex, FormDef form){
FormIndex index = indexFromString(stringIndex, form);
return model.getEvent(index);
}
/**
* Given a String representation of a form index, decompose it into a list of <index, multiplicity> pairs
*
* @param index the comma separated String representation of the form index
* @return @index represented as a list of index,multiplicity integer pairs
*/
private static List<Pair<Integer, Integer>> stepToList(String index) {
ArrayList<Pair<Integer, Integer>> ret = new ArrayList<>();
String[] split = index.split(",");
List<String> list = Arrays.asList(split);
Collections.reverse(list);
for (String step : list) {
if (!step.trim().equals("")) {
Pair<Integer, Integer> pair = stepFromString(step);
ret.add(pair);
}
}
return ret;
}
/**
* Given the string representation of one "Step" in a form index, return an Integer pair of <index, multiplicity>
*/
private static Pair<Integer, Integer> stepFromString(String step) {
// a trailing 'J' marks a repeat juncture index (TreeReference.INDEX_REPEAT_JUNCTURE)
if (step.endsWith("J")) {
return new Pair<>(Integer.parseInt("" + step.substring(0, step.length() - 1)), TreeReference.INDEX_REPEAT_JUNCTURE);
}
// we want to deal with '.', '_' and ':' as separators for the time being for TF legacy reasons
String[] split = step.split("[._:]");
// the form index is the first part, the multiplicity is the second
int i = Integer.parseInt(split[0].trim());
int mult = -1;
if (split.length > 1 && split[1] != null) {
mult = Integer.parseInt(split[1].trim());
}
return new Pair<>(i, mult);
}
/**
* Given a list of steps (see above) to be traversed and a current Form index,
* pop the top step and create a new FormIndex from this step with the current as its parent, then recursively
* call this function with the remaining steps and the new FormIndex
*/
private static FormIndex reduceFormIndex(List<Pair<Integer, Integer>> steps, FormIndex current) {
if (steps.size() == 0) {
return current;
}
Pair<Integer, Integer> currentStep = steps.remove(0);
FormIndex nextLevel = new FormIndex(current, currentStep.first, currentStep.second, null);
return reduceFormIndex(steps, nextLevel);
}
private static String[] parseMultiSelectString(String answer) {
answer = answer.trim();
if (answer.startsWith("[") && answer.endsWith("]")) {
answer = answer.substring(1, answer.length() - 1);
}
String[] ret = answer.split(" ");
for (int i = 0; i < ret.length; i++) {
ret[i] = ret[i].replace(",", "");
}
return ret;
}
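/*
 * Illustrative sketch: parseMultiSelectString(...) accepts the space separated selection
 * indices sent by TouchForms, with or without surrounding brackets and commas, e.g.
 *
 *   "[1, 3, 4]"  ->  {"1", "3", "4"}
 *   "2 5"        ->  {"2", "5"}
 */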
public static JSONArray getFullFormJSON(FormEntryModel fem, FormEntryController fec) {
JSONArray ret = new JSONArray();
Walker walker = new Walker(ret, FormIndex.createBeginningOfFormIndex(), fec, fem);
walker.walk();
return ret;
}
public static JSONArray getOneQuestionPerScreenJSON(FormEntryModel fem, FormEntryController fec, FormIndex formIndex) {
FormEntryPrompt[] prompts = fec.getQuestionPrompts(formIndex);
JSONArray ret = new JSONArray();
for (FormEntryPrompt prompt: prompts) {
fem.setQuestionIndex(prompt.getIndex());
JSONObject obj = new JSONObject();
PromptToJson.parseQuestionType(fem, obj);
ret.put(obj);
}
return ret;
}
}
|
package org.opencms.workplace.editors;
import org.opencms.file.CmsFile;
import org.opencms.file.CmsRequestContext;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsResourceFilter;
import org.opencms.file.collectors.I_CmsResourceCollector;
import org.opencms.i18n.CmsEncoder;
import org.opencms.i18n.CmsLocaleManager;
import org.opencms.jsp.CmsJspActionElement;
import org.opencms.lock.CmsLockType;
import org.opencms.main.CmsException;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.util.CmsRequestUtil;
import org.opencms.util.CmsStringUtil;
import org.opencms.widgets.A_CmsWidget;
import org.opencms.widgets.I_CmsWidget;
import org.opencms.widgets.I_CmsWidgetDialog;
import org.opencms.widgets.I_CmsWidgetParameter;
import org.opencms.workplace.CmsWorkplace;
import org.opencms.workplace.CmsWorkplaceSettings;
import org.opencms.workplace.editors.directedit.CmsDirectEditButtonSelection;
import org.opencms.xml.CmsXmlContentDefinition;
import org.opencms.xml.CmsXmlEntityResolver;
import org.opencms.xml.CmsXmlException;
import org.opencms.xml.CmsXmlUtils;
import org.opencms.xml.content.CmsXmlContent;
import org.opencms.xml.content.CmsXmlContentErrorHandler;
import org.opencms.xml.content.CmsXmlContentFactory;
import org.opencms.xml.content.CmsXmlContentTab;
import org.opencms.xml.content.CmsXmlContentValueSequence;
import org.opencms.xml.types.CmsXmlNestedContentDefinition;
import org.opencms.xml.types.I_CmsXmlContentValue;
import org.opencms.xml.types.I_CmsXmlSchemaType;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.jsp.JspException;
import org.apache.commons.logging.Log;
/**
* Creates the editor for XML content definitions.<p>
*
* @author Alexander Kandzior
* @author Andreas Zahner
*
* @version $Revision: 1.83 $
*
* @since 6.0.0
*/
public class CmsXmlContentEditor extends CmsEditor implements I_CmsWidgetDialog {
/** Action for checking content before executing the direct edit action. */
public static final int ACTION_CHECK = 151;
/** Action for confirming the XML content structure correction. */
public static final int ACTION_CONFIRMCORRECTION = 152;
/** Value for the action: copy the current locale. */
public static final int ACTION_COPYLOCALE = 141;
/** Action for correction of the XML content structure confirmed. */
public static final int ACTION_CORRECTIONCONFIRMED = 153;
/** Action for optional element creation. */
public static final int ACTION_ELEMENT_ADD = 154;
/** Action for element move down operation. */
public static final int ACTION_ELEMENT_MOVE_DOWN = 155;
/** Action for element move up operation. */
public static final int ACTION_ELEMENT_MOVE_UP = 156;
/** Action for optional element removal. */
public static final int ACTION_ELEMENT_REMOVE = 157;
/** Action for new file creation. */
public static final int ACTION_NEW = 158;
/** Indicates that the content should be checked before executing the direct edit action. */
public static final String EDITOR_ACTION_CHECK = "check";
/** Indicates that the correction of the XML content structure should be confirmed. */
public static final String EDITOR_ACTION_CONFIRMCORRECTION = "confirmcorrect";
/** Indicates an optional element should be created. */
public static final String EDITOR_ACTION_ELEMENT_ADD = "addelement";
/** Indicates an element should be moved down. */
public static final String EDITOR_ACTION_ELEMENT_MOVE_DOWN = "elementdown";
/** Indicates an element should be moved up. */
public static final String EDITOR_ACTION_ELEMENT_MOVE_UP = "elementup";
/** Indicates an optional element should be removed. */
public static final String EDITOR_ACTION_ELEMENT_REMOVE = "removeelement";
/** Indicates a new file should be created. */
public static final String EDITOR_ACTION_NEW = CmsDirectEditButtonSelection.VALUE_NEW;
/** Indicates that the contents of the current locale should be copied to other locales. */
public static final String EDITOR_COPYLOCALE = "copylocale";
/** Indicates that the correction of the XML content structure was confirmed by the user. */
public static final String EDITOR_CORRECTIONCONFIRMED = "correctconfirmed";
/** Parameter name for the request parameter "elementindex". */
public static final String PARAM_ELEMENTINDEX = "elementindex";
/** Parameter name for the request parameter "elementname". */
public static final String PARAM_ELEMENTNAME = "elementname";
/** Parameter name for the request parameter "newlink". */
public static final String PARAM_NEWLINK = "newlink";
/** Constant for the editor type, must be the same as the editors subfolder name in the VFS. */
private static final String EDITOR_TYPE = "xmlcontent";
/** The log object for this class. */
private static final Log LOG = CmsLog.getLog(CmsXmlContentEditor.class);
/** The content object to edit. */
private CmsXmlContent m_content;
/** The element locale. */
private Locale m_elementLocale;
/** File object used to read and write contents. */
private CmsFile m_file;
/** The set of help message ids that have already been used. */
private Set m_helpMessageIds;
/** Indicates if an optional element is included in the form. */
private boolean m_optionalElementPresent;
/** Parameter stores the index of the element to add or remove. */
private String m_paramElementIndex;
/** Parameter stores the name of the element to add or remove. */
private String m_paramElementName;
/** The selected model file for the new resource. */
private String m_paramModelFile;
/** Parameter to indicate if a new XML content resource should be created. */
private String m_paramNewLink;
/** The error handler for the xml content. */
private CmsXmlContentErrorHandler m_validationHandler;
/** Visitor implementation that stores the widgets for the content. */
private CmsXmlContentWidgetVisitor m_widgetCollector;
/**
* Public constructor.<p>
*
* @param jsp an initialized JSP action element
*/
public CmsXmlContentEditor(CmsJspActionElement jsp) {
super(jsp);
}
/**
* Performs the change element language action of the editor.<p>
*/
public void actionChangeElementLanguage() {
// save the possibly changed content of the editor
Locale oldLocale = CmsLocaleManager.getLocale(getParamOldelementlanguage());
Locale newLocale = getElementLocale();
try {
setEditorValues(oldLocale);
if (!m_content.validate(getCms()).hasErrors(oldLocale)) {
// no errors found in content
if (!m_content.hasLocale(newLocale)) {
// check if we should copy the content from a default locale
boolean addNew = true;
List locales = OpenCms.getLocaleManager().getDefaultLocales(getCms(), getParamResource());
if (locales.size() > 1) {
// default locales have been set, try to find a match
try {
m_content.copyLocale(locales, newLocale);
addNew = false;
} catch (CmsXmlException e) {
// no matching default locale was available, we will create a new one later
}
}
if (addNew) {
// create new element if selected language element is not present
try {
m_content.addLocale(getCms(), newLocale);
} catch (CmsXmlException e) {
if (LOG.isErrorEnabled()) {
LOG.error(e.getLocalizedMessage(), e);
}
}
}
}
//save to temporary file
writeContent();
// set default action to suppress error messages
setAction(ACTION_DEFAULT);
} else {
// errors found, switch back to old language to show errors
setParamElementlanguage(getParamOldelementlanguage());
// set stored locale to null to reinitialize it
m_elementLocale = null;
}
} catch (Exception e) {
// should usually never happen
if (LOG.isInfoEnabled()) {
LOG.info(e.getLocalizedMessage(), e);
}
}
}
/**
* Deletes the temporary file and unlocks the edited resource when in direct edit mode.<p>
*
* @param forceUnlock if true, the resource will be unlocked anyway
*/
public void actionClear(boolean forceUnlock) {
// delete the temporary file
deleteTempFile();
boolean directEditMode = Boolean.valueOf(getParamDirectedit()).booleanValue();
boolean modified = Boolean.valueOf(getParamModified()).booleanValue();
if (directEditMode || forceUnlock || !modified) {
// unlock the resource when in direct edit mode, force unlock is true or resource was not modified
try {
getCms().unlockResource(getParamResource());
} catch (CmsException e) {
// should usually never happen
if (LOG.isInfoEnabled()) {
LOG.info(e.getLocalizedMessage(), e);
}
}
}
}
/**
* Performs the copy locale action.<p>
*
* @throws JspException if something goes wrong
*/
public void actionCopyElementLocale() throws JspException {
try {
setEditorValues(getElementLocale());
if (!hasValidationErrors()) { // !m_content.validate(getCms()).hasErrors(getElementLocale())) {
// save content of the editor only to the temporary file
writeContent();
// remove any release & expiration date from the temporary file to make the preview work
getCms().setDateReleased(getParamTempfile(), CmsResource.DATE_RELEASED_DEFAULT, false);
getCms().setDateExpired(getParamTempfile(), CmsResource.DATE_EXPIRED_DEFAULT, false);
}
} catch (CmsException e) {
// show error page
showErrorPage(this, e);
}
}
/**
* Performs the delete locale action.<p>
*
* @throws JspException if something goes wrong
*/
public void actionDeleteElementLocale() throws JspException {
try {
Locale loc = getElementLocale();
m_content.removeLocale(loc);
//write the modified xml content
writeContent();
List locales = m_content.getLocales();
if (locales.size() > 0) {
// set first locale as new display locale
Locale newLoc = (Locale)locales.get(0);
setParamElementlanguage(newLoc.toString());
m_elementLocale = newLoc;
} else {
if (LOG.isErrorEnabled()) {
LOG.error(Messages.get().getBundle().key(Messages.LOG_GET_LOCALES_1, getParamResource()));
}
}
} catch (CmsXmlException e) {
// an error occurred while trying to delete the locale, stop action
showErrorPage(e);
} catch (CmsException e) {
// should usually never happen
if (LOG.isInfoEnabled()) {
LOG.info(e.getLocalizedMessage(), e);
}
}
}
/**
* Performs a configurable action performed by the editor.<p>
*
* The default action is: save resource, clear temporary files and publish the resource directly.<p>
*
* @throws IOException if a forward fails
* @throws ServletException if a forward fails
* @throws JspException if including a JSP fails
*/
public void actionDirectEdit() throws IOException, JspException, ServletException {
// get the action class from the OpenCms runtime property
I_CmsEditorActionHandler actionClass = OpenCms.getWorkplaceManager().getEditorActionHandler();
if (actionClass == null) {
// error getting the action class, save content and exit the editor
actionSave();
actionExit();
} else {
actionClass.editorAction(this, getJsp());
}
}
/**
* Performs the exit editor action.<p>
*
* @see org.opencms.workplace.editors.CmsEditor#actionExit()
*/
public void actionExit() throws IOException, JspException, ServletException {
if (getAction() == ACTION_CANCEL) {
// save and exit was cancelled
return;
}
// unlock resource if we are in direct edit mode
actionClear(false);
// close the editor
actionClose();
}
/**
* Moves an element in the xml content either up or down.<p>
*
* Depends on the given action value.<p>
*
* @throws JspException if including the error page fails
*/
public void actionMoveElement() throws JspException {
// set editor values from request
try {
setEditorValues(getElementLocale());
} catch (CmsXmlException e) {
// an error occurred while trying to set the values, stop action
showErrorPage(e);
return;
}
// get the necessary parameters to move the element
int index = 0;
try {
index = Integer.parseInt(getParamElementIndex());
} catch (Exception e) {
// ignore, should not happen
}
// get the value to move
I_CmsXmlContentValue value = m_content.getValue(getParamElementName(), getElementLocale(), index);
if (getAction() == ACTION_ELEMENT_MOVE_DOWN) {
// move down the value
value.moveDown();
} else {
// move up the value
value.moveUp();
}
if (getValidationHandler().hasWarnings(getElementLocale())) {
// there were warnings for the edited content, reset validation handler to avoid display issues
resetErrorHandler();
}
try {
// write the modified content to the temporary file
writeContent();
} catch (CmsException e) {
// an error occurred while trying to save
showErrorPage(e);
}
}
/**
* Creates a new XML content item for editing.<p>
*
* @throws JspException in case something goes wrong
*/
public void actionNew() throws JspException {
// get the collector used to create the new content
int pos = m_paramNewLink.indexOf('|');
String collectorName = m_paramNewLink.substring(0, pos);
String param = m_paramNewLink.substring(pos + 1);
// get the collector used for calculating the next file name
I_CmsResourceCollector collector = OpenCms.getResourceManager().getContentCollector(collectorName);
String newFileName = "";
try {
// one resource serves as a "template" for the new resource
CmsFile templateFile = getCms().readFile(getParamResource(), CmsResourceFilter.IGNORE_EXPIRATION);
CmsXmlContent template = CmsXmlContentFactory.unmarshal(getCloneCms(), templateFile);
Locale locale = (Locale)OpenCms.getLocaleManager().getDefaultLocales(getCms(), getParamResource()).get(0);
// now create a new XML content based on the templates content definition
CmsXmlContent newContent = CmsXmlContentFactory.createDocument(
getCms(),
locale,
template.getEncoding(),
template.getContentDefinition());
// IMPORTANT: calculation of the name MUST be done here so the file name is ensured to be valid
newFileName = collector.getCreateLink(getCms(), collectorName, param);
boolean useModelFile = false;
if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(getParamModelFile())) {
getCms().getRequestContext().setAttribute(CmsRequestContext.ATTRIBUTE_MODEL, getParamModelFile());
useModelFile = true;
}
// now create the resource, fill it with the marshalled XML and write it back to the VFS
getCms().createResource(newFileName, templateFile.getTypeId());
// re-read the created resource
CmsFile newFile = getCms().readFile(newFileName, CmsResourceFilter.ALL);
if (!useModelFile) {
newFile.setContents(newContent.marshal());
// write the file with the updated content
getCloneCms().writeFile(newFile);
}
// wipe out parameters for the editor to ensure proper operation
setParamNewLink(null);
setParamAction(null);
setParamResource(newFileName);
setAction(ACTION_DEFAULT);
// create the temporary file to work with
setParamTempfile(createTempFile());
// set the member variables for the content
m_file = getCms().readFile(getParamTempfile(), CmsResourceFilter.ALL);
if (!useModelFile) {
m_content = newContent;
} else {
m_content = CmsXmlContentFactory.unmarshal(getCms(), m_file);
}
} catch (CmsException e) {
if (LOG.isErrorEnabled()) {
LOG.error(Messages.get().getBundle().key(Messages.LOG_CREATE_XML_CONTENT_ITEM_1, m_paramNewLink), e);
}
try {
// creation failed, remove the partially created file again
getCms().deleteResource(newFileName, CmsResource.DELETE_REMOVE_SIBLINGS);
} catch (CmsException e2) {
// ignore
}
throw new JspException(e);
}
}
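/*
 * Illustrative note: actionNew() expects the "newlink" parameter in the form
 * "<collectorName>|<collectorParam>" (both values below are hypothetical), e.g.
 *
 *   newlink = "someCollector|/sites/default/news/"
 *
 * Everything before the first '|' names the resource collector used to calculate the new
 * file name; the remainder is handed to that collector as its parameter.
 */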
/**
* Performs the preview xml content action in a new browser window.<p>
*
* @throws IOException if redirect fails
* @throws JspException if inclusion of error page fails
*/
public void actionPreview() throws IOException, JspException {
try {
// save content of the editor only to the temporary file
setEditorValues(getElementLocale());
writeContent();
// remove any release & expiration date from the temporary file to make the preview work
getCms().setDateReleased(getParamTempfile(), CmsResource.DATE_RELEASED_DEFAULT, false);
getCms().setDateExpired(getParamTempfile(), CmsResource.DATE_EXPIRED_DEFAULT, false);
} catch (CmsException e) {
// show error page
showErrorPage(this, e);
}
// get preview uri from content handler
String previewUri = m_content.getContentDefinition().getContentHandler().getPreview(
getCms(),
m_content,
getParamTempfile());
// create locale request parameter
StringBuffer param = new StringBuffer(8);
if (previewUri.indexOf('?') != -1) {
param.append("&");
} else {
param.append("?");
}
param.append(CmsLocaleManager.PARAMETER_LOCALE);
param.append("=");
param.append(getParamElementlanguage());
// redirect to the temporary file with currently active element language or to the specified preview uri
sendCmsRedirect(previewUri + param);
}
/**
* Performs the save content action.<p>
*
* @see org.opencms.workplace.editors.CmsEditor#actionSave()
*/
public void actionSave() throws JspException {
actionSave(getElementLocale());
if (getAction() != ACTION_CANCEL) {
// save successful, set save action
setAction(ACTION_SAVE);
}
}
/**
* Performs the save content action.<p>
*
* This is also used when changing the element language.<p>
*
* @param locale the locale to save the content
* @throws JspException if including the error page fails
*/
public void actionSave(Locale locale) throws JspException {
try {
setEditorValues(locale);
// check if content has errors
if (!hasValidationErrors()) {
// no errors found, write content and copy temp file contents
writeContent();
commitTempFile();
// set the modified parameter
setParamModified(Boolean.TRUE.toString());
}
} catch (CmsException e) {
showErrorPage(e);
}
}
/**
* Adds an optional element to the xml content or removes an optional element from the xml content.<p>
*
* Depends on the given action value.<p>
*
* @throws JspException if including the error page fails
*/
public void actionToggleElement() throws JspException {
// set editor values from request
try {
setEditorValues(getElementLocale());
} catch (CmsXmlException e) {
// an error occurred while trying to set the values, stop action
showErrorPage(e);
return;
}
// get the necessary parameters to add/remove the element
int index = 0;
try {
index = Integer.parseInt(getParamElementIndex());
} catch (Exception e) {
// ignore, should not happen
}
if (getAction() == ACTION_ELEMENT_REMOVE) {
// remove the value
m_content.removeValue(getParamElementName(), getElementLocale(), index);
} else {
// add the new value after the clicked element
if (m_content.hasValue(getParamElementName(), getElementLocale())) {
// when other values are present, increase index to use right position
index += 1;
}
m_content.addValue(getCms(), getParamElementName(), getElementLocale(), index);
}
if (getValidationHandler().hasWarnings(getElementLocale())) {
// there were warnings for the edited content, reset validation handler to avoid display issues
resetErrorHandler();
}
try {
// write the modified content to the temporary file
writeContent();
} catch (CmsException e) {
// an error occurred while trying to save
showErrorPage(e);
}
}
/**
* Builds the html String for the element language selector.<p>
*
* This method has to use the resource request parameter because the temporary file is
* not available in the upper button frame.<p>
*
* @param attributes optional attributes for the <select> tag
* @return the html for the element language selectbox
*/
public String buildSelectElementLanguage(String attributes) {
return buildSelectElementLanguage(attributes, getParamResource(), getElementLocale());
}
/**
* @see org.opencms.widgets.I_CmsWidgetDialog#getButtonStyle()
*/
public int getButtonStyle() {
return getSettings().getUserSettings().getEditorButtonStyle();
}
/**
* @see org.opencms.workplace.editors.CmsEditor#getEditorResourceUri()
*/
public String getEditorResourceUri() {
return getSkinUri() + "editors/" + EDITOR_TYPE + "/";
}
/**
* Returns the current element locale.<p>
*
* @return the current element locale
*/
public Locale getElementLocale() {
if (m_elementLocale == null) {
if (CmsStringUtil.isNotEmpty(getParamElementlanguage()) && !"null".equals(getParamElementlanguage())) {
m_elementLocale = CmsLocaleManager.getLocale(getParamElementlanguage());
} else {
initElementLanguage();
m_elementLocale = CmsLocaleManager.getLocale(getParamElementlanguage());
}
}
return m_elementLocale;
}
/**
* @see org.opencms.widgets.I_CmsWidgetDialog#getHelpMessageIds()
*/
public Set getHelpMessageIds() {
if (m_helpMessageIds == null) {
m_helpMessageIds = new HashSet();
}
return m_helpMessageIds;
}
/**
* Returns the index of the element to add or remove.<p>
*
* @return the index of the element to add or remove
*/
public String getParamElementIndex() {
return m_paramElementIndex;
}
/**
* Returns the name of the element to add or remove.<p>
*
* @return the name of the element to add or remove
*/
public String getParamElementName() {
return m_paramElementName;
}
/**
* Returns the parameter that specifies the model file name.<p>
*
* @return the parameter that specifies the model file name
*/
public String getParamModelFile() {
return m_paramModelFile;
}
/**
* Returns the "new link" parameter.<p>
*
* @return the "new link" parameter
*/
public String getParamNewLink() {
return m_paramNewLink;
}
/**
* @see org.opencms.widgets.I_CmsWidgetDialog#getUserAgent()
*/
public String getUserAgent() {
return getJsp().getRequest().getHeader(CmsRequestUtil.HEADER_USER_AGENT);
}
/**
* Returns the different xml editor widgets used in the form to display.<p>
*
* @return the different xml editor widgets used in the form to display
*/
public CmsXmlContentWidgetVisitor getWidgetCollector() {
if (m_widgetCollector == null) {
// create an instance of the widget collector
m_widgetCollector = new CmsXmlContentWidgetVisitor(getElementLocale());
m_content.visitAllValuesWith(m_widgetCollector);
}
return m_widgetCollector;
}
/**
* Generates the HTML form for the XML content editor.<p>
*
* @return the HTML that generates the form for the XML editor
*/
public String getXmlEditorForm() {
// set "editor mode" attribute (required for link replacement in the root site)
getCms().getRequestContext().setAttribute(CmsRequestContext.ATTRIBUTE_EDITOR, Boolean.TRUE);
// add a customized message bundle if one is specified in the XSD of the XML content
addMessages(m_content.getContentDefinition().getContentHandler().getMessages(getLocale()));
// initialize tab lists for error handling before generating the editor form
m_errorTabs = new ArrayList();
m_warningTabs = new ArrayList();
return getXmlEditorForm(m_content.getContentDefinition(), "", true, false).toString();
}
/**
* Generates the HTML for the end of the html editor form page.<p>
*
* @return the HTML for the end of the html editor form page
* @throws JspException if including the error page fails
*/
public String getXmlEditorHtmlEnd() throws JspException {
StringBuffer result = new StringBuffer(16384);
if (m_optionalElementPresent) {
// disabled optional element(s) present, reset widgets to show help bubbles on optional form entries
resetWidgetCollector();
}
try {
// get all widgets from collector
Iterator i = getWidgetCollector().getWidgets().keySet().iterator();
while (i.hasNext()) {
// get the value of the widget
String key = (String)i.next();
I_CmsXmlContentValue value = (I_CmsXmlContentValue)getWidgetCollector().getValues().get(key);
I_CmsWidget widget = (I_CmsWidget)getWidgetCollector().getWidgets().get(key);
result.append(widget.getDialogHtmlEnd(getCms(), this, (I_CmsWidgetParameter)value));
}
// add empty help text layer
result.append("<div class=\"help\" id=\"helpText\" ");
result.append("onmouseover=\"showHelpText();\" onmouseout=\"hideHelpText();\"></div>\n");
// add empty element button layer
result.append("<div class=\"xmlButtons\" id=\"xmlElementButtons\" ");
result.append("onmouseover=\"checkElementButtons(true);\" onmouseout=\"checkElementButtons(false);\"></div>\n");
// return the HTML
return result.toString();
} catch (Exception e) {
showErrorPage(e);
return "";
}
}
/**
* Generates the javascript includes for the used widgets in the editor form.<p>
*
* @return the javascript includes for the used widgets
* @throws JspException if including the error page fails
*/
public String getXmlEditorIncludes() throws JspException {
StringBuffer result = new StringBuffer(1024);
// first include general JQuery JS and UI components
result.append("<script type=\"text/javascript\" src=\"");
result.append(CmsWorkplace.getSkinUri()).append("jquery/packed/jquery.js");
result.append("\"></script>\n");
result.append("<script type=\"text/javascript\" src=\"");
result.append(CmsWorkplace.getSkinUri()).append("jquery/packed/jquery.ui.js");
result.append("\"></script>\n");
// import the JavaScript for JSON helper functions
result.append("<script type=\"text/javascript\" src=\"");
result.append(CmsWorkplace.getSkinUri()).append("commons/json2.js");
result.append("\"></script>\n");
result.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"");
result.append(CmsWorkplace.getSkinUri()).append("jquery/css/ui-ocms/jquery.ui.css");
result.append("\">\n");
result.append("<link rel=\"stylesheet\" type=\"text/css\" href=\"");
result.append(CmsWorkplace.getSkinUri()).append("jquery/css/ui-ocms/jquery.ui.ocms.css");
result.append("\">\n");
try {
// iterate over unique widgets from collector
Iterator i = getWidgetCollector().getUniqueWidgets().iterator();
while (i.hasNext()) {
I_CmsWidget widget = (I_CmsWidget)i.next();
result.append(widget.getDialogIncludes(getCms(), this));
result.append("\n");
}
} catch (Exception e) {
showErrorPage(e);
}
return result.toString();
}
/**
* Generates the javascript initialization calls for the used widgets in the editor form.<p>
*
* @return the javascript initialization calls for the used widgets
* @throws JspException if including the error page fails
*/
public String getXmlEditorInitCalls() throws JspException {
StringBuffer result = new StringBuffer(512);
try {
// iterate over unique widgets from collector
Iterator i = getWidgetCollector().getUniqueWidgets().iterator();
while (i.hasNext()) {
I_CmsWidget widget = (I_CmsWidget)i.next();
result.append(widget.getDialogInitCall(getCms(), this));
}
} catch (Exception e) {
showErrorPage(e);
}
return result.toString();
}
/**
* Generates the JavaScript initialization methods for the used widgets.<p>
*
* @return the JavaScript initialization methods for the used widgets
*
* @throws JspException if an error occurs during JavaScript generation
*/
public String getXmlEditorInitMethods() throws JspException {
StringBuffer result = new StringBuffer(512);
if (m_content.getContentDefinition().getContentHandler().getTabs().size() > 0) {
// we have some tabs defined, initialize them using JQuery
result.append("var xmlSelectedTab = 0;\n");
result.append("var xmlErrorTabs = new Array();\n");
result.append("var xmlWarningTabs = new Array();\n");
result.append("$(document).ready(function(){\n\t$xmltabs = $(\"#xmltabs\").tabs({});\n");
result.append("\t$xmltabs.tabs(\"select\", xmlSelectedTab);\n");
result.append("\tfor (var i=0; i<xmlErrorTabs.length; i++) {\n");
result.append("\t\t$(\"#OcmsTabTab\" + xmlErrorTabs[i]).addClass(\"ui-state-error\");\n");
result.append("\t}\n");
result.append("\tfor (var i=0; i<xmlWarningTabs.length; i++) {\n");
result.append("\t\t$(\"#OcmsTabTab\" + xmlWarningTabs[i]).addClass(\"ui-state-warning\");\n");
result.append("\t}\n");
result.append("});\n");
}
try {
// iterate over unique widgets from collector
Iterator i = getWidgetCollector().getUniqueWidgets().iterator();
while (i.hasNext()) {
I_CmsWidget widget = (I_CmsWidget)i.next();
result.append(widget.getDialogInitMethod(getCms(), this));
result.append("\n");
}
} catch (Exception e) {
showErrorPage(e);
}
return result.toString();
}
/**
* Returns true if the edited content contains validation errors, otherwise false.<p>
*
* @return true if the edited content contains validation errors, otherwise false
*/
public boolean hasValidationErrors() {
return getValidationHandler().hasErrors();
}
/**
* Returns true if the preview is available for the edited xml content.<p>
*
* This method has to use the resource request parameter and read the file from vfs because the temporary file is
* not available in the upper button frame.<p>
*
* @return true if the preview is enabled, otherwise false
*/
public boolean isPreviewEnabled() {
try {
// read the original file because temporary file is not created when opening button frame
CmsFile file = getCms().readFile(getParamResource(), CmsResourceFilter.ALL);
CmsXmlContent content = CmsXmlContentFactory.unmarshal(getCloneCms(), file);
return content.getContentDefinition().getContentHandler().getPreview(
getCms(),
m_content,
getParamResource()) != null;
} catch (Exception e) {
// error reading or unmarshalling, no preview available
return false;
}
}
/**
* Sets the editor values for the locale with the parameters from the request.<p>
*
* Called before saving the xml content, redisplaying the input form,
* changing the language and adding or removing elements.<p>
*
* @param locale the locale of the content to save
* @throws CmsXmlException if something goes wrong
*/
public void setEditorValues(Locale locale) throws CmsXmlException {
List valueNames = getSimpleValueNames(m_content.getContentDefinition(), "", locale);
Iterator i = valueNames.iterator();
while (i.hasNext()) {
String valueName = (String)i.next();
I_CmsXmlContentValue value = m_content.getValue(valueName, locale);
I_CmsWidget widget = value.getContentDefinition().getContentHandler().getWidget(value);
widget.setEditorValue(getCms(), getJsp().getRequest().getParameterMap(), this, (I_CmsWidgetParameter)value);
}
}
/**
* Sets the index of the element to add or remove.<p>
*
* @param elementIndex the index of the element to add or remove
*/
public void setParamElementIndex(String elementIndex) {
m_paramElementIndex = elementIndex;
}
/**
* Sets the name of the element to add or remove.<p>
*
* @param elementName the name of the element to add or remove
*/
public void setParamElementName(String elementName) {
m_paramElementName = elementName;
}
/**
* Sets the parameter that specifies the model file name.<p>
*
* @param paramMasterFile the parameter that specifies the model file name
*/
public void setParamModelFile(String paramMasterFile) {
m_paramModelFile = paramMasterFile;
}
/**
* Sets the "new link" parameter.<p>
*
* @param paramNewLink the "new link" parameter to set
*/
public void setParamNewLink(String paramNewLink) {
m_paramNewLink = CmsEncoder.decode(paramNewLink);
}
/**
* Determines if the element language selector is shown dependent on the available Locales.<p>
*
* @return true, if more than one Locale is available, otherwise false
*/
public boolean showElementLanguageSelector() {
List locales = OpenCms.getLocaleManager().getAvailableLocales(getCms(), getParamResource());
if ((locales == null) || (locales.size() < 2)) {
// for less than two available locales, do not create language selector
return false;
}
return true;
}
/**
* @see org.opencms.workplace.tools.CmsToolDialog#useNewStyle()
*/
public boolean useNewStyle() {
return false;
}
/**
* @see org.opencms.workplace.editors.CmsEditor#commitTempFile()
*/
protected void commitTempFile() throws CmsException {
super.commitTempFile();
m_file = getCloneCms().readFile(getParamResource());
m_content = CmsXmlContentFactory.unmarshal(getCloneCms(), m_file);
}
/**
* Initializes the editor content when opening the editor for the first time.<p>
*
* Not necessary for the xmlcontent editor.<p>
*/
protected void initContent() {
// nothing to be done for the xmlcontent editor form
}
/**
* Initializes the element language for the first call of the editor.<p>
*/
protected void initElementLanguage() {
// get the default locale for the resource
List locales = OpenCms.getLocaleManager().getDefaultLocales(getCms(), getParamResource());
Locale locale = (Locale)locales.get(0);
if (m_content != null) {
// to copy anything we need at least one locale
if ((locales.size() > 1) && (m_content.getLocales().size() > 0) && !m_content.hasLocale(locale)) {
// required locale not available, check if an existing default locale should be copied as "template"
try {
// a list of possible default locales has been set as property, try to find a match
m_content.copyLocale(locales, locale);
writeContent();
} catch (CmsException e) {
// no match was found for the required locale
}
}
if (!m_content.hasLocale(locale)) {
// value may have changed because of the copy operation
locale = (Locale)m_content.getLocales().get(0);
}
}
setParamElementlanguage(locale.toString());
}
/**
* @see org.opencms.workplace.CmsWorkplace#initWorkplaceRequestValues(org.opencms.workplace.CmsWorkplaceSettings, javax.servlet.http.HttpServletRequest)
*/
protected void initWorkplaceRequestValues(CmsWorkplaceSettings settings, HttpServletRequest request) {
// fill the parameter values in the get/set methods
fillParamValues(request);
// set the dialog type
setParamDialogtype(EDITOR_TYPE);
if (getParamNewLink() != null) {
setParamAction(EDITOR_ACTION_NEW);
} else {
// initialize a content object from the temporary file
if ((getParamTempfile() != null) && !"null".equals(getParamTempfile())) {
try {
m_file = getCms().readFile(this.getParamTempfile(), CmsResourceFilter.ALL);
m_content = CmsXmlContentFactory.unmarshal(getCloneCms(), m_file);
} catch (CmsException e) {
// error during initialization, show error page
try {
showErrorPage(this, e);
} catch (JspException exc) {
// should usually never happen
if (LOG.isInfoEnabled()) {
LOG.info(exc);
}
}
}
}
}
// set the action for the JSP switch
if (EDITOR_SAVE.equals(getParamAction())) {
setAction(ACTION_SAVE);
} else if (EDITOR_SAVEEXIT.equals(getParamAction())) {
setAction(ACTION_SAVEEXIT);
} else if (EDITOR_EXIT.equals(getParamAction())) {
setAction(ACTION_EXIT);
} else if (EDITOR_CLOSEBROWSER.equals(getParamAction())) {
// closed browser window accidentally, unlock resource and delete temporary file
actionClear(true);
return;
} else if (EDITOR_ACTION_CHECK.equals(getParamAction())) {
setAction(ACTION_CHECK);
} else if (EDITOR_SAVEACTION.equals(getParamAction())) {
setAction(ACTION_SAVEACTION);
try {
actionDirectEdit();
} catch (Exception e) {
// should usually never happen
if (LOG.isInfoEnabled()) {
LOG.info(e.getLocalizedMessage(), e);
}
}
setAction(ACTION_EXIT);
} else if (EDITOR_COPYLOCALE.equals(getParamAction())) {
setAction(ACTION_COPYLOCALE);
} else if (EDITOR_DELETELOCALE.equals(getParamAction())) {
setAction(ACTION_DELETELOCALE);
} else if (EDITOR_SHOW.equals(getParamAction())) {
setAction(ACTION_SHOW);
} else if (EDITOR_SHOW_ERRORMESSAGE.equals(getParamAction())) {
setAction(ACTION_SHOW_ERRORMESSAGE);
} else if (EDITOR_CHANGE_ELEMENT.equals(getParamAction())) {
setAction(ACTION_SHOW);
actionChangeElementLanguage();
} else if (EDITOR_ACTION_ELEMENT_ADD.equals(getParamAction())) {
setAction(ACTION_ELEMENT_ADD);
try {
actionToggleElement();
} catch (JspException e) {
if (LOG.isErrorEnabled()) {
LOG.error(org.opencms.workplace.Messages.get().getBundle().key(
org.opencms.workplace.Messages.LOG_INCLUDE_ERRORPAGE_FAILED_0));
}
}
if ((getAction() != ACTION_CANCEL) && (getAction() != ACTION_SHOW_ERRORMESSAGE)) {
// no error occurred, redisplay the input form
setAction(ACTION_SHOW);
}
} else if (EDITOR_ACTION_ELEMENT_REMOVE.equals(getParamAction())) {
setAction(ACTION_ELEMENT_REMOVE);
try {
actionToggleElement();
} catch (JspException e) {
if (LOG.isErrorEnabled()) {
LOG.error(org.opencms.workplace.Messages.get().getBundle().key(
org.opencms.workplace.Messages.LOG_INCLUDE_ERRORPAGE_FAILED_0));
}
}
if ((getAction() != ACTION_CANCEL) && (getAction() != ACTION_SHOW_ERRORMESSAGE)) {
// no error occurred, redisplay the input form
setAction(ACTION_SHOW);
}
} else if (EDITOR_ACTION_ELEMENT_MOVE_DOWN.equals(getParamAction())) {
setAction(ACTION_ELEMENT_MOVE_DOWN);
try {
actionMoveElement();
} catch (JspException e) {
if (LOG.isErrorEnabled()) {
LOG.error(org.opencms.workplace.Messages.get().getBundle().key(
org.opencms.workplace.Messages.LOG_INCLUDE_ERRORPAGE_FAILED_0));
}
}
if ((getAction() != ACTION_CANCEL) && (getAction() != ACTION_SHOW_ERRORMESSAGE)) {
// no error occurred, redisplay the input form
setAction(ACTION_SHOW);
}
} else if (EDITOR_ACTION_ELEMENT_MOVE_UP.equals(getParamAction())) {
setAction(ACTION_ELEMENT_MOVE_UP);
try {
actionMoveElement();
} catch (JspException e) {
if (LOG.isErrorEnabled()) {
LOG.error(org.opencms.workplace.Messages.get().getBundle().key(
org.opencms.workplace.Messages.LOG_INCLUDE_ERRORPAGE_FAILED_0));
}
}
if ((getAction() != ACTION_CANCEL) && (getAction() != ACTION_SHOW_ERRORMESSAGE)) {
// no error occurred, redisplay the input form
setAction(ACTION_SHOW);
}
} else if (EDITOR_ACTION_NEW.equals(getParamAction())) {
setAction(ACTION_NEW);
return;
} else if (EDITOR_PREVIEW.equals(getParamAction())) {
setAction(ACTION_PREVIEW);
} else if (EDITOR_CORRECTIONCONFIRMED.equals(getParamAction())) {
setAction(ACTION_SHOW);
try {
// correct the XML structure before showing the form
correctXmlStructure();
} catch (CmsException e) {
// error during correction
try {
showErrorPage(this, e);
} catch (JspException exc) {
// should usually never happen
if (LOG.isInfoEnabled()) {
LOG.info(exc);
}
}
}
} else {
// initial call of editor
setAction(ACTION_DEFAULT);
try {
// lock resource if autolock is enabled in configuration
if (Boolean.valueOf(getParamDirectedit()).booleanValue()) {
// set a temporary lock in direct edit mode
checkLock(getParamResource(), CmsLockType.TEMPORARY);
} else {
// set common lock
checkLock(getParamResource());
}
// create the temporary file
setParamTempfile(createTempFile());
// initialize a content object from the created temporary file
m_file = getCms().readFile(this.getParamTempfile(), CmsResourceFilter.ALL);
m_content = CmsXmlContentFactory.unmarshal(getCloneCms(), m_file);
// check the XML content against the given XSD
try {
m_content.validateXmlStructure(new CmsXmlEntityResolver(getCms()));
} catch (CmsXmlException eXml) {
// validation failed, check the settings for handling the correction
if (OpenCms.getWorkplaceManager().isXmlContentAutoCorrect()) {
// correct the XML structure automatically according to the XSD
correctXmlStructure();
} else {
// show correction confirmation dialog
setAction(ACTION_CONFIRMCORRECTION);
}
}
} catch (CmsException e) {
// error during initialization
try {
showErrorPage(this, e);
} catch (JspException exc) {
// should usually never happen
if (LOG.isInfoEnabled()) {
LOG.info(exc);
}
}
}
// set the initial element language if not given in request parameters
if (getParamElementlanguage() == null) {
initElementLanguage();
}
}
}
/**
* Returns the html for the element operation buttons add, move, remove.<p>
*
* @param elementName name of the element
* @param index the index of the element
* @param addElement if true, the button to add an element is shown
* @param removeElement if true, the button to remove an element is shown
* @return the html for the element operation buttons
*/
private String buildElementButtons(String elementName, int index, boolean addElement, boolean removeElement) {
StringBuffer jsCall = new StringBuffer(512);
// indicates if at least one button is active
boolean buttonPresent = false;
jsCall.append("showElementButtons('");
jsCall.append(elementName);
jsCall.append("', ");
jsCall.append(index);
jsCall.append(", ");
// build the remove element button if required
if (removeElement) {
jsCall.append(Boolean.TRUE);
buttonPresent = true;
} else {
jsCall.append(Boolean.FALSE);
}
jsCall.append(", ");
// build the move down button (move down in API is move up for content editor)
if (index > 0) {
// build active move down button
jsCall.append(Boolean.TRUE);
buttonPresent = true;
} else {
jsCall.append(Boolean.FALSE);
}
jsCall.append(", ");
// build the move up button (move up in API is move down for content editor)
int indexCount = m_content.getIndexCount(elementName, getElementLocale());
if (index < (indexCount - 1)) {
// build active move up button
jsCall.append(Boolean.TRUE);
buttonPresent = true;
} else {
jsCall.append(Boolean.FALSE);
}
jsCall.append(", ");
// build the add element button if required
if (addElement) {
jsCall.append(Boolean.TRUE);
buttonPresent = true;
} else {
jsCall.append(Boolean.FALSE);
}
jsCall.append(");");
String result;
if (buttonPresent) {
// at least one button active, create mouseover button
String btIcon = "xmledit.png";
String btAction = jsCall.toString();
// determine icon to use and if a direct click action is possible
if (addElement && removeElement) {
btIcon = "xmledit_del_add.png";
} else if (addElement) {
btIcon = "xmledit_add.png";
// create button action to add element on button click
StringBuffer action = new StringBuffer(128);
action.append("addElement('");
action.append(elementName);
action.append("', ");
action.append(index);
action.append(");");
btAction = action.toString();
} else if (removeElement) {
btIcon = "xmledit_del.png";
// create button action to remove element on button click
StringBuffer action = new StringBuffer(128);
action.append("removeElement('");
action.append(elementName);
action.append("', ");
action.append(index);
action.append(");");
btAction = action.toString();
}
StringBuffer href = new StringBuffer(512);
href.append("javascript:");
href.append(btAction);
href.append("\" onmouseover=\"");
href.append(jsCall);
href.append("checkElementButtons(true);\" onmouseout=\"checkElementButtons(false);\" id=\"btimg.");
href.append(elementName).append(".").append(index);
result = button(href.toString(), null, btIcon, Messages.GUI_EDITOR_XMLCONTENT_ELEMENT_BUTTONS_0, 0);
} else {
// no active button, create a spacer
result = buttonBarSpacer(1);
}
return result;
}
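/*
 * Illustrative sketch (the element name "Title" is hypothetical): for index 0 of a
 * three-element sequence (indexCount == 3) with addElement=true and removeElement=false,
 * the mouseover call assembled above would read
 *
 *   showElementButtons('Title', 0, false, false, true, true);
 *
 * i.e. remove disabled, move-down disabled (first element), move-up enabled, add enabled.
 */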
/**
* Corrects the XML structure of the edited content according to the XSD.<p>
*
* @throws CmsException if the correction fails
*/
private void correctXmlStructure() throws CmsException {
m_content.setAutoCorrectionEnabled(true);
m_content.correctXmlStructure(getCms());
// write the corrected temporary file
writeContent();
}
/**
* Returns a list of value names containing the complete xpath of each value as String.<p>
*
* @param contentDefinition the content definition to use
* @param pathPrefix the xpath prefix
* @param locale the locale to use
* @return list of value names (containing the xpath of each value)
*/
private List getSimpleValueNames(CmsXmlContentDefinition contentDefinition, String pathPrefix, Locale locale) {
List valueNames = new ArrayList();
Iterator i = contentDefinition.getTypeSequence().iterator();
while (i.hasNext()) {
I_CmsXmlSchemaType type = (I_CmsXmlSchemaType)i.next();
String name = pathPrefix + type.getName();
// get the element sequence of the type
CmsXmlContentValueSequence elementSequence = m_content.getValueSequence(name, locale);
int elementCount = elementSequence.getElementCount();
// loop through elements
for (int j = 0; j < elementCount; j++) {
I_CmsXmlContentValue value = elementSequence.getValue(j);
StringBuffer xPath = new StringBuffer(pathPrefix.length() + 16);
xPath.append(pathPrefix);
xPath.append(CmsXmlUtils.createXpathElement(type.getName(), value.getIndex() + 1));
if (!type.isSimpleType()) {
// recurse into nested type sequence
CmsXmlNestedContentDefinition nestedSchema = (CmsXmlNestedContentDefinition)type;
xPath.append("/");
valueNames.addAll(getSimpleValueNames(
nestedSchema.getNestedContentDefinition(),
xPath.toString(),
locale));
} else {
// this is a simple type, get widget to display
valueNames.add(xPath.toString());
}
}
}
return valueNames;
}
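/*
 * Illustrative sketch (schema node names are hypothetical): for a content definition with
 * a simple "Title" node and a nested "Paragraph" node containing a simple "Text" node,
 * this method would collect xpath value names such as
 *
 *   Title[1]
 *   Paragraph[1]/Text[1]
 *   Paragraph[2]/Text[1]
 */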
/**
* Returns the error handler for error handling of the edited xml content.<p>
*
* @return the error handler
*/
private CmsXmlContentErrorHandler getValidationHandler() {
if (m_validationHandler == null) {
// errors were not yet checked, do this now and store result in member
m_validationHandler = m_content.validate(getCms());
}
return m_validationHandler;
}
private CmsXmlContentTab m_currentTab;
private List m_errorTabs;
private List m_warningTabs;
/**
* Generates the HTML form for the XML content editor.<p>
*
* This is a recursive method because nested schemas are possible;
* do not call this method directly.<p>
*
* @param contentDefinition the content definition to start with
* @param pathPrefix for nested xml content
* @param showHelpBubble if the code for a help bubble should be generated
* @param superTabOpened if a collapsed tab was already opened on the upper level (affects the table style of nested contents)
*
* @return the HTML that generates the form for the XML editor
*/
private StringBuffer getXmlEditorForm(
CmsXmlContentDefinition contentDefinition,
String pathPrefix,
boolean showHelpBubble,
boolean superTabOpened) {
StringBuffer result = new StringBuffer(1024);
// only show errors if editor is not opened initially
boolean showErrors = (getAction() != ACTION_NEW) && (getAction() != ACTION_DEFAULT);
try {
// check if we are in a nested content definition
boolean nested = CmsStringUtil.isNotEmpty(pathPrefix);
boolean useTabs = false;
boolean tabOpened = false;
int currentTabIndex = 0;
boolean collapseLabel = false;
boolean firstElement = true;
// show error header once if there were validation errors
if (!nested && showErrors && (getValidationHandler().hasErrors())) {
result.append("<div class=\"ui-widget\">");
result.append("<div class=\"ui-state-error ui-corner-all\" style=\"padding: 0pt 0.7em;\"><div style=\"padding: 3px 0;\">");
result.append("<span class=\"ui-icon ui-icon-alert\" style=\"float: left; margin-right: 0.3em;\"></span>");
boolean differentLocaleErrors = false;
if ((getValidationHandler().getErrors(getElementLocale()) == null)
|| (getValidationHandler().getErrors().size() > getValidationHandler().getErrors(getElementLocale()).size())) {
differentLocaleErrors = true;
result.append("<span id=\"xmlerrordialogbutton\" class=\"ui-icon ui-icon-newwin\" style=\"float: left; margin-right: 0.3em;\"></span>");
}
result.append(key(Messages.ERR_EDITOR_XMLCONTENT_VALIDATION_ERROR_TITLE_0));
result.append("</div>");
// show errors in different locales
if (differentLocaleErrors) {
result.append("<div id=\"xmlerrordialog\" style=\"display: none;\">");
// iterate through all found errors
Map locErrors = getValidationHandler().getErrors();
Iterator locErrorsIter = locErrors.entrySet().iterator();
while (locErrorsIter.hasNext()) {
Map.Entry locEntry = (Map.Entry)locErrorsIter.next();
Locale locale = (Locale)locEntry.getKey();
// skip errors in the actual locale
if (getElementLocale().equals(locale)) {
continue;
}
result.append("<div style=\"padding: 3px;\"><strong>");
result.append(key(
Messages.ERR_EDITOR_XMLCONTENT_VALIDATION_ERROR_LANG_1,
new Object[] {locale.getLanguage()}));
result.append("</strong></div>\n");
result.append("<ul>");
// iterate through the found errors in a different locale
Map elErrors = (Map)locEntry.getValue();
Iterator elErrorsIter = elErrors.entrySet().iterator();
while (elErrorsIter.hasNext()) {
Map.Entry elEntry = (Map.Entry)elErrorsIter.next();
String nodeName = (String)elEntry.getKey();
String errorMsg = (String)elEntry.getValue();
// output the error message
result.append("<li>");
result.append(nodeName);
result.append(": ");
result.append(errorMsg);
result.append("</li>\n");
}
result.append("</ul>");
}
result.append("</div>\n");
result.append("<script type=\"text/javascript\">\n");
result.append("$(\"#xmlerrordialog\").dialog({\n");
result.append("\tautoOpen: true,\n");
result.append("\tbgiframe: true,\n");
result.append("\twidth: 500,\n");
result.append("\tposition: 'center',\n");
result.append("\tdialogClass: 'ui-state-error',\n");
result.append("\ttitle: '").append(key(Messages.ERR_EDITOR_XMLCONTENT_VALIDATION_ERROR_TITLE_0)).append(
"',\n");
result.append("\tmaxHeight: 600\n");
result.append("});\n");
result.append("$(\"#xmlerrordialogbutton\").bind(\"click\", function(e) {$(\"#xmlerrordialog\").dialog(\"open\");});\n");
result.append("</script>");
}
result.append("</div></div>");
}
if (!nested) {
// check if tabs should be shown
useTabs = contentDefinition.getContentHandler().getTabs().size() > 0;
if (useTabs) {
// we have some tabs available, generate them on first level
result.append("<div id=\"xmltabs\" class=\"ui-tabs\">\n<ul>\n");
for (Iterator i = contentDefinition.getContentHandler().getTabs().iterator(); i.hasNext();) {
CmsXmlContentTab tab = (CmsXmlContentTab)i.next();
result.append("\t<li id=\"OcmsTabTab").append(tab.getStartName()).append(
"\"><a href=\"#OcmsTab");
result.append(tab.getStartName());
result.append("\"><span>");
result.append(keyDefault(A_CmsWidget.LABEL_PREFIX
+ contentDefinition.getInnerName()
+ "."
+ tab.getTabName(), tab.getTabName()));
result.append("</span></a></li>\n");
}
result.append("</ul>\n");
}
}
// iterate the type sequence
for (Iterator i = contentDefinition.getTypeSequence().iterator(); i.hasNext();) {
// get the type
I_CmsXmlSchemaType type = (I_CmsXmlSchemaType)i.next();
boolean tabCurrentlyOpened = false;
if (useTabs) {
// check if a tab is starting with this element
for (int tabIndex = 0; tabIndex < contentDefinition.getContentHandler().getTabs().size(); tabIndex++) {
CmsXmlContentTab checkTab = (CmsXmlContentTab)contentDefinition.getContentHandler().getTabs().get(
tabIndex);
if (checkTab.getStartName().equals(type.getName())) {
// a tab is starting, add block element
if (tabOpened) {
// close a previously opened tab
result.append("</table>\n</div>\n");
}
result.append("<div id=\"OcmsTab");
result.append(checkTab.getStartName());
result.append("\" class=\"ui-tabs-hide\">\n");
// set necessary values
tabOpened = true;
tabCurrentlyOpened = true;
collapseLabel = checkTab.isCollapsed();
m_currentTab = checkTab;
currentTabIndex = tabIndex;
// leave loop
break;
}
}
// check if a tab should be preselected for an added, removed or moved up/down element
if (CmsStringUtil.isNotEmpty(getParamElementName())) {
// an element was modified, add JS to preselect tab
String elemName = CmsXmlUtils.getFirstXpathElement(getParamElementName());
if ((m_currentTab != null)
&& (elemName.equals(m_currentTab.getStartName()) || (!CmsXmlUtils.isDeepXpath(getParamElementName()) && type.getName().equals(
elemName)))) {
result.append("<script type=\"text/javascript\">\n\txmlSelectedTab = ").append(
currentTabIndex).append(";\n</script>\n");
}
}
}
if (firstElement || tabCurrentlyOpened) {
// create table before first element or if a tab has been opened before
result.append("<table class=\"xmlTable");
if (nested && !superTabOpened) {
// use other style for nested content definition table if tab was not opened on upper level
result.append("Nested");
}
result.append("\">\n");
firstElement = false;
}
CmsXmlContentDefinition nestedContentDefinition = contentDefinition;
if (!type.isSimpleType()) {
// get nested content definition for nested types
CmsXmlNestedContentDefinition nestedSchema = (CmsXmlNestedContentDefinition)type;
nestedContentDefinition = nestedSchema.getNestedContentDefinition();
}
// create xpath to the current element
String name = pathPrefix + type.getName();
// get the element sequence of the current type
CmsXmlContentValueSequence elementSequence = m_content.getValueSequence(name, getElementLocale());
int elementCount = elementSequence.getElementCount();
// check if value is optional or multiple
boolean addValue = false;
if (elementCount < type.getMaxOccurs()) {
addValue = true;
}
boolean removeValue = false;
if (elementCount > type.getMinOccurs()) {
removeValue = true;
}
// assure that at least one element is present in sequence
boolean disabledElement = false;
if (elementCount < 1) {
// current element is disabled, create dummy element
elementCount = 1;
elementSequence.addValue(getCms(), 0);
disabledElement = true;
m_optionalElementPresent = true;
}
// loop through multiple elements
for (int j = 0; j < elementCount; j++) {
// get value and corresponding widget
I_CmsXmlContentValue value = elementSequence.getValue(j);
I_CmsWidget widget = null;
if (type.isSimpleType()) {
widget = contentDefinition.getContentHandler().getWidget(value);
}
// show errors and/or warnings
String key = value.getPath();
if (showErrors
&& getValidationHandler().hasErrors(getElementLocale())
&& getValidationHandler().getErrors(getElementLocale()).containsKey(key)) {
// show error message
if (collapseLabel) {
result.append("<tr><td class=\"xmlTdError\"><img src=\"");
result.append(getEditorResourceUri());
result.append("error.png\" border=\"0\" alt=\"\" align=\"left\" hspace=\"5\">");
result.append(resolveMacros((String)getValidationHandler().getErrors(getElementLocale()).get(
key)));
result.append("</td><td></td></tr>\n");
} else {
result.append("<tr><td></td><td><img src=\"");
result.append(getEditorResourceUri());
result.append("error.png");
result.append("\" border=\"0\" alt=\"\"></td><td class=\"xmlTdError\">");
result.append(resolveMacros((String)getValidationHandler().getErrors(getElementLocale()).get(
key)));
result.append("</td><td></td></tr>\n");
}
// mark tab as error tab if tab is present
String elemName = CmsXmlUtils.getFirstXpathElement(value.getPath());
if (((m_currentTab != null) && !m_errorTabs.contains(m_currentTab))
&& (elemName.equals(m_currentTab.getStartName()) || (!CmsXmlUtils.isDeepXpath(value.getPath()) && type.getName().equals(
elemName)))) {
m_errorTabs.add(m_currentTab);
}
}
// warnings can be additional to errors
if (showErrors
&& getValidationHandler().hasWarnings(getElementLocale())
&& getValidationHandler().getWarnings(getElementLocale()).containsKey(key)) {
// show warning message
if (collapseLabel) {
result.append("<tr><td class=\"xmlTdError\"><img src=\"");
result.append(getEditorResourceUri());
result.append("warning.png\" border=\"0\" alt=\"\" align=\"left\" hspace=\"5\">");
result.append(resolveMacros((String)getValidationHandler().getWarnings(getElementLocale()).get(
key)));
result.append("</td><td></td></tr>\n");
} else {
result.append("<tr><td></td><td><img src=\"");
result.append(getEditorResourceUri());
result.append("warning.png");
result.append("\" border=\"0\" alt=\"\"></td><td class=\"xmlTdWarning\">");
result.append(resolveMacros((String)getValidationHandler().getWarnings(getElementLocale()).get(
key)));
result.append("</td><td></td></tr>\n");
}
// mark tab as warning tab if tab is present
String elemName = CmsXmlUtils.getFirstXpathElement(value.getPath());
if (((m_currentTab != null) && !m_warningTabs.contains(m_currentTab))
&& (elemName.equals(m_currentTab.getStartName()) || (!CmsXmlUtils.isDeepXpath(value.getPath()) && type.getName().equals(
elemName)))) {
m_warningTabs.add(m_currentTab);
}
}
// create label and help bubble cells
result.append("<tr>");
if (!collapseLabel) {
result.append("<td class=\"xmlLabel");
if (disabledElement) {
// element is disabled, mark it with css
result.append("Disabled");
}
result.append("\">");
result.append(keyDefault(A_CmsWidget.getLabelKey((I_CmsWidgetParameter)value), value.getName()));
if (elementCount > 1) {
result.append(" [").append(value.getIndex() + 1).append("]");
}
result.append(": </td>");
if (showHelpBubble && (widget != null) && (value.getIndex() == 0)) {
// show help bubble only on first element of each content definition
result.append(widget.getHelpBubble(getCms(), this, (I_CmsWidgetParameter)value));
} else {
// create empty cell for all following elements
result.append(buttonBarSpacer(16));
}
}
// append individual widget html cell if element is enabled
if (!disabledElement) {
if (widget == null) {
// recurse into nested type sequence
String newPath = CmsXmlUtils.createXpathElement(value.getName(), value.getIndex() + 1);
result.append("<td class=\"maxwidth\">");
boolean showHelp = (j == 0);
superTabOpened = !nested && tabOpened && collapseLabel;
result.append(getXmlEditorForm(
nestedContentDefinition,
pathPrefix + newPath + "/",
showHelp,
superTabOpened));
result.append("</td>");
} else {
// this is a simple type, display widget
result.append(widget.getDialogWidget(getCms(), this, (I_CmsWidgetParameter)value));
}
} else {
// disabled element, show message for optional element
result.append("<td class=\"xmlTdDisabled maxwidth\">");
result.append(key(Messages.GUI_EDITOR_XMLCONTENT_OPTIONALELEMENT_0));
result.append("</td>");
}
// append element operation (add, remove, move) buttons if required
result.append(buildElementButtons(name, value.getIndex(), addValue, removeValue));
// close row
result.append("</tr>\n");
}
}
// close table
result.append("</table>\n");
if (tabOpened) {
// close last open tab
result.append("</div>\n");
}
if (!nested && useTabs) {
// close block element around tabs
result.append("</div>\n");
// mark eventual warning and error tabs
result.append("<script type=\"text/javascript\">\n");
for (Iterator i = m_warningTabs.iterator(); i.hasNext();) {
CmsXmlContentTab checkTab = (CmsXmlContentTab)i.next();
if (!m_errorTabs.contains(checkTab)) {
result.append("\txmlWarningTabs[xmlWarningTabs.length] = \"").append(checkTab.getStartName()).append(
"\";\n");
}
}
for (Iterator i = m_errorTabs.iterator(); i.hasNext();) {
CmsXmlContentTab checkTab = (CmsXmlContentTab)i.next();
result.append("\txmlErrorTabs[xmlErrorTabs.length] = \"").append(checkTab.getStartName()).append(
"\";\n");
}
result.append("</script>\n");
}
} catch (Throwable t) {
LOG.error(Messages.get().getBundle().key(Messages.ERR_XML_EDITOR_0), t);
}
return result;
}
/**
* Resets the error handler member variable to reinitialize the error messages.<p>
*/
private void resetErrorHandler() {
m_validationHandler = null;
}
/**
* Resets the widget collector member variable to reinitialize the widgets.<p>
*
* This is needed to display the help messages of optional elements before building the html end of the form.<p>
*/
private void resetWidgetCollector() {
m_widgetCollector = null;
}
/**
* Writes the xml content to the vfs and re-initializes the member variables.<p>
*
* @throws CmsException if writing the file fails
*/
private void writeContent() throws CmsException {
String decodedContent = m_content.toString();
try {
m_file.setContents(decodedContent.getBytes(getFileEncoding()));
} catch (UnsupportedEncodingException e) {
throw new CmsException(Messages.get().container(Messages.ERR_INVALID_CONTENT_ENC_1, getParamResource()), e);
}
// the file content might have been modified during the write operation
m_file = getCloneCms().writeFile(m_file);
m_content = CmsXmlContentFactory.unmarshal(getCloneCms(), m_file);
}
}
|
package water.persist;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.auth.*;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.regions.RegionUtils;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.S3ClientOptions;
import com.amazonaws.services.s3.model.*;
import org.apache.log4j.Logger;
import water.*;
import water.fvec.FileVec;
import water.fvec.S3FileVec;
import water.fvec.Vec;
import water.util.ByteStreams;
import water.util.RIStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.*;
import static water.H2O.OptArgs.SYSTEM_PROP_PREFIX;
/** Persistence backend for S3 */
public final class PersistS3 extends Persist {
private static final Logger LOG = Logger.getLogger(PersistS3.class);
private static final String KEY_PREFIX = "s3://";
private static final int KEY_PREFIX_LEN = KEY_PREFIX.length();
private static final Object _lock = new Object();
private static volatile AmazonS3 _s3;
public static AmazonS3 getClient() {
if (_s3 == null) {
synchronized (_lock) {
if( _s3 == null ) {
try {
H2OAWSCredentialsProviderChain c = new H2OAWSCredentialsProviderChain();
c.setReuseLastProvider(false);
ClientConfiguration cc = s3ClientCfg();
_s3 = configureClient(new AmazonS3Client(c, cc));
} catch( Throwable e ) {
e.printStackTrace();
StringBuilder msg = new StringBuilder();
msg.append(e.getMessage() + "\n");
msg.append("Unable to load S3 credentials.");
throw new RuntimeException(msg.toString(), e);
}
}
}
}
return _s3;
}
/** Modified version of the default AWS credentials provider chain which also includes
 * H2O-specific credentials providers.
*/
public static class H2OAWSCredentialsProviderChain extends AWSCredentialsProviderChain {
public H2OAWSCredentialsProviderChain() {
super(constructProviderChain());
}
private static List<AWSCredentialsProvider> constructProviderChain() {
final List<AWSCredentialsProvider> providers = new ArrayList<>();
providers.add(new H2ODynamicCredentialsProvider());
providers.add(new H2OArgCredentialsProvider());
providers.add(new InstanceProfileCredentialsProvider());
providers.add(new EnvironmentVariableCredentialsProvider());
providers.add(new SystemPropertiesCredentialsProvider());
providers.add(new ProfileCredentialsProvider());
return providers;
}
}
/**
* Provides credentials (secret key ID + secret access key, optionally with a session token) stored dynamically in the DKV.
*/
private static final class H2ODynamicCredentialsProvider implements AWSCredentialsProvider {
@Override
public AWSCredentials getCredentials() {
final IcedS3Credentials s3Credentials = DKV.getGet(IcedS3Credentials.S3_CREDENTIALS_DKV_KEY);
if (s3Credentials != null && s3Credentials.isAWSCredentialsAuth()) {
return new BasicAWSCredentials(s3Credentials._secretKeyId, s3Credentials._secretAccessKey);
} else if (s3Credentials != null && s3Credentials.isAWSSessionTokenAuth()) {
return new BasicSessionCredentials(s3Credentials._secretKeyId, s3Credentials._secretAccessKey,
s3Credentials._sessionToken);
} else {
throw new AmazonClientException("No Amazon S3 credentials set directly.");
}
}
@Override
public void refresh() {
// No actions taken on refresh
}
}
/** A simple credentials provider reading file-based credentials from given
* command argument <code>--aws_credentials</code>.
*/
static class H2OArgCredentialsProvider implements AWSCredentialsProvider {
// Default location of the AWS credentials file
public static final String DEFAULT_CREDENTIALS_LOCATION = "AwsCredentials.properties";
@Override public AWSCredentials getCredentials() {
File credentials = new File(H2O.ARGS.aws_credentials != null ? H2O.ARGS.aws_credentials : DEFAULT_CREDENTIALS_LOCATION);
try {
return new PropertiesCredentials(credentials);
} catch (IOException e) {
LOG.debug(
"Unable to load AWS credentials from file " + credentials +
"; exists? " + credentials.exists() + ", canRead? " + credentials.canRead() +
", size=" + credentials.length() + "; problem: " + e.getMessage());
throw new AmazonClientException(
"PersistS3. Unable to load AWS credentials from file " + credentials + ": " + e.getMessage());
}
}
@Override public void refresh() {}
@Override
public String toString() {
return getClass().getSimpleName();
}
}
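/*
 * Minimal sketch of the expected credentials file format (values are placeholders).
 * The AWS SDK's PropertiesCredentials reads two properties, typically named as below:
 *
 *   accessKey=AKIAXXXXXXXXXXXXXXXX
 *   secretKey=****************************************
 *
 * The file is passed via --aws_credentials or read from AwsCredentials.properties in the
 * working directory.
 */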
public static final class H2SO3InputStream extends RIStream {
Key _k;
long _to;
String[] _bk;
@Override protected InputStream open(long offset) {
return getClient().getObject(new GetObjectRequest(_bk[0], _bk[1]).withRange(offset, _to)).getObjectContent();
}
public H2SO3InputStream(Key k, ProgressMonitor pmon) {
this(k, pmon, 0, Long.MAX_VALUE);
}
public H2SO3InputStream(Key k, ProgressMonitor pmon, long from, long to) {
super(from, pmon);
_k = k;
_to = Math.min(DKV.get(k)._max - 1, to);
_bk = decodeKey(k);
open();
}
}
@Override
public InputStream open(String path) {
String[] bk = decodePath(path);
GetObjectRequest r = new GetObjectRequest(bk[0], bk[1]);
S3Object s3obj = getClient().getObject(r);
return s3obj.getObjectContent();
}
public static InputStream openStream(Key k, RIStream.ProgressMonitor pmon) throws IOException {
return new H2SO3InputStream(k, pmon);
}
public static Key loadKey(ObjectListing listing, S3ObjectSummary obj) throws IOException {
// Note: some S3 implementations (e.g., Minio) do not fill in the bucket name of the returned object summary,
// so fall back to the bucket name of the ObjectListing
String bucketName = obj.getBucketName() == null ? listing.getBucketName() : obj.getBucketName();
return S3FileVec.make(encodePath(bucketName, obj.getKey()),obj.getSize());
}
private static void processListing(ObjectListing listing, String pattern, ArrayList<String> succ, ArrayList<String> fail, boolean doImport) {
if( pattern != null && pattern.isEmpty()) pattern = null;
for( S3ObjectSummary obj : listing.getObjectSummaries() ) {
if (obj.getKey().endsWith("/")) continue;
if (pattern != null && !obj.getKey().matches(pattern)) continue;
try {
if (doImport) {
Key k = loadKey(listing, obj);
succ.add(k.toString());
} else {
succ.add(obj.getKey());
}
} catch( IOException e ) {
fail.add(obj.getKey());
}
}
}
public void importFiles(String path, String pattern, ArrayList<String> files, ArrayList<String> keys, ArrayList<String> fails, ArrayList<String> dels) {
LOG.info("ImportS3 processing (" + path + ")");
// List of processed files
AmazonS3 s3 = getClient();
String [] parts = decodePath(path);
ObjectListing currentList = s3.listObjects(parts[0], parts[1]);
processListing(currentList, pattern, files, fails, true);
while(currentList.isTruncated()){
currentList = s3.listNextBatchOfObjects(currentList);
processListing(currentList, pattern, files, fails, true);
}
keys.addAll(files);
// write barrier was here : DKV.write_barrier();
}
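/*
 * Illustrative sketch (bucket and pattern are hypothetical): importing every CSV key under
 * a prefix. Note that the pattern is applied with String.matches(), i.e. it must be a
 * regular expression that matches the whole key name.
 *
 *   ArrayList<String> files = new ArrayList<>(), keys = new ArrayList<>(),
 *                     fails = new ArrayList<>(), dels = new ArrayList<>();
 *   new PersistS3().importFiles("s3://my-bucket/data", ".*\\.csv", files, keys, fails, dels);
 */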
// Read up to 'len' bytes of Value. Value should already be persisted to
// disk. A racing delete can trigger a failure where we get a null return,
// but no crash (although one could argue that a racing load&delete is a bug
// no matter what).
@Override public byte[] load(Value v) {
long start_io_ms = System.currentTimeMillis();
byte[] b = MemoryManager.malloc1(v._max);
Key k = v._key;
long skip = 0;
// Skip offset based on chunk number
if(k._kb[0] == Key.CHK)
skip = FileVec.chunkOffset(k); // The offset
// To complicate matters, S3 likes to reset connections when H2O hits it
// too hard. We "fix" this by just trying again, assuming we are being
// throttled by a bogus resource limit (an H2O parse can look like a DDoS
// to Amazon S3).
S3ObjectInputStream s = null;
while( true ) { // Loop, in case we get premature EOF's
try {
long start_ns = System.nanoTime(); // Blocking i/o call timing - without counting repeats
s = getObjectForKey(k, skip, v._max).getObjectContent();
ByteStreams.readFully(s, b); // delegate to ByteStreams.readFully, which loops internally until the buffer is completely filled
assert v.isPersisted();
// TimeLine.record_IOclose(start_ns, start_io_ms, 1/* read */, v._max, Value.S3);
return b;
// Explicitly ignore the following exceptions but
// fail on the rest IOExceptions
} catch( IOException e ) {
ignoreAndWait(e);
} finally {
try {
if( s != null ) s.close();
} catch( IOException e ) {}
}
}
}
private static void ignoreAndWait(final Exception e) {
LOG.debug("Hit the S3 reset problem, waiting and retrying...", e);
try {
Thread.sleep(500);
} catch( InterruptedException ie ) {}
}
// Store Value v to disk.
@Override public void store(Value v) {
if( !v._key.home() ) return;
throw H2O.unimpl(); // VA only
}
/**
* Creates the key for given S3 bucket and key. Returns the H2O key, or null if the key cannot be
* created.
*
* @param bucket
* Bucket name
* @param key
* Key name (S3)
* @return H2O key pointing to the given bucket and key.
*/
public static Key encodeKey(String bucket, String key) {
Key res = encodeKeyImpl(bucket, key);
// assert checkBijection(res, bucket, key);
return res;
}
/**
* Decodes the given H2O key to the S3 bucket and key name. Returns the array of two strings,
* first one is the bucket name and second one is the key name.
*
* @param k
* Key to be decoded.
* @return Pair (array) of bucket name and key name.
*/
public static String[] decodeKey(Key k) {
return decodeKeyImpl(k);
// assert checkBijection(k, res[0], res[1]);
// return res;
}
// private static boolean checkBijection(Key k, String bucket, String key) {
// Key en = encodeKeyImpl(bucket, key);
// String[] de = decodeKeyImpl(k);
// boolean res = Arrays.equals(k._kb, en._kb) && bucket.equals(de[0]) && key.equals(de[1]);
// assert res : "Bijection failure:" + "\n\tKey 1:" + k + "\n\tKey 2:" + en + "\n\tBkt 1:" + bucket + "\n\tBkt 2:"
// + de[0] + "\n\tStr 1:" + key + "\n\tStr 2:" + de[1] + "";
// return res;
// }
private static String encodePath(String bucket, String key){
return KEY_PREFIX + bucket + '/' + key;
}
private static Key encodeKeyImpl(String bucket, String key) {
return Key.make(KEY_PREFIX + bucket + '/' + key);
}
/**
* Decompose S3 name into bucket name and key name
*
* @param s generic s3 path (e.g., "s3://bucketname/my/directory/file.ext")
* @return array of { bucket name, key }
*/
private static String [] decodePath(String s) {
assert s.startsWith(KEY_PREFIX) && s.indexOf('/') >= 0 : "Attempting to decode non s3 key: " + s;
s = s.substring(KEY_PREFIX_LEN);
int dlm = s.indexOf('/');
if(dlm < 0) return new String[]{s,null};
String bucket = s.substring(0, dlm);
String key = s.substring(dlm + 1);
return new String[] { bucket, key };
}
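/*
 * Worked example (paths are hypothetical):
 *   decodePath("s3://my-bucket/dir/file.csv")  ->  { "my-bucket", "dir/file.csv" }
 *   decodePath("s3://my-bucket")               ->  { "my-bucket", null }
 */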
private static String[] decodeKeyImpl(Key k) {
String s = new String((k._kb[0] == Key.CHK)?Arrays.copyOfRange(k._kb, Vec.KEY_PREFIX_LEN, k._kb.length):k._kb);
return decodePath(s);
}
// Gets the S3 object associated with the key that can read length bytes from offset
private static S3Object getObjectForKey(Key k, long offset, long length) throws IOException {
String[] bk = decodeKey(k);
GetObjectRequest r = new GetObjectRequest(bk[0], bk[1]);
r.setRange(offset, offset + length - 1); // the S3 byte range is inclusive, so the last byte read is at offset + length - 1
return getClient().getObject(r);
}
// Gets the object metadata associated with given key.
private static ObjectMetadata getObjectMetadataForKey(Key k) {
String[] bk = decodeKey(k);
assert (bk.length == 2);
return getClient().getObjectMetadata(bk[0], bk[1]);
}
/** S3 socket timeout property name */
public final static String S3_SOCKET_TIMEOUT_PROP = SYSTEM_PROP_PREFIX + "persist.s3.socketTimeout";
/** S3 connection timeout property name */
public final static String S3_CONNECTION_TIMEOUT_PROP = SYSTEM_PROP_PREFIX + "persist.s3.connectionTimeout";
/** S3 maximal error retry number */
public final static String S3_MAX_ERROR_RETRY_PROP = SYSTEM_PROP_PREFIX + "persist.s3.maxErrorRetry";
/** S3 maximal http connections */
public final static String S3_MAX_HTTP_CONNECTIONS_PROP = SYSTEM_PROP_PREFIX + "persist.s3.maxHttpConnections";
/** S3 force HTTP traffic */
public final static String S3_FORCE_HTTP = SYSTEM_PROP_PREFIX + "persist.s3.force.http";
public final static String S3_END_POINT = SYSTEM_PROP_PREFIX + "persist.s3.endPoint";
/** S3 region, for example "us-east-1",
* see {@link com.amazonaws.regions.Region#getRegion(com.amazonaws.regions.Regions)} for region list */
public final static String S3_REGION = SYSTEM_PROP_PREFIX + "persist.s3.region";
/** Enable S3 path style access via setting the property to true.
* See: {@link com.amazonaws.services.s3.S3ClientOptions#setPathStyleAccess(boolean)} */
public final static String S3_ENABLE_PATH_STYLE = SYSTEM_PROP_PREFIX + "persist.s3.enable.path.style";
static ClientConfiguration s3ClientCfg() {
ClientConfiguration cfg = new ClientConfiguration();
Properties prop = System.getProperties();
if (prop.containsKey(S3_SOCKET_TIMEOUT_PROP)) cfg.setSocketTimeout(Integer.getInteger(S3_SOCKET_TIMEOUT_PROP));
if (prop.containsKey(S3_CONNECTION_TIMEOUT_PROP)) cfg.setConnectionTimeout(Integer.getInteger(S3_CONNECTION_TIMEOUT_PROP));
if (prop.containsKey(S3_MAX_ERROR_RETRY_PROP)) cfg.setMaxErrorRetry(Integer.getInteger(S3_MAX_ERROR_RETRY_PROP));
if (prop.containsKey(S3_MAX_HTTP_CONNECTIONS_PROP)) cfg.setMaxConnections(Integer.getInteger(S3_MAX_HTTP_CONNECTIONS_PROP));
if (prop.containsKey(S3_FORCE_HTTP)) cfg.setProtocol(Protocol.HTTP);
return cfg;
}
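/*
 * Minimal sketch of how the settings above can be supplied on the JVM command line
 * (<prefix> stands for whatever H2O.OptArgs.SYSTEM_PROP_PREFIX expands to):
 *
 *   java -D<prefix>persist.s3.socketTimeout=60000 \
 *        -D<prefix>persist.s3.connectionTimeout=10000 \
 *        -D<prefix>persist.s3.maxErrorRetry=5 ... -jar h2o.jar
 */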
static AmazonS3Client configureClient(AmazonS3Client s3Client) {
if (System.getProperty(S3_REGION) != null) {
String region = System.getProperty(S3_REGION);
LOG.debug(String.format("S3 region specified: %s", region) );
s3Client.setRegion(RegionUtils.getRegion(region));
}
// Region overrides end-point settings
if (System.getProperty(S3_END_POINT) != null) {
String endPoint = System.getProperty(S3_END_POINT);
LOG.debug(String.format("S3 endpoint specified: %s", endPoint));
s3Client.setEndpoint(endPoint);
}
if (System.getProperty(S3_ENABLE_PATH_STYLE) != null && Boolean.valueOf(System.getProperty(S3_ENABLE_PATH_STYLE))) {
LOG.debug("S3 path style access enabled");
S3ClientOptions sco = new S3ClientOptions();
sco.setPathStyleAccess(true);
s3Client.setS3ClientOptions(sco);
}
return s3Client;
}
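/*
 * Illustrative sketch for pointing the client at an S3-compatible store such as Minio
 * (the endpoint value is hypothetical): set the endpoint and enable path-style access
 * via the system properties consumed above, e.g.
 *
 *   -D<prefix>persist.s3.endPoint=http://minio.example.com:9000
 *   -D<prefix>persist.s3.enable.path.style=true
 */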
@Override public void delete(Value v) {
throw new UnsupportedOperationException();
}
@Override
public Key uriToKey(URI uri) throws IOException {
AmazonS3 s3 = getClient();
// Decompose URI into bucket, key
String [] parts = decodePath(uri.toString());
try {
ObjectMetadata om = s3.getObjectMetadata(parts[0], parts[1]);
// Voila: create S3 specific key pointing to the file
return S3FileVec.make(encodePath(parts[0], parts[1]), om.getContentLength());
} catch (AmazonServiceException e) {
if (e.getErrorCode().contains("404")) {
throw new IOException(e);
} else {
LOG.error("AWS failed for " + Arrays.toString(parts) + ": " + e.getMessage());
throw e;
}
}
}
@Override
public void cleanUp() { throw H2O.unimpl(); /* user-mode swapping not implemented */ }
static class Cache {
long _lastUpdated = 0;
long _timeoutMillis = 5*60*1000;
String [] _cache = new String[0];
public boolean containsKey(String k) { return Arrays.binarySearch(_cache,k) >= 0;}
protected String [] update(){
LOG.debug("Renewing S3 bucket cache.");
List<Bucket> l = getClient().listBuckets();
String [] cache = new String[l.size()];
int i = 0;
for (Bucket b : l) cache[i++] = b.getName();
Arrays.sort(cache);
return _cache = cache;
}
protected String wrapKey(String s) { return "s3://" + s; }
public ArrayList<String> fetch(String filter, int limit) {
String [] cache = _cache;
if(System.currentTimeMillis() > _lastUpdated + _timeoutMillis) {
cache = update();
_lastUpdated = System.currentTimeMillis();
}
ArrayList<String> res = new ArrayList<>();
int i = Arrays.binarySearch(cache, filter);
if (i < 0) i = -i - 1;
while (i < cache.length && cache[i].startsWith(filter) && (limit < 0 || res.size() < limit))
res.add(wrapKey(cache[i++]));
return res;
}
}
private static class KeyCache extends Cache {
private final String _keyPrefix;
private final String _bucket;
public KeyCache(String bucket){
_bucket = bucket;
_keyPrefix = super.wrapKey(bucket) + "/";
}
@Override
protected String [] update(){
LOG.debug("Renewing S3 cache.");
AmazonS3 s3 = getClient();
ObjectListing currentList = s3.listObjects(_bucket,"");
ArrayList<String> res = new ArrayList<>();
processListing(currentList, null, res, null, false);
while(currentList.isTruncated()){
currentList = s3.listNextBatchOfObjects(currentList);
processListing(currentList, null, res, null, false);
}
Collections.sort(res);
return _cache = res.toArray(new String[res.size()]);
}
@Override
protected String wrapKey(String s) {
return _keyPrefix + s;
}
}
static volatile Cache _bucketCache = new Cache();
static volatile HashMap<String, KeyCache> _keyCaches = new HashMap<>();
@Override
public List<String> calcTypeaheadMatches(String filter, int limit) {
String [] parts = decodePath(filter);
if(parts[1] != null) { // bucket and key prefix
if(_keyCaches.get(parts[0]) == null) {
if(!getClient().doesBucketExist(parts[0]))
return new ArrayList<>();
_keyCaches.put(parts[0], new KeyCache(parts[0]));
}
return _keyCaches.get(parts[0]).fetch(parts[1],limit);
} else { // no key, only bucket prefix
return _bucketCache.fetch(parts[0],limit);
}
}
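/*
 * Illustrative sketch (names are hypothetical): with a bucket "my-bucket" containing the
 * keys "data/a.csv" and "data/b.csv",
 *
 *   calcTypeaheadMatches("s3://my-b", 10)           -> [ "s3://my-bucket" ]
 *   calcTypeaheadMatches("s3://my-bucket/data", 10) -> [ "s3://my-bucket/data/a.csv",
 *                                                        "s3://my-bucket/data/b.csv" ]
 */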
}
|
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.json.JSONException;
import org.json.JSONObject;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.*;
public class Main extends HttpServlet{
private static String TABLE_CREATION = "CREATE TABLE IF NOT EXISTS profile (user_id SERIAL, name varchar(32), " +
"about_me varchar(1024), village varchar(32), zip_code int, phone_number varchar(16), email varchar(32));";
private static String TABLE_CREATION_2 = "CREATE TABLE IF NOT EXISTS Connections (requester_id int, target_id int, status varchar(32));";
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
Connection connection = null;
try {
connection = getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate(TABLE_CREATION);
stmt.executeUpdate(TABLE_CREATION_2);
}
catch (Exception e) {
resp.setStatus(500);
resp.getWriter().print("Table creation error: " + e.getMessage());
}
StringBuffer jb = new StringBuffer();
String line;
try {
BufferedReader reader = req.getReader();
while ((line = reader.readLine()) != null)
jb.append(line);
}
catch (IOException e) {
resp.setStatus(400);
resp.getWriter().print("Couldn't read in request body: " + getStackTrace(e));
}
try {
JSONObject jsonObject = new JSONObject(jb.toString());
if (req.getRequestURI().endsWith("/createAccount")) {
String name = jsonObject.getString("name");
String about_me = jsonObject.getString("about_me");
String village = jsonObject.getString("village");
int zip_code = jsonObject.getInt("zip_code");
String phone_number = jsonObject.getString("phone_number");
String email = jsonObject.getString("email");
String update_sql = "INSERT INTO profile (name, about_me, village, zip_code, phone_number, email) VALUES (?, ?, ?, ?, ?, ?)";
try {
PreparedStatement stmt = connection.prepareStatement(update_sql);
stmt.setString(1, name);
stmt.setString(2, about_me);
stmt.setString(3, village);
stmt.setInt(4, zip_code);
stmt.setString(5, phone_number);
stmt.setString(6, email);
stmt.executeUpdate();
stmt.close();
}
catch (SQLException e) {
resp.getWriter().print("SQL ERROR @POST: " + getStackTrace(e));
}
}
else {
resp.setStatus(404);
}
}
catch (JSONException e1) {
resp.setStatus(400);
resp.getWriter().print("Error parsing request JSON: " + getStackTrace(e1));
}
finally {
try {
if (connection != null) connection.close();
}
catch (SQLException e) {
resp.getWriter().print("Failed to close connection: " + getStackTrace(e));
}
}
}
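/*
 * Illustrative request sketch (host and field values are hypothetical):
 *
 *   curl -X POST http://localhost:5000/createAccount \
 *        -H "Content-Type: application/json" \
 *        -d '{"name":"Ada","about_me":"Hi","village":"Springfield",
 *             "zip_code":12345,"phone_number":"555-0100","email":"ada@example.com"}'
 */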
private static Connection getConnection() throws URISyntaxException, SQLException {
URI dbUri = new URI(System.getenv("DATABASE_URL"));
String username = dbUri.getUserInfo().split(":")[0];
String password = dbUri.getUserInfo().split(":")[1];
String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + ':' + dbUri.getPort() + dbUri.getPath();
return DriverManager.getConnection(dbUrl, username, password);
}
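/*
 * Worked example (credentials are placeholders): for
 *   DATABASE_URL=postgres://user:secret@ec2-host.compute-1.amazonaws.com:5432/dbname
 * this yields username "user", password "secret" and the JDBC URL
 *   jdbc:postgresql://ec2-host.compute-1.amazonaws.com:5432/dbname
 */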
public static void main(String[] args) throws Exception {
Server server = new Server(Integer.valueOf(System.getenv("PORT")));
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
context.setContextPath("/");
server.setHandler(context);
|
package io.sweers.barber;
import android.util.AttributeSet;
import android.util.Log;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Entry point for applications.
* <p>
* Use one of the {@link #style(java.lang.Object, android.util.AttributeSet, int[])} variants to
* style your custom views.
*/
public class Barber {
public static final String SUFFIX = "$$Barbershop";
public static final String ANDROID_PREFIX = "android.";
public static final String JAVA_PREFIX = "java.";
private static final String TAG = "Barber";
private static final IBarbershop<Object> NO_OP = null;
private static boolean debug = false;
private static final Map<Class<?>, IBarbershop<Object>> BARBERSHOPS = new LinkedHashMap<>();
public static void style(Object target, AttributeSet set, int[] attrs) {
style(target, set, attrs, 0);
}
public static void style(Object target, AttributeSet set, int[] attrs, int defStyleAttr) {
style(target, set, attrs, defStyleAttr, 0);
}
public static void style(Object target, AttributeSet set, int[] attrs, int defStyleAttr, int defStyleRes) {
Class<?> targetClass = target.getClass();
if (debug) {
Log.d(TAG, "Looking up barbershop for " + targetClass.getName());
}
IBarbershop<Object> barbershop = findBarbershopForClass(targetClass);
if (barbershop != NO_OP) {
barbershop.style(target, set, attrs, defStyleAttr, defStyleRes);
}
}
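/*
 * Illustrative usage sketch (the view class and styleable array are hypothetical):
 *
 *   public class BarberView extends View {
 *       public BarberView(Context context, AttributeSet attrs) {
 *           super(context, attrs);
 *           Barber.style(this, attrs, R.styleable.BarberView);
 *       }
 *   }
 */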
/**
* Searches for $$Barbershop class for the given instance, cached for efficiency.
*
* @param cls Source class to find a matching $$Barbershop class for
* @return $$Barbershop class instance
*/
private static IBarbershop<Object> findBarbershopForClass(Class<?> cls) {
IBarbershop<Object> barbershop = BARBERSHOPS.get(cls);
if (barbershop != null) {
if (debug) Log.d(TAG, "HIT: Cached in barbershop map.");
return barbershop;
}
String clsName = cls.getName();
if (clsName.startsWith(ANDROID_PREFIX) || clsName.startsWith(JAVA_PREFIX)) {
if (debug) {
Log.d(TAG, "MISS: Reached framework class. Abandoning search.");
}
return NO_OP;
}
//noinspection TryWithIdenticalCatches
try {
Class<?> barbershopClass = Class.forName(clsName + SUFFIX);
//noinspection unchecked
barbershop = (IBarbershop<Object>) barbershopClass.newInstance();
if (debug) {
Log.d(TAG, "HIT: Class loaded barbershop class.");
}
} catch (ClassNotFoundException e) {
if (debug) {
Log.d(TAG, "Not found. Trying superclass " + cls.getSuperclass().getName());
}
barbershop = findBarbershopForClass(cls.getSuperclass());
} catch (InstantiationException e) {
Log.e(TAG, e.getMessage());
} catch (IllegalAccessException e) {
Log.e(TAG, e.getMessage());
}
BARBERSHOPS.put(cls, barbershop);
return barbershop;
}
/** DO NOT USE. Exposed for generated classes' use. */
public interface IBarbershop<T> {
public void style(final T target, final AttributeSet set, final int[] attrs, final int defStyleAttr, final int defStyleRes);
}
}
|
package mmlib4j.descriptors;
import mmlib4j.filtering.Histogram;
import mmlib4j.gui.WindowImages;
import mmlib4j.images.GrayScaleImage;
import mmlib4j.images.impl.ImageFactory;
import mmlib4j.images.impl.PixelIndexer;
import mmlib4j.utils.ImageBuilder;
public class LocalBinaryPatterns {
int adjX[];
int adjY[];
private static int lutLBPUniform[] = {0, 1, 2, 3, 4, 58 , 5, 6, 7, 58 , 58 , 58 , 8, 58 , 9, 10, 11, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 12, 58 , 58 , 58 , 13, 58 , 14, 15, 16, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 17, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 18, 58 , 58 , 58 , 19, 58 , 20, 21, 22, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 23, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 24, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 25, 58 , 58 , 58 , 26, 58 , 27, 28, 29, 30, 58 , 31, 58 , 58 , 58 , 32, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 33, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 34, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 35, 36, 37, 58 , 38, 58 , 58 , 58 , 39, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 40, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 58 , 41, 42, 43, 58 , 44, 58 , 58 , 58 , 45, 58 , 58 , 58 , 58 , 58 , 58 , 58 , 46, 47, 48, 58 , 49, 58 , 58 , 58 , 50, 51, 52, 58 , 53, 54, 55, 56, 57};
public LocalBinaryPatterns(){
adjX = new int[8];
adjY = new int[8];
adjX[0] = -1; adjY[0] = 0;
adjX[1] = -1; adjY[1] = +1;
adjX[2] = 0; adjY[2] = +1;
adjX[3] = +1; adjY[3] = +1;
adjX[4] = +1; adjY[4] = 0;
adjX[5] = +1; adjY[5] = -1;
adjX[6] = 0; adjY[6] = -1;
adjX[7] = -1; adjY[7] = -1;
}
public int getLBP(GrayScaleImage img, int px, int py, int threshold){
int qx, qy;
int code = 0;
for(int i=0; i < adjX.length; i++){
qx = px + adjX[i];
qy = py + adjY[i];
if(img.getValue(qx, qy) - img.getValue(px, py) >= threshold){
code += Math.pow(2, i);
}
}
return code;
}
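/*
 * Worked example (pixel values are hypothetical): for a centre value of 90 and the eight
 * neighbours [80, 95, 100, 70, 90, 60, 120, 85] visited in the adjX/adjY order with
 * threshold 0, the comparisons (neighbour - centre >= 0) give the bits 0,1,1,0,1,0,1,0,
 * so the code is 2^1 + 2^2 + 2^4 + 2^6 = 86.
 */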
public int getLBP(GrayScaleImage img, int px, int py){
return getLBP(img, px, py, 0);
}
public int getLBPUniform(GrayScaleImage img, int px, int py, int threshold){
return lutLBPUniform[getLBP(img, px, py, threshold)];
}
public int getLBPUniform(GrayScaleImage img, int px, int py){
return lutLBPUniform[getLBP(img, px, py)];
}
/**
* Computes the uniform LBP code for every pixel of the input image. Output values are in the range 0 to 58.
* @param img the input grayscale image
* @return a new image holding the uniform LBP code of each pixel
*/
public GrayScaleImage computerLBPUniform(GrayScaleImage img){
PixelIndexer nearest = PixelIndexer.getNearestBorderIndexer(img.getWidth(), img.getHeight());
PixelIndexer pindex = img.getPixelIndexer();
img.setPixelIndexer( nearest );
GrayScaleImage imgLBPUniform = ImageFactory.createGrayScaleImage(ImageFactory.DEPTH_8BITS, img.getWidth(), img.getHeight());
for(int x=0; x < img.getWidth(); x++){
for(int y=0; y < img.getHeight(); y++){
imgLBPUniform.setPixel(x, y, getLBPUniform(img, x, y));
}
}
img.setPixelIndexer( pindex );
return imgLBPUniform;
}
/**
* Computes the plain LBP code for every pixel of the input image. Output values are in the range 0 to 255.
* @param img the input grayscale image
* @return a new image holding the LBP code of each pixel
*/
public GrayScaleImage computerLBP(GrayScaleImage img){
PixelIndexer nearest = PixelIndexer.getNearestBorderIndexer(img.getWidth(), img.getHeight());
PixelIndexer pindex = img.getPixelIndexer();
img.setPixelIndexer( nearest );
GrayScaleImage imgLBPUniform = ImageFactory.createGrayScaleImage(ImageFactory.DEPTH_8BITS, img.getWidth(), img.getHeight());
for(int x=0; x < img.getWidth(); x++){
for(int y=0; y < img.getHeight(); y++){
imgLBPUniform.setPixel(x, y, getLBP(img, x, y));
}
}
img.setPixelIndexer( pindex );
return imgLBPUniform;
}
public static void main(String args[]){
GrayScaleImage img = ImageBuilder.openGrayImage();
LocalBinaryPatterns lbp = new LocalBinaryPatterns();
GrayScaleImage imgLBP = lbp.computerLBP(img);
GrayScaleImage imgLBPUniform = lbp.computerLBPUniform(img);
WindowImages.show(img, imgLBP, new Histogram(imgLBPUniform).getGraphic(), new Histogram(imgLBPUniform).equalisation(), new Histogram(imgLBP).getGraphic());
}
/*
private static int binaryNumber(String s){
int code = 0;
for(int i=0; i < s.length(); i++){
if(s.charAt(i)=='1')
code += Math.pow(2, i);
}
if(hist[code] == 1)
System.out.println(s);
hist[code]++;
return code;
}
static int hist[] = new int[256];
public static void main(String args[]){
System.out.println("U=0");
System.out.println(binaryNumber("00000000"));
System.out.println(binaryNumber("11111111"));
System.out.println("\nU=2");
System.out.println(binaryNumber("01111111")); //8
System.out.println(binaryNumber("10111111"));
System.out.println(binaryNumber("11011111"));
System.out.println(binaryNumber("11101111"));
System.out.println(binaryNumber("11110111"));
System.out.println(binaryNumber("11111011"));
System.out.println(binaryNumber("11111101"));
System.out.println(binaryNumber("11111110"));
System.out.println(binaryNumber("00111111")); //7
System.out.println(binaryNumber("10011111"));
System.out.println(binaryNumber("11001111"));
System.out.println(binaryNumber("11100111"));
System.out.println(binaryNumber("11110011"));
System.out.println(binaryNumber("11111001"));
System.out.println(binaryNumber("11111100"));
System.out.println(binaryNumber("01111110"));
System.out.println(binaryNumber("00011111")); //6
System.out.println(binaryNumber("10001111"));
System.out.println(binaryNumber("11000111"));
System.out.println(binaryNumber("11100011"));
System.out.println(binaryNumber("11110001"));
System.out.println(binaryNumber("11111000"));
System.out.println(binaryNumber("01111100")); //2
System.out.println(binaryNumber("00111110"));
System.out.println(binaryNumber("00001111")); //5
System.out.println(binaryNumber("10000111"));
System.out.println(binaryNumber("11000011"));
System.out.println(binaryNumber("11100001"));
System.out.println(binaryNumber("11110000"));
System.out.println(binaryNumber("01111000")); //4
System.out.println(binaryNumber("00111100"));
System.out.println(binaryNumber("00011110"));
//System.out.println(binaryNumber("00001111"));
System.out.println(binaryNumber("00000111")); //4
System.out.println(binaryNumber("10000011"));
System.out.println(binaryNumber("11000001"));
System.out.println(binaryNumber("11100000"));
System.out.println(binaryNumber("01110000")); //5
System.out.println(binaryNumber("00111000"));
System.out.println(binaryNumber("00011100"));
System.out.println(binaryNumber("00001110"));
//System.out.println(binaryNumber("00000111"));
System.out.println(binaryNumber("00000011")); //3
System.out.println(binaryNumber("10000001"));
//System.out.println(binaryNumber("11000000"));
System.out.println(binaryNumber("11000000")); //6
System.out.println(binaryNumber("01100000"));
System.out.println(binaryNumber("00110000"));
System.out.println(binaryNumber("00011000"));
System.out.println(binaryNumber("00001100"));
System.out.println(binaryNumber("00000110"));
System.out.println(binaryNumber("10000000")); //8
System.out.println(binaryNumber("01000000"));
System.out.println(binaryNumber("00100000"));
System.out.println(binaryNumber("00010000"));
System.out.println(binaryNumber("00001000"));
System.out.println(binaryNumber("00000100"));
System.out.println(binaryNumber("00000010"));
System.out.println(binaryNumber("00000001"));
System.out.println("histograma");
System.out.print("\n int LUT[] = {");
int cont=0;
for(int i=0; i < 256; i++){
if(hist[i] == 1){
System.out.print(cont++ + ", ");
}else
System.out.print("58 , ");
}
System.out.println("\ncont:"+ cont);
}
*/
}
|
import java.sql.*;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.net.URI;
import java.net.URISyntaxException;
import static spark.Spark.*;
import spark.template.freemarker.FreeMarkerEngine;
import spark.ModelAndView;
import static spark.Spark.get;
import com.heroku.sdk.jdbc.DatabaseUrl;
public class Main {
    public static void main(String[] args) {
port(Integer.valueOf(System.getenv("PORT")));
staticFileLocation("/public");
get("/hello", (req, res) -> "Hello World");
get("/", (request, response) -> {
Map<String, Object> attributes = new HashMap<>();
attributes.put("message", "Hello World!");
return new ModelAndView(attributes, "index.ftl");
}, new FreeMarkerEngine());
get("/db", (req, res) -> {
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try {
connection = DatabaseUrl.extract().getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks");
ArrayList<String> output = new ArrayList<String>();
while (rs.next()) {
output.add( "Read from DB: " + rs.getTimestamp("tick"));
}
attributes.put("results", output);
return new ModelAndView(attributes, "db.ftl");
} catch (Exception e) {
attributes.put("message", "There was an error: " + e);
return new ModelAndView(attributes, "error.ftl");
} finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}
}, new FreeMarkerEngine());
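        /*
         * Illustrative sketch (not in the original): the com.google.gson.Gson import above is
         * unused; a typical use would be a JSON endpoint such as the hypothetical route below.
         *
         * get("/json", (req, res) -> {
         *     res.type("application/json");
         *     Map<String, Object> payload = new HashMap<>();
         *     payload.put("message", "Hello World!");
         *     return new Gson().toJson(payload);
         * });
         */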
}
}
|
package io.hawt.web;
import io.hawt.system.AuthInfo;
import io.hawt.system.Authenticator;
import io.hawt.system.ExtractAuthInfoCallback;
import io.hawt.util.Strings;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
/**
* A helper object to store the proxy location details
*/
public class ProxyDetails {
private static final transient Logger LOG = LoggerFactory.getLogger(ProxyDetails.class);
private String stringProxyURL;
private String hostAndPort;
private String path = "";
private String userName;
private String password;
private String host;
private int port = 80;
public static final String USER_PARAM = "_user";
public static final String PWD_PARAM = "_pwd";
private static Set<String> ignoreHeaderNames = new HashSet<String>(Arrays.asList(USER_PARAM, PWD_PARAM, "_url", "url"));
public ProxyDetails(HttpServletRequest httpServletRequest) {
this(httpServletRequest.getPathInfo());
String authHeader = httpServletRequest.getHeader(Authenticator.HEADER_AUTHORIZATION);
if (authHeader != null && !authHeader.equals("")) {
final AuthInfo info = new AuthInfo();
Authenticator.extractAuthInfo(authHeader, new ExtractAuthInfoCallback() {
@Override
public void getAuthInfo(String userName, String password) {
info.username = userName;
info.password = password;
}
});
userName = info.username;
password = info.password;
}
        // let's add the query parameters
Enumeration<?> iter = httpServletRequest.getParameterNames();
while (iter.hasMoreElements()) {
Object next = iter.nextElement();
if (next instanceof String) {
String name = next.toString();
if (!ignoreHeaderNames.contains(name)) {
String[] values = httpServletRequest.getParameterValues(name);
for (String value : values) {
String prefix = "?";
if (stringProxyURL.contains("?")) {
prefix = "&";
}
stringProxyURL += prefix + name + "=" + value;
}
}
}
}
}
public ProxyDetails(String pathInfo) {
hostAndPort = pathInfo;
if (hostAndPort == null) {
return;
}
while (hostAndPort.startsWith("/")) {
hostAndPort = hostAndPort.substring(1);
}
// remove user/pwd
int idx = hostAndPort.indexOf("@");
if (idx > 0) {
userName = hostAndPort.substring(0, idx);
hostAndPort = hostAndPort.substring(idx + 1);
idx = indexOf(userName, ":", "/");
if (idx > 0) {
password = userName.substring(idx + 1);
userName = userName.substring(0, idx);
}
}
host = hostAndPort;
idx = indexOf(hostAndPort, ":", "/");
if (idx > 0) {
host = hostAndPort.substring(0, idx);
String portText = hostAndPort.substring(idx + 1);
idx = portText.indexOf("/");
if (idx >= 0) {
path = portText.substring(idx);
portText = portText.substring(0, idx);
}
if (Strings.isNotBlank(portText)) {
                // portText may be a port unless it's the default
try {
port = Integer.parseInt(portText);
hostAndPort = host + ":" + port;
} catch (NumberFormatException e) {
port = 80;
// we do not have a port, so path is the portText
path = "/" + portText + path;
hostAndPort = host;
}
} else {
hostAndPort = host;
}
}
stringProxyURL = "http://" + hostAndPort + path;
// we do not support query parameters
if (LOG.isDebugEnabled()) {
LOG.debug("Proxying to " + stringProxyURL + " as user: " + userName);
}
}
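    /*
     * Worked example of the parsing above (illustrative): a pathInfo of
     * "/user:pwd@localhost:8080/jolokia" is decomposed into
     *   userName = "user", password = "pwd",
     *   host = "localhost", port = 8080, path = "/jolokia",
     *   hostAndPort = "localhost:8080",
     *   stringProxyURL = "http://localhost:8080/jolokia".
     * If the segment after ':' is not numeric it is treated as the first path element and the
     * port falls back to 80.
     */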
@Override
public String toString() {
return "ProxyDetails{" +
userName + "@" + hostAndPort + "/" + stringProxyURL
+ "}";
}
/**
* Returns the lowest index of the given list of values
*/
protected int indexOf(String text, String... values) {
int answer = -1;
for (String value : values) {
int idx = text.indexOf(value);
if (idx >= 0) {
if (answer < 0 || idx < answer) {
answer = idx;
}
}
}
return answer;
}
public HttpClient createHttpClient(HttpMethod httpMethodProxyRequest) {
HttpClient client = new HttpClient();
if (userName != null) {
//client.getParams().setAuthenticationPreemptive(true);
httpMethodProxyRequest.setDoAuthentication(true);
Credentials defaultcreds = new UsernamePasswordCredentials(userName, password);
client.getState().setCredentials(new AuthScope(host, port, AuthScope.ANY_REALM), defaultcreds);
}
return client;
}
public String getStringProxyURL() {
return stringProxyURL;
}
public String getProxyHostAndPort() {
return hostAndPort;
}
public String getProxyPath() {
return path;
}
public String getHost() {
return host;
}
public int getPort() {
return port;
}
public String getUserName() {
return userName;
}
public String getPassword() {
return password;
}
public String getHostAndPort() {
return hostAndPort;
}
public String getPath() {
return path;
}
public boolean isValid() {
return hostAndPort != null;
}
}
|
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import spark.ModelAndView;
import spark.template.freemarker.FreeMarkerEngine;
import javax.validation.ConstraintViolationException;
import javax.validation.ValidationException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import static spark.Spark.*;
public class Main {
private static final Logger LOG = Logger.getLogger(Main.class.getName());
public static String toJson(Object obj) {
try {
return new ObjectMapper().writeValueAsString(obj);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
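    /*
     * Illustrative usage (not in the original): inside a route handler one might write
     *   res.type("application/json");
     *   return toJson(attributes);
     * where attributes is any Jackson-serialisable object such as a Map<String, Object>.
     */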
public static void main(String[] args) {
port(Integer.valueOf(System.getenv("PORT")));
staticFileLocation("/public");
before((req, res) -> {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Request-Method", "*");
res.header("Access-Control-Allow-Headers", "*");
        });
    }
}
|
// import static javax.measure.unit.SI.KILOGRAM;
// import javax.measure.quantity.Mass;
// import org.jscience.physics.model.RelativisticModel;
// import org.jscience.physics.amount.Amount;
import java.sql.*;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map;
import java.net.URI;
import java.net.URISyntaxException;
import static spark.Spark.*;
import spark.template.freemarker.FreeMarkerEngine;
import spark.ModelAndView;
import static spark.Spark.get;
import com.heroku.sdk.jdbc.DatabaseUrl;
public class Main {
public static void main(String[] args) {
port(Integer.valueOf(System.getenv("PORT")));
staticFileLocation("/public");
// get("/hello", (req, res) -> {
// RelativisticModel.select();
// String energy = System.getenv().get("ENERGY");
// Amount<Mass> m = Amount.valueOf(energy).to(KILOGRAM);
// return "E=mc^2: " + energy + " = " + m.toString();
// get("/", (request, response) -> {
// Map<String, Object> attributes = new HashMap<>();
// attributes.put("message", "Hello World!");
// return new ModelAndView(attributes, "index.ftl");
// }, new FreeMarkerEngine());
get("/db", (req, res) -> {
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try {
connection = DatabaseUrl.extract().getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks");
ArrayList<String> output = new ArrayList<String>();
while (rs.next()) {
output.add( "Read from DB: " + rs.getTimestamp("tick"));
}
attributes.put("results", output);
return new ModelAndView(attributes, "db.ftl");
} catch (Exception e) {
attributes.put("message", "There was an error: " + e);
return new ModelAndView(attributes, "error.ftl");
} finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}
}, new FreeMarkerEngine());
}
}
|
package be.iminds.iot.dianne.builder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.List;
import java.util.Map.Entry;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
import org.osgi.service.http.HttpService;
import be.iminds.iot.dianne.nn.runtime.ModuleManager;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
@Component(service = { javax.servlet.Servlet.class },
property = { "alias:String=/dianne/deployer" },
immediate = true)
public class DianneDeployer extends HttpServlet {
private List<ModuleManager> runtimes = Collections.synchronizedList(new ArrayList<ModuleManager>());
@Reference(cardinality=ReferenceCardinality.AT_LEAST_ONE,
policy=ReferencePolicy.DYNAMIC)
public void addModuleManager(ModuleManager m){
runtimes.add(m);
}
public void removeModuleManager(ModuleManager m){
runtimes.remove(m);
}
@Reference
public void setHttpService(HttpService http){
try {
// Use manual registration - problems with whiteboard
http.registerServlet("/dianne/deployer", this, null, null);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
String modulesJsonString = request.getParameter("modules");
JsonObject modulesJson = new JsonParser().parse(modulesJsonString).getAsJsonObject();
for(Entry<String, JsonElement> module : modulesJson.entrySet()){
JsonObject moduleJson = (JsonObject) module.getValue();
// new module
Dictionary<String, Object> properties = new Hashtable<String, Object>();
String id = moduleJson.get("id").getAsString();
String type = moduleJson.get("type").getAsString();
// TODO standardize a configuration key-value format and key names
properties.put("module.id", id);
properties.put("module.type", type);
if(moduleJson.has("next")){
properties.put("module.next", moduleJson.get("next").getAsString());
}
if(moduleJson.has("prev")){
properties.put("module.prev", moduleJson.get("prev").getAsString());
}
// key prefix
String prefix = "module."+type.toLowerCase()+".";
for(Entry<String, JsonElement> property : moduleJson.entrySet()){
String key = property.getKey();
if(key.equals("id")
|| key.equals("type")
|| key.equals("prev")
|| key.equals("next")){
continue;
// this is only for module-specific properties
}
// TODO already infer type here?
properties.put(prefix+property.getKey(), property.getValue().getAsString());
}
// for now deploy all modules on one runtime
try {
runtimes.get(0).deployModule(properties);
} catch (InstantiationException e) {
System.err.println("Failed to deploy module "+id);
e.printStackTrace();
}
}
}
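    /*
     * Illustrative shape of the "modules" request parameter handled above (the id/type/next/prev
     * keys come from the code; the concrete module names and extra properties are hypothetical):
     *
     * {
     *   "m1": { "id": "m1", "type": "Linear",  "next": "m2", "input": "784", "output": "100" },
     *   "m2": { "id": "m2", "type": "Sigmoid", "prev": "m1" }
     * }
     *
     * Every extra key such as "input"/"output" is forwarded as "module.linear.input", etc.,
     * and the whole dictionary is deployed on the first available ModuleManager.
     */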
}
|
import java.sql.*;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map;
import java.net.URI;
import java.net.URISyntaxException;
import static spark.Spark.*;
import spark.template.freemarker.FreeMarkerEngine;
import spark.ModelAndView;
import static spark.Spark.get;
import com.heroku.sdk.jdbc.DatabaseUrl;
import com.google.gson.Gson;
import spark.Request;
import spark.Response;
import spark.QueryParamsMap;
import spark.Route;
public class Main {
public static void main(String[] args) {
port(Integer.valueOf(System.getenv("PORT")));
staticFileLocation("/public");
get("/hello", (req, res) -> "Hello World");
// get("/", (request, response) -> {
// Map<String, Object> attributes = new HashMap<>();
// attributes.put("message", "Hello World!");
// return new ModelAndView(attributes, "index.ftl");
// }, new FreeMarkerEngine());
get("/db", (req, res) -> {
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try {
connection = DatabaseUrl.extract().getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate("CREATE TABLE IF NOT EXISTS guitarists (tick timestamp)");
//stmt.executeUpdate("INSERT INTO guitarists VALUES (now())");
ResultSet rs = stmt.executeQuery("SELECT * FROM guitarists");
ArrayList<String> output = new ArrayList<String>();
while (rs.next()) {
//output.add( "Read from DB: " + rs.getTimestamp("tick"));
output.add( "Read from DB: " + rs.getString("firstname"));
output.add( "Read from DB: " + rs.getString("lastname"));
output.add( "Read from DB: " + rs.getString("instructiontype"));
output.add( "Read from DB: " + rs.getString("zip"));
output.add( "Read from DB: " + rs.getString("guitartype"));
output.add( "Read from DB: " + rs.getString("genre"));
output.add( "Read from DB: " + rs.getString("agerange"));
output.add( "Read from DB: " + rs.getString("skill"));
output.add( "Read from DB: " + rs.getString("focus"));
}
attributes.put("results", output);
return new ModelAndView(attributes, "db.ftl");
} catch (Exception e) {
attributes.put("message", "There was an error: " + e);
return new ModelAndView(attributes, "error.ftl");
} finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}
}, new FreeMarkerEngine());
/*
post("/api", (req, res) -> userService.createUser(
String firstName = req.queryParams("firstname");
String lastname = req.queryParams("lastname");
//todo : send to database
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try {
connection = DatabaseUrl.extract().getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
stmt.executeUpdate("INSERT firstname, lastname INTO mytable VALUES (" firstname + "," + lastname +")");
ResultSet rs = stmt.executeQuery("SELECT * FROM mytable");
ArrayList<String> output = new ArrayList<String>();
while (rs.next()) {
output.add( "Read from DB: " + rs.getTimestamp("tick"));
}
attributes.put("results", output);
return new ModelAndView(attributes, "db.ftl");
}
catch (Exception e) {
attributes.put("message", "There was an error: " + e);
return new ModelAndView(attributes, "error.ftl");
}
finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}
*/
//String instructiontype = req.queryParams("instructiontype");
//String zip = req.queryParams("zip");
//String guitartype = req.queryParams("guitartype");
//String genre = req.queryParams("genre");
//String agerange = req.queryParams("agerange");
//String skill = req.queryParams("skill");
//String focus = req.queryParams("focus");
post("/saveuser", (Request req, Response res) -> {
String firstname = req.queryParams("firstname");
String lastname = req.queryParams("lastname");
String email = req.queryParams("email");
String password = req.queryParams("password");
String instructiontype = req.queryParams("instructiontype");
String zip = req.queryParams("zip");
String guitartype = req.queryParams("guitartype");
String genre = req.queryParams("genre");
String agerange = req.queryParams("agerange");
String skill = req.queryParams("skill");
String focus = req.queryParams("focus");
System.out.println("*** firstname: " + firstname);
//make new db connection, create a new hashmap to be used later for results
Connection connection = null;
Map<String, Object> attributes = new HashMap<>();
try{
connection = DatabaseUrl.extract().getConnection();
/*
Statement stmt = connection.createStatement();
stmt.execute("INSERT INTO guitarists"
+"(firstname,lastname,email,password,genre,focus,guitartype,agerange,skill,instructiontype,zip)"
+" VALUES( '" +firstname+ "','" +lastname+ "','" +email+ "','" +password+ "','" +genre+ "','" +focus+ "','"
+guitartype+ "','" +agerange+ "','" +skill+ "','" +instructiontype+ "','" +zip+ "')" );
*/
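        //the commented-out Statement above builds the INSERT by string concatenation;
        //the PreparedStatement below is used instead so user input is passed as bound
        //parameters rather than spliced into the SQL (avoiding quoting bugs and SQL injection)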
PreparedStatement pstmt = connection.prepareStatement( "INSERT INTO guitarists"
+ "(firstname,lastname,email,password,focus,genre,guitartype,instructiontype,skill,zip,agerange)"
+ " VALUES(?,?,?,?,?,?,?,?,?,?,?)" );
pstmt.setString(1, firstname);
pstmt.setString(2, lastname);
pstmt.setString(3, email);
pstmt.setString(4, password);
pstmt.setString(5, focus);
pstmt.setString(6, genre);
pstmt.setString(7, guitartype);
pstmt.setString(8, instructiontype);
pstmt.setString(9, skill);
pstmt.setString(10, zip);
pstmt.setString(11, agerange);
        //execute the insert, then query for all records in this table and make an arraylist of results
        pstmt.executeUpdate();
        Statement stmt = connection.createStatement();
        ResultSet rs = stmt.executeQuery("SELECT * FROM guitarists");
        ArrayList<String> output = new ArrayList<>();
        while (rs.next()) {
            output.add("Read from DB: " + rs.getString("firstname") + " " + rs.getString("lastname"));
        }
        attributes.put("results", output);
}
catch (Exception e) {
attributes.put("message", "There was an error: " + e);
return new ModelAndView(attributes, "profile.html");
}
finally {
if (connection != null) try{connection.close();} catch(SQLException e){}
}
//res.redirect("db.ftl");
return attributes;
});
}//end of main()
}//end Main Class
|
package org.intermine.bio.postprocess;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.log4j.Logger;
import org.intermine.bio.util.BioQueries;
import org.intermine.model.bio.Chromosome;
import org.intermine.model.bio.DataSet;
import org.intermine.model.bio.DataSource;
import org.intermine.model.bio.Gene;
import org.intermine.model.bio.GeneFlankingRegion;
import org.intermine.model.bio.Location;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.ObjectStoreWriter;
import org.intermine.objectstore.query.Results;
import org.intermine.objectstore.query.ResultsRow;
import org.intermine.util.DynamicUtil;
import org.intermine.util.StringUtil;
/**
 * Create features to represent flanking regions of a configurable distance on either side of gene
 * features. These will be used in overlap queries.
* @author rns
*
*/
public class CreateFlankingRegions
{
private ObjectStoreWriter osw = null;
private ObjectStore os;
private DataSet dataSet;
private DataSource dataSource;
private Map<Integer, Chromosome> chrs = new HashMap<Integer, Chromosome>();
/**
* The sizes in kb of flanking regions to create.
*/
private static double[] distances = new double[] {0.5, 1, 2, 5, 10};
/**
* The values strings for up/down stream from a gene.
*/
private static String[] directions = new String[] {"upstream", "downstream"};
private static final Logger LOG = Logger.getLogger(CreateFlankingRegions.class);
/**
* Create a new CreateFlankingRegions object that will operate on the given
* ObjectStoreWriter.
*
* @param osw
* the ObjectStoreWriter to use when creating/changing objects
*/
public CreateFlankingRegions(ObjectStoreWriter osw) {
this.osw = osw;
this.os = osw.getObjectStore();
dataSource = (DataSource) DynamicUtil.createObject(Collections
.singleton(DataSource.class));
dataSource.setName("modMine");
try {
dataSource = (DataSource) os.getObjectByExample(dataSource,
Collections.singleton("name"));
} catch (ObjectStoreException e) {
throw new RuntimeException(
"unable to fetch modMine DataSource object", e);
}
}
/**
* Iterate over genes in database and create flanking regions.
*
* @throws ObjectStoreException
* if there is an ObjectStore problem
*/
public void createFlankingFeatures() throws ObjectStoreException {
Results results = BioQueries.findLocationAndObjects(os,
Chromosome.class, Gene.class, false, false, false, 1000);
dataSet = (DataSet) DynamicUtil.createObject(Collections
.singleton(DataSet.class));
dataSet.setTitle("modMine gene flanking regions");
dataSet.setDescription("Gene flanking regions generated by modMine");
dataSet.setVersion("" + new Date()); // current time and date
dataSet.setUrl("http://intermine.modencode.org");
dataSet.setDataSource(dataSource);
Iterator resIter = results.iterator();
int count = 0;
osw.beginTransaction();
while (resIter.hasNext()) {
ResultsRow rr = (ResultsRow) resIter.next();
Integer chrId = (Integer) rr.get(0);
Gene gene = (Gene) rr.get(1);
Location loc = (Location) rr.get(2);
createAndStoreFlankingRegion(getChromosome(chrId), loc, gene);
if ((count % 1000) == 0) {
LOG.info("Created flanking regions for " + count + " genes.");
}
count++;
}
osw.store(dataSet);
osw.commitTransaction();
}
private void createAndStoreFlankingRegion(Chromosome chr, Location geneLoc, Gene gene)
throws ObjectStoreException {
// This code can't cope with chromosomes that don't have a length
if (chr.getLength() == null) {
LOG.warn("Attempted to create GeneFlankingRegions on a chromosome without a length: "
+ chr.getPrimaryIdentifier());
return;
}
        // if there is no gene symbol this is a new gene generated by modENCODE gene-model
        // submissions. This check should really use the DataSet, but the WormBase chado source
        // isn't setting a DataSet correctly
if (StringUtil.isEmpty(gene.getSymbol())) {
return;
}
for (double distance : distances) {
for (String direction : directions) {
String strand = geneLoc.getStrand();
// TODO what do we do if strand not set?
int geneStart = geneLoc.getStart().intValue();
int geneEnd = geneLoc.getEnd().intValue();
int chrLength = chr.getLength().intValue();
// gene touches a chromosome end so there isn't a flanking region
if ((geneStart <= 1) || (geneEnd >= chrLength)) {
continue;
}
GeneFlankingRegion region = (GeneFlankingRegion) DynamicUtil
.createObject(Collections.singleton(GeneFlankingRegion.class));
Location location = (Location) DynamicUtil
.createObject(Collections.singleton(Location.class));
region.setDistance("" + distance + "kb");
region.setDirection(direction);
region.setGene(gene);
region.setChromosome(chr);
region.setChromosomeLocation(location);
region.setOrganism(gene.getOrganism());
region.setPrimaryIdentifier(gene.getPrimaryIdentifier() + " " + distance + "kb "
+ direction);
// this should be some clever algorithm
int start, end;
if (direction.equals("upstream") && strand.equals("1")) {
start = geneStart - (int) Math.round(distance * 1000);
end = geneStart - 1;
} else if (direction.equals("upstream") && strand.equals("-1")) {
start = geneEnd + 1;
end = geneEnd + (int) Math.round(distance * 1000);
} else if (direction.equals("downstream") && strand.equals("1")) {
start = geneEnd + 1;
end = geneEnd + (int) Math.round(distance * 1000);
} else { // direction.equals("downstream") && strand.equals("-1")
start = geneStart - (int) Math.round(distance * 1000);
end = geneStart - 1;
}
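                // worked example (illustrative): for a forward-strand gene at 10000..12000,
                // the 2kb upstream region is 8000..9999 and the 2kb downstream region is 12001..14000;
                // on the reverse strand the two regions swap sides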
// if the region hangs off the start or end of a chromosome set it to finish
// at the end of the chromosome
location.setStart(Math.max(start, 1));
location.setEnd(Math.min(end, chr.getLength()));
location.setStrand(strand);
location.setObject(chr);
location.setSubject(region);
region.setLength(new Integer((location.getEnd() - location.getStart()) + 1));
osw.store(location);
osw.store(region);
}
}
}
private Chromosome getChromosome(Integer chrId) throws ObjectStoreException {
Chromosome chr = chrs.get(chrId);
if (chr == null) {
chr = (Chromosome) os.getObjectById(chrId, Chromosome.class);
chrs.put(chrId, chr);
}
return chr;
}
}
|
package org.intermine.bio.dataconversion;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.Reader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.biopax.paxtools.controller.PropertyEditor;
import org.biopax.paxtools.controller.Traverser;
import org.biopax.paxtools.controller.Visitor;
import org.biopax.paxtools.io.jena.JenaIOHandler;
import org.biopax.paxtools.io.simpleIO.SimpleEditorMap;
import org.biopax.paxtools.model.BioPAXElement;
import org.biopax.paxtools.model.BioPAXLevel;
import org.biopax.paxtools.model.Model;
import org.biopax.paxtools.model.level2.pathway;
import org.intermine.bio.util.OrganismData;
import org.intermine.bio.util.OrganismRepository;
import org.intermine.dataconversion.ItemWriter;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.xml.full.Item;
/**
* Converts BioPAX files into InterMine objects.
*
* @author Julie Sullivan
*/
public class BioPAXConverter extends BioFileConverter implements Visitor
{
private static final Logger LOG = Logger.getLogger(BioPAXConverter.class);
private static final String DEFAULT_DB_NAME = "UniProt";
private String dbName = DEFAULT_DB_NAME;
private String identifierField = "primaryAccession"; // default value, can be overridden
private String bioentityType = "Protein"; // default value, can be overridden
protected IdResolverFactory resolverFactory;
private Map<String, Item> bioentities = new HashMap<String, Item>();
private Traverser traverser;
private Set<BioPAXElement> visited = new HashSet<BioPAXElement>();
private int depth = 0;
private Item organism, dataset;
private String pathwayRefId = null;
private Set<String> taxonIds = new HashSet<String>();
private OrganismRepository or;
private String dataSourceRefId = null;
private String curated = "false"; // default value, can be overridden
private Map<String, Config> configs = new HashMap<String, Config>();
private static final String PROP_FILE = "biopax_config.properties";
private static String xrefPrefix = "REACT_"; // default value, can be overridden
private Map<String, String> pathways = new HashMap<String, String>();
/**
* Constructor
* @param writer the ItemWriter used to handle the resultant items
* @param intermineModel the Model
* @throws ObjectStoreException if something goes horribly wrong
*/
public BioPAXConverter(ItemWriter writer, org.intermine.metadata.Model intermineModel)
throws ObjectStoreException {
super(writer, intermineModel);
// only construct factory here so can be replaced by mock factory in tests
resolverFactory = new FlyBaseIdResolverFactory("gene");
traverser = new Traverser(new SimpleEditorMap(BioPAXLevel.L2), this);
readConfig();
or = OrganismRepository.getOrganismRepository();
}
private void readConfig() {
Properties props = new Properties();
try {
props.load(getClass().getClassLoader().getResourceAsStream(PROP_FILE));
} catch (IOException e) {
throw new RuntimeException("Problem loading properties '" + PROP_FILE + "'", e);
}
for (Map.Entry<Object, Object> entry: props.entrySet()) {
String key = (String) entry.getKey();
String value = ((String) entry.getValue()).trim();
String[] attributes = key.split("\\.");
if (attributes.length != 2) {
throw new RuntimeException("Problem loading properties '" + PROP_FILE + "' on line "
+ key);
}
String taxonId = attributes[0];
String identifier = attributes[1];
// xref prefix determines which XREF in file to use, eg which identifier
// default is REACT_
if ("xref".equals(taxonId)) {
xrefPrefix = value;
continue;
}
Config config = configs.get(taxonId);
if (config == null) {
config = new Config();
configs.put(taxonId, config);
}
if ("bioentity".equals(identifier)) {
config.setBioentity(value);
} else {
config.setIdentifier(identifier);
config.setDb(value);
}
}
}
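    /*
     * Illustrative biopax_config.properties entries matching the parsing above (the taxon id
     * and values are examples, not the shipped file):
     *
     *   xref.prefix = REACT_
     *   7227.bioentity = Gene
     *   7227.primaryIdentifier = FlyBase
     *
     * A key whose first segment is "xref" overrides the default xref prefix; otherwise the first
     * segment is a taxon id, "bioentity" sets the class to create, and any other second segment
     * is taken as the identifier field, with the value naming the source database.
     */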
/**
* {@inheritDoc}
*/
@Override
public void process(Reader reader) throws Exception {
String taxonId = getTaxonId();
if (taxonId == null) {
// this file isn't from an organism specified in the project file
return;
}
setDataset();
setOrganism(taxonId);
setConfig(taxonId);
// navigate through the owl file
JenaIOHandler jenaIOHandler = new JenaIOHandler(null, BioPAXLevel.L2);
Model model = jenaIOHandler.convertFromOWL(new FileInputStream(getCurrentFile()));
Set<pathway> pathwaySet = model.getObjects(pathway.class);
for (pathway pathwayObj : pathwaySet) {
try {
pathwayRefId = getPathway(pathwayObj);
} catch (ObjectStoreException e) {
pathwayRefId = null;
continue;
}
if (pathwayRefId == null) {
// will happen if no stable ID, eg. REACT_123
continue;
}
visited = new HashSet<BioPAXElement>();
traverser.traverse(pathwayObj, model);
}
}
/**
* Sets the list of taxonIds that should be imported if using split input files.
*
* @param taxonIds a space-separated list of taxonIds
*/
public void setBiopaxOrganisms(String taxonIds) {
this.taxonIds = new HashSet<String>(Arrays.asList(StringUtils.split(taxonIds, " ")));
LOG.info("Setting list of organisms to " + this.taxonIds);
}
/**
* @param curated true or false
*/
public void setBiopaxCurated(String curated) {
this.curated = curated;
}
/**
* @param name name of datasource
* @throws ObjectStoreException if storing datasource fails
*/
public void setBiopaxDatasourcename(String name)
throws ObjectStoreException {
Item datasource = createItem("DataSource");
datasource.setAttribute("name", name);
try {
store(datasource);
} catch (ObjectStoreException e) {
throw new ObjectStoreException(e);
}
dataSourceRefId = datasource.getIdentifier();
}
/**
* @param title name of dataset
* @throws ObjectStoreException if storing datasource fails
*/
public void setBiopaxDatasetname(String title)
throws ObjectStoreException {
dataset = createItem("DataSet");
dataset.setAttribute("name", title);
}
/**
* Adds the BioPAX element into the model and traverses the element for its dependent elements.
*
* @param bpe the BioPAX element to be added into the model
* @param model model into which the element will be added
     * @param editor editor that is going to be used for traversing functionality
* @see org.biopax.paxtools.controller.Traverser
*/
public void visit(BioPAXElement bpe, Model model, PropertyEditor editor) {
if (bpe != null) {
if (bpe instanceof org.biopax.paxtools.model.level2.entity) {
org.biopax.paxtools.model.level2.entity entity
= (org.biopax.paxtools.model.level2.entity) bpe;
String className = entity.getModelInterface().getSimpleName();
if (className.equalsIgnoreCase("protein") && StringUtils.isNotEmpty(pathwayRefId)) {
processProteinEntry(entity);
}
}
if (!visited.contains(bpe)) {
visited.add(bpe);
depth++;
traverser.traverse(bpe, model);
                depth--;
}
}
}
private void processProteinEntry(org.biopax.paxtools.model.level2.entity entity) {
String identifier = entity.getNAME();
// there is only one gene
if (identifier.contains(DEFAULT_DB_NAME)) {
processBioentity(identifier, pathwayRefId);
// there are multiple genes
} else {
Set<org.biopax.paxtools.model.level2.xref> uniXrefs = entity.getXREF();
for (org.biopax.paxtools.model.level2.xref xref : uniXrefs) {
identifier = xref.getRDFId();
if (identifier.contains(DEFAULT_DB_NAME)) {
processBioentity(identifier, pathwayRefId);
}
}
}
}
private void processBioentity(String xref, String pathway) {
// db source for this identifier, eg. UniProt, FlyBase
String identifierSource = (xref.contains(dbName) ? dbName : DEFAULT_DB_NAME);
if (StringUtils.isEmpty(identifierSource)) {
return;
}
// remove prefix, eg. UniProt or ENSEMBL
String accession = StringUtils.substringAfter(xref, identifierSource + ":");
if (accession.contains(" ")) {
accession = accession.split(" ")[0];
}
if (accession == null || accession.length() < 2) {
LOG.warn(bioentityType + " not stored:" + xref);
return;
}
Item item = getBioentity(accession);
item.addToCollection("pathways", pathway);
return;
}
private String getPathway(org.biopax.paxtools.model.level2.pathway pathway)
throws ObjectStoreException {
for (org.biopax.paxtools.model.level2.xref xref : pathway.getXREF()) {
String xrefId = xref.getID();
if (StringUtils.isNotEmpty(xrefId) && xrefId.startsWith(xrefPrefix)) {
String identifier = xrefId.substring(xrefPrefix.length());
String refId = pathways.get(identifier);
if (refId == null) {
Item item = createItem("Pathway");
item.setAttribute("identifier", identifier);
item.setAttribute("name", pathway.getNAME());
String comment = getComment(pathway.getCOMMENT());
if (StringUtils.isNotEmpty(comment)) {
item.setAttribute("description", comment);
}
item.setAttribute("curated", curated);
item.addToCollection("dataSets", dataset);
store(item);
refId = item.getIdentifier();
pathways.put(identifier, refId);
}
return refId;
}
}
LOG.warn("couldn't process pathway " + pathway.getNAME());
return null;
}
private String getComment(Set<String> comments) {
if (comments == null || comments.isEmpty()) {
return null;
}
StringBuilder comment = new StringBuilder();
for (String c : comments) {
comment.append(c);
}
return comment.toString();
}
private Item getBioentity(String identifier) {
Item item = bioentities.get(identifier);
if (item == null) {
item = createItem(bioentityType);
item.setAttribute(identifierField, identifier);
item.setReference("organism", organism);
item.addToCollection("dataSets", dataset);
bioentities.put(identifier, item);
}
return item;
}
private void setOrganism(String taxonId)
throws ObjectStoreException {
organism = createItem("Organism");
organism.setAttribute("taxonId", taxonId);
try {
store(organism);
} catch (ObjectStoreException e) {
throw new ObjectStoreException(e);
}
}
private void setDataset()
throws ObjectStoreException {
if (dataset.getReference("dataSource") == null) {
dataset.setReference("dataSource", dataSourceRefId);
try {
store(dataset);
} catch (ObjectStoreException e) {
throw new ObjectStoreException(e);
}
}
}
/**
* Use the file name currently being processed to divine the name of the organism. Return null
* if this taxonId is not in our list of taxonIds to be processed.
* @return the taxonId of current organism
*/
private String getTaxonId() {
File file = getCurrentFile();
String filename = file.getName();
String[] bits = filename.split(" ");
// bad filename eg `Human immunodeficiency virus 1.owl`,
// expecting "Drosophila melanogaster.owl"
if (bits.length != 2) {
String msg = "Bad filename: '" + filename + "'. Expecting filename in the format "
+ "'Drosophila melanogaster.owl'";
LOG.error(msg);
return null;
}
String genus = bits[0];
String species = bits[1].split("\\.")[0];
String organismName = genus + " " + species;
OrganismData od = or.getOrganismDataByGenusSpecies(genus, species);
if (od == null) {
LOG.error("No data for " + organismName + ". Please add to repository.");
return null;
}
int taxonId = od.getTaxonId();
String taxonIdString = String.valueOf(taxonId);
// only process the taxonids set in the project XML file - if any
if (!taxonIds.isEmpty() && !taxonIds.contains(taxonIdString)) {
return null;
}
return taxonIdString;
}
/**
* {@inheritDoc}
*/
@Override
public void close()
throws ObjectStoreException {
for (Item item : bioentities.values()) {
store(item);
}
}
private void setConfig(String taxonId) {
Config config = configs.get(taxonId);
if (config != null) {
dbName = config.getDb();
identifierField = config.getIdentifier();
bioentityType = config.getBioentity();
}
}
/**
* Class to hold the config info for each taxonId.
*/
class Config
{
protected String bioentity;
protected String identifier;
protected String db;
/**
* Constructor.
*/
Config() {
// nothing to do
}
/**
* @return the bioentity
*/
public String getBioentity() {
return bioentity;
}
/**
* @param bioentity the bioentity to set
*/
public void setBioentity(String bioentity) {
this.bioentity = bioentity;
}
/**
* @return the identifier
*/
public String getIdentifier() {
return identifier;
}
/**
* @param identifier the identifier to set
*/
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
/**
* @return the db
*/
public String getDb() {
return db;
}
/**
* @param db the db to set
*/
public void setDb(String db) {
this.db = db;
}
}
}
|
package org.intermine.bio.dataconversion;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.Reader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.biopax.paxtools.controller.PropertyEditor;
import org.biopax.paxtools.controller.Traverser;
import org.biopax.paxtools.controller.Visitor;
import org.biopax.paxtools.io.jena.JenaIOHandler;
import org.biopax.paxtools.io.simpleIO.SimpleEditorMap;
import org.biopax.paxtools.model.BioPAXElement;
import org.biopax.paxtools.model.BioPAXLevel;
import org.biopax.paxtools.model.Model;
import org.biopax.paxtools.model.level2.pathway;
import org.intermine.bio.util.OrganismData;
import org.intermine.bio.util.OrganismRepository;
import org.intermine.dataconversion.ItemWriter;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.xml.full.Item;
/**
* Converts BioPAX files into InterMine objects.
*
* @author Julie Sullivan
*/
public class BioPAXConverter extends BioFileConverter implements Visitor
{
private static final Logger LOG = Logger.getLogger(BioPAXConverter.class);
private static final String DEFAULT_DB_NAME = "UniProt";
private String dbName = DEFAULT_DB_NAME;
private String identifierField = "primaryAccession"; // default value, can be overridden
private String bioentityType = "Protein"; // default value, can be overridden
protected IdResolverFactory resolverFactory;
private Map<String, Item> bioentities = new HashMap<String, Item>();
private Traverser traverser;
private Set<BioPAXElement> visited = new HashSet<BioPAXElement>();
private int depth = 0;
private Item organism, dataset;
private String pathwayRefId = null;
private Set<String> taxonIds = new HashSet<String>();
private OrganismRepository or;
private String dataSourceRefId = null;
private String curated = "false"; // default value, can be overridden
private Map<String, Config> configs = new HashMap<String, Config>();
private static final String PROP_FILE = "biopax_config.properties";
private static String xrefPrefix = "REACT_"; // default value, can be overridden
private Map<String, Item> xrefToPathway = new HashMap<String, Item>();
private Map<String, String> xrefs = new HashMap<String, String>();
/**
* Constructor
* @param writer the ItemWriter used to handle the resultant items
* @param intermineModel the Model
* @throws ObjectStoreException if something goes horribly wrong
*/
public BioPAXConverter(ItemWriter writer, org.intermine.metadata.Model intermineModel)
throws ObjectStoreException {
super(writer, intermineModel);
// only construct factory here so can be replaced by mock factory in tests
resolverFactory = new FlyBaseIdResolverFactory("gene");
traverser = new Traverser(new SimpleEditorMap(BioPAXLevel.L2), this);
readConfig();
or = OrganismRepository.getOrganismRepository();
}
private void readConfig() {
Properties props = new Properties();
try {
props.load(getClass().getClassLoader().getResourceAsStream(PROP_FILE));
} catch (IOException e) {
throw new RuntimeException("Problem loading properties '" + PROP_FILE + "'", e);
}
for (Map.Entry<Object, Object> entry: props.entrySet()) {
String key = (String) entry.getKey();
String value = ((String) entry.getValue()).trim();
String[] attributes = key.split("\\.");
if (attributes.length != 2) {
throw new RuntimeException("Problem loading properties '" + PROP_FILE + "' on line "
+ key);
}
String taxonId = attributes[0];
String identifier = attributes[1];
// xref prefix determines which XREF in file to use, eg which identifier
// default is REACT_
if ("xref".equals(taxonId)) {
xrefPrefix = value;
continue;
}
Config config = configs.get(taxonId);
if (config == null) {
config = new Config();
configs.put(taxonId, config);
}
if ("bioentity".equals(identifier)) {
config.setBioentity(value);
} else {
config.setIdentifier(identifier);
config.setDb(value);
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void process(Reader reader) throws Exception {
String taxonId = getTaxonId();
if (taxonId == null) {
// this file isn't from an organism specified in the project file
return;
}
setDataset();
setOrganism(taxonId);
setConfig(taxonId);
// navigate through the owl file
JenaIOHandler jenaIOHandler = new JenaIOHandler(null, BioPAXLevel.L2);
Model model = jenaIOHandler.convertFromOWL(new FileInputStream(getCurrentFile()));
Set<pathway> pathwaySet = model.getObjects(pathway.class);
for (pathway pathwayObj : pathwaySet) {
try {
pathwayRefId = getPathway(pathwayObj);
} catch (ObjectStoreException e) {
pathwayRefId = null;
continue;
}
visited = new HashSet<BioPAXElement>();
traverser.traverse(pathwayObj, model);
}
}
private void setConfig(String taxonId) {
Config config = configs.get(taxonId);
if (config != null) {
dbName = config.getDb();
identifierField = config.getIdentifier();
bioentityType = config.getBioentity();
}
}
/**
* Sets the list of taxonIds that should be imported if using split input files.
*
* @param taxonIds a space-separated list of taxonIds
*/
public void setBiopaxOrganisms(String taxonIds) {
this.taxonIds = new HashSet<String>(Arrays.asList(StringUtils.split(taxonIds, " ")));
LOG.info("Setting list of organisms to " + this.taxonIds);
}
/**
* @param curated true or false
*/
public void setBiopaxCurated(String curated) {
this.curated = curated;
}
/**
* @param name name of datasource
* @throws ObjectStoreException if storing datasource fails
*/
public void setBiopaxDatasourcename(String name)
throws ObjectStoreException {
Item datasource = createItem("DataSource");
datasource.setAttribute("name", name);
try {
store(datasource);
} catch (ObjectStoreException e) {
throw new ObjectStoreException(e);
}
dataSourceRefId = datasource.getIdentifier();
}
/**
* @param title name of dataset
* @throws ObjectStoreException if storing datasource fails
*/
public void setBiopaxDatasetname(String title)
throws ObjectStoreException {
dataset = createItem("DataSet");
dataset.setAttribute("name", title);
}
/**
* Adds the BioPAX element into the model and traverses the element for its dependent elements.
*
* @param bpe the BioPAX element to be added into the model
* @param model model into which the element will be added
     * @param editor editor that is going to be used for traversing functionality
* @see org.biopax.paxtools.controller.Traverser
*/
public void visit(BioPAXElement bpe, Model model, PropertyEditor editor) {
if (bpe != null) {
if (bpe instanceof org.biopax.paxtools.model.level2.entity) {
org.biopax.paxtools.model.level2.entity entity
= (org.biopax.paxtools.model.level2.entity) bpe;
String className = entity.getModelInterface().getSimpleName();
if (className.equalsIgnoreCase("protein") && StringUtils.isNotEmpty(pathwayRefId)) {
processProteinEntry(entity);
}
} else if (bpe instanceof org.biopax.paxtools.model.level2.unificationXref) {
org.biopax.paxtools.model.level2.unificationXref unificationXref
= (org.biopax.paxtools.model.level2.unificationXref) bpe;
String xref = unificationXref.getID();
if (xref.startsWith(xrefPrefix)) {
// REACT_12345 - lop off the REACT_ bit
String identifier = StringUtils.substringAfter(xref, xrefPrefix);
xrefs.put(unificationXref.getRDFId(), identifier);
}
}
if (!visited.contains(bpe)) {
visited.add(bpe);
depth++;
traverser.traverse(bpe, model);
                depth--;
}
}
}
private void processProteinEntry(org.biopax.paxtools.model.level2.entity entity) {
String identifier = entity.getNAME();
// there is only one gene
if (identifier.contains(DEFAULT_DB_NAME)) {
processBioentity(identifier, pathwayRefId);
// there are multiple genes
} else {
Set<org.biopax.paxtools.model.level2.xref> uniXrefs = entity.getXREF();
for (org.biopax.paxtools.model.level2.xref xref : uniXrefs) {
identifier = xref.getRDFId();
if (identifier.contains(DEFAULT_DB_NAME)) {
processBioentity(identifier, pathwayRefId);
}
}
}
}
private void processBioentity(String xref, String pathway) {
// db source for this identifier, eg. UniProt, FlyBase
String identifierSource = (xref.contains(dbName) ? dbName : DEFAULT_DB_NAME);
if (StringUtils.isEmpty(identifierSource)) {
return;
}
// remove prefix, eg. UniProt or ENSEMBL
String accession = StringUtils.substringAfter(xref, identifierSource + ":");
if (accession.contains(" ")) {
accession = accession.split(" ")[0];
}
if (accession == null || accession.length() < 2) {
LOG.warn(bioentityType + " not stored:" + xref);
return;
}
Item item = getBioentity(accession);
item.addToCollection("pathways", pathway);
return;
}
private String getPathway(org.biopax.paxtools.model.level2.pathway pathway)
throws ObjectStoreException {
Item item = createItem("Pathway");
item.setAttribute("name", pathway.getNAME());
item.setAttribute("curated", curated);
item.addToCollection("dataSets", dataset);
String refId = item.getIdentifier();
for (org.biopax.paxtools.model.level2.xref xref : pathway.getXREF()) {
String xrefId = xref.getRDFId();
xrefToPathway.put(xrefId, item);
}
return refId;
}
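    // Note on getPathway() above: the Pathway item is deliberately not stored here, because its
    // stable identifier only becomes known when the matching unificationXref is visited; close()
    // later fills in the "identifier" attribute from the xrefs map and stores the item then.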
private Item getBioentity(String identifier) {
Item item = bioentities.get(identifier);
if (item == null) {
item = createItem(bioentityType);
item.setAttribute(identifierField, identifier);
item.setReference("organism", organism);
item.addToCollection("dataSets", dataset);
bioentities.put(identifier, item);
}
return item;
}
private void setOrganism(String taxonId)
throws ObjectStoreException {
organism = createItem("Organism");
organism.setAttribute("taxonId", taxonId);
try {
store(organism);
} catch (ObjectStoreException e) {
throw new ObjectStoreException(e);
}
}
private void setDataset()
throws ObjectStoreException {
if (dataset.getReference("dataSource") == null) {
dataset.setReference("dataSource", dataSourceRefId);
try {
store(dataset);
} catch (ObjectStoreException e) {
throw new ObjectStoreException(e);
}
}
}
/**
* Use the file name currently being processed to divine the name of the organism. Return null
* if this taxonId is not in our list of taxonIds to be processed.
* @return the taxonId of current organism
*/
private String getTaxonId() {
File file = getCurrentFile();
String filename = file.getName();
String[] bits = filename.split(" ");
// bad filename eg `Human immunodeficiency virus 1.owl`,
// expecting "Drosophila melanogaster.owl"
if (bits.length != 2) {
String msg = "Bad filename: '" + filename + "'. Expecting filename in the format "
+ "'Drosophila melanogaster.owl'";
LOG.error(msg);
return null;
}
String genus = bits[0];
String species = bits[1].split("\\.")[0];
String organismName = genus + " " + species;
OrganismData od = or.getOrganismDataByGenusSpecies(genus, species);
if (od == null) {
LOG.error("No data for " + organismName + ". Please add to repository.");
return null;
}
int taxonId = od.getTaxonId();
String taxonIdString = String.valueOf(taxonId);
// only process the taxonids set in the project XML file - if any
if (!taxonIds.isEmpty() && !taxonIds.contains(taxonIdString)) {
return null;
}
return taxonIdString;
}
/**
* {@inheritDoc}
*/
@Override
public void close()
throws ObjectStoreException {
for (Map.Entry<String, Item> entry : xrefToPathway.entrySet()) {
String xref = entry.getKey();
Item pathway = entry.getValue();
String identifier = xrefs.get(xref);
if (StringUtils.isNotEmpty(identifier)) {
pathway.setAttribute("identifier", identifier);
store(pathway);
}
}
for (Item item : bioentities.values()) {
store(item);
}
}
/**
* Class to hold the config info for each taxonId.
*/
class Config
{
protected String bioentity;
protected String identifier;
protected String db;
/**
* Constructor.
*/
Config() {
// nothing to do
}
/**
* @return the bioentity
*/
public String getBioentity() {
return bioentity;
}
/**
* @param bioentity the bioentity to set
*/
public void setBioentity(String bioentity) {
this.bioentity = bioentity;
}
/**
* @return the identifier
*/
public String getIdentifier() {
return identifier;
}
/**
* @param identifier the identifier to set
*/
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
/**
* @return the db
*/
public String getDb() {
return db;
}
/**
* @param db the db to set
*/
public void setDb(String db) {
this.db = db;
}
}
}
|
package org.intermine.bio.dataconversion;
import java.io.BufferedReader;
import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.intermine.dataconversion.ItemWriter;
import org.intermine.metadata.Model;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.xml.full.Attribute;
import org.intermine.xml.full.Item;
/**
* @author Julie Sullivan
*/
public class RnaiConverter extends BioFileConverter
{
private static final String DATASET_TITLE = "GenomeRNAi data set";
private static final String DATA_SOURCE_NAME = "German Cancer Research Center (DKFZ)";
private static final Logger LOG = Logger.getLogger(RnaiConverter.class);
private Map<String, String> genes = new HashMap<String, String>();
private Map<String, String> publications = new HashMap<String, String>();
private Map<String, String> screens = new HashMap<String, String>();
private static final String TAXON_FLY = "7227";
private static final String NCBI = "NCBI Entrez Gene identifiers";
private Item screen;
// RNAi people use "np" to signal that there is no data. There is actually a gene with Np
    // as its symbol, so we have to test for this. All identifiers should start with FBgn
private static final String NO_DATA = "np";
protected IdResolver rslv;
/**
* Constructor
* @param writer the ItemWriter used to handle the resultant items
* @param model the Model
*/
public RnaiConverter(ItemWriter writer, Model model) {
super(writer, model, DATA_SOURCE_NAME, DATASET_TITLE);
}
/**
* {@inheritDoc}
*/
@Override
public void process(Reader reader) throws Exception {
if (rslv == null) {
rslv = IdResolverService.getFlyIdResolver();
}
BufferedReader bufferedReader = new BufferedReader(reader);
String line;
while ((line = bufferedReader.readLine()) != null) {
            if (line.startsWith("#")) {
processScreen(line);
} else {
String[] cols = line.split("\t");
if (cols.length < 7) {
continue;
}
processResult(cols);
}
}
}
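    /*
     * Illustrative input layout implied by process()/processScreen()/processResult() above
     * (the concrete values are made up):
     *
     *   #Stable ID=GR00123-A
     *   #Screen Title=Example kinase screen
     *   #Pubmed ID=12345678
     *   <screenId> \t <entrez id> \t <FBgn id> \t ... \t <reagent id> \t <score> \t <phenotype> [\t <conditions>]
     *
     * Header lines start with '#' and hold key=value pairs; data lines are tab separated with
     * at least seven columns.
     */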
private void processScreen(String line) throws ObjectStoreException {
String[] bits = line.split("=");
if (bits.length != 2) {
return;
}
String key = bits[0];
        key = key.replace("#", "");
String value = bits[1];
if ("Stable ID".equals(key)) {
screen = createItem("RNAiScreen");
screen.setAttribute("identifier", value);
} else if ("Screen Title".equals(key)) {
screen.setAttribute("name", value);
} else if ("Pubmed ID".equals(key)) {
String refId = getPublication(value);
screen.setReference("publication", refId);
} else if ("Biosource".equals(key)) {
screen.setAttribute("bioSourceType", value);
} else if ("Biomodel".equals(key)) {
screen.setAttribute("bioSourceName", value);
} else if ("Assay".equals(key)) {
screen.setAttribute("assay", value);
} else if ("Method".equals(key)) {
screen.setAttribute("method", value);
} else if ("Reagent Type".equals(key)) {
screen.setAttribute("reagentType", value);
} else if ("Score Type".equals(key)) {
screen.setAttribute("scoreType", value);
} else if ("Cutoff".equals(key)) {
screen.setAttribute("scoreCutoff", value);
} else if (key.startsWith("Library")) {
Attribute attr = screen.getAttribute("library");
String library = null;
if (attr != null) {
library = attr.getValue();
}
if (library == null) {
screen.setAttribute("library", key + ": " + value);
} else {
screen.setAttribute("library", library + "; " + key + ": " + value);
}
}
}
private void storeScreen(String identifier) throws ObjectStoreException {
if (screens.get(identifier) == null) {
store(screen);
screens.put(identifier, screen.getIdentifier());
}
}
private void processResult(String[] line) throws ObjectStoreException {
String screenId = line[0];
String geneRefId = null;
String fbgn = line[2];
String ncbi = line[1];
if (fbgn.contains(",") || ncbi.contains(",")) {
return;
}
if (StringUtils.isEmpty(fbgn) || NO_DATA.equals(fbgn)) {
// some only have entrez IDs and no FBgns. try both
geneRefId = getGene(ncbi);
} else {
geneRefId = getGene(fbgn);
}
String reagentId = line[4];
String score = line[5];
String phenotype = line[6];
String conditions = null;
if (line.length > 7) {
conditions = line[7];
}
storeScreen(screenId);
Item result = createItem("RNAiResult");
if (StringUtils.isNotEmpty(reagentId)) {
result.setAttribute("reagentId", reagentId);
}
if (StringUtils.isNotEmpty(phenotype)) {
result.setAttribute("phenotype", phenotype);
}
if (StringUtils.isNotEmpty(conditions)) {
result.setAttribute("conditions", conditions);
}
if (StringUtils.isNotEmpty(score)) {
result.setAttribute("score", score);
}
if (geneRefId != null) {
result.setReference("gene", geneRefId);
if (StringUtils.isNotEmpty(ncbi)) {
createCrossReference(geneRefId, ncbi, NCBI, true);
}
}
result.setReference("rnaiScreen", screen);
store(result);
}
private String getPublication(String pubmedId) throws ObjectStoreException {
String refId = publications.get(pubmedId);
if (refId == null) {
Item publication = createItem("Publication");
publication.setAttribute("pubMedId", pubmedId);
refId = publication.getIdentifier();
publications.put(pubmedId, refId);
store(publication);
}
return refId;
}
private String getGene(String identifier) throws ObjectStoreException {
if (identifier == null) {
throw new RuntimeException("geneSymbol can't be null");
}
if (rslv == null || !rslv.hasTaxon(TAXON_FLY)) {
return null;
}
int resCount = rslv.countResolutions(TAXON_FLY, identifier);
if (resCount != 1) {
LOG.info("RESOLVER: failed to resolve gene to one identifier, ignoring gene: "
+ identifier + " count: " + resCount + " FBgn: "
+ rslv.resolveId(TAXON_FLY, identifier));
return null;
}
String primaryIdentifier = rslv.resolveId(TAXON_FLY, identifier).iterator().next();
String refId = genes.get(primaryIdentifier);
if (refId == null) {
Item item = createItem("Gene");
item.setAttribute("primaryIdentifier", primaryIdentifier);
item.setReference("organism", getOrganism(TAXON_FLY));
refId = item.getIdentifier();
store(item);
genes.put(primaryIdentifier, refId);
}
return refId;
}
}
|
package org.intermine.bio.web.displayer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
import org.intermine.api.InterMineAPI;
import org.intermine.api.query.PathQueryExecutor;
import org.intermine.api.results.ExportResultsIterator;
import org.intermine.api.results.ResultElement;
import org.intermine.model.InterMineObject;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.pathquery.Constraints;
import org.intermine.pathquery.OrderDirection;
import org.intermine.pathquery.PathQuery;
import org.intermine.web.displayer.ReportDisplayer;
import org.intermine.web.logic.config.ReportDisplayerConfig;
import org.intermine.web.logic.results.ReportObject;
import org.intermine.web.logic.session.SessionMethods;
/**
*
* @author radek
*
*/
public class MetabolicGeneSummaryDisplayer extends ReportDisplayer
{
protected static final Logger LOG = Logger.getLogger(MetabolicGeneSummaryDisplayer.class);
/**
* Construct with config and the InterMineAPI.
* @param config to describe the report displayer
* @param im the InterMine API
*/
public MetabolicGeneSummaryDisplayer(ReportDisplayerConfig config, InterMineAPI im) {
super(config, im);
}
@Override
public void display(HttpServletRequest request, ReportObject reportObject) {
GeneSummary summary = new GeneSummary(reportObject.getObject(), request);
// 1. Pathways count
summary.addCollectionCount("Pathways", "Reactome, KEGG", "pathways", "pathways");
// 2. Diseases count
summary.addCollectionCount("Diseases", "OMIM", "diseases", "diseases");
// 3. Mouse Alleles count
if (summary.isThisAMouser()) {
summary.addCollectionCount("Mouse Alleles (MGI)", "mouse alleles", "alleles",
"MouseAllelesDisplayer");
} else {
summary.addCollectionCount("Mouse Alleles (MGI)", "mouse alleles",
allelesPathQuery(summary.getObjectId()), "MouseAllelesDisplayer");
}
// 4. GOTerm count
summary.addCollectionCount("Gene Ontology", " ",
goTermPathQuery(summary.getObjectId()), "GeneOntologyDisplayer");
// on sapien pages:
if (summary.isThisAHuman()) {
// ArrayExpress Gene Expression Tissues & Diseases
ArrayList arr = new ArrayList();
arr.add(this.arrayAtlasExpressionTissues(summary));
arr.add(this.arrayAtlasExpressionDiseases(summary));
summary.addCustom("Expression", "Array Express (E-MTAB 62)",
arr, "GeneExpressionAtlasTissuesDisplayer",
"metabolicGeneSummaryArrayExpressExpressionDisplayer.jsp");
}
request.setAttribute("summary", summary);
}
private Object arrayAtlasExpressionTissues(GeneSummary summary) {
PathQuery query = new PathQuery(im.getModel());
query.addViews("Gene.atlasExpression.condition", "Gene.atlasExpression.expression");
query.addOrderBy("Gene.atlasExpression.pValue", OrderDirection.ASC);
query.addConstraint(Constraints.eq("Gene.id", summary.getObjectId().toString()), "A");
query.addConstraint(Constraints.lessThan("Gene.atlasExpression.pValue", "1E-20"), "B");
query.addConstraint(Constraints.eq("Gene.atlasExpression.type", "organism_part"), "D");
query.addConstraint(Constraints.greaterThan("Gene.atlasExpression.tStatistic", "10"), "E");
query.addConstraint(Constraints.lessThan("Gene.atlasExpression.tStatistic", "-10"), "F");
query.addConstraint(Constraints.neq("Gene.atlasExpression.condition", "(empty)"), "G");
query.setConstraintLogic("A and B and D and (E or F) and G");
ExportResultsIterator results = summary.getExecutor().execute((PathQuery) query);
HashMap<String, String> tissues = new HashMap<String, String>();
while (results.hasNext()) {
List<ResultElement> item = results.next();
String tissue = item.get(0).getField().toString();
String regulation = item.get(1).getField().toString();
// the same tissue can appear more than once; later values simply overwrite earlier ones
tissues.put(tissue, regulation);
}
return tissues;
}
private Object arrayAtlasExpressionDiseases(GeneSummary summary) {
PathQuery query = new PathQuery(im.getModel());
query.addViews("Gene.atlasExpression.condition", "Gene.atlasExpression.expression");
query.addOrderBy("Gene.atlasExpression.pValue", OrderDirection.ASC);
query.addConstraint(Constraints.eq("Gene.id", summary.getObjectId().toString()), "A");
query.addConstraint(Constraints.lessThan("Gene.atlasExpression.pValue", "1E-20"), "B");
query.addConstraint(Constraints.eq("Gene.atlasExpression.type", "disease_state"), "D");
query.addConstraint(Constraints.greaterThan("Gene.atlasExpression.tStatistic", "10"), "E");
query.addConstraint(Constraints.lessThan("Gene.atlasExpression.tStatistic", "-10"), "F");
query.addConstraint(Constraints.neq("Gene.atlasExpression.condition", "(empty)"), "G");
query.setConstraintLogic("A and B and D and (E or F) and G");
ExportResultsIterator results = summary.getExecutor().execute((PathQuery) query);
HashMap<String, String> diseases = new HashMap<String, String>();
while (results.hasNext()) {
List<ResultElement> item = results.next();
String disease = item.get(0).getField().toString();
String regulation = item.get(1).getField().toString();
// obviously, we can have the same disease appear 2x (we will), but we don't care...
diseases.put(disease, regulation);
}
return diseases;
}
/**
* EMTAB-62 link generator from ebi.ac.uk
* @param primaryId
* @return
* @deprecated because the image is too big
*/
@SuppressWarnings("unused")
@java.lang.Deprecated
private String emtabExpression(String primaryId) {
if (primaryId != null) {
return "http:
}
return null;
}
/**
* Generate a PathQuery for mouse alleles of the gene's homologues.
* @param objectId the Gene object ID to constrain on
* @return the PathQuery
*/
private PathQuery allelesPathQuery(Integer objectId) {
PathQuery query = new PathQuery(im.getModel());
query.addViews("Gene.homologues.homologue.alleles.primaryIdentifier");
query.addConstraint(Constraints.eq("Gene.homologues.homologue.organism.shortName",
"M. musculus"), "A");
query.addConstraint(Constraints.eq("Gene.id", objectId.toString()), "B");
query.setConstraintLogic("A and B");
return query;
}
/**
* Generate a PathQuery for the gene's GO terms.
* @param objectId the Gene object ID to constrain on
* @return the PathQuery
*/
private PathQuery goTermPathQuery(Integer objectId) {
PathQuery query = new PathQuery(im.getModel());
query.addViews("Gene.goAnnotation.ontologyTerm.name");
query.addOrderBy("Gene.goAnnotation.ontologyTerm.name", OrderDirection.ASC);
query.addConstraint(Constraints.eq("Gene.id", objectId.toString()));
// parents have to be main ontology, to exclude the root terms
query.addConstraint(Constraints.oneOfValues("Gene.goAnnotation.ontologyTerm.parents.name",
GeneOntologyDisplayer.ONTOLOGIES));
// not a NOT relationship
query.addConstraint(Constraints.isNull("Gene.goAnnotation.qualifier"));
return query;
}
/**
*
* Internal wrapper.
* @author radek
*
*/
public class GeneSummary
{
private InterMineObject imObj;
private PathQueryExecutor executor = null;
private LinkedHashMap<String, HashMap<String, Object>> storage;
/**
*
* @param imObj InterMineObject
* @param request Request
*/
public GeneSummary(InterMineObject imObj, HttpServletRequest request) {
this.imObj = imObj;
storage = new LinkedHashMap<String, HashMap<String, Object>>();
executor = im.getPathQueryExecutor(SessionMethods.getProfile(request.getSession()));
}
/**
* Add a custom object to the displayer.
* @param key to show under in the summary
* @param description to show under the title
* @param data to save on the wrapper object
* @param anchor says where we will scroll on click, an ID attr of the target element
* @param jsp to include that knows how to display us
*/
public void addCustom(String key, String description,
Object data, String anchor, String jsp) {
storage.put(key, createWrapper("custom", data, anchor, description, jsp));
}
/**
* Add collection count to the summary.
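*
* <p>For example, as called from the displayer's <code>display()</code> method:
* <pre>
* summary.addCollectionCount("Pathways", "Reactome, KEGG", "pathways", "pathways");
* summary.addCollectionCount("Gene Ontology", " ",
*     goTermPathQuery(summary.getObjectId()), "GeneOntologyDisplayer");
* </pre>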
* @param key to show under in the summary
* @param description to show under the title
* @param param can be a fieldName or a PathQuery
* @param anchor says where we will scroll on click, an ID attr of the target element
*/
public void addCollectionCount(String key, String description, Object param,
String anchor) {
if (param instanceof PathQuery) {
try {
storage.put(key, createWrapper("integer", executor.count((PathQuery)
param), anchor, description, null));
} catch (ObjectStoreException e) {
LOG.error("Problem running PathQuery " + e.toString());
}
} else if (param instanceof String) {
Collection<?> coll = null;
try {
coll = (Collection<?>) imObj.getFieldValue(param.toString());
if (coll != null) {
storage.put(key, createWrapper("integer", coll.size(), anchor,
description, null));
}
} catch (IllegalAccessException e) {
LOG.error("The field " + param + " does not exist");
}
} else {
storage.put(key, createWrapper("unknown", param, anchor, description, null));
}
}
private HashMap<String, Object> createWrapper(String type, Object data, String anchor,
String description, String jsp) {
HashMap<String, Object> inner = new HashMap<String, Object>();
inner.put("type", type);
inner.put("data", data);
inner.put("anchor", anchor);
inner.put("description", description);
if (jsp != null) {
inner.put("jsp", jsp);
}
return inner;
}
/**
* Add collection distinct count to the summary. Will get the distinct value referenced
* and get their count.
* @param key to show under in the summary
* @param description to show under the title
* @param param can be a fieldName or a PathQuery
* @param anchor says where we will scroll on click, an ID attr of the target element
*/
public void addCollectionDistinctCount(String key, String description, Object param,
String anchor) {
if (param instanceof PathQuery) {
ExportResultsIterator results = executor.execute((PathQuery) param);
HashMap<String, Integer> temp = new HashMap<String, Integer>();
while (results.hasNext()) {
List<ResultElement> item = results.next();
String value = item.get(0).getField().toString();
if (!temp.keySet().contains(value)) {
temp.put(value, 0);
}
temp.put(value, temp.get(value) + 1);
}
storage.put(key, createWrapper("map", temp, anchor, description, null));
} else {
storage.put(key, createWrapper("unknown", param, anchor, description, null));
}
}
/**
* Add a link to an image for the summary.
* @param key to show under in the summary
* @param description to show under the title
* @param link refers to the src attr of the img element
* @param anchor says where we will scroll on click, an ID attr of the target element
*/
public void addImageLink(String key, String link, String anchor, String description) {
storage.put(key, createWrapper("image", link, anchor, description, null));
}
/**
*
* @return InterMineObject ID
*/
public Integer getObjectId() {
return imObj.getId();
}
/**
*
* @return true if we are on a mouseified gene
*/
public Boolean isThisAMouser() {
try {
return "Mus".equals(((InterMineObject) imObj.getFieldValue("organism"))
.getFieldValue("genus"));
} catch (IllegalAccessException e) {
e.printStackTrace();
}
return false;
}
/**
*
* @return true if we are on a sapien gene
*/
public Boolean isThisAHuman() {
try {
return "Homo".equals(((InterMineObject) imObj.getFieldValue("organism"))
.getFieldValue("genus"));
} catch (IllegalAccessException e) {
e.printStackTrace();
}
return false;
}
/**
*
* @return ReportObject primaryIdentifier
*/
public String getPrimaryId() {
try {
return (String) imObj.getFieldValue("primaryIdentifier");
} catch (IllegalAccessException e) {
LOG.error("The field primaryIdentifier does not exist");
}
return null;
}
/**
*
* @return PathQuery Executor
*/
public PathQueryExecutor getExecutor() {
return executor;
}
/**
*
* @return Map of the fields configged here for the JSP to traverse
*/
public LinkedHashMap<String, HashMap<String, Object>> getFields() {
return storage;
}
}
}
|
package messages;
public enum MessageType {
// Base (Intern)
SERVER_SHUTDOWN(MessageGroup.BASE, MessageClass.INTERN),
SERVER_RESET(MessageGroup.BASE, MessageClass.INTERN),
FREE_RESOURCES(MessageGroup.BASE, MessageClass.INTERN),
SET_HARDWARE_STATE(MessageGroup.BASE, MessageClass.INTERN),
// Client
VOID(MessageGroup.CLIENT, MessageClass.SINGLE),
ECHO_REQ(MessageGroup.CLIENT, MessageClass.SINGLE),
ECHO_RES(MessageGroup.CLIENT, MessageClass.SINGLE),
ERROR(MessageGroup.CLIENT, MessageClass.SINGLE),
CLIENT_START(MessageGroup.CLIENT, MessageClass.SINGLE),
CLIENT_CONNECTED(MessageGroup.CLIENT, MessageClass.SINGLE),
CLIENT_CLOSE(MessageGroup.CLIENT, MessageClass.SINGLE),
CLIENT_SHUTDOWN(MessageGroup.CLIENT, MessageClass.SINGLE),
CLIENT_RESET(MessageGroup.CLIENT, MessageClass.SINGLE),
CLIENT_SELF_TESTING(MessageGroup.CLIENT, MessageClass.SINGLE),
// Server
MAX_CLIENT_COUNT(MessageGroup.SERVER, MessageClass.GROUP),
NEW_CLIENT_STARTED(MessageGroup.SERVER, MessageClass.GROUP),
CLIENT_CLOSED(MessageGroup.SERVER, MessageClass.GROUP),
RESET_CLIENT(MessageGroup.SERVER, MessageClass.SINGLE),
SERVER_INFO_REQ(MessageGroup.SERVER, MessageClass.SINGLE),
SERVER_INFO_RES(MessageGroup.SERVER, MessageClass.SINGLE),
CON_CLIENTS_REQ(MessageGroup.SERVER, MessageClass.SINGLE),
CON_CLIENTS_RES(MessageGroup.SERVER, MessageClass.SINGLE),
SELF_TESTING_CLIENT(MessageGroup.SERVER, MessageClass.SINGLE),
// GlobalTimer
GLOBAL_TIMER_EVENT(MessageGroup.TIMER, MessageClass.BROADCAST, MessagePriority.REAL_TIME),
GET_GLOBAL_TIMER(MessageGroup.TIMER, MessageClass.SINGLE),
SET_GLOBAL_TIMER(MessageGroup.TIMER, MessageClass.GROUP),
GET_COLOR_THEME(MessageGroup.TIMER, MessageClass.SINGLE),
SET_COLOR_THEME(MessageGroup.TIMER, MessageClass.GROUP),
COLOR_THEME_EVENT(MessageGroup.TIMER, MessageClass.BROADCAST),
// Environment
GET_ENVIRONMENT(MessageGroup.ENV, MessageClass.SINGLE),
SET_ENVIRONMENT(MessageGroup.ENV, MessageClass.GROUP),
GET_AMBIENCE(MessageGroup.ENV, MessageClass.SINGLE),
SET_AMBIENCE(MessageGroup.ENV, MessageClass.GROUP),
GET_AMBIENT_LIGHT(MessageGroup.ENV, MessageClass.SINGLE),
SET_AMBIENT_LIGHT(MessageGroup.ENV, MessageClass.GROUP),
// Interface
SET_CONNECTIVITY(MessageGroup.INTERFACE, MessageClass.SINGLE),
// System
SET_AUTOMATIC_MODE(MessageGroup.SYSTEM, MessageClass.SINGLE, MessagePriority.REAL_TIME),
SET_EMERGENCY_STOP(MessageGroup.SYSTEM, MessageClass.SINGLE, MessagePriority.REAL_TIME),
SET_STANDBY_MODE(MessageGroup.SYSTEM, MessageClass.SINGLE),
GET_HARDWARE_STATE(MessageGroup.SYSTEM),
HARDWARE_STATE_CHANGED(MessageGroup.SYSTEM, MessageClass.BROADCAST),
HARDWARE_SHUTDOWN(MessageGroup.SYSTEM, MessageClass.SINGLE),
HARDWARE_RESET(MessageGroup.SYSTEM, MessageClass.SINGLE),
// Layouts
GET_LAYOUTS_REQ(MessageGroup.LAYOUTS, MessageClass.SINGLE),
GET_LAYOUTS_RES(MessageGroup.LAYOUTS, MessageClass.SINGLE),
DEL_LAYOUT(MessageGroup.LAYOUTS, MessageClass.SINGLE),
LAYOUT_DELETED(MessageGroup.LAYOUTS, MessageClass.GROUP),
CREATE_LAYOUT_REQ(MessageGroup.LAYOUTS, MessageClass.SINGLE),
CREATE_LAYOUT_RES(MessageGroup.LAYOUTS, MessageClass.SINGLE),
LAYOUT_CREATED(MessageGroup.LAYOUTS, MessageClass.GROUP),
UPDATE_LAYOUT(MessageGroup.LAYOUTS, MessageClass.SINGLE),
LAYOUT_UPDATED(MessageGroup.LAYOUTS, MessageClass.GROUP),
UNLOCK_LAYOUT(MessageGroup.LAYOUTS, MessageClass.SINGLE),
LAYOUT_UNLOCKED(MessageGroup.LAYOUTS, MessageClass.GROUP),
// Layout
GET_LAYOUT_REQ(MessageGroup.LAYOUT, MessageClass.SINGLE),
GET_LAYOUT_RES(MessageGroup.LAYOUT, MessageClass.SINGLE),
SAVE_LAYOUT(MessageGroup.LAYOUT, MessageClass.SINGLE),
// GUI
SYSTEM_NOTICE(MessageGroup.GUI, MessageClass.BROADCAST);
public enum MessageGroup {
BASE,
CLIENT,
SERVER,
TIMER,
ENV,
INTERFACE,
SYSTEM,
LAYOUTS,
LAYOUT,
GUI
}
public enum MessageClass {
INTERN,
SINGLE,
GROUP,
BROADCAST
}
public enum MessagePriority {
REAL_TIME(0),
HIGHEST(250),
VERY_HIGHT(500),
HIGHT(1000),
MIDDLE(2500),
LOW(5000),
VERY_LOW(10000),
LOWEST(25000);
private final long offset;
MessagePriority(long offset) {
this.offset = offset;
}
public long getOffset() {
return offset;
}
}
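// Note: each priority carries an offset (presumably a scheduling delay in
// milliseconds; the unit is not specified here). A dispatcher could, for example,
// order pending messages by enqueue time plus getMessagePriority().getOffset().
// This is an illustrative reading only; the dispatcher itself is not part of this file.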
private final MessageGroup grp;
private final MessagePriority pty;
private final MessageClass cls;
MessageType(MessageGroup grp) {
this(grp, MessageClass.INTERN, MessagePriority.LOWEST);
}
MessageType(MessageGroup grp, MessageClass cls) {
this(grp, cls, MessagePriority.LOWEST);
}
MessageType(MessageGroup grp, MessageClass cls, MessagePriority pty) {
this.grp = grp;
this.pty = pty;
this.cls = cls;
}
public MessageGroup getMessageGroup() {
return grp;
}
public MessagePriority getMessagePriority() {
return pty;
}
public MessageClass getMessageClass() {
return cls;
}
}
|
package play.libs;
import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.AsyncCompletionHandler;
import com.ning.http.client.FluentCaseInsensitiveStringsMap;
import com.ning.http.client.PerRequestConfig;
import com.ning.http.client.RequestBuilderBase;
import com.ning.http.client.Realm.AuthScheme;
import com.ning.http.client.Realm.RealmBuilder;
import com.ning.http.client.FluentStringsMap;
import java.io.IOException;
import java.io.InputStream;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URI;
import java.util.Collection;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import org.w3c.dom.Document;
import play.libs.F.Promise;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
/**
* Asynchronous API to query web services, as an HTTP client.
*
* The value returned is a Promise<Response>, and you should use Play's asynchronous mechanisms to use this response.
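*
* <p>A minimal usage sketch (the URL, header and parameter values here are illustrative only):
* <pre>
* Promise&lt;WS.Response&gt; promise = WS.url("http://example.com/api")
*     .setHeader("Accept", "application/json")
*     .setQueryParameter("q", "play")
*     .get();
* </pre>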
*/
public class WS {
private static AsyncHttpClient client() {
return play.api.libs.ws.WS.client();
}
/**
* Prepare a new request. You can then construct it by chaining calls.
*
* @param url the URL to request
*/
public static WSRequestHolder url(String url) {
return new WSRequestHolder(url);
}
/**
* Provides the bridge between Play and the underlying ning request
*/
public static class WSRequest extends RequestBuilderBase<WSRequest> {
private FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap();
private String method;
private String url;
public WSRequest(String method) {
super(WSRequest.class, method, false);
this.method = method;
}
private WSRequest auth(String username, String password, AuthScheme scheme) {
this.setRealm((new RealmBuilder())
.setScheme(scheme)
.setPrincipal(username)
.setPassword(password)
.setUsePreemptiveAuth(true)
.build());
return this;
}
/**
* Set an HTTP header.
*/
@Override
public WSRequest setHeader(String name, String value) {
headers.replace(name, value);
return super.setHeader(name, value);
}
/**
* Add an HTTP header (used for headers with multiple values).
*/
@Override
public WSRequest addHeader(String name, String value) {
if (value == null) {
value = "";
}
headers.add(name, value);
return super.addHeader(name, value);
}
/**
* Defines the request headers.
*/
@Override
public WSRequest setHeaders(FluentCaseInsensitiveStringsMap hdrs) {
headers = (hdrs == null ? new FluentCaseInsensitiveStringsMap() : hdrs);
return super.setHeaders(hdrs);
}
/**
* Defines the request headers.
*/
@Override
public WSRequest setHeaders(Map<String, Collection<String>> hdrs) {
headers = (hdrs == null ? new FluentCaseInsensitiveStringsMap() : new FluentCaseInsensitiveStringsMap(hdrs));
return super.setHeaders(hdrs);
}
/**
* Return the headers of the request being constructed
*/
public Map<String, List<String>> getAllHeaders() {
return headers;
}
public List<String> getHeader(String name) {
List<String> hdrs = headers.get(name);
if (hdrs == null) return new ArrayList<String>();
return hdrs;
}
public String getMethod() {
return this.method;
}
@Override
public WSRequest setUrl(String url) {
this.url = url;
return super.setUrl(url);
}
public String getUrl() {
return this.url;
}
public Promise<Response> execute() {
final scala.concurrent.Promise<Response> scalaPromise = scala.concurrent.Promise$.MODULE$.<Response>apply();
try {
WS.client().executeRequest(request, new AsyncCompletionHandler<com.ning.http.client.Response>() {
@Override
public com.ning.http.client.Response onCompleted(com.ning.http.client.Response response) {
final com.ning.http.client.Response ahcResponse = response;
scalaPromise.success(new Response(ahcResponse));
return response;
}
@Override
public void onThrowable(Throwable t) {
scalaPromise.failure(t);
}
});
} catch (IOException exception) {
scalaPromise.failure(exception);
}
return new Promise<Response>(scalaPromise.future());
}
}
/**
* Provides the user-facing API for building a WS request.
*/
public static class WSRequestHolder {
private final String url;
private Map<String, Collection<String>> headers = new HashMap<String, Collection<String>>();
private Map<String, Collection<String>> queryParameters = new HashMap<String, Collection<String>>();
private String username = null;
private String password = null;
private AuthScheme scheme = null;
private SignatureCalculator calculator = null;
private int timeout = 0;
private Boolean followRedirects = null;
public WSRequestHolder(String url) {
this.url = url;
}
/**
* Sets a header with the given name; this can be called repeatedly to add further values.
*
* @param name
* @param value
*/
public WSRequestHolder setHeader(String name, String value) {
if (headers.containsKey(name)) {
Collection<String> values = headers.get(name);
values.add(value);
} else {
List<String> values = new ArrayList<String>();
values.add(value);
headers.put(name, values);
}
return this;
}
/**
* Sets a query parameter with the given name; this can be called repeatedly to add further values.
*
* @param name
* @param value
*/
public WSRequestHolder setQueryParameter(String name, String value) {
if (queryParameters.containsKey(name)) {
Collection<String> values = queryParameters.get(name);
values.add(value);
} else {
List<String> values = new ArrayList<String>();
values.add(value);
queryParameters.put(name, values);
}
return this;
}
/**
* Sets the authentication header for the current request using BASIC authentication.
*
* @param username
* @param password
*/
public WSRequestHolder setAuth(String username, String password) {
this.username = username;
this.password = password;
this.scheme = AuthScheme.BASIC;
return this;
}
/**
* Sets the authentication header for the current request.
*
* @param username
* @param password
* @param scheme authentication scheme
*/
public WSRequestHolder setAuth(String username, String password, AuthScheme scheme) {
this.username = username;
this.password = password;
this.scheme = scheme;
return this;
}
public WSRequestHolder sign(SignatureCalculator calculator) {
this.calculator = calculator;
return this;
}
/**
* Sets whether redirects (301, 302) should be followed automatically
*
* @param followRedirects
*/
public WSRequestHolder setFollowRedirects(Boolean followRedirects) {
this.followRedirects = followRedirects;
return this;
}
/**
* Sets the request timeout in milliseconds
*
* @param timeout
*/
public WSRequestHolder setTimeout(int timeout) {
this.timeout = timeout;
return this;
}
/**
* Perform a GET on the request asynchronously.
*/
public Promise<Response> get() {
return execute("GET");
}
/**
* Perform a POST on the request asynchronously.
*
* @param body represented as String
*/
public Promise<Response> post(String body) {
return executeString("POST", body);
}
/**
* Perform a PUT on the request asynchronously.
*
* @param body represented as String
*/
public Promise<Response> put(String body) {
return executeString("PUT", body);
}
/**
* Perform a POST on the request asynchronously.
*
* @param body represented as an InputStream
*/
public Promise<Response> post(InputStream body) {
return executeIS("POST", body);
}
/**
* Perform a PUT on the request asynchronously.
*
* @param body represented as an InputStream
*/
public Promise<Response> put(InputStream body) {
return executeIS("PUT", body);
}
/**
* Perform a POST on the request asynchronously.
*
* @param body represented as a File
*/
public Promise<Response> post(File body) {
return executeFile("POST", body);
}
/**
* Perform a PUT on the request asynchronously.
*
* @param body represented as a File
*/
public Promise<Response> put(File body) {
return executeFile("PUT", body);
}
/**
* Perform a DELETE on the request asynchronously.
*/
public Promise<Response> delete() {
return execute("DELETE");
}
/**
* Perform a HEAD on the request asynchronously.
*/
public Promise<Response> head() {
return execute("HEAD");
}
/**
* Perform an OPTIONS on the request asynchronously.
*/
public Promise<Response> options() {
return execute("OPTIONS");
}
private Promise<Response> execute(String method) {
WSRequest req = new WSRequest(method).setUrl(url)
.setHeaders(headers)
.setQueryParameters(new FluentStringsMap(queryParameters));
return execute(req);
}
private Promise<Response> executeString(String method, String body) {
WSRequest req = new WSRequest(method).setBody(body)
.setUrl(url)
.setHeaders(headers)
.setQueryParameters(new FluentStringsMap(queryParameters));
return execute(req);
}
private Promise<Response> executeIS(String method, InputStream body) {
WSRequest req = new WSRequest(method).setBody(body)
.setUrl(url)
.setHeaders(headers)
.setQueryParameters(new FluentStringsMap(queryParameters));
return execute(req);
}
private Promise<Response> executeFile(String method, File body) {
WSRequest req = new WSRequest(method).setBody(body)
.setUrl(url)
.setHeaders(headers)
.setQueryParameters(new FluentStringsMap(queryParameters));
return execute(req);
}
private Promise<Response> execute(WSRequest req) {
if (this.timeout > 0) {
PerRequestConfig config = new PerRequestConfig();
config.setRequestTimeoutInMs(this.timeout);
req.setPerRequestConfig(config);
}
if (this.followRedirects != null) {
req.setFollowRedirects(this.followRedirects);
}
if (this.username != null && this.password != null && this.scheme != null)
req.auth(this.username, this.password, this.scheme);
if (this.calculator != null)
this.calculator.sign(req);
return req.execute();
}
}
/**
* A WS response.
*/
public static class Response {
private com.ning.http.client.Response ahcResponse;
public Response(com.ning.http.client.Response ahcResponse) {
this.ahcResponse = ahcResponse;
}
/**
* Get the HTTP status code of the response
*/
public int getStatus() {
return ahcResponse.getStatusCode();
}
/**
* Get the HTTP status text of the response
*/
public String getStatusText() {
return ahcResponse.getStatusText();
}
/**
* Get the given HTTP header of the response
*/
public String getHeader(String key) {
return ahcResponse.getHeader(key);
}
/**
* Get the response body as a string
*/
public String getBody() {
try {
return ahcResponse.getResponseBody();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Get the response body as a {@link Document DOM document}
* @return a DOM document
*/
public Document asXml() {
try {
return play.libs.XML.fromInputStream(ahcResponse.getResponseBodyAsStream(), "utf-8");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Get the response body as a {@link org.codehaus.jackson.JsonNode}
* @return the json response
*/
public JsonNode asJson() {
String json = getBody();
ObjectMapper mapper = new ObjectMapper();
try {
return mapper.readValue(json, JsonNode.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Get the response body as a stream
* @return The stream to read the response body from
*/
public InputStream getBodyAsStream() {
try {
return ahcResponse.getResponseBodyAsStream();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Get the response body as a byte array
* @return The byte array
*/
public byte[] asByteArray() {
try {
return ahcResponse.getResponseBodyAsBytes();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Return the request {@link java.net.URI}. Note that if the request got redirected, the value of the
* {@link java.net.URI} will be the last valid redirect url.
*
* @return the request {@link java.net.URI}.
*/
public URI getUri() {
try {
return ahcResponse.getUri();
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
}
/**
* Sign a WS call.
*/
public static interface SignatureCalculator {
/**
* Sign a request
*/
public void sign(WSRequest request);
}
}
|
package us.myles.ViaVersion.bungee.handlers;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageEncoder;
import us.myles.ViaVersion.api.PacketWrapper;
import us.myles.ViaVersion.api.data.UserConnection;
import us.myles.ViaVersion.api.type.Type;
import us.myles.ViaVersion.bungee.util.BungeePipelineUtil;
import us.myles.ViaVersion.exception.CancelException;
import us.myles.ViaVersion.packets.Direction;
import us.myles.ViaVersion.protocols.base.ProtocolInfo;
import us.myles.ViaVersion.util.PipelineUtil;
import java.util.List;
@ChannelHandler.Sharable
public class BungeeEncodeHandler extends MessageToMessageEncoder<ByteBuf> {
private final UserConnection info;
private boolean handledCompression = false;
public BungeeEncodeHandler(UserConnection info) {
this.info = info;
}
@Override
protected void encode(final ChannelHandlerContext ctx, ByteBuf bytebuf, List<Object> out) throws Exception {
if (bytebuf.readableBytes() == 0) {
throw new CancelException();
}
boolean needsCompress = false;
if (!handledCompression) {
if (ctx.pipeline().names().indexOf("compress") > ctx.pipeline().names().indexOf("via-encoder")) {
// Need to decompress this packet due to bad order
bytebuf = BungeePipelineUtil.decompress(ctx, bytebuf);
ChannelHandler encoder = ctx.pipeline().get("via-decoder");
ChannelHandler decoder = ctx.pipeline().get("via-encoder");
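// Re-register the Via handlers so they sit on the correct side of the
// (de)compression handlers from now on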
ctx.pipeline().remove(encoder);
ctx.pipeline().remove(decoder);
ctx.pipeline().addAfter("decompress", "via-decoder", encoder);
ctx.pipeline().addAfter("compress", "via-encoder", decoder);
needsCompress = true;
handledCompression = true;
}
}
// Increment sent
info.incrementSent();
if (info.isActive()) {
// Handle ID
int id = Type.VAR_INT.read(bytebuf);
// Transform
ByteBuf oldPacket = bytebuf.copy();
bytebuf.clear();
try {
PacketWrapper wrapper = new PacketWrapper(id, oldPacket, info);
ProtocolInfo protInfo = info.get(ProtocolInfo.class);
protInfo.getPipeline().transform(Direction.OUTGOING, protInfo.getState(), wrapper);
wrapper.writeToBuffer(bytebuf);
} catch (Exception e) {
bytebuf.clear();
throw e;
} finally {
oldPacket.release();
}
}
if (needsCompress) {
ByteBuf old = bytebuf;
bytebuf = BungeePipelineUtil.compress(ctx, bytebuf);
old.release();
out.add(bytebuf);
} else {
out.add(bytebuf.retain());
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
if (PipelineUtil.containsCause(cause, CancelException.class)) return;
super.exceptionCaught(ctx, cause);
}
}
|
package io.totemo.watson.macro;
import net.eq2online.macros.scripting.ScriptActionBase;
import net.eq2online.macros.scripting.api.IMacro;
import net.eq2online.macros.scripting.api.IMacroAction;
import net.eq2online.macros.scripting.api.IMacroActionProcessor;
import net.eq2online.macros.scripting.api.IMacroActionStackEntry;
import net.eq2online.macros.scripting.api.IReturnValue;
import net.eq2online.macros.scripting.api.IScriptAction;
import net.eq2online.macros.scripting.api.IScriptActionProvider;
import net.eq2online.macros.scripting.parser.ScriptContext;
import net.minecraft.client.Minecraft;
import watson.LiteModWatson;
/**
* A Macro/Keybind Mod script action for a WATSON(&lt;string&gt;) command.
*/
public class ScriptActionWatson extends ScriptActionBase
{
/**
* Default constructor.
*/
public ScriptActionWatson()
{
super(ScriptContext.MAIN, "watson");
}
/**
* @see net.eq2online.macros.scripting.api.IMacrosAPIModule#OnInit()
*/
@Override
public void onInit()
{
getContext().getCore().registerScriptAction(this);
}
/**
* @see net.eq2online.macros.scripting.ScriptAction#execute(net.eq2online.macros.scripting.api.IScriptActionProvider,
* net.eq2online.macros.scripting.api.IMacro,
* net.eq2online.macros.scripting.api.IMacroAction, java.lang.String,
* java.lang.String[])
*/
@Override
public IReturnValue execute(IScriptActionProvider provider, IMacro macro,
IMacroAction action, String command, String[] args)
{
LiteModWatson.sendChatMessage(Minecraft.getMinecraft().thePlayer, command);
return null;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#isThreadSafe()
*/
@Override
public boolean isThreadSafe()
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#isStackPushOperator()
*/
@Override
public boolean isStackPushOperator()
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#isStackPopOperator()
*/
@Override
public boolean isStackPopOperator()
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#canBePoppedBy(net.eq2online.macros.scripting.api.IScriptAction)
*/
@Override
public boolean canBePoppedBy(IScriptAction action)
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#executeStackPush(net.eq2online.macros.scripting.api.IScriptActionProvider,
* net.eq2online.macros.scripting.api.IMacro,
* net.eq2online.macros.scripting.api.IMacroAction, java.lang.String,
* java.lang.String[])
*/
@Override
public boolean executeStackPush(IScriptActionProvider provider, IMacro macro, IMacroAction instance, String rawParams,
String[] params)
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#executeStackPop(net.eq2online.macros.scripting.api.IScriptActionProvider,
* net.eq2online.macros.scripting.api.IMacro,
* net.eq2online.macros.scripting.api.IMacroAction, java.lang.String,
* java.lang.String[], net.eq2online.macros.scripting.api.IMacroAction)
*/
@Override
public boolean executeStackPop(IScriptActionProvider provider, IMacro macro, IMacroAction instance, String rawParams,
String[] params, IMacroAction popAction)
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#canBreak(net.eq2online.macros.scripting.api.IMacroActionProcessor,
* net.eq2online.macros.scripting.api.IScriptActionProvider,
* net.eq2online.macros.scripting.api.IMacro,
* net.eq2online.macros.scripting.api.IMacroAction,
* net.eq2online.macros.scripting.api.IMacroAction)
*/
@Override
public boolean canBreak(IMacroActionProcessor processor, IScriptActionProvider provider, IMacro macro, IMacroAction instance,
IMacroAction breakAction)
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#isConditionalOperator()
*/
@Override
public boolean isConditionalOperator()
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#isConditionalElseOperator(net.eq2online.macros.scripting.api.IScriptAction)
*/
@Override
public boolean isConditionalElseOperator(IScriptAction action)
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#matchesConditionalOperator(net.eq2online.macros.scripting.api.IScriptAction)
*/
@Override
public boolean matchesConditionalOperator(IScriptAction action)
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#executeConditional(net.eq2online.macros.scripting.api.IScriptActionProvider,
* net.eq2online.macros.scripting.api.IMacro,
* net.eq2online.macros.scripting.api.IMacroAction, java.lang.String,
* java.lang.String[])
*/
@Override
public boolean executeConditional(IScriptActionProvider provider, IMacro macro, IMacroAction instance, String rawParams,
String[] params)
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#executeConditionalElse(net.eq2online.macros.scripting.api.IScriptActionProvider,
* net.eq2online.macros.scripting.api.IMacro,
* net.eq2online.macros.scripting.api.IMacroAction, java.lang.String,
* java.lang.String[],
* net.eq2online.macros.scripting.api.IMacroActionStackEntry)
*/
@Override
public void executeConditionalElse(IScriptActionProvider provider, IMacro macro, IMacroAction instance, String rawParams,
String[] params, IMacroActionStackEntry top)
{
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#canExecuteNow(net.eq2online.macros.scripting.api.IScriptActionProvider,
* net.eq2online.macros.scripting.api.IMacro,
* net.eq2online.macros.scripting.api.IMacroAction, java.lang.String,
* java.lang.String[])
*/
@Override
public boolean canExecuteNow(IScriptActionProvider provider, IMacro macro, IMacroAction instance, String rawParams,
String[] params)
{
return true;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#onTick(net.eq2online.macros.scripting.api.IScriptActionProvider)
*/
@Override
public int onTick(IScriptActionProvider provider)
{
return 0;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#isClocked()
*/
@Override
public boolean isClocked()
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#isPermissable()
*/
@Override
public boolean isPermissable()
{
return false;
}
@Override
public String getPermissionGroup()
{
return null;
}
@Override
public void registerPermissions(String actionName, String actionGroup)
{
}
@Override
public boolean checkExecutePermission()
{
return true;
}
@Override
public boolean checkPermission(String actionName, String permission)
{
return false;
}
/**
* @see net.eq2online.macros.scripting.api.IScriptAction#onStopped(net.eq2online.macros.scripting.api.IScriptActionProvider,
* net.eq2online.macros.scripting.api.IMacro,
* net.eq2online.macros.scripting.api.IMacroAction)
*/
@Override
public void onStopped(IScriptActionProvider provider, IMacro macro, IMacroAction instance)
{
}
} // class ScriptActionWatson
|
package jade.tools.rma;
import jade.gui.AgentTree;
/**
Refresh the APDescription of a remote platform.
@author Tiziana Trucco - CSELT S.p.A.
@version $Date$ $Revision$
*/
class RefreshAPDescriptionAction extends PlatformAction {
private rma myRMA;
public RefreshAPDescriptionAction(rma anRMA, ActionProcessor actPro) {
super ("RefreshAPDescriptionIcon", "Refresh AP Description", actPro);
myRMA = anRMA;
}
public void doAction(AgentTree.Node node ) {
if(node instanceof AgentTree.RemoteAMSNode){
//System.out.println("Refresh AP Description");
myRMA.addRemotePlatform(((AgentTree.RemoteAMSNode)node).getAmsAID());
}
}
} // End of RefreshAPDescriptionAction
|
package org.xins.server;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
import org.xins.common.MandatoryArgumentChecker;
import org.xins.common.Utils;
import org.xins.common.collections.InvalidPropertyValueException;
import org.xins.common.collections.MissingRequiredPropertyException;
import org.xins.common.collections.PropertyReader;
import org.xins.common.manageable.BootstrapException;
import org.xins.common.manageable.DeinitializationException;
import org.xins.common.manageable.InitializationException;
import org.xins.common.manageable.Manageable;
import org.xins.common.net.IPAddressUtils;
import org.xins.common.spec.InvalidSpecificationException;
import org.xins.common.text.DateConverter;
import org.xins.common.text.ParseException;
import org.xins.common.xml.Element;
import org.xins.common.xml.ElementBuilder;
import org.xins.logdoc.LogdocSerializable;
/**
* Base class for API implementation classes.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:ernst.dehaan@nl.wanadoo.com">ernst.dehaan@nl.wanadoo.com</a>)
*
* @since XINS 1.0.0
*/
public abstract class API
extends Manageable
implements DefaultResultCodes {
// Class fields
/**
* Fully-qualified name of this class.
*/
private static final String CLASSNAME = API.class.getName();
/**
* Successful empty call result.
*/
private static final FunctionResult SUCCESSFUL_RESULT = new FunctionResult();
/**
* The runtime (initialization) property that defines the ACL (access
* control list) rules.
*/
private static final String ACL_PROPERTY = "org.xins.server.acl";
/**
* The name of the build property that specifies the version of the API.
*/
private static final String API_VERSION_PROPERTY = "org.xins.api.version";
/**
* The name of the build property that specifies the hostname of the
* machine the package was built on.
*/
private static final String BUILD_HOST_PROPERTY =
"org.xins.api.build.host";
/**
* The name of the build property that specifies the time the package was
* built.
*/
private static final String BUILD_TIME_PROPERTY =
"org.xins.api.build.time";
/**
* The name of the build property that specifies which version of XINS was
* used to build the package.
*/
private static final String BUILD_XINS_VERSION_PROPERTY =
"org.xins.api.build.version";
// Class functions
// Constructors
protected API(String name)
throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("name", name);
if (name.length() < 1) {
throw new IllegalArgumentException("name.length() (" + name.length() + " < 1");
}
// Initialize fields
_name = name;
_startupTimestamp = System.currentTimeMillis();
_lastStatisticsReset = _startupTimestamp;
_manageableObjects = new ArrayList();
_functionsByName = new HashMap();
_functionList = new ArrayList();
_resultCodesByName = new HashMap();
_resultCodeList = new ArrayList();
_emptyProperties = new RuntimeProperties();
_localIPAddress = IPAddressUtils.getLocalHostIPAddress();
}
// Fields
/**
* The engine that owns this <code>API</code> object.
*/
private Engine _engine;
/**
* The name of this API. Cannot be <code>null</code> and cannot be an empty
* string.
*/
private final String _name;
/**
* List of registered manageable objects. See {@link #add(Manageable)}.
*
* <p />This field is initialized to a non-<code>null</code> value by the
* constructor.
*/
private final List _manageableObjects;
/**
* Map that maps function names to <code>Function</code> instances.
* Contains all functions associated with this API.
*
* <p />This field is initialized to a non-<code>null</code> value by the
* constructor.
*/
private final Map _functionsByName;
/**
* List of all functions. This field cannot be <code>null</code>.
*/
private final List _functionList;
/**
* Map that maps result code names to <code>ResultCode</code> instances.
* Contains all result codes associated with this API.
*
* <p />This field is initialized to a non-<code>null</code> value by the
* constructor.
*/
private final Map _resultCodesByName;
/**
* List of all result codes. This field cannot be <code>null</code>.
*/
private final List _resultCodeList;
/**
* The build-time settings. This field is initialized exactly once by
* {@link #bootstrap(PropertyReader)}. It can be <code>null</code> before
* that.
*/
private PropertyReader _buildSettings;
/**
* The {@link RuntimeProperties} containing the method to verify and access
* the defined runtime properties.
*/
private RuntimeProperties _emptyProperties;
/**
* The runtime-time settings. This field is initialized by
* {@link #init(PropertyReader)}. It can be <code>null</code> before that.
*/
private PropertyReader _runtimeSettings;
/**
* Timestamp indicating when this API instance was created.
*/
private final long _startupTimestamp;
/**
* Last time the statistics were reset. Initially the startup timestamp.
*/
private long _lastStatisticsReset;
/**
* Host name for the machine that was used for this build.
*/
private String _buildHost;
/**
* Time stamp that indicates when this build was done.
*/
private String _buildTime;
/**
* XINS version used to build the web application package.
*/
private String _buildVersion;
/**
* The time zone used when generating dates for output.
*/
private TimeZone _timeZone;
/**
* Version of the API.
*/
private String _apiVersion;
/**
* The access rule list.
*/
private AccessRuleList _accessRuleList;
/**
* Indicates whether the API should wait for the statistic to be unlocked
* before continuing. This field is initially set to <code>false</code>.
*/
private boolean _statisticsLocked;
/**
* The API specification.
*/
private org.xins.common.spec.API _apiSpecification;
/**
* The local IP address.
*/
private String _localIPAddress;
// Methods
/**
* Gets the name of this API.
*
* @return
* the name of this API, never <code>null</code> and never an empty
* string.
*/
public final String getName() {
return _name;
}
/**
* Gets the properties specified in the implementation.
*
* @return
* the runtime properties for the API, cannot be <code>null</code>.
*/
public RuntimeProperties getProperties() {
return _emptyProperties;
}
/**
* Gets the timestamp that indicates when this <code>API</code> instance
* was created.
*
* @return
* the time this instance was constructed, as a number of milliseconds
* since midnight January 1, 1970.
*/
public final long getStartupTimestamp() {
return _startupTimestamp;
}
/**
* Returns the applicable time zone.
*
* @return
* the time zone, not <code>null</code>.
*/
public final TimeZone getTimeZone() {
return _timeZone;
}
protected final void bootstrapImpl(PropertyReader buildSettings)
throws IllegalStateException,
MissingRequiredPropertyException,
InvalidPropertyValueException,
BootstrapException {
// Check state
Manageable.State state = getState();
if (state != BOOTSTRAPPING) {
Log.log_3430(state.getName());
throw new IllegalStateException("State is " + state + " instead of " + BOOTSTRAPPING + '.');
}
// Log the time zone
_timeZone = TimeZone.getDefault();
String tzShortName = _timeZone.getDisplayName(false, TimeZone.SHORT);
String tzLongName = _timeZone.getDisplayName(false, TimeZone.LONG);
Log.log_3404(tzShortName, tzLongName);
// Store the build-time settings
_buildSettings = buildSettings;
// Get build-time properties
_apiVersion = _buildSettings.get(API_VERSION_PROPERTY );
_buildHost = _buildSettings.get(BUILD_HOST_PROPERTY );
_buildTime = _buildSettings.get(BUILD_TIME_PROPERTY );
_buildVersion = _buildSettings.get(BUILD_XINS_VERSION_PROPERTY);
Log.log_3212(_buildHost, _buildTime, _buildVersion, _name, _apiVersion);
// Check if build version identifies a production release of XINS
if (_buildVersion == null || ! Library.isProductionRelease(_buildVersion)) {
Log.log_3228(_buildVersion);
}
// Let the subclass perform initialization
// TODO: What if bootstrapImpl2 throws an unexpected exception?
bootstrapImpl2(buildSettings);
// Bootstrap all instances
int count = _manageableObjects.size();
for (int i = 0; i < count; i++) {
Manageable m = (Manageable) _manageableObjects.get(i);
String className = m.getClass().getName();
Log.log_3213(_name, className);
try {
m.bootstrap(_buildSettings);
Log.log_3214(_name, className);
// Missing property
} catch (MissingRequiredPropertyException exception) {
Log.log_3215(_name, className, exception.getPropertyName());
throw exception;
// Invalid property
} catch (InvalidPropertyValueException exception) {
Log.log_3216(_name, className, exception.getPropertyName(), exception.getPropertyValue(), exception.getReason());
throw exception;
// Catch BootstrapException and any other exceptions not caught
// by previous catch statements
} catch (Throwable exception) {
// Log event
Log.log_3217(exception, _name, className, exception.getMessage());
// Throw a BootstrapException. If necessary, wrap around the
// caught exception
if (exception instanceof BootstrapException) {
throw (BootstrapException) exception;
} else {
throw new BootstrapException(exception);
}
}
}
// Bootstrap all functions
count = _functionList.size();
for (int i = 0; i < count; i++) {
Function f = (Function) _functionList.get(i);
String functionName = f.getName();
Log.log_3220(_name, functionName);
try {
f.bootstrap(_buildSettings);
Log.log_3221(_name, functionName);
// Missing required property
} catch (MissingRequiredPropertyException exception) {
Log.log_3222(_name, functionName, exception.getPropertyName());
throw exception;
// Invalid property value
} catch (InvalidPropertyValueException exception) {
Log.log_3223(_name, functionName, exception.getPropertyName(), exception.getPropertyValue(), exception.getReason());
throw exception;
// Catch BootstrapException and any other exceptions not caught
// by previous catch statements
} catch (Throwable exception) {
// Log this event
Log.log_3224(exception, _name, functionName, exception.getMessage());
// Throw a BootstrapException. If necessary, wrap around the
// caught exception
if (exception instanceof BootstrapException) {
throw (BootstrapException) exception;
} else {
throw new BootstrapException(exception);
}
}
}
}
/**
* Bootstraps this API (implementation method).
*
* <p />The implementation of this method in class {@link API} is empty.
* Custom subclasses can perform any necessary bootstrapping in this
* class.
*
* <p />Note that bootstrapping and initialization are different. Bootstrap
* includes only the one-time configuration of the API based on the
* build-time settings, while initialization may happen more than once and is
* driven by the runtime settings.
*
* <p />The {@link #add(Manageable)} may be called from this method,
* and from this method <em>only</em>.
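*
* <p />A minimal sketch of an override (the <code>_myBackend</code> field is
* hypothetical):
*
* <pre>
* protected void bootstrapImpl2(PropertyReader buildSettings)
* throws BootstrapException {
*    add(_myBackend);
* }
* </pre>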
*
* @param buildSettings
* the build-time properties, guaranteed not to be <code>null</code>.
*
* @throws MissingRequiredPropertyException
* if a required property is not given.
*
* @throws InvalidPropertyValueException
* if a property has an invalid value.
*
* @throws BootstrapException
* if the bootstrap fails.
*/
protected void bootstrapImpl2(PropertyReader buildSettings)
throws MissingRequiredPropertyException,
InvalidPropertyValueException,
BootstrapException {
// empty
}
/**
* Stores a reference to the <code>Engine</code> that owns this
* <code>API</code> object.
*
* @param engine
* the {@link Engine} instance, should not be <code>null</code>.
*/
void setEngine(Engine engine) {
_engine = engine;
}
/**
* Triggers re-initialization of this API. This method is meant to be
* called by API function implementations when it is anticipated that the
* API should be re-initialized.
*/
protected final void reinitializeImpl() {
_engine.initAPI();
}
protected final void initImpl(PropertyReader runtimeSettings)
throws MissingRequiredPropertyException,
InvalidPropertyValueException,
InitializationException,
IllegalStateException {
// Check state
Manageable.State state = getState();
if (state != INITIALIZING) {
Log.log_3430(state.getName());
throw new IllegalStateException("State is " + state + " instead of " + INITIALIZING + '.');
}
Log.log_3405(_name);
// Store runtime settings
_runtimeSettings = runtimeSettings;
// Initialize ACL subsystem
String acl = runtimeSettings.get(ACL_PROPERTY);
String aclInterval = runtimeSettings.get(APIServlet.CONFIG_RELOAD_INTERVAL_PROPERTY);
int interval = APIServlet.DEFAULT_CONFIG_RELOAD_INTERVAL;
if (aclInterval != null && aclInterval.trim().length() > 0) {
interval = Integer.parseInt(aclInterval);
}
// Close the previous ACL
if (_accessRuleList != null) {
_accessRuleList.dispose();
}
if (acl == null || acl.trim().length() < 1) {
_accessRuleList = AccessRuleList.EMPTY;
Log.log_3426(ACL_PROPERTY);
} else {
try {
_accessRuleList = AccessRuleList.parseAccessRuleList(acl, interval);
int ruleCount = _accessRuleList.getRuleCount();
Log.log_3427(ruleCount);
} catch (ParseException exception) {
Log.log_3428(ACL_PROPERTY, acl, exception.getMessage());
throw new InvalidPropertyValueException(ACL_PROPERTY, acl, exception.getMessage());
}
}
// Initialize the RuntimeProperties object.
getProperties().init(runtimeSettings);
// Initialize all instances
int count = _manageableObjects.size();
for (int i = 0; i < count; i++) {
Manageable m = (Manageable) _manageableObjects.get(i);
String className = m.getClass().getName();
Log.log_3416(_name, className);
try {
m.init(runtimeSettings);
Log.log_3417(_name, className);
// Missing required property
} catch (MissingRequiredPropertyException exception) {
Log.log_3418(_name, className, exception.getPropertyName());
throw exception;
// Invalid property value
} catch (InvalidPropertyValueException exception) {
Log.log_3419(_name, className, exception.getPropertyName(), exception.getPropertyValue(), exception.getReason());
throw exception;
// Catch InitializationException and any other exceptions not caught
// by previous catch statements
} catch (Throwable exception) {
// Log this event
Log.log_3420(exception, _name, className, exception.getMessage());
if (exception instanceof InitializationException) {
throw (InitializationException) exception;
} else {
throw new InitializationException(exception);
}
}
}
// Initialize all functions
count = _functionList.size();
for (int i = 0; i < count; i++) {
Function f = (Function) _functionList.get(i);
String functionName = f.getName();
Log.log_3421(_name, functionName);
try {
f.init(runtimeSettings);
Log.log_3422(_name, functionName);
// Missing required property
} catch (MissingRequiredPropertyException exception) {
Log.log_3423(_name, functionName, exception.getPropertyName());
throw exception;
// Invalid property value
} catch (InvalidPropertyValueException exception) {
Log.log_3424(_name, functionName, exception.getPropertyName(), exception.getPropertyValue(), exception.getReason());
throw exception;
// Catch InitializationException and any other exceptions not caught
// by previous catch statements
} catch (Throwable exception) {
// Log this event
Log.log_3425(exception, _name, functionName);
// Throw an InitializationException. If necessary, wrap around the
// caught exception
if (exception instanceof InitializationException) {
throw (InitializationException) exception;
} else {
throw new InitializationException(exception);
}
}
}
// TODO: Call initImpl2(PropertyReader) ?
Log.log_3406(_name);
}
protected final void add(Manageable m)
throws IllegalStateException,
IllegalArgumentException {
// Check state
Manageable.State state = getState();
if (state != BOOTSTRAPPING) {
Log.log_3430(state.getName());
throw new IllegalStateException("State is " + state + " instead of " + BOOTSTRAPPING + '.');
}
// Check preconditions
MandatoryArgumentChecker.check("m", m);
String className = m.getClass().getName();
Log.log_3218(_name, className);
// Store the manageable object in the list
_manageableObjects.add(m);
Log.log_3219(_name, className);
}
/**
* Performs shutdown of this XINS API. This method will never throw any
* exception.
*/
protected final void deinitImpl() {
// Deinitialize instances
int count = _manageableObjects.size();
for (int i = 0; i < count; i++) {
Manageable m = (Manageable) _manageableObjects.get(i);
String className = m.getClass().getName();
Log.log_3603(_name, className);
try {
m.deinit();
Log.log_3604(_name, className);
} catch (DeinitializationException exception) {
Log.log_3605(_name, className, exception.getMessage());
} catch (Throwable exception) {
Log.log_3606(exception, _name, className);
}
}
_manageableObjects.clear();
// Deinitialize functions
count = _functionList.size();
for (int i = 0; i < count; i++) {
Function f = (Function) _functionList.get(i);
String functionName = f.getName();
Log.log_3607(_name, functionName);
try {
f.deinit();
Log.log_3608(_name, functionName);
} catch (DeinitializationException exception) {
Log.log_3609(_name, functionName, exception.getMessage());
} catch (Throwable exception) {
Log.log_3610(exception, _name, functionName);
}
}
}
final void functionAdded(Function function)
throws NullPointerException, IllegalStateException {
// Check state
Manageable.State state = getState();
if (state != UNUSABLE) {
Log.log_3430(state.getName());
throw new IllegalStateException("State is " + state + " instead of " + UNUSABLE + '.');
}
_functionsByName.put(function.getName(), function);
_functionList.add(function);
}
/**
* Callback method invoked when a result code is constructed.
*
* @param resultCode
* the result code that is added, not <code>null</code>.
*
* @throws NullPointerException
* if <code>resultCode == null</code>.
*/
final void resultCodeAdded(ResultCode resultCode)
throws NullPointerException {
_resultCodesByName.put(resultCode.getName(), resultCode);
_resultCodeList.add(resultCode);
}
/**
* Returns the function with the specified name.
*
* @param name
* the name of the function, will not be checked if it is
* <code>null</code>.
*
* @return
* the function with the specified name, or <code>null</code> if there
* is no match.
*/
final Function getFunction(String name) {
return (Function) _functionsByName.get(name);
}
/**
* Get the specification of the API.
*
* @return
* the API specifications.
*
* @throws InvalidSpecificationException
* if the specification cannot be found or is invalid.
*
* @see org.xins.common.spec.API
*
* @since XINS 1.3.0
*/
public final org.xins.common.spec.API getAPISpecification() throws InvalidSpecificationException {
if (_apiSpecification == null) {
String baseURL = null;
try {
baseURL = _engine.getServletConfig().getServletContext().getResource("specs/").toExternalForm();
} catch (MalformedURLException muex) {
// Leave the variable as null
}
_apiSpecification = new org.xins.common.spec.API(getClass(), baseURL);
}
return _apiSpecification;
}
final FunctionResult handleCall(long start,
FunctionRequest functionRequest,
String ip)
throws IllegalStateException,
NullPointerException,
NoSuchFunctionException,
AccessDeniedException {
final String THIS_METHOD = "handleCall(long,"
+ FunctionRequest.class.getName()
+ ",java.lang.String)";
// Check state first
assertUsable();
// Determine the function name
String functionName = functionRequest.getFunctionName();
// Check the access rule list
boolean allow;
// If no property is defined only localhost is allowed
if (_accessRuleList == AccessRuleList.EMPTY && (ip.equals("127.0.0.1")
|| ip.equals(_localIPAddress))) {
allow = true;
} else {
try {
allow = _accessRuleList.allow(ip, functionName);
// If the IP address cannot be parsed there is a programming error
// somewhere
} catch (ParseException exception) {
final String SUBJECT_CLASS = _accessRuleList.getClass().getName();
final String SUBJECT_METHOD = "allow(java.lang.String,java.lang.String)";
final String DETAIL = "Malformed IP address: \"" + ip + "\".";
throw Utils.logProgrammingError(CLASSNAME,
THIS_METHOD,
SUBJECT_CLASS,
SUBJECT_METHOD,
DETAIL,
exception);
}
}
if (!allow) {
throw new AccessDeniedException(ip, functionName);
}
// Wait until the statistics are returned. This is indicated by
// interrupt()-ing this thread.
while (_statisticsLocked) {
synchronized (this) {
try {
wait();
} catch (InterruptedException iex) {
// as expected
}
}
}
// Handle meta-functions
if (functionName.charAt(0) == '_') {
FunctionResult result;
if ("_NoOp".equals(functionName)) {
result = SUCCESSFUL_RESULT;
} else if ("_GetFunctionList".equals(functionName)) {
result = doGetFunctionList();
} else if ("_GetStatistics".equals(functionName)) {
String detailedArgument = functionRequest.getParameters().get("detailed");
boolean detailed = detailedArgument != null && detailedArgument.equals("true");
String resetArgument = functionRequest.getParameters().get("reset");
if (resetArgument != null && resetArgument.equals("true")) {
_statisticsLocked = true;
result = doGetStatistics(detailed);
doResetStatistics();
_statisticsLocked = false;
synchronized (this) {
notifyAll();
}
} else {
result = doGetStatistics(detailed);
}
} else if ("_GetVersion".equals(functionName)) {
result = doGetVersion();
} else if ("_GetSettings".equals(functionName)) {
result = doGetSettings();
} else if ("_DisableFunction".equals(functionName)) {
result = doDisableFunction(functionRequest.getParameters().get("functionName"));
} else if ("_EnableFunction".equals(functionName)) {
result = doEnableFunction(functionRequest.getParameters().get("functionName"));
} else if ("_ResetStatistics".equals(functionName)) {
result = doResetStatistics();
} else if ("_ReloadProperties".equals(functionName)) {
_engine.reloadPropertiesIfChanged();
result = SUCCESSFUL_RESULT;
} else {
throw new NoSuchFunctionException(functionName);
}
// Determine duration
long duration = System.currentTimeMillis() - start;
// Determine the error code; fall back to "0" if there is none
String code = result.getErrorCode();
if (code == null) {
code = "0";
}
// Prepare for transaction logging
LogdocSerializable serStart = new FormattedDate(start);
LogdocSerializable inParams = new FormattedParameters(functionRequest.getParameters());
LogdocSerializable outParams = new FormattedParameters(result.getParameters());
// Log transaction before returning the result
Log.log_3540(serStart, ip, functionName, duration, code, inParams,
outParams);
Log.log_3541(serStart, ip, functionName, duration, code);
return result;
}
// Short-circuit if we are shutting down
if (getState().equals(DEINITIALIZING)) {
Log.log_3611(_name, functionName);
return new FunctionResult("_InternalError");
}
// Get the function object
Function function = getFunction(functionName);
if (function == null) {
throw new NoSuchFunctionException(functionName);
}
// Forward the call to the function
return function.handleCall(start, functionRequest, ip);
}
/**
* Returns a list of all functions in this API. Per function the name,
* the version and the enabled flag are returned.
*
* @return
* the call result, never <code>null</code>.
*/
private final FunctionResult doGetFunctionList() {
// Initialize a builder
FunctionResult builder = new FunctionResult();
// Loop over all functions
int count = _functionList.size();
for (int i = 0; i < count; i++) {
// Get some details about the function
Function function = (Function) _functionList.get(i);
String name = function.getName();
String version = function.getVersion();
String enabled = function.isEnabled()
? "true"
: "false";
// Add an element describing the function
ElementBuilder functionElem = new ElementBuilder("function");
functionElem.setAttribute("name", name );
functionElem.setAttribute("version", version);
functionElem.setAttribute("enabled", enabled);
builder.add(functionElem.createElement());
}
return builder;
}
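   /*
    * Illustration only, derived from the loop above rather than from separate
    * documentation (the function name below is invented): each function is
    * reported as an element of the form
    *
    *   <function name="GetWeather" version="1.2" enabled="true"/>
    *
    * with the attribute values taken from Function.getName(), getVersion()
    * and isEnabled().
    */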
/**
* Converts the specified timestamp to a date string.
*
* @param millis
* the timestamp, as a number of milliseconds since the Epoch.
*
* @return
* the date string, never <code>null</code>.
*/
private final String toDateString(long millis) {
return DateConverter.toDateString(_timeZone, millis);
}
/**
* Returns the call statistics for all functions in this API.
*
* @param detailed
*    if <code>true</code>, the unsuccessful calls are broken down per
*    error code; otherwise they are reported without a per-error-code
*    breakdown.
*
* @return
* the call result, never <code>null</code>.
*/
private final FunctionResult doGetStatistics(boolean detailed) {
// Initialize a builder
FunctionResult builder = new FunctionResult();
builder.param("startup", toDateString(_startupTimestamp));
builder.param("lastReset", toDateString(_lastStatisticsReset));
builder.param("now", toDateString(System.currentTimeMillis()));
// Currently available processors
Runtime rt = Runtime.getRuntime();
try {
builder.param("availableProcessors",
String.valueOf(rt.availableProcessors()));
} catch (NoSuchMethodError error) {
// NOTE: Runtime.availableProcessors() is not available in Java 1.3
}
// Heap memory statistics
ElementBuilder heap = new ElementBuilder("heap");
long free = rt.freeMemory();
long total = rt.totalMemory();
heap.setAttribute("used", String.valueOf(total - free));
heap.setAttribute("free", String.valueOf(free));
heap.setAttribute("total", String.valueOf(total));
try {
heap.setAttribute("max", String.valueOf(rt.maxMemory()));
} catch (NoSuchMethodError error) {
// NOTE: Runtime.maxMemory() is not available in Java 1.3
}
builder.add(heap.createElement());
// Function-specific statistics
int count = _functionList.size();
for (int i = 0; i < count; i++) {
Function function = (Function) _functionList.get(i);
FunctionStatistics stats = function.getStatistics();
ElementBuilder functionElem = new ElementBuilder("function");
functionElem.setAttribute("name", function.getName());
// Successful
Element successful = stats.getSuccessfulElement();
functionElem.addChild(successful);
// Unsuccessful
Element[] unsuccessful = stats.getUnsuccessfulElement(detailed);
for(int j = 0; j < unsuccessful.length; j++) {
functionElem.addChild(unsuccessful[j]);
}
builder.add(functionElem.createElement());
}
return builder;
}
/**
* Returns version information: the Java, xmlenc, XINS and API versions.
*
* @return
* the call result, never <code>null</code>.
*/
private final FunctionResult doGetVersion() {
FunctionResult builder = new FunctionResult();
builder.param("java.version", System.getProperty("java.version"));
builder.param("xmlenc.version", org.znerd.xmlenc.Library.getVersion());
builder.param("xins.version", Library.getVersion());
builder.param("api.version", _apiVersion);
return builder;
}
/**
* Returns the settings.
*
* @return
* the call result, never <code>null</code>.
*/
private final FunctionResult doGetSettings() {
final String THIS_METHOD = "doGetSettings()";
FunctionResult builder = new FunctionResult();
// Build settings
Iterator names = _buildSettings.getNames();
ElementBuilder build = new ElementBuilder("build");
while (names.hasNext()) {
String key = (String) names.next();
String value = _buildSettings.get(key);
ElementBuilder property = new ElementBuilder("property");
property.setAttribute("name", key);
property.setText(value);
build.addChild(property.createElement());
}
builder.add(build.createElement());
// Runtime settings
names = _runtimeSettings.getNames();
ElementBuilder runtime = new ElementBuilder("runtime");
while (names.hasNext()) {
String key = (String) names.next();
String value = _runtimeSettings.get(key);
ElementBuilder property = new ElementBuilder("property");
property.setAttribute("name", key);
property.setText(value);
runtime.addChild(property.createElement());
}
builder.add(runtime.createElement());
// System properties
Properties sysProps;
try {
sysProps = System.getProperties();
} catch (SecurityException ex) {
final String SUBJECT_CLASS = "java.lang.System";
final String SUBJECT_METHOD = "getProperties()";
Utils.logProgrammingError(CLASSNAME, THIS_METHOD,
SUBJECT_CLASS, SUBJECT_METHOD,
null, ex);
sysProps = new Properties();
}
Enumeration e = sysProps.propertyNames();
ElementBuilder system = new ElementBuilder("system");
while (e.hasMoreElements()) {
String key = (String) e.nextElement();
String value = sysProps.getProperty(key);
if ( key != null && key.trim().length() > 0
&& value != null && value.trim().length() > 0) {
ElementBuilder property = new ElementBuilder("property");
property.setAttribute("name", key);
property.setText(value);
system.addChild(property.createElement());
}
}
builder.add(system.createElement());
return builder;
}
/**
* Enables a function.
*
* @param functionName
*    the name of the function to enable, can be <code>null</code>.
*
* @return
* the call result, never <code>null</code>.
*/
private final FunctionResult doEnableFunction(String functionName) {
// Get the name of the function to enable
if (functionName == null || functionName.length() < 1) {
InvalidRequestResult invalidRequest = new InvalidRequestResult();
invalidRequest.addMissingParameter("functionName");
return invalidRequest;
}
// Get the Function object
Function function = getFunction(functionName);
if (function == null) {
return new InvalidRequestResult();
}
// Enable the function
function.setEnabled(true);
return SUCCESSFUL_RESULT;
}
/**
* Disables a function.
*
* @param functionName
* the name of the function to disable, can be <code>null</code>.
*
* @return
* the call result, never <code>null</code>.
*/
private final FunctionResult doDisableFunction(String functionName) {
// Get the name of the function to disable
if (functionName == null || functionName.length() < 1) {
InvalidRequestResult invalidRequest = new InvalidRequestResult();
invalidRequest.addMissingParameter("functionName");
return invalidRequest;
}
// Get the Function object
Function function = getFunction(functionName);
if (function == null) {
return new InvalidRequestResult();
}
// Disable the function
function.setEnabled(false);
return SUCCESSFUL_RESULT;
}
/**
* Resets the statistics.
*
* @return
* the call result, never <code>null</code>.
*/
private final FunctionResult doResetStatistics() {
// Remember when we last reset the statistics
_lastStatisticsReset = System.currentTimeMillis();
// Function-specific statistics
int count = _functionList.size();
for (int i = 0; i < count; i++) {
Function function = (Function) _functionList.get(i);
function.getStatistics().resetStatistics();
}
return SUCCESSFUL_RESULT;
}
}
|
package com.jaeksoft.searchlib.config;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URISyntaxException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.servlet.http.HttpServletRequest;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.xpath.XPathExpressionException;
import org.apache.lucene.queryParser.ParseException;
import org.w3c.dom.DOMException;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.jaeksoft.searchlib.Client;
import com.jaeksoft.searchlib.SearchLibException;
import com.jaeksoft.searchlib.basket.BasketCache;
import com.jaeksoft.searchlib.collapse.CollapseMode;
import com.jaeksoft.searchlib.crawler.FieldMap;
import com.jaeksoft.searchlib.crawler.file.database.FileManager;
import com.jaeksoft.searchlib.crawler.file.database.FilePathManager;
import com.jaeksoft.searchlib.crawler.file.process.CrawlFileMaster;
import com.jaeksoft.searchlib.crawler.web.database.PatternManager;
import com.jaeksoft.searchlib.crawler.web.database.PropertyManager;
import com.jaeksoft.searchlib.crawler.web.database.UrlManager;
import com.jaeksoft.searchlib.crawler.web.process.CrawlMaster;
import com.jaeksoft.searchlib.crawler.web.robotstxt.RobotsTxtCache;
import com.jaeksoft.searchlib.facet.FacetField;
import com.jaeksoft.searchlib.filter.Filter;
import com.jaeksoft.searchlib.filter.FilterList;
import com.jaeksoft.searchlib.function.expression.SyntaxError;
import com.jaeksoft.searchlib.index.IndexAbstract;
import com.jaeksoft.searchlib.index.IndexConfig;
import com.jaeksoft.searchlib.index.IndexGroup;
import com.jaeksoft.searchlib.index.IndexSingle;
import com.jaeksoft.searchlib.parser.ParserSelector;
import com.jaeksoft.searchlib.plugin.IndexPluginTemplateList;
import com.jaeksoft.searchlib.render.Render;
import com.jaeksoft.searchlib.render.RenderJsp;
import com.jaeksoft.searchlib.render.RenderXml;
import com.jaeksoft.searchlib.request.SearchRequest;
import com.jaeksoft.searchlib.request.SearchRequestMap;
import com.jaeksoft.searchlib.result.Result;
import com.jaeksoft.searchlib.schema.Field;
import com.jaeksoft.searchlib.schema.FieldList;
import com.jaeksoft.searchlib.schema.Schema;
import com.jaeksoft.searchlib.snippet.SnippetField;
import com.jaeksoft.searchlib.sort.SortList;
import com.jaeksoft.searchlib.statistics.StatisticsList;
import com.jaeksoft.searchlib.util.XPathParser;
import com.jaeksoft.searchlib.util.XmlWriter;
public abstract class Config {
private IndexAbstract index = null;
private Schema schema = null;
private SearchRequestMap searchRequests = null;
private ExecutorService threadPool = null;
private StatisticsList statisticsList = null;
private BasketCache basketCache = null;
private ParserSelector parserSelector = null;
private UrlManager urlManager = null;
private PatternManager patternManager = null;
private FilePathManager filePatternManager = null;
private FileManager fileManager = null;
private PropertyManager propertyManager = null;
private XPathParser xppConfig = null;
private CrawlMaster webCrawlMaster = null;
private CrawlFileMaster fileCrawlMaster = null;
private FieldMap webCrawlerFieldMap = null;
private FieldMap fileCrawlerFieldMap = null;
private IndexPluginTemplateList indexPluginTemplateList = null;
private RobotsTxtCache robotsTxtCache = null;
private File indexDir;
private final Lock lock = new ReentrantLock(true);
protected Config(File indexDirectory, String configXmlResourceName,
boolean createIndexIfNotExists) throws SearchLibException {
try {
indexDir = indexDirectory;
if (!indexDir.isDirectory())
throw new SearchLibException("Expected to get a directory path");
if (configXmlResourceName == null)
xppConfig = new XPathParser(new File(indexDirectory,
"config.xml"));
else {
xppConfig = new XPathParser(getClass().getResourceAsStream(
configXmlResourceName));
}
index = getIndex(indexDir, xppConfig, createIndexIfNotExists);
schema = Schema.fromXmlConfig(xppConfig
.getNode("/configuration/schema"), xppConfig);
} catch (XPathExpressionException e) {
throw new SearchLibException(e);
} catch (DOMException e) {
throw new SearchLibException(e);
} catch (IOException e) {
throw new SearchLibException(e);
} catch (URISyntaxException e) {
throw new SearchLibException(e);
} catch (ParserConfigurationException e) {
throw new SearchLibException(e);
} catch (SAXException e) {
throw new SearchLibException(e);
} catch (InstantiationException e) {
throw new SearchLibException(e);
} catch (IllegalAccessException e) {
throw new SearchLibException(e);
} catch (ClassNotFoundException e) {
throw new SearchLibException(e);
}
}
public File getIndexDirectory() {
return indexDir;
}
private void saveConfigWithoutLock() throws IOException,
TransformerConfigurationException, SAXException {
File configFile = new File(indexDir, "config_tmp.xml");
if (!configFile.exists())
configFile.createNewFile();
PrintWriter pw = new PrintWriter(configFile);
try {
XmlWriter xmlWriter = new XmlWriter(pw, "UTF-8");
xmlWriter.startElement("configuration");
getIndex().writeXmlConfig(xmlWriter);
getSchema().writeXmlConfig(xmlWriter);
xmlWriter.endElement();
xmlWriter.endDocument();
pw.close();
pw = null;
configFile.renameTo(new File(indexDir, "config.xml"));
} finally {
if (pw != null)
pw.close();
}
}
public void saveParsers() throws IOException,
TransformerConfigurationException, SAXException, SearchLibException {
boolean success = false;
File file = new File(indexDir, "parsers_tmp.xml");
if (!file.exists())
file.createNewFile();
PrintWriter pw = new PrintWriter(file);
try {
XmlWriter xmlWriter = new XmlWriter(pw, "UTF-8");
getParserSelector().writeXmlConfig(xmlWriter);
xmlWriter.endDocument();
success = true;
} finally {
pw.close();
if (success)
file.renameTo(new File(indexDir, "parsers.xml"));
}
}
public void saveRequests() throws IOException,
TransformerConfigurationException, SAXException, SearchLibException {
boolean success = false;
File file = new File(indexDir, "requests_tmp.xml");
if (!file.exists())
file.createNewFile();
PrintWriter pw = new PrintWriter(file);
try {
XmlWriter xmlWriter = new XmlWriter(pw, "UTF-8");
getSearchRequestMap().writeXmlConfig(xmlWriter);
xmlWriter.endDocument();
success = true;
} finally {
pw.close();
if (success)
file.renameTo(new File(indexDir, "requests.xml"));
}
}
public void saveConfig() throws SearchLibException {
lock.lock();
try {
saveConfigWithoutLock();
} catch (TransformerConfigurationException e) {
throw new SearchLibException(e);
} catch (IOException e) {
throw new SearchLibException(e);
} catch (SAXException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
protected IndexAbstract getIndex(File indexDir, XPathParser xpp,
boolean createIndexIfNotExists) throws XPathExpressionException,
IOException, URISyntaxException, InstantiationException,
IllegalAccessException, ClassNotFoundException {
lock.lock();
try {
NodeList nodeList = xpp.getNodeList("/configuration/indices/index");
switch (nodeList.getLength()) {
case 0:
return null;
case 1:
return new IndexSingle(indexDir, new IndexConfig(xpp, xpp
.getNode("/configuration/indices/index")),
createIndexIfNotExists);
default:
return new IndexGroup(indexDir, xpp, xpp
.getNode("/configuration/indices"),
createIndexIfNotExists, getThreadPool());
}
} finally {
lock.unlock();
}
}
private ExecutorService getThreadPool() {
lock.lock();
try {
if (threadPool == null)
threadPool = Executors.newCachedThreadPool();
return threadPool;
} finally {
lock.unlock();
}
}
public Schema getSchema() {
return schema;
}
public BasketCache getBasketCache() {
lock.lock();
try {
if (basketCache == null)
basketCache = new BasketCache(100);
return basketCache;
} finally {
lock.unlock();
}
}
public CrawlMaster getWebCrawlMaster() throws SearchLibException {
lock.lock();
try {
if (webCrawlMaster != null)
return webCrawlMaster;
webCrawlMaster = new CrawlMaster(this);
return webCrawlMaster;
} finally {
lock.unlock();
}
}
public CrawlFileMaster getFileCrawlMaster() throws SearchLibException {
lock.lock();
try {
if (fileCrawlMaster != null)
return fileCrawlMaster;
fileCrawlMaster = new CrawlFileMaster(this);
return fileCrawlMaster;
} finally {
lock.unlock();
}
}
public ParserSelector getParserSelector() throws SearchLibException {
lock.lock();
try {
if (parserSelector == null) {
File parserFile = new File(indexDir, "parsers.xml");
if (parserFile.exists()) {
XPathParser xpp = new XPathParser(parserFile);
parserSelector = ParserSelector.fromXmlConfig(xpp, xpp
.getNode("/parsers"));
} else {
Node node = xppConfig.getNode("/configuration/parsers");
if (node != null)
parserSelector = ParserSelector.fromXmlConfig(
xppConfig, node);
}
}
return parserSelector;
} catch (XPathExpressionException e) {
throw new SearchLibException(e);
} catch (DOMException e) {
throw new SearchLibException(e);
} catch (IOException e) {
throw new SearchLibException(e);
} catch (ParserConfigurationException e) {
throw new SearchLibException(e);
} catch (SAXException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
public IndexAbstract getIndex() {
return this.index;
}
public IndexPluginTemplateList getIndexPluginTemplateList()
throws SearchLibException {
lock.lock();
try {
if (indexPluginTemplateList != null)
return indexPluginTemplateList;
Node node = xppConfig.getNode("/configuration/indexPlugins");
if (node == null)
return null;
indexPluginTemplateList = IndexPluginTemplateList.fromXmlConfig(
xppConfig, node);
return indexPluginTemplateList;
} catch (IOException e) {
throw new SearchLibException(e);
} catch (XPathExpressionException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
public StatisticsList getStatisticsList() throws SearchLibException {
try {
if (statisticsList == null)
statisticsList = StatisticsList.fromXmlConfig(xppConfig,
xppConfig.getNode("/configuration/statistics"));
return statisticsList;
} catch (XPathExpressionException e) {
throw new SearchLibException(e);
} catch (DOMException e) {
throw new SearchLibException(e);
} catch (InstantiationException e) {
throw new SearchLibException(e);
} catch (IllegalAccessException e) {
throw new SearchLibException(e);
} catch (ClassNotFoundException e) {
throw new SearchLibException(e);
} catch (IOException e) {
throw new SearchLibException(e);
}
}
public SearchRequest getNewSearchRequest() {
return new SearchRequest(this);
}
public SearchRequest getNewSearchRequest(String requestName)
throws SearchLibException {
return new SearchRequest(getSearchRequestMap().get(requestName));
}
public SearchRequestMap getSearchRequestMap() throws SearchLibException {
lock.lock();
try {
if (searchRequests == null) {
File requestFile = new File(indexDir, "requests.xml");
if (requestFile.exists()) {
XPathParser xpp = new XPathParser(requestFile);
searchRequests = SearchRequestMap.fromXmlConfig(this, xpp,
xpp.getNode("/requests"));
} else
searchRequests = SearchRequestMap.fromXmlConfig(this,
xppConfig, xppConfig
.getNode("/configuration/requests"));
}
return searchRequests;
} catch (XPathExpressionException e) {
throw new SearchLibException(e);
} catch (DOMException e) {
throw new SearchLibException(e);
} catch (ParseException e) {
throw new SearchLibException(e);
} catch (InstantiationException e) {
throw new SearchLibException(e);
} catch (IllegalAccessException e) {
throw new SearchLibException(e);
} catch (ClassNotFoundException e) {
throw new SearchLibException(e);
} catch (ParserConfigurationException e) {
throw new SearchLibException(e);
} catch (SAXException e) {
throw new SearchLibException(e);
} catch (IOException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
public UrlManager getUrlManager() throws SearchLibException {
lock.lock();
try {
if (urlManager == null)
urlManager = new UrlManager((Client) this, indexDir);
return urlManager;
} catch (FileNotFoundException e) {
throw new SearchLibException(e);
} catch (URISyntaxException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
public PatternManager getPatternManager() throws SearchLibException {
lock.lock();
try {
if (patternManager == null)
patternManager = new PatternManager(indexDir);
return patternManager;
} finally {
lock.unlock();
}
}
public FilePathManager getFilePathManager() throws SearchLibException {
lock.lock();
try {
if (filePatternManager == null)
filePatternManager = new FilePathManager(indexDir);
return filePatternManager;
} finally {
lock.unlock();
}
}
public FileManager getFileManager() throws SearchLibException {
lock.lock();
try {
if (fileManager == null)
fileManager = new FileManager((Client) this, indexDir);
return fileManager;
} catch (FileNotFoundException e) {
throw new SearchLibException(e);
} catch (URISyntaxException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
public PropertyManager getPropertyManager() throws SearchLibException {
lock.lock();
try {
if (propertyManager == null)
propertyManager = new PropertyManager(new File(indexDir,
"crawler-properties.xml"));
return propertyManager;
} catch (IOException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
public SearchRequest getNewSearchRequest(HttpServletRequest httpRequest)
throws ParseException, SyntaxError, SearchLibException {
String requestName = httpRequest.getParameter("qt");
if (requestName == null)
requestName = "search";
SearchRequest searchRequest = getNewSearchRequest(requestName);
if (searchRequest == null)
searchRequest = getNewSearchRequest();
String p;
if ((p = httpRequest.getParameter("index")) != null)
searchRequest.setIndexName(p);
if ((p = httpRequest.getParameter("query")) != null)
searchRequest.setQueryString(p);
else if ((p = httpRequest.getParameter("q")) != null)
searchRequest.setQueryString(p);
if ((p = httpRequest.getParameter("start")) != null)
searchRequest.setStart(Integer.parseInt(p));
if ((p = httpRequest.getParameter("rows")) != null)
searchRequest.setRows(Integer.parseInt(p));
if ((p = httpRequest.getParameter("lang")) != null)
searchRequest.setLang(p);
if ((p = httpRequest.getParameter("collapse.mode")) != null)
searchRequest.setCollapseMode(CollapseMode.valueOfLabel(p));
if ((p = httpRequest.getParameter("collapse.field")) != null)
searchRequest.setCollapseField(getSchema().getFieldList().get(p)
.getName());
if ((p = httpRequest.getParameter("collapse.max")) != null)
searchRequest.setCollapseMax(Integer.parseInt(p));
if ((p = httpRequest.getParameter("delete")) != null)
searchRequest.setDelete(true);
if ((p = httpRequest.getParameter("withDocs")) != null)
searchRequest.setWithDocument(true);
if ((p = httpRequest.getParameter("noCache")) != null)
searchRequest.setNoCache(true);
if ((p = httpRequest.getParameter("debug")) != null)
searchRequest.setDebug(true);
String[] values;
if ((values = httpRequest.getParameterValues("fq")) != null) {
FilterList fl = searchRequest.getFilterList();
for (String value : values)
if (value != null)
if (value.trim().length() > 0)
fl.add(value, Filter.Source.REQUEST);
}
if ((values = httpRequest.getParameterValues("rf")) != null) {
FieldList<Field> rf = searchRequest.getReturnFieldList();
for (String value : values)
if (value != null)
if (value.trim().length() > 0)
rf
.add(new Field(getSchema().getFieldList().get(
value)));
}
if ((values = httpRequest.getParameterValues("hl")) != null) {
FieldList<SnippetField> snippetFields = searchRequest
.getSnippetFieldList();
for (String value : values)
snippetFields.add(new SnippetField(getSchema().getFieldList()
.get(value).getName()));
}
if ((values = httpRequest.getParameterValues("fl")) != null) {
FieldList<Field> returnFields = searchRequest.getReturnFieldList();
for (String value : values)
returnFields.add(getSchema().getFieldList().get(value));
}
if ((values = httpRequest.getParameterValues("sort")) != null) {
SortList sortList = searchRequest.getSortList();
for (String value : values)
sortList.add(value);
}
if ((values = httpRequest.getParameterValues("facet")) != null) {
FieldList<FacetField> facetList = searchRequest.getFacetFieldList();
for (String value : values)
facetList.add(FacetField.buildFacetField(value, false));
}
if ((values = httpRequest.getParameterValues("facet.multi")) != null) {
FieldList<FacetField> facetList = searchRequest.getFacetFieldList();
for (String value : values)
facetList.add(FacetField.buildFacetField(value, true));
}
return searchRequest;
}
public Render getRender(HttpServletRequest request, Result result) {
Render render = null;
String p;
if ((p = request.getParameter("render")) != null) {
if ("jsp".equals(p))
render = new RenderJsp(request.getParameter("jsp"), result);
}
if (render == null)
render = new RenderXml(result);
return render;
}
public RobotsTxtCache getRobotsTxtCache() {
lock.lock();
try {
if (robotsTxtCache != null)
return robotsTxtCache;
robotsTxtCache = new RobotsTxtCache();
return robotsTxtCache;
} finally {
lock.unlock();
}
}
public FieldMap getWebCrawlerFieldMap() throws SearchLibException {
lock.lock();
try {
if (webCrawlerFieldMap == null)
webCrawlerFieldMap = new FieldMap(new File(indexDir,
"webcrawler-mapping.xml"));
return webCrawlerFieldMap;
} catch (IOException e) {
throw new SearchLibException(e);
} catch (XPathExpressionException e) {
throw new SearchLibException(e);
} catch (ParserConfigurationException e) {
throw new SearchLibException(e);
} catch (SAXException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
public FieldMap getFileCrawlerFieldMap() throws SearchLibException {
lock.lock();
try {
if (fileCrawlerFieldMap == null)
fileCrawlerFieldMap = new FieldMap(new File(indexDir,
"filecrawler-mapping.xml"));
return fileCrawlerFieldMap;
} catch (IOException e) {
throw new SearchLibException(e);
} catch (XPathExpressionException e) {
throw new SearchLibException(e);
} catch (ParserConfigurationException e) {
throw new SearchLibException(e);
} catch (SAXException e) {
throw new SearchLibException(e);
} finally {
lock.unlock();
}
}
}
|
package org.dynmap.bukkit;
import org.bukkit.Bukkit;
import org.dynmap.Log;
import org.dynmap.bukkit.helper.BukkitVersionHelper;
import org.dynmap.bukkit.helper.BukkitVersionHelperCB;
import org.dynmap.bukkit.helper.BukkitVersionHelperGlowstone;
import org.dynmap.bukkit.helper.v113_2.BukkitVersionHelperSpigot113_2;
import org.dynmap.bukkit.helper.v114_1.BukkitVersionHelperSpigot114_1;
import org.dynmap.bukkit.helper.v115.BukkitVersionHelperSpigot115;
import org.dynmap.bukkit.helper.v116.BukkitVersionHelperSpigot116;
import org.dynmap.bukkit.helper.v116_2.BukkitVersionHelperSpigot116_2;
import org.dynmap.bukkit.helper.v116_3.BukkitVersionHelperSpigot116_3;
import org.dynmap.bukkit.helper.v116_4.BukkitVersionHelperSpigot116_4;
public class Helper {
public static final BukkitVersionHelper getHelper() {
if (BukkitVersionHelper.helper == null) {
String v = Bukkit.getServer().getVersion();
Log.info("version=" + v);
if (v.contains("MCPC")) {
Log.severe("*********************************************************************************");
Log.severe("* MCPC-Plus is no longer supported via the Bukkit version of Dynmap. *");
Log.severe("* Install the appropriate Forge version of Dynmap. *");
Log.severe("* Add the DynmapCBBridge plugin to enable support for Dynmap-compatible plugins *");
Log.severe("*********************************************************************************");
}
else if(v.contains("BukkitForge")) {
Log.severe("*********************************************************************************");
Log.severe("* BukkitForge is not supported via the Bukkit version of Dynmap. *");
Log.severe("* Install the appropriate Forge version of Dynmap. *");
Log.severe("* Add the DynmapCBBridge plugin to enable support for Dynmap-compatible plugins *");
Log.severe("*********************************************************************************");
}
else if(Bukkit.getServer().getClass().getName().contains("GlowServer")) {
Log.info("Loading Glowstone support");
BukkitVersionHelper.helper = new BukkitVersionHelperGlowstone();
}
else if (v.contains("(MC: 1.16)") || v.contains("(MC: 1.16.1")) {
BukkitVersionHelper.helper = new BukkitVersionHelperSpigot116();
}
else if (v.contains("(MC: 1.16.2)")) {
BukkitVersionHelper.helper = new BukkitVersionHelperSpigot116_2();
}
else if (v.contains("(MC: 1.16.3)")) {
BukkitVersionHelper.helper = new BukkitVersionHelperSpigot116_3();
}
else if (v.contains("(MC: 1.16.")) {
BukkitVersionHelper.helper = new BukkitVersionHelperSpigot116_4();
}
else if (v.contains("(MC: 1.15)") || v.contains("(MC: 1.15.")) {
BukkitVersionHelper.helper = new BukkitVersionHelperSpigot115();
}
else if (v.contains("(MC: 1.14)") || v.contains("(MC: 1.14.1)") || v.contains("(MC: 1.14.2)") ||
v.contains("(MC: 1.14.3)") || v.contains("(MC: 1.14.4)")) {
BukkitVersionHelper.helper = new BukkitVersionHelperSpigot114_1();
}
else if (v.contains("(MC: 1.13.2)")) {
BukkitVersionHelper.helper = new BukkitVersionHelperSpigot113_2();
}
else {
BukkitVersionHelper.helper = new BukkitVersionHelperCB();
}
}
return BukkitVersionHelper.helper;
}
}
|
package io.compgen.common;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.channels.FileChannel;
import java.util.Iterator;
import java.util.zip.GZIPInputStream;
public abstract class AbstractLineReader<T> implements Iterable<T> {
final private Reader reader;
final protected FileChannel channel;
private BufferedReader iteratorReader = null;
public AbstractLineReader(String filename) throws IOException {
if (filename.equals("-")) {
this.reader = new InputStreamReader(System.in);
this.channel = null;
} else {
FileInputStream fis = new FileInputStream(filename);
this.channel = fis.getChannel();
if (filename.endsWith(".gz")) {
this.reader = new InputStreamReader(new GZIPInputStream(fis));
} else {
this.reader = new InputStreamReader(fis);
}
}
}
public AbstractLineReader(InputStream is) {
this(is, null);
}
public AbstractLineReader(InputStream is, FileChannel channel) {
this.reader = new InputStreamReader(is);
this.channel = channel;
}
public void close() throws IOException {
if (this.iteratorReader != null) {
this.iteratorReader.close();
} else {
this.reader.close();
}
}
protected abstract T convertLine(String line);
@Override
public Iterator<T> iterator() {
iteratorReader = new BufferedReader(reader);
return new Iterator<T>() {
String next = readnext();
private String readnext() {
String line = null;
while (line == null) {
try {
line = iteratorReader.readLine();
} catch (IOException e) {
line = null;
}
if (line == null) {
break;
}
}
if (line == null) {
try {
iteratorReader.close();
} catch (IOException e) {
}
}
return line;
}
@Override
public boolean hasNext() {
return (next != null);
}
@Override
public T next() {
T out = null;
while (out == null) {
out = convertLine(next);
next = readnext();
}
return out;
}
@Override
public void remove() {
}
};
}
}
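/*
 * Minimal usage sketch, not part of the original io.compgen.common sources:
 * a hypothetical subclass showing how convertLine() is meant to be implemented
 * and how the reader is consumed through its Iterable contract. The class name
 * StringLineReader is invented for illustration only.
 *
 * Usage: for (String s : new StringLineReader("data.txt.gz")) { ... }
 */
class StringLineReader extends AbstractLineReader<String> {
    public StringLineReader(String filename) throws IOException {
        super(filename);
    }

    @Override
    protected String convertLine(String line) {
        // A real subclass would parse the raw line into a richer type here.
        return line;
    }
}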
|
package org.apache.lucene.index;
import java.util.Vector;
import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.InputStream;
import org.apache.lucene.store.OutputStream;
final class SegmentInfos extends Vector {
public int counter = 0; // used to name new segments
public final SegmentInfo info(int i) {
return (SegmentInfo)elementAt(i);
}
public final void read(Directory directory) throws IOException {
InputStream input = directory.openFile("segments");
try {
counter = input.readInt(); // read counter
for (int i = input.readInt(); i > 0; i--) { // read segmentInfos
SegmentInfo si = new SegmentInfo(input.readString(), input.readInt(),
directory);
addElement(si);
}
} finally {
input.close();
}
}
public final void write(Directory directory) throws IOException {
OutputStream output = directory.createFile("segments.new");
try {
output.writeInt(counter); // write counter
output.writeInt(size()); // write infos
for (int i = 0; i < size(); i++) {
SegmentInfo si = info(i);
output.writeString(si.name);
output.writeInt(si.docCount);
}
} finally {
output.close();
}
// install new segment info
directory.renameFile("segments.new", "segments");
}
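  /*
   * Format implied by read() and write() above (stated here for illustration,
   * not copied from separate documentation): the "segments" file holds an int
   * counter, an int segment count, and then one (String name, int docCount)
   * pair per segment. write() builds "segments.new" first and only then
   * renames it over "segments", so a half-written file is never installed.
   */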
}
|
package org.apache.velocity.convert;
import java.io.File;
import java.io.FileWriter;
import org.apache.oro.text.perl.Perl5Util;
import org.apache.velocity.util.StringUtils;
import org.apache.tools.ant.DirectoryScanner;
/**
* This class will convert a WebMacro template to
* a Velocity template. Uses the ORO Regexp package to do the
* rewrites. Note, it isn't 100% perfect, but will definitely get
* you about 99.99% of the way to a converted system. Please
* see the website documentation for more information on how to use
* this class.
*
* @author <a href="mailto:jvanzyl@periapt.com">Jason van Zyl</a>
* @author <a href="mailto:dlr@finemaltcoding.com">Daniel Rall</a>
* @version $Id: WebMacro.java,v 1.12 2001/05/08 05:39:36 dlr Exp $
*/
public class WebMacro
{
/** Contents of the original WebMacro template */
protected String orignalTemplate;
/** Regular expression tool */
protected Perl5Util perl;
/** Path separator property */
protected String pathSeparator = File.separator;
protected final static String VM_EXT = ".vm";
protected final static String WM_EXT = ".wm";
/**
* The regexes to use for substitution. The regexes come
* in pairs. The first is the string to match, the
* second is the substitution to make.
*/
protected String[] res =
{
// Make #if directive match the Velocity directive style.
"#if\\s*[(]\\s*(.*\\S)\\s*[)]\\s*(#begin|{)[ \\t]?",
"#if( $1 )",
// Remove the WM #end #else #begin usage.
"[ \\t]?(#end|})\\s*#else\\s*(#begin|{)[ \\t]?(\\w)",
"#else#**#$3", // avoid touching a followup word with embedded comment
"[ \\t]?(#end|})\\s*#else\\s*(#begin|{)[ \\t]?",
"#else",
// Convert WM style #foreach to Velocity directive style.
"#foreach\\s+(\\$\\w+)\\s+in\\s+(\\$[^\\s#]+)\\s*(#begin|{)[ \\t]?",
"#foreach( $1 in $2 )",
// Change the "}" to #end. Have to get more
// sophisticated here. Will assume either {}
// and no javascript, or #begin/#end with the
// possibility of javascript.
"\n}", // assumes that javascript is indented, WMs not!!!
"\n#end",
// Convert WM style #set to Velocity directive style.
"#set\\s+(\\$[^\\s=]+)\\s*=\\s*(.*\\S)[ \\t]*",
"#set( $1 = $2 )",
"(##[# \\t\\w]*)\\)", // fix comments included at end of line
")$1",
// Convert WM style #parse to Velocity directive style.
"#parse\\s+([^\\s#]+)[ \\t]?",
"#parse( $1 )",
// Convert WM style #include to Velocity directive style.
"#include\\s+([^\\s
"#include( $1 )",
// Convert WM formal reference to VTL syntax.
"\\$\\(([^\\)]+)\\)",
"${$1}",
"\\${([^}\\(]+)\\(([^}]+)}\\)", // fix encapsulated brakets: {(})
"${$1($2)}",
// Velocity currently does not permit leading underscore.
"\\$_",
"$l_",
"\\${(_[^}]+)}", // within a formal reference
"${l$1}",
// Convert explicitly terminated WM statements to VTL syntax.
"(^|[^\\\\])\\$([\\w]+);",
"$1${$2}",
// Change extensions when seen.
"\\.wm",
".vm"
};
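    /*
     * Illustrative sketch of the substitutions above (not taken from the
     * original documentation; the fragment and variable name are invented).
     * A WebMacro input such as
     *
     *   #if ($remoteUser) #begin
     *   Hello $(remoteUser)
     *   #end
     *
     * is expected to come out roughly as
     *
     *   #if( $remoteUser )
     *   Hello ${remoteUser}
     *   #end
     *
     * with the exact result depending on the order in which the pairs are applied.
     */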
/**
* Iterate through the set of find/replace regexes
* that will convert a given WM template to a VM template
*/
public void convert(String target)
{
File file = new File(target);
if (!file.exists())
{
System.err.println
("The specified template or directory does not exist");
System.exit(1);
}
if (file.isDirectory())
{
String basedir = file.getAbsolutePath();
String newBasedir = basedir + VM_EXT;
DirectoryScanner ds = new DirectoryScanner();
ds.setBasedir(basedir);
ds.addDefaultExcludes();
ds.scan();
String[] files = ds.getIncludedFiles();
for (int i = 0; i < files.length; i++)
writeTemplate(files[i], basedir, newBasedir);
}
else
{
writeTemplate(file.getAbsolutePath(), "", "");
}
}
/**
* Write out the converted template to the given named file
* and base directory.
*/
private boolean writeTemplate(String file, String basedir,
String newBasedir)
{
if (file.indexOf(WM_EXT) < 0)
return false;
System.out.println("Converting " + file + "...");
String template;
String templateDir;
String newTemplate;
File outputDirectory;
if (basedir.length() == 0)
{
template = file;
templateDir = "";
newTemplate = convertName(file);
}
else
{
template = basedir + pathSeparator + file;
templateDir = newBasedir + extractPath(file);
outputDirectory = new File(templateDir);
if (! outputDirectory.exists())
outputDirectory.mkdirs();
newTemplate = newBasedir + pathSeparator +
convertName(file);
}
String convertedTemplate = convertTemplate(template);
try
{
FileWriter fw = new FileWriter(newTemplate);
fw.write(convertedTemplate);
fw.close();
}
catch (Exception e)
{
e.printStackTrace();
}
return true;
}
/**
* Gets the path segment of the full path to a file (i.e. one
* which originally included the file name).
*/
private final String extractPath(String file)
{
int lastSepPos = file.lastIndexOf(pathSeparator);
return (lastSepPos == -1 ? "" :
pathSeparator + file.substring(0, lastSepPos));
}
/**
* Simple extension conversion of .wm to .vm
*/
private String convertName(String name)
{
if (name.indexOf(WM_EXT) > 0)
return name.substring(0, name.indexOf(WM_EXT)) + VM_EXT;
else
return name;
}
/**
* How to use this little puppy :-)
*/
private static final void usage()
{
System.err.println("Usage: convert-wm <template.wm | directory>");
System.exit(1);
}
/**
* Apply find/replace regexes to our WM template
*/
public String convertTemplate(String template)
{
orignalTemplate = StringUtils.fileContentsToString(template);
// overcome current velocity 0.71 limitation
if ( !orignalTemplate.endsWith("\n") )
orignalTemplate += "\n";
perl = new Perl5Util();
for (int i = 0; i < res.length; i += 2)
{
while (perl.match("/" + res[i] + "/", orignalTemplate))
{
orignalTemplate = perl.substitute(
"s/" + res[i] + "/" + res[i+1] + "/g", orignalTemplate);
}
}
return orignalTemplate;
}
/**
* Main hook for the conversion process.
*/
public static void main(String[] args)
{
if (args.length < 1)
usage();
WebMacro converter = new WebMacro();
converter.convert(args[0]);
}
}
|
package org.burroloco.donkey.job;
import au.net.netstorm.boost.bullet.log.Log;
import au.net.netstorm.boost.gunge.lifecycle.Stop;
import org.burroloco.config.core.Config;
import org.burroloco.config.core.WeakConfig;
import org.burroloco.donkey.config.PollingInterval;
import org.burroloco.util.snooze.Snoozer;
public class Poller implements Job, Stop {
private boolean started = false;
WeakConfig weak;
Snoozer snoozer;
Job delegate;
Log log;
public void go(Config config) {
init();
while (started) {
delegate.go(config);
snooze(config);
}
}
public void stop() {
started = false;
log.info("Poller stopped.");
}
private void init() {
started = true;
log.info("Poller started.");
}
private void snooze(Config config) {
Long interval = weak.get(config, PollingInterval.class);
snoozer.snooze(interval);
}
}
|
package org.anodyneos.xp.standard;
import javax.servlet.jsp.el.ELException;
import org.anodyneos.xp.XpContentHandler;
import org.anodyneos.xp.XpException;
import org.anodyneos.xp.tagext.XpTagSupport;
import org.xml.sax.SAXException;
import java.util.Collection;
/**
* @author jvas
*
* To change the template for this generated type comment go to Window -
* Preferences - Java - Code Generation - Code and Comments
*/
public final class ForEachTag extends XpTagSupport {
private String var;
private Object items;
private String varStatus;
private int begin = 0;
private int end = 0;
private int step = 1;
private Object savedVar;
private Object savedVarStatus;
public ForEachTag() {
super();
}
public void doTag(XpContentHandler out) throws XpException, ELException, SAXException {
if (begin > end || step < 1) {
return;
}
saveVars();
if (items != null){
Object[] arrItems;
if (!(items instanceof Object[])){
// TODO test with various collection types
arrItems = ((Collection)items).toArray();
}else{
arrItems = (Object[])items;
}
for (int i = 0; i < arrItems.length; i+=step) {
if (null != var) {
getXpContext().setAttribute(var, arrItems[i]);
}
getXpBody().invoke(out);
}
}else{
for (int i = begin; i <= end; i+=step) {
if (null != var) {
getXpContext().setAttribute(var, Integer.toString(i));
}
getXpBody().invoke(out);
}
}
restoreVars();
}
private void saveVars() {
if (var != null) {
savedVar = getXpContext().getAttribute(var);
}
if (varStatus != null) {
savedVarStatus = getXpContext().getAttribute(varStatus);
}
}
private void restoreVars() {
if (var != null) {
getXpContext().setAttribute(var, savedVar);
}
if (varStatus != null) {
getXpContext().setAttribute(varStatus, savedVarStatus);
}
}
/**
* @param begin
* The begin to set.
*/
public void setBegin(int begin) {
this.begin = begin;
}
/**
* @param end
* The end to set.
*/
public void setEnd(int end) {
this.end = end;
}
/**
* @param items
* The items to set.
*/
public void setItems(Object items) {
this.items = items;
}
/**
* @param step
* The step to set.
*/
public void setStep(int step) {
this.step = step;
}
/**
* @param var
* The var to set.
*/
public void setVar(String var) {
this.var = var;
}
/**
* @param varStatus
* The varStatus to set.
*/
public void setVarStatus(String varStatus) {
this.varStatus = varStatus;
}
}
|
package org.opencb.cellbase.app.cli;
import com.beust.jcommander.ParameterException;
import org.apache.commons.lang.StringUtils;
import org.opencb.cellbase.core.CellBaseConfiguration.SpeciesProperties.Species;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
public class DownloadCommandParser extends CommandParser {
private File ensemblScriptsFolder;
private CliOptionsParser.DownloadCommandOptions downloadCommandOptions;
private static final String[] variationFiles = {"variation.txt.gz", "variation_feature.txt.gz",
"transcript_variation.txt.gz", "variation_synonym.txt.gz", "seq_region.txt.gz", "source.txt.gz",
"attrib.txt.gz", "attrib_type.txt.gz", "seq_region.txt.gz", "structural_variation_feature.txt.gz",
"study.txt.gz", "phenotype.txt.gz", "phenotype_feature.txt.gz", "phenotype_feature_attrib.txt.gz",
"motif_feature_variation.txt.gz", "genotype_code.txt.gz", "allele_code.txt.gz",
"population_genotype.txt.gz", "population.txt.gz", "allele.txt.gz"};
private static final String[] regulationFiles = {"AnnotatedFeatures.gff.gz", "MotifFeatures.gff.gz", "RegulatoryFeatures_MultiCell.gff.gz"};
private String ensemblVersion;
private String ensemblRelease;
public DownloadCommandParser(CliOptionsParser.DownloadCommandOptions downloadCommandOptions) {
super(downloadCommandOptions.commonOptions.logLevel, downloadCommandOptions.commonOptions.verbose,
downloadCommandOptions.commonOptions.conf);
this.downloadCommandOptions = downloadCommandOptions;
this.ensemblScriptsFolder = new File(System.getProperty("basedir") + "/bin/ensembl-scripts/");
}
/**
* Parse specific 'download' command options
*/
public void parse() {
try {
checkParameters();
Path outputDir = Paths.get(downloadCommandOptions.outputDir);
makeDir(outputDir);
// We need to get the Species object from the CLI name
// This can be the scientific or common name, or the ID
Species speciesToDownload = null;
for(Species species: configuration.getAllSpecies()) {
if(downloadCommandOptions.species.equalsIgnoreCase(species.getScientificName())
|| downloadCommandOptions.species.equalsIgnoreCase(species.getCommonName())
|| downloadCommandOptions.species.equalsIgnoreCase(species.getId())) {
speciesToDownload = species;
break;
}
}
// If everything is right we launch the download
if(speciesToDownload != null) {
processSpecies(speciesToDownload, outputDir);
}else {
logger.error("Species '{}' not valid", downloadCommandOptions.species);
}
} catch (ParameterException e) {
logger.error("Error in 'download' command line: " + e.getMessage());
} catch (IOException | InterruptedException e) {
logger.error("Error downloading '" + downloadCommandOptions.species + "' files: " + e.getMessage());
}
}
private void checkParameters() {
if (!downloadCommandOptions.sequence && !downloadCommandOptions.gene && !downloadCommandOptions.variation
&& !downloadCommandOptions.regulation && !downloadCommandOptions.protein) {
throw new ParameterException("At least one 'download' option must be selected: sequence, gene, variation, regulation, protein");
}
}
private void processSpecies(Species sp, Path outputDir) throws IOException, InterruptedException {
logger.info("Processing species " + sp.getScientificName());
// output folder
String spShortName = sp.getScientificName().toLowerCase().replaceAll("\\.", "").replaceAll("\\)", "").replaceAll("[-(/]", " ").replaceAll("\\s+", "_");
Path spFolder = outputDir.resolve(spShortName);
makeDir(spFolder);
// We need to find the Ensembl host URL.
// It differs depending on whether the species is a vertebrate.
String ensemblHostUrl;
if (configuration.getSpecies().getVertebrates().contains(sp)) {
ensemblHostUrl = configuration.getDownload().getEnsembl().getUrl().getHost();
} else {
ensemblHostUrl = configuration.getDownload().getEnsemblGenomes().getUrl().getHost();
}
// Get the assembly; by default the first assembly listed in configuration.json is used
Species.Assembly assembly = null;
if(downloadCommandOptions.assembly == null || downloadCommandOptions.assembly.equals("")) {
assembly = sp.getAssemblies().get(0);
}else {
for (Species.Assembly assembly1 : sp.getAssemblies()) {
if(downloadCommandOptions.assembly.equalsIgnoreCase(assembly1.getName())) {
assembly = assembly1;
break;
}
}
}
// Checking that the species and assembly are correct
if(ensemblHostUrl == null || assembly == null) {
logger.error("Something is not correct, check the species '{}' or the assembly '{}'",
downloadCommandOptions.species, downloadCommandOptions.assembly);
return;
}
ensemblVersion = assembly.getEnsemblVersion();
ensemblRelease = "release-" + ensemblVersion.split("_")[0];
// download sequence, gene, variation, regulation and protein
if (downloadCommandOptions.sequence && speciesHasInfoToDownload(sp, "genome_sequence")) {
downloadSequence(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
}
if (downloadCommandOptions.gene && speciesHasInfoToDownload(sp, "gene")) {
downloadGene(sp, spShortName, spFolder, ensemblHostUrl);
}
if (downloadCommandOptions.variation && speciesHasInfoToDownload(sp, "variation")) {
downloadVariation(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
}
if (downloadCommandOptions.regulation && speciesHasInfoToDownload(sp, "regulation")) {
downloadRegulation(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
}
if (downloadCommandOptions.protein && speciesHasInfoToDownload(sp, "protein")) {
downloadProtein(sp, spShortName, assembly.getName(), spFolder, ensemblHostUrl);
}
}
private boolean speciesHasInfoToDownload(Species sp, String info) {
boolean hasInfo = true;
if (sp.getData() == null || !sp.getData().contains(info)) {
logger.warn("Specie " + sp.getScientificName() + " has no " + info + " information available to download");
hasInfo = false;
}
return hasInfo;
}
private void downloadSequence(Species sp, String shortName, String assembly, Path spFolder, String host) throws IOException, InterruptedException {
logger.info("Downloading genome-sequence information ...");
Path sequenceFolder = spFolder.resolve("sequence");
makeDir(sequenceFolder);
String url;
if (configuration.getSpecies().getVertebrates().contains(sp)) {
url = host + "/" + ensemblRelease;
} else {
url = host + "/" + ensemblRelease + "/" + getPhylo(sp);
}
        // 'url' now points at the Ensembl release directory holding this species' genome FASTA files.
    }
    /*
     * PROTEIN METHODS
     */
private void downloadProtein(Species sp, String shortName, String assembly, Path spFolder, String host)
throws IOException, InterruptedException {
logger.info("Downloading protein information ...");
Path proteinFolder = spFolder.resolve("protein");
makeDir(proteinFolder);
String proteinUrl = configuration.getDownload().getUniprot().getHost();
downloadFile(proteinUrl, proteinFolder.resolve("uniprot_sprot.xml.gz").toString());
}
private void getProteinFunctionPredictionMatrices(Species sp, Path geneFolder) throws IOException, InterruptedException {
logger.info("Downloading protein function prediction matrices ...");
// run protein_function_prediction_matrices.pl
String proteinFunctionProcessLogFile = geneFolder.resolve("protein_function_prediction_matrices.log").toString();
List<String> args = Arrays.asList( "--species", sp.getScientificName(), "--outdir", geneFolder.toString(),
"--ensembl-libs", configuration.getDownload().getEnsembl().getLibs());
boolean proteinFunctionPredictionMatricesObtained = runCommandLineProcess(ensemblScriptsFolder,
"./protein_function_prediction_matrices.pl",
args,
proteinFunctionProcessLogFile);
// check output
if (proteinFunctionPredictionMatricesObtained) {
logger.info("Protein function prediction matrices created OK");
} else {
logger.error("Protein function prediction matrices for " + sp.getScientificName() + " cannot be downloaded");
}
}
private void makeDir(Path folderPath) throws IOException {
if(!Files.exists(folderPath)) {
Files.createDirectory(folderPath);
}
}
private void downloadFile(String url, String outputFileName) throws IOException, InterruptedException {
List<String> wgetArgs = Arrays.asList("--tries=10", url, "-O", outputFileName, "-o", outputFileName + ".log");
boolean downloaded = runCommandLineProcess(null, "wget", wgetArgs, null);
if (downloaded) {
logger.info(outputFileName + " created OK");
} else {
logger.warn(url + " cannot be downloaded");
}
}
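    /*
     * Illustration only (the URL and file names are invented): with
     * url = "http://example.org/data.gz" and outputFileName = "/tmp/data.gz",
     * the argument list above makes runCommandLineProcess execute
     *
     *   wget --tries=10 http://example.org/data.gz -O /tmp/data.gz -o /tmp/data.gz.log
     */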
private boolean runCommandLineProcess(File workingDirectory, String binPath, List<String> args, String logFilePath) throws IOException, InterruptedException {
ProcessBuilder builder = getProcessBuilder(workingDirectory, binPath, args, logFilePath);
logger.debug("Executing command: " + StringUtils.join(builder.command(), " "));
Process process = builder.start();
process.waitFor();
// Check process output
boolean executedWithoutErrors = true;
int genomeInfoExitValue = process.exitValue();
if (genomeInfoExitValue != 0) {
logger.warn("Error executing {}, error code: {}. More info in log file: {}", binPath, genomeInfoExitValue, logFilePath);
executedWithoutErrors = false;
}
return executedWithoutErrors;
}
private ProcessBuilder getProcessBuilder(File workingDirectory, String binPath, List<String> args, String logFilePath) {
List<String> commandArgs = new ArrayList<>();
commandArgs.add(binPath);
commandArgs.addAll(args);
ProcessBuilder builder = new ProcessBuilder(commandArgs);
// working directory, plus redirection of stdout/stderr to the log file
if (workingDirectory != null) {
builder.directory(workingDirectory);
}
builder.redirectErrorStream(true);
if (logFilePath != null) {
builder.redirectOutput(ProcessBuilder.Redirect.appendTo(new File(logFilePath)));
}
return builder;
}
}
|
package net.commotionwireless.olsrinfo;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author Hans-Christoph Steiner
*
*/
public class OlsrInfo {
String host = "127.0.0.1";
int port = 2006;
public OlsrInfo() {
}
public OlsrInfo(String sethost) {
host = sethost;
}
public OlsrInfo(String sethost, int setport) {
host = sethost;
port = setport;
}
public String[] request(String req) throws IOException {
Socket sock = null;
BufferedReader in = null;
PrintWriter out = null;
List<String> retlist = new ArrayList<String>();
try {
sock = new Socket(host, port);
in = new BufferedReader(new InputStreamReader(sock.getInputStream()));
out = new PrintWriter(sock.getOutputStream(), true);
} catch (UnknownHostException e) {
throw new IOException();
} catch (IOException e) {
System.err.println("Couldn't get I/O for socket to " + host + ":" + Integer.toString(port));
}
out.println(req);
String line;
while((line = in.readLine()) != null) {
if(! line.equals(""))
retlist.add(line);
}
// the txtinfo plugin drops the connection once it outputs
out.close();
in.close();
sock.close();
return retlist.toArray(new String[retlist.size()]);
}
public String[][] command(String cmd) {
String[] data = null;
int startpos = 0;
final Set<String> supportedCommands = new HashSet<String>(Arrays.asList(
new String[] {
"/neigh",
"/link",
"/route",
"/hna",
"/mid",
"/topo",
}
));
if(! supportedCommands.contains(cmd))
System.out.println("Unsupported command: " + cmd);
try {
data = request(cmd);
} catch (IOException e) {
System.err.println("Couldn't get I/O for socket to " + host + ":" + Integer.toString(port));
}
for(int i = 0; i < data.length; i++) {
if(data[i].startsWith("Table: ")) {
startpos = i + 2;
break;
}
}
int fields = data[startpos + 1].split("\t").length;
String[][] ret = new String[data.length - startpos][fields];
for (int i = 0; i < ret.length; i++)
ret[i] = data[i + startpos].split("\t");
return ret;
}
/**
* 2-hop neighbors on the mesh
* @return array of per-IP arrays of IP address, SYM, MPR, MPRS, Willingness, and 2 Hop Neighbors
*/
public String[][] neighbors() {
return command("/neigh");
}
/**
* direct connections on the mesh, i.e. nodes with direct IP connectivity via Ad-hoc
* @return array of per-IP arrays of Local IP, Remote IP, Hysteresis, LQ, NLQ, and Cost
*/
public String[][] links() {
return command("/link");
}
/**
* IP routes to nodes on the mesh
* @return array of per-IP arrays of Destination, Gateway IP, Metric, ETX, and Interface
*/
public String[][] routes() {
return command("/route");
}
/**
* Host and Network Association (for supporting dynamic internet gateways)
* @return array of per-IP arrays of Destination and Gateway
*/
public String[][] hna() {
return command("/hna");
}
/**
* Multiple Interface Declaration
* @return array of per-IP arrays of IP address and Aliases
*/
public String[][] mid() {
return command("/mid");
}
/**
* topology of the whole mesh
* @return array of per-IP arrays of Destination IP, Last hop IP, LQ, NLQ, and Cost
*/
public String[][] topology() {
return command("/topo");
}
/**
* for testing from the command line
*/
public static void main(String[] args) throws IOException {
OlsrInfo txtinfo = new OlsrInfo();
System.out.println("NEIGHBORS
for(String[] s : txtinfo.neighbors()) {
for(String t : s)
System.out.print(t + ",");
System.out.println();
}
System.out.println("LINKS
for(String[] s : txtinfo.links()) {
for(String t : s)
System.out.print(t + ",");
System.out.println();
}
System.out.println("ROUTES
for(String[] s : txtinfo.routes()) {
for(String t : s)
System.out.print(t + ",");
System.out.println();
}
System.out.println("HNA
for(String[] s : txtinfo.hna()) {
for(String t : s)
System.out.print(t + ",");
System.out.println();
}
System.out.println("MID
for(String[] s : txtinfo.mid()) {
for(String t : s)
System.out.print(t + ",");
System.out.println();
}
System.out.println("TOPOLOGY
for(String[] s : txtinfo.topology()) {
for(String t : s)
System.out.print(t + ",");
System.out.println();
}
}
}
|
package markehme.factionsplus;
import java.io.File;
import java.io.IOException;
import java.util.Set;
import java.util.logging.Logger;
import markehme.factionsplus.config.Config;
import markehme.factionsplus.extras.LWCBase;
import markehme.factionsplus.extras.LWCFunctions;
import markehme.factionsplus.extras.Metrics;
import markehme.factionsplus.extras.Metrics.Graph;
import markehme.factionsplus.listeners.CoreListener;
import markehme.factionsplus.listeners.FPConfigLoadedListener;
import net.milkbowl.vault.permission.Permission;
import org.bukkit.Bukkit;
import org.bukkit.Server;
import org.bukkit.event.HandlerList;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.RegisteredServiceProvider;
import com.massivecraft.factions.Factions;
import com.massivecraft.factions.entity.MConf;
import com.onarandombox.MultiversePortals.MultiversePortals;
import com.sk89q.worldedit.bukkit.WorldEditPlugin;
import com.sk89q.worldguard.bukkit.WorldGuardPlugin;
public class FactionsPlus extends FactionsPlusPlugin {
public static FactionsPlus instance;
public static Logger log = Logger.getLogger("Minecraft");
Factions factions;
public static Permission permission = null;
public static boolean isWorldEditEnabled = false;
public static boolean isWorldGuardEnabled = false;
public static boolean isMultiversePortalsEnabled = false;
public final CoreListener corelistener = new CoreListener();
public static WorldEditPlugin worldEditPlugin = null;
public static WorldGuardPlugin worldGuardPlugin = null;
public static MultiversePortals multiversePortalsPlugin = null;
public static String version;
public static String FactionsVersion;
private static Metrics metrics = null;
public static Set<String> ignoredPvPWorlds = null;
public static Set<String> noClaimingWorlds = null;
public static Set<String> noPowerLossWorlds = null;
public static Server server;
public static boolean update_avab;
public static File thefile;
public FactionsPlus() {
super();
if ( null != instance ) {
throw bailOut( "This was not expected, getting new-ed again without getting unloaded first.\n" +
"Safest way to reload is to stop and start the server!" );
}
instance = this;
}
@Override
public void onEnable() {
try {
super.onEnable();
try {
Class.forName("com.massivecraft.factions.entity.MConf");
} catch (ClassNotFoundException ex) {
warn("Could not find Factions 2.x - please update to Factions 2.x.");
info("You are required to use 0.5.x for Factions 1.x");
disableSelf();
return;
}
thefile = getFile();
ignoredPvPWorlds = MConf.get().worldsIgnorePvP;
noClaimingWorlds = MConf.get().worldsNoClaiming;
noPowerLossWorlds = MConf.get().worldsNoPowerLoss;
version = getDescription().getVersion();
Config.init();
PluginManager pm = this.getServer().getPluginManager();
FactionsVersion = pm.getPlugin( "Factions" ).getDescription().getVersion();
info("Factions v" + FactionsVersion );
pm.registerEvents( new FPConfigLoadedListener(), this );
Config.reload();
pm.registerEvents( this.corelistener, this );
server = getServer();
FactionsPlusCommandManager.setup();
RegisteredServiceProvider<Permission> permissionProvider = getServer().getServicesManager().getRegistration( net.milkbowl.vault.permission.Permission.class );
if ( permissionProvider != null ) {
permission = permissionProvider.getProvider();
}
if( pm.isPluginEnabled( "WorldEdit" ) ) {
worldEditPlugin = (WorldEditPlugin) getServer().getPluginManager().getPlugin( "WorldEdit" );
isWorldEditEnabled = true;
}
if( pm.isPluginEnabled( "WorldGuard" ) ) {
worldGuardPlugin = ( WorldGuardPlugin ) getServer().getPluginManager().getPlugin( "WorldGuard" );
isWorldGuardEnabled = true;
}
if( pm.isPluginEnabled( "Multiverse-Portals" ) ) {
Plugin MVc = getServer().getPluginManager().getPlugin( "Multiverse-Portals" );
if (MVc instanceof MultiversePortals) {
multiversePortalsPlugin = ( MultiversePortals ) MVc;
isMultiversePortalsEnabled = true;
}
}
try {
metrics = new Metrics( this );
Graph factionsVersionGraph = metrics.createGraph("Factions Version");
factionsVersionGraph.addPlotter(new Metrics.Plotter(FactionsVersion) {
@Override
public int getValue() {
return 1;
}
});
metrics.start();
} catch ( IOException e ) {
info( "Metrics could not start up: "+e.getMessage() );
}
} catch (Throwable t) {
FactionsPlus.severe( t );
if ( isEnabled() ) {
disableSelf();
}
} // try
} // onEnable
@Override
public void onDisable() {
Throwable failed = null;
try {
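// Each teardown step below runs in its own try/catch so that one failing hook
// (Essentials, Config, commands, LWC, ...) cannot prevent the remaining cleanup;
// the last failure is remembered and reported at the end.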
try {
if(EssentialsIntegration.isHooked()) {
EssentialsIntegration.onDisable();
}
} catch ( Throwable t ) {
failed = t;
severe( t, "Exception on unhooking Essentials" );
}
try {
Config.deInit();
} catch ( Throwable t ) {
failed = t;
severe( t, "Exception on disabling Config" );
}
try {
FactionsPlusCommandManager.disableSubCommands();
} catch(Throwable t) {
failed = t;
severe( t, "Exception on removing FactionsPlus commands" );
}
try {
if ( LWCBase.isLWCPluginPresent() ) {
LWCFunctions.unhookLWC();
}
} catch ( Throwable t ) {
failed = t;
severe( t, "Exception on unhooking LWC" );
}
update_avab = false; // reset this here
try {
//FactionsPlusUpdate.ensureNotRunning();
} catch ( Throwable t ) {
failed = t;
severe( t, "Exception on disabling Updates" );
}
try {
getServer().getServicesManager().unregisterAll( this );
} catch ( Throwable t ) {
failed = t;
severe( t, "Exception on unregistering services" );
}
try {
HandlerList.unregisterAll( FactionsPlus.instance );
} catch ( Throwable t ) {
failed = t;
severe( t, "Exception on unregistering from HandlerList" );
}
try {
// This will deInit metrics, but it will be enabled again onEnable.
getServer().getScheduler().cancelTasks( this );
} catch ( Throwable t ) {
failed = t;
severe( t, "Exception when canceling schedule tasks" );
}
try {
if(Bukkit.getScoreboardManager().getMainScoreboard().getObjective( FactionsPlusScoreboard.objective_name ) != null &&
(Config._extras._scoreboards.showScoreboardOfFactions._ || Config._extras._scoreboards.showScoreboardOfMap._ )) {
Bukkit.getScoreboardManager().getMainScoreboard().getObjective( FactionsPlusScoreboard.objective_name ).unregister();
}
} catch( Exception t ) {
failed = t;
severe( t, "Exception when removing scoreboard" );
}
//TODO: investigate why the "nag author" warning happens ... even though we seem to be shutting down tasks correctly
//some tasks still remain from both FP and Vault at this point if doing a server `reload` as soon as you see "[FactionsPlus] Ready."
// List<BukkitWorker> workers = Bukkit.getScheduler().getActiveWorkers();
// info("Active Workers: "+workers.size());
// for ( BukkitWorker bukkitWorker : workers ) {
// info(" workerOwner: "+bukkitWorker.getOwner()+" taskId="+bukkitWorker.getTaskId()
// +", "+bukkitWorker.getThread().getName());
if ( null == failed ) {
info( "Disabled successfuly." );
}
} catch ( Throwable t ) {
failed = t;
} finally {
if ( null != failed ) {
info( "Did not disable successfuly! Please check over exceptions." );
}
}
} // onDisable
}
|
package me.flibio.minigamecore.arena;
import me.flibio.minigamecore.events.ArenaStateChangeEvent;
import org.spongepowered.api.Game;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.effect.sound.SoundType;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.block.ChangeBlockEvent;
import org.spongepowered.api.event.entity.DamageEntityEvent;
import org.spongepowered.api.event.network.ClientConnectionEvent;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.channel.MessageChannel;
import org.spongepowered.api.text.serializer.TextSerializers;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
public abstract class Arena {
private CopyOnWriteArrayList<ArenaState> arenaStates = new CopyOnWriteArrayList<ArenaState>(getDefaultArenaStates());
private ConcurrentHashMap<ArenaState,Runnable> runnables = new ConcurrentHashMap<ArenaState,Runnable>();
private CopyOnWriteArrayList<Player> onlinePlayers = new CopyOnWriteArrayList<Player>();
private ArenaState arenaState;
private ArenaData arenaData;
private Game game;
/**
* An arena is an object that can handle spawn locations, lobbies, games, and more.
* @param arenaName
* The name of the arena
* @param game
* An instance of the game
* @param plugin
* An instance of the main class of your plugin
*/
public Arena(String arenaName, Game game, Object plugin) {
this.arenaData = new ArenaData(arenaName);
this.game = game;
this.arenaState = ArenaStates.LOBBY_WAITING;
game.getEventManager().registerListeners(plugin, this);
}
/**
* Adds an online player
* @param player
* The player to add
*/
public abstract void addOnlinePlayer(Player player);
/**
* Removes an online player
* @param player
* The player to remove
*/
public abstract void removeOnlinePlayer(Player player);
/**
* Gets all of the players in an arena
* @return
* All the players in the arena
*/
public CopyOnWriteArrayList<Player> getOnlinePlayers() {
return onlinePlayers;
}
/**
* Triggers a state change on the arena
* @param changeTo
* The state to change the arena to
*/
public void arenaStateChange(ArenaState changeTo) {
if(!arenaStates.contains(changeTo)) {
return;
}
arenaState = changeTo;
//Post the arena state change event
game.getEventManager().post(new ArenaStateChangeEvent(this));
//Run a runnable if it is set
if(arenaStateRunnableExists(changeTo)) {
runnables.get(changeTo).run();
}
}
//Other Arena Properties
/**
* Gets the arena data
* @return
* The arena data
*/
public ArenaData getData() {
return arenaData;
}
/**
* Sets the arena data
* @param data
* The arena data to set
*/
public void overrideData(ArenaData data) {
arenaData = data;
}
/**
* Gets the state of the arena
* @return
* The state of the arena
*/
public ArenaState getArenaState() {
return arenaState;
}
/**
* Adds a new arena state
* @param state
* The arena state to add
* @return
* If the method was successful or not
*/
public boolean addArenaState(ArenaState state) {
//Check if the state exists
if(arenaStateExists(state)) {
return false;
} else {
arenaStates.add(state);
return true;
}
}
/**
* Removes an arena state
* @param state
* The arena state to remove
* @return
* If the method was successful or not
*/
public boolean removeArenaState(ArenaState state) {
//Check if the state is a default state
if(getDefaultArenaStates().contains(state)||!arenaStateExists(state)) {
return false;
} else {
if(runnables.keySet().contains(state)) {
runnables.remove(state);
}
arenaStates.remove(state);
return true;
}
}
/**
* Checks if an arena state exists
* @param arenaState
* The arena state to check for
* @return
* If the arena state exists
*/
public boolean arenaStateExists(ArenaState arenaState) {
return arenaStates.contains(arenaState);
}
/**
* Gets a list of the default arena states
* @return
* A list of the default arena states
*/
public List<ArenaState> getDefaultArenaStates() {
return Arrays.asList(ArenaStates.LOBBY_WAITING,ArenaStates.LOBBY_COUNTDOWN,ArenaStates.GAME_COUNTDOWN,
ArenaStates.GAME_PLAYING,ArenaStates.GAME_OVER,ArenaStates.COUNTDOWN_CANCELLED);
}
/**
* Adds an arena state runnable
* @param state
* The state to add
* @param runnable
* The runnable to add
* @return
* If the method was successful or not
*/
public boolean addArenaStateRunnable(ArenaState state, Runnable runnable) {
if(!arenaStateExists(state)||arenaStateRunnableExists(state)) {
return false;
}
runnables.put(state, runnable);
return true;
}
/**
* Removes an arena state runnable
* @param state
* The arena state to remove
* @return
* If the method was successful or not
*/
public boolean removeArenaStateRunnable(ArenaState state) {
if(!arenaStateExists(state)||!arenaStateRunnableExists(state)) {
return false;
}
runnables.remove(state);
return true;
}
/**
* Checks if an arena state runnable exists
* @param state
* The state to check for
* @return
* If the arena state runnable exists
*/
public boolean arenaStateRunnableExists(ArenaState state) {
return runnables.keySet().contains(state);
}
/**
* Gets an arena state runnable
* @param state
* The state to get the runnable of
* @return
* The arena state runnable
*/
public Optional<Runnable> getArenaStateRunnable(ArenaState state) {
if(arenaStateRunnableExists(state)) {
return Optional.of(runnables.get(state));
} else {
return Optional.empty();
}
}
/**
* Deserializes XML formatted text. Example:
* <c n="red">Something went wrong!</c>
* @param text
* The text to deserialize
* @return
* The deserialized text
*/
public Text deserialize(String text) {
return TextSerializers.TEXT_XML.deserialize(text);
}
/**
* Deserializes XML formatted text. Example:
* <c n="green">%name% has joined the game!</c>
* @param text
* The text to deserialize
* @param old
* The string to replace before deserialization
* @param replacement
* What to replace the string with
* @return
* The deserialized text
*/
public Text deserialize(String text, String old, String replacement) {
text = text.replaceAll(old, replacement);
return TextSerializers.TEXT_XML.deserialize(text);
}
/**
* Broadcasts the message to the entire server
* @param text
* The text to broadcast
*/
public void broadcast(Text text) {
Sponge.getGame().getServer().getBroadcastChannel().send(text);
}
/**
* Plays a sound to all players in the game
* @param type
* The type of sound to play
* @param volume
* The volume of the sound
* @param pitch
* The pitch of the sound
*/
public void broadcastSound(SoundType type, int volume, int pitch) {
for(Player player : onlinePlayers) {
player.playSound(type, player.getLocation().getPosition(), volume, pitch);
}
}
//Listeners
@Listener
public void onPlayerDisconnect(ClientConnectionEvent.Disconnect event) {
if(arenaData.isTriggerPlayerEvents()) {
Player player = event.getTargetEntity();
removeOnlinePlayer(player);
event.setChannel(MessageChannel.TO_NONE);
}
}
@Listener
public void onPlayerJoin(ClientConnectionEvent.Join event) {
if(arenaData.isTriggerPlayerEvents()) {
Player player = event.getTargetEntity();
addOnlinePlayer(player);
event.setChannel(MessageChannel.TO_NONE);
}
}
@Listener
public void onBlockModify(ChangeBlockEvent event) {
Optional<Player> playerOptional = event.getCause().first(Player.class);
if(!playerOptional.isPresent()) return;
if(!arenaData.isModifyLobbyBlocks()) {
if(arenaState.equals(ArenaStates.COUNTDOWN_CANCELLED)||arenaState.equals(ArenaStates.LOBBY_COUNTDOWN)||
arenaState.equals(ArenaStates.LOBBY_WAITING)) {
event.setCancelled(true);
}
}
}
@Listener
public void onPlayerDamage(DamageEntityEvent event) {
Optional<Player> playerOptional = event.getCause().first(Player.class);
if(event.getTargetEntity() instanceof Player||playerOptional.isPresent()) {
if(!arenaData.isAllowLobbyDamage()) {
if(arenaState.equals(ArenaStates.COUNTDOWN_CANCELLED)||arenaState.equals(ArenaStates.LOBBY_COUNTDOWN)||
arenaState.equals(ArenaStates.LOBBY_WAITING)) {
event.setCancelled(true);
}
}
}
}
}
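
// Illustrative sketch (added for clarity, not part of the original source): a minimal
// concrete Arena that wires a runnable to the GAME_OVER state. It only uses members
// defined or referenced above; the arena name and the broadcast text are made up.
class ExampleArena extends Arena {

    public ExampleArena(Game game, Object plugin) {
        super("example", game, plugin);
        // Announce the end of the game whenever the arena enters GAME_OVER.
        addArenaStateRunnable(ArenaStates.GAME_OVER, () ->
                broadcast(deserialize("<c n=\"red\">The game is over!</c>")));
    }

    @Override
    public void addOnlinePlayer(Player player) {
        getOnlinePlayers().add(player);
    }

    @Override
    public void removeOnlinePlayer(Player player) {
        getOnlinePlayers().remove(player);
    }
}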
|
package justaway.signinwithtwitter;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import twitter4j.Status;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.support.v4.util.LruCache;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
public class TwitterAdapter extends ArrayAdapter<twitter4j.Status> {
private Context context;
private ArrayList<twitter4j.Status> statuses = new ArrayList<twitter4j.Status>();
private LayoutInflater inflater;
private int layout;
private LruCache<String, Bitmap> mMemoryCache;
public TwitterAdapter(Context context, int textViewResourceId) {
super(context, textViewResourceId);
this.inflater = (LayoutInflater) context
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
this.context = context;
this.layout = textViewResourceId;
// Get max available VM memory, exceeding this amount will throw an
// OutOfMemory exception. Stored in kilobytes as LruCache takes an
// int in its constructor.
final int maxMemory = (int) (Runtime.getRuntime().maxMemory() / 1024);
// Use half of the available memory for this memory cache.
final int cacheSize = maxMemory / 2;
mMemoryCache = new LruCache<String, Bitmap>(cacheSize) {
@SuppressLint("NewApi")
@Override
protected int sizeOf(String key, Bitmap bitmap) {
// The cache size will be measured in kilobytes rather than
// number of items.
return bitmap.getByteCount() / 1024;
}
};
}
@Override
public void add(twitter4j.Status status) {
super.add(status);
this.statuses.add(status);
}
@Override
public void insert(twitter4j.Status status, int index) {
super.insert(status, index);
this.statuses.add(index, status);
}
@Override
public void clear() {
super.clear();
this.statuses.clear();
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View view = convertView;
if (view == null) {
// No recycled view available, inflate a new row.
view = inflater.inflate(this.layout, null);
}
Status status = (Status) statuses.get(position);
if (status == null) {
return view;
}
Status retweet = status.getRetweetedStatus();
if (retweet == null) {
renderStatus(view, status, null);
} else {
renderStatus(view, retweet, status);
}
return view;
}
private void renderStatus(View view, Status status, Status retweet) {
((TextView) view.findViewById(R.id.display_name)).setText(status
.getUser().getName());
((TextView) view.findViewById(R.id.screen_name)).setText("@"
+ status.getUser().getScreenName());
((TextView) view.findViewById(R.id.status)).setText(status.getText());
((TextView) view.findViewById(R.id.datetime)).setText(status
.getCreatedAt().toString());
((TextView) view.findViewById(R.id.via)).setText("via "
+ getClientName(status.getSource()));
if (retweet != null) {
((TextView) view.findViewById(R.id.retweet_by))
.setText("Retweet By " + retweet.getUser().getScreenName()
+ "(" + retweet.getUser().getName() + ") and "
+ String.valueOf(status.getRetweetCount())
+ " others");
ImageView icon = (ImageView) view.findViewById(R.id.retweet_icon);
ProgressBar wait = (ProgressBar) view
.findViewById(R.id.retweet_wait);
renderIcon(wait, icon, retweet.getUser().getMiniProfileImageURL());
view.findViewById(R.id.retweet).setVisibility(View.VISIBLE);
} else {
view.findViewById(R.id.retweet).setVisibility(View.GONE);
}
ImageView icon = (ImageView) view.findViewById(R.id.icon);
ProgressBar wait = (ProgressBar) view.findViewById(R.id.WaitBar);
renderIcon(wait, icon, status.getUser().getBiggerProfileImageURL());
icon.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// TODO: UserProfileActivity
System.out.println("icon touch!");
}
});
}
private void renderIcon(ProgressBar wait, ImageView icon, String url) {
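// Show the spinner first; if the view is already tagged with this URL the existing
// image is kept, otherwise the bitmap comes from the LruCache or an async download.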
wait.setVisibility(View.VISIBLE);
icon.setVisibility(View.GONE);
String tag = (String) icon.getTag();
if (tag != null && tag.equals(url)) {
Log.d("Justaway", "[image] " + url + " exists.");
} else {
icon.setTag(url);
Bitmap image = mMemoryCache.get(url);
if (image == null) {
Log.d("Justaway", "[cache] " + url + " loading.");
ImageGetTask task = new ImageGetTask(icon, wait);
task.execute(url);
} else {
icon.setImageBitmap(image);
icon.setVisibility(View.VISIBLE);
wait.setVisibility(View.GONE);
}
}
}
private String getClientName(String source) {
String[] tokens = source.split("[<>]");
if (tokens.length > 2) {
return tokens[2];
} else {
return tokens[0];
}
}
class ImageGetTask extends AsyncTask<String, Void, Bitmap> {
private ImageView image;
private String tag;
private ProgressBar bar;
public ImageGetTask(ImageView image, ProgressBar bar) {
this.image = image;
this.bar = bar;
this.tag = image.getTag().toString();
}
@Override
protected Bitmap doInBackground(String... params) {
// Fetch the bitmap over HTTP.
synchronized (context) {
try {
URL imageUrl = new URL(params[0]);
InputStream imageIs;
imageIs = imageUrl.openStream();
Bitmap bitmap = BitmapFactory.decodeStream(imageIs);
return bitmap;
} catch (MalformedURLException e) {
return null;
} catch (IOException e) {
return null;
}
}
}
@Override
protected void onPostExecute(Bitmap bitmap) {
// Only update the ImageView if its tag still matches the URL this task loaded.
if (tag.equals(image.getTag())) {
if (bitmap != null) {
mMemoryCache.put(tag, bitmap);
image.setImageBitmap(bitmap);
} else {
// image.setImageDrawable(context.getResources().getDrawable(
// R.drawable.x));
}
image.setVisibility(View.VISIBLE);
bar.setVisibility(View.GONE);
}
}
}
}
|
package com.bbn.bue.common;
import com.bbn.bue.common.collections.CollectionUtils;
import com.bbn.bue.common.files.FileUtils;
import com.bbn.bue.common.parameters.Parameters;
import com.bbn.bue.common.symbols.Symbol;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import static com.google.common.base.Predicates.in;
/**
* Given a list of files and a number of splits, creates training/test file lists for
* cross-validation. When the files cannot be evenly divided across all splits, extra files are
* distributed as evenly as possible, starting with the first folds. For example, dividing 11 items
* into three folds will result in folds of size (4, 4, 3).
*/
public final class MakeCrossValidationBatches {
private MakeCrossValidationBatches() {
throw new UnsupportedOperationException();
}
public static void main(String[] argv) {
// we wrap the main method in this way to
// ensure a non-zero return value on failure
try {
trueMain(argv);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
private static void errorExit(final String msg) {
System.err.println("Error: " + msg);
System.exit(1);
}
private static void trueMain(String[] argv) throws IOException {
if (argv.length != 1) {
errorExit("Usage: MakeCrossValidationBatches params");
}
final Parameters parameters = Parameters.loadSerifStyle(new File(argv[0]));
final File fileList = parameters.getExistingFile("com.bbn.bue.common.crossValidation.fileList");
final File outputDirectory = parameters.getCreatableDirectory(
"com.bbn.bue.common.crossValidation.outputDir");
final String outputName = parameters.getString("com.bbn.bue.common.crossValidation.outputName");
final int numBatches = parameters.getPositiveInteger(
"com.bbn.bue.common.crossValidation.numBatches");
final int randomSeed = parameters.getInteger("com.bbn.bue.common.crossValidation.randomSeed");
final boolean useDocIdMap = parameters.getBoolean("com.bbn.bue.common.crossValidation.useDocIdMap");
if (numBatches < 1) {
errorExit("Bad numBatches value: Need one or more batches to divide data into");
}
if (useDocIdMap) {
// For the document ID map
final ImmutableMap<Symbol, File> docIdMap =
FileUtils.loadSymbolToFileMap(Files.asCharSource(fileList, Charsets.UTF_8));
// Get the list of docids and shuffle them
final ArrayList<Symbol> docIds = Lists.newArrayList(docIdMap.keySet());
if (numBatches > docIds.size()) {
errorExit("Bad numBatches value: Cannot create more batches than there are input files");
}
Collections.shuffle(docIds, new Random(randomSeed));
// Divide into folds
final ImmutableList<ImmutableList<Symbol>> folds =
CollectionUtils.partitionAlmostEvenly(docIds, numBatches);
int batchNum = 0;
int totalTest = 0;
for (final List<Symbol> docIdsForBatch : folds) {
final Set<Symbol> testDocIds = ImmutableSet.copyOf(docIdsForBatch);
final Set<Symbol> trainDocIds =
Sets.difference(ImmutableSet.copyOf(docIds), testDocIds).immutableCopy();
final Map<Symbol, File> trainDocIdMap =
Maps.filterKeys(docIdMap, in(trainDocIds));
final Map<Symbol, File> testDocIdMap =
Maps.filterKeys(docIdMap, in(testDocIds));
// Write out file maps
final File trainingMapOutputFile = new File(outputDirectory, outputName + "." +
StringUtils.padWithMax(batchNum, numBatches - 1) + ".training.docIDToFileMap");
FileUtils.writeSymbolToFileMap(trainDocIdMap, Files.asCharSink(trainingMapOutputFile,
Charsets.UTF_8));
final File testMapOutputFile = new File(outputDirectory, outputName + "." +
StringUtils.padWithMax(batchNum, numBatches - 1) + ".test.docIDToFileMap");
FileUtils.writeSymbolToFileMap(testDocIdMap, Files.asCharSink(testMapOutputFile,
Charsets.UTF_8));
// Write out file lists
final ImmutableList<File> trainingFilesForBatch = ImmutableList.copyOf(trainDocIdMap.values());
final ImmutableList<File> testFilesForBatch = ImmutableList.copyOf(testDocIdMap.values());
final File trainingOutputFile = new File(outputDirectory, outputName + "." +
StringUtils.padWithMax(batchNum, numBatches - 1) + ".training.list");
FileUtils.writeFileList(trainingFilesForBatch,
Files.asCharSink(trainingOutputFile,
Charsets.UTF_8));
final File testOutputFile = new File(outputDirectory, outputName + "." +
StringUtils.padWithMax(batchNum, numBatches - 1) + ".test.list");
FileUtils.writeFileList(testFilesForBatch,
Files.asCharSink(testOutputFile,
Charsets.UTF_8));
++batchNum;
totalTest += testDocIdMap.size();
}
if(totalTest != docIdMap.size()) {
errorExit("Test files created are not the same length as the input");
}
} else {
// Load the list of files and shuffle.
final List<File> inputFiles = Lists.newArrayList(FileUtils.loadFileList(fileList));
if (Sets.newHashSet(inputFiles).size() != inputFiles.size()) {
errorExit("Input file list contains duplicate entries");
}
else if (numBatches > inputFiles.size()) {
errorExit("Bad numBatches value: Cannot create more batches than there are input files");
}
Collections.shuffle(inputFiles, new Random(randomSeed));
// Divide into folds
final ImmutableList<ImmutableList<File>> folds =
CollectionUtils.partitionAlmostEvenly(inputFiles, numBatches);
int batchNum = 0;
int totalTest = 0;
for (final List<File> testFilesForBatch : folds) {
final Set<File> trainingFilesForBatch =
Sets.difference(ImmutableSet.copyOf(inputFiles), ImmutableSet.copyOf(testFilesForBatch))
.immutableCopy();
final File trainingOutputFile = new File(outputDirectory, outputName + "." +
StringUtils.padWithMax(batchNum, numBatches - 1) + ".training.list");
FileUtils.writeFileList(Ordering.natural().sortedCopy(trainingFilesForBatch),
Files.asCharSink(trainingOutputFile,
Charsets.UTF_8));
final File testOutputFile = new File(outputDirectory, outputName + "." +
StringUtils.padWithMax(batchNum, numBatches - 1) + ".test.list");
FileUtils.writeFileList(Ordering.natural().sortedCopy(testFilesForBatch),
Files.asCharSink(testOutputFile,
Charsets.UTF_8));
++batchNum;
totalTest += testFilesForBatch.size();
}
if(totalTest != inputFiles.size()) {
errorExit("Test files created are not the same length as the input");
}
}
}
}
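
// Illustrative sketch (added for clarity, not part of the original source): the
// fold-size arithmetic described in the class Javadoc above, shown with plain Java
// instead of the CollectionUtils helper. Dividing 11 items into 3 folds yields
// sizes (4, 4, 3), with the extra items going to the earliest folds.
class FoldSizeSketch {
    public static void main(String[] args) {
        int items = 11;
        int folds = 3;
        for (int fold = 0; fold < folds; fold++) {
            int size = items / folds + (fold < items % folds ? 1 : 0);
            System.out.println("fold " + fold + ": " + size + " items");
        }
    }
}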
|
package net.wurstclient.features.mods;
import net.minecraft.network.play.client.CPacketPlayer;
import net.wurstclient.compatibility.WConnection;
import net.wurstclient.compatibility.WMinecraft;
import net.wurstclient.events.listeners.UpdateListener;
@Mod.Info(
description = "Protects you from fall damage.\n" + "Bypasses AntiCheat.",
name = "NoFall",
tags = "no fall",
help = "Mods/NoFall")
@Mod.Bypasses(ghostMode = false, latestNCP = false, olderNCP = false)
public final class NoFallMod extends Mod implements UpdateListener
{
@Override
public void onEnable()
{
wurst.events.add(UpdateListener.class, this);
}
@Override
public void onDisable()
{
wurst.events.remove(UpdateListener.class, this);
}
@Override
public void onUpdate()
{
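// Pretend to be on the ground while falling so the server does not apply
// fall damage on landing.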
if(WMinecraft.getPlayer().fallDistance > 2)
WConnection.sendPacket(new CPacketPlayer(true));
}
}
|
// This source code is available under agreement available at
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
package org.talend.components.common;
import static org.talend.daikon.properties.property.PropertyFactory.*;
import org.talend.daikon.properties.PropertiesImpl;
import org.talend.daikon.properties.presentation.Form;
import org.talend.daikon.properties.property.Property;
public class ProxyProperties extends PropertiesImpl {
public enum ProxyType {
HTTP,
HTTPS,
SOCKS,
FTP
}
public Property<Boolean> useProxy = newBoolean("useProxy").setRequired(); //$NON-NLS-1$
private static final String HOST = "host";
public Property<String> host = newProperty(HOST).setRequired();
private static final String PORT = "port";
public Property<Integer> port = newInteger(PORT).setRequired();
private static final String USERPASSWORD = "userPassword";
public UserPasswordProperties userPassword = new UserPasswordProperties(USERPASSWORD);
public ProxyProperties(String name) {
super(name);
}
@Override
public void setupProperties() {
super.setupProperties();
userPassword.userId.setRequired(false);
userPassword.password.setRequired(false);
}
@Override
public void setupLayout() {
super.setupLayout();
Form form = Form.create(this, Form.MAIN);
form.addRow(useProxy);
form.addRow(host);
form.addRow(port);
form.addRow(userPassword.getForm(Form.MAIN));
}
public void afterUseProxy() {
refreshLayout(getForm(Form.MAIN));
}
@Override
public void refreshLayout(Form form) {
super.refreshLayout(form);
if (form.getName().equals(Form.MAIN)) {
boolean isUseProxy = useProxy.getValue();
form.getWidget(HOST).setHidden(!isUseProxy);
form.getWidget(PORT).setHidden(!isUseProxy);
form.getWidget(USERPASSWORD).setHidden(!isUseProxy);
}
}
}
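
// Illustrative usage sketch (added for clarity, not part of the original source).
// It assumes the usual daikon Properties lifecycle (init() calling setupProperties()
// and setupLayout()) and Property.setValue(); treat those calls as assumptions here.
//
//     ProxyProperties proxy = new ProxyProperties("proxy");
//     proxy.init();
//     proxy.useProxy.setValue(true);
//     proxy.afterUseProxy();   // un-hides the host, port and userPassword widgets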
|
package org.cinchapi.concourse.server;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.net.ServerSocket;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executors;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import org.apache.thrift.TException;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.server.TThreadPoolServer.Args;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TTransportException;
import org.cinchapi.concourse.security.AccessManager;
import org.cinchapi.concourse.server.io.FileSystem;
import org.cinchapi.concourse.server.jmx.ConcourseServerMXBean;
import org.cinchapi.concourse.server.jmx.ManagedOperation;
import org.cinchapi.concourse.server.storage.AtomicOperation;
import org.cinchapi.concourse.server.storage.AtomicStateException;
import org.cinchapi.concourse.server.storage.Compoundable;
import org.cinchapi.concourse.server.storage.Engine;
import org.cinchapi.concourse.server.storage.Transaction;
import org.cinchapi.concourse.shell.CommandLine;
import org.cinchapi.concourse.thrift.AccessToken;
import org.cinchapi.concourse.thrift.ConcourseService;
import org.cinchapi.concourse.thrift.TObject;
import org.cinchapi.concourse.thrift.ConcourseService.Iface;
import org.cinchapi.concourse.thrift.Operator;
import org.cinchapi.concourse.thrift.TransactionToken;
import org.cinchapi.concourse.time.Time;
import org.cinchapi.concourse.util.Logger;
import org.cinchapi.concourse.util.Version;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import static org.cinchapi.concourse.server.GlobalState.*;
/**
* Accepts requests from clients to read and write data in Concourse. The server
* is configured with a {@code concourse.prefs} file.
*
* @author jnelson
*/
public class ConcourseServer implements
ConcourseService.Iface,
ConcourseServerMXBean {
/**
* Run the server...
*
* @param args
* @throws TTransportException
* @throws MalformedObjectNameException
* @throws NotCompliantMBeanException
* @throws MBeanRegistrationException
* @throws InstanceAlreadyExistsException
*/
public static void main(String... args) throws TTransportException,
MalformedObjectNameException, InstanceAlreadyExistsException,
MBeanRegistrationException, NotCompliantMBeanException {
final ConcourseServer server = new ConcourseServer();
// Ensure the application is properly configured
MemoryUsage heap = ManagementFactory.getMemoryMXBean()
.getHeapMemoryUsage();
if(heap.getInit() < MIN_HEAP_SIZE) {
System.err.println("Cannot initialize Concourse Server with "
+ "a heap smaller than " + MIN_HEAP_SIZE + " bytes");
System.exit(127);
}
// Register MXBean
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
ObjectName name = new ObjectName(
"org.cinchapi.concourse.server.jmx:type=ConcourseServerMXBean");
mbs.registerMBean(server, name);
// Start the server...
Thread serverThread = new Thread(new Runnable() {
@Override
public void run() {
try {
CommandLine.displayWelcomeBanner();
server.start();
}
catch (TTransportException e) {
e.printStackTrace();
System.exit(-1);
}
}
}, "main");
serverThread.start();
// Prepare for graceful shutdown...
// NOTE: It may be necessary to run the Java VM with
// -Djava.net.preferIPv4Stack=true
final Thread shutdownThread = new Thread(new Runnable() {
@Override
public void run() {
try {
ServerSocket socket = new ServerSocket(SHUTDOWN_PORT);
socket.accept(); // block until a shutdown request is made
Logger.info("Shutdown request received");
server.stop();
socket.close();
}
catch (Exception e) {
e.printStackTrace();
}
}
}, "Shutdown");
shutdownThread.setDaemon(true);
shutdownThread.start();
// Add a shutdown hook that launches the official {@link ShutdownRunner}
// in cases where the server process is directly killed (i.e. from the
// tanuki scripts)
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
ShutdownRunner.main();
try {
shutdownThread.join();
}
catch (InterruptedException e) {
e.printStackTrace();
}
}
});
}
protected static final int SERVER_PORT = 1717; // This may become
// configurable in a
// prefs file in a
// future release.
private static final int NUM_WORKER_THREADS = 100; // This may become
// configurable in a
// prefs file in a
// future release.
private static final int MIN_HEAP_SIZE = 268435456; // 256 MB
/**
* Contains the credentials used by the {@link #manager}. This file is
* typically located in the root of the server installation.
*/
private static final String ACCESS_FILE = ".access";
private final TServer server;
/**
* The Engine controls all the logic for data storage and retrieval.
*/
private final Engine engine;
/**
* The AccessManager controls access to the server.
*/
private final AccessManager manager;
/**
* The server maintains a collection of {@link Transaction} objects to
* ensure that client requests are properly routed. When the client makes a
* call to {@link #stage(AccessToken)}, a Transaction is started on the
* server and a {@link TransactionToken} is used for the client to reference
* that Transaction in future calls.
*/
private final Map<TransactionToken, Transaction> transactions = Maps
.newHashMap();
/**
* Construct a ConcourseServer that listens on {@link #SERVER_PORT} and
* stores data in {@link Properties#DATA_HOME}.
*
* @throws TTransportException
*/
public ConcourseServer() throws TTransportException {
this(SERVER_PORT, BUFFER_DIRECTORY, DATABASE_DIRECTORY);
}
/**
* Construct a ConcourseServer that listens on {@code port} and store data
* in {@code dbStore} and {@code bufferStore}.
*
* @param port
* @param bufferStore
* @param dbStore
* @throws TTransportException
*/
public ConcourseServer(int port, String bufferStore, String dbStore)
throws TTransportException {
FileSystem.mkdirs(bufferStore);
FileSystem.mkdirs(dbStore);
TServerSocket socket = new TServerSocket(port);
ConcourseService.Processor<Iface> processor = new ConcourseService.Processor<Iface>(
this);
Args args = new TThreadPoolServer.Args(socket);
args.processor(processor);
args.maxWorkerThreads(NUM_WORKER_THREADS);
args.executorService(Executors
.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat(
"Server" + "-%d").build()));
this.server = new TThreadPoolServer(args);
this.engine = new Engine(bufferStore, dbStore);
this.manager = AccessManager.create(ACCESS_FILE);
}
@Override
public void abort(AccessToken creds, TransactionToken transaction)
throws TException {
authenticate(creds);
Preconditions.checkArgument(transaction.getAccessToken().equals(creds)
&& transactions.containsKey(transaction));
transactions.remove(transaction).abort();
}
@Override
public boolean add(String key, TObject value, long record,
AccessToken creds, TransactionToken transaction) throws TException {
authenticate(creds);
Preconditions.checkArgument((transaction != null
&& transaction.getAccessToken().equals(creds) && transactions
.containsKey(transaction)) || transaction == null);
return transaction != null ? transactions.get(transaction).add(key,
value, record) : engine.add(key, value, record);
}
@Override
public Map<Long, String> audit(long record, String key, AccessToken creds,
TransactionToken transaction) throws TException {
authenticate(creds);
if(transaction != null) {
Preconditions.checkArgument(transaction.getAccessToken().equals(
creds)
&& transactions.containsKey(transaction));
Transaction t = transactions.get(transaction);
return Strings.isNullOrEmpty(key) ? t.audit(record) : t.audit(key,
record);
}
return Strings.isNullOrEmpty(key) ? engine.audit(record) : engine
.audit(key, record);
}
@Override
public void clear(String key, long record, AccessToken creds,
TransactionToken transaction) throws TException {
checkAccess(creds, transaction);
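// Retry until the atomic operation commits; doClear(...) returns null when the
// operation aborts early, which also restarts the loop.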
AtomicOperation operation = null;
while (operation == null || !operation.commit()) {
operation = doClear(key, record,
transaction != null ? transactions.get(transaction)
: engine);
}
}
@Override
public boolean commit(AccessToken creds, TransactionToken transaction)
throws TException {
authenticate(creds);
Preconditions.checkArgument(transaction.getAccessToken().equals(creds)
&& transactions.containsKey(transaction));
return transactions.remove(transaction).commit();
}
@Override
public Set<String> describe(long record, long timestamp, AccessToken creds,
TransactionToken transaction) throws TException {
authenticate(creds);
if(transaction != null) {
Preconditions.checkArgument(transaction.getAccessToken().equals(
creds)
&& transactions.containsKey(transaction));
Transaction t = transactions.get(transaction);
return timestamp == 0 ? t.describe(record) : t.describe(record,
timestamp);
}
return timestamp == 0 ? engine.describe(record) : engine.describe(
record, timestamp);
}
@ManagedOperation
@Override
public String dump(String id) {
return engine.dump(id);
}
@Override
public Set<TObject> fetch(String key, long record, long timestamp,
AccessToken creds, TransactionToken transaction) throws TException {
authenticate(creds);
if(transaction != null) {
Preconditions.checkArgument(transaction.getAccessToken().equals(
creds)
&& transactions.containsKey(transaction));
Transaction t = transactions.get(transaction);
return timestamp == 0 ? t.fetch(key, record) : t.fetch(key, record,
timestamp);
}
return timestamp == 0 ? engine.fetch(key, record) : engine.fetch(key,
record, timestamp);
}
@Override
public Set<Long> find(String key, Operator operator, List<TObject> values,
long timestamp, AccessToken creds, TransactionToken transaction)
throws TException {
authenticate(creds);
TObject[] tValues = values.toArray(new TObject[values.size()]);
if(transaction != null) {
Preconditions.checkArgument(transaction.getAccessToken().equals(
creds)
&& transactions.containsKey(transaction));
Transaction t = transactions.get(transaction);
return timestamp == 0 ? t.find(key, operator, tValues) : t.find(
timestamp, key, operator, tValues);
}
return timestamp == 0 ? engine.find(key, operator, tValues) : engine
.find(timestamp, key, operator, tValues);
}
@Override
@ManagedOperation
public String getServerVersion() {
return Version.getVersion(ConcourseServer.class).toString();
}
@Override
@ManagedOperation
public void grant(byte[] username, byte[] password) {
manager.grant(ByteBuffer.wrap(username), ByteBuffer.wrap(password));
username = null;
password = null;
}
@Override
@ManagedOperation
public boolean login(byte[] username, byte[] password) {
// NOTE: Any existing sessions for the user will be invalidated.
try {
AccessToken token = login(ByteBuffer.wrap(username),
ByteBuffer.wrap(password));
username = null;
password = null;
if(token != null) {
logout(token);
return true;
}
else {
return false;
}
}
catch (SecurityException e) {
return false;
}
catch (TException e) {
throw Throwables.propagate(e);
}
}
@Override
public AccessToken login(ByteBuffer username, ByteBuffer password)
throws TException {
validate(username, password);
return manager.authorize(username);
}
@Override
public void logout(AccessToken creds) throws TException {
authenticate(creds);
manager.deauthorize(creds);
}
@Override
public boolean ping(long record, AccessToken creds,
TransactionToken transaction) throws TException {
authenticate(creds);
Preconditions.checkArgument((transaction != null
&& transaction.getAccessToken().equals(creds) && transactions
.containsKey(transaction)) || transaction == null);
return transaction != null ? !transactions.get(transaction)
.describe(record).isEmpty() : !engine.describe(record)
.isEmpty();
}
@Override
public boolean remove(String key, TObject value, long record,
AccessToken creds, TransactionToken transaction) throws TException {
authenticate(creds);
Preconditions.checkArgument((transaction != null
&& transaction.getAccessToken().equals(creds) && transactions
.containsKey(transaction)) || transaction == null);
return transaction != null ? transactions.get(transaction).remove(key,
value, record) : engine.remove(key, value, record);
}
@Override
public void revert(String key, long record, long timestamp,
AccessToken creds, TransactionToken transaction) throws TException {
checkAccess(creds, transaction);
AtomicOperation operation = null;
while (operation == null || !operation.commit()) {
operation = doRevert(key, record, timestamp,
transaction != null ? transactions.get(transaction)
: engine);
}
}
@Override
@ManagedOperation
public void revoke(byte[] username) {
manager.revoke(ByteBuffer.wrap(username));
username = null;
}
@Override
public Set<Long> search(String key, String query, AccessToken creds,
TransactionToken transaction) throws TException {
authenticate(creds);
if(transaction != null) {
Preconditions.checkArgument(transaction.getAccessToken().equals(
creds)
&& transactions.containsKey(transaction));
Transaction t = transactions.get(transaction);
return t.search(key, query);
}
return engine.search(key, query);
}
@Override
public void set0(String key, TObject value, long record, AccessToken creds,
TransactionToken transaction) throws TException {
checkAccess(creds, transaction);
AtomicOperation operation = null;
while (operation == null || !operation.commit()) {
operation = doSet(key, value, record,
transaction != null ? transactions.get(transaction)
: engine);
}
}
@Override
public TransactionToken stage(AccessToken creds) throws TException {
authenticate(creds);
TransactionToken token = new TransactionToken(creds, Time.now());
Transaction transaction = engine.startTransaction();
transactions.put(token, transaction);
Logger.info("Started Transaction {}", transaction.hashCode());
return token;
}
/**
* Start the server.
*
* @throws TTransportException
*/
public void start() throws TTransportException {
engine.start();
System.out.println("The Concourse server has started");
server.serve();
}
/**
* Stop the server.
*/
public void stop() {
if(server.isServing()) {
server.stop();
engine.stop();
System.out.println("The Concourse server has stopped");
}
}
@Override
public boolean verify(String key, TObject value, long record,
long timestamp, AccessToken creds, TransactionToken transaction)
throws TException {
authenticate(creds);
if(transaction != null) {
Preconditions.checkArgument(transaction.getAccessToken().equals(
creds)
&& transactions.containsKey(transaction));
Transaction t = transactions.get(transaction);
return timestamp == 0 ? t.verify(key, value, record) : t.verify(
key, value, record, timestamp);
}
return timestamp == 0 ? engine.verify(key, value, record) : engine
.verify(key, value, record, timestamp);
}
@Override
public boolean verifyAndSwap(String key, TObject expected, long record,
TObject replacement, AccessToken creds, TransactionToken transaction)
throws TException {
checkAccess(creds, transaction);
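// Compare-and-swap: only if the expected value is still present is it removed and
// the replacement added, all inside one atomic operation; any failure returns false.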
AtomicOperation operation = AtomicOperation
.start(transaction != null ? transactions.get(transaction)
: engine);
try {
return (operation.verify(key, expected, record)
&& operation.remove(key, expected, record) && operation
.add(key, replacement, record)) ? operation.commit()
: false;
}
catch (AtomicStateException e) {
return false;
}
}
/**
* Verify that {@code token} is valid.
*
* @param token
* @throws SecurityException
*/
private void authenticate(AccessToken token) throws SecurityException {
if(!manager.validate(token)) {
throw new SecurityException("Invalid access token");
}
}
private void checkAccess(AccessToken creds, TransactionToken transaction)
throws SecurityException, IllegalArgumentException {
authenticate(creds);
Preconditions.checkArgument((transaction != null
&& transaction.getAccessToken().equals(creds) && transactions
.containsKey(transaction)) || transaction == null);
}
/**
* Start an {@link AtomicOperation} with {@code store} as the destination
* and do the work to clear {@code key} in {@code record}.
*
* @param key
* @param record
* @param store
* @return the AtomicOperation
*/
private AtomicOperation doClear(String key, long record, Compoundable store) {
AtomicOperation operation = AtomicOperation.start(store);
try {
Set<TObject> values = operation.fetch(key, record);
for (TObject value : values) {
operation.remove(key, value, record);
}
return operation;
}
catch (AtomicStateException e) {
return null;
}
}
/**
* Start an {@link AtomicOperation} with {@code store} as the destination
* and do the work to revert {@code key} in {@code record} to
* {@code timestamp}.
*
* @param key
* @param record
* @param timestamp
* @param store
* @return the AtomicOperation that must be committed
*/
private AtomicOperation doRevert(String key, long record, long timestamp,
Compoundable store) {
AtomicOperation operation = AtomicOperation.start(store);
try {
Set<TObject> past = operation.fetch(key, record, timestamp);
Set<TObject> present = operation.fetch(key, record);
Set<TObject> xor = Sets.symmetricDifference(past, present);
for (TObject value : xor) {
if(present.contains(value)) {
operation.remove(key, value, record);
}
else {
operation.add(key, value, record);
}
}
return operation;
}
catch (AtomicStateException e) {
return null;
}
}
/**
* Start an {@link AtomicOperation} with {@code store} as the destination
* and do the work to set {@code key} as {@code value} in {@code record}.
*
* @param key
* @param value
* @param record
* @param store
* @return the AtomicOperation that must be committed
*/
private AtomicOperation doSet(String key, TObject value, long record,
Compoundable store) {
// NOTE: We cannot use the #clear() method because our removes must be
// defined in terms of the AtomicOperation for true atomic safety.
AtomicOperation operation = AtomicOperation.start(store);
try {
Set<TObject> values = operation.fetch(key, record);
for (TObject oldValue : values) {
operation.remove(key, oldValue, record);
}
operation.add(key, value, record);
return operation;
}
catch (AtomicStateException e) {
return null;
}
}
/**
* Validate that the {@code username} and {@code password} pair represent
* correct credentials. If not, throw a SecurityException.
*
* @param username
* @param password
* @throws SecurityException
*/
private void validate(ByteBuffer username, ByteBuffer password)
throws SecurityException {
if(!manager.validate(username, password)) {
throw new SecurityException(
"Invalid username/password combination.");
}
}
}
|
package com.yahoo.container.jdisc;
import com.google.common.util.concurrent.AtomicDouble;
import com.google.inject.AbstractModule;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.yahoo.cloud.config.SlobroksConfig;
import com.yahoo.component.Vtag;
import com.yahoo.component.provider.ComponentRegistry;
import com.yahoo.config.ConfigInstance;
import com.yahoo.config.subscription.ConfigInterruptedException;
import com.yahoo.container.Container;
import com.yahoo.container.QrConfig;
import com.yahoo.container.core.ChainsConfig;
import com.yahoo.container.core.config.HandlersConfigurerDi;
import com.yahoo.container.di.CloudSubscriberFactory;
import com.yahoo.container.di.config.Subscriber;
import com.yahoo.container.di.config.SubscriberFactory;
import com.yahoo.container.http.filter.FilterChainRepository;
import com.yahoo.container.jdisc.component.Deconstructor;
import com.yahoo.container.jdisc.metric.DisableGuiceMetric;
import com.yahoo.jdisc.Metric;
import com.yahoo.jdisc.application.Application;
import com.yahoo.jdisc.application.BindingRepository;
import com.yahoo.jdisc.application.ContainerActivator;
import com.yahoo.jdisc.application.ContainerBuilder;
import com.yahoo.jdisc.application.DeactivatedContainer;
import com.yahoo.jdisc.application.GuiceRepository;
import com.yahoo.jdisc.application.OsgiFramework;
import com.yahoo.jdisc.handler.RequestHandler;
import com.yahoo.jdisc.service.ClientProvider;
import com.yahoo.jdisc.service.ServerProvider;
import com.yahoo.jrt.Acceptor;
import com.yahoo.jrt.ListenFailedException;
import com.yahoo.jrt.Spec;
import com.yahoo.jrt.Supervisor;
import com.yahoo.jrt.Transport;
import com.yahoo.jrt.slobrok.api.Register;
import com.yahoo.jrt.slobrok.api.SlobrokList;
import com.yahoo.log.LogSetup;
import com.yahoo.messagebus.network.rpc.SlobrokConfigSubscriber;
import com.yahoo.net.HostName;
import com.yahoo.vespa.config.ConfigKey;
import com.yahoo.yolean.Exceptions;
import com.yahoo.yolean.UncheckedInterruptedException;
import java.util.Collection;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Phaser;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import static com.yahoo.collections.CollectionUtil.first;
/**
* @author Tony Vaagenes
*/
public final class ConfiguredApplication implements Application {
private static final Logger log = Logger.getLogger(ConfiguredApplication.class.getName());
private final Object monitor = new Object();
private final Set<ClientProvider> startedClients = createIdentityHashSet();
private final Set<ServerProvider> startedServers = createIdentityHashSet();
private final SubscriberFactory subscriberFactory;
private final Metric metric;
private final ContainerActivator activator;
private final String configId;
private final OsgiFramework osgiFramework;
private final com.yahoo.jdisc.Timer timerSingleton;
private final AtomicBoolean dumpHeapOnShutdownTimeout = new AtomicBoolean(false);
private final AtomicDouble shutdownTimeoutS = new AtomicDouble(50.0);
// Subscriber that is used when this is not a standalone-container. Subscribes
// to config to make sure that container will be registered in slobrok (by {@link com.yahoo.jrt.slobrok.api.Register})
// if slobrok config changes (typically slobroks moving to other nodes)
private final Optional<SlobrokConfigSubscriber> slobrokConfigSubscriber;
private final ShutdownDeadline shutdownDeadline;
//TODO: FilterChainRepository should instead always be set up in the model.
private final FilterChainRepository defaultFilterChainRepository =
new FilterChainRepository(new ChainsConfig(new ChainsConfig.Builder()),
new ComponentRegistry<>(),
new ComponentRegistry<>(),
new ComponentRegistry<>(),
new ComponentRegistry<>());
private final OsgiFramework restrictedOsgiFramework;
private final Phaser nonTerminatedContainerTracker = new Phaser(1);
private HandlersConfigurerDi configurer;
private Thread reconfigurerThread;
private Thread portWatcher;
private QrConfig qrConfig;
private Register slobrokRegistrator = null;
private Supervisor supervisor = null;
private Acceptor acceptor = null;
static {
LogSetup.initVespaLogging("Container");
log.log(Level.INFO, "Starting jdisc" + (Vtag.currentVersion.isEmpty() ? "" : " at version " + Vtag.currentVersion));
}
/**
* Do not delete this method even if it's empty.
* Calling this methods forces this class to be loaded,
* which runs the static block.
*/
@SuppressWarnings("UnusedDeclaration")
public static void ensureVespaLoggingInitialized() {
}
@Inject
public ConfiguredApplication(ContainerActivator activator,
OsgiFramework osgiFramework,
com.yahoo.jdisc.Timer timer,
SubscriberFactory subscriberFactory,
Metric metric) {
this.activator = activator;
this.osgiFramework = osgiFramework;
this.timerSingleton = timer;
this.subscriberFactory = subscriberFactory;
this.metric = metric;
this.configId = System.getProperty("config.id");
this.slobrokConfigSubscriber = (subscriberFactory instanceof CloudSubscriberFactory)
? Optional.of(new SlobrokConfigSubscriber(configId))
: Optional.empty();
this.restrictedOsgiFramework = new DisableOsgiFramework(new RestrictedBundleContext(osgiFramework.bundleContext()));
this.shutdownDeadline = new ShutdownDeadline(configId);
}
@Override
public void start() {
qrConfig = getConfig(QrConfig.class, true);
reconfigure(qrConfig);
hackToInitializeServer(qrConfig);
ContainerBuilder builder = createBuilderWithGuiceBindings();
configurer = createConfigurer(builder.guiceModules().activate());
initializeAndActivateContainer(builder, () -> {});
startReconfigurerThread();
portWatcher = new Thread(this::watchPortChange, "configured-application-port-watcher");
portWatcher.setDaemon(true);
portWatcher.start();
slobrokRegistrator = registerInSlobrok(qrConfig); // marks this as up
}
/**
* The container has no RPC methods, but we still need an RPC server
* to register in Slobrok to enable orchestration.
*/
private Register registerInSlobrok(QrConfig qrConfig) {
if ( ! qrConfig.rpc().enabled()) return null;
// 1. Set up RPC server
supervisor = new Supervisor(new Transport("slobrok")).setDropEmptyBuffers(true);
Spec listenSpec = new Spec(qrConfig.rpc().port());
try {
acceptor = supervisor.listen(listenSpec);
}
catch (ListenFailedException e) {
throw new RuntimeException("Could not create rpc server listening on " + listenSpec, e);
}
// 2. Register it in slobrok
SlobrokList slobrokList = getSlobrokList();
Spec mySpec = new Spec(HostName.getLocalhost(), acceptor.port());
slobrokRegistrator = new Register(supervisor, slobrokList, mySpec);
slobrokRegistrator.registerName(qrConfig.rpc().slobrokId());
log.log(Level.INFO, "Registered name '" + qrConfig.rpc().slobrokId() +
"' at " + mySpec + " with: " + slobrokList);
return slobrokRegistrator;
}
// Different ways of getting slobrok config depending on whether we have a subscriber (regular setup)
// or need to get the config directly (standalone container)
private SlobrokList getSlobrokList() {
SlobrokList slobrokList;
if (slobrokConfigSubscriber.isPresent()) {
slobrokList = slobrokConfigSubscriber.get().getSlobroks();
} else {
slobrokList = new SlobrokList();
SlobroksConfig slobrokConfig = getConfig(SlobroksConfig.class, true);
slobrokList.setup(slobrokConfig.slobrok().stream().map(SlobroksConfig.Slobrok::connectionspec).toArray(String[]::new));
}
return slobrokList;
}
private void unregisterInSlobrok() {
if (slobrokRegistrator != null)
slobrokRegistrator.shutdown();
if (acceptor != null)
acceptor.shutdown().join();
if (supervisor != null)
supervisor.transport().shutdown().join();
}
private static void hackToInitializeServer(QrConfig config) {
try {
Container.get().setupFileAcquirer(config.filedistributor());
Container.get().setupUrlDownloader();
} catch (Exception e) {
log.log(Level.SEVERE, "Caught exception when initializing server. Exiting.", e);
Runtime.getRuntime().halt(1);
}
}
private <T extends ConfigInstance> T getConfig(Class<T> configClass, boolean isInitializing) {
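// One-shot config read: subscribe, block until a generation is available, take the
// single config instance and close the subscription again.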
Subscriber subscriber = subscriberFactory.getSubscriber(Collections.singleton(new ConfigKey<>(configClass, configId)),
configClass.getName());
try {
subscriber.waitNextGeneration(isInitializing);
return configClass.cast(first(subscriber.config().values()));
} finally {
subscriber.close();
}
}
private void watchPortChange() {
Subscriber subscriber = subscriberFactory.getSubscriber(Collections.singleton(new ConfigKey<>(QrConfig.class, configId)),
"portWatcher");
try {
while (true) {
subscriber.waitNextGeneration(false);
QrConfig newConfig = QrConfig.class.cast(first(subscriber.config().values()));
reconfigure(newConfig);
if (qrConfig.rpc().port() != newConfig.rpc().port()) {
com.yahoo.protect.Process.logAndDie(
"Rpc port config has changed from " +
qrConfig.rpc().port() + " to " + newConfig.rpc().port() +
". This we can not handle without a restart so we will just bail out.");
}
log.fine("Received new QrConfig :" + newConfig);
}
} finally {
subscriber.close();
}
}
void reconfigure(QrConfig qrConfig) {
dumpHeapOnShutdownTimeout.set(qrConfig.shutdown().dumpHeapOnTimeout());
shutdownTimeoutS.set(qrConfig.shutdown().timeout());
}
private void initializeAndActivateContainer(ContainerBuilder builder, Runnable cleanupTask) {
addHandlerBindings(builder, Container.get().getRequestHandlerRegistry(),
configurer.getComponent(ApplicationContext.class).discBindingsConfig);
List<ServerProvider> currentServers = Container.get().getServerProviderRegistry().allComponents();
for (ServerProvider server : currentServers) {
builder.serverProviders().install(server);
}
activateContainer(builder, cleanupTask);
startAndStopServers(currentServers);
startAndRemoveClients(Container.get().getClientProviderRegistry().allComponents());
log.info("Switching to the latest deployed set of configurations and components. " +
"Application config generation: " + configurer.generation());
metric.set("application_generation", configurer.generation(), metric.createContext(Map.of()));
}
private void activateContainer(ContainerBuilder builder, Runnable onPreviousContainerTermination) {
DeactivatedContainer deactivated = activator.activateContainer(builder);
if (deactivated != null) {
nonTerminatedContainerTracker.register();
deactivated.notifyTermination(() -> {
try {
onPreviousContainerTermination.run();
} finally {
nonTerminatedContainerTracker.arriveAndDeregister();
}
});
}
}
private ContainerBuilder createBuilderWithGuiceBindings() {
ContainerBuilder builder = activator.newContainerBuilder();
setupGuiceBindings(builder.guiceModules());
return builder;
}
private void startReconfigurerThread() {
reconfigurerThread = new Thread(() -> {
while ( ! Thread.interrupted()) {
try {
ContainerBuilder builder = createBuilderWithGuiceBindings();
// Block until new config arrives, and it should be applied
Runnable cleanupTask = configurer.waitForNextGraphGeneration(builder.guiceModules().activate(), false);
initializeAndActivateContainer(builder, cleanupTask);
} catch (UncheckedInterruptedException | ConfigInterruptedException e) {
break;
} catch (Exception | LinkageError e) { // LinkageError: OSGi problems
tryReportFailedComponentGraphConstructionMetric(configurer, e);
log.log(Level.SEVERE,
"Reconfiguration failed, your application package must be fixed, unless this is a " +
"JNI reload issue: " + Exceptions.toMessageString(e), e);
} catch (Error e) {
com.yahoo.protect.Process.logAndDie("java.lang.Error on reconfiguration: We are probably in " +
"a bad state and will terminate", e);
}
}
log.fine("Shutting down HandlersConfigurerDi");
}, "configured-application-reconfigurer");
reconfigurerThread.start();
}
private static void tryReportFailedComponentGraphConstructionMetric(HandlersConfigurerDi configurer, Throwable error) {
try {
// We need the Metric instance from previous component graph to report metric values
// Metric may not be available if this is the initial component graph (since metric wiring is done through the config model)
Metric metric = configurer.getComponent(Metric.class);
Metric.Context metricContext = metric.createContext(Map.of("exception", error.getClass().getSimpleName()));
metric.add("jdisc.application.failed_component_graphs", 1L, metricContext);
} catch (Exception e) {
log.log(Level.WARNING, "Failed to report metric for failed component graph: " + e.getMessage(), e);
}
}
private void startAndStopServers(List<ServerProvider> currentServers) {
synchronized (monitor) {
Set<ServerProvider> serversToClose = createIdentityHashSet(startedServers);
serversToClose.removeAll(currentServers);
for (ServerProvider server : serversToClose) {
server.close();
startedServers.remove(server);
}
for (ServerProvider server : currentServers) {
if (!startedServers.contains(server)) {
server.start();
startedServers.add(server);
}
}
}
}
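// The diffing above relies on identity-based sets (see createIdentityHashSet below), so servers
// are matched by object reference rather than by equals()/hashCode(); presumably because the
// same provider component instance is carried across container generations, while value
// equality is not defined for these components.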
private void startAndRemoveClients(List<ClientProvider> currentClients) {
synchronized (monitor) {
Set<ClientProvider> clientToRemove = createIdentityHashSet(startedClients);
clientToRemove.removeAll(currentClients);
for (ClientProvider client : clientToRemove) {
startedClients.remove(client);
}
for (ClientProvider client : currentClients) {
if (!startedClients.contains(client)) {
client.start();
startedClients.add(client);
}
}
}
}
private HandlersConfigurerDi createConfigurer(Injector discInjector) {
return new HandlersConfigurerDi(subscriberFactory,
Container.get(),
configId,
new Deconstructor(),
discInjector,
osgiFramework);
}
private void setupGuiceBindings(GuiceRepository modules) {
modules.install(new AbstractModule() {
@Override
protected void configure() {
bind(Metric.class).to(DisableGuiceMetric.class);
bind(OsgiFramework.class).toInstance(restrictedOsgiFramework);
bind(com.yahoo.jdisc.Timer.class).toInstance(timerSingleton);
bind(FilterChainRepository.class).toInstance(defaultFilterChainRepository);
}
});
}
@Override
public void stop() {
shutdownDeadline.schedule((long)(shutdownTimeoutS.get() * 1000), dumpHeapOnShutdownTimeout.get());
shutdownReconfigurerThread();
log.info("Stop: Closing servers");
startAndStopServers(List.of());
startAndRemoveClients(List.of());
log.info("Stop: Shutting container down");
activateContainer(null, () -> {
configurer.shutdown();
slobrokConfigSubscriber.ifPresent(SlobrokConfigSubscriber::shutdown);
Container.get().shutdown();
unregisterInSlobrok();
LogSetup.cleanup();
log.info("Stop: Finished");
});
nonTerminatedContainerTracker.arriveAndAwaitAdvance();
}
// TODO Do more graceful shutdown of reconfigurer thread. The interrupt may leave the container in state where
// graceful shutdown is impossible or may hang.
private void shutdownReconfigurerThread() {
if (reconfigurerThread == null) return;
reconfigurerThread.interrupt();
try {
//Workaround for component constructors masking InterruptedException.
while (reconfigurerThread.isAlive()) {
reconfigurerThread.interrupt();
long millis = 200;
reconfigurerThread.join(millis);
}
} catch (InterruptedException e) {
log.info("Interrupted while joining on HandlersConfigurer reconfigure thread.");
Thread.currentThread().interrupt();
}
}
@Override
public void destroy() {
shutdownDeadline.cancel();
log.info("Destroy: completed");
}
private static void addHandlerBindings(ContainerBuilder builder,
ComponentRegistry<RequestHandler> requestHandlerRegistry,
JdiscBindingsConfig discBindingsConfig) {
for (Map.Entry<String, JdiscBindingsConfig.Handlers> handlerEntry : discBindingsConfig.handlers().entrySet()) {
String id = handlerEntry.getKey();
JdiscBindingsConfig.Handlers handlerConfig = handlerEntry.getValue();
RequestHandler handler = requestHandlerRegistry.getComponent(id);
if (handler == null) {
throw new RuntimeException("Binding configured for non-jdisc request handler " + id);
}
bindUri(builder.serverBindings(), handlerConfig.serverBindings(), handler);
bindUri(builder.clientBindings(), handlerConfig.clientBindings(), handler);
}
}
private static void bindUri(BindingRepository<RequestHandler> bindings, List<String> uriPatterns,
RequestHandler target) {
for (String uri : uriPatterns) {
bindings.bind(uri, target);
}
}
private static <E> Set<E> createIdentityHashSet() {
return Collections.newSetFromMap(new IdentityHashMap<>());
}
private static <E> Set<E> createIdentityHashSet(Collection<E> items) {
Set<E> set = createIdentityHashSet();
set.addAll(items);
return set;
}
public static final class ApplicationContext {
final JdiscBindingsConfig discBindingsConfig;
public ApplicationContext(com.yahoo.container.jdisc.JdiscBindingsConfig discBindingsConfig) {
this.discBindingsConfig = discBindingsConfig;
}
}
}
|
package whelk.importer;
import io.prometheus.client.Counter;
import se.kb.libris.util.marc.Datafield;
import se.kb.libris.util.marc.Field;
import se.kb.libris.util.marc.MarcRecord;
import whelk.Document;
import whelk.IdGenerator;
import whelk.JsonLd;
import whelk.Whelk;
import whelk.component.ElasticSearch;
import whelk.component.PostgreSQLComponent;
import whelk.converter.MarcJSONConverter;
import whelk.converter.marc.MarcFrameConverter;
import whelk.util.LegacyIntegrationTools;
import whelk.util.PropertyLoader;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.sql.*;
import java.util.*;
class XL
{
private Whelk m_whelk;
private Parameters m_parameters;
private Properties m_properties;
private MarcFrameConverter m_marcFrameConverter;
// The predicates listed here are those that must always be represented as lists in jsonld, even if the list
// has only a single member.
private Set<String> m_forcedSetTerms;
private final static String IMPORT_SYSTEM_CODE = "batch import";
XL(Parameters parameters) throws IOException
{
m_parameters = parameters;
m_properties = PropertyLoader.loadProperties("secret");
PostgreSQLComponent storage = new PostgreSQLComponent(m_properties.getProperty("sqlUrl"), m_properties.getProperty("sqlMaintable"));
ElasticSearch elastic = new ElasticSearch(m_properties.getProperty("elasticHost"), m_properties.getProperty("elasticCluster"), m_properties.getProperty("elasticIndex"));
m_whelk = new Whelk(storage, elastic);
m_whelk.loadCoreData();
m_forcedSetTerms = new JsonLd(m_whelk.getDisplayData(), m_whelk.getVocabData()).getForcedSetTerms();
m_marcFrameConverter = new MarcFrameConverter();
}
/**
* Writes an ISO2709 MarcRecord to LibrisXL. Returns a resource ID if the resulting document (merged or new) was in "bib".
* This ID should then be passed (as 'relatedWithBibResourceId') when importing any subsequent related holdings post.
* Returns null when supplied a hold post.
*/
String importISO2709(MarcRecord incomingMarcRecord,
String relatedWithBibResourceId,
Counter importedBibRecords,
Counter importedHoldRecords,
Counter enrichedBibRecords,
Counter enrichedHoldRecords,
Counter encounteredMulBibs)
throws Exception
{
String collection = "bib"; // assumption
if (incomingMarcRecord.getLeader(6) == 'u' || incomingMarcRecord.getLeader(6) == 'v' ||
incomingMarcRecord.getLeader(6) == 'x' || incomingMarcRecord.getLeader(6) == 'y')
collection = "hold";
Set<String> duplicateIDs = getDuplicates(incomingMarcRecord, collection, relatedWithBibResourceId);
String resultingResourceId = null;
//System.err.println("Incoming [" + collection + "] document had: " + duplicateIDs.size() + " existing duplicates:\n" + duplicateIDs);
if (duplicateIDs.size() == 0) // No coinciding documents, simple import
{
resultingResourceId = importNewRecord(incomingMarcRecord, collection, relatedWithBibResourceId);
if (collection.equals("bib"))
importedBibRecords.inc();
else
importedHoldRecords.inc();
}
else if (duplicateIDs.size() == 1)
{
// Enrich (or "merge")
resultingResourceId = enrichRecord( (String) duplicateIDs.toArray()[0], incomingMarcRecord, collection, relatedWithBibResourceId );
if (collection.equals("bib"))
enrichedBibRecords.inc();
else
enrichedHoldRecords.inc();
}
else
{
// Multiple coinciding documents.
encounteredMulBibs.inc();
if (m_parameters.getEnrichMulDup())
{
for (String id : duplicateIDs)
{
enrichRecord( id, incomingMarcRecord, collection, relatedWithBibResourceId );
}
}
if (collection.equals("bib"))
{
// In order to keep the program deterministic, the bib post to which subsequent holdings should attach
// when there are multiple duplicates is defined as the one with the "lowest" alpha numeric id.
List<String> duplicateList = new ArrayList<>(duplicateIDs);
Collections.sort(duplicateList);
resultingResourceId = m_whelk.getStorage().getThingId(duplicateList.get(0));
}
else
resultingResourceId = null;
}
return resultingResourceId;
}
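// A minimal usage sketch (illustrative only; the variable names are invented): a bib record is
// imported first, and the returned resource id is then passed along when importing the holdings
// that belong to it.
//
//   String bibResourceId = xl.importISO2709(bibRecord, null,
//           importedBibRecords, importedHoldRecords,
//           enrichedBibRecords, enrichedHoldRecords, encounteredMulBibs);
//   xl.importISO2709(holdRecord, bibResourceId,
//           importedBibRecords, importedHoldRecords,
//           enrichedBibRecords, enrichedHoldRecords, encounteredMulBibs);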
private String importNewRecord(MarcRecord marcRecord, String collection, String relatedWithBibResourceId)
{
// Delete any existing 001 fields
String generatedId = IdGenerator.generate();
while (marcRecord.getControlfields("001").size() != 0)
{
marcRecord.getFields().remove(marcRecord.getControlfields("001").get(0));
}
// Always write a new 001. If one existed in the imported post it was moved to 035a.
// If it was not (because it was a valid libris id) then it was checked as a duplicate and
// duplicateIDs.size would be > 0, and we would not be here.
marcRecord.addField(marcRecord.createControlfield("001", generatedId));
Document rdfDoc = convertToRDF(marcRecord, generatedId);
if (collection.equals("hold"))
rdfDoc.setHoldingFor(relatedWithBibResourceId);
if (!m_parameters.getReadOnly())
{
m_whelk.store(rdfDoc, IMPORT_SYSTEM_CODE, null, collection, false);
}
else
System.out.println("Would now (if --live had been specified) have written the following json-ld to whelk as a new record:\n"
+ rdfDoc.getDataAsString());
if (collection.equals("bib"))
return rdfDoc.getThingIdentifiers().get(0);
return null;
}
private String enrichRecord(String ourId, MarcRecord incomingMarcRecord, String collection, String relatedWithBibResourceId)
throws IOException
{
Document rdfDoc = convertToRDF(incomingMarcRecord, ourId);
if (collection.equals("hold"))
rdfDoc.setHoldingFor(relatedWithBibResourceId);
if (!m_parameters.getReadOnly())
{
try
{
m_whelk.storeAtomicUpdate(ourId, false, IMPORT_SYSTEM_CODE, null, collection, false,
(Document doc) ->
{
if (collection.equals("bib"))
{
String encodingLevel = doc.getEncodingLevel();
if (encodingLevel == null || !encodingLevel.equals("marc:PartialPreliminaryLevel"))
throw new TooHighEncodingLevelException();
}
try
{
enrich( doc, rdfDoc );
} catch (IOException e)
{
throw new UncheckedIOException(e);
}
});
}
catch (TooHighEncodingLevelException e)
{
System.out.println("Not enriching id: " + ourId + ", because it no longer has encoding level marc:PartialPreliminaryLevel");
}
}
else
{
Document doc = m_whelk.getStorage().load( ourId );
enrich( doc, rdfDoc );
System.out.println("Would now (if --live had been specified) have written the following (merged) json-ld to whelk:\n");
System.out.println("id:\n" + doc.getShortId());
System.out.println("data:\n" + doc.getDataAsString());
}
if (collection.equals("bib"))
return rdfDoc.getThingIdentifiers().get(0);
return null;
}
private void enrich(Document mutableDocument, Document withDocument)
throws IOException
{
JsonldSerializer serializer = new JsonldSerializer();
List<String[]> withTriples = serializer.deserialize(withDocument.data);
List<String[]> originalTriples = serializer.deserialize(mutableDocument.data);
Graph originalGraph = new Graph(originalTriples);
Graph withGraph = new Graph(withTriples);
// This is temporary, these special rules should not be hardcoded here, but rather obtained from (presumably)
// whelk-core's marcframe.json.
Map<String, Graph.PREDICATE_RULES> specialRules = new HashMap<>();
specialRules.put("created", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL);
specialRules.put("controlNumber", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL);
specialRules.put("modified", Graph.PREDICATE_RULES.RULE_PREFER_INCOMING);
specialRules.put("marc:encLevel", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL);
originalGraph.enrichWith(withGraph, specialRules);
Map enrichedData = JsonldSerializer.serialize(originalGraph.getTriples(), m_forcedSetTerms);
JsonldSerializer.normalize(enrichedData, mutableDocument.getShortId());
mutableDocument.data = enrichedData;
}
private Document convertToRDF(MarcRecord marcRecord, String id)
{
// The conversion process needs a 001 field to work correctly.
if (marcRecord.getControlfields("001").size() == 0)
marcRecord.addField(marcRecord.createControlfield("001", id));
Map convertedData = m_marcFrameConverter.convert(MarcJSONConverter.toJSONMap(marcRecord), id);
Document convertedDocument = new Document(convertedData);
convertedDocument.setId(id);
return convertedDocument;
}
private Set<String> getDuplicates(MarcRecord marcRecord, String collection, String relatedWithBibResourceId)
throws SQLException
{
switch (collection)
{
case "bib":
return getBibDuplicates(marcRecord);
case "hold":
return getHoldDuplicates(marcRecord, relatedWithBibResourceId);
default:
return new HashSet<>();
}
}
private Set<String> getHoldDuplicates(MarcRecord marcRecord, String relatedWithBibResourceId)
throws SQLException
{
Set<String> duplicateIDs = new HashSet<>();
// Assumes the post being imported carries a valid libris id in 001, and "SE-LIBR" or "LIBRIS" in 003
duplicateIDs.addAll(getDuplicatesOnLibrisID(marcRecord, "hold"));
duplicateIDs.addAll(getDuplicatesOnHeldByHoldingFor(marcRecord, relatedWithBibResourceId));
return duplicateIDs;
}
private Set<String> getBibDuplicates(MarcRecord marcRecord)
throws SQLException
{
Set<String> duplicateIDs = new HashSet<>();
for (Parameters.DUPLICATION_TYPE dupType : m_parameters.getDuplicationTypes())
{
switch (dupType)
{
case DUPTYPE_ISBNA: // International Standard Book Number (only from subfield A)
for (Field field : marcRecord.getFields("020"))
{
String isbn = DigId.grepIsbna( (Datafield) field );
if (isbn != null)
{
duplicateIDs.addAll(getDuplicatesOnISBN( isbn.toLowerCase() ));
duplicateIDs.addAll(getDuplicatesOnISBN( isbn.toUpperCase() ));
}
}
break;
case DUPTYPE_ISBNZ: // International Standard Book Number (only from subfield Z)
for (Field field : marcRecord.getFields("020"))
{
String isbn = DigId.grepIsbnz( (Datafield) field );
if (isbn != null)
{
duplicateIDs.addAll(getDuplicatesOnISBN( isbn.toLowerCase() ));
duplicateIDs.addAll(getDuplicatesOnISBN( isbn.toUpperCase() ));
}
}
break;
case DUPTYPE_ISSNA: // International Standard Serial Number (only from marc 022_A)
for (Field field : marcRecord.getFields("022"))
{
String issn = DigId.grepIssn( (Datafield) field, 'a' );
if (issn != null)
{
duplicateIDs.addAll(getDuplicatesOnISSN( issn.toLowerCase() ));
duplicateIDs.addAll(getDuplicatesOnISSN( issn.toUpperCase() ));
}
}
break;
case DUPTYPE_ISSNZ: // International Standard Serial Number (only from marc 022_Z)
for (Field field : marcRecord.getFields("022"))
{
String issn = DigId.grepIssn( (Datafield) field, 'z' );
if (issn != null)
{
duplicateIDs.addAll(getDuplicatesOnISSN( issn.toLowerCase() ));
duplicateIDs.addAll(getDuplicatesOnISSN( issn.toUpperCase() ));
}
}
break;
case DUPTYPE_035A:
// Unique id number in another system. The 035a of the post being imported will be checked against
// the @graph,0,systemNumber array of existing posts
duplicateIDs.addAll(getDuplicatesOn035a(marcRecord));
break;
case DUPTYPE_LIBRISID:
// Assumes the post being imported carries a valid libris id in 001, and "SE-LIBR" or "LIBRIS" in 003
duplicateIDs.addAll(getDuplicatesOnLibrisID(marcRecord, "bib"));
break;
}
}
return duplicateIDs;
}
private List<String> getDuplicatesOnLibrisID(MarcRecord marcRecord, String collection)
throws SQLException
{
String librisId = DigId.grepLibrisId(marcRecord);
if (librisId == null)
return new ArrayList<>();
// completely numeric? = classic voyager id.
// In theory an xl id could (though insanely unlikely) also be numeric :(
if (librisId.matches("[0-9]+"))
{
librisId = "http://libris.kb.se/"+collection+"/"+librisId;
}
else if ( ! librisId.startsWith(Document.getBASE_URI().toString()))
{
librisId = Document.getBASE_URI().toString() + librisId;
}
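// For example (values are illustrative): a purely numeric 001 such as "1234567" becomes
// "http://libris.kb.se/bib/1234567" (or http://libris.kb.se/hold/1234567 when looking up holdings),
// while a non-numeric XL id such as "fxv6pn8lkd92qtr" is prefixed with Document.getBASE_URI()
// unless it already starts with that prefix.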
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnId_ps(connection, librisId);
ResultSet resultSet = statement.executeQuery())
{
return collectIDs(resultSet);
}
}
private List<String> getDuplicatesOn035a(MarcRecord marcRecord)
throws SQLException
{
List<String> results = new ArrayList<>();
for (Field field : marcRecord.getFields("035"))
{
String systemNumber = DigId.grep035a( (Datafield) field );
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnSystemNumber_ps(connection, systemNumber);
ResultSet resultSet = statement.executeQuery())
{
results.addAll( collectIDs(resultSet) );
}
}
return results;
}
private List<String> getDuplicatesOnISBN(String isbn)
throws SQLException
{
if (isbn == null)
return new ArrayList<>();
String numericIsbn = isbn.replaceAll("-", "");
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnISBN_ps(connection, numericIsbn);
ResultSet resultSet = statement.executeQuery())
{
return collectIDs(resultSet);
}
}
private List<String> getDuplicatesOnISSN(String issn)
throws SQLException
{
if (issn == null)
return new ArrayList<>();
String numericIssn = issn.replaceAll("-", "");
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnISSN_ps(connection, numericIssn);
ResultSet resultSet = statement.executeQuery())
{
return collectIDs(resultSet);
}
}
private List<String> getDuplicatesOnHeldByHoldingFor(MarcRecord marcRecord, String relatedWithBibResourceId)
throws SQLException
{
if (marcRecord.getFields("852").size() < 1)
return new ArrayList<>();
Datafield df = (Datafield) marcRecord.getFields("852").get(0);
if (df.getSubfields("b").size() < 1)
return new ArrayList<>();
String sigel = df.getSubfields("b").get(0).getData();
String library = LegacyIntegrationTools.legacySigelToUri(sigel);
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnHeldByHoldingFor_ps(connection, library, relatedWithBibResourceId);
ResultSet resultSet = statement.executeQuery())
{
return collectIDs(resultSet);
}
}
private PreparedStatement getOnId_ps(Connection connection, String id)
throws SQLException
{
String query = "SELECT id FROM lddb__identifiers WHERE iri = ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setString(1, id);
return statement;
}
/**
* "System number" is our ld equivalent of marc's 035a
*/
private PreparedStatement getOnSystemNumber_ps(Connection connection, String systemNumber)
throws SQLException
{
String query = "SELECT id FROM lddb WHERE data#>'{@graph,0,identifiedBy}' @> ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setObject(1, "[{\"@type\": \"SystemNumber\", \"value\": \"" + systemNumber + "\"}]", java.sql.Types.OTHER);
return statement;
}
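// Sketch of what the containment match above does (the example value is made up): a stored
// record whose record-level data contains
//   "identifiedBy": [ { "@type": "SystemNumber", "value": "(Elib)1234567" } ]
// is matched when the bound parameter is the JSON array
//   [{"@type": "SystemNumber", "value": "(Elib)1234567"}]
// since Postgres' jsonb @> operator tests that the right-hand value is contained in the left.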
private PreparedStatement getOnISBN_ps(Connection connection, String isbn)
throws SQLException
{
// required to be completely numeric (base 11, 0-9+x).
if (!isbn.matches("[\\dxX]+"))
isbn = "0";
String query = "SELECT id FROM lddb WHERE data#>'{@graph,1,identifiedBy}' @> ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setObject(1, "[{\"@type\": \"ISBN\", \"value\": \"" + isbn + "\"}]", java.sql.Types.OTHER);
return statement;
}
private PreparedStatement getOnISSN_ps(Connection connection, String issn)
throws SQLException
{
// required to be completely numeric (base 11, 0-9+x).
if (!issn.matches("[\\dxX]+"))
issn = "0";
String query = "SELECT id FROM lddb WHERE data#>'{@graph,1,identifiedBy}' @> ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setObject(1, "[{\"@type\": \"ISSN\", \"value\": \"" + issn + "\"}]", java.sql.Types.OTHER);
return statement;
}
private PreparedStatement getOnHeldByHoldingFor_ps(Connection connection, String heldBy, String holdingForId)
throws SQLException
{
String libraryUri = LegacyIntegrationTools.legacySigelToUri(heldBy);
String query =
"SELECT lddb.id from lddb " +
"INNER JOIN lddb__identifiers id1 ON lddb.data#>>'{@graph,1,itemOf,@id}' = id1.iri " +
"INNER JOIN lddb__identifiers id2 ON id1.id = id2.id " +
"WHERE " +
"data#>>'{@graph,1,heldBy,@id}' = ? " +
"AND " +
"id2.iri = ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setString(1, libraryUri);
statement.setString(2, holdingForId);
return statement;
}
private List<String> collectIDs(ResultSet resultSet)
throws SQLException
{
List<String> ids = new ArrayList<>();
while (resultSet.next())
{
ids.add(resultSet.getString("id"));
}
return ids;
}
private class TooHighEncodingLevelException extends RuntimeException {}
}
|
package whelk.importer;
import groovy.lang.Tuple;
import io.prometheus.client.Counter;
import se.kb.libris.util.marc.Datafield;
import se.kb.libris.util.marc.Field;
import se.kb.libris.util.marc.MarcRecord;
import se.kb.libris.util.marc.impl.MarcRecordImpl;
import se.kb.libris.utils.isbn.ConvertException;
import se.kb.libris.utils.isbn.Isbn;
import se.kb.libris.utils.isbn.IsbnException;
import se.kb.libris.utils.isbn.IsbnParser;
import whelk.Document;
import whelk.IdGenerator;
import whelk.Whelk;
import whelk.converter.MarcJSONConverter;
import whelk.converter.marc.MarcFrameConverter;
import whelk.exception.TooHighEncodingLevelException;
import whelk.filter.LinkFinder;
import whelk.util.LegacyIntegrationTools;
import whelk.util.PropertyLoader;
import whelk.triples.*;
import java.io.IOException;
import java.sql.*;
import java.util.*;
import java.util.function.BiFunction;
class XL
{
private static final String ENC_PRELIMINARY_STATUS = "marc:PartialPreliminaryLevel";
private static final String ENC_PREPUBLICATION_STATUS = "marc:PrepublicationLevel";
private static final String ENC_ABBREVIATED_STATUS = "marc:AbbreviatedLevel";
private static final String ENC_MINIMAL_STATUS = "marc:MinimalLevel";
private Whelk m_whelk;
private LinkFinder m_linkfinder;
private Parameters m_parameters;
private Properties m_properties;
private MarcFrameConverter m_marcFrameConverter;
private static boolean verbose = false;
// The predicates listed here are those that must always be represented as lists in jsonld, even if the list
// has only a single member.
private Set<String> m_repeatableTerms;
private final String IMPORT_SYSTEM_CODE;
XL(Parameters parameters) throws IOException
{
m_parameters = parameters;
verbose = m_parameters.getVerbose();
m_properties = PropertyLoader.loadProperties("secret");
m_whelk = Whelk.createLoadedSearchWhelk(m_properties);
m_repeatableTerms = m_whelk.getJsonld().getRepeatableTerms();
m_marcFrameConverter = m_whelk.createMarcFrameConverter();
m_linkfinder = new LinkFinder(m_whelk.getStorage());
if (parameters.getChangedIn() != null)
IMPORT_SYSTEM_CODE = parameters.getChangedIn();
else
IMPORT_SYSTEM_CODE = "batch import";
}
/**
* Writes an ISO2709 MarcRecord to LibrisXL. Returns a resource ID if the resulting document (merged or new) was in "bib".
* This ID should then be passed (as 'relatedWithBibResourceId') when importing any subsequent related holdings post.
* Returns null when supplied a hold post.
*/
String importISO2709(MarcRecord incomingMarcRecord,
String relatedWithBibResourceId,
Counter importedBibRecords,
Counter importedHoldRecords,
Counter enrichedBibRecords,
Counter enrichedHoldRecords,
Counter encounteredMulBibs)
throws Exception
{
String collection = "bib"; // assumption
if (incomingMarcRecord.getLeader(6) == 'u' || incomingMarcRecord.getLeader(6) == 'v' ||
incomingMarcRecord.getLeader(6) == 'x' || incomingMarcRecord.getLeader(6) == 'y')
collection = "hold";
Set<String> duplicateIDs = getDuplicates(incomingMarcRecord, collection, relatedWithBibResourceId);
String resultingResourceId = null;
//System.err.println("Incoming [" + collection + "] document had: " + duplicateIDs.size() + " existing duplicates:\n" + duplicateIDs);
// If an incoming holding record is marked deleted, attempt to find any duplicates for it in Libris and delete them.
if (collection.equals("hold") && incomingMarcRecord.getLeader(5) == 'd')
{
for (String id : duplicateIDs)
m_whelk.remove(id, IMPORT_SYSTEM_CODE, null);
return null;
}
if (duplicateIDs.size() == 0) // No coinciding documents, simple import
{
resultingResourceId = importNewRecord(incomingMarcRecord, collection, relatedWithBibResourceId, null);
if (collection.equals("bib"))
importedBibRecords.inc();
else
importedHoldRecords.inc();
}
else if (duplicateIDs.size() == 1) // merge, keep or replace
{
// replace
if ((m_parameters.getReplaceBib() && collection.equals("bib")) ||
m_parameters.getReplaceHold() && collection.equals("hold"))
{
String idToReplace = duplicateIDs.iterator().next();
resultingResourceId = importNewRecord(incomingMarcRecord, collection, relatedWithBibResourceId, idToReplace);
}
// merge
else if ((m_parameters.getMergeBib() && collection.equals("bib")) ||
m_parameters.getMergeHold() && collection.equals("hold"))
{
resultingResourceId = enrichRecord((String) duplicateIDs.toArray()[0], incomingMarcRecord, collection, relatedWithBibResourceId);
}
// Keep existing
else
{
if (collection.equals("bib"))
{
String duplicateId = (String) duplicateIDs.toArray()[0];
if (!duplicateId.startsWith(Document.getBASE_URI().toString()))
duplicateId = Document.getBASE_URI().toString() + duplicateId;
resultingResourceId = m_whelk.getStorage().getThingId(duplicateId);
}
else
resultingResourceId = null;
}
}
else
{
// Multiple coinciding documents.
encounteredMulBibs.inc();
if (m_parameters.getEnrichMulDup())
{
for (String id : duplicateIDs)
{
enrichRecord( id, incomingMarcRecord, collection, relatedWithBibResourceId );
}
}
if (collection.equals("bib"))
{
// In order to keep the program deterministic, the bib post to which subsequent holdings should attach
// when there are multiple duplicates is defined as the one with the "lowest" alpha numeric id.
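// For example (ids are made up): if duplicateIDs contains {"s93kxw2qpl0dhfzn", "b1c4df7mkq2wp8r"},
// sorting puts "b1c4df7mkq2wp8r" first, so every subsequent holding attaches to that record
// regardless of the order in which the duplicates were found.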
List<String> duplicateList = new ArrayList<>(duplicateIDs);
Collections.sort(duplicateList);
String selectedDuplicateId = duplicateList.get(0);
if (!selectedDuplicateId.startsWith(Document.getBASE_URI().toString()))
selectedDuplicateId = Document.getBASE_URI().toString() + selectedDuplicateId;
resultingResourceId = m_whelk.getStorage().getThingId(selectedDuplicateId);
}
else
resultingResourceId = null;
}
return resultingResourceId;
}
private String importNewRecord(MarcRecord marcRecord, String collection, String relatedWithBibResourceId, String replaceSystemId)
{
String incomingId = IdGenerator.generate();
if (replaceSystemId != null)
incomingId = replaceSystemId;
Document rdfDoc = convertToRDF(marcRecord, incomingId);
if (collection.equals("hold"))
rdfDoc.setHoldingFor(relatedWithBibResourceId);
if (!m_parameters.getReadOnly())
{
rdfDoc.setRecordStatus(ENC_PRELIMINARY_STATUS);
// Doing a replace (but preserving old IDs)
if (replaceSystemId != null)
{
try
{
m_whelk.getStorage().storeAtomicUpdate(replaceSystemId, false, IMPORT_SYSTEM_CODE, m_parameters.getChangedBy(),
(Document doc) ->
{
String existingEncodingLevel = doc.getEncodingLevel();
String newEncodingLevel = rdfDoc.getEncodingLevel();
if (!collection.equals("hold"))
if (existingEncodingLevel == null || !mayOverwriteExistingEncodingLevel(existingEncodingLevel, newEncodingLevel))
throw new TooHighEncodingLevelException();
List<String> recordIDs = doc.getRecordIdentifiers();
List<String> thingIDs = doc.getThingIdentifiers();
String controlNumber = doc.getControlNumber();
List<Tuple> typedIDs = doc.getTypedRecordIdentifiers();
List<String> systemNumbers = new ArrayList<>();
for (Tuple tuple : typedIDs)
if (tuple.get(0).equals("SystemNumber"))
systemNumbers.add( (String) tuple.get(1) );
doc.data = rdfDoc.data;
// The mainID must remain unaffected.
doc.deepPromoteId(recordIDs.get(0));
for (String recordID : recordIDs)
doc.addRecordIdentifier(recordID);
for (String thingID : thingIDs)
doc.addThingIdentifier(thingID);
for (String systemNumber : systemNumbers)
doc.addTypedRecordIdentifier("SystemNumber", systemNumber);
if (controlNumber != null)
doc.setControlNumber(controlNumber);
});
}
catch (TooHighEncodingLevelException e)
{
if ( verbose )
{
System.out.println("info: Not replacing id: " + replaceSystemId + ", because it no longer has encoding level marc:PartialPreliminaryLevel");
}
}
}
else
{
// Doing simple "new"
m_whelk.createDocument(rdfDoc, IMPORT_SYSTEM_CODE, m_parameters.getChangedBy(), collection, false);
}
}
else
{
if ( verbose )
{
System.out.println("info: Would now (if --live had been specified) have written the following json-ld to whelk as a new record:\n"
+ rdfDoc.getDataAsString());
}
}
if (collection.equals("bib"))
return rdfDoc.getThingIdentifiers().get(0);
return null;
}
private String enrichRecord(String ourId, MarcRecord incomingMarcRecord, String collection, String relatedWithBibResourceId)
throws IOException
{
Document rdfDoc = convertToRDF(incomingMarcRecord, ourId);
if (collection.equals("hold"))
rdfDoc.setHoldingFor(relatedWithBibResourceId);
if (!m_parameters.getReadOnly())
{
try
{
m_whelk.storeAtomicUpdate(ourId, false, IMPORT_SYSTEM_CODE, m_parameters.getChangedBy(),
(Document doc) ->
{
if (collection.equals("bib"))
{
String existingEncodingLevel = doc.getEncodingLevel();
String newEncodingLevel = rdfDoc.getEncodingLevel();
if (existingEncodingLevel == null || !mayOverwriteExistingEncodingLevel(existingEncodingLevel, newEncodingLevel))
throw new TooHighEncodingLevelException();
}
enrich( doc, rdfDoc );
});
}
catch (TooHighEncodingLevelException e)
{
if ( verbose )
{
System.out.println("info: Not enriching id: " + ourId + ", because it no longer has encoding level marc:PartialPreliminaryLevel");
}
}
}
else
{
Document doc = m_whelk.getStorage().load( ourId );
enrich( doc, rdfDoc );
if ( verbose )
{
System.out.println("info: Would now (if --live had been specified) have written the following (merged) json-ld to whelk:\n");
System.out.println("id:\n" + doc.getShortId());
System.out.println("data:\n" + doc.getDataAsString());
}
}
if (collection.equals("bib"))
return rdfDoc.getThingIdentifiers().get(0);
return null;
}
private boolean mayOverwriteExistingEncodingLevel(String existingEncodingLevel, String newEncodingLevel)
{
if (newEncodingLevel == null || existingEncodingLevel == null)
return false;
switch (newEncodingLevel)
{
case ENC_PRELIMINARY_STATUS:
if (existingEncodingLevel.equals(ENC_PRELIMINARY_STATUS))
return true;
break;
case ENC_PREPUBLICATION_STATUS:
if (existingEncodingLevel.equals(ENC_PRELIMINARY_STATUS) || existingEncodingLevel.equals(ENC_PREPUBLICATION_STATUS)) // 5 || 8
return true;
break;
case ENC_ABBREVIATED_STATUS:
if (existingEncodingLevel.equals(ENC_PRELIMINARY_STATUS) || existingEncodingLevel.equals(ENC_PREPUBLICATION_STATUS)) // 5 || 8
return true;
break;
case ENC_MINIMAL_STATUS:
if (existingEncodingLevel.equals(ENC_PRELIMINARY_STATUS) || existingEncodingLevel.equals(ENC_PREPUBLICATION_STATUS)) // 5 || 8
return true;
break;
}
return false;
}
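// In summary: an existing record at marc:PartialPreliminaryLevel may be overwritten by any of
// the four incoming levels above; an existing record at marc:PrepublicationLevel may be
// overwritten by incoming Prepublication, Abbreviated or Minimal level records; any other
// existing level (including fully catalogued records) is never overwritten here.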
private void enrich(Document mutableDocument, Document withDocument)
{
JsonldSerializer serializer = new JsonldSerializer();
List<String[]> withTriples = serializer.deserialize(withDocument.data);
List<String[]> originalTriples = serializer.deserialize(mutableDocument.data);
Graph originalGraph = new Graph(originalTriples);
Graph withGraph = new Graph(withTriples);
// This is temporary, these special rules should not be hardcoded here, but rather obtained from (presumably)
// whelk-core's marcframe.json.
Map<String, Graph.PREDICATE_RULES> specialRules = new HashMap<>();
for (String term : m_repeatableTerms)
specialRules.put(term, Graph.PREDICATE_RULES.RULE_AGGREGATE);
specialRules.put("created", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL);
specialRules.put("controlNumber", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL);
specialRules.put("modified", Graph.PREDICATE_RULES.RULE_PREFER_INCOMING);
specialRules.put("marc:encLevel", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL);
originalGraph.enrichWith(withGraph, specialRules);
Map enrichedData = JsonldSerializer.serialize(originalGraph.getTriples(), m_repeatableTerms);
boolean deleteUnreferencedData = true;
JsonldSerializer.normalize(enrichedData, mutableDocument.getShortId(), deleteUnreferencedData);
mutableDocument.data = enrichedData;
}
private Document convertToRDF(MarcRecord _marcRecord, String id)
{
MarcRecord marcRecord = cloneMarcRecord(_marcRecord);
while (marcRecord.getControlfields("001").size() > 0)
marcRecord.getFields().remove(marcRecord.getControlfields("001").get(0));
marcRecord.addField(marcRecord.createControlfield("001", id));
// Filter out 887 fields, as the converter cannot/should not handle them
Iterator<Field> it = marcRecord.getFields().iterator();
while (it.hasNext()){
Field field = it.next();
if (field.getTag().equals("887"))
it.remove();
}
Map convertedData = m_marcFrameConverter.convert(MarcJSONConverter.toJSONMap(marcRecord), id);
Document convertedDocument = new Document(convertedData);
convertedDocument.deepReplaceId(Document.getBASE_URI().toString()+id);
m_linkfinder.normalizeIdentifiers(convertedDocument);
return convertedDocument;
}
private MarcRecord cloneMarcRecord(MarcRecord original)
{
MarcRecord clone = new MarcRecordImpl();
for (Field f : original.getFields())
clone.addField(f);
clone.setLeader(original.getLeader());
for (Object key : original.getProperties().keySet())
clone.setProperty( (String) key, original.getProperties().get(key));
clone.setOriginalData(original.getOriginalData());
return clone;
}
private Set<String> getDuplicates(MarcRecord marcRecord, String collection, String relatedWithBibResourceId)
throws SQLException, IsbnException
{
switch (collection)
{
case "bib":
return getBibDuplicates(marcRecord);
case "hold":
return getHoldDuplicates(marcRecord, relatedWithBibResourceId);
default:
return new HashSet<>();
}
}
private Set<String> getHoldDuplicates(MarcRecord marcRecord, String relatedWithBibResourceId)
throws SQLException
{
Set<String> duplicateIDs = new HashSet<>();
// Assumes the post being imported carries a valid libris id in 001, and "SE-LIBR" or "LIBRIS" in 003
duplicateIDs.addAll(getDuplicatesOnLibrisID(marcRecord, "hold"));
duplicateIDs.addAll(getDuplicatesOnHeldByHoldingFor(marcRecord, relatedWithBibResourceId));
return duplicateIDs;
}
private Set<String> getBibDuplicates(MarcRecord marcRecord)
throws SQLException, IsbnException
{
Set<String> duplicateIDs = new HashSet<>();
// Perform a temporary conversion to use for duplicate checking. This conversion will
// then be discarded. The real conversion cannot take place until any duplicates are
// found (because the correct ID needs to be known when converting). Chicken and egg problem.
Document rdfDoc = convertToRDF(marcRecord, IdGenerator.generate());
for (Parameters.DUPLICATION_TYPE dupType : m_parameters.getDuplicationTypes())
{
switch (dupType)
{
case DUPTYPE_ISBNA: // International Standard Book Number (only from subfield A)
for (String isbn : rdfDoc.getIsbnValues())
{
duplicateIDs.addAll(getDuplicatesOnIsbn( isbn.toUpperCase(), this::getOnIsbn_ps ));
}
break;
case DUPTYPE_ISBNZ: // International Standard Book Number (only from subfield Z)
for (String isbn : rdfDoc.getIsbnHiddenValues())
{
duplicateIDs.addAll(getDuplicatesOnIsbn( isbn.toUpperCase(), this::getOnIsbnHidden_ps ));
}
break;
case DUPTYPE_ISSNA: // International Standard Serial Number (only from marc 022_A)
for (String issn : rdfDoc.getIssnValues())
{
duplicateIDs.addAll(getDuplicatesOnIssn( issn.toUpperCase() ));
}
break;
case DUPTYPE_ISSNZ: // International Standard Serial Number (only from marc 022_Z)
for (String issn : rdfDoc.getIssnHiddenValues())
{
duplicateIDs.addAll(getDuplicatesOnIssnHidden( issn.toUpperCase() ));
}
break;
case DUPTYPE_035A:
// Unique id number in another system.
duplicateIDs.addAll(getDuplicatesOn035a(marcRecord));
break;
case DUPTYPE_LIBRISID:
// Assumes the post being imported carries a valid libris id in 001, and "SE-LIBR" or "LIBRIS" in 003
duplicateIDs.addAll(getDuplicatesOnLibrisID(marcRecord, "bib"));
break;
}
/* THIS FUNCTIONALITY IS TESTED (AND WAS USED IN PRODUCTION), BUT WAS DISABLED BECAUSE
OF UNCERTAINTY IF IT ACTUALLY IMPROVED THINGS. THE RESULT OF USING THIS CODE IS THAT
RECORDS WHERE THE SAME ISBN/ISSN IS (INCORRECTLY) USED FOR MORE THAN ONE PUBLICATION
ARE SPLIT INTO SEPARATE RECORDS (GOOD). THERE ARE HOWEVER CERTAIN COMBINATIONS OF TYPES
THAT SHOULD NOT BE SPLIT (BAD). NET IMPROVEMENT? INCONCLUSIVE.
// If the type currently being checked is NOT 001 or 035$a, filter the candidates based on
// instance @type and work @type ("materialtyp").
if (dupType != Parameters.DUPLICATION_TYPE.DUPTYPE_LIBRISID &&
dupType != Parameters.DUPLICATION_TYPE.DUPTYPE_035A)
{
Iterator<String> it = duplicateIDs.iterator();
while (it.hasNext())
{
String candidateID = it.next();
Document candidate = m_whelk.getStorage().loadEmbellished(candidateID, m_whelk.getJsonld());
String incomingInstanceType = rdfDoc.getThingType();
String existingInstanceType = candidate.getThingType();
String incomingWorkType = rdfDoc.getWorkType();
String existingWorkType = candidate.getWorkType();
// Unrelated work types? -> not a valid match
if (!m_whelk.getJsonld().isSubClassOf(incomingWorkType, existingWorkType) &&
!m_whelk.getJsonld().isSubClassOf(existingWorkType, incomingWorkType))
{
it.remove();
continue;
}
// If A is Electronic and B is Instance or vice versa, do not consider documents matching. This is
// frail since Electronic is a subtype of Instance.
// HERE BE DRAGONS.
if ((incomingInstanceType.equals("Electronic") && existingInstanceType.equals("Instance")) ||
(incomingInstanceType.equals("Instance") && existingInstanceType.equals("Electronic")))
{
it.remove();
}
}
}
*/
// If duplicates have already been found, do not try any more duplicate types.
if (!duplicateIDs.isEmpty())
break;
}
return duplicateIDs;
}
private List<String> getDuplicatesOnLibrisID(MarcRecord marcRecord, String collection)
throws SQLException
{
String librisId = DigId.grepLibrisId(marcRecord);
if (librisId == null)
return new ArrayList<>();
// completely numeric? = classic voyager id.
// In theory an xl id could (though insanely unlikely) also be numeric :(
if (librisId.matches("[0-9]+"))
{
librisId = "http://libris.kb.se/"+collection+"/"+librisId;
}
else if ( ! librisId.startsWith(Document.getBASE_URI().toString()))
{
librisId = Document.getBASE_URI().toString() + librisId;
}
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnId_ps(connection, librisId);
ResultSet resultSet = statement.executeQuery())
{
return collectIDs(resultSet);
}
}
private List<String> getDuplicatesOn035a(MarcRecord marcRecord)
throws SQLException
{
List<String> results = new ArrayList<>();
for (Field field : marcRecord.getFields("035"))
{
String systemNumber = DigId.grep035a( (Datafield) field );
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnSystemNumber_ps(connection, systemNumber);
ResultSet resultSet = statement.executeQuery())
{
results.addAll( collectIDs(resultSet) );
}
}
return results;
}
private List<String> getDuplicatesOnIsbn(String isbn, BiFunction<Connection, String, PreparedStatement> getPreparedStatement)
throws SQLException, IsbnException
{
boolean hyphens = false;
if (isbn == null)
return new ArrayList<>();
List<String> duplicateIDs = new ArrayList<>();
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getPreparedStatement.apply(connection, isbn);
ResultSet resultSet = statement.executeQuery())
{
duplicateIDs.addAll( collectIDs(resultSet) );
}
Isbn typedIsbn = IsbnParser.parse(isbn);
if (typedIsbn == null)
return duplicateIDs;
int otherType = typedIsbn.getType() == Isbn.ISBN10 ? Isbn.ISBN13 : Isbn.ISBN10;
String numericIsbn = typedIsbn.toString(hyphens);
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getPreparedStatement.apply(connection, numericIsbn);
ResultSet resultSet = statement.executeQuery())
{
duplicateIDs.addAll( collectIDs(resultSet) );
}
// Collect additional duplicates with the other ISBN form (if conversion is possible)
try
{
typedIsbn = typedIsbn.convert(otherType);
} catch (ConvertException ce)
{
return duplicateIDs;
}
numericIsbn = typedIsbn.toString(hyphens);
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getPreparedStatement.apply(connection, numericIsbn);
ResultSet resultSet = statement.executeQuery())
{
duplicateIDs.addAll( collectIDs(resultSet) );
}
return duplicateIDs;
}
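// Worked example (a well-known ISBN pair, used purely for illustration): for an incoming
// ISBN-10 "0306406152" the lookups above query that exact string, then its normalized form,
// and finally the converted ISBN-13 "9780306406157", so a duplicate stored under either form
// of the number is found. When conversion is impossible the method simply returns what it has.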
private List<String> getDuplicatesOnIssn(String issn)
throws SQLException
{
if (issn == null)
return new ArrayList<>();
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnIssn_ps(connection, issn);
ResultSet resultSet = statement.executeQuery())
{
return collectIDs(resultSet);
}
}
private List<String> getDuplicatesOnIssnHidden(String issn)
throws SQLException
{
if (issn == null)
return new ArrayList<>();
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnIssnHidden_ps(connection, issn);
ResultSet resultSet = statement.executeQuery())
{
return collectIDs(resultSet);
}
}
private List<String> getDuplicatesOnHeldByHoldingFor(MarcRecord marcRecord, String relatedWithBibResourceId)
throws SQLException
{
if (marcRecord.getFields("852").size() < 1)
return new ArrayList<>();
Datafield df = (Datafield) marcRecord.getFields("852").get(0);
if (df.getSubfields("b").size() < 1)
return new ArrayList<>();
String sigel = df.getSubfields("b").get(0).getData();
String library = LegacyIntegrationTools.legacySigelToUri(sigel);
try(Connection connection = m_whelk.getStorage().getConnection();
PreparedStatement statement = getOnHeldByHoldingFor_ps(connection, library, relatedWithBibResourceId);
ResultSet resultSet = statement.executeQuery())
{
return collectIDs(resultSet);
}
}
private PreparedStatement getOnId_ps(Connection connection, String id)
throws SQLException
{
String query = "SELECT lddb__identifiers.id FROM lddb__identifiers JOIN lddb ON lddb__identifiers.id = lddb.id WHERE lddb__identifiers.iri = ? AND lddb.deleted = false";
PreparedStatement statement = connection.prepareStatement(query);
statement.setString(1, id);
return statement;
}
/**
* "System number" is our ld equivalent of marc's 035a
*/
private PreparedStatement getOnSystemNumber_ps(Connection connection, String systemNumber)
throws SQLException
{
String query = "SELECT id FROM lddb WHERE deleted = false AND data#>'{@graph,0,identifiedBy}' @> ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setObject(1, "[{\"@type\": \"SystemNumber\", \"value\": \"" + systemNumber + "\"}]", java.sql.Types.OTHER);
return statement;
}
private PreparedStatement getOnIsbn_ps(Connection connection, String isbn)
{
try
{
String query = "SELECT id FROM lddb WHERE deleted = false AND data#>'{@graph,1,identifiedBy}' @> ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setObject(1, "[{\"@type\": \"ISBN\", \"value\": \"" + isbn + "\"}]", java.sql.Types.OTHER);
return statement;
} catch (SQLException se)
{
throw new RuntimeException(se);
}
}
private PreparedStatement getOnIssn_ps(Connection connection, String issn)
throws SQLException
{
String query = "SELECT id FROM lddb WHERE deleted = false AND data#>'{@graph,1,identifiedBy}' @> ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setObject(1, "[{\"@type\": \"ISSN\", \"value\": \"" + issn + "\"}]", java.sql.Types.OTHER);
return statement;
}
private PreparedStatement getOnIsbnHidden_ps(Connection connection, String isbn)
{
try
{
String query = "SELECT id FROM lddb WHERE deleted = false AND data#>'{@graph,1,indirectlyIdentifiedBy}' @> ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setObject(1, "[{\"@type\": \"ISBN\", \"value\": \"" + isbn + "\"}]", java.sql.Types.OTHER);
return statement;
} catch (SQLException se)
{
throw new RuntimeException(se);
}
}
private PreparedStatement getOnIssnHidden_ps(Connection connection, String issn)
throws SQLException
{
String query = "SELECT id FROM lddb WHERE deleted = false AND ( data#>'{@graph,1,identifiedBy}' @> ? OR data#>'{@graph,1,identifiedBy}' @> ?)";
PreparedStatement statement = connection.prepareStatement(query);
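// Two containment shapes are bound below, apparently because marc:canceledIssn may be stored
// either as a list of strings or as a single string: parameter 1 matches the list form and
// parameter 2 the plain string form.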
statement.setObject(1, "[{\"@type\": \"ISSN\", \"marc:canceledIssn\": [\"" + issn + "\"]}]", java.sql.Types.OTHER);
statement.setObject(2, "[{\"@type\": \"ISSN\", \"marc:canceledIssn\": \"" + issn + "\"}]", java.sql.Types.OTHER);
return statement;
}
private PreparedStatement getOnHeldByHoldingFor_ps(Connection connection, String heldBy, String holdingForId)
throws SQLException
{
String libraryUri = LegacyIntegrationTools.legacySigelToUri(heldBy);
// Here be dragons. The always-works query is this:
/*String query =
"SELECT lddb.id from lddb " +
"INNER JOIN lddb__identifiers id1 ON lddb.data#>>'{@graph,1,itemOf,@id}' = id1.iri " +
"INNER JOIN lddb__identifiers id2 ON id1.id = id2.id " +
"WHERE " +
"data#>>'{@graph,1,heldBy,@id}' = ? " +
"AND " +
"id2.iri = ?";*/
// This query REQUIRES that links be on the primary ID only. This works because of link-finding step 2, but if
// that should ever change this query would break.
String query = "SELECT id from lddb WHERE deleted = false AND data#>>'{@graph,1,heldBy,@id}' = ? AND data#>>'{@graph,1,itemOf,@id}' = ?";
PreparedStatement statement = connection.prepareStatement(query);
statement.setString(1, libraryUri);
statement.setString(2, holdingForId);
return statement;
}
private List<String> collectIDs(ResultSet resultSet)
throws SQLException
{
List<String> ids = new ArrayList<>();
while (resultSet.next())
{
ids.add(resultSet.getString("id"));
}
return ids;
}
//private class TooHighEncodingLevelException extends RuntimeException {}
}
|
package java.awt;
import java.awt.image.ColorModel;
import java.awt.image.ImageObserver;
import java.awt.image.ImageProducer;
import java.io.File;
import java.net.URL;
import java.util.Collections;
import java.util.Hashtable;
import java.util.WeakHashMap;
import java.util.Map;
import java.util.Iterator;
import sun.awt.image.ByteArrayImageSource;
import sun.awt.image.FileImageSource;
import sun.awt.image.URLImageSource;
import org.videolan.BDJXletContext;
import org.videolan.Logger;
abstract class BDToolkitBase extends Toolkit {
private EventQueue eventQueue = new EventQueue();
private BDGraphicsEnvironment localEnv = new BDGraphicsEnvironment();
private BDGraphicsConfiguration defaultGC = (BDGraphicsConfiguration)localEnv.getDefaultScreenDevice().getDefaultConfiguration();
private static Hashtable cachedImages = new Hashtable();
private static final Logger logger = Logger.getLogger(BDToolkit.class.getName());
// mapping of Components to the xlet context that created them, WeakHashMap<Component,BDJXletContext>
private static final Map contextMap = Collections.synchronizedMap(new WeakHashMap());
public BDToolkitBase () {
}
public static void setFocusedWindow(Window window) {
}
public static void shutdownDisc() {
try {
Toolkit toolkit = getDefaultToolkit();
if (toolkit instanceof BDToolkit) {
((BDToolkit)toolkit).shutdown();
}
} catch (Throwable t) {
logger.error("shutdownDisc() failed: " + t);
t.printStackTrace();
}
}
protected void shutdown() {
/*
if (eventQueue != null) {
BDJHelper.stopEventQueue(eventQueue);
eventQueue = null;
}
*/
cachedImages.clear();
contextMap.clear();
}
public Dimension getScreenSize() {
Rectangle dims = defaultGC.getBounds();
return new Dimension(dims.width, dims.height);
}
Graphics getGraphics(Window window) {
if (!(window instanceof BDRootWindow)) {
System.err.println("getGraphics(): not BDRootWindow");
throw new Error("Not implemented");
}
return new BDWindowGraphics((BDRootWindow)window);
}
GraphicsEnvironment getLocalGraphicsEnvironment() {
return localEnv;
}
public int getScreenResolution() {
return 72;
}
public ColorModel getColorModel() {
return defaultGC.getColorModel();
}
public String[] getFontList() {
return BDFontMetrics.getFontList();
}
public FontMetrics getFontMetrics(Font font) {
return BDFontMetrics.getFontMetrics(font);
}
static void clearCache(BDImage image) {
synchronized (cachedImages) {
Iterator i = cachedImages.entrySet().iterator();
while (i.hasNext()) {
Map.Entry entry = (Map.Entry) i.next();
if (entry.getValue() == image) {
i.remove();
return;
}
}
}
}
public Image getImage(String filename) {
if (cachedImages.containsKey(filename))
return (Image)cachedImages.get(filename);
Image newImage = createImage(filename);
if (newImage != null)
cachedImages.put(filename, newImage);
return newImage;
}
public Image getImage(URL url) {
if (cachedImages.containsKey(url))
return (Image)cachedImages.get(url);
Image newImage = createImage(url);
if (newImage != null)
cachedImages.put(url, newImage);
return newImage;
}
public Image createImage(String filename) {
if (!new File(filename).isAbsolute()) {
URL url = BDJXletContext.getCurrentResource(filename);
if (url != null) {
logger.warning("" + filename + " translated to " + url);
return createImage(url);
}
}
ImageProducer ip = new FileImageSource(filename);
Image newImage = createImage(ip);
return newImage;
}
public Image createImage(URL url) {
ImageProducer ip = new URLImageSource(url);
Image newImage = createImage(ip);
return newImage;
}
public Image createImage(byte[] imagedata,
int imageoffset,
int imagelength) {
ImageProducer ip = new ByteArrayImageSource(imagedata, imageoffset, imagelength);
Image newImage = createImage(ip);
return newImage;
}
public Image createImage(ImageProducer producer) {
return new BDImageConsumer(producer);
}
public Image createImage(Component component, int width, int height) {
return new BDImage(component, width, height, defaultGC);
}
public boolean prepareImage(Image image, int width, int height, ImageObserver observer) {
if (!(image instanceof BDImageConsumer))
return true;
BDImageConsumer img = (BDImageConsumer)image;
return img.prepareImage(observer);
}
public int checkImage(Image image, int width, int height,
ImageObserver observer) {
if (!(image instanceof BDImageConsumer)) {
return ImageObserver.ALLBITS;
}
BDImageConsumer img = (BDImageConsumer)image;
return img.checkImage(observer);
}
public void beep() {
}
public static void addComponent(Component component) {
BDJXletContext context = BDJXletContext.getCurrentContext();
if (context == null) {
logger.warning("addComponent() outside of app context");
return;
}
contextMap.put(component, context);
}
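// getEventQueue(Component) below walks up the component's parent chain until it finds a
// component registered through addComponent() above, and returns the event queue of that
// component's owning xlet context; if no registered ancestor is found it logs a warning and
// returns null.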
public static EventQueue getEventQueue(Component component) {
if (component != null) {
do {
BDJXletContext ctx = (BDJXletContext)contextMap.get(component);
if (ctx != null) {
EventQueue eq = ctx.getEventQueue();
if (eq == null) {
logger.warning("getEventQueue() failed: no context event queue");
}
return eq;
}
component = component.getParent();
} while (component != null);
logger.warning("getEventQueue() failed: no context");
return null;
}
logger.warning("getEventQueue() failed: no component");
return null;
}
protected EventQueue getSystemEventQueueImpl() {
BDJXletContext ctx = BDJXletContext.getCurrentContext();
if (ctx != null) {
EventQueue eq = ctx.getEventQueue();
if (eq != null) {
return eq;
}
}
logger.warning("getSystemEventQueue(): no context");
return eventQueue;
}
}
|
package nl.sense_os.service.motion;
import java.math.BigDecimal;
import java.util.ArrayList;
import nl.sense_os.service.R;
import nl.sense_os.service.constants.SenseDataTypes;
import nl.sense_os.service.constants.SensePrefs;
import nl.sense_os.service.constants.SensePrefs.Main.Motion;
import nl.sense_os.service.constants.SensorData.DataPoint;
import nl.sense_os.service.constants.SensorData.SensorNames;
import nl.sense_os.service.provider.SNTP;
import nl.sense_os.service.states.EpiStateMonitor;
import org.json.JSONArray;
import org.json.JSONObject;
import android.annotation.SuppressLint;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Build;
import android.os.Handler;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.util.FloatMath;
import android.util.Log;
/**
* Represents the main motion sensor. Listens for events from the Android SensorManager and parses
* the results.<br/>
* <br/>
* The resulting data is divided over several separate sensors in CommonSense:
* <ul>
* <li>accelerometer</li>
* <li>gyroscope</li>
* <li>motion energy</li>
* <li>linear acceleration</li>
* </ul>
* Besides these basic sensors, the sensor can also gather data for high-speed epilepsy detection
* and fall detection.
*
* @author Ted Schmidt <ted@sense-os.nl>
* @author Steven Mulder <steven@sense-os.nl>
*/
public class MotionSensor implements SensorEventListener {
private static MotionSensor instance = null;
protected MotionSensor(Context context) {
this.context = context;
}
public static MotionSensor getInstance(Context context) {
if(instance == null) {
instance = new MotionSensor(context);
}
return instance;
}
/**
* BroadcastReceiver that listens for screen state changes. Re-registers the motion sensor when
* the screen turns off.
*/
private class ScreenOffListener extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
// Check action just to be on the safe side.
if (false == intent.getAction().equals(Intent.ACTION_SCREEN_OFF)) {
return;
}
SharedPreferences prefs = context.getSharedPreferences(SensePrefs.MAIN_PREFS,
Context.MODE_PRIVATE);
boolean useFix = prefs.getBoolean(Motion.SCREENOFF_FIX, false);
if (useFix) {
// wait half a second and re-register
Runnable restartSensing = new Runnable() {
@Override
public void run() {
// Unregisters the motion listener and registers it again.
// Log.v(TAG, "Screen went off, re-registering the Motion sensor");
stopMotionSensing();
startMotionSensing(sampleDelay);
};
};
new Handler().postDelayed(restartSensing, 500);
}
}
}
private static final String TAG = "Sense Motion";
private final BroadcastReceiver screenOffListener = new ScreenOffListener();
private final FallDetector fallDetector = new FallDetector();
private final Context context;
private boolean isFallDetectMode;
private boolean isEnergyMode;
private boolean isEpiMode;
private boolean isUnregisterWhenIdle;
private boolean firstStart = true;
private ArrayList<Sensor> sensors;
private final long[] lastSampleTimes = new long[50];
private Handler motionHandler = new Handler();
private boolean motionSensingActive = false;
private Runnable motionThread = null;
private long sampleDelay = 0; // in milliseconds
private long[] lastLocalSampleTimes = new long[50];
private long localBufferTime = 15 * 1000;
private long firstTimeSend = 0;
private JSONArray[] dataBuffer = new JSONArray[10];
// members for calculating the avg speed change during a time period, for motion energy sensor
private static final long ENERGY_SAMPLE_LENGTH = 500;
private long energySampleStart = 0;
private long prevEnergySampleTime;
private double avgSpeedChange;
private int avgSpeedCount;
private boolean hasLinAccSensor;
private float[] gravity = { 0, 0, SensorManager.GRAVITY_EARTH };
// members for waking up the device for sampling
private static final String ACTION_WAKEUP_ALARM = "nl.sense_os.service.MotionWakeUp";
private static final int ALARM_ID = 256;
private BroadcastReceiver wakeReceiver;
private WakeLock wakeLock;
private boolean isRegistered;
private long lastRegistered = -1;
private static final long DELAY_AFTER_REGISTRATION = 500;
/**
* Calculates the linear acceleration of a raw accelerometer sample. Tries to determine the
* gravity component by putting the signal through a first-order low-pass filter.
*
* @param values
* Array with accelerometer values for the three axes.
* @return The approximate linear acceleration of the sample.
*/
private float[] calcLinAcc(float[] values) {
// low-pass filter raw accelerometer data to approximate the gravity
final float alpha = 0.8f; // filter constants should depend on sample rate
gravity[0] = alpha * gravity[0] + (1 - alpha) * values[0];
gravity[1] = alpha * gravity[1] + (1 - alpha) * values[1];
gravity[2] = alpha * gravity[2] + (1 - alpha) * values[2];
return new float[] { values[0] - gravity[0], values[1] - gravity[1], values[2] - gravity[2] };
}
@SuppressWarnings("deprecation")
private JSONObject createJsonValue(SensorEvent event) {
final Sensor sensor = event.sensor;
final JSONObject json = new JSONObject();
int axis = 0;
try {
for (double value : event.values) {
// scale to three decimal precision
value = BigDecimal.valueOf(value).setScale(3, 0).doubleValue();
switch (axis) {
case 0:
if (sensor.getType() == Sensor.TYPE_ACCELEROMETER
|| sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD
|| sensor.getType() == Sensor.TYPE_LINEAR_ACCELERATION) {
json.put("x-axis", value);
} else if (sensor.getType() == Sensor.TYPE_ORIENTATION
|| sensor.getType() == Sensor.TYPE_GYROSCOPE) {
json.put("azimuth", value);
} else {
Log.e(TAG, "Unexpected sensor type creating JSON value");
return null;
}
break;
case 1:
if (sensor.getType() == Sensor.TYPE_ACCELEROMETER
|| sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD
|| sensor.getType() == Sensor.TYPE_LINEAR_ACCELERATION) {
json.put("y-axis", value);
} else if (sensor.getType() == Sensor.TYPE_ORIENTATION
|| sensor.getType() == Sensor.TYPE_GYROSCOPE) {
json.put("pitch", value);
} else {
Log.e(TAG, "Unexpected sensor type creating JSON value");
return null;
}
break;
case 2:
if (sensor.getType() == Sensor.TYPE_ACCELEROMETER
|| sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD
|| sensor.getType() == Sensor.TYPE_LINEAR_ACCELERATION) {
json.put("z-axis", value);
} else if (sensor.getType() == Sensor.TYPE_ORIENTATION
|| sensor.getType() == Sensor.TYPE_GYROSCOPE) {
json.put("roll", value);
} else {
Log.e(TAG, "Unexpected sensor type creating JSON value");
return null;
}
break;
default:
Log.w(TAG, "Unexpected sensor value! More than three axes?!");
}
axis++;
}
} catch (Exception e) {
Log.e(TAG, "Exception creating motion JSON value", e);
return null;
}
return json;
}
/**
* Measures the speed change and determines the average, for the motion energy sensor.
*
* @param event
* The sensor change event with accelerometer or linear acceleration data.
*/
private void doEnergySample(SensorEvent event) {
float[] linAcc = null;
// approximate linear acceleration if we have no special sensor for it
if (!hasLinAccSensor && Sensor.TYPE_ACCELEROMETER == event.sensor.getType()) {
linAcc = calcLinAcc(event.values);
} else if (hasLinAccSensor && Sensor.TYPE_LINEAR_ACCELERATION == event.sensor.getType()) {
linAcc = event.values;
} else {
// sensor is not the right type
return;
}
// calculate speed change and adjust average
if (null != linAcc) {
// record the start of the motion sample
if (avgSpeedCount == 0) {
energySampleStart = System.currentTimeMillis();
}
float timeStep = (System.currentTimeMillis() - prevEnergySampleTime) / 1000f;
prevEnergySampleTime = System.currentTimeMillis();
if (timeStep > 0 && timeStep < 1) {
float accLength = FloatMath.sqrt((float) (Math.pow(linAcc[0], 2)
+ Math.pow(linAcc[1], 2) + Math.pow(linAcc[2], 2)));
// float speedChange = accLength * timeStep;
// Log.v(TAG, "Speed change: " + speedChange);
avgSpeedChange = (avgSpeedCount * avgSpeedChange + accLength) / (avgSpeedCount + 1);
avgSpeedCount++;
}
}
}
private void doEpiSample(Sensor sensor, JSONObject json) {
if (dataBuffer[sensor.getType()] == null) {
dataBuffer[sensor.getType()] = new JSONArray();
}
dataBuffer[sensor.getType()].put(json);
if (lastLocalSampleTimes[sensor.getType()] == 0) {
lastLocalSampleTimes[sensor.getType()] = System.currentTimeMillis();
}
if (System.currentTimeMillis() > lastLocalSampleTimes[sensor.getType()] + localBufferTime) {
// send the stuff
// pass message to the MsgHandler
Intent i = new Intent(context.getString(R.string.action_sense_new_data));
i.putExtra(DataPoint.SENSOR_NAME, SensorNames.ACCELEROMETER_EPI);
i.putExtra(DataPoint.SENSOR_DESCRIPTION, sensor.getName());
i.putExtra(
DataPoint.VALUE,
"{\"interval\":"
+ Math.round(localBufferTime / dataBuffer[sensor.getType()].length())
+ ",\"data\":" + dataBuffer[sensor.getType()].toString() + "}");
i.putExtra(DataPoint.DATA_TYPE, SenseDataTypes.JSON_TIME_SERIES);
i.putExtra(DataPoint.TIMESTAMP, lastLocalSampleTimes[sensor.getType()]);
context.startService(i);
dataBuffer[sensor.getType()] = new JSONArray();
lastLocalSampleTimes[sensor.getType()] = System.currentTimeMillis();
if (firstTimeSend == 0) {
firstTimeSend = System.currentTimeMillis();
}
}
}
private void doFallSample(SensorEvent event) {
float aX = event.values[1];
float aY = event.values[0];
float aZ = event.values[2];
float accVecSum = FloatMath.sqrt(aX * aX + aY * aY + aZ * aZ);
if (fallDetector.fallDetected(accVecSum)) {
sendFallMessage(true); // send msg
}
}
public long getSampleDelay() {
return sampleDelay;
}
/**
* @return Time stamp of the oldest sample, or -1 if not all sensors have sampled yet.
*/
private long getOldestSampleTime() {
int count = 0;
long oldestSample = Long.MAX_VALUE;
for (long time : lastSampleTimes) {
if (time != 0) {
count++;
if (time < oldestSample) {
oldestSample = time;
}
}
}
if (count < sensors.size()) {
return -1;
} else {
return oldestSample;
}
}
/**
* @return true if the sensor is currently unregistered and (almost) a full sample delay has passed since the oldest sample, so it should be registered again.
*/
private boolean isTimeToRegister() {
return motionSensingActive
&& !isRegistered
&& System.currentTimeMillis() - getOldestSampleTime() + DELAY_AFTER_REGISTRATION > sampleDelay;
}
/**
* @return true if the listener can safely be unregistered, i.e. all active sensors have recently produced a data point and no active mode requires continuous sampling.
*/
private boolean isTimeToUnregister() {
boolean unregister = isUnregisterWhenIdle;
// only unregister if all sensors have submitted a new sample
long oldestSample = getOldestSampleTime();
if (oldestSample == -1) {
unregister = false;
} else {
unregister = unregister && (lastRegistered < oldestSample);
}
// only unregister when sample delay is large enough
unregister = unregister && sampleDelay > DELAY_AFTER_REGISTRATION;
// only unregister when fall detection is not active
unregister = unregister && !isFallDetectMode;
// only unregister when energy sample has finished
unregister = unregister
&& (!isEnergyMode || (energySampleStart != 0 && System.currentTimeMillis()
- energySampleStart > ENERGY_SAMPLE_LENGTH));
return unregister;
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
// do nothing
}
@SuppressWarnings("deprecation")
@Override
public void onSensorChanged(SensorEvent event) {
if (!motionSensingActive) {
Log.w(TAG, "Motion sensor value received when sensor is inactive! (Re)try stopping...");
stopMotionSensing();
return;
}
final Sensor sensor = event.sensor;
// pass sensor value to fall detector first
if (isFallDetectMode && sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
doFallSample(event);
}
// if motion energy sensor is active, determine energy of every sample
boolean isEnergySample = !hasLinAccSensor && Sensor.TYPE_ACCELEROMETER == sensor.getType()
|| hasLinAccSensor && Sensor.TYPE_LINEAR_ACCELERATION == sensor.getType();
if (isEnergyMode && isEnergySample) {
doEnergySample(event);
}
// check sensor delay
if (System.currentTimeMillis() > lastSampleTimes[sensor.getType()] + sampleDelay) {
lastSampleTimes[sensor.getType()] = System.currentTimeMillis();
} else {
// new sample is too soon
// unregister the sensor listener when we can
if (isTimeToUnregister()) {
// unregister the listener and register again after roughly sampleDelay milliseconds
unregisterSensors();
motionHandler.postDelayed(motionThread = new Runnable() {
@Override
public void run() {
registerSensors();
}
}, sampleDelay - DELAY_AFTER_REGISTRATION);
}
return;
}
// Epi-mode is only interested in the accelerometer
if (isEpiMode && sensor.getType() != Sensor.TYPE_ACCELEROMETER) {
return;
}
// determine sensor name
String sensorName = "";
switch (sensor.getType()) {
case Sensor.TYPE_ACCELEROMETER:
sensorName = SensorNames.ACCELEROMETER;
break;
case Sensor.TYPE_ORIENTATION:
sensorName = SensorNames.ORIENT;
break;
case Sensor.TYPE_MAGNETIC_FIELD:
sensorName = SensorNames.MAGNETIC_FIELD;
break;
case Sensor.TYPE_GYROSCOPE:
sensorName = SensorNames.GYRO;
break;
case Sensor.TYPE_LINEAR_ACCELERATION:
sensorName = SensorNames.LIN_ACCELERATION;
break;
default:
Log.w(TAG, "Unexpected sensor type: " + sensor.getType());
return;
}
// prepare JSON object to send to MsgHandler
final JSONObject json = createJsonValue(event);
if (null == json) {
// error occurred creating the JSON object
return;
}
// add the data to the buffer if we are in Epi-mode:
if (isEpiMode) {
doEpiSample(sensor, json);
} else {
sendNormalMessage(sensor, sensorName, json);
}
// send motion energy message
if (isEnergyMode && isEnergySample) {
sendEnergyMessage();
}
}
/**
* Registers for updates from the device's motion sensors.
*/
private synchronized void registerSensors() {
if (!isRegistered) {
// Log.v(TAG, "Register the motion sensor for updates");
SensorManager mgr = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
int delay = isFallDetectMode || isEpiMode || isEnergyMode ? SensorManager.SENSOR_DELAY_GAME
: SensorManager.SENSOR_DELAY_NORMAL;
for (Sensor sensor : sensors) {
mgr.registerListener(this, sensor, delay);
}
isRegistered = true;
} else {
// Log.v(TAG, "Did not register for motion sensor updates: already registered");
}
}
/**
* Sends message with average motion energy to the MsgHandler.
*/
private void sendEnergyMessage() {
if (avgSpeedCount > 1) {
// Log.v(TAG, NAME_MOTION_ENERGY + " value. Count: " + avgSpeedCount);
// round to three decimals
float value = BigDecimal.valueOf(avgSpeedChange).setScale(3, 0).floatValue();
// prepare intent to send to MsgHandler
Intent i = new Intent(context.getString(R.string.action_sense_new_data));
i.putExtra(DataPoint.SENSOR_NAME, SensorNames.MOTION_ENERGY);
i.putExtra(DataPoint.SENSOR_DESCRIPTION, SensorNames.MOTION_ENERGY);
i.putExtra(DataPoint.VALUE, value);
i.putExtra(DataPoint.DATA_TYPE, SenseDataTypes.FLOAT);
i.putExtra(DataPoint.TIMESTAMP, SNTP.getInstance().getTime());
context.startService(i);
}
avgSpeedChange = 0;
avgSpeedCount = 0;
}
private void sendFallMessage(boolean fall) {
Intent i = new Intent(context.getString(R.string.action_sense_new_data));
i.putExtra(DataPoint.SENSOR_NAME, SensorNames.FALL_DETECTOR);
i.putExtra(DataPoint.SENSOR_DESCRIPTION, fallDetector.demo ? "demo fall" : "human fall");
i.putExtra(DataPoint.VALUE, fall);
i.putExtra(DataPoint.DATA_TYPE, SenseDataTypes.BOOL);
i.putExtra(DataPoint.TIMESTAMP, SNTP.getInstance().getTime());
context.startService(i);
}
private void sendNormalMessage(Sensor sensor, String sensorName, JSONObject json) {
Intent i = new Intent(context.getString(R.string.action_sense_new_data));
i.putExtra(DataPoint.SENSOR_NAME, sensorName);
i.putExtra(DataPoint.SENSOR_DESCRIPTION, sensor.getName());
i.putExtra(DataPoint.VALUE, json.toString());
i.putExtra(DataPoint.DATA_TYPE, SenseDataTypes.JSON);
i.putExtra(DataPoint.TIMESTAMP, SNTP.getInstance().getTime());
context.startService(i);
}
public void setSampleDelay(long sampleDelay) {
this.sampleDelay = sampleDelay;
}
@SuppressWarnings("deprecation")
public void startMotionSensing(long sampleDelay) {
motionHandler = new Handler();
final SharedPreferences mainPrefs = context.getSharedPreferences(SensePrefs.MAIN_PREFS,
Context.MODE_PRIVATE);
isEpiMode = mainPrefs.getBoolean(Motion.EPIMODE, false);
isEnergyMode = mainPrefs.getBoolean(Motion.MOTION_ENERGY, false);
isUnregisterWhenIdle = mainPrefs.getBoolean(Motion.UNREG, true);
if (isEpiMode) {
sampleDelay = 0;
Log.v(TAG, "Start epi state sensor");
context.startService(new Intent(context, EpiStateMonitor.class));
}
// check if the fall detector is enabled
isFallDetectMode = mainPrefs.getBoolean(Motion.FALL_DETECT, false);
fallDetector.demo = mainPrefs.getBoolean(Motion.FALL_DETECT_DEMO, false);
if (fallDetector.demo) {
isFallDetectMode = true;
Log.v(TAG, "Start epi state sensor");
context.startService(new Intent(context, EpiStateMonitor.class));
}
if (firstStart && isFallDetectMode) {
sendFallMessage(false);
firstStart = false;
}
SensorManager mgr = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
sensors = new ArrayList<Sensor>();
// add accelerometer
if (null != mgr.getDefaultSensor(Sensor.TYPE_ACCELEROMETER)) {
sensors.add(mgr.getDefaultSensor(Sensor.TYPE_ACCELEROMETER));
}
if (!isEpiMode) {
// add orientation sensor
if (null != mgr.getDefaultSensor(Sensor.TYPE_ORIENTATION)) {
sensors.add(mgr.getDefaultSensor(Sensor.TYPE_ORIENTATION));
}
// add gyroscope
if (null != mgr.getDefaultSensor(Sensor.TYPE_GYROSCOPE)) {
sensors.add(mgr.getDefaultSensor(Sensor.TYPE_GYROSCOPE));
}
// add linear acceleration
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
// only devices with gingerbread+ have linear acceleration sensors
if (null != mgr.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION)) {
sensors.add(mgr.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION));
hasLinAccSensor = true;
}
}
}
motionSensingActive = true;
setSampleDelay(sampleDelay);
registerSensors();
startWakeUpAlarms();
enableScreenOffListener(true);
}
private void enableScreenOffListener(boolean enable) {
if (enable) {
// Register the receiver for SCREEN OFF events
IntentFilter filter = new IntentFilter(Intent.ACTION_SCREEN_OFF);
context.registerReceiver(screenOffListener, filter);
} else {
// Unregister the receiver for SCREEN OFF events
try {
context.unregisterReceiver(screenOffListener);
} catch (IllegalArgumentException e) {
// Log.v(TAG, "Ignoring exception when unregistering screen off listener");
}
}
}
/**
* Sets a periodic alarm that makes sure the device is awake for a short while for every
* sample.
*/
@SuppressLint("Wakelock")
private void startWakeUpAlarms() {
// register receiver for wake up alarm
wakeReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
// Log.v(TAG, "Wake up! " + new SimpleDateFormat("k:mm:ss.SSS").format(new Date()));
if (null == wakeLock) {
PowerManager powerMgr = (PowerManager) context
.getSystemService(Context.POWER_SERVICE);
wakeLock = powerMgr.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, TAG);
}
if (!wakeLock.isHeld()) {
Log.i(TAG, "Acquire wake lock for 500ms");
wakeLock.acquire(500);
} else {
// Log.v(TAG, "Wake lock already held");
}
if (isTimeToRegister()) {
// Log.v(TAG, "Time to register!");
registerSensors();
}
}
};
context.registerReceiver(wakeReceiver, new IntentFilter(ACTION_WAKEUP_ALARM));
// schedule alarm to go off and wake up the receiver
Intent wakeUp = new Intent(ACTION_WAKEUP_ALARM);
PendingIntent operation = PendingIntent.getBroadcast(context, ALARM_ID, wakeUp, 0);
final AlarmManager mgr = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
mgr.cancel(operation);
mgr.setRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, 0, sampleDelay, operation);
}
/**
* Unregisters the listener for updates from the motion sensors, and stops waking up the device
* for sampling.
*/
public void stopMotionSensing() {
// Log.v(TAG, "Stop motion sensor");
try {
motionSensingActive = false;
unregisterSensors();
if (motionThread != null) {
motionHandler.removeCallbacks(motionThread);
motionThread = null;
}
} catch (Exception e) {
Log.e(TAG, e.getMessage());
}
if (isEpiMode || isFallDetectMode) {
Log.v(TAG, "Stop epi state sensor");
context.stopService(new Intent(context, EpiStateMonitor.class));
}
enableScreenOffListener(false);
stopWakeUpAlarms();
}
/**
* Stops the periodic alarm to wake up the device and take a sample.
*/
private void stopWakeUpAlarms() {
// unregister wake up receiver
try {
context.unregisterReceiver(wakeReceiver);
} catch (IllegalArgumentException e) {
// do nothing
}
// cancel the wake up alarm
Intent wakeUp = new Intent(ACTION_WAKEUP_ALARM);
PendingIntent operation = PendingIntent.getBroadcast(context, ALARM_ID, wakeUp, 0);
final AlarmManager mgr = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
mgr.cancel(operation);
}
private synchronized void unregisterSensors() {
if (isRegistered) {
// Log.v(TAG, "Unregister the motion sensor for updates");
((SensorManager) context.getSystemService(Context.SENSOR_SERVICE))
.unregisterListener(this);
lastRegistered = System.currentTimeMillis();
} else {
// Log.v(TAG, "Did not unregister for motion sensor updates: already unregistered");
}
isRegistered = false;
}
}
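/*
* Editor's note: a minimal usage sketch, not part of the original library. It only uses methods
* that MotionSensor declares above (getInstance, startMotionSensing, stopMotionSensing); the
* 5000 ms sample delay and the idea that the Context comes from a running Sense service are
* assumptions made for illustration.
*/
class MotionSensorUsageSketch {
static void sampleOnce(Context context) {
// obtain the shared instance and start sampling with an assumed 5 second delay
MotionSensor sensor = MotionSensor.getInstance(context);
sensor.startMotionSensing(5000);
// ... later, e.g. when the hosting service shuts down, stop sampling again
sensor.stopMotionSensing();
}
}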
|
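/*
* Editor's note: a self-contained sketch (plain Java, no Android dependencies) of the first-order
* low-pass gravity filter that MotionSensor.calcLinAcc() above describes: gravity is tracked as
* g = alpha * g + (1 - alpha) * a and subtracted from the raw accelerometer sample to approximate
* linear acceleration. The alpha of 0.8 and the initial gravity estimate of 9.81 m/s^2 on the
* z-axis come from the class above; the sample values below are made up for illustration.
*/
class LowPassGravitySketch {
public static void main(String[] args) {
float alpha = 0.8f;
float[] gravity = { 0f, 0f, 9.81f };
float[][] samples = { { 0.1f, 0.2f, 9.9f }, { 0.3f, -0.1f, 10.2f }, { 0.0f, 0.1f, 9.7f } };
for (float[] a : samples) {
float[] linAcc = new float[3];
for (int i = 0; i < 3; i++) {
// low-pass filter the raw sample to track the gravity component
gravity[i] = alpha * gravity[i] + (1 - alpha) * a[i];
// what remains after removing gravity approximates linear acceleration
linAcc[i] = a[i] - gravity[i];
}
// magnitude of the linear acceleration, as used for the motion energy sensor
double magnitude = Math.sqrt(linAcc[0] * linAcc[0] + linAcc[1] * linAcc[1] + linAcc[2] * linAcc[2]);
System.out.println("linear acceleration magnitude: " + magnitude);
}
}
}
|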
package org.jasig.portal.car;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.security.SecureClassLoader;
import java.util.StringTokenizer;
import org.jasig.portal.services.LogService;
/**
* Loads classes and resources from installed CARs via the CarResources class.
* If classes are visible via the parent class loader then they will be used
* in place of those in the CARs. This is a singleton so that we have a single
* unified class namespace for all car resources preventing linkage errors and
* class cast exceptions.
* @author Mark Boyd <mark.boyd@engineer.com>
* @version $Revision$
*/
public class CarClassLoader
extends SecureClassLoader
{
public final static String RCS_ID = "@(#) $Header$";
/**
Create a CarClassLoader. This method has package scope so that
CarResources can instantiate it and hold the single instance to be
acquired via its getClassLoader() method.
*/
CarClassLoader()
{
super();
}
/**
Create a CarClassLoader with the indicated parent class loader. See
comment for zero parameter constructor for description of package scoping.
*/
CarClassLoader( ClassLoader cl )
{
super( cl );
}
/**
Overrides findClass to return classes that are available from
installed CAR files. Class loading proceeds with the parent
classloader first, which delegates to this class loader if the
classes aren't found.
*/
public Class findClass( final String name )
throws ClassNotFoundException
{
PrivilegedExceptionAction action = new PrivilegedExceptionAction()
{
public Object run()
throws ClassNotFoundException
{
byte[] buf = null;
String pkgName = getPackageName(name);
InputStream in = null;
try
{
String file = name.replace( '.', '/' ) + ".class";
CarResources crs = CarResources.getInstance();
int size = (int) crs.getResourceSize( file );
in = crs.getResourceAsStream( file );
if ( in == null || size == -1 )
throw new Exception( "Car resource " +
file + " not found." );
buf = new byte[size];
int offSet = 0;
int totalRead = 0;
int bytesRead = 0;
int remaining = size;
while( totalRead < size )
{
bytesRead = in.read( buf, offSet, remaining );
// stop if the stream ends before the expected number of bytes was read
if ( bytesRead < 0 )
throw new IOException( "Unexpected end of stream reading car resource " + file );
remaining -= bytesRead;
offSet += bytesRead;
totalRead += bytesRead;
}
}
catch( Exception e )
{
throw new ClassNotFoundException( name,
e );
} finally {
try {
if (in != null) {
in.close();
}
} catch (IOException ioe) {
LogService.log(LogService.ERROR,
"CarClassLoader::findClass() Could not close inputStream "
+ ioe);
}
}
// package must be defined prior to defining
// the class.
createPackage( pkgName );
return defineTheClass( name, buf, 0, buf.length);
}
};
try
{
return ( Class ) AccessController.doPrivileged( action );
}
catch( PrivilegedActionException pae )
{
throw (ClassNotFoundException) pae.getException();
}
}
/**
Create and return the Class object from the passed in class bytes. This
code enables the inner class used in findClass() to call into the
superclass's defineClass method. It has protected scope in the
superclass and hence is not visible to an innner class but is visible
to this class.
*/
private Class defineTheClass( String n, byte[] b, int offset, int len )
{
return super.defineClass( n, b, offset, len );
}
/**
* Defines the package for the class being loaded if it has not been
* defined yet; per the JavaDoc for ClassLoader, getPackage() returns
* null for packages that have not been defined, and the package
* must be created prior to defining the Class.
*
* @param pkgName the package to create.
**/
private void createPackage(String pkgName)
{
// package must be defined before the class
// according to the API docs.
try
{
if ( null != pkgName && null == getPackage(pkgName))
definePackage( pkgName, "", "", "", "", "", "", null );
}
catch( IllegalArgumentException iae )
{
// do nothing, assume a synchronization issue
// where one thread had set it prior to another
// doing so.. small window, but could happen.
}
}
/**
* Returns a package name from a package/classname path. If the
* package is not available (default package), then null is
* returned.
*
* @param name the package/class name.
* @return the package name (dot notation) or null if not found
*/
private String getPackageName( String name )
{
if ( name.indexOf(".") != -1 )
{
StringBuffer sb = new StringBuffer();
StringTokenizer st = new StringTokenizer(name,".");
int tokens = st.countTokens();
int count = 1;
while(st.hasMoreTokens())
{
if ( count < tokens )
{
sb.append(st.nextToken());
if ( count != (tokens-1) )
sb.append(".");
}
else
break;
count++;
}
return sb.toString();
}
else
return null;
}
/**
Returns a URL pointing to a car resource if a suitable resource is
found in the loaded set of CAR files or null if one is not found.
*/
public URL findResource( String res )
{
return CarResources.getInstance().findResource( res );
}
}
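/*
Editor's note: a minimal usage sketch, not part of uPortal. It relies only on what the comments
above state: CarResources holds the single CarClassLoader instance, exposed via its
getClassLoader() method. The channel class name is a made-up placeholder.
*/
class CarClassLoaderUsageSketch
{
static Class loadFromCar() throws ClassNotFoundException
{
// the parent class loader is consulted first; only classes it cannot resolve fall
// through to CarClassLoader.findClass() and are read from the installed CARs
ClassLoader loader = CarResources.getInstance().getClassLoader();
return Class.forName( "org.example.SomeCarDeployedChannel", true, loader );
}
}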
|
package soot.jimple.infoflow.solver.fastSolver;
import heros.DontSynchronize;
import heros.FlowFunction;
import heros.FlowFunctionCache;
import heros.FlowFunctions;
import heros.IFDSTabulationProblem;
import heros.InterproceduralCFG;
import heros.SynchronizedBy;
import heros.ZeroedFlowFunctions;
import heros.solver.CountingThreadPoolExecutor;
import heros.solver.PathEdge;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import soot.SootMethod;
import soot.Unit;
import soot.jimple.infoflow.util.ConcurrentHashSet;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
/**
* A solver for an {@link IFDSTabulationProblem}. This solver is not based on the IDESolver
* implementation in Heros for performance reasons.
*
* @param <N> The type of nodes in the interprocedural control-flow graph. Typically {@link Unit}.
* @param <D> The type of data-flow facts to be computed by the tabulation problem.
* @param <M> The type of objects used to represent methods. Typically {@link SootMethod}.
* @param <I> The type of inter-procedural control-flow graph being used.
* @see IFDSTabulationProblem
*/
public class IFDSSolver<N,D,M,I extends InterproceduralCFG<N, M>> {
public static CacheBuilder<Object, Object> DEFAULT_CACHE_BUILDER = CacheBuilder.newBuilder()
.concurrencyLevel(Runtime.getRuntime().availableProcessors()).initialCapacity(10000).softValues();
protected static final Logger logger = LoggerFactory.getLogger(IFDSSolver.class);
//enable with -Dorg.slf4j.simpleLogger.defaultLogLevel=trace
public static final boolean DEBUG = logger.isDebugEnabled();
protected CountingThreadPoolExecutor executor;
@DontSynchronize("only used by single thread")
protected int numThreads;
@SynchronizedBy("thread safe data structure, consistent locking when used")
protected final JumpFunctions<N,D> jumpFn;
@SynchronizedBy("thread safe data structure, only modified internally")
protected final I icfg;
//stores summaries that were queried before they were computed
//see CC 2010 paper by Naeem, Lhotak and Rodriguez
@SynchronizedBy("consistent lock on 'incoming'")
protected final Table<N,D,Map<N,Set<D>>> endSummary = HashBasedTable.create();
//edges going along calls
//see CC 2010 paper by Naeem, Lhotak and Rodriguez
@SynchronizedBy("consistent lock on field")
protected final Table<N,D,Map<N,Set<D>>> incoming = HashBasedTable.create();
@DontSynchronize("stateless")
protected final FlowFunctions<N, D, M> flowFunctions;
@DontSynchronize("only used by single thread")
protected final Map<N,Set<D>> initialSeeds;
@DontSynchronize("benign races")
public long propagationCount;
@DontSynchronize("stateless")
protected final D zeroValue;
@DontSynchronize("readOnly")
protected final FlowFunctionCache<N,D,M> ffCache;
@DontSynchronize("readOnly")
protected final boolean followReturnsPastSeeds;
/**
* Creates a solver for the given problem, caching flow functions with the default cache builder.
* The solver must then be started by calling {@link #solve()}.
*/
public IFDSSolver(IFDSTabulationProblem<N,D,M,I> tabulationProblem) {
this(tabulationProblem, DEFAULT_CACHE_BUILDER);
}
/**
* Creates a solver for the given problem, constructing caches with the given {@link CacheBuilder}. The solver must then be started by calling
* {@link #solve()}.
* @param tabulationProblem The IFDS tabulation problem to solve
* @param flowFunctionCacheBuilder A valid {@link CacheBuilder} or <code>null</code> if no caching is to be used for flow functions.
*/
public IFDSSolver(IFDSTabulationProblem<N,D,M,I> tabulationProblem, @SuppressWarnings("rawtypes") CacheBuilder flowFunctionCacheBuilder) {
if(logger.isDebugEnabled())
flowFunctionCacheBuilder = flowFunctionCacheBuilder.recordStats();
this.zeroValue = tabulationProblem.zeroValue();
this.icfg = tabulationProblem.interproceduralCFG();
FlowFunctions<N, D, M> flowFunctions = tabulationProblem.autoAddZero() ?
new ZeroedFlowFunctions<N,D,M>(tabulationProblem.flowFunctions(), tabulationProblem.zeroValue()) : tabulationProblem.flowFunctions();
if(flowFunctionCacheBuilder!=null) {
ffCache = new FlowFunctionCache<N,D,M>(flowFunctions, flowFunctionCacheBuilder);
flowFunctions = ffCache;
} else {
ffCache = null;
}
this.flowFunctions = flowFunctions;
this.initialSeeds = tabulationProblem.initialSeeds();
this.jumpFn = new JumpFunctions<N,D>();
this.followReturnsPastSeeds = tabulationProblem.followReturnsPastSeeds();
this.numThreads = Math.max(1,tabulationProblem.numThreads());
this.executor = getExecutor();
}
/**
* Runs the solver on the configured problem. This can take some time.
*/
public void solve() {
submitInitialSeeds();
awaitCompletionComputeValuesAndShutdown();
}
/**
* Schedules the processing of initial seeds, initiating the analysis.
* Clients should only call this method if performing synchronization on
* their own. Normally, {@link #solve()} should be called instead.
*/
protected void submitInitialSeeds() {
for(Entry<N, Set<D>> seed: initialSeeds.entrySet()) {
N startPoint = seed.getKey();
for(D val: seed.getValue())
propagate(zeroValue, startPoint, val, null, false);
jumpFn.addFunction(zeroValue, startPoint, zeroValue);
}
}
/**
* Awaits the completion of the exploded super graph. When complete, computes result values,
* shuts down the executor and returns.
*/
protected void awaitCompletionComputeValuesAndShutdown() {
{
//run executor and await termination of tasks
runExecutorAndAwaitCompletion();
}
if(logger.isDebugEnabled())
printStats();
//ask executor to shut down;
//this will cause new submissions to the executor to be rejected,
//but at this point all tasks should have completed anyway
executor.shutdown();
//similarly here: we await termination, but this should happen instantaneously,
//as all tasks should have completed
runExecutorAndAwaitCompletion();
}
/**
* Awaits completion of the executor's tasks, re-throwing any exception that was thrown during execution.
*/
private void runExecutorAndAwaitCompletion() {
try {
executor.awaitCompletion();
} catch (InterruptedException e) {
e.printStackTrace();
}
Throwable exception = executor.getException();
if(exception!=null) {
throw new RuntimeException("There were exceptions during IDE analysis. Exiting.",exception);
}
}
/**
* Dispatch the processing of a given edge. It may be executed in a different thread.
* @param edge the edge to process
*/
protected void scheduleEdgeProcessing(PathEdge<N,D> edge){
// If the executor has been killed, there is little point
// in submitting new tasks
if (executor.isTerminating())
return;
executor.execute(new PathEdgeProcessingTask(edge));
propagationCount++;
}
/**
* Lines 13-20 of the algorithm; processing a call site in the caller's context.
*
* For each possible callee, registers incoming call edges.
* Also propagates call-to-return flows and summarized callee flows within the caller.
*
* @param edge an edge whose target node resembles a method call
*/
private void processCall(PathEdge<N,D> edge) {
final D d1 = edge.factAtSource();
final N n = edge.getTarget(); // a call node; line 14...
logger.trace("Processing call to {}", n);
final D d2 = edge.factAtTarget();
List<N> returnSiteNs = icfg.getReturnSitesOfCallAt(n);
//for each possible callee
Set<M> callees = icfg.getCalleesOfCallAt(n);
for(M sCalledProcN: callees) { //still line 14
//compute the call-flow function
FlowFunction<D> function = flowFunctions.getCallFlowFunction(n, sCalledProcN);
Set<D> res = computeCallFlowFunction(function, d1, d2);
//for each callee's start point(s)
Set<N> startPointsOf = icfg.getStartPointsOf(sCalledProcN);
for(N sP: startPointsOf) {
//for each result node of the call-flow function
for(D d3: res) {
//create initial self-loop
propagate(d3, sP, d3, n, false); //line 15
//register the fact that <sp,d3> has an incoming edge from <n,d2>
Map<N, Set<D>> endSumm;
synchronized (incoming) {
//line 15.1 of Naeem/Lhotak/Rodriguez
addIncoming(sP,d3,n,d2);
//line 15.2, copy to avoid concurrent modification exceptions by other threads
endSumm = endSummary(sP, d3);
}
//still line 15.2 of Naeem/Lhotak/Rodriguez
//for each already-queried exit value <eP,d4> reachable from <sP,d3>,
//create new caller-side jump functions to the return sites
//because we have observed a potentially new incoming edge into <sP,d3>
for(Entry<N, Set<D>> entry: endSumm.entrySet()) {
N eP = entry.getKey();
for (D d4 : entry.getValue()) {
//for each return site
for(N retSiteN: returnSiteNs) {
//compute return-flow function
FlowFunction<D> retFunction = flowFunctions.getReturnFlowFunction(n, sCalledProcN, eP, retSiteN);
//for each target value of the function
for(D d5: computeReturnFlowFunction(retFunction, d4, n, Collections.singleton(d2)))
propagate(d1, retSiteN, d5, n, false);
}
}
}
}
}
}
//line 17-19 of Naeem/Lhotak/Rodriguez
//process intra-procedural flows along call-to-return flow functions
for (N returnSiteN : returnSiteNs) {
FlowFunction<D> callToReturnFlowFunction = flowFunctions.getCallToReturnFlowFunction(n, returnSiteN);
for(D d3: computeCallToReturnFlowFunction(callToReturnFlowFunction, d1, d2))
propagate(d1, returnSiteN, d3, n, false);
}
}
/**
* Computes the call flow function for the given call-site abstraction
* @param callFlowFunction The call flow function to compute
* @param d1 The abstraction at the current method's start node.
* @param d2 The abstraction at the call site
* @return The set of caller-side abstractions at the callee's start node
*/
protected Set<D> computeCallFlowFunction
(FlowFunction<D> callFlowFunction, D d1, D d2) {
return callFlowFunction.computeTargets(d2);
}
/**
* Computes the call-to-return flow function for the given call-site
* abstraction
* @param callToReturnFlowFunction The call-to-return flow function to
* compute
* @param d1 The abstraction at the current method's start node.
* @param d2 The abstraction at the call site
* @return The set of caller-side abstractions at the return site
*/
protected Set<D> computeCallToReturnFlowFunction
(FlowFunction<D> callToReturnFlowFunction, D d1, D d2) {
return callToReturnFlowFunction.computeTargets(d2);
}
/**
* Lines 21-32 of the algorithm.
*
* Stores callee-side summaries.
* Also, at the side of the caller, propagates intra-procedural flows to return sites
* using those newly computed summaries.
*
* @param edge an edge whose target node resembles a method exit
*/
protected void processExit(PathEdge<N,D> edge) {
final N n = edge.getTarget(); // an exit node; line 21...
M methodThatNeedsSummary = icfg.getMethodOf(n);
final D d1 = edge.factAtSource();
final D d2 = edge.factAtTarget();
//for each of the method's start points, determine incoming calls
Map<N,Set<D>> inc = new HashMap<N, Set<D>>();
Set<N> startPointsOf = icfg.getStartPointsOf(methodThatNeedsSummary);
for(N sP: startPointsOf) {
//line 21.1 of Naeem/Lhotak/Rodriguez
//register end-summary
synchronized (incoming) {
inc.putAll(incoming(d1, sP));
addEndSummary(sP, d1, n, d2);
}
}
//for each incoming call edge already processed
//(see processCall(..))
if (inc != null)
for (Entry<N,Set<D>> entry: inc.entrySet()) {
//line 22
N c = entry.getKey();
//for each return site
for(N retSiteC: icfg.getReturnSitesOfCallAt(c)) {
//compute return-flow function
FlowFunction<D> retFunction = flowFunctions.getReturnFlowFunction(c, methodThatNeedsSummary,n,retSiteC);
Set<D> targets = computeReturnFlowFunction(retFunction, d2, c, entry.getValue());
//for each incoming-call value
for(D d4: entry.getValue()) {
synchronized (jumpFn) { // some other thread might change jumpFn on the way
//for each jump function coming into the call, propagate to return site using the composed function
for(D d3: jumpFn.reverseLookup(c,d4))
//for each target value at the return site
//line 23
for(D d5: targets) {
propagate(d3, retSiteC, d5, c, false);
}
}
}
}
}
//handling for unbalanced problems where we return out of a method with a fact for which we have no incoming flow
//note: we propagate that way only values that originate from ZERO, as conditionally generated values should only
//be propagated into callers that have an incoming edge for this condition
if(followReturnsPastSeeds && (inc == null || inc.isEmpty()) && d1.equals(zeroValue)) {
// only propagate up if there are no incoming edges for this method and the source fact is the zero value
Set<N> callers = icfg.getCallersOf(methodThatNeedsSummary);
for(N c: callers) {
for(N retSiteC: icfg.getReturnSitesOfCallAt(c)) {
FlowFunction<D> retFunction = flowFunctions.getReturnFlowFunction(c, methodThatNeedsSummary,n,retSiteC);
Set<D> targets = computeReturnFlowFunction(retFunction, d2, c, Collections.singleton(zeroValue));
for(D d5: targets)
propagate(zeroValue, retSiteC, d5, c, true);
}
}
//in cases where there are no callers, the return statement would normally not be processed at all;
//this might be undesirable if the flow function has a side effect such as registering a taint;
//instead we thus call the return flow function with a null caller
if(callers.isEmpty()) {
FlowFunction<D> retFunction = flowFunctions.getReturnFlowFunction(null, methodThatNeedsSummary,n,null);
retFunction.computeTargets(d2);
}
}
}
/**
* Computes the return flow function for the given set of caller-side
* abstractions.
* @param retFunction The return flow function to compute
* @param d2 The abstraction at the exit node in the callee
* @param callSite The call site
* @param callerSideDs The abstractions at the call site
* @return The set of caller-side abstractions at the return site
*/
protected Set<D> computeReturnFlowFunction
(FlowFunction<D> retFunction, D d2, N callSite, Set<D> callerSideDs) {
return retFunction.computeTargets(d2);
}
/**
* Lines 33-37 of the algorithm.
* Simply propagate normal, intra-procedural flows.
* @param edge
*/
private void processNormalFlow(PathEdge<N,D> edge) {
final D d1 = edge.factAtSource();
final N n = edge.getTarget();
final D d2 = edge.factAtTarget();
for (N m : icfg.getSuccsOf(n)) {
FlowFunction<D> flowFunction = flowFunctions.getNormalFlowFunction(n,m);
Set<D> res = computeNormalFlowFunction(flowFunction, d1, d2);
for (D d3 : res)
propagate(d1, m, d3, null, false);
}
}
/**
* Computes the normal flow function for the given set of start and end
* abstractions.
* @param flowFunction The normal flow function to compute
* @param d1 The abstraction at the method's start node
* @param d2 The abstraction at the current node
* @return The set of abstractions at the successor node
*/
protected Set<D> computeNormalFlowFunction
(FlowFunction<D> flowFunction, D d1, D d2) {
return flowFunction.computeTargets(d2);
}
/**
* Propagates the flow further down the exploded super graph.
* @param sourceVal the source value of the propagated summary edge
* @param target the target statement
* @param targetVal the target value at the target statement
* @param relatedCallSite for call and return flows the related call statement, <code>null</code> otherwise
* (this value is not used within this implementation but may be useful for subclasses of {@link IFDSSolver})
* @param isUnbalancedReturn <code>true</code> if this edge is propagating an unbalanced return
* (this value is not used within this implementation but may be useful for subclasses of {@link IFDSSolver})
*/
protected void propagate(D sourceVal, N target, D targetVal,
/* deliberately exposed to clients */ N relatedCallSite,
/* deliberately exposed to clients */ boolean isUnbalancedReturn) {
if (!jumpFn.addFunction(sourceVal, target, targetVal))
return;
PathEdge<N,D> edge = new PathEdge<N,D>(sourceVal, target, targetVal);
scheduleEdgeProcessing(edge);
if(targetVal!=zeroValue)
logger.trace("EDGE: <{},{}> -> <{},{}>", icfg.getMethodOf(target), sourceVal, target, targetVal);
}
private Map<N, Set<D>> endSummary(N sP, D d3) {
Map<N, Set<D>> map = endSummary.get(sP, d3);
if(map==null) return Collections.emptyMap();
return map;
}
private void addEndSummary(N sP, D d1, N eP, D d2) {
synchronized (incoming) {
Map<N, Set<D>> summaries = endSummary.get(sP, d1);
if(summaries==null) {
summaries = new ConcurrentHashMap<N, Set<D>>();
endSummary.put(sP, d1, summaries);
}
Set<D> d2s = summaries.get(eP);
if (d2s == null) {
d2s = new ConcurrentHashSet<D>();
summaries.put(eP,d2s);
}
d2s.add(d2);
}
}
private Map<N, Set<D>> incoming(D d1, N sP) {
Map<N, Set<D>> map = incoming.get(sP, d1);
if(map==null) return Collections.emptyMap();
return map;
}
protected void addIncoming(N sP, D d3, N n, D d2) {
synchronized (incoming) {
Map<N, Set<D>> summaries = incoming.get(sP, d3);
if(summaries==null) {
summaries = new ConcurrentHashMap<N, Set<D>>();
incoming.put(sP, d3, summaries);
}
Set<D> set = summaries.get(n);
if(set==null) {
set = new ConcurrentHashSet<D>();
summaries.put(n,set);
}
set.add(d2);
}
}
/**
* Factory method for this solver's thread-pool executor.
*/
protected CountingThreadPoolExecutor getExecutor() {
return new CountingThreadPoolExecutor(1, this.numThreads, 30, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
}
/**
* Returns a String used to identify the output of this solver in debug mode.
* Subclasses can overwrite this string to distinguish the output from different solvers.
*/
protected String getDebugName() {
return "FAST IFDS SOLVER";
}
public void printStats() {
if(logger.isDebugEnabled()) {
if(ffCache!=null)
ffCache.printStats();
} else {
logger.info("No statistics were collected, as DEBUG is disabled.");
}
}
private class PathEdgeProcessingTask implements Runnable {
private final PathEdge<N,D> edge;
public PathEdgeProcessingTask(PathEdge<N,D> edge) {
this.edge = edge;
}
public void run() {
if(icfg.isCallStmt(edge.getTarget())) {
processCall(edge);
} else {
//note that some statements, such as "throw" may be
//both an exit statement and a "normal" statement
if(icfg.isExitStmt(edge.getTarget())) {
processExit(edge);
}
if(!icfg.getSuccsOf(edge.getTarget()).isEmpty()) {
processNormalFlow(edge);
}
}
}
}
}
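/*
* Editor's note: a deliberately simplified, single-threaded sketch of the worklist idea used by
* the solver above (see propagate() and scheduleEdgeProcessing()): a path edge is scheduled for
* processing only the first time its jump function is discovered, which is what keeps the
* tabulation from re-processing the same <statement, fact> pair. This is not part of FlowDroid
* and uses plain strings in place of real ICFG nodes and taint abstractions.
*/
class PathEdgeWorklistSketch {
// jump functions seen so far: target statement -> facts already propagated to it
private final Map<String, Set<String>> seen = new ConcurrentHashMap<String, Set<String>>();
// pending edges, each encoded as { target statement, fact }
private final LinkedBlockingQueue<String[]> worklist = new LinkedBlockingQueue<String[]>();
/** Mirrors the structure of IFDSSolver.propagate(): schedule an edge only if it is new. */
void propagate(String target, String fact) {
Set<String> facts = seen.get(target);
if (facts == null) {
facts = new java.util.HashSet<String>();
seen.put(target, facts);
}
if (facts.add(fact))
worklist.offer(new String[] { target, fact });
}
/** Drains the worklist; a real solver dispatches each edge to the call/exit/normal handlers. */
void drain() {
String[] edge;
while ((edge = worklist.poll()) != null) {
System.out.println("processing <" + edge[0] + ", " + edge[1] + ">");
}
}
}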
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.frc1675.subsystems;
import edu.wpi.first.wpilibj.AnalogChannel;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Relay;
import edu.wpi.first.wpilibj.command.Subsystem;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import org.frc1675.RobotMap;
import org.frc1675.commands.CompressorWork;
/**
*
* @author team1675
*/
public class CompressorSystem extends Subsystem {
private Compressor compressor;
private AnalogChannel pressureSensor;
public CompressorSystem(){
compressor = new Compressor(RobotMap.HIGH_PRESSURE_SWITCH, RobotMap.COMPRESSOR_SPIKE);
pressureSensor = new AnalogChannel(RobotMap.PRESSURE_SENSOR);
pressureSensor.setAverageBits(10);
}
public void work(){
if (!compressor.getPressureSwitchValue()){
compressor.setRelayValue(Relay.Value.kForward);
}else{
compressor.setRelayValue(Relay.Value.kOff);
}
SmartDashboard.putNumber("Working Pressure", (pressureSensor.getAverageVoltage()*20.0));
}
public void init(){
compressor.start();
}
public void stop(){
compressor.stop();
}
public void initDefaultCommand() {
setDefaultCommand(new CompressorWork());
}
}
|
package org.frc836.database;
import android.provider.BaseColumns;
/* This file was auto-generated by SQLITEContractGen.py.
Abandon all hope ye who edit here.*/
public final class FRCScoutingContract {
public FRCScoutingContract() {};
public static abstract class CONFIGURATION_LU_Entry implements BaseColumns {
public static final String TABLE_NAME = "configuration_lu";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_CONFIGURATION_DESC = "configuration_desc";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static abstract class EVENT_LU_Entry implements BaseColumns {
public static final String TABLE_NAME = "event_lu";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_EVENT_NAME = "event_name";
public static final String COLUMN_NAME_MATCH_URL = "match_url";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static abstract class FACT_CYCLE_DATA_Entry implements BaseColumns {
public static final String TABLE_NAME = "fact_cycle_data";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_EVENT_ID = "event_id";
public static final String COLUMN_NAME_MATCH_ID = "match_id";
public static final String COLUMN_NAME_TEAM_ID = "team_id";
public static final String COLUMN_NAME_CYCLE_NUM = "cycle_num";
public static final String COLUMN_NAME_NEAR_POSS = "near_poss";
public static final String COLUMN_NAME_WHITE_POSS = "white_poss";
public static final String COLUMN_NAME_FAR_POSS = "far_poss";
public static final String COLUMN_NAME_TRUSS = "truss";
public static final String COLUMN_NAME_CATCH = "catch";
public static final String COLUMN_NAME_HIGH = "high";
public static final String COLUMN_NAME_LOW = "low";
public static final String COLUMN_NAME_ASSISTS = "assists";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static abstract class FACT_MATCH_DATA_Entry implements BaseColumns {
public static final String TABLE_NAME = "fact_match_data";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_EVENT_ID = "event_id";
public static final String COLUMN_NAME_MATCH_ID = "match_id";
public static final String COLUMN_NAME_TEAM_ID = "team_id";
public static final String COLUMN_NAME_AUTO_HIGH = "auto_high";
public static final String COLUMN_NAME_AUTO_HIGH_HOT = "auto_high_hot";
public static final String COLUMN_NAME_AUTO_LOW = "auto_low";
public static final String COLUMN_NAME_AUTO_LOW_HOT = "auto_low_hot";
public static final String COLUMN_NAME_HIGH = "high";
public static final String COLUMN_NAME_LOW = "low";
public static final String COLUMN_NAME_AUTO_MOBILE = "auto_mobile";
public static final String COLUMN_NAME_AUTO_GOALIE = "auto_goalie";
public static final String COLUMN_NAME_NUM_CYCLES = "num_cycles";
public static final String COLUMN_NAME_FOUL = "foul";
public static final String COLUMN_NAME_TECH_FOUL = "tech_foul";
public static final String COLUMN_NAME_TIP_OVER = "tip_over";
public static final String COLUMN_NAME_YELLOW_CARD = "yellow_card";
public static final String COLUMN_NAME_RED_CARD = "red_card";
public static final String COLUMN_NAME_NOTES = "notes";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static abstract class NOTES_OPTIONS_Entry implements BaseColumns {
public static final String TABLE_NAME = "notes_options";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_OPTION_TEXT = "option_text";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static abstract class ROBOT_LU_Entry implements BaseColumns {
public static final String TABLE_NAME = "robot_lu";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_TEAM_ID = "team_id";
public static final String COLUMN_NAME_ROBOT_PHOTO = "robot_photo";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static abstract class SCOUT_PIT_DATA_Entry implements BaseColumns {
public static final String TABLE_NAME = "scout_pit_data";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_TEAM_ID = "team_id";
public static final String COLUMN_NAME_CONFIGURATION_ID = "configuration_id";
public static final String COLUMN_NAME_WHEEL_TYPE_ID = "wheel_type_id";
public static final String COLUMN_NAME_WHEEL_BASE_ID = "wheel_base_id";
public static final String COLUMN_NAME_AUTONOMOUS_MODE = "autonomous_mode";
public static final String COLUMN_NAME_AUTO_HIGH = "auto_high";
public static final String COLUMN_NAME_AUTO_LOW = "auto_low";
public static final String COLUMN_NAME_AUTO_HOT = "auto_hot";
public static final String COLUMN_NAME_AUTO_MOBILE = "auto_mobile";
public static final String COLUMN_NAME_AUTO_GOALIE = "auto_goalie";
public static final String COLUMN_NAME_TRUSS = "truss";
public static final String COLUMN_NAME_CATCH = "catch";
public static final String COLUMN_NAME_ACTIVE_CONTROL = "active_control";
public static final String COLUMN_NAME_LAUNCH_BALL = "launch_ball";
public static final String COLUMN_NAME_SCORE_HIGH = "score_high";
public static final String COLUMN_NAME_SCORE_LOW = "score_low";
public static final String COLUMN_NAME_MAX_HEIGHT = "max_height";
public static final String COLUMN_NAME_SCOUT_COMMENTS = "scout_comments";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static abstract class WHEEL_BASE_LU_Entry implements BaseColumns {
public static final String TABLE_NAME = "wheel_base_lu";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_WHEEL_BASE_DESC = "wheel_base_desc";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static abstract class WHEEL_TYPE_LU_Entry implements BaseColumns {
public static final String TABLE_NAME = "wheel_type_lu";
public static final String COLUMN_NAME_ID = "id";
public static final String COLUMN_NAME_WHEEL_TYPE_DESC = "wheel_type_desc";
public static final String COLUMN_NAME_TIMESTAMP = "timestamp";
public static final String COLUMN_NAME_INVALID = "invalid";
}
public static final String[] SQL_CREATE_ENTRIES = {
"CREATE TABLE IF NOT EXISTS configuration_lu (\n" +
" id integer primary key autoincrement,\n" +
" configuration_desc text NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"INSERT INTO configuration_lu \n" +
"SELECT 1 AS id, 'Wide' AS configuration_desc, '2014-01-25 10:18:13' AS timestamp, 0 AS invalid\n" +
"UNION SELECT 2, 'Long', '2014-01-25 10:18:13', 0\n" +
"UNION SELECT 3, 'Square', '2014-01-25 10:18:13', 0\n" +
"UNION SELECT 4, 'Round', '2014-01-25 10:18:13', 0\n" +
"UNION SELECT 5, 'Hex', '2014-01-25 10:18:13', 0\n" +
"UNION SELECT 6, 'Triangle', '2014-01-25 10:18:13', 0\n" +
"UNION SELECT 7, 'Other', '2014-01-25 10:18:13', 0;",
"CREATE TABLE IF NOT EXISTS event_lu (\n" +
" id integer primary key autoincrement,\n" +
" event_name text NOT NULL,\n" +
" match_url text NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"INSERT INTO event_lu \n" +
"SELECT 1 AS id, 'Central Illinois Regional' AS event_name, 'http://www2.usfirst.org/2014comp/Events/ILIL/ScheduleQual.html' AS match_url, '2014-01-25 10:19:26' AS timestamp, 0 AS invalid\n" +
"UNION SELECT 2, 'Palmetto Regional', 'http://www2.usfirst.org/2014comp/Events/SCMB/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 3, 'Alamo Regional', 'http://www2.usfirst.org/2014comp/Events/TXSA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 4, 'Greater Toronto West Regional', 'http://www2.usfirst.org/2014comp/Events/ONTO2/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 5, 'Inland Empire Regional', 'http://www2.usfirst.org/2014comp/Events/CASB/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 6, 'Israel Regional', 'http://www2.usfirst.org/2014comp/Events/ISTA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 7, 'Greater Toronto East Regional', 'http://www2.usfirst.org/2014comp/Events/ONTO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 8, 'Arkansas Regional', 'http://www2.usfirst.org/2014comp/Events/ARFA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 9, 'San Diego Regional', 'http://www2.usfirst.org/2014comp/Events/CASD/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 10, 'Crossroads Regional', 'http://www2.usfirst.org/2014comp/Events/INTH/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 11, 'Lake Superior Regional', 'http://www2.usfirst.org/2014comp/Events/MNDU/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 12, 'Northern Lights Regional', 'http://www2.usfirst.org/2014comp/Events/MNDU2/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 13, 'Hub City Regional', 'http://www2.usfirst.org/2014comp/Events/TXLU/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 14, 'Central Valley Regional', 'http://www2.usfirst.org/2014comp/Events/CAMA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 15, 'Mexico City Regional', 'http://www2.usfirst.org/2014comp/Events/MXMC/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 16, 'Sacramento Regional', 'http://www2.usfirst.org/2014comp/Events/CASA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 17, 'Orlando Regional', 'http://www2.usfirst.org/2014comp/Events/FLOR/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 18, 'Greater Kansas City Regional', 'http://www2.usfirst.org/2014comp/Events/MOKC/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 19, 'St. Louis Regional', 'http://www2.usfirst.org/2014comp/Events/MOSL/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 20, 'North Carolina Regional', 'http://www2.usfirst.org/2014comp/Events/NCRE/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 21, 'New York Tech Valley Regional', 'http://www2.usfirst.org/2014comp/Events/NYTR/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 22, 'Dallas Regional', 'http://www2.usfirst.org/2014comp/Events/TXDA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 23, 'Utah Regional', 'http://www2.usfirst.org/2014comp/Events/UTWV/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 24, 'Waterloo Regional', 'http://www2.usfirst.org/2014comp/Events/ONWA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 25, 'Festival de Robotique FRC a Montreal Regional', 'http://www2.usfirst.org/2014comp/Events/QCMO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 26, 'Arizona Regional', 'http://www2.usfirst.org/2014comp/Events/AZCH/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 27, 'Los Angeles Regional', 'http://www2.usfirst.org/2014comp/Events/CALB/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 28, 'Boilermaker Regional', 'http://www2.usfirst.org/2014comp/Events/INWL/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 29, 'Buckeye Regional', 'http://www2.usfirst.org/2014comp/Events/OHCL/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 30, 'Virginia Regional', 'http://www2.usfirst.org/2014comp/Events/VARI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 31, 'Wisconsin Regional', 'http://www2.usfirst.org/2014comp/Events/WIMI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 32, 'North Bay Regional', 'http://www2.usfirst.org/2014comp/Events/ONNB/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 33, 'Peachtree Regional', 'http://www2.usfirst.org/2014comp/Events/GADU/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 34, 'Hawaii Regional', 'http://www2.usfirst.org/2014comp/Events/HIHO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 35, 'Minnesota North Star Regional', 'http://www2.usfirst.org/2014comp/Events/MNMI2/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 36, 'Minnesota 1000 Lakes Regional', 'http://www2.usfirst.org/2014comp/Events/MNMI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 37, 'SBPLI Long Island Regional', 'http://www2.usfirst.org/2014comp/Events/NYLI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 38, 'Finger Lakes Regional', 'http://www2.usfirst.org/2014comp/Events/NYRO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 39, 'Queen City Regional', 'http://www2.usfirst.org/2014comp/Events/OHCI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 40, 'Oklahoma Regional', 'http://www2.usfirst.org/2014comp/Events/OKOK/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 41, 'Greater Pittsburgh Regional', 'http://www2.usfirst.org/2014comp/Events/PAPI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 42, 'Smoky Mountains Regional', 'http://www2.usfirst.org/2014comp/Events/TNKN/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 43, 'Greater DC Regional', 'http://www2.usfirst.org/2014comp/Events/DCWA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 44, 'Western Canada Regional', 'http://www2.usfirst.org/2014comp/Events/ABCA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 45, 'Windsor Essex Great Lakes Regional', 'http://www2.usfirst.org/2014comp/Events/ONWI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 46, 'Silicon Valley Regional', 'http://www2.usfirst.org/2014comp/Events/CASJ/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 47, 'Colorado Regional', 'http://www2.usfirst.org/2014comp/Events/CODE/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 48, 'South Florida Regional', 'http://www2.usfirst.org/2014comp/Events/FLFO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 49, 'Midwest Regional', 'http://www2.usfirst.org/2014comp/Events/ILCH/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 50, 'Bayou Regional', 'http://www2.usfirst.org/2014comp/Events/LAKE/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 51, 'Chesapeake Regional', 'http://www2.usfirst.org/2014comp/Events/MDBA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 52, 'Las Vegas Regional', 'http://www2.usfirst.org/2014comp/Events/NVLV/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 53, 'New York City Regional', 'http://www2.usfirst.org/2014comp/Events/NYNY/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 54, 'Lone Star Regional', 'http://www2.usfirst.org/2014comp/Events/TXHO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 55, 'Michigan FRC State Championship', 'http://www2.usfirst.org/2014comp/Events/MICMP/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 56, 'Mid-Atlantic Robotics FRC Region Championship', 'http://www2.usfirst.org/2014comp/Events/MRCMP/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 57, 'New England FRC Region Championship', 'http://www2.usfirst.org/2014comp/Events/NECMP/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 58, 'Autodesk PNW FRC Championship', 'http://www2.usfirst.org/2014comp/Events/PNCMP/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 59, 'Center Line FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MICEN/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 60, 'Southfield FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MISOU/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 61, 'Kettering University FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MIKET/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 62, 'Gull Lake FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MIGUL/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 63, 'Escanaba FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MIESC/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 64, 'Howell FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MIHOW/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 65, 'West Michigan FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MIWMI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 66, 'Great Lakes Bay Region FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MIMID/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 67, 'Traverse City FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MITVC/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 68, 'Livonia FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MILIV/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 69, 'St. Joseph FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MISJO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 70, 'Waterford FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MIWAT/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 71, 'Lansing FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MILAN/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 72, 'Bedford FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MIBED/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 73, 'Troy FIRST Robotics District Competition', 'http://www2.usfirst.org/2014comp/Events/MITRY/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 74, 'MAR FIRST Robotics Mt. Olive District Competition', 'http://www2.usfirst.org/2014comp/Events/NJFLA/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 75, 'MAR FIRST Robotics Hatboro-Horsham District Competition', 'http://www2.usfirst.org/2014comp/Events/PAHAT/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 76, 'MAR FIRST Robotics Springside Chestnut Hill District Competition', 'http://www2.usfirst.org/2014comp/Events/PAPHI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 77, 'MAR FIRST Robotics Cliffton District Competition', 'http://www2.usfirst.org/2014comp/Events/NJCLI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 78, 'MAR FIRST Robotics Lenape-Seneca District Competition', 'http://www2.usfirst.org/2014comp/Events/NJTAB/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 79, 'MAR FIRST Robotics Bridgewater-Raritan District Competition', 'http://www2.usfirst.org/2014comp/Events/NJBRI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 80, 'Granite State District Event', 'http://www2.usfirst.org/2014comp/Events/NHNAS/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 81, 'UNH District Event', 'http://www2.usfirst.org/2014comp/Events/NHDUR/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 82, 'Groton District Event', 'http://www2.usfirst.org/2014comp/Events/CTGRO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 83, 'WPI District Event', 'http://www2.usfirst.org/2014comp/Events/NAWOR/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 84, 'Rhode Island District Event', 'http://www2.usfirst.org/2014comp/Events/RISMI/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 85, 'Southington District Event', 'http://www2.usfirst.org/2014comp/Events/CTSOU/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 86, 'Northeastern University District Event', 'http://www2.usfirst.org/2014comp/Events/MABOS/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 87, 'Hartford District Event', 'http://www2.usfirst.org/2014comp/Events/CTHAR/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 88, 'Pinetree District Event', 'http://www2.usfirst.org/2014comp/Events/MELEW/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 89, 'PNW FIRST Robotics Auburn Mountainview District Event', 'http://www2.usfirst.org/2014comp/Events/WAAMV/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 90, 'PNW FIRST Robotics Oregon City District Event', 'http://www2.usfirst.org/2014comp/Events/ORORE/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 91, 'PNW FIRST Robotics Glacier Peak District Event', 'http://www2.usfirst.org/2014comp/Events/WASNO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 92, 'PNW FIRST Robotics Eastern Washington University District Event', 'http://www2.usfirst.org/2014comp/Events/WACHE/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 93, 'PNW FIRST Robotics Mt. Vernon District Event', 'http://www2.usfirst.org/2014comp/Events/WAMOU/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 94, 'PNW FIRST Robotics Wilsonville District Event', 'http://www2.usfirst.org/2014comp/Events/ORWIL/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 95, 'PNW FIRST Robotics Shorewood District Event', 'http://www2.usfirst.org/2014comp/Events/WASHO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 96, 'PNW FIRST Robotics Auburn District Event', 'http://www2.usfirst.org/2014comp/Events/WAAHS/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 97, 'PNW FIRST Robotics Central Washington University District Event', 'http://www2.usfirst.org/2014comp/Events/WAELO/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 98, 'PNW FIRST Robotics Oregon State University District Event', 'http://www2.usfirst.org/2014comp/Events/OROSU/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 99, 'Championship - Archimedes', 'http://www2.usfirst.org/2014comp/Events/Archimedes/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 100, 'Championship - Curie', 'http://www2.usfirst.org/2014comp/Events/Curie/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 101, 'Championship - Galileo', 'http://www2.usfirst.org/2014comp/Events/Galileo/ScheduleQual.html', '2014-01-25 10:19:26', 0\n" +
"UNION SELECT 102, 'Championship - Newton', 'http://www2.usfirst.org/2014comp/Events/Newton/ScheduleQual.html', '2014-01-25 10:19:26', 0;",
"CREATE TABLE IF NOT EXISTS fact_cycle_data (\n" +
" id integer primary key autoincrement,\n" +
" event_id unsigned int(5) NOT NULL,\n" +
" match_id unsigned int(3) NOT NULL,\n" +
" team_id unsigned int(5) NOT NULL,\n" +
" cycle_num unsigned int(3) NOT NULL,\n" +
" near_poss unsigned tinyint(1) NOT NULL,\n" +
" white_poss unsigned tinyint(1) NOT NULL,\n" +
" far_poss unsigned tinyint(1) NOT NULL,\n" +
" truss unsigned tinyint(1) NOT NULL,\n" +
" catch unsigned tinyint(1) NOT NULL,\n" +
" high unsigned tinyint(1) NOT NULL,\n" +
" low unsigned tinyint(1) NOT NULL,\n" +
" assists unsigned int(3) NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"CREATE TABLE IF NOT EXISTS fact_match_data (\n" +
" id integer primary key autoincrement,\n" +
" event_id unsigned int(5) NOT NULL,\n" +
" match_id unsigned int(3) NOT NULL,\n" +
" team_id unsigned int(5) NOT NULL,\n" +
" auto_high unsigned int(1) NOT NULL,\n" +
" auto_high_hot unsigned int(1) NOT NULL,\n" +
" auto_low unsigned int(1) NOT NULL,\n" +
" auto_low_hot unsigned int(1) NOT NULL,\n" +
" high unsigned int(1) NOT NULL,\n" +
" low unsigned int(1) NOT NULL,\n" +
" auto_mobile unsigned tinyint(1) NOT NULL,\n" +
" auto_goalie unsigned tinyint(1) NOT NULL,\n" +
" num_cycles unsigned int(3) NOT NULL,\n" +
" foul unsigned tinyint(1) NOT NULL,\n" +
" tech_foul unsigned tinyint(1) NOT NULL,\n" +
" tip_over unsigned tinyint(1) NOT NULL,\n" +
" yellow_card unsigned tinyint(1) NOT NULL,\n" +
" red_card unsigned tinyint(1) NOT NULL,\n" +
" notes text NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"CREATE TABLE IF NOT EXISTS notes_options (\n" +
" id integer primary key autoincrement,\n" +
" option_text text NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"INSERT INTO notes_options \n" +
"SELECT 1 AS id, 'No Show' AS option_text, '2014-01-25 10:21:11' AS timestamp, 0 AS invalid\n" +
"UNION SELECT 2, 'Non-functional', '2014-01-25 10:21:11', 0\n" +
"UNION SELECT 3, 'Defender', '2014-01-25 10:21:11', 0\n" +
"UNION SELECT 4, 'Catcher', '2014-01-25 10:21:11', 0;",
"CREATE TABLE IF NOT EXISTS robot_lu (\n" +
" id integer primary key autoincrement,\n" +
" team_id unsigned int(5) NOT NULL,\n" +
" robot_photo text NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"CREATE TABLE IF NOT EXISTS scout_pit_data (\n" +
" id integer primary key autoincrement,\n" +
" team_id unsigned int(10) NOT NULL,\n" +
" configuration_id unsigned int(10) NOT NULL,\n" +
" wheel_type_id unsigned int(10) NOT NULL,\n" +
" wheel_base_id unsigned int(10) NOT NULL,\n" +
" autonomous_mode tinyint(1) NOT NULL,\n" +
" auto_high unsigned tinyint(1) NOT NULL,\n" +
" auto_low unsigned tinyint(1) NOT NULL,\n" +
" auto_hot unsigned tinyint(1) NOT NULL,\n" +
" auto_mobile unsigned tinyint(1) NOT NULL,\n" +
" auto_goalie unsigned tinyint(1) NOT NULL,\n" +
" truss unsigned tinyint(1) NOT NULL,\n" +
" catch unsigned tinyint(1) NOT NULL,\n" +
" active_control unsigned tinyint(1) NOT NULL,\n" +
" launch_ball unsigned tinyint(1) NOT NULL,\n" +
" score_high tinyint(1) NOT NULL,\n" +
" score_low tinyint(1) NOT NULL,\n" +
" max_height unsigned int(10) NOT NULL,\n" +
" scout_comments text NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"CREATE TABLE IF NOT EXISTS wheel_base_lu (\n" +
" id integer primary key autoincrement,\n" +
" wheel_base_desc text NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"INSERT INTO wheel_base_lu \n" +
"SELECT 1 AS id, '2 Wheel Drive' AS wheel_base_desc, '2014-01-25 10:23:22' AS timestamp, 0 AS invalid\n" +
"UNION SELECT 2, '4 Wheel Drive', '2014-01-25 10:23:22', 0\n" +
"UNION SELECT 3, '6 Wheel Drive', '2014-01-25 10:23:22', 0\n" +
"UNION SELECT 5, 'Crab Drive', '2014-01-25 10:23:22', 0\n" +
"UNION SELECT 6, 'Swerve Drive', '2014-01-25 10:23:22', 0\n" +
"UNION SELECT 7, 'Tank Drive', '2014-01-25 10:23:22', 0\n" +
"UNION SELECT 8, 'Other', '2014-01-25 10:23:22', 0\n" +
"UNION SELECT 4, '8 Wheel Drive (or more)', '2014-01-25 10:23:22', 0;",
"CREATE TABLE IF NOT EXISTS wheel_type_lu (\n" +
" id integer primary key autoincrement,\n" +
" wheel_type_desc text NOT NULL,\n" +
" timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,\n" +
" invalid unsigned tinyint(1) NOT NULL DEFAULT '0'\n" +
");",
"INSERT INTO wheel_type_lu \n" +
"SELECT 1 AS id, 'Kit Wheels' AS wheel_type_desc, '2014-01-25 10:25:14' AS timestamp, 0 AS invalid\n" +
"UNION SELECT 2, 'IFI', '2014-01-25 10:25:14', 0\n" +
"UNION SELECT 3, 'Omni', '2014-01-25 10:25:14', 0\n" +
"UNION SELECT 4, 'Mecanum', '2014-01-25 10:25:14', 0\n" +
"UNION SELECT 5, 'Nylon', '2014-01-25 10:25:14', 0\n" +
"UNION SELECT 6, 'Rubber', '2014-01-25 10:25:14', 0\n" +
"UNION SELECT 7, 'Tank Tread', '2014-01-25 10:25:14', 0\n" +
"UNION SELECT 8, 'Swerve', '2014-01-25 10:25:14', 0\n" +
"UNION SELECT 9, 'Custom', '2014-01-25 10:25:14', 0\n" +
"UNION SELECT 10, 'Pneumatic', '2014-01-25 10:25:14', 0;"};
public static final String[] SQL_DELETE_ENTRIES =
{"DROP TABLE IF EXISTS configuration_lu;",
"DROP TABLE IF EXISTS event_lu;",
"DROP TABLE IF EXISTS fact_cycle_data;",
"DROP TABLE IF EXISTS fact_match_data;",
"DROP TABLE IF EXISTS notes_options;",
"DROP TABLE IF EXISTS robot_lu;",
"DROP TABLE IF EXISTS scout_pit_data;",
"DROP TABLE IF EXISTS wheel_base_lu;",
"DROP TABLE IF EXISTS wheel_type_lu;"};
}
|
package com.github.mizool.core.rest.errorhandling;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import lombok.Builder;
import lombok.Value;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
@Value
@Builder(toBuilder = true)
public class ErrorMessageDto
{
Map<String, Collection<ErrorDto>> errors;
Map<String, Object> globalParameters;
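/**
 * Combines this DTO with {@code other}: error sets for the same key are unioned, and global parameters
 * defined in both instances resolve to the value from {@code other} (see {@code resolveValueInOrder} below).
 */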
public ErrorMessageDto combineWith(ErrorMessageDto other)
{
return toBuilder().errors(combineErrors(other).asMap())
.globalParameters(combineGlobalParameters(other))
.build();
}
private SetMultimap<String, ErrorDto> combineErrors(ErrorMessageDto other)
{
SetMultimap<String, ErrorDto> combined = HashMultimap.create();
if (errors != null)
{
errors.forEach(combined::putAll);
}
if (other.getErrors() != null)
{
other.getErrors()
.forEach(combined::putAll);
}
return Multimaps.unmodifiableSetMultimap(combined);
}
private Map<String, Object> combineGlobalParameters(ErrorMessageDto other)
{
Map<String, Object> result;
Map<String, Object> otherGlobalParameters = other.getGlobalParameters();
if (globalParameters == null && otherGlobalParameters == null)
{
result = null;
}
else if (globalParameters != null && otherGlobalParameters == null)
{
result = globalParameters;
}
else if (globalParameters == null && otherGlobalParameters != null)
{
result = otherGlobalParameters;
}
else
{
Set<String> combinedKeys = Sets.union(globalParameters.keySet(), otherGlobalParameters.keySet());
result = combinedKeys.stream()
.collect(ImmutableMap.toImmutableMap(key -> key,
key -> resolveValueInOrder(key, otherGlobalParameters, globalParameters)));
}
return result;
}
private Object resolveValueInOrder(String key, Map<String, Object> first, Map<String, Object> second)
{
return first.getOrDefault(key, second.get(key));
}
}
|
package com.mulya;
import com.mulya.beans.RuleBean;
import com.mulya.enums.Case;
import com.mulya.enums.Gender;
import com.mulya.enums.NamePart;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.List;
import static org.testng.Assert.assertEquals;
@Test
public class PetrovichDeclinationMakerTest {
private PetrovichDeclinationMaker maker;
@BeforeMethod
public void setUp() throws Exception {
maker = PetrovichDeclinationMaker.getInstance();
}
public void testApplyModToName() throws Exception {
assertEquals(maker.applyModToName("--", "test"), "te");
assertEquals(maker.applyModToName("--st", "test"), "test");
assertEquals(maker.applyModToName("st", "test"), "testst");
assertEquals(maker.applyModToName("", "test"), "test");
}
public void testFindModInRuleBeanList() throws Exception {
List<RuleBean> ruleBeanList = Arrays.asList(
new RuleBean(Gender.MALE.getValue(), Arrays.asList("--11", "--12", "--13", "--14", "--15"), Arrays.asList("one")),
new RuleBean(Gender.MALE.getValue(), Arrays.asList("--21", "--22", "--23", "--24", "--25"), Arrays.asList("two"))
);
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.MALE, Case.GENITIVE, "testone"), "--11");
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.FEMALE, Case.GENITIVE, "testone"), null);
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.ANDROGYNOUS, Case.GENITIVE, "testone"), null);
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.MALE, Case.DATIVE, "testone"), "--12");
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.MALE, Case.DATIVE, "teston"), null);
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.MALE, Case.GENITIVE, "testtwo"), "--21");
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.FEMALE, Case.GENITIVE, "testtwo"), null);
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.ANDROGYNOUS, Case.GENITIVE, "testone"), null);
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.MALE, Case.DATIVE, "testtwo"), "--22");
assertEquals(maker.findModInRuleBeanList(ruleBeanList, Gender.MALE, Case.DATIVE, "testtw"), null);
}
public void testMake() throws Exception {
assertEquals(maker.make(NamePart.FIRSTNAME, Gender.MALE, Case.GENITIVE, "Ринат"), "Рината");
assertEquals(maker.make(NamePart.FIRSTNAME, Gender.MALE, Case.DATIVE, "Ринат"), "Ринату");
assertEquals(maker.make(NamePart.FIRSTNAME, Gender.MALE, Case.ACCUSATIVE, "Ринат"), "Рината");
assertEquals(maker.make(NamePart.FIRSTNAME, Gender.MALE, Case.INSTRUMENTAL, "Ринат"), "Ринатом");
assertEquals(maker.make(NamePart.FIRSTNAME, Gender.MALE, Case.PREPOSITIONAL, "Ринат"), "Ринате");
assertEquals(maker.make(NamePart.LASTNAME, Gender.MALE, Case.GENITIVE, "Мулюков"), "Мулюкова");
assertEquals(maker.make(NamePart.LASTNAME, Gender.MALE, Case.DATIVE, "Мулюков"), "Мулюкову");
assertEquals(maker.make(NamePart.LASTNAME, Gender.MALE, Case.ACCUSATIVE, "Мулюков"), "Мулюкова");
assertEquals(maker.make(NamePart.LASTNAME, Gender.MALE, Case.INSTRUMENTAL, "Мулюков"), "Мулюковым");
assertEquals(maker.make(NamePart.LASTNAME, Gender.MALE, Case.PREPOSITIONAL, "Мулюков"), "Мулюкове");
assertEquals(maker.make(NamePart.MIDDLENAME, Gender.MALE, Case.GENITIVE, "Рашитович"), "Рашитовича");
assertEquals(maker.make(NamePart.MIDDLENAME, Gender.MALE, Case.DATIVE, "Рашитович"), "Рашитовичу");
assertEquals(maker.make(NamePart.MIDDLENAME, Gender.MALE, Case.ACCUSATIVE, "Рашитович"), "Рашитовича");
assertEquals(maker.make(NamePart.MIDDLENAME, Gender.MALE, Case.INSTRUMENTAL, "Рашитович"), "Рашитовичем");
assertEquals(maker.make(NamePart.MIDDLENAME, Gender.MALE, Case.PREPOSITIONAL, "Рашитович"), "Рашитовиче");
}
public void testMake2() throws Exception {
assertEquals(maker.male.firstname().toGenitive("Ринат"), "Рината");
assertEquals(maker.male.firstname().toDative("Ринат"), "Ринату");
assertEquals(maker.male.firstname().toAccusative("Ринат"), "Рината");
assertEquals(maker.male.firstname().toInstrumental("Ринат"), "Ринатом");
assertEquals(maker.male.firstname().toPrepositional("Ринат"), "Ринате");
assertEquals(maker.male.lastname().toGenitive("Мулюков"), "Мулюкова");
assertEquals(maker.male.lastname().toDative("Мулюков"), "Мулюкову");
assertEquals(maker.male.lastname().toAccusative("Мулюков"), "Мулюкова");
assertEquals(maker.male.lastname().toInstrumental("Мулюков"), "Мулюковым");
assertEquals(maker.male.lastname().toPrepositional("Мулюков"), "Мулюкове");
assertEquals(maker.male.middlename().toGenitive("Рашитович"), "Рашитовича");
assertEquals(maker.male.middlename().toDative("Рашитович"), "Рашитовичу");
assertEquals(maker.male.middlename().toAccusative("Рашитович"), "Рашитовича");
assertEquals(maker.male.middlename().toInstrumental("Рашитович"), "Рашитовичем");
assertEquals(maker.male.middlename().toPrepositional("Рашитович"), "Рашитовиче");
}
@Test
public void testFemaleFirstname() throws Exception {
assertEquals(maker.female.firstname().toGenitive("Мария"), "Марии");
assertEquals(maker.female.firstname().toDative("Мария"), "Марии");
assertEquals(maker.female.firstname().toAccusative("Мария"), "Марию");
assertEquals(maker.female.firstname().toInstrumental("Мария"), "Марией");
assertEquals(maker.female.firstname().toPrepositional("Мария"), "Марии");
}
}
|
package org.joml.sampling;
import java.util.ArrayList;
import java.util.Random;
import org.joml.Vector2f;
import org.joml.Vector3f;
/**
* Creates samples using the "Best Candidate" algorithm.
*
* @author Kai Burjack
*/
public class BestCandidateSampling {
public static class Sphere {
/**
* Implementation of a Hierarchical Triangular Mesh structure to index the sample points on the unit sphere for accelerating 1-nearest neighbor searches.
*
* @author Kai Burjack
*/
private static final class Node {
private static final int MAX_OBJECTS_PER_NODE = 32;
private float v0x, v0y, v0z;
private float v1x, v1y, v1z;
private float v2x, v2y, v2z;
private float cx, cy, cz;
private float arc;
private ArrayList objects;
private Node[] children;
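/**
 * Root node: splits the unit sphere into the eight spherical triangles of a regular octahedron
 * (one per octant); these become the top-level children used for nearest-neighbor queries.
 */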
Node() {
this.children = new Node[8];
float s = 1f;
this.arc = 2.0f * (float) Math.PI;
this.children[0] = new Node(-s, 0, 0, 0, 0, s, 0, s, 0);
this.children[1] = new Node(0, 0, s, s, 0, 0, 0, s, 0);
this.children[2] = new Node(s, 0, 0, 0, 0, -s, 0, s, 0);
this.children[3] = new Node(0, 0, -s, -s, 0, 0, 0, s, 0);
this.children[4] = new Node(-s, 0, 0, 0, -s, 0, 0, 0, s);
this.children[5] = new Node(0, 0, s, 0, -s, 0, s, 0, 0);
this.children[6] = new Node(s, 0, 0, 0, -s, 0, 0, 0, -s);
this.children[7] = new Node(0, 0, -s, 0, -s, 0, -s, 0, 0);
}
private Node(float x0, float y0, float z0, float x1, float y1, float z1, float x2, float y2, float z2) {
this.v0x = x0;
this.v0y = y0;
this.v0z = z0;
this.v1x = x1;
this.v1y = y1;
this.v1z = z1;
this.v2x = x2;
this.v2y = y2;
this.v2z = z2;
cx = (v0x + v1x + v2x) / 3.0f;
cy = (v0y + v1y + v2y) / 3.0f;
cz = (v0z + v1z + v2z) / 3.0f;
float invCLen = 1.0f / (float) Math.sqrt(cx * cx + cy * cy + cz * cz);
cx *= invCLen;
cy *= invCLen;
cz *= invCLen;
// Compute maximum radius around triangle centroid
float arc1 = greatCircleDist(cx, cy, cz, v0x, v0y, v0z);
float arc2 = greatCircleDist(cx, cy, cz, v1x, v1y, v1z);
float arc3 = greatCircleDist(cx, cy, cz, v2x, v2y, v2z);
float dist = Math.max(Math.max(arc1, arc2), arc3);
/*
* Convert radius into diameter, but also take into account the linear
* arccos approximation we use.
* This value was obtained empirically!
*/
dist *= 1.7f;
this.arc = dist;
}
private void split() {
float w0x = v1x + v2x;
float w0y = v1y + v2y;
float w0z = v1z + v2z;
float len0 = 1.0f / (float) Math.sqrt(w0x * w0x + w0y * w0y + w0z * w0z);
w0x *= len0;
w0y *= len0;
w0z *= len0;
float w1x = v0x + v2x;
float w1y = v0y + v2y;
float w1z = v0z + v2z;
float len1 = 1.0f / (float) Math.sqrt(w1x * w1x + w1y * w1y + w1z * w1z);
w1x *= len1;
w1y *= len1;
w1z *= len1;
float w2x = v0x + v1x;
float w2y = v0y + v1y;
float w2z = v0z + v1z;
float len2 = 1.0f / (float) Math.sqrt(w2x * w2x + w2y * w2y + w2z * w2z);
w2x *= len2;
w2y *= len2;
w2z *= len2;
children = new Node[4];
children[0] = new Node(v0x, v0y, v0z, w2x, w2y, w2z, w1x, w1y, w1z);
children[1] = new Node(v1x, v1y, v1z, w0x, w0y, w0z, w2x, w2y, w2z);
children[2] = new Node(v2x, v2y, v2z, w1x, w1y, w1z, w0x, w0y, w0z);
children[3] = new Node(w0x, w0y, w0z, w1x, w1y, w1z, w2x, w2y, w2z);
}
private void insertIntoChild(Vector3f o) {
for (int i = 0; i < children.length; i++) {
Node c = children[i];
if (isPointOnSphericalTriangle(o.x, o.y, o.z, c.v0x, c.v0y, c.v0z, c.v1x, c.v1y, c.v1z, c.v2x, c.v2y, c.v2z, 1E-6f)) {
c.insert(o);
return;
}
}
}
void insert(Vector3f object) {
if (children != null) {
insertIntoChild(object);
return;
}
if (objects != null && objects.size() == MAX_OBJECTS_PER_NODE) {
split();
for (int i = 0; i < MAX_OBJECTS_PER_NODE; i++) {
insertIntoChild((Vector3f) objects.get(i));
}
objects = null;
insertIntoChild(object);
} else {
if (objects == null)
objects = new ArrayList(MAX_OBJECTS_PER_NODE);
objects.add(object);
}
}
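/**
 * Essentially a Moeller-Trumbore ray/triangle intersection test with the ray starting at the
 * sphere's center and pointing towards (x, y, z): it reports whether that direction passes through
 * the planar triangle (v0, v1, v2), i.e. whether the point lies on the corresponding spherical triangle.
 */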
private static boolean isPointOnSphericalTriangle(float x, float y, float z, float v0X, float v0Y, float v0Z, float v1X, float v1Y, float v1Z, float v2X, float v2Y,
float v2Z, float epsilon) {
float edge1X = v1X - v0X;
float edge1Y = v1Y - v0Y;
float edge1Z = v1Z - v0Z;
float edge2X = v2X - v0X;
float edge2Y = v2Y - v0Y;
float edge2Z = v2Z - v0Z;
float pvecX = y * edge2Z - z * edge2Y;
float pvecY = z * edge2X - x * edge2Z;
float pvecZ = x * edge2Y - y * edge2X;
float det = edge1X * pvecX + edge1Y * pvecY + edge1Z * pvecZ;
if (det > -epsilon && det < epsilon)
return false;
float tvecX = -v0X;
float tvecY = -v0Y;
float tvecZ = -v0Z;
float invDet = 1.0f / det;
float u = (tvecX * pvecX + tvecY * pvecY + tvecZ * pvecZ) * invDet;
if (u < 0.0f || u > 1.0f)
return false;
float qvecX = tvecY * edge1Z - tvecZ * edge1Y;
float qvecY = tvecZ * edge1X - tvecX * edge1Z;
float qvecZ = tvecX * edge1Y - tvecY * edge1X;
float v = (x * qvecX + y * qvecY + z * qvecZ) * invDet;
if (v < 0.0f || u + v > 1.0f)
return false;
float t = (edge2X * qvecX + edge2Y * qvecY + edge2Z * qvecZ) * invDet;
return t >= epsilon;
}
private int child(float x, float y, float z) {
for (int i = 0; i < children.length; i++) {
Node c = children[i];
if (isPointOnSphericalTriangle(x, y, z, c.v0x, c.v0y, c.v0z, c.v1x, c.v1y, c.v1z, c.v2x, c.v2y, c.v2z, 1E-5f)) {
return i;
}
}
// No child found. This can happen in 'nearest()' when querying possible nearby nodes
return 0;
}
private float greatCircleDist(float x1, float y1, float z1, float x2, float y2, float z2) {
float dot = x1 * x2 + y1 * y2 + z1 * z2;
/*
* Just use a linear function, because we (mostly) do less-than comparisons on the result.
* We just need a linear function which:
* f(-1) = PI
* f(0) = PI/2
* f(1) = 0
*/
return (float) (-(Math.PI * 0.5) * dot + (Math.PI * 0.5));
//return (float) Math.acos(dot);
}
float nearest(float x, float y, float z, float n) {
float gcd = greatCircleDist(x, y, z, cx, cy, cz);
/*
* If great-circle-distance between query point and centroid is larger than the current smallest distance 'n' plus the great circle diameter 'arc', we abort here,
* because then it is not possible for any point in the triangle patch to be closer to the query point than 'n'.
*/
/*
 * Yes, we are subtracting two great-circle distances from one another here, even though neither was
 * computed exactly (we use a crude linear arccos approximation). The 1.7 factor above keeps the estimate
 * conservative enough that we do not reject triangle patches which could still contain samples nearer than 'n'.
 */
if (gcd - arc > n)
return n;
float nr = n;
if (children != null) {
if (children.length == 8) {
for (int i = child(x, y, z), c = 0; c < 8; i = (i + 1) & 7, c++) {
float n1 = children[i].nearest(x, y, z, nr);
nr = Math.min(n1, nr);
}
} else {
for (int i = child(x, y, z), c = 0; c < 4; i = (i + 1) & 3, c++) {
float n1 = children[i].nearest(x, y, z, nr);
nr = Math.min(n1, nr);
}
}
return nr;
}
for (int i = 0; objects != null && i < objects.size(); i++) {
Vector3f o = (Vector3f) objects.get(i);
float d = greatCircleDist(o.x, o.y, o.z, x, y, z);
if (d < nr) {
nr = d;
}
}
return nr;
}
}
private final Random rnd;
private final Node otree;
/**
* Create a new instance of {@link Sphere}, initialize the random number generator with the given <code>seed</code>, generate <code>numSamples</code>
* 'best candidate' sample positions on the unit sphere (trying <code>numCandidates</code> candidates for each sample), and call the given
* <code>callback</code> for each sample generated.
*
* @param seed
* the seed to initialize the random number generator with
* @param numSamples
* the number of samples to generate
* @param numCandidates
* the number of candidates to test for each sample
* @param callback
* will be called for each sample generated
*/
public Sphere(long seed, int numSamples, int numCandidates, Callback3d callback) {
this.rnd = new Random(seed);
this.otree = new Node();
compute(numSamples, numCandidates, callback);
}
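// Usage sketch, assuming Callback3d is a single-method interface in this package declaring
// onNewSample(float, float, float) (as it is invoked in compute() below):
//
//   List<Vector3f> samples = new ArrayList<>();
//   new BestCandidateSampling.Sphere(1234L, 256, 20,
//           (x, y, z) -> samples.add(new Vector3f(x, y, z)));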
private void compute(int numSamples, int numCandidates, Callback3d callback) {
for (int i = 0; i < numSamples; i++) {
float bestX = 0, bestY = 0, bestZ = 0, bestDist = 0.0f;
for (int c = 0; c < numCandidates; c++) {
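// Marsaglia's method for a uniformly distributed point on the unit sphere: draw (x1, x2)
// uniformly from the unit disk by rejection sampling, then map it to (x, y, z) below.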
float x1, x2;
do {
x1 = rnd.nextFloat() * 2.0f - 1.0f;
x2 = rnd.nextFloat() * 2.0f - 1.0f;
} while (x1 * x1 + x2 * x2 > 1.0f);
float sqrt = (float) Math.sqrt(1.0 - x1 * x1 - x2 * x2);
float x = 2 * x1 * sqrt;
float y = 2 * x2 * sqrt;
float z = 1.0f - 2.0f * (x1 * x1 + x2 * x2);
float minDist = otree.nearest(x, y, z, Float.POSITIVE_INFINITY);
if (minDist > bestDist) {
bestDist = minDist;
bestX = x;
bestY = y;
bestZ = z;
}
}
callback.onNewSample(bestX, bestY, bestZ);
otree.insert(new Vector3f(bestX, bestY, bestZ));
}
}
}
/**
* Simple quadtree that can handle points and 1-nearest-neighbor search.
*
* @author Kai Burjack
*/
private static class QuadTree {
private static final int MAX_OBJECTS_PER_NODE = 32;
// Constants for the quadrants of the quadtree
private static final int PXNY = 0;
private static final int NXNY = 1;
private static final int NXPY = 2;
private static final int PXPY = 3;
private float minX, minY, hs;
private ArrayList objects;
private QuadTree[] children;
QuadTree(float minX, float minY, float size) {
this.minX = minX;
this.minY = minY;
this.hs = size * 0.5f;
}
private void split() {
children = new QuadTree[4];
children[NXNY] = new QuadTree(minX, minY, hs);
children[PXNY] = new QuadTree(minX + hs, minY, hs);
children[NXPY] = new QuadTree(minX, minY + hs, hs);
children[PXPY] = new QuadTree(minX + hs, minY + hs, hs);
}
private void insertIntoChild(Vector2f o) {
float xm = minX + hs;
float ym = minY + hs;
if (o.x >= xm) {
if (o.y >= ym) {
children[PXPY].insert(o);
} else {
children[PXNY].insert(o);
}
} else {
if (o.y >= ym) {
children[NXPY].insert(o);
} else {
children[NXNY].insert(o);
}
}
}
void insert(Vector2f object) {
if (children != null) {
insertIntoChild(object);
return;
}
if (objects != null && objects.size() == MAX_OBJECTS_PER_NODE) {
split();
for (int i = 0; i < objects.size(); i++) {
insertIntoChild((Vector2f) objects.get(i));
}
objects = null;
insertIntoChild(object);
} else {
if (objects == null)
objects = new ArrayList(32);
objects.add(object);
}
}
private int quadrant(float x, float y) {
if (x < minX + hs) {
if (y < minY + hs)
return NXNY;
return NXPY;
}
if (y < minY + hs)
return PXNY;
return PXPY;
}
float nearest(float x, float y, float n) {
float nr = n;
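// Prune this node when the query point lies more than 'n' outside the node's square
// [minX, minX + 2*hs] x [minY, minY + 2*hs]; no point stored below it can then be closer than 'n'.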
if (x < minX - n || x > minX + hs * 2 + n || y < minY - n || y > minY + hs * 2 + n) {
return nr;
}
if (children != null) {
for (int i = quadrant(x, y), c = 0; c < 4; i = (i + 1) & 3, c++) {
float n1 = children[i].nearest(x, y, nr);
nr = Math.min(n1, nr);
}
return nr;
}
float nr2 = nr * nr;
for (int i = 0; objects != null && i < objects.size(); i++) {
Vector2f o = (Vector2f) objects.get(i);
float d = o.distanceSquared(x, y);
if (d < nr2) {
nr2 = d;
}
}
return (float) Math.sqrt(nr2);
}
}
/**
* Generates Best Candidate samples on a unit disk.
*
* @author Kai Burjack
*/
public static class Disk {
private final Random rnd;
private final QuadTree qtree;
/**
* Create a new instance of {@link Disk}, initialize the random number generator with the given <code>seed</code>, generate <code>numSamples</code>
* 'best candidate' sample positions on the unit disk (trying <code>numCandidates</code> candidates for each sample), and call the given
* <code>callback</code> for each sample generated.
*
* @param seed
* the seed to initialize the random number generator with
* @param numSamples
* the number of samples to generate
* @param numCandidates
* the number of candidates to test for each sample
* @param callback
* will be called for each sample generated
*/
public Disk(long seed, int numSamples, int numCandidates, Callback2d callback) {
this.rnd = new Random(seed);
this.qtree = new QuadTree(-1, -1, 2);
generate(numSamples, numCandidates, callback);
}
private void generate(int numSamples, int numCandidates, Callback2d callback) {
for (int i = 0; i < numSamples; i++) {
float bestX = 0, bestY = 0, bestDist = 0.0f;
for (int c = 0; c < numCandidates; c++) {
float x, y;
do {
x = rnd.nextFloat() * 2.0f - 1.0f;
y = rnd.nextFloat() * 2.0f - 1.0f;
} while (x * x + y * y > 1.0f);
float minDist = qtree.nearest(x, y, Float.POSITIVE_INFINITY);
if (minDist > bestDist) {
bestDist = minDist;
bestX = x;
bestY = y;
}
}
callback.onNewSample(bestX, bestY);
qtree.insert(new Vector2f(bestX, bestY));
}
}
}
/**
* Generates Best Candidate samples on a unit quad.
*
* @author Kai Burjack
*/
public static class Quad {
private final Random rnd;
private final QuadTree qtree;
/**
* Create a new instance of {@link Quad}, initialize the random number generator with the given <code>seed</code>, generate <code>numSamples</code>
* 'best candidate' sample positions on the unit quad (trying <code>numCandidates</code> candidates for each sample), and call the given
* <code>callback</code> for each sample generated.
*
* @param seed
* the seed to initialize the random number generator with
* @param numSamples
* the number of samples to generate
* @param numCandidates
* the number of candidates to test for each sample
* @param callback
* will be called for each sample generated
*/
public Quad(long seed, int numSamples, int numCandidates, Callback2d callback) {
this.rnd = new Random(seed);
this.qtree = new QuadTree(-1, -1, 2);
generate(numSamples, numCandidates, callback);
}
private void generate(int numSamples, int numCandidates, Callback2d callback) {
for (int i = 0; i < numSamples; i++) {
float bestX = 0, bestY = 0, bestDist = 0.0f;
for (int c = 0; c < numCandidates; c++) {
float x = rnd.nextFloat() * 2.0f - 1.0f;
float y = rnd.nextFloat() * 2.0f - 1.0f;
float minDist = qtree.nearest(x, y, Float.POSITIVE_INFINITY);
if (minDist > bestDist) {
bestDist = minDist;
bestX = x;
bestY = y;
}
}
callback.onNewSample(bestX, bestY);
qtree.insert(new Vector2f(bestX, bestY));
}
}
}
}
|
package de.danoeh.antennapod.core.service.download;
import android.text.TextUtils;
import android.util.Log;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Protocol;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;
import com.squareup.okhttp.ResponseBody;
import org.apache.commons.io.IOUtils;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.Date;
import de.danoeh.antennapod.core.ClientConfig;
import de.danoeh.antennapod.core.R;
import de.danoeh.antennapod.core.feed.FeedImage;
import de.danoeh.antennapod.core.feed.FeedMedia;
import de.danoeh.antennapod.core.util.DateUtils;
import de.danoeh.antennapod.core.util.DownloadError;
import de.danoeh.antennapod.core.util.StorageUtils;
import de.danoeh.antennapod.core.util.URIUtil;
import okio.ByteString;
public class HttpDownloader extends Downloader {
private static final String TAG = "HttpDownloader";
private static final int BUFFER_SIZE = 8 * 1024;
public HttpDownloader(DownloadRequest request) {
super(request);
}
@Override
protected void download() {
File destination = new File(request.getDestination());
final boolean fileExists = destination.exists();
if (request.isDeleteOnFailure() && fileExists) {
Log.w(TAG, "File already exists");
if (request.getFeedfileType() != FeedImage.FEEDFILETYPE_FEEDIMAGE) {
onFail(DownloadError.ERROR_FILE_EXISTS, null);
return;
} else {
onSuccess();
return;
}
}
OkHttpClient httpClient = AntennapodHttpClient.getHttpClient();
RandomAccessFile out = null;
InputStream connection;
ResponseBody responseBody = null;
try {
final URI uri = URIUtil.getURIFromRequestUrl(request.getSource());
Request.Builder httpReq = new Request.Builder().url(uri.toURL())
.header("User-Agent", ClientConfig.USER_AGENT);
if(request.getFeedfileType() == FeedMedia.FEEDFILETYPE_FEEDMEDIA) {
// set header explicitly so that okhttp doesn't do transparent gzip
Log.d(TAG, "addHeader(\"Accept-Encoding\", \"identity\")");
httpReq.addHeader("Accept-Encoding", "identity");
}
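// Conditional request handling: if the stored "last modified" value parses as a date, send
// If-Modified-Since (only when it is less than three days old, so stale feeds are re-fetched in full);
// otherwise treat the stored value as an entity tag and send If-None-Match.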
if(!TextUtils.isEmpty(request.getLastModified())) {
String lastModified = request.getLastModified();
Date lastModifiedDate = DateUtils.parse(lastModified);
if(lastModifiedDate != null) {
long threeDaysAgo = System.currentTimeMillis() - 1000 * 60 * 60 * 24 * 3;
if (lastModifiedDate.getTime() > threeDaysAgo) {
Log.d(TAG, "addHeader(\"If-Modified-Since\", \"" + lastModified + "\")");
httpReq.addHeader("If-Modified-Since", lastModified);
}
} else {
Log.d(TAG, "addHeader(\"If-None-Match\", \"" + lastModified + "\")");
httpReq.addHeader("If-None-Match", lastModified);
}
}
// add authentication information
String userInfo = uri.getUserInfo();
if (userInfo != null) {
String[] parts = userInfo.split(":");
if (parts.length == 2) {
String credentials = encodeCredentials(parts[0], parts[1], "ISO-8859-1");
httpReq.header("Authorization", credentials);
}
} else if (!TextUtils.isEmpty(request.getUsername()) && request.getPassword() != null) {
String credentials = encodeCredentials(request.getUsername(), request.getPassword(),
"ISO-8859-1");
httpReq.header("Authorization", credentials);
}
// add range header if necessary
if (fileExists) {
request.setSoFar(destination.length());
httpReq.addHeader("Range", "bytes=" + request.getSoFar() + "-");
Log.d(TAG, "Adding range header: " + request.getSoFar());
}
Response response;
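// If the call fails with a PROTOCOL_ERROR (typically an HTTP/2 issue), restrict the client to
// HTTP/1.1 and retry the request once; any other IOException is rethrown.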
try {
response = httpClient.newCall(httpReq.build()).execute();
} catch(IOException e) {
Log.e(TAG, e.toString());
if(e.getMessage() != null && e.getMessage().contains("PROTOCOL_ERROR")) {
httpClient.setProtocols(Collections.singletonList(Protocol.HTTP_1_1));
response = httpClient.newCall(httpReq.build()).execute();
}
else {
throw e;
}
}
responseBody = response.body();
String contentEncodingHeader = response.header("Content-Encoding");
boolean isGzip = false;
if(!TextUtils.isEmpty(contentEncodingHeader)) {
isGzip = TextUtils.equals(contentEncodingHeader.toLowerCase(), "gzip");
}
Log.d(TAG, "Response code is " + response.code());
if(!response.isSuccessful() && response.code() == HttpURLConnection.HTTP_UNAUTHORIZED) {
Log.d(TAG, "Authorization failed, re-trying with UTF-8 encoding");
if (userInfo != null) {
String[] parts = userInfo.split(":");
if (parts.length == 2) {
String credentials = encodeCredentials(parts[0], parts[1], "UTF-8");
httpReq.header("Authorization", credentials);
}
} else if (!TextUtils.isEmpty(request.getUsername()) && request.getPassword() != null) {
String credentials = encodeCredentials(request.getUsername(), request.getPassword(),
"UTF-8");
httpReq.header("Authorization", credentials);
}
response = httpClient.newCall(httpReq.build()).execute();
responseBody = response.body();
contentEncodingHeader = response.header("Content-Encoding");
if(!TextUtils.isEmpty(contentEncodingHeader)) {
isGzip = TextUtils.equals(contentEncodingHeader.toLowerCase(), "gzip");
}
}
if(!response.isSuccessful() && response.code() == HttpURLConnection.HTTP_NOT_MODIFIED) {
Log.d(TAG, "Feed '" + request.getSource() + "' not modified since last update, Download canceled");
onCancelled();
return;
}
if (!response.isSuccessful() || response.body() == null) {
final DownloadError error;
final String details;
if (response.code() == HttpURLConnection.HTTP_UNAUTHORIZED) {
error = DownloadError.ERROR_UNAUTHORIZED;
details = String.valueOf(response.code());
} else if(response.code() == HttpURLConnection.HTTP_FORBIDDEN) {
error = DownloadError.ERROR_FORBIDDEN;
details = String.valueOf(response.code());
} else {
error = DownloadError.ERROR_HTTP_DATA_ERROR;
details = String.valueOf(response.code());
}
onFail(error, details);
return;
}
if (!StorageUtils.storageAvailable()) {
onFail(DownloadError.ERROR_DEVICE_NOT_FOUND, null);
return;
}
if(request.getFeedfileType() == FeedMedia.FEEDFILETYPE_FEEDMEDIA) {
String contentType = response.header("Content-Type");
Log.d(TAG, "content type: " + contentType);
if(contentType != null && contentType.startsWith("text/")) {
onFail(DownloadError.ERROR_FILE_TYPE, null);
return;
}
}
connection = new BufferedInputStream(responseBody.byteStream());
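// Resume support: when a partial file exists and the server replied 206 Partial Content with a
// Content-Range header ("bytes <start>-<end>/<total>"), parse the start offset and continue writing
// from there; otherwise recreate the destination file and start from the beginning.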
String contentRangeHeader = (fileExists) ? response.header("Content-Range") : null;
if (fileExists && response.code() == HttpURLConnection.HTTP_PARTIAL
&& !TextUtils.isEmpty(contentRangeHeader)) {
String start = contentRangeHeader.substring("bytes ".length(),
contentRangeHeader.indexOf("-"));
request.setSoFar(Long.parseLong(start));
Log.d(TAG, "Starting download at position " + request.getSoFar());
out = new RandomAccessFile(destination, "rw");
out.seek(request.getSoFar());
} else {
destination.delete();
destination.createNewFile();
out = new RandomAccessFile(destination, "rw");
}
byte[] buffer = new byte[BUFFER_SIZE];
int count = 0;
request.setStatusMsg(R.string.download_running);
Log.d(TAG, "Getting size of download");
request.setSize(responseBody.contentLength() + request.getSoFar());
Log.d(TAG, "Size is " + request.getSize());
if (request.getSize() < 0) {
request.setSize(DownloadStatus.SIZE_UNKNOWN);
}
long freeSpace = StorageUtils.getFreeSpaceAvailable();
Log.d(TAG, "Free space is " + freeSpace);
if (request.getSize() != DownloadStatus.SIZE_UNKNOWN && request.getSize() > freeSpace) {
onFail(DownloadError.ERROR_NOT_ENOUGH_SPACE, null);
return;
}
Log.d(TAG, "Starting download");
try {
while (!cancelled && (count = connection.read(buffer)) != -1) {
out.write(buffer, 0, count);
request.setSoFar(request.getSoFar() + count);
int progressPercent = (int)(100.0 * request.getSoFar() / request.getSize());
request.setProgressPercent(progressPercent);
}
} catch(IOException e) {
Log.e(TAG, Log.getStackTraceString(e));
}
if (cancelled) {
onCancelled();
} else {
// check if size specified in the response header is the same as the size of the
// written file. This check cannot be made if compression was used
if (!isGzip && request.getSize() != DownloadStatus.SIZE_UNKNOWN &&
request.getSoFar() != request.getSize()) {
onFail(DownloadError.ERROR_IO_ERROR, "Download completed but size: " +
request.getSoFar() + " does not equal expected size " + request.getSize());
return;
} else if(request.getSize() > 0 && request.getSoFar() == 0){
onFail(DownloadError.ERROR_IO_ERROR, "Download completed, but nothing was read");
return;
}
String lastModified = response.header("Last-Modified");
if(lastModified != null) {
request.setLastModified(lastModified);
} else {
request.setLastModified(response.header("ETag"));
}
onSuccess();
}
} catch (IllegalArgumentException e) {
e.printStackTrace();
onFail(DownloadError.ERROR_MALFORMED_URL, e.getMessage());
} catch (SocketTimeoutException e) {
e.printStackTrace();
onFail(DownloadError.ERROR_CONNECTION_ERROR, e.getMessage());
} catch (UnknownHostException e) {
e.printStackTrace();
onFail(DownloadError.ERROR_UNKNOWN_HOST, e.getMessage());
} catch (IOException e) {
e.printStackTrace();
onFail(DownloadError.ERROR_IO_ERROR, e.getMessage());
} catch (NullPointerException e) {
// might be thrown by connection.getInputStream()
e.printStackTrace();
onFail(DownloadError.ERROR_CONNECTION_ERROR, request.getSource());
} finally {
IOUtils.closeQuietly(out);
AntennapodHttpClient.cleanup();
IOUtils.closeQuietly(responseBody);
}
}
private void onSuccess() {
Log.d(TAG, "Download was successful");
result.setSuccessful();
}
private void onFail(DownloadError reason, String reasonDetailed) {
Log.d(TAG, "onFail() called with: " + "reason = [" + reason + "], " +
"reasonDetailed = [" + reasonDetailed + "]");
result.setFailed(reason, reasonDetailed);
if (request.isDeleteOnFailure()) {
cleanup();
}
}
private void onCancelled() {
Log.d(TAG, "Download was cancelled");
result.setCancelled();
cleanup();
}
/**
* Deletes unfinished downloads.
*/
private void cleanup() {
if (request.getDestination() != null) {
File dest = new File(request.getDestination());
if (dest.exists()) {
boolean rc = dest.delete();
Log.d(TAG, "Deleted file " + dest.getName() + "; Result: "
+ rc);
} else {
Log.d(TAG, "cleanup() didn't delete file: does not exist.");
}
}
}
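/**
 * Builds an HTTP Basic authorization header value: {@code "Basic "} followed by the Base64
 * encoding of {@code username + ":" + password} in the given charset.
 */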
public static String encodeCredentials(String username, String password, String charset) {
try {
String credentials = username + ":" + password;
byte[] bytes = credentials.getBytes(charset);
String encoded = ByteString.of(bytes).base64();
return "Basic " + encoded;
} catch (UnsupportedEncodingException e) {
throw new AssertionError();
}
}
}
|