// $Id: Protocol.java,v 1.11 2004/04/21 11:57:00 yaron-r Exp $
package org.jgroups.stack;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jgroups.Event;
import org.jgroups.util.Queue;
import org.jgroups.util.QueueClosedException;
import org.jgroups.util.Util;
import EDU.oswego.cs.dl.util.concurrent.Mutex;
import java.util.Properties;
import java.util.Vector;
class UpHandler extends Thread {
private Queue mq = null;
private Protocol handler = null;
private ProtocolObserver observer = null;
protected Log log = LogFactory.getLog(this.getClass());
public UpHandler(Queue mq, Protocol handler, ProtocolObserver observer) {
this.mq = mq;
this.handler = handler;
this.observer = observer;
if (handler != null) {
setName("UpHandler (" + handler.getName() + ")");
} else {
setName("UpHandler");
}
setDaemon(true);
}
public void setObserver(ProtocolObserver observer) {
this.observer = observer;
}
/**
* Removes events from mq and calls handler.up(evt)
*/
public void run() {
Event evt;
while (!mq.closed()) {
try {
handler.blockUntilUpReady(); //If 'up' is not ready, block.
evt = (Event) mq.remove();
if (evt == null) {
if (log.isWarnEnabled()) {
log.warn("removed null event");
}
continue;
}
if (observer != null) { // call debugger hook (if installed)
if (observer.up(evt, mq.size()) == false) { // false means discard event
continue;
}
}
handler.up(evt);
evt = null;
} catch (QueueClosedException queue_closed) {
break;
} catch (Throwable e) {
if (log.isWarnEnabled()) {
log.warn(getName() + " exception: " + e);
}
e.printStackTrace();
}
}
}
}
class DownHandler extends Thread {
private Queue mq = null;
private Protocol handler = null;
private ProtocolObserver observer = null;
protected Log log = LogFactory.getLog(this.getClass());
public DownHandler(Queue mq, Protocol handler, ProtocolObserver observer) {
this.mq = mq;
this.handler = handler;
this.observer = observer;
if (handler != null) {
setName("DownHandler (" + handler.getName() + ")");
} else {
setName("DownHandler");
}
setDaemon(true);
}
public void setObserver(ProtocolObserver observer) {
this.observer = observer;
}
/**
* Removes events from mq and calls handler.down(evt)
*/
public void run() {
Event evt;
while (!mq.closed()) {
try {
handler.blockUntilDownReady(); //If 'down' is not ready, block.
evt = (Event) mq.remove();
if (evt == null) {
if (log.isWarnEnabled()) {
log.warn("removed null event");
}
continue;
}
if (observer != null) { // call debugger hook (if installed)
if (observer.down(evt, mq.size()) == false) { // false means discard event
continue;
}
}
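// Let the protocol itself react to ACK, START and STOP before they travel
// further; a 'false' return from handleSpecialDownEvent() means the event
// was consumed and must not be passed down.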
int type = evt.getType();
if (type == Event.ACK || type == Event.START || type == Event.STOP) {
if (handler.handleSpecialDownEvent(evt) == false) {
continue;
}
}
handler.down(evt);
evt = null;
} catch (QueueClosedException queue_closed) {
break;
} catch (Throwable e) {
if (log.isWarnEnabled()) {
log.warn(getName() + " exception: " + e);
}
e.printStackTrace();
}
}
}
}
/**
* The Protocol class provides a set of common services for protocol layers. Each layer has to be a subclass of Protocol
* and override a number of methods (typically just <code>up()</code>, <code>down()</code> and <code>getName()</code>).
* Layers are stacked in a certain order to form a protocol stack. <a href=org.jgroups.Event.html>Events</a> are passed
* from lower layers to upper ones and vice versa. E.g. a Message received by the UDP layer at the bottom will be passed
* to its higher layer as an Event. That layer will in turn pass the Event to the layer above it, and so on, until a layer
* handles the Message and sends a response or discards it, the former resulting in another Event being passed down the
* stack.<p> Each layer has 2 FIFO queues, one for up Events and one for down Events. When an Event is received by a
* layer (via the internal upcall <code>receiveUpEvent()</code>), it is placed in the up-queue where it will be
* retrieved by the up-handler thread, which will invoke method <code>up()</code> of the layer. The same applies for Events
* traveling down the stack. Handling of the up-handler and down-handler threads and the 2 FIFO queues is done by the
* Protocol class; subclasses will almost never have to override this behavior.<p> The important thing to bear in mind
* is that Events have to be passed on between layers in FIFO order, which is guaranteed by the Protocol implementation and
* must be guaranteed by subclasses implementing their own Event queueing.<p> <b>Note that each class implementing
* interface Protocol MUST provide an empty, public constructor!</b>
*/
public abstract class Protocol {
protected Properties props = null;
protected Protocol up_prot = null, down_prot = null;
protected ProtocolStack stack = null;
protected Queue up_queue = new Queue(), down_queue = new Queue();
protected UpHandler up_handler = null;
protected int up_thread_prio = -1;
protected DownHandler down_handler = null;
protected int down_thread_prio = -1;
protected ProtocolObserver observer = null; // hook for debugger
private final long THREAD_JOIN_TIMEOUT = 1000;
protected boolean down_thread = true; // determines whether the down_handler thread should be started
protected boolean up_thread = true; // determines whether the up_handler thread should be started
protected Log log = LogFactory.getLog(this.getClass());
private final Mutex m_upLock = new Mutex(); // obtain lock if not ready for up, release when ready
private final Mutex m_downLock = new Mutex(); // obtain lock if not ready for down, release when ready
/**
* Configures the protocol initially. A configuration string consists of name=value
* items, separated by a ';' (semicolon), e.g.:<pre>
* "loopback=false;unicast_inport=4444"
* </pre>
*/
public boolean setProperties(Properties props) {
this.props = props;
return true;
}
/**
* Called by Configurator. Removes 2 properties which are used by the Protocol directly and then calls setProperties(),
* which might invoke the setProperties() method of the actual protocol instance.
*/
public boolean setPropertiesInternal(Properties props) {
String str;
this.props = (Properties) props.clone();
str = props.getProperty("down_thread");
if (str != null) {
down_thread = new Boolean(str).booleanValue();
props.remove("down_thread");
}
str = props.getProperty("down_thread_prio");
if (str != null) {
down_thread_prio = Integer.parseInt(str);
props.remove("down_thread_prio");
}
str = props.getProperty("up_thread");
if (str != null) {
up_thread = new Boolean(str).booleanValue();
props.remove("up_thread");
}
str = props.getProperty("up_thread_prio");
if (str != null) {
up_thread_prio = Integer.parseInt(str);
props.remove("up_thread_prio");
}
return setProperties(props);
}
public Properties getProperties() {
return props;
}
public void setObserver(ProtocolObserver observer) {
this.observer = observer;
observer.setProtocol(this);
if (up_handler != null) {
up_handler.setObserver(observer);
}
if (down_handler != null) {
down_handler.setObserver(observer);
}
}
/**
* Called after instance has been created (null constructor) and before protocol is started. Properties are already
* set. Other protocols are not yet connected and events cannot yet be sent.
*
* @throws Exception Thrown if protocol cannot be initialized successfully. This will cause the ProtocolStack to fail,
* so the channel constructor will throw an exception
*/
public void init() throws Exception {
}
/**
* This method is called on a {@link org.jgroups.Channel#connect(String)}. Starts work. Protocols are connected and
* queues are ready to receive events. Will be called <em>from bottom to top</em>. This call will replace the
* <b>START</b> and <b>START_OK</b> events.
*
* @throws Exception Thrown if protocol cannot be started successfully. This will cause the ProtocolStack to fail, so
* {@link org.jgroups.Channel#connect(String)} will throw an exception
*/
public void start() throws Exception {
}
/**
* This method is called on a {@link org.jgroups.Channel#disconnect()}. Stops work (e.g. by closing multicast socket).
* Will be called <em>from top to bottom</em>. This means that at the time of the method invocation the neighbor
* protocol below is still working. This method will replace the <b>STOP</b>, <b>STOP_OK</b>, <b>CLEANUP</b> and
* <b>CLEANUP_OK</b> events. The ProtocolStack guarantees that when this method is called all messages in the down
* queue will have been flushed
*/
public void stop() {
}
/**
* This method is called on a {@link org.jgroups.Channel#close()}. Does some cleanup; after the call the VM will
* terminate
*/
public void destroy() {
}
public Queue getUpQueue() {
return up_queue;
} // used by Debugger (ProtocolView)
public Queue getDownQueue() {
return down_queue;
} // used by Debugger (ProtocolView)
/**
* List of events that are required to be answered by some layer above.
*
* @return Vector (of Integers)
*/
public Vector requiredUpServices() {
return null;
}
/**
* List of events that are required to be answered by some layer below.
*
* @return Vector (of Integers)
*/
public Vector requiredDownServices() {
return null;
}
/**
* List of events that are provided to layers above (they will be handled when sent down from above).
*
* @return Vector (of Integers)
*/
public Vector providedUpServices() {
return null;
}
/**
* List of events that are provided to layers below (they will be handled when sent down from below).
*
* @return Vector (of Integers)
*/
public Vector providedDownServices() {
return null;
}
public abstract String getName(); // all protocol names have to be unique !
public Protocol getUpProtocol() {
return up_prot;
}
public Protocol getDownProtocol() {
return down_prot;
}
public void setUpProtocol(Protocol up_prot) {
this.up_prot = up_prot;
}
public void setDownProtocol(Protocol down_prot) {
this.down_prot = down_prot;
}
public void setProtocolStack(ProtocolStack stack) {
this.stack = stack;
}
/**
* Used internally. If overridden, call this method first. Only creates the up_handler thread if up_thread is true
*/
public void startUpHandler() {
if (up_thread) {
if (up_handler == null) {
up_handler = new UpHandler(up_queue, this, observer);
if (up_thread_prio >= 0) {
try {
up_handler.setPriority(up_thread_prio);
} catch (Throwable t) {
if (log.isErrorEnabled()) {
log.error("priority " + up_thread_prio +
" could not be set for thread: " + Util.getStackTrace(t));
}
}
}
up_handler.start();
}
}
}
/**
* Used internally. If overridden, call this method first. Only creates the down_handler thread if down_thread is true
*/
public void startDownHandler() {
if (down_thread) {
if (down_handler == null) {
down_handler = new DownHandler(down_queue, this, observer);
if (down_thread_prio >= 0) {
try {
down_handler.setPriority(down_thread_prio);
} catch (Throwable t) {
if (log.isErrorEnabled()) {
log.error("priority " + down_thread_prio +
" could not be set for thread: " + Util.getStackTrace(t));
}
}
}
down_handler.start();
}
}
}
/**
* Used internally. If overridden, call parent's method first
*/
public void stopInternal() {
up_queue.close(false); // this should terminate up_handler thread
if (up_handler != null && up_handler.isAlive()) {
try {
up_handler.join(THREAD_JOIN_TIMEOUT);
} catch (Exception ex) {
}
if (up_handler != null && up_handler.isAlive()) {
up_handler.interrupt(); // still alive ? let's just kill it without mercy...
try {
up_handler.join(THREAD_JOIN_TIMEOUT);
} catch (Exception ex) {
}
if (up_handler != null && up_handler.isAlive()) {
if (log.isErrorEnabled()) {
log.error("up_handler thread for " + getName() +
" was interrupted (in order to be terminated), but is still alive");
}
}
}
}
up_handler = null;
down_queue.close(false); // this should terminate down_handler thread
if (down_handler != null && down_handler.isAlive()) {
try {
down_handler.join(THREAD_JOIN_TIMEOUT);
} catch (Exception ex) {
}
if (down_handler != null && down_handler.isAlive()) {
down_handler.interrupt(); // still alive ? let's just kill it without mercy...
try {
down_handler.join(THREAD_JOIN_TIMEOUT);
} catch (Exception ex) {
}
if (down_handler != null && down_handler.isAlive()) {
if (log.isErrorEnabled()) {
log.error("down_handler thread for " + getName() +
" was interrupted (in order to be terminated), but is is still alive");
}
}
}
}
down_handler = null;
}
/**
* Internal method, should not be called by clients. Used by ProtocolStack. I would have used the 'friends' modifier,
* but this is available only in C++ ... If the up_handler thread is not available (up_thread == false), then
* directly call the up() method: we will run on the caller's thread (e.g. the protocol layer below us).
*/
protected void receiveUpEvent(Event evt) {
if (up_handler == null) {
if (observer != null) { // call debugger hook (if installed)
if (observer.up(evt, up_queue.size()) == false) { // false means discard event
return;
}
}
up(evt);
return;
}
try {
up_queue.add(evt);
} catch (Exception e) {
if (log.isWarnEnabled()) {
log.warn("exception: " + e);
}
}
}
/**
* Internal method, should not be called by clients. Used by ProtocolStack. I would have used the 'friends' modifier,
* but this is available only in C++ ... If the down_handler thread is not available (down_thread == false), then
* directly call the down() method: we will run on the caller's thread (e.g. the protocol layer above us).
*/
protected void receiveDownEvent(Event evt) {
if (down_handler == null) {
if (observer != null) { // call debugger hook (if installed)
if (observer.down(evt, down_queue.size()) == false) { // false means discard event
return;
}
}
int type = evt.getType();
if (type == Event.ACK || type == Event.START || type == Event.STOP) {
if (handleSpecialDownEvent(evt) == false) {
return;
}
}
down(evt);
return;
}
try {
down_queue.add(evt);
} catch (Exception e) {
if (log.isWarnEnabled()) {
log.warn("exception: " + e);
}
}
}
/**
* Causes the event to be forwarded to the next layer up in the hierarchy. Typically called by the implementation of
* <code>Up</code> (when done).
*/
public void passUp(Event evt) {
if (observer != null) { // call debugger hook (if installed)
if (observer.passUp(evt) == false) { // false means don't pass up (=discard) event
return;
}
}
if (up_prot != null) {
up_prot.receiveUpEvent(evt);
} else if (log.isErrorEnabled()) {
log.error("no upper layer available");
}
}
/**
* Causes the event to be forwarded to the next layer down in the hierarchy. Typically called by the implementation of
* <code>Down</code> (when done).
*/
public void passDown(Event evt) {
if (observer != null) { // call debugger hook (if installed)
if (observer.passDown(evt) == false) { // false means don't pass down (=discard) event
return;
}
}
if (down_prot != null) {
down_prot.receiveDownEvent(evt);
} else if (log.isErrorEnabled()) {
log.error("no lower layer available");
}
}
/**
* An event was received from the layer below. Usually the current layer will want to examine the event type and -
* depending on its type - perform some computation (e.g. removing headers from a MSG event type, or updating the
* internal membership list when receiving a VIEW_CHANGE event). Finally the event is either a) discarded, or b) an
* event is sent down the stack using <code>passDown()</code> or c) the event (or another event) is sent up the stack
* using <code>passUp()</code>.
*/
public void up(Event evt) {
passUp(evt);
}
/**
* Call this method before calling 'up'. This method will block (not return) until 'up' is ready.
*/
public void blockUntilUpReady() {
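// The mutex is held while 'up' is not ready, so acquire() parks the caller
// until the holder releases it; releasing again right away lets any other
// blocked callers proceed as well.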
try {
m_upLock.acquire();
} catch (InterruptedException ex) {
//this is ok
}
m_upLock.release();
}
/**
* An event is to be sent down the stack. The layer may want to examine its type and perform some action on it,
* depending on the event's type. If the event is a message MSG, then the layer may need to add a header to it (or do
* nothing at all) before sending it down the stack using <code>passDown()</code>. In case of a GET_ADDRESS event
* (which tries to retrieve the stack's address from one of the bottom layers), the layer may need to send a new
* response event back up the stack using <code>passUp()</code>.
*/
public void down(Event evt) {
passDown(evt);
}
/**
* Call this method before calling 'down'. This method will block (not return) until 'down' is ready.
*/
public void blockUntilDownReady() {
try {
m_downLock.acquire();
} catch (InterruptedException ex) {
//this is ok
}
m_downLock.release();
}
/**
* These are special internal events that should not be handled by protocols
*
* @return boolean True: the event should be passed further down the stack. False: the event should be discarded (not
* passed down the stack)
*/
protected boolean handleSpecialDownEvent(Event evt) {
switch (evt.getType()) {
case Event.ACK:
if (down_prot == null) {
passUp(new Event(Event.ACK_OK));
return false; // don't pass down the stack
}
return true; // otherwise pass the ACK down the stack (avoid falling through to START)
case Event.START:
try {
start();
// if we're the transport protocol, reply with a START_OK up the stack
if (down_prot == null) {
passUp(new Event(Event.START_OK, Boolean.TRUE));
return false; // don't pass down the stack
} else {
return true; // pass down the stack
}
} catch (Exception e) {
passUp(new Event(Event.START_OK, new Exception("exception caused by " + getName() + ".start(): " + e)));
return false;
}
case Event.STOP:
stop();
if (down_prot == null) {
passUp(new Event(Event.STOP_OK, Boolean.TRUE));
return false; // don't pass down the stack
} else {
return true; // pass down the stack
}
default:
return true; // pass down by default
}
}
}
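// A minimal sketch (not part of the original file) of what a Protocol
// subclass typically looks like; the class and protocol name here are
// hypothetical. The explicit empty public constructor satisfies the
// requirement stated in the class Javadoc above.
class ExampleProtocol extends Protocol {
public ExampleProtocol() {
}
public String getName() {
return "EXAMPLE"; // protocol names must be unique within a stack
}
public void up(Event evt) {
// examine or transform the event here, then hand it to the layer above
passUp(evt);
}
public void down(Event evt) {
// examine or transform the event here, then hand it to the layer below
passDown(evt);
}
}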
package org.jitsi.videobridge;
import java.util.*;
import net.java.sip.communicator.service.protocol.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.videobridge.osgi.*;
import org.jitsi.videobridge.xmpp.*;
import org.jivesoftware.whack.*;
import org.osgi.framework.*;
import org.xmpp.component.*;
/**
* Provides the <tt>main</tt> entry point of the Jitsi Videobridge application
* which implements an external Jabber component.
* <p>
* Jitsi Videobridge implements two application programming interfaces (APIs):
* XMPP and REST (HTTP/JSON). The APIs to be activated by the application are
* specified with the command-line argument <tt>--apis=</tt> the value of which
* is a comma-separated list of <tt>xmpp</tt> and <tt>rest</tt>. The default
* value is <tt>xmpp</tt> (i.e. if the command-line argument <tt>--apis=</tt> is
* not explicitly specified, the application behaves as if <tt>--apis=xmpp</tt>
* is specified). For example, specify <tt>--apis=rest,xmpp</tt> on the command
* line to simultaneously enable the two APIs.
* </p>
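* <p>
* For example, a hypothetical invocation (host, domain and secret are
* placeholders for the values of your own XMPP server):
* <pre>
* java org.jitsi.videobridge.Main --host=xmpp.example.com \
*     --domain=example.com --port=5275 --secret=changeme --apis=rest,xmpp
* </pre>
* </p>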
*
* @author Lyubomir Marinov
*/
public class Main
{
/**
* The name of the command-line argument which specifies the application
* programming interfaces (APIs) to enable for Jitsi Videobridge.
*/
private static final String APIS_ARG_NAME = "--apis=";
/**
* The name of the command-line argument which specifies the XMPP domain
* to use.
*/
private static final String DOMAIN_ARG_NAME = "--domain=";
/**
* The <tt>Object</tt> which synchronizes the access to the state related to
* the decision whether the application is to exit. At the time of this
* writing, the application just runs until it is killed.
*/
private static final Object exitSyncRoot = new Object();
/**
* The name of the command-line argument which specifies the IP address or
* the name of the XMPP host to connect to.
*/
private static final String HOST_ARG_NAME = "--host=";
/**
* The default value of the {@link #HOST_ARG_NAME} command-line argument if
* it is not explicitly provided.
*/
private static final String HOST_ARG_VALUE = "localhost";
/**
* The name of the command-line argument which specifies the value of the
* <tt>System</tt> property
* {@link DefaultStreamConnector#MAX_PORT_NUMBER_PROPERTY_NAME}.
*/
private static final String MAX_PORT_ARG_NAME = "--max-port=";
/**
* The default value of the {@link #MAX_PORT_ARG_NAME} command-line argument
* if it is not explicitly provided.
*/
private static final String MAX_PORT_ARG_VALUE = "20000";
/**
* The name of the command-line argument which specifies the value of the
* <tt>System</tt> property
* {@link DefaultStreamConnector#MIN_PORT_NUMBER_PROPERTY_NAME}.
*/
private static final String MIN_PORT_ARG_NAME = "--min-port=";
/**
* The default value of the {@link #MIN_PORT_ARG_NAME} command-line argument
* if
* it is not explicitly provided.
*/
private static final String MIN_PORT_ARG_VALUE = "10000";
/**
* The name of the command-line argument which specifies the port of the
* XMPP host to connect on.
*/
private static final String PORT_ARG_NAME = "--port=";
/**
* The default value of the {@link #PORT_ARG_NAME} command-line argument if
* it is not explicitly provided.
*/
private static final int PORT_ARG_VALUE = 5275;
/**
* The name of the command-line argument which specifies the secret key for
* the sub-domain of the Jabber component implemented by this application
* with which it is to authenticate to the XMPP server to connect to.
*/
private static final String SECRET_ARG_NAME = "--secret=";
/**
* Represents the <tt>main</tt> entry point of the Jitsi Videobridge
* application which implements an external Jabber component.
*
* @param args the arguments provided to the application on the command line
* @throws Exception if anything goes wrong and the condition cannot be
* gracefully handled during the execution of the application
*/
public static void main(String[] args)
throws Exception
{
// Parse the command-line arguments.
List<String> apis = new LinkedList<String>();
String host = null;
String maxPort = MAX_PORT_ARG_VALUE;
String minPort = MIN_PORT_ARG_VALUE;
int port = PORT_ARG_VALUE;
String secret = "";
String domain = null;
for (String arg : args)
{
if (arg.startsWith(APIS_ARG_NAME))
{
for (String api
: arg.substring(APIS_ARG_NAME.length()).split(","))
{
if ((api != null)
&& (api.length() != 0)
&& !apis.contains(api))
{
apis.add(api);
}
}
}
else if (arg.startsWith(DOMAIN_ARG_NAME))
{
domain = arg.substring(DOMAIN_ARG_NAME.length());
}
else if (arg.startsWith(HOST_ARG_NAME))
{
host = arg.substring(HOST_ARG_NAME.length());
}
else if (arg.startsWith(MAX_PORT_ARG_NAME))
{
maxPort = arg.substring(MAX_PORT_ARG_NAME.length());
}
else if (arg.startsWith(MIN_PORT_ARG_NAME))
{
minPort = arg.substring(MIN_PORT_ARG_NAME.length());
}
else if (arg.startsWith(PORT_ARG_NAME))
{
port = Integer.parseInt(arg.substring(PORT_ARG_NAME.length()));
}
else if (arg.startsWith(SECRET_ARG_NAME))
{
secret = arg.substring(SECRET_ARG_NAME.length());
}
}
if (apis.isEmpty())
apis.add(Videobridge.XMPP_API);
if (host == null)
host = (domain == null) ? HOST_ARG_VALUE : domain;
/*
* Before initializing the application programming interfaces (APIs) of
* Jitsi Videobridge, set any System properties which they use and which
* may be specified by the command-line arguments.
*/
System.setProperty(
Videobridge.REST_API_PNAME,
Boolean.toString(apis.contains(Videobridge.REST_API)));
System.setProperty(
Videobridge.XMPP_API_PNAME,
Boolean.toString(apis.contains(Videobridge.XMPP_API)));
if ((maxPort != null) && (maxPort.length() != 0))
{
// Jingle Raw UDP transport
System.setProperty(
DefaultStreamConnector.MAX_PORT_NUMBER_PROPERTY_NAME,
maxPort);
// Jingle ICE-UDP transport
System.setProperty(
OperationSetBasicTelephony
.MAX_MEDIA_PORT_NUMBER_PROPERTY_NAME,
maxPort);
}
if ((minPort != null) && (minPort.length() != 0))
{
// Jingle Raw UDP transport
System.setProperty(
DefaultStreamConnector.MIN_PORT_NUMBER_PROPERTY_NAME,
minPort);
// Jingle ICE-UDP transport
System.setProperty(
OperationSetBasicTelephony
.MIN_MEDIA_PORT_NUMBER_PROPERTY_NAME,
minPort);
}
/*
* Start OSGi. It will invoke the application programming interfaces
* (APIs) of Jitsi Videobridge. Each of them will keep the application
* alive.
*/
OSGi.start(
new BundleActivator()
{
@Override
public void start(BundleContext bundleContext)
throws Exception
{
// No-op: the activator exists only so OSGi can be started.
}
@Override
public void stop(BundleContext bundleContext)
throws Exception
{
// No-op: there is presently no specific shutdown procedure.
}
});
// Start Jitsi Videobridge as an external Jabber component.
if (apis.contains(Videobridge.XMPP_API))
{
ExternalComponentManager componentManager
= new ExternalComponentManager(host, port);
String subdomain = ComponentImpl.SUBDOMAIN;
componentManager.setMultipleAllowed(subdomain, true);
componentManager.setSecretKey(subdomain, secret);
if (domain != null)
componentManager.setServerName(domain);
Component component = new ComponentImpl();
componentManager.addComponent(subdomain, component);
/*
* The application has nothing more to do but wait for ComponentImpl
* to perform its duties. Presently, there is no specific shutdown
* procedure and the application just gets killed.
*/
do
{
boolean interrupted = false;
synchronized (exitSyncRoot)
{
try
{
exitSyncRoot.wait();
}
catch (InterruptedException ie)
{
interrupted = true;
}
}
if (interrupted)
Thread.currentThread().interrupt();
}
while (true);
}
}
}
package org.kered.dko;
import java.io.File;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Logger;
import javax.sql.DataSource;
import org.kered.dko.DBQuery.JoinInfo;
import org.kered.dko.Field.FK;
import org.kered.dko.persistence.ColumnAccess;
import org.kered.dko.persistence.QueryExecution;
import org.kered.dko.persistence.QuerySize;
class UsageMonitor<T extends Table> {
private static final long ONE_DAY = 1000*60*60*24;
private static final long FORTY_FIVE_DAYS = ONE_DAY * 45;
private static final int MIN_WARN_COUNT = 8;
private static final String WARN_OFF = "To turn these warnings off, "
+ "call: Context.getThreadContext().enableUsageWarnings(false);";
Map<StackTraceKey,M.Long> counter = new HashMap<StackTraceKey,M.Long>();
private static final Logger log = Logger.getLogger("org.kered.dko.recommendations");
long objectCount = 0;
private final StackTraceElement[] st;
private Set<Field<?>> surpriseFields = null;
private DBQuery<T> query;
private boolean selectOptimized = false;
private Set<Field<?>> pks = new HashSet<Field<?>>();
public long rowCount = 0;
private final int queryHash;
private final Class<T> queryType;
private int stackHash;
private QueryExecution qe;
private Set<Field<?>> selectedFieldSet;
private Set<Field<?>> seenFields = new HashSet<Field<?>>();
private DataSource ds;
private boolean newQE;
private boolean shutdown = false;
private static long warnBadFKUsageCount = 0;
private synchronized void shutdown() {
if (shutdown) return;
try {
shutdown = true;
updateColumnAccesses();
warnBadFKUsage();
questionUnusedColumns();
} catch (final Throwable t) {
t.printStackTrace();
log.severe(t.toString());
}
}
@Override
protected void finalize() throws Throwable {
shutdown();
super.finalize();
}
private void updateColumnAccesses() {
Map<String, Map<String, ColumnAccess>> used;
if (newQE) {
used = new HashMap<String, Map<String, ColumnAccess>>();
} else {
try {
used = ColumnAccess.ALL.where(ColumnAccess.QUERY_EXECUTION_ID.eq(qe==null ? null : qe.getId())).mapBy(ColumnAccess.TABLE_NAME, ColumnAccess.COLUMN_NAME);
} catch (SQLException e) {
e.printStackTrace();
used = new HashMap<String, Map<String, ColumnAccess>>();
}
}
long threshold = System.currentTimeMillis() - ONE_DAY;
if (seenFields==null) {
System.err.println("Well, seenFields shouldn't be null here, but it is. WTF?");
}
for (Field<?> f : seenFields) {
String tableName = Util.getTABLE_NAME(f.TABLE);
Map<String, ColumnAccess> columns = used.get(tableName);
ColumnAccess ca = columns==null ? null : columns.get(f.NAME);
if (ca == null) {
ca = new ColumnAccess()
.setColumnName(f.NAME)
.setTableName(tableName)
.setQueryExecutionIdFK(qe)
.setLastSeen(System.currentTimeMillis());
try {
ca.insert(ds);
} catch (SQLException e) {
e.printStackTrace();
}
} else if (ca.getLastSeen() < threshold) {
ca.setLastSeen(System.currentTimeMillis());
try {
ca.update(ds);
} catch (SQLException e) {
e.printStackTrace();
}
}
}
}
private void questionUnusedColumns() {
final Set<Field<?>> unusedColumns = new LinkedHashSet<Field<?>>(this.selectedFieldSet);
unusedColumns.removeAll(seenFields);
unusedColumns.removeAll(pks);
final List<String> unusedColumnDescs = new ArrayList<String>();
for (final Field<?> field : unusedColumns) {
unusedColumnDescs.add(field.TABLE.getSimpleName() +"."+ field.JAVA_NAME);
}
if (!selectOptimized && !unusedColumnDescs.isEmpty() && objectCount > MIN_WARN_COUNT) {
final String msg = "The following columns were never accessed:\n\t"
+ Util.join(", ", unusedColumnDescs) + "\nin the query created here:\n\t"
+ Util.join("\n\t", (Object[]) st) + "\n"
+ "You might consider not querying these fields by using the "
+ "deferFields(Field<?>...) method on your query.\n"
+ WARN_OFF;
log.info(msg);
}
}
static <T extends Table> UsageMonitor<T> build(final DBQuery<T> query) {
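// Never monitor the profiler's own persistence tables (QueryExecution,
// QuerySize, ColumnAccess): monitoring them would itself issue monitored
// queries. Also bail out when no profiling DataSource is configured.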
Class<T> type = query.getType();
if (QueryExecution.class.equals(type)) return null;
if (QuerySize.class.equals(type)) return null;
if (ColumnAccess.class.equals(type)) return null;
if (org.kered.dko.persistence.Util.getDS()==null) return null;
return new UsageMonitor<T>(query);
}
private UsageMonitor(final DBQuery<T> query) {
ds = org.kered.dko.persistence.Util.getDS();
// grab the current stack trace
final StackTraceElement[] tmp = Thread.currentThread().getStackTrace();
int i=1;
while (i<tmp.length && tmp[i].getClassName().startsWith("org.kered.dko")) ++i;
st = new StackTraceElement[tmp.length-i];
System.arraycopy(tmp, i, st, 0, st.length);
stackHash = Util.join(",", st).hashCode();
queryHash = query.hashCode();
qe = QueryExecution.ALL.use(ds)
.where(QueryExecution.STACK_HASH.eq(stackHash))
.with(ColumnAccess.FK_QUERY_EXECUTION)
.orderBy(Constants.DIRECTION.DESCENDING, QueryExecution.LAST_SEEN)
.first();
this.newQE = qe==null;
if (newQE) {
qe = new QueryExecution()
.setStackHash(stackHash)
.setQueryHash(queryHash)
.setDescription(query + " @ "+ st[0])
.setLastSeen(System.currentTimeMillis());
try {
qe.insert(ds);
} catch (SQLException e) {
// profiling is best-effort: report the failure and carry on
e.printStackTrace();
}
}
// update last_seen if older than a day
if (qe!=null && (qe.getLastSeen()==null || qe.getLastSeen() < System.currentTimeMillis() - ONE_DAY)) {
qe.setLastSeen(System.currentTimeMillis());
try {
qe.update(ds);
} catch (SQLException e) {
// profiling is best-effort: report the failure and carry on
e.printStackTrace();
}
}
this.query = query;
this.queryType = query.getType();
//System.err.println("queryHash "+ queryHash +" "+ query.hashCode());
//System.err.println("queryHash "+ queryHash);
// get pks for all tables
for (final TableInfo table : query.tableInfos) {
for (final Field<?> f : Util.getPK(table.tableClass).GET_FIELDS()) {
pks.add(f);
}
}
for (final JoinInfo join : query.joinsToOne) {
for (final Field<?> f : Util.getPK(join.reffedTableInfo.tableClass).GET_FIELDS()) {
pks.add(f);
}
for (final Field<?> f : Util.getPK(join.reffingTableInfo.tableClass).GET_FIELDS()) {
pks.add(f);
}
}
for (final JoinInfo join : query.joinsToMany) {
for (final Field<?> f : Util.getPK(join.reffedTableInfo.tableClass).GET_FIELDS()) {
pks.add(f);
}
for (final Field<?> f : Util.getPK(join.reffingTableInfo.tableClass).GET_FIELDS()) {
pks.add(f);
}
}
pks = Collections.unmodifiableSet(pks);
}
private void warnBadFKUsage() {
if (objectCount > MIN_WARN_COUNT) {
for (final Entry<StackTraceKey, M.Long> e : counter.entrySet()) {
final M.Long v = e.getValue();
final long percent = v.i*100/objectCount;
if (percent > 50) {
final StackTraceKey k = e.getKey();
final String msg = "This code has lazily accessed a foreign key relationship "+ percent
+"% of the time. This caused "+ v.i +" more queries to the "
+"database than necessary. You should consider adding .with("
+ k.fk.referencing.getSimpleName() +"."+ k.fk.name
+") to your join. This happened at:\n\t"
+ Util.join("\n\t", (Object[]) k.a)
+"\nwhile iterating over a query created here:\n\t"
+ Util.join("\n\t", (Object[]) st) +"\n"
+ WARN_OFF;
log.warning(msg);
warnBadFKUsageCount += 1;
}
}
}
}
void accessedFkCallback(final Table table, final FK<? extends Table> fk) {
final StackTraceElement[] tmp = Thread.currentThread().getStackTrace();
final StackTraceElement[] st = new StackTraceElement[tmp.length-3];
System.arraycopy(tmp, 3, st, 0, st.length);
final StackTraceKey key = new StackTraceKey(fk, st);
M.Long x = counter.get(key);
if (x == null) counter.put(key, x = new M.Long());
x.i++;
}
static class StackTraceKey {
private final StackTraceElement[] a;
private final FK<? extends Table> fk;
StackTraceKey(final FK<? extends Table> fk, final StackTraceElement[] a) {
this.a = a;
this.fk = fk;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + Arrays.hashCode(a);
result = prime * result + ((fk == null) ? 0 : fk.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
final StackTraceKey other = (StackTraceKey) obj;
if (!Arrays.equals(a, other.a))
return false;
if (fk == null) {
if (other.fk != null)
return false;
} else if (!fk.equals(other.fk))
return false;
return true;
}
}
void __NOSCO_PRIVATE_accessedColumnCallback(final Table table, final Field<?> field) {
if (!seenFields.add(field)) return;
if (selectedFieldSet.contains(field)) return;
if (surpriseFields==null) surpriseFields = Collections.synchronizedSet(new HashSet<Field<?>>());
if (surpriseFields.add(field)) {
final StackTraceElement[] tmp = Thread.currentThread().getStackTrace();
final StackTraceElement[] st = new StackTraceElement[tmp.length-3];
System.arraycopy(tmp, 3, st, 0, st.length);
final String msg = "Optimizer was surprised by field "+ field.TABLE.getSimpleName()
+"."+ field.JAVA_NAME +" here:\n\t"
+ Util.join("\n\t", st) +"\nIf this happens once it's normal "
+"(the optimizer will learn to include it the next time this is run).\n"
+"But if this is happening every time you run "
+"please report this as a bug to http://code.google.com/p/nosco/";
log.info(msg);
}
}
void setSelectedFields(final Field<?>[] selectedFields) {
if (selectedFields==null) throw new IllegalArgumentException("selectedFields cannot be null");
this.selectedFieldSet = new HashSet<Field<?>>();
for (Field<?> f : selectedFields) selectedFieldSet.add(f);
}
DBQuery<T> getSelectOptimizedQuery() {
try {
if (!query.optimizeSelectFields()) return query;
if (!Context.selectOptimizationsEnabled()) {
//System.err.println("getOptimizedQuery !selectOptimizationsEnabled");
return query;
}
if (newQE) return query;
Map<String, Map<String, ColumnAccess>> used = qe.getColumnAccessSet().mapBy(ColumnAccess.TABLE_NAME, ColumnAccess.COLUMN_NAME);
//final Map<Field<?>,Long> used = qc.get(stackTraceHashString);
//System.err.println("used "+ used +" @ "+ this.queryHash);
final Set<Field<?>> deffer = new HashSet<Field<?>>();
final List<Field<?>> originalSelectedFields = query.getSelectFields(false);
long threshold = qe.getLastSeen() - FORTY_FIVE_DAYS;
for (final Field<?> f : originalSelectedFields) {
Map<String, ColumnAccess> columns = used.get(Util.getTABLE_NAME(f.TABLE));
if (columns==null) continue;
ColumnAccess ca = columns.get(f.NAME);
if (ca==null || ca.getLastSeen() < threshold) {
deffer.add(f);
}
}
if (deffer.isEmpty()) return query;
deffer.removeAll(pks);
if (deffer.size()==originalSelectedFields.size() && originalSelectedFields.size()>0) {
// make sure we don't remove every field!
deffer.remove(originalSelectedFields.get(0));
}
//System.err.println("getOptimizedQuery optimized!");
this.selectOptimized = true;
return (DBQuery<T>) query.deferFields(deffer);
} catch (SQLException e) {
e.printStackTrace();
return query;
} finally {
query = null;
}
}
static void doNothing() {
// do nothing; just make sure the class loads
return;
}
private static File PERF_CACHE = null;
private final static String README_TEXT = "Welcome to DKO!\n\n" +
"This directory contains runtime profiles for programs that use the nosco library.\n" +
"It is always safe to delete. Your programs will just run a little slower the next\n" +
"time or two they start up. Thanks for visiting!\n\nhttp://code.google.com/p/nosco/\n";
private final static long START = System.currentTimeMillis();
private final static long cutoff = START - ONE_DAY * 100;
private final static Thread loadPerformanceInfo = new Thread() {
@Override
public void run() {
final File CACHE_DIR;
String dir = System.getProperty(Constants.PROPERTY_CACHE_DIR);
if (dir == null) dir = System.getProperty(Constants.PROPERTY_CACHE_DIR_OLD);
if (dir == null) {
final File BASE_DIR = new File(System.getProperty("user.home"));
CACHE_DIR = new File(BASE_DIR, ".dko_optimizations");
} else {
CACHE_DIR = new File(dir);
}
}
};
static {
loadPerformanceInfo.start();
}
static {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
}
});
}
private final static BlockingQueue<UsageMonitor> querySizes = new LinkedBlockingQueue<UsageMonitor>();
void saveSizeOfQuery() {
if (this.queryType.getPackage().getName().startsWith("org.kered.dko"))
return;
querySizes.add(this);
}
static Thread saveQuerySizes = new Thread() {
@Override
public void run() {
while (true) {
try {
final UsageMonitor um = querySizes.take();
DataSource ds = org.kered.dko.persistence.Util.getDS();
if (ds==null) {
log.warning("I could not load the usage monitor's datasource, so I'm stopping collecting performance metrics.");
return;
}
// final int id = Math.abs(um.queryHashCode);
final int id = um.queryHash;
final QuerySize qs = QuerySize.ALL.use(ds).get(
QuerySize.ID.eq(id));
// if (qs!=null && qs.getHashCode()!=hash) {
// qs = QuerySize.ALL.get(QuerySize.HASH_CODE.eq(hash));
if (qs == null) {
new QuerySize()
.setId(id)
.setHashCode(um.queryHash)
.setSchemaName(
Util.getSCHEMA_NAME(um.queryType))
.setTableName(Util.getTABLE_NAME(um.queryType))
.setRowCount(um.rowCount).insert(ds);
} else {
qs.setRowCount(ma(um.rowCount, qs.getRowCount()));
qs.update(ds);
}
} catch (final InterruptedException e) {
e.printStackTrace();
} catch (final SQLException e) {
// profiling is best-effort: report the failure and carry on
e.printStackTrace();
}
}
}
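// Exponentially-weighted moving average: (a + (MA - 1) * b) / MA, i.e. the
// stored row count moves 1/MA of the way toward the newest observation.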
private long ma(Long a, Long b) {
final int MA = 5;
if (a == null)
a = 0L;
if (b == null)
b = 0L;
return (a + (MA - 1) * b) / MA;
}
};
static {
saveQuerySizes.setDaemon(true);
saveQuerySizes.start();
}
}
/*
* $Id: StreamUtil.java,v 1.19 2011-11-29 06:50:30 tlipkis Exp $
*/
package org.lockss.util;
import java.io.*;
import java.util.zip.*;
import java.security.MessageDigest;
import org.lockss.daemon.LockssWatchdog;
/**
* This is a class to contain generic stream utilities
*
* @author Emil Aalto
* @version 0.0
*/
public class StreamUtil {
static Logger log = Logger.getLogger("StreamUtil");
private static final int BUFFER_SIZE = 256;
static final int COPY_WDOG_CHECK_EVERY_BYTES = 1024 * 1024;
/**
* Copy bytes from an InputStream to an OutputStream until EOF. The
* OutputStream is flushed; neither stream is closed.
* @param is input stream
* @param os output stream
* @return number of bytes copied
* @throws IOException
*/
public static long copy(InputStream is, OutputStream os) throws IOException {
return copy(is, os, -1);
}
/**
* Copy bytes from an InputStream to an OutputStream until EOF,
* occasionally poking the watchdog. The OutputStream is flushed;
* neither stream is closed.
* @param is input stream
* @param os output stream
* @param wdog if non-null, a LockssWatchdog that will be poked at
* approximately twice its required rate.
* @return number of bytes copied
* @throws IOException
*/
public static long copy(InputStream is, OutputStream os,
LockssWatchdog wdog) throws IOException {
return copy(is, os, -1, wdog);
}
/**
* Copy up to len bytes from an InputStream to an OutputStream. The
* OutputStream is flushed; neither stream is closed.
* @param is input stream
* @param os output stream
* @param len number of bytes to copy; -1 means copy to EOF
* @return number of bytes copied
* @throws IOException
*/
public static long copy(InputStream is, OutputStream os, long len)
throws IOException {
return copy(is, os, len, null);
}
/**
* Copy up to len bytes from InputStream to OutputStream, occasionally
* poking a watchdog. The OutputStream is flushed; neither stream is
* closed.
* @param is input stream
* @param os output stream
* @param len number of bytes to copy; -1 means copy to EOF
* @param wdog if non-null, a LockssWatchdog that will be poked at
* approximately twice its required rate.
* @return number of bytes copied
* @throws IOException
*/
public static long copy(InputStream is, OutputStream os, long len,
LockssWatchdog wdog)
throws IOException {
return copy(is, os, len, wdog, false, null);
}
/**
* Copy up to len bytes from InputStream to OutputStream, occasionally
* poking a watchdog. The OutputStream is flushed; neither stream is
* closed.
* @param is input stream
* @param os output stream
* @param len number of bytes to copy; -1 means copy to EOF
* @param wdog if non-null, a LockssWatchdog that will be poked at
* approximately twice its required rate.
* @param wrapExceptions if true, exceptions that occur while reading
* from the input stream will be wrapped in a {@link
* StreamUtil.InputException} and exceptions that occur while writing to
* or closing the output stream will be wrapped in a {@link
* StreamUtil.OutputException}.
* @return number of bytes copied
* @throws IOException
*/
public static long copy(InputStream is, OutputStream os, long len,
LockssWatchdog wdog, boolean wrapExceptions)
throws IOException {
return copy(is, os, len, wdog, wrapExceptions, null);
}
/**
* Copy up to len bytes from InputStream to OutputStream, occasionally
* poking a watchdog. The OutputStream is flushed; neither stream is
* closed.
* @param is input stream
* @param os output stream
* @param len number of bytes to copy; -1 means copy to EOF
* @param wdog if non-null, a LockssWatchdog that will be poked at
* approximately twice its required rate.
* @param wrapExceptions if true, exceptions that occur while reading
* from the input stream will be wrapped in a {@link
* StreamUtil.InputException} and exceptions that occur while writing to
* or closing the output stream will be wrapped in a {@link
* StreamUtil.OutputException}.
* @param md if non-null, a MessageDigest that is updated with all bytes read
* @return number of bytes copied
* @throws IOException
*/
public static long copy(InputStream is, OutputStream os, long len,
LockssWatchdog wdog, boolean wrapExceptions, MessageDigest md)
throws IOException {
if (is == null || os == null || len == 0) {
return 0;
}
long wnext = 0, wcnt = 0, wint = 0;
if (wdog != null) {
wint = wdog.getWDogInterval() / 4;
wnext = TimeBase.nowMs() + wint;
}
byte[] buf = new byte[BUFFER_SIZE];
long rem = (len > 0) ? len : Long.MAX_VALUE;
long ncopied = 0;
int nread;
while (rem > 0) {
try {
nread = is.read(buf, 0, rem > BUFFER_SIZE ? BUFFER_SIZE : (int)rem);
} catch (IOException e) {
if (wrapExceptions) {
throw new InputException(e);
} else {
throw e;
}
}
if (nread <= 0) {
break;
}
if (md != null) {
md.update(buf, 0, nread);
}
try {
os.write(buf, 0, nread);
} catch (IOException e) {
if (wrapExceptions) {
throw new OutputException(e);
} else {
throw e;
}
}
ncopied += nread;
rem -= nread;
if (wdog != null) {
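// Consult the clock only after every COPY_WDOG_CHECK_EVERY_BYTES bytes so
// the common path stays cheap; wint is a quarter of the watchdog interval.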
if ((wcnt += nread) > COPY_WDOG_CHECK_EVERY_BYTES) {
log.debug2("checking: "+ wnext);
if (TimeBase.nowMs() > wnext) {
log.debug2("poke: " + wcnt);
wdog.pokeWDog();
wnext = TimeBase.nowMs() + wint;
}
wcnt = 0;
}
}
}
try {
os.flush();
} catch (IOException e) {
if (wrapExceptions) {
throw new OutputException(e);
} else {
throw e;
}
}
return ncopied;
}
/**
* Copies the contents of a Reader to a Writer. The copy is buffered;
* neither is closed.
* @param reader reader
* @param writer writer
* @return number of chars copied
* @throws IOException
*/
public static long copy(Reader reader, Writer writer) throws IOException {
if (reader == null || writer == null) {
return 0;
}
long totalCharCount = 0;
char[] chars = new char[BUFFER_SIZE];
int count;
while ((count = reader.read(chars)) > 0) {
totalCharCount += count;
writer.write(chars, 0, count);
}
writer.flush();
return totalCharCount;
}
public static class InputException extends IOException {
public InputException(IOException cause) {
super(cause);
}
public IOException getIOCause() {
return (IOException)getCause();
}
}
public static class OutputException extends IOException {
public OutputException(IOException cause) {
super(cause);
}
public IOException getIOCause() {
return (IOException)getCause();
}
}
/** Read size bytes from stream into buf. Keeps trying to read until
* enough bytes have been read or EOF or error.
* @param ins stream to read from
* @param buf buffer to read into
* @param size number of bytes to read
* @return number of bytes read, which will be less than size iff EOF is
* reached
* @throws IOException
*/
public static int readBytes(InputStream ins, byte[] buf, int size)
throws IOException {
int off = 0;
while (off < size) {
int nread = ins.read(buf, off, size - off);
if (nread == -1) {
return off;
}
off += nread;
}
return off;
}
/** Read size chars from reader into buf. Keeps trying to read until
* enough chars have been read or EOF or error.
* @param reader reader to read from
* @param buf buffer to read into
* @param size number of chars to read
* @return number of chars read, which will be less than size iff EOF is
* reached
* @throws IOException
*/
public static int readChars(Reader reader, char[] buf, int size)
throws IOException {
int off = 0;
while (off < size) {
int nread = reader.read(buf, off, size - off);
if (nread == -1) {
return off;
}
off += nread;
}
return off;
}
/** Read from two input streams and compare their contents. The streams
* are not closed, and may get left at any position.
* @param ins1 1st stream
* @param ins2 2nd stream
* @return true iff streams have same contents and reach EOF at the same
* point.
* @throws IOException
*/
public static boolean compare(InputStream ins1, InputStream ins2)
throws IOException {
byte[] b1 = new byte[BUFFER_SIZE];
byte[] b2 = new byte[BUFFER_SIZE];
while (true) {
int len1 = readBytes(ins1, b1, BUFFER_SIZE);
int len2 = readBytes(ins2, b2, BUFFER_SIZE);
if (len1 != len2) return false;
if (len1 == 0) return true;
for (int ix = 0; ix < len1; ix++) {
if (b1[ix] != b2[ix]) return false;
}
}
}
/** Return an InputStream that uncompresses the data on the input
* stream (normally an HTTP response stream)
* @param instr raw InputStream
* @param contentEncoding value of HTTP Content-Encoding: header
* @return The wrapped stream, or the original stream if contentEncoding
* is null or "identity"
* @throws UnsupportedEncodingException
*/
public static InputStream getUncompressedInputStream(InputStream instr,
String contentEncoding)
throws IOException, UnsupportedEncodingException {
InputStream res;
if (contentEncoding == null ||
contentEncoding.equalsIgnoreCase("identity")) {
res = instr;
} else if (contentEncoding.equalsIgnoreCase("gzip") ||
contentEncoding.equalsIgnoreCase("x-gzip")) {
log.debug3("Wrapping in GZIPInputStream");
res = new GZIPInputStream(instr);
} else if (contentEncoding.equalsIgnoreCase("deflate")) {
log.debug3("Wrapping in InflaterInputStream");
res = new InflaterInputStream(instr);
} else {
throw new UnsupportedEncodingException(contentEncoding);
}
return res;
}
/** Return a Reader that reads from the InputStream. If the specified
* encoding is not found, tries {@link Constants#DEFAULT_ENCODING}. If
* the supplied InputStream is a ReaderInputStream, returns the
* underlying Reader.
* @param in the InputStream to be wrapped
* @param encoding the charset
*/
public static Reader getReader(InputStream in, String encoding) {
if (in instanceof ReaderInputStream) {
ReaderInputStream ris = (ReaderInputStream)in;
return ris.getReader();
}
if (encoding == null) {
encoding = Constants.DEFAULT_ENCODING;
}
try {
return new InputStreamReader(in, encoding);
} catch (UnsupportedEncodingException e1) {
log.error("No such encoding: " + encoding + ", trying " +
Constants.DEFAULT_ENCODING);
try {
return new InputStreamReader(in, Constants.DEFAULT_ENCODING);
} catch (UnsupportedEncodingException e2) {
log.critical("Default encoding not found: " +
Constants.DEFAULT_ENCODING);
throw new RuntimeException(("UnsupportedEncodingException for both " +
encoding + " and " +
Constants.DEFAULT_ENCODING),
e1);
}
}
}
}
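// A hedged usage sketch (not part of the original LOCKSS sources): copy a
// stream to a buffer while digesting everything read. The input file name
// "in.dat" is hypothetical.
class StreamUtilDigestExample {
public static void main(String[] args) throws Exception {
MessageDigest md = MessageDigest.getInstance("SHA-1");
InputStream in = new FileInputStream("in.dat");
ByteArrayOutputStream out = new ByteArrayOutputStream();
// len = -1: copy to EOF; no watchdog; unwrapped exceptions; digest all input
long n = StreamUtil.copy(in, out, -1, null, false, md);
System.out.println(n + " bytes copied, SHA-1: "
+ new java.math.BigInteger(1, md.digest()).toString(16));
in.close();
}
}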
package org.nutz.mvc.impl;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.nutz.ioc.Ioc;
import org.nutz.ioc.annotation.InjectName;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.json.Json;
import org.nutz.lang.Lang;
import org.nutz.lang.Mirror;
import org.nutz.lang.Strings;
import org.nutz.lang.segment.Segments;
import org.nutz.lang.util.ClassMeta;
import org.nutz.lang.util.ClassMetaReader;
import org.nutz.lang.util.Context;
import org.nutz.log.Log;
import org.nutz.log.Logs;
import org.nutz.mvc.ActionFilter;
import org.nutz.mvc.ActionInfo;
import org.nutz.mvc.EntryDeterminer;
import org.nutz.mvc.HttpAdaptor;
import org.nutz.mvc.ModuleScanner;
import org.nutz.mvc.Mvcs;
import org.nutz.mvc.NutConfig;
import org.nutz.mvc.ObjectInfo;
import org.nutz.mvc.annotation.AdaptBy;
import org.nutz.mvc.annotation.At;
import org.nutz.mvc.annotation.By;
import org.nutz.mvc.annotation.Chain;
import org.nutz.mvc.annotation.DELETE;
import org.nutz.mvc.annotation.Encoding;
import org.nutz.mvc.annotation.Fail;
import org.nutz.mvc.annotation.Filters;
import org.nutz.mvc.annotation.GET;
import org.nutz.mvc.annotation.Modules;
import org.nutz.mvc.annotation.Ok;
import org.nutz.mvc.annotation.POST;
import org.nutz.mvc.annotation.PUT;
import org.nutz.mvc.annotation.PathMap;
import org.nutz.resource.Scans;
public abstract class Loadings {
private static final Log log = Logs.get();
public static ActionInfo createInfo(Class<?> type) {
ActionInfo ai = new ActionInfo();
evalEncoding(ai, Mirror.getAnnotationDeep(type, Encoding.class));
evalHttpAdaptor(ai, Mirror.getAnnotationDeep(type, AdaptBy.class));
evalActionFilters(ai, Mirror.getAnnotationDeep(type, Filters.class));
evalPathMap(ai, Mirror.getAnnotationDeep(type, PathMap.class));
evalOk(ai, Mirror.getAnnotationDeep(type, Ok.class));
evalFail(ai, Mirror.getAnnotationDeep(type, Fail.class));
evalAt(ai, Mirror.getAnnotationDeep(type, At.class), type.getSimpleName(), false);
evalActionChainMaker(ai, Mirror.getAnnotationDeep(type, Chain.class));
evalModule(ai, type);
if (Mvcs.DISPLAY_METHOD_LINENUMBER) {
InputStream ins = type.getClassLoader().getResourceAsStream(type.getName().replace(".", "/") + ".class");
if (ins != null) {
try {
ClassMeta meta = ClassMetaReader.build(ins);
ai.setMeta(meta);
}
catch (Exception e) {
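// line-number metadata is optional; ignore failures reading the class file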
}
}
}
return ai;
}
public static ActionInfo createInfo(Method method) {
ActionInfo ai = new ActionInfo();
evalEncoding(ai, Mirror.getAnnotationDeep(method, Encoding.class));
evalHttpAdaptor(ai, Mirror.getAnnotationDeep(method, AdaptBy.class));
evalActionFilters(ai, Mirror.getAnnotationDeep(method, Filters.class));
evalOk(ai, Mirror.getAnnotationDeep(method, Ok.class));
evalFail(ai, Mirror.getAnnotationDeep(method, Fail.class));
evalHttpMethod(ai, method, Mirror.getAnnotationDeep(method, At.class));
evalAt(ai, Mirror.getAnnotationDeep(method, At.class), method.getName(), true);
evalActionChainMaker(ai, Mirror.getAnnotationDeep(method, Chain.class));
ai.setMethod(method);
return ai;
}
private static EntryDeterminer determiner = null;
public static Set<Class<?>> scanModules(Ioc ioc, Class<?> mainModule, EntryDeterminer determiner) {
Loadings.determiner = determiner;
Modules ann = mainModule.getAnnotation(Modules.class);
boolean scan = null == ann ? true : ann.scanPackage();
Set<Class<?>> forScans = new HashSet<Class<?>>();
Set<Class<?>> modules = new HashSet<Class<?>>();
forScans.add(mainModule);
if (null != ann) {
// add the module classes listed explicitly in @Modules.value()
for (Class<?> module : ann.value()) {
forScans.add(module);
}
for (String str : ann.by()) {
ModuleScanner ms;
// an "ioc:" prefix means the ModuleScanner comes from the Ioc container
if (str.startsWith("ioc:")) {
String nm = str.substring("ioc:".length());
ms = ioc.get(ModuleScanner.class, nm);
}
else {
try {
Class<?> klass = Lang.loadClass(str);
Mirror<?> mi = Mirror.me(klass);
ms = (ModuleScanner) mi.born();
}
catch (ClassNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
Collection<Class<?>> col = ms.scan();
if (null != col)
for (Class<?> type : col) {
if (isModule(type)) {
modules.add(type);
}
}
}
if (ann.packages() != null && ann.packages().length > 0) {
for (String packageName : ann.packages()) {
scanModuleInPackage(modules, packageName);
}
}
}
for (Class<?> type : forScans) {
// log where each class to be scanned was loaded from (e.g. the jar in a maven-style project)
try {
URL location = type.getProtectionDomain().getCodeSource().getLocation();
if (log.isDebugEnabled())
log.debugf("module class location '%s'", location);
}
catch (NullPointerException e) {
// getProtectionDomain() may fail on Android; just skip it
}
//Scans.me().registerLocation(type);
}
for (Class<?> type : forScans) {
if (scan) {
scanModuleInPackage(modules, type.getPackage().getName());
}
else {
if (isModule(type)) {
if (log.isDebugEnabled())
log.debugf(" > Found @At : '%s'", type.getName());
modules.add(type);
} else if (log.isTraceEnabled()) {
log.tracef(" > ignore '%s'", type.getName());
}
}
}
return modules;
}
protected static void scanModuleInPackage(Set<Class<?>> modules, String packageName) {
if (log.isDebugEnabled())
log.debugf(" > scan '%s'", packageName);
List<Class<?>> subs = Scans.me().scanPackage(packageName);
checkModule(modules, subs);
}
/**
* Adds every class in subs that qualifies as a module to modules.
*
* @param modules set collecting the detected module classes
* @param subs candidate classes scanned from a package
*/
private static void checkModule(Set<Class<?>> modules, List<Class<?>> subs) {
for (Class<?> sub : subs) {
try {
if (isModule(sub)) {
if (log.isDebugEnabled())
log.debugf(" >> add '%s'", sub.getName());
modules.add(sub);
} else if (log.isTraceEnabled()) {
log.tracef(" >> ignore '%s'", sub.getName());
}
}
catch (Exception e) {
throw new RuntimeException("something happen when handle class=" + sub.getName(), e);
}
}
}
public static void evalHttpMethod(ActionInfo ai, Method method, At at) {
if (Mirror.getAnnotationDeep(method, GET.class) != null)
ai.getHttpMethods().add("GET");
if (Mirror.getAnnotationDeep(method, POST.class) != null)
ai.getHttpMethods().add("POST");
if (Mirror.getAnnotationDeep(method, PUT.class) != null)
ai.getHttpMethods().add("PUT");
if (Mirror.getAnnotationDeep(method, DELETE.class) != null)
ai.getHttpMethods().add("DELETE");
if (at != null) {
for (String m : at.methods())
ai.getHttpMethods().add(m.toUpperCase());
}
}
public static void evalActionChainMaker(ActionInfo ai, Chain cb) {
if (null != cb) {
ai.setChainName(cb.value());
}
}
public static void evalAt(ActionInfo ai, At at, String def, boolean isMethod) {
if (null != at) {
if (null == at.value() || at.value().length == 0) {
ai.setPaths(Lang.array("/" + def.toLowerCase()));
} else {
ai.setPaths(at.value());
}
if (!Strings.isBlank(at.key()))
ai.setPathKey(at.key());
if (at.top())
ai.setPathTop(true);
} else if (isMethod) {
// a method chosen as an action by the EntryDeterminer may lack @At; default the path to the method name
ai.setPaths(Lang.array("/" + def.toLowerCase()));
}
}
@SuppressWarnings("unchecked")
private static void evalPathMap(ActionInfo ai, PathMap pathMap) {
if (pathMap != null) {
ai.setPathMap(Json.fromJson(Map.class, pathMap.value()));
}
}
public static void evalFail(ActionInfo ai, Fail fail) {
if (null != fail) {
ai.setFailView(fail.value());
}
}
public static void evalOk(ActionInfo ai, Ok ok) {
if (null != ok) {
ai.setOkView(ok.value());
}
}
public static void evalModule(ActionInfo ai, Class<?> type) {
ai.setModuleType(type);
String beanName = null;
// Since 5.10.3, IocBean.name is preferred as the bean name. Modified by QinerG@gmai.com
InjectName innm = Mirror.getAnnotationDeep(type,InjectName.class);
IocBean iocBean = Mirror.getAnnotationDeep(type,IocBean.class);
if (innm == null && iocBean == null) // neither naming annotation is present; leave the inject name unset
return;
if (iocBean != null) {
beanName = iocBean.name();
}
if (Strings.isBlank(beanName)) {
if (innm != null && !Strings.isBlank(innm.value())) {
beanName = innm.value();
} else {
beanName = Strings.lowerFirst(type.getSimpleName());
}
}
ai.setInjectName(beanName);
}
@SuppressWarnings({"unchecked", "rawtypes"})
public static void evalActionFilters(ActionInfo ai, Filters filters) {
if (null != filters) {
List<ObjectInfo<? extends ActionFilter>> list = new ArrayList<ObjectInfo<? extends ActionFilter>>(filters.value().length);
for (By by : filters.value()) {
list.add(new ObjectInfo(by.type(), by.args()));
}
ai.setFilterInfos(list.toArray(new ObjectInfo[list.size()]));
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
public static void evalHttpAdaptor(ActionInfo ai, AdaptBy ab) {
if (null != ab) {
ai.setAdaptorInfo((ObjectInfo<? extends HttpAdaptor>) new ObjectInfo(ab.type(),
ab.args()));
}
}
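/**
 * Applies the input/output encodings from @Encoding, falling back to UTF-8
 * when the annotation is absent or a value is not given.
 */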
public static void evalEncoding(ActionInfo ai, Encoding encoding) {
if (null == encoding) {
ai.setInputEncoding(org.nutz.lang.Encoding.UTF8);
ai.setOutputEncoding(org.nutz.lang.Encoding.UTF8);
} else {
ai.setInputEncoding(Strings.sNull(encoding.input(), org.nutz.lang.Encoding.UTF8));
ai.setOutputEncoding(Strings.sNull(encoding.output(), org.nutz.lang.Encoding.UTF8));
}
}
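/**
 * Instantiates a helper object (filter, adaptor, ...) from its declared args.
 * Each arg is first expanded against the loading context; a single argument
 * of the form "ioc:xxx" fetches the bean named xxx from the Ioc container
 * instead of constructing a new instance. For example, @By(type = X.class,
 * args = {"ioc:myFilter"}) would resolve the Ioc bean "myFilter" ("myFilter"
 * being a made-up name for illustration).
 */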
public static <T> T evalObj(NutConfig config, Class<T> type, String[] args) {
Context context = config.getLoadingContext();
for (int i = 0; i < args.length; i++) {
args[i] = Segments.replace(args[i], context);
}
// A single argument of the form "ioc:xxx" means: fetch the bean named xxx from the Ioc container
if (args.length == 1 && args[0].startsWith("ioc:")) {
String name = Strings.trim(args[0].substring(4));
return config.getIoc().get(type, name);
}
return Mirror.me(type).born((Object[]) args);
}
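/**
 * A class counts as a module when it is public, neither abstract nor an
 * interface, and has at least one method that the configured EntryDeterminer
 * accepts as an action entry.
 */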
public static boolean isModule(Class<?> classZ) {
int classModify = classZ.getModifiers();
if (!Modifier.isPublic(classModify)
|| Modifier.isAbstract(classModify)
|| Modifier.isInterface(classModify))
return false;
for (Method method : classZ.getMethods())
if (determiner.isEntry(classZ, method))
return true;
return false;
}
}
|
package lpn.parser;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
import java.lang.Math;
import verification.timed_state_exploration.zoneProject.IntervalPair;
public class ExprTree {
String op;
char isit; // b=Boolean, i=Integer, c=Continuous, n=Number, t=Truth value,
// w=bitWise, a=Arithmetic, r=Relational, l=Logical
double lvalue, uvalue;
String variable;
double real;
boolean logical;
ExprTree r1, r2;
private String tokvalue = "";
private int position = 0;
public int token = 0;
private ExprTree newresult;
private ArrayList<String> booleanSignals, integerSignals, continuousSignals;
private LhpnFile lhpn;
public String expression;
public ExprTree() {
}
/**
* This constructor is used in PlatuGrammar.g to convert LPNs from USF to LhpnFile.
* All LPNs from USF use integer variables only. So only integer signals are dealt with here.
* @param expression
*/
public ExprTree(String expression) {
this.expression = expression;
booleanSignals = new ArrayList<String>();
integerSignals = new ArrayList<String>();
continuousSignals = new ArrayList<String>();
// intexpr_gettok(expression);
// intexpr_L(expression);
}
public ExprTree(LhpnFile lhpn) {
this.lhpn = lhpn;
String[] bools = lhpn.getBooleanVars();
String[] conts = lhpn.getContVars();
String[] ints = lhpn.getIntVars();
booleanSignals = new ArrayList<String>();
integerSignals = new ArrayList<String>();
continuousSignals = new ArrayList<String>();
for (int j = 0; j < bools.length; j++) {
booleanSignals.add(bools[j]);
}
for (int j = 0; j < conts.length; j++) {
continuousSignals.add(conts[j]);
}
for (int j = 0; j < ints.length; j++) {
integerSignals.add(ints[j]);
}
}
public ExprTree(Abstraction abstraction) {
this.lhpn = abstraction;
String[] bools = abstraction.getBooleanVars();
String[] conts = abstraction.getContVars();
String[] ints = abstraction.getIntVars();
booleanSignals = new ArrayList<String>();
integerSignals = new ArrayList<String>();
continuousSignals = new ArrayList<String>();
for (int j = 0; j < bools.length; j++) {
booleanSignals.add(bools[j]);
}
for (int j = 0; j < conts.length; j++) {
continuousSignals.add(conts[j]);
}
for (int j = 0; j < ints.length; j++) {
integerSignals.add(ints[j]);
}
}
public ExprTree(Transition transition) {
}
ExprTree(char willbe, int lNV, int uNV, String var) {
op = "";
r1 = null;
r2 = null;
isit = willbe;
if ((isit == 'b') || (isit == 't'))
logical = true;
else
logical = false;
uvalue = uNV;
lvalue = lNV;
variable = var;
real = 0;
}
public ExprTree(ExprTree nr1, ExprTree nr2, String nop, char willbe) {
op = nop;
r1 = nr1;
r2 = nr2;
isit = willbe;
if ((isit == 'r') || (isit == 'l')) {
logical = true;
uvalue = 1;
lvalue = 0;
} else {
logical = false;
uvalue = INFIN;
lvalue = -INFIN;
}
variable = null;
}
public ExprTree(ExprTree source) {
if (source.op != null) {
op = source.op;
}
isit = source.isit;
lvalue = source.lvalue;
uvalue = source.uvalue;
if (source.variable != null) {
variable = source.variable;
}
real = source.real;
logical = source.logical;
if (source.r1 != null) {
r1 = source.r1;
}
if (source.r2 != null) {
r2 = source.r2;
}
if (source.tokvalue != null) {
tokvalue = source.tokvalue;
}
position = source.position;
token = source.token;
if (source.newresult != null) {
newresult = source.newresult;
}
if (source.booleanSignals != null) {
booleanSignals = source.booleanSignals;
}
if (source.integerSignals != null) {
integerSignals = source.integerSignals;
}
if (source.continuousSignals != null) {
continuousSignals = source.continuousSignals;
}
if (source.lhpn != null) {
lhpn = source.lhpn;
}
}
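/**
 * Hand-written tokenizer for the expression grammar below. Returns the
 * operator character itself for single-character tokens, WORD for an
 * identifier or numeric literal (whose text accumulates in tokvalue),
 * END_OF_STRING when the input is exhausted, and -1 on a malformed numeric
 * or scientific literal. WORD, END_OF_STRING, IMPLIES and INFIN are
 * constants presumably declared elsewhere in this class; they do not appear
 * in this excerpt.
 */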
public int intexpr_gettok(String expr) {
char c;
boolean readword;
boolean readnum;
boolean readsci;
boolean readsign;
readword = false;
readnum = false;
readsci = false;
readsign = false;
tokvalue = "";
while (position < expr.length()) {
c = expr.charAt(position);
position++;
switch (c) {
case '(':
case ')':
case '[':
case ']':
case ',':
case '~':
case '|':
case '&':
case '*':
case '^':
case '/':
case '%':
case '=':
case '<':
case '>':
if ((!readword) && (!readnum) && (!readsci)) {
return (c);
} else {
position--; // push the operator back so it is re-read on the next call
return (WORD);
}
case '+':
case '-':
if ((readsci) && (!readnum) && (readsign)) {
tokvalue += c;
readsign = false;
break;
}
if ((readsci) && (!readnum) && (!readsign)) {
return -1;
} else if ((!readword) && (!readnum) && (!readsci)) {
return (c);
} else {
position--; // push the operator back so it is re-read on the next call
return (WORD);
}
case ' ':
if (readword) {
return (WORD);
}
break;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
if (!readword) {
readnum = true;
}
tokvalue += c;
break;
case '.':
if (readsci) {
return -1;
} else if (!readword) {
readnum = true;
}
tokvalue += c;
break;
case 'E':
case 'e':
if (readsci) {
return -1;
} else if (readnum) {
readsci = true;
readnum = false;
readsign = true;
tokvalue += c;
break;
}
default:
if ((readnum) || (readsci)) {
return -1;
}
readword = true;
tokvalue += c;
break;
}
}
if ((!readword) && (!readnum)) {
return (END_OF_STRING);
} else if (readword || readnum) {
return (WORD);
}
return -1;
}
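/*
 * Recursive-descent parser, one method per precedence level (loosest binding
 * first): L -> M D handles || and ->, M -> N E handles &&, N handles ~,
 * O -> R P handles the relational operators and bit-select [], R -> S B
 * handles + and -, S -> T C handles *, ^, / and %, T handles unary minus,
 * and U parses primaries: parenthesized subexpressions, literals, variables,
 * and the built-in functions (and, or, xor, min, max, idiv, bit, floor,
 * ceil, not, int, plus the probability distributions). This reading is
 * inferred from the call structure of the methods below.
 */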
public boolean intexpr_U(String expr) {
double temp;
switch (token) {
case WORD:
if (tokvalue.toLowerCase().equals("and")) {
token = intexpr_gettok(expr);
if ((token) != '(') {
System.out.print("ERROR: Expected a (\n");
return false;
}
token = intexpr_gettok(expr);
if (!intexpr_R(expr)) {
return false;
}
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = ((int) (this).lvalue)
& ((int) newresult.lvalue);
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "&", 'w');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("or")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = (int) (this).lvalue
| (int) newresult.lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "|", 'w');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("xor")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = (int) (this).lvalue
^ (int) newresult.lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "X", 'w');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("min")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = Math.min((this).lvalue, newresult.lvalue);
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "m", 'a');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("max")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = Math.max((this).lvalue, newresult.lvalue);
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "M", 'a');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("idiv")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = Math
.floor((this).lvalue / newresult.lvalue);
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "i", 'a');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("bit")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
(this).lvalue = ((int) (this).lvalue >> (int) newresult.lvalue) & 1;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "[]", 'l');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("floor")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if ((((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = Math.floor((this).lvalue);
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), null, "f", 'a');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("ceil")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if ((((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = Math.ceil((this).lvalue);
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), null, "c", 'a');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("not")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if ((((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = ~(int) (this).lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), null, "~", 'w');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("int")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_L(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
// simplify if operands are static
if (((this).isit == 'n') || ((this).isit == 't')) {
// DO NOTHING
} else {
setNodeValues((this), null, "INT", 'l');
}
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("uniform")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), newresult, "uniform", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("normal")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), newresult, "normal", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("gamma")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), newresult, "gamma", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("lognormal")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), newresult, "lognormal", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("binomial")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ',') {
System.out.printf("ERROR: Expected a ,\n");
return false;
}
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
position = newresult.position;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), newresult, "binomial", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("exponential")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), null, "exponential", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("chisq")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), null, "chisq", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("laplace")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), null, "laplace", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("cauchy")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), null, "cauchy", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("rayleigh")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), null, "rayleigh", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("poisson")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), null, "poisson", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("bernoulli")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), null, "bernoulli", 'a');
(token) = intexpr_gettok(expr);
} else if (tokvalue.equals("rate")) {
(token) = intexpr_gettok(expr);
if ((token) != '(') {
System.out.printf("ERROR: Expected a (\n");
return false;
}
(token) = intexpr_gettok(expr);
if (!intexpr_R(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
setNodeValues((this), null, "rate", 'a');
(token) = intexpr_gettok(expr);
} else if ((tokvalue.equals("true")) || tokvalue.equals("TRUE")) {
setVarValues('t', 1, 1, null);
(token) = intexpr_gettok(expr);
}
else if (tokvalue.equals("t") && !booleanSignals.contains(tokvalue) && !integerSignals.contains(tokvalue)
&& !continuousSignals.contains(tokvalue)) {
setVarValues('t', 1, 1, null);
(token) = intexpr_gettok(expr);
}
else if (tokvalue.equals("T") && !booleanSignals.contains(tokvalue) && !integerSignals.contains(tokvalue)
&& !continuousSignals.contains(tokvalue)) {
setVarValues('t', 1, 1, null);
(token) = intexpr_gettok(expr);
}
else if ((tokvalue.equals("false")) || tokvalue.equals("FALSE")) {
setVarValues('t', 0, 0, null);
(token) = intexpr_gettok(expr);
}
else if (tokvalue.equals("f") && !booleanSignals.contains(tokvalue) && !integerSignals.contains(tokvalue)
&& !continuousSignals.contains(tokvalue)) {
setVarValues('t', 0, 0, null);
(token) = intexpr_gettok(expr);
}
else if (tokvalue.equals("F") && !booleanSignals.contains(tokvalue) && !integerSignals.contains(tokvalue)
&& !continuousSignals.contains(tokvalue)) {
setVarValues('t', 0, 0, null);
(token) = intexpr_gettok(expr);
} else if ((tokvalue.toLowerCase().equals("unknown"))) {
setVarValues('t', 0, 1, null);
(token) = intexpr_gettok(expr);
} else if (tokvalue.toLowerCase().equals("inf")) {
setVarValues('n', INFIN, INFIN, null);
token = intexpr_gettok(expr);
} else {
// do boolean lookup here!!!
if (booleanSignals.contains(tokvalue)) {
setVarValues('b', 0, 1, tokvalue);
(token) = intexpr_gettok(expr);
return true;
}
else if (integerSignals.contains(tokvalue)) {
setVarValues('i', -INFIN, INFIN, tokvalue);
(token) = intexpr_gettok(expr);
return true;
}
else if (continuousSignals.contains(tokvalue)) {
setVarValues('c', -INFIN, INFIN, tokvalue);
(token) = intexpr_gettok(expr);
return true;
}
if (tokvalue.equals("")) {
System.out.printf(
"U1:ERROR(%s): Expected a ID, Number, or a (\n",
tokvalue);
return false;
} else if ((int) (tokvalue.charAt(0)) > ('9')
|| ((int) (tokvalue.charAt(0)) < '0')) {
System.out.printf(
"U1:ERROR(%s): Expected a ID, Number, or a (\n",
tokvalue);
return false;
}
temp = Double.parseDouble(tokvalue);
setVarValues('n', temp, temp, null);
token = intexpr_gettok(expr);
}
break;
case '(':
(token) = intexpr_gettok(expr);
if (!intexpr_L(expr))
return false;
if ((token) != ')') {
System.out.printf("ERROR: Expected a )\n");
return false;
}
(token) = intexpr_gettok(expr);
break;
default:
System.out.printf("U2:ERROR: Expected a ID, Number, or a (\n");
return false;
}
return true;
}
public boolean intexpr_T(String expr) {
switch (token) {
case WORD:
case '(':
if (!intexpr_U(expr))
return false;
break;
case '-':
(token) = intexpr_gettok(expr);
if (!intexpr_U(expr))
return false;
// simplify if operands are static
if ((((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = -((this).lvalue);
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), null, "U-", 'a');
}
break;
default:
System.out.printf("T:ERROR: Expected a ID, Number, (, or -\n");
return false;
}
return true;
}
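/*
 * Multiplicative level: consumes *, ^, / and % left to right by re-entering
 * intexpr_C after each operand; a '(' or WORD appearing in operator position
 * is treated as an implicit multiplication.
 */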
public boolean intexpr_C(String expr) {
newresult = new ExprTree(this);
switch (token) {
case '*':
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_T(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = (this).lvalue * newresult.lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "*", 'a');
}
if (!intexpr_C(expr))
return false;
break;
case '^':
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_T(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = Math.pow(lvalue, newresult.lvalue);
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "^", 'a');
}
if (!intexpr_C(expr))
return false;
break;
case '/':
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_T(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = (this).lvalue / newresult.lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "/", 'a');
}
if (!intexpr_C(expr))
return false;
break;
case '%':
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_T(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = (this).lvalue % newresult.lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "%", 'a');
}
if (!intexpr_C(expr))
return false;
break;
case '+':
case '-':
case ')':
case '[':
case ']':
case '|':
case '&':
case '=':
case '<':
case '>':
case ',':
case IMPLIES:
case END_OF_STRING:
break;
case '(':
case WORD:
if (!newresult.intexpr_T(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = (this).lvalue * newresult.lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "*", 'a');
}
if (!intexpr_C(expr))
return false;
break;
default:
System.out.printf("ERROR: Expected a * or /\n");
return false;
}
return true;
}
public boolean intexpr_B(String expr) {
newresult = new ExprTree(this);
switch (token) {
case '+':
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_S(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = (this).lvalue + newresult.lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "+", 'a');
}
if (!intexpr_B(expr))
return false;
break;
case '-':
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_S(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 'n';
(this).lvalue = (this).lvalue - newresult.lvalue;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "-", 'a');
}
if (!intexpr_B(expr))
return false;
break;
case ')':
case '[':
case ']':
case '|':
case '&':
case '=':
case '<':
case '>':
case ',':
case IMPLIES:
case END_OF_STRING:
break;
default:
System.out.printf("ERROR: Expected a + or -\n");
return false;
}
return true;
}
public boolean intexpr_S(String expr) {
switch (token) {
case WORD:
case '(':
case '-':
if (!intexpr_T(expr))
return false;
if (!intexpr_C(expr))
return false;
break;
default:
System.out.printf("S:ERROR: Expected a ID, Number, (, or -\n");
return false;
}
return true;
}
public boolean intexpr_R(String expr) {
switch (token) {
case WORD:
case '(':
case '-':
if (!intexpr_S(expr))
return false;
if (!intexpr_B(expr))
return false;
break;
default:
System.out.printf("R:ERROR: Expected a ID, Number, (, or -\n");
return false;
}
return true;
}
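/*
 * Relational level: =, >, >=, <, <= and the bit-select [ ]. A comparison
 * whose left-hand side is a continuous variable is registered as a
 * synthesized boolean signal named "variable=..." instead of being folded.
 */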
public boolean intexpr_P(String expr) {
newresult = new ExprTree(this);
int spos, i;
String ineq = "";
String comp;
switch (token) {
case '=':
spos = position;
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
tokvalue = newresult.tokvalue;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
if (this.lvalue == newresult.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
} else {
if ((this).isit == 'c') {
comp = variable;
comp += "=";
// int paren = 0;
// for (i = spos; i < position; i++) {
// if (expr.charAt(i) == '(')
// paren++;
// if (expr.charAt(i) == ')')
// paren--;
// ineq = ineq + expr.charAt(i);
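// NOTE: the loop above that captured the inequality text is commented out,
// so ineq is always empty here and comp is just variable + "=".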
comp += ineq;
if (booleanSignals.contains(comp)) {
this.isit = 'b';
this.variable = comp;
this.lvalue = 0;
this.uvalue = 1;
return true;
} else {
booleanSignals.add(comp);
this.isit = 'b';
this.variable = comp;
this.lvalue = 0;
this.uvalue = 1;
return true;
}
} else {
setNodeValues((this), newresult, "==", 'r');
}
}
break;
case '>':
spos = position;
(token) = intexpr_gettok(expr);
newresult.token = token;
if ((token) == '=') {
spos = position;
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
tokvalue = newresult.tokvalue;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
if ((this).lvalue >= newresult.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, ">=", 'r');
}
} else {
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
tokvalue = newresult.tokvalue;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
if ((this).lvalue > newresult.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, ">", 'r');
}
}
break;
case '<':
spos = position;
(token) = intexpr_gettok(expr);
if ((token) == '=') {
spos = position;
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
tokvalue = newresult.tokvalue;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
if ((this).lvalue <= newresult.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "<=", 'r');
}
} else {
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
tokvalue = newresult.tokvalue;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN)
&& ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
if ((this).lvalue < newresult.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "<", 'r');
}
}
break;
case '[':
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_R(expr))
return false;
token = newresult.token;
tokvalue = newresult.tokvalue;
position = newresult.position;
if ((token) != ']') {
System.out.printf("ERROR: Expected a ]\n");
return false;
}
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
(this).lvalue = (((int) (this).lvalue) >> ((int) newresult.lvalue)) & 1;
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "[]", 'l');
}
(token) = intexpr_gettok(expr);
break;
case '&':
case '|':
case ')':
case IMPLIES:
case END_OF_STRING:
break;
default:
System.out.printf("ERROR: Expected a [, =, <, or >\n");
return false;
}
return true;
}
public boolean intexpr_O(String expr) {
switch (token) {
case WORD:
case '(':
case '-':
if (!intexpr_R(expr))
return false;
if (!intexpr_P(expr))
return false;
break;
default:
System.out.printf("O:ERROR: Expected a ID, Number, or a (\n");
return false;
}
return true;
}
public boolean intexpr_N(String expr) {
switch (token) {
case WORD:
case '-':
case '(':
if (!intexpr_O(expr))
return false;
break;
case '~':
(token) = intexpr_gettok(expr);
if (!intexpr_O(expr))
return false;
// simplify if operands are static
if ((((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)) {
(this).isit = 't';
if (this.lvalue == 1) {
this.lvalue = 0;
} else {
this.lvalue = 1;
}
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), null, "!", 'l');
}
break;
default:
System.out.printf("N:ERROR: Expected a ID, Number, (, or -\n");
return false;
}
return true;
}
public boolean intexpr_E(String expr) {
newresult = new ExprTree(this);
switch (token) {
case '&':
token = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_N(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
if ((this.lvalue == 0) || (newresult.lvalue == 0)) {
this.lvalue = 0;
} else {
this.lvalue = 1;
}
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "&&", 'l');
}
if (!intexpr_E(expr))
return false;
break;
case '|':
case ')':
case IMPLIES:
case END_OF_STRING:
break;
default:
System.out.printf("ERROR(%c): Expected an &\n", (token));
return false;
}
return true;
}
public boolean intexpr_D(String expr) {
newresult = new ExprTree(this);
switch (token) {
case '|':
(token) = intexpr_gettok(expr);
newresult.token = token;
newresult.tokvalue = tokvalue;
newresult.position = position;
if (!newresult.intexpr_M(expr))
return false;
token = newresult.token;
position = newresult.position;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
if (this.lvalue != 0 || newresult.lvalue != 0) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
} else {
setNodeValues((this), newresult, "||", 'l');
}
if (!intexpr_D(expr))
return false;
break;
case ')':
case END_OF_STRING:
break;
case IMPLIES:
(token) = intexpr_gettok(expr);
if (!intexpr_M(expr))
return false;
// simplify if operands are static
if (((newresult.isit == 'n') || (newresult.isit == 't'))
&& (((this).isit == 'n') || ((this).isit == 't'))
&& ((this).lvalue == (this).uvalue)
&& (newresult.lvalue == newresult.uvalue)
&& ((this).lvalue != INFIN) && ((this).lvalue != -INFIN)
&& (newresult.lvalue != INFIN)
&& (newresult.lvalue != -INFIN)) {
(this).isit = 't';
if (this.lvalue != 0 || newresult.lvalue == 0) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
} else {
setNodeValues(this, newresult, "->", 'l');
}
if (!intexpr_D(expr))
return false;
break;
default:
System.out.printf("ERROR: Expected an | or ->\n");
return false;
}
return true;
}
public boolean intexpr_M(String expr) {
switch (token) {
case WORD:
case '(':
case '~':
case '-':
if (!intexpr_N(expr))
return false;
if (!intexpr_E(expr))
return false;
break;
default:
System.out.printf("M: ERROR: Expected a ID, Number, (, or -\n");
return false;
}
return true;
}
public boolean intexpr_L(String expr) {
switch (token) {
case WORD:
case '(':
case '~':
case '-':
if (!intexpr_M(expr))
return false;
if (!intexpr_D(expr))
return false;
break;
default:
System.out.printf("L:ERROR: Expected a ID, Number, (, or -\n");
return false;
}
return true;
}
public String toString() {
return getElement("LHPN");
}
public String toString(String type) {
return getElement(type);
}
public String toString(String type, String lhpnSbml) {
String result = getElement(lhpnSbml);
if (type.equals("continuous") || type.equals("integer")) {
if (isit == 't') {
if (uvalue == 0) {
result = "0";
} else {
result = "1";
}
}
} else {
if (isit == 'n') {
if (uvalue == 0) {
result = "FALSE";
} else {
result = "TRUE";
}
}
}
return result;
}
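/*
 * Conservative syntactic implication check: an expression implies another if
 * the two are equal, if it implies either disjunct of an || (or both
 * conjuncts of an &&), or, for relational nodes comparing in the same
 * direction, if its constant bound is at least as tight. A constant-false
 * truth value implies everything; all other cases default to false.
 */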
public boolean implies(ExprTree expr) {
if (isEqual(expr)) {
return true;
}
if (expr.isit == 'l' && expr.op.equals("||")) {
if (implies(expr.r1) || implies(expr.r2)) {
return true;
}
} else if (expr.isit == 'l' && expr.op.equals("&&")) {
if (implies(expr.r1) && implies(expr.r2)) {
return true;
}
}
switch (isit) {
case 't': // Truth value
if (uvalue == 1 && lvalue == 1) {
return false;
} else if (uvalue == 0 && lvalue == 0) {
return true;
} else {
return false;
}
case 'r': // Relational
if (op.contains(">")) {
if (expr.isit == 'r' && expr.op.contains(">")) {
if (r2.lvalue > expr.r2.uvalue) {
return true;
} else if (r2.lvalue == expr.r2.uvalue
&& op.length() >= expr.op.length()) {
return true;
}
}
} else if (op.contains("<")) {
if (expr.isit == 'r' && expr.op.contains("<")) {
if (r2.lvalue < expr.r2.uvalue) {
return true;
} else if (r2.lvalue == expr.r2.uvalue
&& op.length() >= expr.op.length()) {
return true;
}
}
}
return false;
case 'l': // Logical
if (op.equals("&&")) {
if (expr.isit == 'b') {
if (r1.implies(expr) || r2.implies(expr)) {
return true;
}
}
} else if (op.equals("||")) {
if (expr.isit == 'b') {
if (r1.implies(expr) && r2.implies(expr)) {
return true;
}
}
}
return false;
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
case 'n': // Number
case 'w': // bitWise
case 'a': // Arithmetic
default:
return false;
}
}
public boolean containsVar(String var) {
switch (isit) {
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
if (variable.equals(var))
return true;
return false;
case 'r': // Relational
case 'l': // Logical
case 'a': // Arithmetic
case 'w': // bitWise
if (r1 != null) {
if (r1.containsVar(var)) {
return true;
}
}
if (r2 != null) {
if (r2.containsVar(var)) {
return true;
}
}
return false;
case 'n': // Number
case 't': // Truth value
default:
return false;
}
}
public ArrayList<String> getVars() {
ArrayList<String> vars = new ArrayList<String>();
switch (isit) {
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
if (!vars.contains(variable))
vars.add(variable);
break;
case 'r': // Relational
case 'l': // Logical
case 'a': // Arithmetic
case 'w': // bitWise
if (r1 != null)
vars.addAll(r1.getVars());
if (r2 != null)
vars.addAll(r2.getVars());
break;
case 'n': // Number
case 't': // Truth value
default:
break;
}
return vars;
}
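/*
 * Scales the numeric literals in the tree by scaleFactor. Note that the 'n'
 * case reads the literal's text from variable; number nodes built by the
 * parser in this file keep their value in lvalue/uvalue and leave variable
 * null, so this path apparently targets trees constructed elsewhere.
 */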
public void scaleVals(Double scaleFactor) {
switch (isit) {
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
break;
case 'r': // Relational
case 'l': // Logical
case 'a': // Arithmetic
case 'w': // bitWise
if (r1 != null)
r1.scaleVals(scaleFactor);
if (r2 != null)
r2.scaleVals(scaleFactor);
break;
case 'n': // Number
variable = String
.valueOf((int) (Double.parseDouble(variable) * scaleFactor));
break;
case 't': // Truth value
default:
break;
}
}
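/*
 * Note: despite the name, this also reports true for integer, relational,
 * arithmetic and number nodes, so it over-approximates "may involve a
 * continuous quantity" rather than testing strictly for continuous
 * variables.
 */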
public boolean containsCont() {
switch (isit) {
case 'b': // Boolean
case 't': // Truth value
return false;
case 'i': // Integer
case 'c': // Continuous
case 'r': // Relational
case 'a': // Arithmetic
case 'n': // Number
return true;
case 'l': // Logical
case 'w': // bitWise
boolean r1cont = false, r2cont = false;
if (r1 != null)
r1cont = r1.containsCont();
if (r2 != null)
r2cont = r2.containsCont();
return (r1cont || r2cont);
}
return false;
}
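/*
 * Substitutes the expression e for every occurrence of var in this tree,
 * then folds any subtree whose operands have become static; when the result
 * type is boolean rather than integer/continuous, the folded bounds are
 * re-normalized to {0,1} at the end.
 */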
public void replace(String var, String type, ExprTree e) {
if (this == e) {
return;
}
boolean simplify = false;
switch (isit) {
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
if (variable.equals(var)) {
if (e.isit == 'a' || e.isit == 'r' || e.isit == 'l'
|| e.isit == 'w') {
setNodeValues(e.r1, e.r2, e.op, e.isit);
} else {
setVarValues(e.isit, e.lvalue, e.uvalue, e.variable);
}
}
return;
case 'w': // bitWise
case 'l': // Logical
case 'r': // Relational
case 'a': // Arithmetic
if (r1 != null)
r1.replace(var, type, e);
if (r2 != null)
r2.replace(var, type, e);
// with the children replaced, fall through and fold this node
// if its operands are now static
// simplify if operands are static
if (op.equals("&&")) {
if ((r1.isit == 'n') || (r1.isit == 't')) {
if (r1.lvalue == 0) {
setVarValues('t', 0.0, 0.0, null);
simplify = true;
} else {
if (r2.isit == 'l' || r2.isit == 'a' || r2.isit == 'w'
|| r2.isit == 'r') {
setNodeValues(r2.r1, r2.r2, r2.op, r2.isit);
} else {
setVarValues(r2.isit, r2.lvalue, r2.uvalue,
r2.variable);
}
}
} else if (((r2).isit == 'n') || ((r2).isit == 't')) {
if (r2.lvalue == 0) {
setVarValues('t', 0.0, 0.0, null);
simplify = true;
} else {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue,
r1.variable);
}
}
}
} else if (op.equals("||")) {
if ((r1.isit == 'n') || (r1.isit == 't')) {
if (r1.lvalue == 1) {
setVarValues('t', 1.0, 1.0, null);
simplify = true;
} else {
if (r2.isit == 'l' || r2.isit == 'a' || r2.isit == 'w'
|| r2.isit == 'r') {
setNodeValues(r2.r1, r2.r2, r2.op, r2.isit);
} else {
setVarValues(r2.isit, r2.lvalue, r2.uvalue,
r2.variable);
}
}
} else if (((r2).isit == 'n') || ((r2).isit == 't')) {
if (r2.lvalue == 1) {
setVarValues('t', 1.0, 1.0, null);
simplify = true;
} else {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue,
r1.variable);
}
}
} else if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if (r1.lvalue != 0 || r2.lvalue != 0) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("->")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if (r1.lvalue != 0 || r2.lvalue == 0) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("!")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 't';
if (r1.lvalue == 1) {
this.lvalue = 0;
} else {
this.lvalue = 1;
}
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("==")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if (r1.lvalue == r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals(">=")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if ((r1).lvalue >= r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals(">")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if ((r1).lvalue > r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("<=")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if ((r1).lvalue <= r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("<")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if ((r1).lvalue < r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("&")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = ((int) (r1).lvalue) & ((int) r2.lvalue);
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("|")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (int) (r1).lvalue | (int) r2.lvalue;
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("X")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (int) (r1).lvalue ^ (int) r2.lvalue;
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("m")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = Math.min((r1).lvalue, r2.lvalue);
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("M")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = Math.max((r1).lvalue, r2.lvalue);
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("i")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = Math.floor((r1).lvalue / (r2).lvalue);
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("f")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 'n';
(this).lvalue = Math.floor((r1).lvalue);
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("c")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 'n';
(this).lvalue = Math.ceil((r1).lvalue);
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("~")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 'n';
(this).lvalue = ~(int) (r1).lvalue;
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("[]")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
(this).lvalue = (((int) (r1).lvalue) >> ((int) r2.lvalue)) & 1;
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("U-")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 'n';
(this).lvalue = -((r1).lvalue);
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("*")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue * r2.lvalue;
(this).uvalue = (this).lvalue;
}
} else if (op.equals("/")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue / r2.lvalue;
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("%")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue % r2.lvalue;
(this).uvalue = (this).lvalue;
simplify = true;
}
} else if (op.equals("+")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue + r2.lvalue;
(this).uvalue = (this).lvalue;
} else if ((r1.isit == 'n') || (r1.isit == 't')) {
if (r1.lvalue == 0 && r1.uvalue == 0) {
setNodeValues(r2.r1, r2.r2, r2.op, r2.isit);
}
} else if (((r2).isit == 'n') || ((r2).isit == 't')) {
if (r2.lvalue == 0 && r2.uvalue == 0) {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
}
}
} else if (op.equals("-")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue - r2.lvalue;
(this).uvalue = (this).lvalue;
simplify = true;
}
}
break;
case 't': // Truth value
if (lvalue != 0 && uvalue != 0) {
lvalue = 1;
uvalue = 1;
} else if (lvalue != 0 || uvalue != 0) {
lvalue = 0;
uvalue = 1;
}
return;
case 'n': // Number
break;
}
if (simplify) {
if (type.equals("integer") || type.equals("continuous")) {
isit = 'n';
} else {
isit = 't';
if (lvalue != 0 && uvalue != 0) {
lvalue = 1;
uvalue = 1;
} else if (lvalue != 0 || uvalue != 0) {
lvalue = 0;
uvalue = 1;
}
}
}
}
public void replaceVar(String var1, String var2) {
switch (isit) {
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
if (variable.equals(var1)) {
variable = var2;
}
return;
case 'w': // bitWise
case 'l': // Logical
case 'r': // Relational
case 'a': // Arithmetic
if (r1 != null)
r1.replaceVar(var1, var2);
if (r2 != null)
r2.replaceVar(var1, var2);
case 't': // Truth value
case 'n': // Number
break;
}
}
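/*
 * Classifies how this expression can change under the given variable
 * assignment. From the case analysis below, the codes appear to be: 'T'/'F'
 * definitely true/false, 't'/'f' possibly becoming true/false, 'X'
 * indeterminate, and 'U' unaffected (no referenced variable is assigned).
 * This legend is inferred from the code, not from original documentation.
 */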
public char getChange(HashMap<String, String> variables) {
switch (isit) {
case 'b': // Boolean
if (variables.containsKey(variable)) {
String value = variables.get(variable).toLowerCase();
if (value.equals("false"))
return 'F';
if (value.equals("true"))
return 'T';
return 'X';
} else {
return 'U';
}
case 't': // Truth value
/*
if (uvalue == 0)
return 'F';
else if (lvalue == 1)
return 'T';
*/
return 'U';
case 'l': // Logical
if (op.equals("||")) {
if (r1.getChange(variables) == 'T'
|| r2.getChange(variables) == 'T') {
return 'T';
} else if (r1.getChange(variables) == 'X'
|| r2.getChange(variables) == 'X') {
return 'X';
} else if (r1.getChange(variables) == 't') {
if (r2.getChange(variables) == 'f') {
return 'X';
} else {
return 't';
}
} else if (r2.getChange(variables) == 't') {
if (r1.getChange(variables) == 'f') {
return 'X';
} else {
return 't';
}
} else if (r1.getChange(variables) == 'f'
|| r2.getChange(variables) == 'f') {
return 'f';
} else if (r1.getChange(variables) == 'F') {
if (r2.getChange(variables) == 'F') {
return 'F';
} else {
return 'f';
}
} else if (r2.getChange(variables) == 'F') {
return 'f';
}
return 'U';
} else if (op.equals("&&")) {
if (r1.getChange(variables) == 'F'
|| r2.getChange(variables) == 'F') {
return 'F';
} else if (r1.getChange(variables) == 'X'
|| r2.getChange(variables) == 'X') {
return 'X';
} else if (r1.getChange(variables) == 'f') {
if (r2.getChange(variables) == 't') {
return 'X';
} else {
return 'f';
}
} else if (r2.getChange(variables) == 'f') {
if (r1.getChange(variables) == 't') {
return 'X';
} else {
return 'f';
}
} else if (r1.getChange(variables) == 't'
|| r2.getChange(variables) == 't') {
return 't';
} else if (r1.getChange(variables) == 'T') {
if (r2.getChange(variables) == 'T') {
return 'T';
} else {
return 't';
}
} else if (r2.getChange(variables) == 'T') {
return 't';
}
return 'U';
} else if (op.equals("!")) {
if (r1.getChange(variables) == 'T') {
return 'F';
} else if (r1.getChange(variables) == 'F') {
return 'T';
} else if (r1.getChange(variables) == 't') {
return 'f';
} else if (r1.getChange(variables) == 'f') {
return 't';
}
return r1.getChange(variables);
} else if (op.equals("->")) {
if (r1.getChange(variables) == 'T'
|| r2.getChange(variables) == 'F') {
return 'T';
} else if (r1.getChange(variables) == 'X'
|| r2.getChange(variables) == 'X') {
return 'X';
} else if (r1.getChange(variables) == 't') {
if (r2.getChange(variables) == 't') {
return 'X';
} else {
return 't';
}
} else if (r2.getChange(variables) == 'f') {
if (r1.getChange(variables) == 'f') {
return 'X';
} else {
return 't';
}
} else if (r1.getChange(variables) == 'f') {
return 'f';
} else if (r2.getChange(variables) == 't') {
return 'f';
} else if (r1.getChange(variables) == 'F') {
if (r2.getChange(variables) == 'T') {
return 'F';
} else {
return 'f';
}
} else if (r2.getChange(variables) == 'T') {
return 'f';
}
return 'U';
}
		case 'r': // Relational
			boolean flag = false;
			for (String var : getVars()) {
				if (variables.containsKey(var)) {
					flag = true;
					break;
				}
			}
			if (!flag) {
				return 'U';
			}
			// Evaluate each side once so that randomized number nodes are
			// sampled consistently for the comparison and the NaN check.
			double v1 = r1.evaluateExpr(variables);
			double v2 = r2.evaluateExpr(variables);
			if (op.equals("==")) {
				if (v1 == v2) {
					return 'T';
				} else if (Double.isNaN(v1) || Double.isNaN(v2)) {
					return 'X';
				}
				return 'F';
			} else if (op.equals(">=")) {
				if (v1 >= v2) {
					return 'T';
				} else if (Double.isNaN(v1) || Double.isNaN(v2)) {
					return 'X';
				}
				return 'F';
			} else if (op.equals("<=")) {
				if (v1 <= v2) {
					return 'T';
				} else if (Double.isNaN(v1) || Double.isNaN(v2)) {
					return 'X';
				}
				return 'F';
			} else if (op.equals(">")) {
				if (v1 > v2) {
					return 'T';
				} else if (Double.isNaN(v1) || Double.isNaN(v2)) {
					return 'X';
				}
				return 'F';
			} else if (op.equals("<")) {
				if (v1 < v2) {
					return 'T';
				} else if (Double.isNaN(v1) || Double.isNaN(v2)) {
					return 'X';
				}
				return 'F';
			}
			return 'X';
case 'i': // Integer
if (variables.containsKey(variable)) {
				if (Integer.parseInt(variables.get(variable)) == 0) {
return 'F';
} else {
return 'T';
}
} else {
return 'U';
}
case 'c': // Continuous
return 'X';
case 'n': // Number
if (uvalue == 0.0 && lvalue == 0.0) {
return 'F';
} else {
return 'T';
}
}
return 'X';
}
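	/**
	 * Conservatively reports whether the given assignment can make this
	 * expression false.
	 */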
public boolean becomesFalse(HashMap<String, String> variables) {
switch (isit) {
		case 'b': // Boolean
			if (variables.containsKey(variable)
					&& variables.get(variable).toLowerCase().equals("false"))
				return true;
			return false;
case 't': // Truth value
if (lvalue == 0)
return true;
return false;
case 'l': // Logical
if (op.equals("||")) {
if (r1.becomesFalse(variables) && r2.becomesFalse(variables)) {
return true;
}
return false;
} else if (op.equals("&&")) {
if ((r1.becomesFalse(variables) && !r2.becomesTrue(variables))
|| (!r1.becomesTrue(variables) && r2
.becomesFalse(variables)))
return true;
return false;
} else if (op.equals("==")) {
if (!(r1.isEqual(r2) || r1.evaluateExpr(variables) == r2
.evaluateExpr(variables)))
return true;
return false;
} else if (op.equals("!")) {
if (r1.becomesTrue(variables))
return true;
return false;
} else if (op.equals("->")) {
if (r1.becomesFalse(variables) || r2.becomesTrue(variables)) {
return true;
}
return false;
} else if (op.equals("[]")) {
if (!(evaluateExpr(variables) == 0.0)) {
return true;
}
return false;
}
case 'w': // bitWise
if (op.equals("&")) {
if (!(evaluateExpr(variables) == 0.0)) {
return true;
}
return false;
} else if (op.equals("|")) {
if (!(evaluateExpr(variables) == 0.0)) {
return true;
}
return false;
} else if (op.equals("X")) {
if (!(evaluateExpr(variables) == 0.0)) {
return true;
}
return false;
} else if (op.equals("~")) {
if (!(evaluateExpr(variables) == 0.0)) {
return true;
}
return false;
}
case 'r': // Relational
if (r1.isit == 'i') {
if (!variables.containsKey(r1.variable)) {
return false;
}
if (op.equals("==")) {
if (r1.evaluateExpr(variables) == r2
.evaluateExpr(variables)) {
return false;
}
return true;
} else if (op.equals(">=")) {
if (r1.evaluateExpr(variables) >= r2
.evaluateExpr(variables)) {
return false;
}
return true;
} else if (op.equals("<=")) {
if (r1.evaluateExpr(variables) <= r2
.evaluateExpr(variables)) {
return false;
}
return true;
} else if (op.equals(">")) {
if (r1.evaluateExpr(variables) > r2.evaluateExpr(variables)) {
return false;
}
return true;
} else if (op.equals("<")) {
if (r1.evaluateExpr(variables) < r2.evaluateExpr(variables)) {
return false;
}
return true;
}
return true;
} else {
return true;
}
case 'i': // Integer
if (variables.containsKey(variable)) {
				if (Integer.parseInt(variables.get(variable)) == 0) {
return true;
} else {
return false;
}
} else {
return false;
}
case 'c': // Continuous
return true;
case 'a': // Arithmetic
boolean contains = false;
for (String s : getVars()) {
if (variables.containsKey(s)) {
contains = true;
break;
}
}
if (!contains) {
return false;
}
			return evaluateExpr(variables) == 0.0;
case 'n': // Number
if (uvalue == 0.0 && lvalue == 0.0) {
return true;
} else {
return false;
}
}
return false;
}
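	/**
	 * Conservatively reports whether the given assignment can make this
	 * expression true.
	 */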
public boolean becomesTrue(HashMap<String, String> variables) {
switch (isit) {
case 'b': // Boolean
			if (variables.containsKey(variable)) {
				String value = variables.get(variable);
				if (value.matches("[\\d.]+") && Double.parseDouble(value) != 0) {
					return true;
				}
				if (value.toLowerCase().equals("true"))
					return true;
			}
return false;
case 'i': // Integer
if (variables.containsKey(variable)) {
if (!variables.get(variable).equals("0.0")) {
return true;
}
}
return false;
case 'c': // Continuous
return true;
case 'n': // Number
case 't': // Truth value
if (uvalue != 0)
return true;
return false;
case 'l': // Logical
if (op.equals("||")) {
if (r1.becomesTrue(variables) || r2.becomesTrue(variables))
return true;
return false;
} else if (op.equals("&&")) {
if ((r1.becomesTrue(variables) && !r2.becomesFalse(variables))
|| (!r1.becomesFalse(variables) && r2
.becomesTrue(variables)))
return true;
return false;
} else if (op.equals("==")) {
if (r1.isEqual(r2, variables)
|| r1.evaluateExpr(variables) == r2
.evaluateExpr(variables))
return true;
return false;
} else if (op.equals("!")) {
if (r1.becomesFalse(variables))
return true;
return false;
} else if (op.equals("->")) {
if (r1.becomesTrue(variables) || r2.becomesFalse(variables)) {
return true;
} else {
return false;
}
}
case 'w': // bitWise
if (op.equals("&")) {
if (evaluateExpr(variables) == 0.0) {
return false;
}
return true;
} else if (op.equals("|")) {
if (evaluateExpr(variables) == 0.0) {
return false;
}
return true;
} else if (op.equals("X")) {
if (evaluateExpr(variables) == 0.0) {
return false;
}
return true;
} else if (op.equals("~")) {
if (evaluateExpr(variables) == 0.0) {
return false;
}
return true;
} else if (op.equals("[]")) {
if (evaluateExpr(variables) == 0.0) {
return false;
}
return true;
}
case 'r': // Relational
if (r1.isit == 'i') {
if (!variables.containsKey(r1.variable)) {
return false;
}
if (op.equals("==")) {
if (!(r1.evaluateExpr(variables) == r2
.evaluateExpr(variables))) {
return false;
}
return true;
} else if (op.equals(">=")) {
if (!(r1.evaluateExpr(variables) >= r2
.evaluateExpr(variables))) {
return false;
}
return true;
} else if (op.equals("<=")) {
if (!(r1.evaluateExpr(variables) <= r2
.evaluateExpr(variables))) {
return false;
}
return true;
} else if (op.equals(">")) {
if (!(r1.evaluateExpr(variables) > r2
.evaluateExpr(variables))) {
return false;
}
return true;
} else if (op.equals("<")) {
if (!(r1.evaluateExpr(variables) < r2
.evaluateExpr(variables))) {
return false;
}
return true;
}
return true;
} else {
return true;
}
case 'a': // Arithmetic
boolean contains = false;
for (String s : getVars()) {
if (variables.containsKey(s)) {
contains = true;
break;
}
}
if (!contains) {
return false;
}
			return evaluateExpr(variables) != 0.0;
}
return true;
}
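	/**
	 * Renders this expression as a string in the requested target syntax:
	 * "SBML", "Verilog" (case-insensitive), or the default textual form.
	 */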
public String getElement(String type) {
		boolean sbmlFlag = type.equals("SBML");
		boolean verilog = type.equalsIgnoreCase("Verilog");
String result = "";
switch (isit) {
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
if (!sbmlFlag) {
result = variable;
} else {
if (isit == 'b') {
result = "eq(" + variable + ",1)";
} else {
result = variable;
}
}
break;
case 'n': // Number
// long term solution: create initial assignment
// short term solution: initialize all inf, -inf, [-inf, inf] to 0
// initialize [l,u] to (l+u)/2
Double tempuval = uvalue;
Double templval = lvalue;
			if (uvalue == lvalue) {
if (lvalue == INFIN) {
result = "inf";
} else if (lvalue == -INFIN) {
result = "-inf";
} else {
if (tempuval % 1 == 0) {
int tempval = (int) (tempuval / 1);
result = new Integer(tempval).toString();
} else {
result = tempuval.toString();
}
}
} else {
String lval;
if (lvalue == INFIN) {
lval = "inf";
} else if (lvalue == -INFIN) {
lval = "-inf";
} else {
					if (templval % 1 == 0) {
						int tempval = (int) (templval / 1);
lval = new Integer(tempval).toString();
} else {
lval = templval.toString();
}
}
String uval;
if (uvalue == INFIN) {
uval = "inf";
} else if (uvalue == -INFIN) {
uval = "-inf";
} else {
if (tempuval % 1 == 0) {
int tempval = (int) (tempuval / 1);
uval = new Integer(tempval).toString();
} else {
uval = tempuval.toString();
}
}
				// identical syntax for Verilog and the default form
				result = "uniform(" + lval + "," + uval + ")";
}
break;
case 't': // Truth value
if (uvalue == 0 && lvalue == 0) {
if (verilog)
result = "0";
else
result = "FALSE";
} else if (uvalue == 1 && lvalue == 1) {
if (verilog)
result = "1";
else
result = "TRUE";
			} else {
				if (sbmlFlag || verilog) {
					result = "0";
				} else {
					result = "UNKNOWN";
				}
			}
break;
case 'w': // bitWise
if (op.equals("&")) {
if (r1 != null && r2 != null) {
if (sbmlFlag) {
result = "BITAND(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
} else if (verilog) {
result = r1.getElement(type) + "&"
+ r2.getElement(type);
} else {
result = "and(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
} else if (op.equals("|")) {
if (r1 != null && r2 != null) {
if (sbmlFlag) {
result = "BITOR(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
} else if (verilog) {
result = r1.getElement(type) + "|"
+ r2.getElement(type);
} else {
result = "or(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
			} else if (op.equals("!")) {
				if (r1 != null) { // bitwise NOT is unary; r2 is not needed
if (sbmlFlag) {
result = "BITNOT(" + r1.getElement(type) + ")";
} else if (verilog) {
result = "~" + r1.getElement(type);
} else {
result = "not(" + r1.getElement(type) + ")";
}
}
} else if (op.equals("X")) {
if (r1 != null && r2 != null) {
if (sbmlFlag) {
result = "XOR(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
} else if (verilog) {
result = r1.getElement(type) + "^"
+ r2.getElement(type);
} else {
result = "exor(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
}
break;
case 'a': // Arithmetic
case 'r': // Relational
case 'l': // Logical
if (op.equals("!")) {
if (r1 != null) {
if (r1.isit == 'b' || r1.isit == 'i' || r1.isit == 'c'
|| r1.isit == 'n' || r1.isit == 't') {
if (sbmlFlag) {
result = "not(" + r1.getElement(type) + ")";
} else if (verilog) {
result = "!" + r1.getElement(type);
} else {
result = "~" + r1.getElement(type);
}
} else {
if (sbmlFlag) {
result = "not(" + r1.getElement(type) + ")";
} else if (verilog) {
result = "!" + "(" + r1.getElement(type) + ")";
} else {
result = "~" + "(" + r1.getElement(type) + ")";
}
}
}
break;
} else {
if (op.equals("&&")) {
if (r1.isit == 'r'
|| (r1.isit == 'l' && r1.op.equals("||"))) {
if (r1 != null) {
if (sbmlFlag) {
result = "and(" + r1.getElement(type) + ",";
} else if (verilog) {
result = "(" + r1.getElement(type) + ")&&";
} else {
result = "(" + r1.getElement(type) + ")";
}
}
} else {
if (r1 != null) {
if (sbmlFlag) {
result = "and(" + r1.getElement(type) + ",";
} else if (verilog) {
result = r1.getElement(type) + "&&";
} else {
result = r1.getElement(type);
}
}
}
if (!sbmlFlag && !verilog) {
result = result + "&";
}
if (r2.isit == 'r'
|| (r2.isit == 'l' && r2.op.equals("||"))) {
if (r2 != null) {
if (sbmlFlag) {
result = result + r2.getElement(type) + ")";
} else {
result = result + "(" + r2.getElement(type)
+ ")";
}
}
} else {
if (r2 != null) {
if (sbmlFlag) {
result = result + r2.getElement(type) + ")";
} else {
result = result + r2.getElement(type);
}
}
}
} else if (op.equals("||")) {
if (r1.isit == 'r') {
if (r1 != null) {
if (sbmlFlag) {
result = "or(" + r1.getElement(type) + ",";
} else if (verilog) {
result = "(" + r1.getElement(type) + ")||";
} else {
result = "(" + r1.getElement(type) + ")";
}
}
} else {
if (r1 != null) {
if (sbmlFlag) {
result = "or(" + r1.getElement(type) + ",";
} else if (verilog) {
result = r1.getElement(type) + "||";
} else {
result = r1.getElement(type);
}
}
}
if (!sbmlFlag && !verilog) {
result = result + "|";
}
if (r2.isit == 'r') {
if (r2 != null) {
if (sbmlFlag) {
result = result + r2.getElement(type) + ")";
} else {
result = result + "(" + r2.getElement(type)
+ ")";
}
}
} else {
if (r2 != null) {
if (sbmlFlag) {
result = result + r2.getElement(type) + ")";
} else {
result = result + r2.getElement(type);
}
}
}
} else if (op.equals("f")) {
if (r1 != null) {
if (r1.isit == 'n') {
result = new Integer((int) Math.floor(r1.lvalue))
.toString();
} else {
if (sbmlFlag) {
result = "floor(" + r1.getElement(type) + ")";
} else if (verilog) {
result = "$floor(" + r1.getElement(type) + ")";
} else {
result = "floor(" + r1.getElement(type) + ")";
}
}
}
} else if (op.equals("c")) {
if (r1 != null) {
if (r1.isit == 'n') {
result = new Integer((int) Math.ceil(r1.lvalue))
.toString();
} else {
if (sbmlFlag) {
result = "ceil(" + r1.getElement(type) + ")";
} else if (verilog) {
result = "$ceil(" + r1.getElement(type) + ")";
} else {
result = "ceil(" + r1.getElement(type) + ")";
}
}
}
} else if (op.equals("m")) {
if (r1 != null && r2 != null) {
if (r1.isit == 'n' && r2.isit == 'n') {
if (r1.lvalue < r2.lvalue) {
result = r1.getElement(type);
} else {
result = r2.getElement(type);
}
} else {
if (sbmlFlag) {
result = "piecewise(" + r1.getElement(type)
+ ",leq(" + r1.getElement(type) + ","
+ r2.getElement(type) + "),"
+ r2.getElement(type) + ")";
} else if (verilog) {
result = "("+r1.getElement(type) +"<"+r2.getElement(type) +"?"+r1.getElement(type) +":"+r2.getElement(type) +")";
} else {
result = "min(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
}
} else if (op.equals("M")) {
if (r1 != null && r2 != null) {
if (r1.isit == 'n' && r2.isit == 'n') {
if (r1.lvalue > r2.lvalue) {
result = r1.getElement(type);
} else {
result = r2.getElement(type);
}
} else {
if (sbmlFlag) {
result = "piecewise(" + r1.getElement(type)
+ ",geq(" + r1.getElement(type) + ","
+ r2.getElement(type) + "),"
+ r2.getElement(type) + ")";
} else if (verilog) {
result = "("+r1.getElement(type) +">"+r2.getElement(type) +"?"+r1.getElement(type) +":"+r2.getElement(type) +")";
} else {
result = "max(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
}
} else if (op.equals("i")) {
if (r1 != null && r2 != null) {
if (sbmlFlag) {
result = "floor(" + r1.getElement(type) + "/"
+ r2.getElement(type) + ")";
} else if (verilog) {
result = "floor(" + r1.getElement(type) + "/"
+ r2.getElement(type) + ")";
} else {
result = "idiv(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
				} else if (op.equals("uniform")) {
					if (r1 != null && r2 != null) {
						// identical syntax for Verilog and the default form
						result = "uniform(" + r1.getElement(type) + ","
								+ r2.getElement(type) + ")";
					}
} // TODO: Add verilog functions for other distributions
else if (op.equals("[]")) {
if (r1 != null && r2 != null) {
result = "BIT(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
} else if (op.equals("normal")) {
if (r1 != null && r2 != null) {
result = "normal(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
} else if (op.equals("gamma")) {
if (r1 != null && r2 != null) {
result = "gamma(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
} else if (op.equals("lognormal")) {
if (r1 != null && r2 != null) {
result = "lognormal(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
} else if (op.equals("binomial")) {
if (r1 != null && r2 != null) {
result = "binomial(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
} else if (op.equals("exponential")) {
if (r1 != null) {
result = "exponential(" + r1.getElement(type) + ")";
}
} else if (op.equals("chisq")) {
if (r1 != null) {
result = "chisq(" + r1.getElement(type) + ")";
}
} else if (op.equals("laplace")) {
if (r1 != null) {
result = "laplace(" + r1.getElement(type) + ")";
}
} else if (op.equals("cauchy")) {
if (r1 != null) {
result = "cauchy(" + r1.getElement(type) + ")";
}
} else if (op.equals("rayleigh")) {
if (r1 != null) {
result = "rayleigh(" + r1.getElement(type) + ")";
}
} else if (op.equals("poisson")) {
if (r1 != null) {
result = "poisson(" + r1.getElement(type) + ")";
}
} else if (op.equals("bernoulli")) {
if (r1 != null) {
result = "bernoulli(" + r1.getElement(type) + ")";
}
} else if (op.equals("rate")) {
if (r1 != null) {
result = "rate(" + r1.getElement(type) + ")";
}
} else if (op.equals("INT")) {
if (r1 != null) {
if (sbmlFlag) {
result = "piecewise(1," + r1.getElement(type)
+ ",0 )";
} else {
result = "INT(" + r1.getElement(type) + ")";
}
}
} else if (op.equals("==")) {
if (r1 != null) {
if (sbmlFlag) {
result = "eq(" + r1.getElement(type) + ",";
} else if (verilog) {
result = r1.getElement(type) + "==";
} else {
result = r1.getElement(type);
}
}
if (!sbmlFlag && !verilog) {
result = result + "=";
}
if (r2 != null) {
if (sbmlFlag) {
result = result + r2.getElement(type) + ")";
} else {
result = result + r2.getElement(type);
}
}
} else if (op.equals("+")) {
if (r1.isit == 'n' && r1.lvalue >= 0 && r2.isit == 'a'
&& r2.op.equals("uniform")) {
						ExprTree tempUniform = new ExprTree(r2);
						// snapshot r1 before it is rewritten by the first call
						ExprTree addend = new ExprTree(r1);
						r1.setNodeValues(addend, tempUniform.r1, "+", 'a');
						r2.setNodeValues(addend, tempUniform.r2, "+", 'a');
isit = 'a';
op = "uniform";
} else if (r1.isit == 'a' && r1.op.equals("uniform")
&& r2.isit == 'n' && r2.lvalue >= 0) {
ExprTree tempUniform = new ExprTree(r1);
r1.setNodeValues(r2, tempUniform.r1, "+", 'a');
r2.setNodeValues(r2, tempUniform.r2, "+", 'a');
isit = 'a';
op = "uniform";
} else {
try {
String r1String = r1.getElement(type);
String r2String = r2.getElement(type);
result = new Float(Float.parseFloat(r1String)
+ Float.parseFloat(r2String)).toString();
} catch (NumberFormatException e) {
if (r1.isit == 'b'
|| r1.isit == 'i'
|| r1.isit == 'c'
|| r1.isit == 'n'
|| r1.isit == 't'
|| (r1.isit == 'a' && (r1.op.equals("+")
|| r1.op.equals("-")
|| r1.op.equals("*")
|| r1.op.equals("/") || r1.op
.equals("^")))) {
if (r1 != null) {
result = r1.getElement(type);
}
} else {
if (r1 != null) {
result = "(" + r1.getElement(type) + ")";
}
}
result = result + "+";
if (r2.isit == 'b'
|| r2.isit == 'i'
|| r2.isit == 'c'
|| r2.isit == 'n'
|| r2.isit == 't'
|| (r2.isit == 'a' && (r2.op.equals("+")
|| r2.op.equals("-")
|| r2.op.equals("*")
|| r2.op.equals("/") || r2.op
.equals("^")))) {
if (r2 != null) {
result = result + r2.getElement(type);
}
} else {
if (r2 != null) {
result = result + "(" + r2.getElement(type)
+ ")";
}
}
}
}
} else if (op.equals("-")) {
if (r1.isit == 'a' && r1.op.equals("uniform")
&& r2.isit == 'n' && r2.lvalue >= 0) {
ExprTree tempUniform = new ExprTree(r1);
r1.setNodeValues(tempUniform.r1, r2, "-", 'a');
r2.setNodeValues(tempUniform.r2, r2, "-", 'a');
isit = 'a';
op = "uniform";
} else {
try {
String r1String = r1.getElement(type);
String r2String = r2.getElement(type);
result = new Float(Float.parseFloat(r1String)
- Float.parseFloat(r2String)).toString();
} catch (NumberFormatException e) {
if (r1.isit == 'b'
|| r1.isit == 'i'
|| r1.isit == 'c'
|| r1.isit == 'n'
|| r1.isit == 't'
|| (r1.isit == 'a' && (r1.op.equals("+")
|| r1.op.equals("-")
|| r1.op.equals("*")
|| r1.op.equals("/") || r1.op
.equals("^")))) {
if (r1 != null) {
result = r1.getElement(type);
}
} else {
if (r1 != null) {
result = "(" + r1.getElement(type) + ")";
}
}
result = result + "-";
if (r2.isit == 'b'
|| r2.isit == 'i'
|| r2.isit == 'c'
|| r2.isit == 'n'
|| r2.isit == 't'
|| (r2.isit == 'a' && (r2.op.equals("-")
|| r2.op.equals("*")
|| r2.op.equals("/") || r2.op
.equals("^")))) {
if (r2 != null) {
result = result + r2.getElement(type);
}
} else {
if (r2 != null) {
result = result + "(" + r2.getElement(type)
+ ")";
}
}
}
}
} else if (op.equals("*")) {
if (r1.isit == 'n' && r1.lvalue >= 0 && r2.isit == 'a'
&& r2.op.equals("uniform")) {
						ExprTree tempUniform = new ExprTree(r2);
						// snapshot r1 before it is rewritten by the first call
						ExprTree factor = new ExprTree(r1);
						r1.setNodeValues(factor, tempUniform.r1, "*", 'a');
						r2.setNodeValues(factor, tempUniform.r2, "*", 'a');
isit = 'a';
op = "uniform";
} else if (r1.isit == 'a' && r1.op.equals("uniform")
&& r2.isit == 'n' && r2.lvalue >= 0) {
ExprTree tempUniform = new ExprTree(r1);
r1.setNodeValues(r2, tempUniform.r1, "*", 'a');
r2.setNodeValues(r2, tempUniform.r2, "*", 'a');
isit = 'a';
op = "uniform";
} else {
try {
String r1String = r1.getElement(type);
String r2String = r2.getElement(type);
result = new Float(Float.parseFloat(r1String)
* Float.parseFloat(r2String)).toString();
} catch (NumberFormatException e) {
if (r1.isit == 'b'
|| r1.isit == 'i'
|| r1.isit == 'c'
|| r1.isit == 'n'
|| r1.isit == 't'
|| (r1.isit == 'a' && (r1.op.equals("*")
|| r1.op.equals("/") || r1.op
.equals("^")))) {
if (r1 != null) {
result = r1.getElement(type);
}
} else {
if (r1 != null) {
result = "(" + r1.getElement(type) + ")";
}
}
result = result + "*";
if (r2.isit == 'b'
|| r2.isit == 'i'
|| r2.isit == 'c'
|| r2.isit == 'n'
|| r2.isit == 't'
|| (r2.isit == 'a' && (r2.op.equals("*")
|| r2.op.equals("/") || r2.op
.equals("^")))) {
if (r2 != null) {
result = result + r2.getElement(type);
}
} else {
if (r2 != null) {
result = result + "(" + r2.getElement(type)
+ ")";
}
}
}
}
} else if (op.equals("/")) {
if (r1.isit == 'a' && r1.op.equals("uniform")
&& r2.isit == 'n' && r2.lvalue >= 0) {
ExprTree tempUniform = new ExprTree(r1);
r1.setNodeValues(tempUniform.r1, r2, "/", 'a');
r2.setNodeValues(tempUniform.r2, r2, "/", 'a');
isit = 'a';
op = "uniform";
} else {
try {
String r1String = r1.getElement(type);
String r2String = r2.getElement(type);
result = new Float(Float.parseFloat(r1String)
/ Float.parseFloat(r2String)).toString();
} catch (NumberFormatException e) {
if (r1.isit == 'b'
|| r1.isit == 'i'
|| r1.isit == 'c'
|| r1.isit == 'n'
|| r1.isit == 't'
|| (r1.isit == 'a' && (r1.op.equals("*")
|| r1.op.equals("/") || r1.op
.equals("^")))) {
if (r1 != null) {
result = r1.getElement(type);
}
} else {
if (r1 != null) {
result = "(" + r1.getElement(type) + ")";
}
}
result = result + "/";
if (r2.isit == 'b'
|| r2.isit == 'i'
|| r2.isit == 'c'
|| r2.isit == 'n'
|| r2.isit == 't'
|| (r2.isit == 'a' && (r2.op.equals("/") || r2.op
.equals("^")))) {
if (r2 != null) {
result = result + r2.getElement(type);
}
} else {
if (r2 != null) {
result = result + "(" + r2.getElement(type)
+ ")";
}
}
}
}
				} else if (op.equals("^")) {
					try {
						String r1String = r1.getElement(type);
						String r2String = r2.getElement(type);
						// '^' is exponentiation here, as in evaluateExpr;
						// bitwise XOR uses the "X" operator instead
						result = new Integer((int) Math.pow(
								Integer.parseInt(r1String),
								Integer.parseInt(r2String))).toString();
					} catch (NumberFormatException e) {
if (r1.isit == 'b'
|| r1.isit == 'i'
|| r1.isit == 'c'
|| r1.isit == 'n'
|| r1.isit == 't'
|| (r1.isit == 'a' && (r1.op.equals("*")
|| r1.op.equals("/") || r1.op
.equals("^")))) {
if (r1 != null) {
result = "(" + r1.getElement(type) + ")";
}
} else {
if (r1 != null) {
result = "(" + r1.getElement(type) + ")";
}
}
result = result + "^";
if (r2.isit == 'b'
|| r2.isit == 'i'
|| r2.isit == 'c'
|| r2.isit == 'n'
|| r2.isit == 't'
|| (r2.isit == 'a' && (r2.op.equals("/") || r2.op
.equals("^")))) {
if (r2 != null) {
result = result + "(" + r2.getElement(type)
+ ")";
}
} else {
if (r2 != null) {
result = result + "(" + r2.getElement(type)
+ ")";
}
}
}
}
// relational ops: geq, leq, gt, lt
// mod
else {
if (!sbmlFlag) {
if (r1 != null) {
if (r1.isit == 'b' || r1.isit == 'i'
|| r1.isit == 'c' || r1.isit == 'n'
|| r1.isit == 't') {
result = r1.getElement(type);
} else {
result = "(" + r1.getElement(type) + ")";
}
}
result = result + op;
if (r2 != null) {
if (r2.isit == 'b' || r2.isit == 'i'
|| r2.isit == 'c' || r2.isit == 'n'
|| r2.isit == 't') {
result = result + r2.getElement(type);
} else {
result = result + "(" + r2.getElement(type)
+ ")";
}
}
}
if (sbmlFlag) {
if (op.equals("<=")) {
if (r1 != null && r2 != null) {
result = "leq(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
if (op.equals(">=")) {
if (r1 != null && r2 != null) {
result = "geq(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
if (op.equals(">")) {
if (r1 != null && r2 != null) {
result = "gt(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
if (op.equals("<")) {
if (r1 != null && r2 != null) {
result = "lt(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
if (op.equals("%")) {
if (r1 != null && r2 != null) {
result = "mod(" + r1.getElement(type) + ","
+ r2.getElement(type) + ")";
}
}
}
}
}
}
return result;
}
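	/**
	 * Rewrites the tree so that uniform(...) appears at the top of a
	 * subexpression, pushing min/max, sums, differences, and ceil/floor
	 * into the uniform bounds.
	 */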
public ExprTree minimizeUniforms() {
if (r1 != null) {
r1.minimizeUniforms();
}
if (r2 != null) {
r2.minimizeUniforms();
}
if (isit == 'a' && op.equals("m")) {
if (r1.isit == 'n' && r2.isit == 'n') {
				isit = 'n';
				if (r1.lvalue < r2.lvalue) {
					lvalue = r1.lvalue;
				} else {
					lvalue = r2.lvalue;
				}
				uvalue = lvalue; // collapse to a point value
				r1 = null;
				r2 = null;
} else if (r1.isit == 'a' && r1.op.equals("uniform")
&& r2.isit == 'a' && r2.op.equals("uniform")) {
ExprTree l1 = r1.r1;
ExprTree l2 = r2.r1;
ExprTree u1 = r1.r2;
ExprTree u2 = r2.r2;
op = "uniform";
r1.op = "m";
r2.op = "m";
r1.r1 = l1;
r1.r2 = l2;
r2.r1 = u1;
r2.r2 = u2;
}
}
if (isit == 'a' && op.equals("M")) {
if (r1.isit == 'n' && r2.isit == 'n') {
				isit = 'n';
				if (r1.lvalue < r2.lvalue) {
					lvalue = r2.lvalue;
				} else {
					lvalue = r1.lvalue;
				}
				uvalue = lvalue; // collapse to a point value
				r1 = null;
				r2 = null;
} else if (r1.isit == 'a' && r1.op.equals("uniform")
&& r2.isit == 'a' && r2.op.equals("uniform")) {
ExprTree l1 = r1.r1;
ExprTree l2 = r2.r1;
ExprTree u1 = r1.r2;
ExprTree u2 = r2.r2;
op = "uniform";
r1.op = "M";
r2.op = "M";
r1.r1 = l1;
r1.r2 = l2;
r2.r1 = u1;
r2.r2 = u2;
}
}
if (isit == 'a' && op.equals("+")) {
if (r1.isit == 'a' && r1.op.equals("uniform") && r2.isit == 'a'
&& r2.op.equals("uniform")) {
ExprTree l1 = r1.r1;
ExprTree l2 = r2.r1;
ExprTree u1 = r1.r2;
ExprTree u2 = r2.r2;
op = "uniform";
r1.op = "+";
r2.op = "+";
r1.r1 = l1;
r1.r2 = l2;
r2.r1 = u1;
r2.r2 = u2;
}
}
if (isit == 'a' && op.equals("-")) {
if (r1.isit == 'a' && r1.op.equals("uniform") && r2.isit == 'a'
&& r2.op.equals("uniform")) {
ExprTree l1 = r1.r1;
ExprTree l2 = r2.r1;
ExprTree u1 = r1.r2;
ExprTree u2 = r2.r2;
op = "uniform";
r1.op = "+";
r2.op = "+";
r1.r1 = l1;
r1.r2 = u2;
r2.r1 = u1;
r2.r2 = l2;
}
}
if (isit == 'a' && op.equals("c")) {
if (r1.isit == 'a' && r1.op.equals("uniform")) {
ExprTree l1 = r1.r1;
ExprTree u1 = r1.r2;
op = "uniform";
r1 = new ExprTree(l1, null, "c", 'a');
r2 = new ExprTree(u1, null, "c", 'a');
}
}
if (isit == 'a' && op.equals("f")) {
if (r1.isit == 'a' && r1.op.equals("uniform")) {
ExprTree l1 = r1.r1;
ExprTree u1 = r1.r2;
op = "uniform";
r1 = new ExprTree(l1, null, "f", 'a');
r2 = new ExprTree(u1, null, "f", 'a');
}
}
if (isit == 'a' && op.equals("uniform")) {
if (r1.isit == 'a' && r1.op.equals("uniform")) {
r1 = r1.r1;
}
if (r2.isit == 'a' && r2.op.equals("uniform")) {
r2 = r2.r2;
}
}
return this;
}
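	/**
	 * Structural equality: true when both trees have the same operators,
	 * variables, and bounds, recursively.
	 */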
public boolean isEqual(ExprTree expr) {
if (isit == expr.isit) {
boolean same = false;
switch (isit) {
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
if (variable.equals(expr.variable)) {
same = true;
}
break;
case 'n': // Number
case 't': // Truth value
if (uvalue == expr.uvalue && lvalue == expr.lvalue) {
same = true;
}
break;
case 'w': // bitWise
case 'a': // Arithmetic
case 'r': // Relational
case 'l': // Logical
if (op.equals(expr.op)) {
same = true;
}
}
if (same) {
boolean r1Same = false, r2Same = false;
if (r1 == null) {
if (expr.r1 == null) {
r1Same = true;
}
} else if (r1.isEqual(expr.r1)) {
r1Same = true;
}
if (r2 == null) {
if (expr.r2 == null) {
r2Same = true;
}
} else if (r2.isEqual(expr.r2)) {
r2Same = true;
}
if (r1Same && r2Same) {
return true;
}
}
}
return false;
}
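	/**
	 * Structural equality modulo the given assignment: variables that are
	 * bound in the map compare by their assigned values.
	 */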
private boolean isEqual(ExprTree expr, HashMap<String, String> variables) {
if (isit == expr.isit) {
boolean same = false;
switch (isit) {
case 'b': // Boolean
case 'i': // Integer
case 'c': // Continuous
if (variables.containsKey(variable)) {
if (variables.containsKey(expr.variable)) {
if (variables.get(variable).equals(
variables.get(expr.variable)))
same = true;
}
} else if (variable.equals(expr.variable)) {
same = true;
}
break;
case 'n': // Number
case 't': // Truth value
if (uvalue == expr.uvalue && lvalue == expr.lvalue) {
same = true;
} else if (variables.containsKey(expr.variable)) {
if (uvalue == lvalue) {
if (uvalue == 1.0
&& variables.get(expr.variable).toLowerCase()
.equals("true"))
same = true;
else if (uvalue == 0.0
&& variables.get(expr.variable).toLowerCase()
.equals("false"))
same = true;
}
}
break;
case 'w': // bitWise
case 'a': // Arithmetic
case 'r': // Relational
case 'l': // Logical
if (op.equals(expr.op)) {
same = true;
}
}
if (same) {
boolean r1Same = false, r2Same = false;
if (r1 == null) {
if (expr.r1 == null) {
r1Same = true;
}
			} else if (r1.isEqual(expr.r1, variables)) {
r1Same = true;
}
if (r2 == null) {
if (expr.r2 == null) {
r2Same = true;
}
			} else if (r2.isEqual(expr.r2, variables)) {
r2Same = true;
}
if (r1Same && r2Same) {
return true;
}
}
}
return false;
}
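	/**
	 * Turns this node into a leaf (variable, number, or truth value) with
	 * the given bounds, discarding any children.
	 */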
private void setVarValues(char willbe, double lNV, double uNV, String var) {
op = "";
r1 = null;
r2 = null;
isit = willbe;
if ((isit == 'b') || (isit == 't'))
logical = true;
else
logical = false;
uvalue = uNV;
lvalue = lNV;
variable = var;
real = 0;
}
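	/**
	 * Turns this node into an operator node over copies of the given
	 * children, then constant-folds and simplifies when the operands are
	 * static.
	 */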
public void setNodeValues(ExprTree nr1, ExprTree nr2, String nop,
char willbe) {
ExprTree r1temp = null, r2temp = null;
if (nr1 != null) {
r1temp = new ExprTree(nr1);
}
if (nr2 != null) {
r2temp = new ExprTree(nr2);
}
r1 = r1temp;
r2 = r2temp;
op = nop;
isit = willbe;
if ((isit == 'r') || (isit == 'l')) {
logical = true;
uvalue = 1;
lvalue = 0;
} else {
logical = false;
uvalue = INFIN;
lvalue = -INFIN;
}
variable = null;
// simplify if operands are static
if (isit == 'a' || isit == 'r' || isit == 'l' || isit == 'w') {
if (op.equals("&&")) {
if ((r1.isit == 'n') || (r1.isit == 't')) {
if (r1.lvalue == 0) {
setVarValues('t', 0.0, 0.0, null);
} else {
if (r2.isit == 'l' || r2.isit == 'a' || r2.isit == 'w'
|| r2.isit == 'r') {
setNodeValues(r2.r1, r2.r2, r2.op, r2.isit);
} else {
setVarValues(r2.isit, r2.lvalue, r2.uvalue,
r2.variable);
}
}
} else if (((r2).isit == 'n') || ((r2).isit == 't')) {
if (r2.lvalue == 0) {
setVarValues('t', 0.0, 0.0, null);
} else {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue,
r1.variable);
}
}
				} else if (r1.isEqual(r2)) {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue, r1.variable);
}
				} else {
					// assumption: detect complementary operands, since
					// a && !a is always false
					ExprTree notR2 = new ExprTree(r2);
					notR2.setNodeValues(r2, null, "!", 'l');
					ExprTree notR1 = new ExprTree(r1);
					notR1.setNodeValues(r1, null, "!", 'l');
					if (r1.isEqual(notR2) || r2.isEqual(notR1)) {
						setVarValues('t', 0.0, 0.0, null);
					}
				}
} else if (op.equals("||")) {
if ((r1.isit == 'n') || (r1.isit == 't')) {
if (r1.lvalue != 0) {
setVarValues('t', 1.0, 1.0, null);
} else {
if (r2.isit == 'l' || r2.isit == 'a' || r2.isit == 'w'
|| r2.isit == 'r') {
setNodeValues(r2.r1, r2.r2, r2.op, r2.isit);
} else {
setVarValues(r2.isit, r2.lvalue, r2.uvalue,
r2.variable);
}
}
} else if (((r2).isit == 'n') || ((r2).isit == 't')) {
if (r2.lvalue != 0) {
setVarValues('t', 1.0, 1.0, null);
} else {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue,
r1.variable);
}
}
				} else if (r1.isEqual(r2)) {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue, r1.variable);
}
				} else {
					// assumption: detect complementary operands, since
					// a || !a is always true
					ExprTree notR2 = new ExprTree(r2);
					notR2.setNodeValues(r2, null, "!", 'l');
					ExprTree notR1 = new ExprTree(r1);
					notR1.setNodeValues(r1, null, "!", 'l');
					if (r1.isEqual(notR2) || r2.isEqual(notR1)) {
						setVarValues('t', 1.0, 1.0, null);
					}
				}
} else if (op.equals("->")) {
if (r1.isit == 'n' || r1.isit == 't') {
if (r1.lvalue != 0) {
if (r2.isit == 'l' || r2.isit == 'a' || r2.isit == 'w'
|| r2.isit == 'r') {
setNodeValues(r2.r1, r2.r2, r2.op, r2.isit);
} else {
setVarValues(r2.isit, r2.lvalue, r2.uvalue,
r2.variable);
}
} else if (r1.uvalue == 0) {
setVarValues('t', 1.0, 1.0, null);
}
} else if (r2.isit == 't' || r2.isit == 'n') {
if (r2.lvalue != 0) {
setVarValues('t', 1.0, 1.0, null);
					} else if (r2.uvalue == 0) {
						// a -> false simplifies to !(a)
						ExprTree notE = new ExprTree(r1);
						notE.setNodeValues(r1, null, "!", 'l');
						setNodeValues(notE.r1, notE.r2, notE.op, notE.isit);
					}
}
} else if (op.equals("!")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 't';
if (r1.lvalue == 1) {
this.lvalue = 0;
} else {
this.lvalue = 1;
}
(this).uvalue = (this).lvalue;
}
} else if (op.equals("==")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if (r1.lvalue == r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
}
} else if (op.equals(">=")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if ((r1).lvalue >= r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
}
} else if (op.equals(">")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if ((r1).lvalue > r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
}
} else if (op.equals("<=")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if ((r1).lvalue <= r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
}
} else if (op.equals("<")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
if ((r1).lvalue < r2.lvalue) {
this.lvalue = 1;
} else {
this.lvalue = 0;
}
(this).uvalue = (this).lvalue;
}
} else if (op.equals("&")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = ((int) (r1).lvalue) & ((int) r2.lvalue);
(this).uvalue = (this).lvalue;
}
} else if (op.equals("|")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (int) (r1).lvalue | (int) r2.lvalue;
(this).uvalue = (this).lvalue;
}
} else if (isit == 'w' && op.equals("X")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (int) (r1).lvalue ^ (int) r2.lvalue;
(this).uvalue = (this).lvalue;
}
} else if (op.equals("m")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = Math.min((r1).lvalue, r2.lvalue);
(this).uvalue = (this).lvalue;
}
} else if (op.equals("M")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = Math.max((r1).lvalue, r2.lvalue);
(this).uvalue = (this).lvalue;
}
} else if (op.equals("i")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = Math.floor((r1).lvalue / r2.lvalue);
(this).uvalue = (this).lvalue;
}
} else if (op.equals("f")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 'n';
(this).lvalue = Math.floor((r1).lvalue);
(this).uvalue = (this).lvalue;
}
} else if (op.equals("c")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 'n';
(this).lvalue = Math.ceil((r1).lvalue);
(this).uvalue = (this).lvalue;
}
} else if (op.equals("~")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 'n';
(this).lvalue = ~(int) (r1).lvalue;
(this).uvalue = (this).lvalue;
}
} else if (op.equals("[]")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 't';
(this).lvalue = (((int) (r1).lvalue) >> ((int) r2.lvalue)) & 1;
(this).uvalue = (this).lvalue;
}
} else if (op.equals("U-")) {
if (((r1).isit == 'n') || ((r1).isit == 't')) {
(this).isit = 'n';
(this).lvalue = -((r1).lvalue);
(this).uvalue = (this).lvalue;
}
} else if (op.equals("*")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue * r2.lvalue;
(this).uvalue = (this).lvalue;
} else if (r1.isit == 'n' || r1.isit == 't') {
if (r1.lvalue == 0 && r1.uvalue == 0) {
setVarValues('t', 0.0, 0.0, null);
} else if (r1.lvalue == 1 && r1.uvalue == 1) {
if (r2.isit == 'l' || r2.isit == 'a' || r2.isit == 'w'
|| r2.isit == 'r') {
setNodeValues(r2.r1, r2.r2, r2.op, r2.isit);
} else {
setVarValues(r2.isit, r2.lvalue, r2.uvalue,
r2.variable);
}
}
} else if (r2.isit == 'n' || r2.isit == 't') {
if (r2.lvalue == 0 && r2.uvalue == 0) {
setVarValues('t', 0.0, 0.0, null);
} else if (r2.lvalue == 1 && r2.uvalue == 1) {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue,
r1.variable);
}
}
}
} else if (op.equals("/")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue / r2.lvalue;
(this).uvalue = (this).lvalue;
} else if ((r1.isit == 'n' || r1.isit == 't') && r1.uvalue == 0
&& r1.lvalue == 0) {
setVarValues('n', 0.0, 0.0, null);
} else if ((r2.isit == 'n' || r2.isit == 't') && r2.lvalue == 1
&& r2.uvalue == 1) {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue, r1.variable);
}
}
} else if (op.equals("%")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue % r2.lvalue;
(this).uvalue = (this).lvalue;
} else if ((r2.isit == 'n' || r2.isit == 't')
&& r2.lvalue == 1.0 && r2.uvalue == 1.0) {
setVarValues('n', 0.0, 0.0, null);
} else if ((r1.isit == 'n' || r1.isit == 't')
&& r1.lvalue == 1.0 && r1.uvalue == 1.0) {
setVarValues('n', 1.0, 1.0, null);
}
} else if (op.equals("+")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue + r2.lvalue;
(this).uvalue = (this).lvalue;
} else if ((r1.isit == 'n' || r1.isit == 't') && r1.lvalue == 0
&& r1.uvalue == 0) {
if (r2.isit == 'l' || r2.isit == 'a' || r2.isit == 'w'
|| r2.isit == 'r') {
setNodeValues(r2.r1, r2.r2, r2.op, r2.isit);
} else {
setVarValues(r2.isit, r2.lvalue, r2.uvalue, r2.variable);
}
} else if ((r2.isit == 'n' || r2.isit == 't') && r2.lvalue == 0
&& r2.uvalue == 0) {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue, r1.variable);
}
}
} else if (op.equals("-")) {
if (((r1.isit == 'n') || (r1.isit == 't'))
&& (((r2).isit == 'n') || ((r2).isit == 't'))) {
(this).isit = 'n';
(this).lvalue = (r1).lvalue - r2.lvalue;
(this).uvalue = (this).lvalue;
} else if ((r1.isit == 'n' || r1.isit == 't') && r1.lvalue == 0
&& r1.uvalue == 0) {
setNodeValues(r2, null, "U-", 'a');
} else if ((r2.isit == 'n' || r2.isit == 't') && r2.lvalue == 0
&& r2.uvalue == 0) {
if (r1.isit == 'l' || r1.isit == 'a' || r1.isit == 'w'
|| r1.isit == 'r') {
setNodeValues(r1.r1, r1.r2, r1.op, r1.isit);
} else {
setVarValues(r1.isit, r1.lvalue, r1.uvalue, r1.variable);
}
}
}
}
}
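	/**
	 * Evaluates this expression under the given assignment. Logical results
	 * are returned as 1.0 or 0.0; Double.NaN signals an undetermined value.
	 */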
public double evaluateExpr(HashMap<String, String> variables) {
double left;
double right;
switch (isit) {
case 'b': // Boolean
if (variables != null) {
if (!variables.containsKey(variable)
|| variables.get(variable).toLowerCase().equals(
"unknown"))
return Double.NaN;
if (variables.get(variable).toLowerCase().equals("true") ||
variables.get(variable).equals("1")) {
return 1.0;
} else {
return 0.0;
}
} else {
return Double.NaN;
}
case 'c': // Continuous
return Double.NaN;
case 'i': // Integer
if (variables != null) {
try {
return Double.parseDouble(variables.get(variable));
} catch (Exception e) {
return Double.NaN;
}
} else {
return Double.NaN;
}
case 'n': // Number
if (uvalue == lvalue) {
return uvalue;
} else {
return ((uvalue - lvalue) * new java.util.Random().nextDouble())
+ lvalue;
}
case 't': // Truth value
if (uvalue == 1 && lvalue == 1) {
return 1.0;
} else if (uvalue == 0 && lvalue == 0) {
return 0.0;
} else {
return Double.NaN;
}
case 'w': // bitWise
if (r1 != null) {
left = r1.evaluateExpr(variables);
} else {
left = Double.NaN;
}
if (r2 != null) {
right = r2.evaluateExpr(variables);
} else {
right = Double.NaN;
}
if (op.equals("&")) {
return ((int) left) & ((int) right);
} else if (op.equals("|")) {
return ((int) left) | ((int) right);
} else if (op.equals("!")) {
return ~((int) left);
} else if (op.equals("X")) {
return ((int) left) ^ ((int) right);
}
case 'a': // Arithmetic
case 'r': // Relational
case 'l': // Logical
if (op.equals("!")) {
if (r1 != null) {
if (r1.evaluateExpr(variables) == 1.0) {
return 0.0;
} else if (r1.evaluateExpr(variables) == 0.0) {
return 1.0;
} else {
return Double.NaN;
}
} else if (r2 != null) {
if (r2.evaluateExpr(variables) == 1.0) {
return 0.0;
} else if (r2.evaluateExpr(variables) == 0.0) {
return 1.0;
} else {
return Double.NaN;
}
} else {
return Double.NaN;
}
} else {
if (r1 != null) {
left = r1.evaluateExpr(variables);
} else {
left = Double.NaN;
}
if (r2 != null) {
right = r2.evaluateExpr(variables);
} else {
right = Double.NaN;
}
if (op.equals("&&")) {
if (left == 1.0 && right == 1.0) {
return 1.0;
} else if (left == 0.0 || right == 0.0) {
return 0.0;
} else
return Double.NaN;
} else if (op.equals("||")) {
if (left == 1.0 || right == 1.0) {
return 1.0;
} else if (left == 0.0 && right == 0.0) {
return 0.0;
} else
return Double.NaN;
				} else if (op.equals("==")) {
					if (Double.isNaN(left) || Double.isNaN(right)) {
						return Double.NaN;
					} else if (left == right) {
						return 1.0;
					} else {
						return 0.0;
					}
} else if (op.equals("->")) {
if (left == 0.0 && (right == 1.0 || right == 0.0)) {
return 1.0;
} else if (left == 1.0 && right == 1.0) {
return 1.0;
} else if (left == 1.0 && right == 0.0) {
return 0.0;
} else {
return Double.NaN;
}
} else if (op.equals("+")) {
return left + right;
} else if (op.equals("*")) {
return left * right;
} else if (op.equals("/")) {
return left / right;
} else if (op.equals("%")) {
return left % right;
} else if (op.equals("^")) {
return Math.pow(left, right);
} else if (op.equals("[]")) {
return (((int) left) >> ((int) right)) & 1;
} else if (op.equals("f")) {
return Math.floor(left);
} else if (op.equals("c")) {
return Math.ceil(left);
} else if (op.equals("m")) {
return Math.min(left, right);
} else if (op.equals("M")) {
return Math.max(left, right);
} else if (op.equals("i")) {
return ((int) left) / ((int) right);
} else if (op.equals("uniform")) {
return Double.NaN;
} else if (op.equals("normal")) {
return Double.NaN;
} else if (op.equals("gamma")) {
return Double.NaN;
} else if (op.equals("lognormal")) {
return Double.NaN;
} else if (op.equals("binomial")) {
return Double.NaN;
} else if (op.equals("exponential")) {
return Double.NaN;
} else if (op.equals("chisq")) {
return Double.NaN;
} else if (op.equals("laplace")) {
return Double.NaN;
} else if (op.equals("cauchy")) {
return Double.NaN;
} else if (op.equals("rayleigh")) {
return Double.NaN;
} else if (op.equals("poisson")) {
return Double.NaN;
} else if (op.equals("bernoulli")) {
return Double.NaN;
} else if (op.equals("rate")) {
return Double.NaN;
} else if (op.equals("INT")) {
return ((int) left);
} else if (op.equals("<")) {
if (left < right) {
return 1.0;
} else if (left >= right) {
return 0.0;
} else {
return Double.NaN;
}
} else if (op.equals(">")) {
if (left > right) {
return 1.0;
} else if (left <= right) {
return 0.0;
} else {
return Double.NaN;
}
} else if (op.equals("<=")) {
if (left <= right) {
return 1.0;
} else if (left > right) {
return 0.0;
} else {
return Double.NaN;
}
} else if (op.equals(">=")) {
if (left >= right) {
return 1.0;
} else if (left < right) {
return 0.0;
} else {
return Double.NaN;
}
} else {
return Double.NaN;
}
}
}
return Double.NaN;
}
private static final int WORD = 1;
private static final int IMPLIES = 7;
private static final int END_OF_STRING = 2;
private static final int INFIN = 2147483647;
public String getOp()
{
return op;
}
public ExprTree getLeftChild()
{
return r1;
}
public ExprTree getRightChild()
{
return r2;
}
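	/**
	 * Deep copy: children, token state, and signal lists are duplicated so
	 * the clone can be modified independently.
	 */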
@SuppressWarnings("unchecked")
public ExprTree clone(){
ExprTree ET = new ExprTree(); // ET phone home.
ET.op = op;
ET.isit = isit;
ET.lvalue = lvalue;
ET.uvalue = uvalue;
ET.variable = variable;
ET.real = real;
ET.logical = logical;
ET.r1 = r1 != null ? r1.clone() : null;
ET.r2 = r2 != null ? r2.clone() : null;
ET.tokvalue = tokvalue;
ET.position = position;
ET.token = token;
ET.newresult = newresult != null ? newresult.clone() : null;
//private ArrayList<String> booleanSignals, integerSignals, continuousSignals;
ET.booleanSignals = (ArrayList<String>) booleanSignals.clone();
ET.integerSignals = (ArrayList<String>) integerSignals.clone();
		ET.continuousSignals = (ArrayList<String>) continuousSignals.clone();
ET.lhpn = lhpn;
ET.expression = expression;
return ET;
}
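	/**
	 * Parses the stored expression string into this tree and returns it.
	 */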
public ExprTree getExprTree() {
token = this.intexpr_gettok(expression);
this.intexpr_L(expression);
return this;
}
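	/**
	 * Registers every name in the given set as an integer signal.
	 */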
public void setIntegerSignals(Set<String> signalSet) {
for (String s : signalSet) {
integerSignals.add(s);
}
}
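	/**
	 * Interval evaluation of this expression. Not yet ported from the
	 * original C++ implementation (exprsn::eval), which is kept below as a
	 * commented-out reference; currently always returns null.
	 */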
public IntervalPair evaluateExprBound(HashMap<String, String> variables){
// void exprsn::eval(lhpnStateADT cur_state,int nevents){
// char log_val;
// int tl1,tl2,tu1,tu2,i,j,k;
// int preciser = 1;
// if (op!=""){
// //printf("%s, eval left child\n",op.c_str());
// r1->eval(cur_state,nevents);
// if ((r1->lvalue == -INFIN)||(r1->uvalue == INFIN)){
// lvalue = -INFIN;
// uvalue = INFIN;
// return;
// if (r2){
// //printf("eval right child\n");
// r2->eval(cur_state,nevents);
// if ((r2->lvalue == -INFIN)||(r2->uvalue == INFIN)){
// lvalue = -INFIN;
// uvalue = INFIN;
// return;
// if (op=="||"){
// // logical OR
// if (r1->logical){
// tl1 = r1->lvalue;
// tu1 = r1->uvalue;
// else{//convert numeric r1 to boolean
// if ((r1->lvalue == 0)&&(r1->uvalue == 0)){//false
// tl1 = tu1 = 0;
// else if ((r1->lvalue < 0)&&(r1->uvalue < 0)||
// (r1->lvalue > 0)&&(r1->uvalue > 0)){//true
// tl1 = tu1 = 1;
// else{
// tl1 = 0;
// tu1 = 1;
// if (r2->logical){
// tl2 = r2->lvalue;
// tu2 = r2->uvalue;
// else{//convert numeric r2 to boolean
// if ((r2->lvalue == 0)&&(r2->uvalue == 0)){//false
// tl2 = tu2 = 0;
// else if ((r2->lvalue < 0)&&(r2->uvalue < 0)||
// (r2->lvalue > 0)&&(r2->uvalue > 0)){//true
// tl2 = tu2 = 1;
// else{
// tl2 = 0;
// tu2 = 1;
// lvalue = tl1 || tl2;
// uvalue = tu1 || tu2;
// }else if(op=="&&"){
// // logical AND
// if (r1->logical){
// tl1 = r1->lvalue;
// tu1 = r1->uvalue;
// else{//convert numeric r1 to boolean
// if ((r1->lvalue == 0)&&(r1->uvalue == 0)){//false
// tl1 = tu1 = 0;
// else if ((r1->lvalue < 0)&&(r1->uvalue < 0)||
// (r1->lvalue > 0)&&(r1->uvalue > 0)){//true
// tl1 = tu1 = 1;
// else{
// tl1 = 0;
// tu1 = 1;
// if (r2->logical){
// tl2 = r2->lvalue;
// tu2 = r2->uvalue;
// else{//convert numeric r2 to boolean
// if ((r2->lvalue == 0)&&(r2->uvalue == 0)){//false
// tl2 = tu2 = 0;
// else if ((r2->lvalue < 0)&&(r2->uvalue < 0)||
// (r2->lvalue > 0)&&(r2->uvalue > 0)){//true
// tl2 = tu2 = 1;
// else{
// tl2 = 0;
// tu2 = 1;
// lvalue = tl1 && tl2;
// uvalue = tu1 && tu2;
// #ifdef __LHPN_EVAL__
// printf("and: [%d,%d](%c)&[%d,%d](%c) = [%d,%d]\n",r1->lvalue,
// r1->uvalue,r1->isit,r2->lvalue,r2->uvalue,r2->isit,lvalue,uvalue);
// #endif
// }else if(op=="->"){
// // implication operator
// if (r1->logical){
// tl1 = r1->lvalue;
// tu1 = r1->uvalue;
// else{//convert numeric r1 to boolean
// if ((r1->lvalue == 0)&&(r1->uvalue == 0)){//false
// tl1 = tu1 = 0;
// else if ((r1->lvalue < 0)&&(r1->uvalue < 0)||
// (r1->lvalue > 0)&&(r1->uvalue > 0)){//true
// tl1 = tu1 = 1;
// else{
// tl1 = 0;
// tu1 = 1;
// if (r2->logical){
// tl2 = r2->lvalue;
// tu2 = r2->uvalue;
// else{//convert numeric r2 to boolean
// if ((r2->lvalue == 0)&&(r2->uvalue == 0)){//false
// tl2 = tu2 = 0;
// else if ((r2->lvalue < 0)&&(r2->uvalue < 0)||
// (r2->lvalue > 0)&&(r2->uvalue > 0)){//true
// tl2 = tu2 = 1;
// else{
// tl2 = 0;
// tu2 = 1;
// lvalue = tl1 || !tl2;
// uvalue = tu1 || !tu2;
// }else if(op=="!"){
// // logical NOT
// if (r1->logical){
// tl1 = r1->lvalue;
// tu1 = r1->uvalue;
// else{//convert numeric r1 to boolean
// if ((r1->lvalue == 0)&&(r1->uvalue == 0)){//false
// tl1 = tu1 = 0;
// else if ((r1->lvalue < 0)&&(r1->uvalue < 0)||
// (r1->lvalue > 0)&&(r1->uvalue > 0)){//true
// tl1 = tu1 = 1;
// else{
// tl1 = 0;
// tu1 = 1;
// if (tl1 == tu1){
// lvalue = 1- tl1;
// uvalue = 1- tl1;
// #ifdef __LHPN_EVAL__
// printf("not: [%d,%d](%c) = [%d,%d]\n",r1->lvalue,
// r1->uvalue,r1->isit,lvalue,uvalue);
// #endif
// //printf("negation: ~[%d,%d] = [%d,%d]\n",r1->lvalue,r1->uvalue,
// // lvalue,uvalue);
// }else if(op=="=="){
// // "equality" operator
// // true if same point value
// if ((r1->lvalue == r1->uvalue) && (r2->lvalue == r2->uvalue) &&
// (r1->lvalue == r2->uvalue))
// lvalue = uvalue = 1;
// // false if no overlap
// else if ((r1->lvalue > r2->uvalue)||(r2->lvalue > r1->uvalue))
// lvalue = uvalue = 0;
// // maybe if overlap
// else{
// lvalue = 0;
// uvalue = 1;
// #ifdef __LHPN_EVAL__
// printf("[%d,%d]==[%d,%d]=[%d,%d]\n",r1->lvalue,r1->uvalue ,r2->lvalue,r2->uvalue,lvalue,uvalue);
// #endif
// }else if(op==">"){
// // "greater than" operator
// //true if lower1 > upper2
// if (r1->lvalue > r2->uvalue)
// lvalue = uvalue = 1;
// //false if lower2 >= upper1
// else if (r2->lvalue >= r1->uvalue)
// lvalue = uvalue = 0;
// // maybe if overlap
// else{
// lvalue = 0;
// uvalue = 1;
// }else if(op==">="){
// // "greater than or equal" operator
// //true if lower1 >= upper2
// if (r1->lvalue >= r2->uvalue)
// lvalue = uvalue = 1;
// //false if lower2 > upper1
// else if (r2->lvalue > r1->uvalue)
// lvalue = uvalue = 0;
// // maybe if overlap
// else{
// lvalue = 0;
// uvalue = 1;
// }else if(op=="<"){
// // "less than" operator
// //true if lower2 > upper1
// if (r2->lvalue > r1->uvalue)
// lvalue = uvalue = 1;
// //false if lower1 >= upper2
// else if (r1->lvalue >= r2->uvalue)
// lvalue = uvalue = 0;
// // maybe if overlap
// else{
// lvalue = 0;
// uvalue = 1;
// }else if(op=="<="){
// // "less than or equal" operator
// //true if lower2 >= upper1
// if (r2->lvalue >= r1->uvalue)
// lvalue = uvalue = 1;
// //false if lower1 > upper2
// else if (r1->lvalue > r2->uvalue)
// lvalue = uvalue = 0;
// // maybe if overlap
// else{
// lvalue = 0;
// uvalue = 1;
// #ifdef __LHPN_EVAL__
// printf("[%d,%d]<=[%d,%d]=[%d,%d]\n",r1->lvalue,r1->uvalue ,r2->lvalue,r2->uvalue,lvalue,uvalue);
// #endif
// }else if(op=="[]"){//NEEDS WORK
// // bit extraction operator
// // Only extract if both are point values.
// if ((r1->lvalue == r1->uvalue)&&(r2->lvalue == r2->uvalue)){
// lvalue = uvalue = (r1->lvalue >> r2->uvalue) & 1;
// else {
// if (!preciser)
// lvalue = 0;
// uvalue = 1;
// else {
// uvalue = 0;
// lvalue = 1;
// for (i = r1->lvalue;i<=r1->uvalue;i++)
// for (j = r2->lvalue;j<=r2->uvalue;j++){
// k = (i >> j) & 1;
// lvalue &= k;
// uvalue |= k;
// if (lvalue < uvalue)
// return;
// }else if(op=="+"){
// lvalue = r1->lvalue + r2->lvalue;
// uvalue = r1->uvalue + r2->uvalue;
// }else if(op=="-"){
// lvalue = r1->lvalue - r2->uvalue;
// uvalue = r1->uvalue - r2->lvalue;
// }else if(op=="*"){
// tl1 = r1->lvalue * r2->lvalue;
// tl2 = r1->uvalue * r2->uvalue;
// tu1 = r1->lvalue * r2->uvalue;
// tu2 = r1->uvalue * r2->lvalue;
// lvalue = min(min(min(tl1,tl2),tu1),tu2);
// uvalue = max(max(max(tl1,tl2),tu1),tu2);
// }else if(op=="^"){
// tl1 = pow((double)r1->lvalue,(double)r2->lvalue);
// tl2 = pow((double)r1->uvalue,(double)r2->uvalue);
// tu1 = pow((double)r1->lvalue,(double)r2->uvalue);
// tu2 = pow((double)r1->uvalue,(double)r2->lvalue);
// lvalue = min(min(min(tl1,tl2),tu1),tu2);
// uvalue = max(max(max(tl1,tl2),tu1),tu2);
// }else if(op=="u"){
// lvalue = r1->lvalue;
// uvalue = r2->uvalue;
// }else if(op=="/"){
	// //roughly integer division.
	// //DON'T KNOW WHAT FLOATING POINT PART IS!!!!!
// tl1 = floor(r1->lvalue / r2->lvalue);
// tl2 = floor(r1->uvalue / r2->uvalue);
// tu1 = floor(r1->lvalue / r2->uvalue);
// tu2 = floor(r1->uvalue / r2->lvalue);
// lvalue = min(min(min(tl1,tl2),tu1),tu2);
// tl1 = ceil(r1->lvalue / r2->lvalue);
// tl2 = ceil(r1->uvalue / r2->uvalue);
// tu1 = ceil(r1->lvalue / r2->uvalue);
// tu2 = ceil(r1->uvalue / r2->lvalue);
// uvalue = max(max(max(tl1,tl2),tu1),tu2);
// }else if(op=="%"){//NEEDS WORK
// if (!preciser){
// // Only calculate if both are point values.
// if ((r1->lvalue == r1->uvalue)&&(r2->lvalue == r2->uvalue)){
// lvalue = uvalue = r1->lvalue % r2->uvalue;
// else{
// lvalue = min(0,max(-(max(abs(r2->lvalue),abs(r2->lvalue))-1),r1->lvalue));
// uvalue = max(0,min(max(abs(r2->lvalue),abs(r2->uvalue))-1,r1->uvalue));
// else{
// uvalue = -INFIN;
// lvalue = INFIN;
// for (i = r1->lvalue;i<=r1->uvalue;i++)
// for (j = r2->lvalue;j<=r2->uvalue;j++){
// k = i%j;
// if (k < lvalue)
// lvalue = k;
// if (k > uvalue)
// uvalue = k;
// }else if(op=="U-"){
// lvalue = -(r1->uvalue);
// uvalue = -(r1->lvalue);
// }else if(op=="INT"){
// lvalue = r1->uvalue;
// uvalue = r1->lvalue;
// }else if(op=="BOOL"){
// if ((r1->lvalue == 0)&& (r1->uvalue == 0))
// lvalue = uvalue = 0;
// else if (((r1->lvalue > 0) && (r1->uvalue > 0))||
// ((r1->lvalue < 0) && (r1->uvalue < 0)))
// lvalue = uvalue = 1;
// else {
// lvalue = 0;
// uvalue = 1;
// }else if(op=="&"){
// if ((r1->lvalue!=r1->uvalue)||(r2->lvalue!=r2->uvalue)) {
// if (!preciser){
// lvalue = min(r1->lvalue+r2->lvalue,0);
// uvalue = max((r1->uvalue),(r2->uvalue));
// else{
// uvalue = -INFIN;
// lvalue = INFIN;
// for (i = r1->lvalue;i<=r1->uvalue;i++)
// for (j = r2->lvalue;j<=r2->uvalue;j++){
// k = i&j;
// if (k < lvalue)
// lvalue = k;
// if (k > uvalue)
// uvalue = k;
// else {
// lvalue = (r1->lvalue & r2->lvalue);
// uvalue = (r1->lvalue & r2->lvalue);
// #ifdef __LHPN_EVAL__
// printf("BITWISE AND: [%d,%d](%c)&[%d,%d](%c) = [%d,%d]\n",r1->lvalue,
// r1->uvalue,r1->isit,r2->lvalue,r2->uvalue,r2->isit,lvalue,uvalue);
// #endif
// }else if(op=="|"){
// if ((r1->lvalue!=r1->uvalue)||(r2->lvalue!=r2->uvalue)) {
// lvalue = min(r1->lvalue,r2->lvalue);
// uvalue = max(r1->uvalue + r2->uvalue,-1);
// } else {
// lvalue = (r1->lvalue | r2->lvalue);
// uvalue = (r1->lvalue | r2->lvalue);
// }else if(op=="m"){
// lvalue = min(r1->lvalue,r2->lvalue);
// uvalue = min(r1->uvalue,r2->uvalue);
// }else if(op=="M"){
// lvalue = max(r1->lvalue,r2->lvalue);
// uvalue = max(r1->uvalue,r2->uvalue);
// }else if(op=="i"){
// tl1 = r1->lvalue / r2->lvalue;
// tl2 = r1->uvalue / r2->uvalue;
// tu1 = r1->lvalue / r2->uvalue;
// tu2 = r1->uvalue / r2->lvalue;
// lvalue = min(min(min(tl1,tl2),tu1),tu2);
// uvalue = max(max(max(tl1,tl2),tu1),tu2);
// }else if(op=="X"){
// lvalue = min(min(r1->lvalue-r2->uvalue,r2->lvalue-r1->uvalue),0);
// uvalue = max(max(r1->uvalue + r2->uvalue,-(r1->lvalue + r2->lvalue)),-1);
//// }else if(op=="floor"){
//// lvalue = floor(r1->lvalue);
//// uvalue = floor(r1->uvalue);
//// }else if(op=="round"){
//// lvalue = round(r1->lvalue);
//// uvalue = round(r1->uvalue);
//// }else if(op=="ceil"){
//// lvalue = ceil(r1->lvalue);
//// uvalue = ceil(r1->uvalue);
// }else if(op=="~"){
// //bitwise negation operator (1's complement)
// lvalue = -((r1->uvalue)+1);
// uvalue = -((r1->lvalue)+1);
// }else if(isit == 'd'){
// for (i = 1;i<cur_state->z->size;i++){
// if (cur_state->z->curClocks[i].enabled == index){
// lvalue = cur_state->r->bound[cur_state->z->curClocks[i].enabled-nevents].lower;
// uvalue = cur_state->r->bound[cur_state->z->curClocks[i].enabled-nevents].upper;
// #ifdef __LHPN_EVAL__
// printf("lv=%d,uv=%d,index=%d,i=%d\n",lvalue, uvalue,index,i);
// #endif
// break;
// }else{
// if ((isit == 'i')||(isit == 'c')){
// for (i = 1;i<cur_state->z->size;i++){
// if (cur_state->z->curClocks[i].enabled == index){
// if (i>=cur_state->z->dbmEnd){
// lvalue = -1*(cur_state->z->matrix[0][i]);
// uvalue = cur_state->z->matrix[i][0];
// else{// uses lower rate bound for both????
// lvalue = -1*(cur_state->z->matrix[0][i])*
// cur_state->r->bound[cur_state->z->curClocks[i].enabled-nevents].current;
// uvalue = cur_state->z->matrix[i][0]*
// cur_state->r->bound[cur_state->z->curClocks[i].enabled-nevents].current;
// #ifdef __LHPN_EVAL__
// printf("lv=%d,uv=%d,index=%d,i=%d\n",lvalue, uvalue,index,i);
// #endif
// break;
// }else if (isit == 'b'){
// log_val = cur_state->m->state[index];
// if (log_val == '1'){
// lvalue = 1;
// uvalue = 1;
// } else if (log_val == 'X'){
// lvalue = 0;
// uvalue = 1;
// } else if (log_val == '0'){
// lvalue = 0;
// uvalue = 0;
// #ifdef __LHPN_EVAL__
// printf("successful lookup of boolean %d,%c[%d,%d]\n",index,
// cur_state->m->state[index],lvalue,uvalue);
// #endif
// }else if ((isit == 'n')||(isit == 't')){
// // values already stored, no need to evaluate!
return null;
}
}
|
package fd;
import java.util.ArrayList;
import java.util.Collections;
/**
* @author barcick
*/
public final class FixMethods {
private FixMethods() {
}
//Ivan's idt with my stub
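/**
 * Scans the series with a growing window and marks every sample inside a
 * window whose dispersion (variance, see {@link #dispersion}) stays at or
 * below the threshold. This follows the dispersion-threshold (I-DT) idea:
 * grow the window while it stays "flat", mark it, then restart with the
 * original window size just past it; on rejection, slide forward one sample.
 *
 * @param array          the input series
 * @param th             dispersion threshold
 * @param originInterval initial window length, restored after each marked window
 * @return one flag per sample, TRUE where the sample lies in a low-dispersion window
 */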
public static ArrayList<Boolean> calculate(ArrayList<Double> array, int th, int originInterval) {
ArrayList<Boolean> result = new ArrayList<>(Collections.nCopies(array.size(), Boolean.FALSE));
// result is pre-filled with FALSE by nCopies above; indices inside
// low-dispersion windows are flipped to TRUE below.
int start = 1;
int interval = originInterval;
while (start + interval < array.size()) {
if (dispersion(array, start, interval) <= th) {
while (dispersion(array, start, interval) <= th) {
interval++;
if (start + interval >= array.size()) {
break;
}
}
interval--; // back off one step to the last admissible window
for (int i = start; i < start + interval; i++) {
if (start + interval >= array.size()) {
break;
}
result.set(i, Boolean.TRUE);
System.out.print(i + " ");
System.out.println(array.get(i));
}
start += interval;
interval = originInterval;
} else {
start++;
}
}
return result;
}
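// Hypothetical usage sketch (data and numbers are illustrative only):
//   ArrayList<Double> signal = new ArrayList<>(
//       java.util.Arrays.asList(1.0, 1.1, 0.9, 6.0, 9.5));
//   ArrayList<Boolean> flat = FixMethods.calculate(signal, 1, 2);
//   // flat.get(i) is TRUE where the sample sits in a low-dispersion window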
//my IDT
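/**
 * Dispersion-threshold scan, second variant: like calculate, but on a
 * rejected window it explicitly writes FALSE over the whole window and jumps
 * a full window ahead instead of sliding forward one sample.
 */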
public static ArrayList<Boolean> idt(ArrayList<Double> array, int th, int interval) {
ArrayList<Boolean> result = new ArrayList<>(Collections.nCopies(array.size(), Boolean.FALSE));
int wt = interval;
int start = 0;
while (start + wt <= array.size()) {
if (dispersion(array, start, wt) <= th) {
while (dispersion(array, start, wt) <= th && (start + wt) < array.size()) {
wt++;
}
wt--; // back off one step to the last admissible window
for (int i = start; i < start + wt; i++) {
// System.out.print(i + " ");
// System.out.println("true");
result.set(i, Boolean.TRUE);
}
start += wt;
wt = interval;
} else {
for (int i = start; i < start + wt; i++) {
// System.out.print(i + " ");
// System.out.println("false");
result.set(i, Boolean.FALSE);
}
start = start + wt;
wt = interval;
}
}
// System.out.println(result);
return result;
}
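/**
 * Multi-series variant of idt: runs the same scan over each series in turn,
 * printing index/value/flag per sample instead of returning a result list.
 */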
public static void idt2(ArrayList<ArrayList<Double>> arrayList, int th, int interval) {
for (ArrayList<Double> array : arrayList) {
    // Reset the scan state for each series; declared outside the loop,
    // the counters would carry over from the previous series.
    int wt = interval;
    int start = 0;
while (start + wt <= array.size()) {
if (dispersion(array, start, wt) <= th) {
while (dispersion(array, start, wt) <= th && (start + wt) < array.size()) {
wt++;
}
wt--; // back off one step to the last admissible window
for (int i = start; i < start + wt; i++) {
System.out.print(i + " ");
System.out.print(array.get(i));
System.out.println("true");
// mfile.write(i + ";" + array.get(i) + ";" + "true" + System.lineSeparator());
// mfile.write(Counterr.count[i] + ";" + y[i] + ";" + "true" + System.lineSeparator());
}
start += wt;
wt = interval;
} else {
for (int i = start; i < start + wt; i++) {
System.out.print(i + " ");
System.out.println(array.get(i));
System.out.println("false");
// mfile.write(i + ";" + array.get(i) + ";" + "false" + System.lineSeparator());
}
start = start + wt;
wt = interval;
}
}
}
}
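/**
 * Population variance of the window array[start .. start+n-1]:
 * mean = (1/n) * sum(y_i), result = (1/n) * sum((y_i - mean)^2).
 * E.g. for the window {1, 2, 3}: mean = 2, variance = 2/3.
 */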
private static double dispersion(ArrayList<Double> array, int start, int n) {
double ysr = 0, res = 0;
for (int i = start; i < start + n; i++) {
ysr += array.get(i);
}
ysr = ysr / n;
for (int i = start; i < start + n; i++) {
res += Math.pow((array.get(i) - ysr), 2);
}
res = res / n;
return res;
}
}
|
package lpn.parser;
import java.io.*;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import biomodel.util.Utility;
import main.Log;
import verification.Verification;
import verification.platu.lpn.DualHashMap;
import verification.platu.stategraph.StateGraph;
import verification.timed_state_exploration.zoneProject.InequalityVariable;
public class LhpnFile {
protected String separator;
protected HashMap<String, Transition> transitions;
protected HashMap<String, Place> places;
//protected HashMap<String, Integer> placesIndices;
protected HashMap<String, Variable> booleans;
protected HashMap<String, Variable> continuous;
protected HashMap<String, Variable> integers;
protected ArrayList<Variable> variables;
protected ArrayList<String> properties;
protected Log log;
protected String label;
protected int tranIndex;
protected int lpnIndex;
/*
* Cached value of the map that associates a variable name with its
* index. This field is initialized when a call to getVarIndexMap
* is made.
*/
protected DualHashMap<String, Integer> _varIndexMap;
/*
* Cached value of the map that associates a continuous variable with
* its index. This field is initialized when a call to getContinuousIndexMap
* is made.
*/
DualHashMap<String, Integer> _continuousIndexMap;
/*
* Cached value of the array of all the places in this LPN. This field is
* initialized when a call to getPlaceList is made.
*/
protected String[] placeList;
/*
* Cached value of all the transition in this LPN. This field is
* initialized when a call to getAllTransitions is made.
*/
protected Transition[] allTransitions;
/*
* The i-th array in this list stores THIS Lpn's variable indices of the shared variables
* between this LPN and another LPN whose lpnIndex is i.
*/
protected List<int[]> thisIndexList;
/*
* The i-th array in this list stores THE i-th Lpn's variable indices of the
* shared variables between this LPN and the i-th LPN.
*/
protected List<int[]> otherIndexList;
/*
* The local state graph that corresponds to this LPN.
*/
protected StateGraph stateGraph;
public LhpnFile(Log log) {
if (File.separator.equals("\\")) {
separator = "\\\\";
}
else {
separator = File.separator;
}
this.log = log;
transitions = new HashMap<String, Transition>();
places = new HashMap<String, Place>();
booleans = new HashMap<String, Variable>();
continuous = new HashMap<String, Variable>();
integers = new HashMap<String, Variable>();
variables = new ArrayList<Variable>();
properties = new ArrayList<String>();
lpnIndex = 0;
tranIndex = 0;
}
public LhpnFile() {
if (File.separator.equals("\\")) {
separator = "\\\\";
}
else {
separator = File.separator;
}
transitions = new HashMap<String, Transition>();
places = new HashMap<String, Place>();
booleans = new HashMap<String, Variable>();
continuous = new HashMap<String, Variable>();
integers = new HashMap<String, Variable>();
variables = new ArrayList<Variable>();
properties = new ArrayList<String>();
lpnIndex = 0;
tranIndex = 0;
thisIndexList = new ArrayList<int[]>();
otherIndexList = new ArrayList<int[]>();
}
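/**
 * Writes this LPN in the textual .lpn format: the .inputs/.outputs/.internal
 * signal lists, .dummy transitions, the #@-prefixed variable, init-state,
 * fail/non-disabling, init-value/rate and per-transition assignment sections,
 * the .graph arcs and .marking line, and a closing .end.
 */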
public void save(String filename) {
try {
String file = filename;
PrintStream p = new PrintStream(new FileOutputStream(filename));
StringBuffer buffer = new StringBuffer();
HashMap<String, Integer> boolOrder = new HashMap<String, Integer>();
int i = 0;
boolean flag = false;
for (String s : booleans.keySet()) {
if (booleans.get(s) != null) {
if (booleans.get(s).isInput()) {
if (!flag) {
buffer.append(".inputs ");
flag = true;
}
buffer.append(s + " ");
boolOrder.put(s, i);
i++;
}
}
}
for (String s : continuous.keySet()) {
if (continuous.get(s) != null) {
if (continuous.get(s).isInput()) {
if (!flag) {
buffer.append(".inputs ");
flag = true;
}
buffer.append(s + " ");
}
}
}
for (String s : integers.keySet()) {
if (integers.get(s) != null) {
if (integers.get(s).isInput()) {
if (!flag) {
buffer.append(".inputs ");
flag = true;
}
buffer.append(s + " ");
}
}
}
if (flag) buffer.append("\n");
flag = false;
for (String s : booleans.keySet()) {
if (booleans.get(s) != null) {
if (!flag) {
buffer.append(".outputs ");
flag = true;
}
if (booleans.get(s).isOutput()) {
buffer.append(s + " ");
boolOrder.put(s, i);
i++;
}
}
}
for (String s : continuous.keySet()) {
if (continuous.get(s) != null) {
if (continuous.get(s).isOutput()) {
if (!flag) {
buffer.append(".outputs ");
flag = true;
}
buffer.append(s + " ");
}
}
}
for (String s : integers.keySet()) {
if (integers.get(s) != null) {
if (integers.get(s).isOutput()) {
if (!flag) {
buffer.append(".outputs ");
flag = true;
}
buffer.append(s + " ");
}
}
}
if (flag) buffer.append("\n");
flag = false;
for (String s : booleans.keySet()) {
if (booleans.get(s) != null) {
if (!flag) {
buffer.append(".internal ");
flag = true;
}
if (booleans.get(s).isInternal()) {
buffer.append(s + " ");
boolOrder.put(s, i);
i++;
}
}
}
for (String s : continuous.keySet()) {
if (continuous.get(s) != null) {
if (continuous.get(s).isInternal()) {
if (!flag) {
buffer.append(".internal ");
flag = true;
}
buffer.append(s + " ");
}
}
}
for (String s : integers.keySet()) {
if (integers.get(s) != null) {
if (integers.get(s).isInternal()) {
if (!flag) {
buffer.append(".internal ");
flag = true;
}
buffer.append(s + " ");
}
}
}
if (flag) buffer.append("\n");
if (!transitions.isEmpty()) {
buffer.append(".dummy ");
for (String s : transitions.keySet()) {
buffer.append(s + " ");
}
buffer.append("\n");
}
if (!continuous.isEmpty() || !integers.isEmpty()) {
buffer.append("#@.variables ");
for (String s : continuous.keySet()) {
buffer.append(s + " ");
}
for (String s : integers.keySet()) {
buffer.append(s + " ");
}
buffer.append("\n");
}
if (!transitions.isEmpty()) {
flag = false;
for (Transition t : transitions.values()) {
if (t.isFail()) {
if (!flag) {
buffer.append("#@.failtrans ");
}
buffer.append(t.getLabel() + " ");
flag = true;
}
}
if (flag) {
buffer.append("\n");
}
flag = false;
for (Transition t : transitions.values()) {
if (t.isPersistent()) {
if (!flag) {
buffer.append("#@.non_disabling ");
}
buffer.append(t.getLabel() + " ");
flag = true;
}
}
if (flag) {
buffer.append("\n");
}
}
if (!places.isEmpty()) {
buffer.append("#|.places ");
for (String s : places.keySet()) {
buffer.append(s + " ");
}
buffer.append("\n");
}
if (!booleans.isEmpty()) {
flag = false;
for (i = 0; i < boolOrder.size(); i++) {
for (String s : booleans.keySet()) {
if (boolOrder.get(s).equals(i)) {
if (!flag) {
buffer.append("#@.init_state [");
flag = true;
}
if (booleans.get(s).getInitValue().equals("true")) {
buffer.append("1");
} else if (booleans.get(s).getInitValue().equals(
"false")) {
buffer.append("0");
} else {
buffer.append("X");
}
}
}
}
if (flag) {
buffer.append("]\n");
}
}
if (!transitions.isEmpty()) {
buffer.append(".graph\n");
for (Transition t : transitions.values()) {
for (Place s : t.getPreset()) {
buffer.append(s.getName() + " " + t.getLabel() + "\n");
}
for (Place s : t.getPostset()) {
buffer.append(t.getLabel() + " " + s.getName() + "\n");
}
}
}
flag = false;
if (!places.keySet().isEmpty()) {
for (Place place : places.values()) {
if (place.isMarked()) {
if (!flag) {
buffer.append(".marking {");
flag = true;
}
buffer.append(place.getName() + " ");
}
}
if (flag) {
buffer.append("}\n");
}
}
if (properties != null && !properties.isEmpty()) {
// if (!properties.contains("none"))
// properties.add(0, "none");
for (String property : properties) {
buffer.append("#@.property ");
buffer.append(property + "\n");
}
}
if (!continuous.isEmpty() || !integers.isEmpty()) {
buffer.append("#@.init_vals {");
for (Variable var : continuous.values()) {
buffer.append("<" + var.getName() + "="
+ var.getInitValue() + ">");
}
for (Variable var : integers.values()) {
buffer.append("<" + var.getName() + "="
+ var.getInitValue() + ">");
}
if (!continuous.isEmpty()) {
buffer.append("}\n#@.init_rates {");
for (Variable var : continuous.values()) {
buffer.append("<" + var.getName() + "="
+ var.getInitRate() + ">");
}
}
buffer.append("}\n");
}
if (!transitions.isEmpty()) {
flag = false;
for (Transition t : transitions.values()) {
if (t.getEnabling() != null && !t.getEnabling().equals("")) {
if (!flag) {
buffer.append("#@.enablings {");
flag = true;
}
buffer.append("<" + t.getLabel() + "=["
+ t.getEnabling() + "]>");
}
}
if (flag) {
buffer.append("}\n");
}
flag = false;
for (Transition t : transitions.values()) {
HashMap<String, String> contAssign = t.getContAssignments();
if (!contAssign.isEmpty()) {
if (!flag) {
buffer.append("#@.assignments {");
flag = true;
}
for (String var : contAssign.keySet()) {
buffer.append("<" + t.getLabel() + "=[" + var + ":="
+ contAssign.get(var) + "]>");
}
}
HashMap<String, String> intAssign = t.getIntAssignments();
if (!intAssign.isEmpty()) {
if (!flag) {
buffer.append("#@.assignments {");
flag = true;
}
for (String var : intAssign.keySet()) {
buffer.append("<" + t.getLabel() + "=[" + var + ":="
+ intAssign.get(var) + "]>");
}
}
}
if (flag) {
buffer.append("}\n");
}
flag = false;
for (Transition t : transitions.values()) {
HashMap<String, String> rateAssign = t.getRateAssignments();
for (String var : rateAssign.keySet()) {
if (!var.equals("")) {
if (!flag) {
buffer.append("#@.rate_assignments {");
flag = true;
}
buffer.append("<" + t.getLabel() + "=[" + var + ":="
+ t.getRateAssignment(var) + "]>");
}
}
}
if (flag) {
buffer.append("}\n");
}
flag = false;
for (Transition t : transitions.values()) {
if (t.containsDelay()) {
if (!flag) {
buffer.append("#@.delay_assignments {");
flag = true;
}
buffer.append("<" + t.getLabel() + "=[" + t.getDelay()
+ "]>");
}
}
if (flag) {
buffer.append("}\n");
}
flag = false;
for (Transition t : transitions.values()) {
if (t.containsPriority()) {
if (!flag) {
buffer.append("#@.priority_assignments {");
flag = true;
}
buffer.append("<" + t.getLabel() + "=["
+ t.getPriority() + "]>");
}
}
if (flag) {
buffer.append("}\n");
}
}
flag = false;
for (Transition t : transitions.values()) {
HashMap<String, String> boolAssign = t.getBoolAssignments();
for (String var : boolAssign.keySet()) {
if (!flag) {
buffer.append("#@.boolean_assignments {");
flag = true;
}
buffer.append("<" + t.getLabel() + "=[" + var + ":="
+ boolAssign.get(var) + "]>");
}
}
if (flag) {
buffer.append("}\n");
}
buffer.append("#@.continuous ");
for (String s : continuous.keySet()) {
buffer.append(s + " ");
}
buffer.append("\n");
if (buffer.toString().length() > 0) {
buffer.append(".end\n");
}
p.print(buffer);
p.close();
if (log != null) {
log.addText("Saving:\n" + file + "\n");
}
} catch (Exception e) {
e.printStackTrace();
}
}
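/**
 * Loads an LPN from the given file. The label is taken from the file name
 * (minus the .lpn extension), and the text is handed to the parse passes in
 * order: properties, places, control flow, continuous variables, integers,
 * inputs/outputs, marking, and the per-transition assignment sections. If any
 * expression fails to parse, an error dialog is raised.
 */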
public void load(String filename) {
StringBuffer data = new StringBuffer();
label = filename.split(separator)[filename.split(separator).length - 1].replace(".lpn","");
try {
BufferedReader in = new BufferedReader(new FileReader(filename));
String str;
while ((str = in.readLine()) != null) {
data.append(str + "\n");
}
in.close();
} catch (IOException e) {
e.printStackTrace();
throw new IllegalStateException("Error opening file");
}
parseProperty(data);
parsePlaces(data);
parseControlFlow(data);
parseVars(data);
parseIntegers(data);
parseInOut(data);
parseMarking(data);
boolean error = parseEnabling(data);
error = parseAssign(data, error);
error = parseRateAssign(data, error);
error = parseDelayAssign(data, error);
error = parsePriorityAssign(data, error);
error = parseBooleanAssign(data, error);
error = parseTransitionRate(data, error);
parseFailTransitions(data);
parsePersistentTransitions(data);
if (!error) {
Utility
.createErrorMessage("Invalid Expressions",
"The input file contained invalid expressions. See console for details.");
}
}
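/**
 * Writes a Graphviz DOT rendering of this LPN: transitions as plaintext nodes
 * labeled with their enabling, delay and assignments (fail transitions in
 * red, persistent ones in blue, both in purple), places as circles with
 * marked places drawn filled, and one edge per preset/postset arc.
 */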
public void printDot(String filename) {
try {
String file = filename;
PrintStream p = new PrintStream(new FileOutputStream(filename));
StringBuffer buffer = new StringBuffer();
buffer.append("digraph G {\nsize=\"7.5,10\"\n");
buffer.append("Inits [shape=plaintext,label=\"");
for (Variable v : booleans.values()) {
buffer.append(v.getName() + " = " + v.getInitValue() + "\\n");
}
for (Variable v : integers.values()) {
buffer.append(v.getName() + " = " + v.getInitValue() + "\\n");
}
for (Variable v : continuous.values()) {
buffer.append(v.getName() + " = " + v.getInitValue() + "\\n" + v.getName() + "' = " + v.getInitRate() + "\\n");
}
buffer.append("\"]\n");
for (Transition t : transitions.values()) {
buffer.append(t.getLabel() + " [shape=plaintext,label=\"" + t.getLabel());
if (t.containsEnabling()) {
    // Persistent and non-persistent branches emitted identical text,
    // so the inner check is collapsed into one append.
    buffer.append("\\n{" + t.getEnabling() + "}");
}
if (t.containsDelay()) {
buffer.append("\\n[" + t.getDelay() + "]");
}
if (t.containsAssignment()) {
buffer.append("\\n<");
boolean flag = false;
if (t.containsBooleanAssignment()) {
HashMap<String, String> map = t.getBoolAssignments();
for (String v : map.keySet()) {
if (flag) {
buffer.append(",");
}
else {
flag = true;
}
buffer.append(v + ":="
+ t.getBoolAssignment(v));
}
}
if (t.containsContinuousAssignment()) {
HashMap<String, String> map = t.getContAssignments();
for (String v : map.keySet()) {
if (flag) {
buffer.append(",");
}
else {
flag = true;
}
buffer.append(v + ":=" + t.getContAssignment(v));
}
}
if (t.containsIntegerAssignment()) {
HashMap<String, String> map = t.getIntAssignments();
for (String v : map.keySet()) {
if (flag) {
buffer.append(",");
}
else {
flag = true;
}
buffer.append(v + ":=" + t.getIntAssignment(v));
}
}
if (t.containsRateAssignment()) {
HashMap<String, String> map = t.getRateAssignments();
for (String v : map.keySet()) {
if (flag) {
buffer.append(",");
}
else {
flag = true;
}
buffer.append(v + "':=" + t.getRateAssignment(v));
}
}
buffer.append(">");
}
buffer.append("\"");
if (t.isFail()&&t.isPersistent()) {
buffer.append(",fontcolor=purple");
}
else if (t.isFail()) {
buffer.append(",fontcolor=red");
}
else if (t.isPersistent()) {
buffer.append(",fontcolor=blue");
}
buffer.append("];\n");
}
for (Place place : places.values()) {
buffer.append(place.getName() + " [label=\"" + place.getName() + "\"];\n" + place.getName()
+ " [shape=circle,width=0.40,height=0.40]\n");
if (place.isMarked()) {
buffer
.append(place.getName()
+ " [height=.3,width=.3,peripheries=2,style=filled,color=black,fontcolor=white];\n");
}
Transition[] postset = place.getPostset();
for (Transition t : postset) {
buffer.append(place.getName() + " -> " + t.getLabel() + "\n");
}
}
for (Transition t : transitions.values()) {
Place[] postset = t.getPostset();
for (Place place : postset) {
buffer.append(t.getLabel() + " -> " + place.getName() + "\n");
}
}
buffer.append("}\n");
p.print(buffer);
p.close();
if (log != null) {
log.addText("Saving:\n" + file + "\n");
}
}
catch (FileNotFoundException e) {
e.printStackTrace();
}
}
public void addTransition(String name) {
Transition trans = new Transition(name, tranIndex++, this);
transitions.put(name, trans);
}
// public void addTransition(String name, Properties prop) {
// Transition trans = new Transition(name, variables, this);
// for (String p : prop.getProperty("preset").split("\\s")) {
// trans.addPreset(places.get(p));
// for (String p : prop.getProperty("postset").split("\\s")) {
// trans.addPostset(places.get(p));
// transitions.put(name, trans);
public void addTransition(Transition t) {
transitions.put(t.getLabel(), t);
}
public void addPlace(String name, Boolean ic) {
Place place = new Place(name, ic);
places.put(name, place);
}
public void addEnabling(String transition, String enabling) {
transitions.get(transition).addEnabling(enabling);
}
public void addProperty(String prop) {
properties.add(prop);
}
public void removeProperty(String prop) {
properties.remove(prop);
}
public void addMovement(String fromName, String toName) {
if (isTransition(fromName)) {
transitions.get(fromName).addPostset(places.get(toName));
places.get(toName).addPreset(transitions.get(fromName));
} else {
transitions.get(toName).addPreset(places.get(fromName));
places.get(fromName).addPostset(transitions.get(toName));
}
}
public void addInput(String name, String ic) {
Variable var = new Variable(name, "boolean", ic, Variable.INPUT);
booleans.put(name, var);
if (!variables.contains(var)) {
variables.add(var);
}
}
public void addInput(String name, String type, String ic) {
Variable var = new Variable(name, type, ic, Variable.INPUT);
if (type.equals("boolean"))
booleans.put(name, var);
else if (type.equals("integer"))
integers.put(name, var);
else if (type.equals("continuous"))
continuous.put(name, var);
if (!variables.contains(var)) {
variables.add(var);
}
}
public void addOutput(String name, String ic) {
Variable var = new Variable(name, "boolean", ic, Variable.OUTPUT);
booleans.put(name, var);
if (!variables.contains(var)) {
variables.add(var);
}
}
public void addOutput(String name, String type, String ic) {
Variable var = new Variable(name, type, ic, Variable.OUTPUT);
if (type.equals("boolean"))
booleans.put(name, var);
else if (type.equals("integer"))
integers.put(name, var);
else if (type.equals("continuous"))
continuous.put(name, var);
if (!variables.contains(var)) {
variables.add(var);
}
}
public void addInternal(String name, String type, String ic) {
Variable var = new Variable(name, type, ic, Variable.INTERNAL);
if (type.equals("boolean"))
booleans.put(name, var);
else if (type.equals("integer"))
integers.put(name, var);
else if (type.equals("continuous"))
continuous.put(name, var);
if (!variables.contains(var)) {
variables.add(var);
}
}
public void addBoolean(String name, String ic) {
Variable var = new Variable(name, "boolean", ic);
booleans.put(name, var);
if (!variables.contains(var)) {
variables.add(var);
}
}
public void addContinuous(String name) {
// Check if the name is already present. If not, add the variable.
if(!continuous.containsKey(name)){
Variable var = new Variable(name, "continuous");
continuous.put(name, var);
}
// Variable var = new Variable(name, "continuous");
// continuous.put(name, var);
// if (!variables.contains(var)) {
// variables.add(var);
}
public void addContinuous(String name, Properties initCond) {
// Variable var = new Variable(name, "continuous", initCond);
// continuous.put(name, var);
// if (!variables.contains(var)) {
// variables.add(var);
// First check if this variable has already been introduced.
// If the variable has already been added, then just change the
// values of the variable.
// if(variables.contains(var)){
// // Find the index of the previous added variable.
// int index = variables.indexOf(var);
// // Get the previous created variable.
// Variable contVar = variables.get(index);
// // Set the values of the variable.
// contVar.addInitCond(initCond);
// if(continuous.containsValue(var)){
// else{
// // Since the variable has not already been added, then add the
// // variable to the fields 'continuous' and 'variables'.
// continuous.put(name, var);
// variables.add(var);
// Holds the Variable if it was created before.
Variable contVar = null;
// Search for the Variable.
for(Variable var : continuous.values()){
if(var.getName().equals(name)){
contVar = var;
}
}
// If a previously created Variable was found, just update its initial
// conditions; otherwise register a fresh continuous variable below.
if(contVar != null){
    contVar.addInitCond(initCond);
}
else{
continuous.put(name, new Variable(name, "continuous", initCond));
}
}
public void addContinuous(String name, String initVal, String initRate) {
Properties initCond = new Properties();
initCond.setProperty("value", initVal);
initCond.setProperty("rate", initRate);
Variable var = new Variable(name, "continuous", initCond);
continuous.put(name, var);
// if (!variables.contains(var)) {
// variables.add(var);
}
public void addInteger(String name, String ic) {
Variable var = new Variable(name, "integer", ic);
integers.put(name, var);
if (!variables.contains(var)) {
variables.add(var);
}
}
public void addTransitionRate(String transition, String rate) {
transitions.get(transition).addDelay("exponential(" + rate + ")");
}
public void addBoolAssign(String transition, String variable,
        String assignment) {
    // 'variables' holds Variable objects, so contains(String) could never
    // match; look the variable up by name instead.
    if (getVariable(variable) == null) {
        addOutput(variable, "unknown");
    }
    transitions.get(transition).addIntAssign(variable, assignment);
}
public void addRateAssign(String transition, String variable, String rate) {
transitions.get(transition).addRateAssign(variable, rate);
}
public void addIntAssign(String transition, String variable, String assign) {
transitions.get(transition).addIntAssign(variable, assign);
}
public void changePlaceName(String oldName, String newName) {
places.get(oldName).setName(newName);
places.put(newName, places.get(oldName));
places.remove(oldName);
}
public void changeTransitionName(String oldName, String newName) {
transitions.get(oldName).setName(newName);
transitions.put(newName, transitions.get(oldName));
transitions.remove(oldName);
}
public void changeDelay(String t, String delay) {
transitions.get(t).addDelay(delay);
}
public void changePriority(String t, String priority) {
    // The original delegated to addDelay here, which looks like a copy-paste
    // slip; this assumes Transition exposes addPriority alongside
    // containsPriority()/getPriority().
    transitions.get(t).addPriority(priority);
}
public void changeInitialMarking(String p, boolean marking) {
places.get(p).setMarking(marking);
}
public void changeVariableName(String oldName, String newName) {
if (isContinuous(oldName)) {
continuous.put(newName, continuous.get(oldName));
continuous.remove(oldName);
} else if (isBoolean(oldName)) {
booleans.put(newName, booleans.get(oldName));
booleans.remove(oldName);
} else if (isInteger(oldName)) {
integers.put(newName, integers.get(oldName));
integers.remove(oldName);
}
}
public void changeContInitCond(String var, Properties initCond) {
continuous.get(var).addInitCond(initCond);
}
public void changeIntegerInitCond(String var, String initCond) {
integers.get(var).addInitValue(initCond);
}
public String[] getAllIDs() {
String[] ids = new String[transitions.size() + places.size()
+ variables.size()];
int i = 0;
for (String t : transitions.keySet()) {
ids[i++] = t;
}
for (String p : places.keySet()) {
ids[i++] = p;
}
for (Variable v : variables) {
ids[i++] = v.getName();
}
return ids;
}
public ArrayList<String> getProperties() {
return properties;
}
public String[] getTransitionList() {
String[] transitionList = new String[transitions.size()];
int i = 0;
for (String t : transitions.keySet()) {
transitionList[i++] = t;
}
return transitionList;
}
public Transition[] getAllTransitions() {
if (allTransitions == null) {
allTransitions = new Transition[transitions.size()];
for (String t: transitions.keySet()) {
allTransitions[transitions.get(t).getIndex()] = transitions.get(t);
}
return allTransitions;
}
else
return allTransitions;
}
public Transition getTransition(int index) {
return getAllTransitions()[index];
}
public ArrayList<String> getTransitionListArrayList() {
ArrayList<String> transitionList = new ArrayList<String>(transitions.size());
int i = 0;
for (String t: transitions.keySet()) {
transitionList.add(i++, t);
}
return transitionList;
}
public Transition getTransition(String transition) {
return transitions.get(transition);
}
public boolean isRandomBoolAssignTree(String transition, String variable) {
if (transitions.get(transition).getBoolAssignTree(variable) == null)
return false;
if (transitions.get(transition).getBoolAssignTree(variable).op.equals("exponential")
|| transitions.get(transition).getBoolAssignTree(variable).op.equals("uniform")) {
return true;
}
return false;
}
public boolean isRandomContAssignTree(String transition, String variable) {
if (transitions.get(transition).getContAssignTree(variable) == null)
return false;
if (transitions.get(transition).getContAssignTree(variable).op.equals("exponential")
|| transitions.get(transition).getContAssignTree(variable).op.equals("uniform")) {
return true;
}
return false;
}
public boolean isRandomIntAssignTree(String transition, String variable) {
if (transitions.get(transition).getIntAssignTree(variable) == null)
return false;
if (transitions.get(transition).getIntAssignTree(variable).op.equals("exponential")
|| transitions.get(transition).getIntAssignTree(variable).op.equals("uniform")) {
return true;
}
return false;
}
public boolean isExpTransitionRateTree(String transition) {
if (transitions.get(transition).getDelayTree() == null)
return false;
if (transitions.get(transition).getDelayTree().op.equals("exponential")) {
return true;
}
return false;
}
public ExprTree getTransitionRateTree(String transition) {
if (transitions.get(transition).getDelayTree() == null)
return null;
if (transitions.get(transition).getDelayTree().op.equals("exponential")) {
return transitions.get(transition).getDelayTree().r1;
}
return null;
}
public ExprTree getDelayTree(String transition) {
if (transitions.get(transition).getDelayTree() == null) {
return null;
}
else {
return transitions.get(transition).getDelayTree();
}
}
public ExprTree getEnablingTree(String transition) {
return transitions.get(transition).getEnablingTree();
}
public String getLabel() {
return label;
}
public int getLpnIndex() {
return lpnIndex;
}
public String[] getPlaceList() {
if (this.placeList == null) {
placeList = new String[places.size()];
int i = 0;
for (String t : places.keySet()) {
placeList[i++] = t;
}
return placeList;
}
else
return this.placeList;
}
// public ArrayList<String> getAllPlaces() {
// if(placeList == null) {
// int i = 0;
// for (String t: places.keySet()) {
// placeList.add(i++, t);
// return placeList;
// else
// return placeList;
public Place getPlace(String place) {
return places.get(place);
}
public String[] getPreset(String name) {
if (isTransition(name)) {
String[] preset = new String[transitions.get(name).getPreset().length];
int i = 0;
for (Place p : transitions.get(name).getPreset()) {
preset[i++] = p.getName();
}
return preset;
} else if (places.containsKey(name)) {
String[] preset = new String[places.get(name).getPreset().length];
int i = 0;
for (Transition t : places.get(name).getPreset()) {
preset[i++] = t.getLabel();
}
return preset;
} else {
return null;
}
}
public int[] getPresetIndex(String name) {
if (isTransition(name)) {
int[] preset = new int[transitions.get(name).getPreset().length];
Place[] presetPlaces = transitions.get(name).getPreset();
for (int i=0; i<presetPlaces.length; i++) {
for (int placeIndex=0; placeIndex<this.getPlaceList().length; placeIndex++) {
if (this.getPlaceList()[placeIndex].equals(presetPlaces[i].getName())) {
preset[i] = placeIndex;
}
}
}
return preset;
}
else if (places.containsKey(name)) {
int[] preset = new int[places.get(name).getPreset().length];
int i = 0;
for (Transition t : places.get(name).getPreset()) {
preset[i++] = t.getIndex();
}
return preset;
} else {
return null;
}
}
public String[] getPostset(String name) {
if (isTransition(name)) {
String[] postset = new String[transitions.get(name).getPostset().length];
int i = 0;
for (Place p : transitions.get(name).getPostset()) {
postset[i++] = p.getName();
}
return postset;
} else if (places.containsKey(name)) {
String[] postset = new String[places.get(name).getPostset().length];
int i = 0;
for (Transition t : places.get(name).getPostset()) {
postset[i++] = t.getLabel();
}
return postset;
} else {
return null;
}
}
public int[] getPostsetIndex(String name) {
if (isTransition(name)) {
int[] postset = new int[transitions.get(name).getPostset().length];
Place[] postPlaces = transitions.get(name).getPostset();
for (int i=0; i<postPlaces.length; i++) {
for (int placeIndex=0; placeIndex<this.getPlaceList().length; placeIndex++) {
if (this.getPlaceList()[placeIndex].equals(postPlaces[i].getName())) {
postset[i] = placeIndex;
}
}
}
return postset;
}
else if (places.containsKey(name)) {
int[] postset = new int[places.get(name).getPostset().length];
int i = 0;
for (Transition t : places.get(name).getPostset()) {
postset[i++] = t.getIndex();
}
return postset;
} else {
return null;
}
}
public String[] getControlFlow() {
ArrayList<String> movements = new ArrayList<String>();
for (Transition t : transitions.values()) {
for (Place p : t.getPostset()) {
movements.add(t.getLabel() + " " + p.getName());
}
for (Place p : t.getPreset()) {
movements.add(p.getName() + " " + t.getLabel());
}
}
String[] array = new String[movements.size()];
int i = 0;
for (String s : movements) {
array[i++] = s;
}
return array;
}
public boolean getInitialMarking(String place) {
return places.get(place).isMarked();
}
public int[] getInitialMarkingsArray() {
int[] initialMarkings = new int[this.getPlaceList().length];
int i = 0;
for (String place : this.getPlaceList()) {
if(places.get(place).isMarked()) {
initialMarkings[i] = 1;
}
else {
initialMarkings[i] = 0;
}
i++;
}
return initialMarkings;
}
public String[] getVariables() {
String[] vars = new String[variables.size()+continuous.keySet().size()];
int i = 0;
for (Variable v : variables) {
vars[i++] = v.getName();
}
for(String contName : continuous.keySet()){
vars[i++] = contName;
}
return vars;
}
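/**
 * Gives a map that associates the name of a variable with its index.
 * The map is built on the first call and cached in _varIndexMap.
 * @return
 *         The map from variable names to their indices.
 */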
public DualHashMap<String, Integer> getVarIndexMap() {
if(_varIndexMap == null){
int i = 0;
HashMap<String, Integer> varIndexHashMap = new HashMap<String, Integer>();
for (Variable v: variables) {
varIndexHashMap.put(v.getName(), i);
i++;
}
DualHashMap<String, Integer> varIndexMap = new DualHashMap<String, Integer>(varIndexHashMap, variables.size());
_varIndexMap = varIndexMap;
return varIndexMap;
}
else{
return _varIndexMap;
}
}
/**
 * Gives a map that associates the name of a continuous variable with its index.
 * @return
 *         The map from continuous variable names to their indices.
 */
public DualHashMap<String, Integer> getContinuousIndexMap(){
if(_continuousIndexMap == null){
int i = getAllTransitions().length; // populates the transition cache if needed
HashMap<String, Integer> contVarIndexHashMap = new HashMap<String, Integer>();
for(Variable v : continuous.values()){
contVarIndexHashMap.put(v.getName(), i);
i++;
}
DualHashMap<String, Integer> contIndexMap =
new DualHashMap<String, Integer> (contVarIndexHashMap, variables.size());
_continuousIndexMap = contIndexMap;
return contIndexMap;
}
else{
return _continuousIndexMap;
}
}
public HashMap<String, String> getAllVarsWithValuesAsString(int[] varValueVector) {
DualHashMap<String, Integer> varIndexMap = this.getVarIndexMap();
HashMap<String, String> varToValueMap = new HashMap<String, String>();
// varValue is a map between variable names and their values.
for (int i = 0; i < varValueVector.length; i++) {
String var = varIndexMap.getKey(i);
varToValueMap.put(var, varValueVector[i] + "");
}
return varToValueMap;
}
public HashMap<String, Integer> getAllVarsWithValuesAsInt(int[] varValueVector) {
DualHashMap<String, Integer> varIndexMap = this.getVarIndexMap();
HashMap<String, Integer> varToValueMap = new HashMap<String, Integer>();
// varValue is map between variable names and their values.
for (int i = 0; i < varValueVector.length; i++) {
String var = varIndexMap.getKey(i);
varToValueMap.put(var, varValueVector[i]);
}
return varToValueMap;
}
public Variable getVariable(String name) {
if (isBoolean(name)) {
return booleans.get(name);
} else if (isContinuous(name)) {
return continuous.get(name);
} else if (isInteger(name)) {
return integers.get(name);
}
return null;
}
public Variable getVariable(int index){
return variables.get(index);
}
public int getVariableIndex(String name){
return getVarIndexMap().get(name);
}
public String getContVarName(int index){
//int counter = 0;
// The index of the continuous variable is determined by
// the order it is returned by the 'continuous' fields
// iterator.
// for(String name : continuous.keySet()){
// if(counter == index){
// return name;
// counter++;
DualHashMap<String, Integer> variableMap = getContinuousIndexMap();
//return null;
return variableMap.getKey(index);
}
public Variable getContVar(int index){
// Convert the index into a name.
String name = getContVarName(index);
return continuous.get(name);
}
public int getContVarIndex(String name){
    DualHashMap<String, Integer> contVarIndices = getContinuousIndexMap();
    return contVarIndices.getValue(name);
}
public HashMap<String, String> getBoolInputs() {
HashMap<String, String> inputs = new HashMap<String, String>();
for (Variable v : booleans.values()) {
if (!v.isOutput()) {
inputs.put(v.getName(), v.getInitValue());
}
}
return inputs;
}
public HashMap<String, String> getBoolOutputs() {
HashMap<String, String> outputs = new HashMap<String, String>();
for (Variable v : booleans.values()) {
if (v.isOutput()) {
outputs.put(v.getName(), v.getInitValue());
}
}
return outputs;
}
public HashMap<String, String> getAllInputs() {
HashMap<String, String> inputs = new HashMap<String, String>();
for (Variable v : booleans.values()) {
if (v.isInput()) {
inputs.put(v.getName(), v.getInitValue());
}
}
for (Variable v : integers.values()) {
if (v.isInput()) {
inputs.put(v.getName(), v.getInitValue());
}
}
for (Variable v : continuous.values()) {
if (v.isInput()) {
inputs.put(v.getName(), v.getInitValue());
}
}
return inputs;
}
public HashMap<String, String> getAllInternals() {
HashMap<String, String> internals = new HashMap<String, String>();
for (Variable v : booleans.values()) {
if (v.isInternal()) {
internals.put(v.getName(), v.getInitValue());
}
}
for (Variable v : integers.values()) {
if (v.isInternal()) {
internals.put(v.getName(), v.getInitValue());
}
}
for (Variable v : continuous.values()) {
if (v.isInternal()) {
internals.put(v.getName(), v.getInitValue());
}
}
return internals;
}
public HashMap<String, String> getAllOutputs() {
HashMap<String, String> outputs = new HashMap<String, String>();
for (Variable v : booleans.values()) {
if (v.isOutput()) {
outputs.put(v.getName(), v.getInitValue());
}
}
for (Variable v : integers.values()) {
if (v.isOutput()) {
outputs.put(v.getName(), v.getInitValue());
}
}
for (Variable v : continuous.values()) {
if (v.isOutput()) {
outputs.put(v.getName(), v.getInitValue());
}
}
return outputs;
}
public HashMap<String, String> getBooleans() {
HashMap<String, String> bools = new HashMap<String, String>();
for (Variable v : booleans.values()) {
bools.put(v.getName(), v.getInitValue());
}
return bools;
}
public HashMap<String, Properties> getContinuous() {
HashMap<String, Properties> tempCont = new HashMap<String, Properties>();
for (Variable v : continuous.values()) {
Properties prop = new Properties();
prop.setProperty("value", v.getInitValue());
prop.setProperty("rate", v.getInitRate());
tempCont.put(v.getName(), prop);
}
return tempCont;
}
public HashMap<String, String> getIntegers() {
HashMap<String, String> tempInt = new HashMap<String, String>();
for (Variable v : integers.values()) {
tempInt.put(v.getName(), v.getInitValue());
}
return tempInt;
}
public String[] getBooleanVars() {
String[] vars = new String[booleans.size()];
int i = 0;
for (String v : booleans.keySet()) {
vars[i++] = v;
}
return vars;
}
public String[] getBooleanVars(String transition) {
Set<String> set = transitions.get(transition).getBoolAssignments()
.keySet();
String[] array = new String[set.size()];
int i = 0;
for (String s : set) {
array[i++] = s;
}
return array;
}
public String[] getContVars() {
String[] vars = new String[continuous.size()];
int i = 0;
for (String v : continuous.keySet()) {
vars[i++] = v;
}
return vars;
}
public String[] getContVars(String transition) {
Set<String> set = transitions.get(transition).getContAssignments()
.keySet();
String[] array = new String[set.size()];
int i = 0;
for (String s : set) {
array[i++] = s;
}
return array;
}
public String[] getRateVars(String transition) {
Set<String> set = transitions.get(transition).getRateAssignments()
.keySet();
String[] array = new String[set.size()];
int i = 0;
for (String s : set) {
array[i++] = s;
}
return array;
}
public String[] getIntVars() {
String[] vars = new String[integers.size()];
int i = 0;
for (String v : integers.keySet()) {
vars[i++] = v;
}
return vars;
}
public String[] getIntVars(String transition) {
Set<String> set = transitions.get(transition).getIntAssignments()
.keySet();
String[] array = new String[set.size()];
int i = 0;
for (String s : set) {
array[i++] = s;
}
return array;
}
public String getInitialVal(String var) {
if (isBoolean(var)) {
return booleans.get(var).getInitValue();
} else if (isInteger(var)) {
return integers.get(var).getInitValue();
} else {
return continuous.get(var).getInitValue();
}
}
public String getInitialRate(String var) {
if (isContinuous(var)) {
return continuous.get(var).getInitRate();
} else
return null;
}
/**
 * Converts a boolean or integer variable's initial value to an int
 * (true maps to 1, false to 0). Continuous variables are not accepted
 * and abort with an error.
 * @param var the variable name
 * @return the initial value as an int
 */
public int getInitVariableVector(String var) {
if (isBoolean(var)) {
if(booleans.get(var).getInitValue().equals("true"))
return 1;
else
return 0;
}
else if (isInteger(var)) {
return Integer.parseInt(integers.get(var).getInitValue());
}
else {
// Continuous variable is not accepted here.
// return (int) Double.parseDouble(continuous.get(var).getInitValue());
System.out.println(var + " is neither boolean or integer variable. ");
new NullPointerException().printStackTrace();
System.exit(1);
return 0;
}
}
public String getBoolAssign(String transition, String variable) {
return transitions.get(transition).getBoolAssignment(variable);
}
public ExprTree getBoolAssignTree(String transition, String variable) {
return transitions.get(transition).getBoolAssignTree(variable);
}
public String getContAssign(String transition, String variable) {
return transitions.get(transition).getContAssignment(variable);
}
public ExprTree getContAssignTree(String transition, String variable) {
return transitions.get(transition).getContAssignTree(variable);
}
public String getRateAssign(String transition, String variable) {
return transitions.get(transition).getRateAssignment(variable);
}
public ExprTree getRateAssignTree(String transition, String variable) {
return transitions.get(transition).getRateAssignTree(variable);
}
public String getIntAssign(String transition, String variable) {
return transitions.get(transition).getIntAssignment(variable);
}
public ExprTree getIntAssignTree(String transition, String variable) {
return transitions.get(transition).getIntAssignTree(variable);
}
public void removeTransition(String name) {
if (!transitions.containsKey(name)) {
return;
}
for (Place p : transitions.get(name).getPreset()) {
removeMovement(p.getName(), name);
}
for (Place p : transitions.get(name).getPostset()) {
removeMovement(name, p.getName());
}
transitions.remove(name);
}
public void removePlace(String name) {
if (name != null && places.containsKey(name)) {
for (Transition t : places.get(name).getPreset()) {
removeMovement(t.getLabel(), name);
}
for (Transition t : places.get(name).getPostset()) {
removeMovement(name, t.getLabel());
}
places.remove(name);
}
}
public void renamePlace(String oldName, String newName) {
if (oldName != null && places.containsKey(oldName)) {
places.put(newName, places.get(oldName));
places.get(newName).changeName(newName);
places.remove(oldName);
}
}
public void renameTransition(String oldName, String newName) {
if (oldName != null && transitions.containsKey(oldName)) {
transitions.put(newName, transitions.get(oldName));
transitions.get(newName).changeName(newName);
transitions.remove(oldName);
}
}
public void removeMovement(String from, String to) {
if (isTransition(from)) {
transitions.get(from).removePostset(places.get(to));
places.get(to).removePreset(transitions.get(from));
} else {
transitions.get(to).removePreset(places.get(from));
places.get(from).removePostset(transitions.get(to));
}
}
public void removeInput(String name) {
    if (name != null && booleans.containsKey(name)) {
        // Remove from the variables list before the map entry goes away;
        // the original removed from the map first, so the later lookup
        // returned null and the list entry was left behind.
        variables.remove(booleans.get(name));
        booleans.remove(name);
    }
}
public void removeBoolean(String name) {
    if (name != null && booleans.containsKey(name)) {
        variables.remove(booleans.get(name));
        booleans.remove(name);
    }
}
public void removeOutput(String name) {
    if (name != null && booleans.containsKey(name)) {
        variables.remove(booleans.get(name));
        booleans.remove(name);
    }
}
public void removeContinuous(String name) {
    if (name != null && continuous.containsKey(name)) {
        variables.remove(continuous.get(name));
        continuous.remove(name);
    }
}
public void removeInteger(String name) {
    if (name != null && integers.containsKey(name)) {
        variables.remove(integers.get(name));
        integers.remove(name);
    }
}
public boolean removeVar(String name) {
for (Transition t : transitions.values()) {
if (t.containsAssignment(name)) {
return false;
}
}
if (name != null && continuous.containsKey(name)) {
removeContinuous(name);
} else if (name != null && booleans.containsKey(name)) {
removeBoolean(name);
} else if (name != null && integers.containsKey(name)) {
removeInteger(name);
} else {
for (Variable v : variables) {
if (v.getName().equals(name)) {
variables.remove(v);
break;
}
}
}
return true;
}
public void removeAllAssignVar(String name) {
for (Transition t : transitions.values()) {
if (t.containsAssignment(name)) {
t.removeAssignment(name);
}
}
}
public void setLabel(String label) {
this.label = label;
}
public void setLpnIndex(int index) {
this.lpnIndex = index;
}
public boolean isTransition(String name) {
return transitions.containsKey(name);
}
public boolean isInput(String var) {
if (isContinuous(var)) {
return continuous.get(var).isInput();
}
else if (isInteger(var)) {
return integers.get(var).isInput();
}
else if (isBoolean(var)) {
return booleans.get(var).isInput();
}
return false;
}
public boolean isOutput(String var) {
if (isContinuous(var)) {
return continuous.get(var).isOutput();
}
else if (isInteger(var)) {
return integers.get(var).isOutput();
}
else if (isBoolean(var)) {
return booleans.get(var).isOutput();
}
return false;
}
public boolean isBoolean(String var) {
return booleans.containsKey(var);
}
public boolean isContinuous(String var) {
return continuous.containsKey(var);
}
public boolean isInteger(String var) {
return integers.containsKey(var);
}
public boolean isMarked(String place) {
return places.get(place).isMarked();
}
public boolean containsTransition(String name) {
return transitions.containsKey(name);
}
public boolean containsMovement(String name) {
if (places.containsKey(name)) {
return places.get(name).isConnected();
} else {
return transitions.get(name).isConnected();
}
}
public boolean containsMovement(String from, String to) {
if (isTransition(from)) {
return transitions.get(from).containsPostset(to);
} else {
return places.get(from).containsPostset(to);
}
}
public Abstraction abstractLhpn(Verification pane) {
Abstraction abstraction = new Abstraction(this, pane);
return abstraction;
}
private void parseProperty(StringBuffer data) {
Pattern pattern = Pattern.compile(PROPERTY);
Matcher lineMatcher = pattern.matcher(data.toString());
while (lineMatcher.find()) {
properties.add(lineMatcher.group(1));
}
}
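/*
 * Parses the .inputs, .outputs and .internal lines, setting the port of
 * continuous and integer variables directly and recording boolean names in
 * order. The #@.init_state bit vector ([01X]...) then creates the booleans
 * with initial values true/false/unknown; variables left without a port
 * default to internal.
 */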
private void parseInOut(StringBuffer data) {
Properties varOrder = new Properties();
Pattern inLinePattern = Pattern.compile(INPUT);
Matcher inLineMatcher = inLinePattern.matcher(data.toString());
Integer i = 0;
Integer inLength = 0;
Integer outLength = 0;
if (inLineMatcher.find()) {
Pattern inPattern = Pattern.compile(WORD);
Matcher inMatcher = inPattern.matcher(inLineMatcher.group(1));
while (inMatcher.find()) {
String var = inMatcher.group();
if (isContinuous(var)) {
continuous.get(var).setPort("input");
}
else if (isInteger(var)) {
integers.get(var).setPort("input");
}
else {
varOrder.setProperty(i.toString(), var);
i++;
inLength++;
}
}
}
Pattern outPattern = Pattern.compile(OUTPUT);
Matcher outLineMatcher = outPattern.matcher(data.toString());
if (outLineMatcher.find()) {
Pattern output = Pattern.compile(WORD);
Matcher outMatcher = output.matcher(outLineMatcher.group(1));
while (outMatcher.find()) {
String var = outMatcher.group();
if (isContinuous(var)) {
continuous.get(var).setPort("output");
}
else if (isInteger(var)) {
integers.get(var).setPort("output");
}
else {
varOrder.setProperty(i.toString(), var);
i++;
outLength++;
}
}
}
Pattern internalPattern = Pattern.compile(INTERNAL);
Matcher internalLineMatcher = internalPattern.matcher(data.toString());
if (internalLineMatcher.find()) {
Pattern internal = Pattern.compile(WORD);
Matcher internalMatcher = internal.matcher(internalLineMatcher.group(1));
while (internalMatcher.find()) {
String var = internalMatcher.group();
if (isContinuous(var)) {
continuous.get(var).setPort("internal");
}
else if (isInteger(var)) {
integers.get(var).setPort("internal");
}
else {
varOrder.setProperty(i.toString(), var);
i++;
}
}
}
Pattern initState = Pattern.compile(INIT_STATE);
Matcher initMatcher = initState.matcher(data.toString());
if (initMatcher.find()) {
Pattern initDigit = Pattern.compile("[01X]+");
Matcher digitMatcher = initDigit.matcher(initMatcher.group());
digitMatcher.find();
String[] initArray = new String[digitMatcher.group().length()];
Pattern bit = Pattern.compile("[01X]");
Matcher bitMatcher = bit.matcher(digitMatcher.group());
i = 0;
while (bitMatcher.find()) {
initArray[i] = bitMatcher.group();
i++;
}
for (i = 0; i < inLength; i++) {
String name = varOrder.getProperty(i.toString());
if (initArray[i].equals("1")) {
addInput(name, "true");
} else if (initArray[i].equals("0")) {
addInput(name, "false");
} else {
addInput(name, "unknown");
}
}
for (i = inLength; i < inLength + outLength; i++) {
String name = varOrder.getProperty(i.toString());
if (initArray[i].equals("1") && name != null) {
addOutput(name, "true");
} else if (initArray[i].equals("0") && name != null) {
addOutput(name, "false");
} else {
addOutput(name, "unknown");
}
}
for (i = inLength + outLength; i < initArray.length; i++) {
String name = varOrder.getProperty(i.toString());
if (initArray[i].equals("1") && name != null) {
addBoolean(name, "true");
booleans.get(name).setPort("internal");
} else if (initArray[i].equals("0") && name != null) {
addBoolean(name, "false");
booleans.get(name).setPort("internal");
} else {
addBoolean(name, "unknown");
booleans.get(name).setPort("internal");
}
}
} else {
if (varOrder.size() != 0) {
System.out.println("WARNING: Boolean variables have not been initialized.");
for (i = 0; i < varOrder.size(); i++) {
if (i < inLength) {
addInput(varOrder.getProperty(i.toString()), "unknown");
} else {
addOutput(varOrder.getProperty(i.toString()), "unknown");
}
}
}
}
for (Variable var : continuous.values()) {
if (var.getPort() == null) {
var.setPort("internal");
}
}
for (Variable var : integers.values()) {
if (var.getPort() == null) {
var.setPort("internal");
}
}
for (Variable var : booleans.values()) {
if (var.getPort() == null) {
var.setPort("internal");
}
}
}
private void parseControlFlow(StringBuffer data) {
Pattern pattern = Pattern.compile(TRANSITION);
Matcher lineMatcher = pattern.matcher(data.toString());
if (lineMatcher.find()) {
lineMatcher.group(1);
String name = lineMatcher.group(1).replaceAll("\\+/", "P");
name = name.replaceAll("-/", "M");
Pattern transPattern = Pattern.compile(WORD);
Matcher transMatcher = transPattern.matcher(name);
while (transMatcher.find()) {
addTransition(transMatcher.group());
}
Pattern placePattern = Pattern.compile(PLACE);
Matcher placeMatcher = placePattern.matcher(data.toString());
while (placeMatcher.find()) {
String temp = placeMatcher.group(1).replaceAll("\\+", "P");
temp = temp.replaceAll("-", "M");
String[] tempPlace = temp.split("\\s");
if (isTransition(tempPlace[0])) {
if (!places.containsKey(tempPlace[1])) {
addPlace(tempPlace[1], false);
}
} else {
if (!places.containsKey(tempPlace[0])) {
addPlace(tempPlace[0], false);
}
}
addMovement(tempPlace[0], tempPlace[1]);
}
}
}
private void parseVars(StringBuffer data) {
Properties initCond = new Properties();
Properties initValue = new Properties();
Properties initRate = new Properties();
Pattern linePattern = Pattern.compile(CONTINUOUS);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern varPattern = Pattern.compile(WORD);
Matcher varMatcher = varPattern.matcher(lineMatcher.group(1));
while (varMatcher.find()) {
addContinuous(varMatcher.group());
}
Pattern initLinePattern = Pattern.compile(VARS_INIT);
Matcher initLineMatcher = initLinePattern.matcher(data.toString());
if (initLineMatcher.find()) {
Pattern initPattern = Pattern.compile(INIT_COND);
Matcher initMatcher = initPattern.matcher(initLineMatcher
.group(1));
while (initMatcher.find()) {
if (continuous.containsKey(initMatcher.group(1))) {
initValue.put(initMatcher.group(1), initMatcher
.group(2));
}
}
}
Pattern rateLinePattern = Pattern.compile(INIT_RATE);
Matcher rateLineMatcher = rateLinePattern.matcher(data.toString());
if (rateLineMatcher.find()) {
Pattern ratePattern = Pattern.compile(INIT_COND);
Matcher rateMatcher = ratePattern.matcher(rateLineMatcher
.group(1));
while (rateMatcher.find()) {
initRate.put(rateMatcher.group(1), rateMatcher.group(2));
}
}
for (String s : continuous.keySet()) {
if (initValue.containsKey(s)) {
initCond.put("value", initValue.get(s));
} else {
// The null check was added for the mergeLPN methods' sake. SB
if (continuous.get(s).getInitValue() != null)
    initCond.put("value", continuous.get(s).getInitValue());
else
initCond.put("value", "[-inf,inf]");
}
if (initRate.containsKey(s)) {
initCond.put("rate", initRate.get(s));
} else {
// The null check was added for the mergeLPN methods' sake. SB
if (continuous.get(s).getInitRate() != null)
    initCond.put("rate", continuous.get(s).getInitRate());
else
initCond.put("rate", "[-inf,inf]");
}
addContinuous(s, initCond);
}
}
}
private void parseIntegers(StringBuffer data) {
String initCond = "0";
Properties initValue = new Properties();
Pattern linePattern = Pattern.compile(VARIABLES);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern varPattern = Pattern.compile(WORD);
Matcher varMatcher = varPattern.matcher(lineMatcher.group(1));
while (varMatcher.find()) {
if (!continuous.containsKey(varMatcher.group())) {
addInteger(varMatcher.group(), initCond);
}
}
Pattern initLinePattern = Pattern.compile(VARS_INIT);
Matcher initLineMatcher = initLinePattern.matcher(data.toString());
if (initLineMatcher.find()) {
Pattern initPattern = Pattern.compile(INIT_COND);
Matcher initMatcher = initPattern.matcher(initLineMatcher
.group(1));
while (initMatcher.find()) {
if (integers.containsKey(initMatcher.group(1))) {
initValue.put(initMatcher.group(1), initMatcher
.group(2));
}
}
}
for (String s : integers.keySet()) {
if (initValue.get(s) != null) {
initCond = initValue.get(s).toString();
} else {
// The null check was added for the mergeLPN methods' sake.
if (integers.get(s).getInitValue() != null)
    initCond = integers.get(s).getInitValue();
else
initCond = "[-inf,inf]";
}
addInteger(s, initCond);
}
}
}
private void parsePlaces(StringBuffer data) {
Pattern linePattern = Pattern.compile(PLACES_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern markPattern = Pattern.compile(MARKING);
Matcher markMatcher = markPattern.matcher(lineMatcher.group(1));
while (markMatcher.find()) {
String name = markMatcher.group();
Place place = new Place(name);
places.put(name, place);
}
}
}
private void parseMarking(StringBuffer data) {
Pattern linePattern = Pattern.compile(MARKING_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern markPattern = Pattern.compile(MARKING);
Matcher markMatcher = markPattern.matcher(lineMatcher.group(1));
while (markMatcher.find()) {
places.get(markMatcher.group()).setMarking(true);
}
}
}
private boolean parseEnabling(StringBuffer data) {
boolean error = true;
Pattern linePattern = Pattern.compile(ENABLING_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern enabPattern = Pattern.compile(ENABLING);
Matcher enabMatcher = enabPattern.matcher(lineMatcher.group(1));
while (enabMatcher.find()) {
if (transitions.get(enabMatcher.group(1)).addEnabling(
enabMatcher.group(2)) == false) {
error = false;
}
}
}
return error;
}
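	/**
	 * Parses the #@.assignments section. Each entry assigns an integer or a
	 * continuous variable on a transition; a range [a,b] collapses to the
	 * constant a when both endpoints are equal integers and becomes
	 * uniform(a,b) otherwise. The error flag is cleared to false whenever an
	 * assignment cannot be added.
	 */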
private boolean parseAssign(StringBuffer data, boolean error) {
Pattern linePattern = Pattern.compile(ASSIGNMENT_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
Pattern rangePattern = Pattern.compile(RANGE);
if (lineMatcher.find()) {
Pattern assignPattern = Pattern.compile(ASSIGNMENT);
			Matcher assignMatcher = assignPattern.matcher(lineMatcher.group(1)
					.replaceAll("\\s", "")); // replaceAll: "\\s" is a regex; replace() would treat it as a literal
Pattern varPattern = Pattern.compile(ASSIGN_VAR);
Matcher varMatcher;
while (assignMatcher.find()) {
Transition transition = transitions.get(assignMatcher.group(1));
varMatcher = varPattern.matcher(assignMatcher.group(2));
if (varMatcher.find()) {
String variable = varMatcher.group(1);
String assignment = varMatcher.group(2);
if (isInteger(variable)) {
						Matcher rangeMatcher = rangePattern.matcher(assignment);
						if (rangeMatcher.find()) {
							// A degenerate range [a,a] collapses to the
							// constant a; any other range becomes a uniform
							// distribution.
							if (rangeMatcher.group(1).matches(INTEGER)
									&& rangeMatcher.group(2).matches(INTEGER)
									&& Integer.parseInt(rangeMatcher.group(1)) == Integer
											.parseInt(rangeMatcher.group(2))) {
								if (transition.addIntAssign(variable,
										rangeMatcher.group(1)) == false) {
									error = false;
								}
							} else if (transition.addIntAssign(variable, "uniform("
									+ rangeMatcher.group(1) + ","
									+ rangeMatcher.group(2) + ")") == false) {
								error = false;
							}
} else {
if (transition.addIntAssign(variable, assignment) == false) {
error = false;
}
}
} else {
						Matcher rangeMatcher = rangePattern.matcher(assignment);
						if (rangeMatcher.find()) {
							// Same collapsing rule as for integer assignments
							// above.
							if (rangeMatcher.group(1).matches(INTEGER)
									&& rangeMatcher.group(2).matches(INTEGER)
									&& Integer.parseInt(rangeMatcher.group(1)) == Integer
											.parseInt(rangeMatcher.group(2))) {
								if (transition.addContAssign(variable,
										rangeMatcher.group(1)) == false) {
									error = false;
								}
							} else if (transition.addContAssign(variable, "uniform("
									+ rangeMatcher.group(1) + ","
									+ rangeMatcher.group(2) + ")") == false) {
								error = false;
							}
} else if (transition.addContAssign(variable,
assignment) == false) {
error = false;
}
}
}
}
}
return error;
}
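	/**
	 * Parses the #@.rate_assignments section, attaching rate assignments for
	 * continuous variables to transitions. Ranges are handled exactly as in
	 * parseAssign.
	 */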
private boolean parseRateAssign(StringBuffer data, boolean error) {
Pattern linePattern = Pattern.compile(RATE_ASSIGNMENT_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
Pattern rangePattern = Pattern.compile(RANGE);
if (lineMatcher.find()) {
Pattern assignPattern = Pattern.compile(ASSIGNMENT);
			Matcher assignMatcher = assignPattern.matcher(lineMatcher.group(1)
					.replaceAll("\\s", ""));
Pattern varPattern = Pattern.compile(ASSIGN_VAR);
Matcher varMatcher;
while (assignMatcher.find()) {
Transition transition = transitions.get(assignMatcher.group(1));
varMatcher = varPattern.matcher(assignMatcher.group(2));
while (varMatcher.find()) {
String variable = varMatcher.group(1);
String assignment = varMatcher.group(2);
					Matcher rangeMatcher = rangePattern.matcher(assignment);
					if (rangeMatcher.find()) {
						// Same collapsing rule as in parseAssign: [a,a]
						// becomes the constant a, anything else becomes
						// uniform(a,b).
						if (rangeMatcher.group(1).matches(INTEGER)
								&& rangeMatcher.group(2).matches(INTEGER)
								&& Integer.parseInt(rangeMatcher.group(1)) == Integer
										.parseInt(rangeMatcher.group(2))) {
							if (transition.addRateAssign(variable,
									rangeMatcher.group(1)) == false) {
								error = false;
							}
						} else if (transition.addRateAssign(variable, "uniform("
								+ rangeMatcher.group(1) + ","
								+ rangeMatcher.group(2) + ")") == false) {
							error = false;
						}
} else if (transition.addRateAssign(variable, assignment) == false) {
error = false;
}
}
}
}
return error;
}
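	/**
	 * Parses the #@.delay_assignments section. A range [a,a] collapses to
	 * the constant a and [a,b] becomes uniform(a,b); any transition left
	 * without a delay afterwards defaults to "0".
	 */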
private boolean parseDelayAssign(StringBuffer data, boolean error) {
Pattern linePattern = Pattern.compile(DELAY_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern delayPattern = Pattern.compile(DELAY);
			Matcher delayMatcher = delayPattern.matcher(lineMatcher.group(1)
					.replaceAll("\\s", ""));
while (delayMatcher.find()) {
Transition transition = transitions.get(delayMatcher.group(1));
Pattern rangePattern = Pattern.compile(RANGE);
Matcher rangeMatcher = rangePattern.matcher(delayMatcher
.group(2));
String delay;
if (rangeMatcher.find()) {
if (rangeMatcher.group(1).equals(rangeMatcher.group(2))) {
delay = rangeMatcher.group(1);
} else {
delay = "uniform(" + rangeMatcher.group(1) + ","
+ rangeMatcher.group(2) + ")";
}
} else {
delay = delayMatcher.group(2);
}
if (transition.addDelay(delay) == false) {
error = false;
}
}
}
for (Transition t : transitions.values()) {
if (t.getDelay() == null) {
t.addDelay("0");
}
}
return error;
}
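	/**
	 * Parses the #@.priority_assignments section and attaches each priority
	 * to its transition.
	 */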
private boolean parsePriorityAssign(StringBuffer data, boolean error) {
Pattern linePattern = Pattern.compile(PRIORITY_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern priorityPattern = Pattern.compile(PRIORITY);
			Matcher priorityMatcher = priorityPattern.matcher(lineMatcher
					.group(1).replaceAll("\\s", ""));
while (priorityMatcher.find()) {
Transition transition = transitions.get(priorityMatcher
.group(1));
String priority = priorityMatcher.group(2);
if (transition.addPriority(priority) == false) {
error = false;
}
}
}
return error;
}
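	/**
	 * Parses the #@.boolean_assignments section and attaches each boolean
	 * assignment to its transition.
	 */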
private boolean parseBooleanAssign(StringBuffer data, boolean error) {
Pattern linePattern = Pattern.compile(BOOLEAN_LINE);
		Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern transPattern = Pattern.compile(BOOLEAN_TRANS);
			Matcher transMatcher = transPattern.matcher(lineMatcher.group(1)
					.replaceAll("\\s", ""));
Pattern assignPattern = Pattern.compile(BOOLEAN_ASSIGN);
while (transMatcher.find()) {
Transition transition = transitions.get(transMatcher.group(1));
Matcher assignMatcher = assignPattern.matcher(transMatcher
.group(2));
				// Attach every boolean assignment of this transition; a
				// single pass exhausts the matcher.
				while (assignMatcher.find()) {
					String variable = assignMatcher.group(1);
					String assignment = assignMatcher.group(2);
					if (transition.addBoolAssign(variable, assignment) == false) {
						error = false;
					}
				}
}
}
return error;
}
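	/**
	 * Parses the #@.transition_rates section; each rate r listed for a
	 * transition is stored as the delay exponential(r).
	 */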
private boolean parseTransitionRate(StringBuffer data, boolean error) {
Pattern linePattern = Pattern.compile(TRANS_RATE_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
Pattern delayPattern = Pattern.compile(ENABLING);
Matcher delayMatcher = delayPattern.matcher(lineMatcher.group(1));
while (delayMatcher.find()) {
Transition transition = transitions.get(delayMatcher.group(1));
if (transition.addDelay("exponential(" + delayMatcher.group(2)
+ ")") == false) {
error = false;
}
}
}
return error;
}
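	/**
	 * Parses the #@.failtrans line and flags each listed transition as a
	 * failure transition.
	 */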
private void parseFailTransitions(StringBuffer data) {
Pattern linePattern = Pattern.compile(FAIL_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
for (String s : lineMatcher.group(1).split("\\s")) {
if (!s.equals("")) {
transitions.get(s).setFail(true);
}
}
}
}
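	/**
	 * Parses the #@.non_disabling line and flags each listed transition as
	 * persistent (non-disabling).
	 */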
private void parsePersistentTransitions(StringBuffer data) {
Pattern linePattern = Pattern.compile(PERSISTENT_LINE);
Matcher lineMatcher = linePattern.matcher(data.toString());
if (lineMatcher.find()) {
for (String s : lineMatcher.group(1).split("\\s")) {
if (!s.equals("")) {
transitions.get(s).setPersistent(true);
}
}
}
}
/**
* This method extracts the boolean variables associated with inequalities
* involved in the boolean assignments and the enabling conditions on
* transitions.
*/
public void parseBooleanInequalities(){
/*
* The expression trees for the boolean assignment and the enabling
* conditions are contained in the Transitions.
*/
HashMap<InequalityVariable, InequalityVariable> inequalities =
new HashMap<InequalityVariable, InequalityVariable>();
for(Transition T : transitions.values()){
// Extract the inequalities from the boolean expression.
for(ExprTree E : T.getBoolAssignTrees().values()){
parseBooleanInequalities(E, inequalities);
}
// Extract the inequalities from the enabling condition.
parseBooleanInequalities(T.getEnablingTree(), inequalities, T);
}
// Add the inequalities to the booleans and variables.
for(InequalityVariable iv : inequalities.keySet()){
booleans.put("$" + iv.toString(), iv);
variables.add(iv);
}
}
/**
* Extracts the boolean variables associated with inequalities from a
* single ExprTree. This method is not meant for use with enabling
* conditions since it will not register the transition with the
* inequality variable.
* @param ET
* The expression tree for which the inequalities are extracted.
*/
private void parseBooleanInequalities(ExprTree ET,
HashMap<InequalityVariable, InequalityVariable> previousInequalities){
parseBooleanInequalities(ET, previousInequalities, null);
}
/**
* Extracts the boolean variables associated with inequalities from a
* single ExprTree.
* @param ET
* The expression tree for which the inequalities are extracted.
	 * @param T
	 * 		If this expression tree is from an enabling condition, the
	 * 		Transition whose enabling condition gave rise to this expression
	 * 		tree; null otherwise.
*/
private void parseBooleanInequalities(ExprTree ET,
HashMap<InequalityVariable, InequalityVariable> previousInequalities, Transition T){
		/*
		 * This method serves as a driver for a recursive-style search.
		 * The basic idea is to explore the tree until an inequality is found,
		 * create a new variable for the inequality including the subtree rooted
		 * at that node, and replace the node with a boolean variable.
		 */
// Create a list of operators to match.
String[] operators = new String[]{"<", "<=", ">", ">=", "="};
// Get the nodes containing inequalities.
ArrayList<ExprTree> inequalities = new ArrayList<ExprTree>();
findInequalityNodes(ET, operators, inequalities);
parseBooleanInequalities(inequalities, previousInequalities, T);
}
/**
* Extracts boolean inequalities at the nodes of relational operators provided.
* @param inequalities
* An ExprTree array containing nodes whose roots are a relational
* operator.
	 * @param T
	 * 		If this expression tree is from an enabling condition, the
	 * 		Transition whose enabling condition gave rise to this expression
	 * 		tree; null otherwise.
*/
	private void parseBooleanInequalities(ArrayList<ExprTree> inequalities,
			HashMap<InequalityVariable, InequalityVariable> previousInequalities, Transition T){
// For each node, create an InequalityVariable, add it to the set of variables,
// and replace the current node of the ExprTree with the InequaltiyVariable.
// HashMap<InequalityVariable, InequalityVariable> variableMap =
// new HashMap<InequalityVariable, InequalityVariable>();
for(ExprTree ET : inequalities){ // ET phone home.
// Extract the expression for naming the new InequalityVariable.
String booleanName = "$" + ET.toString();
// Create the InequalityVariable.
InequalityVariable newVariable = new InequalityVariable(booleanName, "false",
ET.shallowclone(), this);
// Check if the Variable is present already.
Variable v = booleans.get(booleanName);
if(v != null){
// Check if it is an InequalityVariable.
if(!(v instanceof InequalityVariable)){
throw new IllegalStateException("Name collision. The extracted "
+ "name for an InequalityVariable matches a name already "
+ "given to a boolean variable.");
}
				else{
					// Nothing to do: reference counting is not needed anymore
					// since the list is no longer changed dynamically.
					// InequalityVariable iv = (InequalityVariable) v;
					// iv.increaseCount();
				}
}
else{
// Register variable with the continuous variable.
// This is taken care of by the constructor.
// TODO : finish.
// Add the variable
//booleans.put(booleanName, newVariable);
//variables.add(newVariable);
// Check if we have seen this variable before.
InequalityVariable seenBefore = previousInequalities.get(newVariable);
if(seenBefore == null){
// We have not seen this variable before, so add it to the
// list.
previousInequalities.put(newVariable, newVariable);
if(T != null){
// If there is a transition, register it.
newVariable.addTransition(T);
}
}
else if(T != null){
// We've seen this variable before. So no need to add it. Just
// need to register the transition. If the transition is null
// there is nothing to do.
seenBefore.addTransition(T);
}
}
			// Replace the node with a boolean value.
			// Set the type.
ET.isit = 'b';
ET.logical = true;
// Change the name.
ET.variable = booleanName;
// Change the op.
ET.op = "";
// Remove the branches.
ET.r1 = null;
ET.r2 = null;
}
//return variableMap;
}
/**
* Searches an expression tree and finds the nodes that contain an operator.
* @param ET
* The ExprTree to search.
* @param operators
* The operators to find.
* @param nodes
* The list to add the found nodes to.
*/
private void findInequalityNodes(ExprTree ET, String[] operators,
ArrayList<ExprTree> nodes){
// Check if ET node is null, if so return. (Base case 1 for recursion.)
if(ET == null){
return;
}
// Check if ET is a node we want. (Base case 2 for recursion.
// Relations cannot be nested.)
for(int i=0; i<operators.length; i++){
// Extract the operators to check.
String op = ET.getOp();
String oplist = operators[i];
// Avoid issue of leading and trailing spaces.
op = op.trim();
oplist = oplist.trim();
if(oplist.equals(op)){
// If the two are equal, then this node is an inequality.
// Check that this inequality involves a continuous variable
// and not a discrete variable which is handled by the
// untimed code currently.
if(ET.containsExactlyCont()){
					// Add it to the list; the search stops at this node
					// since relations cannot be nested.
nodes.add(ET);
}
return;
}
}
// The node is not an inequality so search the children trees.
findInequalityNodes(ET.getLeftChild(), operators, nodes);
findInequalityNodes(ET.getRightChild(), operators, nodes);
}
public int getTotalNumberOfContVars(){
return continuous.size();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((booleans == null) ? 0 : booleans.hashCode());
result = prime * result
+ ((continuous == null) ? 0 : continuous.hashCode());
result = prime * result
+ ((integers == null) ? 0 : integers.hashCode());
result = prime * result + ((label == null) ? 0 : label.hashCode());
result = prime * result + lpnIndex;
result = prime * result
+ ((variables == null) ? 0 : variables.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
LhpnFile other = (LhpnFile) obj;
if (booleans == null) {
if (other.booleans != null)
return false;
} else if (!booleans.equals(other.booleans))
return false;
if (continuous == null) {
if (other.continuous != null)
return false;
} else if (!continuous.equals(other.continuous))
return false;
if (integers == null) {
if (other.integers != null)
return false;
} else if (!integers.equals(other.integers))
return false;
if (label == null) {
if (other.label != null)
return false;
} else if (!label.equals(other.label))
return false;
if (lpnIndex != other.lpnIndex)
return false;
if (variables == null) {
if (other.variables != null)
return false;
} else if (!variables.equals(other.variables))
return false;
return true;
}
	/**
	 * Fills the integer arrays varIndexArrayThisLpn and varIndexArrayOtherLpn.
	 * Both arrays store the indices of the variables shared between this LPN
	 * and otherLpn: entry k holds the index of the k-th shared variable in
	 * this LPN and in otherLpn, respectively.
	 * @param varIndexArrayThisLpn indices of the shared variables in this LPN
	 * @param varIndexArrayOtherLpn indices of the shared variables in otherLpn
	 * @param otherLpn the LPN to compare interfaces with
	 */
public void genIndexLists(int[] varIndexArrayThisLpn, int[] varIndexArrayOtherLpn, LhpnFile otherLpn){
int arrayIndex = 0;
DualHashMap<String, Integer> otherVarIndexMap = otherLpn.getVarIndexMap();
String[] interfaceVars = otherLpn.getInterfaceVariables();
for(int i = 0; i < interfaceVars.length; i++){
String var = interfaceVars[i];
Integer thisIndex = this._varIndexMap.getValue(var);
if(thisIndex != null){
varIndexArrayThisLpn[arrayIndex] = thisIndex;
varIndexArrayOtherLpn[arrayIndex] = otherVarIndexMap.getValue(var);
arrayIndex++;
}
}
}
private String[] getInterfaceVariables() {
int size = getAllInputs().keySet().size() + getAllOutputs().keySet().size();
String[] interfaceVariables = new String[size];
HashSet<String> interfaceSet = new HashSet<String>();
int i = 0;
for(String input : getAllInputs().keySet()){
interfaceVariables[i++] = input;
interfaceSet.add(input);
}
for(String output : getAllOutputs().keySet()){
if(interfaceSet.contains(output))
continue;
interfaceVariables[i++] = output;
}
return interfaceVariables;
}
public void setThisIndexList(List<int[]> indexList){
this.thisIndexList = indexList;
}
public void setOtherIndexList(List<int[]> indexList){
this.otherIndexList = indexList;
}
public List<int[]> getThisIndexList(){
return this.thisIndexList;
}
public List<int[]> getOtherIndexList(){
return this.otherIndexList;
}
public int[] getThisIndexArray(int i){
return this.thisIndexList.get(i);
}
public int[] getOtherIndexArray(int i){
return this.otherIndexList.get(i);
}
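	/*
	 * Regular expressions for the sections of the textual LPN file format
	 * consumed by the parse* methods above. As an illustrative sketch (not a
	 * complete file), the sections these patterns match look like:
	 *
	 *   #@.variables x y
	 *   #@.init_vals {<x=0><y=1>}
	 *   #@.enablings {<t0=[x>0]>}
	 *   #@.assignments {<t0=[x:=[0,5]]>}
	 *   #@.delay_assignments {<t0=[2,4]>}
	 */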
private static final String PROPERTY = "#@\\.property ([^@]*)\\n";
private static final String INPUT = "\\.inputs([[\\s[^\\n]]\\w+]*?)\\n";
private static final String OUTPUT = "\\.outputs([[\\s[^\\n]]\\w+]*?)\\n";
private static final String INTERNAL = "\\.internal([[\\s[^\\n]]\\w+]*?)\\n";
private static final String INIT_STATE = "#@\\.init_state \\[(\\w*)\\]";
private static final String TRANSITION = "\\.dummy([^\\n]*?)\\n";
private static final String WORD = "(\\S+)";
private static final String INTEGER = "([-\\d]+)";
	private static final String PLACE = "\\n([\\w_\\+-/&&[^\\.]]+)"; // NOTE: this literal was truncated in this copy; the closing "]]+)\"" is a reconstruction
private static final String CONTINUOUS = "#@\\.continuous ([.[^\\n]]*)\\n";
private static final String VARS_INIT = "#@\\.init_vals \\{([\\S[^\\}]]*?)\\}";
private static final String INIT_RATE = "#@\\.init_rates \\{([\\S[^\\}]]*?)\\}";
private static final String INIT_COND = "<(\\w+)=([\\S^>]*?)>";
private static final String VARIABLES = "#@\\.variables ([.[^\\n]]*)\\n";
private static final String PLACES_LINE = "#\\|\\.places ([.[^\\n]]*)\\n";
private static final String MARKING = "\\w+";
private static final String MARKING_LINE = "\\.marking \\{(.*)\\}";
private static final String ENABLING_LINE = "#@\\.enablings \\{([.[^\\}]]*?)\\}";
private static final String ENABLING = "<([\\S[^=]]+?)=\\[([^\\]]+?)\\]>?";
private static final String ASSIGNMENT_LINE = "#@\\.assignments \\{([.[^\\}]]*?)\\}";
private static final String RATE_ASSIGNMENT_LINE = "#@\\.rate_assignments \\{([.[^\\}]]*?)\\}";
private static final String ASSIGNMENT = "<([\\S[^=]]+?)=\\[(\\S+?)\\]>";
private static final String ASSIGN_VAR = "([^:]+?):=(.+)";
private static final String DELAY_LINE = "#@\\.delay_assignments \\{([\\S[^\\}]]*?)\\}";
private static final String DELAY = "<([\\w_]+)=\\[(\\S+?)\\]>";
private static final String RANGE = "\\[([\\w-]+?),([\\w-]+?)\\]";
private static final String PRIORITY_LINE = "#@\\.priority_assignments \\{([\\S[^\\}]]*?)\\}";
private static final String PRIORITY = "<([\\w_]+)=\\[(\\S+?)\\]>";
private static final String TRANS_RATE_LINE = "#@\\.transition_rates \\{([\\S[^\\}]]*?)\\}";
private static final String FAIL_LINE = "#@\\.failtrans ([.[^\\n]]*)\\n";
private static final String PERSISTENT_LINE = "#@\\.non_disabling ([.[^\\n]]*)\\n";
private static final String BOOLEAN_LINE = "#@\\.boolean_assignments \\{([\\S[^\\}]]*?)\\}";
private static final String BOOLEAN_TRANS = "<(\\S+?)=\\[(\\S*?)\\]>";
private static final String BOOLEAN_ASSIGN = "([^:]+?):=(.+)";
@Override
public String toString() {
return "LhpnFile [label=" + label + ", lpnIndex=" + lpnIndex + "]";
}
public StateGraph getStateGraph() {
return this.stateGraph;
}
public void addStateGraph(StateGraph stateGraph) {
if (this.stateGraph == null)
this.stateGraph = stateGraph;
}
}
package owltools.cli;
import java.awt.Color;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntaxEditorParser;
import org.obolibrary.oboformat.model.FrameMergeException;
import org.semanticweb.elk.owlapi.ElkReasonerFactory;
import org.semanticweb.owlapi.expression.OWLEntityChecker;
import org.semanticweb.owlapi.expression.ParserException;
import org.semanticweb.owlapi.expression.ShortFormEntityChecker;
import org.semanticweb.owlapi.io.OWLFunctionalSyntaxOntologyFormat;
import org.semanticweb.owlapi.io.RDFXMLOntologyFormat;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom;
import org.semanticweb.owlapi.model.OWLAnnotationSubject;
import org.semanticweb.owlapi.model.OWLAnonymousClassExpression;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
import org.semanticweb.owlapi.model.OWLClassAxiom;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLDeclarationAxiom;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLNamedObject;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyFormat;
import org.semanticweb.owlapi.model.OWLOntologyID;
import org.semanticweb.owlapi.model.OWLOntologyIRIMapper;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyStorageException;
import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
import org.semanticweb.owlapi.model.RemoveAxiom;
import org.semanticweb.owlapi.model.SetOntologyID;
import org.semanticweb.owlapi.reasoner.FreshEntityPolicy;
import org.semanticweb.owlapi.reasoner.InferenceType;
import org.semanticweb.owlapi.reasoner.Node;
import org.semanticweb.owlapi.reasoner.NodeSet;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
import org.semanticweb.owlapi.reasoner.SimpleConfiguration;
import org.semanticweb.owlapi.util.AutoIRIMapper;
import org.semanticweb.owlapi.util.BidirectionalShortFormProviderAdapter;
import org.semanticweb.owlapi.util.SimpleShortFormProvider;
import org.semanticweb.owlapi.vocab.OWLRDFVocabulary;
import owltools.gaf.GafDocument;
import owltools.gaf.GafObjectsBuilder;
import owltools.gaf.GeneAnnotation;
import owltools.gaf.inference.AnnotationPredictor;
import owltools.gaf.inference.CompositionalClassPredictor;
import owltools.gaf.inference.Prediction;
import owltools.gfx.GraphicsConfig;
import owltools.gfx.GraphicsConfig.RelationConfig;
import owltools.gfx.OWLGraphLayoutRenderer;
import owltools.graph.OWLGraphEdge;
import owltools.graph.OWLGraphWrapper;
import owltools.graph.OWLQuantifiedProperty;
import owltools.graph.OWLQuantifiedProperty.Quantifier;
import owltools.idmap.IDMapPairWriter;
import owltools.idmap.IDMappingPIRParser;
import owltools.idmap.UniProtIDMapParser;
import owltools.io.ChadoGraphClosureRenderer;
import owltools.io.CompactGraphClosureReader;
import owltools.io.CompactGraphClosureRenderer;
import owltools.io.GraphClosureRenderer;
import owltools.io.GraphReader;
import owltools.io.GraphRenderer;
import owltools.io.OWLPrettyPrinter;
import owltools.io.ParserWrapper;
import owltools.io.TableToAxiomConverter;
import owltools.mooncat.Mooncat;
import owltools.ontologyrelease.OntologyMetadata;
import owltools.sim.DescriptionTreeSimilarity;
import owltools.sim.MultiSimilarity;
import owltools.sim.OWLObjectPair;
import owltools.sim.Reporter;
import owltools.sim.SimEngine;
import owltools.sim.SimEngine.SimilarityAlgorithmException;
import owltools.sim.SimSearch;
import owltools.sim.Similarity;
import uk.ac.manchester.cs.factplusplus.owlapiv3.FaCTPlusPlusReasonerFactory;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;
import de.derivo.sparqldlapi.Query;
import de.derivo.sparqldlapi.QueryEngine;
import de.derivo.sparqldlapi.QueryResult;
import de.derivo.sparqldlapi.exceptions.QueryEngineException;
import de.derivo.sparqldlapi.exceptions.QueryParserException;
import de.tudresden.inf.lat.jcel.owlapi.main.JcelReasoner;
/**
 * An instance of this class can execute owltools commands in sequence.
 *
 * This is typically wrapped from within a main() method.
 *
 * @author cjm
 */
public class CommandRunner {
private static Logger LOG = Logger.getLogger(CommandRunner.class);
OWLGraphWrapper g = null;
GafDocument gafdoc = null;
OWLOntology queryOntology = null;
Map<OWLClass,OWLClassExpression> queryExpressionMap = null;
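	/**
	 * A simple cursor over the command-line arguments: nextEq() consumes the
	 * current argument when it matches (alternatives may be written "a|b"),
	 * nextOpt() consumes and returns it, and info() prints a usage line and
	 * exits when the following argument is -h or --help. A typical loop is:
	 *
	 *   while (opts.hasArgs()) { if (opts.nextEq("--foo")) { ... } }
	 */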
public class Opts {
int i = 0;
String[] args;
boolean helpMode = false;
public Opts(String[] args) {
super();
this.i = 0;
this.args = args;
}
public Opts(List<String> args) {
super();
this.i = 0;
this.args = args.toArray(new String[args.size()]);
}
public boolean hasArgs() {
return i < args.length;
}
public boolean hasOpts() {
return hasArgs() && args[i].startsWith("-");
}
public boolean nextEq(String eq) {
if (helpMode) {
System.out.println(" "+eq);
return false;
}
if (eq.contains("|")) {
return nextEq(eq.split("\\|"));
}
if (hasArgs()) {
if (args[i].equals(eq)) {
i++;
return true;
}
}
return false;
}
private boolean nextEq(String[] eqs) {
for (String eq : eqs) {
if (nextEq(eq))
return true;
}
return false;
}
public boolean hasOpt(String opt) {
for (int j=i; j<args.length; j++) {
if (args[j].equals(opt))
return true;
}
return false;
}
public boolean nextEq(Collection<String> eqs) {
for (String eq : eqs) {
if (nextEq(eq))
return true;
}
return false;
}
public String nextOpt() {
String opt = args[i];
i++;
return opt;
}
public String peekArg() {
if (hasArgs())
return args[i];
return null;
}
public boolean nextArgIsHelp() {
if (hasArgs() && (args[i].equals("-h")
|| args[i].equals("--help"))) {
nextOpt();
return true;
}
return false;
}
public void fail() {
System.err.println("cannot process: "+args[i]);
System.exit(1);
}
public void info(String params, String desc) {
if (this.nextArgIsHelp()) {
System.out.println(args[i-2]+" "+params+"\t "+desc);
System.exit(0);
}
}
}
public class OptionException extends Exception {
public OptionException(String msg) {
super(msg);
}
}
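	/**
	 * Splits a command string on spaces into an argument list; runs of
	 * spaces are collapsed and no quoting is supported.
	 */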
public List<String> parseArgString(String str) {
List<String> args = new ArrayList<String>();
int p = 0;
StringBuffer ns = new StringBuffer();
while (p < str.length()) {
if (str.charAt(p) == ' ') {
if (ns.length() > 0) {
args.add(ns.toString());
ns = new StringBuffer();
}
}
else {
ns.append(str.charAt(p));
}
p++;
}
if (ns.length() > 0) {
args.add(ns.toString());
}
return args;
}
public void run(String[] args) throws OWLOntologyCreationException, IOException, FrameMergeException, SimilarityAlgorithmException, OWLOntologyStorageException, OptionException, URISyntaxException {
Opts opts = new Opts(args);
run(opts);
}
public void run(Opts opts) throws OWLOntologyCreationException, IOException, FrameMergeException, SimilarityAlgorithmException, OWLOntologyStorageException, OptionException, URISyntaxException {
List<String> paths = new ArrayList<String>();
String reasonerClassName = "com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory";
OWLReasoner reasoner = null;
String reasonerName = "pellet";
boolean createNamedRestrictions = false;
boolean createDefaultInstances = false;
boolean merge = false;
OWLOntology simOnt = null;
Set<OWLSubClassOfAxiom> removedSubClassOfAxioms = null;
OWLPrettyPrinter owlpp;
GraphicsConfig gfxCfg = new GraphicsConfig();
//Configuration config = new PropertiesConfiguration("owltools.properties");
String similarityAlgorithmName = "JaccardSimilarity";
ParserWrapper pw = new ParserWrapper();
while (opts.hasArgs()) {
if (opts.nextArgIsHelp()) {
help();
opts.helpMode = true;
}
//String opt = opts.nextOpt();
//System.out.println("processing arg: "+opt);
if (opts.nextEq("--pellet")) {
reasonerClassName = "com.clarkparsia.pellet.owlapiv3.Reasoner";
reasonerName = "pellet";
}
else if (opts.nextEq("--hermit")) {
reasonerClassName = "org.semanticweb.HermiT.Reasoner";
reasonerName = "hermit";
}
else if (opts.nextEq("--reasoner")) {
reasonerName = opts.nextOpt();
g.setReasoner(createReasoner(g.getSourceOntology(),reasonerName,g.getManager()));
}
else if (opts.nextEq("--no-reasoner")) {
reasonerClassName = "";
reasonerName = "";
}
else if (opts.nextEq("-r") || opts.nextEq("--namerestr")) {
createNamedRestrictions = true;
}
else if (opts.nextEq("-i") || opts.nextEq("--inst")) {
createDefaultInstances = true;
}
else if (opts.nextEq("--log-info")) {
Logger.getRootLogger().setLevel(Level.INFO);
}
else if (opts.nextEq("--log-debug")) {
Logger.getRootLogger().setLevel(Level.DEBUG);
}
else if (opts.nextEq("--no-debug")) {
Logger.getRootLogger().setLevel(Level.OFF);
}
else if (opts.nextEq("--monitor-memory")) {
g.getConfig().isMonitorMemory = true;
}
else if (opts.nextEq("--list-classes")) {
Set<OWLClass> clss = g.getSourceOntology().getClassesInSignature();
for (OWLClass c : clss) {
System.out.println(c);
}
}
else if (opts.nextEq("--query-ontology")) {
opts.info("[-m]", "specify an ontology that has classes to be used as queries. See also: --reasoner-query");
boolean isMerge = false;
while (opts.hasOpts()) {
if (opts.nextEq("-m"))
isMerge = true;
else
opts.nextOpt();
}
queryOntology = pw.parse(opts.nextOpt());
queryExpressionMap = new HashMap<OWLClass,OWLClassExpression>();
for (OWLClass qc : queryOntology.getClassesInSignature()) {
for (OWLClassExpression ec : qc.getEquivalentClasses(queryOntology)) {
queryExpressionMap.put(qc, ec);
}
}
if (isMerge) {
g.mergeOntology(queryOntology);
}
}
else if (opts.nextEq("--merge")) {
opts.info("ONT", "merges ONT into current source ontology");
g.mergeOntology(pw.parse(opts.nextOpt()));
}
else if (opts.nextEq("--map-iri")) {
//OWLOntologyIRIMapper iriMapper = new SimpleIRIMapper();
}
else if (opts.nextEq("--auto-iri")) {
File file = new File(opts.nextOpt());
OWLOntologyIRIMapper iriMapper = new AutoIRIMapper(file, false);
pw.getManager().addIRIMapper(iriMapper);
}
else if (opts.nextEq("--remove-imports-declarations")) {
OWLOntology ont = g.getManager().createOntology(g.getSourceOntology().getOntologyID().getOntologyIRI());
for (OWLAxiom a : g.getSourceOntology().getAxioms()) {
g.getManager().addAxiom(ont, a);
}
g.setSourceOntology(ont);
}
else if (opts.nextEq("--create-ontology")) {
String iri = opts.nextOpt();
if (!iri.startsWith("http:")) {
iri = "http://purl.obolibrary.org/obo/"+iri;
}
g = new OWLGraphWrapper(iri);
}
else if (opts.nextEq("--merge-support-ontologies")) {
for (OWLOntology ont : g.getSupportOntologySet())
g.mergeOntology(ont);
g.setSupportOntologySet(new HashSet<OWLOntology>());
}
else if (opts.nextEq("--add-support-from-imports")) {
g.addSupportOntologiesFromImportsClosure();
}
else if (opts.nextEq("-m") || opts.nextEq("--mcat")) {
catOntologies(opts);
}
else if (opts.nextEq("--info")) {
opts.info("","show ontology statistics");
for (OWLOntology ont : g.getAllOntologies()) {
summarizeOntology(ont);
}
}
else if (opts.nextEq("--save-closure")) {
opts.info("[-c] FILENAME", "write out closure of graph.");
GraphRenderer gcw;
if (opts.nextEq("-c")) {
opts.info("", "compact storage option.");
gcw = new CompactGraphClosureRenderer(opts.nextOpt());
}
else {
gcw = new GraphClosureRenderer(opts.nextOpt());
}
gcw.render(g);
}
else if (opts.nextEq("--read-closure")) {
opts.info("FILENAME", "reads closure previously saved using --save-closure (compact format only)");
GraphReader gr = new CompactGraphClosureReader(g);
gr.read(opts.nextOpt());
LOG.info("RESTORED CLOSURE CACHE");
LOG.info("size="+g.inferredEdgeBySource.size());
}
else if (opts.nextEq("--save-closure-for-chado")) {
opts.info("OUTPUTFILENAME",
"saves the graph closure in a format that is oriented towards loading into a Chado database");
boolean isChain = opts.nextEq("--chain");
ChadoGraphClosureRenderer gcw = new ChadoGraphClosureRenderer(opts.nextOpt());
gcw.isChain = isChain;
gcw.render(g);
}
else if (opts.nextEq("--make-taxon-set")) {
String idspace = null;
if (opts.nextEq("-s"))
idspace = opts.nextOpt();
owlpp = new OWLPrettyPrinter(g);
OWLClass tax = (OWLClass)this.resolveEntity(opts);
Set<OWLObject> taxAncs = g.getAncestorsReflexive(tax);
Set<OWLClass> taxSet = new HashSet<OWLClass>();
for (OWLClass c : g.getSourceOntology().getClassesInSignature()) {
String cid = g.getIdentifier(c);
if (idspace != null && !cid.startsWith(idspace+":"))
continue;
boolean isExcluded = false;
for (OWLGraphEdge e : g.getOutgoingEdgesClosure(c)) {
if (isExcluded)
break;
for (OWLGraphEdge te : g.getOutgoingEdges(e.getTarget())) {
OWLObjectProperty tp = te.getSingleQuantifiedProperty().getProperty();
if (tp != null) {
String tpl = g.getLabel(tp);
if (tpl == null)
continue;
OWLObject rt = te.getTarget();
// temp hack until RO is stable
if (tpl.equals("only_in_taxon") || tpl.equals("only in taxon")) {
if (!taxAncs.contains(rt) &&
!g.getAncestors(rt).contains(tax)) {
isExcluded = true;
break;
}
}
else if (tpl.equals("never_in_taxon") || tpl.equals("never in taxon")) {
if (taxAncs.contains(rt)) {
isExcluded = true;
break;
}
}
}
}
}
if (isExcluded) {
LOG.info("excluding: "+owlpp.render(c));
}
else {
taxSet.add(c);
System.out.println(cid);
}
}
}
else if (opts.nextEq("--query-cw")) {
opts.info("", "closed-world query");
owlpp = new OWLPrettyPrinter(g);
for (OWLClass qc : queryExpressionMap.keySet()) {
System.out.println(" CWQueryClass: "+qc);
System.out.println(" CWQueryClass: "+owlpp.render(qc)+" "+qc.getIRI().toString());
OWLClassExpression ec = queryExpressionMap.get(qc);
System.out.println(" CWQueryExpression: "+owlpp.render(ec));
Set<OWLObject> results = g.queryDescendants(ec);
for (OWLObject result : results) {
if (result instanceof OWLClass) {
System.out.println(" "+owlpp.render((OWLClass)result));
}
}
}
}
else if (opts.nextEq("--sparql-dl")) {
opts.info("\"QUERY-TEXT\"", "executes a SPARQL-DL query using the reasoner");
/* Examples:
* SELECT * WHERE { SubClassOf(?x,?y)}
*/
if (reasoner == null) {
reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager());
}
String q = opts.nextOpt();
System.out.println("Q="+q);
try {
QueryEngine engine;
Query query = Query.create(q);
engine = QueryEngine.create(g.getManager(), reasoner, true);
QueryResult result = engine.execute(query);
if(query.isAsk()) {
System.out.print("Result: ");
if(result.ask()) {
System.out.println("yes");
}
else {
System.out.println("no");
}
}
else {
if(!result.ask()) {
System.out.println("Query has no solution.\n");
}
else {
System.out.println("Results:");
System.out.print(result);
System.out.println("
System.out.println("Size of result set: " + result.size());
}
}
} catch (QueryParserException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryEngineException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else if (opts.nextEq("--i2c")) {
opts.info("[-s]", "Converts individuals to classes");
boolean isReplaceOntology = false;
while (opts.hasOpts()) {
if (opts.nextEq("-s")) {
isReplaceOntology = true;
}
else {
break;
}
}
Set<OWLAxiom> axs = new HashSet<OWLAxiom>();
OWLOntology ont = g.getSourceOntology();
for (OWLNamedIndividual i : ont.getIndividualsInSignature()) {
OWLClass c = g.getDataFactory().getOWLClass(i.getIRI());
for (OWLClassExpression ce : i.getTypes(ont)) {
axs.add(g.getDataFactory().getOWLSubClassOfAxiom(c, ce));
}
//g.getDataFactory().getOWLDe
for (OWLClassAssertionAxiom ax : ont.getClassAssertionAxioms(i)) {
g.getManager().removeAxiom(ont, ax);
}
for (OWLDeclarationAxiom ax : ont.getDeclarationAxioms(i)) {
g.getManager().removeAxiom(ont, ax);
}
//g.getDataFactory().getOWLDeclarationAxiom(owlEntity)
}
if (isReplaceOntology) {
for (OWLAxiom ax : g.getSourceOntology().getAxioms()) {
g.getManager().removeAxiom(ont, ax);
}
}
for (OWLAxiom axiom : axs) {
g.getManager().addAxiom(ont, axiom);
}
}
else if (opts.nextEq("--init-reasoner")) {
opts.info("[-r reasonername]", "Creates a reasoner object");
while (opts.hasOpts()) {
if (opts.nextEq("-r")) {
reasonerName = opts.nextOpt();
}
else {
break;
}
}
reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager());
}
else if (opts.nextEq("--reasoner-query")) {
opts.info("[-r reasonername] [-m] CLASS-EXPRESSION",
"Queries current ontology for descendants of CE using reasoner");
boolean isManifest = false;
while (opts.hasOpts()) {
if (opts.nextEq("-r")) {
reasonerName = opts.nextOpt();
if (reasonerName.toLowerCase().equals("elk"))
isManifest = true;
}
else if (opts.nextEq("-m")) {
opts.info("",
"manifests the class exression as a class equivalent to query CE and uses this as a query; required for Elk");
isManifest = true;
}
else {
break;
}
}
String expression = opts.nextOpt();
owlpp = new OWLPrettyPrinter(g);
OWLEntityChecker entityChecker;
entityChecker = new ShortFormEntityChecker(
new BidirectionalShortFormProviderAdapter(
g.getManager(),
Collections.singleton(g.getSourceOntology()),
new SimpleShortFormProvider()));
ManchesterOWLSyntaxEditorParser parser =
new ManchesterOWLSyntaxEditorParser(g.getDataFactory(), expression);
parser.setOWLEntityChecker(entityChecker);
try {
OWLClassExpression ce = parser.parseClassExpression();
System.out.println("# QUERY: "+owlpp.render(ce));
if (isManifest) {
OWLClass qc = g.getDataFactory().getOWLClass(IRI.create("http://owltools.org/Q"));
OWLEquivalentClassesAxiom ax = g.getDataFactory().getOWLEquivalentClassesAxiom(ce, qc);
g.getManager().addAxiom(g.getSourceOntology(), ax);
ce = qc;
}
if (reasoner == null) {
reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager());
}
for (OWLClass r : reasoner.getSubClasses(ce, false).getFlattened()) {
System.out.println(owlpp.render(r));
}
} catch (ParserException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else if (opts.nextEq("--reasoner-ask-all")) {
opts.info("[-r REASONERNAME] [-s] [-a] AXIOMTYPE", "list all inferred equivalent named class pairs");
boolean isReplaceOntology = false;
boolean isAddToCurrentOntology = false;
while (opts.hasOpts()) {
if (opts.nextEq("-r")) {
reasonerName = opts.nextOpt();
}
else if (opts.nextEq("-s")) {
isReplaceOntology = true;
}
else if (opts.nextEq("-a")) {
isAddToCurrentOntology = true;
}
else {
break;
}
}
if (reasoner == null) {
reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager());
}
Set<OWLAxiom> iAxioms = new HashSet<OWLAxiom>();
String q = opts.nextOpt().toLowerCase();
owlpp = new OWLPrettyPrinter(g);
for (OWLClass c : g.getSourceOntology().getClassesInSignature()) {
if (q.startsWith("e")) {
for (OWLClass ec : reasoner.getEquivalentClasses(c)) {
System.out.println(owlpp.render(c)+"\t"+owlpp.render(ec));
}
}
else if (q.startsWith("s")) {
for (OWLClass ec : reasoner.getSuperClasses(c, true).getFlattened()) {
System.out.println(owlpp.render(c)+"\t"+owlpp.render(ec));
}
}
}
if (q.startsWith("i")) {
for (OWLNamedIndividual i : g.getSourceOntology().getIndividualsInSignature()) {
for (OWLClass ce : reasoner.getTypes(i, true).getFlattened()) {
System.out.println(owlpp.render(i)+"\t"+owlpp.render(ce));
							iAxioms.add(g.getDataFactory().getOWLClassAssertionAxiom(ce, i));
						}
}
}
OWLOntology ont = g.getSourceOntology();
if (isReplaceOntology) {
Set<OWLAxiom> allAxioms = ont.getAxioms();
g.getManager().removeAxioms(ont, allAxioms);
g.getManager().addAxioms(ont, iAxioms);
}
if (isAddToCurrentOntology) {
g.getManager().addAxioms(ont, iAxioms);
}
}
else if (opts.nextEq("--run-reasoner")) {
opts.info("[-r reasonername] [--assert-implied]", "infer new relationships");
boolean isAssertImplied = false;
boolean isDirect = true;
while (opts.hasOpts()) {
if (opts.nextEq("-r")) {
reasonerName = opts.nextOpt();
}
else if (opts.nextEq("--assert-implied")) {
isAssertImplied = true;
}
else if (opts.nextEq("--indirect")) {
isDirect = false;
}
else {
break;
}
}
owlpp = new OWLPrettyPrinter(g);
boolean isQueryProcessed = false;
if (reasoner == null) {
reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager());
}
if (opts.hasOpts()) {
if (opts.nextEq("-i")) {
OWLClass qc = (OWLClass)resolveEntity(opts);
System.out.println("Getting individuals of class: "+qc);
for (Node<OWLNamedIndividual> ni : reasoner.getInstances(qc, false)) {
for (OWLNamedIndividual i : ni.getEntities()) {
System.out.println(i);
}
}
isQueryProcessed = true;
}
}
if (queryExpressionMap != null) {
					// Assume --query-ontology -m ONT has been processed
for (OWLClass qc : queryExpressionMap.keySet()) {
System.out.println(" CWQueryClass: "+owlpp.render(qc)+" "+qc.getIRI().toString());
OWLClassExpression ec = queryExpressionMap.get(qc);
System.out.println(" CWQueryExpression: "+owlpp.render(ec));
// note jcel etc will not take class expressions
NodeSet<OWLClass> results = reasoner.getSubClasses(qc, false);
for (OWLClass result : results.getFlattened()) {
if (reasoner.isSatisfiable(result)) {
System.out.println(" "+owlpp.render(result));
}
							else {
								// do not report unsatisfiable classes, as
								// they are trivially subsumed by every query
								//LOG.error("unsatisfiable: "+owlpp.render(result));
							}
}
}
isQueryProcessed = true;
}
if (!isQueryProcessed) {
if (removedSubClassOfAxioms != null) {
System.out.println("attempting to recapitulate "+removedSubClassOfAxioms.size()+" axioms");
for (OWLSubClassOfAxiom a : removedSubClassOfAxioms) {
OWLClassExpression sup = a.getSuperClass();
if (sup instanceof OWLClass) {
boolean has = false;
for (Node<OWLClass> isup : reasoner.getSuperClasses(a.getSubClass(),true)) {
if (isup.getEntities().contains(sup)) {
has = true;
break;
}
}
System.out.print(has ? "POSITIVE: " : "NEGATIVE: ");
System.out.println(owlpp.render(a));
}
}
}
System.out.println("all inferences");
System.out.println("Consistent? "+reasoner.isConsistent());
for (OWLObject obj : g.getAllOWLObjects()) {
if (obj instanceof OWLClass) {
Set<OWLClassExpression> assertedSuperclasses =
((OWLClass) obj).getSuperClasses(g.getSourceOntology());
//System.out.println(obj+ " #subclasses:"+
// reasoner.getSubClasses((OWLClassExpression) obj, false).getFlattened().size());
for (OWLClass sup : reasoner.getSuperClasses((OWLClassExpression) obj, isDirect).getFlattened()) {
if (assertedSuperclasses.contains(sup)) {
continue;
}
System.out.println("INFERENCE: "+owlpp.render(obj)+" SubClassOf "+owlpp.render(sup));
if (isAssertImplied) {
OWLSubClassOfAxiom sca = g.getDataFactory().getOWLSubClassOfAxiom((OWLClass)obj, sup);
g.getManager().addAxiom(g.getSourceOntology(), sca);
}
}
for (OWLClass ec : reasoner.getEquivalentClasses(((OWLClassExpression) obj)).getEntities()) {
if (!ec.equals(obj))
System.out.println("INFERENCE: "+owlpp.render(obj)+" EquivalentTo "+owlpp.render(ec));
}
}
}
}
}
else if (opts.nextEq("--stash-subclasses")) {
opts.info("", "removes all subclasses in current source ontology; after reasoning, try to re-infer these");
boolean isDefinedOnly = true; // TODO - option
				removedSubClassOfAxioms = new HashSet<OWLSubClassOfAxiom>();
				HashSet<RemoveAxiom> rmaxs = new HashSet<RemoveAxiom>();
for (OWLSubClassOfAxiom a : g.getSourceOntology().getAxioms(AxiomType.SUBCLASS_OF)) {
OWLClassExpression subc = a.getSubClass();
if (!(subc instanceof OWLClass)) {
continue;
}
OWLClassExpression supc = a.getSuperClass();
if (!(supc instanceof OWLClass)) {
continue;
}
if (isDefinedOnly) {
if (((OWLClass)subc).getEquivalentClasses(g.getSourceOntology()).size() == 0) {
continue;
}
if (((OWLClass)supc).getEquivalentClasses(g.getSourceOntology()).size() == 0) {
continue;
}
}
RemoveAxiom rmax = new RemoveAxiom(g.getSourceOntology(),a);
rmaxs.add(rmax);
removedSubClassOfAxioms.add(g.getDataFactory().getOWLSubClassOfAxiom(a.getSubClass(), a.getSuperClass()));
				}
				System.out.println("Stashing "+removedSubClassOfAxioms.size()+" SubClassOf axioms");
for (RemoveAxiom rmax : rmaxs) {
g.getManager().applyChange(rmax);
}
}
else if (opts.nextEq("--list-cycles")) {
for (OWLObject x : g.getAllOWLObjects()) {
for (OWLObject y : g.getAncestors(x)) {
if (g.getAncestors(y).contains(x)) {
System.out.println(x + " in-cycle-with "+y);
}
}
}
}
else if (opts.nextEq("-a|--ancestors")) {
opts.info("LABEL", "list edges in graph closure to root nodes");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity(opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLGraphEdge> edges = g.getOutgoingEdgesClosureReflexive(obj);
showEdges(edges);
}
else if (opts.nextEq("--ancestors-with-ic")) {
opts.info("LABEL [-p COMPARISON_PROPERTY_URI]", "list edges in graph closure to root nodes, with the IC of the target node");
SimEngine se = new SimEngine(g);
if (opts.nextEq("-p")) {
se.comparisonProperty = g.getOWLObjectProperty(opts.nextOpt());
}
//System.out.println("i= "+i);
OWLObject obj = resolveEntity(opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLGraphEdge> edges = g.getOutgoingEdgesClosureReflexive(obj);
for (OWLGraphEdge e : edges) {
System.out.println(e);
System.out.println(" TARGET IC:"+se.getInformationContent(e.getTarget()));
}
}
else if (opts.nextEq("--get-ic")) {
opts.info("LABEL [-p COMPARISON_PROPERTY_URI]", "calculate information content for class");
SimEngine se = new SimEngine(g);
if (opts.nextEq("-p")) {
se.comparisonProperty = g.getOWLObjectProperty(opts.nextOpt());
}
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+" // IC:"+se.getInformationContent(obj));
}
else if (opts.nextEq("--ancestor-nodes")) {
opts.info("LABEL", "list nodes in graph closure to root nodes");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+obj.getClass());
for (OWLObject a : g.getAncestors(obj))
System.out.println(a);
}
else if (opts.nextEq("--parents-named")) {
opts.info("LABEL", "list direct outgoing edges to named classes");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLGraphEdge> edges = g.getOutgoingEdges(obj);
showEdges( edges);
}
else if (opts.nextEq("--parents")) {
opts.info("LABEL", "list direct outgoing edges");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLGraphEdge> edges = g.getPrimitiveOutgoingEdges(obj);
showEdges( edges);
}
else if (opts.nextEq("--grandparents")) {
opts.info("LABEL", "list direct outgoing edges and their direct outgoing edges");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLGraphEdge> edges = g.getPrimitiveOutgoingEdges(obj);
for (OWLGraphEdge e1 : edges) {
System.out.println(e1);
for (OWLGraphEdge e2 : g.getPrimitiveOutgoingEdges(e1.getTarget())) {
System.out.println(" "+e2);
}
}
}
else if (opts.nextEq("--subsumers")) {
opts.info("LABEL", "list named subsumers and subsuming expressions");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
Set<OWLObject> ancs = g.getSubsumersFromClosure(obj);
for (OWLObject a : ancs) {
System.out.println(a);
}
}
else if (opts.nextEq("--incoming-edges")) {
opts.info("LABEL", "list edges in graph to leaf nodes");
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLGraphEdge> edges = g.getIncomingEdges(obj);
showEdges( edges);
}
else if (opts.nextEq("--descendant-edges")) {
opts.info("LABEL", "list edges in graph closure to leaf nodes");
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLGraphEdge> edges = g.getIncomingEdgesClosure(obj);
showEdges( edges);
}
else if (opts.nextEq("--descendants")) {
opts.info("LABEL", "show all descendant nodes");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLObject> ds = g.getDescendants(obj);
for (OWLObject d : ds)
System.out.println(d);
}
else if (opts.nextEq("--subsumed-by")) {
opts.info("LABEL", "show all descendant nodes");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
System.out.println(obj+ " "+obj.getClass());
Set<OWLObject> ds = g.queryDescendants((OWLClass)obj);
for (OWLObject d : ds)
System.out.println(d);
}
else if (opts.nextEq("--lcsx")) {
opts.info("LABEL", "anonymous class expression 1");
OWLObject a = resolveEntity( opts);
opts.info("LABEL", "anonymous class expression 2");
OWLObject b = resolveEntity( opts);
System.out.println(a+ " // "+a.getClass());
System.out.println(b+ " // "+b.getClass());
SimEngine se = new SimEngine(g);
OWLClassExpression lcs = se.getLeastCommonSubsumerSimpleClassExpression(a, b);
System.out.println("LCS:"+lcs);
}
else if (opts.nextEq("--lcsx-all")) {
opts.info("LABEL", "ont 1");
String ont1 = opts.nextOpt();
opts.info("LABEL", "ont 2");
String ont2 = opts.nextOpt();
if (simOnt == null) {
simOnt = g.getManager().createOntology();
}
SimEngine se = new SimEngine(g);
Set <OWLObject> objs1 = new HashSet<OWLObject>();
Set <OWLObject> objs2 = new HashSet<OWLObject>();
System.out.println(ont1+" -vs- "+ont2);
for (OWLObject x : g.getAllOWLObjects()) {
if (! (x instanceof OWLClass))
continue;
String id = g.getIdentifier(x);
if (id.startsWith(ont1)) {
objs1.add(x);
}
if (id.startsWith(ont2)) {
objs2.add(x);
}
}
				Set<OWLClassExpression> lcsh = new HashSet<OWLClassExpression>();
owlpp = new OWLPrettyPrinter(g);
owlpp.hideIds();
for (OWLObject a : objs1) {
for (OWLObject b : objs2) {
OWLClassExpression lcs = se.getLeastCommonSubsumerSimpleClassExpression(a, b);
if (lcs instanceof OWLAnonymousClassExpression) {
if (lcsh.contains(lcs))
continue;
lcsh.add(lcs);
String label = owlpp.render(lcs);
IRI iri = IRI.create("http://purl.obolibrary.org/obo/U_"+
g.getIdentifier(a).replaceAll(":", "_")+"_"
+"_"+g.getIdentifier(b).replaceAll(":", "_"));
OWLClass namedClass = g.getDataFactory().getOWLClass(iri);
// TODO - use java obol to generate meaningful names
OWLEquivalentClassesAxiom ax = g.getDataFactory().getOWLEquivalentClassesAxiom(namedClass , lcs);
g.getManager().addAxiom(simOnt, ax);
g.getManager().addAxiom(simOnt,
g.getDataFactory().getOWLAnnotationAssertionAxiom(
g.getDataFactory().getOWLAnnotationProperty(OWLRDFVocabulary.RDFS_LABEL.getIRI()),
iri,
g.getDataFactory().getOWLLiteral(label)));
LOG.info("LCSX:"+owlpp.render(a)+" -vs- "+owlpp.render(b)+" = "+label);
//LOG.info(" Adding:"+owlpp.render(ax));
LOG.info(" Adding:"+ax);
}
}
}
}
else if (opts.nextEq("-l") || opts.nextEq("--list-axioms")) {
opts.info("LABEL", "lists all axioms for entity matching LABEL");
OWLObject obj = resolveEntity( opts);
owlpp = new OWLPrettyPrinter(g);
owlpp.print("## Showing axiom for: "+obj);
Set<OWLAxiom> axioms = g.getSourceOntology().getReferencingAxioms((OWLEntity) obj);
owlpp.print(axioms);
Set<OWLAnnotationAssertionAxiom> aaxioms = g.getSourceOntology().getAnnotationAssertionAxioms(((OWLNamedObject) obj).getIRI());
for (OWLAxiom a : aaxioms) {
System.out.println(owlpp.render(a));
}
}
else if (opts.nextEq("-d") || opts.nextEq("--draw")) {
opts.info("[-o FILENAME] [-f FMT] LABEL/ID", "generates a file tmp.png made using QuickGO code");
String imgf = "tmp.png";
String fmt = "png";
while (opts.hasOpts()) {
if (opts.nextEq("-o")) {
opts.info("FILENAME", "name of png file to save (defaults to tmp.png)");
imgf = opts.nextOpt();
}
else if (opts.nextEq("-f")) {
opts.info("FMT", "image format. See ImageIO docs for a list. Default: png");
fmt = opts.nextOpt();
if (imgf.equals("tmp.png")) {
imgf = "tmp."+fmt;
}
}
else if (opts.nextEq("-p")) {
OWLObjectProperty p = resolveObjectProperty(opts.nextOpt());
RelationConfig rc = gfxCfg.new RelationConfig();
rc.color = Color.MAGENTA;
gfxCfg.relationConfigMap.put(p, rc);
}
else {
break;
}
}
//System.out.println("i= "+i);
OWLObject obj = resolveEntity( opts);
System.out.println(obj);
OWLGraphLayoutRenderer r = new OWLGraphLayoutRenderer(g);
r.graphicsConfig = gfxCfg;
r.addObject(obj);
r.renderImage(fmt, new FileOutputStream(imgf));
//Set<OWLGraphEdge> edges = g.getOutgoingEdgesClosureReflexive(obj);
//showEdges( edges);
}
else if (opts.nextEq("--draw-all")) {
opts.info("", "draws ALL objects in the ontology (caution: small ontologies only)");
//System.out.println("i= "+i);
OWLGraphLayoutRenderer r = new OWLGraphLayoutRenderer(g);
r.addAllObjects();
r.renderImage("png", new FileOutputStream("tmp.png"));
}
else if (opts.nextEq("--dump-node-attributes")) {
opts.info("", "dumps all nodes attributes in CytoScape compliant format");
FileOutputStream fos;
PrintStream stream = null;
try {
fos = new FileOutputStream(opts.nextOpt());
stream = new PrintStream(new BufferedOutputStream(fos));
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
stream.println("Label");
for (OWLObject obj : g.getAllOWLObjects()) {
String label = g.getLabel(obj);
if (label != null)
stream.println(g.getIdentifier(obj)+"\t=\t"+label);
}
stream.close();
}
else if (opts.nextEq("--dump-sif")) {
opts.info("", "dumps CytoScape compliant sif format");
FileOutputStream fos;
PrintStream stream = null;
try {
fos = new FileOutputStream(opts.nextOpt());
stream = new PrintStream(new BufferedOutputStream(fos));
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
for (OWLObject x : g.getAllOWLObjects()) {
for (OWLGraphEdge e : g.getOutgoingEdges(x)) {
OWLQuantifiedProperty qp = e.getSingleQuantifiedProperty();
String label;
if (qp.getProperty() != null)
label = qp.getProperty().toString();
else
label = qp.getQuantifier().toString();
if (label != null)
stream.println(g.getIdentifier(x)+"\t"+label+"\t"+g.getIdentifier(e.getTarget()));
}
}
stream.close();
}
else if (opts.nextEq("--all-class-ic")) {
opts.info("", "show calculated Information Content for all classes");
SimEngine se = new SimEngine(g);
Similarity sa = se.getSimilarityAlgorithm(similarityAlgorithmName);
// no point in caching, as we only check descendants of each object once
g.getConfig().isCacheClosure = false;
for (OWLObject obj : g.getAllOWLObjects()) {
if (se.hasInformationContent(obj)) {
System.out.println(obj+"\t"+se.getInformationContent(obj));
}
}
}
else if (opts.nextEq("--sim-method")) {
opts.info("metric", "sets deafult similarity metric. Type --all to show all TODO");
similarityAlgorithmName = opts.nextOpt();
}
else if (opts.nextEq("--sim-all")) {
opts.info("", "calculates similarity between all pairs");
Double minScore = null;
SimEngine se = new SimEngine(g);
if (opts.hasOpts()) {
if (opts.nextEq("-m|--min")) {
minScore = Double.valueOf(opts.nextOpt());
}
else if (opts.nextEq("-s|--subclass-of")) {
se.comparisonSuperclass = resolveEntity(opts);
}
}
//Similarity metric = se.getSimilarityAlgorithm(similarityAlgorithmName);
//SimilarityAlgorithm metric = se.new JaccardSimilarity();
se.calculateSimilarityAllByAll(similarityAlgorithmName, minScore);
//System.out.println(metric.getClass().getName());
}
else if (opts.nextEq("--sim")) {
Reporter reporter = new Reporter(g);
opts.info("[-m metric] A B", "calculates similarity between A and B");
boolean nr = false;
Vector<OWLObjectPair> pairs = new Vector<OWLObjectPair>();
String subSimMethod = null;
boolean isAll = false;
SimEngine se = new SimEngine(g);
while (opts.hasOpts()) {
System.out.println("sub-opts for --sim");
if (opts.nextEq("-m")) {
similarityAlgorithmName = opts.nextOpt();
}
else if (opts.nextEq("-p")) {
se.comparisonProperty = g.getOWLObjectProperty(opts.nextOpt());
}
else if (opts.nextEq("--min-ic")) {
se.minimumIC = Double.valueOf(opts.nextOpt());
}
else if (opts.nextEq("--sub-method")) {
opts.info("MethodName","sets the method used to compare all attributes in a MultiSim test");
subSimMethod = opts.nextOpt();
}
else if (opts.nextEq("--query")) {
OWLObject q = resolveEntity(opts.nextOpt());
SimSearch search = new SimSearch(se, reporter);
isAll = true;
boolean isClasses = true;
boolean isInstances = true;
int MAX_PAIRS = 50; // todo - make configurable
while (opts.hasOpts()) {
if (opts.nextEq("-i"))
isClasses = false;
else if (opts.nextEq("-c"))
isInstances = false;
else if (opts.nextEq("--max-hits"))
MAX_PAIRS = Integer.parseInt(opts.nextOpt());
else
break;
}
search.setMaxHits(MAX_PAIRS);
OWLObject cc = resolveEntity(opts.nextOpt());
Set<OWLObject> candidates = g.queryDescendants((OWLClass)cc, isInstances, isClasses);
candidates.remove(cc);
search.setCandidates(candidates);
System.out.println(" numCandidates:"+candidates.size());
List<OWLObject> hits = search.search(q);
System.out.println(" hits:"+hits.size());
int n = 0;
for (OWLObject hit : hits) {
if (n < MAX_PAIRS)
pairs.add(new OWLObjectPair(q,hit));
n++;
System.out.println("HIT:"+n+"\t"+g.getLabelOrDisplayId(hit));
}
while (opts.nextEq("--include")) {
OWLObjectPair pair = new OWLObjectPair(q,resolveEntity(opts.nextOpt()));
if (!pairs.contains(pair)) {
pairs.add(pair);
System.out.println("adding_extra_pair:"+pair);
}
else {
System.out.println("extra_pair_alrwady_added:"+pair);
}
}
}
else if (opts.nextEq("-a|--all")) {
isAll = true;
boolean isClasses = true;
boolean isInstances = true;
if (opts.nextEq("-i"))
isClasses = false;
if (opts.nextEq("-c"))
isInstances = false;
OWLObject anc = resolveEntity(opts.nextOpt());
System.out.println("Set1:"+anc+" "+anc.getClass());
Set<OWLObject> objs = g.queryDescendants((OWLClass)anc, isInstances, isClasses);
objs.remove(anc);
System.out.println(" Size1:"+objs.size());
Set<OWLObject> objs2 = objs;
if (opts.nextEq("--vs")) {
OWLObject anc2 = resolveEntity(opts.nextOpt());
System.out.println("Set2:"+anc2+" "+anc2.getClass());
objs2 = g.queryDescendants((OWLClass)anc2, isInstances, isClasses);
objs2.remove(anc2);
System.out.println(" Size2:"+objs2.size());
}
for (OWLObject a : objs) {
if (!(a instanceof OWLNamedObject)) {
continue;
}
for (OWLObject b : objs2) {
if (!(b instanceof OWLNamedObject)) {
continue;
}
if (a.equals(b))
continue;
//if (a.compareTo(b) <= 0)
// continue;
OWLObjectPair pair = new OWLObjectPair(a,b);
System.out.println("Scheduling:"+pair);
pairs.add(pair);
}
}
}
else if (opts.nextEq("-s|--subclass-of")) {
se.comparisonSuperclass = resolveEntity(opts);
}
else if (opts.nextEq("--no-create-reflexive")) {
nr = true;
}
else {
// not recognized - end of this block of opts
break;
//System.err.println("???"+opts.nextOpt());
}
}
if (isAll) {
// TODO
//se.calculateSimilarityAllByAll(similarityAlgorithmName, 0.0);
}
else {
pairs.add(new OWLObjectPair(resolveEntity(opts.nextOpt()),
resolveEntity(opts.nextOpt())));
}
for (OWLObjectPair pair : pairs) {
OWLObject oa = pair.getA();
OWLObject ob = pair.getB();
Similarity metric = se.getSimilarityAlgorithm(similarityAlgorithmName);
if (nr) {
((DescriptionTreeSimilarity)metric).forceReflexivePropertyCreation = false;
}
if (subSimMethod != null)
((MultiSimilarity)metric).setSubSimMethod(subSimMethod);
System.out.println("comparing: "+oa+" vs "+ob);
Similarity r = se.calculateSimilarity(metric, oa, ob);
//System.out.println(metric+" = "+r);
metric.print();
metric.report(reporter);
if (simOnt == null) {
simOnt = g.getManager().createOntology();
}
if (opts.hasOpt("--save-sim")) {
metric.addResultsToOWLOntology(simOnt);
}
}
}
else if (opts.nextEq("-o|--output")) {
opts.info("FILE", "writes source ontology -- MUST BE specified as IRI, e.g. file://`pwd`/foo.owl");
OWLOntologyFormat ofmt = new RDFXMLOntologyFormat();
if ( g.getSourceOntology().getOntologyID() != null && g.getSourceOntology().getOntologyID().getOntologyIRI() != null) {
String ontURIStr = g.getSourceOntology().getOntologyID().getOntologyIRI().toString();
System.out.println("saving:"+ontURIStr);
}
if (opts.nextEq("-f")) {
String ofmtname = opts.nextOpt();
if (ofmtname.equals("functional")) {
ofmt = new OWLFunctionalSyntaxOntologyFormat();
}
}
pw.saveOWL(g.getSourceOntology(), ofmt, opts.nextOpt());
//pw.saveOWL(g.getSourceOntology(), opts.nextOpt());
}
else if (opts.nextEq("--save-sim")) {
opts.info("FILE", "saves similarity results as an OWL ontology. Use after --sim or --sim-all");
pw.saveOWL(simOnt, opts.nextOpt());
}
else if (opts.nextEq("--merge-sim")) {
opts.info("FILE", "merges similarity results into source OWL ontology. Use after --sim or --sim-all");
g.mergeOntology(simOnt);
}
else if (opts.nextEq("--list-axioms")) {
for (OWLAxiom a : g.getSourceOntology().getAxioms()) {
System.out.println("AX:"+a);
}
}
else if (opts.nextEq("--show-metadata")) {
OntologyMetadata omd = new OntologyMetadata();
omd.generate(g);
}
else if (opts.nextEq("--follow-subclass")) {
opts.info("", "follow subclass axioms (and also equivalence axioms) in graph traversal.\n"+
" default is to follow ALL. if this is specified then only explicitly specified edges followed");
if (g.getConfig().graphEdgeIncludeSet == null)
g.getConfig().graphEdgeIncludeSet = new HashSet<OWLQuantifiedProperty>();
g.getConfig().graphEdgeIncludeSet.add(new OWLQuantifiedProperty(Quantifier.SUBCLASS_OF));
}
else if (opts.nextEq("--follow-property")) {
opts.info("PROP-LABEL", "follow object properties of this type in graph traversal.\n"+
" default is to follow ALL. if this is specified then only explicitly specified edges followed");
OWLObjectProperty p = (OWLObjectProperty) resolveEntity( opts);
if (g.getConfig().graphEdgeIncludeSet == null)
g.getConfig().graphEdgeIncludeSet = new HashSet<OWLQuantifiedProperty>();
g.getConfig().graphEdgeIncludeSet.add(new OWLQuantifiedProperty(p, null));
}
else if (opts.nextEq("--exclude-property")) {
opts.info("PROP-LABEL", "exclude object properties of this type in graph traversal.\n"+
" default is to exclude NONE.");
OWLObjectProperty p = g.getOWLObjectProperty(opts.nextOpt());
System.out.println("Excluding "+p+" "+p.getClass());
if (g.getConfig().graphEdgeExcludeSet == null)
g.getConfig().graphEdgeExcludeSet = new HashSet<OWLQuantifiedProperty>();
g.getConfig().graphEdgeExcludeSet.add(new OWLQuantifiedProperty(p, null));
}
else if (opts.nextEq("--exclude-metaclass")) {
opts.info("METACLASS-LABEL", "exclude classes of this type in graph traversal.\n"+
" default is to follow ALL classes");
OWLClass c = (OWLClass) resolveEntity( opts);
g.getConfig().excludeMetaClass = c;
}
else if (opts.nextEq("--parse-tsv")) {
opts.info("[-s] [-p PROPERTY] [-a AXIOMTYPE] [-t INDIVIDUALSTYPE] FILE", "parses a tabular file to OWL axioms");
TableToAxiomConverter ttac = new TableToAxiomConverter(g);
ttac.config.axiomType = AxiomType.CLASS_ASSERTION;
while (opts.hasOpts()) {
if (opts.nextEq("-s|--switch")) {
opts.info("", "switch subject and object");
ttac.config.isSwitchSubjectObject = true;
}
else if (opts.nextEq("-l|--label")) {
ttac.config.setPropertyToLabel();
ttac.config.axiomType = AxiomType.ANNOTATION_ASSERTION;
}
else if (opts.nextEq("--comment")) {
ttac.config.setPropertyToComment();
ttac.config.axiomType = AxiomType.ANNOTATION_ASSERTION;
}
else if (opts.nextEq("-m|--map-xrefs")) {
ttac.buildClassMap(g);
}
else if (opts.nextEq("-p|--prop")) {
ttac.config.property = ((OWLNamedObject) resolveObjectProperty( opts.nextOpt())).getIRI();
//ttac.config.property = g.getOWLObjectProperty().getIRI();
}
else if (opts.nextEq("--default1")) {
ttac.config.defaultCol1 = opts.nextOpt();
}
else if (opts.nextEq("--default2")) {
ttac.config.defaultCol2 = opts.nextOpt();
}
else if (opts.nextEq("--iri-prefix")) {
int col = 0;
String x = opts.nextOpt();
if (x.equals("1") || x.startsWith("s")) {
col = 1;
}
else if (x.equals("2") || x.startsWith("o")) {
col = 2;
}
String pfx = opts.nextOpt();
if (!pfx.startsWith("http:"))
pfx = "http://purl.obolibrary.org/obo/" + pfx + "_";
ttac.config.iriPrefixMap.put(col, pfx);
}
else if (opts.nextEq("-a|--axiom-type")) {
ttac.config.setAxiomType(opts.nextOpt());
}
else if (opts.nextEq("-t|--individuals-type")) {
System.out.println("setting types");
ttac.config.individualsType = resolveClass( opts.nextOpt());
}
else {
throw new OptionException(opts.nextOpt());
}
}
String f = opts.nextOpt();
System.out.println("tabfile: "+f);
ttac.parse(f);
}
else if (opts.nextEq("--idmap-extract-pairs")) {
opts.info("IDType1 IDType2 PIRMapFile", "extracts pairs from mapping file");
IDMappingPIRParser p = new IDMappingPIRParser();
IDMapPairWriter h = new IDMapPairWriter();
h.setPair(opts.nextOpt(), opts.nextOpt());
p.handler = h;
p.parse(new File(opts.nextOpt()));
}
else if (opts.nextEq("--parser-idmap")) {
opts.info("UniProtIDMapFile", "...");
UniProtIDMapParser p = new UniProtIDMapParser();
p.parse(new File(opts.nextOpt()));
System.out.println("Types:"+p.idMap.size());
// TODO...
}
else if (opts.nextEq("--gaf")) {
GafObjectsBuilder builder = new GafObjectsBuilder();
gafdoc = builder.buildDocument(opts.nextOpt());
//gafdoc = builder.buildDocument(new File(opts.nextOpt()));
}
else if (opts.nextEq("--gaf-xp-predict")) {
owlpp = new OWLPrettyPrinter(g);
if (gafdoc == null) {
System.err.println("No gaf document (use '--gaf GAF-FILE') ");
System.exit(1);
}
AnnotationPredictor ap = new CompositionalClassPredictor(gafdoc, g);
Set<Prediction> predictions = ap.getAllPredictions();
System.out.println("Predictions:"+predictions.size());
for (Prediction p : predictions) {
System.out.println(p.render(owlpp));
}
}
else if (opts.nextEq("--gaf-term-counts")) {
// TODO - ensure has_part and other relations are excluded
owlpp = new OWLPrettyPrinter(g);
Map<OWLObject,Set<String>> aMap = new HashMap<OWLObject,Set<String>>();
for (GeneAnnotation a : gafdoc.getGeneAnnotations()) {
OWLObject c = g.getOWLObjectByIdentifier(a.getCls());
for (OWLObject x : g.getAncestorsReflexive(c)) {
if (!aMap.containsKey(x))
aMap.put(x, new HashSet<String>());
aMap.get(x).add(a.getBioentity());
}
}
for (OWLObject c : g.getAllOWLObjects()) {
if (c instanceof OWLClass) {
if (g.isObsolete(c))
continue;
System.out.println(g.getIdentifier(c)+"\t"+g.getLabel(c)+"\t"+
(aMap.containsKey(c) ? aMap.get(c).size() : "0"));
}
}
}
else if (opts.nextEq("--gaf-query")) {
opts.info("LABEL", "list edges in graph closure to root nodes");
//System.out.println("i= "+i);
OWLObject obj = resolveEntity(opts);
Set<OWLObject> descs = g.getDescendantsReflexive(obj);
for (GeneAnnotation a : gafdoc.getGeneAnnotations()) {
OWLObject c = g.getOWLObjectByIdentifier(a.getCls());
if (descs.contains(c)) {
System.out.println(g.getIdentifier(c)+"\t"+a.getBioentityObject()+"\t"+a.getBioentityObject().getSymbol());
}
}
}
else if (opts.nextEq("--report-profile")) {
g.getProfiler().report();
}
else if (opts.nextEq("--no-cache")) {
g.getConfig().isCacheClosure = false;
}
else if (opts.nextEq("--create-ontology")) {
opts.info("ONT-IRI", "creates a new OWLOntology and makes it the source ontology");
g = new OWLGraphWrapper(opts.nextOpt());
}
else if (opts.nextEq("--parse-obo")) {
String f = opts.nextOpt();
OWLOntology ont = pw.parseOBO(f);
if (g == null)
g = new OWLGraphWrapper(ont);
else {
System.out.println("adding support ont "+ont);
g.addSupportOntology(ont);
}
}
else if (opts.hasArgs()) {
String f = opts.nextOpt();
try {
OWLOntology ont = pw.parse(f);
if (g == null)
g = new OWLGraphWrapper(ont);
else {
System.out.println("adding support ont "+ont);
g.addSupportOntology(ont);
}
}
catch (Exception e) {
System.err.println("could not parse:"+f+" Exception:"+e);
System.exit(1);
}
//paths.add(opt);
}
else {
if (opts.helpMode)
helpFooter();
// should only reach here in help mode
}
}
/*
OWLGraphWrapper g;
if (paths.size() == 0) {
throw new Error("must specify at least one file");
}
if (paths.size() > 1) {
if (merge) {
// note: currently we can only merge obo files
pw.parseOBOFiles(paths);
}
else {
throw new Error("cannot load multiple files unless --merge is set");
}
}
else {
g = pw.parseToOWLGraph(paths.get(0));
}
*/
}
private OWLReasoner createReasoner(OWLOntology ont, String reasonerName,
OWLOntologyManager manager) {
OWLReasonerFactory reasonerFactory = null;
OWLReasoner reasoner;
LOG.info("Creating reasoner:"+reasonerName);
if (reasonerName == null || reasonerName.equals("factpp"))
reasonerFactory = new FaCTPlusPlusReasonerFactory();
else if (reasonerName.equals("pellet"))
reasonerFactory = new PelletReasonerFactory();
else if (reasonerName.equals("hermit")) {
//return new org.semanticweb.HermiT.Reasoner.ReasonerFactory().createReasoner(ont);
reasonerFactory = new org.semanticweb.HermiT.Reasoner.ReasonerFactory();
}
else if (reasonerName.equals("elk")) {
//SimpleConfiguration rconf = new SimpleConfiguration(FreshEntityPolicy.ALLOW, Long.MAX_VALUE);
reasonerFactory = new ElkReasonerFactory();
//reasoner = reasonerFactory.createReasoner(ont, rconf);
reasoner = reasonerFactory.createNonBufferingReasoner(ont);
System.out.println(reasonerFactory+" "+reasoner+" // "+InferenceType.values());
reasoner.precomputeInferences(InferenceType.values());
return reasoner;
}
else if (reasonerName.equals("cb")) {
Class<?> rfc;
try {
rfc = Class.forName("org.semanticweb.cb.owlapi.CBReasonerFactory");
reasonerFactory =(OWLReasonerFactory) rfc.newInstance();
} catch (ClassNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InstantiationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else if (reasonerName.equals("jcel")) {
System.out.println("making jcel reasoner with:"+ont);
reasoner = new JcelReasoner(ont);
reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
return reasoner;
}
else
    throw new IllegalArgumentException("no such reasoner: " + reasonerName);
if (reasonerFactory == null)
    throw new RuntimeException("could not instantiate reasoner factory for: " + reasonerName);
reasoner = reasonerFactory.createReasoner(ont);
return reasoner;
}
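// Illustrative usage (the ontology and manager variables are hypothetical here):
//   OWLReasoner r = createReasoner(g.getSourceOntology(), "elk", g.getManager());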
private void catOntologies(Opts opts) throws OWLOntologyCreationException, IOException {
opts.info("[-r|--ref-ont ONT] [-i|--use-imports]", "Catenate ontologies taking only referenced subsets of supporting onts.\n"+
" See Mooncat docs");
Mooncat m = new Mooncat(g);
ParserWrapper pw = new ParserWrapper();
String newURI = null;
while (opts.hasOpts()) {
//String opt = opts.nextOpt();
if (opts.nextEq("-r") || opts.nextEq("--ref-ont")) {
LOG.error("DEPRECATED - list all ref ontologies on main command line");
String f = opts.nextOpt();
m.addReferencedOntology(pw.parseOWL(f));
}
else if (opts.nextEq("-s") || opts.nextEq("--src-ont")) {
m.setOntology(pw.parseOWL(opts.nextOpt()));
}
else if (opts.nextEq("-p") || opts.nextEq("--prefix")) {
m.addSourceOntologyPrefix(opts.nextOpt());
}
else if (opts.nextEq("-i") || opts.nextEq("--use-imports")) {
System.out.println("using everything in imports closure");
g.addSupportOntologiesFromImportsClosure();
}
else if (opts.nextEq("-n") || opts.nextEq("--new-uri")) {
System.out.println("new URI for merged ontology");
newURI = opts.nextOpt();
}
else {
break;
//opts.fail();
}
}
//if (m.getReferencedOntologies().size() == 0) {
// m.setReferencedOntologies(g.getSupportOntologySet());
//g.useImportClosureForQueries();
//for (OWLAxiom ax : m.getClosureAxiomsOfExternalReferencedEntities()) {
// System.out.println("M_AX:"+ax);
m.mergeOntologies();
m.removeDanglingAxioms();
if (newURI != null) {
SetOntologyID soi = new SetOntologyID(g.getSourceOntology(),
new OWLOntologyID(IRI.create(newURI)));
g.getManager().applyChange(soi);
/*
HashSet<OWLOntology> cpOnts = new HashSet<OWLOntology>();
LOG.info("srcOnt annots:"+g.getSourceOntology().getAnnotations().size());
cpOnts.add(g.getSourceOntology());
OWLOntology newOnt = g.getManager().createOntology(IRI.create(newURI), cpOnts);
LOG.info("newOnt annots:"+newOnt.getAnnotations().size());
//g.getDataFactory().getOWLOn
g.setSourceOntology(newOnt);
*/
}
}
private void showEdges(Set<OWLGraphEdge> edges) {
OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g);
for (OWLGraphEdge e : edges) {
System.out.println(owlpp.render(e));
}
}
public void summarizeOntology(OWLOntology ont) {
System.out.println("Ontology:"+ont);
System.out.println(" Classes:"+ont.getClassesInSignature().size());
System.out.println(" Individuals:"+ont.getIndividualsInSignature().size());
System.out.println(" ObjectProperties:"+ont.getObjectPropertiesInSignature().size());
System.out.println(" AxiomCount:"+ont.getAxiomCount());
}
// todo - move to util
public OWLObject resolveEntity(Opts opts) {
OWLObject obj = null;
String id = opts.nextOpt(); // in future we will allow resolution by name etc
return resolveEntity(id);
}
public OWLObject resolveEntity(String id) {
OWLObject obj = null;
obj = g.getOWLObjectByLabel(id);
if (obj != null)
return obj;
obj = g.getOWLObject(id);
if (obj != null)
return obj;
obj = g.getOWLObjectByIdentifier(id);
return obj;
}
public OWLObjectProperty resolveObjectProperty(String id) {
IRI i = null;
i = g.getIRIByLabel(id);
if (i == null && id.startsWith("http:")) {
i = IRI.create(id);
}
if (i != null) {
return g.getDataFactory().getOWLObjectProperty(i);
}
return g.getOWLObjectPropertyByIdentifier(id);
}
public OWLClass resolveClass(String id) {
IRI i = null;
i = g.getIRIByLabel(id);
if (i == null && id.startsWith("http:")) {
i = IRI.create(id);
}
if (i != null) {
return g.getDataFactory().getOWLClass(i);
}
return g.getDataFactory().getOWLClass(IRI.create(id));
}
public void help() {
System.out.println("owltools [ONTOLOGY ...] [COMMAND ...]\n");
System.out.println("Commands/Options");
System.out.println(" (type 'owltools COMMAND -h' for more info)");
}
public void helpFooter() {
System.out.println("\nOntologies:");
System.out.println(" These are specified as IRIs. The IRI is typically 'file:PATH' or a URL");
System.out.println("\nLabel Resolution:");
System.out.println(" you can pass in either a class label (enclosed in single quotes), an OBO ID or a IRI");
System.out.println("\nExecution:");
System.out.println(" note that commands are processed *in order*. This allows you to run mini-pipelines" +
" or programs on the command line.");
System.out.println(" Each command has its own 'grammar'. Type owltools COMMAND -h to see options.");
System.out.println(" Any argument that is not a command is assumed to be an ontology, and an attempt is made to load it.");
System.out.println(" (again, this happens sequentially).");
System.out.println("\nExamples:");
System.out.println(" ");
}
}
|
package picoded.servlet;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.ByteBuffer;
import java.nio.charset.Charset; //import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.logging.*;
// Sub modules used
import picoded.JStack.*;
import picoded.JStruct.*;
import picoded.JSql.*;
public class JStackPage extends CorePage {
// Static variables
// Internal variables, can be overwritten. Else it is auto "filled" when needed
public String _webInfPath = null;
public String _classesPath = null;
public String _libraryPath = null;
public String _configsPath = null;
public String _pagesTemplatePath = null;
public String _pagesOutputPath = null;
public String _jsmlTemplatePath = null;
// Path variables, according to standard WAR package convention
public String getWebInfPath() {
return (_webInfPath != null) ? _webInfPath : (_webInfPath = getContextPath() + "WEB-INF/");
}
public String getClassesPath() {
return (_classesPath != null) ? _classesPath : (_classesPath = getWebInfPath() + "classes/");
}
public String getLibraryPath() {
return (_libraryPath != null) ? _libraryPath : (_libraryPath = getWebInfPath() + "lib/");
}
public String getConfigsPath() {
return (_configsPath != null) ? _configsPath : (_configsPath = getWebInfPath() + "config/");
}
public String getPagesTemplatePath() {
return (_pagesTemplatePath != null) ? _pagesTemplatePath : (_pagesTemplatePath = getWebInfPath() + "pages/");
}
public String getPagesOutputPath() {
return (_pagesOutputPath != null) ? _pagesOutputPath : (_pagesOutputPath = getContextPath());
}
public String getJsmlTemplatePath() {
return (_jsmlTemplatePath != null) ? _jsmlTemplatePath : (_jsmlTemplatePath = getWebInfPath() + "jsml/");
}
// Config file handling
protected JConfig JConfigObj = null;
/// @TODO, the actual JConfig integration with the DB. Currently it's purely via the file system
public JConfig JConfig() {
if (JConfigObj != null) {
return JConfigObj;
}
if ((new File(getConfigsPath())).exists()) {
JConfigObj = new JConfig(getConfigsPath());
} else {
JConfigObj = new JConfig();
}
return JConfigObj;
}
// JStack auto load handling
protected JStack JStackObj = null;
/// Generates the JSQL layer given the config namespace
/// @TODO : To Deprecate after full roll out of sys.JStack.stack config
protected JSql JSqlLayerFromConfig(String profileNameSpace) {
// Gets the configuration setup
JConfig jc = JConfig();
// Gets the config vars
String engine = jc.getString(profileNameSpace + ".engine", "");
String path = jc.getString(profileNameSpace + ".path", "");
String username = jc.getString(profileNameSpace + ".username", "");
String password = jc.getString(profileNameSpace + ".password", "");
String database = jc.getString(profileNameSpace + ".database", "");
// Default fallback on sqlite engine, if the profileNameSpace is default
// This is used to ensure existing test cases do not break
if (profileNameSpace.equalsIgnoreCase("sys.dataStack.JSqlOnly.sqlite")) {
if (engine.length() <= 0) {
engine = "sqlite";
}
if (path.length() <= 0) {
path = getWebInfPath() + "/sqlite.db";
}
}
// SQLite implementation
if (engine.equalsIgnoreCase("sqlite")) {
if (path.length() <= 0) {
throw new RuntimeException("Unsupported " + profileNameSpace + ".path: " + path);
}
// Replaces WEB-INF path
path = path.replace("./WEB-INF/", getWebInfPath());
path = path.replace("${WEB-INF}", getWebInfPath());
// Generates the sqlite connection with the path
return JSql.sqlite(path);
} else if (engine.equalsIgnoreCase("mssql")) {
return JSql.mssql(path, database, username, password);
} else if (engine.equalsIgnoreCase("mysql")) {
return JSql.mysql(path, database, username, password);
} else if (engine.equalsIgnoreCase("oracle")) {
return JSql.oracle(path, username, password);
} else {
throw new RuntimeException("Unsupported " + profileNameSpace + ".engine: " + engine);
}
}
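// A minimal sketch of the config keys read above, assuming a hypothetical
// profile namespace "sys.dataStack.myProfile":
//   sys.dataStack.myProfile.engine   = mysql
//   sys.dataStack.myProfile.path     = localhost:3306
//   sys.dataStack.myProfile.database = mydb
//   sys.dataStack.myProfile.username = user
//   sys.dataStack.myProfile.password = pass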
/// Loads the configurations from JStack, and sets up the respective JStackLayers
/// @TODO: Support JStackLayers jsons config
protected JStackLayer[] loadConfiguredJStackLayers() {
// Gets the configuration setup
JConfig jc = JConfig();
JStackLayer[] ret = null;
// Gets the JStack configuration, and use it (if exists)
System.out.println( "
System.out.println( jc.getString("sys") );
System.out.println( jc.getString("sys.JStack.stack") );
ret = JStack.stackConfigLayersToJStackLayers( jc.getObjectList("sys.JStack.stack", null), getWebInfPath() );
if( ret != null ) {
return ret;
}
// Else falls back to legacy support
// Gets the profile name and type
String profileName = jc.getString("sys.dataStack.selected.profile.name", "JSqlOnly.sqlite");
String profileType = jc.getString("sys.dataStack.selected.profile.type", "JSql");
if (profileType.equalsIgnoreCase("JSql")) {
return new JStackLayer[] { JSqlLayerFromConfig("sys.dataStack." + profileName) };
} else {
throw new RuntimeException("Unsupported sys.dataStack.selected.profile.type: " + profileType);
}
}
// tableSetup calls for various jSql based modules
/// Returns the JStack object
/// @TODO Actual JStack config loading; currently it just loads a blank sqlite file =(
public JStack JStack() {
if (JStackObj != null) {
return JStackObj;
}
//Default is sqlite
JStackObj = new JStack(loadConfiguredJStackLayers());
return JStackObj;
}
/// JStack.disposeStackLayers only if it was initialized
public void JStack_disposeStackLayers() throws JStackException {
if (JStackObj != null) {
JStackObj.disposeStackLayers();
JStackObj = null;
}
}
/// Does the disposal teardown of all layers (especially JSql in MySql mode)
@Override
public void doSharedTeardown() throws Exception {
JStack_disposeStackLayers();
super.doSharedTeardown();
}
}
|
package us.corenetwork.tradecraft;
import net.minecraft.server.v1_7_R1.*;
import org.bukkit.craftbukkit.v1_7_R1.util.CraftMagicNumbers;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Set;
public class CustomVillager extends EntityVillager {
private String career;
private MerchantRecipeList trades;
public CustomVillager(World world) {
super(world);
init();
}
public CustomVillager(World world, int i) {
super(world, i);
init();
}
private void init()
{
loadVillagerData();
loadTradesFromDB();
if (trades.size() == 0)
{
addTier(0);
}
}
@Override
public EntityAgeable createChild(EntityAgeable entityAgeable) {
return b(entityAgeable);
}
/**
* Returns list of offers
*/
@Override
public MerchantRecipeList getOffers(EntityHuman entityHuman) {
if (trades == null || trades.size() == 0)
{
Console.severe("Villager " + uniqueID.toString() + " has no trades!");
CustomRecipe recipe = new CustomRecipe(new ItemStack((Block) Block.REGISTRY.a("diamond_block"), 64), new ItemStack((Block) Block.REGISTRY.a("dirt"), 1));
//recipe.lockManually();
MerchantRecipeList list = new MerchantRecipeList();
list.add(recipe);
return list;
}
return trades;
}
/**
* Activated when player makes a trade
*/
@Override
public void a(MerchantRecipe vanillaRecipe)
{
EntityHuman human = b();
if (human != null && human instanceof EntityPlayer)
{
((EntityPlayer) human).updateInventory(human.activeContainer);
}
CustomRecipe recipe = (CustomRecipe) vanillaRecipe;
if (trades == null)
return;
int tradeID = trades.indexOf(recipe);
if (tradeID < 0)
{
Console.severe("Player completed unknown trade on villager " + uniqueID.toString() + "! ");
return;
}
incrementCounter(tradeID);
if (areAllTiersUnlocked())
{
if (random.nextDouble() < Settings.getDouble(Setting.ALL_UNLOCKED_REFRESH_CHANCE))
{
refreshAll();
}
}
else
{
if (isLastTier(recipe) || random.nextInt(100) < 20) //Add new tier when on last trade or with 20% chance
{
addTier(getLastTier() + 1);
refreshAll();
}
}
}
public void loadVillagerData()
{
try
{
PreparedStatement statement = IO.getConnection().prepareStatement("SELECT * FROM villagers WHERE ID = ?");
statement.setString(1, uniqueID.toString());
ResultSet set = statement.executeQuery();
if (set.next())
{
carreer = set.getString("Career");
statement.close();
}
else
{
statement.close();
createVillagerData();
}
}
catch (SQLException e)
{
e.printStackTrace();
}
}
private void createVillagerData()
{
career = VillagerConfig.getRandomCareer(getProfession());
try
{
PreparedStatement statement = IO.getConnection().prepareStatement("INSERT INTO villagers (ID, Career) VALUES (?,?)");
statement.setString(1, uniqueID.toString());
statement.setString(2, career);
statement.executeUpdate();
IO.getConnection().commit();
statement.close();
}
catch (SQLException e)
{
e.printStackTrace();
}
}
private void loadTradesFromDB()
{
trades = new MerchantRecipeList();
try
{
PreparedStatement statement = IO.getConnection().prepareStatement("SELECT * FROM offers WHERE Villager = ?");
statement.setString(1, uniqueID.toString());
ResultSet set = statement.executeQuery();
while (set.next())
{
ItemStack itemA;
ItemStack itemB = null;
ItemStack itemC;
//First item
int id = set.getInt("FirstItemID");
int data = set.getInt("FirstItemDamage");
int amount = set.getInt("FirstItemAmount");
itemA = new ItemStack(CraftMagicNumbers.getItem(id), amount, data);
//Second item
id = set.getInt("SecondItemID");
if (id > 0)
{
data = set.getInt("SecondtItemDamage");
amount = set.getInt("SecondItemAmount");
itemB = new ItemStack(CraftMagicNumbers.getItem(id), amount, data);
}
//Result item
id = set.getInt("ThirdItemID");
data = set.getInt("ThirdItemDamage");
amount = set.getInt("ThirdItemAmount");
itemC = new ItemStack(CraftMagicNumbers.getItem(id), amount, data);
CustomRecipe recipe;
if (itemB == null)
recipe = new CustomRecipe(itemA, itemC);
else
recipe = new CustomRecipe(itemA, itemB, itemC);
recipe.setTier(set.getInt("Tier"));
recipe.setTradesLeft(set.getInt("TradesLeft"));
trades.add(recipe);
}
statement.close();
}
catch (SQLException e)
{
e.printStackTrace();
}
}
private void refreshAll()
{
try
{
PreparedStatement statement = IO.getConnection().prepareStatement("UPDATE offers SET TradesLeft = ? WHERE Villager = ? AND ID = ?");
for (int i = 0; i < trades.size(); i++)
{
int tradesLeft = getDefaultNumberOfTrades();
CustomRecipe recipe = (CustomRecipe) trades.get(i);
recipe.setTradesLeft(tradesLeft);
statement.setInt(1, tradesLeft);
statement.setString(2, uniqueID.toString());
statement.setInt(3, i);
statement.addBatch();
}
statement.executeBatch();
statement.close();
IO.getConnection().commit();
}
catch (SQLException e)
{
e.printStackTrace();
}
}
private void incrementCounter(int tradeID)
{
CustomRecipe recipe = (CustomRecipe) trades.get(tradeID);
recipe.setTradesLeft(recipe.getTradesLeft() - 1);
try
{
PreparedStatement statement = IO.getConnection().prepareStatement("UPDATE offers SET TradesLeft = ? WHERE Villager = ? AND ID = ?");
statement.setInt(1, recipe.getTradesLeft());
statement.setString(2, uniqueID.toString());
statement.setInt(3, tradeID);
statement.executeUpdate();
statement.close();
IO.getConnection().commit();
}
catch (SQLException e)
{
e.printStackTrace();
}
}
private void addTier(int tier)
{
List<CustomRecipe> recipes = VillagerConfig.getTrades(career, tier);
Console.info("Adding trades: " + recipes.size());
try
{
PreparedStatement statement = IO.getConnection().prepareStatement("INSERT INTO offers (Villager, ID, FirstItemID, FirstItemDamage, FirstItemNBT, FirstItemAmount, SecondItemID, SecondItemDamage, SecondItemNBT, SecondItemAmount, ThirdItemID, ThirdItemDamage, ThirdItemNBT, ThirdItemAmount, Tier, TradesLeft) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");
for (int i = 0; i < recipes.size(); i++)
{
int id = i + trades.size();
CustomRecipe recipe = recipes.get(i);
statement.setString(1, uniqueID.toString());
statement.setInt(2, id);
statement.setInt(3, CraftMagicNumbers.getId(recipe.getBuyItem1().getItem()));
statement.setInt(4, recipe.getBuyItem1().getData());
statement.setString(5, "");
statement.setInt(6, recipe.getBuyItem1().count);
if (recipe.hasSecondItem())
{
statement.setInt(7, CraftMagicNumbers.getId(recipe.getBuyItem2().getItem()));
statement.setInt(8, recipe.getBuyItem2().getData());
statement.setString(9, "");
statement.setInt(10, recipe.getBuyItem2().count);
}
else
{
statement.setInt(7, 0);
statement.setInt(8, 0);
statement.setString(9, "");
statement.setInt(10, 0);
}
statement.setInt(11, CraftMagicNumbers.getId(recipe.getBuyItem3().getItem()));
statement.setInt(12, recipe.getBuyItem3().getData());
statement.setString(13, "");
statement.setInt(14, recipe.getBuyItem3().count);
statement.setInt(15, tier);
int amountOfTrades = getDefaultNumberOfTrades();
statement.setInt(16, amountOfTrades);
statement.addBatch();
recipe.setTier(tier);
recipe.setTradesLeft(amountOfTrades);
trades.add(recipe);
}
statement.executeBatch();
statement.close();
IO.getConnection().commit();
}
catch (SQLException e)
{
e.printStackTrace();
}
}
private int getLastTier()
{
if (trades.size() == 0)
return 0;
else
return ((CustomRecipe) trades.get(trades.size() - 1)).getTier();
}
private boolean isLastTier(CustomRecipe recipe)
{
return getLastTier() == recipe.getTier();
}
private boolean areAllTiersUnlocked()
{
return !VillagerConfig.hasTrades(career, getLastTier() + 1);
}
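// Two nextInt(6) rolls (0..5 each) plus 2 give 2..12 uses per trade, with a
// triangular distribution that peaks at 7.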
private static int getDefaultNumberOfTrades()
{
return 2 + TradeCraftPlugin.random.nextInt(6) + TradeCraftPlugin.random.nextInt(6);
}
}
|
/*
* AccessFlags.java
*/
package jltools.types;
/**
* AccessFlags
*
* Overview:
* A mutable set of access flags.
**/
public class AccessFlags implements Cloneable {
/**
* Effects: returns a new accessflags object with no accessflags set.
**/
public AccessFlags() {
// bits defaults to 0.
}
/**
* Returns a copy of this.
**/
public AccessFlags copy() {
AccessFlags other = new AccessFlags();
other.bits = bits;
return other;
}
/**
* Given the JVM encoding of a set of flags, returns the AccessFlags object
* for that encoding.
**/
public static AccessFlags flagsForInt(int fl) {
AccessFlags flags = new AccessFlags();
if ((fl & 0x1) != 0) { flags.setPublic(true); }
if ((fl & 0x2) != 0) { flags.setPrivate(true); }
if ((fl & 0x4) != 0) { flags.setProtected(true); }
if ((fl & 0x8) != 0) { flags.setStatic(true); }
if ((fl & 0x10) != 0) { flags.setFinal(true); }
if ((fl & 0x20) != 0) { flags.setSynchronized(true); }
if ((fl & 0x40) != 0) { flags.setVolatile(true); }
if ((fl & 0x80) != 0) { flags.setTransient(true); }
if ((fl & 0x100) != 0) { flags.setNative(true); }
if ((fl & 0x200) != 0) { flags.setInterface(true); }
if ((fl & 0x400) != 0) { flags.setAbstract(true); }
return flags;
}
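// Example (illustrative): 0x19 == 0x1 | 0x8 | 0x10, so
//   AccessFlags f = AccessFlags.flagsForInt(0x19);
// yields isPublic(), isStatic() and isFinal() all true.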
public void setPublic(boolean val) {
if (val)
bits |= PUBLIC_BIT;
else
bits &= ~PUBLIC_BIT;
}
public boolean isPublic() {
return (bits & PUBLIC_BIT) != 0;
}
public void setPrivate(boolean val) {
if (val)
bits |= PRIVATE_BIT;
else
bits &= ~PRIVATE_BIT;
}
public boolean isPrivate() {
return (bits & PRIVATE_BIT) != 0;
}
public void setProtected(boolean val) {
if (val)
bits |= PROTECTED_BIT;
else
bits &= ~PROTECTED_BIT;
}
public boolean isProtected() {
return (bits & PROTECTED_BIT) != 0;
}
public void setStatic(boolean val) {
if (val)
bits |= STATIC_BIT;
else
bits &= ~STATIC_BIT;
}
public boolean isStatic() {
return (bits & STATIC_BIT) != 0;
}
public void setFinal(boolean val) {
if (val)
bits |= FINAL_BIT;
else
bits &= ~FINAL_BIT;
}
public boolean isFinal() {
return (bits & FINAL_BIT) != 0;
}
public void setSynchronized(boolean val) {
if (val)
bits |= SYNCHRONIZED_BIT;
else
bits &= ~SYNCHRONIZED_BIT;
}
public boolean isSynchronized() {
return (bits & SYNCHRONIZED_BIT) != 0;
}
public void setTransient(boolean val) {
if (val)
bits |= TRANSIENT_BIT;
else
bits &= ~TRANSIENT_BIT;
}
public boolean isTransient() {
return (bits & TRANSIENT_BIT) != 0;
}
public void setNative(boolean val) {
if (val)
bits |= NATIVE_BIT;
else
bits &= ~NATIVE_BIT;
}
public boolean isNative() {
return (bits & NATIVE_BIT) != 0;
}
public void setInterface(boolean val) {
if (val)
bits |= INTERFACE_BIT;
else
bits &= ~INTERFACE_BIT;
}
public boolean isInterface() {
return (bits & INTERFACE_BIT) != 0;
}
public void setAbstract(boolean val) {
if (val)
bits |= ABSTRACT_BIT;
else
bits &= ~ABSTRACT_BIT;
}
public boolean isAbstract() {
return (bits & ABSTRACT_BIT) != 0;
}
public void setVolatile(boolean val) {
if (val)
bits |= VOLATILE_BIT;
else
bits &= ~VOLATILE_BIT;
}
public boolean isVolatile() {
return (bits & VOLATILE_BIT) != 0;
}
public String getStringRepresentation()
{
String s = "";
s += (bits & PUBLIC_BIT) != 0 ? "public " : "";
s += (bits & PRIVATE_BIT) != 0 ? "private " : "";
s += (bits & PROTECTED_BIT) != 0 ? "protected " : "";
s += (bits & STATIC_BIT) != 0 ? "static " : "";
s += (bits & FINAL_BIT) != 0 ? "final " : "";
s += (bits & SYNCHRONIZED_BIT) != 0 ? "synchronized " : "";
s += (bits & TRANSIENT_BIT) != 0 ? "transient " : "";
s += (bits & NATIVE_BIT) != 0 ? "native " : "";
s += (bits & INTERFACE_BIT) != 0 ? "interface " : "";
s += (bits & ABSTRACT_BIT) != 0 ? "abstract " : "";
s += (bits & VOLATILE_BIT) != 0 ? "volatile " : "";
return s;
}
private static final int PUBLIC_BIT = 1;
private static final int PRIVATE_BIT = 2;
private static final int PROTECTED_BIT = 4;
private static final int STATIC_BIT = 8;
private static final int FINAL_BIT = 16;
private static final int SYNCHRONIZED_BIT = 32;
private static final int TRANSIENT_BIT = 64;
private static final int NATIVE_BIT = 128;
private static final int INTERFACE_BIT = 256;
private static final int ABSTRACT_BIT = 512;
private static final int VOLATILE_BIT = 1024;
// Currently, the above bits fit into a short. We provide an int here
// for subclasses.
protected int bits;
}
|
package org.perl6.nqp.runtime;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import jline.ConsoleReader;
import org.perl6.nqp.jast2bc.JASTToJVMBytecode;
import org.perl6.nqp.sixmodel.BoolificationSpec;
import org.perl6.nqp.sixmodel.ContainerConfigurer;
import org.perl6.nqp.sixmodel.ContainerSpec;
import org.perl6.nqp.sixmodel.InvocationSpec;
import org.perl6.nqp.sixmodel.REPRRegistry;
import org.perl6.nqp.sixmodel.STable;
import org.perl6.nqp.sixmodel.SerializationContext;
import org.perl6.nqp.sixmodel.SerializationReader;
import org.perl6.nqp.sixmodel.SerializationWriter;
import org.perl6.nqp.sixmodel.SixModelObject;
import org.perl6.nqp.sixmodel.StorageSpec;
import org.perl6.nqp.sixmodel.TypeObject;
import org.perl6.nqp.sixmodel.reprs.CallCaptureInstance;
import org.perl6.nqp.sixmodel.reprs.ContextRef;
import org.perl6.nqp.sixmodel.reprs.ContextRefInstance;
import org.perl6.nqp.sixmodel.reprs.IOHandleInstance;
import org.perl6.nqp.sixmodel.reprs.MultiCacheInstance;
import org.perl6.nqp.sixmodel.reprs.NFA;
import org.perl6.nqp.sixmodel.reprs.NFAInstance;
import org.perl6.nqp.sixmodel.reprs.NFAStateInfo;
import org.perl6.nqp.sixmodel.reprs.P6bigintInstance;
import org.perl6.nqp.sixmodel.reprs.SCRefInstance;
import org.perl6.nqp.sixmodel.reprs.VMArray;
import org.perl6.nqp.sixmodel.reprs.VMArrayInstance;
import org.perl6.nqp.sixmodel.reprs.VMExceptionInstance;
import org.perl6.nqp.sixmodel.reprs.VMHash;
import org.perl6.nqp.sixmodel.reprs.VMHashInstance;
import org.perl6.nqp.sixmodel.reprs.VMIterInstance;
/**
* Contains complex operations that are more involved that the simple ops that the
* JVM makes available.
*/
public final class Ops {
/* I/O opcodes */
public static String print(String v) {
System.out.print(v);
return v;
}
public static String say(String v) {
System.out.println(v);
return v;
}
public static final int STAT_EXISTS = 0;
public static final int STAT_FILESIZE = 1;
public static final int STAT_ISDIR = 2;
public static final int STAT_ISREG = 3;
public static final int STAT_ISDEV = 4;
public static final int STAT_CREATETIME = 5;
public static final int STAT_ACCESSTIME = 6;
public static final int STAT_MODIFYTIME = 7;
public static final int STAT_CHANGETIME = 8;
public static final int STAT_BACKUPTIME = 9;
public static final int STAT_UID = 10;
public static final int STAT_GID = 11;
public static final int STAT_ISLNK = 12;
public static final int STAT_PLATFORM_DEV = -1;
public static final int STAT_PLATFORM_INODE = -2;
public static final int STAT_PLATFORM_MODE = -3;
public static final int STAT_PLATFORM_NLINKS = -4;
public static final int STAT_PLATFORM_DEVTYPE = -5;
public static final int STAT_PLATFORM_BLOCKSIZE = -6;
public static final int STAT_PLATFORM_BLOCKS = -7;
public static long stat(String filename, long status) {
long rval = -1;
switch ((int) status) {
case STAT_EXISTS:
rval = new File(filename).exists() ? 1 : 0;
break;
case STAT_FILESIZE:
rval = new File(filename).length();
break;
case STAT_ISDIR:
try {
rval = (Boolean) Files.getAttribute(Paths.get(filename), "basic:isDirectory") ? 1 : 0;
} catch (Exception e) {
rval = -1;
}
break;
case STAT_ISREG:
try {
rval = (Boolean) Files.getAttribute(Paths.get(filename), "basic:isRegularFile") ? 1 : 0;
} catch (Exception e) {
rval = -1;
}
break;
case STAT_ISDEV:
try {
rval = (Boolean) Files.getAttribute(Paths.get(filename), "basic:isOther") ? 1 : 0;
} catch (Exception e) {
rval = -1;
}
break;
case STAT_CREATETIME:
try {
rval = ((Number) Files.getAttribute(Paths.get(filename), "basic:creationTime")).longValue();
} catch (Exception e) {
rval = -1;
}
break;
case STAT_ACCESSTIME:
try {
rval = ((FileTime) Files.getAttribute(Paths.get(filename), "basic:lastAccessTime")).to(TimeUnit.SECONDS);
} catch (Exception e) {
rval = -1;
}
break;
case STAT_MODIFYTIME:
try {
rval = ((FileTime) Files.getAttribute(Paths.get(filename), "basic:lastModifiedTime")).to(TimeUnit.SECONDS);
} catch (Exception e) {
rval = -1;
}
break;
case STAT_CHANGETIME:
try {
rval = ((FileTime) Files.getAttribute(Paths.get(filename), "unix:ctime")).to(TimeUnit.SECONDS);
} catch (Exception e) {
rval = -1;
}
break;
case STAT_BACKUPTIME:
rval = -1;
break;
case STAT_UID:
try {
rval = ((Number) Files.getAttribute(Paths.get(filename), "unix:uid")).longValue();
} catch (Exception e) {
rval = -1;
}
break;
case STAT_GID:
try {
rval = ((Number) Files.getAttribute(Paths.get(filename), "unix:gid")).longValue();
} catch (Exception e) {
rval = -1;
}
break;
case STAT_ISLNK:
try {
rval = (Boolean) Files.getAttribute(Paths.get(filename), "basic:isSymbolicLink", LinkOption.NOFOLLOW_LINKS) ? 1 : 0;
} catch (Exception e) {
rval = -1;
}
break;
case STAT_PLATFORM_DEV:
try {
rval = ((Number) Files.getAttribute(Paths.get(filename), "unix:dev")).longValue();
} catch (Exception e) {
rval = -1;
}
break;
case STAT_PLATFORM_INODE:
try {
rval = ((Number) Files.getAttribute(Paths.get(filename), "unix:ino")).longValue();
} catch (Exception e) {
rval = -1;
}
break;
case STAT_PLATFORM_MODE:
try {
rval = ((Number) Files.getAttribute(Paths.get(filename), "unix:mode")).longValue();
} catch (Exception e) {
rval = -1;
}
break;
case STAT_PLATFORM_NLINKS:
try {
rval = ((Number) Files.getAttribute(Paths.get(filename), "unix:nlink")).longValue();
} catch (Exception e) {
rval = -1;
}
break;
case STAT_PLATFORM_DEVTYPE:
try {
rval = ((Number) Files.getAttribute(Paths.get(filename), "unix:rdev")).longValue();
} catch (Exception e) {
rval = -1;
}
break;
case STAT_PLATFORM_BLOCKSIZE:
throw new UnsupportedOperationException("STAT_PLATFORM_BLOCKSIZE not supported");
case STAT_PLATFORM_BLOCKS:
throw new UnsupportedOperationException("STAT_PLATFORM_BLOCKS not supported");
default:
break;
}
return rval;
}
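// Example (illustrative):
//   long isDir = Ops.stat("/tmp", Ops.STAT_ISDIR);
// yields 1 for a directory, 0 otherwise, and -1 if the attribute lookup fails.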
public static SixModelObject open(String path, String mode, ThreadContext tc) {
SixModelObject IOType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.ioType;
IOHandleInstance h = (IOHandleInstance)IOType.st.REPR.allocate(tc, IOType.st);
h.initialize(tc);
try {
if (mode.equals("r")) {
h.is = new FileInputStream(path);
}
else if (mode.equals("w")) {
h.os = new FileOutputStream(path);
}
else if (mode.equals("wa")) {
h.os = new FileOutputStream(path, true);
}
else {
die_s("Unhandled file open mode '" + mode + "'", tc);
}
}
catch (FileNotFoundException e) {
die_s(e.getMessage(), tc);
}
return h;
}
public static SixModelObject getstdin(ThreadContext tc) {
SixModelObject IOType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.ioType;
IOHandleInstance h = (IOHandleInstance)IOType.st.REPR.allocate(tc, IOType.st);
h.initialize(tc);
h.is = System.in;
return h;
}
public static SixModelObject getstdout(ThreadContext tc) {
SixModelObject IOType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.ioType;
IOHandleInstance h = (IOHandleInstance)IOType.st.REPR.allocate(tc, IOType.st);
h.initialize(tc);
h.os = System.out;
return h;
}
public static SixModelObject getstderr(ThreadContext tc) {
SixModelObject IOType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.ioType;
IOHandleInstance h = (IOHandleInstance)IOType.st.REPR.allocate(tc, IOType.st);
h.initialize(tc);
h.os = System.err;
return h;
}
public static SixModelObject setencoding(SixModelObject obj, String encoding, ThreadContext tc) {
if (obj instanceof IOHandleInstance) {
IOHandleInstance h = (IOHandleInstance)obj;
if (h.isr != null || h.osw != null)
die_s("Too late to set file handle encoding", tc);
String charset = null;
if (encoding.equals("ascii"))
charset = "US-ASCII";
else if (encoding.equals("iso-8859-1"))
charset = "ISO-8859-1";
else if (encoding.equals("utf8"))
charset = "UTF-8";
else if (encoding.equals("utf16"))
charset = "UTF-16";
else if (encoding.equals("binary"))
charset = "ISO-8859-1"; /* Byte oriented... */
else
die_s("Unsupported encoding " + encoding, tc);
try {
if (h.is != null)
h.isr = new InputStreamReader(h.is, charset);
if (h.os != null)
h.osw = new OutputStreamWriter(h.os, charset);
}
catch (UnsupportedEncodingException e) {
die_s(e.getMessage(), tc);
}
}
else {
die_s("setencoding requires an object with the IOHandle REPR", tc);
}
return obj;
}
public static long tellfh(SixModelObject obj, ThreadContext tc) {
if (obj instanceof IOHandleInstance) {
/* TODO */
return 0;
}
else {
die_s("tellfh requires an object with the IOHandle REPR", tc);
return 0; /* Unreachable */
}
}
public static String printfh(SixModelObject obj, String data, ThreadContext tc) {
if (obj instanceof IOHandleInstance) {
IOHandleInstance h = (IOHandleInstance)obj;
if (h.os == null)
die_s("File handle is not opened for write", tc);
try {
if (h.osw == null)
h.osw = new OutputStreamWriter(h.os, "UTF-8");
h.osw.write(data);
h.osw.flush();
}
catch (IOException e) {
die_s(e.getMessage(), tc);
}
}
else {
die_s("printfh requires an object with the IOHandle REPR", tc);
}
return data;
}
public static String sayfh(SixModelObject obj, String data, ThreadContext tc) {
printfh(obj, data, tc);
printfh(obj, "\n", tc);
return data;
}
public static String readlinefh(SixModelObject obj, ThreadContext tc) {
if (obj instanceof IOHandleInstance) {
IOHandleInstance h = (IOHandleInstance)obj;
if (h.is == null)
die_s("File handle is not opened for read", tc);
try {
if (h.isr == null)
h.isr = new InputStreamReader(h.is, "UTF-8");
if (h.br == null)
h.br = new BufferedReader(h.isr);
String line = h.br.readLine();
if (line == null) {
h.eof = true;
}
return line;
}
catch (IOException e) {
die_s(e.getMessage(), tc);
return null; /* Unreachable */
}
}
else {
die_s("readlinefh requires an object with the IOHandle REPR", tc);
return null; /* Unreachable */
}
}
/* We don't have proper readline support yet. */
public static String readlineintfh(SixModelObject obj, String prompt, ThreadContext tc) {
if (obj instanceof IOHandleInstance) {
IOHandleInstance h = (IOHandleInstance)obj;
if (h.is == null)
die_s("File handle is not opened for read", tc);
try {
if (h.cr == null) {
h.cr = new ConsoleReader(h.is, new OutputStreamWriter(System.out));
}
String line = h.cr.readLine(prompt);
if (line == null) {
h.eof = true;
}
return line;
}
catch (IOException e) {
die_s(e.getMessage(), tc);
return null; /* Unreachable */
}
}
else {
die_s("readlineintfh requires an object with the IOHandle REPR", tc);
return null; /* Unreachable */
}
}
public static String readallfh(SixModelObject obj, ThreadContext tc) {
if (obj instanceof IOHandleInstance) {
IOHandleInstance h = (IOHandleInstance)obj;
if (h.is == null)
die_s("File handle is not opened for read", tc);
try {
if (h.isr == null)
h.isr = new InputStreamReader(h.is, "UTF-8");
if (h.br == null)
h.br = new BufferedReader(h.isr);
StringBuffer data = new StringBuffer();
char[] buf = new char[4096];
int read = 0;
while((read = h.br.read(buf)) != -1)
data.append(String.valueOf(buf, 0, read));
h.eof = true;
return data.toString();
}
catch (IOException e) {
die_s(e.getMessage(), tc);
return null; /* Unreachable */
}
}
else {
die_s("readallfh requires an object with the IOHandle REPR", tc);
return null; /* Unreachable */
}
}
public static long eoffh(SixModelObject obj, ThreadContext tc) {
if (obj instanceof IOHandleInstance) {
IOHandleInstance h = (IOHandleInstance)obj;
if (h.is == null)
die_s("File handle is not opened for read", tc);
return h.eof ? 1 : 0;
}
else {
die_s("eoffh requires an object with the IOHandle REPR", tc);
return 0; /* Unreachable */
}
}
public static SixModelObject closefh(SixModelObject obj, ThreadContext tc) {
if (obj instanceof IOHandleInstance) {
IOHandleInstance h = (IOHandleInstance)obj;
try {
if (h.br != null)
h.br.close();
else if (h.isr != null)
h.isr.close();
else if (h.osw != null)
h.osw.close();
else if (h.is != null)
h.is.close();
else if (h.os != null)
h.os.close();
}
catch (IOException e) {
die_s(e.getMessage(), tc);
}
}
else {
die_s("closefh requires an object with the IOHandle REPR", tc);
}
return obj;
}
public static Set<PosixFilePermission> modeToPosixFilePermission(long mode) {
Set<PosixFilePermission> perms = EnumSet.noneOf(PosixFilePermission.class);
if ((mode & 0001) != 0) perms.add(PosixFilePermission.OTHERS_EXECUTE);
if ((mode & 0002) != 0) perms.add(PosixFilePermission.OTHERS_WRITE);
if ((mode & 0004) != 0) perms.add(PosixFilePermission.OTHERS_READ);
if ((mode & 0010) != 0) perms.add(PosixFilePermission.GROUP_EXECUTE);
if ((mode & 0020) != 0) perms.add(PosixFilePermission.GROUP_WRITE);
if ((mode & 0040) != 0) perms.add(PosixFilePermission.GROUP_READ);
if ((mode & 0100) != 0) perms.add(PosixFilePermission.OWNER_EXECUTE);
if ((mode & 0200) != 0) perms.add(PosixFilePermission.OWNER_WRITE);
if ((mode & 0400) != 0) perms.add(PosixFilePermission.OWNER_READ);
return perms;
}
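// Example: mode 0644 (octal) maps to OWNER_READ, OWNER_WRITE, GROUP_READ
// and OTHERS_READ -- the familiar rw-r--r--.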
public static long chmod(String path, long mode) {
Path path_o;
try {
path_o = Paths.get(path);
}
catch (Exception e) {
return -1;
}
Set<PosixFilePermission> perms = modeToPosixFilePermission(mode);
try {
Files.setPosixFilePermissions(path_o, perms);
}
catch (Exception e) {
return -1;
}
return 0;
}
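// Return convention shared by the file ops here: 0 on success, -1 on error;
// unlink and rmdir additionally return -2 when the target does not exist or
// is not a directory, respectively.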
public static long unlink(String path) {
try {
if(!Files.deleteIfExists(Paths.get(path))) {
return -2;
}
}
catch (IOException e) {
return -1;
}
return 0;
}
public static long rmdir(String path) {
Path path_o = Paths.get(path);
try {
if (!Files.isDirectory(path_o)) {
return -2;
}
Files.delete(path_o);
}
catch (IOException e) {
return -1;
}
return 0;
}
public static String cwd() {
return new File(".").getAbsolutePath();
}
public static long mkdir(String path, long mode) {
try {
Files.createDirectory(Paths.get(path),
PosixFilePermissions.asFileAttribute(modeToPosixFilePermission(mode)));
}
catch (Exception e) {
return -1;
}
return 0;
}
public static long rename(String before, String after) {
Path before_o = Paths.get(before);
Path after_o = Paths.get(after);
try {
Files.move(before_o, after_o);
}
catch (Exception e) {
return -1;
}
return 0;
}
public static long copy(String before, String after) {
Path before_o = Paths.get(before);
Path after_o = Paths.get(after);
try {
Files.copy(before_o, after_o);
}
catch (Exception e) {
return -1;
}
return 0;
}
public static long link(String before, String after) {
Path before_o = Paths.get(before);
Path after_o = Paths.get(after);
try {
Files.createLink(before_o, after_o);
}
catch (Exception e) {
return -1;
}
return 0;
}
public static long symlink(String before, String after) {
Path before_o = Paths.get(before);
Path after_o = Paths.get(after);
try {
Files.createSymbolicLink(before_o, after_o);
}
catch (Exception e) {
return -1;
}
return 0;
}
/* Lexical lookup in current scope. */
public static long getlex_i(CallFrame cf, int i) { return cf.iLex[i]; }
public static double getlex_n(CallFrame cf, int i) { return cf.nLex[i]; }
public static String getlex_s(CallFrame cf, int i) { return cf.sLex[i]; }
public static SixModelObject getlex_o(CallFrame cf, int i) { return cf.oLex[i]; }
/* Lexical binding in current scope. */
public static long bindlex_i(long v, CallFrame cf, int i) { cf.iLex[i] = v; return v; }
public static double bindlex_n(double v, CallFrame cf, int i) { cf.nLex[i] = v; return v; }
public static String bindlex_s(String v, CallFrame cf, int i) { cf.sLex[i] = v; return v; }
public static SixModelObject bindlex_o(SixModelObject v, CallFrame cf, int i) { cf.oLex[i] = v; return v; }
/* Lexical lookup in outer scope. */
public static long getlex_i_si(CallFrame cf, int i, int si) {
    while (si-- > 0)
        cf = cf.outer;
    return cf.iLex[i];
}
public static double getlex_n_si(CallFrame cf, int i, int si) {
    while (si-- > 0)
        cf = cf.outer;
    return cf.nLex[i];
}
public static String getlex_s_si(CallFrame cf, int i, int si) {
    while (si-- > 0)
        cf = cf.outer;
    return cf.sLex[i];
}
public static SixModelObject getlex_o_si(CallFrame cf, int i, int si) {
    while (si-- > 0)
        cf = cf.outer;
    return cf.oLex[i];
}
/* Lexical binding in outer scope. */
public static long bindlex_i_si(long v, CallFrame cf, int i, int si) {
    while (si-- > 0)
        cf = cf.outer;
    cf.iLex[i] = v;
    return v;
}
public static double bindlex_n_si(double v, CallFrame cf, int i, int si) {
    while (si-- > 0)
        cf = cf.outer;
    cf.nLex[i] = v;
    return v;
}
public static String bindlex_s_si(String v, CallFrame cf, int i, int si) {
    while (si-- > 0)
        cf = cf.outer;
    cf.sLex[i] = v;
    return v;
}
public static SixModelObject bindlex_o_si(SixModelObject v, CallFrame cf, int i, int si) {
    while (si-- > 0)
        cf = cf.outer;
    cf.oLex[i] = v;
    return v;
}
/* Lexical lookup by name. */
public static SixModelObject getlex(String name, ThreadContext tc) {
CallFrame curFrame = tc.curFrame;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.oTryGetLexicalIdx(name);
if (found != null)
return curFrame.oLex[found];
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
public static long getlex_i(String name, ThreadContext tc) {
CallFrame curFrame = tc.curFrame;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.iTryGetLexicalIdx(name);
if (found != null)
return curFrame.iLex[found];
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
public static double getlex_n(String name, ThreadContext tc) {
CallFrame curFrame = tc.curFrame;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.nTryGetLexicalIdx(name);
if (found != null)
return curFrame.nLex[found];
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
public static String getlex_s(String name, ThreadContext tc) {
CallFrame curFrame = tc.curFrame;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.sTryGetLexicalIdx(name);
if (found != null)
return curFrame.sLex[found];
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
public static SixModelObject getlexouter(String name, ThreadContext tc) {
CallFrame curFrame = tc.curFrame.outer;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.oTryGetLexicalIdx(name);
if (found != null)
return curFrame.oLex[found];
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
/* Lexical binding by name. */
public static SixModelObject bindlex(String name, SixModelObject value, ThreadContext tc) {
CallFrame curFrame = tc.curFrame;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.oTryGetLexicalIdx(name);
if (found != null)
return curFrame.oLex[found] = value;
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
public static long bindlex_i(String name, long value, ThreadContext tc) {
CallFrame curFrame = tc.curFrame;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.iTryGetLexicalIdx(name);
if (found != null)
return curFrame.iLex[found] = value;
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
public static double bindlex_n(String name, double value, ThreadContext tc) {
CallFrame curFrame = tc.curFrame;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.nTryGetLexicalIdx(name);
if (found != null)
return curFrame.nLex[found] = value;
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
public static String bindlex_s(String name, String value, ThreadContext tc) {
CallFrame curFrame = tc.curFrame;
while (curFrame != null) {
Integer found = curFrame.codeRef.staticInfo.sTryGetLexicalIdx(name);
if (found != null)
return curFrame.sLex[found] = value;
curFrame = curFrame.outer;
}
throw ExceptionHandling.dieInternal(tc, "Lexical '" + name + "' not found");
}
/* Dynamic lexicals. */
public static SixModelObject bindlexdyn(SixModelObject value, String name, ThreadContext tc) {
CallFrame curFrame = tc.curFrame.caller;
while (curFrame != null) {
Integer idx = curFrame.codeRef.staticInfo.oTryGetLexicalIdx(name);
if (idx != null) {
curFrame.oLex[idx] = value;
return value;
}
curFrame = curFrame.caller;
}
throw ExceptionHandling.dieInternal(tc, "Dyanmic variable '" + name + "' not found");
}
public static SixModelObject getlexdyn(String name, ThreadContext tc) {
CallFrame curFrame = tc.curFrame.caller;
while (curFrame != null) {
Integer idx = curFrame.codeRef.staticInfo.oTryGetLexicalIdx(name);
if (idx != null)
return curFrame.oLex[idx];
curFrame = curFrame.caller;
}
return null;
}
/* Context introspection. */
public static SixModelObject ctx(ThreadContext tc) {
SixModelObject ContextRef = tc.gc.ContextRef;
SixModelObject wrap = ContextRef.st.REPR.allocate(tc, ContextRef.st);
((ContextRefInstance)wrap).context = tc.curFrame;
return wrap;
}
public static SixModelObject ctxouter(SixModelObject ctx, ThreadContext tc) {
if (ctx instanceof ContextRefInstance) {
CallFrame outer = ((ContextRefInstance)ctx).context.outer;
if (outer == null)
return null;
SixModelObject ContextRef = tc.gc.ContextRef;
SixModelObject wrap = ContextRef.st.REPR.allocate(tc, ContextRef.st);
((ContextRefInstance)wrap).context = outer;
return wrap;
}
else {
throw ExceptionHandling.dieInternal(tc, "ctxouter requires an operand with REPR ContextRef");
}
}
public static SixModelObject ctxcaller(SixModelObject ctx, ThreadContext tc) {
if (ctx instanceof ContextRefInstance) {
CallFrame caller = ((ContextRefInstance)ctx).context.caller;
if (caller == null)
return null;
SixModelObject ContextRef = tc.gc.ContextRef;
SixModelObject wrap = ContextRef.st.REPR.allocate(tc, ContextRef.st);
((ContextRefInstance)wrap).context = caller;
return wrap;
}
else {
throw ExceptionHandling.dieInternal(tc, "ctxcaller requires an operand with REPR ContextRef");
}
}
public static SixModelObject ctxlexpad(SixModelObject ctx, ThreadContext tc) {
if (ctx instanceof ContextRefInstance) {
            // The context serves happily enough as the lexpad too: it
            // provides the associative part of the REPR API, mapped onto
            // the lexical slots.
return ctx;
}
else {
throw ExceptionHandling.dieInternal(tc, "ctxlexpad requires an operand with REPR ContextRef");
}
}
public static SixModelObject curcode(ThreadContext tc) {
return tc.curFrame.codeRef;
}
public static SixModelObject callercode(ThreadContext tc) {
CallFrame caller = tc.curFrame.caller;
return caller == null ? null : caller.codeRef;
}
public static long lexprimspec(SixModelObject pad, String key, ThreadContext tc) {
if (pad instanceof ContextRefInstance) {
StaticCodeInfo sci = ((ContextRefInstance)pad).context.codeRef.staticInfo;
if (sci.oTryGetLexicalIdx(key) != null) return StorageSpec.BP_NONE;
if (sci.iTryGetLexicalIdx(key) != null) return StorageSpec.BP_INT;
if (sci.nTryGetLexicalIdx(key) != null) return StorageSpec.BP_NUM;
if (sci.sTryGetLexicalIdx(key) != null) return StorageSpec.BP_STR;
throw ExceptionHandling.dieInternal(tc, "Invalid lexical name passed to lexprimspec");
}
else {
throw ExceptionHandling.dieInternal(tc, "lexprimspec requires an operand with REPR ContextRef");
}
}
/* Invocation arity check. */
public static CallSiteDescriptor checkarity(CallFrame cf, CallSiteDescriptor cs, Object[] args, int required, int accepted) {
if (cs.hasFlattening)
cs = cs.explodeFlattening(cf, args);
else
cf.tc.flatArgs = args;
int positionals = cs.numPositionals;
        if (positionals < required || (positionals > accepted && accepted != -1))
throw ExceptionHandling.dieInternal(cf.tc, "Wrong number of arguments passed; expected " +
required + ".." + accepted + ", but got " + positionals);
return cs;
}
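    /* A compiled sub body typically calls checkarity first and then the
     * posparam_ and namedparam_ ops below (a sketch of assumed generated
     * code, not a quote of it):
     *
     *   csd = checkarity(cf, csd, args, 1, 2);            // 1..2 positionals
     *   SixModelObject a = posparam_o(cf, csd, cf.tc.flatArgs, 0);
     *
     * When the callsite has flattening args, the descriptor is exploded
     * into a flat one; the non-flattening branch stores the argument array
     * into tc.flatArgs directly, and explodeFlattening is assumed to do
     * likewise. An accepted count of -1 means "no upper bound". */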
/* Required positional parameter fetching. */
public static SixModelObject posparam_o(CallFrame cf, CallSiteDescriptor cs, Object[] args, int idx) {
switch (cs.argFlags[idx]) {
case CallSiteDescriptor.ARG_OBJ:
return (SixModelObject)args[idx];
case CallSiteDescriptor.ARG_INT:
return box_i((long)args[idx], cf.codeRef.staticInfo.compUnit.hllConfig.intBoxType, cf.tc);
case CallSiteDescriptor.ARG_NUM:
return box_n((double)args[idx], cf.codeRef.staticInfo.compUnit.hllConfig.numBoxType, cf.tc);
case CallSiteDescriptor.ARG_STR:
return box_s((String)args[idx], cf.codeRef.staticInfo.compUnit.hllConfig.strBoxType, cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
public static long posparam_i(CallFrame cf, CallSiteDescriptor cs, Object[] args, int idx) {
switch (cs.argFlags[idx]) {
case CallSiteDescriptor.ARG_INT:
return (long)args[idx];
case CallSiteDescriptor.ARG_NUM:
return (long)(double)args[idx];
case CallSiteDescriptor.ARG_STR:
return coerce_s2i((String)args[idx]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[idx]).get_int(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
public static double posparam_n(CallFrame cf, CallSiteDescriptor cs, Object[] args, int idx) {
switch (cs.argFlags[idx]) {
case CallSiteDescriptor.ARG_NUM:
return (double)args[idx];
case CallSiteDescriptor.ARG_INT:
return (double)(long)args[idx];
case CallSiteDescriptor.ARG_STR:
return coerce_s2n((String)args[idx]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[idx]).get_num(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
public static String posparam_s(CallFrame cf, CallSiteDescriptor cs, Object[] args, int idx) {
switch (cs.argFlags[idx]) {
case CallSiteDescriptor.ARG_STR:
return (String)args[idx];
case CallSiteDescriptor.ARG_INT:
return coerce_i2s((long)args[idx]);
case CallSiteDescriptor.ARG_NUM:
return coerce_n2s((double)args[idx]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[idx]).get_str(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
/* Optional positional parameter fetching. */
public static SixModelObject posparam_opt_o(CallFrame cf, CallSiteDescriptor cs, Object[] args, int idx) {
if (idx < cs.numPositionals) {
cf.tc.lastParameterExisted = 1;
return posparam_o(cf, cs, args, idx);
}
else {
cf.tc.lastParameterExisted = 0;
return null;
}
}
public static long posparam_opt_i(CallFrame cf, CallSiteDescriptor cs, Object[] args, int idx) {
if (idx < cs.numPositionals) {
cf.tc.lastParameterExisted = 1;
return posparam_i(cf, cs, args, idx);
}
else {
cf.tc.lastParameterExisted = 0;
return 0;
}
}
public static double posparam_opt_n(CallFrame cf, CallSiteDescriptor cs, Object[] args, int idx) {
if (idx < cs.numPositionals) {
cf.tc.lastParameterExisted = 1;
return posparam_n(cf, cs, args, idx);
}
else {
cf.tc.lastParameterExisted = 0;
return 0.0;
}
}
public static String posparam_opt_s(CallFrame cf, CallSiteDescriptor cs, Object[] args, int idx) {
if (idx < cs.numPositionals) {
cf.tc.lastParameterExisted = 1;
return posparam_s(cf, cs, args, idx);
}
else {
cf.tc.lastParameterExisted = 0;
return null;
}
}
/* Slurpy positional parameter. */
public static SixModelObject posslurpy(ThreadContext tc, CallFrame cf, CallSiteDescriptor cs, Object[] args, int fromIdx) {
/* Create result. */
HLLConfig hllConfig = cf.codeRef.staticInfo.compUnit.hllConfig;
SixModelObject resType = hllConfig.slurpyArrayType;
SixModelObject result = resType.st.REPR.allocate(tc, resType.st);
result.initialize(tc);
/* Populate it. */
for (int i = fromIdx; i < cs.numPositionals; i++) {
switch (cs.argFlags[i]) {
case CallSiteDescriptor.ARG_OBJ:
result.push_boxed(tc, (SixModelObject)args[i]);
break;
case CallSiteDescriptor.ARG_INT:
result.push_boxed(tc, box_i((long)args[i], hllConfig.intBoxType, tc));
break;
case CallSiteDescriptor.ARG_NUM:
result.push_boxed(tc, box_n((double)args[i], hllConfig.numBoxType, tc));
break;
case CallSiteDescriptor.ARG_STR:
result.push_boxed(tc, box_s((String)args[i], hllConfig.strBoxType, tc));
break;
}
}
return result;
}
/* Required named parameter getting. */
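    /* The callsite's nameMap packs everything needed here into a single
     * Integer per name: (argIndex << 3) | ARG_* type flag. Hence
     * `lookup & 7` recovers the type flag and `lookup >> 3` the index into
     * args. A per-frame working copy of the map is made lazily, and entries
     * are removed as parameters claim them, so namedslurpy (further down)
     * only sees whatever names nothing else consumed. */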
public static SixModelObject namedparam_o(CallFrame cf, CallSiteDescriptor cs, Object[] args, String name) {
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
Integer lookup = cf.workingNameMap.remove(name);
if (lookup != null) {
switch (lookup & 7) {
case CallSiteDescriptor.ARG_OBJ:
return (SixModelObject)args[lookup >> 3];
case CallSiteDescriptor.ARG_INT:
return box_i((long)args[lookup >> 3], cf.codeRef.staticInfo.compUnit.hllConfig.intBoxType, cf.tc);
case CallSiteDescriptor.ARG_NUM:
return box_n((double)args[lookup >> 3], cf.codeRef.staticInfo.compUnit.hllConfig.numBoxType, cf.tc);
case CallSiteDescriptor.ARG_STR:
return box_s((String)args[lookup >> 3], cf.codeRef.staticInfo.compUnit.hllConfig.strBoxType, cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
else
throw ExceptionHandling.dieInternal(cf.tc, "Required named argument '" + name + "' not passed");
}
public static long namedparam_i(CallFrame cf, CallSiteDescriptor cs, Object[] args, String name) {
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
Integer lookup = cf.workingNameMap.remove(name);
if (lookup != null) {
switch ((lookup & 7)) {
case CallSiteDescriptor.ARG_INT:
return (long)args[lookup >> 3];
case CallSiteDescriptor.ARG_NUM:
return (long)(double)args[lookup >> 3];
case CallSiteDescriptor.ARG_STR:
return coerce_s2i((String)args[lookup >> 3]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[lookup >> 3]).get_int(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
else
throw ExceptionHandling.dieInternal(cf.tc, "Required named argument '" + name + "' not passed");
}
public static double namedparam_n(CallFrame cf, CallSiteDescriptor cs, Object[] args, String name) {
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
Integer lookup = cf.workingNameMap.remove(name);
if (lookup != null) {
switch ((lookup & 7)) {
case CallSiteDescriptor.ARG_NUM:
return (double)args[lookup >> 3];
case CallSiteDescriptor.ARG_INT:
return (double)(long)args[lookup >> 3];
case CallSiteDescriptor.ARG_STR:
return coerce_s2n((String)args[lookup >> 3]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[lookup >> 3]).get_num(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
else
throw ExceptionHandling.dieInternal(cf.tc, "Required named argument '" + name + "' not passed");
}
public static String namedparam_s(CallFrame cf, CallSiteDescriptor cs, Object[] args, String name) {
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
Integer lookup = cf.workingNameMap.remove(name);
if (lookup != null) {
switch ((lookup & 7)) {
case CallSiteDescriptor.ARG_STR:
return (String)args[lookup >> 3];
case CallSiteDescriptor.ARG_INT:
return coerce_i2s((long)args[lookup >> 3]);
case CallSiteDescriptor.ARG_NUM:
return coerce_n2s((double)args[lookup >> 3]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[lookup >> 3]).get_str(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
else
throw ExceptionHandling.dieInternal(cf.tc, "Required named argument '" + name + "' not passed");
}
/* Optional named parameter getting. */
public static SixModelObject namedparam_opt_o(CallFrame cf, CallSiteDescriptor cs, Object[] args, String name) {
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
Integer lookup = cf.workingNameMap.remove(name);
if (lookup != null) {
cf.tc.lastParameterExisted = 1;
switch (lookup & 7) {
case CallSiteDescriptor.ARG_OBJ:
return (SixModelObject)args[lookup >> 3];
case CallSiteDescriptor.ARG_INT:
return box_i((long)args[lookup >> 3], cf.codeRef.staticInfo.compUnit.hllConfig.intBoxType, cf.tc);
case CallSiteDescriptor.ARG_NUM:
return box_n((double)args[lookup >> 3], cf.codeRef.staticInfo.compUnit.hllConfig.numBoxType, cf.tc);
case CallSiteDescriptor.ARG_STR:
return box_s((String)args[lookup >> 3], cf.codeRef.staticInfo.compUnit.hllConfig.strBoxType, cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
else {
cf.tc.lastParameterExisted = 0;
return null;
}
}
public static long namedparam_opt_i(CallFrame cf, CallSiteDescriptor cs, Object[] args, String name) {
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
Integer lookup = cf.workingNameMap.remove(name);
if (lookup != null) {
cf.tc.lastParameterExisted = 1;
switch ((lookup & 7)) {
case CallSiteDescriptor.ARG_INT:
return (long)args[lookup >> 3];
case CallSiteDescriptor.ARG_NUM:
return (long)(double)args[lookup >> 3];
case CallSiteDescriptor.ARG_STR:
return coerce_s2i((String)args[lookup >> 3]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[lookup >> 3]).get_int(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
else {
cf.tc.lastParameterExisted = 0;
return 0;
}
}
public static double namedparam_opt_n(CallFrame cf, CallSiteDescriptor cs, Object[] args, String name) {
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
Integer lookup = cf.workingNameMap.remove(name);
if (lookup != null) {
cf.tc.lastParameterExisted = 1;
switch ((lookup & 7)) {
case CallSiteDescriptor.ARG_NUM:
return (double)args[lookup >> 3];
case CallSiteDescriptor.ARG_INT:
return (double)(long)args[lookup >> 3];
case CallSiteDescriptor.ARG_STR:
return coerce_s2n((String)args[lookup >> 3]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[lookup >> 3]).get_num(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
else {
cf.tc.lastParameterExisted = 0;
return 0.0;
}
}
public static String namedparam_opt_s(CallFrame cf, CallSiteDescriptor cs, Object[] args, String name) {
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
Integer lookup = cf.workingNameMap.remove(name);
if (lookup != null) {
cf.tc.lastParameterExisted = 1;
switch ((lookup & 7)) {
case CallSiteDescriptor.ARG_STR:
return (String)args[lookup >> 3];
case CallSiteDescriptor.ARG_INT:
return coerce_i2s((long)args[lookup >> 3]);
case CallSiteDescriptor.ARG_NUM:
return coerce_n2s((double)args[lookup >> 3]);
case CallSiteDescriptor.ARG_OBJ:
return ((SixModelObject)args[lookup >> 3]).get_str(cf.tc);
default:
throw ExceptionHandling.dieInternal(cf.tc, "Error in argument processing");
}
}
else {
cf.tc.lastParameterExisted = 0;
return null;
}
}
/* Slurpy named parameter. */
public static SixModelObject namedslurpy(ThreadContext tc, CallFrame cf, CallSiteDescriptor cs, Object[] args) {
/* Create result. */
HLLConfig hllConfig = cf.codeRef.staticInfo.compUnit.hllConfig;
SixModelObject resType = hllConfig.slurpyHashType;
SixModelObject result = resType.st.REPR.allocate(tc, resType.st);
result.initialize(tc);
/* Populate it. */
if (cf.workingNameMap == null)
cf.workingNameMap = new HashMap<String, Integer>(cs.nameMap);
for (String name : cf.workingNameMap.keySet()) {
Integer lookup = cf.workingNameMap.get(name);
switch (lookup & 7) {
case CallSiteDescriptor.ARG_OBJ:
result.bind_key_boxed(tc, name, (SixModelObject)args[lookup >> 3]);
break;
case CallSiteDescriptor.ARG_INT:
result.bind_key_boxed(tc, name, box_i((long)args[lookup >> 3], hllConfig.intBoxType, tc));
break;
case CallSiteDescriptor.ARG_NUM:
result.bind_key_boxed(tc, name, box_n((double)args[lookup >> 3], hllConfig.numBoxType, tc));
break;
case CallSiteDescriptor.ARG_STR:
result.bind_key_boxed(tc, name, box_s((String)args[lookup >> 3], hllConfig.strBoxType, tc));
break;
}
}
return result;
}
/* Return value setting. */
public static void return_o(SixModelObject v, CallFrame cf) {
CallFrame caller = cf.caller;
if (caller != null) {
caller.oRet = v;
caller.retType = CallFrame.RET_OBJ;
}
}
public static void return_i(long v, CallFrame cf) {
CallFrame caller = cf.caller;
if (caller != null) {
caller.iRet = v;
caller.retType = CallFrame.RET_INT;
}
}
public static void return_n(double v, CallFrame cf) {
CallFrame caller = cf.caller;
if (caller != null) {
caller.nRet = v;
caller.retType = CallFrame.RET_NUM;
}
}
public static void return_s(String v, CallFrame cf) {
CallFrame caller = cf.caller;
if (caller != null) {
caller.sRet = v;
caller.retType = CallFrame.RET_STR;
}
}
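    /* The return protocol: a callee stores its result into the *caller's*
     * typed return slot and tags which slot in retType; once the invocation
     * returns, the caller reads it back with the result_* ops below, e.g.
     *
     *   invokeDirect(tc, code, emptyCallSite, emptyArgList);
     *   SixModelObject res = result_o(tc.curFrame);
     */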
/* Get returned result. */
public static SixModelObject result_o(CallFrame cf) {
switch (cf.retType) {
case CallFrame.RET_INT:
return box_i(cf.iRet, cf.codeRef.staticInfo.compUnit.hllConfig.intBoxType, cf.tc);
case CallFrame.RET_NUM:
return box_n(cf.nRet, cf.codeRef.staticInfo.compUnit.hllConfig.numBoxType, cf.tc);
case CallFrame.RET_STR:
return box_s(cf.sRet, cf.codeRef.staticInfo.compUnit.hllConfig.strBoxType, cf.tc);
default:
return cf.oRet;
}
}
public static long result_i(CallFrame cf) {
if (cf.retType == CallFrame.RET_INT)
return cf.iRet;
throw ExceptionHandling.dieInternal(cf.tc, "Return value coercion NYI");
}
public static double result_n(CallFrame cf) {
if (cf.retType == CallFrame.RET_NUM)
return cf.nRet;
throw ExceptionHandling.dieInternal(cf.tc, "Return value coercion NYI");
}
public static String result_s(CallFrame cf) {
if (cf.retType == CallFrame.RET_STR)
return cf.sRet;
throw ExceptionHandling.dieInternal(cf.tc, "Return value coercion NYI");
}
/* Capture related operations. */
public static SixModelObject usecapture(ThreadContext tc, CallSiteDescriptor cs, Object[] args) {
CallCaptureInstance cc = tc.savedCC;
cc.descriptor = cs;
cc.args = args.clone();
return cc;
}
public static SixModelObject savecapture(ThreadContext tc, CallSiteDescriptor cs, Object[] args) {
SixModelObject CallCapture = tc.gc.CallCapture;
CallCaptureInstance cc = (CallCaptureInstance)CallCapture.st.REPR.allocate(tc, CallCapture.st);
cc.descriptor = cs;
cc.args = args.clone();
return cc;
}
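    /* The difference between the two ops above: usecapture recycles a
     * single per-thread CallCaptureInstance (tc.savedCC), so its result is
     * only valid until the next usecapture on that thread, whereas
     * savecapture allocates a fresh object that may be kept indefinitely. */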
public static long captureposelems(SixModelObject obj, ThreadContext tc) {
if (obj instanceof CallCaptureInstance)
return ((CallCaptureInstance)obj).descriptor.numPositionals;
else
throw ExceptionHandling.dieInternal(tc, "captureposelems requires a CallCapture");
}
public static SixModelObject captureposarg(SixModelObject obj, long idx, ThreadContext tc) {
if (obj instanceof CallCaptureInstance) {
CallCaptureInstance cc = (CallCaptureInstance)obj;
int i = (int)idx;
switch (cc.descriptor.argFlags[i]) {
case CallSiteDescriptor.ARG_OBJ:
return (SixModelObject)cc.args[i];
case CallSiteDescriptor.ARG_INT:
return box_i((long)cc.args[i],
tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.intBoxType, tc);
case CallSiteDescriptor.ARG_NUM:
return box_n((double)cc.args[i],
tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.numBoxType, tc);
case CallSiteDescriptor.ARG_STR:
return box_s((String)cc.args[i],
tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.strBoxType, tc);
default:
throw ExceptionHandling.dieInternal(tc, "Invalid positional argument access from capture");
}
}
else {
throw ExceptionHandling.dieInternal(tc, "captureposarg requires a CallCapture");
}
}
public static long captureexistsnamed(SixModelObject obj, String name, ThreadContext tc) {
if (obj instanceof CallCaptureInstance) {
CallCaptureInstance cc = (CallCaptureInstance)obj;
return cc.descriptor.nameMap.containsKey(name) ? 1 : 0;
}
else {
throw ExceptionHandling.dieInternal(tc, "capturehasnameds requires a CallCapture");
}
}
public static long capturehasnameds(SixModelObject obj, ThreadContext tc) {
if (obj instanceof CallCaptureInstance) {
CallCaptureInstance cc = (CallCaptureInstance)obj;
return cc.descriptor.names == null ? 0 : 1;
}
else {
throw ExceptionHandling.dieInternal(tc, "capturehasnameds requires a CallCapture");
}
}
public static long captureposprimspec(SixModelObject obj, long idx, ThreadContext tc) {
if (obj instanceof CallCaptureInstance) {
CallCaptureInstance cc = (CallCaptureInstance)obj;
switch (cc.descriptor.argFlags[(int)idx]) {
case CallSiteDescriptor.ARG_INT:
return StorageSpec.BP_INT;
case CallSiteDescriptor.ARG_NUM:
return StorageSpec.BP_NUM;
case CallSiteDescriptor.ARG_STR:
return StorageSpec.BP_STR;
default:
return StorageSpec.BP_NONE;
}
}
else {
throw ExceptionHandling.dieInternal(tc, "captureposarg requires a CallCapture");
}
}
/* Invocation. */
public static final CallSiteDescriptor emptyCallSite = new CallSiteDescriptor(new byte[0], null);
public static final Object[] emptyArgList = new Object[0];
public static final CallSiteDescriptor invocantCallSite = new CallSiteDescriptor(new byte[] { CallSiteDescriptor.ARG_OBJ }, null);
public static final CallSiteDescriptor storeCallSite = new CallSiteDescriptor(new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null);
public static final CallSiteDescriptor findmethCallSite = new CallSiteDescriptor(new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_STR }, null);
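    /* Shared callsite descriptors for the fixed call shapes the ops
     * themselves need: no arguments, a single object invocant (used by
     * istrue's method mode and the smart coercions), object+object, and
     * the (how, invocant, name) shape findmethod uses to call find_method
     * on a HOW. */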
public static void invoke(SixModelObject invokee, int callsiteIndex, Object[] args, ThreadContext tc) throws Exception {
        // If it's a lexotic, throw the corresponding control exception right away.
if (invokee instanceof Lexotic) {
LexoticException throwee = tc.theLexotic;
throwee.target = ((Lexotic)invokee).target;
CallSiteDescriptor csd = tc.curFrame.codeRef.staticInfo.compUnit.callSites[callsiteIndex];
switch (csd.argFlags[0]) {
case CallSiteDescriptor.ARG_OBJ:
throwee.payload = (SixModelObject)args[0];
break;
case CallSiteDescriptor.ARG_INT:
throwee.payload = box_i((long)args[0],
tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.intBoxType, tc);
break;
case CallSiteDescriptor.ARG_NUM:
throwee.payload = box_n((double)args[0],
tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.numBoxType, tc);
break;
case CallSiteDescriptor.ARG_STR:
throwee.payload = box_s((String)args[0],
tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.strBoxType, tc);
break;
default:
throw ExceptionHandling.dieInternal(tc, "Invalid lexotic invocation argument");
}
throw throwee;
}
// TODO Find a smarter way to do this without all the pointer chasing.
if (callsiteIndex >= 0)
invokeDirect(tc, invokee, tc.curFrame.codeRef.staticInfo.compUnit.callSites[callsiteIndex], args);
else
invokeDirect(tc, invokee, emptyCallSite, args);
}
public static void invokeArgless(ThreadContext tc, SixModelObject invokee) {
invokeDirect(tc, invokee, emptyCallSite, new Object[0]);
}
public static void invokeMain(ThreadContext tc, SixModelObject invokee, String prog, String[] argv) {
/* Build argument list from argv. */
SixModelObject Str = ((CodeRef)invokee).staticInfo.compUnit.hllConfig.strBoxType;
Object[] args = new Object[argv.length + 1];
byte[] callsite = new byte[argv.length + 1];
args[0] = box_s(prog, Str, tc);
callsite[0] = CallSiteDescriptor.ARG_OBJ;
for (int i = 0; i < argv.length; i++) {
args[i + 1] = box_s(argv[i], Str, tc);
callsite[i + 1] = CallSiteDescriptor.ARG_OBJ;
}
/* Invoke with the descriptor and arg list. */
invokeDirect(tc, invokee, new CallSiteDescriptor(callsite, null), args);
}
public static void invokeDirect(ThreadContext tc, SixModelObject invokee, CallSiteDescriptor csd, Object[] args) {
        // Get the code ref to invoke.
CodeRef cr;
if (invokee instanceof CodeRef) {
cr = (CodeRef)invokee;
}
else {
InvocationSpec is = invokee.st.InvocationSpec;
if (is == null)
throw ExceptionHandling.dieInternal(tc, "Can not invoke this object");
if (is.ClassHandle != null)
cr = (CodeRef)invokee.get_attribute_boxed(tc, is.ClassHandle, is.AttrName, is.Hint);
else
cr = (CodeRef)is.InvocationHandler;
}
try {
// Do the invocation.
cr.staticInfo.mh.invokeExact(tc, cr, csd, args);
}
catch (ControlException e) {
throw e;
}
catch (Throwable e) {
ExceptionHandling.dieInternal(tc, e);
}
}
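    /* invokeDirect resolves a plain CodeRef directly; for anything else it
     * consults the type's InvocationSpec, fetching the code object either
     * from an attribute (ClassHandle/AttrName/Hint) or from a fixed
     * InvocationHandler, as installed by setinvokespec further down. */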
public static SixModelObject invokewithcapture(SixModelObject invokee, SixModelObject capture, ThreadContext tc) throws Exception {
if (capture instanceof CallCaptureInstance) {
CallCaptureInstance cc = (CallCaptureInstance)capture;
invokeDirect(tc, invokee, cc.descriptor, cc.args);
return result_o(tc.curFrame);
}
else {
throw ExceptionHandling.dieInternal(tc, "invokewithcapture requires a CallCapture");
}
}
/* Lexotic. */
public static SixModelObject lexotic(long target) {
Lexotic res = new Lexotic();
res.target = target;
return res;
}
/* Multi-dispatch cache. */
public static SixModelObject multicacheadd(SixModelObject cache, SixModelObject capture, SixModelObject result, ThreadContext tc) {
if (!(cache instanceof MultiCacheInstance))
cache = tc.gc.MultiCache.st.REPR.allocate(tc, tc.gc.MultiCache.st);
((MultiCacheInstance)cache).add((CallCaptureInstance)capture, result);
return cache;
}
public static SixModelObject multicachefind(SixModelObject cache, SixModelObject capture, ThreadContext tc) {
if (cache instanceof MultiCacheInstance)
return ((MultiCacheInstance)cache).lookup((CallCaptureInstance)capture);
else
return null;
}
/* Basic 6model operations. */
public static SixModelObject what(SixModelObject o) {
return o.st.WHAT;
}
public static SixModelObject how(SixModelObject o) {
return o.st.HOW;
}
public static SixModelObject who(SixModelObject o) {
return o.st.WHO;
}
public static long where(SixModelObject o) {
return o.hashCode();
}
public static SixModelObject setwho(SixModelObject o, SixModelObject who) {
o.st.WHO = who;
return o;
}
public static SixModelObject rebless(SixModelObject obj, SixModelObject newType, ThreadContext tc) {
if (obj.st != newType.st)
obj.st.REPR.change_type(tc, obj, newType);
return obj;
}
public static SixModelObject create(SixModelObject obj, ThreadContext tc) {
SixModelObject res = obj.st.REPR.allocate(tc, obj.st);
res.initialize(tc);
return res;
}
public static SixModelObject clone(SixModelObject obj, ThreadContext tc) {
return obj.clone(tc);
}
public static long isconcrete(SixModelObject obj, ThreadContext tc) {
return obj == null || obj instanceof TypeObject ? 0 : 1;
}
public static SixModelObject knowhow(ThreadContext tc) {
return tc.gc.KnowHOW;
}
public static SixModelObject knowhowattr(ThreadContext tc) {
return tc.gc.KnowHOWAttribute;
}
public static SixModelObject bootint(ThreadContext tc) {
return tc.gc.BOOTInt;
}
public static SixModelObject bootnum(ThreadContext tc) {
return tc.gc.BOOTNum;
}
public static SixModelObject bootstr(ThreadContext tc) {
return tc.gc.BOOTStr;
}
public static SixModelObject bootarray(ThreadContext tc) {
return tc.gc.BOOTArray;
}
public static SixModelObject bootintarray(ThreadContext tc) {
return tc.gc.BOOTIntArray;
}
public static SixModelObject bootnumarray(ThreadContext tc) {
return tc.gc.BOOTNumArray;
}
public static SixModelObject bootstrarray(ThreadContext tc) {
return tc.gc.BOOTStrArray;
}
public static SixModelObject boothash(ThreadContext tc) {
return tc.gc.BOOTHash;
}
public static SixModelObject hlllist(ThreadContext tc) {
return tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.listType;
}
public static SixModelObject hllhash(ThreadContext tc) {
return tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.hashType;
}
public static SixModelObject findmethod(ThreadContext tc, SixModelObject invocant, String name) {
        Map<String, SixModelObject> cache = invocant.st.MethodCache;
        SixModelObject meth = cache == null ? null : cache.get(name);
if (meth == null)
throw ExceptionHandling.dieInternal(tc, "Method '" + name + "' not found");
return meth;
}
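    /* Note there are two findmethod variants: the one above is a
     * cache-only lookup that dies on a miss, while the one below falls
     * back to invoking find_method on the HOW when the cache is absent or
     * marked non-authoritative. */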
public static SixModelObject findmethod(SixModelObject invocant, String name, ThreadContext tc) {
Map<String, SixModelObject> cache = invocant.st.MethodCache;
/* Try the by-name method cache, if the HOW published one. */
if (cache != null) {
SixModelObject found = cache.get(name);
if (found != null)
return found;
if ((invocant.st.ModeFlags & STable.METHOD_CACHE_AUTHORITATIVE) != 0)
return null;
}
/* Otherwise delegate to the HOW. */
SixModelObject how = invocant.st.HOW;
SixModelObject find_method = findmethod(how, "find_method", tc);
invokeDirect(tc, find_method, findmethCallSite,
new Object[] { how, invocant, name });
return result_o(tc.curFrame);
}
public static long can(SixModelObject invocant, String name, ThreadContext tc) {
return findmethod(invocant, name, tc) == null ? 0 : 1;
}
public static long eqaddr(SixModelObject a, SixModelObject b) {
return a == b ? 1 : 0;
}
public static long isnull(SixModelObject obj) {
return obj == null ? 1 : 0;
}
public static long isnull_s(String str) {
return str == null ? 1 : 0;
}
public static String reprname(SixModelObject obj) {
return obj.st.REPR.name;
}
public static SixModelObject newtype(SixModelObject how, String reprname, ThreadContext tc) {
return REPRRegistry.getByName(reprname).type_object_for(tc, how);
}
public static SixModelObject composetype(SixModelObject obj, SixModelObject reprinfo, ThreadContext tc) {
obj.st.REPR.compose(tc, obj.st, reprinfo);
return obj;
}
public static SixModelObject setmethcache(SixModelObject obj, SixModelObject meths, ThreadContext tc) {
SixModelObject iter = iter(meths, tc);
HashMap<String, SixModelObject> cache = new HashMap<String, SixModelObject>();
while (istrue(iter, tc) != 0) {
SixModelObject cur = iter.shift_boxed(tc);
cache.put(iterkey_s(cur, tc), iterval(cur, tc));
}
obj.st.MethodCache = cache;
return obj;
}
public static SixModelObject setmethcacheauth(SixModelObject obj, long flag, ThreadContext tc) {
int newFlags = obj.st.ModeFlags & (~STable.METHOD_CACHE_AUTHORITATIVE);
if (flag != 0)
newFlags = newFlags | STable.METHOD_CACHE_AUTHORITATIVE;
obj.st.ModeFlags = newFlags;
return obj;
}
public static SixModelObject settypecache(SixModelObject obj, SixModelObject types, ThreadContext tc) {
long elems = types.elems(tc);
SixModelObject[] cache = new SixModelObject[(int)elems];
for (long i = 0; i < elems; i++)
cache[(int)i] = types.at_pos_boxed(tc, i);
obj.st.TypeCheckCache = cache;
return obj;
}
public static SixModelObject settypecheckmode(SixModelObject obj, long mode, ThreadContext tc) {
obj.st.ModeFlags = (int)mode |
(obj.st.ModeFlags & (~STable.TYPE_CHECK_CACHE_FLAG_MASK));
return obj;
}
public static long objprimspec(SixModelObject obj, ThreadContext tc) {
return obj.st.REPR.get_storage_spec(tc, obj.st).boxed_primitive;
}
public static SixModelObject setinvokespec(SixModelObject obj, SixModelObject ch,
String name, SixModelObject invocationHandler, ThreadContext tc) {
InvocationSpec is = new InvocationSpec();
is.ClassHandle = ch;
is.AttrName = name;
is.Hint = STable.NO_HINT;
is.InvocationHandler = invocationHandler;
obj.st.InvocationSpec = is;
return obj;
}
public static long isinvokable(SixModelObject obj, ThreadContext tc) {
return obj instanceof CodeRef || obj.st.InvocationSpec != null ? 1 : 0;
}
public static long istype(SixModelObject obj, SixModelObject type, ThreadContext tc) {
/* Null always type checks false. */
if (obj == null)
return 0;
int typeCheckMode = type.st.ModeFlags & STable.TYPE_CHECK_CACHE_FLAG_MASK;
SixModelObject[] cache = obj.st.TypeCheckCache;
if (cache != null) {
/* We have the cache, so just look for the type object we
* want to be in there. */
for (int i = 0; i < cache.length; i++)
if (cache[i] == type)
return 1;
/* If the type check cache is definitive, we're done. */
if ((typeCheckMode & STable.TYPE_CHECK_CACHE_THEN_METHOD) == 0 &&
(typeCheckMode & STable.TYPE_CHECK_NEEDS_ACCEPTS) == 0)
return 0;
}
/* If we get here, need to call .^type_check on the value we're
* checking. */
if (cache == null || (typeCheckMode & STable.TYPE_CHECK_CACHE_THEN_METHOD) != 0) {
/* TODO: Implement this. */
return 0;
}
/* If the flag to call .accepts_type on the target value is set, do so. */
if ((typeCheckMode & STable.TYPE_CHECK_NEEDS_ACCEPTS) != 0) {
throw new RuntimeException("Type accepts method fallback NYI");
}
/* If we get here, type check failed. */
return 0;
}
/* Box/unbox operations. */
public static SixModelObject box_i(long value, SixModelObject type, ThreadContext tc) {
SixModelObject res = type.st.REPR.allocate(tc, type.st);
res.initialize(tc);
res.set_int(tc, value);
return res;
}
public static SixModelObject box_n(double value, SixModelObject type, ThreadContext tc) {
SixModelObject res = type.st.REPR.allocate(tc, type.st);
res.initialize(tc);
res.set_num(tc, value);
return res;
}
public static SixModelObject box_s(String value, SixModelObject type, ThreadContext tc) {
SixModelObject res = type.st.REPR.allocate(tc, type.st);
res.initialize(tc);
res.set_str(tc, value);
return res;
}
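    /* These boxing ops are the building blocks used throughout argument
     * and return-value handling; e.g. boxing a native long into the
     * current HLL's int box type:
     *
     *   SixModelObject boxed = box_i(42,
     *       tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.intBoxType, tc);
     */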
public static long unbox_i(SixModelObject obj, ThreadContext tc) {
return obj.get_int(tc);
}
public static double unbox_n(SixModelObject obj, ThreadContext tc) {
return obj.get_num(tc);
}
public static String unbox_s(SixModelObject obj, ThreadContext tc) {
return obj.get_str(tc);
}
public static long isint(SixModelObject obj, ThreadContext tc) {
StorageSpec ss = obj.st.REPR.get_storage_spec(tc, obj.st);
return (ss.can_box & StorageSpec.CAN_BOX_INT) == 0 ? 0 : 1;
}
public static long isnum(SixModelObject obj, ThreadContext tc) {
StorageSpec ss = obj.st.REPR.get_storage_spec(tc, obj.st);
return (ss.can_box & StorageSpec.CAN_BOX_NUM) == 0 ? 0 : 1;
}
public static long isstr(SixModelObject obj, ThreadContext tc) {
StorageSpec ss = obj.st.REPR.get_storage_spec(tc, obj.st);
return (ss.can_box & StorageSpec.CAN_BOX_STR) == 0 ? 0 : 1;
}
/* Attribute operations. */
public static SixModelObject getattr(SixModelObject obj, SixModelObject ch, String name, ThreadContext tc) {
return obj.get_attribute_boxed(tc, ch, name, STable.NO_HINT);
}
public static long getattr_i(SixModelObject obj, SixModelObject ch, String name, ThreadContext tc) {
obj.get_attribute_native(tc, ch, name, STable.NO_HINT);
if (tc.native_type == ThreadContext.NATIVE_INT)
return tc.native_i;
else
throw ExceptionHandling.dieInternal(tc, "Attribute '" + name + "' is not a native int");
}
public static double getattr_n(SixModelObject obj, SixModelObject ch, String name, ThreadContext tc) {
obj.get_attribute_native(tc, ch, name, STable.NO_HINT);
if (tc.native_type == ThreadContext.NATIVE_NUM)
return tc.native_n;
else
throw ExceptionHandling.dieInternal(tc, "Attribute '" + name + "' is not a native num");
}
public static String getattr_s(SixModelObject obj, SixModelObject ch, String name, ThreadContext tc) {
obj.get_attribute_native(tc, ch, name, STable.NO_HINT);
if (tc.native_type == ThreadContext.NATIVE_STR)
return tc.native_s;
else
throw ExceptionHandling.dieInternal(tc, "Attribute '" + name + "' is not a native str");
}
public static SixModelObject bindattr(SixModelObject obj, SixModelObject ch, String name, SixModelObject value, ThreadContext tc) {
obj.bind_attribute_boxed(tc, ch, name, STable.NO_HINT, value);
return value;
}
public static long bindattr_i(SixModelObject obj, SixModelObject ch, String name, long value, ThreadContext tc) {
tc.native_i = value;
obj.bind_attribute_native(tc, ch, name, STable.NO_HINT);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "Attribute '" + name + "' is not a native int");
return value;
}
public static double bindattr_n(SixModelObject obj, SixModelObject ch, String name, double value, ThreadContext tc) {
tc.native_n = value;
obj.bind_attribute_native(tc, ch, name, STable.NO_HINT);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "Attribute '" + name + "' is not a native num");
return value;
}
public static String bindattr_s(SixModelObject obj, SixModelObject ch, String name, String value, ThreadContext tc) {
tc.native_s = value;
obj.bind_attribute_native(tc, ch, name, STable.NO_HINT);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "Attribute '" + name + "' is not a native str");
return value;
}
public static long attrinited(SixModelObject obj, SixModelObject ch, String name, ThreadContext tc) {
return obj.is_attribute_initialized(tc, ch, name, STable.NO_HINT);
}
/* Positional operations. */
public static SixModelObject atpos(SixModelObject arr, long idx, ThreadContext tc) {
return arr.at_pos_boxed(tc, idx);
}
public static long atpos_i(SixModelObject arr, long idx, ThreadContext tc) {
arr.at_pos_native(tc, idx);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "This is not a native int array");
return tc.native_i;
}
public static double atpos_n(SixModelObject arr, long idx, ThreadContext tc) {
arr.at_pos_native(tc, idx);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "This is not a native num array");
return tc.native_n;
}
public static String atpos_s(SixModelObject arr, long idx, ThreadContext tc) {
arr.at_pos_native(tc, idx);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "This is not a native str array");
return tc.native_s;
}
public static SixModelObject bindpos(SixModelObject arr, long idx, SixModelObject value, ThreadContext tc) {
arr.bind_pos_boxed(tc, idx, value);
return value;
}
public static long bindpos_i(SixModelObject arr, long idx, long value, ThreadContext tc) {
tc.native_i = value;
arr.bind_pos_native(tc, idx);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "This is not a native int array");
return value;
}
public static double bindpos_n(SixModelObject arr, long idx, double value, ThreadContext tc) {
tc.native_n = value;
arr.bind_pos_native(tc, idx);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "This is not a native num array");
return value;
}
public static String bindpos_s(SixModelObject arr, long idx, String value, ThreadContext tc) {
tc.native_s = value;
arr.bind_pos_native(tc, idx);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "This is not a native str array");
return value;
}
public static SixModelObject push(SixModelObject arr, SixModelObject value, ThreadContext tc) {
arr.push_boxed(tc, value);
return value;
}
public static long push_i(SixModelObject arr, long value, ThreadContext tc) {
tc.native_i = value;
arr.push_native(tc);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "This is not a native int array");
return value;
}
public static double push_n(SixModelObject arr, double value, ThreadContext tc) {
tc.native_n = value;
arr.push_native(tc);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "This is not a native num array");
return value;
}
public static String push_s(SixModelObject arr, String value, ThreadContext tc) {
tc.native_s = value;
arr.push_native(tc);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "This is not a native str array");
return value;
}
public static SixModelObject pop(SixModelObject arr, ThreadContext tc) {
return arr.pop_boxed(tc);
}
public static long pop_i(SixModelObject arr, ThreadContext tc) {
arr.pop_native(tc);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "This is not a native int array");
return tc.native_i;
}
public static double pop_n(SixModelObject arr, ThreadContext tc) {
arr.pop_native(tc);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "This is not a native num array");
return tc.native_n;
}
public static String pop_s(SixModelObject arr, ThreadContext tc) {
arr.pop_native(tc);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "This is not a native str array");
return tc.native_s;
}
public static SixModelObject unshift(SixModelObject arr, SixModelObject value, ThreadContext tc) {
arr.unshift_boxed(tc, value);
return value;
}
public static long unshift_i(SixModelObject arr, long value, ThreadContext tc) {
tc.native_i = value;
arr.unshift_native(tc);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "This is not a native int array");
return value;
}
public static double unshift_n(SixModelObject arr, double value, ThreadContext tc) {
tc.native_n = value;
arr.unshift_native(tc);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "This is not a native num array");
return value;
}
public static String unshift_s(SixModelObject arr, String value, ThreadContext tc) {
tc.native_s = value;
arr.unshift_native(tc);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "This is not a native str array");
return value;
}
public static SixModelObject shift(SixModelObject arr, ThreadContext tc) {
return arr.shift_boxed(tc);
}
public static long shift_i(SixModelObject arr, ThreadContext tc) {
arr.shift_native(tc);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "This is not a native int array");
return tc.native_i;
}
public static double shift_n(SixModelObject arr, ThreadContext tc) {
arr.shift_native(tc);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "This is not a native num array");
return tc.native_n;
}
public static String shift_s(SixModelObject arr, ThreadContext tc) {
arr.shift_native(tc);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "This is not a native str array");
return tc.native_s;
}
public static SixModelObject splice(SixModelObject arr, SixModelObject from, long offset, long count, ThreadContext tc) {
arr.splice(tc, from, offset, count);
return arr;
}
/* Associative operations. */
public static SixModelObject atkey(SixModelObject hash, String key, ThreadContext tc) {
return hash.at_key_boxed(tc, key);
}
public static long atkey_i(SixModelObject hash, String key, ThreadContext tc) {
hash.at_key_native(tc, key);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "This is not a native int hash");
return tc.native_i;
}
public static double atkey_n(SixModelObject hash, String key, ThreadContext tc) {
hash.at_key_native(tc, key);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "This is not a native num hash");
return tc.native_n;
}
public static String atkey_s(SixModelObject hash, String key, ThreadContext tc) {
hash.at_key_native(tc, key);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "This is not a native str hash");
return tc.native_s;
}
public static SixModelObject bindkey(SixModelObject hash, String key, SixModelObject value, ThreadContext tc) {
hash.bind_key_boxed(tc, key, value);
return value;
}
public static long bindkey_i(SixModelObject hash, String key, long value, ThreadContext tc) {
tc.native_i = value;
hash.bind_key_native(tc, key);
if (tc.native_type != ThreadContext.NATIVE_INT)
throw ExceptionHandling.dieInternal(tc, "This is not a native int hash");
return value;
}
public static double bindkey_n(SixModelObject hash, String key, double value, ThreadContext tc) {
tc.native_n = value;
hash.bind_key_native(tc, key);
if (tc.native_type != ThreadContext.NATIVE_NUM)
throw ExceptionHandling.dieInternal(tc, "This is not a native num hash");
return value;
}
public static String bindkey_s(SixModelObject hash, String key, String value, ThreadContext tc) {
tc.native_s = value;
hash.bind_key_native(tc, key);
if (tc.native_type != ThreadContext.NATIVE_STR)
throw ExceptionHandling.dieInternal(tc, "This is not a native str hash");
return value;
}
public static long existskey(SixModelObject hash, String key, ThreadContext tc) {
return hash.exists_key(tc, key);
}
public static SixModelObject deletekey(SixModelObject hash, String key, ThreadContext tc) {
hash.delete_key(tc, key);
return hash;
}
/* Terms */
public static long time_i() {
return (long) (System.currentTimeMillis() / 1000);
}
public static double time_n() {
return System.currentTimeMillis() / 1000.0;
}
/* Aggregate operations. */
public static long elems(SixModelObject agg, ThreadContext tc) {
return agg.elems(tc);
}
public static SixModelObject setelems(SixModelObject agg, long elems, ThreadContext tc) {
agg.set_elems(tc, elems);
return agg;
}
public static long existspos(SixModelObject agg, long key, ThreadContext tc) {
return agg.exists_pos(tc, key);
}
public static long islist(SixModelObject obj, ThreadContext tc) {
return obj != null && obj.st.REPR instanceof VMArray ? 1 : 0;
}
public static long ishash(SixModelObject obj, ThreadContext tc) {
return obj != null && obj.st.REPR instanceof VMHash ? 1 : 0;
}
/* Container operations. */
public static SixModelObject setcontspec(SixModelObject obj, String confname, SixModelObject confarg, ThreadContext tc) {
if (obj.st.ContainerSpec != null)
ExceptionHandling.dieInternal(tc, "Cannot change a type's container specification");
ContainerConfigurer cc = tc.gc.contConfigs.get(confname);
if (cc == null)
ExceptionHandling.dieInternal(tc, "No such container spec " + confname);
cc.setContainerSpec(tc, obj.st);
cc.configureContainerSpec(tc, obj.st, confarg);
return obj;
}
public static long iscont(SixModelObject obj) {
return obj.st.ContainerSpec == null ? 0 : 1;
}
public static SixModelObject decont(SixModelObject obj, ThreadContext tc) {
ContainerSpec cs = obj.st.ContainerSpec;
return cs == null ? obj : cs.fetch(tc, obj);
}
public static SixModelObject assign(SixModelObject cont, SixModelObject value, ThreadContext tc) {
ContainerSpec cs = cont.st.ContainerSpec;
if (cs != null)
cs.store(tc, cont, value);
else
ExceptionHandling.dieInternal(tc, "Cannot assign to an immutable value");
return cont;
}
public static SixModelObject assignunchecked(SixModelObject cont, SixModelObject value, ThreadContext tc) {
ContainerSpec cs = cont.st.ContainerSpec;
if (cs != null)
cs.storeUnchecked(tc, cont, value);
else
ExceptionHandling.dieInternal(tc, "Cannot assign to an immutable value");
return cont;
}
/* Iteration. */
public static SixModelObject iter(SixModelObject agg, ThreadContext tc) {
if (agg.st.REPR instanceof VMArray) {
SixModelObject iterType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.arrayIteratorType;
VMIterInstance iter = (VMIterInstance)iterType.st.REPR.allocate(tc, iterType.st);
iter.target = agg;
iter.idx = -1;
iter.limit = agg.elems(tc);
iter.iterMode = VMIterInstance.MODE_ARRAY;
return iter;
}
else if (agg.st.REPR instanceof VMHash) {
SixModelObject iterType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.hashIteratorType;
VMIterInstance iter = (VMIterInstance)iterType.st.REPR.allocate(tc, iterType.st);
iter.target = agg;
iter.hashKeyIter = ((VMHashInstance)agg).storage.keySet().iterator();
iter.iterMode = VMIterInstance.MODE_HASH;
return iter;
}
else if (agg.st.REPR instanceof ContextRef) {
/* Fake up a VMHash and then get its iterator. */
SixModelObject BOOTHash = tc.gc.BOOTHash;
SixModelObject hash = BOOTHash.st.REPR.allocate(tc, BOOTHash.st);
hash.initialize(tc);
            /* TODO: bind the actual lexical values rather than cheating and
             * shoving nulls in; nulls are enough for the initial use case,
             * though.
             */
StaticCodeInfo sci = ((ContextRefInstance)agg).context.codeRef.staticInfo;
if (sci.oLexicalNames != null) {
for (int i = 0; i < sci.oLexicalNames.length; i++)
hash.bind_key_boxed(tc, sci.oLexicalNames[i], null);
}
if (sci.iLexicalNames != null) {
for (int i = 0; i < sci.iLexicalNames.length; i++)
hash.bind_key_boxed(tc, sci.iLexicalNames[i], null);
}
if (sci.nLexicalNames != null) {
for (int i = 0; i < sci.nLexicalNames.length; i++)
hash.bind_key_boxed(tc, sci.nLexicalNames[i], null);
}
if (sci.sLexicalNames != null) {
for (int i = 0; i < sci.sLexicalNames.length; i++)
hash.bind_key_boxed(tc, sci.sLexicalNames[i], null);
}
return iter(hash, tc);
}
else {
throw ExceptionHandling.dieInternal(tc, "Can only use iter with representation VMArray and VMHash");
}
}
public static String iterkey_s(SixModelObject obj, ThreadContext tc) {
return ((VMIterInstance)obj).key_s(tc);
}
public static SixModelObject iterval(SixModelObject obj, ThreadContext tc) {
return ((VMIterInstance)obj).val(tc);
}
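    /* The canonical consumption pattern (as setmethcache above uses it):
     *
     *   SixModelObject it = iter(hash, tc);
     *   while (istrue(it, tc) != 0) {
     *       SixModelObject cur = it.shift_boxed(tc);
     *       String k          = iterkey_s(cur, tc);
     *       SixModelObject v  = iterval(cur, tc);
     *   }
     */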
/* Boolification operations. */
public static SixModelObject setboolspec(SixModelObject obj, long mode, SixModelObject method, ThreadContext tc) {
BoolificationSpec bs = new BoolificationSpec();
bs.Mode = (int)mode;
bs.Method = method;
obj.st.BoolificationSpec = bs;
return obj;
}
public static long istrue(SixModelObject obj, ThreadContext tc) {
BoolificationSpec bs = obj.st.BoolificationSpec;
switch (bs == null ? BoolificationSpec.MODE_NOT_TYPE_OBJECT : bs.Mode) {
case BoolificationSpec.MODE_CALL_METHOD:
invokeDirect(tc, bs.Method, invocantCallSite, new Object[] { obj });
return istrue(result_o(tc.curFrame), tc);
case BoolificationSpec.MODE_UNBOX_INT:
return obj instanceof TypeObject || obj.get_int(tc) == 0 ? 0 : 1;
case BoolificationSpec.MODE_UNBOX_NUM:
return obj instanceof TypeObject || obj.get_num(tc) == 0.0 ? 0 : 1;
case BoolificationSpec.MODE_UNBOX_STR_NOT_EMPTY:
return obj instanceof TypeObject || obj.get_str(tc).equals("") ? 0 : 1;
case BoolificationSpec.MODE_UNBOX_STR_NOT_EMPTY_OR_ZERO:
if (obj instanceof TypeObject)
return 0;
String str = obj.get_str(tc);
return str == null || str.equals("") || str.equals("0") ? 0 : 1;
case BoolificationSpec.MODE_NOT_TYPE_OBJECT:
return obj instanceof TypeObject ? 0 : 1;
case BoolificationSpec.MODE_BIGINT:
return obj instanceof TypeObject || getBI(tc, obj).compareTo(BigInteger.ZERO) == 0 ? 0 : 1;
case BoolificationSpec.MODE_ITER:
return ((VMIterInstance)obj).boolify() ? 1 : 0;
case BoolificationSpec.MODE_HAS_ELEMS:
return obj.elems(tc) == 0 ? 0 : 1;
default:
throw ExceptionHandling.dieInternal(tc, "Invalid boolification spec mode used");
}
}
public static long isfalse(SixModelObject obj, ThreadContext tc) {
return istrue(obj, tc) == 0 ? 1 : 0;
}
public static long istrue_s(String str) {
return str == null || str.equals("") || str.equals("0") ? 0 : 1;
}
public static long isfalse_s(String str) {
return str == null || str.equals("") || str.equals("0") ? 1 : 0;
}
public static long not_i(long v) {
return v == 0 ? 1 : 0;
}
/* Smart coercions. */
public static String smart_stringify(SixModelObject obj, ThreadContext tc) {
// If it has a Str method, that wins.
// We could put this in the generated code, but it's here to avoid the
// bulk.
SixModelObject numMeth = obj.st.MethodCache.get("Str");
if (numMeth != null) {
invokeDirect(tc, numMeth, invocantCallSite, new Object[] { obj });
return result_s(tc.curFrame);
}
// If it's a type object, empty string.
if (obj instanceof TypeObject)
return "";
// See if it can unbox to a primitive we can stringify.
StorageSpec ss = obj.st.REPR.get_storage_spec(tc, obj.st);
if ((ss.can_box & StorageSpec.CAN_BOX_STR) != 0)
return obj.get_str(tc);
if ((ss.can_box & StorageSpec.CAN_BOX_INT) != 0)
return coerce_i2s(obj.get_int(tc));
if ((ss.can_box & StorageSpec.CAN_BOX_NUM) != 0)
return coerce_n2s(obj.get_num(tc));
// If it's an exception, take the message.
if (obj instanceof VMExceptionInstance) {
String msg = ((VMExceptionInstance)obj).message;
return msg == null ? "Died" : msg;
}
// If anything else, we can't do it.
throw ExceptionHandling.dieInternal(tc, "Cannot stringify this");
}
public static double smart_numify(SixModelObject obj, ThreadContext tc) {
// If it has a Num method, that wins.
// We could put this in the generated code, but it's here to avoid the
// bulk.
SixModelObject numMeth = obj.st.MethodCache.get("Num");
if (numMeth != null) {
invokeDirect(tc, numMeth, invocantCallSite, new Object[] { obj });
return result_n(tc.curFrame);
}
        // If it's a type object, zero.
if (obj instanceof TypeObject)
return 0.0;
// See if it can unbox to a primitive we can numify.
StorageSpec ss = obj.st.REPR.get_storage_spec(tc, obj.st);
if ((ss.can_box & StorageSpec.CAN_BOX_INT) != 0)
return (double)obj.get_int(tc);
if ((ss.can_box & StorageSpec.CAN_BOX_NUM) != 0)
return obj.get_num(tc);
if ((ss.can_box & StorageSpec.CAN_BOX_STR) != 0)
return coerce_s2n(obj.get_str(tc));
if (obj instanceof VMArrayInstance || obj instanceof VMHashInstance)
return obj.elems(tc);
// If anything else, we can't do it.
throw ExceptionHandling.dieInternal(tc, "Cannot numify this");
}
/* Math operations. */
public static double sec_n(double val) {
return 1 / Math.cos(val);
}
public static double asec_n(double val) {
return Math.acos(1 / val);
}
public static double sech_n(double val) {
return 1 / Math.cosh(val);
}
public static long gcd_i(long valA, long valB) {
return BigInteger.valueOf(valA).gcd(BigInteger.valueOf(valB))
.longValue();
}
public static SixModelObject gcd_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).gcd(getBI(tc, b)));
}
public static long lcm_i(long valA, long valB) {
return valA * (valB / gcd_i(valA, valB));
}
public static SixModelObject lcm_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
BigInteger valA = getBI(tc, a);
BigInteger valB = getBI(tc, b);
BigInteger gcd = valA.gcd(valB);
return makeBI(tc, type, valA.multiply(valB).divide(gcd));
}
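    /* Integer-literal parsing (a reading of the code below, not an
     * external spec): scans digits of the given radix starting at zpos,
     * skipping single '_' separators, and returns a 3-element array of
     * boxed nums (value, base, pos), where base is radix**digits and pos
     * is one past the last digit consumed (-1 if no digits matched). Flag
     * bits as the code has them: 0x01 forces a negative result, 0x02
     * permits a leading '+' or '-', and 0x04 keeps trailing zero digits
     * from being committed to value/base. */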
public static SixModelObject radix(long radix, String str, long zpos, long flags, ThreadContext tc) {
double zvalue = 0.0;
double zbase = 1.0;
int chars = str.length();
double value = zvalue;
double base = zbase;
long pos = -1;
char ch;
boolean neg = false;
if (radix > 36) {
throw ExceptionHandling.dieInternal(tc, "Cannot convert radix of " + radix + " (max 36)");
}
ch = (zpos < chars) ? str.charAt((int)zpos) : 0;
if ((flags & 0x02) != 0 && (ch == '+' || ch == '-')) {
neg = (ch == '-');
zpos++;
ch = (zpos < chars) ? str.charAt((int)zpos) : 0;
}
while (zpos < chars) {
if (ch >= '0' && ch <= '9') ch = (char)(ch - '0');
else if (ch >= 'a' && ch <= 'z') ch = (char)(ch - 'a' + 10);
else if (ch >= 'A' && ch <= 'Z') ch = (char)(ch - 'A' + 10);
else break;
if (ch >= radix) break;
zvalue = zvalue * radix + ch;
zbase = zbase * radix;
zpos++; pos = zpos;
if (ch != 0 || (flags & 0x04) == 0) { value=zvalue; base=zbase; }
if (zpos >= chars) break;
ch = str.charAt((int)zpos);
if (ch != '_') continue;
zpos++;
if (zpos >= chars) break;
ch = str.charAt((int)zpos);
}
if (neg || (flags & 0x01) != 0) { value = -value; }
HLLConfig hllConfig = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig;
SixModelObject result = hllConfig.slurpyArrayType.st.REPR.allocate(tc,
hllConfig.slurpyArrayType.st);
result.initialize(tc);
result.push_boxed(tc, box_n(value, hllConfig.numBoxType, tc));
result.push_boxed(tc, box_n(base, hllConfig.numBoxType, tc));
result.push_boxed(tc, box_n(pos, hllConfig.numBoxType, tc));
return result;
}
/* String operations. */
public static long chars(String val) {
return val.length();
}
public static String lc(String val) {
return val.toLowerCase();
}
public static String uc(String val) {
return val.toUpperCase();
}
public static String x(String val, long count) {
StringBuilder retval = new StringBuilder();
for (long ii = 1; ii <= count; ii++) {
retval.append(val);
}
return retval.toString();
}
public static String concat(String valA, String valB) {
return valA + valB;
}
    public static String chr(long val) {
        // Use Character.toChars so supplementary code points (above 0xFFFF)
        // survive rather than being truncated by a (char) cast; this keeps
        // chr symmetric with ordfirst/ordat, which use codePointAt.
        return new String(Character.toChars((int) val));
    }
public static String join(String delimiter, SixModelObject arr, ThreadContext tc) {
final StringBuilder sb = new StringBuilder();
final int numElems = (int) arr.elems(tc);
for (int i = 0; i < numElems; i++) {
if (sb.length() > 0) {
sb.append(delimiter);
}
sb.append(arr.at_pos_boxed(tc, i).get_str(tc));
}
return sb.toString();
}
public static SixModelObject split(String delimiter, String string, ThreadContext tc) {
if (string == null || delimiter == null) {
return null;
}
HLLConfig hllConfig = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig;
SixModelObject arrayType = hllConfig.slurpyArrayType;
SixModelObject array = arrayType.st.REPR.allocate(tc, arrayType.st);
array.initialize(tc);
int slen = string.length();
if (slen == 0) {
return array;
}
int dlen = delimiter.length();
if (dlen == 0) {
for (int i = 0; i < slen; i++) {
String item = string.substring(i, i+1);
SixModelObject value = box_s(item, hllConfig.strBoxType, tc);
array.push_boxed(tc, value);
}
} else {
int curpos = 0;
int matchpos = string.indexOf(delimiter);
while (matchpos > -1) {
String item = string.substring(curpos, matchpos);
SixModelObject value = box_s(item, hllConfig.strBoxType, tc);
array.push_boxed(tc, value);
curpos = matchpos + dlen;
matchpos = string.indexOf(delimiter, curpos);
}
String tail = string.substring(curpos);
SixModelObject value = box_s(tail, hllConfig.strBoxType, tc);
array.push_boxed(tc, value);
}
return array;
}
public static long indexfrom(String string, String pattern, long fromIndex) {
return string.indexOf(pattern, (int)fromIndex);
}
public static long rindexfromend(String string, String pattern) {
return string.lastIndexOf(pattern);
}
public static long rindexfrom(String string, String pattern, long fromIndex) {
return string.lastIndexOf(pattern, (int)fromIndex);
}
public static String substr2(String val, long offset) {
if (offset >= val.length())
return "";
return val.substring((int)offset);
}
public static String substr3(String val, long offset, long length) {
if (offset >= val.length())
return "";
int end = (int)(offset + length);
if (end > val.length())
end = val.length();
return val.substring((int)offset, end);
}
public static long ordfirst(String str) {
return str.codePointAt(0);
}
public static long ordat(String str, long offset) {
return str.codePointAt((int)offset);
}
public static String sprintf(String format, SixModelObject arr, ThreadContext tc) {
// This function just assumes that Java's printf format is compatible
// with NQP's printf format...
final int numElems = (int) arr.elems(tc);
Object[] args = new Object[numElems];
for (int i = 0; i < numElems; i++) {
SixModelObject obj = arr.at_pos_boxed(tc, i);
StorageSpec ss = obj.st.REPR.get_storage_spec(tc, obj.st);
if ((ss.can_box & StorageSpec.CAN_BOX_INT) != 0) {
args[i] = Long.valueOf(obj.get_int(tc));
} else if ((ss.can_box & StorageSpec.CAN_BOX_NUM) != 0) {
args[i] = Double.valueOf(obj.get_num(tc));
} else if ((ss.can_box & StorageSpec.CAN_BOX_STR) != 0) {
args[i] = obj.get_str(tc);
} else {
throw new IllegalArgumentException("sprintf only accepts ints, nums, and strs, not " + obj.getClass());
}
}
return String.format(format, args);
}
public static String escape(String str) {
int len = str.length();
StringBuilder sb = new StringBuilder(2 * len);
for (int i = 0; i < len; i++) {
char c = str.charAt(i);
switch (c) {
case '\\': sb.append("\\\\"); break;
                case 7: sb.append("\\a"); break;   // BEL (alert)
case '\b': sb.append("\\b"); break;
case '\n': sb.append("\\n"); break;
case '\r': sb.append("\\r"); break;
case '\t': sb.append("\\t"); break;
case '\f': sb.append("\\f"); break;
case '"': sb.append("\\\""); break;
                case 27: sb.append("\\e"); break;  // ESC
default:
sb.append(c);
}
}
return sb.toString();
}
    public static String flip(String str) {
        return new StringBuilder(str).reverse().toString();
    }
    /* Brute force (and BMP-only, since it iterates char values), but not
     * normally needed by most programs. */
private static HashMap<String, Character> cpNameMap;
public static long codepointfromname(String name) {
if (cpNameMap == null) {
cpNameMap = new HashMap<String, Character>();
for (char i = 0; i < Character.MAX_VALUE; i++)
if (Character.isValidCodePoint(i))
cpNameMap.put(Character.getName(i), i);
}
Character found = cpNameMap.get(name);
return found == null ? -1 : found;
}
private static final int CCLASS_ANY = 65535;
private static final int CCLASS_UPPERCASE = 1;
private static final int CCLASS_LOWERCASE = 2;
private static final int CCLASS_ALPHABETIC = 4;
private static final int CCLASS_NUMERIC = 8;
private static final int CCLASS_HEXADECIMAL = 16;
private static final int CCLASS_WHITESPACE = 32;
private static final int CCLASS_BLANK = 256;
private static final int CCLASS_CONTROL = 512;
private static final int CCLASS_PUNCTUATION = 1024;
private static final int CCLASS_ALPHANUMERIC = 2048;
private static final int CCLASS_NEWLINE = 4096;
private static final int CCLASS_WORD = 8192;
private static final int[] PUNCT_TYPES = {
Character.CONNECTOR_PUNCTUATION, Character.DASH_PUNCTUATION,
Character.END_PUNCTUATION, Character.FINAL_QUOTE_PUNCTUATION,
Character.INITIAL_QUOTE_PUNCTUATION, Character.OTHER_PUNCTUATION,
Character.START_PUNCTUATION
};
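/* Tests whether the character at 'offset' in 'target' falls into the given
 * character class. Although the CCLASS_* constants look like bit flags, the
 * switch below treats 'cclass' as a single exact selector rather than a
 * combinable mask; out-of-range offsets simply report no match. */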
public static long iscclass(long cclass, String target, long offset) {
if (offset < 0 || offset >= target.length())
return 0;
char test = target.charAt((int)offset);
switch ((int)cclass) {
case CCLASS_ANY:
return 1;
case CCLASS_NUMERIC:
return Character.isDigit(test) ? 1 : 0;
case CCLASS_WHITESPACE:
return Character.isWhitespace(test) ? 1 : 0;
case CCLASS_WORD:
return test == '_' || Character.isLetterOrDigit(test) ? 1 : 0;
case CCLASS_NEWLINE:
return (Character.getType(test) == Character.LINE_SEPARATOR) ||
(test == '\n' || test == '\r')
? 1 : 0;
case CCLASS_ALPHABETIC:
return Character.isAlphabetic(test) ? 1 : 0;
case CCLASS_UPPERCASE:
return Character.isUpperCase(test) ? 1 : 0;
case CCLASS_LOWERCASE:
return Character.isLowerCase(test) ? 1 : 0;
case CCLASS_HEXADECIMAL:
return Character.isDigit(test) ||
(test >= 'A' && test <= 'F' || test >= 'a' && test <= 'f')
? 1 : 0;
case CCLASS_BLANK:
return (Character.getType(test) == Character.SPACE_SEPARATOR) ||
(test == '\t')
? 1 : 0;
case CCLASS_CONTROL:
return Character.isISOControl(test) ? 1 : 0;
case CCLASS_PUNCTUATION:
int type = Character.getType(test);
for (int punct : PUNCT_TYPES) {
if (type == punct) { return 1; }
}
return 0;
case CCLASS_ALPHANUMERIC:
return Character.isLetterOrDigit(test) ? 1 : 0;
default:
return 0;
}
}
public static long findcclass(long cclass, String target, long offset, long count) {
long length = target.length();
long end = offset + count;
end = length < end ? length : end;
for (long pos = offset; pos < end; pos++) {
if (iscclass(cclass, target, pos) > 0) {
return pos;
}
}
return end;
}
public static long findnotcclass(long cclass, String target, long offset, long count) {
long length = target.length();
long end = offset + count;
end = length < end ? length : end;
for (long pos = offset; pos < end; pos++) {
if (iscclass(cclass, target, pos) == 0) {
return pos;
}
}
return end;
}
public static long ischarprop(String propName, String target, long offset) {
String check = target.substring((int)offset, (int)offset + 1);
return check.matches("\\p{" + propName + "}") ? 1 : 0;
}
/* serialization context related opcodes */
public static String sha1(String str) throws NoSuchAlgorithmException, UnsupportedEncodingException {
MessageDigest md = MessageDigest.getInstance("SHA1");
byte[] inBytes = str.getBytes("UTF-8");
byte[] outBytes = md.digest(inBytes);
StringBuilder sb = new StringBuilder();
for (byte b : outBytes) {
sb.append(String.format("%02X", b));
}
return sb.toString();
}
public static SixModelObject createsc(String handle, ThreadContext tc) {
if (tc.gc.scs.containsKey(handle))
throw ExceptionHandling.dieInternal(tc, "SC with handle " + handle + " already exists");
SerializationContext sc = new SerializationContext(handle);
tc.gc.scs.put(handle, sc);
SixModelObject SCRef = tc.gc.SCRef;
SCRefInstance ref = (SCRefInstance)SCRef.st.REPR.allocate(tc, SCRef.st);
ref.referencedSC = sc;
tc.gc.scRefs.put(handle, ref);
return ref;
}
public static SixModelObject scsetobj(SixModelObject scRef, long idx, SixModelObject obj, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
SerializationContext sc = ((SCRefInstance)scRef).referencedSC;
ArrayList<SixModelObject> roots = sc.root_objects;
if (roots.size() == idx)
roots.add(obj);
else
roots.set((int)idx, obj);
if (obj.st.sc == null) {
sc.root_stables.add(obj.st);
obj.st.sc = sc;
}
return obj;
}
else {
throw ExceptionHandling.dieInternal(tc, "scsetobj can only operate on an SCRef");
}
}
public static SixModelObject scsetcode(SixModelObject scRef, long idx, SixModelObject obj, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
if (obj instanceof CodeRef) {
ArrayList<CodeRef> roots = ((SCRefInstance)scRef).referencedSC.root_codes;
if (roots.size() == idx)
roots.add((CodeRef)obj);
else
roots.set((int)idx, (CodeRef)obj);
obj.sc = ((SCRefInstance)scRef).referencedSC;
return obj;
}
else {
throw ExceptionHandling.dieInternal(tc, "scsetcode can only store a CodeRef");
}
}
else {
throw ExceptionHandling.dieInternal(tc, "scsetcode can only operate on an SCRef");
}
}
public static SixModelObject scgetobj(SixModelObject scRef, long idx, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
return ((SCRefInstance)scRef).referencedSC.root_objects.get((int)idx);
}
else {
throw ExceptionHandling.dieInternal(tc, "scgetobj can only operate on an SCRef");
}
}
public static String scgethandle(SixModelObject scRef, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
return ((SCRefInstance)scRef).referencedSC.handle;
}
else {
throw ExceptionHandling.dieInternal(tc, "scgethandle can only operate on an SCRef");
}
}
public static String scgetdesc(SixModelObject scRef, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
return ((SCRefInstance)scRef).referencedSC.description;
}
else {
throw ExceptionHandling.dieInternal(tc, "scgetdesc can only operate on an SCRef");
}
}
public static long scgetobjidx(SixModelObject scRef, SixModelObject find, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
int idx = ((SCRefInstance)scRef).referencedSC.root_objects.indexOf(find);
if (idx < 0)
throw ExceptionHandling.dieInternal(tc, "Object does not exist in this SC");
return idx;
}
else {
throw ExceptionHandling.dieInternal(tc, "scgetobjidx can only operate on an SCRef");
}
}
public static String scsetdesc(SixModelObject scRef, String desc, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
((SCRefInstance)scRef).referencedSC.description = desc;
return desc;
}
else {
throw ExceptionHandling.dieInternal(tc, "scsetdesc can only operate on an SCRef");
}
}
public static long scobjcount(SixModelObject scRef, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
return ((SCRefInstance)scRef).referencedSC.root_objects.size();
}
else {
throw ExceptionHandling.dieInternal(tc, "scobjcount can only operate on an SCRef");
}
}
public static SixModelObject setobjsc(SixModelObject obj, SixModelObject scRef, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
obj.sc = ((SCRefInstance)scRef).referencedSC;
return obj;
}
else {
throw ExceptionHandling.dieInternal(tc, "setobjsc requires an SCRef");
}
}
public static SixModelObject getobjsc(SixModelObject obj, ThreadContext tc) {
SerializationContext sc = obj.sc;
if (sc == null)
return null;
if (!tc.gc.scRefs.containsKey(sc.handle)) {
SixModelObject SCRef = tc.gc.SCRef;
SCRefInstance ref = (SCRefInstance)SCRef.st.REPR.allocate(tc, SCRef.st);
ref.referencedSC = sc;
tc.gc.scRefs.put(sc.handle, ref);
}
return tc.gc.scRefs.get(sc.handle);
}
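/* Serializes the SC behind 'scRef'. The blob itself is the return value; the
 * string heap built during serialization is written back into 'sh' as a side
 * effect, one native string entry per heap string. */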
public static String serialize(SixModelObject scRef, SixModelObject sh, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
ArrayList<String> stringHeap = new ArrayList<String>();
SerializationWriter sw = new SerializationWriter(tc,
((SCRefInstance)scRef).referencedSC,
stringHeap);
String serialized = sw.serialize();
int index = 0;
for (String s : stringHeap) {
tc.native_s = s;
sh.bind_pos_native(tc, index++);
}
return serialized;
}
else {
throw ExceptionHandling.dieInternal(tc, "serialize was not passed a valid SCRef");
}
}
public static String deserialize(String blob, SixModelObject scRef, SixModelObject sh, SixModelObject cr, SixModelObject conflict, ThreadContext tc) {
if (scRef instanceof SCRefInstance) {
SerializationContext sc = ((SCRefInstance)scRef).referencedSC;
String[] shArray = new String[(int)sh.elems(tc)];
for (int i = 0; i < shArray.length; i++) {
sh.at_pos_native(tc, i);
shArray[i] = tc.native_s;
}
CodeRef[] crArray = new CodeRef[(int)cr.elems(tc)];
for (int i = 0; i < crArray.length; i++)
crArray[i] = (CodeRef)cr.at_pos_boxed(tc, i);
SerializationReader sr = new SerializationReader(
tc, sc, shArray, crArray,
Base64.decode(blob));
sr.deserialize();
return blob;
}
else {
throw ExceptionHandling.dieInternal(tc, "deserialize was not passed a valid SCRef");
}
}
public static SixModelObject wval(String sc, long idx, ThreadContext tc) {
return tc.gc.scs.get(sc).root_objects.get((int)idx);
}
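/* SC write-barrier switches: disabling bumps a per-thread depth counter and
 * enabling decrements it, so nested disable/enable pairs balance and the
 * barrier is only live again once the depth drops back to zero. */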
public static long scwbdisable(ThreadContext tc) {
return ++tc.scwbDisableDepth;
}
public static long scwbenable(ThreadContext tc) {
return --tc.scwbDisableDepth;
}
public static SixModelObject pushcompsc(SixModelObject sc, ThreadContext tc) {
if (sc instanceof SCRefInstance) {
if (tc.compilingSCs == null)
tc.compilingSCs = new ArrayList<SCRefInstance>();
tc.compilingSCs.add((SCRefInstance)sc);
return sc;
}
else {
throw ExceptionHandling.dieInternal(tc, "Can only push an SCRef with pushcompsc");
}
}
public static SixModelObject popcompsc(ThreadContext tc) {
if (tc.compilingSCs == null)
throw ExceptionHandling.dieInternal(tc, "No current compiling SC.");
int idx = tc.compilingSCs.size() - 1;
SixModelObject result = tc.compilingSCs.get(idx);
tc.compilingSCs.remove(idx);
if (idx == 0)
tc.compilingSCs = null;
return result;
}
/* bitwise operations. */
public static long bitor_i(long valA, long valB) {
return valA | valB;
}
public static long bitxor_i(long valA, long valB) {
return valA ^ valB;
}
public static long bitand_i(long valA, long valB) {
return valA & valB;
}
public static long bitshiftl_i(long valA, long valB) {
return valA << valB;
}
public static long bitshiftr_i(long valA, long valB) {
return valA >> valB;
}
public static long bitneg_i(long val) {
return ~val;
}
/* Relational. */
public static long cmp_i(long a, long b) {
if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else {
return 0;
}
}
public static long iseq_i(long a, long b) {
return a == b ? 1 : 0;
}
public static long isne_i(long a, long b) {
return a != b ? 1 : 0;
}
public static long islt_i(long a, long b) {
return a < b ? 1 : 0;
}
public static long isle_i(long a, long b) {
return a <= b ? 1 : 0;
}
public static long isgt_i(long a, long b) {
return a > b ? 1 : 0;
}
public static long isge_i(long a, long b) {
return a >= b ? 1 : 0;
}
public static long cmp_n(double a, double b) {
if (a < b) {
return -1;
} else if (a > b) {
return 1;
} else {
return 0;
}
}
public static long iseq_n(double a, double b) {
return a == b ? 1 : 0;
}
public static long isne_n(double a, double b) {
return a != b ? 1 : 0;
}
public static long islt_n(double a, double b) {
return a < b ? 1 : 0;
}
public static long isle_n(double a, double b) {
return a <= b ? 1 : 0;
}
public static long isgt_n(double a, double b) {
return a > b ? 1 : 0;
}
public static long isge_n(double a, double b) {
return a >= b ? 1 : 0;
}
public static long cmp_s(String a, String b) {
int result = a.compareTo(b);
return result < 0 ? -1 : result > 0 ? 1 : 0;
}
public static long iseq_s(String a, String b) {
return a.equals(b) ? 1 : 0;
}
public static long isne_s(String a, String b) {
return a.equals(b) ? 0 : 1;
}
public static long islt_s(String a, String b) {
return a.compareTo(b) < 0 ? 1 : 0;
}
public static long isle_s(String a, String b) {
return a.compareTo(b) <= 0 ? 1 : 0;
}
public static long isgt_s(String a, String b) {
return a.compareTo(b) > 0 ? 1 : 0;
}
public static long isge_s(String a, String b) {
return a.compareTo(b) >= 0 ? 1 : 0;
}
/* Code object related. */
public static SixModelObject takeclosure(SixModelObject code, ThreadContext tc) {
if (code instanceof CodeRef) {
CodeRef clone = (CodeRef)code.clone(tc);
clone.outer = tc.curFrame;
return clone;
}
else {
throw ExceptionHandling.dieInternal(tc, "takeclosure can only be used with a CodeRef");
}
}
public static SixModelObject getcodeobj(SixModelObject code, ThreadContext tc) {
if (code instanceof CodeRef)
return ((CodeRef)code).codeObject;
else
throw ExceptionHandling.dieInternal(tc, "getcodeobj can only be used with a CodeRef");
}
public static SixModelObject setcodeobj(SixModelObject code, SixModelObject obj, ThreadContext tc) {
if (code instanceof CodeRef) {
((CodeRef)code).codeObject = obj;
return code;
}
else {
throw ExceptionHandling.dieInternal(tc, "setcodeobj can only be used with a CodeRef");
}
}
public static String getcodename(SixModelObject code, ThreadContext tc) {
if (code instanceof CodeRef)
return ((CodeRef)code).staticInfo.name;
else
throw ExceptionHandling.dieInternal(tc, "getcodename can only be used with a CodeRef");
}
public static SixModelObject setcodename(SixModelObject code, String name, ThreadContext tc) {
if (code instanceof CodeRef) {
((CodeRef)code).staticInfo.name = name;
return code;
}
else {
throw ExceptionHandling.dieInternal(tc, "setcodename can only be used with a CodeRef");
}
}
public static String getcodecuid(SixModelObject code, ThreadContext tc) {
if (code instanceof CodeRef)
return ((CodeRef)code).staticInfo.uniqueId;
else
throw ExceptionHandling.dieInternal(tc, "getcodename can only be used with a CodeRef");
}
public static SixModelObject forceouterctx(SixModelObject code, SixModelObject ctx, ThreadContext tc) {
if (!(code instanceof CodeRef))
throw ExceptionHandling.dieInternal(tc, "forceouterctx first operand must be a CodeRef");
if (!(ctx instanceof ContextRefInstance))
throw ExceptionHandling.dieInternal(tc, "forceouterctx second operand must be a ContextRef");
((CodeRef)code).outer = ((ContextRefInstance)ctx).context;
return code;
}
public static SixModelObject freshcoderef(SixModelObject code, ThreadContext tc) {
if (!(code instanceof CodeRef))
throw ExceptionHandling.dieInternal(tc, "freshcoderef must be used on a CodeRef");
CodeRef clone = (CodeRef)code.clone(tc);
clone.staticInfo = clone.staticInfo.clone();
clone.staticInfo.staticCode = clone;
return clone;
}
public static SixModelObject markcodestatic(SixModelObject code, ThreadContext tc) {
if (!(code instanceof CodeRef))
throw ExceptionHandling.dieInternal(tc, "markcodestatic must be used on a CodeRef");
((CodeRef)code).isStaticCodeRef = true;
return code;
}
public static SixModelObject markcodestub(SixModelObject code, ThreadContext tc) {
if (!(code instanceof CodeRef))
throw ExceptionHandling.dieInternal(tc, "markcodestub must be used on a CodeRef");
((CodeRef)code).isCompilerStub = true;
return code;
}
public static SixModelObject getstaticcode(SixModelObject code, ThreadContext tc) {
if (code instanceof CodeRef)
return ((CodeRef)code).staticInfo.staticCode;
else
throw ExceptionHandling.dieInternal(tc, "getstaticcode can only be used with a CodeRef");
}
public static void takedispatcher(int lexIdx, ThreadContext tc) {
tc.curFrame.oLex[lexIdx] = tc.currentDispatcher;
tc.currentDispatcher = null;
}
/* process related opcodes */
public static long exit(final long status) {
System.exit((int) status);
return status;
}
public static double sleep(final double seconds) {
// Is this really the right behavior, i.e., swallowing all
// InterruptedExceptions? As far as I can tell the original
// nqp::sleep could not be interrupted, so that behavior is
// duplicated here, but that doesn't mean it's the right thing
// to do on the JVM...
long now = System.currentTimeMillis();
final long awake = now + (long) (seconds * 1000);
while ((now = System.currentTimeMillis()) < awake) {
long millis = awake - now;
try {
Thread.sleep(millis);
} catch(InterruptedException e) {
// swallow
}
}
return seconds;
}
public static SixModelObject getenvhash(ThreadContext tc) {
SixModelObject hashType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.hashType;
SixModelObject strType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.strBoxType;
SixModelObject res = hashType.st.REPR.allocate(tc, hashType.st);
res.initialize(tc);
Map<String, String> env = System.getenv();
for (String envName : env.keySet())
res.bind_key_boxed(tc, envName, box_s(env.get(envName), strType, tc));
return res;
}
/* Exception related. */
public static String die_s(String msg, ThreadContext tc) {
// Construct exception object.
SixModelObject exType = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.exceptionType;
VMExceptionInstance exObj = (VMExceptionInstance)exType.st.REPR.allocate(tc, exType.st);
exObj.initialize(tc);
exObj.message = msg;
exObj.category = ExceptionHandling.EX_CAT_CATCH;
exObj.origin = tc.curFrame;
ExceptionHandling.handlerDynamic(tc, ExceptionHandling.EX_CAT_CATCH, exObj);
return msg;
}
public static SixModelObject throwcatdyn(long category, ThreadContext tc) {
return ExceptionHandling.handlerDynamic(tc, category, null);
}
public static SixModelObject exception(ThreadContext tc) {
int numHandlers = tc.handlers.size();
if (numHandlers > 0)
return tc.handlers.get(numHandlers - 1).exObj;
else
throw ExceptionHandling.dieInternal(tc, "Cannot get exception object ouside of exception handler");
}
public static long getextype(SixModelObject obj, ThreadContext tc) {
if (obj instanceof VMExceptionInstance)
return ((VMExceptionInstance)obj).category;
else
throw ExceptionHandling.dieInternal(tc, "getextype needs an object with VMException representation");
}
public static String getmessage(SixModelObject obj, ThreadContext tc) {
if (obj instanceof VMExceptionInstance) {
String msg = ((VMExceptionInstance)obj).message;
return msg == null ? "Died" : msg;
}
else {
throw ExceptionHandling.dieInternal(tc, "getmessage needs an object with VMException representation");
}
}
public static SixModelObject getpayload(SixModelObject obj, ThreadContext tc) {
if (obj instanceof VMExceptionInstance)
return ((VMExceptionInstance)obj).payload;
else
throw ExceptionHandling.dieInternal(tc, "getpayload needs an object with VMException representation");
}
public static SixModelObject backtracestrings(SixModelObject obj, ThreadContext tc) {
if (obj instanceof VMExceptionInstance) {
SixModelObject Array = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.listType;
SixModelObject Str = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.strBoxType;
SixModelObject result = Array.st.REPR.allocate(tc, Array.st);
result.initialize(tc);
List<String> lines = ExceptionHandling.backtraceStrings(((VMExceptionInstance)obj).origin);
for (int i = 0; i < lines.size(); i++)
result.bind_pos_boxed(tc, i, box_s(lines.get(i), Str, tc));
return result;
}
else {
throw ExceptionHandling.dieInternal(tc, "backtracestring needs an object with VMException representation");
}
}
public static SixModelObject rethrow(SixModelObject obj, ThreadContext tc) {
if (obj instanceof VMExceptionInstance) {
VMExceptionInstance ex = (VMExceptionInstance)obj;
return ExceptionHandling.handlerDynamic(tc, ex.category, ex);
}
else {
throw ExceptionHandling.dieInternal(tc, "rethrow needs an object with VMException representation");
}
}
private static ResumeException theResumer = new ResumeException();
public static SixModelObject resume(SixModelObject obj, ThreadContext tc) {
throw theResumer;
}
/* HLL configuration and compiler related options. */
public static SixModelObject sethllconfig(String language, SixModelObject configHash, ThreadContext tc) {
HLLConfig config = tc.gc.getHLLConfigFor(language);
if (configHash.exists_key(tc, "int_box") != 0)
config.intBoxType = configHash.at_key_boxed(tc, "int_box");
if (configHash.exists_key(tc, "num_box") != 0)
config.numBoxType = configHash.at_key_boxed(tc, "num_box");
if (configHash.exists_key(tc, "str_box") != 0)
config.strBoxType = configHash.at_key_boxed(tc, "str_box");
if (configHash.exists_key(tc, "list") != 0)
config.listType = configHash.at_key_boxed(tc, "list");
if (configHash.exists_key(tc, "hash") != 0)
config.hashType = configHash.at_key_boxed(tc, "hash");
if (configHash.exists_key(tc, "slurpy_array") != 0)
config.slurpyArrayType = configHash.at_key_boxed(tc, "slurpy_array");
if (configHash.exists_key(tc, "slurpy_hash") != 0)
config.slurpyHashType = configHash.at_key_boxed(tc, "slurpy_hash");
if (configHash.exists_key(tc, "array_iter") != 0)
config.arrayIteratorType = configHash.at_key_boxed(tc, "array_iter");
if (configHash.exists_key(tc, "hash_iter") != 0)
config.hashIteratorType = configHash.at_key_boxed(tc, "hash_iter");
return configHash;
}
public static SixModelObject getcomp(String name, ThreadContext tc) {
return tc.gc.compilerRegistry.get(name);
}
public static SixModelObject bindcomp(String name, SixModelObject comp, ThreadContext tc) {
tc.gc.compilerRegistry.put(name, comp);
return comp;
}
public static SixModelObject getcurhllsym(String name, ThreadContext tc) {
String hllName = tc.curFrame.codeRef.staticInfo.compUnit.hllName();
HashMap<String, SixModelObject> hllSyms = tc.gc.hllSyms.get(hllName);
return hllSyms == null ? null : hllSyms.get(name);
}
public static SixModelObject bindcurhllsym(String name, SixModelObject value, ThreadContext tc) {
String hllName = tc.curFrame.codeRef.staticInfo.compUnit.hllName();
HashMap<String, SixModelObject> hllSyms = tc.gc.hllSyms.get(hllName);
if (hllSyms == null) {
hllSyms = new HashMap<String, SixModelObject>();
tc.gc.hllSyms.put(hllName, hllSyms);
}
hllSyms.put(name, value);
return value;
}
public static SixModelObject gethllsym(String hllName, String name, ThreadContext tc) {
HashMap<String, SixModelObject> hllSyms = tc.gc.hllSyms.get(hllName);
return hllSyms == null ? null : hllSyms.get(name);
}
public static SixModelObject bindhllsym(String hllName, String name, SixModelObject value, ThreadContext tc) {
HashMap<String, SixModelObject> hllSyms = tc.gc.hllSyms.get(hllName);
if (hllSyms == null) {
hllSyms = new HashMap<String, SixModelObject>();
tc.gc.hllSyms.put(hllName, hllSyms);
}
hllSyms.put(name, value);
return value;
}
public static String loadbytecode(String filename, ThreadContext tc) {
new LibraryLoader().load(tc, filename);
return filename;
}
public static SixModelObject settypehll(SixModelObject type, String language, ThreadContext tc) {
type.st.hllOwner = tc.gc.getHLLConfigFor(language);
return type;
}
public static SixModelObject settypehllrole(SixModelObject type, long role, ThreadContext tc) {
type.st.hllRole = role;
return type;
}
public static SixModelObject hllize(SixModelObject obj, ThreadContext tc) {
HLLConfig wanted = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig;
if (obj.st.hllOwner == wanted)
return obj;
System.err.println("Warning: HLL mapping NYI");
return obj;
}
public static SixModelObject hllizefor(SixModelObject obj, String language, ThreadContext tc) {
HLLConfig wanted = tc.gc.getHLLConfigFor(language);
if (obj.st.hllOwner == wanted)
return obj;
System.err.println("Warning: HLL mapping NYI");
return obj;
}
/* NFA operations. */
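/* The incoming state list puts the fates list at element 0; every following
 * element describes one state as a flat array of (action, argument, target)
 * triples, which is why the edge loop below advances in steps of three. */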
public static SixModelObject nfafromstatelist(SixModelObject states, SixModelObject nfaType, ThreadContext tc) {
/* Create NFA object. */
NFAInstance nfa = (NFAInstance)nfaType.st.REPR.allocate(tc, nfaType.st);
nfa.initialize(tc);
/* The first state entry is the fates list. */
nfa.fates = states.at_pos_boxed(tc, 0);
/* Go over the rest and convert to the NFA. */
int numStates = (int)states.elems(tc) - 1;
nfa.numStates = numStates;
nfa.states = new NFAStateInfo[numStates][];
for (int i = 0; i < numStates; i++) {
SixModelObject edgeInfo = states.at_pos_boxed(tc, i + 1);
int elems = (int)edgeInfo.elems(tc);
int edges = elems / 3;
int curEdge = 0;
nfa.states[i] = new NFAStateInfo[edges];
for (int j = 0; j < elems; j += 3) {
int act = (int)smart_numify(edgeInfo.at_pos_boxed(tc, j), tc);
int to = (int)smart_numify(edgeInfo.at_pos_boxed(tc, j + 2), tc);
nfa.states[i][curEdge] = new NFAStateInfo();
nfa.states[i][curEdge].act = act;
nfa.states[i][curEdge].to = to;
switch (act) {
case NFA.EDGE_FATE:
case NFA.EDGE_CODEPOINT:
case NFA.EDGE_CODEPOINT_NEG:
case NFA.EDGE_CHARCLASS:
case NFA.EDGE_CHARCLASS_NEG:
nfa.states[i][curEdge].arg_i = (int)smart_numify(edgeInfo.at_pos_boxed(tc, j + 1), tc);
break;
case NFA.EDGE_CHARLIST:
case NFA.EDGE_CHARLIST_NEG:
nfa.states[i][curEdge].arg_s = edgeInfo.at_pos_boxed(tc, j + 1).get_str(tc);
break;
case NFA.EDGE_CODEPOINT_I:
case NFA.EDGE_CODEPOINT_I_NEG: {
SixModelObject arg = edgeInfo.at_pos_boxed(tc, j + 1);
nfa.states[i][curEdge].arg_lc = (char)smart_numify(arg.at_pos_boxed(tc, 0), tc);
nfa.states[i][curEdge].arg_uc = (char)smart_numify(arg.at_pos_boxed(tc, 1), tc);
break;
}
}
curEdge++;
}
}
return nfa;
}
public static SixModelObject nfatostatelist(SixModelObject nfa, ThreadContext tc) {
throw ExceptionHandling.dieInternal(tc, "nfatostatelist NYI");
}
public static SixModelObject nfarunproto(SixModelObject nfa, String target, long pos, ThreadContext tc) {
/* Run the NFA. */
int[] fates = runNFA(tc, (NFAInstance)nfa, target, pos);
/* Copy results into an RIA. */
SixModelObject BOOTIntArray = tc.gc.BOOTIntArray;
SixModelObject fateRes = BOOTIntArray.st.REPR.allocate(tc, BOOTIntArray.st);
fateRes.initialize(tc);
for (int i = 0; i < fates.length; i++) {
tc.native_i = fates[i];
fateRes.bind_pos_native(tc, i);
}
return fateRes;
}
public static SixModelObject nfarunalt(SixModelObject nfa, String target, long pos,
SixModelObject bstack, SixModelObject cstack, SixModelObject marks, ThreadContext tc) {
/* Run the NFA. */
int[] fates = runNFA(tc, (NFAInstance)nfa, target, pos);
/* Push the results onto the bstack. */
long caps = cstack == null || cstack instanceof TypeObject ? 0 : cstack.elems(tc);
for (int i = 0; i < fates.length; i++) {
marks.at_pos_native(tc, fates[i]);
bstack.push_native(tc);
tc.native_i = pos;
bstack.push_native(tc);
tc.native_i = 0;
bstack.push_native(tc);
tc.native_i = caps;
bstack.push_native(tc);
}
return nfa;
}
/* The NFA evaluator. */
private static ArrayList<Integer> fates = new ArrayList<Integer>();
private static ArrayList<Integer> curst = new ArrayList<Integer>();
private static ArrayList<Integer> nextst = new ArrayList<Integer>();
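/* Runs the NFA over 'target' starting at 'pos'. Two work lists hold the
 * states for the current and the next input position; 'done' records the
 * generation in which a state was last expanded, so each state is visited at
 * most once per position without clearing the array between steps. Note that
 * the scratch lists above are static, so this evaluator is not thread-safe. */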
private static int[] runNFA(ThreadContext tc, NFAInstance nfa, String target, long pos) {
int eos = target.length();
int gen = 1;
/* Allocate a "done states" array. */
int numStates = nfa.numStates;
int[] done = new int[numStates + 1];
/* Clear out other re-used arrays. */
curst.clear();
nextst.clear();
fates.clear();
nextst.add(1);
while (!nextst.isEmpty() && pos <= eos) {
/* Translation of:
* my @curst := @nextst;
* @nextst := [];
* But avoids an extra allocation per offset. */
ArrayList<Integer> temp = curst;
curst = nextst;
temp.clear();
nextst = temp;
/* Save how many fates we have before this position is considered. */
int prevFates = fates.size();
while (!curst.isEmpty()) {
int top = curst.size() - 1;
int st = curst.get(top);
curst.remove(top);
if (st <= numStates) {
if (done[st] == gen)
continue;
done[st] = gen;
}
NFAStateInfo[] edgeInfo = nfa.states[st - 1];
for (int i = 0; i < edgeInfo.length; i++) {
int act = edgeInfo[i].act;
int to = edgeInfo[i].to;
if (act == NFA.EDGE_FATE) {
/* Crossed a fate edge. Check if we already saw this, and
* if so bump the entry we already saw. */
int arg = edgeInfo[i].arg_i;
boolean foundFate = false;
for (int j = 0; j < fates.size(); j++) {
if (foundFate)
fates.set(j - 1, fates.get(j));
if (fates.get(j) == arg) {
foundFate = true;
if (j < prevFates)
prevFates--;
}
}
if (foundFate)
fates.set(fates.size() - 1, arg);
else
fates.add(arg);
}
else if (act == NFA.EDGE_EPSILON && to <= numStates && done[to] != gen) {
curst.add(to);
}
else if (pos >= eos) {
/* Can't match, so drop state. */
}
else if (act == NFA.EDGE_CODEPOINT) {
char arg = (char)edgeInfo[i].arg_i;
if (target.charAt((int)pos) == arg)
nextst.add(to);
}
else if (act == NFA.EDGE_CODEPOINT_NEG) {
char arg = (char)edgeInfo[i].arg_i;
if (target.charAt((int)pos) != arg)
nextst.add(to);
}
else if (act == NFA.EDGE_CHARCLASS) {
if (iscclass(edgeInfo[i].arg_i, target, pos) != 0)
nextst.add(to);
}
else if (act == NFA.EDGE_CHARCLASS_NEG) {
if (iscclass(edgeInfo[i].arg_i, target, pos) == 0)
nextst.add(to);
}
else if (act == NFA.EDGE_CHARLIST) {
String arg = edgeInfo[i].arg_s;
if (arg.indexOf(target.charAt((int)pos)) >= 0)
nextst.add(to);
}
else if (act == NFA.EDGE_CHARLIST_NEG) {
String arg = edgeInfo[i].arg_s;
if (arg.indexOf(target.charAt((int)pos)) < 0)
nextst.add(to);
}
else if (act == NFA.EDGE_CODEPOINT_I) {
char uc_arg = edgeInfo[i].arg_uc;
char lc_arg = edgeInfo[i].arg_lc;
char ord = target.charAt((int)pos);
if (ord == lc_arg || ord == uc_arg)
nextst.add(to);
}
else if (act == NFA.EDGE_CODEPOINT_I_NEG) {
char uc_arg = edgeInfo[i].arg_uc;
char lc_arg = edgeInfo[i].arg_lc;
char ord = target.charAt((int)pos);
if (ord != lc_arg && ord != uc_arg)
nextst.add(to);
}
}
}
/* Move to next character and generation. */
pos++;
gen++;
/* If we got multiple fates at this offset, sort them by the
* declaration order (represented by the fate number). In the
* future, we'll want to factor in longest literal prefix too. */
int charFates = fates.size() - prevFates;
if (charFates > 1) {
List<Integer> charFateList = fates.subList(prevFates, fates.size());
Collections.sort(charFateList, Collections.reverseOrder());
}
}
int[] result = new int[fates.size()];
for (int i = 0; i < fates.size(); i++)
result[i] = fates.get(i);
return result;
}
/* Regex engine mark stack operations. */
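/* Each mark frame on bstack occupies four consecutive native slots, in the
 * order [mark, pos, rep, caps]; 'caps' is carried over from the top slot of
 * the previous frame. rxpeek and rxcommit rely on this layout when they walk
 * the stack backwards in strides of four looking for a matching mark. */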
public static void rxmark(SixModelObject bstack, long mark, long pos, long rep, ThreadContext tc) {
long elems = bstack.elems(tc);
long caps;
if (elems > 0) {
bstack.at_pos_native(tc, elems - 1);
caps = tc.native_i;
}
else {
caps = 0;
}
tc.native_i = mark;
bstack.push_native(tc);
tc.native_i = pos;
bstack.push_native(tc);
tc.native_i = rep;
bstack.push_native(tc);
tc.native_i = caps;
bstack.push_native(tc);
}
public static long rxpeek(SixModelObject bstack, long mark, ThreadContext tc) {
long ptr = bstack.elems(tc);
while (ptr >= 0) {
bstack.at_pos_native(tc, ptr);
if (tc.native_i == mark)
break;
ptr -= 4;
}
return ptr;
}
public static void rxcommit(SixModelObject bstack, long mark, ThreadContext tc) {
long ptr = bstack.elems(tc);
long caps;
if (ptr > 0) {
bstack.at_pos_native(tc, ptr - 1);
caps = tc.native_i;
}
else {
caps = 0;
}
while (ptr >= 0) {
bstack.at_pos_native(tc, ptr);
if (tc.native_i == mark)
break;
ptr -= 4;
}
bstack.set_elems(tc, ptr);
if (caps > 0) {
if (ptr > 0) {
/* top mark frame is an autofail frame, reuse it to hold captures */
bstack.at_pos_native(tc, ptr - 3);
if (tc.native_i < 0) {
tc.native_i = caps;
bstack.bind_pos_native(tc, ptr - 1);
}
}
/* push a new autofail frame onto bstack to hold the captures */
tc.native_i = 0;
bstack.push_native(tc);
tc.native_i = -1;
bstack.push_native(tc);
tc.native_i = 0;
bstack.push_native(tc);
tc.native_i = caps;
bstack.push_native(tc);
}
}
/* Coercions. */
public static long coerce_s2i(String in) {
try {
return Long.parseLong(in);
}
catch (NumberFormatException e) {
return 0;
}
}
public static double coerce_s2n(String in) {
try {
return Double.parseDouble(in);
}
catch (NumberFormatException e) {
return 0.0;
}
}
public static String coerce_i2s(long in) {
return Long.toString(in);
}
public static String coerce_n2s(double in) {
return in == (long)in ? Long.toString((long)in) : Double.toString(in);
}
/* Long literal workaround. */
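/* (A JVM class file caps a single string constant at 65535 UTF-8 bytes, so
 * oversized literals are emitted in parts and re-joined here; the builder is
 * pre-sized for the worst case of maximum-length parts.) */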
public static String join_literal(String[] parts) {
StringBuilder retval = new StringBuilder(parts.length * 65535);
for (int i = 0; i < parts.length; i++)
retval.append(parts[i]);
return retval.toString();
}
/* Big integer operations. */
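/* Boxing convention: when the operand is not a P6bigintInstance itself, these
 * helpers assume its first native attribute holds the BigInteger (see the
 * hack note in getBI) and go through get/bind_attribute_native instead. */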
private static BigInteger getBI(ThreadContext tc, SixModelObject obj) {
if (obj instanceof P6bigintInstance)
return ((P6bigintInstance)obj).value;
/* What follows is a bit of a hack, relying on the first field being the
* big integer. */
obj.get_attribute_native(tc, null, null, 0);
return (BigInteger)tc.native_j;
}
private static SixModelObject makeBI(ThreadContext tc, SixModelObject type, BigInteger value) {
SixModelObject res = type.st.REPR.allocate(tc, type.st);
res.initialize(tc);
if (res instanceof P6bigintInstance) {
((P6bigintInstance)res).value = value;
}
else {
tc.native_j = value;
res.bind_attribute_native(tc, null, null, 0);
}
return res;
}
public static SixModelObject fromstr_I(String str, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, new BigInteger(str));
}
public static String tostr_I(SixModelObject value, ThreadContext tc) {
return getBI(tc, value).toString();
}
public static String base_I(SixModelObject value, long radix, ThreadContext tc) {
return getBI(tc, value).toString((int)radix).toUpperCase();
}
public static long isbig_I(SixModelObject value, ThreadContext tc) {
/* Check if it needs more bits than a long can offer; note that
* bitLength excludes sign considerations, thus 63 rather than 64. */
return getBI(tc, value).bitLength() > 63 ? 1 : 0;
}
public static SixModelObject fromnum_I(double num, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, BigDecimal.valueOf(num).toBigInteger());
}
public static double tonum_I(SixModelObject value, ThreadContext tc) {
return getBI(tc, value).doubleValue();
}
public static long bool_I(SixModelObject a, ThreadContext tc) {
return getBI(tc, a).compareTo(BigInteger.ZERO) == 0 ? 0 : 1;
}
public static long cmp_I(SixModelObject a, SixModelObject b, ThreadContext tc) {
return getBI(tc, a).compareTo(getBI(tc, b));
}
public static long iseq_I(SixModelObject a, SixModelObject b, ThreadContext tc) {
return getBI(tc, a).compareTo(getBI(tc, b)) == 0 ? 1 : 0;
}
public static long isne_I(SixModelObject a, SixModelObject b, ThreadContext tc) {
return getBI(tc, a).compareTo(getBI(tc, b)) == 0 ? 0 : 1;
}
public static long islt_I(SixModelObject a, SixModelObject b, ThreadContext tc) {
return getBI(tc, a).compareTo(getBI(tc, b)) < 0 ? 1 : 0;
}
public static long isle_I(SixModelObject a, SixModelObject b, ThreadContext tc) {
return getBI(tc, a).compareTo(getBI(tc, b)) <= 0 ? 1 : 0;
}
public static long isgt_I(SixModelObject a, SixModelObject b, ThreadContext tc) {
return getBI(tc, a).compareTo(getBI(tc, b)) > 0 ? 1 : 0;
}
public static long isge_I(SixModelObject a, SixModelObject b, ThreadContext tc) {
return getBI(tc, a).compareTo(getBI(tc, b)) >= 0 ? 1 : 0;
}
public static SixModelObject add_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).add(getBI(tc, b)));
}
public static SixModelObject sub_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).subtract(getBI(tc, b)));
}
public static SixModelObject mul_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).multiply(getBI(tc, b)));
}
public static SixModelObject div_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).divide(getBI(tc, b)));
}
public static double div_In(SixModelObject a, SixModelObject b, ThreadContext tc) {
return new BigDecimal(getBI(tc, a)).divide(new BigDecimal(getBI(tc, b))).doubleValue();
}
public static SixModelObject mod_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).mod(getBI(tc, b)));
}
public static SixModelObject expmod_I(SixModelObject a, SixModelObject b, SixModelObject c, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).modPow(getBI(tc, b), getBI(tc, c)));
}
public static long isprime_I(SixModelObject a, long certainty, ThreadContext tc) {
BigInteger bi = getBI(tc, a);
if (bi.compareTo(BigInteger.valueOf(1)) <= 0) {
return 0;
}
return bi.isProbablePrime((int)certainty) ? 1 : 0;
}
public static SixModelObject rand_I(SixModelObject a, SixModelObject type, ThreadContext tc) {
BigInteger size = getBI(tc, a);
BigInteger random = new BigInteger(size.bitLength(), ThreadLocalRandom.current()).mod(size);
return makeBI(tc, type, random);
}
public static SixModelObject pow_I(SixModelObject a, SixModelObject b, SixModelObject biType, SixModelObject nType, ThreadContext tc) {
BigInteger base = getBI(tc, a);
BigInteger exponent = getBI(tc, b);
int cmp = exponent.compareTo(BigInteger.ZERO);
if (cmp == 0 || base.compareTo(BigInteger.ONE) == 0) {
return makeBI(tc, biType, BigInteger.ONE);
}
else if (cmp > 0) {
if (exponent.bitLength() > 31) {
/* Overflows integer. Terrifyingly huge, but try to cope somehow. */
cmp = base.compareTo(BigInteger.ZERO);
if (cmp == 0 || base.compareTo(BigInteger.ONE) == 0) {
/* 0 ** $big_number and 1 ** big_number are easy to do: */
return makeBI(tc, biType, base);
}
else {
/* Otherwise, do floating point infinity of the right sign. */
SixModelObject result = nType.st.REPR.allocate(tc, nType.st);
result.initialize(tc);
result.set_num(tc, cmp > 0 ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY);
return result;
}
}
else {
/* Can safely take its integer value. */
return makeBI(tc, biType, base.pow(exponent.intValue()));
}
}
else {
double fBase = base.doubleValue();
double fExponent = exponent.doubleValue();
SixModelObject result = nType.st.REPR.allocate(tc, nType.st);
result.initialize(tc);
result.set_num(tc, Math.pow(fBase, fExponent));
return result;
}
}
public static SixModelObject neg_I(SixModelObject a, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).negate());
}
public static SixModelObject abs_I(SixModelObject a, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).abs());
}
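/* Parses an integer literal in the given radix starting at 'zpos'. Flag 0x01
 * negates the result and flag 0x02 permits a leading '+' or '-'; with flag
 * 0x04 set, trailing zero digits are left out of the committed value/base
 * (presumably for fractional parts). Underscores between digits are skipped.
 * Returns a 3-element array of big integers [value, base, pos], where pos is
 * the position just past the last digit consumed, or -1 if none was. */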
public static SixModelObject radix_I(long radix_l, String str, long zpos, long flags, SixModelObject type, ThreadContext tc) {
BigInteger zvalue = BigInteger.ZERO;
BigInteger zbase = BigInteger.ONE;
int chars = str.length();
BigInteger value = zvalue;
BigInteger base = zbase;
long pos = -1;
char ch;
boolean neg = false;
BigInteger radix = BigInteger.valueOf(radix_l);
if (radix_l > 36) {
throw ExceptionHandling.dieInternal(tc, "Cannot convert radix of " + radix_l + " (max 36)");
}
ch = (zpos < chars) ? str.charAt((int)zpos) : 0;
if ((flags & 0x02) != 0 && (ch == '+' || ch == '-')) {
neg = (ch == '-');
zpos++;
ch = (zpos < chars) ? str.charAt((int)zpos) : 0;
}
while (zpos < chars) {
if (ch >= '0' && ch <= '9') ch = (char)(ch - '0');
else if (ch >= 'a' && ch <= 'z') ch = (char)(ch - 'a' + 10);
else if (ch >= 'A' && ch <= 'Z') ch = (char)(ch - 'A' + 10);
else break;
if (ch >= radix_l) break;
zvalue = zvalue.multiply(radix).add(BigInteger.valueOf(ch));
zbase = zbase.multiply(radix);
zpos++; pos = zpos;
if (ch != 0 || (flags & 0x04) == 0) { value=zvalue; base=zbase; }
if (zpos >= chars) break;
ch = str.charAt((int)zpos);
if (ch != '_') continue;
zpos++;
if (zpos >= chars) break;
ch = str.charAt((int)zpos);
}
if (neg || (flags & 0x01) != 0) { value = value.negate(); }
HLLConfig hllConfig = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig;
SixModelObject result = hllConfig.slurpyArrayType.st.REPR.allocate(tc,
hllConfig.slurpyArrayType.st);
result.initialize(tc);
result.push_boxed(tc, makeBI(tc, type, value));
result.push_boxed(tc, makeBI(tc, type, base));
result.push_boxed(tc, makeBI(tc, type, BigInteger.valueOf(pos)));
return result;
}
public static SixModelObject bitor_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).or(getBI(tc, b)));
}
public static SixModelObject bitxor_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).xor(getBI(tc, b)));
}
public static SixModelObject bitand_I(SixModelObject a, SixModelObject b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).and(getBI(tc, b)));
}
public static SixModelObject bitneg_I(SixModelObject a, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).not());
}
public static SixModelObject bitshiftl_I(SixModelObject a, long b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).shiftLeft((int)b));
}
public static SixModelObject bitshiftr_I(SixModelObject a, long b, SixModelObject type, ThreadContext tc) {
return makeBI(tc, type, getBI(tc, a).shiftRight((int)b));
}
/* Evaluation of code; JVM-specific ops. */
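/* compilejast turns a JAST dump into an in-memory class; loadcompunit then
 * defines that class through a throwaway ByteClassLoader, instantiates the
 * CompilationUnit, and initializes it under the compilee's HLL config when
 * compileeHLL is non-zero. */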
public static SixModelObject compilejast(String dump, ThreadContext tc) {
EvalResult res = new EvalResult();
res.jc = JASTToJVMBytecode.buildClassFromString(dump);
return res;
}
public static String compilejasttofile(String dump, String filename, ThreadContext tc) {
JASTToJVMBytecode.writeClassFromString(dump, filename);
return dump;
}
public static SixModelObject loadcompunit(SixModelObject obj, long compileeHLL, ThreadContext tc) {
try {
EvalResult res = (EvalResult)obj;
ByteClassLoader cl = new ByteClassLoader(res.jc.bytes);
res.cu = (CompilationUnit)cl.findClass(res.jc.name).newInstance();
if (compileeHLL != 0)
usecompileehllconfig(tc);
res.cu.initializeCompilationUnit(tc);
if (compileeHLL != 0)
usecompilerhllconfig(tc);
res.jc = null;
return obj;
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
public static long iscompunit(SixModelObject obj, ThreadContext tc) {
return obj instanceof EvalResult ? 1 : 0;
}
public static SixModelObject compunitmainline(SixModelObject obj, ThreadContext tc) {
EvalResult res = (EvalResult)obj;
return res.cu.codeRefs[res.cu.mainlineIdx()];
}
public static SixModelObject compunitcodes(SixModelObject obj, ThreadContext tc) {
EvalResult res = (EvalResult)obj;
SixModelObject Array = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.listType;
SixModelObject result = Array.st.REPR.allocate(tc, Array.st);
result.initialize(tc);
for (int i = 0; i < res.cu.codeRefs.length; i++)
result.bind_pos_boxed(tc, i, res.cu.codeRefs[i]);
return result;
}
public static SixModelObject jvmclasspaths(ThreadContext tc) {
SixModelObject Array = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.listType;
SixModelObject Str = tc.curFrame.codeRef.staticInfo.compUnit.hllConfig.strBoxType;
SixModelObject result = Array.st.REPR.allocate(tc, Array.st);
result.initialize(tc);
String cpStr = System.getProperty("java.class.path");
String[] cps = cpStr.split("[:;]");
for (int i = 0; i < cps.length; i++)
result.push_boxed(tc, box_s(cps[i], Str, tc));
return result;
}
private static int compileeDepth = 0;
public static long usecompileehllconfig(ThreadContext tc) {
if (compileeDepth == 0)
tc.gc.useCompileeHLLConfig();
compileeDepth++;
return 1;
}
public static long usecompilerhllconfig(ThreadContext tc) {
compileeDepth--;
if (compileeDepth == 0)
tc.gc.useCompilerHLLConfig();
return 1;
}
}
package edu.oakland.OUSoft.linkedList;
import edu.oakland.OUSoft.items.Instructor;
import edu.oakland.OUSoft.items.Person;
import edu.oakland.OUSoft.items.Student;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import static org.junit.Assert.*;
public class cseLinkedListTest {
private cseLinkedList<Person> list;
// private ArrayList<Person> list;
private Person testPerson1 = new Person("TestID1");
private Person testPerson2 = new Person("TestID2");
private Person testPerson3 = new Person("TestID3");
private Person testPerson4 = new Person("TestID4");
private Student testStudent1 = new Student("TestStudentID1");
private Student testStudent2 = new Student("TestStudentID2");
private Instructor testInstructor1 = new Instructor("TestInstructorID1");
private Instructor testInstructor2 = new Instructor("TestInstructorID2");
@Before
public void setUp() throws Exception {
list = new cseLinkedList<>();
// list = new ArrayList<>();
}
@Test
public void size() throws Exception {
assertEquals(0, list.size());
list.add(testPerson1);
assertEquals(1, list.size());
list.add(testPerson2);
assertEquals(2, list.size());
list.remove(testPerson2);
assertEquals(1, list.size());
}
@Test
public void isEmpty() throws Exception {
assertTrue(list.isEmpty());
list.add(testPerson1);
assertFalse(list.isEmpty());
list.remove(testPerson1);
assertTrue(list.isEmpty());
}
@Test
public void contains() throws Exception {
assertFalse(list.contains(testPerson1));
list.add(testPerson1);
assertTrue(list.contains(testPerson1));
assertFalse(list.contains("Some object"));
assertFalse(list.contains(null));
}
@Test
public void iterator() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
Iterator<Person> iterator = list.iterator();
assertTrue(iterator.hasNext());
assertEquals("Iterator didn't give expected first element", testPerson1, iterator.next());
assertTrue(iterator.hasNext());
assertEquals("Iterator didn't give expected second element", testPerson2, iterator.next());
assertFalse(iterator.hasNext());
}
@Test
public void toArray() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
Object[] array = list.toArray();
assertEquals(testPerson1, array[0]);
assertEquals(testPerson2, array[1]);
}
@Test
public void toArray1() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
Person[] array = list.toArray(new Person[0]);
assertEquals(testPerson1, array[0]);
assertEquals(testPerson2, array[1]);
Person[] array2 = new Person[2];
list.toArray(array2);
assertEquals(testPerson1, array2[0]);
assertEquals(testPerson2, array2[1]);
Person[] array3 = new Person[3];
array3[2] = testPerson4;
list.toArray(array3);
assertEquals(testPerson1, array3[0]);
assertEquals(testPerson2, array3[1]);
assertNull(array3[2]);
}
@Test
public void add() throws Exception {
assertTrue(list.add(testPerson1));
assertTrue(list.contains(testPerson1));
assertEquals(testPerson1, list.get(0));
assertEquals(1, list.size());
assertTrue(list.add(testPerson2));
assertTrue(list.contains(testPerson2));
assertEquals(testPerson2, list.get(1));
assertEquals(2, list.size());
//Do not allow duplicate
try {
list.add(testPerson1);
assertTrue("Added a duplicate element", false);
} catch (IllegalArgumentException ex) {
}
//Do not allow null
try {
list.add(null);
assertEquals("Added a null element", 2, list.size());
} catch (NullPointerException e) {
assertEquals("Added a null element", 2, list.size());
}
}
@Test
public void remove() throws Exception {
assertFalse(list.remove(testPerson1));
list.add(testPerson1);
assertTrue(list.remove(testPerson1));
assertFalse(list.contains(testPerson1));
list.add(testPerson1);
list.add(testPerson2);
list.add(testPerson3);
assertEquals(3, list.size());
assertTrue(list.remove(testPerson2));
assertEquals(testPerson1, list.get(0));
assertEquals(testPerson3, list.get(1));
assertFalse(list.contains(testPerson2));
try {
assertFalse("Should have returned false for null remove", list.remove(null));
} catch (NullPointerException mayThrow) {
}
}
@Test
public void containsAll() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
list.add(testPerson3);
assertEquals(3, list.size());
Collection<Person> testCollection1 = new ArrayList<>();
Collection<Person> testCollection2 = new ArrayList<>();
Collection<Person> testCollection3 = new ArrayList<>();
testCollection1.add(testPerson2);
testCollection2.add(testPerson1);
testCollection2.add(testPerson2);
testCollection2.add(testPerson3);
assertTrue(list.containsAll(testCollection1));
assertTrue(list.containsAll(testCollection2));
assertTrue(list.containsAll(testCollection3));
list.remove(testPerson3);
assertFalse(list.containsAll(testCollection2));
List<String> wrongType = new ArrayList<>();
wrongType.add("Checking for typing exceptions");
assertFalse(list.containsAll(wrongType));
try {
list.containsAll(null);
assertTrue("Did not throw exception for null containsAll", false);
} catch (NullPointerException shouldThrow) {
}
testCollection2.add(null);
try {
list.containsAll(testCollection2);
assertTrue("Did not throw exception for null element in containsAll", false);
} catch (NullPointerException shouldThrow) {
}
}
@Test
public void addAll() throws Exception {
Collection<Person> testCollection1 = new ArrayList<>();
testCollection1.add(testPerson1);
testCollection1.add(testPerson2);
testCollection1.add(testPerson3);
assertTrue(list.addAll(testCollection1));
assertTrue(list.containsAll(testCollection1));
Collection<Person> testCollection2 = new ArrayList<>();
testCollection2.add(null);
try {
list.addAll(null);
assertTrue("Did not throw exception for null addAll", false);
} catch (NullPointerException shouldThrow) {
}
try {
list.addAll(testCollection2);
assertTrue("Did not throw exception for null element in addAll", false);
} catch (NullPointerException shouldThrow) {
}
testCollection2.clear();
testCollection2.add(testPerson1);
try {
list.addAll(testCollection2);
assertTrue("Did not throw exception for duplicate element in addAll", false);
} catch (IllegalArgumentException shouldThrow) {
}
}
@Test
public void addAllIndex() throws Exception {
Collection<Person> testCollection1 = new ArrayList<>();
testCollection1.add(testPerson3);
testCollection1.add(testPerson4);
list.add(testPerson1);
list.add(testPerson2);
assertTrue(list.addAll(1, testCollection1));
assertEquals(testPerson1, list.get(0));
assertEquals(testPerson3, list.get(1));
assertEquals(testPerson4, list.get(2));
assertEquals(testPerson2, list.get(3));
Collection<Person> testCollection2 = new ArrayList<>();
testCollection2.add(null);
try {
list.addAll(1, null);
assertTrue("Did not throw exception for null addAll", false);
} catch (NullPointerException shouldThrow) {
}
try {
list.addAll(1, testCollection2);
assertTrue("Did not throw exception for null element in addAll", false);
} catch (NullPointerException shouldThrow) {
}
testCollection2.clear();
testCollection2.add(testPerson1);
try {
list.addAll(1, testCollection2);
assertTrue("Did not throw exception for duplicate element in addAll", false);
} catch (IllegalArgumentException shouldThrow) {
}
try {
list.addAll(99, testCollection1);
assertTrue("Did not throw exception for out of bounds index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
try {
list.addAll(-1, testCollection1);
assertTrue("Did not throw exception for negative index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
}
@Test
public void removeAll() throws Exception {
Collection<Person> testCollection1 = new ArrayList<>();
testCollection1.add(testPerson1);
testCollection1.add(testPerson2);
testCollection1.add(testPerson3);
assertFalse("List should not have changed", list.removeAll(testCollection1));
assertTrue(list.addAll(testCollection1));
assertTrue(list.containsAll(testCollection1));
testCollection1.add(testPerson4); //Not removed because not in list, but removeAll should still return true
assertTrue(list.removeAll(testCollection1));
assertFalse(list.contains(testPerson1));
try {
list.removeAll(null);
assertTrue("Did not throw exception for null remove", false);
} catch (NullPointerException shouldThrow) {
}
Collection<Person> testCollection2 = new ArrayList<>();
testCollection2.add(null);
try {
list.removeAll(testCollection2);
assertTrue("Did not throw exception for null element in remove", false);
} catch (NullPointerException shouldThrow) {
}
}
@Test
public void retainAll() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
list.add(testPerson3);
list.add(testPerson4);
Collection<Person> testCollection1 = new ArrayList<>();
testCollection1.add(testPerson1);
testCollection1.add(testPerson2);
testCollection1.add(testPerson3);
testCollection1.add(testPerson4);
assertFalse(list.retainAll(testCollection1));
testCollection1.remove(testPerson2);
testCollection1.remove(testPerson4);
assertTrue(list.retainAll(testCollection1));
assertEquals(testPerson1, list.get(0));
assertEquals(testPerson3, list.get(1));
assertEquals(2, list.size());
try {
list.retainAll(null);
assertTrue("Did not throw exception for null retain", false);
} catch (NullPointerException shouldThrow) {
}
Collection<Person> testCollection2 = new ArrayList<>();
testCollection2.add(null);
try {
list.retainAll(testCollection2);
assertTrue("Did not throw exception for null element in retain", false);
} catch (NullPointerException shouldThrow) {
}
}
@Test
public void clear() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
list.clear();
assertEquals(0, list.size());
assertTrue(list.isEmpty());
assertFalse(list.contains(testPerson2));
}
@Test
public void equals() throws Exception {
List<Person> testList = new ArrayList<>();
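//Stub test: List.equals should hold iff both lists contain equal elements
//in the same order; the comparison against testList is not asserted yet.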
}
@Test
public void get() throws Exception {
list.add(testPerson1);
assertEquals(testPerson1, list.get(0));
list.add(testPerson2);
assertEquals(testPerson2, list.get(1));
list.add(testPerson3);
assertEquals(testPerson3, list.get(2));
try {
list.get(-1);
assertTrue("Did not throw exception for negative index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
try {
list.get(99);
assertTrue("Did not throw exception for out of bounds index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
}
@Test
public void set() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
list.add(testPerson3);
assertEquals(testPerson1, list.get(0));
assertEquals(testPerson2, list.get(1));
assertEquals(testPerson3, list.get(2));
assertEquals(testPerson2, list.set(1, testPerson4)); //Set returns element previously at that index
assertEquals(testPerson1, list.get(0));
assertEquals(testPerson4, list.get(1));
assertEquals(testPerson3, list.get(2));
try {
list.set(2, null);
assertTrue("Did not throw exception for null element", false);
} catch (NullPointerException shouldThrow) {
}
try {
list.set(2, testPerson1);
assertTrue("Did not throw exception for duplicate element", false);
} catch (IllegalArgumentException shouldThrow) {
}
try {
list.set(-1, testPerson4);
assertTrue("Did not throw exception for negative index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
try {
list.set(99, testPerson4);
assertTrue("Did not throw exception for out of bounds index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
}
@Test
public void addIndex() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
list.add(1, testPerson3);
assertEquals(testPerson1, list.get(0));
assertEquals(testPerson3, list.get(1));
assertEquals(testPerson2, list.get(2));
try {
list.add(1, testPerson1);
assertTrue("Did not throw exception for duplicate element", false);
} catch (IllegalArgumentException shouldThrow) {
}
try {
list.add(1, null);
assertTrue("Did not throw exception for null element", false);
} catch (NullPointerException shouldThrow) {
}
try {
list.add(-1, testPerson4);
assertTrue("Did not throw exception for negative index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
try {
list.add(99, testPerson4);
assertTrue("Did not throw exception for out of bounds index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
}
@Test
public void removeIndex() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
list.add(testPerson3);
list.add(testPerson4);
assertEquals(testPerson3, list.remove(2));
assertEquals(testPerson1, list.get(0));
assertEquals(testPerson2, list.get(1));
assertEquals(testPerson4, list.get(2));
assertEquals(testPerson1, list.remove(0));
assertEquals(testPerson2, list.get(0));
assertEquals(testPerson4, list.get(1));
try {
list.remove(-1);
assertTrue("Did not throw exception for negative index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
try {
list.remove(99);
assertTrue("Did not throw exception for out of bounds index", false);
} catch (IndexOutOfBoundsException shouldThrow) {
}
}
@Test
public void indexOf() throws Exception {
list.add(testPerson1);
list.add(testPerson2);
assertEquals(0, list.indexOf(testPerson1));
assertEquals(1, list.indexOf(testPerson2));
assertEquals(-1, list.indexOf(testPerson3));
}
@Test
public void lastIndexOf() throws Exception {
indexOf(); //No duplicates allowed, so lastIndexOf behaves the same as indexOf
}
}
package koopa.grammars.cobol;
import koopa.tokens.Token;
import koopa.verifiers.Verifier;
import koopa.verifiers.VerifyingSink;
public class CobolVerifier extends VerifyingSink {
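/* Each registered Verifier compares the latest stream positions of a few
 * grammar markers (via lastIndexOf) to decide whether the current token sits
 * inside the construct it belongs to; p0 is always the 0 baseline. The
 * highly repetitive shape suggests this class is generated from the grammar. */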
protected void initialize() {
register("water", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("workingStorageSection");
final int p2 = lastIndexOf("copyStatement");
final int p3 = lastIndexOf("replaceStatement");
final int p4 = lastIndexOf("execStatement");
if (p0 < p1 && p2 < p1 && p3 < p1 && p4 < p1) {
warn(t, "Water in the working storage section.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("linkageSection");
final int p2 = lastIndexOf("copyStatement");
final int p3 = lastIndexOf("replaceStatement");
final int p4 = lastIndexOf("execStatement");
if (p0 < p1 && p2 < p1 && p3 < p1 && p4 < p1) {
warn(t, "Water in the linkage section.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("fileSection");
final int p2 = lastIndexOf("copyStatement");
final int p3 = lastIndexOf("replaceStatement");
final int p4 = lastIndexOf("execStatement");
if (p0 < p1 && p2 < p1 && p3 < p1 && p4 < p1) {
warn(t, "Water in the file section.");
}
}
}
});
register("ADD", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("addStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "ADD in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("addStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "ADD not in add statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("addStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "ADD not in add statement.");
}
}
}
});
register("END-ADD", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("addStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-ADD in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("addStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-ADD not in add statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("addStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-ADD not in add statement.");
}
}
}
});
register("COPY", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("copyStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "COPY in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("copyStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "COPY not in copy statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("copyStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "COPY not in copy statement.");
}
}
}
});
register("CALL", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "CALL in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("callStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "CALL not in call statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("callStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "CALL not in call statement.");
}
}
}
});
register("CANCEL", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "CANCEL in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("cancelStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "CANCEL not in cancel statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("cancelStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "CANCEL not in cancel statement.");
}
}
}
});
register("CLOSE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "CLOSE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("closeStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "CLOSE not in close statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("closeStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "CLOSE not in close statement.");
}
}
}
});
register("DIVIDE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("divideStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "DIVIDE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("divideStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "DIVIDE not in divide statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("divideStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "DIVIDE not in divide statement.");
}
}
}
});
register("END-DIVIDE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("divideStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-DIVIDE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("divideStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-DIVIDE not in divide statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("divideStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-DIVIDE not in divide statement.");
}
}
}
});
register("ENTRY", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "ENTRY in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("entryStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "ENTRY not in entry statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("entryStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "ENTRY not in entry statement.");
}
}
}
});
register("EVALUATE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "EVALUATE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("evaluateStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "EVALUATE not in evaluate statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("evaluateStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "EVALUATE not in evaluate statement.");
}
}
}
});
register("EXEC", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "EXEC in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "EXEC not in exec statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("execStatement");
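// Note: for the EXEC keyword the generated template compares execStatement
// against itself (p2 and p3 are identical), so this check is redundant;
// it appears to be a harmless artifact of the code generator.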
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "EXEC not in exec statement.");
}
}
}
});
register("EXIT", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "EXIT in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("exitStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "EXIT not in exit statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("exitStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "EXIT not in exit statement.");
}
}
}
});
register("GOBACK", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "GOBACK in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("gobackStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "GOBACK not in goback statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("gobackStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "GOBACK not in goback statement.");
}
}
}
});
register("GO", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "GO in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("goToStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "GO not in go statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("goToStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "GO not in go statement.");
}
}
}
});
register("IF", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "IF in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("ifStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "IF not in if statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("ifStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "IF not in if statement.");
}
}
}
});
register("MOVE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "MOVE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("moveStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "MOVE not in move statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("moveStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "MOVE not in move statement.");
}
}
}
});
register("MULTIPLY", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("multiplyStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "MULTIPLY in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("multiplyStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "MULTIPLY not in multiply statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("multiplyStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "MULTIPLY not in multiply statement.");
}
}
}
});
register("END-MULTIPLY", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("multiplyStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-MULTIPLY in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("multiplyStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-MULTIPLY not in multiply statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("multiplyStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-MULTIPLY not in multiply statement.");
}
}
}
});
register("OPEN", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("openStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "OPEN in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("openStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "OPEN not in open statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("openStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "OPEN not in open statement.");
}
}
}
});
register("PERFORM", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "PERFORM in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("performStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "PERFORM not in perform statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("performStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "PERFORM not in perform statement.");
}
}
}
});
register("READ", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("readStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "READ in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("readStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "READ not in read statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("readStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "READ not in read statement.");
}
}
}
});
register("END-READ", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("readStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-READ in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("readStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-READ not in read statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("readStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-READ not in read statement.");
}
}
}
});
register("RELEASE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("releaseStatement");
final int p3 = lastIndexOf("water");
if ((p1 < p0 || p1 > p3) && (p2 < p0 || p2 > p3) && p0 < p3) {
warn(t, "RELEASE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("releaseStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "RELEASE not in release statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("releaseStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "RELEASE not in release statement.");
}
}
}
});
register("RETURN", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "RETURN in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("returnStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "RETURN not in return statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("returnStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "RETURN not in return statement.");
}
}
}
});
register("SEARCH", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "SEARCH in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("searchStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "SEARCH not in search statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("searchStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "SEARCH not in search statement.");
}
}
}
});
register("STOP", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "STOP in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("stopStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "STOP not in stop statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("stopStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "STOP not in stop statement.");
}
}
}
});
register("STRING", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("stringStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "STRING in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("stringStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "STRING not in string statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("stringStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "STRING not in string statement.");
}
}
}
});
register("END-STRING", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("stringStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-STRING in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("stringStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-STRING not in string statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("stringStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-STRING not in string statement.");
}
}
}
});
register("SUBTRACT", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("subtractStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "SUBTRACT in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("subtractStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "SUBTRACT not in subtract statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("subtractStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "SUBTRACT not in subtract statement.");
}
}
}
});
register("END-SUBTRACT", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("subtractStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-SUBTRACT in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("subtractStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-SUBTRACT not in subtract statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("subtractStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-SUBTRACT not in subtract statement.");
}
}
}
});
register("UNSTRING", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("unstringStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "UNSTRING in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("unstringStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "UNSTRING not in unstring statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("unstringStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "UNSTRING not in unstring statement.");
}
}
}
});
register("END-UNSTRING", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("unstringStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-UNSTRING in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("unstringStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-UNSTRING not in unstring statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("unstringStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-UNSTRING not in unstring statement.");
}
}
}
});
register("WRITE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("writeStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "WRITE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("writeStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "WRITE not in write statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("writeStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "WRITE not in write statement.");
}
}
}
});
register("END-WRITE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("writeStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-WRITE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("writeStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-WRITE not in write statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("writeStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-WRITE not in write statement.");
}
}
}
});
register("COMPUTE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("computeStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "COMPUTE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("computeStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "COMPUTE not in compute statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("computeStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "COMPUTE not in compute statement.");
}
}
}
});
register("END-COMPUTE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("computeStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-COMPUTE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("computeStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-COMPUTE not in compute statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("computeStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-COMPUTE not in compute statement.");
}
}
}
});
register("START", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("startStatement");
final int p3 = lastIndexOf("water");
if ((p1 < p0 || p1 > p3) && (p2 < p0 || p2 > p3) && p0 < p3) {
warn(t, "START in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("startStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "START not in start statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("startStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "START not in start statement.");
}
}
}
});
register("END-START", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("startStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-START in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("startStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-START not in start statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("startStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-START not in start statement.");
}
}
}
});
register("DELETE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("deleteStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "DELETE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("deleteStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "DELETE not in delete statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("deleteStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "DELETE not in delete statement.");
}
}
}
});
register("END-DELETE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("deleteStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-DELETE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("deleteStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-DELETE not in delete statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("deleteStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-DELETE not in delete statement.");
}
}
}
});
register("REWRITE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("execStatement");
final int p2 = lastIndexOf("rewriteStatement");
final int p3 = lastIndexOf("water");
if ((p1 < p0 || p1 > p3) && (p2 < p0 || p2 > p3) && p0 < p3) {
warn(t, "REWRITE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("rewriteStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "REWRITE not in rewrite statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("rewriteStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "REWRITE not in rewrite statement.");
}
}
}
});
register("END-REWRITE", new Verifier() {
public void verify(Token t) {
{ final int p0 = 0;
final int p1 = lastIndexOf("rewriteStatement");
final int p2 = lastIndexOf("water");
if ((p1 < p0 || p1 > p2) && p0 < p2) {
warn(t, "END-REWRITE in the water.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("rewriteStatement");
final int p2 = lastIndexOf("statement");
if (p0 < p1 && p1 < p2) {
warn(t, "END-REWRITE not in rewrite statement.");
}
}
{ final int p0 = 0;
final int p1 = lastIndexOf("statement");
final int p2 = lastIndexOf("execStatement");
final int p3 = lastIndexOf("rewriteStatement");
if (p0 < p1 && p2 < p1 && p3 < p1) {
warn(t, "END-REWRITE not in rewrite statement.");
}
}
}
});
}
}
|
package org.bouncycastle.cms.test;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.bouncycastle.asn1.ASN1InputStream;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.DERObjectIdentifier;
import org.bouncycastle.asn1.DEROctetString;
import org.bouncycastle.asn1.kisa.KISAObjectIdentifiers;
import org.bouncycastle.asn1.nist.NISTObjectIdentifiers;
import org.bouncycastle.asn1.ntt.NTTObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.cms.CMSEnvelopedData;
import org.bouncycastle.cms.CMSEnvelopedDataGenerator;
import org.bouncycastle.cms.CMSException;
import org.bouncycastle.cms.CMSPBEKey;
import org.bouncycastle.cms.CMSProcessableByteArray;
import org.bouncycastle.cms.KeyTransRecipientInformation;
import org.bouncycastle.cms.PKCS5Scheme2PBEKey;
import org.bouncycastle.cms.RecipientInformation;
import org.bouncycastle.cms.RecipientInformationStore;
import org.bouncycastle.cms.PKCS5Scheme2UTF8PBEKey;
import org.bouncycastle.cms.PasswordRecipientInformation;
import org.bouncycastle.util.encoders.Base64;
import org.bouncycastle.util.encoders.Hex;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.Security;
import java.security.cert.X509Certificate;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
public class EnvelopedDataTest
extends TestCase
{
private static String _signDN;
private static KeyPair _signKP;
private static X509Certificate _signCert;
private static String _origDN;
private static KeyPair _origKP;
private static X509Certificate _origCert;
private static String _reciDN;
private static KeyPair _reciKP;
private static X509Certificate _reciCert;
private static KeyPair _origEcKP;
private static KeyPair _reciEcKP;
private static X509Certificate _reciEcCert;
private static boolean _initialised = false;
private byte[] oldKEK = Base64.decode(
"MIAGCSqGSIb3DQEHA6CAMIACAQIxQaI/MD0CAQQwBwQFAQIDBAUwDQYJYIZIAWUDBAEFBQAEI"
+ "Fi2eHTPM4bQSjP4DUeDzJZLpfemW2gF1SPq7ZPHJi1mMIAGCSqGSIb3DQEHATAUBggqhkiG9w"
+ "0DBwQImtdGyUdGGt6ggAQYk9X9z01YFBkU7IlS3wmsKpm/zpZClTceAAAAAAAAAAAAAA==");
private byte[] ecKeyAgreeMsgAES256 = Base64.decode(
"MIAGCSqGSIb3DQEHA6CAMIACAQIxgcShgcECAQOgQ6FBMAsGByqGSM49AgEF"
+ "AAMyAAPdXlSTpub+qqno9hUGkUDl+S3/ABhPziIB5yGU4678tgOgU5CiKG9Z"
+ "kfnabIJ3nZYwGgYJK4EFEIZIPwACMA0GCWCGSAFlAwQBLQUAMFswWTAtMCgx"
+ "EzARBgNVBAMTCkFkbWluLU1EU0UxETAPBgNVBAoTCDRCQ1QtMklEAgEBBCi/"
+ "rJRLbFwEVW6PcLLmojjW9lI/xGD7CfZzXrqXFw8iHaf3hTRau1gYMIAGCSqG"
+ "SIb3DQEHATAdBglghkgBZQMEASoEEMtCnKKPwccmyrbgeSIlA3qggAQQDLw8"
+ "pNJR97bPpj6baG99bQQQwhEDsoj5Xg1oOxojHVcYzAAAAAAAAAAAAAA=");
private byte[] ecKeyAgreeMsgAES128 = Base64.decode(
"MIAGCSqGSIb3DQEHA6CAMIACAQIxgbShgbECAQOgQ6FBMAsGByqGSM49AgEF"
+ "AAMyAAL01JLEgKvKh5rbxI/hOxs/9WEezMIsAbUaZM4l5tn3CzXAN505nr5d"
+ "LhrcurMK+tAwGgYJK4EFEIZIPwACMA0GCWCGSAFlAwQBBQUAMEswSTAtMCgx"
+ "EzARBgNVBAMTCkFkbWluLU1EU0UxETAPBgNVBAoTCDRCQ1QtMklEAgEBBBhi"
+ "FLjc5g6aqDT3f8LomljOwl1WTrplUT8wgAYJKoZIhvcNAQcBMB0GCWCGSAFl"
+ "AwQBAgQQzXjms16Y69S/rB0EbHqRMaCABBAFmc/QdVW6LTKdEy97kaZzBBBa"
+ "fQuviUS03NycpojELx0bAAAAAAAAAAAAAA==");
private byte[] ecKeyAgreeMsgDESEDE = Base64.decode(
"MIAGCSqGSIb3DQEHA6CAMIACAQIxgcahgcMCAQOgQ6FBMAsGByqGSM49AgEF"
+ "AAMyAALIici6Nx1WN5f0ThH2A8ht9ovm0thpC5JK54t73E1RDzCifePaoQo0"
+ "xd6sUqoyGaYwHAYJK4EFEIZIPwACMA8GCyqGSIb3DQEJEAMGBQAwWzBZMC0w"
+ "KDETMBEGA1UEAxMKQWRtaW4tTURTRTERMA8GA1UEChMINEJDVC0ySUQCAQEE"
+ "KJuqZQ1NB1vXrKPOnb4TCpYOsdm6GscWdwAAZlm2EHMp444j0s55J9wwgAYJ"
+ "KoZIhvcNAQcBMBQGCCqGSIb3DQMHBAjwnsDMsafCrKCABBjyPvqFOVMKxxut"
+ "VfTx4fQlNGJN8S2ATRgECMcTQ/dsmeViAAAAAAAAAAAAAA==");
private byte[] ecMQVKeyAgreeMsgAES128 = Base64.decode(
"MIAGCSqGSIb3DQEHA6CAMIACAQIxgf2hgfoCAQOgQ6FBMAsGByqGSM49AgEF"
+ "AAMyAAPDKU+0H58tsjpoYmYCInMr/FayvCCkupebgsnpaGEB7qS9vzcNVUj6"
+ "mrnmiC2grpmhRwRFMEMwQTALBgcqhkjOPQIBBQADMgACZpD13z9c7DzRWx6S"
+ "0xdbq3S+EJ7vWO+YcHVjTD8NcQDcZcWASW899l1PkL936zsuMBoGCSuBBRCG"
+ "SD8AEDANBglghkgBZQMEAQUFADBLMEkwLTAoMRMwEQYDVQQDEwpBZG1pbi1N"
+ "RFNFMREwDwYDVQQKEwg0QkNULTJJRAIBAQQYFq58L71nyMK/70w3nc6zkkRy"
+ "RL7DHmpZMIAGCSqGSIb3DQEHATAdBglghkgBZQMEAQIEEDzRUpreBsZXWHBe"
+ "onxOtSmggAQQ7csAZXwT1lHUqoazoy8bhAQQq+9Zjj8iGdOWgyebbfj67QAA"
+ "AAAAAAAAAAA=");
private byte[] ecKeyAgreeKey = Base64.decode(
"MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDC8vp7xVTbKSgYVU5Wc"
+ "hGkWbzaj+yUFETIWP1Dt7+WSpq3ikSPdl7PpHPqnPVZfoIWhZANiAgSYHTgxf+Dd"
+ "Tt84dUvuSKkFy3RhjxJmjwIscK6zbEUzKhcPQG2GHzXhWK5x1kov0I74XpGhVkya"
+ "ElH5K6SaOXiXAzcyNGggTOk4+ZFnz5Xl0pBje3zKxPhYu0SnCw7Pcqw=");
private byte[] bobPrivRsaEncrypt = Base64.decode(
"MIIChQIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAKnhZ5g/OdVf"
+ "8qCTQV6meYmFyDVdmpFb+x0B2hlwJhcPvaUi0DWFbXqYZhRBXM+3twg7CcmR"
+ "uBlpN235ZR572akzJKN/O7uvRgGGNjQyywcDWVL8hYsxBLjMGAgUSOZPHPtd"
+ "YMTgXB9T039T2GkB8QX4enDRvoPGXzjPHCyqaqfrAgMBAAECgYBnzUhMmg2P"
+ "mMIbZf8ig5xt8KYGHbztpwOIlPIcaw+LNd4Ogngwy+e6alatd8brUXlweQqg"
+ "9P5F4Kmy9Bnah5jWMIR05PxZbMHGd9ypkdB8MKCixQheIXFD/A0HPfD6bRSe"
+ "TmPwF1h5HEuYHD09sBvf+iU7o8AsmAX2EAnYh9sDGQJBANDDIsbeopkYdo+N"
+ "vKZ11mY/1I1FUox29XLE6/BGmvE+XKpVC5va3Wtt+Pw7PAhDk7Vb/s7q/WiE"
+ "I2Kv8zHCueUCQQDQUfweIrdb7bWOAcjXq/JY1PeClPNTqBlFy2bKKBlf4hAr"
+ "84/sajB0+E0R9KfEILVHIdxJAfkKICnwJAiEYH2PAkA0umTJSChXdNdVUN5q"
+ "SO8bKlocSHseIVnDYDubl6nA7xhmqU5iUjiEzuUJiEiUacUgFJlaV/4jbOSn"
+ "I3vQgLeFAkEAni+zN5r7CwZdV+EJBqRd2ZCWBgVfJAZAcpw6iIWchw+dYhKI"
+ "FmioNRobQ+g4wJhprwMKSDIETukPj3d9NDAlBwJAVxhn1grStavCunrnVNqc"
+ "BU+B1O8BiR4yPWnLMcRSyFRVJQA7HCp8JlDV6abXd8vPFfXuC9WN7rOvTKF8"
+ "Y0ZB9qANMAsGA1UdDzEEAwIAEA==");
private byte[] rfc4134ex5_1 = Base64.decode(
"MIIBHgYJKoZIhvcNAQcDoIIBDzCCAQsCAQAxgcAwgb0CAQAwJjASMRAwDgYD"
+ "VQQDEwdDYXJsUlNBAhBGNGvHgABWvBHTbi7NXXHQMA0GCSqGSIb3DQEBAQUA"
+ "BIGAC3EN5nGIiJi2lsGPcP2iJ97a4e8kbKQz36zg6Z2i0yx6zYC4mZ7mX7FB"
+ "s3IWg+f6KgCLx3M1eCbWx8+MDFbbpXadCDgO8/nUkUNYeNxJtuzubGgzoyEd"
+ "8Ch4H/dd9gdzTd+taTEgS0ipdSJuNnkVY4/M652jKKHRLFf02hosdR8wQwYJ"
+ "KoZIhvcNAQcBMBQGCCqGSIb3DQMHBAgtaMXpRwZRNYAgDsiSf8Z9P43LrY4O"
+ "xUk660cu1lXeCSFOSOpOJ7FuVyU=");
private byte[] rfc4134ex5_2 = Base64.decode(
"MIIBZQYJKoZIhvcNAQcDoIIBVjCCAVICAQIxggEAMIG9AgEAMCYwEjEQMA4G"
+ "A1UEAxMHQ2FybFJTQQIQRjRrx4AAVrwR024uzV1x0DANBgkqhkiG9w0BAQEF"
+ "AASBgJQmQojGi7Z4IP+CVypBmNFoCDoEp87khtgyff2N4SmqD3RxPx+8hbLQ"
+ "t9i3YcMwcap+aiOkyqjMalT03VUC0XBOGv+HYI3HBZm/aFzxoq+YOXAWs5xl"
+ "GerZwTOc9j6AYlK4qXvnztR5SQ8TBjlzytm4V7zg+TGrnGVNQBNw47Ewoj4C"
+ "AQQwDQQLTWFpbExpc3RSQzIwEAYLKoZIhvcNAQkQAwcCAToEGHcUr5MSJ/g9"
+ "HnJVHsQ6X56VcwYb+OfojTBJBgkqhkiG9w0BBwEwGgYIKoZIhvcNAwIwDgIC"
+ "AKAECJwE0hkuKlWhgCBeKNXhojuej3org9Lt7n+wWxOhnky5V50vSpoYRfRR"
+ "yw==");
public EnvelopedDataTest()
{
}
private static void init()
throws Exception
{
if (!_initialised)
{
_initialised = true;
_signDN = "O=Bouncy Castle, C=AU";
_signKP = CMSTestUtil.makeKeyPair();
_signCert = CMSTestUtil.makeCertificate(_signKP, _signDN, _signKP, _signDN);
_origDN = "CN=Bob, OU=Sales, O=Bouncy Castle, C=AU";
_origKP = CMSTestUtil.makeKeyPair();
_origCert = CMSTestUtil.makeCertificate(_origKP, _origDN, _signKP, _signDN);
_reciDN = "CN=Doug, OU=Sales, O=Bouncy Castle, C=AU";
_reciKP = CMSTestUtil.makeKeyPair();
_reciCert = CMSTestUtil.makeCertificate(_reciKP, _reciDN, _signKP, _signDN);
_origEcKP = CMSTestUtil.makeEcDsaKeyPair();
_reciEcKP = CMSTestUtil.makeEcDsaKeyPair();
_reciEcCert = CMSTestUtil.makeCertificate(_reciEcKP, _reciDN, _signKP, _signDN);
}
}
public static void main(
String[] args)
throws Exception
{
junit.textui.TestRunner.run(EnvelopedDataTest.suite());
}
public static Test suite()
throws Exception
{
init();
return new CMSTestSetup(new TestSuite(EnvelopedDataTest.class));
}
public void testKeyTrans()
throws Exception
{
byte[] data = "WallaWallaWashington".getBytes();
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addKeyTransRecipient(_reciCert);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
CMSEnvelopedDataGenerator.DES_EDE3_CBC, "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(), CMSEnvelopedDataGenerator.DES_EDE3_CBC);
Collection c = recipients.getRecipients();
assertEquals(1, c.size());
Iterator it = c.iterator();
while (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
assertEquals(recipient.getKeyEncryptionAlgOID(), PKCSObjectIdentifiers.rsaEncryption.getId());
byte[] recData = recipient.getContent(_reciKP.getPrivate(), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
}
public void testKeyTransCAST5SunJCE()
throws Exception
{
if (Security.getProvider("SunJCE") == null)
{
return;
}
String version = System.getProperty("java.version");
if (version.startsWith("1.4") || version.startsWith("1.3"))
{
return;
}
byte[] data = "WallaWallaWashington".getBytes();
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addKeyTransRecipient(_reciCert);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
CMSEnvelopedDataGenerator.CAST5_CBC, "SunJCE");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(), CMSEnvelopedDataGenerator.CAST5_CBC);
Collection c = recipients.getRecipients();
assertEquals(1, c.size());
Iterator it = c.iterator();
while (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
assertEquals(recipient.getKeyEncryptionAlgOID(), PKCSObjectIdentifiers.rsaEncryption.getId());
byte[] recData = recipient.getContent(_reciKP.getPrivate(), "SunJCE");
assertEquals(true, Arrays.equals(data, recData));
}
}
public void testKeyTransRC4()
throws Exception
{
byte[] data = "WallaWallaBouncyCastle".getBytes();
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addKeyTransRecipient(_reciCert);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
"1.2.840.113549.3.4", "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(), "1.2.840.113549.3.4");
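// 1.2.840.113549.3.4 is RSA Security's OID for the RC4 stream cipher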
Collection c = recipients.getRecipients();
assertEquals(1, c.size());
Iterator it = c.iterator();
while (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(_reciKP.getPrivate(), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
}
public void testKeyTrans128RC4()
throws Exception
{
byte[] data = "WallaWallaBouncyCastle".getBytes();
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addKeyTransRecipient(_reciCert);
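// request a 128 bit RC4 content-encryption key via the key-size variant of generate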
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
"1.2.840.113549.3.4", 128, "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(), "1.2.840.113549.3.4");
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(_reciKP.getPrivate(), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
public void testKeyTransODES()
throws Exception
{
byte[] data = "WallaWallaBouncyCastle".getBytes();
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addKeyTransRecipient(_reciCert);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
"1.3.14.3.2.7", "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(), "1.3.14.3.2.7");
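// 1.3.14.3.2.7 is the OIW OID for single-DES in CBC mode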
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(_reciKP.getPrivate(), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
public void testKeyTransSmallAES()
throws Exception
{
byte[] data = new byte[] { 0, 1, 2, 3 };
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addKeyTransRecipient(_reciCert);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
CMSEnvelopedDataGenerator.AES128_CBC, "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(),
CMSEnvelopedDataGenerator.AES128_CBC);
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(_reciKP.getPrivate(), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
public void testKeyTransCAST5()
throws Exception
{
tryKeyTrans(CMSEnvelopedDataGenerator.CAST5_CBC, new DERObjectIdentifier(CMSEnvelopedDataGenerator.CAST5_CBC), ASN1Sequence.class);
}
public void testKeyTransAES128()
throws Exception
{
tryKeyTrans(CMSEnvelopedDataGenerator.AES128_CBC, NISTObjectIdentifiers.id_aes128_CBC, DEROctetString.class);
}
public void testKeyTransAES192()
throws Exception
{
tryKeyTrans(CMSEnvelopedDataGenerator.AES192_CBC, NISTObjectIdentifiers.id_aes192_CBC, DEROctetString.class);
}
public void testKeyTransAES256()
throws Exception
{
tryKeyTrans(CMSEnvelopedDataGenerator.AES256_CBC, NISTObjectIdentifiers.id_aes256_CBC, DEROctetString.class);
}
public void testKeyTransSEED()
throws Exception
{
tryKeyTrans(CMSEnvelopedDataGenerator.SEED_CBC, KISAObjectIdentifiers.id_seedCBC, DEROctetString.class);
}
public void testKeyTransCamellia128()
throws Exception
{
tryKeyTrans(CMSEnvelopedDataGenerator.CAMELLIA128_CBC, NTTObjectIdentifiers.id_camellia128_cbc, DEROctetString.class);
}
public void testKeyTransCamellia192()
throws Exception
{
tryKeyTrans(CMSEnvelopedDataGenerator.CAMELLIA192_CBC, NTTObjectIdentifiers.id_camellia192_cbc, DEROctetString.class);
}
public void testKeyTransCamellia256()
throws Exception
{
tryKeyTrans(CMSEnvelopedDataGenerator.CAMELLIA256_CBC, NTTObjectIdentifiers.id_camellia256_cbc, DEROctetString.class);
}
private void tryKeyTrans(String generatorOID, DERObjectIdentifier checkOID, Class asn1Params)
throws Exception
{
byte[] data = "WallaWallaWashington".getBytes();
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addKeyTransRecipient(_reciCert);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
generatorOID, "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(checkOID.getId(), ed.getEncryptionAlgOID());
if (asn1Params != null)
{
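// e.g. the CAST5 caller above expects the algorithm parameters to decode as
// an ASN1Sequence, while the AES/SEED/Camellia callers expect a
// DEROctetString (presumably the raw IV)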
ASN1InputStream aIn = new ASN1InputStream(ed.getEncryptionAlgParams());
assertTrue(asn1Params.isAssignableFrom(aIn.readObject().getClass()));
}
Collection c = recipients.getRecipients();
assertEquals(1, c.size());
Iterator it = c.iterator();
if (!it.hasNext())
{
fail("no recipients found");
}
while (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
assertEquals(recipient.getKeyEncryptionAlgOID(), PKCSObjectIdentifiers.rsaEncryption.getId());
byte[] recData = recipient.getContent(_reciKP.getPrivate(), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
}
public void testErroneousKEK()
throws Exception
{
byte[] data = "WallaWallaWashington".getBytes();
SecretKey kek = new SecretKeySpec(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }, "AES");
CMSEnvelopedData ed = new CMSEnvelopedData(oldKEK);
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(), CMSEnvelopedDataGenerator.DES_EDE3_CBC);
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
assertEquals(recipient.getKeyEncryptionAlgOID(), NISTObjectIdentifiers.id_aes128_wrap.getId());
byte[] recData = recipient.getContent(kek, "BC");
assertEquals(true, Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
public void testDESKEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeDesede192Key(), new DERObjectIdentifier("1.2.840.113549.1.9.16.3.6"));
}
public void testRC2128KEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeRC2128Key(), new DERObjectIdentifier("1.2.840.113549.1.9.16.3.7"));
}
public void testAES128KEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeAESKey(128), NISTObjectIdentifiers.id_aes128_wrap);
}
public void testAES192KEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeAESKey(192), NISTObjectIdentifiers.id_aes192_wrap);
}
public void testAES256KEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeAESKey(256), NISTObjectIdentifiers.id_aes256_wrap);
}
public void testSEED128KEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeSEEDKey(), KISAObjectIdentifiers.id_npki_app_cmsSeed_wrap);
}
public void testCamellia128KEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeCamelliaKey(128), NTTObjectIdentifiers.id_camellia128_wrap);
}
public void testCamellia192KEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeCamelliaKey(192), NTTObjectIdentifiers.id_camellia192_wrap);
}
public void testCamellia256KEK()
throws Exception
{
tryKekAlgorithm(CMSTestUtil.makeCamelliaKey(256), NTTObjectIdentifiers.id_camellia256_wrap);
}
private void tryKekAlgorithm(SecretKey kek, DERObjectIdentifier algOid)
throws NoSuchAlgorithmException, NoSuchProviderException, CMSException
{
byte[] data = "WallaWallaWashington".getBytes();
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
byte[] kekId = new byte[] { 1, 2, 3, 4, 5 };
edGen.addKEKRecipient(kek, kekId);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
CMSEnvelopedDataGenerator.DES_EDE3_CBC, "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
assertEquals(ed.getEncryptionAlgOID(), CMSEnvelopedDataGenerator.DES_EDE3_CBC);
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
assertEquals(algOid.getId(), recipient.getKeyEncryptionAlgOID());
byte[] recData = recipient.getContent(kek, "BC");
assertTrue(Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
public void testECKeyAgree()
throws Exception
{
byte[] data = Hex.decode("504b492d4320434d5320456e76656c6f706564446174612053616d706c65");
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addKeyAgreementRecipient(CMSEnvelopedDataGenerator.ECDH_SHA1KDF, _origEcKP.getPrivate(), _origEcKP.getPublic(), _reciEcCert, CMSEnvelopedDataGenerator.AES128_WRAP, "BC");
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
CMSEnvelopedDataGenerator.AES128_CBC, "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(),
CMSEnvelopedDataGenerator.AES128_CBC);
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(_reciEcKP.getPrivate(), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
public void testECKeyAgreeVectors()
throws Exception
{
PKCS8EncodedKeySpec privSpec = new PKCS8EncodedKeySpec(ecKeyAgreeKey);
KeyFactory fact = KeyFactory.getInstance("ECDH", "BC");
PrivateKey privKey = fact.generatePrivate(privSpec);
verifyECKeyAgreeVectors(privKey, "2.16.840.1.101.3.4.1.42", ecKeyAgreeMsgAES256);
verifyECKeyAgreeVectors(privKey, "2.16.840.1.101.3.4.1.2", ecKeyAgreeMsgAES128);
verifyECKeyAgreeVectors(privKey, "1.2.840.113549.3.7", ecKeyAgreeMsgDESEDE);
}
public void testECMQVKeyAgreeVectors()
throws Exception
{
PKCS8EncodedKeySpec privSpec = new PKCS8EncodedKeySpec(ecKeyAgreeKey);
KeyFactory fact = KeyFactory.getInstance("ECDH", "BC");
PrivateKey privKey = fact.generatePrivate(privSpec);
verifyECMQVKeyAgreeVectors(privKey, "2.16.840.1.101.3.4.1.2", ecMQVKeyAgreeMsgAES128);
}
public void testPasswordAES256()
throws Exception
{
passwordTest(CMSEnvelopedDataGenerator.AES256_CBC);
passwordUTF8Test(CMSEnvelopedDataGenerator.AES256_CBC);
}
public void testPasswordDESEDE()
throws Exception
{
passwordTest(CMSEnvelopedDataGenerator.DES_EDE3_CBC);
passwordUTF8Test(CMSEnvelopedDataGenerator.DES_EDE3_CBC);
}
public void testRFC4134ex5_1()
throws Exception
{
byte[] data = Hex.decode("5468697320697320736f6d652073616d706c6520636f6e74656e742e");
KeyFactory kFact = KeyFactory.getInstance("RSA", "BC");
Key key = kFact.generatePrivate(new PKCS8EncodedKeySpec(bobPrivRsaEncrypt));
CMSEnvelopedData ed = new CMSEnvelopedData(rfc4134ex5_1);
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals("1.2.840.113549.3.7", ed.getEncryptionAlgOID());
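// 1.2.840.113549.3.7 is des-EDE3-CBC, the content cipher in the RFC 4134 section 5.1 sample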
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(key, "BC");
assertEquals(true, Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
public void testRFC4134ex5_2()
throws Exception
{
byte[] data = Hex.decode("5468697320697320736f6d652073616d706c6520636f6e74656e742e");
KeyFactory kFact = KeyFactory.getInstance("RSA", "BC");
Key key = kFact.generatePrivate(new PKCS8EncodedKeySpec(bobPrivRsaEncrypt));
CMSEnvelopedData ed = new CMSEnvelopedData(rfc4134ex5_2);
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals("1.2.840.113549.3.2", ed.getEncryptionAlgOID());
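// 1.2.840.113549.3.2 is RC2-CBC, the content cipher in the RFC 4134 section 5.2 sample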
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
while (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData;
if (recipient instanceof KeyTransRecipientInformation)
{
recData = recipient.getContent(key, "BC");
assertEquals(true, Arrays.equals(data, recData));
}
}
}
else
{
fail("no recipient found");
}
}
public void testOriginatorInfo()
throws Exception
{
CMSEnvelopedData env = new CMSEnvelopedData(CMSSampleMessages.originatorMessage);
RecipientInformationStore recipients = env.getRecipientInfos();
assertEquals(CMSEnvelopedDataGenerator.DES_EDE3_CBC, env.getEncryptionAlgOID());
}
private void passwordTest(String algorithm)
throws Exception
{
byte[] data = Hex.decode("504b492d4320434d5320456e76656c6f706564446174612053616d706c65");
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addPasswordRecipient(new PKCS5Scheme2PBEKey("password".toCharArray(), new byte[20], 5), algorithm);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
CMSEnvelopedDataGenerator.AES128_CBC, "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(),
CMSEnvelopedDataGenerator.AES128_CBC);
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
PasswordRecipientInformation recipient = (PasswordRecipientInformation)it.next();
CMSPBEKey key = new PKCS5Scheme2PBEKey("password".toCharArray(),
recipient.getKeyDerivationAlgParameters("BC"));
byte[] recData = recipient.getContent(key, "BC");
assertEquals(true, Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
// try algorithm parameters constructor
it = c.iterator();
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(new PKCS5Scheme2PBEKey("password".toCharArray(), ((PasswordRecipientInformation)recipient).getKeyDerivationAlgParameters("BC")), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
private void passwordUTF8Test(String algorithm)
throws Exception
{
byte[] data = Hex.decode("504b492d4320434d5320456e76656c6f706564446174612053616d706c65");
CMSEnvelopedDataGenerator edGen = new CMSEnvelopedDataGenerator();
edGen.addPasswordRecipient(new PKCS5Scheme2UTF8PBEKey("abc\u5639\u563b".toCharArray(), new byte[20], 5), algorithm);
CMSEnvelopedData ed = edGen.generate(
new CMSProcessableByteArray(data),
CMSEnvelopedDataGenerator.AES128_CBC, "BC");
RecipientInformationStore recipients = ed.getRecipientInfos();
assertEquals(ed.getEncryptionAlgOID(),
CMSEnvelopedDataGenerator.AES128_CBC);
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(new PKCS5Scheme2UTF8PBEKey("abc\u5639\u563b".toCharArray(), new byte[20], 5), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
// try algorithm parameters constructor
it = c.iterator();
RecipientInformation recipient = (RecipientInformation)it.next();
byte[] recData = recipient.getContent(new PKCS5Scheme2UTF8PBEKey("abc\u5639\u563b".toCharArray(), ((PasswordRecipientInformation)recipient).getKeyDerivationAlgParameters("BC")), "BC");
assertEquals(true, Arrays.equals(data, recData));
}
private void verifyECKeyAgreeVectors(PrivateKey privKey, String wrapAlg, byte[] message)
throws CMSException, GeneralSecurityException
{
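// Note: despite its name, the wrapAlg parameter is compared against the
// message's content encryption algorithm OID (the callers pass AES/DESede
// content-cipher OIDs, not key-wrap OIDs).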
byte[] data = Hex.decode("504b492d4320434d5320456e76656c6f706564446174612053616d706c65");
CMSEnvelopedData ed = new CMSEnvelopedData(message);
RecipientInformationStore recipients = ed.getRecipientInfos();
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
assertEquals(wrapAlg, ed.getEncryptionAlgOID());
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
assertEquals("1.3.133.16.840.63.0.2", recipient.getKeyEncryptionAlgOID());
byte[] recData = recipient.getContent(privKey, "BC");
assertTrue(Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
private void verifyECMQVKeyAgreeVectors(PrivateKey privKey, String wrapAlg, byte[] message)
throws CMSException, GeneralSecurityException
{
byte[] data = Hex.decode("504b492d4320434d5320456e76656c6f706564446174612053616d706c65");
CMSEnvelopedData ed = new CMSEnvelopedData(message);
RecipientInformationStore recipients = ed.getRecipientInfos();
Collection c = recipients.getRecipients();
Iterator it = c.iterator();
assertEquals(wrapAlg, ed.getEncryptionAlgOID());
if (it.hasNext())
{
RecipientInformation recipient = (RecipientInformation)it.next();
assertEquals("1.3.133.16.840.63.0.16", recipient.getKeyEncryptionAlgOID());
byte[] recData = recipient.getContent(privKey, "BC");
assertTrue(Arrays.equals(data, recData));
}
else
{
fail("no recipient found");
}
}
}
|
/*
* $Id: SimulatedPlugin.java,v 1.34 2014-11-29 21:40:44 tlipkis Exp $
*/
package org.lockss.plugin.simulated;
import java.util.*;
import org.lockss.app.*;
import org.lockss.config.*;
import org.lockss.daemon.*;
import org.lockss.plugin.*;
import org.lockss.plugin.base.*;
import org.lockss.util.*;
import org.lockss.test.*;
/**
* SimulatedPlugin is a Plugin that simulates a website with
* locally generated simulated content.
*/
public class SimulatedPlugin extends BasePlugin implements PluginTestable {
static Logger log = Logger.getLogger("SimulatedPlugin");
/**
* The root location for the simulated content to be generated.
*/
static final ConfigParamDescr PD_ROOT = new ConfigParamDescr();
static {
PD_ROOT.setKey("root");
PD_ROOT.setDisplayName("Root");
PD_ROOT.setType(ConfigParamDescr.TYPE_STRING);
PD_ROOT.setSize(20);
}
public static final String AU_PARAM_ROOT = PD_ROOT.getKey();
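// A minimal configuration sketch (ConfigurationUtil is the org.lockss.test
// helper; tempDirPath stands in for a real scratch directory):
//   Configuration config = ConfigurationUtil.fromArgs(
//       SimulatedPlugin.AU_PARAM_ROOT, tempDirPath,
//       SimulatedPlugin.AU_PARAM_DEPTH, "2");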
/**
* Non-definitional base_url param, because lots of tests know that
* SimulatedPlugin has only one definitional param
*/
static final ConfigParamDescr PD_NON_DEF_BASE_URL =
new ConfigParamDescr()
.setKey("base_url")
.setDisplayName("Base URL")
.setType(ConfigParamDescr.TYPE_URL)
.setSize(40)
.setDefinitional(false)
.setDescription("Usually of the form http://<journal-name>.com/");
/**
* The depth of the tree to generate (0 equals just the root dir).
*/
static final ConfigParamDescr PD_DEPTH = new ConfigParamDescr();
static {
PD_DEPTH.setKey("depth");
PD_DEPTH.setDisplayName("Depth");
PD_DEPTH.setType(ConfigParamDescr.TYPE_INT);
PD_DEPTH.setSize(8);
PD_DEPTH.setDefinitional(false);
}
public static final String AU_PARAM_DEPTH = PD_DEPTH.getKey();
/**
* The number of branches in each directory.
*/
static final ConfigParamDescr PD_BRANCH = new ConfigParamDescr();
static {
PD_BRANCH.setKey("branch");
PD_BRANCH.setDisplayName("Branches");
PD_BRANCH.setType(ConfigParamDescr.TYPE_INT);
PD_BRANCH.setSize(8);
PD_BRANCH.setDefinitional(false);
}
public static final String AU_PARAM_BRANCH = PD_BRANCH.getKey();
/**
* The number of files in each directory. This will be multiplied by the
* number of file types, so having both '.html' and '.txt' will generate
* 'file1.html', 'file1.txt', 'file2.html', 'file2.txt', etc.
*/
static final ConfigParamDescr PD_NUM_FILES = new ConfigParamDescr();
static {
PD_NUM_FILES.setKey("numFiles");
PD_NUM_FILES.setDisplayName("Files/branch");
PD_NUM_FILES.setType(ConfigParamDescr.TYPE_INT);
PD_NUM_FILES.setSize(8);
PD_NUM_FILES.setDefinitional(false);
}
public static final String AU_PARAM_NUM_FILES = PD_NUM_FILES.getKey();
/**
* The size to make binary files, if chosen as a type.
*/
static final ConfigParamDescr PD_BIN_FILE_SIZE = new ConfigParamDescr();
static {
PD_BIN_FILE_SIZE.setKey("binFileSize");
PD_BIN_FILE_SIZE.setDisplayName("Binary file size");
PD_BIN_FILE_SIZE.setType(ConfigParamDescr.TYPE_LONG);
PD_BIN_FILE_SIZE.setSize(8);
PD_BIN_FILE_SIZE.setDefinitional(false);
}
public static final String AU_PARAM_BIN_FILE_SIZE =
PD_BIN_FILE_SIZE.getKey();
/**
* The seed for random binary files, if chosen as a type.
*/
static final ConfigParamDescr PD_BIN_RANDOM_SEED = new ConfigParamDescr();
static {
PD_BIN_RANDOM_SEED.setKey("binRandomSeed");
PD_BIN_RANDOM_SEED.setDisplayName("Bin file random seed");
PD_BIN_RANDOM_SEED.setType(ConfigParamDescr.TYPE_LONG);
PD_BIN_RANDOM_SEED.setSize(8);
PD_BIN_RANDOM_SEED.setDefinitional(false);
}
public static final String AU_PARAM_BIN_RANDOM_SEED =
PD_BIN_RANDOM_SEED.getKey();
/**
* The maximum length for file names. Currently unused.
*/
static final ConfigParamDescr PD_MAXFILE_NAME = new ConfigParamDescr();
static {
PD_MAXFILE_NAME.setKey("maxFileName");
PD_MAXFILE_NAME.setDisplayName("Max file name");
PD_MAXFILE_NAME.setType(ConfigParamDescr.TYPE_INT);
PD_MAXFILE_NAME.setSize(8);
PD_MAXFILE_NAME.setDefinitional(false);
}
public static final String AU_PARAM_MAXFILE_NAME = PD_MAXFILE_NAME.getKey();
/**
* The file types to create. A bitwise combination (OR) of the
* {@link SimulatedContentGenerator} FILE_TYPE_XXX values.
*/
static final ConfigParamDescr PD_FILE_TYPES = new ConfigParamDescr();
static {
PD_FILE_TYPES.setKey("fileTypes");
PD_FILE_TYPES.setDisplayName("File types");
PD_FILE_TYPES.setType(ConfigParamDescr.TYPE_INT);
PD_FILE_TYPES.setSize(8);
PD_FILE_TYPES.setDefinitional(false);
}
public static final String AU_PARAM_FILE_TYPES = PD_FILE_TYPES.getKey();
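// Example (assuming the usual SimulatedContentGenerator constants): HTML
// plus text files would be requested with a value such as
// SimulatedContentGenerator.FILE_TYPE_HTML | SimulatedContentGenerator.FILE_TYPE_TXT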
static final ConfigParamDescr PD_ODD_BRANCH_CONTENT = new ConfigParamDescr();
static {
PD_ODD_BRANCH_CONTENT.setKey("odd_branch_content");
PD_ODD_BRANCH_CONTENT.setDisplayName("Odd Branch Contents");
PD_ODD_BRANCH_CONTENT.setType(ConfigParamDescr.TYPE_BOOLEAN);
PD_ODD_BRANCH_CONTENT.setDefinitional(false);
}
public static final String AU_PARAM_ODD_BRANCH_CONTENT =
PD_ODD_BRANCH_CONTENT.getKey();
/**
* The directory location of the 'abnormal' file. Should be a string
* filepath (e.g. 'root/branch1/branch3').
*/
static final ConfigParamDescr PD_BAD_FILE_LOC = new ConfigParamDescr();
static {
PD_BAD_FILE_LOC.setKey("badFileLoc");
PD_BAD_FILE_LOC.setDisplayName("Bad File Path");
PD_BAD_FILE_LOC.setType(ConfigParamDescr.TYPE_STRING);
PD_BAD_FILE_LOC.setSize(30);
PD_BAD_FILE_LOC.setDefinitional(false);
}
public static final String AU_PARAM_BAD_FILE_LOC = PD_BAD_FILE_LOC.getKey();
/**
* The file number of the 'abnormal' file, in the directory given by the
* location string.
*/
static final ConfigParamDescr PD_BAD_FILE_NUM = new ConfigParamDescr();
static {
PD_BAD_FILE_NUM.setKey("badFileNum");
PD_BAD_FILE_NUM.setDisplayName("Bad File Number");
PD_BAD_FILE_NUM.setType(ConfigParamDescr.TYPE_INT);
PD_BAD_FILE_NUM.setSize(8);
PD_BAD_FILE_NUM.setDefinitional(false);
}
public static final String AU_PARAM_BAD_FILE_NUM = PD_BAD_FILE_NUM.getKey();
/**
* The directory location of a file to be marked as 'damaged' in the cache.
* Should be a string filepath.
*/
static final ConfigParamDescr PD_BAD_CACHED_FILE_LOC =
new ConfigParamDescr();
static {
PD_BAD_CACHED_FILE_LOC.setKey("badCachedFileLoc");
PD_BAD_CACHED_FILE_LOC.setDisplayName("Damaged File Path");
PD_BAD_CACHED_FILE_LOC.setType(ConfigParamDescr.TYPE_STRING);
PD_BAD_CACHED_FILE_LOC.setSize(30);
PD_BAD_CACHED_FILE_LOC.setDefinitional(false);
}
public static final String AU_PARAM_BAD_CACHED_FILE_LOC =
PD_BAD_CACHED_FILE_LOC.getKey();
/**
* File number of the 'damaged' cache file
*/
static final ConfigParamDescr PD_BAD_CACHED_FILE_NUM =
new ConfigParamDescr();
static {
PD_BAD_CACHED_FILE_NUM.setKey("badCachedFileNum");
PD_BAD_CACHED_FILE_NUM.setDisplayName("Damaged File Number");
PD_BAD_CACHED_FILE_NUM.setType(ConfigParamDescr.TYPE_INT);
PD_BAD_CACHED_FILE_NUM.setSize(8);
PD_BAD_CACHED_FILE_NUM.setDefinitional(false);
}
public static final String AU_PARAM_BAD_CACHED_FILE_NUM =
PD_BAD_CACHED_FILE_NUM.getKey();
/**
* Hash filter spec
*/
static final ConfigParamDescr PD_HASH_FILTER_SPEC =
new ConfigParamDescr();
static {
PD_HASH_FILTER_SPEC.setKey("hashFilterSpec");
PD_HASH_FILTER_SPEC.setDisplayName("Hash Filters");
PD_HASH_FILTER_SPEC.setType(ConfigParamDescr.TYPE_STRING);
PD_HASH_FILTER_SPEC.setSize(30);
PD_HASH_FILTER_SPEC.setDefinitional(false);
}
public static final String AU_PARAM_HASH_FILTER_SPEC =
PD_HASH_FILTER_SPEC.getKey();
/**
* The default article mime type for the ArticleIterator
*/
static final ConfigParamDescr PD_DEFAULT_ARTICLE_MIME_TYPE =
new ConfigParamDescr();
static {
PD_DEFAULT_ARTICLE_MIME_TYPE.setKey("default_article_mime_type");
PD_DEFAULT_ARTICLE_MIME_TYPE.setDisplayName("DefaultArticleMimeType");
PD_DEFAULT_ARTICLE_MIME_TYPE.setType(ConfigParamDescr.TYPE_STRING);
PD_DEFAULT_ARTICLE_MIME_TYPE.setSize(20);
}
public static final String AU_PARAM_DEFAULT_ARTICLE_MIME_TYPE =
PD_DEFAULT_ARTICLE_MIME_TYPE.getKey();
/**
* If true, mixed case names will be generated.
*/
static final ConfigParamDescr PD_MIXED_CASE =
new ConfigParamDescr();
static {
PD_MIXED_CASE.setKey("mixed_case");
PD_MIXED_CASE.setDisplayName("Mixed Case");
PD_MIXED_CASE.setType(ConfigParamDescr.TYPE_BOOLEAN);
}
public static final String AU_PARAM_MIXED_CASE =
PD_MIXED_CASE.getKey();
private String pluginId = "SimulatedPlugin";
private int initCtr = 0;
private int stopCtr = 0;
private Configuration auConfig;
public SimulatedPlugin() {
}
/**
* Called after plugin is loaded to give the plugin time to perform any
* needed initializations
* @param daemon the LockssDaemon
*/
public void initPlugin(LockssDaemon daemon) {
super.initPlugin(daemon);
initCtr++;
}
/**
* Called when the application is stopping to allow the plugin to perform
* any necessary tasks needed to cleanly halt
*/
public void stopPlugin() {
stopCtr++;
}
public String getVersion() {
return "SimulatedVersion";
}
public String getPluginName() {
return "Simulated Content";
}
// SimulatedPlugin's only definitional param is the root directory,
// typically a temp dir, so Tdb entries & TitleConfig don't really make
// sense, and cause (harmless) NPEs in some unit tests. However, stf
// currently marks AUs down by creating a tdb entry, so this can't be
// suppressed. stf should be changed to mark AUs down by changing their
// config instead.
@Override
protected void setTitleConfigs(Tdb tdb) {
super.setTitleConfigs(tdb);
}
/**
* Return the set of configuration properties required to configure
* an archival unit for this plugin.
* @return a List of ConfigParamDescr objects describing the properties
* for which values are needed in order to configure an AU
*/
public List getLocalAuConfigDescrs() {
return ListUtil.list(PD_NON_DEF_BASE_URL, PD_ROOT, PD_DEPTH,
PD_BRANCH, PD_NUM_FILES,
PD_BIN_FILE_SIZE, PD_BIN_RANDOM_SEED,
PD_MAXFILE_NAME,
PD_FILE_TYPES, PD_ODD_BRANCH_CONTENT,
PD_BAD_FILE_LOC, PD_BAD_FILE_NUM);
}
protected ArchivalUnit createAu0(Configuration auConfig)
throws ArchivalUnit.ConfigurationException {
log.debug("createAu0(" + auConfig + ")");
ArchivalUnit au = new SimulatedArchivalUnit(this);
au.setConfiguration(auConfig);
this.auConfig = auConfig;
return au;
}
private String defaultArticleMimeType = null;
public void setDefaultArticleMimeType(String val) {
defaultArticleMimeType = val;
}
public String getDefaultArticleMimeType() {
if (defaultArticleMimeType != null) {
return defaultArticleMimeType;
}
if (auConfig == null) {
return null;
}
String ret = auConfig.get(AU_PARAM_DEFAULT_ARTICLE_MIME_TYPE,
null);
// "never/happens");
log.debug("DefaultArticleMimeType is " + ret);
return ret;
}
// SimulatedPlugin methods, not part of Plugin interface
public int getInitCtr() {
return initCtr;
}
public int getStopCtr() {
return stopCtr;
}
public void registerArchivalUnit(ArchivalUnit au) {
aus.add(au);
}
public void unregisterArchivalUnit(ArchivalUnit au) {
aus.remove(au);
}
public SimulatedContentGenerator getContentGenerator(Configuration cf,
String fileRoot) {
return SimulatedContentGenerator.getInstance(fileRoot);
}
}
|
package com.redhat.ceylon.compiler.typechecker.analyzer;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.checkAssignable;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.checkCasesDisjoint;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.checkIsExactly;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.getTypedDeclaration;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.inLanguageModule;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.inSameModule;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.typeDescription;
import static com.redhat.ceylon.compiler.typechecker.analyzer.AnalyzerUtil.typeNamesAsIntersection;
import static com.redhat.ceylon.compiler.typechecker.tree.TreeUtil.name;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.addToIntersection;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.areConsistentSupertypes;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.canonicalIntersection;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.intersectionOfSupertypes;
import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isTypeUnknown;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
import com.redhat.ceylon.model.typechecker.model.Class;
import com.redhat.ceylon.model.typechecker.model.ClassOrInterface;
import com.redhat.ceylon.model.typechecker.model.Constructor;
import com.redhat.ceylon.model.typechecker.model.Declaration;
import com.redhat.ceylon.model.typechecker.model.Interface;
import com.redhat.ceylon.model.typechecker.model.Scope;
import com.redhat.ceylon.model.typechecker.model.Type;
import com.redhat.ceylon.model.typechecker.model.TypeAlias;
import com.redhat.ceylon.model.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.model.typechecker.model.TypeParameter;
import com.redhat.ceylon.model.typechecker.model.TypedDeclaration;
import com.redhat.ceylon.model.typechecker.model.Unit;
import com.redhat.ceylon.model.typechecker.model.Value;
/**
* Enforces a number of rules surrounding inheritance.
* This work happens during an intermediate phase in
* between the second and third phases of type analysis.
*
* @see TypeHierarchyVisitor for more complex stuff
*
* @author Gavin King
*
*/
public class InheritanceVisitor extends Visitor {
@Override public void visit(Tree.TypeDeclaration that) {
validateSupertypes(that,
that.getDeclarationModel());
super.visit(that);
}
@Override public void visit(Tree.ObjectDefinition that) {
validateSupertypes(that,
that.getDeclarationModel()
.getType()
.getDeclaration());
super.visit(that);
validateEnumeratedSupertypes(that,
that.getAnonymousClass());
}
@Override public void visit(Tree.ObjectArgument that) {
validateSupertypes(that,
that.getAnonymousClass());
super.visit(that);
validateEnumeratedSupertypes(that,
that.getAnonymousClass());
}
@Override public void visit(Tree.ObjectExpression that) {
validateSupertypes(that,
that.getAnonymousClass());
super.visit(that);
validateEnumeratedSupertypes(that,
that.getAnonymousClass());
}
@Override public void visit(Tree.TypeConstraint that) {
super.visit(that);
validateUpperBounds(that,
that.getDeclarationModel());
}
@Override public void visit(Tree.ClassOrInterface that) {
super.visit(that);
validateEnumeratedSupertypeArguments(that,
that.getDeclarationModel());
}
@Override public void visit(Tree.ClassDefinition that) {
super.visit(that);
validateEnumeratedSupertypes(that,
that.getDeclarationModel());
}
@Override public void visit(Tree.InterfaceDefinition that) {
super.visit(that);
validateEnumeratedSupertypes(that,
that.getDeclarationModel());
}
private void validateSupertypes(Node that,
TypeDeclaration td) {
if (!(td instanceof TypeAlias)) {
List<Type> supertypes =
td.getType().getSupertypes();
for (int i=0; i<supertypes.size(); i++) {
Type st1 = supertypes.get(i);
for (int j=i+1; j<supertypes.size(); j++) {
Type st2 = supertypes.get(j);
//Note: sets td.inconsistentType by side-effect
checkSupertypeIntersection(that,
td, st1, st2);
}
}
}
}
private void checkSupertypeIntersection(Node that,
TypeDeclaration td,
Type st1, Type st2) {
TypeDeclaration st1d = st1.getDeclaration();
TypeDeclaration st2d = st2.getDeclaration();
if (st1d.equals(st2d) /*&& !st1.isExactly(st2)*/) {
Unit unit = that.getUnit();
if (!areConsistentSupertypes(st1, st2, unit)) {
that.addError(typeDescription(td, unit) +
" has the same parameterized supertype twice with incompatible type arguments: '" +
st1.asString(unit) + " & " +
st2.asString(unit) + "'");
td.setInconsistentType(true);
}
}
}
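//Illustrative (hypothetical) Ceylon code that trips this check:
//    interface Box<Element> {}
//    class Bad() satisfies Box<String> & Box<Integer> {}
//'Bad' names the parameterized supertype 'Box' twice with incompatible
//type arguments, so areConsistentSupertypes() reports an error.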
private void validateUpperBounds(Tree.TypeConstraint that,
TypeDeclaration td) {
if (!td.isInconsistentType()) {
Unit unit = that.getUnit();
List<Type> upperBounds =
td.getSatisfiedTypes();
List<Type> list =
new ArrayList<Type>
(upperBounds.size());
for (Type st: upperBounds) {
addToIntersection(list, st, unit);
}
if (canonicalIntersection(list, unit).isNothing()) {
that.addError(typeDescription(td, unit) +
" has unsatisfiable upper bound constraints: the constraints '" +
typeNamesAsIntersection(upperBounds, unit) +
"' cannot be satisfied by any type except 'Nothing'");
}
}
}
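//Illustrative (hypothetical) case of unsatisfiable upper bounds:
//    void f<T>(T t) given T satisfies String & Integer {}
//'String & Integer' has no instances, so only 'Nothing' can satisfy 'T'
//and the error above is reported.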
private void validateEnumeratedSupertypes(Node that, ClassOrInterface d) {
Type type = d.getType();
Unit unit = that.getUnit();
if (d instanceof Class) {
Type superclass = d.getExtendedType();
if (superclass!=null &&
superclass.isAnything() &&
!type.isObject() && !type.isNull()) {
//a class which directly extends Anything
//must be handled as a special case here
//because of a performance optimization in
//Class.inherits()
that.addError("not a subtype of any case of root type: '" +
d.getName(unit) +
"' directly inherits 'Anything'");
}
}
for (Type supertype: type.getSupertypes()) {
if (!type.isExactly(supertype)) {
TypeDeclaration std =
supertype.getDeclaration();
List<Type> cts = std.getCaseTypes();
if (cts!=null &&
!cts.isEmpty()) {
if (cts.size()==1 &&
cts.get(0)
.getDeclaration()
.isSelfType()) {
continue;
}
List<Type> types =
new ArrayList<Type>
(cts.size());
for (Type ct: cts) {
TypeDeclaration ctd =
ct.resolveAliases()
.getDeclaration();
Type cst = type.getSupertype(ctd);
if (cst!=null) {
types.add(cst);
}
}
if (types.isEmpty()) {
that.addError("type is not a subtype of any case of enumerated supertype: '" +
d.getName(unit) +
"' inherits '" +
std.getName(unit) + "'");
}
else if (types.size()>1) {
StringBuilder sb = new StringBuilder();
for (Type pt: types) {
sb.append("'")
.append(pt.asString(unit))
.append("' and ");
}
sb.setLength(sb.length()-5);
that.addError("type is a subtype of multiple cases of enumerated supertype '" +
std.getName(unit) + "': '" +
d.getName(unit) +
"' inherits " + sb);
}
}
}
}
}
private void validateEnumeratedSupertypeArguments(
Node that, ClassOrInterface classOrInterface) {
//note: I hate doing the whole traversal here, but
// it is the only way to get the error in the
// right place (see the note in visit(CaseTypes)
// for more)
Type type = classOrInterface.getType();
for (Type supertype: type.getSupertypes()) { //traverse the entire supertype hierarchy of the declaration
if (!type.isExactly(supertype)) {
List<Type> cts =
supertype.getDeclaration()
.getCaseTypes();
if (cts!=null) {
for (Type ct: cts) {
if (ct.getDeclaration()
.equals(classOrInterface)) { //the declaration is a case of the current enumerated supertype
validateEnumeratedSupertypeArguments(
that, classOrInterface,
supertype);
break;
}
}
}
}
}
}
private void validateEnumeratedSupertypeArguments(
Node that, TypeDeclaration type,
Type supertype) {
List<TypeParameter> params =
supertype.getDeclaration()
.getTypeParameters();
Map<TypeParameter, Type> typeArguments =
supertype.getTypeArguments();
for (TypeParameter param: params) {
Type arg = typeArguments.get(param); //the type argument that the declaration (indirectly) passes to the enumerated supertype
if (arg!=null) {
validateEnumeratedSupertypeArgument(that,
type, supertype, param, arg);
}
}
}
private void validateEnumeratedSupertypeArgument(
Node that, TypeDeclaration type,
Type supertype, TypeParameter tp,
Type arg) {
Unit unit = that.getUnit();
if (arg.isTypeParameter()) {
TypeParameter atp =
(TypeParameter)
arg.getDeclaration();
if (atp.getDeclaration().equals(type)) { //the argument is a type parameter of the declaration
//check that the variance of the argument
//type parameter is the same as the type
//parameter of the enumerated supertype
if (tp.isCovariant() && !atp.isCovariant()) {
that.addError("argument to covariant type parameter of enumerated supertype must be covariant: " +
typeDescription(tp, unit));
}
if (tp.isContravariant() &&
!atp.isContravariant()) {
that.addError("argument to contravariant type parameter of enumerated supertype must be contravariant: " +
typeDescription(tp, unit));
}
}
else {
that.addError("argument to type parameter of enumerated supertype must be a type parameter of '" +
type.getName() + "': " +
typeDescription(tp, unit));
}
}
else if (tp.isCovariant()) {
if (!(arg.isNothing())) {
//TODO: let it be the union of the lower bounds on p
that.addError("argument to covariant type parameter of enumerated supertype must be a type parameter or 'Nothing': " +
typeDescription(tp, unit));
}
}
else if (tp.isContravariant()) {
List<Type> sts = tp.getSatisfiedTypes();
//TODO: do I need to do type arg substitution here??
Type ub = intersectionOfSupertypes(tp);
if (!(arg.isExactly(ub))) {
that.addError("argument to contravariant type parameter of enumerated supertype must be a type parameter or '" +
typeNamesAsIntersection(sts, unit) + "': " +
typeDescription(tp, unit));
}
}
else {
that.addError("argument to type parameter of enumerated supertype must be a type parameter: " +
typeDescription(tp, unit));
}
}
@Override
public void visit(Tree.ExtendedType that) {
super.visit(that);
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
if (!td.isAlias()) {
Tree.SimpleType et = that.getType();
if (et!=null) {
Tree.InvocationExpression ie =
that.getInvocationExpression();
Class clazz = (Class) td;
boolean hasConstructors =
clazz.hasConstructors() ||
clazz.hasEnumerated();
boolean anonymous = clazz.isAnonymous();
if (ie==null) {
if (!hasConstructors || anonymous) {
et.addError("missing instantiation arguments");
}
}
else {
if (hasConstructors && !anonymous) {
et.addError("unnecessary instantiation arguments");
}
}
Unit unit = that.getUnit();
Type type = et.getTypeModel();
if (type!=null) {
checkSelfTypes(et, td, type);
checkExtensionOfMemberType(et, td, type);
//checkCaseOfSupertype(et, td, type);
Type ext = td.getExtendedType();
TypeDeclaration etd =
ext==null ? null :
ext.getDeclaration();
TypeDeclaration aetd =
type.getDeclaration();
if (aetd instanceof Constructor &&
aetd.isAbstract()) {
et.addError("extends a partial constructor: '" +
aetd.getName(unit) +
"' is declared abstract");
}
while (etd!=null && etd.isAlias()) {
Type etdet =
etd.getExtendedType();
etd = etdet == null ? null :
etdet.getDeclaration();
}
if (etd!=null) {
if (etd.isFinal()) {
et.addError("extends a final class: '" +
etd.getName(unit) +
"' is declared final");
}
if (etd.isSealed() &&
!inSameModule(etd, unit)) {
String moduleName =
etd.getUnit()
.getPackage()
.getModule()
.getNameAsString();
et.addError("extends a sealed class in a different module: '" +
etd.getName(unit) +
"' in '" + moduleName +
"' is sealed");
}
}
}
checkSupertypeVarianceAnnotations(et);
}
}
}
@Override
public void visit(Tree.SatisfiedTypes that) {
super.visit(that);
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
if (td.isAlias()) {
return;
}
Set<TypeDeclaration> set =
new HashSet<TypeDeclaration>();
if (td.getSatisfiedTypes().isEmpty()) {
return; //handle undecidable case
}
Unit unit = that.getUnit();
for (Tree.StaticType t: that.getTypes()) {
Type type = t.getTypeModel();
if (!isTypeUnknown(type)) {
type = type.resolveAliases();
TypeDeclaration dec = type.getDeclaration();
if (td instanceof ClassOrInterface &&
!inLanguageModule(unit)) {
if (unit.isCallableType(type)) {
t.addError("satisfies 'Callable'");
}
TypeDeclaration cad =
unit.getConstrainedAnnotationDeclaration();
if (dec.equals(cad)) {
t.addError("directly satisfies 'ConstrainedAnnotation'");
}
}
if (!set.add(dec)) {
//this error is not really truly necessary
//but the spec says it is an error, and
//the backend doesn't like it
t.addError("duplicate satisfied type: '" +
dec.getName(unit) + "' of '" +
td.getName() + "'");
}
if (td instanceof ClassOrInterface) {
TypeDeclaration std =
dec;
if (std.isSealed() &&
!inSameModule(std, unit)) {
String moduleName =
std.getUnit()
.getPackage()
.getModule()
.getNameAsString();
t.addError("satisfies a sealed interface in a different module: '" +
std.getName(unit) + "' in '" +
moduleName + "'");
}
}
checkSelfTypes(t, td, type);
checkExtensionOfMemberType(t, td, type);
/*if (!(td instanceof TypeParameter)) {
checkCaseOfSupertype(t, td, type);
}*/
}
if (t instanceof Tree.SimpleType) {
Tree.SimpleType st = (Tree.SimpleType) t;
checkSupertypeVarianceAnnotations(st);
}
}
}
@Override
public void visit(Tree.CaseTypes that) {
super.visit(that);
//this forces every case to be a subtype of the
//enumerated type, so that we can make use of the
//fact that the enumerated type is equivalent to
//the union of its cases
TypeDeclaration td =
(TypeDeclaration)
that.getScope();
//TODO: get rid of this awful hack:
List<Type> cases = td.getCaseTypes();
td.setCaseTypes(null);
if (td instanceof TypeParameter) {
for (Tree.StaticType t: that.getTypes()) {
for (Tree.StaticType ot: that.getTypes()) {
if (t==ot) break;
checkCasesDisjoint(
t.getTypeModel(),
ot.getTypeModel(),
ot);
}
}
}
else {
collectCaseTypes(that, td);
collectCaseValues(that, td);
}
//TODO: get rid of this awful hack:
td.setCaseTypes(cases);
}
void collectCaseValues(Tree.CaseTypes that,
TypeDeclaration td) {
Unit unit = that.getUnit();
Set<Declaration> valueSet =
new HashSet<Declaration>();
for (Tree.BaseMemberExpression bme:
that.getBaseMemberExpressions()) {
TypedDeclaration value =
getTypedDeclaration(bme.getScope(),
name(bme.getIdentifier()),
null, false, unit);
if (value!=null) {
if (!valueSet.add(value)) {
//this error is not really truly necessary
bme.addError("duplicate case: '" +
value.getName(unit) +
"' of '" + td.getName() + "'");
}
Type type = value.getType();
if (type!=null) {
TypeDeclaration caseDec =
type.getDeclaration();
if (caseDec instanceof Constructor) {
Scope scope = caseDec.getContainer();
if (scope instanceof Class) {
//enumerated singleton constructors
Constructor cons =
(Constructor) caseDec;
Class c = (Class) scope;
if (!c.isToplevel()) {
bme.addError("case must be a value constructor of a toplevel class: '" +
c.getName(unit) +
"' is not toplevel");
}
else if (!cons.getParameterLists().isEmpty()) {
bme.addError("case must be a value constructor of a toplevel class: '" +
cons.getName(unit) +
"' is not a value constructor");
}
/*else if (!c.inherits(unit.getIdentifiableDeclaration())) {
bme.addError("case must be a value constructor of an identifiable class: '" +
c.getName(unit) +
"' is not a subtype of 'Identifiable'");
}*/
}
}
else {
//enumerated anonymous subclasses
if (!caseDec.isObjectClass()) {
bme.addError("case must be a toplevel anonymous class: '" +
value.getName(unit) +
"' is not an anonymous class");
}
else if (!value.isToplevel()) {
bme.addError("case must be a toplevel anonymous class: '" +
value.getName(unit) +
"' is not toplevel");
}
}
if (checkDirectSubtype(td, bme, type)) {
checkAssignable(type, td.getType(), bme,
getCaseTypeExplanation(td, type));
}
}
}
}
}
void collectCaseTypes(Tree.CaseTypes that, TypeDeclaration td) {
Set<TypeDeclaration> typeSet =
new HashSet<TypeDeclaration>();
for (Tree.StaticType ct: that.getTypes()) {
Type type = ct.getTypeModel();
if (!isTypeUnknown(type)) {
type = type.resolveAliases();
TypeDeclaration ctd = type.getDeclaration();
if (!typeSet.add(ctd)) {
//this error is not really truly necessary
Unit unit = that.getUnit();
ct.addError("duplicate case type: '" +
ctd.getName(unit) + "' of '" +
td.getName() + "'");
}
if (!(ctd instanceof TypeParameter)) {
//it's not a self type
if (checkDirectSubtype(td, ct, type)) {
checkAssignable(type, td.getType(), ct,
getCaseTypeExplanation(td, type));
}
//note: this is a better, faster way to call
// validateEnumeratedSupertypeArguments()
// but unfortunately it winds up displaying
// the error on the wrong node, confusing
// the user
/*
Type supertype =
type.getDeclaration()
.getType()
.getSupertype(td);
validateEnumeratedSupertypeArguments(t,
type.getDeclaration(), supertype);
*/
}
checkCaseType(td, ct, ctd);
}
}
}
void checkCaseType(TypeDeclaration type,
Tree.StaticType ct,
TypeDeclaration caseTypeDec) {
if (caseTypeDec instanceof ClassOrInterface &&
ct instanceof Tree.SimpleType) {
Tree.SimpleType t = (Tree.SimpleType) ct;
Tree.TypeArgumentList tal =
t.getTypeArgumentList();
if (tal!=null) {
List<Tree.Type> args =
tal.getTypes();
List<TypeParameter> typeParameters =
caseTypeDec.getTypeParameters();
Set<TypeParameter> used =
new HashSet<TypeParameter>();
for (int i=0;
i<args.size() &&
i<typeParameters.size();
i++) {
Tree.Type arg = args.get(i);
TypeParameter typeParameter =
caseTypeDec.getTypeParameters()
.get(i);
Type argType = arg.getTypeModel();
if (argType!=null) {
TypeDeclaration argTypeDec =
argType.getDeclaration();
if (argType.isTypeParameter()) {
TypeParameter tp =
(TypeParameter)
argTypeDec;
if (!tp.getDeclaration()
.equals(type)) {
arg.addError("type argument is not a type parameter of the enumerated type: '" +
tp.getName() +
"' is not a type parameter of '" +
type.getName());
}
else if (!used.add(tp)) {
arg.addError("type parameter of the enumerated type is used twice as a type argument: '" +
argTypeDec.getName());
}
}
else if (typeParameter.isCovariant()) {
checkAssignable(
typeParameter.getType(),
argType, arg,
"type argument not an upper bound of the type parameter");
}
else if (typeParameter.isContravariant()) {
checkAssignable(argType,
typeParameter.getType(), arg,
"type argument not a lower bound of the type parameter");
}
else {
arg.addError("type argument is not a type parameter of the enumerated type: '" +
argTypeDec.getName() + "'");
}
}
}
}
}
}
@Override
public void visit(Tree.DelegatedConstructor that) {
super.visit(that);
TypeDeclaration constructor =
(TypeDeclaration)
that.getScope();
Scope container = constructor.getContainer();
Tree.SimpleType type = that.getType();
if (type!=null &&
constructor instanceof Constructor &&
container instanceof Class) {
Class containingClass = (Class) container;
Type et = containingClass.getExtendedType();
if (et!=null) {
Unit unit = that.getUnit();
Type extendedType =
containingClass.getExtendedType();
Type constructedType =
type.getTypeModel();
Declaration delegate =
type.getDeclarationModel();
TypeDeclaration superclass =
et.getDeclaration();
if (superclass instanceof Constructor) {
superclass =
superclass.getExtendedType()
.getDeclaration();
}
if (delegate instanceof Constructor) {
Constructor c = (Constructor) delegate;
if (c.equals(constructor)) {
type.addError("constructor delegates to itself: '" +
c.getName() + "'");
}
Type delegatedType = c.getExtendedType();
TypeDeclaration delegated =
delegatedType == null ? null :
delegatedType.getDeclaration();
if (superclass.equals(delegated)) {
checkIsExactly(
constructedType.getExtendedType(),
extendedType, type,
"type arguments must match type arguments in extended class expression");
}
else if (containingClass.equals(delegated)) {
if (type instanceof Tree.QualifiedType) {
Tree.QualifiedType qt =
(Tree.QualifiedType)
type;
checkIsExactly(
constructedType.getQualifyingType(),
containingClass.getType(),
qt.getOuterType(),
"type arguments must be the type parameters of this class");
}
}
else {
type.addError("not a constructor of the immediate superclass: '" +
delegate.getName(unit) +
"' is not a constructor of '" +
superclass.getName(unit) + "'");
}
}
else if (delegate instanceof Class) {
if (superclass.equals(delegate)) {
checkIsExactly(constructedType,
extendedType, type,
"type arguments must match type arguments in extended class expression");
}
else if (containingClass.equals(delegate)) {
checkIsExactly(constructedType,
containingClass.getType(), type,
"type arguments must be the type parameters of this class");
}
else {
type.addError("does not instantiate the immediate superclass: '" +
delegate.getName(unit) + "' is not '" +
superclass.getName(unit) + "'");
}
}
}
}
}
private static boolean checkDirectSubtype(TypeDeclaration td,
Node node, Type type) {
boolean found = false;
TypeDeclaration ctd = type.getDeclaration();
if (td instanceof Interface) {
for (Type st: ctd.getSatisfiedTypes()) {
if (st!=null &&
st.resolveAliases()
.getDeclaration()
.equals(td)) {
found = true;
}
}
}
else if (td instanceof Class) {
Type et = ctd.getExtendedType();
if (et!=null &&
et.resolveAliases()
.getDeclaration()
.equals(td)) {
found = true;
}
}
if (!found) {
node.addError("case type is not a direct subtype of enumerated type: " +
ctd.getName(node.getUnit()));
}
return found;
}
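//Illustrative (hypothetical) sketch of the rule enforced above:
//    abstract class Shape() of Square | Circle {}
//    class Square() extends Shape() {}  //ok: direct subtype
//    class Circle() extends Shape() {}  //ok: direct subtype
//A class extending Square would not be a *direct* subtype of 'Shape',
//so it could not be listed in Shape's 'of' clause.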
private String getCaseTypeExplanation(TypeDeclaration td,
Type type) {
String message = "case type must be a subtype of enumerated type";
if (!td.getTypeParameters().isEmpty() &&
type.getDeclaration().inherits(td)) {
message += " for every type argument of the generic enumerated type";
}
return message;
}
private void checkExtensionOfMemberType(Node that,
TypeDeclaration td, Type type) {
Type qt = type.getQualifyingType();
if (qt!=null && td instanceof ClassOrInterface) {
Unit unit = that.getUnit();
TypeDeclaration d = type.getDeclaration();
if (d.isStaticallyImportable() ||
d instanceof Constructor) {
checkExtensionOfMemberType(that, td, qt);
}
else {
Scope s = td;
while (s!=null) {
s = s.getContainer();
if (s instanceof TypeDeclaration) {
TypeDeclaration otd =
(TypeDeclaration) s;
if (otd.getType().isSubtypeOf(qt)) {
return;
}
}
}
that.addError("qualifying type '" + qt.asString(unit) +
"' of supertype '" + type.asString(unit) +
"' is not an outer type or supertype of any outer type of '" +
td.getName(unit) + "'");
}
}
}
private void checkSelfTypes(Tree.StaticType that,
TypeDeclaration td, Type type) {
if (!(td instanceof TypeParameter)) { //TODO: is this really ok?!
List<TypeParameter> params =
type.getDeclaration()
.getTypeParameters();
List<Type> args =
type.getTypeArgumentList();
Unit unit = that.getUnit();
for (int i=0; i<params.size(); i++) {
TypeParameter param = params.get(i);
if (param.isSelfType() && !args.isEmpty()) {
Type arg = args.get(i);
if (arg==null) {
arg = unit.getUnknownType();
}
TypeDeclaration std =
param.getSelfTypedDeclaration();
Type at;
TypeDeclaration mtd;
if (param.getContainer().equals(std)) {
at = td.getType();
mtd = td;
}
else {
//TODO: lots wrong here?
mtd = (TypeDeclaration)
td.getMember(std.getName(),
null, false);
at = mtd==null ? null : mtd.getType();
}
if (at!=null && !at.isSubtypeOf(arg) &&
!(mtd.getSelfType()!=null &&
mtd.getSelfType().isExactly(arg))) {
String help = "";
TypeDeclaration ad = arg.getDeclaration();
if (ad instanceof TypeParameter) {
TypeParameter tp = (TypeParameter) ad;
if (tp.getDeclaration().equals(td)) {
help = " (try making '" + ad.getName() +
"' a self type of '" + td.getName() + "')";
}
}
else if (ad instanceof Interface) {
help = " (try making '" + td.getName() +
"' satisfy '" + ad.getName() + "')";
}
else if (ad instanceof Class && td instanceof Class) {
help = " (try making '" + td.getName() +
"' extend '" + ad.getName() + "')";
}
that.addError("type argument does not satisfy self type constraint on type parameter '" +
param.getName() + "' of '" +
type.getDeclaration().getName(unit) + "': '" +
arg.asString(unit) +
"' is not a supertype or self type of '" +
td.getName(unit) + "'" + help);
}
}
}
}
}
private void checkSupertypeVarianceAnnotations(Tree.SimpleType et) {
Tree.TypeArgumentList tal =
et.getTypeArgumentList();
if (tal!=null) {
for (Tree.Type t: tal.getTypes()) {
if (t instanceof Tree.StaticType) {
Tree.StaticType st = (Tree.StaticType) t;
Tree.TypeVariance variance =
st.getTypeVariance();
if (variance!=null) {
variance.addError("supertype expression may not specify variance");
}
}
}
}
}
@Override
public void visit(Tree.Enumerated that) {
super.visit(that);
Value v = that.getDeclarationModel();
Scope container = v.getContainer();
if (container instanceof Class) {
Class cl = (Class) container;
List<TypedDeclaration> caseValues =
cl.getCaseValues();
if (caseValues!=null
&& !caseValues.contains(v) &&
!cl.isAbstract()) {
that.addError("value constructor does not occur in of clause of non-abstract enumerated class: '" +
v.getName() +
"' is not listed in the of clause of '" +
cl.getName() + "'");
}
}
}
@Override
public void visit(Tree.Constructor that) {
super.visit(that);
Constructor c = that.getConstructor();
Scope container =
c.getContainer();
if (container instanceof Class) {
Class cl = (Class) container;
List<TypedDeclaration> caseValues =
cl.getCaseValues();
if (caseValues!=null &&
!c.isAbstract() &&
!cl.isAbstract()) {
that.addError("non-abstract enumerated class may not have non-partial callable constructor");
}
}
}
}
|
import java.util.Random;
public class ANN {
private Random random = new Random();
private int L;
private double[][][] neuralNetWeights;
private double[][] neuralNetBias;
private double[][] neuralNetActivation;
private double[][] neuralNetZ;
private double[][] error;
private double weightsLearningRate = .75;
private double biasLearningRate = .75;
int trialCount = 0;
/**
* Create an artificial neural network
*
* @param inputNodesNum number of input nodes
* @param hiddenNodesNum number of nodes in each hidden layer
* @param outputNodesNum number of output nodes
*/
public ANN(int inputNodesNum, int[] hiddenNodesNum, int outputNodesNum) {
L = hiddenNodesNum.length+2;
neuralNetWeights = new double[L][][];
neuralNetBias = new double[L][];
neuralNetActivation = new double[L][];
neuralNetZ = new double[L][];
error = new double[L][];
neuralNetWeights[0] = new double[inputNodesNum][1];
neuralNetBias[0] = new double[inputNodesNum];
neuralNetActivation[0] = new double[inputNodesNum];
neuralNetZ[0] = new double[inputNodesNum];
error[0] = new double[inputNodesNum];
neuralNetWeights[1] = new double[hiddenNodesNum[0]][inputNodesNum];
neuralNetBias[1] = new double[hiddenNodesNum[0]];
neuralNetActivation[1] = new double[hiddenNodesNum[0]];
neuralNetZ[1] = new double[hiddenNodesNum[0]];
error[1] = new double[hiddenNodesNum[0]];
for(int i = 1; i < L - 2; i++){
neuralNetWeights[i+1] = new double[hiddenNodesNum[i]][hiddenNodesNum[i - 1]];
neuralNetBias[i+1] = new double[hiddenNodesNum[i]];
neuralNetActivation[i+1] = new double[hiddenNodesNum[i]];
neuralNetZ[i+1] = new double[hiddenNodesNum[i]];
error[i+1] = new double[hiddenNodesNum[i]];
}
neuralNetWeights[L - 1] = new double[outputNodesNum][hiddenNodesNum[hiddenNodesNum.length-1]];
neuralNetBias[L - 1] = new double[outputNodesNum];
neuralNetActivation[L - 1] = new double[outputNodesNum];
neuralNetZ[L - 1] = new double[outputNodesNum];
error[L - 1] = new double[outputNodesNum];
init();
}
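// Illustrative usage (hypothetical values): a 2-input network with one
// hidden layer of 3 neurons and a single output node:
//     ANN net = new ANN(2, new int[]{3}, 1);
//     double[] stats = net.test(new double[]{0.0, 1.0}, new double[]{1.0});
//     // stats[0] = average error, stats[1] = saturation, stats[2..] = outputs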
/**
* Initializes weights and biases of the neurons
*/
public void init(){
for(int i = 0; i < neuralNetWeights.length; i++){
for(int j = 0; j < neuralNetWeights[i].length; j++){
if(i == 0 || i == neuralNetWeights.length-1) neuralNetBias[i][j] = 0; // zero bias for input and output layers
else neuralNetBias[i][j] = random.nextGaussian();
for(int k = 0; k < neuralNetWeights[i][j].length; k++){
if(i == 0) neuralNetWeights[i][j][k] = 1;
else neuralNetWeights[i][j][k] = random.nextGaussian() * 1.0/Math.sqrt(neuralNetWeights[i].length);
}
}
}
}
/**
* Pass an array of input vectors with matching labels to train the NN.
* Returns the average final error, average saturation and the final
* activation signals.
*
* @param values input vectors, one per training example
* @param labels expected outputs, one per training example
* @return {average final error, average saturation, final activations...}
*/
public double[] train(double[][] values, double[][] labels){
double[] settings = new double[2 + labels[0].length];
for(int i = 0; i < values.length; i ++){
test(values[i], labels[i]);
}
double averageLastError = 0;
double averageSaturation = 0;
for(int i = 0; i < labels[0].length; i++){
averageLastError += Math.abs(labels[labels.length - 1][i] - neuralNetActivation[L-1][i]);
averageSaturation += Math.pow(Math.abs(labels[labels.length - 1][i] - .5),2) * 2;
settings[i+2] = neuralNetActivation[L-1][i];
}
settings[0] = averageLastError/labels[0].length;
settings[1] = averageSaturation;
return settings;
}
/**
* Run a single trial through the ANN
*
* @param values input vector
* @param labels expected output vector
* @return {average error, saturation, final activations...}
*/
public double[] test(double[] values, double[] labels){
double[] settings = new double[2 + labels.length];
feedForward(values);
backPropogate(labels);
trialCount++;
double averageLastError = 0;
double averageSaturation = 0;
for(int i = 0; i < labels.length; i++){
averageLastError += Math.abs(labels[i] - neuralNetActivation[L-1][i]);
averageSaturation += Math.pow(Math.abs(labels[i] - .5),2) * 2;
settings[i+2] = neuralNetActivation[L-1][i];
}
settings[0] = averageLastError/labels.length;
settings[1] = averageSaturation;
return settings;
}
/**
* Helper method that does the feed forward
*
* @param inputField
*/
private void feedForward(double[] inputField){
for(int i = 0; i < inputField.length; i++){
neuralNetActivation[0][i] = inputField[i];
}
for(int i = 1; (i < L); i++){
calculateZ(i);
calculateActivation(i);
}
}
/**
* Helper function to do the back propagation
*
* @param y
*/
private void backPropogate(double[] y){
calculateErrorL(y);
calculateErrorl();
updateNet();
}
/**
* Computes the weighted input z = W * a + b for each neuron in the layer;
* an intermediary used in a few calculations
*
* @param layer
*/
private void calculateZ(int layer){
for(int j = 0; j < neuralNetWeights[layer].length; j++){
neuralNetZ[layer][j] = 0;
for(int k = 0; k < neuralNetWeights[layer][j].length; k++){
neuralNetZ[layer][j] += neuralNetWeights[layer][j][k] * neuralNetActivation[layer - 1][k];
}
neuralNetZ[layer][j] += neuralNetBias[layer][j];
}
}
/**
* Activation calculation
*
* @param layer
*/
private void calculateActivation(int layer){
neuralNetActivation[layer] = sigmoid(neuralNetZ[layer]);
}
/**
* Activation Function
*
* @param x
* @return f(x)
*/
private double[] sigmoid(double[] x){
double[] out = new double[x.length];
for(int i = 0; i < x.length; i++)
out[i] = 1.0/(1.0 + Math.pow(Math.E,-x[i]));
return out;
}
/**
* Derivative of Activation function
*
* @param x
* @return f'(x)
*/
private double[] sigmoidPrime(double[] x){
double[] out = new double[x.length];
for(int i = 0; i < x.length; i++)
out[i] = Math.pow(Math.E, x[i])/Math.pow((Math.pow(Math.E, x[i])+1),2);
return out;
}
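// Note: e^x / (e^x + 1)^2 equals sigmoid(x) * (1 - sigmoid(x)); a sketch
// of that (often more numerically stable) form for one element would be:
//     double s = 1.0 / (1.0 + Math.exp(-x[i]));
//     out[i] = s * (1.0 - s);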
/**
* Error calculation for output nodes
*
* @param y
*/
private void calculateErrorL(double[] y){
error[L - 1] = hadamarProduct(costGradient(L - 1, y),sigmoidPrime(neuralNetZ[L - 1]));
}
/**
* Error calculation for non-output nodes
*/
private void calculateErrorl(){
for(int i = L - 2; i >= 0; i--){
error[i] = hadamarProduct(arrayProduct(transpose(neuralNetWeights[i + 1]),error[i+1]),sigmoidPrime(neuralNetZ[i]));
}
}
/**
* Updates the weights and biases via their gradients
*/
private void updateNet(){
for(int i = 1; i < neuralNetWeights.length; i++){
for(int j = 0; j < neuralNetWeights[i].length; j++){
neuralNetBias[i][j] = neuralNetBias[i][j] - biasLearningRate * biasGradient(i,j);
for(int k = 0; k < neuralNetWeights[i][j].length; k++){
neuralNetWeights[i][j][k] = neuralNetWeights[i][j][k] - weightsLearningRate * weightGradient(i,j,k);
}
}
}
}
/**
* helper function for bias gradient
*
* @param l
* @param j
* @return gradient
*/
private double biasGradient(int l, int j){
if( l == neuralNetWeights.length-1) return 0;
return error[l][j];
}
/**
* Helper function for weight gradient
*
* @param l
* @param j
* @param k
* @return gradient
*/
private double weightGradient(int l, int j, int k){
// layers 0 and 1 are special-cased to a constant gradient of 1
if(l == 1 || l == 0) return 1;
return neuralNetActivation[l - 1][k] * error[l][j];
}
/**
* Calculates Cost Gradient
*
* @param layer
* @param y
* @return cost gradient
*/
private double[] costGradient(int layer, double[] y){
double out[] = new double[y.length];
for(int i = 0; i < y.length; i++){
out[i] = neuralNetActivation[layer][i] - y[i];
}
return out;
}
/**
* Hadamard (element-wise) product
*
* @param first
* @param second
* @return double[] of the Hadamard product
*/
private double[] hadamarProduct(double[] first, double[] second){
double[] out = new double[first.length];
for(int i = 0; i < out.length; i++){
out[i] = first[i] * second[i];
}
return out;
}
/**
* Matrix transpose
*
* @param a
* @return the transpose
*/
private double[][] transpose(double[][] a){
double[][] out = new double[a[0].length][a.length];
for (int i = 0; i < a.length; i++) {
for (int j = 0; j < a[0].length; j++) {
double temp = a[i][j];
out[j][i] = temp;
}
}
return out;
}
/**
* Matrix-vector multiplication
*
* @param first the matrix
* @param second the vector
* @return first * second
*/
private double[] arrayProduct(double[][] first, double[] second){
double out[] = new double[first.length];
for(int i = 0; i < first.length; i++){
double sum = 0;
for(int k = 0; k < second.length; k++){
sum += first[i][k] * second[k];
}
out[i] = sum;
}
return out;
}
/**
* Formats the weights as a string
*
* @return the weights
*/
public String toStringWeights(){
String out = "";
for(int i = 0; i < neuralNetWeights.length; i++){
for(int j = 0; j < neuralNetWeights[i].length; j++){
for(int k = 0; k < neuralNetWeights[i][j].length; k++){
out += neuralNetWeights[i][j][k] + " ";
}
out += "\n";
}
out += "\n";
}
return out;
}
/**
* Formats the biases as a string
*
* @return the biases
*/
public String toStringBias(){
String out = "";
for(int i = 0; i < neuralNetBias.length; i++){
for(int j = 0; j < neuralNetBias[i].length; j++){
out += neuralNetBias[i][j] + " ";
out += "\n";
}
out += "\n";
}
return out;
}
/**
* Formats the activation values as a string
*
* @return the activation values
*/
public String toStringActivationFunction(){
String out = "";
for(int i = 0; i < neuralNetActivation.length; i++){
for(int j = 0; j < neuralNetActivation[i].length; j++){
out += neuralNetActivation[i][j] + " ";
out += "\n";
}
out += "\n";
}
return out;
}
/**
* Formats the z values as a string
*
* @return the z values
*/
public String toStringZ(){
String out = "";
for(int i = 0; i < neuralNetZ.length; i++){
for(int j = 0; j < neuralNetZ[i].length; j++){
out += neuralNetZ[i][j] + " ";
out += "\n";
}
out += "\n";
}
return out;
}
/**
* @return the weightsLearningRate
*/
public double getWeightsLearningRate() {
return weightsLearningRate;
}
/**
* @param weightsLearningRate the weightsLearningRate to set
*/
public void setWeightsLearningRate(double weightsLearningRate) {
this.weightsLearningRate = weightsLearningRate;
}
/**
* @return the biasLearningRate
*/
public double getBiasLearningRate() {
return biasLearningRate;
}
/**
* @param biasLearningRate the biasLearningRate to set
*/
public void setBiasLearningRate(double biasLearningRate) {
this.biasLearningRate = biasLearningRate;
}
}
|
package org.apache.jmeter.visualizers;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.UnsupportedEncodingException;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JEditorPane;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import javax.swing.JTree;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellRenderer;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeSelectionModel;
import org.apache.jmeter.samplers.Clearable;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jmeter.visualizers.gui.AbstractVisualizer;
import org.apache.log.Hierarchy;
import org.apache.log.Logger;
import org.apache.jorphan.gui.JLabeledTextArea;
public class ViewResultsFullVisualizer
extends AbstractVisualizer
implements ActionListener, TreeSelectionListener, Clearable
{
public final static Color SERVER_ERROR_COLOR = Color.red;
public final static Color CLIENT_ERROR_COLOR = Color.blue;
public final static Color REDIRECT_COLOR = Color.green;
protected static final String HTML_BUTTON_LABEL = "Render HTML";
protected static final String TEXT_BUTTON_LABEL = "Show Text";
protected DefaultMutableTreeNode root;
protected DefaultTreeModel treeModel;
protected GridBagLayout gridBag;
protected GridBagConstraints gbc;
/** The button that will pop up the response as rendered HTML or
text. **/
protected JButton htmlOrTextButton;
/** The response to be displayed. **/
protected String response;
/** The pane where the rendered HTML response is displayed. **/
transient protected JEditorPane htmlEditPane;
protected JPanel resultPanel;
protected JScrollPane treePane;
protected JScrollPane resultPane;
protected JSplitPane treeSplitPane;
/** The text area where the response is displayed. **/
protected JTextArea textArea;
protected JTree jTree;
protected int childIndex;
transient private static Logger log =
Hierarchy.getDefaultHierarchy().getLoggerFor("jmeter.gui");
public ViewResultsFullVisualizer()
{
super();
init();
log.debug("Start : ViewResultsFullVisualizer1");
log.debug("End : ViewResultsFullVisualizer1");
}
public void add(SampleResult res)
{
updateGui(res);
}
public String getStaticLabel()
{
return JMeterUtils.getResString("view_results_tree_title");
}
public void updateGui(SampleResult res)
{
log.debug("Start : updateGui1");
if (log.isDebugEnabled())
log.debug("updateGui1 : sample result - " + res);
DefaultMutableTreeNode currNode = new DefaultMutableTreeNode(res);
treeModel.insertNodeInto(currNode, root, root.getChildCount());
SampleResult[] subResults = res.getSubResults();
if (subResults != null)
{
int leafIndex = 0;
for (int i = 0; i < subResults.length; i++)
{
SampleResult child = subResults[i];
if (log.isDebugEnabled())
log.debug("updateGui1 : child sample result - " + child);
DefaultMutableTreeNode leafNode = new DefaultMutableTreeNode(child);
treeModel.insertNodeInto(leafNode, currNode, leafIndex++);
}
}
log.debug("End : updateGui1");
}
public void clear()
{
log.debug("Start : clear1");
int totalChild = root.getChildCount();
if (log.isDebugEnabled())
log.debug("clear1 : total child - " + totalChild);
// the child to be removed will always be 0 because as nodes are
// removed, the nth node becomes the (n-1)th
for (int i = 0; i < totalChild; i++)
{
treeModel.removeNodeFromParent(
(DefaultMutableTreeNode) root.getChildAt(0));
}
resultPanel.removeAll();
resultPanel.revalidate();
// reset the child index
childIndex = 0;
log.debug("End : clear1");
}
public String toString()
{
String desc = "Shows the text results of sampling in tree form";
if (log.isDebugEnabled())
log.debug("toString1 : Returning description - " + desc);
return desc;
}
public void valueChanged(TreeSelectionEvent e)
{
log.debug("Start : valueChanged1");
DefaultMutableTreeNode node =
(DefaultMutableTreeNode) jTree.getLastSelectedPathComponent();
if (log.isDebugEnabled())
log.debug("valueChanged : selected node - " + node);
if (node != null)
{
SampleResult res = (SampleResult) node.getUserObject();
if (log.isDebugEnabled())
log.debug("valueChanged1 : sample result - " + res);
if (res != null)
{
resultPanel.removeAll();
// load time label
JLabel loadTime = new JLabel();
log.debug("valueChanged1 : load time - " + res.getTime());
loadTime.setText("Load time : " + res.getTime());
gbc.gridx = 0;
gbc.gridy = 0;
// keep all of the labels to the left
gbc.anchor = GridBagConstraints.WEST;
// with weightx != 0.0, components won't clump in the center
gbc.weightx = 1.0;
// pad a bit from the display area
gbc.insets = new Insets(0, 10, 0, 0);
if (res.getSamplerData() != null) // res is already known to be non-null here
{
JLabeledTextArea postData =
new JLabeledTextArea(
JMeterUtils.getResString("request_data"),
null);
postData.setText(res.getSamplerData().toString());
resultPanel.add(postData, gbc.clone());
gbc.gridy++;
}
resultPanel.add(loadTime, gbc.clone());
// response code label
JLabel httpResponseCode = new JLabel();
String responseCode = res.getResponseCode();
log.debug("valueChanged1 : response code - " + responseCode);
int responseLevel = 0;
if (responseCode != null)
try
{
responseLevel = Integer.parseInt(responseCode) / 100;
}
catch (NumberFormatException numberFormatException)
{
// no need to change the foreground color
}
switch (responseLevel)
{
case 3 :
httpResponseCode.setForeground(REDIRECT_COLOR);
break;
case 4 :
httpResponseCode.setForeground(CLIENT_ERROR_COLOR);
break;
case 5 :
httpResponseCode.setForeground(SERVER_ERROR_COLOR);
break;
}
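// e.g. a 302 response yields responseLevel 3 and REDIRECT_COLOR,
// 404 yields 4 and CLIENT_ERROR_COLOR, 500 yields 5 and SERVER_ERROR_COLOR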
httpResponseCode.setText(
JMeterUtils.getResString("HTTP response code")
+ " : "
+ responseCode);
gbc.gridx = 0;
gbc.gridy++;
gridBag.setConstraints(httpResponseCode, gbc);
resultPanel.add(httpResponseCode);
// response message label
JLabel httpResponseMsg = new JLabel();
String responseMsgStr = res.getResponseMessage();
log.debug("valueChanged1 : response message - " + responseMsgStr);
httpResponseMsg.setText("HTTP response message : " + responseMsgStr);
gbc.gridx = 0;
gbc.gridy++;
gridBag.setConstraints(httpResponseMsg, gbc);
resultPanel.add(httpResponseMsg);
gbc.gridy++;
// get the text response and image icon
// to determine which is NOT null
byte[] responseBytes = (byte[]) res.getResponseData();
ImageIcon icon = null;
if (res.getDataType() != null
&& res.getDataType().equals(SampleResult.TEXT))
{
try
{
response = new String(responseBytes, "utf-8");
}
catch (UnsupportedEncodingException err)
{
response = new String(responseBytes);
}
}
else if (responseBytes != null)
{
icon = new ImageIcon(responseBytes);
}
if (response != null)
{
gbc.gridx = 0;
gbc.gridy++;
gridBag.setConstraints(htmlOrTextButton,
gbc);
resultPanel.add(htmlOrTextButton);
// Text display <=> HTML labeled button
if (HTML_BUTTON_LABEL.equals(htmlOrTextButton.getText()))
{
showTextResponse(response);
}
// HTML display <=> Text labeled button
else
{
showRenderedResponse(response);
}
}
else if (icon != null)
{
JLabel image = new JLabel();
image.setIcon(icon);
gbc.gridx = 0;
gridBag.setConstraints(image, gbc);
resultPanel.add(image);
}
resultPanel.repaint();
resultPanel.revalidate();
}
}
log.debug("End : valueChanged1");
}
protected void initTextArea()
{
textArea = new JTextArea();
textArea.setColumns(70);
textArea.setLineWrap(true);
textArea.setWrapStyleWord(true);
textArea.setTabSize(4);
gridBag.setConstraints(textArea, gbc);
resultPanel.add(textArea);
gbc.gridy++;
}
protected void showTextResponse(String response)
{
resultPanel.remove(htmlEditPane);
gbc.gridx = 0;
gbc.gridy++;
gridBag.setConstraints(textArea, gbc);
textArea.setText(response);
textArea.setCaretPosition(0);
resultPanel.add(textArea);
}
public void actionPerformed(ActionEvent e)
{
// If the htmlOrTextButton is clicked, show the response in the
// appropriate way, and change the button label
if (htmlOrTextButton.equals(e.getSource()))
{
// Show rendered HTML
if (HTML_BUTTON_LABEL.equals(htmlOrTextButton.getText()))
{
showRenderedResponse(response);
htmlOrTextButton.setText(TEXT_BUTTON_LABEL);
}
// Show the textual response
else
{
showTextResponse(response);
htmlOrTextButton.setText(HTML_BUTTON_LABEL);
}
}
}
protected void initHtmlEditPane()
{
htmlEditPane = new JEditorPane();
HTMLEditorKit htmlKit = new HTMLEditorKit();
htmlEditPane.setEditorKit(htmlKit);
}
protected void showRenderedResponse(String response)
{
resultPanel.remove(textArea);
// indexOf returns -1 when there is no <html> tag; guard against that
int htmlIndex = Math.max(0, response.indexOf("<html>"));
String html = response.substring(htmlIndex);
htmlEditPane.setText(html);
htmlEditPane.setCaretPosition(0);
gbc.gridx = 0;
gbc.gridy++;
gridBag.setConstraints(htmlEditPane, gbc);
resultPanel.add(htmlEditPane);
}
protected void initHtmlOrTextButton()
{
htmlOrTextButton = new JButton(HTML_BUTTON_LABEL);
htmlOrTextButton.addActionListener(this);
}
protected void init()
{
this.setLayout(new BorderLayout());
SampleResult rootSampleResult = new SampleResult();
rootSampleResult.setSampleLabel("Root");
rootSampleResult.setSuccessful(true);
root = new DefaultMutableTreeNode(rootSampleResult);
treeModel = new DefaultTreeModel(root);
jTree = new JTree(treeModel);
jTree.setCellRenderer(new ResultsNodeRenderer());
jTree.getSelectionModel().setSelectionMode(
TreeSelectionModel.SINGLE_TREE_SELECTION);
jTree.addTreeSelectionListener(this);
treePane = new JScrollPane(jTree);
gridBag = new GridBagLayout();
gbc = new GridBagConstraints();
resultPanel = new JPanel(gridBag);
resultPane = new JScrollPane(resultPanel);
initHtmlOrTextButton();
initTextArea();
initHtmlEditPane();
treeSplitPane =
new JSplitPane(JSplitPane.VERTICAL_SPLIT, treePane, resultPane);
getFilePanel().add(getErrorLoggingCheckbox());
add(getFilePanel(), BorderLayout.NORTH);
add(treeSplitPane, BorderLayout.CENTER);
}
private class ResultsNodeRenderer extends DefaultTreeCellRenderer
{
public Component getTreeCellRendererComponent(
JTree tree,
Object value,
boolean sel,
boolean expanded,
boolean leaf,
int row,
boolean hasFocus)
{
super.getTreeCellRendererComponent(
tree,
value,
sel,
expanded,
leaf,
row,
hasFocus);
if (!((SampleResult) ((DefaultMutableTreeNode) value).getUserObject())
.isSuccessful())
{
this.setForeground(Color.red);
}
return this;
}
}
}
|
package de.unitrier.st.soposthistory.tests;
import com.google.common.collect.Sets;
import de.unitrier.st.soposthistory.blocks.CodeBlockVersion;
import de.unitrier.st.soposthistory.blocks.PostBlockVersion;
import de.unitrier.st.soposthistory.blocks.TextBlockVersion;
import de.unitrier.st.soposthistory.gt.*;
import de.unitrier.st.soposthistory.util.Config;
import de.unitrier.st.soposthistory.version.PostVersion;
import de.unitrier.st.soposthistory.version.PostVersionList;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.junit.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.*;
class BlockLifeSpanAndGroundTruthTest {
protected static Path pathToPostIdList = Paths.get("testdata", "postIds.csv");
protected static Path pathToPostHistory = Paths.get("testdata");
protected static Path pathToGroundTruth = Paths.get("testdata", "gt");
protected static Path outputDir = Paths.get("testdata", "metrics comparison");
@Test
void testReadFromDirectory() {
List<PostGroundTruth> postGroundTruthList = PostGroundTruth.readFromDirectory(pathToGroundTruth);
try {
assertEquals(Files.list(pathToGroundTruth).filter(
file -> PostGroundTruth.fileNamePattern.matcher(file.toFile().getName()).matches())
.count(),
postGroundTruthList.size());
} catch (IOException e) {
e.printStackTrace();
}
}
@Test
void testPostBlockLifeSpanVersionsEqual() {
// compare an original PostBlockLifeSpanVersion against variants that each differ in one field
PostBlockLifeSpanVersion original = new PostBlockLifeSpanVersion(4711, 42, 1, 0, 0, 0, "");
PostBlockLifeSpanVersion differentPostId = new PostBlockLifeSpanVersion(4712, 42, 1, 0, 0, 0, "");
PostBlockLifeSpanVersion differentPostHistoryId = new PostBlockLifeSpanVersion(4711, 43, 1, 0, 0, 0, "");
PostBlockLifeSpanVersion differentPostBlockTypeId = new PostBlockLifeSpanVersion(4711, 42, 2, 0, 0, 0, "");
PostBlockLifeSpanVersion differentLocalId = new PostBlockLifeSpanVersion(4711, 42, 1, 1, 0, 0, "");
PostBlockLifeSpanVersion differentPredId = new PostBlockLifeSpanVersion(4711, 42, 1, 0, 1, 0, "");
PostBlockLifeSpanVersion differentSuccId = new PostBlockLifeSpanVersion(4711, 42, 1, 0, 0, 1, "");
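// equals() considers postId, postHistoryId, postBlockTypeId, and localId;
// predId and succId are deliberately ignored, as the two assertTrue calls below show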
assertTrue(original.equals(original));
assertFalse(original.equals(differentPostId));
assertFalse(original.equals(differentPostHistoryId));
assertFalse(original.equals(differentPostBlockTypeId));
assertFalse(original.equals(differentLocalId));
assertTrue(original.equals(differentPredId));
assertTrue(original.equals(differentSuccId));
}
@Test
void testPostBlockLifeSpanExtraction() {
int postId = 22037280;
PostVersionList a_22037280 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2);
PostGroundTruth a_22037280_gt = PostGroundTruth.readFromCSV(pathToGroundTruth, postId);
List<PostBlockLifeSpan> lifeSpans = a_22037280.getPostBlockLifeSpans();
List<PostBlockLifeSpan> lifeSpansGT = a_22037280_gt.getPostBlockLifeSpans();
assertEquals(lifeSpans.size(), lifeSpansGT.size());
assertEquals(5, lifeSpans.size());
for (int i = 0; i < lifeSpans.size(); i++) {
assertTrue(lifeSpans.get(i).equals(lifeSpansGT.get(i)));
}
}
@Test
void testPostBlockLifeSpanExtractionFilter() {
int postId = 22037280;
PostVersionList a_22037280 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2);
PostGroundTruth a_22037280_gt = PostGroundTruth.readFromCSV(pathToGroundTruth, postId);
// text
List<PostBlockLifeSpan> textBlockLifeSpans = a_22037280.getPostBlockLifeSpans(
TextBlockVersion.getPostBlockTypeIdFilter()
);
List<PostBlockLifeSpan> textLifeSpansGT = a_22037280_gt.getPostBlockLifeSpans(
TextBlockVersion.getPostBlockTypeIdFilter()
);
assertEquals(textBlockLifeSpans.size(), textLifeSpansGT.size());
assertEquals(3, textBlockLifeSpans.size());
for (int i = 0; i < textBlockLifeSpans.size(); i++) {
assertTrue(textBlockLifeSpans.get(i).equals(textLifeSpansGT.get(i)));
}
// code
List<PostBlockLifeSpan> codeBlockLifeSpans = a_22037280.getPostBlockLifeSpans(
CodeBlockVersion.getPostBlockTypeIdFilter()
);
List<PostBlockLifeSpan> codeLifeSpansGT = a_22037280_gt.getPostBlockLifeSpans(
CodeBlockVersion.getPostBlockTypeIdFilter()
);
assertEquals(codeBlockLifeSpans.size(), codeLifeSpansGT.size());
assertEquals(2, codeBlockLifeSpans.size());
for (int i = 0; i < codeBlockLifeSpans.size(); i++) {
assertTrue(codeBlockLifeSpans.get(i).equals(codeLifeSpansGT.get(i)));
}
}
@Test
void testPostBlockConnectionExtraction() {
int postId = 22037280;
PostVersionList a_22037280 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2);
PostGroundTruth a_22037280_gt = PostGroundTruth.readFromCSV(pathToGroundTruth, postId);
List<PostBlockLifeSpan> lifeSpans = a_22037280.getPostBlockLifeSpans();
List<PostBlockLifeSpan> lifeSpansGT = a_22037280_gt.getPostBlockLifeSpans();
Set<PostBlockConnection> connections = PostBlockLifeSpan.toPostBlockConnections(lifeSpans);
Set<PostBlockConnection> connectionsGT = PostBlockLifeSpan.toPostBlockConnections(lifeSpansGT);
assertEquals(connections.size(), connectionsGT.size());
assertTrue(PostBlockConnection.equals(connections, connectionsGT));
assertTrue(PostBlockConnection.equals(
PostBlockConnection.intersection(connections, connectionsGT),
connections)
);
assertTrue(PostBlockConnection.equals(
PostBlockConnection.union(connections, connectionsGT),
connections)
);
assertEquals(0, PostBlockConnection.difference(connections, connectionsGT).size());
}
@Test
void testPostBlockPossibleConnectionsComparison() {
int postId = 22037280;
PostVersionList a_22037280 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2);
PostGroundTruth a_22037280_gt = PostGroundTruth.readFromCSV(pathToGroundTruth, postId);
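// 78 = 6 adjacent version pairs * (3*3 possible text connections + 2*2 possible code connections);
// the per-type counts are verified in testGetPossibleConnections below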
assertEquals(78, a_22037280.getPossibleConnections());
assertEquals(a_22037280.getPossibleConnections(), a_22037280_gt.getPossibleConnections());
}
@Test
void testPostBlockConnectionEquals() {
PostBlockLifeSpanVersion v1_1 = new PostBlockLifeSpanVersion(1, 1, 1, 1);
PostBlockLifeSpanVersion v1_2 = new PostBlockLifeSpanVersion(1, 1, 1, 1);
PostBlockLifeSpanVersion v2 = new PostBlockLifeSpanVersion(1, 2, 1, 1);
PostBlockLifeSpanVersion v3 = new PostBlockLifeSpanVersion(1, 3, 1, 1);
PostBlockLifeSpanVersion v4 = new PostBlockLifeSpanVersion(1, 4, 1, 1);
// test equality of PostBlockLifeSpanVersions
assertEquals(v1_1.getPostId(), v1_2.getPostId());
assertEquals(v1_1.getPostHistoryId(), v1_2.getPostHistoryId());
assertEquals(v1_1.getPostBlockTypeId(), v1_2.getPostBlockTypeId());
assertEquals(v1_1.getLocalId(), v1_2.getLocalId());
// test equality of PostBlockConnections
PostBlockConnection connection1 = new PostBlockConnection(v1_1, v2);
PostBlockConnection connection2 = new PostBlockConnection(v1_2, v2);
assertTrue(connection1.equals(connection2));
// test equality of a set of PostBlockConnections
PostBlockConnection connection3 = new PostBlockConnection(v1_2, v2);
PostBlockConnection connection4 = new PostBlockConnection(v3, v4);
assertTrue(PostBlockConnection.equals(
Sets.newHashSet(connection1, connection2, connection3, connection4),
Sets.newHashSet(connection1, connection2, connection3, connection4))
);
}
@Test
void testGetConnections() {
int postId = 22037280;
PostVersionList a_22037280 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2);
PostGroundTruth a_22037280_gt = PostGroundTruth.readFromCSV(pathToGroundTruth, postId);
List<Integer> postVersionListPostHistoryIds = a_22037280.getPostHistoryIds();
List<Integer> groundTruthPostHistoryIds = a_22037280_gt.getPostHistoryIds();
assertEquals(postVersionListPostHistoryIds, groundTruthPostHistoryIds);
for (Integer postHistoryId : groundTruthPostHistoryIds) {
Set<PostBlockConnection> postBlockConnections = a_22037280.getPostVersion(postHistoryId).getConnections();
Set<PostBlockConnection> postBlockConnectionsGT = a_22037280_gt.getConnections(postHistoryId);
assertTrue(PostBlockConnection.equals(postBlockConnections, postBlockConnectionsGT));
}
}
@Test
void testProcessVersionHistoryWithIntermediateResetting() {
int postId = 22037280;
PostVersionList a_22037280 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2, false);
testPostBlockVersionHistoryReset(a_22037280);
a_22037280.processVersionHistory();
testPostBlockVersionHistoryProcessed(a_22037280);
a_22037280.resetPostBlockVersionHistory();
testPostBlockVersionHistoryReset(a_22037280);
a_22037280.processVersionHistory();
testPostBlockVersionHistoryProcessed(a_22037280);
}
private void testPostBlockVersionHistoryReset(PostVersionList postVersionList) {
for (PostVersion currentPostVersion : postVersionList) {
for (PostBlockVersion currentPostBlockVersion : currentPostVersion.getPostBlocks()) {
assertNull(currentPostBlockVersion.getRootPostBlockId());
assertNull(currentPostBlockVersion.getPredPostBlockId());
assertNull(currentPostBlockVersion.getPredEqual());
assertNull(currentPostBlockVersion.getPredSimilarity());
assertEquals(0, currentPostBlockVersion.getPredCount());
assertEquals(0, currentPostBlockVersion.getSuccCount());
assertNull(currentPostBlockVersion.getPred());
assertNull(currentPostBlockVersion.getSucc());
assertNull(currentPostBlockVersion.getRootPostBlock());
assertNull(currentPostBlockVersion.getPredDiff());
assertTrue(currentPostBlockVersion.isAvailable());
assertEquals(0, currentPostBlockVersion.getMatchingPredecessors().size());
assertEquals(0, currentPostBlockVersion.getPredecessorSimilarities().size());
assertEquals(-1.0, currentPostBlockVersion.getMaxSimilarity());
assertEquals(-1.0, currentPostBlockVersion.getSimilarityThreshold());
assertFalse(currentPostBlockVersion.isLifeSpanExtracted());
}
}
}
private void testPostBlockVersionHistoryProcessed(PostVersionList postVersionList) {
for (PostVersion currentPostVersion : postVersionList) {
for (PostBlockVersion currentPostBlockVersion : currentPostVersion.getPostBlocks()) {
assertNotNull(currentPostBlockVersion.getRootPostBlockId());
assertNotNull(currentPostBlockVersion.getRootPostBlock());
}
}
}
@Test
void testMetricComparisonManager() {
MetricComparisonManager manager = MetricComparisonManager.create(
"TestManager", pathToPostIdList, pathToPostHistory, pathToGroundTruth, false
);
assertEquals(manager.getPostVersionLists().size(), manager.getPostGroundTruth().size());
assertThat(manager.getPostVersionLists().keySet(), is(manager.getPostGroundTruth().keySet()));
manager.addSimilarityMetric(
"fourGramOverlap",
de.unitrier.st.stringsimilarity.set.Variants::fourGramOverlap
);
manager.addSimilarityThreshold(0.6);
manager.compareMetrics();
List<Integer> postHistoryIds_3758880 = manager.getPostGroundTruth().get(3758880).getPostHistoryIds();
MetricComparison comparison_a_3758880 = manager.getMetricComparison(3758880, "fourGramOverlap", 0.6);
/* compare a 3758880 */
// the first version never has predecessors
int version_1_id = postHistoryIds_3758880.get(0);
assertEquals(new Integer(0), comparison_a_3758880.getTruePositivesText().get(version_1_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalsePositivesText().get(version_1_id));
assertEquals(new Integer(0), comparison_a_3758880.getTrueNegativesText().get(version_1_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalseNegativesText().get(version_1_id));
assertEquals(new Integer(0), comparison_a_3758880.getTruePositivesCode().get(version_1_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalsePositivesCode().get(version_1_id));
assertEquals(new Integer(0), comparison_a_3758880.getTrueNegativesCode().get(version_1_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalseNegativesCode().get(version_1_id));
// second version
int version_2_id = postHistoryIds_3758880.get(1);
assertEquals(new Integer(1), comparison_a_3758880.getTruePositivesText().get(version_2_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalsePositivesText().get(version_2_id));
assertEquals(new Integer(5), comparison_a_3758880.getTrueNegativesText().get(version_2_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalseNegativesText().get(version_2_id));
assertEquals(new Integer(2), comparison_a_3758880.getTruePositivesCode().get(version_2_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalsePositivesCode().get(version_2_id));
assertEquals(new Integer(4), comparison_a_3758880.getTrueNegativesCode().get(version_2_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalseNegativesCode().get(version_2_id));
// version 3 to 10 only for text blocks (they don't differ)
for (int i = 2; i < 10; i++) {
int version_2_to_11_id_text = postHistoryIds_3758880.get(i);
assertEquals(new Integer(2), comparison_a_3758880.getTruePositivesText().get(version_2_to_11_id_text));
assertEquals(new Integer(0), comparison_a_3758880.getFalsePositivesText().get(version_2_to_11_id_text));
assertEquals(new Integer(2), comparison_a_3758880.getTrueNegativesText().get(version_2_to_11_id_text));
assertEquals(new Integer(0), comparison_a_3758880.getFalseNegativesText().get(version_2_to_11_id_text));
}
int version_11_id_text = postHistoryIds_3758880.get(10);
assertEquals(new Integer(2), comparison_a_3758880.getTruePositivesText().get(version_11_id_text));
assertEquals(new Integer(0), comparison_a_3758880.getFalsePositivesText().get(version_11_id_text));
assertEquals(new Integer(4), comparison_a_3758880.getTrueNegativesText().get(version_11_id_text));
assertEquals(new Integer(0), comparison_a_3758880.getFalseNegativesText().get(version_11_id_text));
// version 3 and 6 for code
List<Integer> versionsOfType1 = Arrays.asList(2, 5);
for (Integer version : versionsOfType1) {
int version_3_or_6_id = postHistoryIds_3758880.get(version);
assertEquals(new Integer(1), comparison_a_3758880.getTruePositivesCode().get(version_3_or_6_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalsePositivesCode().get(version_3_or_6_id));
assertEquals(new Integer(2), comparison_a_3758880.getTrueNegativesCode().get(version_3_or_6_id));
assertEquals(new Integer(1), comparison_a_3758880.getFalseNegativesCode().get(version_3_or_6_id));
}
// version 4,5,7,8,9,10,11 for code
List<Integer> versionsOfType2 = Arrays.asList(3, 4, 6, 7, 8, 9, 10);
for (Integer version : versionsOfType2) {
int version_i_id = postHistoryIds_3758880.get(version);
assertEquals(new Integer(2), comparison_a_3758880.getTruePositivesCode().get(version_i_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalsePositivesCode().get(version_i_id));
assertEquals(new Integer(2), comparison_a_3758880.getTrueNegativesCode().get(version_i_id));
assertEquals(new Integer(0), comparison_a_3758880.getFalseNegativesCode().get(version_i_id));
}
/* compare a 22037280 */
List<Integer> postHistoryIds_22037280 = manager.getPostGroundTruth().get(22037280).getPostHistoryIds();
MetricComparison comparison_a_22037280 = manager.getMetricComparison(22037280, "fourGramOverlap", 0.6);
version_1_id = postHistoryIds_22037280.get(0);
assertEquals(new Integer(0), comparison_a_22037280.getTruePositivesText().get(version_1_id));
assertEquals(new Integer(0), comparison_a_22037280.getFalsePositivesText().get(version_1_id));
assertEquals(new Integer(0), comparison_a_22037280.getTrueNegativesText().get(version_1_id));
assertEquals(new Integer(0), comparison_a_22037280.getFalseNegativesText().get(version_1_id));
assertEquals(new Integer(0), comparison_a_22037280.getTruePositivesCode().get(version_1_id));
assertEquals(new Integer(0), comparison_a_22037280.getFalsePositivesCode().get(version_1_id));
assertEquals(new Integer(0), comparison_a_22037280.getTrueNegativesCode().get(version_1_id));
assertEquals(new Integer(0), comparison_a_22037280.getFalseNegativesCode().get(version_1_id));
for (int i = 1; i < postHistoryIds_22037280.size(); i++) {
version_2_id = postHistoryIds_22037280.get(i);
assertEquals(new Integer(3), comparison_a_22037280.getTruePositivesText().get(version_2_id));
assertEquals(new Integer(0), comparison_a_22037280.getFalsePositivesText().get(version_2_id));
assertEquals(new Integer(6), comparison_a_22037280.getTrueNegativesText().get(version_2_id));
assertEquals(new Integer(0), comparison_a_22037280.getFalseNegativesText().get(version_2_id));
assertEquals(new Integer(2), comparison_a_22037280.getTruePositivesCode().get(version_2_id));
assertEquals(new Integer(0), comparison_a_22037280.getFalsePositivesCode().get(version_2_id));
assertEquals(new Integer(2), comparison_a_22037280.getTrueNegativesCode().get(version_2_id));
assertEquals(new Integer(0), comparison_a_22037280.getFalseNegativesCode().get(version_2_id));
}
manager.writeToCSV(outputDir);
}
@Test
void testGetPossibleConnections() {
int postId = 22037280;
PostVersionList a_22037280 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2);
PostGroundTruth a_22037280_gt = PostGroundTruth.readFromCSV(pathToGroundTruth, postId);
assertEquals(7, a_22037280.size());
assertEquals(a_22037280.getPossibleConnections(), a_22037280_gt.getPossibleConnections());
for (PostVersion postVersion : a_22037280) {
assertEquals(3, postVersion.getTextBlocks().size());
assertEquals(2, postVersion.getCodeBlocks().size());
}
Set<Integer> set = new HashSet<>();
assertEquals(0, a_22037280.getPossibleConnections(set));
int possibleTextConnections = a_22037280.getPossibleConnections(TextBlockVersion.getPostBlockTypeIdFilter());
assertEquals(6 * 9, possibleTextConnections); // 6 versions with each 9=3*3 possible text connections
int possibleCodeConnections = a_22037280.getPossibleConnections(CodeBlockVersion.getPostBlockTypeIdFilter());
assertEquals(6 * 4, possibleCodeConnections); // 6 versions with each 4=2*2 possible code connections
int possibleConnections = a_22037280.getPossibleConnections(PostBlockVersion.getAllPostBlockTypeIdFilters());
assertEquals(6 * 4 + 6 * 9, possibleConnections); // 6 versions with each 4=2*2 and 9=3*3 possible connections
// compare results of getPossibleConnections() for PostVersion and PostVersionList
possibleConnections = 0;
for (PostVersion current : a_22037280) {
possibleConnections += current.getPossibleConnections();
}
assertEquals(a_22037280.getPossibleConnections(), possibleConnections);
// check if post version pred and succ assignments are also set in case post history has not been processed yet
possibleConnections = 0;
for (PostVersion current : a_22037280) {
possibleConnections += current.getPossibleConnections();
}
assertEquals(a_22037280.getPossibleConnections(), possibleConnections);
}
@Test
void testNumberOfPredecessorsOfOnePost() {
// this checks that a block cannot become the predecessor of more than one block, even with a very low similarity threshold.
int postId = 3758880;
PostVersionList a_3758880 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2, false);
a_3758880.processVersionHistory(
Config.DEFAULT
.withTextSimilarityMetric(de.unitrier.st.stringsimilarity.set.Variants::twoGramDice)
.withCodeSimilarityThreshold(0.01),
TextBlockVersion.getPostBlockTypeIdFilter());
List<TextBlockVersion> textBlocks = a_3758880.getLast().getTextBlocks();
assertEquals(new Integer(1), textBlocks.get(0).getPred().getLocalId());
assertEquals(new Integer(1), textBlocks.get(0).getLocalId());
assertEquals(new Integer(3), textBlocks.get(1).getPred().getLocalId());
assertEquals(new Integer(3), textBlocks.get(1).getLocalId());
assertEquals(null, textBlocks.get(2).getPred());
assertEquals(new Integer(5), textBlocks.get(2).getLocalId());
}
@Test
void testNumberOfPredecessorsComputedMetric() {
List<PostVersionList> postVersionLists = PostVersionList.readFromDirectory(pathToPostHistory);
for (PostVersionList postVersionList : postVersionLists) {
postVersionList.processVersionHistory(
Config.DEFAULT
.withTextSimilarityMetric(de.unitrier.st.stringsimilarity.set.Variants::twoGramDice)
.withCodeSimilarityMetric(de.unitrier.st.stringsimilarity.set.Variants::twoGramDice)
.withTextSimilarityThreshold(0.01)
.withCodeSimilarityThreshold(0.01)
);
for (PostVersion postVersion : postVersionList) {
List<PostBlockVersion> postBlocks = postVersion.getPostBlocks();
for (int i = 0; i < postBlocks.size(); i++) {
if (postBlocks.get(i).getPred() == null)
continue;
for (int j = i + 1; j < postBlocks.size(); j++) {
if (postBlocks.get(j).getPred() == null || postBlocks.get(i) instanceof TextBlockVersion != postBlocks.get(j) instanceof TextBlockVersion)
continue;
assertNotEquals(postBlocks.get(i).getPred().getLocalId(), postBlocks.get(j).getPred().getLocalId());
}
}
}
}
}
@Test
void checkWhetherPostVersionListConnectionsWillBeResetRight() {
int postId = 3758880;
PostVersionList a_3758880 = PostVersionList.readFromCSV(pathToPostHistory, postId, 2, false);
assertNull(a_3758880.get(1).getTextBlocks().get(0).getPred()); // no predecessors have been set yet so it is null
a_3758880.processVersionHistory();
assertNotNull(a_3758880.get(1).getTextBlocks().get(0).getPred()); // predecessors have been set so it is not null
}
}
|
package dr.inference.operators;
import java.util.ArrayList;
import java.util.List;
import cern.colt.matrix.impl.DenseDoubleMatrix2D;
import cern.colt.matrix.linalg.SingularValueDecomposition;
import dr.inference.model.CompoundParameter;
import dr.inference.model.MatrixParameter;
import dr.inference.model.Parameter;
import dr.math.MathUtils;
import dr.math.matrixAlgebra.CholeskyDecomposition;
import dr.math.matrixAlgebra.IllegalDimension;
import dr.math.matrixAlgebra.SymmetricMatrix;
import dr.util.Transform;
import dr.xml.AbstractXMLObjectParser;
import dr.xml.AttributeRule;
import dr.xml.ElementRule;
import dr.xml.XMLObject;
import dr.xml.XMLObjectParser;
import dr.xml.XMLParseException;
import dr.xml.XMLSyntaxRule;
/**
* @author Guy Baele
* @author Marc A. Suchard
*/
public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercableOperator {
public static final String AVMVN_OPERATOR = "adaptableVarianceMultivariateNormalOperator";
public static final String SCALE_FACTOR = "scaleFactor";
public static final String BETA = "beta";
public static final String INITIAL = "initial";
public static final String BURNIN = "burnin";
public static final String UPDATE_EVERY = "updateEvery";
public static final String FORM_XTX = "formXtXInverse";
public static final String COEFFICIENT = "coefficient";
public static final String SKIP_RANK_CHECK = "skipRankCheck";
public static final String TRANSFORM = "transform";
public static final String TYPE = "type";
public static final boolean DEBUG = false;
public static final boolean PRINT_FULL_MATRIX = false;
private double scaleFactor;
private double beta;
private int iterations, updates, initial, burnin, every;
private final Parameter parameter;
private final Transform[] transformations;
private final int[] transformationSizes;
private final int dim;
// private final double constantFactor;
private double[] oldMeans, newMeans;
final double[][] matrix;
private double[][] empirical;
private double[][] cholesky;
// temporary storage, allocated once.
private double[] epsilon;
private double[][] proposal;
public AdaptableVarianceMultivariateNormalOperator(Parameter parameter, Transform[] transformations, int[] transformationSizes, double scaleFactor, double[][] inMatrix,
double weight, double beta, int initial, int burnin, int every, CoercionMode mode, boolean isVarianceMatrix, boolean skipRankCheck) {
super(mode);
this.scaleFactor = scaleFactor;
this.parameter = parameter;
this.transformations = transformations;
this.transformationSizes = transformationSizes;
this.beta = beta;
this.iterations = 0;
this.updates = 0;
setWeight(weight);
dim = parameter.getDimension();
// constantFactor = Math.pow(2.38, 2) / ((double) dim); // not necessary because scaleFactor is auto-tuned
this.initial = initial;
this.burnin = burnin;
this.every = every;
this.empirical = new double[dim][dim];
this.oldMeans = new double[dim];
this.newMeans = new double[dim];
this.epsilon = new double[dim];
this.proposal = new double[dim][dim];
if (!skipRankCheck) {
SingularValueDecomposition svd = new SingularValueDecomposition(new DenseDoubleMatrix2D(inMatrix));
if (inMatrix[0].length != svd.rank()) {
throw new RuntimeException("Variance matrix in AdaptableVarianceMultivariateNormalOperator is not of full rank");
}
}
if (isVarianceMatrix) {
matrix = inMatrix;
} else {
matrix = formXtXInverse(inMatrix);
}
/*System.err.println("matrix initialization: ");
for (int i = 0; i < matrix.length; i++) {
for (int j = 0; j < matrix.length; j++) {
System.err.print(matrix[i][j] + " ");
}
System.err.println();
}*/
try {
cholesky = (new CholeskyDecomposition(matrix)).getL();
} catch (IllegalDimension illegalDimension) {
throw new RuntimeException("Unable to decompose matrix in AdaptableVarianceMultivariateNormalOperator");
}
}
public AdaptableVarianceMultivariateNormalOperator(Parameter parameter, Transform[] transformations, int[] transformationSizes, double scaleFactor,
MatrixParameter varMatrix, double weight, double beta, int initial, int burnin, int every, CoercionMode mode, boolean isVariance, boolean skipRankCheck) {
this(parameter, transformations, transformationSizes, scaleFactor, varMatrix.getParameterAsMatrix(), weight, beta, initial, burnin, every, mode, isVariance, skipRankCheck);
}
private double[][] formXtXInverse(double[][] X) {
int N = X.length;
int P = X[0].length;
double[][] matrix = new double[P][P];
for (int i = 0; i < P; i++) {
for (int j = 0; j < P; j++) {
double total = 0; // X contains doubles; accumulating into an int would silently truncate
for (int k = 0; k < N; k++) {
total += X[k][i] * X[k][j];
}
matrix[i][j] = total;
}
}
// Take inverse
matrix = new SymmetricMatrix(matrix).inverse().toComponents();
return matrix;
}
//act as if population mean is known
private double calculateCovariance(int number, double currentMatrixEntry, double[] values, int firstIndex, int secondIndex) {
// number will always be > 1 here
/*double result = currentMatrixEntry * (number - 1);
result += (values[firstIndex] * values[secondIndex]);
result += ((number - 1) * oldMeans[firstIndex] * oldMeans[secondIndex] - number * newMeans[firstIndex] * newMeans[secondIndex]);
result /= ((double) number);*/
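// The active variant below uses the unbiased (sample) estimator. With n = number of updates,
// cov_n = ((n - 2) * cov_{n-1} + x_i * x_j + (n - 1) * mu_{n-1,i} * mu_{n-1,j} - n * mu_{n,i} * mu_{n,j}) / (n - 1),
// which follows from expanding the running sum of cross-products S_n = (n - 2) * cov_{n-1}
// + (n - 1) * mu_{n-1,i} * mu_{n-1,j} + x_i * x_j and then forming cov_n = (S_n - n * mu_{n,i} * mu_{n,j}) / (n - 1).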
double result = currentMatrixEntry * (number - 2);
result += (values[firstIndex] * values[secondIndex]);
result += ((number - 1) * oldMeans[firstIndex] * oldMeans[secondIndex] - number * newMeans[firstIndex] * newMeans[secondIndex]);
result /= ((double)(number - 1));
return result;
}
public double doOperation() {
iterations++;
if (DEBUG) {
System.err.println("\nAVMVN Iteration: " + iterations);
System.err.println("Using AdaptableVarianceMultivariateNormalOperator: " + iterations + " for " + parameter.getParameterName());
System.err.println("Old parameter values:");
for (int i = 0; i < dim; i++) {
System.err.println(parameter.getParameterValue(i));
}
}
double[] x = parameter.getParameterValues();
//transform to the appropriate scale
double[] transformedX = new double[dim];
/*for (int i = 0; i < dim; i++) {
transformedX[i] = transformations[i].transform(x[i]);
}*/
//iterate over transformation sizes rather than number of parameters
//as a transformation might impact multiple parameters
int currentIndex = 0;
for (int i = 0; i < transformationSizes.length; i++) {
if (DEBUG) {
System.err.println("currentIndex = " + currentIndex);
System.err.println("transformationSizes[i] = " + transformationSizes[i]);
}
if (transformationSizes[i] > 1) {
System.arraycopy(transformations[i].transform(x, currentIndex, currentIndex + transformationSizes[i] - 1),0,transformedX,currentIndex,transformationSizes[i]);
} else {
transformedX[currentIndex] = transformations[i].transform(x[currentIndex]);
if (DEBUG) {
System.err.println("x[" + currentIndex + "] = " + x[currentIndex] + " -> " + transformedX[currentIndex]);
}
}
currentIndex += transformationSizes[i];
}
if (DEBUG) {
System.err.println("Old transformed parameter values:");
for (int i = 0; i < dim; i++) {
System.err.println(transformedX[i]);
}
}
//accumulate the log Jacobian of the transformations (enters the MH acceptance ratio)
double logJacobian = 0.0;
//change this: make a rule for when iterations == burnin
if (iterations > 1 && iterations > burnin) {
if (DEBUG) {
System.err.println(" AVMVN iterations > burnin");
}
if (iterations > (burnin+1)) {
if (iterations % every == 0) {
updates++;
if (DEBUG) {
System.err.println("updates = " + updates);
}
//first recalculate the means using recursion
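//mu_n = ((n - 1) * mu_{n-1} + x) / n, i.e. the standard running-mean update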
for (int i = 0; i < dim; i++) {
newMeans[i] = ((oldMeans[i] * (updates - 1)) + transformedX[i]) / updates;
}
if (updates > 1) {
//here we can simply use the double[][] matrix
for (int i = 0; i < dim; i++) {
for (int j = i; j < dim; j++) {
empirical[i][j] = calculateCovariance(updates, empirical[i][j], transformedX, i, j);
empirical[j][i] = empirical[i][j];
}
}
}
if (DEBUG) {
System.err.println("Old means:");
for (int i = 0; i < dim; i++) {
System.err.println(oldMeans[i]);
}
System.err.println("New means:");
for (int i = 0; i < dim; i++) {
System.err.println(newMeans[i]);
}
System.err.println("Empirical covariance matrix:");
for (int i = 0; i < dim; i++) {
for (int j = 0; j < dim; j++) {
System.err.print(empirical[i][j] + " ");
}
System.err.println();
}
}
}
/*if (iterations == 17) {
System.exit(0);
}*/
} else if (iterations == (burnin+1)) {
//updates++;
//i.e. iterations == burnin+1, i.e. first sample for C_t
//this will not be reached when burnin is set to 0
for (int i = 0; i < dim; i++) {
//oldMeans[i] = transformedX[i];
//newMeans[i] = transformedX[i];
oldMeans[i] = 0.0;
newMeans[i] = 0.0;
}
for (int i = 0; i < dim; i++) {
for (int j = 0; j < dim; j++) {
empirical[i][j] = 0.0;
}
}
}
} else if (iterations == 1) {
if (DEBUG) {
System.err.println("\niterations == 1");
}
//System.err.println("Iteration: " + iterations);
//iterations == 1
for (int i = 0; i < dim; i++) {
//oldMeans[i] = transformedX[i];
//newMeans[i] = transformedX[i];
oldMeans[i] = 0.0;
newMeans[i] = 0.0;
}
for (int i = 0; i < dim; i++) {
for (int j = 0; j < dim; j++) {
empirical[i][j] = 0.0;
proposal[i][j] = matrix[i][j];
}
}
}
for (int i = 0; i < dim; i++) {
epsilon[i] = scaleFactor * MathUtils.nextGaussian();
}
if (iterations > initial) {
if (DEBUG) {
System.err.println(" iterations > initial");
}
if (iterations % every == 0) {
// TODO: For speed, it may not be necessary to update decomposition each and every iteration
//double start = System.nanoTime();
// double[][] proposal = new double[dim][dim];
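// Shrink the empirical covariance toward the initial matrix (adaptive proposal with
// fixed mixing weight beta): proposal = (1 - beta) * empirical + beta * matrix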
for (int i = 0; i < dim; i++) {
for (int j = i; j < dim; j++) { // symmetric matrix
proposal[j][i] = proposal[i][j] = (1 - beta) * // constantFactor * /* auto-tuning using scaleFactor */
empirical[i][j] + beta * matrix[i][j];
}
}
// not necessary for first test phase, but will need to be performed when covariance matrix is being updated
try {
cholesky = (new CholeskyDecomposition(proposal)).getL();
} catch (IllegalDimension illegalDimension) {
throw new RuntimeException("Unable to decompose matrix in AdaptableVarianceMultivariateNormalOperator");
}
//double end = System.nanoTime();
//double baseResult = end - start;
//System.err.println("Cholesky decomposition took: " + baseResult);
}
}
if (DEBUG) {
System.err.println(" Drawing new values");
}
/*for (int i = 0; i < dim; i++) {
for (int j = i; j < dim; j++) {
transformedX[i] += cholesky[j][i] * epsilon[j];
// caution: decomposition returns lower triangular
}
if (MULTI) {
parameter.setParameterValueQuietly(i, transformations[i].inverse(transformedX[i]));
} else {
if (transformationSizes[i] > 1) {
throw new RuntimeException("Transformations on more than 1 parameter value should be set quietly");
} else {
parameter.setParameterValue(i, transformations[i].inverse(transformedX[i]));
}
}
//this should be correct
//logJacobian += transformations[i].getLogJacobian(parameter.getParameterValue(i)) - transformations[i].getLogJacobian(x[i]);
logJacobian += transformations[i].getLogJacobian(x[i]) - transformations[i].getLogJacobian(parameter.getParameterValue(i));
}*/
for (int i = 0; i < dim; i++) {
for (int j = i; j < dim; j++) {
transformedX[i] += cholesky[j][i] * epsilon[j];
// caution: decomposition returns lower triangular
}
}
if (DEBUG) {
System.err.println("\nTransformed X values:");
for (int i = 0; i < dim; i++) {
System.err.println(transformedX[i]);
}
System.err.println();
}
//iterate over transformation sizes rather than number of parameters
//as a transformation might impact multiple parameters
currentIndex = 0;
for (int i = 0; i < transformationSizes.length; i++) {
if (DEBUG) {
System.err.println("currentIndex = " + currentIndex);
System.err.println("transformationSizes[i] = " + transformationSizes[i]);
}
if (MULTI) {
if (transformationSizes[i] > 1) {
double[] temp = transformations[i].inverse(transformedX, currentIndex, currentIndex + transformationSizes[i] - 1);
for (int k = 0; k < temp.length; k++) {
parameter.setParameterValueQuietly(currentIndex + k, temp[k]);
}
logJacobian += transformations[i].getLogJacobian(x, currentIndex, currentIndex + transformationSizes[i] - 1) - transformations[i].getLogJacobian(temp, 0, transformationSizes[i] - 1);
} else {
parameter.setParameterValueQuietly(currentIndex, transformations[i].inverse(transformedX[currentIndex]));
logJacobian += transformations[i].getLogJacobian(x[currentIndex]) - transformations[i].getLogJacobian(parameter.getParameterValue(currentIndex));
}
if (DEBUG) {
System.err.println("Current logJacobian = " + logJacobian);
}
} else {
if (transformationSizes[i] > 1) {
//TODO: figure out if this is really a problem ...
throw new RuntimeException("Transformations on more than 1 parameter value should be set quietly");
} else {
parameter.setParameterValue(currentIndex, transformations[i].inverse(transformedX[currentIndex]));
logJacobian += transformations[i].getLogJacobian(x[currentIndex]) - transformations[i].getLogJacobian(parameter.getParameterValue(currentIndex));
}
if (DEBUG) {
System.err.println("Current logJacobian = " + logJacobian);
}
}
currentIndex += transformationSizes[i];
}
if (DEBUG) {
System.err.println("Proposed parameter values:");
for (int i = 0; i < dim; i++) {
System.err.println(x[i] + " -> " + parameter.getValue(i));
}
System.err.println("LogJacobian: " + logJacobian);
}
if (MULTI) {
parameter.fireParameterChangedEvent(); // Signal once.
}
if (iterations % every == 0) {
if (DEBUG) {
System.err.println(" Copying means");
}
//copy new means to old means for next update iteration
//System.arraycopy(newMeans, 0, oldMeans, 0, dim);
double[] tmp = oldMeans;
oldMeans = newMeans;
newMeans = tmp; // faster to swap pointers
}
//System.err.println("scale factor: " + scaleFactor);
/*System.err.println("New parameter values:");
for (int i = 0; i < dim; i++) {
System.err.println(parameter.getParameterValue(i));
}*/
//System.err.println("log(Jacobian): " + logJacobian);
//return 0.0;
return logJacobian;
}
public String toString() {
return AVMVN_OPERATOR + "(" + parameter.getParameterName() + ")";
}
public static final boolean MULTI = true;
//Methods needed when using TwoPhaseOperator(Parser)
public Parameter getParameter() {
return this.parameter;
}
public void provideSamples(ArrayList<ArrayList<Double>> parameterSamples) {
if (DEBUG) {
System.err.println("AVMVN operator parameter length: " + parameter.getDimension());
System.err.println("provideSamples argument length: " + parameterSamples.size());
}
if (parameter.getDimension() != parameterSamples.size()) {
throw new RuntimeException("Dimension mismatch in AVMVN Operator: inconsistent parameter dimensions");
} else {
int lowestNumberOfSamples = parameterSamples.get(0).size();
for (int i = 0; i < parameterSamples.size(); i++) {
if (parameterSamples.get(i).size() < lowestNumberOfSamples) {
lowestNumberOfSamples = parameterSamples.get(i).size();
}
}
if (DEBUG) {
System.err.println("lowest number of samples: " + lowestNumberOfSamples);
}
//set number of iterations of AVMVN operator
this.iterations = lowestNumberOfSamples;
this.updates = lowestNumberOfSamples;
this.beta = 0.0;
//set means based on provided samples, but take into account transformation(s)
for (int i = 0; i < parameterSamples.size(); i++) {
for (int j = 0; j < lowestNumberOfSamples; j++) {
newMeans[i] += transformations[i].transform(parameterSamples.get(i).get(j));
//parameterSamples.get(i).get(j);
}
newMeans[i] /= (double)lowestNumberOfSamples;
}
if (DEBUG) {
System.err.println();
for (int i = 0; i < parameterSamples.size(); i++) {
System.err.println("Mean " + i + ": " + newMeans[i]);
}
}
//set covariance matrix based on provided samples, but take into account transformation(s)
for (int i = 0; i < dim; i++) {
for (int j = i; j < dim; j++) {
for (int k = 0; k < lowestNumberOfSamples; k++) {
empirical[i][j] += transformations[i].transform(parameterSamples.get(i).get(k))*transformations[i].transform(parameterSamples.get(j).get(k));
}
empirical[i][j] /= (double)lowestNumberOfSamples;
empirical[i][j] -= newMeans[i]*newMeans[j];
empirical[j][i] = empirical[i][j];
}
}
if (DEBUG) {
System.err.println();
for (int i = 0; i < dim; i++) {
for (int j = 0; j < dim; j++) {
System.err.print(empirical[i][j] + " ");
}
System.err.println();
}
}
}
}
//MCMCOperator INTERFACE
public final String getOperatorName() {
String output = "adaptableVarianceMultivariateNormal(" + parameter.getParameterName() + ")";
if (PRINT_FULL_MATRIX) {
output += "\nMeans:\n";
for (int i = 0; i < dim; i++) {
output += newMeans[i] + " ";
}
output += "\nVariance-covariance matrix:\n";
for (int i = 0; i < dim; i++) {
for (int j = 0; j < dim; j++) {
output += empirical[i][j] + " ";
}
output += "\n";
}
}
return output;
}
public double getCoercableParameter() {
return Math.log(scaleFactor);
}
public void setCoercableParameter(double value) {
scaleFactor = Math.exp(value);
}
public double getRawParameter() {
return scaleFactor;
}
public double getScaleFactor() {
return scaleFactor;
}
public double getTargetAcceptanceProbability() {
return 0.234;
}
public double getMinimumAcceptanceLevel() {
return 0.1;
}
public double getMaximumAcceptanceLevel() {
return 0.4;
}
public double getMinimumGoodAcceptanceLevel() {
return 0.20;
}
public double getMaximumGoodAcceptanceLevel() {
return 0.30;
}
public final String getPerformanceSuggestion() {
double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
double targetProb = getTargetAcceptanceProbability();
dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
double sf = OperatorUtils.optimizeWindowSize(scaleFactor, prob, targetProb);
if (prob < getMinimumGoodAcceptanceLevel()) {
return "Try setting scaleFactor to about " + formatter.format(sf);
} else if (prob > getMaximumGoodAcceptanceLevel()) {
return "Try setting scaleFactor to about " + formatter.format(sf);
} else return "";
}
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return AVMVN_OPERATOR;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
if (DEBUG) {
System.err.println("\nParsing AdaptableVarianceMultivariateNormalOperator.");
}
CoercionMode mode = CoercionMode.parseMode(xo);
double weight = xo.getDoubleAttribute(WEIGHT);
double beta = xo.getDoubleAttribute(BETA);
int initial = xo.getIntegerAttribute(INITIAL);
double scaleFactor = xo.getDoubleAttribute(SCALE_FACTOR);
double coefficient = xo.getDoubleAttribute(COEFFICIENT);
int burnin = 0;
int every = 1;
if (xo.hasAttribute(BURNIN)) {
burnin = xo.getIntegerAttribute(BURNIN);
}
if (burnin > initial || burnin < 0) {
throw new XMLParseException("burnin must be non-negative and smaller than the initial period");
}
if (xo.hasAttribute(UPDATE_EVERY)) {
every = xo.getIntegerAttribute(UPDATE_EVERY);
}
if (every <= 0) {
throw new XMLParseException("covariance matrix needs to be updated at least every single iteration");
}
if (scaleFactor <= 0.0) {
throw new XMLParseException("scaleFactor must be greater than 0.0");
}
boolean formXtXInverse = xo.getAttribute(FORM_XTX, false);
Transform.ParsedTransform pt = (Transform.ParsedTransform) xo.getChild(Transform.ParsedTransform.class);
boolean oldXML = pt.parameters == null;
Parameter parameter;
Transform[] transformations;
int[] transformationSizes;
int transformationSizeCounter = 0;
if (!oldXML) {
// if there are no ParsedTransform elements then use the new parser syntax
if (DEBUG) {
System.err.println("New parser");
}
CompoundParameter allParameters = new CompoundParameter("allParameters");
List<Transform> transformList = new ArrayList<Transform>();
List<Integer> transformCountList = new ArrayList<Integer>();
for (Object co : xo.getChildren()) {
if (co instanceof Parameter) {
// parameters in the body of the object are assumed to have no transform
transformList.add(Transform.NONE);
Parameter param = (Parameter) co;
allParameters.addParameter(param);
transformCountList.add(param.getDimension());
} else if (co instanceof Transform.ParsedTransform) {
Transform.ParsedTransform parsedTransform = (Transform.ParsedTransform)co;
transformList.add(parsedTransform.transform);
int dim = 0;
for (Parameter param : parsedTransform.parameters) {
allParameters.addParameter(param);
dim += param.getDimension();
}
transformCountList.add(dim);
} else {
throw new XMLParseException("Unknown element in " + AVMVN_OPERATOR);
}
}
parameter = allParameters;
transformations = new Transform[parameter.getDimension()];
transformationSizes = new int[parameter.getDimension()];
/*transformations = transformList.toArray(transformations);
for (int i = 0; i < transformCountList.size(); i++) {
transformationSizes[i] = transformCountList.get(i);
}*/
if (DEBUG) {
for (int i = 0; i < transformList.size(); i++) {
System.err.println(i + " " + transformList.get(i));
}
for (int i = 0; i < transformCountList.size(); i++) {
System.err.println(i + " " + transformCountList.get(i));
}
}
int index = 0;
for (int i = 0; i < transformCountList.size(); i++) {
if (!transformList.get(i).getTransformName().equals(Transform.LOG_CONSTRAINED_SUM.getTransformName())) {
for (int j = 0; j < transformCountList.get(i); j++) {
transformations[index] = transformList.get(i);
transformationSizes[index] = 1;
index++;
transformationSizeCounter++;
}
} else {
transformations[index] = transformList.get(i);
transformationSizes[index] = transformCountList.get(i);
index++;
transformationSizeCounter++;
}
}
} else {
if (DEBUG) {
System.err.println("Old parser");
}
// assume old parser syntax for backwards compatibility
parameter = (Parameter)xo.getChild(Parameter.class);
transformations = new Transform[parameter.getDimension()];
transformationSizes = new int[parameter.getDimension()];
for (int i = 0; i < xo.getChildCount(); i++) {
Object child = xo.getChild(i);
if (child instanceof Transform.ParsedTransform) {
Transform.ParsedTransform thisObject = (Transform.ParsedTransform) child;
if (DEBUG) {
System.err.println(thisObject.transform.getTransformName());
}
if (thisObject.transform.getTransformName().equals(Transform.LOG_CONSTRAINED_SUM.getTransformName())) {
transformations[transformationSizeCounter] = thisObject.transform;
transformationSizes[transformationSizeCounter] = thisObject.end - thisObject.start;
if (DEBUG) {
System.err.println("Transformation size (logConstrainedSum) = " + transformationSizes[transformationSizeCounter]);
}
transformationSizeCounter++;
} else {
for (int j = thisObject.start; j < thisObject.end; ++j) {
transformations[transformationSizeCounter] = thisObject.transform;
transformationSizes[transformationSizeCounter] = 1;
if (DEBUG) {
System.err.println("Transformation size = " + transformationSizes[transformationSizeCounter]);
}
transformationSizeCounter++;
}
}
}
}
}
//compact the transformation and size arrays down to transformationSizeCounter entries
if (DEBUG) {
System.err.println("\nCleaning up transformation and size arrays");
System.err.println("transformationSizeCounter = " + transformationSizeCounter);
}
int[] temp = new int[transformationSizeCounter];
Transform[] tempTransform = new Transform[transformationSizeCounter];
for (int i = 0; i < temp.length; i++) {
temp[i] = transformationSizes[i];
tempTransform[i] = transformations[i];
if (transformationSizes[i] == 0 || temp[i] == 0) {
throw new XMLParseException("Transformation size 0 encountered");
}
}
transformationSizes = temp;
transformations = tempTransform;
//varMatrix needs to be initialized
int dim = parameter.getDimension();
if (DEBUG) {
System.err.println("Dimension: " + dim);
}
if (initial <= 2 * dim) {
initial = 2 * dim;
}
Parameter[] init = new Parameter[dim];
for (int i = 0; i < dim; i++) {
init[i] = new Parameter.Default(dim, 0.0);
}
for (int i = 0; i < dim; i++) {
init[i].setParameterValue(i, Math.pow(coefficient, 2) / ((double) dim));
}
MatrixParameter varMatrix = new MatrixParameter(null, init);
if (DEBUG) {
System.err.println("\nChecking transformation array contents");
for (int i = 0; i < transformations.length; i++) {
System.err.println(transformations[i].getTransformName());
}
System.err.println("\nChecking size array contents");
for (int i = 0; i < transformationSizes.length; i++) {
System.err.print(transformationSizes[i] + " ");
}
System.err.println();
}
// Make sure varMatrix is square and dim(varMatrix) = dim(parameter)
if (!formXtXInverse) {
if (varMatrix.getColumnDimension() != varMatrix.getRowDimension())
throw new XMLParseException("The variance matrix is not square");
}
if (varMatrix.getColumnDimension() != parameter.getDimension())
throw new XMLParseException("The parameter and variance matrix have differing dimensions");
/*java.util.logging.Logger.getLogger("dr.inference").info("\nCreating the adaptable variance multivariate normal operator:" +
"\n beta = " + beta + "\n initial = " + initial + "\n burnin = " + burnin + "\n every = " + every +
"\n If you use this operator, please cite: " +
" Guy Baele, Philippe Lemey, Marc A. Suchard. 2016. In preparation.");*/
boolean skipRankCheck = xo.getAttribute(SKIP_RANK_CHECK, false);
return new AdaptableVarianceMultivariateNormalOperator(parameter, transformations, transformationSizes, scaleFactor, varMatrix, weight, beta, initial, burnin, every,
mode, !formXtXInverse, skipRankCheck);
}
public String getParserDescription() {
return "This element returns an adaptable variance multivariate normal operator on a given parameter.";
}
public Class getReturnType() {
return AdaptableVarianceMultivariateNormalOperator.class;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
// The original file is truncated at this point; the rule set below is a minimal
// reconstruction inferred from the attributes and child elements read in parseXMLObject above.
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule(SCALE_FACTOR),
AttributeRule.newDoubleRule(BETA),
AttributeRule.newDoubleRule(COEFFICIENT),
AttributeRule.newIntegerRule(INITIAL),
AttributeRule.newIntegerRule(BURNIN, true),
AttributeRule.newIntegerRule(UPDATE_EVERY, true),
AttributeRule.newDoubleRule(WEIGHT),
AttributeRule.newBooleanRule(FORM_XTX, true),
AttributeRule.newBooleanRule(SKIP_RANK_CHECK, true),
new ElementRule(Parameter.class, 0, Integer.MAX_VALUE),
new ElementRule(Transform.ParsedTransform.class, 0, Integer.MAX_VALUE)
};
};
}
|
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class Cky {
public static void main(String[] args) {
Cky program = new Cky();
program.run(args);
}
private void run(String[] args){
if(args.length!=2){
System.err.print("Please supply two arguments, rule file and string");
System.exit(1);
}
Set<Rules> rulesSet = new HashSet<>();
try{
Reader reader = new Reader();
reader.read(args[0]);
rulesSet = reader.rules();
}catch (IOException e){
System.err.print(e.getMessage());
}
int len = args[1].length();
String input = args[1];
Set[][] grid = new HashSet[len][len];
for(int x=0;x<len;x++){
for(int y=0;y<len;y++){
grid[x][y] = new HashSet<>();
}
}
for(int i = 0; i < len ; i ++ ){
char a = input.charAt(i);
String string = String.valueOf(a);
Set<String> foundStrings = parseString(string,rulesSet);
grid[i][0] = foundStrings;
}
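// CKY recurrence: grid[i][j] holds the nonterminals deriving the substring of length j+1
// starting at position i. Each cell is filled by splitting the span after x characters and
// joining grid[i][x-1] (left part) with grid[i+x][j-x] (right part), then looking the
// concatenated symbol pairs up in the rule set.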
for(int j = 1; j < len; j++){
for(int i = 0; i < len ; i++) {
int x = 1;
Set<String> joint = new HashSet<>();
while (x <= j ) {
if( i+x >= len) break;
Set<String> joined = joinSet( grid[i][x-1], grid[i+x][j-x] );
for(String seek : joined){
joint.addAll(parseString(seek, rulesSet));
}
x++;
}
grid[i][j] = joint;
}
}
printSet(grid,len);
}
private void printSet(Set<String>[][] sets, int len){
// width of column x is the size of its largest cell (assumes single-character nonterminal symbols)
int[] colWidth = new int[len];
for(int y = 0; y<len;y++){
for(int x = 0; x<len ; x++){
if(colWidth[x]<sets[x][y].size()){
colWidth[x] = sets[x][y].size();
}
}
}
System.out.println();
for(int y = len - 1; y >= 0; y--){
for(int x = 0; x<len; x++){
int size = colWidth[x] - sets[x][y].size();
int fillerStart = size/2;
int fillerEnd = size - fillerStart;
System.out.print(" "+filler(fillerStart)+ setToString(sets[x][y]) + filler(fillerEnd) + " " );
}
System.out.println();
}
}
private String filler(int n){
if(n>0) {
char[] array = new char[n];
Arrays.fill(array,' ');
return new String(array);
}
return "";
}
private String setToString(Set<String> set){
StringBuilder s = new StringBuilder();
for(String string : set){
s.append(string);
}
return s.toString();
}
private Set<String> joinSet(Set<String> set1, Set<String> set2){
Set<String> combination = new HashSet<>();
// if either sub-span has no derivation, the concatenated span has none either
if(set1.size()==0 || set2.size()==0) return combination;
for(String s1 : set1){
for(String s2 : set2){
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(s1);
stringBuilder.append(s2);
combination.add(stringBuilder.toString());
}
}
return combination;
}
private Set<String> parseString(String string, Set<Rules> rulesSet){
Set<String> output = new HashSet<>();
for(Rules rules : rulesSet){
for(String neighbor : rules.getNeighbors()){
if(neighbor.equals(string)){
output.add( rules.getNode());
}
}
}
return output;
}
}
|
package org.jboss.xnio;
import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadFactory;
import java.net.SocketAddress;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import org.jboss.xnio.channels.TcpChannel;
import org.jboss.xnio.channels.UdpChannel;
import org.jboss.xnio.channels.StreamChannel;
import org.jboss.xnio.channels.StreamSourceChannel;
import org.jboss.xnio.channels.StreamSinkChannel;
/**
* The XNIO entry point class.
*/
public abstract class Xnio implements Closeable {
private static final String NIO_IMPL_CLASS_NAME = "org.jboss.xnio.nio.XnioNioImpl";
private static final String PROVIDER_CLASS;
static {
String provider = System.getProperty("xnio.provider", NIO_IMPL_CLASS_NAME);
PROVIDER_CLASS = provider;
}
/**
* Create an instance of the default XNIO provider. The class name of this provider can be specified through the
* {@code xnio.provider} system property. Any failure to create the XNIO provider will cause an {@code java.io.IOException}
* to be thrown.
*
* @return an XNIO instance
* @throws IOException if the XNIO provider could not be created
*/
public static Xnio create() throws IOException {
return createInstance(PROVIDER_CLASS, new Class[0]);
}
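// Illustrative usage sketch (the provider class name below is hypothetical):
//   System.setProperty("xnio.provider", "com.example.MyXnioImpl"); // optional override
//   Xnio xnio = Xnio.create();
//   try {
//       // ... obtain servers, connectors, or pipes via the factory methods below ...
//   } finally {
//       xnio.close();
//   }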
private static Xnio createInstance(String className, Class[] paramTypes, Object... params) throws IOException {
try {
Class<? extends Xnio> xnioClass = Class.forName(className).asSubclass(Xnio.class);
final Constructor<? extends Xnio> constructor = xnioClass.getConstructor(paramTypes);
return constructor.newInstance(params);
} catch (ClassCastException e) {
final IOException ioe = new IOException("The XNIO provider class \"" + PROVIDER_CLASS + "\" is not really an XNIO provider");
ioe.initCause(e);
throw ioe;
} catch (ClassNotFoundException e) {
final IOException ioe = new IOException("The XNIO provider class \"" + PROVIDER_CLASS + "\" was not found");
ioe.initCause(e);
throw ioe;
} catch (IllegalAccessException e) {
final IOException ioe = new IOException("The XNIO provider class \"" + PROVIDER_CLASS + "\" was not instantiable due to an illegal access exception");
ioe.initCause(e);
throw ioe;
} catch (InstantiationException e) {
final IOException ioe = new IOException("The XNIO provider class \"" + PROVIDER_CLASS + "\" was not instantiable due to an instantiation exception");
ioe.initCause(e);
throw ioe;
} catch (InvocationTargetException e) {
final Throwable cause = e.getCause();
if (cause instanceof IOException) {
throw (IOException) cause;
} else if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
} else {
final IOException ioe = new IOException("The XNIO provider class \"" + PROVIDER_CLASS + "\" constructor threw an exception");
ioe.initCause(cause);
throw ioe;
}
} catch (NoSuchMethodException e) {
final IOException ioe = new IOException("The XNIO provider class \"" + PROVIDER_CLASS + "\" does not have an accessible constructor with the required signature");
ioe.initCause(e);
throw ioe;
} catch (ExceptionInInitializerError e) {
final IOException ioe = new IOException("The XNIO provider class \"" + PROVIDER_CLASS + "\" was not instantiable due to an error in initialization");
ioe.initCause(e);
throw ioe;
}
}
/**
* Construct an XNIO provider instance.
*/
protected Xnio() {
}
/**
* Create an NIO-based XNIO provider. A direct executor is used for the handlers; the provider will create its own
* selector threads, of which there will be one reader thread, one writer thread, and one connect/accept thread.
*
* @return a new provider
* @throws IOException if an I/O error occurs while starting the service
* @deprecated Will be removed in 1.2. Please use {@link #create()} instead, or use the constructor of your desired implementation.
*/
@Deprecated
public static Xnio createNio() throws IOException {
return createInstance(NIO_IMPL_CLASS_NAME, new Class[0]);
}
@Deprecated
public static Xnio createNio(final int readSelectorThreads, final int writeSelectorThreads, final int connectSelectorThreads) throws IOException, IllegalArgumentException {
return createInstance(NIO_IMPL_CLASS_NAME, new Class[] { Integer.class, Integer.class, Integer.class }, Integer.valueOf(readSelectorThreads), Integer.valueOf(writeSelectorThreads), Integer.valueOf(connectSelectorThreads));
}
@Deprecated
public static Xnio createNio(Executor handlerExecutor, final int readSelectorThreads, final int writeSelectorThreads, final int connectSelectorThreads) throws IOException, IllegalArgumentException {
return createInstance(NIO_IMPL_CLASS_NAME, new Class[] { Executor.class, Integer.class, Integer.class, Integer.class }, handlerExecutor, Integer.valueOf(readSelectorThreads), Integer.valueOf(writeSelectorThreads), Integer.valueOf(connectSelectorThreads));
}
@Deprecated
public static Xnio createNio(Executor handlerExecutor, ThreadFactory selectorThreadFactory, final int readSelectorThreads, final int writeSelectorThreads, final int connectSelectorThreads) throws IOException, IllegalArgumentException {
return createInstance(NIO_IMPL_CLASS_NAME, new Class[] { Executor.class, ThreadFactory.class, Integer.class, Integer.class, Integer.class }, handlerExecutor, selectorThreadFactory, Integer.valueOf(readSelectorThreads), Integer.valueOf(writeSelectorThreads), Integer.valueOf(connectSelectorThreads));
}
/**
* Create a TCP server. The server will bind to the given addresses.
*
* @param executor the executor to use to execute the handlers
* @param handlerFactory the factory which will produce handlers for inbound connections
* @param bindAddresses the addresses to bind to
*
* @return a factory that can be used to configure the new TCP server
*/
public ConfigurableFactory<Closeable> createTcpServer(Executor executor, IoHandlerFactory<? super TcpChannel> handlerFactory, SocketAddress... bindAddresses) {
throw new UnsupportedOperationException("TCP Server");
}
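// Sketch of the intended configure-then-create pattern. ConfigurableFactory's creation
// method name is assumed here; executor, handlerFactory, and the address are placeholders:
//   ConfigurableFactory<Closeable> serverFactory =
//           xnio.createTcpServer(executor, handlerFactory, new InetSocketAddress(8080));
//   Closeable server = serverFactory.create(); // assumed factory method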
/**
* Create a TCP server. The server will bind to the given addresses. The provider's executor will be used to
* execute handler methods.
*
* @param handlerFactory the factory which will produce handlers for inbound connections
* @param bindAddresses the addresses to bind to
*
* @return a factory that can be used to configure the new TCP server
*/
public ConfigurableFactory<Closeable> createTcpServer(IoHandlerFactory<? super TcpChannel> handlerFactory, SocketAddress... bindAddresses) {
throw new UnsupportedOperationException("TCP Server");
}
/**
* Create a configurable TCP connector. The connector can be configured before it is actually created.
*
* @param executor the executor to use to execute the handlers
*
* @return a factory that can be used to configure the new TCP connector
*/
public ConfigurableFactory<CloseableTcpConnector> createTcpConnector(Executor executor) {
throw new UnsupportedOperationException("TCP Connector");
}
/**
* Create a configurable TCP connector. The connector can be configured before it is actually created. The
* provider's executor will be used to execute handler methods.
*
* @return a factory that can be used to configure the new TCP connector
*/
public ConfigurableFactory<CloseableTcpConnector> createTcpConnector() {
throw new UnsupportedOperationException("TCP Connector");
}
/**
* Create a UDP server. The server will bind to the given addresses. The UDP server can be configured to be
* multicast-capable; this should only be done if multicast is needed, since some providers have a performance
* penalty associated with multicast.
*
* @param multicast {@code true} if the UDP server should be multicast-capable
* @param executor the executor to use to execute the handlers
* @param handlerFactory the factory which will produce handlers for each channel
* @param bindAddresses the addresses to bind
*
* @return a factory that can be used to configure the new UDP server
*/
public ConfigurableFactory<Closeable> createUdpServer(Executor executor, boolean multicast, IoHandlerFactory<? super UdpChannel> handlerFactory, SocketAddress... bindAddresses) {
throw new UnsupportedOperationException("UDP Server");
}
/**
* Create a UDP server. The server will bind to the given addresses. The provider's executor will be used to
* execute handler methods.
*
* @param multicast {@code true} if the UDP server should be multicast-capable
* @param handlerFactory the factory which will produce handlers for each channel
* @param bindAddresses the addresses to bind
*
* @return a factory that can be used to configure the new UDP server
*/
public ConfigurableFactory<Closeable> createUdpServer(boolean multicast, IoHandlerFactory<? super UdpChannel> handlerFactory, SocketAddress... bindAddresses) {
throw new UnsupportedOperationException("UDP Server");
}
/**
* Create a pipe "server". The provided handler factory is used to supply handlers for the server "end" of the
* pipe. The returned channel source is used to establish connections to the server.
*
* @param handlerFactory the server handler factory
*
* @return the client channel source
*/
public ChannelSource<StreamChannel> createPipeServer(IoHandlerFactory<? super StreamChannel> handlerFactory) {
throw new UnsupportedOperationException("Pipe Server");
}
/**
* Create a one-way pipe "server". The provided handler factory is used to supply handlers for the server "end" of
* the pipe. The returned channel source is used to establish connections to the server. The data flows from the
* server to the client.
*
* @param handlerFactory the server handler factory
*
* @return the client channel source
*/
public ChannelSource<StreamSourceChannel> createPipeSourceServer(IoHandlerFactory<? super StreamSinkChannel> handlerFactory) {
throw new UnsupportedOperationException("One-way Pipe Server");
}
/**
* Create a one-way pipe "server". The provided handler factory is used to supply handlers for the server "end" of
* the pipe. The returned channel source is used to establish connections to the server. The data flows from the
* server to the client.
*
* @param handlerFactory the server handler factory
*
* @return the client channel source
*/
public ChannelSource<StreamSinkChannel> createPipeSinkServer(IoHandlerFactory<? super StreamSourceChannel> handlerFactory) {
throw new UnsupportedOperationException("One-way Pipe Server");
}
/**
* Create a single pipe connection.
*
* @param leftHandler the handler for the "left" side of the pipe
* @param rightHandler the handler for the "right" side of the pipe
*
* @return the future connection
*/
public IoFuture<Closeable> createPipeConnection(IoHandler<? super StreamChannel> leftHandler, IoHandler<? super StreamChannel> rightHandler) {
throw new UnsupportedOperationException("Pipe Connection");
}
/**
* Create a single one-way pipe connection.
*
* @param sourceHandler the handler for the "source" side of the pipe
* @param sinkHandler the handler for the "sink" side of the pipe
*
* @return the future connection
*/
public IoFuture<Closeable> createOneWayPipeConnection(IoHandler<? super StreamSourceChannel> sourceHandler, IoHandler<? super StreamSinkChannel> sinkHandler) {
throw new UnsupportedOperationException("One-way Pipe Connection");
}
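/* Usage sketch (illustrative only): connecting two in-VM handlers with a pipe.
leftHandler and rightHandler are hypothetical IoHandler<StreamChannel>
implementations supplied by the caller; IoFuture is assumed to expose a
blocking get() that waits for the connection to be established.

    IoFuture<Closeable> future = provider.createPipeConnection(leftHandler, rightHandler);
    Closeable connection = future.get();
*/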
/**
* Close this XNIO provider. Calling this method more than once has no additional effect.
*
* @throws IOException if an I/O error occurs while closing the provider
*/
public abstract void close() throws IOException;
}
|
package org.endeavourhealth.queuereader;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.gson.JsonSyntaxException;
import org.apache.commons.csv.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.endeavourhealth.common.cache.ObjectMapperPool;
import org.endeavourhealth.common.config.ConfigManager;
import org.endeavourhealth.common.fhir.PeriodHelper;
import org.endeavourhealth.common.utility.FileHelper;
import org.endeavourhealth.common.utility.FileInfo;
import org.endeavourhealth.common.utility.JsonSerializer;
import org.endeavourhealth.common.utility.SlackHelper;
import org.endeavourhealth.core.configuration.ConfigDeserialiser;
import org.endeavourhealth.core.configuration.PostMessageToExchangeConfig;
import org.endeavourhealth.core.configuration.QueueReaderConfiguration;
import org.endeavourhealth.core.csv.CsvHelper;
import org.endeavourhealth.core.database.dal.DalProvider;
import org.endeavourhealth.core.database.dal.admin.ServiceDalI;
import org.endeavourhealth.core.database.dal.admin.models.Service;
import org.endeavourhealth.core.database.dal.audit.ExchangeBatchDalI;
import org.endeavourhealth.core.database.dal.audit.ExchangeDalI;
import org.endeavourhealth.core.database.dal.audit.models.*;
import org.endeavourhealth.core.database.dal.ehr.ResourceDalI;
import org.endeavourhealth.core.database.dal.ehr.models.ResourceWrapper;
import org.endeavourhealth.core.database.rdbms.ConnectionManager;
import org.endeavourhealth.core.exceptions.TransformException;
import org.endeavourhealth.core.fhirStorage.FhirStorageService;
import org.endeavourhealth.core.fhirStorage.JsonServiceInterfaceEndpoint;
import org.endeavourhealth.core.messaging.pipeline.components.PostMessageToExchange;
import org.endeavourhealth.core.queueing.QueueHelper;
import org.endeavourhealth.core.xml.TransformErrorSerializer;
import org.endeavourhealth.core.xml.transformError.TransformError;
import org.endeavourhealth.transform.common.*;
import org.endeavourhealth.transform.emis.EmisCsvToFhirTransformer;
import org.endeavourhealth.transform.emis.csv.helpers.EmisCsvHelper;
import org.hibernate.internal.SessionImpl;
import org.hl7.fhir.instance.model.EpisodeOfCare;
import org.hl7.fhir.instance.model.Patient;
import org.hl7.fhir.instance.model.ResourceType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import java.io.*;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.*;
public class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
public static void main(String[] args) throws Exception {
String configId = args[0];
LOG.info("Initialising config manager");
ConfigManager.initialize("queuereader", configId);
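//note: args[0] doubles as the config ID and, for the one-off maintenance
//routines below, as the command keyword; each routine calls System.exit()
//when it finishes, so normal queue-reader startup only happens when no
//routine matches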
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEncounters")) {
String table = args[1];
fixEncounters(table);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateHomertonSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createHomertonSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateAdastraSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createAdastraSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateVisionSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createVisionSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateTppSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createTppSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateBartsSubset")) {
String sourceDirPath = args[1];
UUID serviceUuid = UUID.fromString(args[2]);
UUID systemUuid = UUID.fromString(args[3]);
String samplePatientsFile = args[4];
createBartsSubset(sourceDirPath, serviceUuid, systemUuid, samplePatientsFile);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsOrgs")) {
String serviceId = args[1];
fixBartsOrgs(serviceId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestPreparedStatements")) {
String url = args[1];
String user = args[2];
String pass = args[3];
String serviceId = args[4];
testPreparedStatements(url, user, pass, serviceId);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("ApplyEmisAdminCaches")) {
applyEmisAdminCaches();
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixSubscribers")) {
fixSubscriberDbs();
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertExchangeBody")) {
String systemId = args[1];
convertExchangeBody(UUID.fromString(systemId));
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixReferrals")) {
fixReferralRequests();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateNewSearchTable")) {
String table = args[1];
populateNewSearchTable(table);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsEscapes")) {
String filePath = args[1];
fixBartsEscapedFiles(filePath);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToInbound")) {
String serviceId = args[1];
String systemId = args[2];
String filePath = args[3];
postToInboundFromFile(UUID.fromString(serviceId), UUID.fromString(systemId), filePath);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixDisabledExtract")) {
String serviceId = args[1];
String systemId = args[2];
String sharedStoragePath = args[3];
String tempDir = args[4];
fixDisabledEmisExtract(serviceId, systemId, sharedStoragePath, tempDir);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestSlack")) {
testSlack();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToInbound")) {
String serviceId = args[1];
boolean all = Boolean.parseBoolean(args[2]);
postToInbound(UUID.fromString(serviceId), all);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPatientSearch")) {
String serviceId = args[1];
fixPatientSearch(serviceId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("Exit")) {
String exitCode = args[1];
LOG.info("Exiting with error code " + exitCode);
int exitCodeInt = Integer.parseInt(exitCode);
System.exit(exitCodeInt);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("RunSql")) {
String host = args[1];
String username = args[2];
String password = args[3];
String sqlFile = args[4];
runSql(host, username, password, sqlFile);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateProtocolQueue")) {
String serviceId = null;
if (args.length > 1) {
serviceId = args[1];
}
String startingExchangeId = null;
if (args.length > 2) {
startingExchangeId = args[2];
}
populateProtocolQueue(serviceId, startingExchangeId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindEncounterTerms")) {
String path = args[1];
String outputPath = args[2];
findEncounterTerms(path, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindEmisStartDates")) {
String path = args[1];
String outputPath = args[2];
findEmisStartDates(path, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ExportHl7Encounters")) {
String sourceCsvPpath = args[1];
String outputPath = args[2];
exportHl7Encounters(sourceCsvPpath, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixExchangeBatches")) {
fixExchangeBatches();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindCodes")) {
findCodes();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindDeletedOrgs")) {
findDeletedOrgs();
System.exit(0);
}*/
if (args.length != 1) {
LOG.error("Usage: queuereader config_id");
return;
}
LOG.info("
LOG.info("EDS Queue Reader " + configId);
LOG.info("
LOG.info("Fetching queuereader configuration");
String configXml = ConfigManager.getConfiguration(configId);
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
/*LOG.info("Registering shutdown hook");
registerShutdownHook();*/
// Instantiate rabbit handler
LOG.info("Creating EDS queue reader");
RabbitHandler rabbitHandler = new RabbitHandler(configuration, configId);
// Begin consume
rabbitHandler.start();
LOG.info("EDS Queue reader running (kill file location " + TransformConfig.instance().getKillFileLocation() + ")");
}
private static void convertExchangeBody(UUID systemUuid) {
try {
LOG.info("Converting exchange bodies for system " + systemUuid);
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemUuid, Integer.MAX_VALUE);
if (exchanges.isEmpty()) {
continue;
}
LOG.debug("doing " + service.getName() + " with " + exchanges.size() + " exchanges");
for (Exchange exchange: exchanges) {
String exchangeBody = exchange.getBody();
try {
//already done
ExchangePayloadFile[] files = JsonSerializer.deserialize(exchangeBody, ExchangePayloadFile[].class);
continue;
} catch (JsonSyntaxException ex) {
//if the JSON can't be parsed, then it'll be the old format of body that isn't JSON
}
List<ExchangePayloadFile> newFiles = new ArrayList<>();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
for (String file: files) {
ExchangePayloadFile fileObj = new ExchangePayloadFile();
String fileWithoutSharedStorage = file.substring(TransformConfig.instance().getSharedStoragePath().length()+1);
fileObj.setPath(fileWithoutSharedStorage);
//size
List<FileInfo> fileInfos = FileHelper.listFilesInSharedStorageWithInfo(file);
for (FileInfo info: fileInfos) {
if (info.getFilePath().equals(file)) {
long size = info.getSize();
fileObj.setSize(Long.valueOf(size));
}
}
//type
if (systemUuid.toString().equalsIgnoreCase("991a9068-01d3-4ff2-86ed-249bd0541fb3") //live
|| systemUuid.toString().equalsIgnoreCase("55c08fa5-ef1e-4e94-aadc-e3d6adc80774")) { //dev
//emis
String name = FilenameUtils.getName(file);
String[] toks = name.split("_");
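//Emis file names appear to be underscore-delimited, with tokens 1 and 2 giving
//the content type (e.g. a hypothetical 123_CareRecord_Consultation_20180101.csv
//yields type "CareRecord_Consultation")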
String first = toks[1];
String second = toks[2];
fileObj.setType(first + "_" + second);
/* } else if (systemUuid.toString().equalsIgnoreCase("e517fa69-348a-45e9-a113-d9b59ad13095")
|| systemUuid.toString().equalsIgnoreCase("b0277098-0b6c-4d9d-86ef-5f399fb25f34")) { //dev
//cerner
String name = FilenameUtils.getName(file);
if (Strings.isNullOrEmpty(name)) {
continue;
}
try {
String type = BartsCsvToFhirTransformer.identifyFileType(name);
fileObj.setType(type);
} catch (Exception ex2) {
throw new Exception("Failed to parse file name " + name + " on exchange " + exchange.getId());
}*/
} else {
throw new Exception("Unknown system ID " + systemUuid);
}
newFiles.add(fileObj);
}
String json = JsonSerializer.serialize(newFiles);
exchange.setBody(json);
exchangeDal.save(exchange);
}
}
LOG.info("Finished Converting exchange bodies for system " + systemUuid);
} catch (Exception ex) {
LOG.error("", ex);
}
}
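/* Illustrative example of the conversion performed above (all values hypothetical):
old-style body: a plain, non-JSON list of file paths, e.g.
    /sharedStorage/emis/123_CareRecord_Consultation_20180101.csv
new-style body: a JSON array of ExchangePayloadFile objects, e.g.
    [{"path":"emis/123_CareRecord_Consultation_20180101.csv","size":12345,"type":"CareRecord_Consultation"}]
*/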
/*private static void fixBartsOrgs(String serviceId) {
try {
LOG.info("Fixing Barts orgs");
ResourceDalI dal = DalProvider.factoryResourceDal();
List<ResourceWrapper> wrappers = dal.getResourcesByService(UUID.fromString(serviceId), ResourceType.Organization.toString());
LOG.debug("Found " + wrappers.size() + " resources");
int done = 0;
int fixed = 0;
for (ResourceWrapper wrapper: wrappers) {
if (!wrapper.isDeleted()) {
List<ResourceWrapper> history = dal.getResourceHistory(UUID.fromString(serviceId), wrapper.getResourceType(), wrapper.getResourceId());
ResourceWrapper mostRecent = history.get(0);
String json = mostRecent.getResourceData();
Organization org = (Organization)FhirSerializationHelper.deserializeResource(json);
String odsCode = IdentifierHelper.findOdsCode(org);
if (Strings.isNullOrEmpty(odsCode)
&& org.hasIdentifier()) {
boolean hasBeenFixed = false;
for (Identifier identifier: org.getIdentifier()) {
if (identifier.getSystem().equals(FhirIdentifierUri.IDENTIFIER_SYSTEM_ODS_CODE)
&& identifier.hasId()) {
odsCode = identifier.getId();
identifier.setValue(odsCode);
identifier.setId(null);
hasBeenFixed = true;
}
}
if (hasBeenFixed) {
String newJson = FhirSerializationHelper.serializeResource(org);
mostRecent.setResourceData(newJson);
LOG.debug("Fixed Organization " + org.getId());
saveResourceWrapper(UUID.fromString(serviceId), mostRecent);
fixed ++;
}
}
}
done ++;
if (done % 100 == 0) {
LOG.debug("Done " + done + ", Fixed " + fixed);
}
}
LOG.debug("Done " + done + ", Fixed " + fixed);
LOG.info("Finished Barts orgs");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void testPreparedStatements(String url, String user, String pass, String serviceId) {
try {
LOG.info("Testing Prepared Statements");
LOG.info("Url: " + url);
LOG.info("user: " + user);
LOG.info("pass: " + pass);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
//create connection
Properties props = new Properties();
props.setProperty("user", user);
props.setProperty("password", pass);
Connection conn = DriverManager.getConnection(url, props);
String sql = "SELECT * FROM internal_id_map WHERE service_id = ? AND id_type = ? AND source_id = ?";
long start = System.currentTimeMillis();
for (int i=0; i<10000; i++) {
PreparedStatement ps = null;
try {
ps = conn.prepareStatement(sql);
ps.setString(1, serviceId);
ps.setString(2, "MILLPERSIDtoMRN");
ps.setString(3, UUID.randomUUID().toString());
ResultSet rs = ps.executeQuery();
while (rs.next()) {
//do nothing
}
} finally {
if (ps != null) {
ps.close();
}
}
}
long end = System.currentTimeMillis();
LOG.info("Took " + (end-start) + " ms");
//close connection
conn.close();
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixEncounters(String table) {
LOG.info("Fixing encounters from " + table);
try {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
Date cutoff = sdf.parse("2018-03-14 11:42");
EntityManager entityManager = ConnectionManager.getAdminEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
List<UUID> serviceIds = new ArrayList<>();
Map<UUID, UUID> hmSystems = new HashMap<>();
String sql = "SELECT service_id, system_id FROM " + table + " WHERE done = 0";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
UUID serviceId = UUID.fromString(rs.getString(1));
UUID systemId = UUID.fromString(rs.getString(2));
serviceIds.add(serviceId);
hmSystems.put(serviceId, systemId);
}
rs.close();
statement.close();
entityManager.close();
for (UUID serviceId: serviceIds) {
UUID systemId = hmSystems.get(serviceId);
LOG.info("Doing service " + serviceId + " and system " + systemId);
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, systemId);
List<UUID> exchangeIdsToProcess = new ArrayList<>();
for (UUID exchangeId: exchangeIds) {
List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId);
for (ExchangeTransformAudit audit: audits) {
Date d = audit.getStarted();
if (d.after(cutoff)) {
exchangeIdsToProcess.add(exchangeId);
break;
}
}
}
Map<String, ReferenceList> consultationNewChildMap = new HashMap<>();
Map<String, ReferenceList> observationChildMap = new HashMap<>();
Map<String, ReferenceList> newProblemChildren = new HashMap<>();
for (UUID exchangeId: exchangeIdsToProcess) {
Exchange exchange = exchangeDal.getExchange(exchangeId);
String[] files = ExchangeHelper.parseExchangeBodyIntoFileList(exchange.getBody());
String version = EmisCsvToFhirTransformer.determineVersion(files);
List<String> interestingFiles = new ArrayList<>();
for (String file: files) {
if (file.indexOf("CareRecord_Consultation") > -1
|| file.indexOf("CareRecord_Observation") > -1
|| file.indexOf("CareRecord_Diary") > -1
|| file.indexOf("Prescribing_DrugRecord") > -1
|| file.indexOf("Prescribing_IssueRecord") > -1
|| file.indexOf("CareRecord_Problem") > -1) {
interestingFiles.add(file);
}
}
files = interestingFiles.toArray(new String[0]);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers);
String dataSharingAgreementGuid = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(parsers);
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchangeId, dataSharingAgreementGuid, true);
Consultation consultationParser = (Consultation)parsers.get(Consultation.class);
while (consultationParser.nextRecord()) {
CsvCell consultationGuid = consultationParser.getConsultationGuid();
CsvCell patientGuid = consultationParser.getPatientGuid();
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
consultationNewChildMap.put(sourceId, new ReferenceList());
}
Problem problemParser = (Problem)parsers.get(Problem.class);
while (problemParser.nextRecord()) {
CsvCell problemGuid = problemParser.getObservationGuid();
CsvCell patientGuid = problemParser.getPatientGuid();
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
newProblemChildren.put(sourceId, new ReferenceList());
}
//run this pre-transformer to pre-cache some stuff in the csv helper, which
//is needed when working out the resource type that each observation would be saved as
ObservationPreTransformer.transform(version, parsers, null, csvHelper);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
CsvCell observationGuid = observationParser.getObservationGuid();
CsvCell patientGuid = observationParser.getPatientGuid();
String obSourceId = EmisCsvHelper.createUniqueId(patientGuid, observationGuid);
CsvCell codeId = observationParser.getCodeId();
if (codeId.isEmpty()) {
continue;
}
ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper);
UUID obUuid = IdHelper.getEdsResourceId(serviceId, resourceType, obSourceId);
if (obUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + resourceType + " and source ID " + obSourceId);
//resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper);
}
Reference obReference = ReferenceHelper.createReference(resourceType, obUuid.toString());
CsvCell consultationGuid = observationParser.getConsultationGuid();
if (!consultationGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
ReferenceList referenceList = consultationNewChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
consultationNewChildMap.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
CsvCell problemGuid = observationParser.getProblemGuid();
if (!problemGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
CsvCell parentObGuid = observationParser.getParentObservationGuid();
if (!parentObGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, parentObGuid);
ReferenceList referenceList = observationChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
observationChildMap.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
}
Diary diaryParser = (Diary)parsers.get(Diary.class);
while (diaryParser.nextRecord()) {
CsvCell consultationGuid = diaryParser.getConsultationGuid();
if (!consultationGuid.isEmpty()) {
CsvCell diaryGuid = diaryParser.getDiaryGuid();
CsvCell patientGuid = diaryParser.getPatientGuid();
String diarySourceId = EmisCsvHelper.createUniqueId(patientGuid, diaryGuid);
UUID diaryUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.ProcedureRequest, diarySourceId);
if (diaryUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.ProcedureRequest + " and source ID " + diarySourceId);
}
Reference diaryReference = ReferenceHelper.createReference(ResourceType.ProcedureRequest, diaryUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
ReferenceList referenceList = consultationNewChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
consultationNewChildMap.put(sourceId, referenceList);
}
referenceList.add(diaryReference);
}
}
IssueRecord issueRecordParser = (IssueRecord)parsers.get(IssueRecord.class);
while (issueRecordParser.nextRecord()) {
CsvCell problemGuid = issueRecordParser.getProblemObservationGuid();
if (!problemGuid.isEmpty()) {
CsvCell issueRecordGuid = issueRecordParser.getIssueRecordGuid();
CsvCell patientGuid = issueRecordParser.getPatientGuid();
String issueRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, issueRecordGuid);
UUID issueRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationOrder, issueRecordSourceId);
if (issueRecordUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.MedicationOrder + " and source ID " + issueRecordSourceId);
}
Reference issueRecordReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, issueRecordUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(issueRecordReference);
}
}
DrugRecord drugRecordParser = (DrugRecord)parsers.get(DrugRecord.class);
while (drugRecordParser.nextRecord()) {
CsvCell problemGuid = drugRecordParser.getProblemObservationGuid();
if (!problemGuid.isEmpty()) {
CsvCell drugRecordGuid = drugRecordParser.getDrugRecordGuid();
CsvCell patientGuid = drugRecordParser.getPatientGuid();
String drugRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, drugRecordGuid);
UUID drugRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationStatement, drugRecordSourceId);
if (drugRecordUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.MedicationStatement + " and source ID " + drugRecordSourceId);
}
Reference drugRecordReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, drugRecordUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(drugRecordReference);
}
}
for (AbstractCsvParser parser : parsers.values()) {
try {
parser.close();
} catch (IOException ex) {
//don't worry if this fails, as we're done anyway
}
}
}
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
LOG.info("Found " + consultationNewChildMap.size() + " Encounters to fix");
for (String encounterSourceId: consultationNewChildMap.keySet()) {
ReferenceList childReferences = consultationNewChildMap.get(encounterSourceId);
//map to UUID
UUID encounterId = IdHelper.getEdsResourceId(serviceId, ResourceType.Encounter, encounterSourceId);
if (encounterId == null) {
continue;
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Encounter.toString(), encounterId);
if (history.isEmpty()) {
continue;
//throw new Exception("Empty history for Encounter " + encounterId);
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (wrapper.getResourceData() != null) {
Encounter encounter = (Encounter) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
EncounterBuilder encounterBuilder = new EncounterBuilder(encounter);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder);
List<Reference> previousChildren = containedListBuilder.getContainedListItems();
childReferences.add(previousChildren);
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
EncounterBuilder encounterBuilder = new EncounterBuilder(encounter);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder);
containedListBuilder.addReferences(childReferences);
String newJson = FhirSerializationHelper.serializeResource(encounter);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
}
LOG.info("Found " + observationChildMap.size() + " Parent Observations to fix");
for (String sourceId: observationChildMap.keySet()) {
ReferenceList childReferences = observationChildMap.get(sourceId);
//map to UUID
ResourceType resourceType = null;
UUID resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.Observation, sourceId);
if (resourceId != null) {
resourceType = ResourceType.Observation;
} else {
resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.DiagnosticReport, sourceId);
if (resourceId != null) {
resourceType = ResourceType.DiagnosticReport;
} else {
continue;
}
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceType.toString(), resourceId);
if (history.isEmpty()) {
//throw new Exception("Empty history for " + resourceType + " " + resourceId);
continue;
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (resourceType == ResourceType.Observation) {
if (wrapper.getResourceData() != null) {
Observation observation = (Observation) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
if (observation.hasRelated()) {
for (Observation.ObservationRelatedComponent related : observation.getRelated()) {
Reference reference = related.getTarget();
childReferences.add(reference);
}
}
}
} else {
if (wrapper.getResourceData() != null) {
DiagnosticReport report = (DiagnosticReport) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
if (report.hasResult()) {
for (Reference reference : report.getResult()) {
childReferences.add(reference);
}
}
}
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
boolean changed = false;
if (resourceType == ResourceType.Observation) {
ObservationBuilder resourceBuilder = new ObservationBuilder((Observation)resource);
for (int i=0; i<childReferences.size(); i++) {
Reference reference = childReferences.getReference(i);
if (resourceBuilder.addChildObservation(reference)) {
changed = true;
}
}
} else {
DiagnosticReportBuilder resourceBuilder = new DiagnosticReportBuilder((DiagnosticReport)resource);
for (int i=0; i<childReferences.size(); i++) {
Reference reference = childReferences.getReference(i);
if (resourceBuilder.addResult(reference)) {
changed = true;
}
}
}
if (changed) {
String newJson = FhirSerializationHelper.serializeResource(resource);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
LOG.info("Found " + newProblemChildren.size() + " Problems to fix");
for (String sourceId: newProblemChildren.keySet()) {
ReferenceList childReferences = newProblemChildren.get(sourceId);
//map to UUID
UUID conditionId = IdHelper.getEdsResourceId(serviceId, ResourceType.Condition, sourceId);
if (conditionId == null) {
continue;
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Condition.toString(), conditionId);
if (history.isEmpty()) {
continue;
//throw new Exception("Empty history for Condition " + conditionId);
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (wrapper.getResourceData() != null) {
Condition previousVersion = (Condition) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
ConditionBuilder conditionBuilder = new ConditionBuilder(previousVersion);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder);
List<Reference> previousChildren = containedListBuilder.getContainedListItems();
childReferences.add(previousChildren);
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
ConditionBuilder conditionBuilder = new ConditionBuilder(condition);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder);
containedListBuilder.addReferences(childReferences);
String newJson = FhirSerializationHelper.serializeResource(condition);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
}
//mark as done
String updateSql = "UPDATE " + table + " SET done = 1 WHERE service_id = '" + serviceId + "';";
entityManager = ConnectionManager.getAdminEntityManager();
session = (SessionImpl)entityManager.getDelegate();
connection = session.connection();
statement = connection.createStatement();
entityManager.getTransaction().begin();
statement.executeUpdate(updateSql);
entityManager.getTransaction().commit();
}
* For each practice:
Go through all files processed since 14 March
Cache all links as above
Cache all Encounters saved too
For each Encounter referenced at all:
Retrieve latest version from resource current
Retrieve version prior to 14 March
Update current version with old references plus new ones
For each parent observation:
Retrieve latest version (could be observation or diagnostic report)
For each problem:
Retrieve latest version from resource current
Check if still a problem:
Retrieve version prior to 14 March
Update current version with old references plus new ones
LOG.info("Finished Fixing encounters from " + table);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
private static void saveResourceWrapper(UUID serviceId, ResourceWrapper wrapper) throws Exception {
if (wrapper.getResourceData() != null) {
long checksum = FhirStorageService.generateChecksum(wrapper.getResourceData());
wrapper.setResourceChecksum(Long.valueOf(checksum));
}
EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId);
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
entityManager.getTransaction().begin();
String json = wrapper.getResourceData();
json = json.replace("'", "''");
json = json.replace("\\", "\\\\");
String patientId = "";
if (wrapper.getPatientId() != null) {
patientId = wrapper.getPatientId().toString();
}
String updateSql = "UPDATE resource_current"
+ " SET resource_data = '" + json + "',"
+ " resource_checksum = " + wrapper.getResourceChecksum()
+ " WHERE service_id = '" + wrapper.getServiceId() + "'"
+ " AND patient_id = '" + patientId + "'"
+ " AND resource_type = '" + wrapper.getResourceType() + "'"
+ " AND resource_id = '" + wrapper.getResourceId() + "'";
statement.executeUpdate(updateSql);
//LOG.debug(updateSql);
//SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:SS");
//String createdAtStr = sdf.format(wrapper.getCreatedAt());
updateSql = "UPDATE resource_history"
+ " SET resource_data = '" + json + "',"
+ " resource_checksum = " + wrapper.getResourceChecksum()
+ " WHERE resource_id = '" + wrapper.getResourceId() + "'"
+ " AND resource_type = '" + wrapper.getResourceType() + "'"
//+ " AND created_at = '" + createdAtStr + "'"
+ " AND version = '" + wrapper.getVersion() + "'";
statement.executeUpdate(updateSql);
//LOG.debug(updateSql);
entityManager.getTransaction().commit();
}
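/* A minimal sketch (not used above) of the same resource_current update done
with a PreparedStatement, which would avoid the manual quote/backslash
escaping. Table and column names are taken from the SQL built above;
everything else is illustrative.

    String sql = "UPDATE resource_current SET resource_data = ?, resource_checksum = ?"
            + " WHERE service_id = ? AND patient_id = ? AND resource_type = ? AND resource_id = ?";
    try (PreparedStatement ps = connection.prepareStatement(sql)) {
        ps.setString(1, wrapper.getResourceData());
        ps.setLong(2, wrapper.getResourceChecksum());
        ps.setString(3, wrapper.getServiceId().toString());
        ps.setString(4, patientId);
        ps.setString(5, wrapper.getResourceType());
        ps.setString(6, wrapper.getResourceId().toString());
        ps.executeUpdate();
    }
*/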
/*private static void populateNewSearchTable(String table) {
LOG.info("Populating New Search Table");
try {
EntityManager entityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
List<String> patientIds = new ArrayList<>();
Map<String, String> serviceIds = new HashMap<>();
String sql = "SELECT patient_id, service_id FROM " + table + " WHERE done = 0";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
String patientId = rs.getString(1);
String serviceId = rs.getString(2);
patientIds.add(patientId);
serviceIds.put(patientId, serviceId);
}
rs.close();
statement.close();
entityManager.close();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearch2Dal();
LOG.info("Found " + patientIds.size() + " to do");
for (int i=0; i<patientIds.size(); i++) {
String patientIdStr = patientIds.get(i);
UUID patientId = UUID.fromString(patientIdStr);
String serviceIdStr = serviceIds.get(patientIdStr);
UUID serviceId = UUID.fromString(serviceIdStr);
Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientIdStr);
if (patient != null) {
patientSearchDal.update(serviceId, patient);
//find episode of care
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, null, patientId, ResourceType.EpisodeOfCare.toString());
for (ResourceWrapper wrapper: wrappers) {
if (!wrapper.isDeleted()) {
EpisodeOfCare episodeOfCare = (EpisodeOfCare)FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
patientSearchDal.update(serviceId, episodeOfCare);
}
}
}
String updateSql = "UPDATE " + table + " SET done = 1 WHERE patient_id = '" + patientIdStr + "' AND service_id = '" + serviceIdStr + "';";
entityManager = ConnectionManager.getEdsEntityManager();
session = (SessionImpl)entityManager.getDelegate();
connection = session.connection();
statement = connection.createStatement();
entityManager.getTransaction().begin();
statement.executeUpdate(updateSql);
entityManager.getTransaction().commit();
if (i % 5000 == 0) {
LOG.info("Done " + (i+1) + " of " + patientIds.size());
}
}
entityManager.close();
LOG.info("Finished Populating New Search Table");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
private static void createBartsSubset(String sourceDir, UUID serviceUuid, UUID systemUuid, String samplePatientsFile) {
LOG.info("Creating Barts Subset");
try {
Set<String> personIds = new HashSet<>();
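//the sample patients file is expected to contain one person ID per line,
//with comment lines (assumed here to start with #) skipped below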
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
createBartsSubsetForFile(sourceDir, serviceUuid, systemUuid, personIds);
LOG.info("Finished Creating Barts Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void createBartsSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
for (File sourceFile: sourceDir.listFiles()) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
LOG.info("Doing dir " + sourceFile);
createBartsSubsetForFile(sourceFile, destFile, personIds);
} else {
//we have some bad partial files in, so ignore them
String ext = FilenameUtils.getExtension(name);
if (ext.equalsIgnoreCase("filepart")) {
continue;
}
//if the file is empty, we still need the empty file in the filtered directory, so just copy it
if (sourceFile.length() == 0) {
LOG.info("Copying empty file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
continue;
}
String baseName = FilenameUtils.getBaseName(name);
String fileType = BartsCsvToFhirTransformer.identifyFileType(baseName);
if (isCerner22File(fileType)) {
LOG.info("Checking 2.2 file " + sourceFile);
if (destFile.exists()) {
destFile.delete();
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
int lineIndex = -1;
PrintWriter pw = null;
int personIdColIndex = -1;
int expectedCols = -1;
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
lineIndex ++;
if (lineIndex == 0) {
if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
//this file has no headers, so needs hard-coding
personIdColIndex = 5;
} else {
//check headings for PersonID col
String[] toks = line.split("\\|", -1);
expectedCols = toks.length;
for (int i=0; i<expectedCols; i++) {
String col = toks[i];
if (col.equalsIgnoreCase("PERSON_ID")
|| col.equalsIgnoreCase("#PERSON_ID")) {
personIdColIndex = i;
break;
}
}
//if no person ID, then just copy the entire file
if (personIdColIndex == -1) {
br.close();
br = null;
LOG.info(" Copying 2.2 file to " + destFile);
copyFile(sourceFile, destFile);
break;
} else {
LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
}
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
pw = new PrintWriter(bw);
} else {
//filter on personID
String[] toks = line.split("\\|", -1);
if (expectedCols != -1
&& toks.length != expectedCols) {
throw new Exception("Line " + (lineIndex+1) + " has " + toks.length + " cols but expecting " + expectedCols);
} else {
String personId = toks[personIdColIndex];
if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
&& !personIds.contains(personId)) {
continue;
}
}
}
pw.println(line);
}
if (br != null) {
br.close();
}
if (pw != null) {
pw.flush();
pw.close();
}
} else {
//the 2.1 files are going to be a pain to split by patient, so just copy them over
LOG.info("Copying 2.1 file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
}
}
}
}*/
private static void createBartsSubsetForFile(String sourceDir, UUID serviceUuid, UUID systemUuid, Set<String> personIds) throws Exception {
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);
for (Exchange exchange: exchanges) {
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
for (ExchangePayloadFile fileObj : files) {
String filePathWithoutSharedStorage = fileObj.getPath().substring(TransformConfig.instance().getSharedStoragePath().length()+1);
String sourceFilePath = FilenameUtils.concat(sourceDir, filePathWithoutSharedStorage);
File sourceFile = new File(sourceFilePath);
String destFilePath = fileObj.getPath();
File destFile = new File(destFilePath);
File destDir = destFile.getParentFile();
if (!destDir.exists()) {
destDir.mkdirs();
}
//if the file is empty, we still need the empty file in the filtered directory, so just copy it
if (sourceFile.length() == 0) {
LOG.info("Copying empty file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
continue;
}
String fileType = fileObj.getType();
if (isCerner22File(fileType)) {
LOG.info("Checking 2.2 file " + sourceFile);
if (destFile.exists()) {
destFile.delete();
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
int lineIndex = -1;
PrintWriter pw = null;
int personIdColIndex = -1;
int expectedCols = -1;
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
lineIndex++;
if (lineIndex == 0) {
if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
//this file has no headers, so needs hard-coding
personIdColIndex = 5;
} else {
//check headings for PersonID col
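//e.g. a hypothetical pipe-delimited header: #PERSON_ID|ENCNTR_ID|...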
String[] toks = line.split("\\|", -1);
expectedCols = toks.length;
for (int i = 0; i < expectedCols; i++) {
String col = toks[i];
if (col.equalsIgnoreCase("PERSON_ID")
|| col.equalsIgnoreCase("#PERSON_ID")) {
personIdColIndex = i;
break;
}
}
//if no person ID, then just copy the entire file
if (personIdColIndex == -1) {
br.close();
br = null;
LOG.info(" Copying 2.2 file to " + destFile);
copyFile(sourceFile, destFile);
break;
} else {
LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
}
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
pw = new PrintWriter(bw);
} else {
//filter on personID
String[] toks = line.split("\\|", -1);
if (expectedCols != -1
&& toks.length != expectedCols) {
throw new Exception("Line " + (lineIndex + 1) + " has " + toks.length + " cols but expecting " + expectedCols);
} else {
String personId = toks[personIdColIndex];
if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
&& !personIds.contains(personId)) {
continue;
}
}
}
pw.println(line);
}
if (br != null) {
br.close();
}
if (pw != null) {
pw.flush();
pw.close();
}
} else {
//the 2.1 files are going to be a pain to split by patient, so just copy them over
LOG.info("Copying 2.1 file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
}
}
}
}
private static void copyFile(File src, File dst) throws Exception {
//try-with-resources ensures the stream is closed even if the copy fails part-way
try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(src))) {
Files.copy(bis, dst.toPath());
}
}
//all the file types sent in the Cerner 2.2 extract format
private static final Set<String> CERNER_22_FILE_TYPES = new HashSet<>(Arrays.asList(
"PPATI", "PPREL", "CDSEV", "PPATH", "RTTPE", "AEATT", "AEINV", "AETRE",
"OPREF", "OPATT", "EALEN", "EALSU", "EALOF", "HPSSP", "IPEPI", "IPWDS",
"DELIV", "BIRTH", "SCHAC", "APPSL", "DIAGN", "PROCE", "ORDER", "DOCRP",
"DOCREF", "CNTRQ", "LETRS", "LOREF", "ORGREF", "PRSNLREF", "CVREF",
"NOMREF", "EALIP", "CLEVE", "ENCNT", "RESREF", "PPNAM", "PPADD",
"PPPHO", "PPALI", "PPINF", "PPAGP", "SURCC", "SURCP", "SURCA", "SURCD",
"PDRES", "PDREF", "ABREF", "CEPRS", "ORDDT", "STATREF", "STATA",
"ENCINF", "SCHDETAIL", "SCHOFFER", "PPGPORG", "FAMILYHISTORY"));
private static boolean isCerner22File(String fileType) {
//same behaviour as the original chain of equalsIgnoreCase checks
return CERNER_22_FILE_TYPES.contains(fileType.toUpperCase(Locale.ROOT));
}
private static void fixSubscriberDbs() {
LOG.info("Fixing Subscriber DBs");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-05-11");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
boolean needsFixing = false;
for (UUID exchangeId: exchangeIds) {
if (!needsFixing) {
List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId);
for (ExchangeTransformAudit audit: transformAudits) {
Date transformStart = audit.getStarted();
if (!transformStart.before(dateError)) {
needsFixing = true;
break;
}
}
}
if (!needsFixing) {
continue;
}
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId);
Exchange exchange = exchangeDal.getExchange(exchangeId);
LOG.info(" Posting exchange " + exchangeId + " with " + batches.size() + " batches");
List<UUID> batchIds = new ArrayList<>();
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
UUID batchId = batch.getBatchId();
batchIds.add(batchId);
}
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
}
LOG.info("Finished Fixing Subscriber DBs");
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void fixReferralRequests() {
LOG.info("Fixing Referral Requests");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-04-24");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
boolean needsFixing = false;
Set<UUID> patientIdsToPost = new HashSet<>();
for (UUID exchangeId: exchangeIds) {
if (!needsFixing) {
List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId);
for (ExchangeTransformAudit audit: transformAudits) {
Date transformStart = audit.getStarted();
if (!transformStart.before(dateError)) {
needsFixing = true;
break;
}
}
}
if (!needsFixing) {
continue;
}
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId);
Exchange exchange = exchangeDal.getExchange(exchangeId);
LOG.info("Checking exchange " + exchangeId + " with " + batches.size() + " batches");
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
UUID batchId = batch.getBatchId();
List<ResourceWrapper> wrappers = resourceDal.getResourcesForBatch(serviceId, batchId);
for (ResourceWrapper wrapper: wrappers) {
String resourceType = wrapper.getResourceType();
if (!resourceType.equals(ResourceType.ReferralRequest.toString())
|| wrapper.isDeleted()) {
continue;
}
String json = wrapper.getResourceData();
ReferralRequest referral = (ReferralRequest)FhirSerializationHelper.deserializeResource(json);
continue;
}
CodeableConcept reason = referral.getServiceRequested().get(0);
referral.setReason(reason);
if (!referral.hasReason()) {
continue;
}
CodeableConcept reason = referral.getReason();
referral.setReason(null);
referral.addServiceRequested(reason);
json = FhirSerializationHelper.serializeResource(referral);
wrapper.setResourceData(json);
saveResourceWrapper(serviceId, wrapper);
//add to the set of patients we know need sending on to the protocol queue
patientIdsToPost.add(patientId);
LOG.info("Fixed " + resourceType + " " + wrapper.getResourceId() + " in batch " + batchId);
}
//if our patient has just been fixed or was fixed before, post onto the protocol queue
if (patientIdsToPost.contains(patientId)) {
List<UUID> batchIds = new ArrayList<>();
batchIds.add(batchId);
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
}
}
}
LOG.info("Finished Fixing Referral Requests");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
private static void applyEmisAdminCaches() {
LOG.info("Applying Emis Admin Caches");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
if (!exchangeDal.isServiceStarted(serviceId, endpointSystemId)) {
LOG.info(" Service not started, so skipping");
continue;
}
//get exchanges
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
if (exchangeIds.isEmpty()) {
LOG.info(" No exchanges found, so skipping");
continue;
}
UUID firstExchangeId = exchangeIds.get(0);
List<ExchangeEvent> events = exchangeDal.getExchangeEvents(firstExchangeId);
boolean appliedAdminCache = false;
for (ExchangeEvent event: events) {
if (event.getEventDesc().equals("Applied Emis Admin Resource Cache")) {
appliedAdminCache = true;
}
}
if (appliedAdminCache) {
LOG.info(" Have already applied admin cache, so skipping");
continue;
}
Exchange exchange = exchangeDal.getExchange(firstExchangeId);
String body = exchange.getBody();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(body);
if (files.length == 0) {
LOG.info(" No files in exchange " + firstExchangeId + " so skipping");
continue;
}
String firstFilePath = files[0];
String name = FilenameUtils.getBaseName(firstFilePath); //file name without extension
String[] toks = name.split("_");
if (toks.length != 5) {
throw new TransformException("Failed to extract data sharing agreement GUID from filename " + firstFilePath);
}
String sharingAgreementGuid = toks[4];
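//e.g. a hypothetical name 123_Agreements_SharingOrganisation_20180101_<dsaGuid>
//splits into five tokens, the last being the sharing agreement GUID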
List<UUID> batchIds = new ArrayList<>();
TransformError transformError = new TransformError();
FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(firstExchangeId, serviceId, endpointSystemId, transformError, batchIds);
EmisCsvHelper csvHelper = new EmisCsvHelper(fhirResourceFiler.getServiceId(), fhirResourceFiler.getSystemId(),
fhirResourceFiler.getExchangeId(), sharingAgreementGuid,
true);
ExchangeTransformAudit transformAudit = new ExchangeTransformAudit();
transformAudit.setServiceId(serviceId);
transformAudit.setSystemId(endpointSystemId);
transformAudit.setExchangeId(firstExchangeId);
transformAudit.setId(UUID.randomUUID());
transformAudit.setStarted(new Date());
LOG.info(" Going to apply admin resource cache");
csvHelper.applyAdminResourceCache(fhirResourceFiler);
fhirResourceFiler.waitToFinish();
for (UUID batchId: batchIds) {
LOG.info(" Created batch ID " + batchId + " for exchange " + firstExchangeId);
}
transformAudit.setEnded(new Date());
transformAudit.setNumberBatchesCreated(new Integer(batchIds.size()));
boolean hadError = false;
if (transformError.getError().size() > 0) {
transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError));
hadError = true;
}
exchangeDal.save(transformAudit);
//clear down the cache of reference mappings since they won't be of much use for the next Exchange
IdHelper.clearCache();
if (hadError) {
LOG.error(" <<<<<<Error applying resource cache!");
continue;
}
//add the event to say we've applied the cache
AuditWriter.writeExchangeEvent(firstExchangeId, "Applied Emis Admin Resource Cache");
//post that ONE new batch ID onto the protocol queue
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
LOG.info("Finished Applying Emis Admin Caches");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
* fixes Emis extract(s) when a practice was disabled then subsequently re-bulked, by
* replacing the "delete" extracts with newly generated deltas that can be processed
* before the re-bulk is done
*/
private static void fixDisabledEmisExtract(String serviceId, String systemId, String sharedStoragePath, String tempDir) {
LOG.info("Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId);
try {
/*File tempDirLast = new File(tempDir, "last");
if (!tempDirLast.exists()) {
if (!tempDirLast.mkdirs()) {
throw new Exception("Failed to create temp dir " + tempDirLast);
}
tempDirLast.mkdirs();
}
File tempDirEmpty = new File(tempDir, "empty");
if (!tempDirEmpty.exists()) {
if (!tempDirEmpty.mkdirs()) {
throw new Exception("Failed to create temp dir " + tempDirEmpty);
}
tempDirEmpty.mkdirs();
}*/
UUID serviceUuid = UUID.fromString(serviceId);
UUID systemUuid = UUID.fromString(systemId);
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
//get all the exchanges, which are returned in reverse order, so reverse for simplicity
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);
//sorting by timestamp seems unreliable when exchanges were posted close together?
List<Exchange> tmp = new ArrayList<>();
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
tmp.add(exchange);
}
exchanges = tmp;
/*exchanges.sort((o1, o2) -> {
Date d1 = o1.getTimestamp();
Date d2 = o2.getTimestamp();
return d1.compareTo(d2);
});*/
LOG.info("Found " + exchanges.size() + " exchanges");
//continueOrQuit();
//find the files for each exchange
Map<Exchange, List<String>> hmExchangeFiles = new HashMap<>();
Map<Exchange, List<String>> hmExchangeFilesWithoutStoragePrefix = new HashMap<>();
for (Exchange exchange: exchanges) {
//populate a map of the files with the shared storage prefix
String exchangeBody = exchange.getBody();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
List<String> fileList = Lists.newArrayList(files);
hmExchangeFiles.put(exchange, fileList);
//populate a map of the same files without the prefix
files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
for (int i=0; i<files.length; i++) {
String file = files[i].substring(sharedStoragePath.length() + 1);
files[i] = file;
}
fileList = Lists.newArrayList(files);
hmExchangeFilesWithoutStoragePrefix.put(exchange, fileList);
}
LOG.info("Cached files for each exchange");
int indexDisabled = -1;
int indexRebulked = -1;
int indexOriginallyBulked = -1;
//go back through them to find the extract where the re-bulk is and when it was disabled
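//e.g. given exchanges in date order: BULK, delta, DISABLED, DISABLED, REBULK, delta - this
//backwards scan should leave indexRebulked pointing at the re-bulk itself (the oldest exchange
//of the newest non-disabled run) and indexDisabled at the first of the disabled exchanges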
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
boolean disabled = isDisabledInSharingAgreementFile(exchange, hmExchangeFiles);
if (disabled) {
indexDisabled = i;
} else {
if (indexDisabled == -1) {
indexRebulked = i;
} else {
//if we've found a non-disabled extract older than the disabled ones,
//then we've gone far enough back
break;
}
}
}
//go back from when it was disabled to find the previous bulk load (i.e. the first exchange, or the first one after an earlier disabled period)
for (int i=indexDisabled-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
boolean disabled = isDisabledInSharingAgreementFile(exchange, hmExchangeFiles);
if (disabled) {
break;
}
indexOriginallyBulked = i;
}
if (indexDisabled == -1
|| indexRebulked == -1
|| indexOriginallyBulked == -1) {
throw new Exception("Failed to find exchanges for disabling (" + indexDisabled + "), re-bulking (" + indexRebulked + ") or original bulk (" + indexOriginallyBulked + ")");
}
Exchange exchangeDisabled = exchanges.get(indexDisabled);
LOG.info("Disabled on " + findExtractDate(exchangeDisabled, hmExchangeFiles) + " " + exchangeDisabled.getId());
Exchange exchangeRebulked = exchanges.get(indexRebulked);
LOG.info("Rebulked on " + findExtractDate(exchangeRebulked, hmExchangeFiles) + " " + exchangeRebulked.getId());
Exchange exchangeOriginallyBulked = exchanges.get(indexOriginallyBulked);
LOG.info("Originally bulked on " + findExtractDate(exchangeOriginallyBulked, hmExchangeFiles) + " " + exchangeOriginallyBulked.getId());
//continueOrQuit();
List<String> rebulkFiles = hmExchangeFiles.get(exchangeRebulked);
List<String> tempFilesCreated = new ArrayList<>();
Set<String> patientGuidsDeletedOrTooOld = new HashSet<>();
for (String rebulkFile: rebulkFiles) {
String fileType = findFileType(rebulkFile);
if (!isPatientFile(fileType)) {
continue;
}
LOG.info("Doing " + fileType);
String guidColumnName = getGuidColumnName(fileType);
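//rows are uniquely identified by "PatientGuid" alone for the patient file,
//or "PatientGuid//RowGuid" for the other patient-data files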
//find all the guids in the re-bulk
Set<String> idsInRebulk = new HashSet<>();
InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(rebulkFile);
CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
String[] headers = null;
try {
headers = CsvHelper.getHeaderMapAsArray(csvParser);
Iterator<CSVRecord> iterator = csvParser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
//get the patient and row guid out of the file and cache in our set
String id = record.get("PatientGuid");
if (!Strings.isNullOrEmpty(guidColumnName)) {
id += "//" + record.get(guidColumnName);
}
idsInRebulk.add(id);
}
} finally {
csvParser.close();
}
LOG.info("Found " + idsInRebulk.size() + " IDs in re-bulk file: " + rebulkFile);
//create a replacement file for the exchange where the service was disabled
String replacementDisabledFile = null;
List<String> disabledFiles = hmExchangeFilesWithoutStoragePrefix.get(exchangeDisabled);
for (String s: disabledFiles) {
String disabledFileType = findFileType(s);
if (disabledFileType.equals(fileType)) {
replacementDisabledFile = FilenameUtils.concat(tempDir, s);
File dir = new File(replacementDisabledFile).getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
tempFilesCreated.add(s);
LOG.info("Created replacement file " + replacementDisabledFile);
}
}
FileWriter fileWriter = new FileWriter(replacementDisabledFile);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers));
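//the CSV header row is written when the printer is constructed, so flushing now should
//ensure the replacement file is valid on disk even if no deleted rows end up being added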
csvPrinter.flush();
Set<String> pastIdsProcessed = new HashSet<>();
//now go through all files of the same type PRIOR to the service being disabled
//to find any rows that we'll need to explicitly delete because they were deleted while
//the extract was disabled
for (int i=indexDisabled-1; i>=indexOriginallyBulked; i--) {
Exchange exchange = exchanges.get(i);
String originalFile = null;
List<String> files = hmExchangeFiles.get(exchange);
for (String s: files) {
String originalFileType = findFileType(s);
if (originalFileType.equals(fileType)) {
originalFile = s;
break;
}
}
if (originalFile == null) {
continue;
}
LOG.info(" Reading " + originalFile);
reader = FileHelper.readFileReaderFromSharedStorage(originalFile);
csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
try {
Iterator<CSVRecord> iterator = csvParser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String patientGuid = record.get("PatientGuid");
//get the patient and row guid out of the file and cache in our set
String uniqueId = patientGuid;
if (!Strings.isNullOrEmpty(guidColumnName)) {
uniqueId += "//" + record.get(guidColumnName);
}
//if we've already handled this record in a more recent extract, then skip it
if (pastIdsProcessed.contains(uniqueId)) {
continue;
}
pastIdsProcessed.add(uniqueId);
//if the record is marked as deleted, then we won't expect it in the re-bulk
//but if this ID isn't deleted and isn't in the re-bulk, then it means it WAS deleted
//in Emis Web while the extract feed was disabled, so we never received the delete
boolean deleted = Boolean.parseBoolean(record.get("Deleted"));
if (deleted) {
//if it's the Patient file, stick the patient GUID in a set so we know the full patient record was deleted
if (fileType.equals("Admin_Patient")) {
patientGuidsDeletedOrTooOld.add(patientGuid);
}
continue;
}
//if it's not the patient file and we refer to a patient that we know
//has been deleted, then skip this row, since we know we're deleting the entire patient record
if (patientGuidsDeletedOrTooOld.contains(patientGuid)) {
continue;
}
//if the re-bulk contains a record matching this one, then it's OK
if (idsInRebulk.contains(uniqueId)) {
continue;
}
//the rebulk won't contain any data for patients that are now too old (i.e. deducted or deceased > 2 yrs ago),
//so any patient ID in the original files but not in the rebulk can be treated the same way and any data for them skipped
if (fileType.equals("Admin_Patient")) {
//retrieve the Patient and EpisodeOfCare resource for the patient so we can confirm they are deceased or deducted
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID patientUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.Patient, patientGuid);
if (patientUuid == null) {
throw new Exception("Failed to find patient UUID from GUID [" + patientGuid + "]");
}
Patient patientResource = (Patient)resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.Patient, patientUuid.toString());
if (patientResource.hasDeceased()) {
patientGuidsDeletedOrTooOld.add(patientGuid);
continue;
}
UUID episodeUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.EpisodeOfCare, patientGuid); //we use the patient GUID for the episode too
EpisodeOfCare episodeResource = (EpisodeOfCare)resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.EpisodeOfCare, episodeUuid.toString());
if (episodeResource.hasPeriod()
&& !PeriodHelper.isActive(episodeResource.getPeriod())) {
patientGuidsDeletedOrTooOld.add(patientGuid);
continue;
}
}
//create a new CSV record, carrying over the GUIDs from the original but marking as deleted
String[] newRecord = new String[headers.length];
for (int j=0; j<newRecord.length; j++) {
String header = headers[j];
if (header.equals("PatientGuid")
|| header.equals("OrganisationGuid")
|| (!Strings.isNullOrEmpty(guidColumnName)
&& header.equals(guidColumnName))) {
String val = record.get(header);
newRecord[j] = val;
} else if (header.equals("Deleted")) {
newRecord[j] = "true";
} else {
newRecord[j] = "";
}
}
csvPrinter.printRecord((Object[])newRecord);
csvPrinter.flush();
//log out the raw record that's missing from the re-bulk
StringBuilder sb = new StringBuilder();
sb.append("Record not in re-bulk: ");
for (int j=0; j<record.size(); j++) {
if (j > 0) {
sb.append(",");
}
sb.append(record.get(j));
}
LOG.info(sb.toString());
}
} finally {
csvParser.close();
}
}
csvPrinter.flush();
csvPrinter.close();
//also create a version of the CSV file with just the header row and nothing else in it
for (int i=indexDisabled+1; i<indexRebulked; i++) {
Exchange ex = exchanges.get(i);
List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex);
for (String s: exchangeFiles) {
String exchangeFileType = findFileType(s);
if (exchangeFileType.equals(fileType)) {
String emptyTempFile = FilenameUtils.concat(tempDir, s);
File dir = new File(emptyTempFile).getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
fileWriter = new FileWriter(emptyTempFile);
bufferedWriter = new BufferedWriter(fileWriter);
csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers));
csvPrinter.flush();
csvPrinter.close();
tempFilesCreated.add(s);
LOG.info("Created empty file " + emptyTempFile);
}
}
}
}
//we also need to copy the restored sharing agreement file over the ones from the period it was disabled
String rebulkedSharingAgreementFile = null;
for (String s: rebulkFiles) {
String fileType = findFileType(s);
if (fileType.equals("Agreements_SharingOrganisation")) {
rebulkedSharingAgreementFile = s;
}
}
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange ex = exchanges.get(i);
List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex);
for (String s: exchangeFiles) {
String exchangeFileType = findFileType(s);
if (exchangeFileType.equals("Agreements_SharingOrganisation")) {
String replacementFile = FilenameUtils.concat(tempDir, s);
InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkedSharingAgreementFile);
Files.copy(inputStream, new File(replacementFile).toPath());
inputStream.close();
tempFilesCreated.add(s);
}
}
}
//create a script to copy the files into S3
List<String> copyScript = new ArrayList<>();
copyScript.add("#!/bin/bash");
copyScript.add("");
for (String s: tempFilesCreated) {
String localFile = FilenameUtils.concat(tempDir, s);
copyScript.add("sudo aws s3 cp " + localFile + " s3://discoverysftplanding/endeavour/" + s);
}
String scriptFile = FilenameUtils.concat(tempDir, "copy.sh");
FileUtils.writeLines(new File(scriptFile), copyScript);
/*continueOrQuit();
//back up every file where the service was disabled
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
for (String file: files) {
//first download from S3 to the local temp dir
InputStream inputStream = FileHelper.readFileFromSharedStorage(file);
String fileName = FilenameUtils.getName(file);
String tempPath = FilenameUtils.concat(tempDir, fileName);
File downloadDestination = new File(tempPath);
Files.copy(inputStream, downloadDestination.toPath());
//then write back to S3 in a sub-dir of the original file
String backupPath = FilenameUtils.getPath(file);
backupPath = FilenameUtils.concat(backupPath, "Original");
backupPath = FilenameUtils.concat(backupPath, fileName);
FileHelper.writeFileToSharedStorage(backupPath, downloadDestination);
LOG.info("Backed up " + file + " -> " + backupPath);
//delete from temp dir
downloadDestination.delete();
}
}
continueOrQuit();
//copy the new CSV files into the dir where it was disabled
List<String> disabledFiles = hmExchangeFiles.get(exchangeDisabled);
for (String disabledFile: disabledFiles) {
String fileType = findFileType(disabledFile);
if (!isPatientFile(fileType)) {
continue;
}
String tempFile = FilenameUtils.concat(tempDirLast.getAbsolutePath(), fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected temp file " + f);
}
FileHelper.writeFileToSharedStorage(disabledFile, f);
LOG.info("Copied " + tempFile + " -> " + disabledFile);
}
continueOrQuit();
//empty the patient files for any extracts while the service was disabled
for (int i=indexDisabled+1; i<indexRebulked; i++) {
Exchange otherExchangeDisabled = exchanges.get(i);
List<String> otherDisabledFiles = hmExchangeFiles.get(otherExchangeDisabled);
for (String otherDisabledFile: otherDisabledFiles) {
String fileType = findFileType(otherDisabledFile);
if (!isPatientFile(fileType)) {
continue;
}
String tempFile = FilenameUtils.concat(tempDirEmpty.getAbsolutePath(), fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected empty file " + f);
}
FileHelper.writeFileToSharedStorage(otherDisabledFile, f);
LOG.info("Copied " + tempFile + " -> " + otherDisabledFile);
}
}
continueOrQuit();
//copy the content of the sharing agreement file from when it was re-bulked
for (String rebulkFile: rebulkFiles) {
String fileType = findFileType(rebulkFile);
if (fileType.equals("Agreements_SharingOrganisation")) {
String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv");
File downloadDestination = new File(tempFile);
InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkFile);
Files.copy(inputStream, downloadDestination.toPath());
tempFilesCreated.add(tempFile);
}
}
//replace the sharing agreement file for all disabled extracts with the non-disabled one
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
for (String file: files) {
String fileType = findFileType(file);
if (fileType.equals("Agreements_SharingOrganisation")) {
String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected empty file " + f);
}
FileHelper.writeFileToSharedStorage(file, f);
LOG.info("Copied " + tempFile + " -> " + file);
}
}
}
LOG.info("Finished Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId);
continueOrQuit();
for (String tempFileCreated: tempFilesCreated) {
File f = new File(tempFileCreated);
if (f.exists()) {
f.delete();
}
}*/
} catch (Exception ex) {
LOG.error("", ex);
}
}
private static String findExtractDate(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception {
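//the extract datestamp (e.g. "20150211164536") is the fourth underscore-delimited token of the sharing agreement file name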
List<String> files = fileMap.get(exchange);
String file = findSharingAgreementFile(files);
String name = FilenameUtils.getBaseName(file);
String[] toks = name.split("_");
return toks[3];
}
private static boolean isDisabledInSharingAgreementFile(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception {
List<String> files = fileMap.get(exchange);
String file = findSharingAgreementFile(files);
InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(file);
CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
try {
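//only the first record is read - the sharing agreement file is assumed to contain a single row for the extracting organisation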
Iterator<CSVRecord> iterator = csvParser.iterator();
CSVRecord record = iterator.next();
String s = record.get("Disabled");
boolean disabled = Boolean.parseBoolean(s);
return disabled;
} finally {
csvParser.close();
}
}
private static void continueOrQuit() throws Exception {
LOG.info("Enter y to continue, anything else to quit");
byte[] bytes = new byte[10];
System.in.read(bytes);
char c = (char)bytes[0];
if (c != 'y' && c != 'Y') {
System.out.println("Read " + c);
System.exit(1);
}
}
private static String getGuidColumnName(String fileType) {
if (fileType.equals("Admin_Patient")) {
//patient file just has patient GUID, nothing extra
return null;
} else if (fileType.equals("CareRecord_Consultation")) {
return "ConsultationGuid";
} else if (fileType.equals("CareRecord_Diary")) {
return "DiaryGuid";
} else if (fileType.equals("CareRecord_Observation")) {
return "ObservationGuid";
} else if (fileType.equals("CareRecord_Problem")) {
//there is no separate problem GUID, as it's just a modified observation
return "ObservationGuid";
} else if (fileType.equals("Prescribing_DrugRecord")) {
return "DrugRecordGuid";
} else if (fileType.equals("Prescribing_IssueRecord")) {
return "IssueRecordGuid";
} else {
throw new IllegalArgumentException(fileType);
}
}
private static String findFileType(String filePath) {
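//e.g. "291_Agreements_SharingOrganisation_20150211164536_<guid>.csv" -> "Agreements_SharingOrganisation"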
String fileName = FilenameUtils.getName(filePath);
String[] toks = fileName.split("_");
String domain = toks[1];
String name = toks[2];
return domain + "_" + name;
}
private static boolean isPatientFile(String fileType) {
if (fileType.equals("Admin_Patient")
|| fileType.equals("CareRecord_Consultation")
|| fileType.equals("CareRecord_Diary")
|| fileType.equals("CareRecord_Observation")
|| fileType.equals("CareRecord_Problem")
|| fileType.equals("Prescribing_DrugRecord")
|| fileType.equals("Prescribing_IssueRecord")) {
//note the referral file doesn't have a Deleted column, so isn't in this list
return true;
} else {
return false;
}
}
private static String findSharingAgreementFile(List<String> files) throws Exception {
for (String file : files) {
String fileType = findFileType(file);
if (fileType.equals("Agreements_SharingOrganisation")) {
return file;
}
}
throw new Exception("Failed to find sharing agreement file in " + files.get(0));
}
private static void testSlack() {
LOG.info("Testing slack");
try {
SlackHelper.sendSlackMessage(SlackHelper.Channel.QueueReaderAlerts, "Test Message from Queue Reader");
LOG.info("Finished testing slack");
} catch (Exception ex) {
LOG.error("", ex);
}
}
private static void postToInboundFromFile(UUID serviceId, UUID systemId, String filePath) {
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
Service service = serviceDalI.getById(serviceId);
LOG.info("Posting to inbound exchange for " + service.getName() + " from file " + filePath);
FileReader fr = new FileReader(filePath);
BufferedReader br = new BufferedReader(fr);
int count = 0;
List<UUID> exchangeIdBatch = new ArrayList<>();
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
UUID exchangeId = UUID.fromString(line);
//update the transform audit, so EDS UI knows we've re-queued this exchange
ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
if (audit != null
&& !audit.isResubmitted()) {
audit.setResubmitted(true);
auditRepository.save(audit);
}
count ++;
exchangeIdBatch.add(exchangeId);
if (exchangeIdBatch.size() >= 1000) {
QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
exchangeIdBatch = new ArrayList<>();
LOG.info("Done " + count);
}
}
if (!exchangeIdBatch.isEmpty()) {
QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
LOG.info("Done " + count);
}
br.close();
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Posting to inbound for " + serviceId);
}
/*private static void postToInbound(UUID serviceId, boolean all) {
LOG.info("Posting to inbound for " + serviceId);
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
Service service = serviceDalI.getById(serviceId);
List<UUID> systemIds = findSystemIds(service);
UUID systemId = systemIds.get(0);
ExchangeTransformErrorState errorState = auditRepository.getErrorState(serviceId, systemId);
for (UUID exchangeId: errorState.getExchangeIdsInError()) {
//update the transform audit, so EDS UI knows we've re-queued this exchange
ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
//skip any exchange IDs we've already re-queued up to be processed again
if (audit.isResubmitted()) {
LOG.debug("Not re-posting " + audit.getExchangeId() + " as it's already been resubmitted");
continue;
}
LOG.debug("Re-posting " + audit.getExchangeId());
audit.setResubmitted(true);
auditRepository.save(audit);
//then re-submit the exchange to Rabbit MQ for the queue reader to pick up
QueueHelper.postToExchange(exchangeId, "EdsInbound", null, false);
if (!all) {
LOG.info("Posted first exchange, so stopping");
break;
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Posting to inbound for " + serviceId);
}*/
/*private static void fixPatientSearch(String serviceId) {
LOG.info("Fixing patient search for " + serviceId);
try {
UUID serviceUuid = UUID.fromString(serviceId);
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
ParserPool parser = new ParserPool();
Set<UUID> patientsDone = new HashSet<>();
List<UUID> exchanges = exchangeDalI.getExchangeIdsForService(serviceUuid);
LOG.info("Found " + exchanges.size() + " exchanges");
for (UUID exchangeId: exchanges) {
List<ExchangeBatch> batches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
LOG.info("Found " + batches.size() + " batches in exchange " + exchangeId);
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
if (patientsDone.contains(patientId)) {
continue;
}
ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceUuid, ResourceType.Patient.toString(), patientId);
if (wrapper != null) {
String json = wrapper.getResourceData();
if (!Strings.isNullOrEmpty(json)) {
Patient fhirPatient = (Patient) parser.parse(json);
UUID systemUuid = wrapper.getSystemId();
patientSearchDal.update(serviceUuid, systemUuid, fhirPatient);
}
}
patientsDone.add(patientId);
if (patientsDone.size() % 1000 == 0) {
LOG.info("Done " + patientsDone.size());
}
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished fixing patient search for " + serviceId);
}*/
private static void runSql(String host, String username, String password, String sqlFile) {
LOG.info("Running SQL on " + host + " from " + sqlFile);
Connection conn = null;
Statement statement = null;
try {
File f = new File(sqlFile);
if (!f.exists()) {
LOG.error("" + f + " doesn't exist");
return;
}
List<String> lines = FileUtils.readLines(f);
/*String combined = String.join("\n", lines);
LOG.info("Going to run SQL");
LOG.info(combined);*/
//load driver
Class.forName("com.mysql.cj.jdbc.Driver");
//create connection
Properties props = new Properties();
props.setProperty("user", username);
props.setProperty("password", password);
conn = DriverManager.getConnection(host, props);
LOG.info("Opened connection");
statement = conn.createStatement();
long totalStart = System.currentTimeMillis();
for (String sql: lines) {
sql = sql.trim();
if (sql.startsWith("
/*private static void fixExchangeBatches() {
LOG.info("Starting Fixing Exchange Batches");
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
List<Service> services = serviceDalI.getAll();
for (Service service: services) {
LOG.info("Doing " + service.getName());
List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(service.getId());
for (UUID exchangeId: exchangeIds) {
LOG.info(" Exchange " + exchangeId);
List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch: exchangeBatches) {
if (exchangeBatch.getEdsPatientId() != null) {
continue;
}
List<ResourceWrapper> resources = resourceDalI.getResourcesForBatch(exchangeBatch.getBatchId());
if (resources.isEmpty()) {
continue;
}
ResourceWrapper first = resources.get(0);
UUID patientId = first.getPatientId();
if (patientId != null) {
exchangeBatch.setEdsPatientId(patientId);
exchangeBatchDalI.save(exchangeBatch);
LOG.info("Fixed batch " + exchangeBatch.getBatchId() + " -> " + exchangeBatch.getEdsPatientId());
}
}
}
}
LOG.info("Finished Fixing Exchange Batches");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void exportHl7Encounters(String sourceCsvPath, String outputPath) {
LOG.info("Exporting HL7 Encounters from " + sourceCsvPath + " to " + outputPath);
try {
File sourceFile = new File(sourceCsvPath);
CSVParser csvParser = CSVParser.parse(sourceFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
//"service_id","system_id","nhs_number","patient_id","count"
int count = 0;
HashMap<UUID, List<UUID>> serviceAndSystemIds = new HashMap<>();
HashMap<UUID, Integer> patientIds = new HashMap<>();
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
count ++;
String serviceId = csvRecord.get("service_id");
String systemId = csvRecord.get("system_id");
String patientId = csvRecord.get("patient_id");
UUID serviceUuid = UUID.fromString(serviceId);
List<UUID> systemIds = serviceAndSystemIds.get(serviceUuid);
if (systemIds == null) {
systemIds = new ArrayList<>();
serviceAndSystemIds.put(serviceUuid, systemIds);
}
systemIds.add(UUID.fromString(systemId));
patientIds.put(UUID.fromString(patientId), new Integer(count));
}
csvParser.close();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ParserPool parser = new ParserPool();
Map<Integer, List<Object[]>> patientRows = new HashMap<>();
SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
for (UUID serviceId: serviceAndSystemIds.keySet()) {
//List<UUID> systemIds = serviceAndSystemIds.get(serviceId);
Service service = serviceDalI.getById(serviceId);
String serviceName = service.getName();
LOG.info("Doing service " + serviceId + " " + serviceName);
List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(serviceId);
LOG.info("Got " + exchangeIds.size() + " exchange IDs to scan");
int exchangeCount = 0;
for (UUID exchangeId: exchangeIds) {
exchangeCount ++;
if (exchangeCount % 1000 == 0) {
LOG.info("Done " + exchangeCount + " exchanges");
}
List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch: exchangeBatches) {
UUID patientId = exchangeBatch.getEdsPatientId();
if (patientId != null
&& !patientIds.containsKey(patientId)) {
continue;
}
Integer patientIdInt = patientIds.get(patientId);
//get encounters for exchange batch
UUID batchId = exchangeBatch.getBatchId();
List<ResourceWrapper> resourceWrappers = resourceDalI.getResourcesForBatch(serviceId, batchId);
for (ResourceWrapper resourceWrapper: resourceWrappers) {
if (resourceWrapper.isDeleted()) {
continue;
}
String resourceType = resourceWrapper.getResourceType();
if (!resourceType.equals(ResourceType.Encounter.toString())) {
continue;
}
LOG.info("Processing " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId());
String json = resourceWrapper.getResourceData();
Encounter fhirEncounter = (Encounter)parser.parse(json);
Date date = null;
if (fhirEncounter.hasPeriod()) {
Period period = fhirEncounter.getPeriod();
if (period.hasStart()) {
date = period.getStart();
}
}
String episodeId = null;
if (fhirEncounter.hasEpisodeOfCare()) {
Reference episodeReference = fhirEncounter.getEpisodeOfCare().get(0);
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(episodeReference);
EpisodeOfCare fhirEpisode = (EpisodeOfCare)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirEpisode != null) {
if (fhirEpisode.hasIdentifier()) {
episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_BARTS_FIN_EPISODE_ID);
if (Strings.isNullOrEmpty(episodeId)) {
episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_HOMERTON_FIN_EPISODE_ID);
}
}
}
}
String adtType = null;
String adtCode = null;
Extension extension = ExtensionConverter.findExtension(fhirEncounter, FhirExtensionUri.HL7_MESSAGE_TYPE);
if (extension != null) {
CodeableConcept codeableConcept = (CodeableConcept) extension.getValue();
Coding hl7MessageTypeCoding = CodeableConceptHelper.findCoding(codeableConcept, FhirUri.CODE_SYSTEM_HL7V2_MESSAGE_TYPE);
if (hl7MessageTypeCoding != null) {
adtType = hl7MessageTypeCoding.getDisplay();
adtCode = hl7MessageTypeCoding.getCode();
}
} else {
//for older formats of the transformed resources, the HL7 message type can only be found from the raw original exchange body
try {
Exchange exchange = exchangeDalI.getExchange(exchangeId);
String exchangeBody = exchange.getBody();
Bundle bundle = (Bundle) FhirResourceHelper.deserialiseResouce(exchangeBody);
for (Bundle.BundleEntryComponent entry: bundle.getEntry()) {
if (entry.getResource() != null
&& entry.getResource() instanceof MessageHeader) {
MessageHeader header = (MessageHeader)entry.getResource();
if (header.hasEvent()) {
Coding coding = header.getEvent();
adtType = coding.getDisplay();
adtCode = coding.getCode();
}
}
}
} catch (Exception ex) {
//if the exchange body isn't a FHIR bundle, then we'll get an error by treating as such, so just ignore them
}
}
String cls = null;
if (fhirEncounter.hasClass_()) {
Encounter.EncounterClass encounterClass = fhirEncounter.getClass_();
if (encounterClass == Encounter.EncounterClass.OTHER
&& fhirEncounter.hasClass_Element()
&& fhirEncounter.getClass_Element().hasExtension()) {
for (Extension classExtension: fhirEncounter.getClass_Element().getExtension()) {
if (classExtension.getUrl().equals(FhirExtensionUri.ENCOUNTER_CLASS)) {
//not 100% sure of the type of the value, so just append to a String
cls = "" + classExtension.getValue();
}
}
}
if (Strings.isNullOrEmpty(cls)) {
cls = encounterClass.toCode();
}
}
String type = null;
if (fhirEncounter.hasType()) {
//only seem to ever have one type
CodeableConcept codeableConcept = fhirEncounter.getType().get(0);
type = codeableConcept.getText();
}
String status = null;
if (fhirEncounter.hasStatus()) {
Encounter.EncounterState encounterState = fhirEncounter.getStatus();
status = encounterState.toCode();
}
String location = null;
String locationType = null;
if (fhirEncounter.hasLocation()) {
//first location is always the current location
Encounter.EncounterLocationComponent encounterLocation = fhirEncounter.getLocation().get(0);
if (encounterLocation.hasLocation()) {
Reference locationReference = encounterLocation.getLocation();
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(locationReference);
Location fhirLocation = (Location)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirLocation != null) {
if (fhirLocation.hasName()) {
location = fhirLocation.getName();
}
if (fhirLocation.hasType()) {
CodeableConcept typeCodeableConcept = fhirLocation.getType();
if (typeCodeableConcept.hasCoding()) {
Coding coding = typeCodeableConcept.getCoding().get(0);
locationType = coding.getDisplay();
}
}
}
}
}
String clinician = null;
if (fhirEncounter.hasParticipant()) {
//first participant seems to be the interesting one
Encounter.EncounterParticipantComponent encounterParticipant = fhirEncounter.getParticipant().get(0);
if (encounterParticipant.hasIndividual()) {
Reference practitionerReference = encounterParticipant.getIndividual();
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(practitionerReference);
Practitioner fhirPractitioner = (Practitioner)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirPractitioner != null) {
if (fhirPractitioner.hasName()) {
HumanName name = fhirPractitioner.getName();
clinician = name.getText();
if (Strings.isNullOrEmpty(clinician)) {
clinician = "";
for (StringType s: name.getPrefix()) {
clinician += s.getValueNotNull();
clinician += " ";
}
for (StringType s: name.getGiven()) {
clinician += s.getValueNotNull();
clinician += " ";
}
for (StringType s: name.getFamily()) {
clinician += s.getValueNotNull();
clinician += " ";
}
clinician = clinician.trim();
}
}
}
}
}
Object[] row = new Object[12];
row[0] = serviceName;
row[1] = patientIdInt.toString();
row[2] = sdfOutput.format(date);
row[3] = episodeId;
row[4] = adtCode;
row[5] = adtType;
row[6] = cls;
row[7] = type;
row[8] = status;
row[9] = location;
row[10] = locationType;
row[11] = clinician;
List<Object[]> rows = patientRows.get(patientIdInt);
if (rows == null) {
rows = new ArrayList<>();
patientRows.put(patientIdInt, rows);
}
rows.add(row);
}
}
}
}
String[] outputColumnHeaders = new String[] {"Source", "Patient", "Date", "Episode ID", "ADT Message Code", "ADT Message Type", "Class", "Type", "Status", "Location", "Location Type", "Clinician"};
FileWriter fileWriter = new FileWriter(outputPath);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVFormat format = CSVFormat.DEFAULT
.withHeader(outputColumnHeaders)
.withQuote('"');
CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, format);
for (int i=0; i <= count; i++) {
Integer patientIdInt = new Integer(i);
List<Object[]> rows = patientRows.get(patientIdInt);
if (rows != null) {
for (Object[] row: rows) {
csvPrinter.printRecord(row);
}
}
}
csvPrinter.close();
bufferedWriter.close();
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Exporting Encounters from " + sourceCsvPath + " to " + outputPath);
}*/
/*private static void registerShutdownHook() {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
LOG.info("");
try {
Thread.sleep(5000);
} catch (Throwable ex) {
LOG.error("", ex);
}
LOG.info("Done");
}
});
}*/
private static void findEmisStartDates(String path, String outputPath) {
LOG.info("Finding EMIS Start Dates in " + path + ", writing to " + outputPath);
try {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH.mm.ss");
Map<String, Date> startDates = new HashMap<>();
Map<String, String> servers = new HashMap<>();
Map<String, String> names = new HashMap<>();
Map<String, String> odsCodes = new HashMap<>();
Map<String, String> cdbNumbers = new HashMap<>();
Map<String, Set<String>> distinctPatients = new HashMap<>();
File root = new File(path);
for (File sftpRoot: root.listFiles()) {
LOG.info("Checking " + sftpRoot);
Map<Date, File> extracts = new HashMap<>();
List<Date> extractDates = new ArrayList<>();
for (File extractRoot: sftpRoot.listFiles()) {
Date d = sdf.parse(extractRoot.getName());
//LOG.info("" + extractRoot.getName() + " -> " + d);
extracts.put(d, extractRoot);
extractDates.add(d);
}
Collections.sort(extractDates);
for (Date extractDate: extractDates) {
File extractRoot = extracts.get(extractDate);
LOG.info("Checking " + extractRoot);
//read the sharing agreements file
//e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv
File sharingAgreementsFile = null;
for (File f: extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("agreements_sharingorganisation") > -1
&& name.endsWith(".csv")) {
sharingAgreementsFile = f;
break;
}
}
if (sharingAgreementsFile == null) {
LOG.info("Null agreements file for " + extractRoot);
continue;
}
CSVParser csvParser = CSVParser.parse(sharingAgreementsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String activated = csvRecord.get("IsActivated");
String disabled = csvRecord.get("Disabled");
servers.put(orgGuid, sftpRoot.getName());
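//an org's start date is the first extract where its sharing agreement is activated and not
//disabled; if it's disabled again after starting, the recorded start date is reset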
if (activated.equalsIgnoreCase("true")) {
if (disabled.equalsIgnoreCase("false")) {
Date d = sdf.parse(extractRoot.getName());
Date existingDate = startDates.get(orgGuid);
if (existingDate == null) {
startDates.put(orgGuid, d);
}
} else {
if (startDates.containsKey(orgGuid)) {
startDates.put(orgGuid, null);
}
}
}
}
} finally {
csvParser.close();
}
//go through orgs file to get name, ods and cdb codes
File orgsFile = null;
for (File f: extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("admin_organisation_") > -1
&& name.endsWith(".csv")) {
orgsFile = f;
break;
}
}
csvParser = CSVParser.parse(orgsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String name = csvRecord.get("OrganisationName");
String odsCode = csvRecord.get("ODSCode");
String cdb = csvRecord.get("CDB");
names.put(orgGuid, name);
odsCodes.put(orgGuid, odsCode);
cdbNumbers.put(orgGuid, cdb);
}
} finally {
csvParser.close();
}
//go through patients file to get count
File patientFile = null;
for (File f: extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("admin_patient_") > -1
&& name.endsWith(".csv")) {
patientFile = f;
break;
}
}
csvParser = CSVParser.parse(patientFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String patientGuid = csvRecord.get("PatientGuid");
String deleted = csvRecord.get("Deleted");
Set<String> distinctPatientSet = distinctPatients.get(orgGuid);
if (distinctPatientSet == null) {
distinctPatientSet = new HashSet<>();
distinctPatients.put(orgGuid, distinctPatientSet);
}
if (deleted.equalsIgnoreCase("true")) {
distinctPatientSet.remove(patientGuid);
} else {
distinctPatientSet.add(patientGuid);
}
}
} finally {
csvParser.close();
}
}
}
SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd");
StringBuilder sb = new StringBuilder();
sb.append("Name,OdsCode,CDB,OrgGuid,StartDate,Server,Patients");
for (String orgGuid: startDates.keySet()) {
Date startDate = startDates.get(orgGuid);
String server = servers.get(orgGuid);
String name = names.get(orgGuid);
String odsCode = odsCodes.get(orgGuid);
String cdbNumber = cdbNumbers.get(orgGuid);
Set<String> distinctPatientSet = distinctPatients.get(orgGuid);
String startDateDesc = null;
if (startDate != null) {
startDateDesc = sdfOutput.format(startDate);
}
Long countDistinctPatients = null;
if (distinctPatientSet != null) {
countDistinctPatients = new Long(distinctPatientSet.size());
}
sb.append("\n");
sb.append("\"" + name + "\"");
sb.append(",");
sb.append("\"" + odsCode + "\"");
sb.append(",");
sb.append("\"" + cdbNumber + "\"");
sb.append(",");
sb.append("\"" + orgGuid + "\"");
sb.append(",");
sb.append(startDateDesc);
sb.append(",");
sb.append("\"" + server + "\"");
sb.append(",");
sb.append(countDistinctPatients);
}
LOG.info(sb.toString());
FileUtils.writeStringToFile(new File(outputPath), sb.toString());
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Finding Start Dates in " + path + ", writing to " + outputPath);
}
private static void findEncounterTerms(String path, String outputPath) {
LOG.info("Finding Encounter Terms from " + path);
Map<String, Long> hmResults = new HashMap<>();
//source term, source term snomed ID, source term snomed term - count
try {
File root = new File(path);
File[] files = root.listFiles();
for (File readerRoot: files) { //emis001
LOG.info("Finding terms in " + readerRoot);
//first read in all the coding files to build up our map of codes
Map<String, String> hmCodes = new HashMap<>();
for (File dateFolder: readerRoot.listFiles()) {
LOG.info("Looking for codes in " + dateFolder);
File f = findFile(dateFolder, "Coding_ClinicalCode");
if (f == null) {
LOG.error("Failed to find coding file in " + dateFolder.getAbsolutePath());
continue;
}
CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String codeId = csvRecord.get("CodeId");
String term = csvRecord.get("Term");
String snomed = csvRecord.get("SnomedCTConceptId");
hmCodes.put(codeId, snomed + ",\"" + term + "\"");
}
csvParser.close();
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
Date cutoff = dateFormat.parse("2017-01-01");
//now process the consultation files themselves
for (File dateFolder: readerRoot.listFiles()) {
LOG.info("Looking for consultations in " + dateFolder);
File f = findFile(dateFolder, "CareRecord_Consultation");
if (f == null) {
LOG.error("Failed to find consultation file in " + dateFolder.getAbsolutePath());
continue;
}
CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String term = csvRecord.get("ConsultationSourceTerm");
String codeId = csvRecord.get("ConsultationSourceCodeId");
if (Strings.isNullOrEmpty(term)
&& Strings.isNullOrEmpty(codeId)) {
continue;
}
String date = csvRecord.get("EffectiveDate");
if (Strings.isNullOrEmpty(date)) {
continue;
}
Date d = dateFormat.parse(date);
if (d.before(cutoff)) {
continue;
}
String line = "\"" + term + "\",";
if (!Strings.isNullOrEmpty(codeId)) {
String codeLookup = hmCodes.get(codeId);
if (codeLookup == null) {
LOG.error("Failed to find lookup for codeID " + codeId);
continue;
}
line += codeLookup;
} else {
line += ",";
}
Long count = hmResults.get(line);
if (count == null) {
count = new Long(1);
} else {
count = new Long(count.longValue() + 1);
}
hmResults.put(line, count);
}
csvParser.close();
}
}
//save results to file
StringBuilder output = new StringBuilder();
output.append("\"consultation term\",\"snomed concept ID\",\"snomed term\",\"count\"");
output.append("\r\n");
for (String line: hmResults.keySet()) {
Long count = hmResults.get(line);
String combined = line + "," + count;
output.append(combined);
output.append("\r\n");
}
LOG.info("FInished");
LOG.info(output.toString());
FileUtils.writeStringToFile(new File(outputPath), output.toString());
LOG.info("written output to " + outputPath);
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished finding Encounter Terms from " + path);
}
private static File findFile(File root, String token) throws Exception {
for (File f: root.listFiles()) {
String s = f.getName();
if (s.indexOf(token) > -1) {
return f;
}
}
return null;
}
/*private static void populateProtocolQueue(String serviceIdStr, String startingExchangeId) {
LOG.info("Starting Populating Protocol Queue for " + serviceIdStr);
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
if (serviceIdStr.equalsIgnoreCase("All")) {
serviceIdStr = null;
}
try {
List<Service> services = new ArrayList<>();
if (Strings.isNullOrEmpty(serviceIdStr)) {
services = serviceRepository.getAll();
} else {
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
services.add(service);
}
for (Service service: services) {
List<UUID> exchangeIds = auditRepository.getExchangeIdsForService(service.getId());
LOG.info("Found " + exchangeIds.size() + " exchangeIds for " + service.getName());
if (startingExchangeId != null) {
UUID startingExchangeUuid = UUID.fromString(startingExchangeId);
if (exchangeIds.contains(startingExchangeUuid)) {
//if in the list, remove everything up to and including the starting exchange
int index = exchangeIds.indexOf(startingExchangeUuid);
LOG.info("Found starting exchange " + startingExchangeId + " at " + index + " so removing up to this point");
for (int i=index; i>=0; i--) {
exchangeIds.remove(i);
}
startingExchangeId = null;
} else {
//if not in the list, skip all these exchanges
LOG.info("List doesn't contain starting exchange " + startingExchangeId + " so skipping");
continue;
}
}
QueueHelper.postToExchange(exchangeIds, "edsProtocol", null, true);
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Populating Protocol Queue for " + serviceIdStr);
}*/
/*private static void findDeletedOrgs() {
LOG.info("Starting finding deleted orgs");
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
List<Service> services = new ArrayList<>();
try {
for (Service service: serviceRepository.getAll()) {
services.add(service);
}
} catch (Exception ex) {
LOG.error("", ex);
}
services.sort((o1, o2) -> {
String name1 = o1.getName();
String name2 = o2.getName();
return name1.compareToIgnoreCase(name2);
});
for (Service service: services) {
try {
UUID serviceUuid = service.getId();
List<Exchange> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 1, new Date(0), new Date());
LOG.info("Service: " + service.getName() + " " + service.getLocalId());
if (exchangeByServices.isEmpty()) {
LOG.info(" no exchange found!");
continue;
}
Exchange exchangeByService = exchangeByServices.get(0);
UUID exchangeId = exchangeByService.getId();
Exchange exchange = auditRepository.getExchange(exchangeId);
Map<String, String> headers = exchange.getHeaders();
String systemUuidStr = headers.get(HeaderKeys.SenderSystemUuid);
UUID systemUuid = UUID.fromString(systemUuidStr);
int batches = countBatches(exchangeId, serviceUuid, systemUuid);
LOG.info(" Most recent exchange had " + batches + " batches");
if (batches > 1 && batches < 2000) {
continue;
}
//go back until we find the FIRST exchange where it broke
exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 250, new Date(0), new Date());
for (int i=0; i<exchangeByServices.size(); i++) {
exchangeByService = exchangeByServices.get(i);
exchangeId = exchangeByService.getId();
batches = countBatches(exchangeId, serviceUuid, systemUuid);
exchange = auditRepository.getExchange(exchangeId);
Date timestamp = exchange.getTimestamp();
if (batches < 1 || batches > 2000) {
LOG.info(" " + timestamp + " had " + batches);
}
if (batches > 1 && batches < 2000) {
LOG.info(" " + timestamp + " had " + batches);
break;
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
}
LOG.info("Finished finding deleted orgs");
}*/
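/**
 * sums the batch counts across all transform audits for the given exchange
 */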
private static int countBatches(UUID exchangeId, UUID serviceId, UUID systemId) throws Exception {
int batches = 0;
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId);
for (ExchangeTransformAudit audit: audits) {
if (audit.getNumberBatchesCreated() != null) {
batches += audit.getNumberBatchesCreated();
}
}
return batches;
}
/*private static void fixExchanges(UUID justThisService) {
LOG.info("Fixing exchanges");
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId : exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
boolean changed = false;
String body = exchange.getBody();
String[] files = body.split("\n");
if (files.length == 0) {
continue;
}
for (int i=0; i<files.length; i++) {
String original = files[i];
//remove /r characters
String trimmed = original.trim();
//add the new prefix
if (!trimmed.startsWith("sftpreader/EMIS001/")) {
trimmed = "sftpreader/EMIS001/" + trimmed;
}
if (!original.equals(trimmed)) {
files[i] = trimmed;
changed = true;
}
}
if (changed) {
LOG.info("Fixed exchange " + exchangeId);
LOG.info(body);
body = String.join("\n", files);
exchange.setBody(body);
AuditWriter.writeExchange(exchange);
}
}
}
LOG.info("Fixed exchanges");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void deleteDataForService(UUID serviceId) {
Service dbService = new ServiceRepository().getById(serviceId);
//the delete will take some time, so do the delete in a separate thread
LOG.info("Deleting all data for service " + dbService.getName() + " " + dbService.getId());
FhirDeletionService deletor = new FhirDeletionService(dbService);
try {
deletor.deleteData();
LOG.info("Completed deleting all data for service " + dbService.getName() + " " + dbService.getId());
} catch (Exception ex) {
LOG.error("Error deleting service " + dbService.getName() + " " + dbService.getId(), ex);
}
}*/
/*private static void testLogging() {
while (true) {
System.out.println("Checking logging at " + System.currentTimeMillis());
try {
Thread.sleep(4000);
} catch (Exception e) {
e.printStackTrace();
}
LOG.trace("trace logging");
LOG.debug("debug logging");
LOG.info("info logging");
LOG.warn("warn logging");
LOG.error("error logging");
}
}
*/
/*private static void fixExchangeProtocols() {
LOG.info("Fixing exchange protocols");
AuditRepository auditRepository = new AuditRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.Exchange LIMIT 1000;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
LOG.info("Processing exchange " + exchangeId);
Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceId = UUID.fromString(serviceIdStr);
List<String> newIds = new ArrayList<>();
String protocolJson = headers.get(HeaderKeys.Protocols);
if (!headers.containsKey(HeaderKeys.Protocols)) {
try {
List<LibraryItem> libraryItemList = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr);
// Get protocols where service is publisher
newIds = libraryItemList.stream()
.filter(
libraryItem -> libraryItem.getProtocol().getServiceContract().stream()
.anyMatch(sc ->
sc.getType().equals(ServiceContractType.PUBLISHER)
&& sc.getService().getUuid().equals(serviceIdStr)))
.map(t -> t.getUuid().toString())
.collect(Collectors.toList());
} catch (Exception e) {
LOG.error("Failed to find protocols for exchange " + exchange.getExchangeId(), e);
continue;
}
} else {
try {
JsonNode node = ObjectMapperPool.getInstance().readTree(protocolJson);
for (int i = 0; i < node.size(); i++) {
JsonNode libraryItemNode = node.get(i);
JsonNode idNode = libraryItemNode.get("uuid");
String id = idNode.asText();
newIds.add(id);
}
} catch (Exception e) {
LOG.error("Failed to read Json from " + protocolJson + " for exchange " + exchange.getExchangeId(), e);
continue;
}
}
try {
if (newIds.isEmpty()) {
headers.remove(HeaderKeys.Protocols);
} else {
String protocolsJson = ObjectMapperPool.getInstance().writeValueAsString(newIds.toArray());
headers.put(HeaderKeys.Protocols, protocolsJson);
}
} catch (JsonProcessingException e) {
LOG.error("Unable to serialize protocols to JSON for exchange " + exchange.getExchangeId(), e);
continue;
}
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(headerJson);
} catch (JsonProcessingException e) {
LOG.error("Failed to write exchange headers to Json for exchange " + exchange.getExchangeId(), e);
continue;
}
auditRepository.save(exchange);
}
LOG.info("Finished fixing exchange protocols");
}*/
/*private static void fixExchangeHeaders() {
LOG.info("Fixing exchange headers");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
OrganisationRepository organisationRepository = new OrganisationRepository();
List<Exchange> exchanges = new AuditRepository().getAllExchanges();
for (Exchange exchange: exchanges) {
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
if (headers.containsKey(HeaderKeys.SenderLocalIdentifier)
&& headers.containsKey(HeaderKeys.SenderOrganisationUuid)) {
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
Map<UUID, String> orgMap = service.getOrganisations();
if (orgMap.size() != 1) {
LOG.error("Wrong number of orgs in service " + serviceId + " for exchange " + exchange.getExchangeId());
continue;
}
UUID orgId = orgMap
.keySet()
.stream()
.collect(StreamExtension.firstOrNullCollector());
Organisation organisation = organisationRepository.getById(orgId);
String odsCode = organisation.getNationalId();
headers.put(HeaderKeys.SenderLocalIdentifier, odsCode);
headers.put(HeaderKeys.SenderOrganisationUuid, orgId.toString());
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
LOG.info("Creating exchange " + exchange.getExchangeId());
}
LOG.info("Finished fixing exchange headers");
}*/
/*private static void fixExchangeHeaders() {
LOG.info("Fixing exchange headers");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
OrganisationRepository organisationRepository = new OrganisationRepository();
LibraryRepository libraryRepository = new LibraryRepository();
List<Exchange> exchanges = new AuditRepository().getAllExchanges();
for (Exchange exchange: exchanges) {
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
boolean changed = false;
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
try {
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint : endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString();
ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId);
Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId());
LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
System system = libraryItem.getSystem();
for (TechnicalInterface technicalInterface : system.getTechnicalInterface()) {
if (endpointInterfaceId.equals(technicalInterface.getUuid())) {
if (!headers.containsKey(HeaderKeys.SourceSystem)) {
headers.put(HeaderKeys.SourceSystem, technicalInterface.getMessageFormat());
changed = true;
}
if (!headers.containsKey(HeaderKeys.SystemVersion)) {
headers.put(HeaderKeys.SystemVersion, technicalInterface.getMessageFormatVersion());
changed = true;
}
if (!headers.containsKey(HeaderKeys.SenderSystemUuid)) {
headers.put(HeaderKeys.SenderSystemUuid, endpointSystemId.toString());
changed = true;
}
}
}
}
} catch (Exception e) {
LOG.error("Failed to find endpoint details for " + exchange.getExchangeId());
continue;
}
if (changed) {
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
LOG.info("Fixed exchange " + exchange.getExchangeId());
}
}
LOG.info("Finished fixing exchange headers");
}*/
/*private static void testConnection(String configName) {
try {
JsonNode config = ConfigManager.getConfigurationAsJson(configName, "enterprise");
String driverClass = config.get("driverClass").asText();
String url = config.get("url").asText();
String username = config.get("username").asText();
String password = config.get("password").asText();
//force the driver to be loaded
Class.forName(driverClass);
Connection conn = DriverManager.getConnection(url, username, password);
conn.setAutoCommit(false);
LOG.info("Connection ok");
conn.close();
} catch (Exception e) {
LOG.error("", e);
}
}*/
/*private static void testConnection() {
try {
JsonNode config = ConfigManager.getConfigurationAsJson("postgres", "enterprise");
String url = config.get("url").asText();
String username = config.get("username").asText();
String password = config.get("password").asText();
//force the driver to be loaded
Class.forName("org.postgresql.Driver");
Connection conn = DriverManager.getConnection(url, username, password);
conn.setAutoCommit(false);
LOG.info("Connection ok");
conn.close();
} catch (Exception e) {
LOG.error("", e);
}
}*/
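//A hedged variant of the connection tests above using try-with-resources, so the JDBC
//Connection is always closed even if setAutoCommit throws. Sketch only; same assumptions
//as the methods above (driver already loaded, config values already read).
/*private static void testConnection(String url, String username, String password) {
try (Connection conn = DriverManager.getConnection(url, username, password)) {
conn.setAutoCommit(false);
LOG.info("Connection ok");
} catch (Exception e) {
LOG.error("", e);
}
}*/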
/*private static void startEnterpriseStream(UUID serviceId, String configName, UUID exchangeIdStartFrom, UUID batchIdStartFrom) throws Exception {
LOG.info("Starting Enterprise Streaming for " + serviceId + " using " + configName + " starting from exchange " + exchangeIdStartFrom + " and batch " + batchIdStartFrom);
LOG.info("Testing database connection");
testConnection(configName);
Service service = new ServiceRepository().getById(serviceId);
List<UUID> orgIds = new ArrayList<>(service.getOrganisations().keySet());
UUID orgId = orgIds.get(0);
List<ExchangeByService> exchangeByServiceList = new AuditRepository().getExchangesByService(serviceId, Integer.MAX_VALUE);
for (int i=exchangeByServiceList.size()-1; i>=0; i--) {
ExchangeByService exchangeByService = exchangeByServiceList.get(i);
UUID exchangeId = exchangeByService.getExchangeId();
if (exchangeIdStartFrom != null) {
if (!exchangeIdStartFrom.equals(exchangeId)) {
continue;
} else {
//once we have a match, set to null so we don't skip any subsequent ones
exchangeIdStartFrom = null;
}
}
Exchange exchange = AuditWriter.readExchange(exchangeId);
String senderOrgUuidStr = exchange.getHeader(HeaderKeys.SenderOrganisationUuid);
UUID senderOrgUuid = UUID.fromString(senderOrgUuidStr);
//this one had 90,000 batches and doesn't need doing again
if (exchangeId.equals(UUID.fromString("b9b93be0-afd8-11e6-8c16-c1d5a00342f3"))) {
LOG.info("Skipping exchange " + exchangeId);
continue;
}
List<ExchangeBatch> exchangeBatches = new ExchangeBatchRepository().retrieveForExchangeId(exchangeId);
LOG.info("Processing exchange " + exchangeId + " with " + exchangeBatches.size() + " batches");
for (int j=0; j<exchangeBatches.size(); j++) {
ExchangeBatch exchangeBatch = exchangeBatches.get(j);
UUID batchId = exchangeBatch.getBatchId();
if (batchIdStartFrom != null) {
if (!batchIdStartFrom.equals(batchId)) {
continue;
} else {
batchIdStartFrom = null;
}
}
LOG.info("Processing exchange " + exchangeId + " and batch " + batchId + " " + (j+1) + "/" + exchangeBatches.size());
try {
String outbound = FhirToEnterpriseCsvTransformer.transformFromFhir(senderOrgUuid, batchId, null);
if (!Strings.isNullOrEmpty(outbound)) {
EnterpriseFiler.file(outbound, configName);
}
} catch (Exception ex) {
throw new PipelineException("Failed to process exchange " + exchangeId + " and batch " + batchId, ex);
}
}
}
}*/
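//startEnterpriseStream resumes by skipping entries until it reaches exchangeIdStartFrom or
//batchIdStartFrom and then nulling the marker so nothing later is skipped. A hedged, generic
//sketch of that resume check (hypothetical helper; AtomicReference is just a mutable holder):
/*private static <T> boolean shouldSkip(T current, java.util.concurrent.atomic.AtomicReference<T> startFrom) {
if (startFrom.get() == null) {
return false; //no resume point, or we already passed it
}
if (startFrom.get().equals(current)) {
startFrom.set(null); //found the resume point; process it and everything after
return false;
}
return true; //still before the resume point
}*/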
/*private static void fixMissingExchanges() {
LOG.info("Fixing missing exchanges");
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id, batch_id, inserted_at FROM ehr.exchange_batch LIMIT 600000;");
stmt.setFetchSize(100);
Set<UUID> exchangeIdsDone = new HashSet<>();
AuditRepository auditRepository = new AuditRepository();
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
UUID batchId = row.get(1, UUID.class);
Date date = row.getTimestamp(2);
//LOG.info("Exchange " + exchangeId + " batch " + batchId + " date " + date);
if (exchangeIdsDone.contains(exchangeId)) {
continue;
}
if (auditRepository.getExchange(exchangeId) != null) {
continue;
}
UUID serviceId = findServiceId(batchId, session);
if (serviceId == null) {
continue;
}
Exchange exchange = new Exchange();
ExchangeByService exchangeByService = new ExchangeByService();
ExchangeEvent exchangeEvent = new ExchangeEvent();
Map<String, String> headers = new HashMap<>();
headers.put(HeaderKeys.SenderServiceUuid, serviceId.toString());
String headersJson = null;
try {
headersJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setBody("Body not available, as exchange re-created");
exchange.setExchangeId(exchangeId);
exchange.setHeaders(headersJson);
exchange.setTimestamp(date);
exchangeByService.setExchangeId(exchangeId);
exchangeByService.setServiceId(serviceId);
exchangeByService.setTimestamp(date);
exchangeEvent.setEventDesc("Created_By_Conversion");
exchangeEvent.setExchangeId(exchangeId);
exchangeEvent.setTimestamp(new Date());
auditRepository.save(exchange);
auditRepository.save(exchangeEvent);
auditRepository.save(exchangeByService);
exchangeIdsDone.add(exchangeId);
LOG.info("Creating exchange " + exchangeId);
}
LOG.info("Finished exchange fix");
}
private static UUID findServiceId(UUID batchId, Session session) {
Statement stmt = new SimpleStatement("select resource_type, resource_id from ehr.resource_by_exchange_batch where batch_id = " + batchId + " LIMIT 1;");
ResultSet rs = session.execute(stmt);
if (rs.isExhausted()) {
LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId);
return null;
}
Row row = rs.one();
String resourceType = row.getString(0);
UUID resourceId = row.get(1, UUID.class);
stmt = new SimpleStatement("select service_id from ehr.resource_history where resource_type = '" + resourceType + "' and resource_id = " + resourceId + " LIMIT 1;");
rs = session.execute(stmt);
if (rs.isExhausted()) {
LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId);
return null;
}
row = rs.one();
UUID serviceId = row.get(0, UUID.class);
return serviceId;
}*/
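//findServiceId above builds its CQL by string concatenation. A hedged alternative using
//prepared/bound statements (standard DataStax driver API; same ehr schema assumed). Sketch only:
/*private static UUID findServiceIdPrepared(UUID batchId, Session session) {
PreparedStatement byBatch = session.prepare("SELECT resource_type, resource_id FROM ehr.resource_by_exchange_batch WHERE batch_id = ? LIMIT 1;");
Row row = session.execute(byBatch.bind(batchId)).one();
if (row == null) {
LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId);
return null;
}
String resourceType = row.getString(0);
UUID resourceId = row.get(1, UUID.class);
PreparedStatement byHistory = session.prepare("SELECT service_id FROM ehr.resource_history WHERE resource_type = ? AND resource_id = ? LIMIT 1;");
row = session.execute(byHistory.bind(resourceType, resourceId)).one();
if (row == null) {
LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId);
return null;
}
return row.get(0, UUID.class);
}*/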
/*private static void fixExchangeEvents() {
List<ExchangeEvent> events = new AuditRepository().getAllExchangeEvents();
for (ExchangeEvent event: events) {
if (event.getEventDesc() != null) {
continue;
}
String eventDesc = "";
int eventType = event.getEvent().intValue();
switch (eventType) {
case 1:
eventDesc = "Receive";
break;
case 2:
eventDesc = "Validate";
break;
case 3:
eventDesc = "Transform_Start";
break;
case 4:
eventDesc = "Transform_End";
break;
case 5:
eventDesc = "Send";
break;
default:
eventDesc = "??? " + eventType;
}
event.setEventDesc(eventDesc);
new AuditRepository().save(null, event);
}
}*/
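//The switch in fixExchangeEvents maps audit event codes to descriptions one case at a time.
//The same mapping as a lookup table (codes 1-5 copied from the switch above; sketch only):
/*private static final Map<Integer, String> EVENT_DESCS = new HashMap<>();
static {
EVENT_DESCS.put(1, "Receive");
EVENT_DESCS.put(2, "Validate");
EVENT_DESCS.put(3, "Transform_Start");
EVENT_DESCS.put(4, "Transform_End");
EVENT_DESCS.put(5, "Send");
}
private static String describeEvent(int eventType) {
return EVENT_DESCS.getOrDefault(eventType, "??? " + eventType);
}*/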
/*private static void fixExchanges() {
AuditRepository auditRepository = new AuditRepository();
Map<UUID, Set<UUID>> existingOnes = new HashMap<>();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
List<Exchange> exchanges = auditRepository.getAllExchanges();
for (Exchange exchange: exchanges) {
UUID exchangeUuid = exchange.getExchangeId();
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeUuid + " and Json " + headerJson);
continue;
}
String serviceId = headers.get(HeaderKeys.SenderServiceUuid);
if (serviceId == null) {
LOG.warn("No service ID found for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceUuid = UUID.fromString(serviceId);
Set<UUID> exchangeIdsDone = existingOnes.get(serviceUuid);
if (exchangeIdsDone == null) {
exchangeIdsDone = new HashSet<>();
List<ExchangeByService> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, Integer.MAX_VALUE);
for (ExchangeByService exchangeByService: exchangeByServices) {
exchangeIdsDone.add(exchangeByService.getExchangeId());
}
existingOnes.put(serviceUuid, exchangeIdsDone);
}
//create the exchange by service entity
if (!exchangeIdsDone.contains(exchangeUuid)) {
Date timestamp = exchange.getTimestamp();
ExchangeByService newOne = new ExchangeByService();
newOne.setExchangeId(exchangeUuid);
newOne.setServiceId(serviceUuid);
newOne.setTimestamp(timestamp);
auditRepository.save(newOne);
try {
headers.remove(HeaderKeys.BatchIdsJson);
String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(newHeaderJson);
auditRepository.save(exchange);
} catch (JsonProcessingException e) {
LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e);
}
if (!headers.containsKey(HeaderKeys.BatchIdsJson)) {
//fix the batch IDs not being in the exchange
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeUuid);
if (!batches.isEmpty()) {
List<UUID> batchUuids = batches
.stream()
.map(t -> t.getBatchId())
.collect(Collectors.toList());
try {
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchUuids.toArray());
headers.put(HeaderKeys.BatchIdsJson, batchUuidsStr);
String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(newHeaderJson);
auditRepository.save(exchange, null);
} catch (JsonProcessingException e) {
LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e);
}
}
}
}
}
}*/
/*private static UUID findSystemId(Service service, String software, String messageVersion) throws PipelineException {
List<JsonServiceInterfaceEndpoint> endpoints = null;
try {
endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString();
LibraryRepository libraryRepository = new LibraryRepository();
ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId);
Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId());
LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
System system = libraryItem.getSystem();
for (TechnicalInterface technicalInterface: system.getTechnicalInterface()) {
if (endpointInterfaceId.equals(technicalInterface.getUuid())
&& technicalInterface.getMessageFormat().equalsIgnoreCase(software)
&& technicalInterface.getMessageFormatVersion().equalsIgnoreCase(messageVersion)) {
return endpointSystemId;
}
}
}
} catch (Exception e) {
throw new PipelineException("Failed to process endpoints from service " + service.getId());
}
return null;
}
*/
/*private static void addSystemIdToExchangeHeaders() throws Exception {
LOG.info("populateExchangeBatchPatients");
AuditRepository auditRepository = new AuditRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
ServiceRepository serviceRepository = new ServiceRepository();
//OrganisationRepository organisationRepository = new OrganisationRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson);
continue;
}
if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))) {
LOG.info("Skipping exchange " + exchangeId + " as no service UUID");
continue;
}
if (!Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) {
LOG.info("Skipping exchange " + exchangeId + " as already got system UUID");
continue;
}
try {
//work out service ID
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
UUID serviceId = UUID.fromString(serviceIdStr);
String software = headers.get(HeaderKeys.SourceSystem);
String version = headers.get(HeaderKeys.SystemVersion);
Service service = serviceRepository.getById(serviceId);
UUID systemUuid = findSystemId(service, software, version);
if (systemUuid == null) {
LOG.error("Failed to find system ID for exchange " + exchangeId);
continue;
}
headers.put(HeaderKeys.SenderSystemUuid, systemUuid.toString());
//work out protocol IDs
try {
String newProtocolIdsJson = DetermineRelevantProtocolIds.getProtocolIdsForPublisherService(serviceIdStr);
headers.put(HeaderKeys.ProtocolIds, newProtocolIdsJson);
} catch (Exception ex) {
LOG.error("Failed to recalculate protocols for " + exchangeId + ": " + ex.getMessage());
}
//save to DB
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
} catch (Exception ex) {
LOG.error("Error with exchange " + exchangeId, ex);
}
}
LOG.info("Finished populateExchangeBatchPatients");
}*/
/*private static void populateExchangeBatchPatients() throws Exception {
LOG.info("populateExchangeBatchPatients");
AuditRepository auditRepository = new AuditRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
//ServiceRepository serviceRepository = new ServiceRepository();
//OrganisationRepository organisationRepository = new OrganisationRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson);
continue;
}
if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))
|| Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) {
LOG.info("Skipping exchange " + exchangeId + " because no service or system in header");
continue;
}
try {
UUID serviceId = UUID.fromString(headers.get(HeaderKeys.SenderServiceUuid));
UUID systemId = UUID.fromString(headers.get(HeaderKeys.SenderSystemUuid));
List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch : exchangeBatches) {
if (exchangeBatch.getEdsPatientId() != null) {
continue;
}
UUID batchId = exchangeBatch.getBatchId();
List<ResourceByExchangeBatch> resourceWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Patient.toString());
if (resourceWrappers.isEmpty()) {
continue;
}
List<UUID> patientIds = new ArrayList<>();
for (ResourceByExchangeBatch resourceWrapper : resourceWrappers) {
UUID patientId = resourceWrapper.getResourceId();
if (resourceWrapper.getIsDeleted()) {
deleteEntirePatientRecord(patientId, serviceId, systemId, exchangeId, batchId);
}
if (!patientIds.contains(patientId)) {
patientIds.add(patientId);
}
}
if (patientIds.size() != 1) {
LOG.info("Skipping exchange " + exchangeId + " and batch " + batchId + " because found " + patientIds.size() + " patient IDs");
continue;
}
UUID patientId = patientIds.get(0);
exchangeBatch.setEdsPatientId(patientId);
exchangeBatchRepository.save(exchangeBatch);
}
} catch (Exception ex) {
LOG.error("Error with exchange " + exchangeId, ex);
}
}
LOG.info("Finished populateExchangeBatchPatients");
}
private static void deleteEntirePatientRecord(UUID patientId, UUID serviceId, UUID systemId, UUID exchangeId, UUID batchId) throws Exception {
FhirStorageService storageService = new FhirStorageService(serviceId, systemId);
ResourceRepository resourceRepository = new ResourceRepository();
List<ResourceByPatient> resourceWrappers = resourceRepository.getResourcesByPatient(serviceId, systemId, patientId);
for (ResourceByPatient resourceWrapper: resourceWrappers) {
String json = resourceWrapper.getResourceData();
Resource resource = new JsonParser().parse(json);
storageService.exchangeBatchDelete(exchangeId, batchId, resource);
}
}*/
/*private static void convertPatientSearch() {
LOG.info("Converting Patient Search");
ResourceRepository resourceRepository = new ResourceRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
LOG.info("Doing service " + service.getName());
for (UUID systemId : findSystemIds(service)) {
List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.EpisodeOfCare.toString());
for (ResourceByService resourceWrapper: resourceWrappers) {
if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
continue;
}
try {
EpisodeOfCare episodeOfCare = (EpisodeOfCare) new JsonParser().parse(resourceWrapper.getResourceData());
String patientId = ReferenceHelper.getReferenceId(episodeOfCare.getPatient());
ResourceHistory patientWrapper = resourceRepository.getCurrentVersion(ResourceType.Patient.toString(), UUID.fromString(patientId));
if (Strings.isNullOrEmpty(patientWrapper.getResourceData())) {
continue;
}
Patient patient = (Patient) new JsonParser().parse(patientWrapper.getResourceData());
PatientSearchHelper.update(serviceId, systemId, patient);
PatientSearchHelper.update(serviceId, systemId, episodeOfCare);
} catch (Exception ex) {
LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
}
}
}
}
LOG.info("Converted Patient Search");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
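/**
* Returns the system UUID of every endpoint configured on the given service, by parsing the
* service's endpoint JSON. Used by the fixers in this file to iterate service/system pairs.
*/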
private static List<UUID> findSystemIds(Service service) throws Exception {
List<UUID> ret = new ArrayList<>();
List<JsonServiceInterfaceEndpoint> endpoints = null;
try {
endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
ret.add(endpointSystemId);
}
} catch (Exception e) {
throw new Exception("Failed to process endpoints from service " + service.getId());
}
return ret;
}
/*private static void convertPatientLink() {
LOG.info("Converting Patient Link");
ResourceRepository resourceRepository = new ResourceRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
LOG.info("Doing service " + service.getName());
for (UUID systemId : findSystemIds(service)) {
List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.Patient.toString());
for (ResourceByService resourceWrapper: resourceWrappers) {
if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
continue;
}
try {
Patient patient = (Patient)new JsonParser().parse(resourceWrapper.getResourceData());
PatientLinkHelper.updatePersonId(patient);
} catch (Exception ex) {
LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
}
}
}
}
LOG.info("Converted Patient Link");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixConfidentialPatients(String sharedStoragePath, UUID justThisService) {
LOG.info("Fixing Confidential Patients using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager();
Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class);
Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class);
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
Map<String, ResourceHistory> resourcesFixed = new HashMap<>();
Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Set<UUID> batchIdsToPutInProtocolQueue = new HashSet<>();
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f);
EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId);
ResourceFiler filer = new ResourceFiler(exchangeId, serviceId, systemId, null, null, 1);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers);
ProblemPreTransformer.transform(version, parsers, filer, helper);
ObservationPreTransformer.transform(version, parsers, filer, helper);
DrugRecordPreTransformer.transform(version, parsers, filer, helper);
IssueRecordPreTransformer.transform(version, parsers, filer, helper);
DiaryPreTransformer.transform(version, parsers, filer, helper);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient)parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getIsConfidential()
&& !patientParser.getDeleted()) {
PatientTransformer.createResource(patientParser, filer, helper, version);
}
}
patientParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class);
while (consultationParser.nextRecord()) {
if (consultationParser.getIsConfidential()
&& !consultationParser.getDeleted()) {
ConsultationTransformer.createResource(consultationParser, filer, helper, version);
}
}
consultationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
if (observationParser.getIsConfidential()
&& !observationParser.getDeleted()) {
ObservationTransformer.createResource(observationParser, filer, helper, version);
}
}
observationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class);
while (diaryParser.nextRecord()) {
if (diaryParser.getIsConfidential()
&& !diaryParser.getDeleted()) {
DiaryTransformer.createResource(diaryParser, filer, helper, version);
}
}
diaryParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class);
while (drugRecordParser.nextRecord()) {
if (drugRecordParser.getIsConfidential()
&& !drugRecordParser.getDeleted()) {
DrugRecordTransformer.createResource(drugRecordParser, filer, helper, version);
}
}
drugRecordParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class);
while (issueRecordParser.nextRecord()) {
if (issueRecordParser.getIsConfidential()
&& !issueRecordParser.getDeleted()) {
IssueRecordTransformer.createResource(issueRecordParser, filer, helper, version);
}
}
issueRecordParser.close();
filer.waitToFinish(); //just to close the thread pool, even though it's not been used
List<Resource> resources = filer.getNewResources();
for (Resource resource: resources) {
String patientId = IdHelper.getPatientId(resource);
UUID edsPatientId = UUID.fromString(patientId);
ResourceType resourceType = resource.getResourceType();
UUID resourceId = UUID.fromString(resource.getId());
boolean foundResourceInDbBatch = false;
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds != null) {
for (UUID batchId : batchIds) {
List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), resourceId);
if (resourceByExchangeBatches.isEmpty()) {
//if we've deleted data, this list will be empty
continue;
}
foundResourceInDbBatch = true;
for (ResourceByExchangeBatch resourceByExchangeBatch : resourceByExchangeBatches) {
String json = resourceByExchangeBatch.getResourceData();
if (!Strings.isNullOrEmpty(json)) {
LOG.warn("JSON already in resource " + resourceType + " " + resourceId);
} else {
json = parserPool.composeString(resource);
resourceByExchangeBatch.setResourceData(json);
resourceByExchangeBatch.setIsDeleted(false);
resourceByExchangeBatch.setSchemaVersion("0.1");
LOG.info("Saved resource by batch " + resourceType + " " + resourceId + " in batch " + batchId);
UUID versionUuid = resourceByExchangeBatch.getVersion();
ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(resourceId, resourceType.toString(), versionUuid);
if (resourceHistory == null) {
throw new Exception("Failed to find resource history for " + resourceType + " " + resourceId + " and version " + versionUuid);
}
resourceHistory.setIsDeleted(false);
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
resourceHistory.setSchemaVersion("0.1");
resourceRepository.save(resourceByExchangeBatch);
resourceRepository.save(resourceHistory);
batchIdsToPutInProtocolQueue.add(batchId);
String key = resourceType.toString() + ":" + resourceId;
resourcesFixed.put(key, resourceHistory);
}
//if a patient became confidential, we will have deleted all resources for that
//patient, so we need to undo that too
//to undelete WHOLE patient record
//1. if THIS resource is a patient
//2. get all other deletes from the same exchange batch
//3. delete those from resource_by_exchange_batch (the deleted ones only)
//4. delete same ones from resource_history
//5. retrieve most recent resource_history
//6. if not deleted, add to resources fixed
if (resourceType == ResourceType.Patient) {
List<ResourceByExchangeBatch> resourcesInSameBatch = resourceRepository.getResourcesForBatch(batchId);
LOG.info("Undeleting " + resourcesInSameBatch.size() + " resources for batch " + batchId);
for (ResourceByExchangeBatch resourceInSameBatch: resourcesInSameBatch) {
if (!resourceInSameBatch.getIsDeleted()) {
continue;
}
//patient and episode resources will be restored by the above stuff, so don't try
//to do it again
if (resourceInSameBatch.getResourceType().equals(ResourceType.Patient.toString())
|| resourceInSameBatch.getResourceType().equals(ResourceType.EpisodeOfCare.toString())) {
continue;
}
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(resourceInSameBatch.getResourceId(), resourceInSameBatch.getResourceType(), resourceInSameBatch.getVersion());
mapperResourceByExchangeBatch.delete(resourceInSameBatch);
mapperResourceHistory.delete(deletedResourceHistory);
batchIdsToPutInProtocolQueue.add(batchId);
//check the most recent version of our resource, and if it's not deleted, add to the list to update the resource_by_service table
ResourceHistory mostRecentDeletedResourceHistory = resourceRepository.getCurrentVersion(resourceInSameBatch.getResourceType(), resourceInSameBatch.getResourceId());
if (mostRecentDeletedResourceHistory != null
&& !mostRecentDeletedResourceHistory.getIsDeleted()) {
String key2 = mostRecentDeletedResourceHistory.getResourceType().toString() + ":" + mostRecentDeletedResourceHistory.getResourceId();
resourcesFixed.put(key2, mostRecentDeletedResourceHistory);
}
}
}
}
}
}
//if we didn't find records in the DB to update, then
if (!foundResourceInDbBatch) {
//we can't generate a back-dated time UUID, but we need one so the resource_history
//table is in order. To get a suitable time UUID, we just pull out the first exchange batch for our exchange,
//and the batch ID is actually a time UUID that was allocated around the right time
ExchangeBatch firstBatch = exchangeBatchRepository.retrieveFirstForExchangeId(exchangeId);
//if there was no batch for the exchange, then the exchange wasn't processed at all. So skip this exchange
//and we'll pick up the same patient data in a following exchange
if (firstBatch == null) {
continue;
}
UUID versionUuid = firstBatch.getBatchId();
//find suitable batch ID
UUID batchId = null;
if (batchIds != null
&& batchIds.size() > 0) {
batchId = batchIds.get(batchIds.size()-1);
} else {
//create new batch ID if not found
ExchangeBatch exchangeBatch = new ExchangeBatch();
exchangeBatch.setBatchId(UUIDs.timeBased());
exchangeBatch.setExchangeId(exchangeId);
exchangeBatch.setInsertedAt(new Date());
exchangeBatch.setEdsPatientId(edsPatientId);
exchangeBatchRepository.save(exchangeBatch);
batchId = exchangeBatch.getBatchId();
//add to map for next resource
if (batchIds == null) {
batchIds = new ArrayList<>();
}
batchIds.add(batchId);
batchesPerPatient.put(edsPatientId, batchIds);
}
String json = parserPool.composeString(resource);
ResourceHistory resourceHistory = new ResourceHistory();
resourceHistory.setResourceId(resourceId);
resourceHistory.setResourceType(resourceType.toString());
resourceHistory.setVersion(versionUuid);
resourceHistory.setCreatedAt(new Date());
resourceHistory.setServiceId(serviceId);
resourceHistory.setSystemId(systemId);
resourceHistory.setIsDeleted(false);
resourceHistory.setSchemaVersion("0.1");
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
ResourceByExchangeBatch resourceByExchangeBatch = new ResourceByExchangeBatch();
resourceByExchangeBatch.setBatchId(batchId);
resourceByExchangeBatch.setExchangeId(exchangeId);
resourceByExchangeBatch.setResourceType(resourceType.toString());
resourceByExchangeBatch.setResourceId(resourceId);
resourceByExchangeBatch.setVersion(versionUuid);
resourceByExchangeBatch.setIsDeleted(false);
resourceByExchangeBatch.setSchemaVersion("0.1");
resourceByExchangeBatch.setResourceData(json);
resourceRepository.save(resourceHistory);
resourceRepository.save(resourceByExchangeBatch);
batchIdsToPutInProtocolQueue.add(batchId);
}
}
if (!batchIdsToPutInProtocolQueue.isEmpty()) {
exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchIdsToPutInProtocolQueue);
}
}
//update the resource_by_service table (and the resource_by_patient view)
for (ResourceHistory resourceHistory: resourcesFixed.values()) {
UUID latestVersionUpdatedUuid = resourceHistory.getVersion();
ResourceHistory latestVersion = resourceRepository.getCurrentVersion(resourceHistory.getResourceType(), resourceHistory.getResourceId());
UUID latestVersionUuid = latestVersion.getVersion();
//if there have been subsequent updates to the resource, then skip it
if (!latestVersionUuid.equals(latestVersionUpdatedUuid)) {
continue;
}
Resource resource = parserPool.parse(resourceHistory.getResourceData());
ResourceMetadata metadata = MetadataFactory.createMetadata(resource);
UUID patientId = ((PatientCompartment)metadata).getPatientId();
ResourceByService resourceByService = new ResourceByService();
resourceByService.setServiceId(resourceHistory.getServiceId());
resourceByService.setSystemId(resourceHistory.getSystemId());
resourceByService.setResourceType(resourceHistory.getResourceType());
resourceByService.setResourceId(resourceHistory.getResourceId());
resourceByService.setCurrentVersion(resourceHistory.getVersion());
resourceByService.setUpdatedAt(resourceHistory.getCreatedAt());
resourceByService.setPatientId(patientId);
resourceByService.setSchemaVersion(resourceHistory.getSchemaVersion());
resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata));
resourceByService.setResourceData(resourceHistory.getResourceData());
resourceRepository.save(resourceByService);
//call out to our patient search and person matching services
if (resource instanceof Patient) {
PatientLinkHelper.updatePersonId((Patient)resource);
PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (Patient)resource);
} else if (resource instanceof EpisodeOfCare) {
PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (EpisodeOfCare)resource);
}
}
if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) {
//find the config for our protocol queue
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) {
Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
}
}
LOG.info("Finished Fixing Confidential Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
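//fixConfidentialPatients locates the PostMessageToExchangeConfig for the "EdsProtocol"
//exchange inside the inbound queue reader pipeline. A hedged sketch of that lookup as a
//standalone helper (hypothetical; assumes the same config classes used above):
/*private static PostMessageToExchangeConfig findExchangeConfig(String exchangeName) throws Exception {
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
return configuration.getPipeline()
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase(exchangeName))
.collect(StreamExtension.singleOrNullCollector());
}*/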
/*private static void fixDeletedAppointments(String sharedStoragePath, boolean saveChanges, UUID justThisService) {
LOG.info("Fixing Deleted Appointments using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager();
Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class);
Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class);
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class, dir, version, true, parsers);
//find any deleted patients
List<UUID> deletedPatientUuids = new ArrayList<>();
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getDeleted()) {
//find the EDS patient ID for this local guid
String patientGuid = patientParser.getPatientGuid();
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid);
}
deletedPatientUuids.add(edsPatientId);
}
}
patientParser.close();
//go through the appts file to find properly deleted appt GUIDS
List<UUID> deletedApptUuids = new ArrayList<>();
org.endeavourhealth.transform.emis.csv.schema.appointment.Slot apptParser = (org.endeavourhealth.transform.emis.csv.schema.appointment.Slot) parsers.get(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class);
while (apptParser.nextRecord()) {
if (apptParser.getDeleted()) {
String patientGuid = apptParser.getPatientGuid();
String slotGuid = apptParser.getSlotGuid();
if (!Strings.isNullOrEmpty(patientGuid)) {
String uniqueLocalId = EmisCsvHelper.createUniqueId(patientGuid, slotGuid);
UUID edsApptId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Appointment, uniqueLocalId);
deletedApptUuids.add(edsApptId);
}
}
}
apptParser.close();
for (UUID edsPatientId : deletedPatientUuids) {
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds == null) {
//if there are no batches for this patient, we'll be handling this data in another exchange
continue;
}
for (UUID batchId : batchIds) {
List<ResourceByExchangeBatch> apptWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Appointment.toString());
for (ResourceByExchangeBatch apptWrapper : apptWrappers) {
//ignore non-deleted appts
if (!apptWrapper.getIsDeleted()) {
continue;
}
//if the appt was deleted legitimately, then skip it
UUID apptId = apptWrapper.getResourceId();
if (deletedApptUuids.contains(apptId)) {
continue;
}
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(apptWrapper.getResourceId(), apptWrapper.getResourceType(), apptWrapper.getVersion());
if (saveChanges) {
mapperResourceByExchangeBatch.delete(apptWrapper);
mapperResourceHistory.delete(deletedResourceHistory);
}
LOG.info("Un-deleted " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " in batch " + batchId + " patient " + edsPatientId);
//now get the most recent instance of the appointment, and if it's NOT deleted, insert into the resource_by_service table
ResourceHistory mostRecentResourceHistory = resourceRepository.getCurrentVersion(apptWrapper.getResourceType(), apptWrapper.getResourceId());
if (mostRecentResourceHistory != null
&& !mostRecentResourceHistory.getIsDeleted()) {
Resource resource = parserPool.parse(mostRecentResourceHistory.getResourceData());
ResourceMetadata metadata = MetadataFactory.createMetadata(resource);
UUID patientId = ((PatientCompartment) metadata).getPatientId();
ResourceByService resourceByService = new ResourceByService();
resourceByService.setServiceId(mostRecentResourceHistory.getServiceId());
resourceByService.setSystemId(mostRecentResourceHistory.getSystemId());
resourceByService.setResourceType(mostRecentResourceHistory.getResourceType());
resourceByService.setResourceId(mostRecentResourceHistory.getResourceId());
resourceByService.setCurrentVersion(mostRecentResourceHistory.getVersion());
resourceByService.setUpdatedAt(mostRecentResourceHistory.getCreatedAt());
resourceByService.setPatientId(patientId);
resourceByService.setSchemaVersion(mostRecentResourceHistory.getSchemaVersion());
resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata));
resourceByService.setResourceData(mostRecentResourceHistory.getResourceData());
if (saveChanges) {
resourceRepository.save(resourceByService);
}
LOG.info("Restored " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " to resource_by_service table");
}
}
}
}
}
}
LOG.info("Finished Deleted Appointments Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixReviews(String sharedStoragePath, UUID justThisService) {
LOG.info("Fixing Reviews using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
Map<String, Long> problemCodes = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
LOG.info("Doing Emis CSV exchange " + exchangeId + " with " + batches.size() + " batches");
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem problemParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (problemParser.nextRecord()) {
String patientGuid = problemParser.getPatientGuid();
String observationGuid = problemParser.getObservationGuid();
String key = patientGuid + ":" + observationGuid;
if (!problemCodes.containsKey(key)) {
problemCodes.put(key, null);
}
}
problemParser.close();
while (observationParser.nextRecord()) {
String patientGuid = observationParser.getPatientGuid();
String observationGuid = observationParser.getObservationGuid();
String key = patientGuid + ":" + observationGuid;
if (problemCodes.containsKey(key)) {
Long codeId = observationParser.getCodeId();
if (codeId == null) {
continue;
}
problemCodes.put(key, codeId);
}
}
observationParser.close();
LOG.info("Found " + problemCodes.size() + " problem codes so far");
//the observation parser is now exhausted, so re-open it before the second pass below
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f);
EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId);
while (observationParser.nextRecord()) {
String problemGuid = observationParser.getProblemGuid();
if (!Strings.isNullOrEmpty(problemGuid)) {
String patientGuid = observationParser.getPatientGuid();
Long codeId = observationParser.getCodeId();
if (codeId == null) {
continue;
}
String key = patientGuid + ":" + problemGuid;
Long problemCodeId = problemCodes.get(key);
if (problemCodeId == null
|| problemCodeId.longValue() != codeId.longValue()) {
continue;
}
//if here, our code is the same as the problem, so it's a review
String locallyUniqueId = patientGuid + ":" + observationParser.getObservationGuid();
ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, helper);
for (UUID systemId: systemIds) {
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid);
}
UUID edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId);
if (edsObservationId == null) {
//try observations as diagnostic reports, because it could be one of those instead
if (resourceType == ResourceType.Observation) {
resourceType = ResourceType.DiagnosticReport;
edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId);
}
if (edsObservationId == null) {
throw new Exception("Failed to find observation ID for service " + serviceId + " system " + systemId + " resourceType " + resourceType + " local ID " + locallyUniqueId);
}
}
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds == null) {
//if there are no batches for this patient, we'll be handling this data in another exchange
continue;
//throw new Exception("Failed to find batch ID for patient " + edsPatientId + " in exchange " + exchangeId + " for resource " + resourceType + " " + edsObservationId);
}
for (UUID batchId: batchIds) {
List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), edsObservationId);
if (resourceByExchangeBatches.isEmpty()) {
//if we've deleted data, this list will be empty
continue;
//throw new Exception("No resources found for batch " + batchId + " resource type " + resourceType + " and resource id " + edsObservationId);
}
for (ResourceByExchangeBatch resourceByExchangeBatch: resourceByExchangeBatches) {
String json = resourceByExchangeBatch.getResourceData();
if (Strings.isNullOrEmpty(json)) {
throw new Exception("No JSON in resource " + resourceType + " " + edsObservationId + " in batch " + batchId);
}
Resource resource = parserPool.parse(json);
if (addReviewExtension((DomainResource)resource)) {
json = parserPool.composeString(resource);
resourceByExchangeBatch.setResourceData(json);
LOG.info("Changed " + resourceType + " " + edsObservationId + " to have extension in batch " + batchId);
resourceRepository.save(resourceByExchangeBatch);
UUID versionUuid = resourceByExchangeBatch.getVersion();
ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(edsObservationId, resourceType.toString(), versionUuid);
if (resourceHistory == null) {
throw new Exception("Failed to find resource history for " + resourceType + " " + edsObservationId + " and version " + versionUuid);
}
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
resourceRepository.save(resourceHistory);
ResourceByService resourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType.toString(), edsObservationId);
if (resourceByService != null) {
UUID serviceVersionUuid = resourceByService.getCurrentVersion();
if (serviceVersionUuid.equals(versionUuid)) {
resourceByService.setResourceData(json);
resourceRepository.save(resourceByService);
}
}
} else {
LOG.info("" + resourceType + " " + edsObservationId + " already has extension");
}
}
}
}
//1. find out the resource type it was originally saved as
//2. retrieve from resource_by_exchange_batch
//3. update resource in resource_by_exchange_batch
//4. retrieve from resource_history
//5. update resource_history
//6. retrieve record from resource_by_service
//7. if resource_by_service version UUID matches the resource_history updated, then update that too
}
}
observationParser.close();
}
}
LOG.info("Finished Fixing Reviews");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static boolean addReviewExtension(DomainResource resource) {
if (ExtensionConverter.hasExtension(resource, FhirExtensionUri.IS_REVIEW)) {
return false;
}
Extension extension = ExtensionConverter.createExtension(FhirExtensionUri.IS_REVIEW, new BooleanType(true));
resource.addExtension(extension);
return true;
}*/
/*private static void runProtocolsForConfidentialPatients(String sharedStoragePath, UUID justThisService) {
LOG.info("Running Protocols for Confidential Patients using path " + sharedStoragePath + " and service " + justThisService);
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
//once we match the service, set this to null so all subsequent services are done
justThisService = null;
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
List<String> interestingPatientGuids = new ArrayList<>();
Map<UUID, Map<UUID, List<UUID>>> batchesPerPatientPerExchange = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
batchesPerPatientPerExchange.put(exchangeId, batchesPerPatient);
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getIsConfidential() || patientParser.getDeleted()) {
interestingPatientGuids.add(patientParser.getPatientGuid());
}
}
patientParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class);
while (consultationParser.nextRecord()) {
if (consultationParser.getIsConfidential()
&& !consultationParser.getDeleted()) {
interestingPatientGuids.add(consultationParser.getPatientGuid());
}
}
consultationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
if (observationParser.getIsConfidential()
&& !observationParser.getDeleted()) {
interestingPatientGuids.add(observationParser.getPatientGuid());
}
}
observationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class);
while (diaryParser.nextRecord()) {
if (diaryParser.getIsConfidential()
&& !diaryParser.getDeleted()) {
interestingPatientGuids.add(diaryParser.getPatientGuid());
}
}
diaryParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class);
while (drugRecordParser.nextRecord()) {
if (drugRecordParser.getIsConfidential()
&& !drugRecordParser.getDeleted()) {
interestingPatientGuids.add(drugRecordParser.getPatientGuid());
}
}
drugRecordParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class);
while (issueRecordParser.nextRecord()) {
if (issueRecordParser.getIsConfidential()
&& !issueRecordParser.getDeleted()) {
interestingPatientGuids.add(issueRecordParser.getPatientGuid());
}
}
issueRecordParser.close();
}
Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>();
for (String interestingPatientGuid: interestingPatientGuids) {
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, interestingPatientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + interestingPatientGuid);
}
for (UUID exchangeId: batchesPerPatientPerExchange.keySet()) {
Map<UUID, List<UUID>> batchesPerPatient = batchesPerPatientPerExchange.get(exchangeId);
List<UUID> batches = batchesPerPatient.get(edsPatientId);
if (batches != null) {
Set<UUID> batchesForExchange = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
if (batchesForExchange == null) {
batchesForExchange = new HashSet<>();
exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchesForExchange);
}
batchesForExchange.addAll(batches);
}
}
}
if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) {
//find the config for our protocol queue
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) {
Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
}
}
LOG.info("Finished Running Protocols for Confidential Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixOrgs() {
LOG.info("Posting orgs to protocol queue");
String[] orgIds = new String[]{
"332f31a2-7b28-47cb-af6f-18f65440d43d",
"c893d66b-eb89-4657-9f53-94c5867e7ed9"};
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
Map<UUID, Set<UUID>> exchangeBatches = new HashMap<>();
for (String orgId: orgIds) {
LOG.info("Doing org ID " + orgId);
UUID orgUuid = UUID.fromString(orgId);
try {
//select batch_id from ehr.resource_by_exchange_batch where resource_type = 'Organization' and resource_id = 8f465517-729b-4ad9-b405-92b487047f19 LIMIT 1 ALLOW FILTERING;
ResourceByExchangeBatch resourceByExchangeBatch = resourceRepository.getFirstResourceByExchangeBatch(ResourceType.Organization.toString(), orgUuid);
UUID batchId = resourceByExchangeBatch.getBatchId();
//select exchange_id from ehr.exchange_batch where batch_id = 1a940e10-1535-11e7-a29d-a90b99186399 LIMIT 1 ALLOW FILTERING;
ExchangeBatch exchangeBatch = exchangeBatchRepository.retrieveFirstForBatchId(batchId);
UUID exchangeId = exchangeBatch.getExchangeId();
Set<UUID> list = exchangeBatches.get(exchangeId);
if (list == null) {
list = new HashSet<>();
exchangeBatches.put(exchangeId, list);
}
list.add(batchId);
} catch (Exception ex) {
LOG.error("", ex);
break;
}
}
try {
//find the config for our protocol queue (which is in the inbound config)
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatches.keySet()) {
Set<UUID> batchIds = exchangeBatches.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
} catch (Exception ex) {
LOG.error("", ex);
return;
}
LOG.info("Finished posting orgs to protocol queue");
}*/
/*private static void findCodes() {
LOG.info("Finding missing codes");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT service_id, system_id, exchange_id, version FROM audit.exchange_transform_audit ALLOW FILTERING;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID serviceId = row.get(0, UUID.class);
UUID systemId = row.get(1, UUID.class);
UUID exchangeId = row.get(2, UUID.class);
UUID version = row.get(3, UUID.class);
ExchangeTransformAudit audit = auditRepository.getExchangeTransformAudit(serviceId, systemId, exchangeId, version);
String xml = audit.getErrorXml();
if (xml == null) {
continue;
}
String codePrefix = "Failed to find clinical code CodeableConcept for codeId ";
int codeIndex = xml.indexOf(codePrefix);
if (codeIndex > -1) {
int startIndex = codeIndex + codePrefix.length();
int tagEndIndex = xml.indexOf("<", startIndex);
String code = xml.substring(startIndex, tagEndIndex);
Service service = serviceRepository.getById(serviceId);
String name = service.getName();
LOG.info(name + " clinical code " + code + " from " + audit.getStarted());
continue;
}
codePrefix = "Failed to find medication CodeableConcept for codeId ";
codeIndex = xml.indexOf(codePrefix);
if (codeIndex > -1) {
int startIndex = codeIndex + codePrefix.length();
int tagEndIndex = xml.indexOf("<", startIndex);
String code = xml.substring(startIndex, tagEndIndex);
Service service = serviceRepository.getById(serviceId);
String name = service.getName();
LOG.info(name + " drug code " + code + " from " + audit.getStarted());
continue;
}
}
LOG.info("Finished finding missing codes");
}*/
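/**
 * Creates a cut-down copy of a TPP extract containing only the sample patients.
 * Invoked from main(..) as: CreateTppSubset <sourceDir> <destDir> <samplePatientsFile>
 * where the sample patients file contains one person ID per line (comment lines are skipped).
 */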
private static void createTppSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating TPP Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comment lines (assumed to start with "#"; the marker was lost from the source)
if (line.startsWith("#")) {
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createTppSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating TPP Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
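/**
 * Recursively walks the source directory, mirroring its structure under destDir.
 * Each CSV is filtered down to rows whose patient column matches one of the sample
 * person IDs: files with an "IDPatient" column are filtered on that, SRPatient.csv
 * is filtered on "RowIdentifier", and files with no patient column are copied unchanged.
 */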
private static void createTppSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
File[] files = sourceDir.listFiles();
if (files == null) {
throw new Exception("Failed to list files in " + sourceDir);
}
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile: files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
//LOG.info("Doing dir " + sourceFile);
createTppSubsetForFile(sourceFile, destFile, personIds);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader();
CSVParser parser = new CSVParser(br, format);
String filterColumn = null;
Map<String, Integer> headerMap = parser.getHeaderMap();
if (headerMap.containsKey("IDPatient")) {
filterColumn = "IDPatient";
} else if (name.equalsIgnoreCase("SRPatient.csv")) {
filterColumn = "RowIdentifier";
} else {
//if no patient column, just copy the file
parser.close();
LOG.info("Copying non-patient file " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
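//invert the parser's header map (name -> index) into an ordered array so the
//printer writes the columns out in their original order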
String[] columnHeaders = new String[headerMap.size()];
Iterator<String> headerIterator = headerMap.keySet().iterator();
while (headerIterator.hasNext()) {
String headerName = headerIterator.next();
int headerIndex = headerMap.get(headerName);
columnHeaders[headerIndex] = headerName;
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders));
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String patientId = csvRecord.get(filterColumn);
if (personIds.contains(patientId)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
/*} else {
//the 2.1 files are going to be a pain to split by patient, so just copy them over
LOG.info("Copying 2.1 file " + sourceFile);
copyFile(sourceFile, destFile);
}*/
}
}
}
private static void createVisionSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Vision Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comment lines (assumed to start with "#"; the marker was lost from the source)
if (line.startsWith("#")) {
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createVisionSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating Vision Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
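/**
 * Vision equivalent of createTppSubsetForFile: the four patient-keyed files
 * (encounter_data, journal_data, patient_data, referral_data) all carry the
 * patient ID in column 0; anything else is copied over unchanged.
 */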
private static void createVisionSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
File[] files = sourceDir.listFiles();
if (files == null) {
throw new Exception("Failed to list files in " + sourceDir);
}
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile: files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
createVisionSubsetForFile(sourceFile, destFile, personIds);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL);
CSVParser parser = new CSVParser(br, format);
int filterColumn = -1;
if (name.contains("encounter_data") || name.contains("journal_data") ||
name.contains("patient_data") || name.contains("referral_data")) {
filterColumn = 0;
} else {
//if no patient column, just copy the file
parser.close();
LOG.info("Copying non-patient file " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
CSVPrinter printer = new CSVPrinter(bw, format);
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String patientId = csvRecord.get(filterColumn);
if (personIds.contains(patientId)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
}
}
}
private static void createHomertonSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Homerton Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comment lines (assumed to start with "#"; the marker was lost from the source)
if (line.startsWith("#")) {
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createHomertonSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating Homerton Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
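/**
 * Homerton equivalent of createTppSubsetForFile: the PersonId column index varies
 * by file type (see the mapping below), and files without one are copied unchanged.
 */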
private static void createHomertonSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
File[] files = sourceDir.listFiles();
if (files == null) {
throw new Exception("Failed to list files in " + sourceDir);
}
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile: files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
createHomertonSubsetForFile(sourceFile, destFile, personIds);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
//parse using the first row as the column header
CSVFormat format = CSVFormat.DEFAULT.withHeader();
CSVParser parser = new CSVParser(br, format);
int filterColumn = -1;
//PersonId column at 1
if (name.contains("ENCOUNTER") || name.contains("PATIENT")) {
filterColumn = 1;
} else if (name.contains("DIAGNOSIS")) {
//PersonId column at 13
filterColumn = 13;
} else if (name.contains("ALLERGY")) {
//PersonId column at 2
filterColumn = 2;
} else if (name.contains("PROBLEM")) {
//PersonId column at 4
filterColumn = 4;
} else {
//if no patient column, just copy the file (i.e. PROCEDURE)
parser.close();
LOG.info("Copying file without PatientId " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
Map<String, Integer> headerMap = parser.getHeaderMap();
String[] columnHeaders = new String[headerMap.size()];
Iterator<String> headerIterator = headerMap.keySet().iterator();
while (headerIterator.hasNext()) {
String headerName = headerIterator.next();
int headerIndex = headerMap.get(headerName);
columnHeaders[headerIndex] = headerName;
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders));
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String patientId = csvRecord.get(filterColumn);
if (personIds.contains(patientId)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
}
}
}
private static void createAdastraSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Adastra Subset");
try {
Set<String> caseIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comment lines (assumed to start with "#"; the marker was lost from the source)
if (line.startsWith("#")) {
continue;
}
//adastra extract files are all keyed on caseId
caseIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createAdastraSubsetForFile(sourceDir, destDir, caseIds);
LOG.info("Finished Creating Adastra Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
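/**
 * Adastra equivalent of createTppSubsetForFile: files are pipe-delimited and keyed
 * on CaseRef rather than patient ID, at a column index that varies by file type.
 */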
private static void createAdastraSubsetForFile(File sourceDir, File destDir, Set<String> caseIds) throws Exception {
File[] files = sourceDir.listFiles();
if (files == null) {
throw new Exception("Failed to list files in " + sourceDir);
}
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile: files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
createAdastraSubsetForFile(sourceFile, destFile, caseIds);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
//Adastra files are pipe-delimited with no header row
CSVFormat format = CSVFormat.DEFAULT.withDelimiter('|');
CSVParser parser = new CSVParser(br, format);
int filterColumn = -1;
//CaseRef column at 0
if (name.contains("NOTES") || name.contains("CASEQUESTIONS") ||
name.contains("OUTCOMES") || name.contains("CONSULTATION") ||
name.contains("CLINICALCODES") || name.contains("PRESCRIPTIONS") ||
name.contains("PATIENT")) {
filterColumn = 0;
} else if (name.contains("CASE")) {
//CaseRef column at 2
filterColumn = 2;
} else if (name.contains("PROVIDER")) {
//CaseRef column at 7
filterColumn = 7;
} else {
//if no patient column, just copy the file
parser.close();
LOG.info("Copying non-patient file " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
CSVPrinter printer = new CSVPrinter(bw, format);
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String caseId = csvRecord.get(filterColumn);
if (caseIds.contains(caseId)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
}
}
}
}
/*class ResourceFiler extends FhirResourceFiler {
public ResourceFiler(UUID exchangeId, UUID serviceId, UUID systemId, TransformError transformError,
List<UUID> batchIdsCreated, int maxFilingThreads) {
super(exchangeId, serviceId, systemId, transformError, batchIdsCreated, maxFilingThreads);
}
private List<Resource> newResources = new ArrayList<>();
public List<Resource> getNewResources() {
return newResources;
}
@Override
public void saveAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling saveAdminResource");
}
@Override
public void deleteAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling deleteAdminResource");
}
@Override
public void savePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception {
for (Resource resource: resources) {
if (mapIds) {
IdHelper.mapIds(getServiceId(), getSystemId(), resource);
}
newResources.add(resource);
}
}
@Override
public void deletePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling deletePatientResource");
}
}*/
package org.endeavourhealth.queuereader;
import OpenPseudonymiser.Crypto;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.rabbitmq.client.*;
import org.apache.commons.csv.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.endeavourhealth.common.cache.ObjectMapperPool;
import org.endeavourhealth.common.config.ConfigManager;
import org.endeavourhealth.common.fhir.PeriodHelper;
import org.endeavourhealth.common.fhir.ReferenceHelper;
import org.endeavourhealth.common.utility.*;
import org.endeavourhealth.core.configuration.*;
import org.endeavourhealth.core.csv.CsvHelper;
import org.endeavourhealth.core.database.dal.DalProvider;
import org.endeavourhealth.core.database.dal.admin.LibraryRepositoryHelper;
import org.endeavourhealth.core.database.dal.admin.ServiceDalI;
import org.endeavourhealth.core.database.dal.admin.models.Service;
import org.endeavourhealth.core.database.dal.audit.ExchangeBatchDalI;
import org.endeavourhealth.core.database.dal.audit.ExchangeDalI;
import org.endeavourhealth.core.database.dal.audit.models.Exchange;
import org.endeavourhealth.core.database.dal.audit.models.ExchangeBatch;
import org.endeavourhealth.core.database.dal.audit.models.ExchangeTransformAudit;
import org.endeavourhealth.core.database.dal.audit.models.HeaderKeys;
import org.endeavourhealth.core.database.dal.eds.PatientLinkDalI;
import org.endeavourhealth.core.database.dal.eds.PatientSearchDalI;
import org.endeavourhealth.core.database.dal.ehr.ResourceDalI;
import org.endeavourhealth.core.database.dal.ehr.models.ResourceWrapper;
import org.endeavourhealth.core.database.dal.publisherTransform.InternalIdDalI;
import org.endeavourhealth.core.database.dal.reference.PostcodeDalI;
import org.endeavourhealth.core.database.dal.reference.models.PostcodeLookup;
import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberOrgMappingDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberPersonMappingDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberResourceMappingDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.models.EnterpriseAge;
import org.endeavourhealth.core.database.rdbms.ConnectionManager;
import org.endeavourhealth.core.database.rdbms.enterprise.EnterpriseConnector;
import org.endeavourhealth.core.exceptions.TransformException;
import org.endeavourhealth.core.fhirStorage.FhirSerializationHelper;
import org.endeavourhealth.core.fhirStorage.FhirStorageService;
import org.endeavourhealth.core.fhirStorage.JsonServiceInterfaceEndpoint;
import org.endeavourhealth.core.messaging.pipeline.components.OpenEnvelope;
import org.endeavourhealth.core.messaging.pipeline.components.PostMessageToExchange;
import org.endeavourhealth.core.queueing.QueueHelper;
import org.endeavourhealth.core.xml.QueryDocument.*;
import org.endeavourhealth.core.xml.TransformErrorSerializer;
import org.endeavourhealth.core.xml.TransformErrorUtility;
import org.endeavourhealth.core.xml.transformError.TransformError;
import org.endeavourhealth.transform.barts.schema.PPALI;
import org.endeavourhealth.transform.barts.schema.PPATI;
import org.endeavourhealth.transform.common.*;
import org.endeavourhealth.transform.common.resourceBuilders.GenericBuilder;
import org.endeavourhealth.transform.emis.EmisCsvToFhirTransformer;
import org.endeavourhealth.transform.emis.csv.helpers.EmisCsvHelper;
import org.endeavourhealth.transform.emis.csv.schema.appointment.Slot;
import org.endeavourhealth.transform.emis.csv.transforms.appointment.SessionUserTransformer;
import org.endeavourhealth.transform.emis.csv.transforms.appointment.SlotTransformer;
import org.hibernate.internal.SessionImpl;
import org.hl7.fhir.instance.model.*;
import org.hl7.fhir.instance.model.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import java.io.*;
import java.lang.System;
import java.lang.reflect.Constructor;
import java.net.InetAddress;
import java.net.Socket;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.sql.Connection;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Date;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
public class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
public static void main(String[] args) throws Exception {
if (args.length < 1) {
LOG.error("Usage: queuereader config_id");
return;
}
String configId = args[0];
LOG.info("Initialising config manager");
ConfigManager.initialize("queuereader", configId);
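//note: the first command-line argument doubles as the config ID and, for the one-off
//maintenance routines below, as the routine selector; if no routine matches, execution
//falls through to run the normal queue reader at the bottom of this method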
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEncounters")) {
String table = args[1];
fixEncounters(table);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestRabbit")) {
String nodes = args[1];
String username = args[2];
String password = args[3];
String exchangeName = args[4];
String queueName = args[5];
String sslProtocol = null;
if (args.length > 6) {
sslProtocol = args[6];
}
testRabbit(nodes, username, password, sslProtocol, exchangeName, queueName);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisEpisodes1")) {
String odsCode = args[1];
//fixEmisEpisodes1(odsCode);
fixEmisEpisodes2(odsCode);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestS3Listing")) {
String path = args[1];
testS3Listing(path);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateHomertonSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createHomertonSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateAdastraSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createAdastraSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateVisionSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createVisionSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateTppSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createTppSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateBartsSubset")) {
String sourceDirPath = args[1];
UUID serviceUuid = UUID.fromString(args[2]);
UUID systemUuid = UUID.fromString(args[3]);
String samplePatientsFile = args[4];
createBartsSubset(sourceDirPath, serviceUuid, systemUuid, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateEmisSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createEmisSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FindBartsPersonIds")) {
String sourceFile = args[1];
UUID serviceUuid = UUID.fromString(args[2]);
UUID systemUuid = UUID.fromString(args[3]);
String dateCutoffStr = args[4];
String dstFile = args[5];
findBartsPersonIds(sourceFile, serviceUuid, systemUuid, dateCutoffStr, dstFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixTPPNullOrgs")) {
String sourceDirPath = args[1];
String orgODS = args[2];
LOG.info("Fixing TPP Null Organisations");
fixTPPNullOrgs(sourceDirPath, orgODS);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisDeletedPatients")) {
String odsCode = args[1];
fixEmisDeletedPatients(odsCode);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("PostPatientToProtocol")) {
String odsCode = args[1];
String patientUuid = args[2];
postPatientToProtocol(odsCode, patientUuid);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestMetrics")) {
testMetrics();
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestXML")) {
testXml();
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestGraphiteMetrics")) {
String host = args[1];
String port = args[2];
testGraphiteMetrics(host, port);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsOrgs")) {
String serviceId = args[1];
fixBartsOrgs(serviceId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestPreparedStatements")) {
String url = args[1];
String user = args[2];
String pass = args[3];
String serviceId = args[4];
testPreparedStatements(url, user, pass, serviceId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateTransformMap")) {
UUID serviceId = UUID.fromString(args[1]);
String table = args[2];
String dstFile = args[3];
createTransforMap(serviceId, table, dstFile);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("ExportFhirToCsv")) {
UUID serviceId = UUID.fromString(args[1]);
String path = args[2];
exportFhirToCsv(serviceId, path);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestBatchInserts")) {
String url = args[1];
String user = args[2];
String pass = args[3];
String num = args[4];
String batchSize = args[5];
testBatchInserts(url, user, pass, num, batchSize);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ApplyEmisAdminCaches")) {
applyEmisAdminCaches();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixSubscribers")) {
fixSubscriberDbs();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems")) {
String serviceId = args[1];
String systemId = args[2];
fixEmisProblems(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestS3Read")) {
String s3Bucket = args[1];
String s3Key = args[2];
String start = args[3];
String len = args[4];
testS3Read(s3Bucket, s3Key, start, len);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems3ForPublisher")) {
String publisherId = args[1];
String systemId = args[2];
fixEmisProblems3ForPublisher(publisherId, UUID.fromString(systemId));
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems3")) {
String serviceId = args[1];
String systemId = args[2];
fixEmisProblems3(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("CheckDeletedObs")) {
String serviceId = args[1];
String systemId = args[2];
checkDeletedObs(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPersonsNoNhsNumber")) {
fixPersonsNoNhsNumber();
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateSubscriberUprnTable")) {
String subscriberConfigName = args[1];
populateSubscriberUprnTable(subscriberConfigName);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertEmisGuid")) {
convertEmisGuids();
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToRabbit")) {
String exchangeName = args[1];
String srcFile = args[2];
Integer throttle = null;
if (args.length > 3) {
throttle = Integer.parseInt(args[3]);
}
postToRabbit(exchangeName, srcFile, throttle);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToProtocol")) {
String srcFile = args[1];
postToProtocol(srcFile);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsPatients")) {
UUID serviceId = UUID.fromString(args[1]);
fixBartsPatients(serviceId);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixDeceasedPatients")) {
String subscriberConfig = args[1];
fixDeceasedPatients(subscriberConfig);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPseudoIds")) {
String subscriberConfig = args[1];
int threads = Integer.parseInt(args[2]);
fixPseudoIds(subscriberConfig, threads);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("MoveS3ToAudit")) {
int threads = Integer.parseInt(args[1]);
moveS3ToAudit(threads);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertFhirAudit")) {
UUID serviceId = UUID.fromString(args[1]);
int threads = Integer.parseInt(args[2]);
convertFhirAudit(serviceId, threads);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertExchangeBody")) {
String systemId = args[1];
convertExchangeBody(UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixReferrals")) {
fixReferralRequests();
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateNewSearchTable")) {
String table = args[1];
populateNewSearchTable(table);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsEscapes")) {
String filePath = args[1];
fixBartsEscapedFiles(filePath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToInbound")) {
String serviceId = args[1];
String systemId = args[2];
String filePath = args[3];
postToInboundFromFile(UUID.fromString(serviceId), UUID.fromString(systemId), filePath);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixDisabledExtract")) {
String sharedStoragePath = args[1];
String tempDir = args[2];
String systemId = args[3];
String serviceOdsCode = args[4];
fixDisabledEmisExtract(serviceOdsCode, systemId, sharedStoragePath, tempDir);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisMissingSlots")) {
String serviceOdsCode = args[1];
fixEmisMissingSlots(serviceOdsCode);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateLastDataDate")) {
int threads = Integer.parseInt(args[1]);
int batchSize = Integer.parseInt(args[2]);
populateLastDataDate(threads, batchSize);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestSlack")) {
testSlack();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToInbound")) {
String serviceId = args[1];
boolean all = Boolean.parseBoolean(args[2]);
postToInbound(UUID.fromString(serviceId), all);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPatientSearch")) {
String serviceId = args[1];
String systemId = null;
if (args.length > 2) {
systemId = args[2];
}
if (serviceId.equalsIgnoreCase("All")) {
fixPatientSearchAllServices(systemId);
} else {
fixPatientSearch(serviceId, systemId);
}
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixSlotReferences")) {
String serviceId = args[1];
try {
UUID serviceUuid = UUID.fromString(serviceId);
fixSlotReferences(serviceUuid);
} catch (Exception ex) {
fixSlotReferencesForPublisher(serviceId);
}
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestAuditingFile")) {
UUID serviceId = UUID.fromString(args[1]);
UUID systemId = UUID.fromString(args[2]);
UUID exchangeId = UUID.fromString(args[3]);
String version = args[4];
String filePath = args[5];
testAuditingFile(serviceId, systemId, exchangeId, version, filePath);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestS3VsMySQL")) {
UUID serviceUuid = UUID.fromString(args[1]);
int count = Integer.parseInt(args[2]);
int sqlBatchSize = Integer.parseInt(args[3]);
String bucketName = args[4];
testS3VsMySql(serviceUuid, count, sqlBatchSize, bucketName);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("Exit")) {
String exitCode = args[1];
LOG.info("Exiting with error code " + exitCode);
int exitCodeInt = Integer.parseInt(exitCode);
System.exit(exitCodeInt);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("RunSql")) {
String host = args[1];
String username = args[2];
String password = args[3];
String sqlFile = args[4];
runSql(host, username, password, sqlFile);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateProtocolQueue")) {
String serviceId = null;
if (args.length > 1) {
serviceId = args[1];
}
String startingExchangeId = null;
if (args.length > 2) {
startingExchangeId = args[2];
}
populateProtocolQueue(serviceId, startingExchangeId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindEncounterTerms")) {
String path = args[1];
String outputPath = args[2];
findEncounterTerms(path, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindEmisStartDates")) {
String path = args[1];
String outputPath = args[2];
findEmisStartDates(path, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ExportHl7Encounters")) {
String sourceCsvPpath = args[1];
String outputPath = args[2];
exportHl7Encounters(sourceCsvPpath, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixExchangeBatches")) {
fixExchangeBatches();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindCodes")) {
findCodes();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindDeletedOrgs")) {
findDeletedOrgs();
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("LoadEmisData")) {
String serviceId = args[1];
String systemId = args[2];
String dbUrl = args[3];
String dbUsername = args[4];
String dbPassword = args[5];
String onlyThisFileType = null;
if (args.length > 6) {
onlyThisFileType = args[6];
}
loadEmisData(serviceId, systemId, dbUrl, dbUsername, dbPassword, onlyThisFileType);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateEmisDataTables")) {
createEmisDataTables();
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("LoadBartsData")) {
String serviceId = args[1];
String systemId = args[2];
String dbUrl = args[3];
String dbUsername = args[4];
String dbPassword = args[5];
String startDate = args[6];
String onlyThisFileType = null;
if (args.length > 7) {
onlyThisFileType = args[7];
}
loadBartsData(serviceId, systemId, dbUrl, dbUsername, dbPassword, startDate, onlyThisFileType);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateBartsDataTables")) {
createBartsDataTables();
System.exit(0);
}
if (args.length != 1) {
LOG.error("Usage: queuereader config_id");
return;
}
LOG.info("--------------------------------------------------");
LOG.info("EDS Queue Reader " + configId);
LOG.info("--------------------------------------------------");
LOG.info("Fetching queuereader configuration");
String configXml = ConfigManager.getConfiguration(configId);
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
/*LOG.info("Registering shutdown hook");
registerShutdownHook();*/
// Instantiate rabbit handler
LOG.info("Creating EDS queue reader");
RabbitHandler rabbitHandler = new RabbitHandler(configuration, configId);
// Begin consume
rabbitHandler.start();
LOG.info("EDS Queue reader running (kill file location " + TransformConfig.instance().getKillFileLocation() + ")");
}
private static void testS3Listing(String path) {
LOG.info("Testing S3 Listing");
try {
LOG.info("Trying with full path: " + path);
List<FileInfo> l = FileHelper.listFilesInSharedStorageWithInfo(path);
LOG.info("Found " + l.size());
/*for (FileInfo info: l) {
LOG.info("Got " + info.getFilePath());
}*/
String parent = FilenameUtils.getFullPath(path);
LOG.info("Trying with parent: " + parent);
l = FileHelper.listFilesInSharedStorageWithInfo(parent);
LOG.info("Found " + l.size());
/*for (FileInfo info: l) {
LOG.info("Got " + info.getFilePath());
}*/
LOG.info("Finished Testing S3 Listing");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void testAuditingFile(UUID serviceId, UUID systemId, UUID exchangeId, String version, String filePath) {
LOG.info("Testing Auditing File");
try {
LOG.info("Creating parser");
//org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation obsParser = new org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation(serviceId, systemId, exchangeId, version, filePath);
org.endeavourhealth.transform.tpp.csv.schema.staff.SRStaffMemberProfile obsParser = new org.endeavourhealth.transform.tpp.csv.schema.staff.SRStaffMemberProfile(serviceId, systemId, exchangeId, version, filePath);
LOG.info("Created parser");
obsParser.nextRecord();
LOG.info("Done auditing");
obsParser.close();
LOG.info("Closed");
LOG.info("Finished Testing Auditing File");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void postPatientToProtocol(String odsCode, String patientUuid) {
LOG.info("Posting patient " + patientUuid + " for " + odsCode + " to Protocol queue");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(odsCode);
LOG.info("Service " + service.getId() + " -> " + service.getName());
UUID patientId = UUID.fromString(patientUuid);
List<UUID> systemIds = findSystemIds(service);
if (systemIds.size() != 1) {
throw new Exception("Found " + systemIds.size() + " system IDs for service");
}
UUID systemId = systemIds.get(0);
UUID serviceId = service.getId();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
LOG.info("Found " + exchanges.size() + " exchanges");
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
//exchanges are in order most recent first, so iterate backwards to get them in date order
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
List<UUID> batchesForPatient = new ArrayList<>();
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId());
for (ExchangeBatch batch: batches) {
if (batch.getEdsPatientId() != null
&& batch.getEdsPatientId().equals(patientId)) {
batchesForPatient.add(batch.getBatchId());
}
}
if (!batchesForPatient.isEmpty()) {
LOG.debug("Posting " + batchesForPatient.size() + " for exchange " + exchange.getId() + " to rabbit");
//set new batch ID in exchange header
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchesForPatient.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
//post new batch to protocol Q
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
LOG.info("Finished posting patient " + patientUuid + " for " + odsCode + " to Protocol queue");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void testXml() {
LOG.info("Testing XML");
try {
//PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Map<String, String> queueReadConfigs = ConfigManager.getConfigurations("queuereader");
for (String configId: queueReadConfigs.keySet()) {
LOG.debug("Checking config XML for " + configId);
String configXml = queueReadConfigs.get(configId);
if (configXml.startsWith("{")) {
LOG.debug("Skipping JSON");
continue;
}
try {
ApiConfiguration config = ConfigWrapper.deserialise(configXml);
//LOG.debug("Deserialised as messaging API XML");
ApiConfiguration.PostMessageAsync postConfig = config.getPostMessageAsync();
} catch (Exception ex) {
try {
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
} catch (Exception ex2) {
LOG.error(configXml);
LOG.error("", ex2);
}
}
}
LOG.info("Finished Testing XML");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void testMetrics() {
LOG.info("Testing Metrics");
try {
Random r = new Random(System.currentTimeMillis());
while (true) {
String metric1 = "frailty-api.ms-duration";
Integer value1 = Integer.valueOf(r.nextInt(1000));
MetricsHelper.recordValue(metric1, value1);
if (r.nextBoolean()) {
MetricsHelper.recordEvent("frailty-api.response-code-200");
} else {
MetricsHelper.recordEvent("frailty-api.response-code-400");
}
int sleep = r.nextInt(10 * 1000);
LOG.debug("Waiting " + sleep + " ms");
Thread.sleep(sleep);
/* example metric paths as they appear in Graphite:
* N3-MessagingAPI-01.messaging-api.frailty-api.duration-ms
* N3-MessagingAPI-01.messaging-api.frailty-api.response-code
*/
}
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void testGraphiteMetrics(String host, String port) {
LOG.info("Testing Graphite metrics to " + host + " " + port);
try {
InetAddress ip = InetAddress.getLocalHost();
String hostname = ip.getHostName();
LOG.debug("Hostname = " + hostname);
String appId = ConfigManager.getAppId();
LOG.debug("AppID = " + appId);
Random r = new Random(System.currentTimeMillis());
while (true) {
Map<String, Object> metrics = new HashMap<>();
String metric1 = hostname + "." + appId + ".frailty-api.duration-ms";
Integer value1 = Integer.valueOf(r.nextInt(1000));
metrics.put(metric1, value1);
String metric2 = hostname + "." + appId + ".frailty-api.response-code";
Integer value2;
if (r.nextBoolean()) {
value2 = Integer.valueOf(200);
} else {
value2 = Integer.valueOf(400);
}
metrics.put(metric2, value2);
long timestamp = System.currentTimeMillis() / 1000;
LOG.debug("Sending metrics");
sendMetrics(host, Integer.parseInt(port), metrics, timestamp);
int sleep = r.nextInt(10 * 1000);
LOG.debug("Waiting " + sleep + " ms");
Thread.sleep(sleep);
/* example metric paths as they appear in Graphite:
* N3-MessagingAPI-01.messaging-api.frailty-api.duration-ms
* N3-MessagingAPI-01.messaging-api.frailty-api.response-code
*/
}
} catch (Throwable t) {
LOG.error("", t);
}
}
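/**
 * Writes metrics to Carbon using the Graphite plaintext protocol: one line per
 * metric of the form "<metric.path> <value> <epoch-seconds>", e.g. (hypothetical path)
 * "host1.queuereader.frailty-api.duration-ms 123 1514764800".
 */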
private static void sendMetrics(String graphiteHost, int graphitePort, Map<String, Object> metrics, long timeStamp) throws Exception {
Socket socket = new Socket(graphiteHost, graphitePort);
OutputStream s = socket.getOutputStream();
PrintWriter out = new PrintWriter(s, true);
for (Map.Entry<String, Object> metric: metrics.entrySet()) {
if (metric.getValue() instanceof Integer) {
out.printf("%s %d %d%n", metric.getKey(), ((Integer)metric.getValue()).intValue(), timeStamp);
}
else if (metric.getValue() instanceof Float) {
out.printf("%s %f %d%n", metric.getKey(), ((Float)metric.getValue()).floatValue(), timeStamp);
} else {
throw new RuntimeException("Unsupported type " + metric.getValue().getClass());
}
}
out.close();
socket.close();
}
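/**
 * Replays the Emis Admin_Patient files for a service to find patients whose records need
 * un-deleting: either they were deducted/deceased and then deleted while the sharing
 * agreement was still active, or they were deleted and subsequently un-deleted. For each
 * affected patient, the latest non-deleted version of every resource is re-filed and the
 * resulting batches are posted to the EdsProtocol queue.
 */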
private static void fixEmisDeletedPatients(String odsCode) {
LOG.info("Fixing Emis Deleted Patients for " + odsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(odsCode);
LOG.info("Service " + service.getId() + " -> " + service.getName());
List<UUID> systemIds = findSystemIds(service);
if (systemIds.size() != 1) {
throw new Exception("Found " + systemIds.size() + " system IDs for service");
}
UUID systemId = systemIds.get(0);
UUID serviceId = service.getId();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
LOG.info("Found " + exchanges.size() + " exchanges");
Set<String> hsPatientGuidsDeductedDeceased = new HashSet<>();
Map<String, List<UUID>> hmPatientGuidsDeleted = new HashMap<>();
Map<String, List<String>> hmPatientGuidsToFix = new HashMap<>();
//exchanges are in REVERSE order (most recent first)
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
//skip exchanges that are for custom extracts
if (files.size() <= 1) {
continue;
}
//skip if we're ignoring old data
boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files);
if (!processPatientData) {
continue;
}
//find patient file
ExchangePayloadFile patientFile = findFileOfType(files, "Admin_Patient");
if (patientFile == null) {
throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId());
}
ExchangePayloadFile agreementFile = findFileOfType(files, "Agreements_SharingOrganisation");
if (agreementFile == null) {
throw new Exception("Failed to find Agreements_SharingOrganisation file in exchange " + exchange.getId());
}
//work out file version
List<ExchangePayloadFile> filesTmp = new ArrayList<>();
filesTmp.add(patientFile);
filesTmp.add(agreementFile);
String version = EmisCsvToFhirTransformer.determineVersion(filesTmp);
//see if sharing agreement is disabled
String path = agreementFile.getPath();
org.endeavourhealth.transform.emis.csv.schema.agreements.SharingOrganisation agreementParser = new org.endeavourhealth.transform.emis.csv.schema.agreements.SharingOrganisation(serviceId, systemId, exchange.getId(), version, path);
agreementParser.nextRecord();
CsvCell disabled = agreementParser.getDisabled();
boolean isDisabled = disabled.getBoolean();
//create the parser
path = patientFile.getPath();
org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path);
while (parser.nextRecord()) {
CsvCell patientGuidCell = parser.getPatientGuid();
String patientGuid = patientGuidCell.getString();
CsvCell dateOfDeathCell = parser.getDateOfDeath();
CsvCell dateOfDeductionCell = parser.getDateOfDeactivation();
CsvCell deletedCell = parser.getDeleted();
if (deletedCell.getBoolean()) {
List<UUID> exchangesDeleted = hmPatientGuidsDeleted.get(patientGuid);
if (exchangesDeleted == null) {
exchangesDeleted = new ArrayList<>();
hmPatientGuidsDeleted.put(patientGuid, exchangesDeleted);
}
exchangesDeleted.add(exchange.getId());
//if this patient was previously updated with a deduction date or date of death, and the sharing
//agreement isn't disabled, then we will have deleted them and need to undelete
if (hsPatientGuidsDeductedDeceased.contains(patientGuid)
&& !isDisabled) {
List<String> exchangesToFix = hmPatientGuidsToFix.get(patientGuid);
if (exchangesToFix == null) {
exchangesToFix = new ArrayList<>();
hmPatientGuidsToFix.put(patientGuid, exchangesToFix);
}
exchangesToFix.add(exchange.getId().toString() + ": Deducted/Dead and Deleted after");
}
} else {
//if the date of death or deduction is set then we need to track this
//because we're going to possibly get a delete in a years time
if (!dateOfDeathCell.isEmpty() || !dateOfDeductionCell.isEmpty()) {
hsPatientGuidsDeductedDeceased.add(patientGuid);
} else {
hsPatientGuidsDeductedDeceased.remove(patientGuid);
}
//if this patient was previously deleted and is now UN-deleted, then we'll
//need to fix the record
if (hmPatientGuidsDeleted.containsKey(patientGuid)) {
List<UUID> exchangesDeleted = hmPatientGuidsDeleted.remove(patientGuid);
List<String> exchangesToFix = hmPatientGuidsToFix.get(patientGuid);
if (exchangesToFix == null) {
exchangesToFix = new ArrayList<>();
hmPatientGuidsToFix.put(patientGuid, exchangesToFix);
}
for (UUID exchangeId: exchangesDeleted) {
exchangesToFix.add(exchangeId.toString() + ": Deleted and subsequently undeleted");
}
}
}
}
parser.close();
}
LOG.info("Finished checking for affected patients - found " + hmPatientGuidsToFix.size() + " patients to fix");
for (String patientGuid: hmPatientGuidsToFix.keySet()) {
List<String> exchangeIds = hmPatientGuidsToFix.get(patientGuid);
LOG.info("Patient " + patientGuid);
for (String exchangeId: exchangeIds) {
LOG.info(" Exchange Id " + exchangeId);
}
//log out the UUID for the patient too
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, null, null, false, null);
Reference ref = ReferenceHelper.createReference(ResourceType.Patient, patientGuid);
ref = IdHelper.convertLocallyUniqueReferenceToEdsReference(ref, csvHelper);
LOG.debug(" Patient UUID " + ref.getReference());
String patientUuidStr = ReferenceHelper.getReferenceId(ref);
UUID patientUuid = UUID.fromString(patientUuidStr);
Set<UUID> hsExchangeIdsDone = new HashSet<>();
Set<String> resourcesDone = new HashSet<>();
for (String exchangeId: exchangeIds) {
UUID exchangeUuid = UUID.fromString(exchangeId.split(":")[0]);
//in some cases, the same exchange was found twice
if (hsExchangeIdsDone.contains(exchangeUuid)) {
continue;
}
hsExchangeIdsDone.add(exchangeUuid);
Exchange exchange = exchangeDal.getExchange(exchangeUuid);
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
List<UUID> batchIdsCreated = new ArrayList<>();
TransformError transformError = new TransformError();
FhirResourceFiler filer = new FhirResourceFiler(exchangeUuid, serviceId, systemId, transformError, batchIdsCreated);
//get all exchange batches for our patient
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeUuid);
for (ExchangeBatch batch: batches) {
UUID batchPatient = batch.getEdsPatientId();
if (batchPatient == null || !batchPatient.equals(patientUuid)) {
continue;
}
//get all resources for this batch
List<ResourceWrapper> resourceWrappers = resourceDal.getResourcesForBatch(serviceId, batch.getBatchId());
//restore each resource
for (ResourceWrapper resourceWrapper: resourceWrappers) {
//if an exchange was processed multiple times, we might try to pick up the same resource twice, so skip it
String resourceRef = ReferenceHelper.createResourceReference(resourceWrapper.getResourceType(), resourceWrapper.getResourceId().toString());
if (resourcesDone.contains(resourceRef)) {
continue;
}
resourcesDone.add(resourceRef);
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceWrapper.getResourceType(), resourceWrapper.getResourceId());
//most recent is first
ResourceWrapper mostRecent = history.get(0);
if (!mostRecent.isDeleted()) {
continue;
}
//find latest non-deleted version and save it over the deleted version
for (ResourceWrapper historyItem: history) {
if (!historyItem.isDeleted()) {
Resource resource = FhirSerializationHelper.deserializeResource(historyItem.getResourceData());
GenericBuilder builder = new GenericBuilder(resource);
filer.savePatientResource(null, false, builder);
break;
}
}
}
}
filer.waitToFinish();
//set new batch ID in exchange header
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
//post new batch to protocol Q
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
LOG.info("Finished Fixing Emis Deleted Patients for " + odsCode);
} catch (Throwable t) {
LOG.error("", t);
}
}
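/**
 * Returns the first file in the exchange payload with the given file type (e.g. "Admin_Patient"),
 * or null if the exchange contains no file of that type.
 */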
private static ExchangePayloadFile findFileOfType(List<ExchangePayloadFile> files, String fileType) {
for (ExchangePayloadFile file: files) {
if (file.getType().equals(fileType)) {
return file;
}
}
return null;
}
private static void fixEmisEpisodes2(String odsCode) {
LOG.info("Fixing Emis Episodes (2) for " + odsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(odsCode);
LOG.info("Service " + service.getId() + " -> " + service.getName());
List<UUID> systemIds = findSystemIds(service);
if (systemIds.size() != 1) {
throw new Exception("Found " + systemIds.size() + " for service");
}
UUID systemId = systemIds.get(0);
UUID serviceId = service.getId();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
LOG.info("Found " + exchanges.size() + " exchanges");
InternalIdDalI internalIdDal = DalProvider.factoryInternalIdDal();
Set<String> patientGuidsDone = new HashSet<>();
//exchanges are in REVERSE order (most recent first)
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
//skip exchanges that are for custom extracts
if (files.size() <= 1) {
continue;
}
//skip if we're ignoring old data
boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files);
if (!processPatientData) {
continue;
}
//find patient file
ExchangePayloadFile patientFile = null;
for (ExchangePayloadFile file: files) {
if (file.getType().equals("Admin_Patient")) {
patientFile = file;
break;
}
}
if (patientFile == null) {
throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId());
}
String path = patientFile.getPath();
List<ExchangePayloadFile> filesTmp = new ArrayList<>();
filesTmp.add(patientFile);
String version = EmisCsvToFhirTransformer.determineVersion(filesTmp);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path);
while (parser.nextRecord()) {
CsvCell deletedCell = parser.getDeleted();
if (deletedCell.getBoolean()) {
continue;
}
//skip patients already done
CsvCell patientGuidCell = parser.getPatientGuid();
String patientGuid = patientGuidCell.getString();
if (patientGuidsDone.contains(patientGuid)) {
continue;
}
patientGuidsDone.add(patientGuid);
//check we've not already converted this patient previously (i.e. re-running this conversion)
CsvCell startDateCell = parser.getDateOfRegistration();
if (startDateCell.isEmpty()) {
LOG.error("Missing start date for patient " + patientGuid + " in exchange " + exchange.getId());
startDateCell = CsvCell.factoryDummyWrapper("1900-01-01");
}
//save internal ID map
String key = patientGuidCell.getString();
String value = startDateCell.getString();
internalIdDal.save(serviceId, "Emis_Latest_Reg_Date", key, value);
}
parser.close();
}
LOG.info("Finished Fixing Emis Episodes (2) for " + odsCode);
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void fixEmisEpisodes1(String odsCode) {
LOG.info("Fixing Emis Episodes (1) for " + odsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(odsCode);
LOG.info("Service " + service.getId() + " -> " + service.getName());
List<UUID> systemIds = findSystemIds(service);
if (systemIds.size() != 1) {
throw new Exception("Found " + systemIds.size() + " for service");
}
UUID systemId = systemIds.get(0);
UUID serviceId = service.getId();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
LOG.info("Found " + exchanges.size() + " exchanges");
InternalIdDalI internalIdDal = DalProvider.factoryInternalIdDal();
Set<String> patientGuidsDone = new HashSet<>();
//exchanges are in REVERSE order (most recent first)
for (Exchange exchange: exchanges) {
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
//skip exchanges that are for custom extracts
if (files.size() <= 1) {
continue;
}
//skip if we're ignoring old data
boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files);
if (!processPatientData) {
continue;
}
//find patient file
ExchangePayloadFile patientFile = null;
for (ExchangePayloadFile file: files) {
if (file.getType().equals("Admin_Patient")) {
patientFile = file;
break;
}
}
if (patientFile == null) {
throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId());
}
String path = patientFile.getPath();
List<ExchangePayloadFile> filesTmp = new ArrayList<>();
filesTmp.add(patientFile);
String version = EmisCsvToFhirTransformer.determineVersion(filesTmp);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path);
while (parser.nextRecord()) {
CsvCell deletedCell = parser.getDeleted();
if (deletedCell.getBoolean()) {
continue;
}
//skip patients already done
CsvCell patientGuidCell = parser.getPatientGuid();
String patientGuid = patientGuidCell.getString();
if (patientGuidsDone.contains(patientGuid)) {
continue;
}
patientGuidsDone.add(patientGuid);
//check we've not already converted this patient previously (i.e. re-running this conversion)
String key = patientGuidCell.getString();
String existingIdMapValue = internalIdDal.getDestinationId(serviceId, "Emis_Latest_Reg_Date", key);
if (existingIdMapValue != null) {
continue;
}
CsvCell startDateCell = parser.getDateOfRegistration();
if (startDateCell.isEmpty()) {
LOG.error("Missing start date for patient " + patientGuid + " in exchange " + exchange.getId());
startDateCell = CsvCell.factoryDummyWrapper("1900-01-01");
}
//find the existing UUID we've previously allocated
String oldSourceId = patientGuid;
UUID episodeUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.EpisodeOfCare, oldSourceId);
if (episodeUuid == null) {
LOG.error("Null episode UUID for old source ID " + oldSourceId + " in exchange " + exchange.getId());
continue;
}
//save ID reference mapping
String newSourceId = patientGuid + ":" + startDateCell.getString();
UUID newEpisodeUuid = IdHelper.getOrCreateEdsResourceId(serviceId, ResourceType.EpisodeOfCare, newSourceId, episodeUuid);
if (!newEpisodeUuid.equals(episodeUuid)) {
throw new Exception("Failed to carry over UUID for episode. Old UUID was " + episodeUuid + " new UUID is " + newEpisodeUuid + " in exchange " + exchange.getId());
}
//save internal ID map
String value = startDateCell.getString();
internalIdDal.save(serviceId, "Emis_Latest_Reg_Date", key, value);
}
parser.close();
}
LOG.info("Finished Fixing Emis Episodes (1) for " + odsCode);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
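/**
 * RabbitMQ connectivity smoke test: publishes five small persistent messages (with publisher
 * confirms enabled) to the given exchange using routing key "All", then consumes from the given
 * queue for 30 seconds before quitting.
 */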
private static void testRabbit(String nodes, String username, String password, String sslProtocol, String exchangeName, String queueName) {
LOG.info("Testing RabbitMQ Connectivity on " + nodes);
LOG.info("SSL Protocol = " + sslProtocol);
LOG.info("Exchange = " + exchangeName);
LOG.info("Queue = " + queueName);
try {
//test publishing
LOG.info("Testing publishing...");
com.rabbitmq.client.Connection publishConnection = org.endeavourhealth.core.queueing.ConnectionManager.getConnection(username, password, nodes, sslProtocol);
Channel publishChannel = org.endeavourhealth.core.queueing.ConnectionManager.getPublishChannel(publishConnection, exchangeName);
publishChannel.confirmSelect();
for (int i=0; i<5; i++) {
Map<String, Object> headers = new HashMap<>();
headers.put("HeaderIndex", "" + i);
AMQP.BasicProperties properties = new AMQP.BasicProperties()
.builder()
.deliveryMode(2) // Persistent message
.headers(headers)
.build();
String body = "MessageIndex = " + i;
byte[] bytes = body.getBytes();
publishChannel.basicPublish(
exchangeName,
"All", //routing key
properties,
bytes);
}
publishChannel.close();
publishConnection.close();
LOG.info("...Finished testing publishing");
//test consuming
LOG.info("Testing reading...");
com.rabbitmq.client.Connection readConnection = org.endeavourhealth.core.queueing.ConnectionManager.getConnection(username, password, nodes, sslProtocol);
Channel readChannel = readConnection.createChannel();
readChannel.basicQos(1);
Consumer consumer = new TestRabbitConsumer(readChannel);
readChannel.basicConsume(queueName, false, "TestRabbitConsumer", false, true, null, consumer);
LOG.info("Reader Connected (ctrl+c to close) will quit in 30s");
Thread.sleep(30 * 1000);
LOG.info("Finished Testing RabbitMQ Connectivity");
} catch (Throwable t) {
LOG.error("", t);
}
}
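/**
 * Works through drewtest.exchange_ids in batches of batchSize, submitting a
 * PopulateDataDateCallable per exchange to a thread pool, and stops when a batch comes back
 * smaller than batchSize or any worker reports an error.
 */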
private static void populateLastDataDate(int threads, int batchSize) {
LOG.debug("Populating last data date");
try {
int processed = 0;
AtomicInteger fixed = new AtomicInteger();
ThreadPool threadPool = new ThreadPool(threads, batchSize);
while (true) {
String sql = "SELECT id FROM drewtest.exchange_ids WHERE done = 0 LIMIT " + batchSize;
//LOG.debug("Getting new batch using: " + sql);
EntityManager auditEntityManager = ConnectionManager.getAuditEntityManager();
SessionImpl auditSession = (SessionImpl)auditEntityManager.getDelegate();
Connection auditConnection = auditSession.connection();
Statement statement = auditConnection.createStatement();
ResultSet rs = statement.executeQuery(sql);
List<UUID> exchangeIds = new ArrayList<>();
while (rs.next()) {
String s = rs.getString(1);
//LOG.debug("Got back exchange ID " + s);
exchangeIds.add(UUID.fromString(s));
}
rs.close();
statement.close();
auditEntityManager.close();
for (UUID exchangeId: exchangeIds) {
threadPool.submit(new PopulateDataDateCallable(exchangeId, fixed));
}
List<ThreadPoolError> errs = threadPool.waitUntilEmpty();
if (!errs.isEmpty()) {
LOG.debug("Got " + errs.size() + " errors");
for (ThreadPoolError err: errs) {
LOG.error("", err.getException());
}
break;
}
processed += exchangeIds.size();
LOG.debug("processed " + processed + " fixed " + fixed.get());
//if finished
if (exchangeIds.size() < batchSize) {
break;
}
}
threadPool.waitAndStop();
LOG.debug("Finished Populating last data date");
} catch (Throwable t) {
LOG.error("", t);
}
}
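/**
 * Re-runs the slot/appointment transform over every non-custom Emis exchange for the given
 * service, creating Slot and Appointment resources only where none already exist for the
 * patientGuid:slotGuid pair. New batch IDs are audited to SlotAudit_<odsCode>.csv and each
 * affected exchange is posted back onto the EdsProtocol queue.
 */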
private static void fixEmisMissingSlots(String serviceOdsCode) {
LOG.debug("Fixing Emis Missing Slots for " + serviceOdsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(serviceOdsCode);
LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId());
List<UUID> systemIds = findSystemIds(service);
if (systemIds.size() != 1) {
throw new Exception("Found " + systemIds.size() + " for service");
}
UUID systemId = systemIds.get(0);
UUID serviceId = service.getId();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
Set<String> hsSlotsToSkip = new HashSet<>();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
File auditFile = new File("SlotAudit_" + serviceOdsCode + ".csv");
LOG.debug("Auditing to " + auditFile);
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
if (exchangeConfig == null) {
throw new Exception("Failed to find PostMessageToExchange config details for exchange EdsProtocol");
}
//the list of exchanges is most-recent-first, so iterate backwards to do them in order
for (int i = exchanges.size() - 1; i >= 0; i--) {
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
//skip exchanges that are for custom extracts
if (files.size() <= 1) {
continue;
}
boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files);
if (!processPatientData) {
continue;
}
ExchangeTransformAudit transformAudit = new ExchangeTransformAudit();
transformAudit.setServiceId(serviceId);
transformAudit.setSystemId(systemId);
transformAudit.setExchangeId(exchange.getId());
transformAudit.setId(UUID.randomUUID());
transformAudit.setStarted(new Date());
String version = EmisCsvToFhirTransformer.determineVersion(files);
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), null, processPatientData, null);
//the processor is responsible for saving FHIR resources
TransformError transformError = new TransformError();
List<UUID> batchIdsCreated = new ArrayList<>();
FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(exchange.getId(), serviceId, systemId, transformError, batchIdsCreated);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchange.getId(), files, version, parsers);
try {
//cache the practitioners for each session
SessionUserTransformer.transform(parsers, fhirResourceFiler, csvHelper);
Slot parser = (Slot) parsers.get(Slot.class);
while (parser.nextRecord()) {
//should this record be transformed?
//the slots CSV contains data on empty slots too; ignore them
CsvCell patientGuid = parser.getPatientGuid();
if (patientGuid.isEmpty()) {
continue;
}
//the EMIS data contains thousands of appointments that refer to patients we don't have, so I'm explicitly
//handling this here, and ignoring any Slot record that is in this state
UUID patientEdsId = IdHelper.getEdsResourceId(fhirResourceFiler.getServiceId(), ResourceType.Patient, patientGuid.getString());
if (patientEdsId == null) {
continue;
}
//see if this appointment has previously been transformed
CsvCell slotGuid = parser.getSlotGuid();
String uniqueId = patientGuid.getString() + ":" + slotGuid.getString();
if (!hsSlotsToSkip.contains(uniqueId)) {
//transform this slot record if no appt already exists for it
boolean alreadyExists = false;
UUID discoveryId = IdHelper.getEdsResourceId(serviceId, ResourceType.Slot, uniqueId);
if (discoveryId != null) {
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Slot.toString(), discoveryId);
if (!history.isEmpty()) {
alreadyExists = true;
}
}
if (alreadyExists) {
hsSlotsToSkip.add(uniqueId);
}
}
if (hsSlotsToSkip.contains(uniqueId)) {
continue;
}
hsSlotsToSkip.add(uniqueId);
try {
LOG.debug("Creating slot for " + uniqueId);
SlotTransformer.createSlotAndAppointment((Slot) parser, fhirResourceFiler, csvHelper);
} catch (Exception ex) {
fhirResourceFiler.logTransformRecordError(ex, parser.getCurrentState());
}
}
csvHelper.clearCachedSessionPractitioners();
fhirResourceFiler.failIfAnyErrors();
fhirResourceFiler.waitToFinish();
} catch (Throwable ex) {
Map<String, String> args = new HashMap<>();
args.put(TransformErrorUtility.ARG_FATAL_ERROR, ex.getMessage());
TransformErrorUtility.addTransformError(transformError, ex, args);
LOG.error("", ex);
}
transformAudit.setEnded(new Date());
transformAudit.setNumberBatchesCreated(Integer.valueOf(batchIdsCreated.size()));
if (transformError.getError().size() > 0) {
transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError));
}
//save our audit if something went wrong or was saved
if (transformError.getError().size() > 0
|| !batchIdsCreated.isEmpty()) {
exchangeDal.save(transformAudit);
}
//send to Rabbit protocol queue
if (!batchIdsCreated.isEmpty()) {
//write batch ID to file, so we have an audit of what we created
List<String> lines = new ArrayList<>();
for (UUID batchId : batchIdsCreated) {
lines.add("\"" + exchange.getId() + "\",\"" + batchId + "\"");
}
Files.write(auditFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE);
String batchesJson = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchesJson);
//send to Rabbit
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
if (transformError.getError().size() > 0) {
throw new Exception("Dropping out due to error in transform");
}
}
LOG.debug("Finished Fixing Emis Missing Slots for " + serviceOdsCode);
} catch (Throwable t) {
LOG.error("", t);
}
}
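/**
 * Scans the PPATI and PPALI files of every exchange for the given service/system, collecting
 * Millennium person IDs for each NHS number listed in sourceFile (or for any record in an
 * extract dated on/after dateCutoffStr), and writes the matches to destFile grouped by NHS number.
 */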
private static void findBartsPersonIds(String sourceFile, UUID serviceUuid, UUID systemUuid, String dateCutoffStr, String destFile) {
LOG.debug("Finding Barts person IDs for " + sourceFile);
try {
//read NHS numbers into memory
Set<String> hsNhsNumbers = new HashSet<>();
List<String> listNhsNumbers = new ArrayList<>();
File src = new File(sourceFile);
List<String> lines = Files.readAllLines(src.toPath());
for (String line : lines) {
String s = line.trim();
hsNhsNumbers.add(s);
listNhsNumbers.add(s); //maintain a list so we can preserve the ordering
}
LOG.debug("Looking for Person IDs for " + hsNhsNumbers.size() + " nhs numbers or any since " + dateCutoffStr);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date dateCutoff = sdf.parse(dateCutoffStr);
Map<String, Set<String>> hmMatches = new HashMap<>();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);
for (Exchange exchange : exchanges) {
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
for (ExchangePayloadFile file : files) {
String parentPath = new File(file.getPath()).getParent();
String parentDir = FilenameUtils.getBaseName(parentPath);
Date extractDate = sdf.parse(parentDir);
boolean inDateRange = !extractDate.before(dateCutoff);
String type = file.getType();
if (type.equals("PPATI")) {
PPATI parser = new PPATI(null, null, null, null, file.getPath());
while (parser.nextRecord()) {
CsvCell nhsNumberCell = parser.getNhsNumber();
String nhsNumber = nhsNumberCell.getString();
nhsNumber = nhsNumber.replace("-", "");
if (hsNhsNumbers.contains(nhsNumber)
|| inDateRange) {
CsvCell personIdCell = parser.getMillenniumPersonId();
String personId = personIdCell.getString();
Set<String> s = hmMatches.get(nhsNumber);
if (s == null) {
s = new HashSet<>();
hmMatches.put(nhsNumber, s);
}
s.add(personId);
}
}
parser.close();
} else if (type.equals("PPALI")) {
PPALI parser = new PPALI(null, null, null, null, file.getPath());
while (parser.nextRecord()) {
CsvCell aliasCell = parser.getAlias();
//not going to bother trying to filter on alias type, since it won't hurt to include
//extra patients, if they have an MRN that accidentally matches one of the NHS numbers being searched for
String alias = aliasCell.getString();
if (hsNhsNumbers.contains(alias)
|| inDateRange) {
//NHS numbers in PPALI don't have the extra hyphens
CsvCell personIdCell = parser.getMillenniumPersonIdentifier();
String personId = personIdCell.getString();
Set<String> s = hmMatches.get(alias);
if (s == null) {
s = new HashSet<>();
hmMatches.put(alias, s);
}
s.add(personId);
}
}
parser.close();
} else {
//just ignore other file types
}
}
}
LOG.debug("" + hmMatches.size() + " / " + hsNhsNumbers.size() + " NHS numbers had person IDs found");
List<String> newLines = new ArrayList<>();
for (String nhsNumber : listNhsNumbers) {
Set<String> personIds = hmMatches.get(nhsNumber);
if (personIds == null) {
LOG.error("Failed to find person ID for " + nhsNumber);
continue;
}
newLines.add("#NHS " + nhsNumber + ":");
for (String personId : personIds) {
newLines.add(personId);
}
}
File dst = new File(destFile);
if (dst.exists()) {
dst.delete();
}
Files.write(dst.toPath(), newLines);
LOG.debug("Finished Finding Barts person IDs for " + sourceFile);
} catch (Throwable t) {
LOG.error("", t);
}
}
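/**
 * Prints DROP/CREATE TABLE statements to stdout for raw staging tables covering each known
 * Emis CSV file type listed below.
 */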
private static void createEmisDataTables() {
LOG.debug("Creating Emis data tables");
try {
List<String> fileTypes = new ArrayList<>();
fileTypes.add("Admin_Location");
fileTypes.add("Admin_OrganisationLocation");
fileTypes.add("Admin_Organisation");
fileTypes.add("Admin_Patient");
fileTypes.add("Admin_UserInRole");
fileTypes.add("Agreements_SharingOrganisation");
fileTypes.add("Appointment_SessionUser");
fileTypes.add("Appointment_Session");
fileTypes.add("Appointment_Slot");
fileTypes.add("CareRecord_Consultation");
fileTypes.add("CareRecord_Diary");
fileTypes.add("CareRecord_ObservationReferral");
fileTypes.add("CareRecord_Observation");
fileTypes.add("CareRecord_Problem");
fileTypes.add("Coding_ClinicalCode");
fileTypes.add("Coding_DrugCode");
fileTypes.add("Prescribing_DrugRecord");
fileTypes.add("Prescribing_IssueRecord");
fileTypes.add("Audit_PatientAudit");
fileTypes.add("Audit_RegistrationAudit");
for (String fileType : fileTypes) {
createEmisDataTable(fileType);
}
LOG.debug("Finished Creating Emis data tables");
} catch (Throwable t) {
LOG.error("", t);
}
}
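/**
 * Prints the DDL for one Emis file type: a table named after the file type, with file_name and
 * extract_date columns plus one column per CSV column (mediumtext for known long-text columns,
 * varchar otherwise). A hypothetical example of the shape of the generated SQL:
 *
 *   DROP TABLE IF EXISTS `Admin_Location`;
 *   CREATE TABLE `Admin_Location` (file_name varchar(100), extract_date datetime, ...);
 */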
private static void createEmisDataTable(String fileType) throws Exception {
ParserI parser = createParserForEmisFileType(fileType, null);
if (parser == null) {
return;
}
System.out.println("-- " + fileType);
String table = fileType.replace(" ", "_");
String dropSql = "DROP TABLE IF EXISTS `" + table + "`;";
System.out.println(dropSql);
String sql = "CREATE TABLE `" + table + "` (";
sql += "file_name varchar(100)";
sql += ", ";
sql += "extract_date datetime";
if (parser instanceof AbstractFixedParser) {
AbstractFixedParser fixedParser = (AbstractFixedParser) parser;
List<FixedParserField> fields = fixedParser.getFieldList();
for (FixedParserField field : fields) {
String col = field.getName();
int len = field.getFieldlength();
sql += ", ";
sql += col.replace(" ", "_").replace("#", "").replace("/", "");
sql += " varchar(";
sql += len;
sql += ")";
}
} else {
List<String> cols = parser.getColumnHeaders();
for (String col : cols) {
sql += ", ";
sql += col.replace(" ", "_").replace("#", "").replace("/", "");
if (col.equals("BLOB_CONTENTS")
|| col.equals("VALUE_LONG_TXT")
|| col.equals("COMMENT_TXT")
|| col.equals("NONPREG_REL_PROBLM_SCT_CD")) {
sql += " mediumtext";
} else if (col.indexOf("Date") > -1
|| col.indexOf("Time") > -1) {
sql += " varchar(10)";
} else {
sql += " varchar(255)";
}
}
}
sql += ");";
/*LOG.debug("-- fileType");
LOG.debug(sql);*/
System.out.println(sql);
}
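//NOTE: placeholder - the commented steps below outline the intended audit conversion, but nothing is implemented yet beyond the logging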
private static void convertFhirAudit(UUID serviceId, int threads) {
LOG.info("Converting FHIR audit for " + serviceId);
try {
//get all systems
//for each file in publisher transform
// need to create new version in audit
// go through each file and generate the published_file_record entries
// store map of old audit file ID -> new file ID
//update transform_warning table, to set new file ID
//for each audit JSON record
// deserialise
// convert to new format
// what about FHIR ones that point to the wrong DB, like the Emis code map ones?
// save to new DB
// delete from old DB
LOG.info("Finished Converting FHIR audit for " + serviceId);
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void moveS3ToAudit(int threads) {
LOG.info("Moving S3 to Audit");
try {
//list S3 contents
List<FileInfo> files = FileHelper.listFilesInSharedStorageWithInfo("s3://discoveryaudit/audit");
LOG.debug("Found " + files.size() + " audits");
int countPerThread = files.size() / threads;
int pos = 0;
AtomicInteger done = new AtomicInteger();
List<Thread> threadList = new ArrayList<>();
for (int i=0; i<threads; i++) {
List<FileInfo> perThread = new ArrayList<>();
int countThisThread = countPerThread;
if (i+1 == threads) {
countThisThread = files.size() - pos;
}
for (int j=0; j<countThisThread; j++) {
FileInfo fileInfo = files.get(pos);
pos ++;
perThread.add(fileInfo);
}
MoveToS3Runnable r = new MoveToS3Runnable(perThread, done);
Thread t = new Thread(r);
threadList.add(t);
t.start();
}
while (true) {
Thread.sleep(5000);
boolean allDone = true;
for (Thread t: threadList) {
if (t.getState() != Thread.State.TERMINATED) {
//if (!t.isAlive()) {
allDone = false;
break;
}
}
if (allDone) {
break;
}
}
LOG.debug("Finished with " + done.get() + " / " + files.size());
LOG.info("Finished Moving S3 to Audit");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void convertEmisGuids() {
LOG.debug("Converting Emis Guid");
try {
Map<String, String> map = new HashMap<>();
//this list of guids and dates is based off the live Emis extracts, giving the most recent bulk date for each organisation
//only practices where the extract started before the move to AWS and where the extract was disabled and re-bulked need to be in here.
//Practices disabled and re-bulked since the move to AWS are handled differently.
map.put("{DD31E915-7076-46CF-99CD-8378AB588B69}", "20/07/2017");
map.put("{87A8851C-3DA4-4BE0-869C-3BF6BA7C0612}", "15/10/2017");
map.put("{612DCB3A-5BE6-4D50-909B-F0F20565F9FC}", "09/08/2017");
map.put("{15667F8D-46A0-4A87-9FA8-0C56B157A0A9}", "05/05/2017");
map.put("{3CFEFBF9-B856-4A40-A39A-4EB6FA39295E}", "31/01/2017");
map.put("{3F481450-AD19-4793-B1F0-40D5C2C57EF7}", "04/11/2017");
map.put("{83939542-20E4-47C5-9883-BF416294BB22}", "13/10/2017");
map.put("{73AA7E3A-4331-4167-8711-FE07DDBF4657}", "15/10/2017");
map.put("{3B703CCF-C527-4EC8-A802-00D3B1535DD0}", "01/02/2017");
map.put("{ED442CA3-351F-43E4-88A2-2EEACE39A402}", "13/10/2017");
map.put("{86537B5B-7CF3-4964-8906-7C10929FBC20}", "13/05/2017");
map.put("{9A4518C4-82CE-4509-8039-1B5F49F9C1FA}", "12/08/2017");
map.put("{16D7F8F9-4A35-44B1-8F1D-DD0162584684}", "11/07/2017");
map.put("{D392C499-345C-499B-898C-93F2CB8CC1B9}", "15/10/2017");
map.put("{5B87882A-0EE8-4233-93D0-D2F5F4F94040}", "15/03/2017");
map.put("{CFE3B460-9058-47FB-BF1D-6BEC13A2257D}", "19/04/2017");
map.put("{7B03E105-9275-47CC-8022-1469FE2D6AE4}", "20/04/2017");
map.put("{94470227-587C-47D7-A51F-9893512424D8}", "27/04/2017");
map.put("{734F4C99-6326-4CA4-A22C-632F0AC12FFC}", "17/10/2017");
map.put("{03C5B4B4-1A70-45F8-922E-135C826D48E0}", "20/04/2017");
map.put("{1BB17C3F-CE80-4261-AF6C-BE987E3A5772}", "09/05/2017");
map.put("{16F6DD42-2140-4395-95D5-3FA50E252896}", "20/04/2017");
map.put("{3B6FD632-3FFB-48E6-9775-287F6C486752}", "15/10/2017");
map.put("{F987F7BD-E19C-46D2-A446-913489F1BB7A}", "05/02/2017");
map.put("{BE7CC1DC-3CAB-4BB1-A5A2-B0C854C3B78E}", "06/07/2017");
map.put("{303EFA4E-EC8F-4CBC-B629-960E4D799E0D}", "15/10/2017");
map.put("{5EE8FD1F-F23A-4209-A1EE-556F9350C900}", "01/02/2017");
map.put("{04F6C555-A298-45F1-AC5E-AC8EBD2BB720}", "17/10/2017");
map.put("{67383254-F7F1-4847-9AA9-C7DCF32859B8}", "17/10/2017");
map.put("{31272E4E-40E0-4103-ABDC-F40A7B75F278}", "19/10/2017");
map.put("{09CA2E3B-7143-4999-9934-971F3F2E6D8C}", "15/10/2017");
map.put("{0527BCE2-4315-47F2-86A1-2E9F3E50399B}", "15/10/2017");
map.put("{16DD14B5-D1D5-4B0C-B886-59AC4DACDA7A}", "04/07/2017");
map.put("{411D0A79-6913-473C-B486-C01F6430D8A6}", "21/09/2017");
map.put("{0862FADA-594A-415E-B971-7A4312E0A58C}", "10/06/2017");
map.put("{249C3F3C-24F0-44CE-97A9-B535982BD70C}", "15/10/2017");
map.put("{5D7A1915-6E22-4B20-A8AE-4768C06D3BBF}", "28/09/2017"); //Barts community
map.put("{131AE556-8B50-4C17-9D7D-A4B19F7B1FEA}", "15/10/2017"); //Aberfeldy practice F84698
map.put("{C0D2D0DF-EF78-444D-9A6D-B9EDEF5EF350}", "13/10/2017");
map.put("{F174B354-4156-4BCB-960F-35D0145075EA}", "01/02/2017");
map.put("{38600D63-1DE0-4910-8ED6-A38DC28A9DAA}", "19/02/2018"); //THE SPITALFIELDS PRACTICE (CDB 16);F84081
map.put("{B3ECA2DE-D926-4594-B0EA-CF2F28057CE1}", "19/10/2017");
map.put("{18F7C28B-2A54-4F82-924B-38C60631FFFA}", "04/02/2018"); //Rowans Surgery (CDB 18174);H85035
map.put("{16FB5EE8-5039-4068-BC42-1DB56DC2A530}", "08/06/2017");
map.put("{4BA4A5AC-7B25-40B2-B0EA-135702A72F9D}", "15/10/2017");
map.put("{01B8341F-BC8F-450E-8AFA-4CDA344A5009}", "15/10/2017");
map.put("{E6FBEA1C-BDA2-40B7-A461-C262103F08D7}", "08/06/2017");
map.put("{141C68EB-1BC8-4E99-A9D9-0E63A8944CA9}", "15/10/2017");
map.put("{A3EA804D-E7EB-43EE-8F1F-E860F6337FF7}", "15/10/2017");
map.put("{771B42CC-9C0C-46E2-8143-76F04AF91AD5}", "13/11/2017"); //cranwich road
map.put("{16EA8D5C-C667-4818-B629-5D6F4300FEEF}", "11/05/2017");
map.put("{29E51964-C94D-4CB4-894E-EB18E27DEFC1}", "15/10/2017");
map.put("{3646CCA5-7FE4-4DFE-87CD-DA3CE1BA885D}", "27/09/2017");
map.put("{3EC82820-702F-4218-853B-D3E5053646A8}", "05/05/2017");
map.put("{37F3E676-B203-4329-97F8-2AF5BFEAEE5A}", "19/10/2017");
map.put("{A0E3208B-95E9-4284-9B5A-D4D387CCC9F9}", "07/06/2017");
map.put("{0BEAF1F0-9507-4AC2-8997-EC0BA1D0247E}", "19/10/2017");
map.put("{071A50E7-1764-4210-94EF-6A4BF96CF753}", "21/02/2017");
map.put("{0C1983D8-FB7D-4563-84D0-1F8F6933E786}", "20/07/2017");
map.put("{871FEEB2-CE30-4603-B9A3-6FA6CC47B5D4}", "15/10/2017");
map.put("{42906EBE-8628-486D-A52F-27B935C9937A}", "01/02/2017");
map.put("{1AB7ABF3-2572-4D07-B719-CFB2FE3AAC80}", "15/10/2017");
map.put("{E312A5B7-13E7-4E43-BE35-ED29F6216D3C}", "20/04/2017");
map.put("{55E60891-8827-40CD-8011-B0223D5C8970}", "15/10/2017");
map.put("{03A63F52-7FEE-4592-9B54-83CEBCF67B5D}", "26/04/2017");
map.put("{DB39B649-B48D-4AC2-BAB1-AC807AABFAC4}", "15/10/2017");
map.put("{0AF9B2AF-A0FB-40B0-BA05-743BA6845DB1}", "26/08/2017");
map.put("{A7600092-319C-4213-92C2-738BEEFC1609}", "31/01/2017");
map.put("{5A1AABA9-7E96-41E7-AF18-E02F4CF1DFB6}", "15/10/2017");
map.put("{7D8CE31D-66AA-4D6A-9EFD-313646BD1D73}", "15/10/2017");
map.put("{03EA4A79-B6F1-4524-9D15-992B47BCEC9A}", "15/10/2017");
map.put("{4588C493-2EA3-429A-8428-E610AE6A6D76}", "28/09/2017"); //Barts community
map.put("{B13F3CC9-C317-4E0D-9C57-C545E4A53CAF}", "15/10/2017");
map.put("{463DA820-6EC4-48CB-B915-81B31AFBD121}", "13/10/2017");
map.put("{16F0D65C-B2A8-4186-B4E7-BBAF4390EC55}", "13/10/2017");
map.put("{0039EF15-2DCF-4F70-B371-014C807210FD}", "24/05/2017");
map.put("{E132BF05-78D9-4E4B-B875-53237E76A0FA}", "19/10/2017");
map.put("{3DFC2DA6-AD8C-4836-945D-A6F8DB22AA49}", "15/10/2017");
map.put("{BCB43B1D-2857-4186-918B-460620F98F81}", "13/10/2017");
map.put("{E134C74E-FA3E-4E14-A4BB-314EA3D3AC16}", "15/10/2017");
map.put("{C0F40044-C2CA-4D1D-95D3-553B29992385}", "26/08/2017");
map.put("{B174A018-538D-4065-838C-023A245B53DA}", "14/02/2017");
map.put("{43380A69-AE7D-4ED7-B014-0708675D0C02}", "08/06/2017");
map.put("{E503F0E0-FE56-4CEF-BAB5-0D25B834D9BD}", "13/10/2017");
map.put("{08946F29-1A53-4AF2-814B-0B8758112F21}", "07/02/2018"); //NEWHAM MEDICAL CENTRE (CDB 3461);F84669
map.put("{09857684-535C-4ED6-8007-F91F366611C6}", "19/10/2017");
map.put("{C409A597-009A-4E11-B828-A595755DE0EA}", "17/10/2017");
map.put("{58945A1C-2628-4595-8F8C-F75D93045949}", "15/10/2017");
map.put("{16FF2874-20B0-4188-B1AF-69C97055AA60}", "17/10/2017");
map.put("{2C91E9DA-3F92-464E-B6E6-61D3DE52E62F}", "15/10/2017");
map.put("{16E7AD27-2AD9-43C0-A473-1F39DF93E981}", "10/06/2017");
map.put("{A528478D-65DB-435C-9E98-F8BDB49C9279}", "20/04/2017");
map.put("{A2BDB192-E79C-44C5-97A2-1FD4517C456F}", "21/08/2017");
map.put("{73DFF193-E917-4DBC-B5CF-DD2797B29377}", "15/10/2017");
map.put("{62825316-9107-4E2C-A22C-86211B4760DA}", "13/10/2017");
map.put("{006E8A30-2A45-4DBE-91D7-1C53FADF38B1}", "28/01/2018"); //The Lawson Practice (CDB 4334);F84096
map.put("{E32AA6A6-46B1-4198-AA13-058038AB8746}", "13/10/2017");
map.put("{B51160F1-79E3-4BA7-AA3D-1112AB341146}", "30/09/2017");
map.put("{234503E5-56B4-45A0-99DA-39854FBE78E9}", "01/02/2017");
map.put("{7D1852DA-E264-4599-B9B4-8F40207F967D}", "09/10/2017");
map.put("{44716213-7FEE-4247-A09E-7285BD6B69C6}", "13/10/2017");
map.put("{19BCC870-2704-4D21-BA7B-56F2F472AF35}", "15/10/2017");
map.put("{FEF842DA-FD7C-480F-945A-D097910A81EB}", "13/10/2017");
map.put("{1C980E19-4A39-4ACD-BA8A-925D3E525765}", "13/10/2017");
map.put("{AABDDC3A-93A4-4A87-9506-AAF52E74012B}", "07/02/2018"); //DR N DRIVER AND PARTNERS (CDB 4419);F84086
map.put("{90C2959C-0C2D-43DC-A81B-4AD594C17999}", "20/04/2017");
map.put("{1F1669CF-1BB0-47A7-8FBF-BE65651644C1}", "15/10/2017");
map.put("{C1800BE8-4C1D-4340-B0F2-7ED208586ED3}", "15/10/2017");
map.put("{55A94703-4582-46FB-808A-1990E9CBCB6F}", "19/02/2018"); //Stamford Hill Group Practice (CDB 56);F84013
map.put("{D4996E62-268F-4759-83A6-7A68D0B38CEC}", "27/04/2017");
map.put("{3C843BBA-C507-4A95-9934-1A85B977C7B8}", "01/02/2017");
map.put("{2216253B-705D-4C46-ADB3-ED48493D6A39}", "03/02/2018"); //RIVERSIDE MEDICAL PRACTICE (CDB 14675);Y01962
map.put("{00123F97-4557-44AD-81B5-D9902DD72EE9}", "28/04/2017");
map.put("{E35D4D12-E7D2-484B-BFF6-4653B3FED228}", "15/10/2017");
map.put("{6D8B4D28-838B-4915-A148-6FEC2CEBCE77}", "05/07/2017");
map.put("{188D5B4D-4BF6-46E3-AF11-3AD32C68D251}", "19/10/2017");
map.put("{16F7DDE1-3763-4D3A-A58D-F12F967718CF}", "02/11/2017");
map.put("{03148933-6E1C-4A8A-A6D2-A3D488E14DDD}", "30/12/2017");
map.put("{16DE1A3C-875B-4AB2-B227-8A42604E029C}", "05/11/2017");
map.put("{D628D1BC-D02E-4101-B8CD-5B3DB2D06FC1}", "05/05/2017");
map.put("{1EA6259A-6A49-46DB-991D-D604675F87E2}", "15/10/2017");
map.put("{817F9B46-AEE0-45D5-95E3-989F75C4844E}", "20/04/2017");
map.put("{1C422471-F52A-4C30-8D23-140BEB7AAEFC}", "15/08/2017");
map.put("{A6467E73-0F15-49D6-AFAB-4DFB487E7963}", "10/05/2017");
map.put("{CC7D1781-1B85-4AD6-A5DD-9AD5E092E8DB}", "13/10/2017");
map.put("{167CD5C8-148F-4D78-8997-3B22EC0AF6B6}", "13/10/2017");
map.put("{9DD5D2CE-2585-49D8-AF04-2CB1BD137594}", "15/10/2017");
map.put("{D6696BB5-DE69-49D1-BC5E-C56799E42640}", "07/02/2018"); //BOLEYN MEDICAL CENTRE (CDB 4841);F84050
map.put("{169375A9-C3AB-4C5E-82B0-DFF7656AD1FA}", "20/04/2017");
map.put("{0A8ECFDE-95EE-4811-BC05-668D49F5C799}", "19/11/2017");
map.put("{79C898A1-BB92-48F9-B0C3-6725370132B5}", "20/10/2017");
map.put("{472AC9BA-AFFE-4E81-81CA-40DD8389784D}", "27/04/2017");
map.put("{00121CB7-76A6-4D57-8260-E9CA62FFCD77}", "13/10/2017");
map.put("{0FCBA0A7-7CAB-4E75-AC81-5041CD869CA1}", "15/10/2017");
map.put("{00A9C32D-2BB2-4A20-842A-381B3F2031C0}", "19/10/2017");
map.put("{26597C5A-3E29-4960-BE11-AC75D0430615}", "03/05/2017");
map.put("{D945FEF7-F5EF-422B-AB35-6937F9792B54}", "15/10/2017");
map.put("{16D685C6-130A-4B19-BCA9-90AC7DC16346}", "08/07/2017");
map.put("{F09E9CEF-2615-4C9D-AA3D-79E0AB10D0B3}", "13/10/2017");
map.put("{CD7EF748-DB88-49CF-AA6E-24F65029391F}", "15/10/2017");
map.put("{B22018CF-2B52-4A1A-9F6A-CEA13276DB2E}", "19/10/2017");
map.put("{0DF8CFC7-5DE6-4DDB-846A-7F28A2740A00}", "02/12/2017");
map.put("{50F439E5-DB18-43A0-9F25-825957013A07}", "11/01/2018"); //DR PI ABIOLA (CDB 5681);F84631
map.put("{00A3BA25-21C6-42DE-82AA-55FF0D85A6C3}", "31/10/2018"); //MARKET STREET HEALTH GROUP (CDB 381);F84004
map.put("{77B59D29-0FD9-4737-964F-5DBA49D94AB6}", "31/10/2018"); //Star Lane Medical Centre (CDB 40);F84017
map.put("{91239362-A105-4DEA-8E8E-239C3BCEDFD2}", "11/01/2018"); //BEECHWOOD MEDICAL CENTRE (CDB 5661);F84038
map.put("{53A113F5-6E3B-410F-A473-53E38A79335B}", "01/06/2018"); //ELFT Community RWKGY CDB 25362
map.put("{164BE8EC-E2D5-40DE-A5FC-25E058A5C47E}", "17/10/2018"); //Haiderian Medical Centre F82002
map.put("{164CE1B0-F7B3-44AF-B1E4-3DA6C64DEA4C}", "26/11/2018"); //THE GREEN WOOD PRACTICE F82007
map.put("{A30A4BB7-B17B-11D9-AD5F-00D0B77FCBFC}", "26/11/2018"); //Tulasi Medical Practice F82660
LOG.debug("Starting with map size " + map.size());
Map<String, String> hmGuidToOdsMap = new HashMap<>();
UUID systemId = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
UUID serviceId = service.getId();
String ods = service.getLocalId();
String orgGuid = null;
List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceId, systemId, 5);
for (Exchange exchange: exchanges) {
String exchangeBody = exchange.getBody();
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
if (!files.isEmpty()) {
ExchangePayloadFile first = files.get(0);
String path = first.getPath();
if (path.indexOf("EMIS_CUSTOM") > -1) {
continue;
}
File f = new File(path);
f = f.getParentFile(); //org GUID
orgGuid = f.getName();
break;
}
}
if (orgGuid == null) {
LOG.error("Failed to find OrgGuid for " + service.getName() + " " + ods);
} else {
hmGuidToOdsMap.put(orgGuid, ods);
}
}
//create new code
for (String orgGuid: map.keySet()) {
String dateStr = map.get(orgGuid);
String odsCode = hmGuidToOdsMap.get(orgGuid);
if (Strings.isNullOrEmpty(odsCode)) {
LOG.error("Missing ODS code for " + orgGuid);
} else {
System.out.println("map.put(\"" + odsCode + "\", \"" + dateStr + "\");");
}
}
LOG.debug("Finished Converting Emis Guid");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void testS3VsMySql(UUID serviceUuid, int count, int sqlBatchSize, String bucketName) {
LOG.debug("Testing S3 vs MySQL for service " + serviceUuid);
try {
//retrieve some audit JSON from the DB
EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection connection = session.connection();
String sql = "select resource_id, resource_type, version, mappings_json"
+ " from resource_field_mappings"
+ " where mappings_json != '[]'";
if (count > -1) {
sql += "limit " + count + ";";
}
Statement statement = connection.createStatement();
statement.setFetchSize(1000);
ResultSet rs = statement.executeQuery(sql);
List<ResourceFieldMapping> list = new ArrayList<>();
while (rs.next()) {
int col = 1;
String resourceId = rs.getString(col++);
String resourceType = rs.getString(col++);
String version = rs.getString(col++);
String json = rs.getString(col++);
ResourceFieldMapping obj = new ResourceFieldMapping();
obj.setResourceId(UUID.fromString(resourceId));
obj.setResourceType(resourceType);
obj.setVersion(UUID.fromString(version));
obj.setResourceField(json);
list.add(obj);
}
rs.close();
statement.close();
entityManager.close();
int done = 0;
//test writing to S3
long s3Start = System.currentTimeMillis();
LOG.debug("Doing S3 test");
for (int i=0; i<list.size(); i++) {
ResourceFieldMapping mapping = list.get(i);
String entryName = mapping.getVersion().toString() + ".json";
String keyName = "auditTest/" + serviceUuid + "/" + mapping.getResourceType() + "/" + mapping.getResourceId() + "/" + mapping.getVersion() + ".zip";
String jsonStr = mapping.getResourceField();
//may as well zip the data, since it will compress well
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(baos);
zos.putNextEntry(new ZipEntry(entryName));
zos.write(jsonStr.getBytes());
zos.flush();
zos.close();
byte[] bytes = baos.toByteArray();
ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
//ProfileCredentialsProvider credentialsProvider = new ProfileCredentialsProvider();
DefaultAWSCredentialsProviderChain credentialsProvider = DefaultAWSCredentialsProviderChain.getInstance();
AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder
.standard()
.withCredentials(credentialsProvider)
.withRegion(Regions.EU_WEST_2);
AmazonS3 s3Client = clientBuilder.build();
ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
objectMetadata.setContentLength(bytes.length);
PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, byteArrayInputStream, objectMetadata);
s3Client.putObject(putRequest);
done ++;
if (done % 1000 == 0) {
LOG.debug("Done " + done + " / " + list.size());
}
}
long s3End = System.currentTimeMillis();
LOG.debug("S3 took " + (s3End - s3Start) + " ms");
//test inserting into a DB
long sqlStart = System.currentTimeMillis();
LOG.debug("Doing SQL test");
sql = "insert into drewtest.json_speed_test (resource_id, resource_type, created_at, version, mappings_json) values (?, ?, ?, ?, ?)";
entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
session = (SessionImpl) entityManager.getDelegate();
connection = session.connection();
PreparedStatement ps = connection.prepareStatement(sql);
entityManager.getTransaction().begin();
done = 0;
int currentBatchSize = 0;
for (int i=0; i<list.size(); i++) {
ResourceFieldMapping mapping = list.get(i);
int col = 1;
ps.setString(col++, mapping.getResourceId().toString());
ps.setString(col++, mapping.getResourceType());
ps.setDate(col++, new java.sql.Date(System.currentTimeMillis()));
ps.setString(col++, mapping.getVersion().toString());
ps.setString(col++, mapping.getResourceField());
ps.addBatch();
currentBatchSize ++;
if (currentBatchSize >= sqlBatchSize
|| i+1 == list.size()) {
ps.executeBatch();
entityManager.getTransaction().commit();
//mirror what would happen normally
ps.close();
entityManager.close();
if (i+1 < list.size()) {
entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
session = (SessionImpl) entityManager.getDelegate();
connection = session.connection();
ps = connection.prepareStatement(sql);
entityManager.getTransaction().begin();
}
}
done ++;
if (done % 1000 == 0) {
LOG.debug("Done " + done + " / " + list.size());
}
}
long sqlEnd = System.currentTimeMillis();
LOG.debug("SQL took " + (sqlEnd - sqlStart) + " ms");
LOG.debug("Finished Testing S3 vs MySQL for service " + serviceUuid);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
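/**
 * Loads raw Emis CSV content for a service into staging tables in the given database. The extract
 * date is parsed from the fourth underscore-delimited token of the file name, e.g. (hypothetical
 * name) 291_Admin_Patient_20170301000000_ABC.csv gives 2017-03-01 00:00:00. Custom OriginalTerms
 * and RegistrationStatus files are skipped, Coding_ClinicalCode and Coding_DrugCode files are
 * always loaded, and everything else is loaded only if extracted on or after 2000-01-01.
 */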
private static void loadEmisData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String onlyThisFileType) {
LOG.debug("Loading Emis data from into " + dbUrl);
try {
//hash file type of every file
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword);
SimpleDateFormat sdfStart = new SimpleDateFormat("yyyy-MM-dd");
Date startDate = sdfStart.parse("2000-01-01");
for (int i = exchanges.size() - 1; i >= 0; i--) {
Exchange exchange = exchanges.get(i);
String exchangeBody = exchange.getBody();
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
if (files.isEmpty()) {
continue;
}
for (ExchangePayloadFile file : files) {
String type = file.getType();
String path = file.getPath();
//if only doing a specific file type, skip all others
if (onlyThisFileType != null
&& !type.equals(onlyThisFileType)) {
continue;
}
String name = FilenameUtils.getBaseName(path);
String[] toks = name.split("_");
if (toks.length != 5) {
throw new TransformException("Failed to find extract date in filename " + path);
}
String dateStr = toks[3];
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
Date extractDate = sdf.parse(dateStr);
boolean processFile = false;
if (type.equalsIgnoreCase("OriginalTerms")
|| type.equalsIgnoreCase("RegistrationStatus")) {
//can't process these custom files in this routine
} else if (type.equalsIgnoreCase("Coding_ClinicalCode")
|| type.equalsIgnoreCase("Coding_DrugCode")) {
processFile = true;
} else {
if (!extractDate.before(startDate)) {
processFile = true;
}
}
if (processFile) {
loadEmisDataFromFile(conn, path, type, extractDate);
}
}
}
conn.close();
LOG.debug("Finished Emis data from into " + dbUrl);
} catch (Throwable t) {
LOG.error("", t);
}
}
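/**
 * Reflectively constructs the schema parser for an Emis file type by splitting the type on "_"
 * and lower-casing the first letter of the domain, e.g. "Admin_Patient" maps to
 * org.endeavourhealth.transform.emis.csv.schema.admin.Patient. Returns null if no parser class exists.
 */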
private static ParserI createParserForEmisFileType(String fileType, String filePath) {
String[] toks = fileType.split("_");
String domain = toks[0];
String name = toks[1];
String first = domain.substring(0, 1);
String last = domain.substring(1);
domain = first.toLowerCase() + last;
try {
String clsName = "org.endeavourhealth.transform.emis.csv.schema." + domain + "." + name;
Class cls = Class.forName(clsName);
//now construct an instance of the parser for the file we've found
Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
return constructor.newInstance(null, null, null, EmisCsvToFhirTransformer.VERSION_5_4, filePath);
} catch (Exception ex) {
LOG.error("No parser for file type [" + fileType + "]");
LOG.error("", ex);
return null;
}
}
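/**
 * Streams one Emis CSV file into its staging table via a prepared INSERT, executing in batches of
 * five rows and logging the failed statements if a batch errors. Skips the file if the target
 * table does not exist.
 */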
private static void loadEmisDataFromFile(Connection conn, String filePath, String fileType, Date extractDate) throws Exception {
LOG.debug("Loading " + fileType + ": " + filePath);
String fileName = FilenameUtils.getName(filePath);
ParserI parser = createParserForEmisFileType(fileType, filePath);
if (parser == null) {
return;
}
String table = fileType.replace(" ", "_");
//check table is there
String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(sql);
boolean tableExists = rs.next();
rs.close();
statement.close();
if (!tableExists) {
LOG.error("No table exists for " + table);
return;
}
//create insert statement
sql = "INSERT INTO `" + table + "` (";
sql += "file_name, extract_date";
List<String> cols = parser.getColumnHeaders();
for (String col : cols) {
sql += ", ";
sql += col.replace(" ", "_").replace("#", "").replace("/", "");
}
sql += ") VALUES (";
sql += "?, ?";
for (String col : cols) {
sql += ", ";
sql += "?";
}
sql += ")";
PreparedStatement ps = conn.prepareStatement(sql);
List<String> currentBatchStrs = new ArrayList<>();
//load table
try {
int done = 0;
int currentBatchSize = 0;
while (parser.nextRecord()) {
int col = 1;
//file name is always first
ps.setString(col++, fileName);
ps.setDate(col++, new java.sql.Date(extractDate.getTime()));
for (String colName : cols) {
CsvCell cell = parser.getCell(colName);
if (cell == null) {
ps.setNull(col++, Types.VARCHAR);
} else {
ps.setString(col++, cell.getString());
}
}
ps.addBatch();
currentBatchSize++;
currentBatchStrs.add(ps.toString()); //for error handling
if (currentBatchSize >= 5) {
ps.executeBatch();
currentBatchSize = 0;
currentBatchStrs.clear();
}
done++;
if (done % 5000 == 0) {
LOG.debug("Done " + done);
}
}
if (currentBatchSize > 0) {
ps.executeBatch();
}
ps.close();
} catch (Throwable t) {
LOG.error("Failed on batch with statements:");
for (String currentBatchStr : currentBatchStrs) {
LOG.error(currentBatchStr);
}
throw t;
}
LOG.debug("Finished " + fileType + ": " + filePath);
}
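/**
 * Prints DROP/CREATE TABLE statements to stdout for raw staging tables covering each known
 * Barts file type listed below.
 */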
private static void createBartsDataTables() {
LOG.debug("Creating Barts data tables");
try {
List<String> fileTypes = new ArrayList<>();
fileTypes.add("AEATT");
fileTypes.add("Birth");
//fileTypes.add("BulkDiagnosis");
//fileTypes.add("BulkProblem");
//fileTypes.add("BulkProcedure");
fileTypes.add("CLEVE");
fileTypes.add("CVREF");
fileTypes.add("Diagnosis");
fileTypes.add("ENCINF");
fileTypes.add("ENCNT");
fileTypes.add("FamilyHistory");
fileTypes.add("IPEPI");
fileTypes.add("IPWDS");
fileTypes.add("LOREF");
fileTypes.add("NOMREF");
fileTypes.add("OPATT");
fileTypes.add("ORGREF");
fileTypes.add("PPADD");
fileTypes.add("PPAGP");
fileTypes.add("PPALI");
fileTypes.add("PPATI");
fileTypes.add("PPINF");
fileTypes.add("PPNAM");
fileTypes.add("PPPHO");
fileTypes.add("PPREL");
fileTypes.add("Pregnancy");
fileTypes.add("Problem");
fileTypes.add("PROCE");
fileTypes.add("Procedure");
fileTypes.add("PRSNLREF");
fileTypes.add("SusEmergency");
fileTypes.add("SusInpatient");
fileTypes.add("SusOutpatient");
//fileTypes.add("Tails"); TODO - have three separate tails files
fileTypes.add("EventCode");
fileTypes.add("EventSetCanon");
fileTypes.add("EventSet");
fileTypes.add("EventSetExplode");
fileTypes.add("BlobContent");
fileTypes.add("SusInpatientTail");
fileTypes.add("SusOutpatientTail");
fileTypes.add("SusEmergencyTail");
fileTypes.add("AEINV");
fileTypes.add("AETRE");
fileTypes.add("OPREF");
fileTypes.add("STATREF");
fileTypes.add("RTTPE");
fileTypes.add("PPATH");
fileTypes.add("DOCRP");
fileTypes.add("SCHAC");
fileTypes.add("EALEN");
fileTypes.add("DELIV");
fileTypes.add("EALOF");
fileTypes.add("SusEmergencyCareDataSet");
fileTypes.add("SusEmergencyCareDataSetTail");
for (String fileType : fileTypes) {
createBartsDataTable(fileType);
}
LOG.debug("Finished Creating Barts data tables");
} catch (Throwable t) {
LOG.error("", t);
}
}
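/**
 * Prints the DDL for one Barts file type, mirroring createEmisDataTable but without the
 * extract_date column: fixed-width files get one varchar per fixed-length field, CSV files get
 * varchars (or mediumtext for known long-text columns).
 */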
private static void createBartsDataTable(String fileType) throws Exception {
ParserI parser = null;
try {
String clsName = "org.endeavourhealth.transform.barts.schema." + fileType;
Class cls = Class.forName(clsName);
//now construct an instance of the parser for the file we've found
Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
parser = constructor.newInstance(null, null, null, null, null);
} catch (ClassNotFoundException cnfe) {
System.out.println("-- No parser for file type [" + fileType + "]");
return;
}
System.out.println("-- " + fileType);
String table = fileType.replace(" ", "_");
String dropSql = "DROP TABLE IF EXISTS `" + table + "`;";
System.out.println(dropSql);
String sql = "CREATE TABLE `" + table + "` (";
sql += "file_name varchar(100)";
if (parser instanceof AbstractFixedParser) {
AbstractFixedParser fixedParser = (AbstractFixedParser) parser;
List<FixedParserField> fields = fixedParser.getFieldList();
for (FixedParserField field : fields) {
String col = field.getName();
int len = field.getFieldlength();
sql += ", ";
sql += col.replace(" ", "_").replace("#", "").replace("/", "");
sql += " varchar(";
sql += len;
sql += ")";
}
} else {
List<String> cols = parser.getColumnHeaders();
for (String col : cols) {
sql += ", ";
sql += col.replace(" ", "_").replace("#", "").replace("/", "");
if (col.equals("BLOB_CONTENTS")
|| col.equals("VALUE_LONG_TXT")
|| col.equals("COMMENT_TXT")
|| col.equals("NONPREG_REL_PROBLM_SCT_CD")) {
sql += " mediumtext";
} else if (col.indexOf("Date") > -1
|| col.indexOf("Time") > -1) {
sql += " varchar(10)";
} else {
sql += " varchar(255)";
}
}
}
sql += ");";
/*LOG.debug("-- fileType");
LOG.debug(sql);*/
System.out.println(sql);
}
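/**
 * Loads raw Barts file content for a service into staging tables in the given database. Reference
 * files (CVREF, LOREF, ORGREF, PRSNLREF, NOMREF) are always loaded; other files only when the
 * extract date, taken from the yyyy-MM-dd parent directory name, is on or after startDateStr.
 */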
private static void loadBartsData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String startDateStr, String onlyThisFileType) {
LOG.debug("Loading Barts data from into " + dbUrl);
try {
//hash file type of every file
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date startDate = sdf.parse(startDateStr);
for (int i = exchanges.size() - 1; i >= 0; i--) {
Exchange exchange = exchanges.get(i);
String exchangeBody = exchange.getBody();
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
if (files.isEmpty()) {
continue;
}
for (ExchangePayloadFile file : files) {
String type = file.getType();
String path = file.getPath();
//if only doing a specific file type, skip all others
if (onlyThisFileType != null
&& !type.equals(onlyThisFileType)) {
continue;
}
boolean processFile = false;
if (type.equalsIgnoreCase("CVREF")
|| type.equalsIgnoreCase("LOREF")
|| type.equalsIgnoreCase("ORGREF")
|| type.equalsIgnoreCase("PRSNLREF")
|| type.equalsIgnoreCase("NOMREF")) {
processFile = true;
} else {
File f = new File(path);
File parentFile = f.getParentFile();
String parentDir = parentFile.getName();
Date extractDate = sdf.parse(parentDir);
if (!extractDate.before(startDate)) {
processFile = true;
}
/*if (!extractDate.before(startDate)
&& !extractDate.after(endDate)) {
processFile = true;
}*/
}
if (processFile) {
loadBartsDataFromFile(conn, path, type);
}
}
}
conn.close();
LOG.debug("Finished Loading Barts data from into " + dbUrl);
} catch (Throwable t) {
LOG.error("", t);
}
}
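/**
 * Barts equivalent of loadEmisDataFromFile: streams one file into its staging table via a
 * prepared INSERT in batches of five rows (no extract_date column), skipping the file if the
 * table does not exist.
 */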
private static void loadBartsDataFromFile(Connection conn, String filePath, String fileType) throws Exception {
LOG.debug("Loading " + fileType + ": " + filePath);
String fileName = FilenameUtils.getName(filePath);
ParserI parser = null;
try {
String clsName = "org.endeavourhealth.transform.barts.schema." + fileType;
Class cls = Class.forName(clsName);
//now construct an instance of the parser for the file we've found
Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
parser = constructor.newInstance(null, null, null, null, filePath);
} catch (ClassNotFoundException cnfe) {
LOG.error("No parser for file type [" + fileType + "]");
return;
}
String table = fileType.replace(" ", "_");
//check table is there
String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1";
Statement statement = conn.createStatement();
ResultSet rs = statement.executeQuery(sql);
boolean tableExists = rs.next();
rs.close();
statement.close();
if (!tableExists) {
LOG.error("No table exists for " + table);
return;
}
//create insert statement
sql = "INSERT INTO `" + table + "` (";
sql += "file_name";
List<String> cols = parser.getColumnHeaders();
for (String col : cols) {
sql += ", ";
sql += col.replace(" ", "_").replace("#", "").replace("/", "");
}
sql += ") VALUES (";
sql += "?";
for (String col : cols) {
sql += ", ";
sql += "?";
}
sql += ")";
PreparedStatement ps = conn.prepareStatement(sql);
List<String> currentBatchStrs = new ArrayList<>();
//load table
try {
int done = 0;
int currentBatchSize = 0;
while (parser.nextRecord()) {
int col = 1;
//file name is always first
ps.setString(col++, fileName);
for (String colName : cols) {
CsvCell cell = parser.getCell(colName);
if (cell == null) {
ps.setNull(col++, Types.VARCHAR);
} else {
ps.setString(col++, cell.getString());
}
}
ps.addBatch();
currentBatchSize++;
currentBatchStrs.add(ps.toString()); //for error handling
if (currentBatchSize >= 5) {
ps.executeBatch();
currentBatchSize = 0;
currentBatchStrs.clear();
}
done++;
if (done % 5000 == 0) {
LOG.debug("Done " + done);
}
}
if (currentBatchSize > 0) {
ps.executeBatch();
}
ps.close();
} catch (Throwable t) {
LOG.error("Failed on batch with statements:");
for (String currentBatchStr : currentBatchStrs) {
LOG.error(currentBatchStr);
}
throw t;
}
LOG.debug("Finished " + fileType + ": " + filePath);
}
/*private static void fixPseudoIds(String subscriberConfig, int threads) {
LOG.debug("Fixing Pseudo IDs for " + subscriberConfig);
try {
//update pseudo ID on patient table
//update psuedo ID on person table
//update pseudo ID on subscriber_transform mapping table
JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
JsonNode saltNode = config.get("pseudonymisation");
ObjectMapper mapper = new ObjectMapper();
Object json = mapper.readValue(saltNode.toString(), Object.class);
String linkDistributors = mapper.writeValueAsString(json);
LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class);
LinkDistributorConfig[] arr = null;
JsonNode linkDistributorsNode = config.get("linkedDistributors");
if (linkDistributorsNode != null) {
json = mapper.readValue(linkDistributorsNode.toString(), Object.class);
linkDistributors = mapper.writeValueAsString(json);
arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class);
}
Connection subscriberConnection = EnterpriseFiler.openConnection(config);
List<Long> patientIds = new ArrayList<>();
Map<Long, Long> hmOrgIds = new HashMap<>();
Map<Long, Long> hmPersonIds = new HashMap<>();
String sql = "SELECT id, organization_id, person_id FROM patient";
Statement statement = subscriberConnection.createStatement();
statement.setFetchSize(10000);
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
long patientId = rs.getLong(1);
long orgId = rs.getLong(2);
long personId = rs.getLong(3);
patientIds.add(new Long(patientId));
hmOrgIds.put(new Long(patientId), new Long(orgId));
hmPersonIds.put(new Long(patientId), new Long(personId));
}
rs.close();
subscriberConnection.close();
LOG.debug("Found " + patientIds.size() + " patients");
AtomicInteger done = new AtomicInteger();
int pos = 0;
List<Thread> threadList = new ArrayList<>();
for (int i=0; i<threads; i++) {
List<Long> patientSubset = new ArrayList<>();
int count = patientIds.size() / threads;
if (i+1 == threads) {
count = patientIds.size() - pos;
}
for (int j=0; j<count; j++) {
Long patientId = patientIds.get(pos);
patientSubset.add(patientId);
pos ++;
}
FixPseudoIdRunnable runnable = new FixPseudoIdRunnable(subscriberConfig, patientSubset, hmOrgIds, hmPersonIds, done);
Thread t = new Thread(runnable);
t.start();
threadList.add(t);
}
while (true) {
Thread.sleep(5000);
boolean allDone = true;
for (Thread t: threadList) {
if (t.getState() != Thread.State.TERMINATED) {
//if (!t.isAlive()) {
allDone = false;
break;
}
}
if (allDone) {
break;
}
}
LOG.debug("Finished Fixing Pseudo IDs for " + subscriberConfig);
} catch (Throwable t) {
LOG.error("", t);
}
}
static class FixPseudoIdRunnable implements Runnable {
private String subscriberConfig = null;
private List<Long> patientIds = null;
private Map<Long, Long> hmOrgIds = null;
private Map<Long, Long> hmPersonIds = null;
private AtomicInteger done = null;
public FixPseudoIdRunnable(String subscriberConfig, List<Long> patientIds, Map<Long, Long> hmOrgIds, Map<Long, Long> hmPersonIds, AtomicInteger done) {
this.subscriberConfig = subscriberConfig;
this.patientIds = patientIds;
this.hmOrgIds = hmOrgIds;
this.hmPersonIds = hmPersonIds;
this.done = done;
}
@Override
public void run() {
try {
doRun();
} catch (Throwable t) {
LOG.error("", t);
}
}
private void doRun() throws Exception {
JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
Connection subscriberConnection = EnterpriseFiler.openConnection(config);
Statement statement = subscriberConnection.createStatement();
JsonNode saltNode = config.get("pseudonymisation");
ObjectMapper mapper = new ObjectMapper();
Object json = mapper.readValue(saltNode.toString(), Object.class);
String linkDistributors = mapper.writeValueAsString(json);
LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class);
LinkDistributorConfig[] arr = null;
JsonNode linkDistributorsNode = config.get("linkedDistributors");
if (linkDistributorsNode != null) {
json = mapper.readValue(linkDistributorsNode.toString(), Object.class);
linkDistributors = mapper.writeValueAsString(json);
arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class);
}
//PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfig);
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection subscriberTransformConnection = session.connection();
Statement subscriberTransformStatement = subscriberTransformConnection.createStatement();
String sql = null;
ResultSet rs = null;
for (Long patientId: patientIds) {
Long orgId = hmOrgIds.get(patientId);
Long personId = hmPersonIds.get(patientId);
//find service ID
sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId;
rs = subscriberTransformStatement.executeQuery(sql);
if (!rs.next()) {
throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId);
}
String serviceId = rs.getString(1);
rs.close();
//find patient ID
sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId;
rs = subscriberTransformStatement.executeQuery(sql);
if (!rs.next()) {
throw new Exception("Failed to find resource iD for patient ID " + patientId);
}
String resourceType = rs.getString(1);
String resourceId = rs.getString(2);
rs.close();
if (!resourceType.equals("Patient")) {
throw new Exception("Not a patient resource type for enterprise ID " + patientId);
}
//get patient
Resource resource = null;
try {
resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.Patient, resourceId);
} catch (Exception ex) {
throw new Exception("Failed to get patient " + resourceId + " for service " + serviceId, ex);
}
if (resource == null) {
LOG.error("Failed to find patient resource for " + ResourceType.Patient + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
continue;
//throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
}
Patient patient = (Patient)resource;
//generate new pseudo ID
String pseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, salt);
//save to person
if (Strings.isNullOrEmpty(pseudoId)) {
sql = "UPDATE person"
+ " SET pseudo_id = null"
+ " WHERE id = " + personId;
statement.executeUpdate(sql);
} else {
sql = "UPDATE person"
+ " SET pseudo_id = '" + pseudoId + "'"
+ " WHERE id = " + personId;
statement.executeUpdate(sql);
}
//save to patient
if (Strings.isNullOrEmpty(pseudoId)) {
sql = "UPDATE patient"
+ " SET pseudo_id = null"
+ " WHERE id = " + patientId;
statement.executeUpdate(sql);
} else {
sql = "UPDATE patient"
+ " SET pseudo_id = '" + pseudoId + "'"
+ " WHERE id = " + patientId;
statement.executeUpdate(sql);
}
//linked distributors
if (arr != null) {
for (LinkDistributorConfig linked: arr) {
String linkedPseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, linked);
sql = "INSERT INTO link_distributor (source_skid, target_salt_key_name, target_skid) VALUES ('" + pseudoId + "', '" + linked.getSaltKeyName() + "', '" + linkedPseudoId + "')"
+ " ON DUPLICATE KEY UPDATE"
+ " target_salt_key_name = VALUES(target_salt_key_name),"
+ " target_skid = VALUES(target_skid)";
statement.executeUpdate(sql);
}
}
//save to subscriber transform
sql = "DELETE FROM pseudo_id_map WHERE patient_id = '" + resourceId + "'";
subscriberTransformStatement.executeUpdate(sql);
if (!Strings.isNullOrEmpty(pseudoId)) {
sql = "INSERT INTO pseudo_id_map (patient_id, pseudo_id) VALUES ('" + resourceId + "', '" + pseudoId + "')";
subscriberTransformStatement.executeUpdate(sql);
}
subscriberConnection.commit();
subscriberTransformConnection.commit();
int doneLocal = done.incrementAndGet();
if (doneLocal % 1000 == 0) {
LOG.debug("Done " + doneLocal);
}
}
statement.close();
subscriberTransformStatement.close();
subscriberConnection.close();
subscriberTransformConnection.close();
}
}*/
/*private static void fixDeceasedPatients(String subscriberConfig) {
LOG.debug("Fixing Deceased Patients for " + subscriberConfig);
try {
JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
Connection subscriberConnection = EnterpriseFiler.openConnection(config);
Map<Long, Long> patientIds = new HashMap<>();
String sql = "SELECT id, organization_id FROM patient WHERE date_of_death IS NOT NULL";
Statement statement = subscriberConnection.createStatement();
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
long patientId = rs.getLong(1);
long orgId = rs.getLong(2);
patientIds.put(new Long(patientId), new Long(orgId));
}
rs.close();
statement.close();
EnterpriseAgeUpdaterlDalI dal = DalProvider.factoryEnterpriseAgeUpdaterlDal(subscriberConfig);
EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection subscriberTransformConnection = session.connection();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
for (Long patientId: patientIds.keySet()) {
Long orgId = patientIds.get(patientId);
statement = subscriberTransformConnection.createStatement();
sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId;
rs = statement.executeQuery(sql);
if (!rs.next()) {
throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId);
}
String serviceId = rs.getString(1);
rs.close();
sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId;
rs = statement.executeQuery(sql);
if (!rs.next()) {
throw new Exception("Failed to find resource iD for patient ID " + patientId);
}
String resourceType = rs.getString(1);
String resourceId = rs.getString(2);
rs.close();
statement.close();
Resource resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.valueOf(resourceType), resourceId);
if (resource == null) {
LOG.error("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
continue;
//throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
}
Patient patient = (Patient)resource;
Date dob = patient.getBirthDate();
Date dod = patient.getDeceasedDateTimeType().getValue();
Integer[] ages = dal.calculateAgeValuesAndUpdateTable(patientId, dob, dod);
updateEnterprisePatient(patientId, ages, subscriberConnection);
updateEnterprisePerson(patientId, ages, subscriberConnection);
}
subscriberConnection.close();
subscriberTransformConnection.close();
LOG.debug("Finished Fixing Deceased Patients for " + subscriberConfig);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
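/**
 * Writes recalculated age values (years, months, weeks) back to the enterprise
 * patient table for the given enterprise patient ID, setting null for any unit
 * the age calculator didn't populate.
 */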
private static void updateEnterprisePatient(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception {
//the enterprise patient database isn't managed using hibernate, so we write the update statement directly
StringBuilder sb = new StringBuilder();
sb.append("UPDATE patient SET ");
sb.append("age_years = ?, ");
sb.append("age_months = ?, ");
sb.append("age_weeks = ? ");
sb.append("WHERE id = ?");
PreparedStatement update = connection.prepareStatement(sb.toString());
if (ages[EnterpriseAge.UNIT_YEARS] == null) {
update.setNull(1, Types.INTEGER);
} else {
update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]);
}
if (ages[EnterpriseAge.UNIT_MONTHS] == null) {
update.setNull(2, Types.INTEGER);
} else {
update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]);
}
if (ages[EnterpriseAge.UNIT_WEEKS] == null) {
update.setNull(3, Types.INTEGER);
} else {
update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]);
}
update.setLong(4, enterprisePatientId);
update.addBatch();
update.executeBatch();
connection.commit();
LOG.info("Updated patient " + enterprisePatientId + " to ages " + ages[EnterpriseAge.UNIT_YEARS] + " y, " + ages[EnterpriseAge.UNIT_MONTHS] + " m " + ages[EnterpriseAge.UNIT_WEEKS] + " wks");
}
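/**
 * Applies the same age values to the person table, restricted to the person row
 * that our patient links to and whose pseudo ID matches the patient's, so a person
 * record built from a different patient isn't overwritten.
 */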
private static void updateEnterprisePerson(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception {
//update the age fields on the person table where the person is for our patient and their pseudo IDs match
StringBuilder sb = new StringBuilder();
sb.append("UPDATE patient, person SET ");
sb.append("person.age_years = ?, ");
sb.append("person.age_months = ?, ");
sb.append("person.age_weeks = ? ");
sb.append("WHERE patient.id = ? ");
sb.append("AND patient.person_id = person.id ");
sb.append("AND patient.pseudo_id = person.pseudo_id");
PreparedStatement update = connection.prepareStatement(sb.toString());
if (ages[EnterpriseAge.UNIT_YEARS] == null) {
update.setNull(1, Types.INTEGER);
} else {
update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]);
}
if (ages[EnterpriseAge.UNIT_MONTHS] == null) {
update.setNull(2, Types.INTEGER);
} else {
update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]);
}
if (ages[EnterpriseAge.UNIT_WEEKS] == null) {
update.setNull(3, Types.INTEGER);
} else {
update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]);
}
update.setLong(4, enterprisePatientId);
update.addBatch();
update.executeBatch();
connection.commit();
}
/*private static void testS3Read(String s3BucketName, String keyName, String start, String len) {
LOG.debug("Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes");
try {
AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder
.standard()
.withCredentials(DefaultAWSCredentialsProviderChain.getInstance())
.withRegion(Regions.EU_WEST_2);
AmazonS3 s3Client = clientBuilder.build();
GetObjectRequest request = new GetObjectRequest(s3BucketName, keyName);
long startInt = Long.parseLong(start);
long lenInt = Long.parseLong(len);
long endInt = startInt + lenInt;
request.setRange(startInt, endInt);
long startMs = System.currentTimeMillis();
S3Object object = s3Client.getObject(request);
InputStream inputStream = object.getObjectContent();
InputStreamReader reader = new InputStreamReader(inputStream, Charset.defaultCharset());
StringBuilder sb = new StringBuilder();
char[] buf = new char[100];
while (true) {
int read = reader.read(buf);
if (read == -1
|| sb.length() >= lenInt) {
break;
}
sb.append(buf, 0, read);
}
reader.close();
long endMs = System.currentTimeMillis();
LOG.debug("Read " + sb.toString() + " in " + (endMs - startMs) + " ms");
LOG.debug("Finished Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
*//*sql = "SELECT * FROM resource_field_mappings WHERE version = 'a905db26-1357-4710-90ef-474f256567ed';";
PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*//*
*//*sql = "SELECT * FROM resource_field_mappings WHERE version = ?";
PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*//*
*//*statement1.setString(1, resourceType);
statement1.setString(3, resourceVersion);*//*
/*private static void fixBartsPatients(UUID serviceId) {
LOG.debug("Fixing Barts patients at service " + serviceId);
try {
EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl)edsEntityManager.getDelegate();
Connection edsConnection = session.connection();
int checked = 0;
int fixed = 0;
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId + "';";
Statement s = edsConnection.createStatement();
s.setFetchSize(10000); //don't get all rows at once
ResultSet rs = s.executeQuery(sql);
LOG.info("Got raw results back");
while (rs.next()) {
String patientId = rs.getString(1);
ResourceWrapper wrapper = resourceDal.getCurrentVersion(serviceId, ResourceType.Patient.toString(), UUID.fromString(patientId));
if (wrapper == null) {
LOG.error("Failed to get recource current for ID " + patientId);
continue;
}
String oldJson = wrapper.getResourceData();
Patient patient = (Patient)FhirSerializationHelper.deserializeResource(oldJson);
PatientBuilder patientBuilder = new PatientBuilder(patient);
List<String> numbersFromCsv = new ArrayList<>();
if (patient.hasTelecom()) {
for (ContactPoint contactPoint: patient.getTelecom()) {
if (contactPoint.hasId()) {
numbersFromCsv.add(contactPoint.getValue());
}
}
for (String numberFromCsv: numbersFromCsv) {
PPPHOTransformer.removeExistingContactPointWithoutIdByValue(patientBuilder, numberFromCsv);
}
}
List<HumanName> namesFromCsv = new ArrayList<>();
if (patient.hasName()) {
for (HumanName name: patient.getName()) {
if (name.hasId()) {
namesFromCsv.add(name);
}
}
for (HumanName name: namesFromCsv) {
PPNAMTransformer.removeExistingNameWithoutIdByValue(patientBuilder, name);
}
}
List<Address> addressesFromCsv = new ArrayList<>();
if (patient.hasAddress()) {
for (Address address: patient.getAddress()) {
if (address.hasId()) {
addressesFromCsv.add(address);
}
}
for (Address address: addressesFromCsv) {
PPADDTransformer.removeExistingAddressWithoutIdByValue(patientBuilder, address);
}
}
String newJson = FhirSerializationHelper.serializeResource(patient);
if (!newJson.equals(oldJson)) {
wrapper.setResourceData(newJson);
saveResourceWrapper(serviceId, wrapper);
fixed ++;
}
checked ++;
if (checked % 1000 == 0) {
LOG.debug("Checked " + checked + " fixed " + fixed);
}
}
LOG.debug("Checked " + checked + " fixed " + fixed);
rs.close();
s.close();
edsEntityManager.close();
LOG.debug("Finish Fixing Barts patients at service " + serviceId);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
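/**
 * Posts exchange IDs (one UUID per line in srcFile) to the named RabbitMQ exchange.
 * IDs already recorded in the sibling "DONE" file are skipped, and each ID is
 * appended to that file as it's posted, so the job can be re-run safely after a
 * failure. The optional throttle caps posting at roughly that many messages/second.
 *
 * A hypothetical invocation (illustrative values only):
 * <pre>
 *     postToRabbit("EdsProtocol", "/tmp/exchange_ids.txt", Integer.valueOf(100));
 * </pre>
 */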
private static void postToRabbit(String exchangeName, String srcFile, Integer throttle) {
LOG.info("Posting to " + exchangeName + " from " + srcFile);
if (throttle != null) {
LOG.info("Throttled to " + throttle + " messages/second");
}
try {
File src = new File(srcFile);
//create file of ones done
File dir = src.getParentFile();
String name = "DONE" + src.getName();
File dst = new File(dir, name);
Set<UUID> hsAlreadyDone = new HashSet<>();
if (dst.exists()) {
List<String> lines = Files.readAllLines(dst.toPath());
for (String line : lines) {
if (!Strings.isNullOrEmpty(line)) {
try {
UUID uuid = UUID.fromString(line);
hsAlreadyDone.add(uuid);
} catch (Exception ex) {
LOG.error("Skipping line " + line);
}
}
}
LOG.info("Already done " + hsAlreadyDone.size());
}
List<UUID> exchangeIds = new ArrayList<>();
int countTotal = 0;
List<String> lines = Files.readAllLines(src.toPath());
for (String line : lines) {
if (!Strings.isNullOrEmpty(line)) {
try {
UUID uuid = UUID.fromString(line);
countTotal++;
if (!hsAlreadyDone.contains(uuid)) {
exchangeIds.add(uuid);
}
} catch (Exception ex) {
LOG.error("Skipping line " + line);
}
}
}
LOG.info("Found " + countTotal + " down to " + exchangeIds.size() + " skipping ones already done, to post to " + exchangeName);
continueOrQuit();
FileWriter fileWriter = new FileWriter(dst, true);
PrintWriter printWriter = new PrintWriter(fileWriter);
long startMs = System.currentTimeMillis();
int doneThisSecond = 0;
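//throttling uses one-second windows: once 'throttle' messages have been posted
//since startMs, sleep off the remainder of the second and reset the counters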
LOG.info("Posting " + exchangeIds.size() + " to " + exchangeName);
for (int i = 0; i < exchangeIds.size(); i++) {
UUID exchangeId = exchangeIds.get(i);
List<UUID> tmp = new ArrayList<>();
tmp.add(exchangeId);
QueueHelper.postToExchange(tmp, exchangeName, null, true);
printWriter.println(exchangeId.toString());
printWriter.flush();
if (i % 5000 == 0) {
LOG.debug("Done " + i + " / " + exchangeIds.size());
}
if (throttle != null) {
doneThisSecond++;
if (doneThisSecond > throttle.intValue()) {
long now = System.currentTimeMillis();
long sleep = 1000 - (now - startMs);
if (sleep > 0) {
Thread.sleep(sleep);
}
startMs = System.currentTimeMillis();
doneThisSecond = 0;
}
}
}
printWriter.close();
LOG.info("Finished Posting to " + exchangeName + " from " + srcFile);
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void postToProtocol(String srcFile) {
LOG.info("Posting to protocol from " + srcFile);
try {
List<UUID> exchangeIds = new ArrayList<>();
List<String> lines = Files.readAllLines(new File(srcFile).toPath());
for (String line: lines) {
if (!Strings.isNullOrEmpty(line)) {
UUID uuid = UUID.fromString(line);
exchangeIds.add(uuid);
}
}
LOG.info("Posting " + exchangeIds.size() + " to Protocol queue");
QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, false);
LOG.info("Finished Posting to protocol from " + srcFile);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
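/**
 * Copies UPRN address-matching results from the core patient_address_uprn table
 * into each subscriber database's patient_uprn table, translating patient, org and
 * person IDs into the subscriber's enterprise IDs, looking up the LSOA code from
 * the ABP address postcode, and pseudonymising the UPRN where the subscriber
 * config requires it. Rows are only written for services that are active
 * publishers to a protocol feeding this subscriber.
 */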
private static void populateSubscriberUprnTable(String subscriberConfigName) throws Exception {
LOG.info("Populating Subscriber UPRN Table for " + subscriberConfigName);
try {
JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber");
//changed the format of the JSON
JsonNode pseudoNode = config.get("pseudonymisation");
boolean pseudonymised = pseudoNode != null;
byte[] saltBytes = null;
if (pseudonymised) {
JsonNode saltNode = pseudoNode.get("salt");
String base64Salt = saltNode.asText();
saltBytes = Base64.getDecoder().decode(base64Salt);
}
/*boolean pseudonymised = config.get("pseudonymised").asBoolean();
byte[] saltBytes = null;
if (pseudonymised) {
JsonNode saltNode = config.get("salt");
String base64Salt = saltNode.asText();
saltBytes = Base64.getDecoder().decode(base64Salt);
}*/
List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(config);
for (EnterpriseConnector.ConnectionWrapper connectionWrapper: connectionWrappers) {
Connection subscriberConnection = connectionWrapper.getConnection();
LOG.info("Populating " + connectionWrapper);
String upsertSql;
if (pseudonymised) {
upsertSql = "INSERT INTO patient_uprn"
+ " (patient_id, organization_id, person_id, lsoa_code, pseudo_uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)"
+ " VALUES"
+ " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
+ " ON DUPLICATE KEY UPDATE"
+ " organization_id = VALUES(organization_id),"
+ " person_id = VALUES(person_id),"
+ " lsoa_code = VALUES(lsoa_code),"
+ " pseudo_uprn = VALUES(pseudo_uprn),"
+ " qualifier = VALUES(qualifier),"
+ " `algorithm` = VALUES(`algorithm`),"
+ " `match` = VALUES(`match`),"
+ " no_address = VALUES(no_address),"
+ " invalid_address = VALUES(invalid_address),"
+ " missing_postcode = VALUES(missing_postcode),"
+ " invalid_postcode = VALUES(invalid_postcode)";
} else {
upsertSql = "INSERT INTO patient_uprn"
+ " (patient_id, organization_id, person_id, lsoa_code, uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)"
+ " VALUES"
+ " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
+ " ON DUPLICATE KEY UPDATE"
+ " organization_id = VALUES(organization_id),"
+ " person_id = VALUES(person_id),"
+ " lsoa_code = VALUES(lsoa_code),"
+ " uprn = VALUES(uprn),"
+ " qualifier = VALUES(qualifier),"
+ " `algorithm` = VALUES(`algorithm`),"
+ " `match` = VALUES(`match`),"
+ " no_address = VALUES(no_address),"
+ " invalid_address = VALUES(invalid_address),"
+ " missing_postcode = VALUES(missing_postcode),"
+ " invalid_postcode = VALUES(invalid_postcode)";
}
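//ON DUPLICATE KEY UPDATE turns the insert into an upsert, so re-running this
//routine refreshes existing patient_uprn rows instead of failing on duplicates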
PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql);
int inBatch = 0;
EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl) edsEntityManager.getDelegate();
Connection edsConnection = session.connection();
SubscriberResourceMappingDalI enterpriseIdDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName);
PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal();
PostcodeDalI postcodeDal = DalProvider.factoryPostcodeDal();
int checked = 0;
int saved = 0;
Map<String, Boolean> hmPermittedPublishers = new HashMap<>();
String sql = "SELECT service_id, patient_id, uprn, qualifier, abp_address, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode FROM patient_address_uprn";
Statement s = edsConnection.createStatement();
s.setFetchSize(10000); //don't get all rows at once
ResultSet rs = s.executeQuery(sql);
LOG.info("Got raw results back");
while (rs.next()) {
int col = 1;
String serviceId = rs.getString(col++);
String patientId = rs.getString(col++);
Long uprn = rs.getLong(col++);
if (rs.wasNull()) {
uprn = null;
}
String qualifier = rs.getString(col++);
String abpAddress = rs.getString(col++);
String algorithm = rs.getString(col++);
String match = rs.getString(col++);
boolean noAddress = rs.getBoolean(col++);
boolean invalidAddress = rs.getBoolean(col++);
boolean missingPostcode = rs.getBoolean(col++);
boolean invalidPostcode = rs.getBoolean(col++);
//check if patient ID already exists in the subscriber DB
Long subscriberPatientId = enterpriseIdDal.findEnterpriseIdOldWay(ResourceType.Patient.toString(), patientId);
//if the patient doesn't exist on this subscriber DB, then don't transform this record
if (subscriberPatientId != null) {
//because of past mistakes, we have Discovery->Enterprise mappings for patients that shouldn't have them,
//so we also need to check that the service ID is definitely a publisher to this subscriber
Boolean isPublisher = hmPermittedPublishers.get(serviceId);
if (isPublisher == null) {
List<LibraryItem> libraryItems = LibraryRepositoryHelper.getProtocolsByServiceId(serviceId, null); //passing null means don't filter on system ID
for (LibraryItem libraryItem : libraryItems) {
Protocol protocol = libraryItem.getProtocol();
if (protocol.getEnabled() != ProtocolEnabled.TRUE) {
continue;
}
//check to make sure that this service is actually a PUBLISHER to this protocol
boolean isProtocolPublisher = false;
for (ServiceContract serviceContract : protocol.getServiceContract()) {
if (serviceContract.getType().equals(ServiceContractType.PUBLISHER)
&& serviceContract.getService().getUuid().equals(serviceId)
&& serviceContract.getActive() == ServiceContractActive.TRUE) {
isProtocolPublisher = true;
break;
}
}
if (!isProtocolPublisher) {
continue;
}
//check to see if this subscriber config is a subscriber to this DB
for (ServiceContract serviceContract : protocol.getServiceContract()) {
if (serviceContract.getType().equals(ServiceContractType.SUBSCRIBER)
&& serviceContract.getActive() == ServiceContractActive.TRUE) {
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
UUID subscriberServiceId = UUID.fromString(serviceContract.getService().getUuid());
UUID subscriberTechnicalInterfaceId = UUID.fromString(serviceContract.getTechnicalInterface().getUuid());
Service subscriberService = serviceRepository.getById(subscriberServiceId);
List<JsonServiceInterfaceEndpoint> serviceEndpoints = ObjectMapperPool.getInstance().readValue(subscriberService.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {
});
for (JsonServiceInterfaceEndpoint serviceEndpoint : serviceEndpoints) {
if (serviceEndpoint.getTechnicalInterfaceUuid().equals(subscriberTechnicalInterfaceId)) {
String protocolSubscriberConfigName = serviceEndpoint.getEndpoint();
if (protocolSubscriberConfigName.equals(subscriberConfigName)) {
isPublisher = Boolean.TRUE;
break;
}
}
}
}
}
}
if (isPublisher == null) {
isPublisher = Boolean.FALSE;
}
hmPermittedPublishers.put(serviceId, isPublisher);
}
if (isPublisher.booleanValue()) {
SubscriberOrgMappingDalI orgMappingDal = DalProvider.factorySubscriberOrgMappingDal(subscriberConfigName);
Long subscriberOrgId = orgMappingDal.findEnterpriseOrganisationId(serviceId);
String discoveryPersonId = patientLinkDal.getPersonId(patientId);
SubscriberPersonMappingDalI personMappingDal = DalProvider.factorySubscriberPersonMappingDal(subscriberConfigName);
Long subscriberPersonId = personMappingDal.findOrCreateEnterprisePersonId(discoveryPersonId);
String lsoaCode = null;
if (!Strings.isNullOrEmpty(abpAddress)) {
String[] toks = abpAddress.split(" ");
String postcode = toks[toks.length - 1];
PostcodeLookup postcodeReference = postcodeDal.getPostcodeReference(postcode);
if (postcodeReference != null) {
lsoaCode = postcodeReference.getLsoaCode();
}
}
col = 1;
psUpsert.setLong(col++, subscriberPatientId);
psUpsert.setLong(col++, subscriberOrgId);
psUpsert.setLong(col++, subscriberPersonId);
psUpsert.setString(col++, lsoaCode);
if (pseudonymised) {
String pseudoUprn = null;
if (uprn != null) {
//hash the UPRN with the subscriber's salt so the raw value never reaches the subscriber DB
TreeMap<String, String> keys = new TreeMap<>();
keys.put("UPRN", "" + uprn);
Crypto crypto = new Crypto();
crypto.SetEncryptedSalt(saltBytes);
pseudoUprn = crypto.GetDigest(keys);
}
psUpsert.setString(col++, pseudoUprn);
} else {
if (uprn != null) {
psUpsert.setLong(col++, uprn.longValue());
} else {
psUpsert.setNull(col++, Types.BIGINT);
}
}
psUpsert.setString(col++, qualifier);
psUpsert.setString(col++, algorithm);
psUpsert.setString(col++, match);
psUpsert.setBoolean(col++, noAddress);
psUpsert.setBoolean(col++, invalidAddress);
psUpsert.setBoolean(col++, missingPostcode);
psUpsert.setBoolean(col++, invalidPostcode);
//LOG.debug("" + psUpsert);
psUpsert.addBatch();
inBatch++;
saved++;
if (inBatch >= TransformConfig.instance().getResourceSaveBatchSize()) {
psUpsert.executeBatch();
subscriberConnection.commit();
inBatch = 0;
}
}
}
checked++;
if (checked % 1000 == 0) {
LOG.info("Checked " + checked + " Saved " + saved);
}
}
if (inBatch > 0) {
psUpsert.executeBatch();
subscriberConnection.commit();
}
LOG.info("Chcked " + checked + " Saved " + saved);
psUpsert.close();
subscriberConnection.close();
edsEntityManager.close();
subscriberConnection.close();
}
LOG.info("Finished Populating Subscriber UPRN Table for " + subscriberConfigName);
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void fixPersonsNoNhsNumber() {
LOG.info("Fixing persons with no NHS number");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
List<Service> services = serviceDal.getAll();
EntityManager entityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection patientSearchConnection = session.connection();
Statement patientSearchStatement = patientSearchConnection.createStatement();
for (Service service: services) {
LOG.info("Doing " + service.getName() + " " + service.getId());
int checked = 0;
int fixedPersons = 0;
int fixedSearches = 0;
String sql = "SELECT patient_id, nhs_number FROM patient_search WHERE service_id = '" + service.getId() + "' AND (nhs_number IS NULL or CHAR_LENGTH(nhs_number) != 10)";
ResultSet rs = patientSearchStatement.executeQuery(sql);
while (rs.next()) {
String patientId = rs.getString(1);
String nhsNumber = rs.getString(2);
//find matched person ID
String personIdSql = "SELECT person_id FROM patient_link WHERE patient_id = '" + patientId + "'";
Statement s = patientSearchConnection.createStatement();
ResultSet rsPersonId = s.executeQuery(personIdSql);
String personId = null;
if (rsPersonId.next()) {
personId = rsPersonId.getString(1);
}
rsPersonId.close();
s.close();
if (Strings.isNullOrEmpty(personId)) {
LOG.error("Patient " + patientId + " has no person ID");
continue;
}
//see whether person ID used NHS number to match
String patientLinkSql = "SELECT nhs_number FROM patient_link_person WHERE person_id = '" + personId + "'";
s = patientSearchConnection.createStatement();
ResultSet rsPatientLink = s.executeQuery(patientLinkSql);
String matchingNhsNumber = null;
if (rsPatientLink.next()) {
matchingNhsNumber = rsPatientLink.getString(1);
}
rsPatientLink.close();
s.close();
//if patient link person has a record for this nhs number, update the person link
if (!Strings.isNullOrEmpty(matchingNhsNumber)) {
String newPersonId = UUID.randomUUID().toString();
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String createdAtStr = sdf.format(new Date());
s = patientSearchConnection.createStatement();
//new record in patient link history
String patientHistorySql = "INSERT INTO patient_link_history VALUES ('" + patientId + "', '" + service.getId() + "', '" + createdAtStr + "', '" + newPersonId + "', '" + personId + "')";
//LOG.debug(patientHistorySql);
s.execute(patientHistorySql);
//update patient link
String patientLinkUpdateSql = "UPDATE patient_link SET person_id = '" + newPersonId + "' WHERE patient_id = '" + patientId + "'";
s.execute(patientLinkUpdateSql);
patientSearchConnection.commit();
s.close();
fixedPersons ++;
}
//if patient search has an invalid NHS number, update it
if (!Strings.isNullOrEmpty(nhsNumber)) {
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(service.getId(), ResourceType.Patient, patientId);
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
patientSearchDal.update(service.getId(), patient);
fixedSearches ++;
}
checked ++;
if (checked % 50 == 0) {
LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches);
}
}
LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches);
rs.close();
}
patientSearchStatement.close();
entityManager.close();
LOG.info("Finished fixing persons with no NHS number");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void checkDeletedObs(UUID serviceId, UUID systemId) {
LOG.info("Checking Observations for " + serviceId);
try {
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
List<ResourceType> potentialResourceTypes = new ArrayList<>();
potentialResourceTypes.add(ResourceType.Procedure);
potentialResourceTypes.add(ResourceType.AllergyIntolerance);
potentialResourceTypes.add(ResourceType.FamilyMemberHistory);
potentialResourceTypes.add(ResourceType.Immunization);
potentialResourceTypes.add(ResourceType.DiagnosticOrder);
potentialResourceTypes.add(ResourceType.Specimen);
potentialResourceTypes.add(ResourceType.DiagnosticReport);
potentialResourceTypes.add(ResourceType.ReferralRequest);
potentialResourceTypes.add(ResourceType.Condition);
potentialResourceTypes.add(ResourceType.Observation);
List<String> subscriberConfigs = new ArrayList<>();
subscriberConfigs.add("ceg_data_checking");
subscriberConfigs.add("ceg_enterprise");
subscriberConfigs.add("hurley_data_checking");
subscriberConfigs.add("hurley_deidentified");
Set<String> observationsNotDeleted = new HashSet<>();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (Exchange exchange : exchanges) {
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
ExchangePayloadFile firstItem = payload.get(0);
//String version = EmisCsvToFhirTransformer.determineVersion(payload);
//if we've reached the point before we process data for this practice, break out
try {
if (!EmisCsvToFhirTransformer.shouldProcessPatientData(payload)) {
break;
}
} catch (TransformException e) {
LOG.info("Skipping exchange containing " + firstItem.getPath());
continue;
}
String name = FilenameUtils.getBaseName(firstItem.getPath());
String[] toks = name.split("_");
String agreementId = toks[4];
LOG.info("Doing exchange containing " + firstItem.getPath());
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true);
Map<UUID, ExchangeBatch> hmBatchesByPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId());
for (ExchangeBatch batch : batches) {
if (batch.getEdsPatientId() != null) {
hmBatchesByPatient.put(batch.getEdsPatientId(), batch);
}
}
for (ExchangePayloadFile item : payload) {
String type = item.getType();
if (type.equals("CareRecord_Observation")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String deleted = record.get("Deleted");
String observationId = record.get("ObservationGuid");
if (deleted.equalsIgnoreCase("true")) {
//if observation was reinstated at some point, skip it
if (observationsNotDeleted.contains(observationId)) {
continue;
}
String patientId = record.get("PatientGuid");
CsvCell patientCell = CsvCell.factoryDummyWrapper(patientId);
CsvCell observationCell = CsvCell.factoryDummyWrapper(observationId);
Set<ResourceType> resourceTypes = org.endeavourhealth.transform.emis.csv.transforms.careRecord.ObservationTransformer.findOriginalTargetResourceTypes(csvHelper, patientCell, observationCell);
for (ResourceType resourceType: resourceTypes) {
//will already have been done OK
if (resourceType == ResourceType.Observation) {
continue;
}
String sourceId = patientId + ":" + observationId;
UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId);
if (uuid == null) {
throw new Exception("Failed to find UUID for " + resourceType + " " + sourceId);
}
LOG.debug("Fixing " + resourceType + " " + uuid);
//create file of IDs to delete for each subscriber DB
for (String subscriberConfig : subscriberConfigs) {
EnterpriseIdDalI subscriberDal = DalProvider.factoryEnterpriseIdDal(subscriberConfig);
Long enterpriseId = subscriberDal.findEnterpriseId(resourceType.toString(), uuid.toString());
if (enterpriseId == null) {
continue;
}
String sql = null;
if (resourceType == ResourceType.AllergyIntolerance) {
sql = "DELETE FROM allergy_intolerance WHERE id = " + enterpriseId;
} else if (resourceType == ResourceType.ReferralRequest) {
sql = "DELETE FROM referral_request WHERE id = " + enterpriseId;
} else {
sql = "DELETE FROM observation WHERE id = " + enterpriseId;
}
sql += "\n";
File f = new File(subscriberConfig + ".sql");
Files.write(f.toPath(), sql.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE);
}
//delete resource if not already done
ResourceWrapper resourceWrapper = resourceDal.getCurrentVersion(serviceId, resourceType.toString(), uuid);
if (resourceWrapper != null && !resourceWrapper.isDeleted()) {
ExchangeBatch batch = hmBatchesByPatient.get(resourceWrapper.getPatientId());
resourceWrapper.setDeleted(true);
resourceWrapper.setResourceData(null);
resourceWrapper.setResourceMetadata("");
resourceWrapper.setExchangeBatchId(batch.getBatchId());
resourceWrapper.setVersion(UUID.randomUUID());
resourceWrapper.setCreatedAt(new Date());
resourceWrapper.setExchangeId(exchange.getId());
resourceDal.delete(resourceWrapper);
}
}
} else {
observationsNotDeleted.add(observationId);
}
}
parser.close();
}
}
}
LOG.info("Finished Checking Observations for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void testBatchInserts(String url, String user, String pass, String num, String batchSizeStr) {
LOG.info("Testing Batch Inserts");
try {
int inserts = Integer.parseInt(num);
int batchSize = Integer.parseInt(batchSizeStr);
LOG.info("Openning Connection");
Properties props = new Properties();
props.setProperty("user", user);
props.setProperty("password", pass);
Connection conn = DriverManager.getConnection(url, props);
//String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?);";
String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?)";
PreparedStatement ps = conn.prepareStatement(sql);
if (batchSize == 1) {
LOG.info("Testing non-batched inserts");
long start = System.currentTimeMillis();
for (int i = 0; i < inserts; i++) {
int col = 1;
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, randomStr());
ps.execute();
}
long end = System.currentTimeMillis();
LOG.info("Done " + inserts + " in " + (end - start) + " ms");
} else {
LOG.info("Testing batched inserts with batch size " + batchSize);
long start = System.currentTimeMillis();
for (int i = 0; i < inserts; i++) {
int col = 1;
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, randomStr());
ps.addBatch();
if ((i + 1) % batchSize == 0
|| i + 1 >= inserts) {
ps.executeBatch();
}
}
long end = System.currentTimeMillis();
LOG.info("Done " + inserts + " in " + (end - start) + " ms");
}
ps.close();
conn.close();
LOG.info("Finished Testing Batch Inserts");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
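/**
 * Builds a pseudo-random string of just over 1100 characters, used as filler data
 * by the (commented-out) batch insert test above.
 */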
private static String randomStr() {
StringBuilder sb = new StringBuilder();
Random r = new Random(System.currentTimeMillis());
while (sb.length() < 1100) {
sb.append(r.nextLong());
}
return sb.toString();
}
/*private static void fixEmisProblems(UUID serviceId, UUID systemId) {
LOG.info("Fixing Emis Problems for " + serviceId);
try {
Map<String, List<String>> hmReferences = new HashMap<>();
Set<String> patientIds = new HashSet<>();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null);
LOG.info("Caching problem links");
//Go through all files to work out problem children for every problem
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
//String version = EmisCsvToFhirTransformer.determineVersion(payload);
ExchangePayloadFile firstItem = payload.get(0);
String name = FilenameUtils.getBaseName(firstItem.getPath());
String[] toks = name.split("_");
String agreementId = toks[4];
LOG.info("Doing exchange containing " + firstItem.getPath());
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true);
for (ExchangePayloadFile item: payload) {
String type = item.getType();
if (type.equals("CareRecord_Observation")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("ObservationGuid");
String localId = patientId + ":" + observationId;
ResourceType resourceType = ObservationTransformer.findOriginalTargetResourceType(filer, CsvCell.factoryDummyWrapper(patientId), CsvCell.factoryDummyWrapper(observationId));
Reference localReference = ReferenceHelper.createReference(resourceType, localId);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else if (type.equals("Prescribing_DrugRecord")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemObservationGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("DrugRecordGuid");
String localId = patientId + ":" + observationId;
Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, localId);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else if (type.equals("Prescribing_IssueRecord")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemObservationGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("IssueRecordGuid");
String localId = patientId + ":" + observationId;
Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, localId);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else {
//no problem link
}
}
}
LOG.info("Finished caching problem links, finding " + patientIds.size() + " patients");
int done = 0;
int fixed = 0;
for (String localPatientId: patientIds) {
Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId);
Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer);
String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference);
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), ResourceType.Condition.toString());
for (ResourceWrapper wrapper: wrappers) {
if (wrapper.isDeleted()) {
continue;
}
String originalJson = wrapper.getResourceData();
Condition condition = (Condition)FhirSerializationHelper.deserializeResource(originalJson);
ConditionBuilder conditionBuilder = new ConditionBuilder(condition);
//sort out the nested extension references
Extension outerExtension = ExtensionConverter.findExtension(condition, FhirExtensionUri.PROBLEM_LAST_REVIEWED);
if (outerExtension != null) {
Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_LAST_REVIEWED__PERFORMER);
if (innerExtension != null) {
Reference performerReference = (Reference)innerExtension.getValue();
String value = performerReference.getReference();
if (value.endsWith("}")) {
Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer);
innerExtension.setValue(globalPerformerReference);
}
}
}
//sort out the contained list of children
ContainedListBuilder listBuilder = new ContainedListBuilder(conditionBuilder);
//remove any existing children
listBuilder.removeContainedList();
//add all the new ones we've found
List<String> localChildReferences = hmReferences.get(wrapper.getResourceId().toString());
if (localChildReferences != null) {
for (String localChildReference: localChildReferences) {
Reference reference = ReferenceHelper.createReference(localChildReference);
listBuilder.addContainedListItem(reference);
}
}
//save the updated condition
String newJson = FhirSerializationHelper.serializeResource(condition);
if (!newJson.equals(originalJson)) {
wrapper.setResourceData(newJson);
saveResourceWrapper(serviceId, wrapper);
fixed ++;
}
}
done ++;
if (done % 1000 == 0) {
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
}
}
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
LOG.info("Finished Emis Problems for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixEmisProblems3ForPublisher(String publisher, UUID systemId) {
try {
LOG.info("Doing fix for " + publisher);
String[] done = new String[]{
"01fcfe94-5dfd-4951-b74d-129f874209b0",
"07a267d3-189b-4968-b9b0-547de28edef5",
"0b9601d1-f7ab-4f5d-9f77-1841050f75ab",
"0fd2ff5d-2c25-4707-afe8-707e81a250b8",
"14276da8-c344-4841-a36d-aa38940e78e7",
"158251ca-0e1d-4471-8fae-250b875911e1",
"160131e2-a5ff-49c8-b62e-ae499a096193",
"16490f2b-62ce-44c6-9816-528146272340",
"18fa1bed-b9a0-4d55-a0cc-dfc31831259a",
"19cba169-d41e-424a-812f-575625c72305",
"19ff6a03-25df-4e61-9ab1-4573cfd24729",
"1b3d1627-f49e-4103-92d6-af6016476da3",
"1e198fbb-c9cd-429a-9b50-0f124d0d825c",
"20444fbe-0802-46fc-8203-339a36f52215",
"21e27bf3-8071-48dd-924f-1d8d21f9216f",
"23203e72-a3b0-4577-9942-30f7cdff358e",
"23be1f4a-68ec-4a49-b2ec-aa9109c99dcd",
"2b56033f-a9b4-4bab-bb53-c619bdb38895",
"2ba26f2d-8068-4b77-8e62-431edfc2c2e2",
"2ed89931-0ce7-49ea-88ac-7266b6c03be0",
"3abf8ded-f1b1-495b-9a2d-5d0223e33fa7",
"3b0f6720-2ffd-4f8a-afcd-7e3bb311212d",
"415b509a-cf39-45bc-9acf-7f982a00e159",
"4221276f-a3b0-4992-b426-ec2d8c7347f2",
"49868211-d868-4b55-a201-5acac0be0cc0",
"55fdcbd0-9b2d-493a-b874-865ccc93a156",
"56124545-d266-4da9-ba1f-b3a16edc7f31",
"6c11453b-dbf8-4749-a0ec-ab705920e316"
};
ServiceDalI dal = DalProvider.factoryServiceDal();
List<Service> all = dal.getAll();
for (Service service: all) {
if (service.getPublisherConfigName() != null
&& service.getPublisherConfigName().equals(publisher)) {
boolean alreadyDone = false;
String idStr = service.getId().toString();
for (String doneId: done) {
if (idStr.equalsIgnoreCase(doneId)) {
alreadyDone = true;
break;
}
}
if (alreadyDone) {
continue;
}
fixEmisProblems3(service.getId(), systemId);
}
}
LOG.info("Done fix for " + publisher);
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void fixEmisProblems3(UUID serviceId, UUID systemId) {
LOG.info("Fixing Emis Problems 3 for " + serviceId);
try {
Set<String> patientIds = new HashSet<>();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null);
LOG.info("Finding patients");
//Go through all files to work out problem children for every problem
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
for (ExchangePayloadFile item: payload) {
String type = item.getType();
if (type.equals("Admin_Patient")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
}
parser.close();
}
}
}
LOG.info("Finished checking files, finding " + patientIds.size() + " patients");
int done = 0;
int fixed = 0;
for (String localPatientId: patientIds) {
Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId);
Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer);
String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference);
List<ResourceType> potentialResourceTypes = new ArrayList<>();
potentialResourceTypes.add(ResourceType.Procedure);
potentialResourceTypes.add(ResourceType.AllergyIntolerance);
potentialResourceTypes.add(ResourceType.FamilyMemberHistory);
potentialResourceTypes.add(ResourceType.Immunization);
potentialResourceTypes.add(ResourceType.DiagnosticOrder);
potentialResourceTypes.add(ResourceType.Specimen);
potentialResourceTypes.add(ResourceType.DiagnosticReport);
potentialResourceTypes.add(ResourceType.ReferralRequest);
potentialResourceTypes.add(ResourceType.Condition);
potentialResourceTypes.add(ResourceType.Observation);
for (ResourceType resourceType: potentialResourceTypes) {
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), resourceType.toString());
for (ResourceWrapper wrapper : wrappers) {
if (wrapper.isDeleted()) {
continue;
}
String originalJson = wrapper.getResourceData();
DomainResource resource = (DomainResource)FhirSerializationHelper.deserializeResource(originalJson);
//Also go through all observation records and any that have parent observations - these need fixing too???
Extension extension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PARENT_RESOURCE);
if (extension != null) {
Reference reference = (Reference)extension.getValue();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
if (resource instanceof Observation) {
Observation obs = (Observation)resource;
if (obs.hasRelated()) {
for (Observation.ObservationRelatedComponent related: obs.getRelated()) {
if (related.hasTarget()) {
Reference reference = related.getTarget();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
}
if (resource instanceof DiagnosticReport) {
DiagnosticReport diag = (DiagnosticReport)resource;
if (diag.hasResult()) {
for (Reference reference: diag.getResult()) {
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
//Go through all patients, go through all problems, for any child that's Observation, find the true resource type then update and save
if (resource instanceof Condition) {
if (resource.hasContained()) {
for (Resource contained: resource.getContained()) {
if (contained.getId().equals("Items")) {
List_ containedList = (List_)contained;
if (containedList.hasEntry()) {
for (List_.ListEntryComponent entry: containedList.getEntry()) {
Reference reference = entry.getItem();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
}
}
//sort out the nested extension references
Extension outerExtension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PROBLEM_RELATED);
if (outerExtension != null) {
Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_RELATED__TARGET);
if (innerExtension != null) {
Reference performerReference = (Reference)innerExtension.getValue();
String value = performerReference.getReference();
if (value.endsWith("}")) {
Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer);
innerExtension.setValue(globalPerformerReference);
}
}
}
}
//save the updated condition
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!newJson.equals(originalJson)) {
wrapper.setResourceData(newJson);
saveResourceWrapper(serviceId, wrapper);
fixed++;
}
}
}
done ++;
if (done % 1000 == 0) {
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
}
}
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
LOG.info("Finished Emis Problems 3 for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}
private static boolean fixReference(UUID serviceId, HasServiceSystemAndExchangeIdI csvHelper, Reference reference, List<ResourceType> potentialResourceTypes) throws Exception {
//if it's already something other than observation, we're OK
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(reference);
if (comps.getResourceType() != ResourceType.Observation) {
return false;
}
Reference sourceReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, reference);
String sourceId = ReferenceHelper.getReferenceId(sourceReference);
String newReferenceValue = findTrueResourceType(serviceId, potentialResourceTypes, sourceId);
if (newReferenceValue == null) {
return false;
}
reference.setReference(newReferenceValue);
return true;
}
private static String findTrueResourceType(UUID serviceId, List<ResourceType> potentials, String sourceId) throws Exception {
ResourceDalI dal = DalProvider.factoryResourceDal();
for (ResourceType resourceType: potentials) {
UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId);
if (uuid == null) {
continue;
}
ResourceWrapper wrapper = dal.getCurrentVersion(serviceId, resourceType.toString(), uuid);
if (wrapper != null) {
return ReferenceHelper.createResourceReference(resourceType, uuid.toString());
}
}
return null;
}*/
/*private static void convertExchangeBody(UUID systemUuid) {
try {
LOG.info("Converting exchange bodies for system " + systemUuid);
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemUuid, Integer.MAX_VALUE);
if (exchanges.isEmpty()) {
continue;
}
LOG.debug("doing " + service.getName() + " with " + exchanges.size() + " exchanges");
for (Exchange exchange: exchanges) {
String exchangeBody = exchange.getBody();
try {
//already done
ExchangePayloadFile[] files = JsonSerializer.deserialize(exchangeBody, ExchangePayloadFile[].class);
continue;
} catch (JsonSyntaxException ex) {
//if the JSON can't be parsed, then it'll be the old format of body that isn't JSON
}
List<ExchangePayloadFile> newFiles = new ArrayList<>();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
for (String file: files) {
ExchangePayloadFile fileObj = new ExchangePayloadFile();
String fileWithoutSharedStorage = file.substring(TransformConfig.instance().getSharedStoragePath().length()+1);
fileObj.setPath(fileWithoutSharedStorage);
//size
List<FileInfo> fileInfos = FileHelper.listFilesInSharedStorageWithInfo(file);
for (FileInfo info: fileInfos) {
if (info.getFilePath().equals(file)) {
long size = info.getSize();
fileObj.setSize(new Long(size));
}
}
//type
if (systemUuid.toString().equalsIgnoreCase("991a9068-01d3-4ff2-86ed-249bd0541fb3") //live
|| systemUuid.toString().equalsIgnoreCase("55c08fa5-ef1e-4e94-aadc-e3d6adc80774")) { //dev
//emis
String name = FilenameUtils.getName(file);
String[] toks = name.split("_");
String first = toks[1];
String second = toks[2];
fileObj.setType(first + "_" + second);
} else if (systemUuid.toString().equalsIgnoreCase("e517fa69-348a-45e9-a113-d9b59ad13095") //live
|| systemUuid.toString().equalsIgnoreCase("b0277098-0b6c-4d9d-86ef-5f399fb25f34")) { //dev
//cerner
String name = FilenameUtils.getName(file);
if (Strings.isNullOrEmpty(name)) {
continue;
}
try {
String type = BartsCsvToFhirTransformer.identifyFileType(name);
fileObj.setType(type);
} catch (Exception ex2) {
throw new Exception("Failed to parse file name " + name + " on exchange " + exchange.getId());
}
} else {
throw new Exception("Unknown system ID " + systemUuid);
}
newFiles.add(fileObj);
}
String json = JsonSerializer.serialize(newFiles);
exchange.setBody(json);
exchangeDal.save(exchange);
}
}
LOG.info("Finished Converting exchange bodies for system " + systemUuid);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixBartsOrgs(String serviceId) {
try {
LOG.info("Fixing Barts orgs");
ResourceDalI dal = DalProvider.factoryResourceDal();
List<ResourceWrapper> wrappers = dal.getResourcesByService(UUID.fromString(serviceId), ResourceType.Organization.toString());
LOG.debug("Found " + wrappers.size() + " resources");
int done = 0;
int fixed = 0;
for (ResourceWrapper wrapper: wrappers) {
if (!wrapper.isDeleted()) {
List<ResourceWrapper> history = dal.getResourceHistory(UUID.fromString(serviceId), wrapper.getResourceType(), wrapper.getResourceId());
ResourceWrapper mostRecent = history.get(0);
String json = mostRecent.getResourceData();
Organization org = (Organization)FhirSerializationHelper.deserializeResource(json);
String odsCode = IdentifierHelper.findOdsCode(org);
if (Strings.isNullOrEmpty(odsCode)
&& org.hasIdentifier()) {
boolean hasBeenFixed = false;
for (Identifier identifier: org.getIdentifier()) {
if (identifier.getSystem().equals(FhirIdentifierUri.IDENTIFIER_SYSTEM_ODS_CODE)
&& identifier.hasId()) {
odsCode = identifier.getId();
identifier.setValue(odsCode);
identifier.setId(null);
hasBeenFixed = true;
}
}
if (hasBeenFixed) {
String newJson = FhirSerializationHelper.serializeResource(org);
mostRecent.setResourceData(newJson);
LOG.debug("Fixed Organization " + org.getId());
saveResourceWrapper(UUID.fromString(serviceId), mostRecent);
fixed ++;
}
}
}
done ++;
if (done % 100 == 0) {
LOG.debug("Done " + done + ", Fixed " + fixed);
}
}
LOG.debug("Done " + done + ", Fixed " + fixed);
LOG.info("Finished Barts orgs");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void testPreparedStatements(String url, String user, String pass, String serviceId) {
try {
LOG.info("Testing Prepared Statements");
LOG.info("Url: " + url);
LOG.info("user: " + user);
LOG.info("pass: " + pass);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
//create connection
Properties props = new Properties();
props.setProperty("user", user);
props.setProperty("password", pass);
Connection conn = DriverManager.getConnection(url, props);
String sql = "SELECT * FROM internal_id_map WHERE service_id = ? AND id_type = ? AND source_id = ?";
long start = System.currentTimeMillis();
for (int i=0; i<10000; i++) {
PreparedStatement ps = null;
try {
ps = conn.prepareStatement(sql);
ps.setString(1, serviceId);
ps.setString(2, "MILLPERSIDtoMRN");
ps.setString(3, UUID.randomUUID().toString());
ResultSet rs = ps.executeQuery();
while (rs.next()) {
//do nothing
}
} finally {
if (ps != null) {
ps.close();
}
}
}
long end = System.currentTimeMillis();
LOG.info("Took " + (end-start) + " ms");
//close connection
conn.close();
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
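/*A variant sketch of the timing test above: testPreparedStatements() re-prepares the
statement on every iteration, so it mostly measures prepare overhead. To measure reuse
(which is the usual point of prepared statements), prepare once outside the loop:
PreparedStatement ps = conn.prepareStatement(sql);
long start = System.currentTimeMillis();
for (int i=0; i<10000; i++) {
ps.setString(1, serviceId);
ps.setString(2, "MILLPERSIDtoMRN");
ps.setString(3, UUID.randomUUID().toString());
ResultSet rs = ps.executeQuery();
while (rs.next()) {
//do nothing
}
rs.close();
}
LOG.info("Took " + (System.currentTimeMillis() - start) + " ms");
ps.close();*/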
/*private static void fixEncounters(String table) {
LOG.info("Fixing encounters from " + table);
try {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
Date cutoff = sdf.parse("2018-03-14 11:42");
EntityManager entityManager = ConnectionManager.getAdminEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
List<UUID> serviceIds = new ArrayList<>();
Map<UUID, UUID> hmSystems = new HashMap<>();
String sql = "SELECT service_id, system_id FROM " + table + " WHERE done = 0";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
UUID serviceId = UUID.fromString(rs.getString(1));
UUID systemId = UUID.fromString(rs.getString(2));
serviceIds.add(serviceId);
hmSystems.put(serviceId, systemId);
}
rs.close();
statement.close();
entityManager.close();
for (UUID serviceId: serviceIds) {
UUID systemId = hmSystems.get(serviceId);
LOG.info("Doing service " + serviceId + " and system " + systemId);
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, systemId);
List<UUID> exchangeIdsToProcess = new ArrayList<>();
for (UUID exchangeId: exchangeIds) {
List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId);
for (ExchangeTransformAudit audit: audits) {
Date d = audit.getStarted();
if (d.after(cutoff)) {
exchangeIdsToProcess.add(exchangeId);
break;
}
}
}
Map<String, ReferenceList> consultationNewChildMap = new HashMap<>();
Map<String, ReferenceList> observationChildMap = new HashMap<>();
Map<String, ReferenceList> newProblemChildren = new HashMap<>();
for (UUID exchangeId: exchangeIdsToProcess) {
Exchange exchange = exchangeDal.getExchange(exchangeId);
String[] files = ExchangeHelper.parseExchangeBodyIntoFileList(exchange.getBody());
String version = EmisCsvToFhirTransformer.determineVersion(files);
List<String> interestingFiles = new ArrayList<>();
for (String file: files) {
if (file.indexOf("CareRecord_Consultation") > -1
|| file.indexOf("CareRecord_Observation") > -1
|| file.indexOf("CareRecord_Diary") > -1
|| file.indexOf("Prescribing_DrugRecord") > -1
|| file.indexOf("Prescribing_IssueRecord") > -1
|| file.indexOf("CareRecord_Problem") > -1) {
interestingFiles.add(file);
}
}
files = interestingFiles.toArray(new String[0]);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers);
String dataSharingAgreementGuid = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(parsers);
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchangeId, dataSharingAgreementGuid, true);
Consultation consultationParser = (Consultation)parsers.get(Consultation.class);
while (consultationParser.nextRecord()) {
CsvCell consultationGuid = consultationParser.getConsultationGuid();
CsvCell patientGuid = consultationParser.getPatientGuid();
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
consultationNewChildMap.put(sourceId, new ReferenceList());
}
Problem problemParser = (Problem)parsers.get(Problem.class);
while (problemParser.nextRecord()) {
CsvCell problemGuid = problemParser.getObservationGuid();
CsvCell patientGuid = problemParser.getPatientGuid();
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
newProblemChildren.put(sourceId, new ReferenceList());
}
//run this pre-transformer to pre-cache some stuff in the csv helper, which
//is needed when working out the resource type that each observation would be saved as
ObservationPreTransformer.transform(version, parsers, null, csvHelper);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
CsvCell observationGuid = observationParser.getObservationGuid();
CsvCell patientGuid = observationParser.getPatientGuid();
String obSourceId = EmisCsvHelper.createUniqueId(patientGuid, observationGuid);
CsvCell codeId = observationParser.getCodeId();
if (codeId.isEmpty()) {
continue;
}
ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper);
UUID obUuid = IdHelper.getEdsResourceId(serviceId, resourceType, obSourceId);
if (obUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + resourceType + " and source ID " + obSourceId);
//resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper);
}
Reference obReference = ReferenceHelper.createReference(resourceType, obUuid.toString());
CsvCell consultationGuid = observationParser.getConsultationGuid();
if (!consultationGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
ReferenceList referenceList = consultationNewChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
consultationNewChildMap.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
CsvCell problemGuid = observationParser.getProblemGuid();
if (!problemGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
CsvCell parentObGuid = observationParser.getParentObservationGuid();
if (!parentObGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, parentObGuid);
ReferenceList referenceList = observationChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
observationChildMap.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
}
Diary diaryParser = (Diary)parsers.get(Diary.class);
while (diaryParser.nextRecord()) {
CsvCell consultationGuid = diaryParser.getConsultationGuid();
if (!consultationGuid.isEmpty()) {
CsvCell diaryGuid = diaryParser.getDiaryGuid();
CsvCell patientGuid = diaryParser.getPatientGuid();
String diarySourceId = EmisCsvHelper.createUniqueId(patientGuid, diaryGuid);
UUID diaryUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.ProcedureRequest, diarySourceId);
if (diaryUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.ProcedureRequest + " and source ID " + diarySourceId);
}
Reference diaryReference = ReferenceHelper.createReference(ResourceType.ProcedureRequest, diaryUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
ReferenceList referenceList = consultationNewChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
consultationNewChildMap.put(sourceId, referenceList);
}
referenceList.add(diaryReference);
}
}
IssueRecord issueRecordParser = (IssueRecord)parsers.get(IssueRecord.class);
while (issueRecordParser.nextRecord()) {
CsvCell problemGuid = issueRecordParser.getProblemObservationGuid();
if (!problemGuid.isEmpty()) {
CsvCell issueRecordGuid = issueRecordParser.getIssueRecordGuid();
CsvCell patientGuid = issueRecordParser.getPatientGuid();
String issueRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, issueRecordGuid);
UUID issueRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationOrder, issueRecordSourceId);
if (issueRecordUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.MedicationOrder + " and source ID " + issueRecordSourceId);
}
Reference issueRecordReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, issueRecordUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(issueRecordReference);
}
}
DrugRecord drugRecordParser = (DrugRecord)parsers.get(DrugRecord.class);
while (drugRecordParser.nextRecord()) {
CsvCell problemGuid = drugRecordParser.getProblemObservationGuid();
if (!problemGuid.isEmpty()) {
CsvCell drugRecordGuid = drugRecordParser.getDrugRecordGuid();
CsvCell patientGuid = drugRecordParser.getPatientGuid();
String drugRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, drugRecordGuid);
UUID drugRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationStatement, drugRecordSourceId);
if (drugRecordUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.MedicationStatement + " and source ID " + drugRecordSourceId);
}
Reference drugRecordReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, drugRecordUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(drugRecordReference);
}
}
for (AbstractCsvParser parser : parsers.values()) {
try {
parser.close();
} catch (IOException ex) {
//don't worry if this fails, as we're done anyway
}
}
}
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
LOG.info("Found " + consultationNewChildMap.size() + " Encounters to fix");
for (String encounterSourceId: consultationNewChildMap.keySet()) {
ReferenceList childReferences = consultationNewChildMap.get(encounterSourceId);
//map to UUID
UUID encounterId = IdHelper.getEdsResourceId(serviceId, ResourceType.Encounter, encounterSourceId);
if (encounterId == null) {
continue;
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Encounter.toString(), encounterId);
if (history.isEmpty()) {
continue;
//throw new Exception("Empty history for Encounter " + encounterId);
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (wrapper.getResourceData() != null) {
Encounter encounter = (Encounter) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
EncounterBuilder encounterBuilder = new EncounterBuilder(encounter);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder);
List<Reference> previousChildren = containedListBuilder.getContainedListItems();
childReferences.add(previousChildren);
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
EncounterBuilder encounterBuilder = new EncounterBuilder((Encounter)resource);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder);
containedListBuilder.addReferences(childReferences);
newJson = FhirSerializationHelper.serializeResource(resource);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
LOG.info("Found " + observationChildMap.size() + " Parent Observations to fix");
for (String sourceId: observationChildMap.keySet()) {
ReferenceList childReferences = observationChildMap.get(sourceId);
//map to UUID
ResourceType resourceType = null;
UUID resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.Observation, sourceId);
if (resourceId != null) {
resourceType = ResourceType.Observation;
} else {
resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.DiagnosticReport, sourceId);
if (resourceId != null) {
resourceType = ResourceType.DiagnosticReport;
} else {
continue;
}
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceType.toString(), resourceId);
if (history.isEmpty()) {
//throw new Exception("Empty history for " + resourceType + " " + resourceId);
continue;
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (resourceType == ResourceType.Observation) {
if (wrapper.getResourceData() != null) {
Observation observation = (Observation) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
if (observation.hasRelated()) {
for (Observation.ObservationRelatedComponent related : observation.getRelated()) {
Reference reference = related.getTarget();
childReferences.add(reference);
}
}
}
} else {
if (wrapper.getResourceData() != null) {
DiagnosticReport report = (DiagnosticReport) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
if (report.hasResult()) {
for (Reference reference : report.getResult()) {
childReferences.add(reference);
}
}
}
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
boolean changed = false;
if (resourceType == ResourceType.Observation) {
ObservationBuilder resourceBuilder = new ObservationBuilder((Observation)resource);
for (int i=0; i<childReferences.size(); i++) {
Reference reference = childReferences.getReference(i);
if (resourceBuilder.addChildObservation(reference)) {
changed = true;
}
}
} else {
DiagnosticReportBuilder resourceBuilder = new DiagnosticReportBuilder((DiagnosticReport)resource);
for (int i=0; i<childReferences.size(); i++) {
Reference reference = childReferences.getReference(i);
if (resourceBuilder.addResult(reference)) {
changed = true;
}
}
}
if (changed) {
newJson = FhirSerializationHelper.serializeResource(resource);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
LOG.info("Found " + newProblemChildren.size() + " Problems to fix");
for (String sourceId: newProblemChildren.keySet()) {
ReferenceList childReferences = newProblemChildren.get(sourceId);
//map to UUID
UUID conditionId = IdHelper.getEdsResourceId(serviceId, ResourceType.Condition, sourceId);
if (conditionId == null) {
continue;
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Condition.toString(), conditionId);
if (history.isEmpty()) {
continue;
//throw new Exception("Empty history for Condition " + conditionId);
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (wrapper.getResourceData() != null) {
Condition previousVersion = (Condition) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
ConditionBuilder conditionBuilder = new ConditionBuilder(previousVersion);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder);
List<Reference> previousChildren = containedListBuilder.getContainedListItems();
childReferences.add(previousChildren);
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
ConditionBuilder conditionBuilder = new ConditionBuilder((Condition)resource);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder);
containedListBuilder.addReferences(childReferences);
newJson = FhirSerializationHelper.serializeResource(resource);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
//mark as done
String updateSql = "UPDATE " + table + " SET done = 1 WHERE service_id = '" + serviceId + "';";
entityManager = ConnectionManager.getAdminEntityManager();
session = (SessionImpl)entityManager.getDelegate();
connection = session.connection();
statement = connection.createStatement();
entityManager.getTransaction().begin();
statement.executeUpdate(updateSql);
entityManager.getTransaction().commit();
}
/**
* For each practice:
* Go through all files processed since 14 March
* Cache all links as above
* Cache all Encounters saved too
* <p>
* For each Encounter referenced at all:
* Retrieve latest version from resource current
* Retrieve version prior to 14 March
* Update current version with old references plus new ones
* <p>
* For each parent observation:
* Retrieve latest version (could be observation or diagnostic report)
* <p>
* For each problem:
* Retrieve latest version from resource current
* Check if still a problem:
* Retrieve version prior to 14 March
* Update current version with old references plus new ones
LOG.info("Finished Fixing encounters from " + table);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
private static void saveResourceWrapper(UUID serviceId, ResourceWrapper wrapper) throws Exception {
if (wrapper.getVersion() == null) {
throw new Exception("Can't update resource history without version UUID");
}
if (wrapper.getResourceData() != null) {
long checksum = FhirStorageService.generateChecksum(wrapper.getResourceData());
wrapper.setResourceChecksum(Long.valueOf(checksum));
}
EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
entityManager.getTransaction().begin();
String json = wrapper.getResourceData();
//escape for embedding in a single-quoted MySQL literal: quotes are doubled and backslashes
//are doubled (quote-doubling introduces no backslashes, so this order is safe)
json = json.replace("'", "''");
json = json.replace("\\", "\\\\");
String patientId = "";
if (wrapper.getPatientId() != null) {
patientId = wrapper.getPatientId().toString();
}
String updateSql = "UPDATE resource_current"
+ " SET resource_data = '" + json + "',"
+ " resource_checksum = " + wrapper.getResourceChecksum()
+ " WHERE service_id = '" + wrapper.getServiceId() + "'"
+ " AND patient_id = '" + patientId + "'"
+ " AND resource_type = '" + wrapper.getResourceType() + "'"
+ " AND resource_id = '" + wrapper.getResourceId() + "'";
statement.executeUpdate(updateSql);
//LOG.debug(updateSql);
//SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:SS");
//String createdAtStr = sdf.format(wrapper.getCreatedAt());
updateSql = "UPDATE resource_history"
+ " SET resource_data = '" + json + "',"
+ " resource_checksum = " + wrapper.getResourceChecksum()
+ " WHERE resource_id = '" + wrapper.getResourceId() + "'"
+ " AND resource_type = '" + wrapper.getResourceType() + "'"
//+ " AND created_at = '" + createdAtStr + "'"
+ " AND version = '" + wrapper.getVersion() + "'";
statement.executeUpdate(updateSql);
//LOG.debug(updateSql);
entityManager.getTransaction().commit();
}
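/*A minimal sketch (not called anywhere) of how saveResourceWrapper() could use
PreparedStatement parameters instead of the manual quote/backslash escaping above. It
assumes the same resource_current schema and that the checksum has already been set; the
resource_history update would follow the same pattern. The method name is ours.
private static void saveResourceWrapperParameterised(UUID serviceId, ResourceWrapper wrapper) throws Exception {
EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection connection = session.connection();
entityManager.getTransaction().begin();
String sql = "UPDATE resource_current"
+ " SET resource_data = ?, resource_checksum = ?"
+ " WHERE service_id = ? AND patient_id = ? AND resource_type = ? AND resource_id = ?";
PreparedStatement ps = connection.prepareStatement(sql);
try {
ps.setString(1, wrapper.getResourceData()); //no escaping needed - the driver handles it
ps.setLong(2, wrapper.getResourceChecksum().longValue());
ps.setString(3, wrapper.getServiceId().toString());
ps.setString(4, wrapper.getPatientId() == null ? "" : wrapper.getPatientId().toString());
ps.setString(5, wrapper.getResourceType());
ps.setString(6, wrapper.getResourceId().toString());
ps.executeUpdate();
} finally {
ps.close();
}
entityManager.getTransaction().commit();
}*/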
private static void populateNewSearchTable(String table) {
LOG.info("Populating New Search Table");
try {
EntityManager entityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
List<String> patientIds = new ArrayList<>();
Map<String, String> serviceIds = new HashMap<>();
String sql = "SELECT patient_id, service_id FROM " + table + " WHERE done = 0";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
String patientId = rs.getString(1);
String serviceId = rs.getString(2);
patientIds.add(patientId);
serviceIds.put(patientId, serviceId);
}
rs.close();
statement.close();
entityManager.close();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
LOG.info("Found " + patientIds.size() + " to do");
for (int i=0; i<patientIds.size(); i++) {
String patientIdStr = patientIds.get(i);
UUID patientId = UUID.fromString(patientIdStr);
String serviceIdStr = serviceIds.get(patientIdStr);
UUID serviceId = UUID.fromString(serviceIdStr);
Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientIdStr);
if (patient != null) {
LOG.debug("Updating for patient " + patientIdStr);
patientSearchDal.update(serviceId, patient);
LOG.debug("Done");
} else {
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientId);
if (history.isEmpty()) {
LOG.debug("No history found for patient " + patientIdStr);
} else {
ResourceWrapper first = history.get(0);
if (!first.isDeleted()) {
throw new Exception("Resource current null for " + ResourceType.Patient + " " + patientIdStr + " but not deleted in resource_history");
}
//find first non-deleted instance and update for it, then delete
for (ResourceWrapper historyItem: history) {
if (!historyItem.isDeleted()) {
patient = (Patient)FhirSerializationHelper.deserializeResource(historyItem.getResourceData());
LOG.debug("Patient is deleted, so updating for deleted patient " + patientIdStr);
patientSearchDal.update(serviceId, patient);
patientSearchDal.deletePatient(serviceId, patient);
LOG.debug("Done");
break;
}
}
}
}
//find episode of care
//note, we don't have any current way to retrieve deleted episodes of care for a patient, so can only do this for non-deleted ones
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, patientId, ResourceType.EpisodeOfCare.toString());
for (ResourceWrapper wrapper: wrappers) {
if (!wrapper.isDeleted()) {
LOG.debug("Updating for episodeOfCare resource " + wrapper.getResourceId());
EpisodeOfCare episodeOfCare = (EpisodeOfCare)FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
patientSearchDal.update(serviceId, episodeOfCare);
LOG.debug("Done");
} else {
LOG.debug("EpisodeOfCare " + wrapper.getResourceId() + " is deleted");
}
}
String updateSql = "UPDATE " + table + " SET done = 1 WHERE patient_id = '" + patientIdStr + "' AND service_id = '" + serviceIdStr + "';";
entityManager = ConnectionManager.getEdsEntityManager();
session = (SessionImpl)entityManager.getDelegate();
connection = session.connection();
statement = connection.createStatement();
entityManager.getTransaction().begin();
statement.executeUpdate(updateSql);
entityManager.getTransaction().commit();
if (i % 5000 == 0) {
LOG.info("Done " + (i+1) + " of " + patientIds.size());
}
}
entityManager.close();
LOG.info("Finished Populating New Search Table");
} catch (Exception ex) {
LOG.error("", ex);
}
}
private static void createBartsSubset(String sourceDir, UUID serviceUuid, UUID systemUuid, String samplePatientsFile) {
LOG.info("Creating Barts Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
createBartsSubsetForFile(sourceDir, serviceUuid, systemUuid, personIds);
LOG.info("Finished Creating Barts Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void createBartsSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
for (File sourceFile: sourceDir.listFiles()) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
LOG.info("Doing dir " + sourceFile);
createBartsSubsetForFile(sourceFile, destFile, personIds);
} else {
//we have some bad partial files in, so ignore them
String ext = FilenameUtils.getExtension(name);
if (ext.equalsIgnoreCase("filepart")) {
continue;
}
//if the file is empty, we still need the empty file in the filtered directory, so just copy it
if (sourceFile.length() == 0) {
LOG.info("Copying empty file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
continue;
}
String baseName = FilenameUtils.getBaseName(name);
String fileType = BartsCsvToFhirTransformer.identifyFileType(baseName);
if (isCerner22File(fileType)) {
LOG.info("Checking 2.2 file " + sourceFile);
if (destFile.exists()) {
destFile.delete();
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
int lineIndex = -1;
PrintWriter pw = null;
int personIdColIndex = -1;
int expectedCols = -1;
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
lineIndex ++;
if (lineIndex == 0) {
if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
//this file has no headers, so needs hard-coding
personIdColIndex = 5;
} else {
//check headings for PersonID col
String[] toks = line.split("\\|", -1);
expectedCols = toks.length;
for (int i=0; i<expectedCols; i++) {
String col = toks[i];
if (col.equalsIgnoreCase("PERSON_ID")
|| col.equalsIgnoreCase("#PERSON_ID")) {
personIdColIndex = i;
break;
}
}
//if no person ID, then just copy the entire file
if (personIdColIndex == -1) {
br.close();
br = null;
LOG.info(" Copying 2.2 file to " + destFile);
copyFile(sourceFile, destFile);
break;
} else {
LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
}
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
pw = new PrintWriter(bw);
} else {
//filter on personID
String[] toks = line.split("\\|", -1);
if (expectedCols != -1
&& toks.length != expectedCols) {
throw new Exception("Line " + (lineIndex+1) + " has " + toks.length + " cols but expecting " + expectedCols);
} else {
String personId = toks[personIdColIndex];
if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
&& !personIds.contains(personId)) {
continue;
}
}
}
pw.println(line);
}
if (br != null) {
br.close();
}
if (pw != null) {
pw.flush();
pw.close();
}
} else {
//the 2.1 files are going to be a pain to split by patient, so just copy them over
LOG.info("Copying 2.1 file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
}
}
}
}*/
private static void createBartsSubsetForFile(String sourceDir, UUID serviceUuid, UUID systemUuid, Set<String> personIds) throws Exception {
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);
for (Exchange exchange : exchanges) {
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
for (ExchangePayloadFile fileObj : files) {
String filePathWithoutSharedStorage = fileObj.getPath().substring(TransformConfig.instance().getSharedStoragePath().length() + 1);
String sourceFilePath = FilenameUtils.concat(sourceDir, filePathWithoutSharedStorage);
File sourceFile = new File(sourceFilePath);
String destFilePath = fileObj.getPath();
File destFile = new File(destFilePath);
File destDir = destFile.getParentFile();
if (!destDir.exists()) {
destDir.mkdirs();
}
//if the file is empty, we still need the empty file in the filtered directory, so just copy it
if (sourceFile.length() == 0) {
LOG.info("Copying empty file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
continue;
}
String fileType = fileObj.getType();
if (isCerner22File(fileType)) {
LOG.info("Checking 2.2 file " + sourceFile);
if (destFile.exists()) {
destFile.delete();
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
int lineIndex = -1;
PrintWriter pw = null;
int personIdColIndex = -1;
int expectedCols = -1;
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
lineIndex++;
if (lineIndex == 0) {
if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
//this file has no headers, so needs hard-coding
personIdColIndex = 5;
} else {
//check headings for PersonID col
String[] toks = line.split("\\|", -1);
expectedCols = toks.length;
for (int i = 0; i < expectedCols; i++) {
String col = toks[i];
if (col.equalsIgnoreCase("PERSON_ID")
|| col.equalsIgnoreCase("#PERSON_ID")) {
personIdColIndex = i;
break;
}
}
//if no person ID, then just copy the entire file
if (personIdColIndex == -1) {
br.close();
br = null;
LOG.info(" Copying 2.2 file to " + destFile);
copyFile(sourceFile, destFile);
break;
} else {
LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
}
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
pw = new PrintWriter(bw);
} else {
//filter on personID
String[] toks = line.split("\\|", -1);
if (expectedCols != -1
&& toks.length != expectedCols) {
throw new Exception("Line " + (lineIndex + 1) + " has " + toks.length + " cols but expecting " + expectedCols);
} else {
String personId = toks[personIdColIndex];
if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
&& !personIds.contains(personId)) {
continue;
}
}
}
pw.println(line);
}
if (br != null) {
br.close();
}
if (pw != null) {
pw.flush();
pw.close();
}
} else {
//the 2.1 files are going to be a pain to split by patient, so just copy them over
LOG.info("Copying 2.1 file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
}
}
}
}
private static void copyFile(File src, File dst) throws Exception {
FileInputStream fis = new FileInputStream(src);
BufferedInputStream bis = new BufferedInputStream(fis);
Files.copy(bis, dst.toPath());
bis.close();
}
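//note: java.nio.file.Files.copy(src.toPath(), dst.toPath()) would achieve the same as
//copyFile() above in one call; both fail if the destination already exists, which is why
//callers check destFile.exists() first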
private static boolean isCerner22File(String fileType) throws Exception {
if (fileType.equalsIgnoreCase("PPATI")
|| fileType.equalsIgnoreCase("PPREL")
|| fileType.equalsIgnoreCase("CDSEV")
|| fileType.equalsIgnoreCase("PPATH")
|| fileType.equalsIgnoreCase("RTTPE")
|| fileType.equalsIgnoreCase("AEATT")
|| fileType.equalsIgnoreCase("AEINV")
|| fileType.equalsIgnoreCase("AETRE")
|| fileType.equalsIgnoreCase("OPREF")
|| fileType.equalsIgnoreCase("OPATT")
|| fileType.equalsIgnoreCase("EALEN")
|| fileType.equalsIgnoreCase("EALSU")
|| fileType.equalsIgnoreCase("EALOF")
|| fileType.equalsIgnoreCase("HPSSP")
|| fileType.equalsIgnoreCase("IPEPI")
|| fileType.equalsIgnoreCase("IPWDS")
|| fileType.equalsIgnoreCase("DELIV")
|| fileType.equalsIgnoreCase("BIRTH")
|| fileType.equalsIgnoreCase("SCHAC")
|| fileType.equalsIgnoreCase("APPSL")
|| fileType.equalsIgnoreCase("DIAGN")
|| fileType.equalsIgnoreCase("PROCE")
|| fileType.equalsIgnoreCase("ORDER")
|| fileType.equalsIgnoreCase("DOCRP")
|| fileType.equalsIgnoreCase("DOCREF")
|| fileType.equalsIgnoreCase("CNTRQ")
|| fileType.equalsIgnoreCase("LETRS")
|| fileType.equalsIgnoreCase("LOREF")
|| fileType.equalsIgnoreCase("ORGREF")
|| fileType.equalsIgnoreCase("PRSNLREF")
|| fileType.equalsIgnoreCase("CVREF")
|| fileType.equalsIgnoreCase("NOMREF")
|| fileType.equalsIgnoreCase("EALIP")
|| fileType.equalsIgnoreCase("CLEVE")
|| fileType.equalsIgnoreCase("ENCNT")
|| fileType.equalsIgnoreCase("RESREF")
|| fileType.equalsIgnoreCase("PPNAM")
|| fileType.equalsIgnoreCase("PPADD")
|| fileType.equalsIgnoreCase("PPPHO")
|| fileType.equalsIgnoreCase("PPALI")
|| fileType.equalsIgnoreCase("PPINF")
|| fileType.equalsIgnoreCase("PPAGP")
|| fileType.equalsIgnoreCase("SURCC")
|| fileType.equalsIgnoreCase("SURCP")
|| fileType.equalsIgnoreCase("SURCA")
|| fileType.equalsIgnoreCase("SURCD")
|| fileType.equalsIgnoreCase("PDRES")
|| fileType.equalsIgnoreCase("PDREF")
|| fileType.equalsIgnoreCase("ABREF")
|| fileType.equalsIgnoreCase("CEPRS")
|| fileType.equalsIgnoreCase("ORDDT")
|| fileType.equalsIgnoreCase("STATREF")
|| fileType.equalsIgnoreCase("STATA")
|| fileType.equalsIgnoreCase("ENCINF")
|| fileType.equalsIgnoreCase("SCHDETAIL")
|| fileType.equalsIgnoreCase("SCHOFFER")
|| fileType.equalsIgnoreCase("PPGPORG")
|| fileType.equalsIgnoreCase("FAMILYHISTORY")) {
return true;
} else {
return false;
}
}
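/*A sketch of an equivalent, easier-to-maintain lookup for isCerner22File(), using a
static Set. The constant and method names are ours; the strings are exactly those tested
above, and upper-casing replicates the equalsIgnoreCase checks (assumes java.util.Arrays
is imported).
private static final Set<String> CERNER_22_FILE_TYPES = new HashSet<>(Arrays.asList(
"PPATI", "PPREL", "CDSEV", "PPATH", "RTTPE", "AEATT", "AEINV", "AETRE", "OPREF", "OPATT",
"EALEN", "EALSU", "EALOF", "HPSSP", "IPEPI", "IPWDS", "DELIV", "BIRTH", "SCHAC", "APPSL",
"DIAGN", "PROCE", "ORDER", "DOCRP", "DOCREF", "CNTRQ", "LETRS", "LOREF", "ORGREF", "PRSNLREF",
"CVREF", "NOMREF", "EALIP", "CLEVE", "ENCNT", "RESREF", "PPNAM", "PPADD", "PPPHO", "PPALI",
"PPINF", "PPAGP", "SURCC", "SURCP", "SURCA", "SURCD", "PDRES", "PDREF", "ABREF", "CEPRS",
"ORDDT", "STATREF", "STATA", "ENCINF", "SCHDETAIL", "SCHOFFER", "PPGPORG", "FAMILYHISTORY"));

private static boolean isCerner22FileViaSet(String fileType) {
return fileType != null && CERNER_22_FILE_TYPES.contains(fileType.toUpperCase());
}*/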
/*private static void fixSubscriberDbs() {
LOG.info("Fixing Subscriber DBs");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-05-11");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
boolean needsFixing = false;
for (UUID exchangeId: exchangeIds) {
if (!needsFixing) {
List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId);
for (ExchangeTransformAudit audit: transformAudits) {
Date transfromStart = audit.getStarted();
if (!transfromStart.before(dateError)) {
needsFixing = true;
break;
}
}
}
if (!needsFixing) {
continue;
}
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId);
Exchange exchange = exchangeDal.getExchange(exchangeId);
LOG.info(" Posting exchange " + exchangeId + " with " + batches.size() + " batches");
List<UUID> batchIds = new ArrayList<>();
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
UUID batchId = batch.getBatchId();
batchIds.add(batchId);
}
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
}
LOG.info("Finished Fixing Subscriber DBs");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void fixReferralRequests() {
LOG.info("Fixing Referral Requests");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-04-24");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
boolean needsFixing = false;
Set<UUID> patientIdsToPost = new HashSet<>();
for (UUID exchangeId: exchangeIds) {
if (!needsFixing) {
List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId);
for (ExchangeTransformAudit audit: transformAudits) {
Date transfromStart = audit.getStarted();
if (!transfromStart.before(dateError)) {
needsFixing = true;
break;
}
}
}
if (!needsFixing) {
continue;
}
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId);
Exchange exchange = exchangeDal.getExchange(exchangeId);
LOG.info("Checking exchange " + exchangeId + " with " + batches.size() + " batches");
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
UUID batchId = batch.getBatchId();
List<ResourceWrapper> wrappers = resourceDal.getResourcesForBatch(serviceId, batchId);
for (ResourceWrapper wrapper: wrappers) {
String resourceType = wrapper.getResourceType();
if (!resourceType.equals(ResourceType.ReferralRequest.toString())
|| wrapper.isDeleted()) {
continue;
}
String json = wrapper.getResourceData();
ReferralRequest referral = (ReferralRequest)FhirSerializationHelper.deserializeResource(json);
//move the wrongly-populated reason field into serviceRequested
if (!referral.hasReason()) {
continue;
}
CodeableConcept reason = referral.getReason();
referral.setReason(null);
referral.addServiceRequested(reason);
json = FhirSerializationHelper.serializeResource(referral);
wrapper.setResourceData(json);
saveResourceWrapper(serviceId, wrapper);
//add to the set of patients we know need sending on to the protocol queue
patientIdsToPost.add(patientId);
LOG.info("Fixed " + resourceType + " " + wrapper.getResourceId() + " in batch " + batchId);
}
//if our patient has just been fixed or was fixed before, post onto the protocol queue
if (patientIdsToPost.contains(patientId)) {
List<UUID> batchIds = new ArrayList<>();
batchIds.add(batchId);
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
}
}
}
LOG.info("Finished Fixing Referral Requests");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void applyEmisAdminCaches() {
LOG.info("Applying Emis Admin Caches");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
if (!exchangeDal.isServiceStarted(serviceId, endpointSystemId)) {
LOG.info(" Service not started, so skipping");
continue;
}
//get exchanges
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
if (exchangeIds.isEmpty()) {
LOG.info(" No exchanges found, so skipping");
continue;
}
UUID firstExchangeId = exchangeIds.get(0);
List<ExchangeEvent> events = exchangeDal.getExchangeEvents(firstExchangeId);
boolean appliedAdminCache = false;
for (ExchangeEvent event: events) {
if (event.getEventDesc().equals("Applied Emis Admin Resource Cache")) {
appliedAdminCache = true;
}
}
if (appliedAdminCache) {
LOG.info(" Have already applied admin cache, so skipping");
continue;
}
Exchange exchange = exchangeDal.getExchange(firstExchangeId);
String body = exchange.getBody();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(body);
if (files.length == 0) {
LOG.info(" No files in exchange " + firstExchangeId + " so skipping");
continue;
}
String firstFilePath = files[0];
String name = FilenameUtils.getBaseName(firstFilePath); //file name without extension
String[] toks = name.split("_");
if (toks.length != 5) {
throw new TransformException("Failed to extract data sharing agreement GUID from filename " + firstFilePath);
}
String sharingAgreementGuid = toks[4];
List<UUID> batchIds = new ArrayList<>();
TransformError transformError = new TransformError();
FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(firstExchangeId, serviceId, endpointSystemId, transformError, batchIds);
EmisCsvHelper csvHelper = new EmisCsvHelper(fhirResourceFiler.getServiceId(), fhirResourceFiler.getSystemId(),
fhirResourceFiler.getExchangeId(), sharingAgreementGuid,
true);
ExchangeTransformAudit transformAudit = new ExchangeTransformAudit();
transformAudit.setServiceId(serviceId);
transformAudit.setSystemId(endpointSystemId);
transformAudit.setExchangeId(firstExchangeId);
transformAudit.setId(UUID.randomUUID());
transformAudit.setStarted(new Date());
LOG.info(" Going to apply admin resource cache");
csvHelper.applyAdminResourceCache(fhirResourceFiler);
fhirResourceFiler.waitToFinish();
for (UUID batchId: batchIds) {
LOG.info(" Created batch ID " + batchId + " for exchange " + firstExchangeId);
}
transformAudit.setEnded(new Date());
transformAudit.setNumberBatchesCreated(new Integer(batchIds.size()));
boolean hadError = false;
if (transformError.getError().size() > 0) {
transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError));
hadError = true;
}
exchangeDal.save(transformAudit);
//clear down the cache of reference mappings since they won't be of much use for the next Exchange
IdHelper.clearCache();
if (hadError) {
LOG.error(" <<<<<<Error applying resource cache!");
continue;
}
//add the event to say we've applied the cache
AuditWriter.writeExchangeEvent(firstExchangeId, "Applied Emis Admin Resource Cache");
//post that ONE new batch ID onto the protocol queue
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
LOG.info("Finished Applying Emis Admin Caches");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
* fixes Emis extract(s) when a practice was disabled then subsequently re-bulked, by
* replacing the "delete" extracts with newly generated deltas that can be processed
* before the re-bulk is done
*/
private static void fixDisabledEmisExtract(String serviceOdsCode, String systemId, String sharedStoragePath, String tempDirParent) {
LOG.info("Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceOdsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(serviceOdsCode);
LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId());
/*File tempDirLast = new File(tempDir, "last");
if (!tempDirLast.exists()) {
if (!tempDirLast.mkdirs()) {
throw new Exception("Failed to create temp dir " + tempDirLast);
}
tempDirLast.mkdirs();
}
File tempDirEmpty = new File(tempDir, "empty");
if (!tempDirEmpty.exists()) {
if (!tempDirEmpty.mkdirs()) {
throw new Exception("Failed to create temp dir " + tempDirEmpty);
}
tempDirEmpty.mkdirs();
}*/
String tempDir = FilenameUtils.concat(tempDirParent, serviceOdsCode);
File f = new File(tempDir);
if (f.exists()) {
FileUtils.deleteDirectory(f);
}
UUID serviceUuid = service.getId();
UUID systemUuid = UUID.fromString(systemId);
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
//get all the exchanges, which are returned in reverse order, most recent first
List<Exchange> exchangesDesc = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);
Map<Exchange, List<String>> hmExchangeFiles = new HashMap<>();
Map<Exchange, List<String>> hmExchangeFilesWithoutStoragePrefix = new HashMap<>();
//reverse the exchange list and cache the files for each one
List<Exchange> exchanges = new ArrayList<>();
for (int i = exchangesDesc.size() - 1; i >= 0; i--) {
Exchange exchange = exchangesDesc.get(i);
String exchangeBody = exchange.getBody();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
//drop out and ignore any exchanges containing the singular bespoke reg status files
if (files.length <= 1) {
continue;
}
//drop out and ignore any exchanges for the left and dead extracts, since we don't
//expect to receive re-bulked data for the dead patients
String firstFile = files[0];
if (firstFile.indexOf("LEFT_AND_DEAD") > -1) {
continue;
}
exchanges.add(exchange);
//populate the map of the files with the shared storage prefix
List<String> fileList = Lists.newArrayList(files);
hmExchangeFiles.put(exchange, fileList);
//populate a map of the same files without the prefix
files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
for (int j = 0; j < files.length; j++) {
String file = files[j].substring(sharedStoragePath.length() + 1);
files[j] = file;
}
fileList = Lists.newArrayList(files);
hmExchangeFilesWithoutStoragePrefix.put(exchange, fileList);
}
/*exchanges.sort((o1, o2) -> {
Date d1 = o1.getTimestamp();
Date d2 = o2.getTimestamp();
return d1.compareTo(d2);
});*/
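//the commented-out sort above could also be written as:
//exchanges.sort(Comparator.comparing(Exchange::getTimestamp));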
LOG.info("Found " + exchanges.size() + " exchanges and cached their files");
int indexDisabled = -1;
int indexRebulked = -1;
int indexOriginallyBulked = -1;
//go back through them to find the extract where the re-bulk is and when it was disabled (the list is in date order, so we're iterating most-recent first)
for (int i = exchanges.size() - 1; i >= 0; i--) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
boolean disabled = isDisabledInSharingAgreementFile(files);
if (disabled) {
indexDisabled = i;
} else {
if (indexDisabled == -1) {
indexRebulked = i;
} else {
//if we've found a non-disabled extract older than the disabled ones,
//then we've gone far enough back
break;
}
}
}
//go back from when it was disabled to find the previous bulk load (i.e. the first extract, or the first one after a previous disabled period)
for (int i = indexDisabled - 1; i >= 0; i--) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
boolean disabled = isDisabledInSharingAgreementFile(files);
if (disabled) {
break;
}
indexOriginallyBulked = i;
}
if (indexOriginallyBulked > -1) {
Exchange exchangeOriginallyBulked = exchanges.get(indexOriginallyBulked);
LOG.info("Originally bulked on " + findExtractDate(exchangeOriginallyBulked, hmExchangeFiles) + " " + exchangeOriginallyBulked.getId());
}
if (indexDisabled > -1) {
Exchange exchangeDisabled = exchanges.get(indexDisabled);
LOG.info("Disabled on " + findExtractDate(exchangeDisabled, hmExchangeFiles) + " " + exchangeDisabled.getId());
}
if (indexRebulked > -1) {
Exchange exchangeRebulked = exchanges.get(indexRebulked);
LOG.info("Rebulked on " + findExtractDate(exchangeRebulked, hmExchangeFiles) + " " + exchangeRebulked.getId());
}
if (indexDisabled == -1
|| indexRebulked == -1
|| indexOriginallyBulked == -1) {
throw new Exception("Failed to find exchanges for original bulk (" + indexOriginallyBulked + ") disabling (" + indexDisabled + ") or re-bulking (" + indexRebulked + ")");
}
//continueOrQuit();
Exchange exchangeRebulked = exchanges.get(indexRebulked);
List<String> rebulkFiles = hmExchangeFiles.get(exchangeRebulked);
List<String> tempFilesCreated = new ArrayList<>();
Set<String> patientGuidsDeletedOrTooOld = new HashSet<>();
for (String rebulkFile : rebulkFiles) {
String fileType = findFileType(rebulkFile);
if (!isPatientFile(fileType)) {
continue;
}
LOG.info("Doing " + fileType);
String guidColumnName = getGuidColumnName(fileType);
//find all the guids in the re-bulk
Set<String> idsInRebulk = new HashSet<>();
InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(rebulkFile);
CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
String[] headers = null;
try {
headers = CsvHelper.getHeaderMapAsArray(csvParser);
Iterator<CSVRecord> iterator = csvParser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
//get the patient and row guid out of the file and cache in our set
String id = record.get("PatientGuid");
if (!Strings.isNullOrEmpty(guidColumnName)) {
id += "//" + record.get(guidColumnName);
}
idsInRebulk.add(id);
}
} finally {
csvParser.close();
}
LOG.info("Found " + idsInRebulk.size() + " IDs in re-bulk file: " + rebulkFile);
//create a replacement file for the exchange where the service was disabled
String replacementDisabledFile = null;
Exchange exchangeDisabled = exchanges.get(indexDisabled);
List<String> disabledFiles = hmExchangeFilesWithoutStoragePrefix.get(exchangeDisabled);
for (String s : disabledFiles) {
String disabledFileType = findFileType(s);
if (disabledFileType.equals(fileType)) {
replacementDisabledFile = FilenameUtils.concat(tempDir, s);
File dir = new File(replacementDisabledFile).getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
tempFilesCreated.add(s);
LOG.info("Created replacement file " + replacementDisabledFile);
}
}
FileWriter fileWriter = new FileWriter(replacementDisabledFile);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers));
csvPrinter.flush();
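//iterating backwards from the disabling point, track the IDs already seen so that only the
//most recent version of each record is considered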
Set<String> pastIdsProcessed = new HashSet<>();
//now go through all files of the same type from BEFORE the service was disabled
//to find any rows that we'll need to explicitly delete because they were deleted while
//the extract was disabled
for (int i = indexDisabled - 1; i >= indexOriginallyBulked; i--) {
Exchange exchange = exchanges.get(i);
String originalFile = null;
List<String> files = hmExchangeFiles.get(exchange);
for (String s : files) {
String originalFileType = findFileType(s);
if (originalFileType.equals(fileType)) {
originalFile = s;
break;
}
}
if (originalFile == null) {
continue;
}
LOG.info(" Reading " + originalFile);
reader = FileHelper.readFileReaderFromSharedStorage(originalFile);
csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
try {
Iterator<CSVRecord> iterator = csvParser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String patientGuid = record.get("PatientGuid");
//get the patient and row guid out of the file and cache in our set
String uniqueId = patientGuid;
if (!Strings.isNullOrEmpty(guidColumnName)) {
uniqueId += "//" + record.get(guidColumnName);
}
//if we've already handled this record in a more recent extract, then skip it
if (pastIdsProcessed.contains(uniqueId)) {
continue;
}
pastIdsProcessed.add(uniqueId);
//if this ID isn't deleted and isn't in the re-bulk then it means
//it WAS deleted in Emis Web but we didn't receive the delete, because it was deleted
//from Emis Web while the extract feed was disabled
//if the record is deleted, then we won't expect it in the re-bulk
boolean deleted = Boolean.parseBoolean(record.get("Deleted"));
if (deleted) {
//if it's the Patient file, stick the patient GUID in a set so we know about full patient record deletes
if (fileType.equals("Admin_Patient")) {
patientGuidsDeletedOrTooOld.add(patientGuid);
}
continue;
}
//if it's not the patient file and we refer to a patient that we know
//has been deleted, then skip this row, since we know we're deleting the entire patient record
if (patientGuidsDeletedOrTooOld.contains(patientGuid)) {
continue;
}
//if the re-bulk contains a record matching this one, then it's OK
if (idsInRebulk.contains(uniqueId)) {
continue;
}
//the rebulk won't contain any data for patients that are now too old (i.e. deducted or deceased > 2 yrs ago),
//so any patient ID in the original files but not in the rebulk can be treated the same way and any data for them can be skipped
if (fileType.equals("Admin_Patient")) {
//retrieve the Patient and EpisodeOfCare resource for the patient so we can confirm they are deceased or deducted
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID patientUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.Patient, patientGuid);
if (patientUuid == null) {
throw new Exception("Failed to find patient UUID from GUID [" + patientGuid + "]");
}
Patient patientResource = (Patient) resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.Patient, patientUuid.toString());
if (patientResource.hasDeceased()) {
patientGuidsDeletedOrTooOld.add(patientGuid);
continue;
}
UUID episodeUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.EpisodeOfCare, patientGuid); //we use the patient GUID for the episode too
EpisodeOfCare episodeResource = (EpisodeOfCare) resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.EpisodeOfCare, episodeUuid.toString());
if (episodeResource.hasPeriod()
&& !PeriodHelper.isActive(episodeResource.getPeriod())) {
patientGuidsDeletedOrTooOld.add(patientGuid);
continue;
}
}
//create a new CSV record, carrying over the GUIDs from the original but marking as deleted
String[] newRecord = new String[headers.length];
for (int j = 0; j < newRecord.length; j++) {
String header = headers[j];
if (header.equals("PatientGuid")
|| header.equals("OrganisationGuid")
|| (!Strings.isNullOrEmpty(guidColumnName)
&& header.equals(guidColumnName))) {
String val = record.get(header);
newRecord[j] = val;
} else if (header.equals("Deleted")) {
newRecord[j] = "true";
} else {
newRecord[j] = "";
}
}
csvPrinter.printRecord((Object[]) newRecord);
csvPrinter.flush();
//log out the raw record from the original extract that's missing from the re-bulk
StringBuilder sb = new StringBuilder();
sb.append("Record not in re-bulk: ");
for (int j = 0; j < record.size(); j++) {
if (j > 0) {
sb.append(",");
}
sb.append(record.get(j));
}
LOG.info(sb.toString());
}
} finally {
csvParser.close();
}
}
csvPrinter.flush();
csvPrinter.close();
//also create a version of the CSV file with just the header and nothing else in it
for (int i = indexDisabled + 1; i < indexRebulked; i++) {
Exchange ex = exchanges.get(i);
List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex);
for (String s : exchangeFiles) {
String exchangeFileType = findFileType(s);
if (exchangeFileType.equals(fileType)) {
String emptyTempFile = FilenameUtils.concat(tempDir, s);
File dir = new File(emptyTempFile).getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
fileWriter = new FileWriter(emptyTempFile);
bufferedWriter = new BufferedWriter(fileWriter);
csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers));
csvPrinter.flush();
csvPrinter.close();
tempFilesCreated.add(s);
LOG.info("Created empty file " + emptyTempFile);
}
}
}
}
//we also need to copy the re-bulked sharing agreement file over each extract from the period the feed was disabled
String rebulkedSharingAgreementFile = null;
for (String s : rebulkFiles) {
String fileType = findFileType(s);
if (fileType.equals("Agreements_SharingOrganisation")) {
rebulkedSharingAgreementFile = s;
}
}
if (rebulkedSharingAgreementFile == null) {
throw new Exception("Failed to find sharing agreement file in re-bulk");
}
for (int i = indexDisabled; i < indexRebulked; i++) {
Exchange ex = exchanges.get(i);
List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex);
for (String s : exchangeFiles) {
String exchangeFileType = findFileType(s);
if (exchangeFileType.equals("Agreements_SharingOrganisation")) {
String replacementFile = FilenameUtils.concat(tempDir, s);
File replacementFileObj = new File(replacementFile);
File dir = replacementFileObj.getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkedSharingAgreementFile);
Files.copy(inputStream, replacementFileObj.toPath());
inputStream.close();
tempFilesCreated.add(s);
}
}
}
//create a script to copy the files into S3
List<String> copyScript = new ArrayList<>();
copyScript.add("#!/bin/bash");
copyScript.add("");
for (String s : tempFilesCreated) {
String localFile = FilenameUtils.concat(tempDir, s);
copyScript.add("sudo aws s3 cp " + localFile + " s3://discoverysftplanding/endeavour/" + s);
}
String scriptFile = FilenameUtils.concat(tempDir, "copy.sh");
FileUtils.writeLines(new File(scriptFile), copyScript);
LOG.info("Finished - written files to " + tempDir);
dumpFileSizes(new File(tempDir));
/*continueOrQuit();
//back up every file where the service was disabled
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
for (String file: files) {
//first download from S3 to the local temp dir
InputStream inputStream = FileHelper.readFileFromSharedStorage(file);
String fileName = FilenameUtils.getName(file);
String tempPath = FilenameUtils.concat(tempDir, fileName);
File downloadDestination = new File(tempPath);
Files.copy(inputStream, downloadDestination.toPath());
//then write back to S3 in a sub-dir of the original file
String backupPath = FilenameUtils.getPath(file);
backupPath = FilenameUtils.concat(backupPath, "Original");
backupPath = FilenameUtils.concat(backupPath, fileName);
FileHelper.writeFileToSharedStorage(backupPath, downloadDestination);
LOG.info("Backed up " + file + " -> " + backupPath);
//delete from temp dir
downloadDestination.delete();
}
}
continueOrQuit();
//copy the new CSV files into the dir where it was disabled
List<String> disabledFiles = hmExchangeFiles.get(exchangeDisabled);
for (String disabledFile: disabledFiles) {
String fileType = findFileType(disabledFile);
if (!isPatientFile(fileType)) {
continue;
}
String tempFile = FilenameUtils.concat(tempDirLast.getAbsolutePath(), fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected temp file " + f);
}
FileHelper.writeFileToSharedStorage(disabledFile, f);
LOG.info("Copied " + tempFile + " -> " + disabledFile);
}
continueOrQuit();
//empty the patient files for any extracts while the service was disabled
for (int i=indexDisabled+1; i<indexRebulked; i++) {
Exchange otherExchangeDisabled = exchanges.get(i);
List<String> otherDisabledFiles = hmExchangeFiles.get(otherExchangeDisabled);
for (String otherDisabledFile: otherDisabledFiles) {
String fileType = findFileType(otherDisabledFile);
if (!isPatientFile(fileType)) {
continue;
}
String tempFile = FilenameUtils.concat(tempDirEmpty.getAbsolutePath(), fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected empty file " + f);
}
FileHelper.writeFileToSharedStorage(otherDisabledFile, f);
LOG.info("Copied " + tempFile + " -> " + otherDisabledFile);
}
}
continueOrQuit();
//copy the content of the sharing agreement file from when it was re-bulked
for (String rebulkFile: rebulkFiles) {
String fileType = findFileType(rebulkFile);
if (fileType.equals("Agreements_SharingOrganisation")) {
String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv");
File downloadDestination = new File(tempFile);
InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkFile);
Files.copy(inputStream, downloadDestination.toPath());
tempFilesCreated.add(tempFile);
}
}
//replace the sharing agreement file for all disabled extracts with the non-disabled one
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
for (String file: files) {
String fileType = findFileType(file);
if (fileType.equals("Agreements_SharingOrganisation")) {
String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected empty file " + f);
}
FileHelper.writeFileToSharedStorage(file, f);
LOG.info("Copied " + tempFile + " -> " + file);
}
}
}
LOG.info("Finished Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId);
continueOrQuit();
for (String tempFileCreated: tempFilesCreated) {
File f = new File(tempFileCreated);
if (f.exists()) {
f.delete();
}
}*/
} catch (Exception ex) {
LOG.error("", ex);
}
}
private static void dumpFileSizes(File f) {
if (f.isDirectory()) {
for (File child : f.listFiles()) {
dumpFileSizes(child);
}
} else {
String totalSizeReadable = FileUtils.byteCountToDisplaySize(f.length());
LOG.info("" + f + " = " + totalSizeReadable);
}
}
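//extracts the date token from the sharing agreement file name, which follows the pattern
//<number>_Agreements_SharingOrganisation_<timestamp>_<guid>.csv, so the fourth
//underscore-delimited token of the base name is the extract date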
private static String findExtractDate(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception {
List<String> files = fileMap.get(exchange);
String file = findSharingAgreementFile(files);
String name = FilenameUtils.getBaseName(file);
String[] toks = name.split("_");
return toks[3];
}
private static boolean isDisabledInSharingAgreementFile(List<String> files) throws Exception {
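//only the first record is read here; the per-practice sharing agreement file is assumed to hold a single row containing the org's Disabled flag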
String file = findSharingAgreementFile(files);
InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(file);
CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
try {
Iterator<CSVRecord> iterator = csvParser.iterator();
CSVRecord record = iterator.next();
String s = record.get("Disabled");
boolean disabled = Boolean.parseBoolean(s);
return disabled;
} finally {
csvParser.close();
}
}
private static void continueOrQuit() throws Exception {
LOG.info("Enter y to continue, anything else to quit");
byte[] bytes = new byte[10];
System.in.read(bytes);
char c = (char) bytes[0];
if (c != 'y' && c != 'Y') {
System.out.println("Read " + c);
System.exit(1);
}
}
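//returns the name of the column holding the row-level GUID for each Emis file type,
//or null where the PatientGuid alone identifies the record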
private static String getGuidColumnName(String fileType) {
if (fileType.equals("Admin_Patient")) {
//patient file just has patient GUID, nothing extra
return null;
} else if (fileType.equals("CareRecord_Consultation")) {
return "ConsultationGuid";
} else if (fileType.equals("CareRecord_Diary")) {
return "DiaryGuid";
} else if (fileType.equals("CareRecord_Observation")) {
return "ObservationGuid";
} else if (fileType.equals("CareRecord_Problem")) {
//there is no separate problem GUID, as it's just a modified observation
return "ObservationGuid";
} else if (fileType.equals("Prescribing_DrugRecord")) {
return "DrugRecordGuid";
} else if (fileType.equals("Prescribing_IssueRecord")) {
return "IssueRecordGuid";
} else {
throw new IllegalArgumentException(fileType);
}
}
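//derives the logical file type (e.g. Agreements_SharingOrganisation) from an Emis CSV
//file name of the form <number>_<domain>_<name>_<timestamp>_<guid>.csv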
private static String findFileType(String filePath) {
String fileName = FilenameUtils.getName(filePath);
String[] toks = fileName.split("_");
String domain = toks[1];
String name = toks[2];
return domain + "_" + name;
}
private static boolean isPatientFile(String fileType) {
if (fileType.equals("Admin_Patient")
|| fileType.equals("CareRecord_Consultation")
|| fileType.equals("CareRecord_Diary")
|| fileType.equals("CareRecord_Observation")
|| fileType.equals("CareRecord_Problem")
|| fileType.equals("Prescribing_DrugRecord")
|| fileType.equals("Prescribing_IssueRecord")) {
//note the referral file doesn't have a Deleted column, so isn't in this list
return true;
} else {
return false;
}
}
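//finds the Agreements_SharingOrganisation file within an exchange's list of files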
private static String findSharingAgreementFile(List<String> files) throws Exception {
for (String file : files) {
String fileType = findFileType(file);
if (fileType.equals("Agreements_SharingOrganisation")) {
return file;
}
}
throw new Exception("Failed to find sharing agreement file in " + files.get(0));
}
private static void testSlack() {
LOG.info("Testing slack");
try {
SlackHelper.sendSlackMessage(SlackHelper.Channel.QueueReaderAlerts, "Test Message from Queue Reader");
LOG.info("Finished testing slack");
} catch (Exception ex) {
LOG.error("", ex);
}
}
/*private static void postToInboundFromFile(UUID serviceId, UUID systemId, String filePath) {
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
Service service = serviceDalI.getById(serviceId);
LOG.info("Posting to inbound exchange for " + service.getName() + " from file " + filePath);
FileReader fr = new FileReader(filePath);
BufferedReader br = new BufferedReader(fr);
int count = 0;
List<UUID> exchangeIdBatch = new ArrayList<>();
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
UUID exchangeId = UUID.fromString(line);
//update the transform audit, so EDS UI knows we've re-queued this exchange
ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
if (audit != null
&& !audit.isResubmitted()) {
audit.setResubmitted(true);
auditRepository.save(audit);
}
count ++;
exchangeIdBatch.add(exchangeId);
if (exchangeIdBatch.size() >= 1000) {
QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
exchangeIdBatch = new ArrayList<>();
LOG.info("Done " + count);
}
}
if (!exchangeIdBatch.isEmpty()) {
QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
LOG.info("Done " + count);
}
br.close();
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Posting to inbound for " + serviceId);
}*/
/*private static void postToInbound(UUID serviceId, boolean all) {
LOG.info("Posting to inbound for " + serviceId);
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
Service service = serviceDalI.getById(serviceId);
List<UUID> systemIds = findSystemIds(service);
UUID systemId = systemIds.get(0);
ExchangeTransformErrorState errorState = auditRepository.getErrorState(serviceId, systemId);
for (UUID exchangeId: errorState.getExchangeIdsInError()) {
//update the transform audit, so EDS UI knows we've re-queued this exchange
ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
//skip any exchange IDs we've already re-queued up to be processed again
if (audit.isResubmitted()) {
LOG.debug("Not re-posting " + audit.getExchangeId() + " as it's already been resubmitted");
continue;
}
LOG.debug("Re-posting " + audit.getExchangeId());
audit.setResubmitted(true);
auditRepository.save(audit);
//then re-submit the exchange to Rabbit MQ for the queue reader to pick up
QueueHelper.postToExchange(exchangeId, "EdsInbound", null, false);
if (!all) {
LOG.info("Posted first exchange, so stopping");
break;
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Posting to inbound for " + serviceId);
}*/
/*private static void fixPatientSearchAllServices(String filterSystemId) {
LOG.info("Fixing patient search for all services and system " + filterSystemId);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
fixPatientSearch(service.getId().toString(), filterSystemId);
}
LOG.info("Finished Fixing patient search for all services and system " + filterSystemId);
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void fixPatientSearch(String serviceId, String filterSystemId) {
LOG.info("Fixing patient search for service " + serviceId);
try {
UUID serviceUuid = UUID.fromString(serviceId);
UUID filterSystemUuid = null;
if (!Strings.isNullOrEmpty(filterSystemId)) {
filterSystemUuid = UUID.fromString(filterSystemId);
}
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Set<UUID> patientsDone = new HashSet<>();
Service service = serviceDal.getById(serviceUuid);
List<UUID> systemIds = findSystemIds(service);
for (UUID systemId: systemIds) {
if (filterSystemUuid != null
&& !filterSystemUuid.equals(systemId)) {
continue;
}
List<UUID> exchanges = exchangeDalI.getExchangeIdsForService(serviceUuid, systemId);
LOG.info("Found " + exchanges.size() + " exchanges for system " + systemId);
for (UUID exchangeId : exchanges) {
List<ExchangeBatch> batches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
LOG.info("Found " + batches.size() + " batches in exchange " + exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
if (patientsDone.contains(patientId)) {
continue;
}
patientsDone.add(patientId);
ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceUuid, ResourceType.Patient.toString(), patientId);
if (wrapper != null) {
String json = wrapper.getResourceData();
if (!Strings.isNullOrEmpty(json)) {
Patient fhirPatient = (Patient)FhirSerializationHelper.deserializeResource(json);
patientSearchDal.update(serviceUuid, fhirPatient);
}
}
if (patientsDone.size() % 1000 == 0) {
LOG.info("Done " + patientsDone.size());
}
}
}
}
LOG.info("Done " + patientsDone.size());
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished fixing patient search for " + serviceId);
}*/
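//runs the given file against a MySQL server, executing each line as a separate SQL statement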
private static void runSql(String host, String username, String password, String sqlFile) {
LOG.info("Running SQL on " + host + " from " + sqlFile);
Connection conn = null;
Statement statement = null;
try {
File f = new File(sqlFile);
if (!f.exists()) {
LOG.error("" + f + " doesn't exist");
return;
}
List<String> lines = FileUtils.readLines(f);
/*String combined = String.join("\n", lines);
LOG.info("Going to run SQL");
LOG.info(combined);*/
//load driver
Class.forName("com.mysql.cj.jdbc.Driver");
//create connection
Properties props = new Properties();
props.setProperty("user", username);
props.setProperty("password", password);
conn = DriverManager.getConnection(host, props);
LOG.info("Opened connection");
statement = conn.createStatement();
long totalStart = System.currentTimeMillis();
for (String sql : lines) {
sql = sql.trim();
//skip empty lines and comment lines
if (sql.isEmpty() || sql.startsWith("--")) {
continue;
}
long start = System.currentTimeMillis();
statement.execute(sql);
long end = System.currentTimeMillis();
LOG.info("Took " + (end - start) + " ms to run: " + sql);
}
long totalEnd = System.currentTimeMillis();
LOG.info("Completed all SQL in " + (totalEnd - totalStart) + " ms");
} catch (Exception ex) {
LOG.error("", ex);
} finally {
if (statement != null) {
try {
statement.close();
} catch (Exception ex) {
//ignore errors on close
}
}
if (conn != null) {
try {
conn.close();
} catch (Exception ex) {
//ignore errors on close
}
}
}
LOG.info("Finished running SQL on " + host);
}
/*private static void fixExchangeBatches() {
LOG.info("Starting Fixing Exchange Batches");
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
List<Service> services = serviceDalI.getAll();
for (Service service: services) {
LOG.info("Doing " + service.getName());
List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(service.getId());
for (UUID exchangeId: exchangeIds) {
LOG.info(" Exchange " + exchangeId);
List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch: exchangeBatches) {
if (exchangeBatch.getEdsPatientId() != null) {
continue;
}
List<ResourceWrapper> resources = resourceDalI.getResourcesForBatch(exchangeBatch.getBatchId());
if (resources.isEmpty()) {
continue;
}
ResourceWrapper first = resources.get(0);
UUID patientId = first.getPatientId();
if (patientId != null) {
exchangeBatch.setEdsPatientId(patientId);
exchangeBatchDalI.save(exchangeBatch);
LOG.info("Fixed batch " + exchangeBatch.getBatchId() + " -> " + exchangeBatch.getEdsPatientId());
}
}
}
}
LOG.info("Finished Fixing Exchange Batches");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void exportHl7Encounters(String sourceCsvPath, String outputPath) {
LOG.info("Exporting HL7 Encounters from " + sourceCsvPath + " to " + outputPath);
try {
File sourceFile = new File(sourceCsvPath);
CSVParser csvParser = CSVParser.parse(sourceFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
//"service_id","system_id","nhs_number","patient_id","count"
int count = 0;
HashMap<UUID, List<UUID>> serviceAndSystemIds = new HashMap<>();
HashMap<UUID, Integer> patientIds = new HashMap<>();
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
count ++;
String serviceId = csvRecord.get("service_id");
String systemId = csvRecord.get("system_id");
String patientId = csvRecord.get("patient_id");
UUID serviceUuid = UUID.fromString(serviceId);
List<UUID> systemIds = serviceAndSystemIds.get(serviceUuid);
if (systemIds == null) {
systemIds = new ArrayList<>();
serviceAndSystemIds.put(serviceUuid, systemIds);
}
systemIds.add(UUID.fromString(systemId));
patientIds.put(UUID.fromString(patientId), new Integer(count));
}
csvParser.close();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ParserPool parser = new ParserPool();
Map<Integer, List<Object[]>> patientRows = new HashMap<>();
SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
for (UUID serviceId: serviceAndSystemIds.keySet()) {
//List<UUID> systemIds = serviceAndSystemIds.get(serviceId);
Service service = serviceDalI.getById(serviceId);
String serviceName = service.getName();
LOG.info("Doing service " + serviceId + " " + serviceName);
List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(serviceId);
LOG.info("Got " + exchangeIds.size() + " exchange IDs to scan");
int exchangeCount = 0;
for (UUID exchangeId: exchangeIds) {
exchangeCount ++;
if (exchangeCount % 1000 == 0) {
LOG.info("Done " + exchangeCount + " exchanges");
}
List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch: exchangeBatches) {
UUID patientId = exchangeBatch.getEdsPatientId();
if (patientId != null
&& !patientIds.containsKey(patientId)) {
continue;
}
Integer patientIdInt = patientIds.get(patientId);
//get encounters for exchange batch
UUID batchId = exchangeBatch.getBatchId();
List<ResourceWrapper> resourceWrappers = resourceDalI.getResourcesForBatch(serviceId, batchId);
for (ResourceWrapper resourceWrapper: resourceWrappers) {
if (resourceWrapper.isDeleted()) {
continue;
}
String resourceType = resourceWrapper.getResourceType();
if (!resourceType.equals(ResourceType.Encounter.toString())) {
continue;
}
LOG.info("Processing " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId());
String json = resourceWrapper.getResourceData();
Encounter fhirEncounter = (Encounter)parser.parse(json);
Date date = null;
if (fhirEncounter.hasPeriod()) {
Period period = fhirEncounter.getPeriod();
if (period.hasStart()) {
date = period.getStart();
}
}
String episodeId = null;
if (fhirEncounter.hasEpisodeOfCare()) {
Reference episodeReference = fhirEncounter.getEpisodeOfCare().get(0);
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(episodeReference);
EpisodeOfCare fhirEpisode = (EpisodeOfCare)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirEpisode != null) {
if (fhirEpisode.hasIdentifier()) {
episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_BARTS_FIN_EPISODE_ID);
if (Strings.isNullOrEmpty(episodeId)) {
episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_HOMERTON_FIN_EPISODE_ID);
}
}
}
}
String adtType = null;
String adtCode = null;
Extension extension = ExtensionConverter.findExtension(fhirEncounter, FhirExtensionUri.HL7_MESSAGE_TYPE);
if (extension != null) {
CodeableConcept codeableConcept = (CodeableConcept) extension.getValue();
Coding hl7MessageTypeCoding = CodeableConceptHelper.findCoding(codeableConcept, FhirUri.CODE_SYSTEM_HL7V2_MESSAGE_TYPE);
if (hl7MessageTypeCoding != null) {
adtType = hl7MessageTypeCoding.getDisplay();
adtCode = hl7MessageTypeCoding.getCode();
}
} else {
//for older formats of the transformed resources, the HL7 message type can only be found from the raw original exchange body
try {
Exchange exchange = exchangeDalI.getExchange(exchangeId);
String exchangeBody = exchange.getBody();
Bundle bundle = (Bundle) FhirResourceHelper.deserialiseResouce(exchangeBody);
for (Bundle.BundleEntryComponent entry: bundle.getEntry()) {
if (entry.getResource() != null
&& entry.getResource() instanceof MessageHeader) {
MessageHeader header = (MessageHeader)entry.getResource();
if (header.hasEvent()) {
Coding coding = header.getEvent();
adtType = coding.getDisplay();
adtCode = coding.getCode();
}
}
}
} catch (Exception ex) {
//if the exchange body isn't a FHIR bundle, then we'll get an error by treating as such, so just ignore them
}
}
String cls = null;
if (fhirEncounter.hasClass_()) {
Encounter.EncounterClass encounterClass = fhirEncounter.getClass_();
if (encounterClass == Encounter.EncounterClass.OTHER
&& fhirEncounter.hasClass_Element()
&& fhirEncounter.getClass_Element().hasExtension()) {
for (Extension classExtension: fhirEncounter.getClass_Element().getExtension()) {
if (classExtension.getUrl().equals(FhirExtensionUri.ENCOUNTER_CLASS)) {
//not 100% sure of the type of the value, so just append to a String
cls = "" + classExtension.getValue();
}
}
}
if (Strings.isNullOrEmpty(cls)) {
cls = encounterClass.toCode();
}
}
String type = null;
if (fhirEncounter.hasType()) {
//only seem to ever have one type
CodeableConcept codeableConcept = fhirEncounter.getType().get(0);
type = codeableConcept.getText();
}
String status = null;
if (fhirEncounter.hasStatus()) {
Encounter.EncounterState encounterState = fhirEncounter.getStatus();
status = encounterState.toCode();
}
String location = null;
String locationType = null;
if (fhirEncounter.hasLocation()) {
//first location is always the current location
Encounter.EncounterLocationComponent encounterLocation = fhirEncounter.getLocation().get(0);
if (encounterLocation.hasLocation()) {
Reference locationReference = encounterLocation.getLocation();
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(locationReference);
Location fhirLocation = (Location)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirLocation != null) {
if (fhirLocation.hasName()) {
location = fhirLocation.getName();
}
if (fhirLocation.hasType()) {
CodeableConcept typeCodeableConcept = fhirLocation.getType();
if (typeCodeableConcept.hasCoding()) {
Coding coding = typeCodeableConcept.getCoding().get(0);
locationType = coding.getDisplay();
}
}
}
}
}
String clinician = null;
if (fhirEncounter.hasParticipant()) {
//first participant seems to be the interesting one
Encounter.EncounterParticipantComponent encounterParticipant = fhirEncounter.getParticipant().get(0);
if (encounterParticipant.hasIndividual()) {
Reference practitionerReference = encounterParticipant.getIndividual();
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(practitionerReference);
Practitioner fhirPractitioner = (Practitioner)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirPractitioner != null) {
if (fhirPractitioner.hasName()) {
HumanName name = fhirPractitioner.getName();
clinician = name.getText();
if (Strings.isNullOrEmpty(clinician)) {
clinician = "";
for (StringType s: name.getPrefix()) {
clinician += s.getValueNotNull();
clinician += " ";
}
for (StringType s: name.getGiven()) {
clinician += s.getValueNotNull();
clinician += " ";
}
for (StringType s: name.getFamily()) {
clinician += s.getValueNotNull();
clinician += " ";
}
clinician = clinician.trim();
}
}
}
}
}
Object[] row = new Object[12];
row[0] = serviceName;
row[1] = patientIdInt.toString();
row[2] = sdfOutput.format(date);
row[3] = episodeId;
row[4] = adtCode;
row[5] = adtType;
row[6] = cls;
row[7] = type;
row[8] = status;
row[9] = location;
row[10] = locationType;
row[11] = clinician;
List<Object[]> rows = patientRows.get(patientIdInt);
if (rows == null) {
rows = new ArrayList<>();
patientRows.put(patientIdInt, rows);
}
rows.add(row);
}
}
}
}
String[] outputColumnHeaders = new String[] {"Source", "Patient", "Date", "Episode ID", "ADT Message Code", "ADT Message Type", "Class", "Type", "Status", "Location", "Location Type", "Clinician"};
FileWriter fileWriter = new FileWriter(outputPath);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVFormat format = CSVFormat.DEFAULT
.withHeader(outputColumnHeaders)
.withQuote('"');
CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, format);
for (int i=0; i <= count; i++) {
Integer patientIdInt = new Integer(i);
List<Object[]> rows = patientRows.get(patientIdInt);
if (rows != null) {
for (Object[] row: rows) {
csvPrinter.printRecord(row);
}
}
}
csvPrinter.close();
bufferedWriter.close();
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Exporting Encounters from " + sourceCsvPath + " to " + outputPath);
}*/
/*private static void registerShutdownHook() {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
LOG.info("");
try {
Thread.sleep(5000);
} catch (Throwable ex) {
LOG.error("", ex);
}
LOG.info("Done");
}
});
}*/
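//scans a local tree of Emis extracts (one folder per SFTP reader, one dated folder per extract)
//and uses the sharing agreement, organisation and patient files to work out when each
//organisation's feed became active and how many distinct patients it contains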
private static void findEmisStartDates(String path, String outputPath) {
LOG.info("Finding EMIS Start Dates in " + path + ", writing to " + outputPath);
try {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH.mm.ss");
Map<String, Date> startDates = new HashMap<>();
Map<String, String> servers = new HashMap<>();
Map<String, String> names = new HashMap<>();
Map<String, String> odsCodes = new HashMap<>();
Map<String, String> cdbNumbers = new HashMap<>();
Map<String, Set<String>> distinctPatients = new HashMap<>();
File root = new File(path);
for (File sftpRoot : root.listFiles()) {
LOG.info("Checking " + sftpRoot);
Map<Date, File> extracts = new HashMap<>();
List<Date> extractDates = new ArrayList<>();
for (File extractRoot : sftpRoot.listFiles()) {
Date d = sdf.parse(extractRoot.getName());
//LOG.info("" + extractRoot.getName() + " -> " + d);
extracts.put(d, extractRoot);
extractDates.add(d);
}
Collections.sort(extractDates);
for (Date extractDate : extractDates) {
File extractRoot = extracts.get(extractDate);
LOG.info("Checking " + extractRoot);
//read the sharing agreements file
//e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv
File sharingAgreementsFile = null;
for (File f : extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("agreements_sharingorganisation") > -1
&& name.endsWith(".csv")) {
sharingAgreementsFile = f;
break;
}
}
if (sharingAgreementsFile == null) {
LOG.info("Null agreements file for " + extractRoot);
continue;
}
CSVParser csvParser = CSVParser.parse(sharingAgreementsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String activated = csvRecord.get("IsActivated");
String disabled = csvRecord.get("Disabled");
servers.put(orgGuid, sftpRoot.getName());
if (activated.equalsIgnoreCase("true")) {
if (disabled.equalsIgnoreCase("false")) {
Date d = sdf.parse(extractRoot.getName());
Date existingDate = startDates.get(orgGuid);
if (existingDate == null) {
startDates.put(orgGuid, d);
}
} else {
if (startDates.containsKey(orgGuid)) {
startDates.put(orgGuid, null);
}
}
}
}
} finally {
csvParser.close();
}
//go through orgs file to get name, ods and cdb codes
File orgsFile = null;
for (File f : extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("admin_organisation_") > -1
&& name.endsWith(".csv")) {
orgsFile = f;
break;
}
}
csvParser = CSVParser.parse(orgsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String name = csvRecord.get("OrganisationName");
String odsCode = csvRecord.get("ODSCode");
String cdb = csvRecord.get("CDB");
names.put(orgGuid, name);
odsCodes.put(orgGuid, odsCode);
cdbNumbers.put(orgGuid, cdb);
}
} finally {
csvParser.close();
}
//go through patients file to get count
File patientFile = null;
for (File f : extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("admin_patient_") > -1
&& name.endsWith(".csv")) {
patientFile = f;
break;
}
}
csvParser = CSVParser.parse(patientFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String patientGuid = csvRecord.get("PatientGuid");
String deleted = csvRecord.get("Deleted");
Set<String> distinctPatientSet = distinctPatients.get(orgGuid);
if (distinctPatientSet == null) {
distinctPatientSet = new HashSet<>();
distinctPatients.put(orgGuid, distinctPatientSet);
}
if (deleted.equalsIgnoreCase("true")) {
distinctPatientSet.remove(patientGuid);
} else {
distinctPatientSet.add(patientGuid);
}
}
} finally {
csvParser.close();
}
}
}
SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd");
StringBuilder sb = new StringBuilder();
sb.append("Name,OdsCode,CDB,OrgGuid,StartDate,Server,Patients");
for (String orgGuid : startDates.keySet()) {
Date startDate = startDates.get(orgGuid);
String server = servers.get(orgGuid);
String name = names.get(orgGuid);
String odsCode = odsCodes.get(orgGuid);
String cdbNumber = cdbNumbers.get(orgGuid);
Set<String> distinctPatientSet = distinctPatients.get(orgGuid);
String startDateDesc = null;
if (startDate != null) {
startDateDesc = sdfOutput.format(startDate);
}
Long countDistinctPatients = null;
if (distinctPatientSet != null) {
countDistinctPatients = new Long(distinctPatientSet.size());
}
sb.append("\n");
sb.append("\"" + name + "\"");
sb.append(",");
sb.append("\"" + odsCode + "\"");
sb.append(",");
sb.append("\"" + cdbNumber + "\"");
sb.append(",");
sb.append("\"" + orgGuid + "\"");
sb.append(",");
sb.append(startDateDesc);
sb.append(",");
sb.append("\"" + server + "\"");
sb.append(",");
sb.append(countDistinctPatients);
}
LOG.info(sb.toString());
FileUtils.writeStringToFile(new File(outputPath), sb.toString());
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Finding Start Dates in " + path + ", writing to " + outputPath);
}
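//scans local Emis extracts to count the distinct consultation source terms (and their Snomed
//mappings) recorded since the hard-coded cutoff date, writing the counts to the output file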
private static void findEncounterTerms(String path, String outputPath) {
LOG.info("Finding Encounter Terms from " + path);
Map<String, Long> hmResults = new HashMap<>();
//source term, source term snomed ID, source term snomed term - count
try {
File root = new File(path);
File[] files = root.listFiles();
for (File readerRoot : files) { //emis001
LOG.info("Finding terms in " + readerRoot);
//first read in all the coding files to build up our map of codes
Map<String, String> hmCodes = new HashMap<>();
for (File dateFolder : readerRoot.listFiles()) {
LOG.info("Looking for codes in " + dateFolder);
File f = findFile(dateFolder, "Coding_ClinicalCode");
if (f == null) {
LOG.error("Failed to find coding file in " + dateFolder.getAbsolutePath());
continue;
}
CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String codeId = csvRecord.get("CodeId");
String term = csvRecord.get("Term");
String snomed = csvRecord.get("SnomedCTConceptId");
hmCodes.put(codeId, snomed + ",\"" + term + "\"");
}
csvParser.close();
}
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
Date cutoff = dateFormat.parse("2017-01-01");
//now process the consultation files themselves
for (File dateFolder : readerRoot.listFiles()) {
LOG.info("Looking for consultations in " + dateFolder);
File f = findFile(dateFolder, "CareRecord_Consultation");
if (f == null) {
LOG.error("Failed to find consultation file in " + dateFolder.getAbsolutePath());
continue;
}
CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String term = csvRecord.get("ConsultationSourceTerm");
String codeId = csvRecord.get("ConsultationSourceCodeId");
if (Strings.isNullOrEmpty(term)
&& Strings.isNullOrEmpty(codeId)) {
continue;
}
String date = csvRecord.get("EffectiveDate");
if (Strings.isNullOrEmpty(date)) {
continue;
}
Date d = dateFormat.parse(date);
if (d.before(cutoff)) {
continue;
}
String line = "\"" + term + "\",";
if (!Strings.isNullOrEmpty(codeId)) {
String codeLookup = hmCodes.get(codeId);
if (codeLookup == null) {
LOG.error("Failed to find lookup for codeID " + codeId);
continue;
}
line += codeLookup;
} else {
line += ",";
}
Long count = hmResults.get(line);
if (count == null) {
count = new Long(1);
} else {
count = new Long(count.longValue() + 1);
}
hmResults.put(line, count);
}
csvParser.close();
}
}
//save results to file
StringBuilder output = new StringBuilder();
output.append("\"consultation term\",\"snomed concept ID\",\"snomed term\",\"count\"");
output.append("\r\n");
for (String line : hmResults.keySet()) {
Long count = hmResults.get(line);
String combined = line + "," + count;
output.append(combined);
output.append("\r\n");
}
LOG.info("FInished");
LOG.info(output.toString());
FileUtils.writeStringToFile(new File(outputPath), output.toString());
LOG.info("written output to " + outputPath);
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished finding Encounter Terms from " + path);
}
private static File findFile(File root, String token) throws Exception {
for (File f : root.listFiles()) {
String s = f.getName();
if (s.indexOf(token) > -1) {
return f;
}
}
return null;
}
/*private static void populateProtocolQueue(String serviceIdStr, String startingExchangeId) {
LOG.info("Starting Populating Protocol Queue for " + serviceIdStr);
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
if (serviceIdStr.equalsIgnoreCase("All")) {
serviceIdStr = null;
}
try {
List<Service> services = new ArrayList<>();
if (Strings.isNullOrEmpty(serviceIdStr)) {
services = serviceRepository.getAll();
} else {
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
services.add(service);
}
for (Service service: services) {
List<UUID> exchangeIds = auditRepository.getExchangeIdsForService(service.getId());
LOG.info("Found " + exchangeIds.size() + " exchangeIds for " + service.getName());
if (startingExchangeId != null) {
UUID startingExchangeUuid = UUID.fromString(startingExchangeId);
if (exchangeIds.contains(startingExchangeUuid)) {
//if in the list, remove everything up to and including the starting exchange
int index = exchangeIds.indexOf(startingExchangeUuid);
LOG.info("Found starting exchange " + startingExchangeId + " at " + index + " so removing up to this point");
for (int i=index; i>=0; i--) {
exchangeIds.remove(i);
}
startingExchangeId = null;
} else {
//if not in the list, skip all these exchanges
LOG.info("List doesn't contain starting exchange " + startingExchangeId + " so skipping");
continue;
}
}
QueueHelper.postToExchange(exchangeIds, "edsProtocol", null, true);
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Populating Protocol Queue for " + serviceIdStr);
}*/
/*private static void findDeletedOrgs() {
LOG.info("Starting finding deleted orgs");
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
List<Service> services = new ArrayList<>();
try {
for (Service service: serviceRepository.getAll()) {
services.add(service);
}
} catch (Exception ex) {
LOG.error("", ex);
}
services.sort((o1, o2) -> {
String name1 = o1.getName();
String name2 = o2.getName();
return name1.compareToIgnoreCase(name2);
});
for (Service service: services) {
try {
UUID serviceUuid = service.getId();
List<Exchange> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 1, new Date(0), new Date());
LOG.info("Service: " + service.getName() + " " + service.getLocalId());
if (exchangeByServices.isEmpty()) {
LOG.info(" no exchange found!");
continue;
}
Exchange exchangeByService = exchangeByServices.get(0);
UUID exchangeId = exchangeByService.getId();
Exchange exchange = auditRepository.getExchange(exchangeId);
Map<String, String> headers = exchange.getHeaders();
String systemUuidStr = headers.get(HeaderKeys.SenderSystemUuid);
UUID systemUuid = UUID.fromString(systemUuidStr);
int batches = countBatches(exchangeId, serviceUuid, systemUuid);
LOG.info(" Most recent exchange had " + batches + " batches");
if (batches > 1 && batches < 2000) {
continue;
}
//go back until we find the FIRST exchange where it broke
exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 250, new Date(0), new Date());
for (int i=0; i<exchangeByServices.size(); i++) {
exchangeByService = exchangeByServices.get(i);
exchangeId = exchangeByService.getId();
batches = countBatches(exchangeId, serviceUuid, systemUuid);
exchange = auditRepository.getExchange(exchangeId);
Date timestamp = exchange.getTimestamp();
if (batches < 1 || batches > 2000) {
LOG.info(" " + timestamp + " had " + batches);
}
if (batches > 1 && batches < 2000) {
LOG.info(" " + timestamp + " had " + batches);
break;
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
}
LOG.info("Finished finding deleted orgs");
}*/
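//sums the number of batches created across all transform audits for the given exchange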
private static int countBatches(UUID exchangeId, UUID serviceId, UUID systemId) throws Exception {
int batches = 0;
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId);
for (ExchangeTransformAudit audit : audits) {
if (audit.getNumberBatchesCreated() != null) {
batches += audit.getNumberBatchesCreated();
}
}
return batches;
}
/*private static void fixExchanges(UUID justThisService) {
LOG.info("Fixing exchanges");
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId : exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
boolean changed = false;
String body = exchange.getBody();
String[] files = body.split("\n");
if (files.length == 0) {
continue;
}
for (int i=0; i<files.length; i++) {
String original = files[i];
//remove /r characters
String trimmed = original.trim();
//add the new prefix
if (!trimmed.startsWith("sftpreader/EMIS001/")) {
trimmed = "sftpreader/EMIS001/" + trimmed;
}
if (!original.equals(trimmed)) {
files[i] = trimmed;
changed = true;
}
}
if (changed) {
LOG.info("Fixed exchange " + exchangeId);
LOG.info(body);
body = String.join("\n", files);
exchange.setBody(body);
AuditWriter.writeExchange(exchange);
}
}
}
LOG.info("Fixed exchanges");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void deleteDataForService(UUID serviceId) {
Service dbService = new ServiceRepository().getById(serviceId);
//the delete will take some time, so do the delete in a separate thread
LOG.info("Deleting all data for service " + dbService.getName() + " " + dbService.getId());
FhirDeletionService deletor = new FhirDeletionService(dbService);
try {
deletor.deleteData();
LOG.info("Completed deleting all data for service " + dbService.getName() + " " + dbService.getId());
} catch (Exception ex) {
LOG.error("Error deleting service " + dbService.getName() + " " + dbService.getId(), ex);
}
}*/
/*private static void testLogging() {
while (true) {
System.out.println("Checking logging at " + System.currentTimeMillis());
try {
Thread.sleep(4000);
} catch (Exception e) {
e.printStackTrace();
}
LOG.trace("trace logging");
LOG.debug("debug logging");
LOG.info("info logging");
LOG.warn("warn logging");
LOG.error("error logging");
}
}
*/
/*private static void fixExchangeProtocols() {
LOG.info("Fixing exchange protocols");
AuditRepository auditRepository = new AuditRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.Exchange LIMIT 1000;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
LOG.info("Processing exchange " + exchangeId);
Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceId = UUID.fromString(serviceIdStr);
List<String> newIds = new ArrayList<>();
String protocolJson = headers.get(HeaderKeys.Protocols);
if (!headers.containsKey(HeaderKeys.Protocols)) {
try {
List<LibraryItem> libraryItemList = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr);
// Get protocols where service is publisher
newIds = libraryItemList.stream()
.filter(
libraryItem -> libraryItem.getProtocol().getServiceContract().stream()
.anyMatch(sc ->
sc.getType().equals(ServiceContractType.PUBLISHER)
&& sc.getService().getUuid().equals(serviceIdStr)))
.map(t -> t.getUuid().toString())
.collect(Collectors.toList());
} catch (Exception e) {
LOG.error("Failed to find protocols for exchange " + exchange.getExchangeId(), e);
continue;
}
} else {
try {
JsonNode node = ObjectMapperPool.getInstance().readTree(protocolJson);
for (int i = 0; i < node.size(); i++) {
JsonNode libraryItemNode = node.get(i);
JsonNode idNode = libraryItemNode.get("uuid");
String id = idNode.asText();
newIds.add(id);
}
} catch (Exception e) {
LOG.error("Failed to read Json from " + protocolJson + " for exchange " + exchange.getExchangeId(), e);
continue;
}
}
try {
if (newIds.isEmpty()) {
headers.remove(HeaderKeys.Protocols);
} else {
String protocolsJson = ObjectMapperPool.getInstance().writeValueAsString(newIds.toArray());
headers.put(HeaderKeys.Protocols, protocolsJson);
}
} catch (JsonProcessingException e) {
LOG.error("Unable to serialize protocols to JSON for exchange " + exchange.getExchangeId(), e);
continue;
}
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(headerJson);
} catch (JsonProcessingException e) {
LOG.error("Failed to write exchange headers to Json for exchange " + exchange.getExchangeId(), e);
continue;
}
auditRepository.save(exchange);
}
LOG.info("Finished fixing exchange protocols");
}*/
/*private static void fixExchangeHeaders() {
LOG.info("Fixing exchange headers");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
OrganisationRepository organisationRepository = new OrganisationRepository();
List<Exchange> exchanges = new AuditRepository().getAllExchanges();
for (Exchange exchange: exchanges) {
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
if (headers.containsKey(HeaderKeys.SenderLocalIdentifier)
&& headers.containsKey(HeaderKeys.SenderOrganisationUuid)) {
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
Map<UUID, String> orgMap = service.getOrganisations();
if (orgMap.size() != 1) {
LOG.error("Wrong number of orgs in service " + serviceId + " for exchange " + exchange.getExchangeId());
continue;
}
UUID orgId = orgMap
.keySet()
.stream()
.collect(StreamExtension.firstOrNullCollector());
Organisation organisation = organisationRepository.getById(orgId);
String odsCode = organisation.getNationalId();
headers.put(HeaderKeys.SenderLocalIdentifier, odsCode);
headers.put(HeaderKeys.SenderOrganisationUuid, orgId.toString());
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
LOG.info("Creating exchange " + exchange.getExchangeId());
}
LOG.info("Finished fixing exchange headers");
}*/
/*private static void fixExchangeHeaders() {
LOG.info("Fixing exchange headers");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
OrganisationRepository organisationRepository = new OrganisationRepository();
LibraryRepository libraryRepository = new LibraryRepository();
List<Exchange> exchanges = new AuditRepository().getAllExchanges();
for (Exchange exchange: exchanges) {
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
boolean changed = false;
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
try {
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint : endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString();
ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId);
Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId());
LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
System system = libraryItem.getSystem();
for (TechnicalInterface technicalInterface : system.getTechnicalInterface()) {
if (endpointInterfaceId.equals(technicalInterface.getUuid())) {
if (!headers.containsKey(HeaderKeys.SourceSystem)) {
headers.put(HeaderKeys.SourceSystem, technicalInterface.getMessageFormat());
changed = true;
}
if (!headers.containsKey(HeaderKeys.SystemVersion)) {
headers.put(HeaderKeys.SystemVersion, technicalInterface.getMessageFormatVersion());
changed = true;
}
if (!headers.containsKey(HeaderKeys.SenderSystemUuid)) {
headers.put(HeaderKeys.SenderSystemUuid, endpointSystemId.toString());
changed = true;
}
}
}
}
} catch (Exception e) {
LOG.error("Failed to find endpoint details for " + exchange.getExchangeId());
continue;
}
if (changed) {
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
LOG.info("Fixed exchange " + exchange.getExchangeId());
}
}
LOG.info("Finished fixing exchange headers");
}*/
/*private static void testConnection(String configName) {
try {
JsonNode config = ConfigManager.getConfigurationAsJson(configName, "enterprise");
String driverClass = config.get("driverClass").asText();
String url = config.get("url").asText();
String username = config.get("username").asText();
String password = config.get("password").asText();
//force the driver to be loaded
Class.forName(driverClass);
Connection conn = DriverManager.getConnection(url, username, password);
conn.setAutoCommit(false);
LOG.info("Connection ok");
conn.close();
} catch (Exception e) {
LOG.error("", e);
}
}*/
/*private static void testConnection() {
try {
JsonNode config = ConfigManager.getConfigurationAsJson("postgres", "enterprise");
String url = config.get("url").asText();
String username = config.get("username").asText();
String password = config.get("password").asText();
//force the driver to be loaded
Class.forName("org.postgresql.Driver");
Connection conn = DriverManager.getConnection(url, username, password);
conn.setAutoCommit(false);
LOG.info("Connection ok");
conn.close();
} catch (Exception e) {
LOG.error("", e);
}
}*/
/*private static void startEnterpriseStream(UUID serviceId, String configName, UUID exchangeIdStartFrom, UUID batchIdStartFrom) throws Exception {
LOG.info("Starting Enterprise Streaming for " + serviceId + " using " + configName + " starting from exchange " + exchangeIdStartFrom + " and batch " + batchIdStartFrom);
LOG.info("Testing database connection");
testConnection(configName);
Service service = new ServiceRepository().getById(serviceId);
List<UUID> orgIds = new ArrayList<>(service.getOrganisations().keySet());
UUID orgId = orgIds.get(0);
List<ExchangeByService> exchangeByServiceList = new AuditRepository().getExchangesByService(serviceId, Integer.MAX_VALUE);
for (int i=exchangeByServiceList.size()-1; i>=0; i--) {
ExchangeByService exchangeByService = exchangeByServiceList.get(i);
//for (ExchangeByService exchangeByService: exchangeByServiceList) {
UUID exchangeId = exchangeByService.getExchangeId();
if (exchangeIdStartFrom != null) {
if (!exchangeIdStartFrom.equals(exchangeId)) {
continue;
} else {
//once we have a match, set to null so we don't skip any subsequent ones
exchangeIdStartFrom = null;
}
}
Exchange exchange = AuditWriter.readExchange(exchangeId);
String senderOrgUuidStr = exchange.getHeader(HeaderKeys.SenderOrganisationUuid);
UUID senderOrgUuid = UUID.fromString(senderOrgUuidStr);
//this one had 90,000 batches and doesn't need doing again
if (exchangeId.equals(UUID.fromString("b9b93be0-afd8-11e6-8c16-c1d5a00342f3"))) {
LOG.info("Skipping exchange " + exchangeId);
continue;
}
List<ExchangeBatch> exchangeBatches = new ExchangeBatchRepository().retrieveForExchangeId(exchangeId);
LOG.info("Processing exchange " + exchangeId + " with " + exchangeBatches.size() + " batches");
for (int j=0; j<exchangeBatches.size(); j++) {
ExchangeBatch exchangeBatch = exchangeBatches.get(j);
UUID batchId = exchangeBatch.getBatchId();
if (batchIdStartFrom != null) {
if (!batchIdStartFrom.equals(batchId)) {
continue;
} else {
batchIdStartFrom = null;
}
}
LOG.info("Processing exchange " + exchangeId + " and batch " + batchId + " " + (j+1) + "/" + exchangeBatches.size());
try {
String outbound = FhirToEnterpriseCsvTransformer.transformFromFhir(senderOrgUuid, batchId, null);
if (!Strings.isNullOrEmpty(outbound)) {
EnterpriseFiler.file(outbound, configName);
}
} catch (Exception ex) {
throw new PipelineException("Failed to process exchange " + exchangeId + " and batch " + batchId, ex);
}
}
}
}*/
/*private static void fixMissingExchanges() {
LOG.info("Fixing missing exchanges");
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id, batch_id, inserted_at FROM ehr.exchange_batch LIMIT 600000;");
stmt.setFetchSize(100);
Set<UUID> exchangeIdsDone = new HashSet<>();
AuditRepository auditRepository = new AuditRepository();
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
UUID batchId = row.get(1, UUID.class);
Date date = row.getTimestamp(2);
//LOG.info("Exchange " + exchangeId + " batch " + batchId + " date " + date);
if (exchangeIdsDone.contains(exchangeId)) {
continue;
}
if (auditRepository.getExchange(exchangeId) != null) {
continue;
}
UUID serviceId = findServiceId(batchId, session);
if (serviceId == null) {
continue;
}
Exchange exchange = new Exchange();
ExchangeByService exchangeByService = new ExchangeByService();
ExchangeEvent exchangeEvent = new ExchangeEvent();
Map<String, String> headers = new HashMap<>();
headers.put(HeaderKeys.SenderServiceUuid, serviceId.toString());
String headersJson = null;
try {
headersJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setBody("Body not available, as exchange re-created");
exchange.setExchangeId(exchangeId);
exchange.setHeaders(headersJson);
exchange.setTimestamp(date);
exchangeByService.setExchangeId(exchangeId);
exchangeByService.setServiceId(serviceId);
exchangeByService.setTimestamp(date);
exchangeEvent.setEventDesc("Created_By_Conversion");
exchangeEvent.setExchangeId(exchangeId);
exchangeEvent.setTimestamp(new Date());
auditRepository.save(exchange);
auditRepository.save(exchangeEvent);
auditRepository.save(exchangeByService);
exchangeIdsDone.add(exchangeId);
LOG.info("Creating exchange " + exchangeId);
}
LOG.info("Finished exchange fix");
}
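//works out which service a batch belongs to by first finding any resource in the batch,
//then reading that resource's resource_history row to get the service ID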
private static UUID findServiceId(UUID batchId, Session session) {
Statement stmt = new SimpleStatement("select resource_type, resource_id from ehr.resource_by_exchange_batch where batch_id = " + batchId + " LIMIT 1;");
ResultSet rs = session.execute(stmt);
if (rs.isExhausted()) {
LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId);
return null;
}
Row row = rs.one();
String resourceType = row.getString(0);
UUID resourceId = row.get(1, UUID.class);
stmt = new SimpleStatement("select service_id from ehr.resource_history where resource_type = '" + resourceType + "' and resource_id = " + resourceId + " LIMIT 1;");
rs = session.execute(stmt);
if (rs.isExhausted()) {
LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId);
return null;
}
row = rs.one();
UUID serviceId = row.get(0, UUID.class);
return serviceId;
}*/
/*private static void fixExchangeEvents() {
List<ExchangeEvent> events = new AuditRepository().getAllExchangeEvents();
for (ExchangeEvent event: events) {
if (event.getEventDesc() != null) {
continue;
}
String eventDesc = "";
int eventType = event.getEvent().intValue();
switch (eventType) {
case 1:
eventDesc = "Receive";
break;
case 2:
eventDesc = "Validate";
break;
case 3:
eventDesc = "Transform_Start";
break;
case 4:
eventDesc = "Transform_End";
break;
case 5:
eventDesc = "Send";
break;
default:
eventDesc = "??? " + eventType;
}
event.setEventDesc(eventDesc);
new AuditRepository().save(null, event);
}
}*/
/*private static void fixExchanges() {
AuditRepository auditRepository = new AuditRepository();
Map<UUID, Set<UUID>> existingOnes = new HashMap<>();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
List<Exchange> exchanges = auditRepository.getAllExchanges();
for (Exchange exchange: exchanges) {
UUID exchangeUuid = exchange.getExchangeId();
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeUuid + " and Json " + headerJson);
continue;
}
String serviceId = headers.get(HeaderKeys.SenderServiceUuid);
if (serviceId == null) {
LOG.warn("No service ID found for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceUuid = UUID.fromString(serviceId);
Set<UUID> exchangeIdsDone = existingOnes.get(serviceUuid);
if (exchangeIdsDone == null) {
exchangeIdsDone = new HashSet<>();
List<ExchangeByService> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, Integer.MAX_VALUE);
for (ExchangeByService exchangeByService: exchangeByServices) {
exchangeIdsDone.add(exchangeByService.getExchangeId());
}
existingOnes.put(serviceUuid, exchangeIdsDone);
}
//create the exchange by service entity
if (!exchangeIdsDone.contains(exchangeUuid)) {
Date timestamp = exchange.getTimestamp();
ExchangeByService newOne = new ExchangeByService();
newOne.setExchangeId(exchangeUuid);
newOne.setServiceId(serviceUuid);
newOne.setTimestamp(timestamp);
auditRepository.save(newOne);
try {
headers.remove(HeaderKeys.BatchIdsJson);
String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(newHeaderJson);
auditRepository.save(exchange);
} catch (JsonProcessingException e) {
LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e);
}
if (!headers.containsKey(HeaderKeys.BatchIdsJson)) {
//fix the batch IDs not being in the exchange
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeUuid);
if (!batches.isEmpty()) {
List<UUID> batchUuids = batches
.stream()
.map(t -> t.getBatchId())
.collect(Collectors.toList());
try {
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchUuids.toArray());
headers.put(HeaderKeys.BatchIdsJson, batchUuidsStr);
String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(newHeaderJson);
auditRepository.save(exchange, null);
} catch (JsonProcessingException e) {
LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e);
}
}
}
}
}
}*/
/*private static UUID findSystemId(Service service, String software, String messageVersion) throws PipelineException {
List<JsonServiceInterfaceEndpoint> endpoints = null;
try {
endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString();
LibraryRepository libraryRepository = new LibraryRepository();
ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId);
Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId());
LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
System system = libraryItem.getSystem();
for (TechnicalInterface technicalInterface: system.getTechnicalInterface()) {
if (endpointInterfaceId.equals(technicalInterface.getUuid())
&& technicalInterface.getMessageFormat().equalsIgnoreCase(software)
&& technicalInterface.getMessageFormatVersion().equalsIgnoreCase(messageVersion)) {
return endpointSystemId;
}
}
}
} catch (Exception e) {
throw new PipelineException("Failed to process endpoints from service " + service.getId());
}
return null;
}
*/
/*private static void addSystemIdToExchangeHeaders() throws Exception {
LOG.info("populateExchangeBatchPatients");
AuditRepository auditRepository = new AuditRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
ServiceRepository serviceRepository = new ServiceRepository();
//OrganisationRepository organisationRepository = new OrganisationRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson);
continue;
}
if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))) {
LOG.info("Skipping exchange " + exchangeId + " as no service UUID");
continue;
}
if (!Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) {
LOG.info("Skipping exchange " + exchangeId + " as already got system UUID");
continue;
}
try {
//work out service ID
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
UUID serviceId = UUID.fromString(serviceIdStr);
String software = headers.get(HeaderKeys.SourceSystem);
String version = headers.get(HeaderKeys.SystemVersion);
Service service = serviceRepository.getById(serviceId);
UUID systemUuid = findSystemId(service, software, version);
headers.put(HeaderKeys.SenderSystemUuid, systemUuid.toString());
//work out protocol IDs
try {
String newProtocolIdsJson = DetermineRelevantProtocolIds.getProtocolIdsForPublisherService(serviceIdStr);
headers.put(HeaderKeys.ProtocolIds, newProtocolIdsJson);
} catch (Exception ex) {
LOG.error("Failed to recalculate protocols for " + exchangeId + ": " + ex.getMessage());
}
//save to DB
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
} catch (Exception ex) {
LOG.error("Error with exchange " + exchangeId, ex);
}
}
LOG.info("Finished populateExchangeBatchPatients");
}*/
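//populateExchangeBatchPatients (below, currently commented out) back-fills exchange_batch.eds_patient_id
//for batches that contain exactly one patient resource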
/*private static void populateExchangeBatchPatients() throws Exception {
LOG.info("populateExchangeBatchPatients");
AuditRepository auditRepository = new AuditRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
//ServiceRepository serviceRepository = new ServiceRepository();
//OrganisationRepository organisationRepository = new OrganisationRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson);
continue;
}
if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))
|| Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) {
LOG.info("Skipping exchange " + exchangeId + " because no service or system in header");
continue;
}
try {
UUID serviceId = UUID.fromString(headers.get(HeaderKeys.SenderServiceUuid));
UUID systemId = UUID.fromString(headers.get(HeaderKeys.SenderSystemUuid));
List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch : exchangeBatches) {
if (exchangeBatch.getEdsPatientId() != null) {
continue;
}
UUID batchId = exchangeBatch.getBatchId();
List<ResourceByExchangeBatch> resourceWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Patient.toString());
if (resourceWrappers.isEmpty()) {
continue;
}
List<UUID> patientIds = new ArrayList<>();
for (ResourceByExchangeBatch resourceWrapper : resourceWrappers) {
UUID patientId = resourceWrapper.getResourceId();
if (resourceWrapper.getIsDeleted()) {
deleteEntirePatientRecord(patientId, serviceId, systemId, exchangeId, batchId);
}
if (!patientIds.contains(patientId)) {
patientIds.add(patientId);
}
}
if (patientIds.size() != 1) {
LOG.info("Skipping exchange " + exchangeId + " and batch " + batchId + " because found " + patientIds.size() + " patient IDs");
continue;
}
UUID patientId = patientIds.get(0);
exchangeBatch.setEdsPatientId(patientId);
exchangeBatchRepository.save(exchangeBatch);
}
} catch (Exception ex) {
LOG.error("Error with exchange " + exchangeId, ex);
}
}
LOG.info("Finished populateExchangeBatchPatients");
}
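//deletes every resource held for the patient by replaying each one through FhirStorageService.exchangeBatchDelete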
private static void deleteEntirePatientRecord(UUID patientId, UUID serviceId, UUID systemId, UUID exchangeId, UUID batchId) throws Exception {
FhirStorageService storageService = new FhirStorageService(serviceId, systemId);
ResourceRepository resourceRepository = new ResourceRepository();
List<ResourceByPatient> resourceWrappers = resourceRepository.getResourcesByPatient(serviceId, systemId, patientId);
for (ResourceByPatient resourceWrapper: resourceWrappers) {
String json = resourceWrapper.getResourceData();
Resource resource = new JsonParser().parse(json);
storageService.exchangeBatchDelete(exchangeId, batchId, resource);
}
}*/
/*private static void convertPatientSearch() {
LOG.info("Converting Patient Search");
ResourceRepository resourceRepository = new ResourceRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
LOG.info("Doing service " + service.getName());
for (UUID systemId : findSystemIds(service)) {
List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.EpisodeOfCare.toString());
for (ResourceByService resourceWrapper: resourceWrappers) {
if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
continue;
}
try {
EpisodeOfCare episodeOfCare = (EpisodeOfCare) new JsonParser().parse(resourceWrapper.getResourceData());
String patientId = ReferenceHelper.getReferenceId(episodeOfCare.getPatient());
ResourceHistory patientWrapper = resourceRepository.getCurrentVersion(ResourceType.Patient.toString(), UUID.fromString(patientId));
if (Strings.isNullOrEmpty(patientWrapper.getResourceData())) {
continue;
}
Patient patient = (Patient) new JsonParser().parse(patientWrapper.getResourceData());
PatientSearchHelper.update(serviceId, systemId, patient);
PatientSearchHelper.update(serviceId, systemId, episodeOfCare);
} catch (Exception ex) {
LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
}
}
}
}
LOG.info("Converted Patient Search");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
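/**
 * Returns the system UUIDs configured for the given service, by deserialising its
 * JSON endpoint list and collecting each endpoint's system UUID.
 */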
private static List<UUID> findSystemIds(Service service) throws Exception {
List<UUID> ret = new ArrayList<>();
List<JsonServiceInterfaceEndpoint> endpoints = null;
try {
endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint : endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
ret.add(endpointSystemId);
}
} catch (Exception e) {
throw new Exception("Failed to process endpoints from service " + service.getId());
}
return ret;
}
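//minimal usage sketch (the service ID here is a hypothetical placeholder):
//  Service service = new ServiceRepository().getById(UUID.fromString("..."));
//  List<UUID> systemIds = findSystemIds(service);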
/*private static void convertPatientLink() {
LOG.info("Converting Patient Link");
ResourceRepository resourceRepository = new ResourceRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
LOG.info("Doing service " + service.getName());
for (UUID systemId : findSystemIds(service)) {
List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.Patient.toString());
for (ResourceByService resourceWrapper: resourceWrappers) {
if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
continue;
}
try {
Patient patient = (Patient)new JsonParser().parse(resourceWrapper.getResourceData());
PatientLinkHelper.updatePersonId(patient);
} catch (Exception ex) {
LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
}
}
}
}
LOG.info("Converted Patient Link");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixConfidentialPatients(String sharedStoragePath, UUID justThisService) {
LOG.info("Fixing Confidential Patients using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager();
Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class);
Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class);
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
Map<String, ResourceHistory> resourcesFixed = new HashMap<>();
Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Set<UUID> batchIdsToPutInProtocolQueue = new HashSet<>();
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f);
EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId);
ResourceFiler filer = new ResourceFiler(exchangeId, serviceId, systemId, null, null, 1);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers);
ProblemPreTransformer.transform(version, parsers, filer, helper);
ObservationPreTransformer.transform(version, parsers, filer, helper);
DrugRecordPreTransformer.transform(version, parsers, filer, helper);
IssueRecordPreTransformer.transform(version, parsers, filer, helper);
DiaryPreTransformer.transform(version, parsers, filer, helper);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient)parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getIsConfidential()
&& !patientParser.getDeleted()) {
PatientTransformer.createResource(patientParser, filer, helper, version);
}
}
patientParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class);
while (consultationParser.nextRecord()) {
if (consultationParser.getIsConfidential()
&& !consultationParser.getDeleted()) {
ConsultationTransformer.createResource(consultationParser, filer, helper, version);
}
}
consultationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
if (observationParser.getIsConfidential()
&& !observationParser.getDeleted()) {
ObservationTransformer.createResource(observationParser, filer, helper, version);
}
}
observationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class);
while (diaryParser.nextRecord()) {
if (diaryParser.getIsConfidential()
&& !diaryParser.getDeleted()) {
DiaryTransformer.createResource(diaryParser, filer, helper, version);
}
}
diaryParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class);
while (drugRecordParser.nextRecord()) {
if (drugRecordParser.getIsConfidential()
&& !drugRecordParser.getDeleted()) {
DrugRecordTransformer.createResource(drugRecordParser, filer, helper, version);
}
}
drugRecordParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class);
while (issueRecordParser.nextRecord()) {
if (issueRecordParser.getIsConfidential()
&& !issueRecordParser.getDeleted()) {
IssueRecordTransformer.createResource(issueRecordParser, filer, helper, version);
}
}
issueRecordParser.close();
filer.waitToFinish(); //just to close the thread pool, even though it's not been used
List<Resource> resources = filer.getNewResources();
for (Resource resource: resources) {
String patientId = IdHelper.getPatientId(resource);
UUID edsPatientId = UUID.fromString(patientId);
ResourceType resourceType = resource.getResourceType();
UUID resourceId = UUID.fromString(resource.getId());
boolean foundResourceInDbBatch = false;
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds != null) {
for (UUID batchId : batchIds) {
List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), resourceId);
if (resourceByExchangeBatches.isEmpty()) {
//if we've deleted data, this will be empty
continue;
}
foundResourceInDbBatch = true;
for (ResourceByExchangeBatch resourceByExchangeBatch : resourceByExchangeBatches) {
String json = resourceByExchangeBatch.getResourceData();
if (!Strings.isNullOrEmpty(json)) {
LOG.warn("JSON already in resource " + resourceType + " " + resourceId);
} else {
json = parserPool.composeString(resource);
resourceByExchangeBatch.setResourceData(json);
resourceByExchangeBatch.setIsDeleted(false);
resourceByExchangeBatch.setSchemaVersion("0.1");
LOG.info("Saved resource by batch " + resourceType + " " + resourceId + " in batch " + batchId);
UUID versionUuid = resourceByExchangeBatch.getVersion();
ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(resourceId, resourceType.toString(), versionUuid);
if (resourceHistory == null) {
throw new Exception("Failed to find resource history for " + resourceType + " " + resourceId + " and version " + versionUuid);
}
resourceHistory.setIsDeleted(false);
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
resourceHistory.setSchemaVersion("0.1");
resourceRepository.save(resourceByExchangeBatch);
resourceRepository.save(resourceHistory);
batchIdsToPutInProtocolQueue.add(batchId);
String key = resourceType.toString() + ":" + resourceId;
resourcesFixed.put(key, resourceHistory);
}
//if a patient became confidential, we will have deleted all resources for that
//patient, so we need to undo that too
//to undelete WHOLE patient record
//1. if THIS resource is a patient
//2. get all other deletes from the same exchange batch
//3. delete those from resource_by_exchange_batch (the deleted ones only)
//4. delete same ones from resource_history
//5. retrieve most recent resource_history
//6. if not deleted, add to resources fixed
if (resourceType == ResourceType.Patient) {
List<ResourceByExchangeBatch> resourcesInSameBatch = resourceRepository.getResourcesForBatch(batchId);
LOG.info("Undeleting " + resourcesInSameBatch.size() + " resources for batch " + batchId);
for (ResourceByExchangeBatch resourceInSameBatch: resourcesInSameBatch) {
if (!resourceInSameBatch.getIsDeleted()) {
continue;
}
//patient and episode resources will be restored by the above stuff, so don't try
//to do it again
if (resourceInSameBatch.getResourceType().equals(ResourceType.Patient.toString())
|| resourceInSameBatch.getResourceType().equals(ResourceType.EpisodeOfCare.toString())) {
continue;
}
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(resourceInSameBatch.getResourceId(), resourceInSameBatch.getResourceType(), resourceInSameBatch.getVersion());
mapperResourceByExchangeBatch.delete(resourceInSameBatch);
mapperResourceHistory.delete(deletedResourceHistory);
batchIdsToPutInProtocolQueue.add(batchId);
//check the most recent version of our resource, and if it's not deleted, add to the list to update the resource_by_service table
ResourceHistory mostRecentDeletedResourceHistory = resourceRepository.getCurrentVersion(resourceInSameBatch.getResourceType(), resourceInSameBatch.getResourceId());
if (mostRecentDeletedResourceHistory != null
&& !mostRecentDeletedResourceHistory.getIsDeleted()) {
String key2 = mostRecentDeletedResourceHistory.getResourceType().toString() + ":" + mostRecentDeletedResourceHistory.getResourceId();
resourcesFixed.put(key2, mostRecentDeletedResourceHistory);
}
}
}
}
}
}
//if we didn't find records in the DB to update, then create new ones from scratch
if (!foundResourceInDbBatch) {
//we can't generate a back-dated time UUID, but we need one so the resource_history
//table is in order. To get a suitable time UUID, we just pull out the first exchange batch for our exchange,
//and the batch ID is actually a time UUID that was allocated around the right time
ExchangeBatch firstBatch = exchangeBatchRepository.retrieveFirstForExchangeId(exchangeId);
//if there was no batch for the exchange, then the exchange wasn't processed at all. So skip this exchange
//and we'll pick up the same patient data in a following exchange
if (firstBatch == null) {
continue;
}
UUID versionUuid = firstBatch.getBatchId();
//find suitable batch ID
UUID batchId = null;
if (batchIds != null
&& batchIds.size() > 0) {
batchId = batchIds.get(batchIds.size()-1);
} else {
//create new batch ID if not found
ExchangeBatch exchangeBatch = new ExchangeBatch();
exchangeBatch.setBatchId(UUIDs.timeBased());
exchangeBatch.setExchangeId(exchangeId);
exchangeBatch.setInsertedAt(new Date());
exchangeBatch.setEdsPatientId(edsPatientId);
exchangeBatchRepository.save(exchangeBatch);
batchId = exchangeBatch.getBatchId();
//add to map for next resource
if (batchIds == null) {
batchIds = new ArrayList<>();
}
batchIds.add(batchId);
batchesPerPatient.put(edsPatientId, batchIds);
}
String json = parserPool.composeString(resource);
ResourceHistory resourceHistory = new ResourceHistory();
resourceHistory.setResourceId(resourceId);
resourceHistory.setResourceType(resourceType.toString());
resourceHistory.setVersion(versionUuid);
resourceHistory.setCreatedAt(new Date());
resourceHistory.setServiceId(serviceId);
resourceHistory.setSystemId(systemId);
resourceHistory.setIsDeleted(false);
resourceHistory.setSchemaVersion("0.1");
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
ResourceByExchangeBatch resourceByExchangeBatch = new ResourceByExchangeBatch();
resourceByExchangeBatch.setBatchId(batchId);
resourceByExchangeBatch.setExchangeId(exchangeId);
resourceByExchangeBatch.setResourceType(resourceType.toString());
resourceByExchangeBatch.setResourceId(resourceId);
resourceByExchangeBatch.setVersion(versionUuid);
resourceByExchangeBatch.setIsDeleted(false);
resourceByExchangeBatch.setSchemaVersion("0.1");
resourceByExchangeBatch.setResourceData(json);
resourceRepository.save(resourceHistory);
resourceRepository.save(resourceByExchangeBatch);
batchIdsToPutInProtocolQueue.add(batchId);
}
}
if (!batchIdsToPutInProtocolQueue.isEmpty()) {
exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchIdsToPutInProtocolQueue);
}
}
//update the resource_by_service table (and the resource_by_patient view)
for (ResourceHistory resourceHistory: resourcesFixed.values()) {
UUID latestVersionUpdatedUuid = resourceHistory.getVersion();
ResourceHistory latestVersion = resourceRepository.getCurrentVersion(resourceHistory.getResourceType(), resourceHistory.getResourceId());
UUID latestVersionUuid = latestVersion.getVersion();
//if there have been subsequent updates to the resource, then skip it
if (!latestVersionUuid.equals(latestVersionUpdatedUuid)) {
continue;
}
Resource resource = parserPool.parse(resourceHistory.getResourceData());
ResourceMetadata metadata = MetadataFactory.createMetadata(resource);
UUID patientId = ((PatientCompartment)metadata).getPatientId();
ResourceByService resourceByService = new ResourceByService();
resourceByService.setServiceId(resourceHistory.getServiceId());
resourceByService.setSystemId(resourceHistory.getSystemId());
resourceByService.setResourceType(resourceHistory.getResourceType());
resourceByService.setResourceId(resourceHistory.getResourceId());
resourceByService.setCurrentVersion(resourceHistory.getVersion());
resourceByService.setUpdatedAt(resourceHistory.getCreatedAt());
resourceByService.setPatientId(patientId);
resourceByService.setSchemaVersion(resourceHistory.getSchemaVersion());
resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata));
resourceByService.setResourceData(resourceHistory.getResourceData());
resourceRepository.save(resourceByService);
//call out to our patient search and person matching services
if (resource instanceof Patient) {
PatientLinkHelper.updatePersonId((Patient)resource);
PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (Patient)resource);
} else if (resource instanceof EpisodeOfCare) {
PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (EpisodeOfCare)resource);
}
}
if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) {
//find the config for our protocol queue
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so the deserialiser tries each in turn where necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) {
Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
}
}
LOG.info("Finished Fixing Confidential Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixDeletedAppointments(String sharedStoragePath, boolean saveChanges, UUID justThisService) {
LOG.info("Fixing Deleted Appointments using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager();
Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class);
Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class);
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class, dir, version, true, parsers);
//find any deleted patients
List<UUID> deletedPatientUuids = new ArrayList<>();
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getDeleted()) {
//find the EDS patient ID for this local guid
String patientGuid = patientParser.getPatientGuid();
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid);
}
deletedPatientUuids.add(edsPatientId);
}
}
patientParser.close();
//go through the appts file to find properly deleted appt GUIDS
List<UUID> deletedApptUuids = new ArrayList<>();
org.endeavourhealth.transform.emis.csv.schema.appointment.Slot apptParser = (org.endeavourhealth.transform.emis.csv.schema.appointment.Slot) parsers.get(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class);
while (apptParser.nextRecord()) {
if (apptParser.getDeleted()) {
String patientGuid = apptParser.getPatientGuid();
String slotGuid = apptParser.getSlotGuid();
if (!Strings.isNullOrEmpty(patientGuid)) {
String uniqueLocalId = EmisCsvHelper.createUniqueId(patientGuid, slotGuid);
UUID edsApptId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Appointment, uniqueLocalId);
deletedApptUuids.add(edsApptId);
}
}
}
apptParser.close();
for (UUID edsPatientId : deletedPatientUuids) {
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds == null) {
//if there are no batches for this patient, we'll be handling this data in another exchange
continue;
}
for (UUID batchId : batchIds) {
List<ResourceByExchangeBatch> apptWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Appointment.toString());
for (ResourceByExchangeBatch apptWrapper : apptWrappers) {
//ignore non-deleted appts
if (!apptWrapper.getIsDeleted()) {
continue;
}
//if the appt was deleted legitimately, then skip it
UUID apptId = apptWrapper.getResourceId();
if (deletedApptUuids.contains(apptId)) {
continue;
}
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(apptWrapper.getResourceId(), apptWrapper.getResourceType(), apptWrapper.getVersion());
if (saveChanges) {
mapperResourceByExchangeBatch.delete(apptWrapper);
mapperResourceHistory.delete(deletedResourceHistory);
}
LOG.info("Un-deleted " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " in batch " + batchId + " patient " + edsPatientId);
//now get the most recent instance of the appointment, and if it's NOT deleted, insert into the resource_by_service table
ResourceHistory mostRecentResourceHistory = resourceRepository.getCurrentVersion(apptWrapper.getResourceType(), apptWrapper.getResourceId());
if (mostRecentResourceHistory != null
&& !mostRecentResourceHistory.getIsDeleted()) {
Resource resource = parserPool.parse(mostRecentResourceHistory.getResourceData());
ResourceMetadata metadata = MetadataFactory.createMetadata(resource);
UUID patientId = ((PatientCompartment) metadata).getPatientId();
ResourceByService resourceByService = new ResourceByService();
resourceByService.setServiceId(mostRecentResourceHistory.getServiceId());
resourceByService.setSystemId(mostRecentResourceHistory.getSystemId());
resourceByService.setResourceType(mostRecentResourceHistory.getResourceType());
resourceByService.setResourceId(mostRecentResourceHistory.getResourceId());
resourceByService.setCurrentVersion(mostRecentResourceHistory.getVersion());
resourceByService.setUpdatedAt(mostRecentResourceHistory.getCreatedAt());
resourceByService.setPatientId(patientId);
resourceByService.setSchemaVersion(mostRecentResourceHistory.getSchemaVersion());
resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata));
resourceByService.setResourceData(mostRecentResourceHistory.getResourceData());
if (saveChanges) {
resourceRepository.save(resourceByService);
}
LOG.info("Restored " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " to resource_by_service table");
}
}
}
}
}
}
LOG.info("Finished Deleted Appointments Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
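/**
 * Runs fixSlotReferences(..) for every service whose publisher config name matches the given publisher.
 */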
private static void fixSlotReferencesForPublisher(String publisher) {
try {
ServiceDalI dal = DalProvider.factoryServiceDal();
List<Service> services = dal.getAll();
for (Service service: services) {
if (service.getPublisherConfigName() != null
&& service.getPublisherConfigName().equals(publisher)) {
fixSlotReferences(service.getId());
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
}
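/**
 * Repairs Appointment resources whose slot reference doesn't resolve to a real Slot.
 * For each patient of the service, every non-deleted version of each Appointment is checked,
 * and broken slot references are rebuilt from the appointment's own source ID and re-saved.
 */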
private static void fixSlotReferences(UUID serviceId) {
LOG.info("Fixing Slot References in Appointments for " + serviceId);
try {
//get patient IDs from patient search
List<UUID> patientIds = new ArrayList<>();
EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
String sql = "SELECT eds_id FROM resource_id_map WHERE service_id = '" + serviceId + "' AND resource_type = '" + ResourceType.Patient + "';";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
String patientUuid = rs.getString(1);
patientIds.add(UUID.fromString(patientUuid));
}
rs.close();
statement.close();
connection.close();
entityManager.close();
LOG.debug("Found " + patientIds.size() + " patients");
int done = 0;
int fixed = 0;
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, null, null, null, true, null);
//for each patient
for (UUID patientUuid: patientIds) {
//LOG.debug("Checking patient " + patientUuid);
//get all appointment resources
List<ResourceWrapper> appointmentWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.Appointment.toString());
for (ResourceWrapper apptWrapper: appointmentWrappers) {
//LOG.debug("Checking appointment " + apptWrapper.getResourceId());
List<ResourceWrapper> historyWrappers = resourceDal.getResourceHistory(serviceId, apptWrapper.getResourceType(), apptWrapper.getResourceId());
//the above returns most recent first, but we want to do them in order
historyWrappers = Lists.reverse(historyWrappers);
for (ResourceWrapper historyWrapper : historyWrappers) {
if (historyWrapper.isDeleted()) {
//LOG.debug("Appointment " + historyWrapper.getResourceId() + " is deleted");
continue;
}
String json = historyWrapper.getResourceData();
Appointment appt = (Appointment) FhirSerializationHelper.deserializeResource(json);
if (!appt.hasSlot()) {
//LOG.debug("Appointment " + historyWrapper.getResourceId() + " has no slot");
continue;
}
if (appt.getSlot().size() != 1) {
throw new Exception("Appointment " + appt.getId() + " has " + appt.getSlot().size() + " slot refs");
}
Reference slotRef = appt.getSlot().get(0);
//test if slot reference exists
Reference slotLocalRef = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, slotRef);
String slotSourceId = ReferenceHelper.getReferenceId(slotLocalRef);
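//Emis locally-unique IDs are composites of the form patientGuid:slotGuid (see EmisCsvHelper.createUniqueId),
//so a source ID without a colon indicates the reference never resolved to a real slot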
if (slotSourceId.indexOf(":") > -1) {
//LOG.debug("Appointment " + historyWrapper.getResourceId() + " has a valid slot");
continue;
}
//if not, correct slot reference
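//assumption: an Emis appointment and its slot come from the same source row, so the
//appointment's local ID can be reused as the slot's local ID when rebuilding the reference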
Reference apptEdsReference = ReferenceHelper.createReference(appt.getResourceType(), appt.getId());
Reference apptLocalReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, apptEdsReference);
String sourceId = ReferenceHelper.getReferenceId(apptLocalReference);
Reference slotLocalReference = ReferenceHelper.createReference(ResourceType.Slot, sourceId);
Reference slotEdsReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(slotLocalReference, csvHelper);
String slotEdsReferenceValue = slotEdsReference.getReference();
String oldSlotRefValue = slotRef.getReference();
slotRef.setReference(slotEdsReferenceValue);
//LOG.debug("Appointment " + historyWrapper.getResourceId() + " slot ref changed from " + oldSlotRefValue + " to " + slotEdsReferenceValue);
//save appointment
json = FhirSerializationHelper.serializeResource(appt);
historyWrapper.setResourceData(json);
saveResourceWrapper(serviceId, historyWrapper);
fixed++;
}
}
done++;
if (done % 1000 == 0) {
LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts");
}
}
LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts");
LOG.info("Finished Fixing Slot References in Appointments for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}
/*private static void fixReviews(String sharedStoragePath, UUID justThisService) {
LOG.info("Fixing Reviews using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
Map<String, Long> problemCodes = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
LOG.info("Doing Emis CSV exchange " + exchangeId + " with " + batches.size() + " batches");
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem problemParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (problemParser.nextRecord()) {
String patientGuid = problemParser.getPatientGuid();
String observationGuid = problemParser.getObservationGuid();
String key = patientGuid + ":" + observationGuid;
if (!problemCodes.containsKey(key)) {
problemCodes.put(key, null);
}
}
problemParser.close();
while (observationParser.nextRecord()) {
String patientGuid = observationParser.getPatientGuid();
String observationGuid = observationParser.getObservationGuid();
String key = patientGuid + ":" + observationGuid;
if (problemCodes.containsKey(key)) {
Long codeId = observationParser.getCodeId();
if (codeId == null) {
continue;
}
problemCodes.put(key, codeId);
}
}
observationParser.close();
LOG.info("Found " + problemCodes.size() + " problem codes so far");
String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f);
EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId);
//the observation parser was closed after the first pass, so open a fresh one before re-reading the file
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
String problemGuid = observationParser.getProblemGuid();
if (!Strings.isNullOrEmpty(problemGuid)) {
String patientGuid = observationParser.getPatientGuid();
Long codeId = observationParser.getCodeId();
if (codeId == null) {
continue;
}
String key = patientGuid + ":" + problemGuid;
Long problemCodeId = problemCodes.get(key);
if (problemCodeId == null
|| problemCodeId.longValue() != codeId.longValue()) {
continue;
}
//if here, our code is the same as the problem, so it's a review
String locallyUniqueId = patientGuid + ":" + observationParser.getObservationGuid();
ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, helper);
for (UUID systemId: systemIds) {
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid);
}
UUID edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId);
if (edsObservationId == null) {
//try observations as diagnostic reports, because it could be one of those instead
if (resourceType == ResourceType.Observation) {
resourceType = ResourceType.DiagnosticReport;
edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId);
}
if (edsObservationId == null) {
throw new Exception("Failed to find observation ID for service " + serviceId + " system " + systemId + " resourceType " + resourceType + " local ID " + locallyUniqueId);
}
}
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds == null) {
//if there are no batches for this patient, we'll be handling this data in another exchange
continue;
//throw new Exception("Failed to find batch ID for patient " + edsPatientId + " in exchange " + exchangeId + " for resource " + resourceType + " " + edsObservationId);
}
for (UUID batchId: batchIds) {
List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), edsObservationId);
if (resourceByExchangeBatches.isEmpty()) {
//if we've deleted data, this will be empty
continue;
//throw new Exception("No resources found for batch " + batchId + " resource type " + resourceType + " and resource id " + edsObservationId);
}
for (ResourceByExchangeBatch resourceByExchangeBatch: resourceByExchangeBatches) {
String json = resourceByExchangeBatch.getResourceData();
if (Strings.isNullOrEmpty(json)) {
throw new Exception("No JSON in resource " + resourceType + " " + edsObservationId + " in batch " + batchId);
}
Resource resource = parserPool.parse(json);
if (addReviewExtension((DomainResource)resource)) {
json = parserPool.composeString(resource);
resourceByExchangeBatch.setResourceData(json);
LOG.info("Changed " + resourceType + " " + edsObservationId + " to have extension in batch " + batchId);
resourceRepository.save(resourceByExchangeBatch);
UUID versionUuid = resourceByExchangeBatch.getVersion();
ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(edsObservationId, resourceType.toString(), versionUuid);
if (resourceHistory == null) {
throw new Exception("Failed to find resource history for " + resourceType + " " + edsObservationId + " and version " + versionUuid);
}
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
resourceRepository.save(resourceHistory);
ResourceByService resourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType.toString(), edsObservationId);
if (resourceByService != null) {
UUID serviceVersionUuid = resourceByService.getCurrentVersion();
if (serviceVersionUuid.equals(versionUuid)) {
resourceByService.setResourceData(json);
resourceRepository.save(resourceByService);
}
}
} else {
LOG.info("" + resourceType + " " + edsObservationId + " already has extension");
}
}
}
}
//1. find out resource type originally saved from
//2. retrieve from resource_by_exchange_batch
//3. update resource in resource_by_exchange_batch
//4. retrieve from resource_history
//5. update resource_history
//6. retrieve record from resource_by_service
//7. if resource_by_service version UUID matches the resource_history updated, then update that too
}
}
observationParser.close();
}
}
LOG.info("Finished Fixing Reviews");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
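//addReviewExtension (below, currently commented out) tags a resource with the IS_REVIEW extension,
//returning false if the extension was already present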
/*private static boolean addReviewExtension(DomainResource resource) {
if (ExtensionConverter.hasExtension(resource, FhirExtensionUri.IS_REVIEW)) {
return false;
}
Extension extension = ExtensionConverter.createExtension(FhirExtensionUri.IS_REVIEW, new BooleanType(true));
resource.addExtension(extension);
return true;
}*/
/*private static void runProtocolsForConfidentialPatients(String sharedStoragePath, UUID justThisService) {
LOG.info("Running Protocols for Confidential Patients using path " + sharedStoragePath + " and service " + justThisService);
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
//once we match the service, set this to null to do all other services
justThisService = null;
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
List<String> interestingPatientGuids = new ArrayList<>();
Map<UUID, Map<UUID, List<UUID>>> batchesPerPatientPerExchange = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
batchesPerPatientPerExchange.put(exchangeId, batchesPerPatient);
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getIsConfidential() || patientParser.getDeleted()) {
interestingPatientGuids.add(patientParser.getPatientGuid());
}
}
patientParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class);
while (consultationParser.nextRecord()) {
if (consultationParser.getIsConfidential()
&& !consultationParser.getDeleted()) {
interestingPatientGuids.add(consultationParser.getPatientGuid());
}
}
consultationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
if (observationParser.getIsConfidential()
&& !observationParser.getDeleted()) {
interestingPatientGuids.add(observationParser.getPatientGuid());
}
}
observationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class);
while (diaryParser.nextRecord()) {
if (diaryParser.getIsConfidential()
&& !diaryParser.getDeleted()) {
interestingPatientGuids.add(diaryParser.getPatientGuid());
}
}
diaryParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class);
while (drugRecordParser.nextRecord()) {
if (drugRecordParser.getIsConfidential()
&& !drugRecordParser.getDeleted()) {
interestingPatientGuids.add(drugRecordParser.getPatientGuid());
}
}
drugRecordParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class);
while (issueRecordParser.nextRecord()) {
if (issueRecordParser.getIsConfidential()
&& !issueRecordParser.getDeleted()) {
interestingPatientGuids.add(issueRecordParser.getPatientGuid());
}
}
issueRecordParser.close();
}
Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>();
for (String interestingPatientGuid: interestingPatientGuids) {
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, interestingPatientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + interestingPatientGuid);
}
for (UUID exchangeId: batchesPerPatientPerExchange.keySet()) {
Map<UUID, List<UUID>> batchesPerPatient = batchesPerPatientPerExchange.get(exchangeId);
List<UUID> batches = batchesPerPatient.get(edsPatientId);
if (batches != null) {
Set<UUID> batchesForExchange = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
if (batchesForExchange == null) {
batchesForExchange = new HashSet<>();
exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchesForExchange);
}
batchesForExchange.addAll(batches);
}
}
}
if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) {
//find the config for our protocol queue
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) {
Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
}
}
LOG.info("Finished Running Protocols for Confidential Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixOrgs() {
LOG.info("Posting orgs to protocol queue");
String[] orgIds = new String[]{
"332f31a2-7b28-47cb-af6f-18f65440d43d",
"c893d66b-eb89-4657-9f53-94c5867e7ed9"};
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
Map<UUID, Set<UUID>> exchangeBatches = new HashMap<>();
for (String orgId: orgIds) {
LOG.info("Doing org ID " + orgId);
UUID orgUuid = UUID.fromString(orgId);
try {
//select batch_id from ehr.resource_by_exchange_batch where resource_type = 'Organization' and resource_id = 8f465517-729b-4ad9-b405-92b487047f19 LIMIT 1 ALLOW FILTERING;
ResourceByExchangeBatch resourceByExchangeBatch = resourceRepository.getFirstResourceByExchangeBatch(ResourceType.Organization.toString(), orgUuid);
UUID batchId = resourceByExchangeBatch.getBatchId();
//select exchange_id from ehr.exchange_batch where batch_id = 1a940e10-1535-11e7-a29d-a90b99186399 LIMIT 1 ALLOW FILTERING;
ExchangeBatch exchangeBatch = exchangeBatchRepository.retrieveFirstForBatchId(batchId);
UUID exchangeId = exchangeBatch.getExchangeId();
Set<UUID> list = exchangeBatches.get(exchangeId);
if (list == null) {
list = new HashSet<>();
exchangeBatches.put(exchangeId, list);
}
list.add(batchId);
} catch (Exception ex) {
LOG.error("", ex);
break;
}
}
try {
//find the config for our protocol queue (which is in the inbound config)
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatches.keySet()) {
Set<UUID> batchIds = exchangeBatches.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
} catch (Exception ex) {
LOG.error("", ex);
return;
}
LOG.info("Finished posting orgs to protocol queue");
}*/
/*private static void findCodes() {
LOG.info("Finding missing codes");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT service_id, system_id, exchange_id, version FROM audit.exchange_transform_audit ALLOW FILTERING;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID serviceId = row.get(0, UUID.class);
UUID systemId = row.get(1, UUID.class);
UUID exchangeId = row.get(2, UUID.class);
UUID version = row.get(3, UUID.class);
ExchangeTransformAudit audit = auditRepository.getExchangeTransformAudit(serviceId, systemId, exchangeId, version);
String xml = audit.getErrorXml();
if (xml == null) {
continue;
}
String codePrefix = "Failed to find clinical code CodeableConcept for codeId ";
int codeIndex = xml.indexOf(codePrefix);
if (codeIndex > -1) {
int startIndex = codeIndex + codePrefix.length();
int tagEndIndex = xml.indexOf("<", startIndex);
String code = xml.substring(startIndex, tagEndIndex);
Service service = serviceRepository.getById(serviceId);
String name = service.getName();
LOG.info(name + " clinical code " + code + " from " + audit.getStarted());
continue;
}
codePrefix = "Failed to find medication CodeableConcept for codeId ";
codeIndex = xml.indexOf(codePrefix);
if (codeIndex > -1) {
int startIndex = codeIndex + codePrefix.length();
int tagEndIndex = xml.indexOf("<", startIndex);
String code = xml.substring(startIndex, tagEndIndex);
Service service = serviceRepository.getById(serviceId);
String name = service.getName();
LOG.info(name + " drug code " + code + " from " + audit.getStarted());
continue;
}
}
LOG.info("Finished finding missing codes");
}*/
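//Creates a cut-down copy of an Emis extract containing only the sample patients.
//The samplePatientsFile is plain text with one patient GUID per line; lines are
//trimmed and anything starting with "#" is treated as a comment, e.g.
//(illustrative values only):
//  # pilot practice sample
//  6B29FC40-CA47-1067-B31D-00DD010662DA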
private static void createEmisSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Emis Subset");
try {
Set<String> patientGuids = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
patientGuids.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createEmisSubsetForFile(sourceDir, destDir, patientGuids);
LOG.info("Finished Creating Emis Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void createEmisSubsetForFile(File sourceDir, File destDir, Set<String> patientGuids) throws Exception {
File[] files = sourceDir.listFiles();
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile : files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
createEmisSubsetForFile(sourceFile, destFile, patientGuids);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
CSVFormat format = CSVFormat.DEFAULT.withHeader();
InputStreamReader reader = new InputStreamReader(
new BufferedInputStream(
new FileInputStream(sourceFile)));
CSVParser parser = new CSVParser(reader, format);
String filterColumn = null;
Map<String, Integer> headerMap = parser.getHeaderMap();
if (headerMap.containsKey("PatientGuid")) {
filterColumn = "PatientGuid";
} else {
//if no patient column, just copy the file
parser.close();
LOG.info("Copying non-patient file " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
String[] columnHeaders = new String[headerMap.size()];
Iterator<String> headerIterator = headerMap.keySet().iterator();
while (headerIterator.hasNext()) {
String headerName = headerIterator.next();
int headerIndex = headerMap.get(headerName);
columnHeaders[headerIndex] = headerName;
}
BufferedWriter bw =
new BufferedWriter(
new OutputStreamWriter(
new FileOutputStream(destFile)));
CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders));
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String patientGuid = csvRecord.get(filterColumn);
if (Strings.isNullOrEmpty(patientGuid) //if empty, carry over this record
|| patientGuids.contains(patientGuid)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
}
}
}
private static void createTppSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating TPP Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createTppSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating TPP Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void createTppSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
File[] files = sourceDir.listFiles();
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile : files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
//LOG.info("Doing dir " + sourceFile);
createTppSubsetForFile(sourceFile, destFile, personIds);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
Charset encoding = Charset.forName("CP1252");
InputStreamReader reader =
new InputStreamReader(
new BufferedInputStream(
new FileInputStream(sourceFile)), encoding);
CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader();
CSVParser parser = new CSVParser(reader, format);
String filterColumn = null;
Map<String, Integer> headerMap = parser.getHeaderMap();
if (headerMap.containsKey("IDPatient")) {
filterColumn = "IDPatient";
} else if (name.equalsIgnoreCase("SRPatient.csv")) {
filterColumn = "RowIdentifier";
} else {
//if no patient column, just copy the file
parser.close();
LOG.info("Copying non-patient file " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
String[] columnHeaders = new String[headerMap.size()];
Iterator<String> headerIterator = headerMap.keySet().iterator();
while (headerIterator.hasNext()) {
String headerName = headerIterator.next();
int headerIndex = headerMap.get(headerName);
columnHeaders[headerIndex] = headerName;
}
BufferedWriter bw =
new BufferedWriter(
new OutputStreamWriter(
new FileOutputStream(destFile), encoding));
CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders));
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String patientId = csvRecord.get(filterColumn);
if (personIds.contains(patientId)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
/*} else {
//the 2.1 files are going to be a pain to split by patient, so just copy them over
LOG.info("Copying 2.1 file " + sourceFile);
copyFile(sourceFile, destFile);
}*/
}
}
}
private static void createVisionSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Vision Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createVisionSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating Vision Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void createVisionSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
File[] files = sourceDir.listFiles();
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile : files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
createVisionSubsetForFile(sourceFile, destFile, personIds);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL);
CSVParser parser = new CSVParser(br, format);
int filterColumn = -1;
if (name.contains("encounter_data") || name.contains("journal_data") ||
name.contains("patient_data") || name.contains("referral_data")) {
filterColumn = 0;
} else {
//if no patient column, just copy the file
parser.close();
LOG.info("Copying non-patient file " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
CSVPrinter printer = new CSVPrinter(bw, format);
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String patientId = csvRecord.get(filterColumn);
if (personIds.contains(patientId)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
}
}
}
private static void createHomertonSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Homerton Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("#")) {
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createHomertonSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating Homerton Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void createHomertonSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
File[] files = sourceDir.listFiles();
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile : files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
createHomertonSubsetForFile(sourceFile, destFile, personIds);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
CSVFormat format = CSVFormat.DEFAULT.withHeader();
CSVParser parser = new CSVParser(br, format);
int filterColumn = -1;
//PersonId column at 1
if (name.contains("ENCOUNTER") || name.contains("PATIENT")) {
filterColumn = 1;
} else if (name.contains("DIAGNOSIS")) {
//PersonId column at 13
filterColumn = 13;
} else if (name.contains("ALLERGY")) {
//PersonId column at 2
filterColumn = 2;
} else if (name.contains("PROBLEM")) {
//PersonId column at 4
filterColumn = 4;
} else {
//if no patient column, just copy the file (i.e. PROCEDURE)
parser.close();
LOG.info("Copying file without PatientId " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
Map<String, Integer> headerMap = parser.getHeaderMap();
String[] columnHeaders = new String[headerMap.size()];
Iterator<String> headerIterator = headerMap.keySet().iterator();
while (headerIterator.hasNext()) {
String headerName = headerIterator.next();
int headerIndex = headerMap.get(headerName);
columnHeaders[headerIndex] = headerName;
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders));
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String patientId = csvRecord.get(filterColumn);
if (personIds.contains(patientId)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
}
}
}
private static void createAdastraSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Adastra Subset");
try {
Set<String> caseIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
//adastra extract files are all keyed on caseId
caseIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createAdastraSubsetForFile(sourceDir, destDir, caseIds);
LOG.info("Finished Creating Adastra Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void createAdastraSubsetForFile(File sourceDir, File destDir, Set<String> caseIds) throws Exception {
File[] files = sourceDir.listFiles();
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile : files) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
createAdastraSubsetForFile(sourceFile, destFile, caseIds);
} else {
if (destFile.exists()) {
destFile.delete();
}
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(name);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
CSVFormat format = CSVFormat.DEFAULT.withDelimiter('|');
CSVParser parser = new CSVParser(br, format);
int filterColumn = -1;
//CaseRef column at 0
if (name.contains("NOTES") || name.contains("CASEQUESTIONS") ||
name.contains("OUTCOMES") || name.contains("CONSULTATION") ||
name.contains("CLINICALCODES") || name.contains("PRESCRIPTIONS") ||
name.contains("PATIENT")) {
filterColumn = 0;
} else if (name.contains("CASE")) {
//CaseRef column at 2
filterColumn = 2;
} else if (name.contains("PROVIDER")) {
//CaseRef column at 7
filterColumn = 7;
} else {
//if no patient column, just copy the file
parser.close();
LOG.info("Copying non-patient file " + sourceFile);
copyFile(sourceFile, destFile);
continue;
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
CSVPrinter printer = new CSVPrinter(bw, format);
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String caseId = csvRecord.get(filterColumn);
if (caseIds.contains(caseId)) {
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
}
}
}
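//Dumps the resource_current table for a service to disk, writing one tab-separated
//file per FHIR resource type (e.g. Patient.tsv) with columns resource_id and
//resource_json; quoting and escaping are disabled so the JSON is written verbatim.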
private static void exportFhirToCsv(UUID serviceId, String destinationPath) {
try {
File dir = new File(destinationPath);
if (!dir.exists()) {
dir.mkdirs();
}
Map<String, CSVPrinter> hmPrinters = new HashMap<>();
EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection connection = session.connection();
PreparedStatement ps = connection.prepareStatement("SELECT resource_id, resource_type, resource_data FROM resource_current");
LOG.debug("Running query");
ResultSet rs = ps.executeQuery();
LOG.debug("Got result set");
while (rs.next()) {
String id = rs.getString(1);
String type = rs.getString(2);
String json = rs.getString(3);
CSVPrinter printer = hmPrinters.get(type);
if (printer == null) {
String path = FilenameUtils.concat(dir.getAbsolutePath(), type + ".tsv");
FileWriter fileWriter = new FileWriter(new File(path));
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVFormat format = CSVFormat.DEFAULT
.withHeader("resource_id", "resource_json")
.withDelimiter('\t')
.withEscape((Character) null)
.withQuote((Character) null)
.withQuoteMode(QuoteMode.MINIMAL);
printer = new CSVPrinter(bufferedWriter, format);
hmPrinters.put(type, printer);
}
printer.printRecord(id, json);
}
for (String type : hmPrinters.keySet()) {
CSVPrinter printer = hmPrinters.get(type);
printer.flush();
printer.close();
}
ps.close();
entityManager.close();
} catch (Throwable t) {
LOG.error("", t);
}
}
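//Fixes TPP extract files where the IDOrganisationVisibleTo column is blank by
//filling in the supplied ODS code. Each CSV is rewritten to a ".FIXED" file which
//then replaces the original; aborts if a file contains a different ODS code.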
private static void fixTPPNullOrgs(String sourceDir, String orgODS) throws Exception {
final String COLUMN_ORG = "IDOrganisationVisibleTo";
File[] files = new File(sourceDir).listFiles();
if (files == null)
return;
LOG.info("Found " + files.length + " files in " + sourceDir);
for (File sourceFile : files) {
String sourceFileName = sourceFile.getName();
if (sourceFile.isDirectory()) {
fixTPPNullOrgs(sourceFile.getPath(), orgODS);
} else {
LOG.info("Checking file " + sourceFile);
//skip any non-CSV file
String ext = FilenameUtils.getExtension(sourceFileName);
if (!ext.equalsIgnoreCase("csv")) {
LOG.info("Skipping as not a CSV file");
continue;
}
Charset encoding = Charset.forName("CP1252");
InputStreamReader reader =
new InputStreamReader(
new BufferedInputStream(
new FileInputStream(sourceFile)), encoding);
CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader();
CSVParser parser = new CSVParser(reader, format);
Map<String, Integer> headerMap = parser.getHeaderMap();
if (!headerMap.containsKey(COLUMN_ORG)) {
//if no COLUMN_ORG column, ignore
LOG.info("Ignoring file with no " + COLUMN_ORG + " column: " + sourceFile);
parser.close();
continue;
}
String[] columnHeaders = new String[headerMap.size()];
Iterator<String> headerIterator = headerMap.keySet().iterator();
while (headerIterator.hasNext()) {
String headerName = headerIterator.next();
int headerIndex = headerMap.get(headerName);
columnHeaders[headerIndex] = headerName;
}
String destFileName = sourceFile.getPath().concat(".FIXED");
BufferedWriter bw =
new BufferedWriter(
new OutputStreamWriter(
new FileOutputStream(destFileName), encoding));
CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders));
//iterate down the file and look at Org Column
Iterator<CSVRecord> csvIterator = parser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String fileOrgODS = csvRecord.get(COLUMN_ORG);
//set the empty value to that orgODS value passed in
if (Strings.isNullOrEmpty(fileOrgODS)) {
Map <String, String> recordMap = csvRecord.toMap();
recordMap.put(COLUMN_ORG, orgODS);
List<String> alteredCsvRecord = new ArrayList<String>();
for (String key : columnHeaders) {
alteredCsvRecord.add(recordMap.get(key));
}
printer.printRecord(alteredCsvRecord);
printer.flush();
} else {
if (!fileOrgODS.equalsIgnoreCase(orgODS)) {
parser.close();
printer.flush();
printer.close();
throw new Exception("File contains different ODS codes to parameter value - aborting");
}
//write the record back unchanged
printer.printRecord(csvRecord);
printer.flush();
}
}
parser.close();
printer.close();
//Finally, delete the source file and rename the fixed file back to the source path
sourceFile.delete();
new File(destFileName).renameTo(new File(sourceFile.getPath()));
}
}
}
}
/*class ResourceFiler extends FhirResourceFiler {
public ResourceFiler(UUID exchangeId, UUID serviceId, UUID systemId, TransformError transformError,
List<UUID> batchIdsCreated, int maxFilingThreads) {
super(exchangeId, serviceId, systemId, transformError, batchIdsCreated, maxFilingThreads);
}
private List<Resource> newResources = new ArrayList<>();
public List<Resource> getNewResources() {
return newResources;
}
@Override
public void saveAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling saveAdminResource");
}
@Override
public void deleteAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling deleteAdminResource");
}
@Override
public void savePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception {
for (Resource resource: resources) {
if (mapIds) {
IdHelper.mapIds(getServiceId(), getSystemId(), resource);
}
newResources.add(resource);
}
}
@Override
public void deletePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling deletePatientResource");
}
}*/
/*
class MoveToS3Runnable implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(MoveToS3Runnable.class);
private List<FileInfo> files = null;
private AtomicInteger done = null;
public MoveToS3Runnable(List<FileInfo> files, AtomicInteger done) {
this.files = files;
this.done = done;
}
@Override
public void run() {
try {
doWork();
} catch (Exception ex) {
LOG.error("", ex);
}
}
private void doWork() throws Exception {
SourceFileMappingDalI db = DalProvider.factorySourceFileMappingDal();
//write to database
//Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>();
for (FileInfo info: files) {
String path = info.getFilePath();
InputStream inputStream = FileHelper.readFileFromSharedStorage(path);
ZipInputStream zis = new ZipInputStream(inputStream);
ZipEntry entry = zis.getNextEntry();
if (entry == null) {
throw new Exception("No entry in zip file " + path);
}
byte[] entryBytes = IOUtils.toByteArray(zis);
String json = new String(entryBytes);
inputStream.close();
ResourceFieldMappingAudit audit = ResourceFieldMappingAudit.readFromJson(json);
ResourceWrapper wrapper = new ResourceWrapper();
String versionStr = FilenameUtils.getBaseName(path);
wrapper.setVersion(UUID.fromString(versionStr));
Date d = info.getLastModified();
wrapper.setCreatedAt(d);
File f = new File(path);
f = f.getParentFile();
String resourceIdStr = f.getName();
wrapper.setResourceId(UUID.fromString(resourceIdStr));
f = f.getParentFile();
String resourceTypeStr = f.getName();
wrapper.setResourceType(resourceTypeStr);
f = f.getParentFile();
String serviceIdStr = f.getName();
wrapper.setServiceId(UUID.fromString(serviceIdStr));
Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>();
batch.put(wrapper, audit);
try {
db.saveResourceMappings(batch);
} catch (Exception ex) {
String msg = ex.getMessage();
if (msg.indexOf("Duplicate entry") == -1) {
throw ex;
}
}
}
//alternative (commented out): batch up mappings and save periodically
//if (batch.size() > 5) {
//    db.saveResourceMappings(batch);
//    batch.clear();
//    int nowDone = done.incrementAndGet();
//    if (nowDone % 1000 == 0) {
//        LOG.debug("Done " + nowDone + " / " + files.size());
//    }
//}
}
//alternative (commented out): flush any remaining mappings
//if (!batch.isEmpty()) {
//    db.saveResourceMappings(batch);
//    batch.clear();
//}
}*/
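/**
 * One-off fix-up task: back-fills the DataDate header on an exchange by
 * recalculating the last data date from the exchange body, then records the
 * exchange as done in a scratch tracking table (drewtest.exchange_ids).
 */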
class PopulateDataDateCallable implements Callable {
private static final Logger LOG = LoggerFactory.getLogger(PopulateDataDateCallable.class);
private static ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
private UUID exchangeId = null;
private AtomicInteger fixed = null;
public PopulateDataDateCallable(UUID exchangeId, AtomicInteger fixed) {
this.exchangeId = exchangeId;
this.fixed = fixed;
}
private void doWork() throws Exception {
Exchange exchange = exchangeDal.getExchange(exchangeId);
//check if already done
String existingVal = exchange.getHeader(HeaderKeys.DataDate);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
String version = exchange.getHeader(HeaderKeys.SystemVersion);
if (!Strings.isNullOrEmpty(existingVal)) {
LOG.info("Already done exchange " + exchange.getId() + " software " + software + " version " + version);
markAsDone();
return;
}
String body = exchange.getBody();
if (body.equals("[]")) {
LOG.error("Empty body found in exchange " + exchange.getId() + " software " + software + " version " + version);
markAsDone();
return;
}
Date lastDataDate = OpenEnvelope.calculateLastDataDate(software, version, body);
if (lastDataDate == null) {
LOG.error("Failed to calculate data for exchange " + exchange.getId() + " software " + software + " version " + version);
markAsDone();
return;
}
exchange.setHeaderAsDate(HeaderKeys.DataDate, lastDataDate);
exchangeDal.save(exchange);
//mark as done
markAsDone();
fixed.incrementAndGet();
}
private void markAsDone() throws Exception {
EntityManager auditEntityManager = ConnectionManager.getAuditEntityManager();
auditEntityManager.getTransaction().begin();
SessionImpl auditSession = (SessionImpl)auditEntityManager.getDelegate();
Connection auditConnection = auditSession.connection();
String sql = "UPDATE drewtest.exchange_ids SET done = 1 WHERE id = ?";
PreparedStatement ps = auditConnection.prepareStatement(sql);
ps.setString(1, exchangeId.toString());
ps.executeUpdate();
auditEntityManager.getTransaction().commit();
ps.close();
auditEntityManager.close();
//LOG.debug("Marked as done using: " + sql);
}
@Override
public Object call() throws Exception {
try {
doWork();
} catch (Throwable ex) {
LOG.error("Error with " + exchangeId, ex);
}
return null;
}
}
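/**
 * Minimal RabbitMQ consumer used for testing: logs each message body, sleeps
 * for a second to simulate work, then acknowledges the delivery.
 */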
class TestRabbitConsumer extends DefaultConsumer {
private static final Logger LOG = LoggerFactory.getLogger(TestRabbitConsumer.class);
public TestRabbitConsumer(Channel channel) {
super(channel);
}
@Override
public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] bytes) throws IOException {
long deliveryTag = envelope.getDeliveryTag();
String bodyStr = new String(bytes, "UTF-8");
LOG.info("Received exchange body: " + bodyStr);
try {
Thread.sleep(1000);
} catch (Throwable t) {
LOG.error("", t);
}
this.getChannel().basicAck(deliveryTag, false);
}
}
|
package org.voltdb.importclient.kafka;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import java.io.IOException;
import java.net.URI;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.voltcore.logging.Level;
import org.voltdb.client.ClientResponse;
import org.voltdb.client.ProcedureCallback;
import org.voltdb.importclient.kafka.KafkaStreamImporterConfig.HostAndPort;
import org.voltdb.importer.AbstractImporter;
import org.voltdb.importer.Invocation;
import org.voltdb.importer.formatter.FormatException;
import org.voltdb.importer.formatter.Formatter;
import kafka.api.ConsumerMetadataRequest;
import kafka.api.FetchRequest;
import kafka.api.FetchRequestBuilder;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.cluster.Broker;
import kafka.common.ErrorMapping;
import kafka.common.OffsetAndMetadata;
import kafka.common.TopicAndPartition;
import kafka.javaapi.ConsumerMetadataResponse;
import kafka.javaapi.FetchResponse;
import kafka.javaapi.OffsetCommitRequest;
import kafka.javaapi.OffsetCommitResponse;
import kafka.javaapi.OffsetFetchRequest;
import kafka.javaapi.OffsetFetchResponse;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.message.MessageAndOffset;
import kafka.network.BlockingChannel;
/**
* Implementation that imports from a Kafka topic. This is for a single partition of a Kafka topic.
*/
public class KafkaTopicPartitionImporter extends AbstractImporter
{
private final static PartitionOffsetRequestInfo LATEST_OFFSET =
new PartitionOffsetRequestInfo(kafka.api.OffsetRequest.LatestTime(), 1);
private final static PartitionOffsetRequestInfo EARLIEST_OFFSET =
new PartitionOffsetRequestInfo(kafka.api.OffsetRequest.EarliestTime(), 1);
private final int m_waitSleepMs = 1;
private final AtomicBoolean m_dead = new AtomicBoolean(false);
//Start with invalid so consumer will fetch it.
private final AtomicLong m_currentOffset = new AtomicLong(-1);
private long m_lastCommittedOffset = -1;
private final AtomicReference<BlockingChannel> m_offsetManager = new AtomicReference<BlockingChannel>();
private SimpleConsumer m_consumer = null;
private final TopicAndPartition m_topicAndPartition;
private final Gap m_gapTracker = new Gap(Integer.getInteger("KAFKA_IMPORT_GAP_LEAD", 32_768));
private final KafkaStreamImporterConfig m_config;
private HostAndPort m_coordinator;
private final FetchRequestBuilder m_fetchRequestBuilder;
public KafkaTopicPartitionImporter(KafkaStreamImporterConfig config)
{
m_config = config;
m_coordinator = m_config.getPartitionLeader();
m_topicAndPartition = new TopicAndPartition(config.getTopic(), config.getPartition());
m_fetchRequestBuilder = new FetchRequestBuilder().clientId(KafkaStreamImporterConfig.CLIENT_ID);
}
@Override
public URI getResourceID()
{
return m_config.getResourceID();
}
//Find leader for the topic+partition.
private PartitionMetadata findLeader() {
PartitionMetadata returnMetaData = null;
loop:
for (HostAndPort broker : m_config.getBrokers()) {
SimpleConsumer consumer = null;
try {
consumer = new SimpleConsumer(broker.getHost(), broker.getPort(), m_config.getSocketTimeout(), m_config.getFetchSize(), "findLeader");
List<String> topics = singletonList(m_topicAndPartition.topic());
TopicMetadataRequest req = new TopicMetadataRequest(topics);
kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);
List<TopicMetadata> metaData = resp.topicsMetadata();
for (TopicMetadata item : metaData) {
for (PartitionMetadata part : item.partitionsMetadata()) {
if (part.partitionId() == m_topicAndPartition.partition()) {
returnMetaData = part;
break loop;
}
}
}
} catch (Exception e) {
rateLimitedLog(Level.WARN, e, "Error in finding leader for " + m_topicAndPartition);
} finally {
KafkaStreamImporterConfig.closeConsumer(consumer);
}
}
if (returnMetaData == null) {
rateLimitedLog(Level.WARN, null, "Failed to find Leader for " + m_topicAndPartition);
}
return returnMetaData;
}
private int nextCorrelationId() {
FetchRequest fr = m_fetchRequestBuilder.addFetch(m_topicAndPartition.topic(),
m_topicAndPartition.partition(), 1L, m_config.getFetchSize())
.build();
return fr.correlationId();
}
//Find leader for this topic partition.
private HostAndPort findNewLeader() {
for (int i = 0; i < 3; i++) {
boolean shouldSleep = false;
PartitionMetadata metadata = findLeader();
if (metadata == null) {
shouldSleep = true;
} else if (metadata.leader() == null) {
shouldSleep = true;
} else if (m_config.getPartitionLeader().getHost().equalsIgnoreCase(metadata.leader().host()) && i == 0) {
// first time through if the leader hasn't changed give ZooKeeper a second to recover
// second time, assume the broker did recover before failover, or it was a non-Broker issue
shouldSleep = true;
} else {
return new HostAndPort(metadata.leader().host(), metadata.leader().port());
}
if (shouldSleep) {
backoffSleep(i+1);
}
}
//Unable to find a leader; return null so the caller rechecks.
rateLimitedLog(Level.WARN, null, "Failed to find new leader for " + m_topicAndPartition);
return null;
}
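//Discover the consumer-group offset coordinator for this topic+partition and
//(re)connect the offset-manager channel to it, retrying across all brokers.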
public void getOffsetCoordinator() {
KafkaStreamImporterException probeException = null;
OUTER: for (int attempts = 0; attempts < 3; ++attempts) {
for (HostAndPort hp: m_config.getBrokers()) {
BlockingChannel channel = null;
try {
channel = new BlockingChannel(
hp.getHost(), hp.getPort(),
BlockingChannel.UseDefaultBufferSize(),
BlockingChannel.UseDefaultBufferSize(),
m_config.getSocketTimeout()
);
channel.connect();
channel.send(new ConsumerMetadataRequest(
m_config.getGroupId(),
ConsumerMetadataRequest.CurrentVersion(),
nextCorrelationId(),
KafkaStreamImporterConfig.CLIENT_ID
));
ConsumerMetadataResponse metadataResponse = ConsumerMetadataResponse.readFrom(channel.receive().buffer());
if (metadataResponse.errorCode() == ErrorMapping.NoError()) {
Broker offsetManager = metadataResponse.coordinator();
m_coordinator = new HostAndPort(offsetManager.host(), offsetManager.port());
BlockingChannel consumer = m_offsetManager.getAndSet(
new BlockingChannel(
m_coordinator.getHost(), m_coordinator.getPort(),
BlockingChannel.UseDefaultBufferSize(),
BlockingChannel.UseDefaultBufferSize(),
m_config.getSocketTimeout()
)
);
m_offsetManager.get().connect();
info(null, "Offset Coordinator for " + m_topicAndPartition + " is " + offsetManager);
if (consumer != null) try {
consumer.disconnect();
} catch (Exception ignoreIt) {
}
probeException = null;
consumer = null;
break OUTER;
}
probeException = new KafkaStreamImporterException("Failed to get Offset Coordinator for %s",
ErrorMapping.exceptionFor(metadataResponse.errorCode()), m_topicAndPartition
);
} catch (Exception e) {
probeException = new KafkaStreamImporterException(
"Failed to get Offset Coordinator for %s", e, m_topicAndPartition
);
} finally {
if (channel != null) {
channel.disconnect();
}
}
}
if (probeException != null) {
warn(probeException, "Failed to query all brokers for the offset coordinator for " + m_topicAndPartition);
}
backoffSleep(attempts+1);
}
}
private OffsetResponse getTopicOffset(PartitionOffsetRequestInfo pori) {
final int partition = m_topicAndPartition.partition();
final String topic = m_topicAndPartition.topic();
kafka.javaapi.OffsetRequest earlyRq = new kafka.javaapi.OffsetRequest(
singletonMap(m_topicAndPartition, pori),
kafka.api.OffsetRequest.CurrentVersion(), KafkaStreamImporterConfig.CLIENT_ID
);
OffsetResponse response = null;
Throwable fault = null;
for (int attempts = 0; attempts < 3; ++attempts) try {
response = m_consumer.getOffsetsBefore(earlyRq);
if (response.hasError()) {
short code = response.errorCode(topic, partition);
fault = ErrorMapping.exceptionFor(code);
resetLeader();
} else {
return response;
}
} catch (Exception e) {
if (e instanceof IOException) {
resetLeader();
}
fault = e;
}
if (fault != null) {
rateLimitedLog(Level.ERROR, fault, "unable to fetch earliest offset for " + m_topicAndPartition);
response = null;
}
return response;
}
private OffsetFetchResponse getClientTopicOffset() {
final short version = 1;
OffsetFetchResponse rsp = null;
Throwable fault = null;
for (int attempts = 0; attempts < 3; ++attempts) try {
final OffsetFetchRequest rq = new OffsetFetchRequest(
m_config.getGroupId(),
singletonList(m_topicAndPartition),
version, nextCorrelationId(),
KafkaStreamImporterConfig.CLIENT_ID
);
BlockingChannel channel = m_offsetManager.get();
channel.send(rq.underlying());
rsp = OffsetFetchResponse.readFrom(channel.receive().buffer());
short code = rsp.offsets().get(m_topicAndPartition).error();
if (code != ErrorMapping.NoError()) {
fault = ErrorMapping.exceptionFor(code);
backoffSleep(attempts+1);
if (code == ErrorMapping.NotCoordinatorForConsumerCode()) {
getOffsetCoordinator();
} else if (code == ErrorMapping.ConsumerCoordinatorNotAvailableCode()) {
getOffsetCoordinator();
} else if (code == ErrorMapping.UnknownTopicOrPartitionCode()) {
getOffsetCoordinator();
fault = null;
continue;
}
} else {
fault = null;
break;
}
} catch (Exception e) {
if (e instanceof IOException) {
getOffsetCoordinator();
}
fault = e;
}
if (fault != null) {
rateLimitedLog(Level.WARN, fault, "unable to fetch earliest offset for " + m_topicAndPartition);
rsp = null;
}
return rsp;
}
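//Determine the starting offset: the consumer group's committed offset clamped to
//the partition's [earliest, latest] range, or -1 if the topic offsets cannot be fetched.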
public long getLastOffset() {
final int partition = m_topicAndPartition.partition();
final String topic = m_topicAndPartition.topic();
OffsetResponse response = getTopicOffset(EARLIEST_OFFSET);
if (response == null) return -1L;
long earliest = response.offsets(topic, partition)[0];
response = getTopicOffset(LATEST_OFFSET);
if (response == null) return -1L;
long latest = response.offsets(topic, partition)[0];
if (latest == earliest) return latest;
OffsetFetchResponse ofr = getClientTopicOffset();
if (ofr == null) return earliest;
long current = ofr.offsets().get(m_topicAndPartition).offset();
if (current < earliest) return earliest;
if (current < latest) return current;
return latest;
}
//Sleep with backoff.
private int backoffSleep(int fetchFailedCount) {
try {
Thread.sleep(1000 * fetchFailedCount++);
if (fetchFailedCount > 10) fetchFailedCount = 1;
} catch (InterruptedException ie) {
}
return fetchFailedCount;
}
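//Close the current consumer and re-point it at the (possibly changed) partition leader.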
private void resetLeader() {
KafkaStreamImporterConfig.closeConsumer(m_consumer);
m_consumer = null;
HostAndPort leaderBroker = findNewLeader();
if (leaderBroker == null) {
//point to the original leader, which will fail, and we fall back here again.
rateLimitedLog(Level.WARN, null, "Fetch failed to find a new leader; continuing with old leader: " + m_config.getPartitionLeader());
leaderBroker = m_config.getPartitionLeader();
} else {
if (!leaderBroker.equals(m_config.getPartitionLeader())) {
info(null, "Fetch Found new leader for " + m_topicAndPartition + " New Leader: " + leaderBroker);
m_config.setPartitionLeader(leaderBroker);
}
}
m_consumer = new SimpleConsumer(
leaderBroker.getHost(), leaderBroker.getPort(),
m_config.getSocketTimeout(), m_config.getFetchSize(), KafkaStreamImporterConfig.CLIENT_ID
);
}
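//Main fetch loop: restore the last committed offset, then repeatedly fetch message
//batches, transform each payload with the configured Formatter and submit it as a
//procedure invocation, committing offsets via the gap tracker.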
@Override
protected void accept() {
info(null, "Starting partition fetcher for " + m_topicAndPartition);
long submitCount = 0;
AtomicLong cbcnt = new AtomicLong(0);
@SuppressWarnings("unchecked")
Formatter<String> formatter = (Formatter<String>) m_config.getFormatterBuilder().create();
try {
//Start with the starting leader.
resetLeader();
int sleepCounter = 1;
while (shouldRun()) {
if (m_currentOffset.get() < 0) {
getOffsetCoordinator();
if (m_offsetManager.get() == null) {
sleepCounter = backoffSleep(sleepCounter);
continue;
}
long lastOffset = getLastOffset();
if (lastOffset == -1) {
sleepCounter = backoffSleep(sleepCounter);
continue;
}
m_gapTracker.resetTo(lastOffset);
m_lastCommittedOffset = lastOffset;
m_currentOffset.set(lastOffset);
if (m_currentOffset.get() < 0) {
//If we don't know the offset, fetch it, backing off if we fail.
sleepCounter = backoffSleep(sleepCounter);
info(null, "No valid offset found for " + m_topicAndPartition);
continue;
}
info(null, "Starting offset for " + m_topicAndPartition + " is " + m_currentOffset.get());
}
long currentFetchCount = 0;
//Build fetch request if we have a valid offset and not too many are pending.
FetchRequest req = m_fetchRequestBuilder.addFetch(m_topicAndPartition.topic(),
m_topicAndPartition.partition(), m_currentOffset.get(), m_config.getFetchSize())
.build();
FetchResponse fetchResponse = null;
try {
fetchResponse = m_consumer.fetch(req);
if (fetchResponse == null) {
sleepCounter = backoffSleep(sleepCounter);
continue;
}
} catch (Exception ex) {
rateLimitedLog(Level.WARN, ex, "Failed to fetch from " + m_topicAndPartition);
//See if it's a network error and find a new leader for this partition.
if (ex instanceof IOException) {
resetLeader();
//finding the leader in resetLeader will sleep and back off
continue;
}
sleepCounter = backoffSleep(sleepCounter);
continue;
}
if (fetchResponse.hasError()) {
// Something went wrong!
short code = fetchResponse.errorCode(m_topicAndPartition.topic(), m_topicAndPartition.partition());
warn(ErrorMapping.exceptionFor(code), "Failed to fetch messages for %s", m_topicAndPartition);
sleepCounter = backoffSleep(sleepCounter);
if (code == ErrorMapping.OffsetOutOfRangeCode()) {
// We asked for an invalid offset. For simple case ask for the last element to reset
info(null, "Invalid offset requested for " + m_topicAndPartition);
getOffsetCoordinator();
m_currentOffset.set(-1L);
continue;
}
resetLeader();
continue;
}
sleepCounter = 1;
for (MessageAndOffset messageAndOffset : fetchResponse.messageSet(m_topicAndPartition.topic(), m_topicAndPartition.partition())) {
//We may be catching up, so don't sleep.
currentFetchCount++;
long currentOffset = messageAndOffset.offset();
//if currentOffset is lower it means we have already pushed it; also check the pending queue.
if (currentOffset < m_currentOffset.get()) {
continue;
}
if (currentOffset > m_currentOffset.get()) {
if (isDebugEnabled()) {
debug(null, "Kafka messageAndOffset currentOffset %d is ahead of m_currentOffset %d.", currentOffset, m_currentOffset.get());
}
}
ByteBuffer payload = messageAndOffset.message().payload();
String line = new String(payload.array(),payload.arrayOffset(),payload.limit(),StandardCharsets.UTF_8);
try {
m_gapTracker.submit(messageAndOffset.nextOffset());
Invocation invocation = new Invocation(m_config.getProcedure(), formatter.transform(line));
TopicPartitionInvocationCallback cb = new TopicPartitionInvocationCallback(
messageAndOffset.nextOffset(), cbcnt, m_gapTracker, m_dead,
invocation);
if (!callProcedure(invocation, cb)) {
if (isDebugEnabled()) {
debug(null, "Failed to process Invocation possibly bad data: " + line);
}
m_gapTracker.commit(messageAndOffset.nextOffset());
}
} catch (FormatException e) {
rateLimitedLog(Level.WARN, e, "Failed to tranform data: %s" ,line);
m_gapTracker.commit(messageAndOffset.nextOffset());
}
submitCount++;
m_currentOffset.set(messageAndOffset.nextOffset());
if (!shouldRun()) {
break;
}
}
if (!shouldRun()) {
break;
}
//wait to fetch more if we read nothing last time.
if (currentFetchCount == 0) {
try {
Thread.sleep(m_waitSleepMs);
} catch (InterruptedException ie) {
}
}
commitOffset();
}
} catch (Exception ex) {
error(ex, "Failed to start topic partition fetcher for " + m_topicAndPartition);
} finally {
commitOffset();
KafkaStreamImporterConfig.closeConsumer(m_consumer);
m_consumer = null;
BlockingChannel channel = m_offsetManager.getAndSet(null);
if (channel != null) {
try { channel.disconnect(); } catch (Exception ignoreIt) {}
}
}
m_dead.compareAndSet(false, true);
info(null, "Partition fetcher stopped for " + m_topicAndPartition
+ " Last commit point is: " + m_lastCommittedOffset
+ " Callback Rcvd: " + cbcnt.get()
+ " Submitted: " + submitCount);
}
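//Commit the highest contiguous acknowledged offset to the offset coordinator,
//re-resolving the coordinator and retrying (up to three times) if it has moved.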
public boolean commitOffset() {
final short version = 1;
final long safe = m_gapTracker.commit(-1L);
if (safe > m_lastCommittedOffset) {
long now = System.currentTimeMillis();
OffsetCommitResponse offsetCommitResponse = null;
try {
BlockingChannel channel = null;
int retries = 3;
while (channel == null && --retries >= 0) {
if ((channel = m_offsetManager.get()) == null) {
getOffsetCoordinator();
rateLimitedLog(Level.ERROR, null, "Commit Offset Failed to get offset coordinator for " + m_topicAndPartition);
continue;
}
OffsetCommitRequest offsetCommitRequest = new OffsetCommitRequest(
m_config.getGroupId(),
singletonMap(m_topicAndPartition, new OffsetAndMetadata(safe, "commit", now)),
nextCorrelationId(),
KafkaStreamImporterConfig.CLIENT_ID,
version
);
channel.send(offsetCommitRequest.underlying());
offsetCommitResponse = OffsetCommitResponse.readFrom(channel.receive().buffer());
final short code = ((Short)offsetCommitResponse.errors().get(m_topicAndPartition)).shortValue();
if (code == ErrorMapping.NotCoordinatorForConsumerCode() || code == ErrorMapping.ConsumerCoordinatorNotAvailableCode()) {
info(null, "Not coordinator for committing offset for " + m_topicAndPartition + " Updating coordinator.");
getOffsetCoordinator();
channel = null;
continue;
}
}
if (retries < 0 || offsetCommitResponse == null) {
return false;
}
} catch (Exception e) {
rateLimitedLog(Level.ERROR, e, "Failed to commit Offset for " + m_topicAndPartition);
if (e instanceof IOException) {
getOffsetCoordinator();
}
return false;
}
final short code = ((Short) offsetCommitResponse.errors().get(m_topicAndPartition)).shortValue();
if (code != ErrorMapping.NoError()) {
final String msg = "Commit Offset Failed to commit for " + m_topicAndPartition;
rateLimitedLog(Level.ERROR, ErrorMapping.exceptionFor(code), msg);
return false;
}
m_lastCommittedOffset = safe;
}
return true;
}
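//Tracks out-of-order acknowledgements in a fixed-size ring buffer so that only a
//contiguous prefix of offsets is ever committed: submit() records an offset (and
//briefly blocks when the window is full), commit() advances the safe commit point.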
final class Gap {
long c = 0;
long s = -1L;
long offer = -1L;
final long [] lag;
private final long gapTrackerCheckMaxTimeMs = 2_000;
Gap(int leeway) {
if (leeway <= 0) {
throw new IllegalArgumentException("leeways is zero or negative");
}
lag = new long[leeway];
}
synchronized void submit(long offset) {
if (s == -1L && offset >= 0) {
lag[idx(offset)] = c = s = offset;
}
if ((offset - c) >= lag.length) {
offer = offset;
try {
wait(gapTrackerCheckMaxTimeMs);
} catch (InterruptedException e) {
rateLimitedLog(Level.WARN, e, "Gap tracker wait was interrupted for" + m_topicAndPartition);
}
}
if (offset > s) {
s = offset;
}
}
private final int idx(long offset) {
return (int)(offset % lag.length);
}
synchronized void resetTo(long offset) {
if (offset < 0) {
throw new IllegalArgumentException("offset is negative");
}
lag[idx(offset)] = s = c = offset;
offer = -1L;
}
synchronized long commit(long offset) {
if (offset <= s && offset > c) {
int ggap = (int)Math.min(lag.length, offset-c);
if (ggap == lag.length) {
rateLimitedLog(Level.WARN,
null, "Gap tracker moving topic commit point from %d to %d for "
+ m_topicAndPartition, c, (offset - lag.length + 1)
);
c = offset - lag.length + 1;
lag[idx(c)] = c;
}
lag[idx(offset)] = offset;
while (ggap > 0 && lag[idx(c)]+1 == lag[idx(c+1)]) {
++c;
}
if (offer >=0 && (offer-c) < lag.length) {
offer = -1L;
notify();
}
}
return c;
}
}
@Override
public String getName()
{
return "KafkaImporter";
}
@Override
protected void stop()
{
// Nothing to stop. shouldRun() should take care of exiting the work loop.
}
//Per topic per partition that we are responsible for.
//Callback for each invocation we have submitted.
private final static class TopicPartitionInvocationCallback implements ProcedureCallback
{
private final long m_offset;
private final AtomicLong m_cbcnt;
private final Gap m_tracker;
private final AtomicBoolean m_dontCommit;
private final Invocation m_invocation;
public TopicPartitionInvocationCallback(
final long offset,
final AtomicLong cbcnt,
final Gap tracker,
final AtomicBoolean dontCommit,
final Invocation invocation) {
m_offset = offset;
m_cbcnt = cbcnt;
m_tracker = tracker;
m_dontCommit = dontCommit;
m_invocation = invocation;
}
@Override
public void clientCallback(ClientResponse response) throws Exception {
m_cbcnt.incrementAndGet();
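// Only advance the commit point when committing is enabled and the server
// actually received the invocation; an unacked offset must not be marked
// as safely committed.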
if (!m_dontCommit.get() && response.getStatus() != ClientResponse.SERVER_UNAVAILABLE) {
m_tracker.commit(m_offset);
}
}
@SuppressWarnings("unused")
public Invocation getInvocation() {
return m_invocation;
}
}
}
|
package hex.glm;
import hex.DataInfo;
import hex.DataInfo.Row;
import hex.DataInfo.Rows;
import hex.FrameTask2;
import hex.glm.GLMModel.GLMParameters;
import hex.glm.GLMModel.GLMParameters.Link;
import hex.gram.Gram;
import hex.glm.GLMModel.GLMParameters.Family;
import jsr166y.CountedCompleter;
import water.H2O.H2OCountedCompleter;
import water.*;
import water.fvec.*;
import water.util.ArrayUtils;
import java.util.Arrays;
/**
* All GLM related distributed tasks:
*
* YMUTask - computes response means on the actual dataset (some rows may be ignored, e.g. rows with NAs and/or rows excluded by cross-validation)
* GLMGradientTask - computes the gradient at a given beta; used by L-BFGS and for the KKT condition check
* GLMLineSearchTask - computes residual deviance(s) at given beta(s); used by line search (both L-BFGS and IRLSM)
* GLMIterationTask - used by IRLSM to compute the weighted Gram matrix t(X) %*% W %*% X and the vector t(X) %*% W %*% z
*
* @author tomasnykodym
*/
public abstract class GLMTask {
static class YMUTask extends MRTask<YMUTask> {
double _yMin = Double.POSITIVE_INFINITY, _yMax = Double.NEGATIVE_INFINITY;
long _nobs;
double _wsum;
final int _responseId;
final int _weightId;
final int _offsetId;
final int _nums; // number of numeric columns
final int _numOff;
final boolean _computeWeightedSigma;
double [] _xsum; // weighted sum of x
double [] _xxsum; // weighted sum of x^2
double [] _yMu;
final int _nClasses;
public YMUTask(DataInfo dinfo, int nclasses, boolean computeWeightedSigma, H2OCountedCompleter cmp){
super(cmp);
_nums = dinfo._nums;
_numOff = dinfo._cats;
_responseId = dinfo.responseChunkId();
_weightId = dinfo._weights?dinfo.weightChunkId():-1;
_offsetId = dinfo._offset?dinfo.offsetChunkId():-1;
_nClasses = nclasses;
_computeWeightedSigma = computeWeightedSigma;
}
@Override public void setupLocal(){}
// public double _wY; // (Weighted) sum of the response
// public double _wYY; // (Weighted) sum of the squared response
// public double weightedSigma() {
//// double sampleCorrection = _count/(_count-1); //sample variance -> depends on the number of ACTUAL ROWS (not the weighted count)
// double sampleCorrection = 1; //this will make the result (and R^2) invariant to globally scaling the weights
// return _count <= 1 ? 0 : Math.sqrt(sampleCorrection*(_wYY/_wcount - (_wY*_wY)/(_wcount*_wcount)));
@Override public void map(Chunk [] chunks) {
_yMu = new double[_nClasses > 2?_nClasses:1];
boolean [] skip = MemoryManager.mallocZ(chunks[0]._len); // zeroed, i.e. keep every row by default
Chunk weight = chunks[_weightId];
for(int i = 0; i < chunks.length; ++i)
for(int r = chunks[i].nextNZ(-1); r < chunks[i]._len; r = chunks[i].nextNZ(r))
if(weight.atd(r) != 0 && chunks[i].isNA(r))
skip[r] = true;
Chunk response = chunks[_responseId];
if(_computeWeightedSigma) {
_xsum = MemoryManager.malloc8d(_nums);
_xxsum = MemoryManager.malloc8d(_nums);
}
for(int r = 0; r < response._len; ++r) {
double w = weight.atd(r);
if(skip[r] || w == 0) continue;
if(_computeWeightedSigma) {
for(int i = 0; i < _nums; ++i) {
double d = chunks[i+_numOff].atd(r);
_xsum[i] += w*d;
_xxsum[i] += w*d*d;
}
}
_wsum += w;
double d = w*response.atd(r);
assert !Double.isNaN(d);
if(_nClasses > 2)
_yMu[(int)d] += 1;
else
_yMu[0] += d;
if(d < _yMin)
_yMin = d;
if(d > _yMax)
_yMax = d;
_nobs++;
}
boolean has_skips = false;
for(boolean b:skip) has_skips |= b;
if(has_skips) {
if (weight instanceof C0DChunk && weight.atd(0) == 1) // shortcut for h2o-made binary weights
DKV.put(weight.vec().chunkKey(chunks[0].cidx()), new CBSChunk(skip));
else {
for(int i = 0; i < skip.length; ++i) // already got weights, need to set the zeros
if(skip[i]) weight.set(i,0);
}
}
}
@Override public void postGlobal() {
ArrayUtils.mult(_yMu,1.0/_wsum);
Futures fs = new Futures();
// _fVec.postWrite(fs); // we just overwrote the vec
fs.blockForPending();
}
@Override public void reduce(YMUTask ymt) {
if(_nobs > 0 && ymt._nobs > 0) {
_wsum += ymt._wsum;
ArrayUtils.add(_yMu,ymt._yMu);
_nobs += ymt._nobs;
if(_yMin > ymt._yMin)
_yMin = ymt._yMin;
if(_yMax < ymt._yMax)
_yMax = ymt._yMax;
if(_computeWeightedSigma) {
ArrayUtils.add(_xsum, ymt._xsum);
ArrayUtils.add(_xxsum, ymt._xxsum);
}
} else if (_nobs == 0) {
_wsum = ymt._wsum;
_yMu = ymt._yMu;
_nobs = ymt._nobs;
_yMin = ymt._yMin;
_yMax = ymt._yMax;
_xsum = ymt._xsum;
_xxsum = ymt._xxsum;
}
}
}
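/**
 * Evaluates the GLM likelihood at several trial points along a search
 * direction in a single pass over the data: step j uses
 * beta + initStep * step^j * direction, j = 0.._nSteps-1. The resulting
 * likelihoods are used by the line search to pick a step size.
 */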
static class GLMLineSearchTask extends MRTask<GLMLineSearchTask> {
final DataInfo _dinfo;
final double [] _beta;
final double [][] _betaMultinomial;
final int _c;
final double [] _direction;
final double _step;
final double _initStep;
final int _nSteps;
final GLMParameters _params;
boolean _useFasterMetrics = false;
public GLMLineSearchTask(DataInfo dinfo, GLMParameters params, double [] beta, double [] direction, double initStep, double step, int nsteps, CountedCompleter cc) {
super ((H2OCountedCompleter)cc);
_dinfo = dinfo;
_beta = beta;
_betaMultinomial = null;
_direction = direction;
_step = step;
_nSteps = nsteps;
_params = params;
_initStep = initStep;
_c = -1;
}
long _nobs;
double [] _likelihoods; // result
@Override
public void map(Chunk [] chks) {
Chunk responseChunk = chks[_dinfo.responseChunkId()];
boolean[] skip = MemoryManager.mallocZ(chks[0]._len);
double [][] eta = new double[responseChunk._len][_nSteps];
if(_dinfo._offset) {
Chunk offsetChunk = chks[_dinfo.offsetChunkId()];
for (int r = 0; r < eta.length; ++r)
Arrays.fill(eta[r], offsetChunk.atd(r));
}
Chunk weightsChunk = _dinfo._weights?chks[_dinfo.weightChunkId()]:new C0DChunk(1,chks[0]._len);
double [] beta = _beta;
double [] pk = _direction;
// intercept
for (int r = 0; r < eta.length; ++r) {
double b = beta[beta.length - 1];
double t = pk[beta.length - 1] * _initStep;
for (int j = 0; j < _nSteps; ++j, t *= _step) {
eta[r][j] += b + t;
}
}
// categoricals
for(int i = 0; i < _dinfo._cats; ++i) {
Chunk c = chks[i];
for(int r = 0; r < c._len; ++r) { // categoricals can not be sparse
if(skip[r] || c.isNA(r)) {
skip[r] = true;
continue;
}
int off = _dinfo.getCategoricalId(i,(int)c.at8(r)); // get pos in beta vector.
if(off != -1) {
double t = pk[off] * _initStep;
double b = beta[off];
for (int j = 0; j < _nSteps; ++j, t *= _step)
eta[r][j] += b + t;
}
}
}
// compute default eta offset for 0s
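// (a standardized zero still contributes (0 - normSub)*normMul*beta, so the
// constant -normSub*normMul*beta of every numeric column is folded into one
// shared per-step offset instead of being added to every row)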
final int numStart = _dinfo.numStart();
double [] off = new double[_nSteps];
if(_dinfo._normMul != null && _dinfo._normSub != null) {
for (int i = 0; i < _dinfo._nums; ++i) {
double b = beta[numStart+i];
double s = pk[numStart+i] * _initStep;
double d = _dinfo._normSub[i] * _dinfo._normMul[i];
for (int j = 0; j < _nSteps; ++j, s *= _step)
off[j] -= (b + s) * d;
}
}
// non-zero numbers
for (int i = 0; i < _dinfo._nums; ++i) {
Chunk c = chks[i + _dinfo._cats];
for (int r = c.nextNZ(-1); r < c._len; r = c.nextNZ(r)) {
if(skip[r] || c.isNA(r)) {
skip[r] = true;
continue;
}
double d = c.atd(r);
if(d == 0) continue;
if (_dinfo._normMul != null)
d *= _dinfo._normMul[i];
double b = beta[numStart+i];
double s = pk[numStart+i] * _initStep;
for (int j = 0; j < _nSteps; ++j, s *= _step)
eta[r][j] += (b + s) * d;
}
}
_likelihoods = MemoryManager.malloc8d(_nSteps);
for (int r = 0; r < chks[0]._len; ++r) {
double w = weightsChunk.atd(r);
if(w == 0 || responseChunk.isNA(r))
continue;
_nobs++;
double y = responseChunk.atd(r);
double yy = -1 + 2*y;
for (int i = 0; i < _nSteps; ++i) {
double e = eta[r][i] + off[i];
if (_params._family == Family.binomial && _useFasterMetrics) {
_likelihoods[i] += w*Math.log(1 + Math.exp(-yy * e));
} else {
double mu = _params.linkInv(e);
_likelihoods[i] += w*_params.likelihood(y,mu);
}
}
}
}
@Override public void reduce(GLMLineSearchTask glt){
ArrayUtils.add(_likelihoods,glt._likelihoods);
_nobs += glt._nobs;
}
}
static class GLMMultinomialLineSearchTask extends MRTask<GLMMultinomialLineSearchTask> {
private final DataInfo _dinfo;
public final double _initialStep;
public final double _stepDec;
public final int _nSteps;
private double [][] _betaBase;
private double [][] _direction2D;
private double [] _direction1D;
final int _c;
public long _nobs;
// output
double [] _likelihoods;
static double[][] reshapeAry(double[] ary, int n) {
int d = ary.length/n;
if(d*n != ary.length)
throw new IllegalArgumentException("Array length is not multiple of n");;
double [][] res = new double[d][n];
int off = 0;
for(int i = 0; i < d; ++i)
for(int j = 0; j < n; ++j)
res[i][j] = ary[off++];
return res;
}
GLMMultinomialLineSearchTask(H2OCountedCompleter cc, DataInfo dinfo, double [] beta, double [] direction, double initialStep, double stepDec, int nSteps) {
this(cc,dinfo,reshapeAry(beta,dinfo.fullN()+1),reshapeAry(direction,dinfo.fullN()+1),initialStep, stepDec, nSteps);
}
GLMMultinomialLineSearchTask(H2OCountedCompleter cc, DataInfo dinfo, double [][] beta, double [] direction, int c, double initialStep, double stepDec, int nSteps) {
super(cc);
_dinfo = dinfo;
_betaBase = beta;
_direction1D = direction;
_direction2D = null;
_c = c;
_initialStep = initialStep;
_stepDec = stepDec;
_nSteps = nSteps;
}
GLMMultinomialLineSearchTask(H2OCountedCompleter cc, DataInfo dinfo, double [][] beta, double [][] direction, double initialStep, double stepDec, int nSteps) {
super(cc);
_dinfo = dinfo;
_betaBase = beta;
_direction2D = direction;
_direction1D = null;
_initialStep = initialStep;
_stepDec = stepDec;
_nSteps = nSteps;
_c = -1;
}
public void map(Chunk [] chks) {
double t = _initialStep;
_nobs = 0;
double [][] beta = _betaBase.clone();
for(int i = 0; i < beta.length; ++i)
beta[i] = beta[i].clone();
Rows rows = _dinfo.rows(chks);
double [] etas = new double[_betaBase.length];
double [] etaOffsets = new double [etas.length];
double [] exps = new double[_betaBase.length+1];
double [] likelihoods = new double[_nSteps];
for(int i = 0; i < _nSteps; i++) {
for(int j = 0; j < _betaBase.length; ++j) {
double [] base = _betaBase[j];
double [] b = beta[j];
if(_direction2D != null) {
double [] d = _direction2D[j];
for(int k = 0; k < base.length; ++k)
b[k] = base[k] + t*d[k];
} else if(j == _c){
for(int k = 0; k < base.length; ++k)
b[k] = base[k] + t*_direction1D[k];
}
if(rows._sparse)
etaOffsets[j] = GLM.sparseOffset(b,_dinfo);
}
for(int j = 0; j < rows._nrows; ++j) {
Row row = rows.row(j);
if(i == 0)++_nobs;
double logSumExp = computeMultinomialEtas(row,beta,etas,etaOffsets,exps);
likelihoods[i] -= row.weight * (etas[(int)row.response(0)] - logSumExp);
}
t *= _stepDec;
}
_likelihoods = likelihoods;
_betaBase = null;
_direction1D = null;
_direction2D = null;
}
public void reduce(GLMMultinomialLineSearchTask glmt) {
ArrayUtils.add(_likelihoods,glmt._likelihoods);
_nobs += glmt._nobs;
}
}
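/**
 * Computes the linear predictors (etas) of all classes for one row and turns
 * them into class probabilities stored in exps[1..nclasses]; exps[0] is set to
 * the index of the most likely class. The per-row maximum is subtracted before
 * exponentiation (the standard log-sum-exp trick) to avoid overflow. Returns
 * log(sum_c exp(eta_c)), the log-sum-exp term of the multinomial likelihood.
 */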
static double computeMultinomialEtas(Row row, double [][]beta, final double [] etas, double [] etaOffsets, double [] exps) {
double maxRow = 0;
for (int c = 0; c < beta.length; ++c) {
double e = etaOffsets[c] + row.innerProduct(beta[c]);
if (e > maxRow) maxRow = e;
etas[c] = e;
}
double sumExp = 0;
for(int c = 0; c < beta.length; ++c) {
double x = Math.exp(etas[c] - maxRow);
sumExp += x;
exps[c+1] = x;
}
double reg = 1.0/(sumExp);
for(int c = 0; c < etas.length; ++c)
exps[c+1] *= reg;
exps[0] = 0;
exps[0] = ArrayUtils.maxIndex(exps)-1;
return Math.log(sumExp) + maxRow;
}
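/**
 * Computes the multinomial likelihood and its gradient at the given beta:
 * the gradient block of class c accumulates weight * (p_c - 1{y == c}) * x
 * over all rows, plus the l2 penalty term added in postGlobal().
 */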
static class GLMMultinomialGradientTask extends MRTask<GLMMultinomialGradientTask> {
final double [][] _beta;
final DataInfo _dinfo;
final double _currentLambda;
final double _reg;
final double [] _ymu;
double [] _gradient;
long _nobs;
double _wsum;
double _likelihood;
GLMValidation _val;
final boolean _validate;
public GLMMultinomialGradientTask(DataInfo dinfo, double lambda, double [] ymu, double[][] beta, double reg,boolean validate, H2OCountedCompleter cmp) {
super(cmp);
_dinfo = dinfo;
_currentLambda = lambda;
_reg = reg;
_validate = validate;
_beta = beta;
_ymu = ymu;
}
private final void processRow(Row row, double [][] beta, final double [] etas, double [] etaOffsets, final double [] exps) {
int y = (int)row.response(0);
assert y == row.response(0);
double logSumExp = computeMultinomialEtas(row, beta, etas, etaOffsets, exps);
int P = _dinfo.fullN()+1;
_likelihood -= row.weight * (etas[(int)row.response(0)] - logSumExp);
_val.add(y,exps, row.weight,row.offset);
int numOff = _dinfo.numStart();
for(int c = 0; c < beta.length; ++c) {
double val = row.weight * (exps[c+1] - (y == c?1:0));
for (int j = 0; j < row.nBins; ++j)
_gradient[c*P + row.binIds[j]] += val;
for (int j = 0; j < row.nNums; ++j)
_gradient[c*P + (row.numIds == null ? j + numOff : row.numIds[j])] += row.numVals[j] * val;
_gradient[(c+1) * P - 1] += val;
}
}
public void map(Chunk [] chks) {
int rank = 0;
for(int i = 0; i < _beta.length; ++i)
for(int j = 0; j < _beta[i].length; ++j)
if(_beta[i][j] != 0)
++rank;
_gradient = new double[_beta.length*_beta[0].length];
_val = new GLMValidation(_dinfo._adaptedFrame.lastVec().domain(), _ymu, new GLMParameters(Family.multinomial),rank,0,_validate, true);
final int P = _beta[0].length;
double [] etas = new double[_beta.length];
double [] exps = new double[_beta.length+1];
double [] etaOffsets = new double[_beta.length] ;
Rows rows = _dinfo.rows(chks);
if(rows._sparse)
for(int i = 0; i < _beta.length; ++i)
etaOffsets[i] = GLM.sparseOffset(_beta[i],_dinfo);
for(int r = 0; r < rows._nrows; ++r) {
final Row row = rows.row(r);
if(row.bad || row.weight == 0) continue;
_wsum += row.weight;
_nobs++;
processRow(row, _beta, etas, etaOffsets, exps);
}
int off = _dinfo.numStart();
for(int c = 0; c < _beta.length; ++c) {
if (rows._sparse && _dinfo._normSub != null) { // adjust for centering
double val = _gradient[(c+1)*P-1];
for (int i = 0; i < _dinfo._nums; ++i)
_gradient[c * P + off + i] -= val * _dinfo._normSub[i] * _dinfo._normMul[i];
}
}
}
@Override
public void reduce(GLMMultinomialGradientTask gmgt){
ArrayUtils.add(_gradient,gmgt._gradient);
_nobs += gmgt._nobs;
_wsum += gmgt._wsum;
_likelihood += gmgt._likelihood;
_val.reduce(gmgt._val);
}
@Override public void postGlobal(){
ArrayUtils.mult(_gradient, _reg);
int P = _beta[0].length;
for(int c = 0; c < _beta.length; ++c)
for(int j = 0; j < P-1; ++j)
_gradient[c*P+j] += _currentLambda * _beta[c][j];
}
}
static class GLMGradientTask extends MRTask<GLMGradientTask> {
final GLMParameters _params;
GLMValidation _val;
double _currentLambda;
final double [] _beta;
final protected DataInfo _dinfo;
final double _reg;
public double [] _gradient;
public double _likelihood;
protected transient boolean [] _skip;
boolean _validate;
long _nobs;
double _wsum;
double _ymu;
final boolean _intercept;
public GLMGradientTask(DataInfo dinfo, GLMParameters params, double lambda, double[] beta, double reg,boolean intercept){this(dinfo,params, lambda, beta,reg, intercept, null);}
public GLMGradientTask(DataInfo dinfo, GLMParameters params, double lambda, double[] beta, double reg, boolean intercept, H2OCountedCompleter cc){
super(cc);
_dinfo = dinfo;
_params = params;
_beta = beta;
_reg = reg;
_currentLambda = lambda;
_intercept = intercept;
}
public GLMGradientTask setValidate(double ymu, boolean validate) {
_ymu = ymu;
_validate = validate;
return this;
}
protected void goByRows(Chunk [] chks, boolean [] skp){
Row row = _dinfo.newDenseRow();
double [] g = _gradient;
double [] b = _beta;
for(int rid = 0; rid < chks[0]._len; ++rid) {
if(skp[rid]) continue;
row = _dinfo.extractDenseRow(chks, rid, row);
if(row.bad || row.weight == 0) continue;
_nobs++;
_wsum += row.weight;
double eta = row.innerProduct(b) + row.offset;
double mu = _params.linkInv(eta);
_val.add(row.response(0), mu, row.weight, row.offset);
_likelihood += row.weight*_params.likelihood(row.response(0), mu);
double var = _params.variance(mu);
if(var < 1e-6) var = 1e-6; // to avoid numerical problems with 0 variance
double gval =row.weight * (mu-row.response(0)) / (var * _params.linkDeriv(mu));
// categoricals
for(int i = 0; i < row.nBins; ++i)
g[row.binIds[i]] += gval;
int off = _dinfo.numStart();
// numbers
for(int j = 0; j < _dinfo._nums; ++j)
g[j + off] += row.numVals[j] * gval;
// intercept
if(_dinfo._intercept)
g[g.length-1] += gval;
}
}
@Override
public void postGlobal(){
ArrayUtils.mult(_gradient,_reg);
for(int j = 0; j < _beta.length - (_dinfo._intercept?1:0); ++j)
_gradient[j] += _currentLambda * _beta[j];
}
// compute linear estimate by summing contributions for all columns
// (looping by column in the outer loop to have good access pattern and to exploit sparsity)
protected final double [] computeEtaByCols(Chunk [] chks, boolean [] skip) {
double [] eta = MemoryManager.malloc8d(chks[0]._len);
if(_dinfo._intercept)
Arrays.fill(eta,_beta[_beta.length-1]);
if(_dinfo._offset) {
for (int i = 0; i < eta.length; ++i) {
if(!skip[i]) {
eta[i] += chks[_dinfo.offsetChunkId()].atd(i);
if (Double.isNaN(eta[i]))
skip[i] = true;
}
}
}
double [] b = _beta;
// do categoricals first
for(int i = 0; i < _dinfo._cats; ++i) {
Chunk c = chks[i];
for(int r = 0; r < c._len; ++r) { // categoricals can not be sparse
if(skip[r] || c.isNA(r)) {
skip[r] = true;
continue;
}
int off = _dinfo.getCategoricalId(i,(int)c.at8(r));
if(off != -1)
eta[r] += b[off];
}
}
final int numStart = _dinfo.numStart();
// compute default eta offset for 0s
if(_dinfo._normMul != null && _dinfo._normSub != null) {
double off = 0;
for (int i = 0; i < _dinfo._nums; ++i)
off -= b[numStart + i] * _dinfo._normSub[i] * _dinfo._normMul[i];
for(int r = 0; r < chks[0]._len; ++r)
eta[r] += off;
}
// now numerics
for (int i = 0; i < _dinfo._nums; ++i) {
Chunk c = chks[i + _dinfo._cats];
for (int r = c.nextNZ(-1); r < c._len; r = c.nextNZ(r)) {
if(skip[r] || c.isNA(r)) {
skip[r] = true;
continue;
}
double d = c.atd(r);
if (_dinfo._normMul != null)
d *= _dinfo._normMul[i];
eta[r] += b[numStart + i] * d;
}
}
return eta;
}
protected void goByCols(Chunk [] chks, boolean [] skp){
int numStart = _dinfo.numStart();
double [] eta = computeEtaByCols(chks, skp);
double [] b = _beta;
double [] g = _gradient;
Chunk offsetChunk = _dinfo._offset?chks[_dinfo.offsetChunkId()]:new C0DChunk(0,chks[0]._len);
Chunk weightChunk = _dinfo._weights ?chks[_dinfo.weightChunkId()]:new C0DChunk(1,chks[0]._len);
Chunk responseChunk = chks[_dinfo.responseChunkId()];
double eta_sum = 0;
// compute the predicted mean and variance and ginfo for each row
for(int r = 0; r < chks[0]._len; ++r){
if(skp[r] || responseChunk.isNA(r))
continue;
double w = weightChunk.atd(r);
if(w == 0 || Double.isNaN(w))
continue;
_nobs++;
_wsum += w;
assert w > 0;
double y = responseChunk.atd(r);
double mu = _params.linkInv(eta[r]);
_val.add(y, mu, w, offsetChunk.atd(r));
_likelihood += w * _params.likelihood(y, mu);
double var = _params.variance(mu);
if(var < 1e-6) var = 1e-6; // to avoid numerical problems with 0 variance
eta[r] = w * (mu-y) / (var * _params.linkDeriv(mu));
eta_sum += eta[r];
}
// finally go over the columns again and compute ginfo for each column
// first handle eta offset and intercept
if(_dinfo._intercept)
g[g.length-1] = eta_sum;
if(_dinfo._normMul != null && _dinfo._normSub != null)
for(int i = 0; i < _dinfo._nums; ++i)
g[numStart + i] = -_dinfo._normSub[i]*_dinfo._normMul[i]*eta_sum;
// categoricals
for(int i = 0; i < _dinfo._cats; ++i) {
Chunk c = chks[i];
for(int r = 0; r < c._len; ++r) { // categoricals can not be sparse
if(skp[r]) continue;
int off = _dinfo.getCategoricalId(i,(int)chks[i].at8(r));
if(off != -1)
g[off] += eta[r];
}
}
// numerics
for (int i = 0; i < _dinfo._nums; ++i) {
Chunk c = chks[i + _dinfo._cats];
for (int r = c.nextNZ(-1); r < c._len; r = c.nextNZ(r)) {
if(skp[r] || c.isNA(r))
continue;
double d = c.atd(r);
if (_dinfo._normMul != null)
d = d*_dinfo._normMul[i];
g[numStart + i] += eta[r] * d;
}
}
_skip = skp;
}
private boolean mostlySparse(Chunk [] chks){
int cnt = 0;
for(Chunk chk:chks)
if(chk.isSparse())
++cnt;
return cnt >= chks.length >> 1;
}
private boolean _forceRows;
private boolean _forceCols;
public GLMGradientTask forceColAccess() {
_forceCols = true;
_forceRows = false;
return this;
}
public GLMGradientTask forceRowAccess() {
_forceCols = false;
_forceRows = true;
return this;
}
public void map(Chunk [] chks){
int rank = 0;
for(int i = 0; i < _beta.length; ++i)
if(_beta[i] != 0)
++rank;
_gradient = MemoryManager.malloc8d(_beta.length);
String [] domain = _dinfo._adaptedFrame.lastVec().domain();
if(domain == null && _params._family == Family.binomial)
domain = new String[]{"0","1"}; // special hard-coded case for binomial on binary col
_val = new GLMValidation(domain, new double[]{_ymu}, _params,rank,0,_validate,_intercept);
boolean [] skp = MemoryManager.mallocZ(chks[0]._len);
if(_forceCols || (!_forceRows && (chks.length >= 100 || mostlySparse(chks))))
goByCols(chks, skp);
else
goByRows(chks, skp);
// apply reg
}
public void reduce(GLMGradientTask grt) {
_likelihood += grt._likelihood;
_nobs += grt._nobs;
_wsum += grt._wsum;
_val.reduce(grt._val);
ArrayUtils.add(_gradient, grt._gradient);
}
}
/**
* Task with simplified gradient (ginfo) computation for logistic regression (and least squares).
*/
public static class LBFGS_LogisticGradientTask extends GLMGradientTask {
public LBFGS_LogisticGradientTask(DataInfo dinfo, GLMParameters params, double lambda, double[] beta, double reg, boolean intercept) {
super(dinfo, params, lambda, beta, reg, intercept);
}
@Override protected void goByRows(Chunk [] chks, boolean [] skp){
Row row = _dinfo.newDenseRow();
double [] g = _gradient;
double [] b = _beta;
for(int rid = 0; rid < chks[0]._len; ++rid) {
if(skp[rid])continue;
row = _dinfo.extractDenseRow(chks, rid, row);
if(row.bad || row.weight == 0) continue;
double y = -1 + 2*row.response(0);
++_nobs;
double eta = row.innerProduct(b) + row.offset;
double gval;
double d = 1 + Math.exp(-y * eta);
_likelihood += row.weight*Math.log(d);
gval = row.weight*-y*(1-1.0/d);
// categoricals
for(int i = 0; i < row.nBins; ++i)
g[row.binIds[i]] += gval;
int off = _dinfo.numStart();
// numbers
for(int j = 0; j < _dinfo._nums; ++j)
g[j + off] += row.numVals[j] * gval;
// intercept
if(_dinfo._intercept)
g[g.length-1] += gval;
}
}
@Override protected void goByCols(Chunk [] chks, boolean [] skp){
int numStart = _dinfo.numStart();
double [] eta = computeEtaByCols(chks,skp);
double [] g = _gradient;
Chunk offsetChunk = null;
int nxs = chks.length-1; // -1 for response
if(_dinfo._offset) {
nxs -= 1;
offsetChunk = chks[nxs];
}
Chunk responseChunk = chks[nxs];
Chunk weightsChunk = _dinfo._weights?chks[_dinfo.weightChunkId()]:new C0DChunk(1,chks[0]._len);
double eta_sum = 0;
// compute the predicted mean and variance and ginfo for each row
for(int r = 0; r < chks[0]._len; ++r){
double w = weightsChunk.atd(r);
if(skp[r] || responseChunk.isNA(r) || w == 0)
continue;
++_nobs;
double off = (_dinfo._offset?offsetChunk.atd(r):0);
double e = eta[r] + off;
switch(_params._family) {
case gaussian:
double diff = e - responseChunk.atd(r);
_likelihood += w*diff*diff;
eta[r] = diff;
break;
case binomial:
double y = -1 + 2*responseChunk.atd(r);
double d = 1 + Math.exp(-y * e);
_likelihood += w*Math.log(d);
eta[r] = w * -y * (1 - 1.0 / d);
break;
default:
throw H2O.unimpl();
}
eta_sum += eta[r];
}
// finally go over the columns again and compute ginfo for each column
// first handle eta offset and intercept
if(_dinfo._intercept)
g[g.length-1] = eta_sum;
if(_dinfo._normMul != null && _dinfo._normSub != null)
for(int i = 0; i < _dinfo._nums; ++i)
g[numStart + i] = -_dinfo._normSub[i]*_dinfo._normMul[i]*eta_sum;
// categoricals
for(int i = 0; i < _dinfo._cats; ++i) {
Chunk c = chks[i];
for(int r = 0; r < c._len; ++r) { // categoricals can not be sparse
if(skp[r]) continue;
int off = _dinfo.getCategoricalId(i,(int)chks[i].at8(r));
if(off != -1)
g[off] += eta[r];
}
}
// numerics
for (int i = 0; i < _dinfo._nums; ++i) {
Chunk c = chks[i + _dinfo._cats]; //not expanded
for (int r = c.nextNZ(-1); r < c._len; r = c.nextNZ(r)) {
if(skp[r] || c.isNA(r))
continue;
double d = c.atd(r);
if (_dinfo._normMul != null)
d = d*_dinfo._normMul[i];
g[numStart + i] += eta[r] * d;
}
}
_skip = skp;
}
}
// public static class GLMCoordinateDescentTask extends MRTask<GLMCoordinateDescentTask> {
// final double [] _betaUpdate;
// final double [] _beta;
// final double _xOldSub;
// final double _xOldMul;
// final double _xNewSub;
// final double _xNewMul;
// double [] _xy;
// public GLMCoordinateDescentTask(double [] betaUpdate, double [] beta, double xOldSub, double xOldMul, double xNewSub, double xNewMul) {
// _betaUpdate = betaUpdate;
// _beta = beta;
// _xOldSub = xOldSub;
// _xOldMul = xOldMul;
// _xNewSub = xNewSub;
// _xNewMul = xNewMul;
// public void map(Chunk [] chks) {
// Chunk xOld = chks[0];
// Chunk xNew = chks[1];
// if(xNew.vec().isCategorical()){
// _xy = MemoryManager.malloc8d(xNew.vec().domain().length);
// } else
// _xy = new double[1];
// Chunk eta = chks[2];
// Chunk weights = chks[3];
// Chunk res = chks[4];
// for(int i = 0; i < eta._len; ++i) {
// double w = weights.atd(i);
// double e = eta.atd(i);
// if(_betaUpdate != null) {
// if (xOld.vec().isCategorical()) {
// int cid = (int) xOld.at8(i);
// e = +_betaUpdate[cid];
// } else
// e += _betaUpdate[0] * (xOld.atd(i) - _xOldSub) * _xOldMul;
// eta.set(i, e);
// int cid = 0;
// double x = w;
// if(xNew.vec().isCategorical()) {
// cid = (int) xNew.at8(i);
// e -= _beta[cid];
// } else {
// x = (xNew.atd(i) - _xNewSub) * _xNewMul;
// e -= _beta[0] * x;
// x *= w;
// _xy[cid] += x * (res.atd(i) - e);
// @Override public void reduce(GLMCoordinateDescentTask t) {
// ArrayUtils.add(_xy, t._xy);
// /**
// * Compute initial solution for multinomial problem (Simple weighted LR with all weights = 1/4)
// */
// public static final class GLMMultinomialInitTsk extends MRTask<GLMMultinomialInitTsk> {
// double [] _mu;
// DataInfo _dinfo;
// Gram _gram;
// double [][] _xy;
// @Override public void map(Chunk [] chks) {
// Rows rows = _dinfo.rows(chks);
// _gram = new Gram(_dinfo);
// _xy = new double[_mu.length][_dinfo.fullN()+1];
// int numStart = _dinfo.numStart();
// double [] ds = new double[_mu.length];
// for(int i = 0; i < ds.length; ++i)
// ds[i] = 1.0/(_mu[i] * (1-_mu[i]));
// for(int i = 0; i < rows._nrows; ++i) {
// Row r = rows.row(i);
// double y = r.response(0);
// _gram.addRow(r,.25);
// for(int c = 0; c < _mu.length; ++c) {
// double iY = y == c?1:0;
// double z = (y-_mu[c]) * ds[i];
// for(int j = 0; j < r.nBins; ++j)
// _xy[c][r.binIds[j]] += z;
// for(int j = 0; j < r.nNums; ++j){
// int id = r.numIds == null?(j + numStart):r.numIds[j];
// double val = r.numVals[j];
// _xy[c][id] += z*val;
// @Override public void reduce(){
/**
* One iteration of GLM: computes the weighted Gram matrix t(X) %*% W %*% X and the vector t(X) %*% W %*% z.
*
* @author tomasnykodym
*/
public static class GLMIterationTask extends FrameTask2<GLMIterationTask> {
final GLMParameters _params;
final double [][]_beta_multinomial;
final double []_beta;
final int _c;
protected Gram _gram; // wx%*%x
double [] _xy; // wx^t%*%z,
GLMValidation _val; // validation of previous model
final double [] _ymu;
long _nobs;
final boolean _validate;
int [] _ti;
public double _likelihood;
final double _lambda;
double wsum, wsumu;
final boolean _intercept;
public GLMIterationTask(Key jobKey, DataInfo dinfo, double lambda, GLMModel.GLMParameters glm, boolean validate,
double [][] betaMultinomial, double [] beta, int c, double [] ymu, boolean intercept, H2OCountedCompleter cmp) {
super(cmp,dinfo,jobKey);
_params = glm;
_beta = beta;
_beta_multinomial = betaMultinomial.clone();
_beta_multinomial[c] = beta;
_c = c;
_ymu = ymu;
_validate = validate;
_lambda = lambda;
_intercept = intercept;
}
public GLMIterationTask(Key jobKey, DataInfo dinfo, double lambda, GLMModel.GLMParameters glm, boolean validate,
double [] beta, double ymu,boolean intercept, H2OCountedCompleter cmp) {
super(cmp,dinfo,jobKey);
_params = glm;
_beta = beta;
_beta_multinomial = null;
_c = -1;
_ymu = new double[]{ymu};
_validate = validate;
_lambda = lambda;
_intercept = intercept;
}
@Override public boolean handlesSparseData(){return true;}
transient private double [] _etas;
transient private double [] _sparseOffsets;
transient private double _sparseOffset;
@Override
public void chunkInit() {
// initialize
_gram = new Gram(_dinfo.fullN(), _dinfo.largestCat(), _dinfo._nums, _dinfo._cats,true);
if(_params._family == Family.multinomial)
_etas = new double[_beta_multinomial.length];
// public GLMValidation(Key dataKey, double ymu, GLMParameters glm, int rank, float [] thresholds){
if(_validate) {
int rank = 0;
if(_beta != null) for(double d:_beta) if(d != 0)++rank;
String [] domain = _dinfo._adaptedFrame.lastVec().domain();
if(domain == null && _params._family == Family.binomial)
domain = new String[]{"0","1"}; // special hard-coded case for binomial on binary col
_val = new GLMValidation(domain, _ymu, _params, rank, .5, true,_intercept); // todo pass correct threshold
}
_xy = MemoryManager.malloc8d(_dinfo.fullN()+1); // + 1 is for intercept
if(_params._family == Family.binomial && _validate){
_ti = new int[2];
}
if(_params._family == Family.multinomial) {
_sparseOffsets = new double[_beta_multinomial.length];
if(_sparse)
for (int i = 0; i < _beta_multinomial.length; ++i)
_sparseOffsets[i] = GLM.sparseOffset(_beta_multinomial[i],_dinfo);
} else if(_sparse)
_sparseOffset = GLM.sparseOffset(_beta,_dinfo);
}
@Override
protected void processRow(Row r) { // called for every row in the chunk
if(r.bad || r.weight == 0) return;
++_nobs;
double y = r.response(0);
assert ((_params._family != Family.gamma) || y > 0) : "illegal response column, y must be > 0 for family=Gamma.";
assert ((_params._family != Family.binomial) || (0 <= y && y <= 1)) : "illegal response column, y must be in [0,1] for family=Binomial, got " + y;
final double w;
final double eta;
double mu;
final double var;
final double wz;
final int numStart = _dinfo.numStart();
double d = 1;
if( _params._family == Family.gaussian && _params._link == Link.identity){
w = r.weight;
wz = w*(y - r.offset);
mu = 0;
eta = mu;
} else {
if(_params._family == Family.multinomial) {
y = (y == _c)?1:0;
double maxrow = 0;
for(int i = 0; i < _beta_multinomial.length; ++i) {
_etas[i] = r.innerProduct(_beta_multinomial[i]) + _sparseOffsets[i];
if(_etas[i] > maxrow /*|| -_etas[i] > maxrow*/)
maxrow = _etas[i];
}
eta = _etas[_c];
double etaExp = Math.exp(_etas[_c]-maxrow);
double sumExp = 0;
for(int i = 0; i < _beta_multinomial.length; ++i)
sumExp += Math.exp(_etas[i]-maxrow);
mu = etaExp / sumExp;
if(mu < 1e-16)
mu = 1e-16;
double logSumExp = Math.log(sumExp) + maxrow;
_likelihood -= r.weight * (_etas[(int)r.response(0)] - logSumExp);
d = mu*(1-mu);
wz = r.weight * (eta * d + (y-mu));
w = r.weight * d;
} else {
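// Standard IRLSM update: working response z = eta + (y - mu) * g'(mu) and
// weight w = obs_weight / (var(mu) * g'(mu)^2), where g is the link function.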
eta = r.innerProduct(_beta) + _sparseOffset;
mu = _params.linkInv(eta + r.offset);
_likelihood += r.weight*_params.likelihood(y,mu);
var = Math.max(1e-6, _params.variance(mu)); // avoid numerical problems with 0 variance
d = _params.linkDeriv(mu);
double z = eta + (y-mu)*d;
w = r.weight/(var*d*d);
wz = w*z;
if(_validate)
_val.add(y, mu, r.weight, r.offset);
}
}
assert w >= 0|| Double.isNaN(w) : "invalid weight " + w; // allow NaNs - can occur if line-search is needed!
wsum+=w;
wsumu+=r.weight; // just add the user observation weight for the scaling.
for(int i = 0; i < r.nBins; ++i) {
_xy[r.binIds[i]] += wz;
}
for(int i = 0; i < r.nNums; ++i){
int id = r.numIds == null?(i + numStart):r.numIds[i];
double val = r.numVals[i];
_xy[id] += wz*val;
}
if(_dinfo._intercept)
_xy[_xy.length-1] += wz;
_gram.addRow(r, w);
}
@Override
public void reduce(GLMIterationTask git){
ArrayUtils.add(_xy, git._xy);
_gram.add(git._gram);
_nobs += git._nobs;
wsum += git.wsum;
wsumu += git.wsumu;
if (_validate) _val.reduce(git._val);
_likelihood += git._likelihood;
super.reduce(git);
}
@Override protected void postGlobal(){
if(_sparse && _dinfo._normSub != null) { // need to adjust gram for missing centering!
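// Sparse rows were not mean-centered, so rebuild the centered Gram here:
// (x_i - m_i)'W(x_j - m_j) = x_i'Wx_j - m_i*sum(w*x_j) - m_j*sum(w*x_i) + (sum w)*m_i*m_j,
// where the weighted column sums live in the intercept row of the Gram.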
int ns = _dinfo.numStart();
int interceptIdx = _xy.length-1;
double [] interceptRow = _gram._xx[interceptIdx-_gram._diagN];
double nobs = interceptRow[interceptRow.length-1]; // weighted _nobs
for(int i = ns; i < _dinfo.fullN(); ++i) {
double iMean = _dinfo._normSub[i - ns] * _dinfo._normMul[i - ns];
for (int j = 0; j < ns; ++j)
_gram._xx[i - _gram._diagN][j] -= interceptRow[j]*iMean;
for (int j = ns; j <= i; ++j) {
double jMean = _dinfo._normSub[j - ns] * _dinfo._normMul[j - ns];
_gram._xx[i - _gram._diagN][j] -= interceptRow[i]*jMean + interceptRow[j]*iMean - nobs * iMean * jMean;
}
}
if(_dinfo._intercept) { // do the intercept row
for(int j = ns; j < _dinfo.fullN(); ++j)
interceptRow[j] -= nobs * _dinfo._normSub[j-ns]*_dinfo._normMul[j-ns];
}
// and the xy vec as well
for(int i = ns; i < _dinfo.fullN(); ++i) {
_xy[i] -= _xy[_xy.length - 1] * _dinfo._normSub[i - ns] * _dinfo._normMul[i - ns];
}
}
if(_val != null){
_val.computeAIC();
}
}
public boolean hasNaNsOrInf() {
return ArrayUtils.hasNaNsOrInfs(_xy) || _gram.hasNaNsOrInfs();
}
}
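/**
 * One step of naive (sequential) coordinate descent: updates the running
 * partial residual ztilda for the effect of the most recently updated
 * coefficient and accumulates _temp = sum(w * x * (z - ztilda)), the
 * numerator of the next coordinate update. _cat_num selects which
 * categorical/numeric combination the current and previous columns form.
 */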
public static class GLMCoordinateDescentTaskSeqNaive extends MRTask<GLMCoordinateDescentTaskSeqNaive> {
public double [] _normMulold;
public double [] _normSubold;
public double [] _normMulnew;
public double [] _normSubnew;
final double [] _betaold; // current old value at j
final double [] _betanew; // global beta @ j-1 that was just updated.
final int [] _catLvls_new; // sorted list of indices of active levels only for one categorical variable
final int [] _catLvls_old;
public double [] _temp;
boolean _skipFirst;
long _nobs;
int _cat_num; // 1: c and p categorical, 2:c numeric and p categorical, 3:c and p numeric , 4: c categorical and previous num.
boolean _interceptnew;
boolean _interceptold;
public GLMCoordinateDescentTaskSeqNaive(boolean interceptold, boolean interceptnew, int cat_num ,
double [] betaold, double [] betanew, int [] catLvlsold, int [] catLvlsnew,
double [] normMulold, double [] normSubold, double [] normMulnew, double [] normSubnew,
boolean skipFirst ) { // pass in norm mul and norm sub - already applied to the weights;
// norm mul and norm sub will be null without standardization.
_normMulold = normMulold;
_normSubold = normSubold;
_normMulnew = normMulnew;
_normSubnew = normSubnew;
_cat_num = cat_num;
_betaold = betaold;
_betanew = betanew;
_interceptold = interceptold; // if updating beta_1, then the intercept is the previous column
_interceptnew = interceptnew; // if currently updating the intercept value
_catLvls_old = catLvlsold;
_catLvls_new = catLvlsnew;
_skipFirst = skipFirst;
}
@Override
public void map(Chunk [] chunks) {
int cnt = 0;
Chunk wChunk = chunks[cnt++];
Chunk zChunk = chunks[cnt++];
Chunk ztildaChunk = chunks[cnt++];
Chunk xpChunk=null, xChunk=null;
_temp = new double[_betaold.length];
if (_interceptnew) {
xChunk = new C0DChunk(1,chunks[0]._len);
xpChunk = chunks[cnt++];
} else {
if (_interceptold) {
xChunk = chunks[cnt++];
xpChunk = new C0DChunk(1,chunks[0]._len);
}
else {
xChunk = chunks[cnt++];
xpChunk = chunks[cnt++];
}
}
// For each observation, add the corresponding term to temp - or, for a categorical variable, add only the term for its active level and the active level
// of the most recently updated variable before it (if that one is also categorical). If an observation's active level corresponds to an inactive column, we don't
// want to include it - same for an inactive level in the most recently updated variable - so leave those at zero (we won't be updating an inactive beta_j).
for (int i = 0; i < chunks[0]._len; ++i) { // going over all the rows in the chunk
double betanew = 0; // most recently updated prev variable
double betaold = 0; // old value of current variable being updated
double w = wChunk.atd(i);
if(w == 0) continue;
++_nobs;
int observation_level = 0, observation_level_p = 0;
double val = 1, valp = 1;
if(_cat_num == 1) {
observation_level = (int) xChunk.at8(i); // only need to change one temp value per observation.
if (_catLvls_old != null)
observation_level = Arrays.binarySearch(_catLvls_old, observation_level);
observation_level_p = (int) xpChunk.at8(i); // both cat
if (_catLvls_new != null)
observation_level_p = Arrays.binarySearch(_catLvls_new, observation_level_p);
if(_skipFirst){
observation_level--;
observation_level_p--;
}
}
else if(_cat_num == 2){
val = xChunk.atd(i); // current num and previous cat
if (_normMulold != null && _normSubold != null)
val = (val - _normSubold[0]) * _normMulold[0];
observation_level_p = (int) xpChunk.at8(i);
if (_catLvls_new != null)
observation_level_p = Arrays.binarySearch(_catLvls_new, observation_level_p);
if(_skipFirst){
observation_level_p--;
}
}
else if(_cat_num == 3){
val = xChunk.atd(i); // both num
if (_normMulold != null && _normSubold != null)
val = (val - _normSubold[0]) * _normMulold[0];
valp = xpChunk.atd(i);
if (_normMulnew != null && _normSubnew != null)
valp = (valp - _normSubnew[0]) * _normMulnew[0];
}
else if(_cat_num == 4){
observation_level = (int) xChunk.at8(i); // current cat
if (_catLvls_old != null)
observation_level = Arrays.binarySearch(_catLvls_old, observation_level); // search to see if this level is active.
if(_skipFirst){
observation_level--;
}
valp = xpChunk.atd(i); //prev numeric
if (_normMulnew != null && _normSubnew != null)
valp = (valp - _normSubnew[0]) * _normMulnew[0];
}
if(observation_level >= 0)
betaold = _betaold[observation_level];
if(observation_level_p >= 0)
betanew = _betanew[observation_level_p];
if (_interceptnew) {
ztildaChunk.set(i, ztildaChunk.atd(i) - betaold + valp * betanew);
_temp[0] += w * (zChunk.atd(i) - ztildaChunk.atd(i));
} else {
ztildaChunk.set(i, ztildaChunk.atd(i) - val * betaold + valp * betanew);
if(observation_level >=0 ) // if the active level for that observation is an "inactive column" don't want to add contribution to temp for that observation
_temp[observation_level] += w * val * (zChunk.atd(i) - ztildaChunk.atd(i));
}
}
}
@Override
public void reduce(GLMCoordinateDescentTaskSeqNaive git){
ArrayUtils.add(_temp, git._temp);
_nobs += git._nobs;
super.reduce(git);
}
}
public static class GLMCoordinateDescentTaskSeqIntercept extends MRTask<GLMCoordinateDescentTaskSeqIntercept> {
final double [] _betaold;
public double _temp;
DataInfo _dinfo;
public GLMCoordinateDescentTaskSeqIntercept( double [] betaold, DataInfo dinfo) {
_betaold = betaold;
_dinfo = dinfo;
}
@Override
public void map(Chunk [] chunks) {
int cnt = 0;
Chunk wChunk = chunks[cnt++];
Chunk zChunk = chunks[cnt++];
Chunk filterChunk = chunks[cnt++];
Row r = _dinfo.newDenseRow();
for(int i = 0; i < chunks[0]._len; ++i) {
if(filterChunk.atd(i)==1) continue;
_dinfo.extractDenseRow(chunks,i,r);
_temp += wChunk.atd(i) * (zChunk.atd(i) - r.innerProduct(_betaold));
}
}
@Override
public void reduce(GLMCoordinateDescentTaskSeqIntercept git){
_temp+= git._temp;
super.reduce(git);
}
}
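/**
 * Materializes the IRLSM working weights w and working responses z for every
 * row (written into the trailing w/z/ztilda chunks) and accumulates the
 * per-column denominators sum(w * x^2) used by coordinate descent.
 */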
public static class GLMGenerateWeightsTask extends MRTask<GLMGenerateWeightsTask> {
final GLMParameters _params;
final double [] _betaw;
double [] denums;
double wsum,wsumu;
DataInfo _dinfo;
double _likelihood;
public GLMGenerateWeightsTask(Key jobKey, DataInfo dinfo, GLMModel.GLMParameters glm, double[] betaw) {
_params = glm;
_betaw = betaw;
_dinfo = dinfo;
}
@Override
public void map(Chunk [] chunks) {
Chunk wChunk = chunks[chunks.length-3];
Chunk zChunk = chunks[chunks.length-2];
Chunk zTilda = chunks[chunks.length-1];
chunks = Arrays.copyOf(chunks,chunks.length-3);
denums = new double[_dinfo.fullN()+1]; // full N is expanded variables with categories
Row r = _dinfo.newDenseRow();
for(int i = 0; i < chunks[0]._len; ++i) {
_dinfo.extractDenseRow(chunks,i,r);
if (r.bad || r.weight == 0) {
wChunk.set(i,0);
zChunk.set(i,0);
zTilda.set(i,0);
continue;
}
final double y = r.response(0);
assert ((_params._family != Family.gamma) || y > 0) : "illegal response column, y must be > 0 for family=Gamma.";
assert ((_params._family != Family.binomial) || (0 <= y && y <= 1)) : "illegal response column, y must be in [0,1] for family=Binomial, got " + y;
final double w, eta, mu, var, z;
final int numStart = _dinfo.numStart();
double d = 1;
eta = r.innerProduct(_betaw);
if (_params._family == Family.gaussian && _params._link == Link.identity) {
w = r.weight;
z = y - r.offset;
mu = 0;
} else {
mu = _params.linkInv(eta + r.offset);
var = Math.max(1e-6, _params.variance(mu)); // avoid numerical problems with 0 variance
d = _params.linkDeriv(mu);
z = eta + (y - mu) * d;
w = r.weight / (var * d * d);
}
_likelihood += _params.likelihood(y,mu);
zTilda.set(i,eta-_betaw[_betaw.length-1]);
assert w >= 0 || Double.isNaN(w) : "invalid weight " + w; // allow NaNs - can occur if line-search is needed!
wChunk.set(i,w);
zChunk.set(i,z);
wsum+=w;
wsumu+=r.weight; // just add the user observation weight for the scaling.
for(int j = 0; j < r.nBins; ++j) { // go over cat variables
denums[r.binIds[j]] += w; // binIds skips the zeros.
}
for(int j = 0; j < r.nNums; ++j){ // num vars
int id = r.numIds == null?(j + numStart):r.numIds[j];
denums[id]+= w*r.get(id)*r.get(id);
}
}
}
@Override
public void reduce(GLMGenerateWeightsTask git){ // adding contribution of all the chunks
ArrayUtils.add(denums, git.denums);
wsum+=git.wsum;
wsumu += git.wsumu;
_likelihood += git._likelihood;
super.reduce(git);
}
}
// public static class GLMValidationTask<T extends GLMValidationTask<T>> extends MRTask<T> {
// protected final GLMModel _model;
// protected GLMValidation _res;
// public final double _lambda;
// public boolean _improved;
// Key _jobKey;
// public static Key makeKey(){return Key.make("__GLMValidation_" + Key.make().toString());}
// public GLMValidationTask(GLMModel model, double lambda){this(model,lambda,null);}
// public GLMValidationTask(GLMModel model, double lambda, H2OCountedCompleter completer){super(completer); _lambda = lambda; _model = model;}
// @Override public void map(Chunk[] chunks){
// _res = new GLMValidation(null,_model._ymu,_model._parms,_model.rank(_lambda));
// final int nrows = chunks[0]._len;
// double [] row = MemoryManager.malloc8d(_model._output._names.length);
// float [] preds = MemoryManager.malloc4f(_model._parms._family == Family.binomial?3:1);
// OUTER:
// for(int i = 0; i < nrows; ++i){
// if(chunks[chunks.length-1].isNA(i))continue;
// for(int j = 0; j < chunks.length-1; ++j){
// if(chunks[j].isNA(i))continue OUTER;
// row[j] = chunks[j].atd(i);
// _model.score0(row, preds);
// double response = chunks[chunks.length-1].atd(i);
// _res.add(response, _model._parms._family == Family.binomial?preds[2]:preds[0]);
// @Override public void reduce(GLMValidationTask gval){_res.add(gval._res);}
// @Override public void postGlobal(){
// _res.computeAIC();
// _res.computeAUC();
// use general score to reduce number of possible different code paths
// public static class GLMXValidationTask extends GLMValidationTask<GLMXValidationTask>{
// protected final GLMModel [] _xmodels;
// protected GLMValidation [] _xvals;
// long _nobs;
// final float [] _thresholds;
// public static Key makeKey(){return Key.make("__GLMValidation_" + Key.make().toString());}
// public GLMXValidationTask(GLMModel mainModel,double lambda, GLMModel [] xmodels, float [] thresholds){this(mainModel,lambda,xmodels,thresholds,null);}
// public GLMXValidationTask(GLMModel mainModel,double lambda, GLMModel [] xmodels, float [] thresholds, final H2OCountedCompleter completer){
// super(mainModel, lambda,completer);
// _xmodels = xmodels;
// _thresholds = thresholds;
// @Override public void map(Chunk [] chunks) {
// long gid = chunks[0].start();
// _xvals = new GLMValidation[_xmodels.length];
// for(int i = 0; i < _xmodels.length; ++i)
// _xvals[i] = new GLMValidation(null,_xmodels[i]._ymu,_xmodels[i]._parms,_xmodels[i]._output.rank(),_thresholds);
// final int nrows = chunks[0]._len;
// double [] row = MemoryManager.malloc8d(_xmodels[0]._output._names.length);
// float [] preds = MemoryManager.malloc4f(_xmodels[0]._parms._family == Family.binomial?3:1);
// OUTER:
// for(int i = 0; i < nrows; ++i){
// if(chunks[chunks.length-1].isNA(i))continue;
// for(int j = 0; j < chunks.length-1; ++j) {
// if(chunks[j].isNA(i))continue OUTER;
// row[j] = chunks[j].atd(i);
// ++_nobs;
// final int mid = (int)((i + gid) % _xmodels.length);
// final GLMModel model = _xmodels[mid];
// final GLMValidation val = _xvals[mid];
// model.score0(row, preds);
// double response = chunks[chunks.length-1].at8(i);
// val.add(response, model._parms._family == Family.binomial?preds[2]:preds[0]);
// @Override public void reduce(GLMXValidationTask gval){
// _nobs += gval._nobs;
// for(int i = 0; i < _xvals.length; ++i)
// _xvals[i].add(gval._xvals[i]);}
// @Override public void postGlobal() {
// H2OCountedCompleter cmp = (H2OCountedCompleter)getCompleter();
// if(cmp != null)cmp.addToPendingCount(_xvals.length + 1);
// for (int i = 0; i < _xvals.length; ++i) {
// _xvals[i].computeAIC();
// _xvals[i].computeAUC();
// _xvals[i]._nobs = _nobs - _xvals[i]._nobs;
// GLMModel.setXvalidation(cmp, _xmodels[i]._key, _lambda, _xvals[i]);
// GLMModel.setXvalidation(cmp, _model._key, _lambda, new GLMXValidation(_model, _xmodels, _xvals, _lambda, _nobs,_thresholds));
}
|
package hex;
import hex.schemas.ModelBuilderSchema;
import jsr166y.CountedCompleter;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.exceptions.H2OModelBuilderIllegalArgumentException;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.FrameUtils;
import water.util.Log;
import water.util.MRUtils;
import water.util.ReflectionUtils;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import static water.util.RandomUtils.getRNG;
/**
* Model builder parent class. Contains the common interfaces and fields across all model builders.
*/
abstract public class ModelBuilder<M extends Model<M,P,O>, P extends Model.Parameters, O extends Model.Output> extends Job<M> {
/** All the parameters required to build the model. */
public P _parms;
/** Training frame: derived from the parameter's training frame, excluding
* all ignored columns, all constant and bad columns, perhaps flipping the
* response column to an Categorical, etc. */
public final Frame train() { return _train; }
protected transient Frame _train;
/** Validation frame: derived from the parameter's validation frame, excluding
* all ignored columns, all constant and bad columns, perhaps flipping the
* response column to a Categorical, etc. Is null if no validation key is set. */
public final Frame valid() { return _valid; }
protected transient Frame _valid;
// TODO: tighten up the type
// Map the algo name (e.g., "deeplearning") to the builder class (e.g., DeepLearning.class) :
private static final Map<String, Class<? extends ModelBuilder>> _builders = new HashMap<>();
// Map the Model class (e.g., DeepLearningModel.class) to the algo name (e.g., "deeplearning"):
private static final Map<Class<? extends Model>, String> _model_class_to_algo = new HashMap<>();
// Map the simple algo name (e.g., deeplearning) to the full algo name (e.g., "Deep Learning"):
private static final Map<String, String> _algo_to_algo_full_name = new HashMap<>();
// Map the algo name (e.g., "deeplearning") to the Model class (e.g., DeepLearningModel.class):
private static final Map<String, Class<? extends Model>> _algo_to_model_class = new HashMap<>();
/** Train response vector. */
public Vec response(){return _response;}
/** Validation response vector. */
public Vec vresponse(){return _vresponse;}
/**
* Compute the (weighted) mean of the response (subtracting possible offset terms)
* @return mean
*/
protected double responseMean() {
if (hasWeightCol() || hasOffsetCol()) {
return new FrameUtils.WeightedMean().doAll(
_response,
hasWeightCol() ? _weights : _response.makeCon(1),
hasOffsetCol() ? _offset : _response.makeCon(0)
).weightedMean();
}
return _response.mean();
}
/**
* Register a ModelBuilder, assigning it an algo name.
*/
public static void registerModelBuilder(String name, String full_name, Class<? extends ModelBuilder> clz) {
_builders.put(name, clz);
Class<? extends Model> model_class = (Class<? extends Model>)ReflectionUtils.findActualClassParameter(clz, 0);
_model_class_to_algo.put(model_class, name);
_algo_to_algo_full_name.put(name, full_name);
_algo_to_model_class.put(name, model_class);
}
/** Get a Map of all algo names to their ModelBuilder classes. */
public static Map<String, Class<? extends ModelBuilder>>getModelBuilders() { return _builders; }
/** Get the ModelBuilder class for the given algo name. */
public static Class<? extends ModelBuilder> getModelBuilder(String name) {
return _builders.get(name);
}
/** Get the Model class for the given algo name. */
public static Class<? extends Model> getModelClass(String name) {
return _algo_to_model_class.get(name);
}
/** Get the algo name for the given Model. */
public static String getAlgo(Model model) {
return _model_class_to_algo.get(model.getClass());
}
/** Get the algo full name for the given algo. */
public static String getAlgoFullName(String algo) {
return _algo_to_algo_full_name.get(algo);
}
public String getAlgo() {
return getAlgo(this.getClass());
}
public static String getAlgo(Class<? extends ModelBuilder> clz) {
// Check for unknown algo names, but if none are registered keep going; we're probably in JUnit.
if (_builders.isEmpty())
return "Unknown algo (should only happen under JUnit)";
if (! _builders.containsValue(clz))
throw new H2OIllegalArgumentException("Failed to find ModelBuilder class in registry: " + clz, "Failed to find ModelBuilder class in registry: " + clz);
for (Map.Entry<String, Class<? extends ModelBuilder>> entry : _builders.entrySet())
if (entry.getValue().equals(clz))
return entry.getKey();
// Note: unreachable:
throw new H2OIllegalArgumentException("Failed to find ModelBuilder class in registry: " + clz, "Failed to find ModelBuilder class in registry: " + clz);
}
/**
* Externally visible default schema
* TODO: this is in the wrong layer: the internals should not know anything about the schemas!!!
* This puts a reverse edge into the dependency graph.
*/
public abstract ModelBuilderSchema schema();
/** Constructor called from an http request; MUST override in subclasses. */
public ModelBuilder(P ignore) {
super(Key.<M>make("Failed"),"ModelBuilder constructor needs to be overridden.");
throw H2O.fail("ModelBuilder subclass failed to override the params constructor: " + this.getClass());
}
/** Constructor making a default destination key */
public ModelBuilder(String desc, P parms) {
this((parms == null || parms._model_id == null) ? Key.make(H2O.calcNextUniqueModelId(desc)) : parms._model_id, desc, parms);
}
/** Default constructor, given all arguments */
public ModelBuilder(Key dest, String desc, P parms) {
super(dest,desc);
_parms = parms;
}
/** Factory method to create a ModelBuilder instance of the correct class given the algo name. */
public static ModelBuilder createModelBuilder(String algo) {
ModelBuilder modelBuilder;
Class<? extends ModelBuilder> clz = null;
try {
clz = ModelBuilder.getModelBuilder(algo);
}
catch (Exception ignore) {}
if (clz == null) {
throw new H2OIllegalArgumentException("algo", "createModelBuilder", "Algo not known (" + algo + ")");
}
try {
if (! (clz.getGenericSuperclass() instanceof ParameterizedType)) {
throw H2O.fail("Class is not parameterized as expected: " + clz);
}
Type[] handler_type_parms = ((ParameterizedType)(clz.getGenericSuperclass())).getActualTypeArguments();
// [0] is the Model type; [1] is the Model.Parameters type; [2] is the Model.Output type.
Class<? extends Model.Parameters> pclz = (Class<? extends Model.Parameters>)handler_type_parms[1];
Constructor<ModelBuilder> constructor = (Constructor<ModelBuilder>)clz.getDeclaredConstructor(new Class[] { (Class)handler_type_parms[1] });
Model.Parameters p = pclz.newInstance();
modelBuilder = constructor.newInstance(p);
} catch (java.lang.reflect.InvocationTargetException e) {
throw H2O.fail("Exception when trying to instantiate ModelBuilder for: " + algo + ": " + e.getCause(), e);
} catch (Exception e) {
throw H2O.fail("Exception when trying to instantiate ModelBuilder for: " + algo + ": " + e.getCause(), e);
}
return modelBuilder;
}
/** Method to launch training of a Model, based on its parameters. */
final public Job<M> trainModel() {
// init(false); //parameter sanity check (such as _fold_column, etc.)
if (error_count() > 0) {
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(this);
}
return _parms._nfolds == 0 && _parms._fold_column == null ? trainModelImpl(progressUnits()) :
// cross-validation needs to be forked off to allow continuous (non-blocking) progress bar
start(new H2O.H2OCountedCompleter(){
@Override protected void compute2() {
computeCrossValidation();
tryComplete();
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller) {
failed(ex);
return true;
}
}, (_parms._nfolds+1)*progressUnits());
}
/**
* Model-specific implementation of model training
* @param progressUnits Number of progress units (each advances the Job's progress bar by a bit)
* @return ModelBuilder job
*/
abstract public Job<M> trainModelImpl(long progressUnits);
abstract public long progressUnits();
/**
* Default naive (serial) implementation of N-fold cross-validation
* @return Cross-validation Job
* (builds N+1 models, all have train+validation metrics, the main model has N-fold cross-validated validation metrics)
*/
public Job<M> computeCrossValidation() {
final Frame origTrainFrame = train();
// Step 1: Assign each row to a fold
final Vec foldAssignment;
final Integer N;
if (_parms._fold_column != null) {
foldAssignment = origTrainFrame.vec(_parms._fold_column);
N = (int)foldAssignment.max() - (int)foldAssignment.min() + 1;
assert(N>1); //should have been already checked in init();
} else {
N = _parms._nfolds;
long seed = new Random().nextLong();
for (Field f : _parms.getClass().getFields()) {
if (f.getName().equals("_seed")) {
try {
seed = (long)(f.get(_parms));
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
final long actualSeed = seed;
Log.info("Creating " + N + " cross-validation splits with random number seed: " + actualSeed);
foldAssignment = origTrainFrame.anyVec().makeZero();
final Model.Parameters.FoldAssignmentScheme foldAssignmentScheme = _parms._fold_assignment;
new MRTask() {
@Override
public void map(Chunk foldAssignment) {
for (int i = 0; i < foldAssignment._len; ++i) {
int fold;
switch (foldAssignmentScheme) {
case Random:
fold = Math.abs(getRNG(foldAssignment.start() + actualSeed + i).nextInt()) % N;
break;
case Modulo:
fold = ((int) (foldAssignment.start() + i)) % N;
break;
default:
throw H2O.unimpl();
}
foldAssignment.set(i, fold);
}
}
}.doAll(foldAssignment);
}
final Key[] modelKeys = new Key[N];
final Key[] predictionKeys = new Key[N];
// Step 2: Make 2*N binary weight vectors
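// weights[2*i] selects the training rows of fold i (holdout rows get weight 0)
// and weights[2*i+1] selects the holdout rows; both inherit any user weight.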
final String origWeightsName = _parms._weights_column;
final Vec[] weights = new Vec[2*N];
final Vec origWeight = origWeightsName != null ? origTrainFrame.vec(origWeightsName) : origTrainFrame.anyVec().makeCon(1.0);
for (int i=0; i<N; ++i) {
// Make weights
weights[2*i] = origTrainFrame.anyVec().makeZero();
weights[2*i+1] = origTrainFrame.anyVec().makeZero();
// Now update the weights in place
final int whichFold = i;
new MRTask() {
@Override
public void map(Chunk chks[]) {
Chunk fold = chks[0];
Chunk orig = chks[1];
Chunk train = chks[2];
Chunk valid = chks[3];
for (int i=0; i< orig._len; ++i) {
int foldAssignment = (int)fold.at8(i) % N;
assert(foldAssignment >= 0 && foldAssignment <N);
boolean holdout = foldAssignment == whichFold;
double w = orig.atd(i);
train.set(i, holdout ? 0 : w);
valid.set(i, holdout ? w : 0);
}
}
}.doAll(new Vec[]{foldAssignment, origWeight, weights[2*i], weights[2*i+1]});
if (weights[2*i].isConst() || weights[2*i+1].isConst()) {
String msg = "Not enough data to create " + N + " random cross-validation splits. Either reduce nfolds, specify a larger dataset (or specify another random number seed, if applicable).";
throw new H2OIllegalArgumentException(msg);
}
}
if (_parms._fold_column == null) {
foldAssignment.remove();
}
// Build N cross-validation models
final Key<M> origDest = dest();
// adapt main Job's progress bar to build N+1 models
ModelMetrics.MetricBuilder[] mb = new ModelMetrics.MetricBuilder[N];
_deleteProgressKey = false; // keep the same progress bar for all N+1 jobs
for (int i=0; i<N; ++i) {
Log.info("Building cross-validation model " + (i+1) + " / " + N + ".");
final String identifier = origDest.toString() + "_cv_" + (i+1);
final String weightName = "weights";
// Training/Validation share the same data, but will have exclusive weights
final Frame cvTrain = new Frame(Key.make(identifier+"_"+_parms._train.toString()+"_train"), origTrainFrame.names(), origTrainFrame.vecs());
cvTrain.add(weightName, weights[2*i]);
DKV.put(cvTrain);
final Frame cvVal = new Frame(Key.make(identifier+"_"+_parms._train.toString()+"_valid"), origTrainFrame.names(), origTrainFrame.vecs());
cvVal.add(weightName, weights[2*i+1]);
DKV.put(cvVal);
modelKeys[i] = Key.make(identifier);
// Build CV model - launch a separate Job
Model m;
{
ModelBuilder<M, P, O> cvModel = (ModelBuilder<M, P, O>) this.clone();
cvModel._dest = modelKeys[i];
cvModel._key = Key.make(_key.toString() + "_cv" + i);
cvModel._state = JobState.CREATED;
cvModel._description = identifier;
// Fix up some parameters
cvModel._parms = (P)_parms.clone();
cvModel._parms._weights_column = weightName;
cvModel._parms._train = cvTrain._key;
cvModel._parms._valid = cvVal._key;
cvModel.modifyParmsForCrossValidationSplits(i, N);
cvModel.trainModelImpl(-1);
m = cvModel.get();
cvModel.remove(); // remove the CV Job bookkeeping; the built model stays in the DKV under modelKeys[i]
}
// holdout scoring
{
Frame adaptFr = new Frame(cvVal);
m.adaptTestForTrain(adaptFr, true, !isSupervised());
mb[i] = m.scoreMetrics(adaptFr);
if (_parms._keep_cross_validation_predictions) {
String predName = "prediction_" + modelKeys[i].toString();
predictionKeys[i] = Key.make(predName);
m.predictScoreImpl(cvVal, adaptFr, predName);
}
if (!_parms._keep_cross_validation_splits) {
weights[2 * i].remove();
weights[2 * i + 1].remove();
DKV.remove(cvTrain._key);
DKV.remove(cvVal._key);
if (origWeightsName == null) origWeight.remove();
}
Model.cleanup_adapt(adaptFr, cvVal);
DKV.remove(adaptFr._key);
}
}
Log.info("Building main model.");
_state = JobState.CREATED;
_deleteProgressKey = true; //delete progress after the main model is done
modifyParmsForCrossValidationMainModel(N);
Job<M> main = trainModelImpl(-1);
Model mainModel = main.get();
// new TAtomic<JobList>() {
// @Override public JobList atomic(JobList old) {
// if( old == null ) old = new JobList();
// Key[] jobs = old._jobs;
// old._jobs = Arrays.copyOf(jobs, jobs.length - 1);
// return old;
// }.invoke(LIST);
Log.info("Computing " + N + "-fold cross-validation metrics.");
mainModel._output._cross_validation_models = new Key[N];
mainModel._output._cross_validation_predictions = _parms._keep_cross_validation_predictions ? new Key[N] : null;
for (int i=0; i<N; ++i) {
if (i>0) mb[0].reduce(mb[i]);
mainModel._output._cross_validation_models[i] = modelKeys[i];
if (_parms._keep_cross_validation_predictions)
mainModel._output._cross_validation_predictions[i] = predictionKeys[i];
}
mainModel._output._cross_validation_metrics = mb[0].makeModelMetrics(mainModel, _parms.train());
mainModel._output._cross_validation_metrics._description = N + "-fold cross-validation on training data";
DKV.put(mainModel);
return main;
}
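// Hypothetical usage sketch (builder, frame and column names are illustrative, not
// defined in this file): setting _nfolds > 1 routes the build through
// computeCrossValidation() above.
//   GBMModel.GBMParameters parms = new GBMModel.GBMParameters();
//   parms._train = trainFrame._key;
//   parms._response_column = "label";
//   parms._nfolds = 5;
//   Job<GBMModel> job = new GBM(parms).trainModel();
//   GBMModel mainModel = job.get(); // carries 5-fold cross-validated metrics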
/**
* Override with model-specific checks / modifications to _parms for N-fold cross-validation splits.
* For example, the models might need to be told to not do early stopping.
* @param i which model index [0...N-1]
* @param N Total number of cross-validation folds
*/
public void modifyParmsForCrossValidationSplits(int i, int N) {
_parms._nfolds = 0;
}
/**
* Override for model-specific checks / modifications to _parms for the main model during N-fold cross-validation.
* For example, the model might need to be told to not do early stopping.
*/
public void modifyParmsForCrossValidationMainModel(int N) {
}
boolean _deleteProgressKey = true;
@Override
protected boolean deleteProgressKey() {
return _deleteProgressKey;
}
/** List containing the categories of models that this builder can
* build. Each ModelBuilder must have one of these. */
abstract public ModelCategory[] can_build();
/**
* Visibility for this algo: is it always visible, is it beta (always visible but with a note in the UI)
* or is it experimental (hidden by default, visible in the UI if the user gives an "experimental" flag
* at startup).
*/
public enum BuilderVisibility {
Experimental,
Beta,
Stable
}
/**
* Visibility for this algo: is it always visible, is it beta (always visible but with a note in the UI)
* or is it experimental (hidden by default, visible in the UI if the user gives an "experimental" flag
* at startup).
*/
abstract public BuilderVisibility builderVisibility();
/** Clear whatever was done by init() so it can be run again. */
public void clearInitState() {
clearValidationErrors();
}
public boolean isSupervised(){return false;}
protected transient Vec _response; // Handy response column
protected transient Vec _vresponse; // Handy response column
protected transient Vec _offset; // Handy offset column
protected transient Vec _weights; // observation weight column
protected transient Vec _fold; // fold id column
public boolean hasOffsetCol(){ return _parms._offset_column != null;} // don't look at transient Vec
public boolean hasWeightCol(){return _parms._weights_column != null;} // don't look at transient Vec
public boolean hasFoldCol(){return _parms._fold_column != null;} // don't look at transient Vec
public int numSpecialCols() { return (hasOffsetCol() ? 1 : 0) + (hasWeightCol() ? 1 : 0) + (hasFoldCol() ? 1 : 0); }
// no hasResponse, call isSupervised instead (response is mandatory if isSupervised is true)
protected int _nclass; // Number of classes; 1 for regression; 2+ for classification
public int nclasses(){return _nclass;}
public final boolean isClassifier() { return _nclass > 1; }
/**
* Find and set the response/weights/offset/fold columns and move them all to the end of the frame.
* @return number of non-feature vecs
*/
protected int separateFeatureVecs() {
int res = 0;
if(_parms._weights_column != null) {
Vec w = _train.remove(_parms._weights_column);
if(w == null)
error("_weights_column","Weights column '" + _parms._weights_column + "' not found in the training frame");
else {
if(!w.isNumeric())
error("_weights_column","Invalid weights column '" + _parms._weights_column + "', weights must be numeric");
_weights = w;
if(w.naCnt() > 0)
error("_weights_column","Weights cannot have missing values.");
if(w.min() < 0)
error("_weights_column","Weights must be >= 0");
if(w.max() == 0)
error("_weights_column","Max. weight must be > 0");
_train.add(_parms._weights_column, w);
++res;
}
} else {
_weights = null;
assert(!hasWeightCol());
}
if(_parms._offset_column != null) {
Vec o = _train.remove(_parms._offset_column);
if(o == null)
error("_offset_column","Offset column '" + _parms._offset_column + "' not found in the training frame");
else {
if(!o.isNumeric())
error("_offset_column","Invalid offset column '" + _parms._offset_column + "', offset must be numeric");
_offset = o;
if(o.naCnt() > 0)
error("_offset_column","Offset cannot have missing values.");
if(_weights == _offset)
error("_offset_column", "Offset must be different from weights");
_train.add(_parms._offset_column, o);
++res;
}
} else {
_offset = null;
assert(!hasOffsetCol());
}
if(_parms._fold_column != null) {
Vec f = _train.remove(_parms._fold_column);
if(f == null)
error("_fold_column","Fold column '" + _parms._fold_column + "' not found in the training frame");
else {
if(!f.isInt())
error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold must be integer");
if(f.min() < 0)
error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold must be non-negative");
if(f.isConst())
error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold cannot be constant");
_fold = f;
if(f.naCnt() > 0)
error("_fold_column","Fold cannot have missing values.");
if(_fold == _weights)
error("_fold_column", "Fold must be different from weights");
if(_fold == _offset)
error("_fold_column", "Fold must be different from offset");
_train.add(_parms._fold_column, f);
++res;
}
} else {
_fold = null;
assert(!hasFoldCol());
}
if(isSupervised() && _parms._response_column != null) {
_response = _train.remove(_parms._response_column);
if (_response == null) {
if (isSupervised())
error("_response_column", "Response column '" + _parms._response_column + "' not found in the training frame");
} else {
_train.add(_parms._response_column, _response);
++res;
}
} else {
_response = null;
}
return res;
}
protected boolean ignoreStringColumns(){return true;}
/**
* Ignore constant columns, columns with all NAs, and string columns.
* @param npredictors number of special (non-feature) vecs kept at the end of the frame, which are skipped
* @param expensive whether to log the dropped columns
*/
protected void ignoreBadColumns(int npredictors, boolean expensive){
// Drop all-constant and all-bad columns.
if( _parms._ignore_const_cols)
new FilterCols(npredictors) {
@Override protected boolean filter(Vec v) { return v.isConst() || v.isBad() || (ignoreStringColumns() && v.isString()); }
}.doIt(_train,"Dropping constant columns: ",expensive);
}
/**
* Override this method to call error() if the model is expected to not fit in memory, and say why
*/
protected void checkMemoryFootPrint() {}
transient double [] _distribution;
transient double [] _priorClassDist;
protected boolean computePriorClassDistribution(){
return _parms._balance_classes;
}
@Override
public int error_count() { assert error_count_or_uninitialized() >= 0 : "init() not run yet"; return super.error_count(); }
/** Initialize the ModelBuilder, validating all arguments and preparing the
* training frame. This call is expected to be overridden in the subclasses
* and each subclass will start with "super.init();". This call is made by
* the front-end whenever the GUI is clicked, and needs to be fast whenever
* {@code expensive} is false; it will be called once again at the start of
* model building {@link #trainModel()} with expensive set to true.
*<p>
* The incoming training frame (and validation frame) will have ignored
* columns dropped out, plus whatever work the parent init did.
*<p>
* NOTE: The front end initially calls this through the parameters validation
* endpoint with no training_frame, so each subclass's {@code init()} method
* has to work correctly with the training_frame missing.
*<p>
* @see #updateValidationMessages()
*/
public void init(boolean expensive) {
// Log parameters
if (expensive) {
Log.info("Building H2O " + this.getClass().getSimpleName().toString() + " model with these parameters:");
Log.info(new String(_parms.writeJSON(new AutoBuffer()).buf()));
}
// NOTE: allow re-init:
clearInitState();
assert _parms != null; // Parms must already be set
if( _parms._train == null ) {
if (expensive)
error("_train","Missing training frame");
return;
}
Frame tr = _parms.train();
if( tr == null ) { error("_train","Missing training frame: "+_parms._train); return; }
_train = new Frame(null /* not putting this into KV */, tr._names.clone(), tr.vecs().clone());
if (_parms._nfolds < 0 || _parms._nfolds == 1) {
error("_nfolds", "nfolds must be either 0 or >1.");
}
if (_parms._nfolds > 1 && _parms._nfolds > train().numRows()) {
error("_nfolds", "nfolds cannot be larger than the number of rows (" + train().numRows() + ").");
}
if (_parms._fold_column != null) {
hide("_fold_assignment", "Fold assignment is ignored when a fold column is specified.");
if (_parms._nfolds > 1) {
error("_nfolds", "nfolds cannot be specified at the same time as a fold column.");
} else {
hide("_nfolds", "nfolds is ignored when a fold column is specified.");
}
}
if (_parms._nfolds > 1) {
hide("_fold_column", "Fold column is ignored when nfolds > 1.");
}
// hide cross-validation parameters unless cross-val is enabled
if (_parms._nfolds ==0 && _parms._fold_column == null) {
hide("_keep_cross_validation_splits", "Only for cross-validation.");
hide("_keep_cross_validation_predictions", "Only for cross-validation.");
hide("_fold_assignment", "Only for cross-validation.");
}
// Drop explicitly dropped columns
if( _parms._ignored_columns != null ) {
_train.remove(_parms._ignored_columns);
if( expensive ) Log.info("Dropping ignored columns: "+Arrays.toString(_parms._ignored_columns));
}
// Drop all non-numeric columns (e.g., String and UUID). No current algo
// can use them, and otherwise all algos will then be forced to remove
// them. Text algos (grep, word2vec) take raw text columns - which are
// numeric (arrays of bytes).
ignoreBadColumns(separateFeatureVecs(), expensive);
// Check that at least some columns are not-constant and not-all-NAs
if( _train.numCols() == 0 )
error("_train","There are no usable columns to generate model");
if(isSupervised()) {
if(_response != null) {
_nclass = _response.isEnum() ? _response.cardinality() : 1;
if (_response.isConst())
error("_response","Response cannot be constant.");
}
if (! _parms._balance_classes)
hide("_max_after_balance_size", "Balance classes is false, hide max_after_balance_size");
else if (_parms._weights_column != null && _weights != null && !_weights.isBinary())
error("_balance_classes", "Balance classes and observation weights are not currently supported together.");
if( _parms._max_after_balance_size <= 0.0 )
error("_max_after_balance_size","Max size after balancing needs to be positive, suggest 1.0f");
if( _train != null ) {
if (_train.numCols() <= 1)
error("_train", "Training data must have at least 2 features (incl. response).");
if( null == _parms._response_column) {
error("_response_column", "Response column parameter not set.");
return;
}
if(_response != null && computePriorClassDistribution()) {
if (isClassifier() && isSupervised()) {
MRUtils.ClassDist cdmt =
_weights != null ? new MRUtils.ClassDist(nclasses()).doAll(_response, _weights) : new MRUtils.ClassDist(nclasses()).doAll(_response);
_distribution = cdmt.dist();
_priorClassDist = cdmt.rel_dist();
} else { // Regression; only 1 "class"
_distribution = new double[]{ (_weights != null ? _weights.mean() : 1.0) * train().numRows() };
_priorClassDist = new double[]{1.0f};
}
}
}
if( !isClassifier() ) {
hide("_balance_classes", "Balance classes is only applicable to classification problems.");
hide("_class_sampling_factors", "Class sampling factors is only applicable to classification problems.");
hide("_max_after_balance_size", "Max after balance size is only applicable to classification problems.");
hide("_max_confusion_matrix_size", "Max confusion matrix size is only applicable to classification problems.");
}
if (_nclass <= 2) {
hide("_max_hit_ratio_k", "Max K-value for hit ratio is only applicable to multi-class classification problems.");
hide("_max_confusion_matrix_size", "Only for multi-class classification problems.");
}
if( !_parms._balance_classes ) {
hide("_max_after_balance_size", "Only used with balanced classes");
hide("_class_sampling_factors", "Class sampling factors is only applicable if balancing classes.");
}
}
else {
hide("_response_column", "Ignored for unsupervised methods.");
hide("_balance_classes", "Ignored for unsupervised methods.");
hide("_class_sampling_factors", "Ignored for unsupervised methods.");
hide("_max_after_balance_size", "Ignored for unsupervised methods.");
hide("_max_confusion_matrix_size", "Ignored for unsupervised methods.");
_response = null;
_vresponse = null;
_nclass = 1;
}
// Build the validation set to be compatible with the training set.
// Toss out extra columns, complain about missing ones, remap enums
Frame va = _parms.valid(); // User-given validation set
if (va != null) {
_valid = new Frame(null /* not putting this into KV */, va._names.clone(), va.vecs().clone());
try {
String[] msgs = Model.adaptTestForTrain(_train._names, _parms._weights_column, _parms._offset_column, _parms._fold_column, null, _train.domains(), _valid, _parms.missingColumnsType(), expensive, true);
_vresponse = _valid.vec(_parms._response_column);
if (_vresponse == null && _parms._response_column != null)
error("_validation_frame", "Validation frame must have a response column '" + _parms._response_column + "'.");
if (expensive) {
for (String s : msgs) {
Log.info(s);
info("_valid", s);
}
}
assert !expensive || (_valid == null || Arrays.equals(_train._names, _valid._names));
} catch (IllegalArgumentException iae) {
error("_valid", iae.getMessage());
}
} else {
_valid = null;
_vresponse = null;
}
assert(_weights != null == hasWeightCol());
assert(_parms._weights_column != null == hasWeightCol());
assert(_offset != null == hasOffsetCol());
assert(_parms._offset_column != null == hasOffsetCol());
assert(_fold != null == hasFoldCol());
assert(_parms._fold_column != null == hasFoldCol());
}
abstract class FilterCols {
final int _specialVecs; // special vecs to skip at the end
public FilterCols(int n) {_specialVecs = n;}
abstract protected boolean filter(Vec v);
void doIt( Frame f, String msg, boolean expensive ) {
boolean any=false;
for( int i = 0; i < f.vecs().length - _specialVecs; i++ ) {
if( filter(f.vecs()[i]) ) {
if( any ) msg += ", "; // Log dropped cols
any = true;
msg += f._names[i];
f.remove(i);
i--; // Re-run at same iteration after dropping a col
}
}
if( any ) {
warn("_train", msg);
if (expensive) Log.info(msg);
}
}
}
}
|
package hex;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.exceptions.H2OModelBuilderIllegalArgumentException;
import water.fvec.*;
import water.rapids.ASTKFold;
import water.util.ArrayUtils;
import water.util.Log;
import water.util.MRUtils;
import water.util.VecUtils;
import java.util.*;
/**
* Model builder parent class. Contains the common interfaces and fields across all model builders.
*/
abstract public class ModelBuilder<M extends Model<M,P,O>, P extends Model.Parameters, O extends Model.Output> extends Iced {
public Job _job; // Job controlling this build
/** Block till completion, and return the built model from the DKV. Note the
* funny assert: the Job does NOT have to be controlling this model build,
* but might, e.g. be controlling a Grid search for which this is just one
* of many results. Calling 'get' means that we are blocking on the Job
* which is controlling ONLY this ModelBuilder, and when the Job completes
* we can return the built Model. */
public final M get() { assert _job._result == _result; return (M)_job.get(); }
public final boolean isStopped() { return _job.isStopped(); }
// Key of the model being built; note that this is DIFFERENT from
// _job._result if the Job is being shared by many sub-models
// e.g. cross-validation.
protected Key<M> _result; // Built Model key
public final Key<M> dest() { return _result; }
private long _start_time; //start time in msecs - only used for time-based stopping
protected boolean timeout() {
assert(_start_time > 0) : "Must set _start_time for each individual model.";
return _parms._max_runtime_secs > 0 && System.currentTimeMillis() - _start_time > (long) (_parms._max_runtime_secs * 1e3);
}
protected boolean stop_requested() {
return _job.stop_requested() || timeout();
}
/** Default model-builder key */
public static Key<? extends Model> defaultKey(String algoName) {
return Key.make(H2O.calcNextUniqueModelId(algoName));
}
/** Default easy constructor: Unique new job and unique new result key */
protected ModelBuilder(P parms) {
this(parms, (Key<M>)defaultKey(parms.algoName()));
}
/** Unique new job and named result key */
protected ModelBuilder(P parms, Key<M> key) {
_job = new Job<>(_result = key, parms.javaName(), parms.algoName());
_parms = parms;
}
/** Shared pre-existing Job and unique new result key */
protected ModelBuilder(P parms, Job job) {
_job = job;
_result = (Key<M>)defaultKey(parms.algoName());
_parms = parms;
}
/** List of known ModelBuilders with all default args; endlessly cloned by
* the GUI for new private instances, then the GUI overrides some of the
* defaults with user args. */
private static String[] ALGOBASES = new String[0];
public static String[] algos() { return ALGOBASES; }
private static String[] SCHEMAS = new String[0];
private static ModelBuilder[] BUILDERS = new ModelBuilder[0];
/** One-time start-up only ModelBuilder, endlessly cloned by the GUI for the
* default settings. */
protected ModelBuilder(P parms, boolean startup_once) { this(parms,startup_once,"hex.schemas."); }
protected ModelBuilder(P parms, boolean startup_once, String externalSchemaDirectory ) {
assert startup_once;
_job = null;
_result = null;
_parms = parms;
init(false); // Default cheap init
String base = getClass().getSimpleName().toLowerCase();
if( ArrayUtils.find(ALGOBASES,base) != -1 )
throw H2O.fail("Only called once at startup per ModelBuilder, and "+base+" has already been called");
ALGOBASES = Arrays.copyOf(ALGOBASES,ALGOBASES.length+1);
BUILDERS = Arrays.copyOf(BUILDERS ,BUILDERS .length+1);
SCHEMAS = Arrays.copyOf(SCHEMAS ,SCHEMAS .length+1);
ALGOBASES[ALGOBASES.length-1] = base;
BUILDERS [BUILDERS .length-1] = this;
SCHEMAS [SCHEMAS .length-1] = externalSchemaDirectory;
}
/** gbm -> GBM, deeplearning -> DeepLearning */
public static String algoName(String urlName) { return BUILDERS[ArrayUtils.find(ALGOBASES,urlName)]._parms.algoName(); }
/** gbm -> hex.tree.gbm.GBM, deeplearning -> hex.deeplearning.DeepLearning */
public static String javaName(String urlName) { return BUILDERS[ArrayUtils.find(ALGOBASES,urlName)]._parms.javaName(); }
/** gbm -> GBMParameters */
public static String paramName(String urlName) { return algoName(urlName)+"Parameters"; }
/** gbm -> "hex.schemas." ; custAlgo -> "org.myOrg.schemas." */
public static String schemaDirectory(String urlName) { return SCHEMAS[ArrayUtils.find(ALGOBASES,urlName)]; }
/** Factory method to create a ModelBuilder instance for a given algo name.
* Shallow clone of both the default ModelBuilder instance and its Parameters. */
public static <B extends ModelBuilder> B make(String algo, Job job, Key<Model> result) {
int idx = ArrayUtils.find(ALGOBASES,algo.toLowerCase());
assert idx != -1 : "Unregistered algorithm "+algo;
B mb = (B)BUILDERS[idx].clone();
mb._job = job;
mb._result = result;
mb._parms = BUILDERS[idx]._parms.clone();
return mb;
}
/** All the parameters required to build the model. */
public P _parms; // Not final, so CV can set-after-clone
/** Training frame: derived from the parameter's training frame, excluding
* all ignored columns, all constant and bad columns, perhaps flipping the
* response column to a Categorical, etc. */
public final Frame train() { return _train; }
protected transient Frame _train;
/** Validation frame: derived from the parameter's validation frame, excluding
* all ignored columns, all constant and bad columns, perhaps flipping the
* response column to a Categorical, etc. Is null if no validation key is set. */
protected final Frame valid() { return _valid; }
protected transient Frame _valid;
// TODO: tighten up the type
// Map the algo name (e.g., "deeplearning") to the builder class (e.g., DeepLearning.class) :
private static final Map<String, Class<? extends ModelBuilder>> _builders = new HashMap<>();
// Map the Model class (e.g., DeepLearningModel.class) to the algo name (e.g., "deeplearning"):
private static final Map<Class<? extends Model>, String> _model_class_to_algo = new HashMap<>();
// Map the simple algo name (e.g., deeplearning) to the full algo name (e.g., "Deep Learning"):
private static final Map<String, String> _algo_to_algo_full_name = new HashMap<>();
// Map the algo name (e.g., "deeplearning") to the Model class (e.g., DeepLearningModel.class):
private static final Map<String, Class<? extends Model>> _algo_to_model_class = new HashMap<>();
/** Train response vector. */
public Vec response(){return _response;}
/** Validation response vector. */
public Vec vresponse(){return _vresponse == null ? _response : _vresponse;}
abstract protected class Driver extends H2O.H2OCountedCompleter<Driver> {
protected Driver(){ super(); }
protected Driver(H2O.H2OCountedCompleter completer){ super(completer); }
}
/** Method to launch training of a Model, based on its parameters. */
final public Job<M> trainModel() {
if (error_count() > 0)
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(this);
_start_time = System.currentTimeMillis();
if( !nFoldCV() )
return _job.start(trainModelImpl(), _parms.progressUnits());
// cross-validation needs to be forked off to allow continuous (non-blocking) progress bar
return _job.start(new H2O.H2OCountedCompleter() {
@Override
public void compute2() {
computeCrossValidation();
tryComplete();
}
}, (1/*for all pre-fold work*/+nFoldWork()+1/*for all the post-fold work*/) * _parms.progressUnits());
}
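// Progress accounting sketch: for _nfolds = 5 the job budget above is
// (1 + 5 + 1) * progressUnits -- one unit for the pre-fold work (fold assignment
// and weight vectors), one per CV model, and one for the main model and post-fold
// scoring.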
/** Train a model as part of a larger Job; the Job already exists and has started. */
final public M trainModelNested() {
if (error_count() > 0)
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(this);
_start_time = System.currentTimeMillis();
if( !nFoldCV() ) trainModelImpl().compute2();
else computeCrossValidation();
return _result.get();
}
/** Model-specific implementation of model training
* @return A F/J Job, which, when executed, does the build. F/J is NOT started. */
abstract protected Driver trainModelImpl();
/**
* How many models should be trained in parallel during N-fold cross-validation?
* By default, all CV models are trained in parallel only for small data, when
* parallelism is enabled and no time limit is set; otherwise one model at a time.
* Each builder can override this logic based on parameters, dataset size, etc.
* @return How many models to train in parallel during cross-validation
*/
protected int nModelsInParallel() {
if (!_parms._parallelize_cross_validation || _parms._max_runtime_secs != 0) return 1; //user demands serial building (or we need to honor the time constraints for all CV models equally)
if (_train.byteSize() < 1e6) return _parms._nfolds; //for small data, parallelize over CV models
return 1; //safe fallback
}
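// Example: a 500 KB training frame with _nfolds = 5, _parallelize_cross_validation
// enabled and _max_runtime_secs == 0 trains all 5 CV models concurrently; a larger
// frame (>= 1 MB) falls back to one model at a time.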
// Work for each requested fold
private int nFoldWork() {
if( _parms._fold_column == null ) return _parms._nfolds;
Vec f = train().vec(_parms._fold_column);
Vec fc = VecUtils.toCategoricalVec(f);
int N = fc.domain().length;
fc.remove();
return N;
}
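// Example: a fold column containing the values {0, 1, 2} yields a categorical
// domain of length 3, so N = 3 folds of work, independent of _parms._nfolds.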
/**
* Default implementation of N-fold cross-validation.
* Builds N+1 models: each of the N CV models has train+validation metrics, and the
* main model gets N-fold cross-validated validation metrics.
*/
public void computeCrossValidation() {
assert _job.isRunning(); // main Job is still running
final Integer N = nFoldWork();
init(false);
try {
Scope.enter();
// Step 1: Assign each row to a fold
final Vec foldAssignment = cv_AssignFold(N);
// Step 2: Make 2*N binary weight vectors
final Vec[] weights = cv_makeWeights(N,foldAssignment);
_job.update(1); // Did the major pre-fold work
// Step 3: Build N train & validation frames; build N ModelBuilders; error check them all
ModelBuilder<M, P, O> cvModelBuilders[] = cv_makeFramesAndBuilders(N,weights);
// Step 4: Run all the CV models and launch the main model
H2O.H2OCountedCompleter mainMB = cv_buildModels(N, cvModelBuilders);
// Step 5: Score the CV models
ModelMetrics.MetricBuilder mbs[] = cv_scoreCVModels(N, weights, cvModelBuilders);
// wait for completion of the main model
if (mainMB!=null) mainMB.join();
// Step 6: Combine cross-validation scores; compute main model x-val
// scores; compute gains/lifts
cv_mainModelScores(N, mbs, cvModelBuilders);
} finally {
Scope.exit();
}
}
// Step 1: Assign each row to a fold
// TODO: Implement better splitting algo (with Strata if response is categorical)
public Vec cv_AssignFold(int N) {
Vec fold = train().vec(_parms._fold_column);
if( fold != null ) {
if( !fold.isInt() ||
(!(fold.min() == 0 && fold.max() == N-1) &&
!(fold.min() == 1 && fold.max() == N ) )) // Allow 0 to N-1, or 1 to N
throw new H2OIllegalArgumentException("Fold column must be either categorical or contiguous integers from 0..N-1 or 1..N");
return fold;
}
final long seed = _parms.nFoldSeed();
Log.info("Creating " + N + " cross-validation splits with random number seed: " + seed);
switch( _parms._fold_assignment ) {
case AUTO:
case Random: return ASTKFold. kfoldColumn(train().anyVec().makeZero(),N,seed);
case Modulo: return ASTKFold. moduloKfoldColumn(train().anyVec().makeZero(),N );
case Stratified: return ASTKFold.stratifiedKFoldColumn(response(),N,seed);
default: throw H2O.unimpl();
}
}
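// Illustrative fold assignments for N = 3 over rows 0..5:
//   Modulo:     row % 3 -> 0, 1, 2, 0, 1, 2 (deterministic and balanced)
//   Random:     seeded per-row RNG % 3 -> e.g. 2, 0, 2, 1, 0, 1 (balanced in expectation)
//   Stratified: like Random, but samples within each response class so every fold
//               keeps roughly the original class distribution.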
// Step 2: Make 2*N binary weight vectors
public Vec[] cv_makeWeights( final int N, Vec foldAssignment ) {
String origWeightsName = _parms._weights_column;
Vec origWeight = origWeightsName != null ? train().vec(origWeightsName) : train().anyVec().makeCon(1.0);
Frame folds_and_weights = new Frame(new Vec[]{foldAssignment, origWeight});
Vec[] weights = new MRTask() {
@Override public void map(Chunk chks[], NewChunk nchks[]) {
Chunk fold = chks[0], orig = chks[1];
for( int row=0; row< orig._len; row++ ) {
int foldAssignment = (int)fold.at8(row) % N;
double w = orig.atd(row);
for( int f = 0; f < N; f++ ) {
boolean holdout = foldAssignment == f;
nchks[2*f+0].addNum(holdout ? 0 : w);
nchks[2*f+1].addNum(holdout ? w : 0);
}
}
}
}.doAll(2*N,Vec.T_NUM,folds_and_weights).outputFrame().vecs();
if( _parms._fold_column == null ) foldAssignment.remove();
if( origWeightsName == null ) origWeight.remove(); // Cleanup temp
for( Vec weight : weights )
if( weight.isConst() )
throw new H2OIllegalArgumentException("Not enough data to create " + N + " random cross-validation splits. Either reduce nfolds, specify a larger dataset (or specify another random number seed, if applicable).");
return weights;
}
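// Weight layout sketch for N = 3 and a row with original weight w assigned to fold 1:
//   weights[0] = w   weights[1] = 0   (fold 0: row is training data)
//   weights[2] = 0   weights[3] = w   (fold 1: row is held out for validation)
//   weights[4] = w   weights[5] = 0   (fold 2: row is training data)
// i.e. weights[2*f] is the training weight and weights[2*f+1] the holdout weight
// for fold f; each pair partitions the original weight column.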
// Step 3: Build N train & validation frames; build N ModelBuilders; error check them all
public ModelBuilder<M, P, O>[] cv_makeFramesAndBuilders( int N, Vec[] weights ) {
final long old_cs = _parms.checksum();
final String origDest = _result.toString();
final String weightName = "__internal_cv_weights__";
if (train().find(weightName) != -1) throw new H2OIllegalArgumentException("Frame cannot contain a Vec called '" + weightName + "'.");
Frame cv_fr = new Frame(train().names(),train().vecs());
if( _parms._weights_column!=null ) cv_fr.remove( _parms._weights_column ); // The CV frames will have their own private weight column
ModelBuilder<M, P, O>[] cvModelBuilders = new ModelBuilder[N];
for( int i=0; i<N; i++ ) {
String identifier = origDest + "_cv_" + (i+1);
// Training/Validation share the same data, but will have exclusive weights
Frame cvTrain = new Frame(Key.make(identifier+"_train"),cv_fr.names(),cv_fr.vecs());
cvTrain.add(weightName, weights[2*i]);
DKV.put(cvTrain);
Frame cvValid = new Frame(Key.make(identifier+"_valid"),cv_fr.names(),cv_fr.vecs());
cvValid.add(weightName, weights[2*i+1]);
DKV.put(cvValid);
// Shallow clone - not everything is a private copy!!!
ModelBuilder<M, P, O> cv_mb = (ModelBuilder)this.clone();
cv_mb._result = Key.make(identifier); // Each submodel gets its own key
cv_mb._parms = (P) _parms.clone();
// Fix up some parameters of the clone
cv_mb._parms._weights_column = weightName; // All submodels have a weight column, which the main model does not
cv_mb._parms._train = cvTrain._key; // Each submodel trains/validates on its own CV frames
cv_mb._parms._valid = cvValid._key;
cv_mb._parms._fold_assignment = Model.Parameters.FoldAssignmentScheme.AUTO;
cv_mb._parms._nfolds = 0; // Each submodel is not itself folded
cv_mb.init(false); // Arg check submodels
// Error-check all the cross-validation Builders before launching any
if( cv_mb.error_count() > 0 ) // Gather all submodel error messages
for( ValidationMessage vm : cv_mb._messages )
message(vm._log_level, vm._field_name, vm._message);
cvModelBuilders[i] = cv_mb;
}
if( error_count() > 0 ) // Error in any submodel
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(this);
// check that this Job's original _params haven't changed
assert old_cs == _parms.checksum();
return cvModelBuilders;
}
// Step 4: Run all the CV models and launch the main model
public H2O.H2OCountedCompleter cv_buildModels(int N, ModelBuilder<M, P, O>[] cvModelBuilders ) {
H2O.H2OCountedCompleter submodel_tasks[] = new H2O.H2OCountedCompleter[N];
int nRunning=0;
for( int i=0; i<N; ++i ) {
if( _job.stop_requested() ) break; // Stop launching but still must block for all async jobs
Log.info("Building cross-validation model " + (i + 1) + " / " + N + ".");
cvModelBuilders[i]._start_time = System.currentTimeMillis();
submodel_tasks[i] = H2O.submitTask(cvModelBuilders[i].trainModelImpl());
if(++nRunning == nModelsInParallel()) //piece-wise advance in training the CV models
while (nRunning>0) submodel_tasks[i+1-nRunning--].join();
}
for( int i=0; i<N; ++i ) //all sub-models must be completed before the main model can be built
submodel_tasks[i].join();
// Now do the main model
if( _job.stop_requested() ) return null;
assert _job.isRunning();
Log.info("Building main model.");
_start_time = System.currentTimeMillis();
modifyParmsForCrossValidationMainModel(cvModelBuilders); //tell the main model that it shouldn't stop early either
H2O.H2OCountedCompleter mainMB = H2O.submitTask(trainModelImpl()); //non-blocking: start the main model
return mainMB;
}
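// Batching sketch: with N = 4 and nModelsInParallel() = 2 the loop above submits
// models 0 and 1, joins both, then submits 2 and 3 and joins both -- it advances
// in waves of nModelsInParallel() tasks, never oversubscribing the cluster.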
// Step 5: Score the CV models
public ModelMetrics.MetricBuilder[] cv_scoreCVModels(int N, Vec[] weights, ModelBuilder<M, P, O>[] cvModelBuilders) {
if( _job.stop_requested() ) return null;
ModelMetrics.MetricBuilder[] mbs = new ModelMetrics.MetricBuilder[N];
Futures fs = new Futures();
for (int i=0; i<N; ++i) {
if( _job.stop_requested() ) return null; //don't waste time scoring if the CV run is stopped
Frame cvValid = cvModelBuilders[i].valid();
Frame adaptFr = new Frame(cvValid);
M cvModel = cvModelBuilders[i].dest().get();
cvModel.adaptTestForTrain(adaptFr, true, !isSupervised());
mbs[i] = cvModel.scoreMetrics(adaptFr);
if (nclasses() == 2 /* need holdout predictions for gains/lift table */ || _parms._keep_cross_validation_predictions) {
String predName = "prediction_" + cvModelBuilders[i]._result.toString();
cvModel.predictScoreImpl(cvValid, adaptFr, predName);
}
// free resources as early as possible
if (adaptFr != null) {
Model.cleanup_adapt(adaptFr, cvValid);
DKV.remove(adaptFr._key,fs);
}
DKV.remove(cvModelBuilders[i]._parms._train,fs);
DKV.remove(cvModelBuilders[i]._parms._valid,fs);
weights[2*i ].remove(fs);
weights[2*i+1].remove(fs);
}
fs.blockForPending();
return mbs;
}
// Step 6: Combine cross-validation scores; compute main model x-val scores; compute gains/lifts
public void cv_mainModelScores(int N, ModelMetrics.MetricBuilder mbs[], ModelBuilder<M, P, O> cvModelBuilders[]) {
if( _job.stop_requested() ) return;
assert _job.isRunning();
M mainModel = _result.get();
// Compute and put the cross-validation metrics into the main model
Log.info("Computing " + N + "-fold cross-validation metrics.");
mainModel._output._cross_validation_models = new Key[N];
Key<Frame>[] predKeys = new Key[N];
mainModel._output._cross_validation_predictions = _parms._keep_cross_validation_predictions ? predKeys : null;
for (int i = 0; i < N; ++i) {
if (i > 0) mbs[0].reduce(mbs[i]);
Key<M> cvModelKey = cvModelBuilders[i]._result;
mainModel._output._cross_validation_models[i] = cvModelKey;
predKeys[i] = Key.make("prediction_" + cvModelKey.toString());
}
Frame preds = null;
//stitch together holdout predictions into one Vec, to compute the Gains/Lift table
if (nclasses() == 2) {
Vec[] p1s = new Vec[N];
for (int i=0;i<N;++i) {
p1s[i] = ((Frame)DKV.getGet(predKeys[i])).lastVec();
}
Frame p1combined = new HoldoutPredictionCombiner().doAll(1,Vec.T_NUM,new Frame(p1s)).outputFrame(new String[]{"p1"},null);
Vec p1 = p1combined.anyVec();
preds = new Frame(new Vec[]{p1, p1, p1}); //pretend to have labels,p0,p1, but will only need p1 anyway
}
// Keep or toss predictions
for (Key<Frame> k : predKeys) {
Frame fr = DKV.getGet(k);
if( fr != null ) {
if (_parms._keep_cross_validation_predictions) Scope.untrack(fr.keys());
else fr.remove();
}
}
mainModel._output._cross_validation_metrics = mbs[0].makeModelMetrics(mainModel, _parms.train(), null, preds);
if (preds!=null) preds.remove();
mainModel._output._cross_validation_metrics._description = N + "-fold cross-validation on training data (Metrics computed for combined holdout predictions)";
Log.info(mainModel._output._cross_validation_metrics.toString());
// Now, the main model is complete (has cv metrics)
DKV.put(mainModel);
}
// Helper to combine multiple holdout prediction Vecs (each only 1/N-th filled with non-zeros) into one Vec
private static class HoldoutPredictionCombiner extends MRTask<HoldoutPredictionCombiner> {
@Override
public void map(Chunk[] cs, NewChunk[] nc) {
double [] vals = new double[cs[0].len()];
for (int i=0;i<cs.length;++i)
for (int row = 0; row < cs[0].len(); ++row)
vals[row] += cs[i].atd(row);
nc[0].setDoubles(vals);
}
}
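// Why row-wise summation works: per the note above, each prediction Vec is non-zero
// only for its own fold's holdout rows, and each row belongs to exactly one fold,
// so adding the N Vecs reconstructs one full-length holdout prediction column
// without overlap.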
/** Override for model-specific checks / modifications to _parms for the main model during N-fold cross-validation.
* For example, the model might need to be told to not do early stopping.
*/
public void modifyParmsForCrossValidationMainModel(ModelBuilder<M, P, O>[] cvModelBuilders) { }
/** @return Whether n-fold cross-validation is done */
public boolean nFoldCV() {
return _parms._fold_column != null || _parms._nfolds != 0;
}
/** List containing the categories of models that this builder can
* build. Each ModelBuilder must have one of these. */
abstract public ModelCategory[] can_build();
/** Visibility for this algo: is it always visible, is it beta (always
* visible but with a note in the UI) or is it experimental (hidden by
* default, visible in the UI if the user gives an "experimental" flag at
* startup); test-only builders are "experimental" */
public enum BuilderVisibility { Experimental, Beta, Stable }
public BuilderVisibility builderVisibility() { return BuilderVisibility.Stable; }
/** Clear whatever was done by init() so it can be run again. */
public void clearInitState() {
clearValidationErrors();
}
protected boolean logMe() { return true; }
public boolean isSupervised(){return false;}
protected transient Vec _response; // Handy response column
protected transient Vec _vresponse; // Handy response column
protected transient Vec _offset; // Handy offset column
protected transient Vec _weights; // observation weight column
protected transient Vec _fold; // fold id column
public boolean hasOffsetCol(){ return _parms._offset_column != null;} // don't look at transient Vec
public boolean hasWeightCol(){return _parms._weights_column != null;} // don't look at transient Vec
public boolean hasFoldCol(){return _parms._fold_column != null;} // don't look at transient Vec
public int numSpecialCols() { return (hasOffsetCol() ? 1 : 0) + (hasWeightCol() ? 1 : 0) + (hasFoldCol() ? 1 : 0); }
// no hasResponse, call isSupervised instead (response is mandatory if isSupervised is true)
protected int _nclass; // Number of classes; 1 for regression; 2+ for classification
public int nclasses(){return _nclass;}
public final boolean isClassifier() { return nclasses() > 1; }
/**
* Find and set the response/weights/offset/fold columns and move them all to the end of the frame.
* @return number of non-feature vecs
*/
protected int separateFeatureVecs() {
int res = 0;
if(_parms._weights_column != null) {
Vec w = _train.remove(_parms._weights_column);
if(w == null)
error("_weights_column","Weights column '" + _parms._weights_column + "' not found in the training frame");
else {
if(!w.isNumeric())
error("_weights_column","Invalid weights column '" + _parms._weights_column + "', weights must be numeric");
_weights = w;
if(w.naCnt() > 0)
error("_weights_column","Weights cannot have missing values.");
if(w.min() < 0)
error("_weights_column","Weights must be >= 0");
if(w.max() == 0)
error("_weights_column","Max. weight must be > 0");
_train.add(_parms._weights_column, w);
++res;
}
} else {
_weights = null;
assert(!hasWeightCol());
}
if(_parms._offset_column != null) {
Vec o = _train.remove(_parms._offset_column);
if(o == null)
error("_offset_column","Offset column '" + _parms._offset_column + "' not found in the training frame");
else {
if(!o.isNumeric())
error("_offset_column","Invalid offset column '" + _parms._offset_column + "', offset must be numeric");
_offset = o;
if(o.naCnt() > 0)
error("_offset_column","Offset cannot have missing values.");
if(_weights == _offset)
error("_offset_column", "Offset must be different from weights");
_train.add(_parms._offset_column, o);
++res;
}
} else {
_offset = null;
assert(!hasOffsetCol());
}
if(_parms._fold_column != null) {
Vec f = _train.remove(_parms._fold_column);
if(f == null)
error("_fold_column","Fold column '" + _parms._fold_column + "' not found in the training frame");
else {
if(!f.isInt() && !f.isCategorical())
error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold must be integer or categorical");
if(f.min() < 0)
error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold must be non-negative");
if(f.isConst())
error("_fold_column","Invalid fold column '" + _parms._fold_column + "', fold cannot be constant");
_fold = f;
if(f.naCnt() > 0)
error("_fold_column","Fold cannot have missing values.");
if(_fold == _weights)
error("_fold_column", "Fold must be different from weights");
if(_fold == _offset)
error("_fold_column", "Fold must be different from offset");
_train.add(_parms._fold_column, f);
++res;
}
} else {
_fold = null;
assert(!hasFoldCol());
}
if(isSupervised() && _parms._response_column != null) {
_response = _train.remove(_parms._response_column);
if (_response == null) {
if (isSupervised())
error("_response_column", "Response column '" + _parms._response_column + "' not found in the training frame");
} else {
if(_response == _offset)
error("_response_column", "Response column must be different from offset_column");
if(_response == _weights)
error("_response_column", "Response column must be different from weights_column");
if(_response == _fold)
error("_response_column", "Response column must be different from fold_column");
_train.add(_parms._response_column, _response);
++res;
}
} else {
_response = null;
}
return res;
}
protected boolean ignoreStringColumns(){return true;}
protected boolean ignoreConstColumns(){return _parms._ignore_const_cols;}
/**
* Ignore constant columns, columns with all NAs, and string columns.
* @param npredictors number of special (non-feature) vecs kept at the end of the frame, which are skipped
* @param expensive whether to log the dropped columns
*/
protected void ignoreBadColumns(int npredictors, boolean expensive){
// Drop all-constant and all-bad columns.
if(_parms._ignore_const_cols)
new FilterCols(npredictors) {
@Override protected boolean filter(Vec v) {
return (ignoreConstColumns() && v.isConst()) || v.isBad() || (ignoreStringColumns() && v.isString()); }
}.doIt(_train,"Dropping constant columns: ",expensive);
}
/**
* Override this method to call error() if the model is expected to not fit in memory, and say why
*/
protected void checkMemoryFootPrint() {}
transient double [] _distribution;
transient protected double [] _priorClassDist;
protected boolean computePriorClassDistribution(){
return isClassifier();
}
/** A list of field validation issues. */
public ValidationMessage[] _messages = new ValidationMessage[0];
private int _error_count = -1; // -1 ==> init not run yet, for those Jobs that have an init, like ModelBuilder. Note, this counts ONLY errors, not WARNs etc.
public int error_count() { assert _error_count >= 0 : "init() not run yet"; return _error_count; }
public void hide (String field_name, String message) { message(Log.TRACE, field_name, message); }
public void info (String field_name, String message) { message(Log.INFO , field_name, message); }
public void warn (String field_name, String message) { message(Log.WARN , field_name, message); }
public void error(String field_name, String message) { message(Log.ERRR , field_name, message); _error_count++; }
public void clearValidationErrors() {
_messages = new ValidationMessage[0];
_error_count = 0;
}
public void message(byte log_level, String field_name, String message) {
_messages = Arrays.copyOf(_messages, _messages.length + 1);
_messages[_messages.length - 1] = new ValidationMessage(log_level, field_name, message);
}
/** Get a string representation of only the ERROR ValidationMessages (e.g., to use in an exception throw). */
public String validationErrors() {
StringBuilder sb = new StringBuilder();
for( ValidationMessage vm : _messages )
if( vm._log_level == Log.ERRR )
sb.append(vm.toString()).append("\n");
return sb.toString();
}
/** Can be an ERROR, meaning the parameters can't be used as-is,
* a TRACE, which means the specified field should be hidden given
* the values of other fields, or a WARN or INFO for informative
* messages to the user. */
public static final class ValidationMessage extends Iced {
final byte _log_level; // See util/Log.java for levels
final String _field_name;
final String _message;
public ValidationMessage(byte log_level, String field_name, String message) {
_log_level = log_level;
_field_name = field_name;
_message = message;
Log.log(log_level,field_name + ": " + message);
}
public int log_level() { return _log_level; }
@Override public String toString() { return Log.LVLS[_log_level] + " on field: " + _field_name + ": " + _message; }
}
/** Initialize the ModelBuilder, validating all arguments and preparing the
* training frame. This call is expected to be overridden in the subclasses
* and each subclass will start with "super.init();". This call is made by
* the front-end whenever the GUI is clicked, and needs to be fast whenever
* {@code expensive} is false; it will be called once again at the start of
* model building {@link #trainModel()} with expensive set to true.
*<p>
* The incoming training frame (and validation frame) will have ignored
* columns dropped out, plus whatever work the parent init did.
*<p>
* NOTE: The front end initially calls this through the parameters validation
* endpoint with no training_frame, so each subclass's {@code init()} method
* has to work correctly with the training_frame missing.
*<p>
*/
public void init(boolean expensive) {
// Log parameters
if( expensive && logMe() ) {
Log.info("Building H2O " + this.getClass().getSimpleName().toString() + " model with these parameters:");
Log.info(new String(_parms.writeJSON(new AutoBuffer()).buf()));
}
// NOTE: allow re-init:
clearInitState();
assert _parms != null; // Parms must already be set
if( _parms._train == null ) {
if (expensive)
error("_train","Missing training frame");
return;
}
Frame tr = _parms.train();
if( tr == null ) { error("_train","Missing training frame: "+_parms._train); return; }
_train = new Frame(null /* not putting this into KV */, tr._names.clone(), tr.vecs().clone());
if (_parms._nfolds < 0 || _parms._nfolds == 1) {
error("_nfolds", "nfolds must be either 0 or >1.");
}
if (_parms._nfolds > 1 && _parms._nfolds > train().numRows()) {
error("_nfolds", "nfolds cannot be larger than the number of rows (" + train().numRows() + ").");
}
if (_parms._fold_column != null) {
hide("_fold_assignment", "Fold assignment is ignored when a fold column is specified.");
if (_parms._nfolds > 1) {
error("_nfolds", "nfolds cannot be specified at the same time as a fold column.");
} else {
hide("_nfolds", "nfolds is ignored when a fold column is specified.");
}
if (_parms._fold_assignment != Model.Parameters.FoldAssignmentScheme.AUTO) {
error("_fold_assignment", "Fold assignment is not allowed in conjunction with a fold column.");
}
}
if (_parms._nfolds > 1) {
hide("_fold_column", "Fold column is ignored when nfolds > 1.");
}
// hide cross-validation parameters unless cross-val is enabled
if (!nFoldCV()) {
hide("_keep_cross_validation_predictions", "Only for cross-validation.");
hide("_fold_assignment", "Only for cross-validation.");
if (_parms._fold_assignment != Model.Parameters.FoldAssignmentScheme.AUTO) {
error("_fold_assignment", "Fold assignment is only allowed for cross-validation.");
}
}
if (_parms._distribution != Distribution.Family.tweedie) {
hide("_tweedie_power", "Only for Tweedie Distribution.");
}
if (_parms._tweedie_power <= 1 || _parms._tweedie_power >= 2) {
error("_tweedie_power", "Tweedie power must be between 1 and 2 (exclusive).");
}
// Drop explicitly dropped columns
if( _parms._ignored_columns != null ) {
_train.remove(_parms._ignored_columns);
if( expensive ) Log.info("Dropping ignored columns: "+Arrays.toString(_parms._ignored_columns));
}
// Rebalance train and valid datasets
if (expensive && error_count() == 0) {
_train = rebalance(_train, false, _result + ".temporary.train");
_valid = rebalance(_valid, false, _result + ".temporary.valid");
}
// Drop all non-numeric columns (e.g., String and UUID). No current algo
// can use them, and otherwise all algos will then be forced to remove
// them. Text algos (grep, word2vec) take raw text columns - which are
// numeric (arrays of bytes).
ignoreBadColumns(separateFeatureVecs(), expensive);
// Check that at least some columns are not-constant and not-all-NAs
if( _train.numCols() == 0 )
error("_train","There are no usable columns to generate model");
if(isSupervised()) {
if(_response != null) {
if (_parms._distribution != Distribution.Family.tweedie) {
hide("_tweedie_power", "Tweedie power is only used for Tweedie distribution.");
}
if (_parms._distribution != Distribution.Family.quantile) {
hide("_quantile_alpha", "Quantile (alpha) is only used for Quantile regression.");
}
if (expensive) checkDistributions();
_nclass = _response.isCategorical() ? _response.cardinality() : 1;
if (_response.isConst())
error("_response","Response cannot be constant.");
}
if (! _parms._balance_classes)
hide("_max_after_balance_size", "Balance classes is false, hide max_after_balance_size");
else if (_parms._weights_column != null && _weights != null && !_weights.isBinary())
error("_balance_classes", "Balance classes and observation weights are not currently supported together.");
if( _parms._max_after_balance_size <= 0.0 )
error("_max_after_balance_size","Max size after balancing needs to be positive, suggest 1.0f");
if( _train != null ) {
if (_train.numCols() <= 1)
error("_train", "Training data must have at least 2 features (incl. response).");
if( null == _parms._response_column) {
error("_response_column", "Response column parameter not set.");
return;
}
if(_response != null && computePriorClassDistribution()) {
if (isClassifier() && isSupervised()) {
MRUtils.ClassDist cdmt =
_weights != null ? new MRUtils.ClassDist(nclasses()).doAll(_response, _weights) : new MRUtils.ClassDist(nclasses()).doAll(_response);
_distribution = cdmt.dist();
_priorClassDist = cdmt.rel_dist();
} else { // Regression; only 1 "class"
_distribution = new double[]{ (_weights != null ? _weights.mean() : 1.0) * train().numRows() };
_priorClassDist = new double[]{1.0f};
}
}
}
if( !isClassifier() ) {
hide("_balance_classes", "Balance classes is only applicable to classification problems.");
hide("_class_sampling_factors", "Class sampling factors is only applicable to classification problems.");
hide("_max_after_balance_size", "Max after balance size is only applicable to classification problems.");
hide("_max_confusion_matrix_size", "Max confusion matrix size is only applicable to classification problems.");
}
if (_nclass <= 2) {
hide("_max_hit_ratio_k", "Max K-value for hit ratio is only applicable to multi-class classification problems.");
hide("_max_confusion_matrix_size", "Only for multi-class classification problems.");
}
if( !_parms._balance_classes ) {
hide("_max_after_balance_size", "Only used with balanced classes");
hide("_class_sampling_factors", "Class sampling factors is only applicable if balancing classes.");
}
}
else {
hide("_response_column", "Ignored for unsupervised methods.");
hide("_balance_classes", "Ignored for unsupervised methods.");
hide("_class_sampling_factors", "Ignored for unsupervised methods.");
hide("_max_after_balance_size", "Ignored for unsupervised methods.");
hide("_max_confusion_matrix_size", "Ignored for unsupervised methods.");
_response = null;
_vresponse = null;
_nclass = 1;
}
if( _nclass > Model.Parameters.MAX_SUPPORTED_LEVELS ) {
error("_nclass", "Too many levels in response column: " + _nclass + ", maximum supported number of classes is " + Model.Parameters.MAX_SUPPORTED_LEVELS + ".");
}
// Build the validation set to be compatible with the training set.
// Toss out extra columns, complain about missing ones, remap categoricals
Frame va = _parms.valid(); // User-given validation set
if (va != null) {
_valid = new Frame(null /* not putting this into KV */, va._names.clone(), va.vecs().clone());
try {
String[] msgs = Model.adaptTestForTrain(_train._names, _parms._weights_column, _parms._offset_column, _parms._fold_column, null, _train.domains(), _valid, _parms.missingColumnsType(), expensive, true);
_vresponse = _valid.vec(_parms._response_column);
if (_vresponse == null && _parms._response_column != null)
error("_validation_frame", "Validation frame must have a response column '" + _parms._response_column + "'.");
if (expensive) {
for (String s : msgs) {
Log.info(s);
warn("_valid", s);
}
}
assert !expensive || (_valid == null || Arrays.equals(_train._names, _valid._names));
} catch (IllegalArgumentException iae) {
error("_valid", iae.getMessage());
}
} else {
_valid = null;
_vresponse = null;
}
if (_parms._checkpoint != null && DKV.get(_parms._checkpoint) == null) {
error("_checkpoint", "Checkpoint has to point to existing model!");
}
if (_parms._stopping_tolerance < 0) {
error("_stopping_tolerance", "Stopping tolerance must be >= 0.");
}
if (_parms._stopping_tolerance >= 1) {
error("_stopping_tolerance", "Stopping tolerance must be < 1.");
}
if (_parms._stopping_rounds == 0) {
if (_parms._stopping_metric != ScoreKeeper.StoppingMetric.AUTO)
warn("_stopping_metric", "Stopping metric is ignored for _stopping_rounds=0.");
if (_parms._stopping_tolerance != _parms.defaultStoppingTolerance())
warn("_stopping_tolerance", "Stopping tolerance is ignored for _stopping_rounds=0.");
} else if (_parms._stopping_rounds < 0) {
error("_stopping_rounds", "Stopping rounds must be >= 0.");
} else {
if (isClassifier()) {
if (_parms._stopping_metric == ScoreKeeper.StoppingMetric.deviance) {
error("_stopping_metric", "Stopping metric cannot be deviance for classification.");
}
if (nclasses()!=2 && _parms._stopping_metric == ScoreKeeper.StoppingMetric.AUC) {
error("_stopping_metric", "Stopping metric cannot be AUC for multinomial classification.");
}
} else {
if (_parms._stopping_metric == ScoreKeeper.StoppingMetric.misclassification ||
_parms._stopping_metric == ScoreKeeper.StoppingMetric.AUC ||
_parms._stopping_metric == ScoreKeeper.StoppingMetric.logloss)
{
error("_stopping_metric", "Stopping metric cannot be " + _parms._stopping_metric.toString() + " for regression.");
}
}
}
if (_parms._max_runtime_secs < 0) {
error("_max_runtime_secs", "Max runtime (in seconds) must be greater than 0 (or 0 for unlimited).");
}
}
/**
* Rebalance a frame for load balancing
* @param original_fr Input frame
* @param local Whether to create only enough chunks to max out all cores on a single node
* @param name Name of rebalanced frame
* @return Frame that has potentially more chunks
*/
protected Frame rebalance(final Frame original_fr, boolean local, final String name) {
if (original_fr == null) return null;
int chunks = desiredChunks(original_fr, local);
if (original_fr.anyVec().nChunks() >= chunks) {
if (chunks>1)
Log.info(name.substring(name.length()-5)+ " dataset already contains " + original_fr.anyVec().nChunks() +
" chunks. No need to rebalance.");
return original_fr;
}
Log.info("Rebalancing " + name.substring(name.length()-5) + " dataset into " + chunks + " chunks.");
Key newKey = Key.makeUserHidden(name + ".chunks" + chunks);
RebalanceDataSet rb = new RebalanceDataSet(original_fr, newKey, chunks);
H2O.submitTask(rb).join();
Frame rebalanced_fr = DKV.get(newKey).get();
Scope.track(rebalanced_fr);
return rebalanced_fr;
}
/**
* Find the desired number of chunks. If the frame has fewer, it will be rebalanced.
* @return Lower bound on the number of chunks after rebalancing.
*/
protected int desiredChunks(final Frame original_fr, boolean local) {
return Math.min((int) Math.ceil(original_fr.numRows() / 1e3), H2O.NUMCPUS);
}
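// Worked example: 1,000,000 rows -> ceil(1e6 / 1e3) = 1000 candidate chunks, capped
// at H2O.NUMCPUS; assuming a 32-core node, the desired chunk count is 32.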
public void checkDistributions() {
if (_parms._distribution == Distribution.Family.poisson) {
if (_response.min() < 0)
error("_response", "Response must be non-negative for Poisson distribution.");
} else if (_parms._distribution == Distribution.Family.gamma) {
if (_response.min() < 0)
error("_response", "Response must be non-negative for Gamma distribution.");
} else if (_parms._distribution == Distribution.Family.tweedie) {
if (_parms._tweedie_power >= 2 || _parms._tweedie_power <= 1)
error("_tweedie_power", "Tweedie power must be between 1 and 2.");
if (_response.min() < 0)
error("_response", "Response must be non-negative for Tweedie distribution.");
} else if (_parms._distribution == Distribution.Family.quantile) {
if (_parms._quantile_alpha > 1 || _parms._quantile_alpha < 0)
error("_quantile_alpha", "Quantile (alpha) must be between 0 and 1.");
}
}
transient public HashSet<String> _removedCols = new HashSet<>();
abstract class FilterCols {
final int _specialVecs; // special vecs to skip at the end
public FilterCols(int n) {_specialVecs = n;}
abstract protected boolean filter(Vec v);
void doIt( Frame f, String msg, boolean expensive ) {
List<Integer> rmcolsList = new ArrayList<>();
for( int i = 0; i < f.vecs().length - _specialVecs; i++ )
if( filter(f.vecs()[i]) ) rmcolsList.add(i);
if( !rmcolsList.isEmpty() ) {
_removedCols = new HashSet<>(rmcolsList.size());
int[] rmcols = new int[rmcolsList.size()];
for (int i=0;i<rmcols.length;++i) {
rmcols[i]=rmcolsList.get(i);
_removedCols.add(f._names[rmcols[i]]);
}
f.remove(rmcols); //bulk-remove
msg += _removedCols.toString();
warn("_train", msg);
if (expensive) Log.info(msg);
}
}
}
}
|
package org.helioviewer.gl3d.scenegraph.math;
import java.util.ArrayList;
import java.util.List;
public class GL3DMeshSlice {
private List<GL3DVec3d> triangles;
private GL3DVec3d planeNormal;
private List<GL3DVec3d> resultLeft = new ArrayList<GL3DVec3d>();
private List<GL3DVec3d> resultRight = new ArrayList<GL3DVec3d>();
public GL3DMeshSlice() {
triangles = new ArrayList<GL3DVec3d>();
planeNormal = new GL3DVec3d(0, 0, 1);
}
public GL3DMeshSlice(List<GL3DVec3d> triangles, GL3DVec3d planeNormal) {
setInput(triangles, planeNormal);
}
public void setInput(List<GL3DVec3d> triangles, GL3DVec3d planeNormal) {
this.triangles = triangles;
this.planeNormal = planeNormal;
}
public boolean hasBothSides() {
return resultLeft.size() > 0 && resultRight.size() > 0;
}
public List<GL3DVec3d> getOnlyResult()
{
if(resultLeft.size() > 0 && resultRight.size() == 0)
return resultLeft;
if(resultLeft.size() == 0 && resultRight.size() > 0)
return resultRight;
return null;
}
public List<GL3DVec3d> getResultLeft() {
return resultLeft;
}
public List<GL3DVec3d> getResultRight() {
return resultRight;
}
public void slice()
{
resultLeft.clear();
resultRight.clear();
GL3DVec3d[] tri = new GL3DVec3d[3];
int side1;
int side2;
int sign;
// Reset callbacks
//reset();
// Loop through faces, slice it if necessary
// In all cases: Use callback objects to process results
for(int i=0; i<triangles.size()-2; i+=3)
{
tri[0] = triangles.get(i);
tri[1] = triangles.get(i+1);
tri[2] = triangles.get(i+2);
side1 = 0x00;
side2 = 0x00;
// Calculate sign mask for vertices
// .... 0000 0000 0cba -> bit flags for 3 vertices (a, b, c).
// bit is 1 if vertex lies on that side, 0 if not
// if bit is 0 for both sides -> vertex lies directly on the plane
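// Example: vertex a above the plane, b below, c exactly on the plane gives
// side1 = 0b001, side2 = 0b010, so (side1 | side2) == 0b011 != 7 and != 0,
// which routes this triangle to case 3 below.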
sign = pointPlaneSign(tri[2]);
side1 |= (sign== 1 ? 1 : 0); side1 <<= 1;
side2 |= (sign==-1 ? 1 : 0); side2 <<= 1;
sign = pointPlaneSign(tri[1]);
side1 |= (sign== 1 ? 1 : 0); side1 <<= 1;
side2 |= (sign==-1 ? 1 : 0); side2 <<= 1;
sign = pointPlaneSign(tri[0]);
side1 |= (sign== 1 ? 1 : 0);
side2 |= (sign==-1 ? 1 : 0);
// No vertices directly on plane
if((side1 | side2) == 7)
{
// Case 1a) All on side1
if(side2==0)
case1(tri, true);
// Case 1b) All on side2
else if(side1==0)
case1(tri, false);
// Case 2a) 2 on side1, 1 on side2
else if(side2 == (side2 & -side2))
case2(tri, side2, true);
// Case 2b) 1 on side1, 2 on side2
else
case2(tri, side1, false);
}
// Minimum of 1 vertex lies on the plane -> 1 or 2 bits set in total
else if((side1 | side2) != 0)
{
// Min 1 vertex on side1
if(side1>0)
{
// Case 3) 1 on both sides, 1 on plane
if(side2>0)
case3(tri, side1, side2);
// Case 4a) 2 vertices on side1, 1 on plane
else if(side1 != (side1 & -side1))
case1(tri, true); // Handle like case 1
// Case 5a) 1 vertex on side1, 2 on plane
else
case1(tri, true); // Use case 1 without edges
}
// v-- No vertices on side1 -> 1 or 2 vertices on side2 --v
// Case 4b) 2 vertices on side2, 1 on plane
else if(side2 != (side2 & -side2))
case1(tri, false); // Handle like case 1
// Case 5b) 1 vertex on side2, 2 on plane
else
case1(tri, false); // Use case 1 without edges
}
// Case 6) All 3 vertices on the plane
else
case1(tri, true); // Handle like case 1
}
// Finalize callbacks
//finalize();
}
private int pointPlaneSign(GL3DVec3d p)
{
// Plane lies on (0, 0, 0)
double dot = planeNormal.dot(p);
if(dot < -0.0000001)
return -1;
else if(dot > 0.0000001)
return 1;
else
return 0;
}
private GL3DVec3d linePlaneIntersection(GL3DVec3d l1, GL3DVec3d l2)
{
// Plane lies on (0, 0, 0)
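// Solve planeNormal . (l1 + t*(l2 - l1)) = 0 for t:
//   t = -(planeNormal . l1) / (planeNormal . (l2 - l1))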
GL3DVec3d diff = new GL3DVec3d(l2);
diff.subtract(l1);
double t = planeNormal.dot( GL3DVec3d.multiply(l1, -1) ) / planeNormal.dot(diff);
return new GL3DVec3d(
l1.x + t * diff.x,
l1.y + t * diff.y,
l1.z + t * diff.z );
}
private void case1(GL3DVec3d[] tri, boolean left)
{
triangle(tri[0], tri[1], tri[2], left);
}
private void case2(GL3DVec3d[] tri, int side, boolean twoLeft)
{
// Find index of single vertex
int idx = 0;
for(; (side & 1) == 0; ++idx, side >>= 1);
// Calculate intersection points
final int suc = idx==2 ? 0 : idx+1;
final int pre = idx==0 ? 2 : idx-1;
GL3DVec3d a = linePlaneIntersection(tri[idx], tri[suc]); // Successor of the single vertex, counterclockwise
GL3DVec3d b = linePlaneIntersection(tri[idx], tri[pre]); // Predecessor of the single vertex, counterclockwise
// Triangulate and add to volume
if(twoLeft)
{
triangle(tri[suc], tri[pre], a, true);
triangle(tri[pre], b, a, true);
//edge(b, a);
triangle(tri[idx], a, b, false);
}
else
{
triangle(tri[idx], a, b, true);
//edge(a, b);
triangle(tri[suc], tri[pre], a, false);
triangle(tri[pre], b, a, false);
}
}
private void case3(GL3DVec3d[] tri, int side1, int side2)
{
int left = 0;
int right = 0;
int onPlane = 0;
// Assign vertices
for(int i=0; i<=2; ++i, side1>>=1, side2>>=1)
{
if((side1 & 1) == 1)
left = i;
else if((side2 & 1) == 1)
right = i;
else
onPlane = i;
}
// Calculate intersection point
GL3DVec3d a = linePlaneIntersection(tri[left], tri[right]);
// Check winding
if(right == (left==2 ? 0 : left+1))
{
triangle(tri[left], a, tri[onPlane], true);
triangle(tri[right], tri[onPlane], a, false);
//edge(a, tri[onPlane]);
}
else
{
triangle(tri[left], tri[onPlane], a, true);
triangle(tri[right], a, tri[onPlane], false);
//edge(tri[onPlane], a);
}
}
/*private void case5(GL3DVec3d[] tri, int side, boolean left)
{
triangle(tri[0], tri[1], tri[2], left);
// Which is the single vertex?
int idx = 0;
for(; (side & 1) == 0; ++idx, side>>=1);
final int suc = idx==2 ? 0 : idx+1;
final int pre = idx==0 ? 2 : idx-1;
// Add polygon edge
if(left)
edge(*tri[suc], *tri[pre]);
else
edge(*tri[pre], *tri[suc]);
}*/
private void triangle(GL3DVec3d a, GL3DVec3d b, GL3DVec3d c, boolean left)
{
List<GL3DVec3d> result = left ? resultLeft : resultRight;
result.add(new GL3DVec3d(a));
result.add(new GL3DVec3d(b));
result.add(new GL3DVec3d(c));
}
}
|
package water.api;
import hex.schemas.ModelBuilderSchema;
import org.reflections.Reflections;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.exceptions.H2OKeyNotFoundArgumentException;
import water.exceptions.H2ONotFoundArgumentException;
import water.fvec.Frame;
import water.util.*;
import java.lang.annotation.Annotation;
import java.lang.reflect.*;
import java.util.*;
abstract public class Schema<I extends Iced, S extends Schema<I,S>> extends Iced {
// These fields are declared transient so that they do not get included when a schema is serialized into JSON.
private transient Class<I> _impl_class;
private transient int _schema_version;
private transient String _schema_name;
private transient String _schema_type;
/** Default constructor; triggers lazy schema registration.
* @throws water.exceptions.H2OFailException if there is a name collision or
* there is more than one schema which maps to the same Iced class */
public Schema() {
init_meta();
SchemaServer.checkIfRegistered(this);
}
protected void init_meta() {
if (_schema_name != null) return;
_schema_name = this.getClass().getSimpleName();
_schema_version = extractVersionFromSchemaName(_schema_name);
_schema_type = getImplClass().getSimpleName();
}
/** Extract the version number from the schema class name. Returns -1 if
* there's no version number at the end of the classname. */
public static int extractVersionFromSchemaName(String clz_name) {
int idx = clz_name.lastIndexOf('V');
if (idx == -1) return -1;
try { return Integer.valueOf(clz_name.substring(idx+1)); }
catch( NumberFormatException ex) { return -1; }
}
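// e.g. extractVersionFromSchemaName("FrameV3") == 3, "SchemaV99" -> 99, "Iced" -> -1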
/** Get the version number of this schema, for example 3 or 99. Note that 99
* is the "experimental" version, meaning that there are no stability
* guarantees between H2O versions. */
public int getSchemaVersion() { return _schema_version; }
public String getSchemaName() { return _schema_name; }
public String getSchemaType() { return _schema_type; }
/**
* Creates an appropriate implementation object and any child objects, but does not fill them.
* The standard purpose of a createImpl without a fillImpl is to be able to get the default
* values for all the impl's fields.
* <p>
* For objects without children this method does all the required work. For objects
* with children the subclass will need to override, e.g. by calling super.createImpl()
* and then calling createImpl() on its children.
* <p>
* Note that impl objects for schemas which override this method don't need to have
* a default constructor (e.g., a Keyed object constructor can still create and set
* the Key), but they must not fill any fields which can be filled later from the schema.
* <p>
* TODO: We could handle the common case of children with the same field names here
* by finding all of our fields that are themselves Schemas.
*/
public I createImpl() {
try { return getImplClass().newInstance(); }
catch (Exception e) { throw H2O.fail("Exception making a newInstance",e); }
}
protected I fillImpl(I impl, String[] fieldsToSkip) {
PojoUtils.copyProperties(impl, this, PojoUtils.FieldNaming.CONSISTENT, fieldsToSkip); // TODO: make field names in the impl classes consistent and remove
PojoUtils.copyProperties(impl, this, PojoUtils.FieldNaming.DEST_HAS_UNDERSCORES, fieldsToSkip);
return impl;
}
/** Fill an impl object and any children from this schema and its children.
* If a schema doesn't need to adapt any fields, it does not need to override
* this method. */
public I fillImpl(I impl) {
return fillImpl(impl, null);
}
/** Convenience helper which creates and fills an impl object from this schema. */
final public I createAndFillImpl() {
return this.fillImpl(this.createImpl());
}
/** Fill this Schema from the given implementation object. If a schema doesn't need to adapt any fields, it does not need to override this method. */
public S fillFromImpl(I impl) {
return fillFromImpl(impl, null);
}
protected S fillFromImpl(I impl, String[] fieldsToSkip) {
PojoUtils.copyProperties(this, impl, PojoUtils.FieldNaming.ORIGIN_HAS_UNDERSCORES, fieldsToSkip);
PojoUtils.copyProperties(this, impl, PojoUtils.FieldNaming.CONSISTENT, fieldsToSkip); // TODO: make field names in the impl classes consistent and remove
//noinspection unchecked (parameter <S> should be the derived class itself)
return (S) this;
}
/** Return the class of the implementation type parameter I for the
* given Schema class. Used by the metadata facilities and the
* reflection-based field-copying magic in PojoUtils. */
public static Class<? extends Iced> getImplClass(Class<? extends Schema> clz) {
Class<? extends Iced> impl_class = ReflectionUtils.findActualClassParameter(clz, 0);
if (null == impl_class)
Log.warn("Failed to find an impl class for Schema: " + clz);
return impl_class;
}
/** Return the class of the implementation type parameter I for this Schema.
* Used by generic code which deals with arbitrary schemas and their backing
* impl classes. Never returns null. */
public Class<I> getImplClass() {
return _impl_class != null ? _impl_class : (_impl_class = ReflectionUtils.findActualClassParameter(this.getClass(), 0));
}
/**
* Fill this Schema object from a set of parameters.
*
* @param parms parameters - set of tuples (parameter name, parameter value)
* @return this schema
*
* @see #fillFromParms(Properties, boolean)
*/
public S fillFromParms(Properties parms) {
return fillFromParms(parms, true);
}
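// Illustrative usage (hypothetical MyAlgoV3 schema class, not part of this source):
//   Properties p = new Properties();
//   p.setProperty("max_runtime_secs", "60");
//   MyAlgoV3 s = new MyAlgoV3().fillFromParms(p);
// Unknown keys, attempts to set OUTPUT-only fields, and missing required fields
// all surface as H2OIllegalArgumentException.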
public S fillFromParms(Properties parms, boolean checkRequiredFields) {
// Get passed-in fields, assign into Schema
Class thisSchemaClass = this.getClass();
Map<String, Field> fields = new HashMap<>();
Field current = null; // declare here so we can print in catch{}
try {
Class clz = thisSchemaClass;
do {
Field[] some_fields = clz.getDeclaredFields();
for (Field f : some_fields) {
current = f;
if (null == fields.get(f.getName()))
fields.put(f.getName(), f);
}
clz = clz.getSuperclass();
} while (Iced.class.isAssignableFrom(clz.getSuperclass()));
}
catch (SecurityException e) {
throw H2O.fail("Exception accessing field: " + current + " in class: " + this.getClass() + ": " + e);
}
for( String key : parms.stringPropertyNames() ) {
try {
Field f = fields.get(key); // No such field error, if parm is junk
if (null == f) {
throw new H2OIllegalArgumentException("Unknown parameter: " + key, "Unknown parameter in fillFromParms: " + key + " for class: " + this.getClass().toString());
}
int mods = f.getModifiers();
if( Modifier.isTransient(mods) || Modifier.isStatic(mods) ) {
// Attempting to set a transient or static; treat same as junk fieldname
throw new H2OIllegalArgumentException(
"Bad parameter for field: " + key + " for class: " + this.getClass().toString(),
"Bad parameter definition for field: " + key + " in fillFromParms for class: " + this.getClass().toString() + " (field was declared static or transient)");
}
// Only support a single annotation which is an API, and is required
Annotation[] apis = f.getAnnotations();
if( apis.length == 0 ) throw H2O.fail("Broken internal schema; missing API annotation for field: " + key);
API api = (API)apis[0];
// Must have one of these set to be an input field
if( api.direction() == API.Direction.OUTPUT ) {
throw new H2OIllegalArgumentException(
"Attempting to set output field: " + key + " for class: " + this.getClass().toString(),
"Attempting to set output field: " + key + " in fillFromParms for class: " + this.getClass().toString() + " (field was annotated as API.Direction.OUTPUT)");
}
// Parse value and set the field
setField(this, f, key, parms.getProperty(key), api.required(), thisSchemaClass);
} catch( IllegalAccessException iae ) {
// Come here if field is final or private
throw H2O.fail("Broken internal schema; field cannot be private nor final: " + key);
}
}
// At this point everything in 'parms' was set into some field, so we have already
// checked for unknown or extra parms.
// Confirm required fields are set
if (checkRequiredFields) {
for (Field f : fields.values()) {
int mods = f.getModifiers();
if (Modifier.isTransient(mods) || Modifier.isStatic(mods))
continue; // Ignore transient & static
try {
API api = (API) f.getAnnotations()[0]; // TODO: is there a more specific way we can do this?
if (api.required()) {
if (parms.getProperty(f.getName()) == null) {
IcedHashMap.IcedHashMapStringObject values = new IcedHashMap.IcedHashMapStringObject();
values.put("schema", this.getClass().getSimpleName());
values.put("argument", f.getName());
throw new H2OIllegalArgumentException(
"Required field " + f.getName() + " not specified",
"Required field " + f.getName() + " not specified for schema class: " + this.getClass(),
values);
}
}
} catch (ArrayIndexOutOfBoundsException e) {
throw H2O.fail("Missing annotation for API field: " + f.getName());
}
}
}
//noinspection unchecked (parameter <S> should be the derived class itself)
return (S) this;
}
public static <T extends Schema> void setField(T o, Field f, String key, String value, boolean required, Class thisSchemaClass) throws IllegalAccessException {
// Primitive parse by field type
Object parse_result = parse(key, value, f.getType(), required, thisSchemaClass);
if (parse_result != null && f.getType().isArray() && parse_result.getClass().isArray() && (f.getType().getComponentType() != parse_result.getClass().getComponentType())) {
// We have to conform an array of primitives. There's got to be a better way. . .
if (parse_result.getClass().getComponentType() == int.class && f.getType().getComponentType() == Integer.class) {
int[] from = (int[])parse_result;
Integer[] copy = new Integer[from.length];
for (int i = 0; i < from.length; i++)
copy[i] = from[i];
f.set(o, copy);
} else if (parse_result.getClass().getComponentType() == Integer.class && f.getType().getComponentType() == int.class) {
Integer[] from = (Integer[])parse_result;
int[] copy = new int[from.length];
for (int i = 0; i < from.length; i++)
copy[i] = from[i];
f.set(o, copy);
} else if (parse_result.getClass().getComponentType() == Double.class && f.getType().getComponentType() == double.class) {
Double[] from = (Double[])parse_result;
double[] copy = new double[from.length];
for (int i = 0; i < from.length; i++)
copy[i] = from[i];
f.set(o, copy);
} else if (parse_result.getClass().getComponentType() == Float.class && f.getType().getComponentType() == float.class) {
Float[] from = (Float[])parse_result;
float[] copy = new float[from.length];
for (int i = 0; i < from.length; i++)
copy[i] = from[i];
f.set(o, copy);
} else {
throw H2O.fail("Don't know how to cast an array of: " + parse_result.getClass().getComponentType() + " to an array of: " + f.getType().getComponentType());
}
} else {
f.set(o, parse_result);
}
}
static <E> Object parsePrimitve(String s, Class fclz) {
if (fclz.equals(String.class)) return s; // Strings already the right primitive type
if (fclz.equals(int.class)) return parseInteger(s, int.class);
if (fclz.equals(long.class)) return parseInteger(s, long.class);
if (fclz.equals(short.class)) return parseInteger(s, short.class);
if (fclz.equals(boolean.class)) {
if (s.equals("0")) return Boolean.FALSE;
if (s.equals("1")) return Boolean.TRUE;
return Boolean.valueOf(s);
}
if (fclz.equals(byte.class)) return parseInteger(s, byte.class);
if (fclz.equals(double.class)) return Double.valueOf(s);
if (fclz.equals(float.class)) return Float.valueOf(s);
//FIXME: if (fclz.equals(char.class)) return Character.valueOf(s);
throw H2O.fail("Unknown primitive type to parse: " + fclz.getSimpleName());
}
// URL parameter parse
static <E> Object parse(String field_name, String s, Class fclz, boolean required, Class schemaClass) {
if (fclz.isPrimitive() || String.class.equals(fclz)) {
try {
return parsePrimitive(s, fclz);
} catch (NumberFormatException ne) {
String msg = "Illegal argument for field: " + field_name + " of schema: " + schemaClass.getSimpleName() + ": cannot convert \"" + s + "\" to type " + fclz.getSimpleName();
throw new H2OIllegalArgumentException(msg);
}
}
// An array?
if (fclz.isArray()) {
// Get component type
Class<E> afclz = (Class<E>) fclz.getComponentType();
// Result
E[] a = null;
// Handle simple case with null-array
if (s.equals("null") || s.length() == 0) return null;
// Split values
String[] splits; // "".split(",") => {""} so handle the empty case explicitly
if (s.startsWith("[") && s.endsWith("]") ) { // It looks like an array
read(s, 0, '[', fclz);
read(s, s.length() - 1, ']', fclz);
String inside = s.substring(1, s.length() - 1).trim();
if (inside.length() == 0)
splits = new String[]{};
else
splits = splitArgs(inside);
} else { // Let's try to parse a single value as an array!
// See PUBDEV-1955
splits = new String[] { s.trim() };
}
// Can't cast an int[] to an Object[]. Sigh.
if (afclz == int.class) { // TODO: other primitive types. . .
a = (E[]) Array.newInstance(Integer.class, splits.length);
} else if (afclz == double.class) {
a = (E[]) Array.newInstance(Double.class, splits.length);
} else if (afclz == float.class) {
a = (E[]) Array.newInstance(Float.class, splits.length);
} else {
// Fails with primitive classes; need the wrapper class. Thanks, Java.
a = (E[]) Array.newInstance(afclz, splits.length);
}
for (int i = 0; i < splits.length; i++) {
if (String.class == afclz || KeyV3.class.isAssignableFrom(afclz)) {
// strip quotes off string values inside array
String stripped = splits[i].trim();
if ("null".equals(stripped.toLowerCase()) || "na".equals(stripped.toLowerCase())) {
a[i] = null;
continue;
}
// Quotes are now optional because standard clients will send arrays of length one as just strings.
if (stripped.startsWith("\"") && stripped.endsWith("\"")) {
stripped = stripped.substring(1, stripped.length() - 1);
}
a[i] = (E) parse(field_name, stripped, afclz, required, schemaClass);
} else {
a[i] = (E) parse(field_name, splits[i].trim(), afclz, required, schemaClass);
}
}
return a;
}
if (fclz.equals(Key.class)) {
  if ((s == null || s.length() == 0) && required) throw new H2OKeyNotFoundArgumentException(field_name, s);
  if (!required && (s == null || s.length() == 0)) return null;
  return Key.make(s.startsWith("\"") ? s.substring(1, s.length() - 1) : s); // If the key name is in an array we need to trim surrounding quotes.
}
if (KeyV3.class.isAssignableFrom(fclz)) {
if ((s == null || s.length() == 0) && required) throw new H2OKeyNotFoundArgumentException(field_name, s);
if (!required && (s == null || s.length() == 0)) return null;
return KeyV3.make(fclz, Key.make(s.startsWith("\"") ? s.substring(1, s.length() - 1) : s)); // If the key name is in an array we need to trim surrounding quotes.
}
// Enums can match either 1:1 or all lower or all upper case
if (Enum.class.isAssignableFrom(fclz)) {
try {
return Enum.valueOf(fclz, s);
} catch (Throwable t1) {
try {
return Enum.valueOf(fclz, s.toLowerCase());
} catch (Throwable t2) {
return Enum.valueOf(fclz, s.toUpperCase());
}
}
}
// TODO: these can be refactored into a single case using the facilities in Schema:
if (FrameV3.class.isAssignableFrom(fclz)) {
if ((s == null || s.length() == 0) && required) throw new H2OKeyNotFoundArgumentException(field_name, s);
else if (!required && (s == null || s.length() == 0)) return null;
else {
Value v = DKV.get(s);
if (null == v) return null; // not required
if (!v.isFrame()) throw H2OIllegalArgumentException.wrongKeyType(field_name, s, "Frame", v.get().getClass());
return new FrameV3((Frame) v.get()); // TODO: version!
}
}
if (JobV3.class.isAssignableFrom(fclz)) {
if ((s == null || s.length() == 0) && required) throw new H2OKeyNotFoundArgumentException(s);
else if (!required && (s == null || s.length() == 0)) return null;
else {
Value v = DKV.get(s);
if (null == v) return null; // not required
if (!v.isJob()) throw H2OIllegalArgumentException.wrongKeyType(field_name, s, "Job", v.get().getClass());
return new JobV3().fillFromImpl((Job) v.get()); // TODO: version!
}
}
// TODO: for now handle the case where we're only passing the name through; later we need to handle the case
// where the frame name is also specified.
if (FrameV3.ColSpecifierV3.class.isAssignableFrom(fclz)) {
return new FrameV3.ColSpecifierV3(s);
}
if (ModelSchema.class.isAssignableFrom(fclz))
throw H2O.fail("Can't yet take ModelSchema as input.");
throw H2O.fail("Unimplemented schema fill from " + fclz.getSimpleName());
} // parse()
/**
* Helper functions for parse()
**/
static private <T> T parseInteger(String s, Class<T> return_type) {
try {
java.math.BigDecimal num = new java.math.BigDecimal(s);
T result = (T) num.getClass().getDeclaredMethod(return_type.getSimpleName() + "ValueExact", new Class[0]).invoke(num);
return result;
} catch (InvocationTargetException ite) {
throw new NumberFormatException("The expression's numeric value is out of the range of type " + return_type.getSimpleName());
} catch (NoSuchMethodException nsme) {
throw new IllegalArgumentException(return_type.getSimpleName() + " is not an integer data type");
} catch (IllegalAccessException iae) {
throw H2O.fail("Cannot parse expression as " + return_type.getSimpleName() + " (Illegal Access)");
}
}
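// e.g. parseInteger("42", int.class) reflectively invokes BigDecimal.intValueExact()
// and returns 42; non-integral or out-of-range inputs such as "4.2" or "1e20" surface
// as NumberFormatException via the InvocationTargetException catch block above.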
static private int read( String s, int x, char c, Class fclz ) {
if( peek(s,x,c) ) return x+1;
throw new IllegalArgumentException("Expected '"+c+"' while reading a "+fclz.getSimpleName()+", but found "+s);
}
static private boolean peek( String s, int x, char c ) { return x < s.length() && s.charAt(x) == c; }
// Splits on commas, but ignores commas in double quotes. Required
// since using a regex blows the stack on long column counts
// TODO: detect and complain about malformed JSON
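// e.g. splitArgs("a,\"b,c\",[1,2]") -> { "a", "\"b,c\"", "[1,2]" }: commas inside
// double quotes or square brackets are kept, not treated as separators.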
private static String[] splitArgs(String argStr) {
StringBuilder sb = new StringBuilder(argStr);
StringBuilder arg = new StringBuilder();
List<String> splitArgList = new ArrayList<String> ();
boolean inDoubleQuotes = false;
boolean inSquareBrackets = false; // for arrays of arrays
for (int i=0; i < sb.length(); i++) {
if (sb.charAt(i) == '"' && !inDoubleQuotes && !inSquareBrackets) {
inDoubleQuotes = true;
arg.append(sb.charAt(i));
} else if (sb.charAt(i) == '"' && inDoubleQuotes && !inSquareBrackets) {
inDoubleQuotes = false;
arg.append(sb.charAt(i));
} else if (sb.charAt(i) == ',' && !inDoubleQuotes && !inSquareBrackets) {
splitArgList.add(arg.toString());
// clear the field for next word
arg.setLength(0);
} else if (sb.charAt(i) == '[') {
inSquareBrackets = true;
arg.append(sb.charAt(i));
} else if (sb.charAt(i) == ']') {
inSquareBrackets = false;
arg.append(sb.charAt(i));
} else {
arg.append(sb.charAt(i));
}
}
if (arg.length() > 0)
splitArgList.add(arg.toString());
return splitArgList.toArray(new String[splitArgList.size()]);
}
/**
* Returns a new Schema instance. Never returns null; fails fast via H2O.fail if instantiation fails.
* @return New instance of Schema Class 'clz'.
*/
public static <T extends Schema> T newInstance(Class<T> clz) {
try { return clz.newInstance(); }
catch (Exception e) { throw H2O.fail("Failed to instantiate schema of class: " + clz.getCanonicalName(),e); }
}
/**
* For a given schema_name (e.g., "FrameV2") return an appropriate new schema object (e.g., a water.api.FrameV2).
*/
protected static Schema newInstance(String schema_name) {
return Schema.newInstance(SchemaServer.getSchema(schema_name));
}
/**
* Generate Markdown documentation for this Schema possibly including only the input or output fields.
* @throws H2ONotFoundArgumentException if reflection on a field fails
*/
public StringBuffer markdown(boolean include_input_fields, boolean include_output_fields) {
return markdown(new SchemaMetadata(this), include_input_fields, include_output_fields);
}
/**
* Generate Markdown documentation for this Schema, given we already have the metadata constructed.
* @throws H2ONotFoundArgumentException if reflection on a field fails
*/
public StringBuffer markdown(SchemaMetadata meta, boolean include_input_fields, boolean include_output_fields) {
MarkdownBuilder builder = new MarkdownBuilder();
builder.comment("Preview with http://jbt.github.io/markdown-editor");
builder.heading1("schema ", this.getClass().getSimpleName());
builder.hline();
// builder.paragraph(metadata.summary);
// TODO: refactor with Route.markdown():
// fields
boolean first; // don't print the table at all if there are no rows
try {
if (include_input_fields) {
first = true;
builder.heading2("input fields");
for (SchemaMetadata.FieldMetadata field_meta : meta.fields) {
if (field_meta.direction == API.Direction.INPUT || field_meta.direction == API.Direction.INOUT) {
if (first) {
builder.tableHeader("name", "required?", "level", "type", "schema?", "schema", "default", "description", "values", "is member of frames", "is mutually exclusive with");
first = false;
}
builder.tableRow(
field_meta.name,
String.valueOf(field_meta.required),
field_meta.level.name(),
field_meta.type,
String.valueOf(field_meta.is_schema),
field_meta.is_schema ? field_meta.schema_name : "", (null == field_meta.value ? "(null)" : field_meta.value.toString()), // Something better for toString()?
field_meta.help,
(field_meta.values == null || field_meta.values.length == 0 ? "" : Arrays.toString(field_meta.values)),
(field_meta.is_member_of_frames == null ? "[]" : Arrays.toString(field_meta.is_member_of_frames)),
(field_meta.is_mutually_exclusive_with == null ? "[]" : Arrays.toString(field_meta.is_mutually_exclusive_with))
);
}
}
if (first)
builder.paragraph("(none)");
}
if (include_output_fields) {
first = true;
builder.heading2("output fields");
for (SchemaMetadata.FieldMetadata field_meta : meta.fields) {
if (field_meta.direction == API.Direction.OUTPUT || field_meta.direction == API.Direction.INOUT) {
if (first) {
builder.tableHeader("name", "type", "schema?", "schema", "default", "description", "values", "is member of frames", "is mutually exclusive with");
first = false;
}
builder.tableRow(
field_meta.name,
field_meta.type,
String.valueOf(field_meta.is_schema),
field_meta.is_schema ? field_meta.schema_name : "",
(null == field_meta.value ? "(null)" : field_meta.value.toString()), // something better than toString()?
field_meta.help,
(field_meta.values == null || field_meta.values.length == 0 ? "" : Arrays.toString(field_meta.values)),
(field_meta.is_member_of_frames == null ? "[]" : Arrays.toString(field_meta.is_member_of_frames)),
(field_meta.is_mutually_exclusive_with == null ? "[]" : Arrays.toString(field_meta.is_mutually_exclusive_with)));
}
}
if (first)
builder.paragraph("(none)");
}
// TODO: render examples and other stuff, if it's passed in
}
catch (Exception e) {
IcedHashMap.IcedHashMapStringObject values = new IcedHashMap.IcedHashMapStringObject();
values.put("schema", this);
// TODO: This isn't quite the right exception type:
throw new H2OIllegalArgumentException("Caught exception using reflection on schema: " + this,
"Caught exception using reflection on schema: " + this + ": " + e,
values);
}
return builder.stringBuffer();
}
}
|
package water.fvec;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import water.*;
import water.parser.ValueString;
import water.util.Log;
import water.util.PrettyPrint;
import water.util.TwoDimTable;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.HashMap;
/** A collection of named {@link Vec}s, essentially an R-like Distributed Data Frame.
*
* <p>Frames represent a large distributed 2-D table with named columns
* ({@link Vec}s) and numbered rows. A reasonable <em>column</em> limit is
* 100K columns, but there's no hard-coded limit. There's no real <em>row</em>
* limit except memory; Frames (and Vecs) with many billions of rows are used
* routinely.
*
* <p>A Frame is a collection of named Vecs; a Vec is a collection of numbered
* {@link Chunk}s. A Frame is small, and cheaply and easily manipulated; it is
* commonly passed by value. It exists on one node, and <em>may</em> be
* stored in the {@link DKV}. Vecs, on the other hand, <em>must</em> be stored in the
* {@link DKV}, as they represent the shared common management state for a collection
* of distributed Chunks.
*
* <p>Multiple Frames can reference the same Vecs, although this sharing can
* make Vec lifetime management complex. Commonly temporary Frames are used
* to work with a subset of some other Frame (often during algorithm
* execution, when some columns are dropped from the modeling process). The
* temporary Frame can simply be ignored, allowing the normal GC process to
* reclaim it. Such temp Frames usually have a {@code null} key.
*
* <p>All the Vecs in a Frame belong to the same {@link Vec.VectorGroup} which
* then enforces {@link Chunk} row alignment across Vecs (or at least enforces
* a low-cost access model). Parallel and distributed execution touching all
* the data in a Frame relies on this alignment to get good performance.
*
* <p>Example: Make a Frame from a CSV file:<pre>
* File file = ...
* NFSFileVec nfs = NFSFileVec.make(file); // NFS-backed Vec, lazily read on demand
* Frame fr = water.parser.ParseDataset.parse(Key.make("myKey"),nfs._key);
* </pre>
*
* <p>Example: Find and remove the Vec called "unique_id" from the Frame,
* since modeling with a unique_id can lead to overfitting:
* <pre>
* Vec uid = fr.remove("unique_id");
* </pre>
*
* <p>Example: Move the response column to the last position:
* <pre>
* fr.add("response",fr.remove("response"));
* </pre>
*
*/
public class Frame extends Lockable<Frame> {
/** Vec names */
public String[] _names;
private boolean _lastNameBig; // Last name is "Cxxx" and has largest number
private Key<Vec>[] _keys; // Keys for the vectors
private transient Vec[] _vecs; // The Vectors (transient to avoid network traffic)
private transient Vec _col0; // First readable vec; fast access to the VectorGroup's Chunk layout
/** Creates an internal frame composed of the given Vecs and default names. The frame has no key. */
public Frame( Vec... vecs ){ this(null,vecs);}
/** Creates an internal frame composed of the given Vecs and names. The frame has no key. */
public Frame( String names[], Vec vecs[] ) { this(null,names,vecs); }
/** Creates an empty frame with given key. */
public Frame( Key key ) { this(key,null,new Vec[0]); }
/**
* Special constructor for data with unnamed columns (e.g. svmlight) bypassing *all* checks.
* @param key frame key
* @param vecs the Vecs to compose the frame from; given default column names
* @param noChecks must be true; flags that this constructor intentionally skips compatibility checks
*/
public Frame( Key key, Vec vecs[], boolean noChecks) {
super(key);
assert noChecks;
_vecs = vecs;
_names = new String[vecs.length];
_keys = new Key[vecs.length];
for (int i = 0; i < vecs.length; i++) {
_names[i] = defaultColName(i);
_keys[i] = vecs[i]._key;
}
}
/** Creates a frame with given key, names and vectors. */
public Frame( Key key, String names[], Vec vecs[] ) {
super(key);
// Require all Vecs already be installed in the K/V store
for( Vec vec : vecs ) DKV.prefetch(vec._key);
for( Vec vec : vecs ) assert DKV.get(vec._key) != null;
// Always require names
if( names==null ) { // Make default names, all known to be unique
_names = new String[vecs.length];
_keys = new Key [vecs.length];
_vecs = vecs;
for( int i=0; i<vecs.length; i++ ) _names[i] = defaultColName(i);
for( int i=0; i<vecs.length; i++ ) _keys [i] = vecs[i]._key;
for( int i=0; i<vecs.length; i++ ) checkCompatible(_names[i],vecs[i]);
_lastNameBig = true;
} else {
// Make empty to dodge asserts, then "add()" them all which will check
// for compatible Vecs & names.
_names = new String[0];
_keys = new Key [0];
_vecs = new Vec [0];
add(names,vecs);
}
assert _names.length == vecs.length;
}
/** Deep copy of Vecs and Keys and Names (but not data!) to a new random Key.
* The resulting Frame does not share with the original, so the set of Vecs
* can be freely hacked without disturbing the original Frame. */
public Frame( Frame fr ) {
super( Key.make() );
_names= fr._names.clone();
_keys = fr._keys .clone();
_vecs = fr.vecs().clone();
_lastNameBig = fr._lastNameBig;
}
/** Default column name maker */
public static String defaultColName( int col ) { return "C"+(1+col); }
// Make unique names. Efficient for the special case of appending endless
// versions of "C123" style names where the next name is +1 over the prior
// name. All other names take the O(n^2) lookup.
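// e.g. with _lastNameBig set, appending "C3" after ["C1","C2"] returns immediately;
// appending a duplicate "x" after ["x"] falls through to the loop and becomes "x0".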
private int pint( String name ) {
try { return Integer.valueOf(name.substring(1)); }
catch( NumberFormatException fe ) { }
return 0;
}
private String uniquify( String name ) {
String n = name;
int lastName = 0;
if( name.length() > 0 && name.charAt(0)=='C' )
lastName = pint(name);
if( _lastNameBig && _names.length > 0 ) {
String last = _names[_names.length-1];
if( last.charAt(0)=='C' && lastName == pint(last)+1 )
return name;
}
int cnt=0, again, max=0;
do {
again = cnt;
for( String s : _names ) {
if( lastName > 0 && s.charAt(0)=='C' )
max = Math.max(max,pint(s));
if( n.equals(s) )
n = name+(cnt++);
}
} while( again != cnt );
if( lastName == max+1 ) _lastNameBig = true;
return n;
}
/** Check that the vectors are all compatible. All Vecs have their content
* sharded using the same number of rows per chunk, and all names are unique.
* Throw an IAE if something does not match. */
private void checkCompatible( String name, Vec vec ) {
if( vec instanceof AppendableVec ) return; // New Vectors are endlessly compatible
Vec v0 = anyVec();
if( v0 == null ) return; // No fixed-size Vecs in the Frame
// Vector group has to be the same, or else the layout has to be the same,
// or else the total length has to be small.
if( !v0.checkCompatible(vec) ) {
if(!Vec.VectorGroup.sameGroup(v0,vec))
Log.err("Unexpected incompatible vector group, " + v0.group() + " != " + vec.group());
if(!Arrays.equals(v0._espc, vec._espc))
Log.err("Unexpected incompatible espc, " + Arrays.toString(v0._espc) + " != " + Arrays.toString(vec._espc));
throw new IllegalArgumentException("Vec " + name + " is not compatible with the rest of the frame");
}
}
/** Quick compatibility check between Frames. Used by some tests for efficient equality checks. */
public boolean checkCompatible( Frame fr ) {
if( numCols() != fr.numCols() ) return false;
if( numRows() != fr.numRows() ) return false;
for( int i=0; i<vecs().length; i++ )
if( !vecs()[i].checkCompatible(fr.vecs()[i]) )
return false;
return true;
}
/** Number of columns
* @return Number of columns */
public int numCols() { return _keys.length; }
/** Number of rows
* @return Number of rows */
public long numRows() { Vec v = anyVec(); return v==null ? 0 : v.length(); }
/** Returns the first readable vector.
* @return the first readable Vec */
public final Vec anyVec() {
Vec c0 = _col0; // single read
if( c0 != null ) return c0;
for( Vec v : vecs() )
if( v.readable() )
return (_col0 = v);
return null;
}
/** The array of column names.
* @return the array of column names */
public String[] names() { return _names; }
/** A single column name.
* @return the column name */
public String name(int i) { return _names[i]; } // TODO: saw a non-reproducible NPE here
/** The array of keys.
* @return the array of keys for each vec in the frame.
*/
public Key[] keys() { return _keys; }
/** The internal array of Vecs. For efficiency Frames contain an array of
* Vec Keys - and the Vecs themselves are lazily loaded from the {@link DKV}.
* @return the internal array of Vecs */
public final Vec[] vecs() {
Vec[] tvecs = _vecs; // read the content
return tvecs == null ? (_vecs=vecs_impl()) : tvecs;
}
public final Vec[] vecs(int [] idxs) {
Vec [] all = vecs();
Vec [] res = new Vec[idxs.length];
for(int i = 0; i < idxs.length; ++i)
res[i] = all[idxs[i]];
return res;
}
// Compute vectors for caching
private Vec[] vecs_impl() {
// Load all Vec headers; load them all in parallel by starting prefetches
for( Key<Vec> key : _keys ) DKV.prefetch(key);
Vec [] vecs = new Vec[_keys.length];
for( int i=0; i<_keys.length; i++ ) vecs[i] = _keys[i].get();
return vecs;
}
/** Convenience accessor for the last Vec
* @return last Vec */
public Vec lastVec() { vecs(); return _vecs [_vecs.length -1]; }
/** Convenience accessor for the last Vec name
* @return last Vec name */
public String lastVecName() { return _names[_names.length-1]; }
/** Force a cache-flush and reload, assuming vec mappings were altered
* remotely, or that the _vecs array was shared and now needs to be a
* defensive copy.
* @return the new instance of the Frame's Vec[] */
public final Vec[] reloadVecs() { _vecs=null; return vecs(); }
/** Returns the Vec by given index, implemented by code: {@code vecs()[idx]}.
* @param idx idx of column
* @return this frame idx-th vector, never returns <code>null</code> */
public final Vec vec(int idx) { return vecs()[idx]; }
/** Return a Vec by name, or null if missing
* @return a Vec by name, or null if missing */
public Vec vec(String name) { int idx = find(name); return idx==-1 ? null : vecs()[idx]; }
/** Finds the column index with a matching name, or -1 if missing
* @return the column index with a matching name, or -1 if missing */
public int find( String name ) {
if( name == null ) return -1;
assert _names != null;
for( int i=0; i<_names.length; i++ )
if( name.equals(_names[i]) )
return i;
return -1;
}
/** Finds the matching column index, or -1 if missing
* @return the matching column index, or -1 if missing */
public int find( Vec vec ) {
Vec[] vecs = vecs();
for( int i=0; i<vecs.length; i++ )
if( vec.equals(vecs[i]) )
return i;
return -1;
}
/** Bulk {@link #find(String)} api
* @return An array of column indices matching the {@code names} array */
public int[] find(String[] names) {
if( names == null ) return null;
int[] res = new int[names.length];
for(int i = 0; i < names.length; ++i)
res[i] = find(names[i]);
return res;
}
/** Pair of (column name, Frame key). */
public static class VecSpecifier extends Iced {
public Key<Frame> _frame;
String _column_name;
public Vec vec() {
Value v = DKV.get(_frame);
if (null == v) return null;
Frame f = v.get();
if (null == f) return null;
return f.vec(_column_name);
}
}
/** Type for every Vec */
byte[] types() {
Vec[] vecs = vecs();
byte bs[] = new byte[vecs.length];
for( int i=0; i<vecs.length; i++ )
bs[i] = vecs[i]._type;
return bs;
}
/** All the domains for enum columns; null for non-enum columns.
* @return the domains for enum columns */
public String[][] domains() {
Vec[] vecs = vecs();
String ds[][] = new String[vecs.length][];
for( int i=0; i<vecs.length; i++ )
ds[i] = vecs[i].domain();
return ds;
}
/** All the column means.
* @return the mean of each column */
public double[] means() {
Vec[] vecs = vecs();
double[] means = new double[vecs.length];
for( int i = 0; i < vecs.length; i++ )
means[i] = vecs[i].mean();
return means;
}
/** One over the standard deviation of each column.
* @return Reciprocal of the standard deviation of each column */
public double[] mults() {
Vec[] vecs = vecs();
double[] mults = new double[vecs.length];
for( int i = 0; i < vecs.length; i++ ) {
double sigma = vecs[i].sigma();
mults[i] = standardize(sigma) ? 1.0 / sigma : 1.0;
}
return mults;
}
private static boolean standardize(double sigma) {
// TODO unify handling of constant columns
return sigma > 1e-6;
}
/** The {@code Vec.byteSize} of all Vecs
* @return the {@code Vec.byteSize} of all Vecs */
public long byteSize() {
long sum=0;
Vec[] vecs = vecs();
for (Vec vec : vecs) sum += vec.byteSize();
return sum;
}
/** 64-bit checksum of the checksums of the vecs. SHA-256 checksums of the
* chunks are XORed together. Since parse always parses the same pieces of
* files into the same offsets in some chunk this checksum will be
* consistent across reparses.
* @return 64-bit Frame checksum */
@Override protected long checksum_impl() {
Vec[] vecs = vecs();
long _checksum = 0;
for( int i = 0; i < _names.length; ++i ) {
long vec_checksum = vecs[i].checksum();
_checksum ^= vec_checksum;
long tmp = (2147483647L * i);
_checksum ^= tmp;
}
_checksum *= (0xBABE + Arrays.hashCode(_names));
// TODO: include column types? Vec.checksum() should include type?
return _checksum;
}
// Add a bunch of vecs
public void add( String[] names, Vec[] vecs ) {
if (null == vecs || null == names) return;
for( int i=0; i<vecs.length; i++ )
add(names[i],vecs[i]);
}
/** Append a named Vec to the Frame. Names are forced unique, by appending a
* unique number if needed.
* @return the added Vec, for flow-coding */
public Vec add( String name, Vec vec ) {
checkCompatible(name=uniquify(name),vec); // Throws IAE on mismatch
int ncols = _keys.length;
_names = Arrays.copyOf(_names,ncols+1); _names[ncols] = name;
_keys = Arrays.copyOf(_keys ,ncols+1); _keys [ncols] = vec._key;
_vecs = Arrays.copyOf(_vecs ,ncols+1); _vecs [ncols] = vec;
return vec;
}
/** Append a Frame onto this Frame. Names are forced unique, by appending
* unique numbers if needed.
* @return the expanded Frame, for flow-coding */
public Frame add( Frame fr ) { add(fr._names,fr.vecs()); return this; }
/** Insert a named column as the first column */
public Frame prepend( String name, Vec vec ) {
if( find(name) != -1 ) throw new IllegalArgumentException("Duplicate name '"+name+"' in Frame");
if( _vecs.length != 0 ) {
if( !anyVec().group().equals(vec.group()) && !Arrays.equals(anyVec()._espc,vec._espc) )
throw new IllegalArgumentException("Vector groups differs - adding vec '"+name+"' into the frame " + Arrays.toString(_names));
if( numRows() != vec.length() )
throw new IllegalArgumentException("Vector lengths differ - adding vec '"+name+"' into the frame " + Arrays.toString(_names));
}
final int len = _names != null ? _names.length : 0;
String[] _names2 = new String[len+1];
Vec[] _vecs2 = new Vec [len+1];
Key[] _keys2 = new Key [len+1];
_names2[0] = name;
_vecs2 [0] = vec ;
_keys2 [0] = vec._key;
System.arraycopy(_names, 0, _names2, 1, len);
System.arraycopy(_vecs, 0, _vecs2, 1, len);
System.arraycopy(_keys, 0, _keys2, 1, len);
_names = _names2;
_vecs = _vecs2;
_keys = _keys2;
return this;
}
/** Swap two Vecs in-place; useful for sorting columns by some criteria */
public void swap( int lo, int hi ) {
assert 0 <= lo && lo < _keys.length;
assert 0 <= hi && hi < _keys.length;
if( lo==hi ) return;
Vec vecs[] = vecs();
Vec v = vecs [lo]; vecs [lo] = vecs [hi]; vecs [hi] = v;
Key k = _keys[lo]; _keys [lo] = _keys [hi]; _keys [hi] = k;
String n=_names[lo]; _names[lo] = _names[hi]; _names[hi] = n;
}
public Frame subframe(String[] names) { return subframe(names, false, 0)[0]; }
/** Returns a new frame composed of vectors of this frame selected by given names.
* The method replaces missing vectors with a constant column filled with the given value.
* @param names names of vector to compose a subframe
* @param c value to fill missing columns.
* @return two frames, the first contains subframe, the second contains newly created constant vectors or null
*/
public Frame[] subframe(String[] names, double c) { return subframe(names, true, c); }
private Frame[] subframe(String[] names, boolean replaceBy, double c){
Vec [] vecs = new Vec[names.length];
Vec [] cvecs = replaceBy ? new Vec [names.length] : null;
String[] cnames = replaceBy ? new String[names.length] : null;
int ccv = 0; // counter of constant columns
vecs(); // Preload the vecs
HashMap<String, Integer> map = new HashMap<>((int) ((names.length/0.75f)+1)); // avoid rehashing by set up initial capacity
for(int i = 0; i < _names.length; ++i) map.put(_names[i], i);
for(int i = 0; i < names.length; ++i)
if(map.containsKey(names[i])) vecs[i] = _vecs[map.get(names[i])];
else if (replaceBy) {
Log.warn("Column " + names[i] + " is missing, filling it in with " + c);
assert cnames != null;
cnames[ccv] = names[i];
vecs[i] = cvecs[ccv++] = anyVec().makeCon(c);
}
return new Frame[] { new Frame(Key.make("subframe"+Key.make().toString()), names,vecs), ccv>0 ? new Frame(Key.make("subframe"+Key.make().toString()), Arrays.copyOf(cnames, ccv), Arrays.copyOf(cvecs,ccv)) : null };
}
/** Allow rollups for all written-into vecs; used by {@link MRTask} once
* writing is complete.
* @return the original Futures, for flow-coding */
public Futures postWrite(Futures fs) {
for( Vec v : vecs() ) v.postWrite(fs);
return fs;
}
/** Actually remove/delete all Vecs from memory, not just from the Frame.
* @return the original Futures, for flow-coding */
@Override public Futures remove_impl(Futures fs) {
for( Vec v : vecs() ) if( v != null ) v.remove(fs);
_names = new String[0];
_vecs = new Vec[0];
_keys = new Key[0];
return fs;
}
/** Replace one column with another. Caller must perform global update (DKV.put) on
* this updated frame.
* @return The old column, for flow-coding */
public Vec replace(int col, Vec nv) {
assert DKV.get(nv._key)!=null; // Already in DKV
Vec rv = vecs()[col];
assert rv.group().equals(nv.group());
_vecs[col] = nv;
_keys[col] = nv._key;
return rv;
}
/** Create a subframe from given interval of columns.
* @param startIdx index of first column (inclusive)
* @param endIdx index of the last column (exclusive)
* @return a new Frame containing specified interval of columns */
public Frame subframe(int startIdx, int endIdx) {
return new Frame(Arrays.copyOfRange(_names,startIdx,endIdx),Arrays.copyOfRange(vecs(),startIdx,endIdx));
}
/** Split this Frame; return a subframe created from the given column interval, and
* remove those columns from this Frame.
* @param startIdx index of first column (inclusive)
* @param endIdx index of the last column (exclusive)
* @return a new Frame containing specified interval of columns */
public Frame extractFrame(int startIdx, int endIdx) {
Frame f = subframe(startIdx, endIdx);
remove(startIdx, endIdx);
return f;
}
/** Removes the column with a matching name.
* @return The removed column */
public Vec remove( String name ) { return remove(find(name)); }
public Frame remove( String[] names ) {
for( String name : names )
remove(find(name));
return this;
}
/** Removes a list of columns by index; the index array is sorted in place
* @return an array of the removed columns */
public Vec[] remove( int[] idxs ) {
for( int i : idxs )
if(i < 0 || i >= _vecs.length)
throw new ArrayIndexOutOfBoundsException();
Arrays.sort(idxs);
Vec[] res = new Vec[idxs.length];
Vec[] rem = new Vec[_vecs.length-idxs.length];
String[] names = new String[rem.length];
Key [] keys = new Key [rem.length];
int j = 0;
int k = 0;
int l = 0;
for(int i = 0; i < _vecs.length; ++i) {
if(j < idxs.length && i == idxs[j]) {
++j;
res[k++] = _vecs[i];
} else {
rem [l] = _vecs [i];
names[l] = _names[i];
keys [l] = _keys [i];
++l;
}
}
_vecs = rem;
_names= names;
_keys = keys;
assert l == rem.length && k == idxs.length;
return res;
}
/** Removes a numbered column.
* @return the removed column */
public final Vec remove( int idx ) {
int len = _names.length;
if( idx < 0 || idx >= len ) return null;
Vec v = vecs()[idx];
System.arraycopy(_names,idx+1,_names,idx,len-idx-1);
System.arraycopy(_vecs ,idx+1,_vecs ,idx,len-idx-1);
System.arraycopy(_keys ,idx+1,_keys ,idx,len-idx-1);
_names = Arrays.copyOf(_names,len-1);
_vecs = Arrays.copyOf(_vecs ,len-1);
_keys = Arrays.copyOf(_keys ,len-1);
if( v == _col0 ) _col0 = null;
return v;
}
/** Remove given interval of columns from frame. Motivated by R intervals.
* @param startIdx - start index of column (inclusive)
* @param endIdx - end index of column (exclusive)
* @return array of removed columns */
Vec[] remove(int startIdx, int endIdx) {
int len = _names.length;
int nlen = len - (endIdx-startIdx);
String[] names = new String[nlen];
Key[] keys = new Key[nlen];
Vec[] vecs = new Vec[nlen];
vecs();
if (startIdx > 0) {
System.arraycopy(_names, 0, names, 0, startIdx);
System.arraycopy(_vecs, 0, vecs, 0, startIdx);
System.arraycopy(_keys, 0, keys, 0, startIdx);
}
nlen -= startIdx;
if (endIdx < _names.length+1) {
System.arraycopy(_names, endIdx, names, startIdx, nlen);
System.arraycopy(_vecs, endIdx, vecs, startIdx, nlen);
System.arraycopy(_keys, endIdx, keys, startIdx, nlen);
}
Vec[] vecX = Arrays.copyOfRange(_vecs,startIdx,endIdx);
_names = names;
_vecs = vecs;
_keys = keys;
_col0 = null;
return vecX;
}
/** Restructure a Frame completely */
public void restructure( String[] names, Vec[] vecs ) {
// Make empty to dodge asserts, then "add()" them all which will check for
// compatible Vecs & names.
_names = new String[0];
_keys = new Key [0];
_vecs = new Vec [0];
add(names,vecs);
}
// Utilities to help external Frame constructors, e.g. Spark.
// Make an initial Frame & lock it for writing. Build Vec Keys.
void preparePartialFrame( String[] names ) {
// Nuke any prior frame (including freeing storage) & lock this one
if( _keys != null ) delete_and_lock(null);
else write_lock(null);
_names = names;
_keys = new Vec.VectorGroup().addVecs(names.length);
// No Vectors tho!!! These will be added *after* the import
}
// Only serialize strings, not H2O internal structures
// Make NewChunks for holding data from e.g. Spark. Once per set of
// Chunks in a Frame, before filling them. This can be called in parallel
// for different Chunk#'s (cidx); each Chunk can be filled in parallel.
static NewChunk[] createNewChunks( String name, int cidx ) {
Frame fr = (Frame)Key.make(name).get();
NewChunk[] nchks = new NewChunk[fr.numCols()];
for( int i=0; i<nchks.length; i++ )
nchks[i] = new NewChunk(new AppendableVec(fr._keys[i]),cidx);
return nchks;
}
// Compress & DKV.put NewChunks. Once per set of Chunks in a Frame, after
// filling them. Can be called in parallel for different sets of Chunks.
static void closeNewChunks( NewChunk[] nchks ) {
Futures fs = new Futures();
for( NewChunk nchk : nchks ) nchk.close(fs);
fs.blockForPending();
}
// Build real Vecs from loose Chunks, and finalize this Frame. Called once
// after any number of [create,close]NewChunks.
// FIXME: have proper representation of column type
void finalizePartialFrame( long[] espc, String[][] domains, byte[] types ) {
// Compute elems-per-chunk.
// Roll up elem counts, so espc2[i] is the starting element# of chunk i.
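// e.g. per-chunk counts espc = {4, 3, 2} roll up to espc2 = {0, 4, 7, 9}.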
int nchunk = espc.length;
long espc2[] = new long[nchunk+1]; // Shorter array
long x=0; // Total row count so far
for( int i=0; i<nchunk; i++ ) {
espc2[i] = x; // Start elem# for chunk i
x += espc[i]; // Raise total elem count
}
espc2[nchunk]=x; // Total element count in last
// For all Key/Vecs - insert Vec header
Futures fs = new Futures();
_vecs = new Vec[_keys.length];
for( int i=0; i<_keys.length; i++ ) {
// Insert Vec header
Vec vec = _vecs[i] = new Vec( _keys[i],
espc2,
domains!=null ? domains[i] : null,
types[i]);
// Here we have to save vectors since
// saving during unlock will invoke Frame vector
// refresh
DKV.put(_keys[i],vec,fs);
}
fs.blockForPending();
unlock(null);
}
static final int MAX_EQ2_COLS = 100000; // Limit of columns user is allowed to request
/** In support of R, a generic Deep Copy and Slice.
*
* <p>Semantics are a little odd, to match R's. Each dimension spec can be:<ul>
* <li><em>null</em> - all of them
* <li><em>a sorted list of negative numbers (no dups)</em> - all BUT these
* <li><em>an unordered list of positive</em> - just these, allowing dups
* </ul>
*
* <p>The numbering is 1-based; zero's are not allowed in the lists, nor are out-of-range values.
* @return the sliced Frame
*/
public Frame deepSlice( Object orows, Object ocols ) {
// ocols is either a long[] or a Frame-of-1-Vec
long[] cols;
if( ocols == null ) cols = null;
else if (ocols instanceof long[]) cols = (long[])ocols;
else if (ocols instanceof Frame) {
Frame fr = (Frame) ocols;
if (fr.numCols() != 1)
throw new IllegalArgumentException("Columns Frame must have only one column (actually has " + fr.numCols() + " columns)");
long n = fr.anyVec().length();
if (n > MAX_EQ2_COLS)
throw new IllegalArgumentException("Too many requested columns (requested " + n +", max " + MAX_EQ2_COLS + ")");
cols = new long[(int)n];
Vec v = fr.anyVec();
for (long i = 0; i < v.length(); i++)
cols[(int)i] = v.at8(i);
} else
throw new IllegalArgumentException("Columns is specified by an unsupported data type (" + ocols.getClass().getName() + ")");
// Since cols is probably short convert to a positive list.
int c2[];
if( cols==null ) {
c2 = new int[numCols()];
for( int i=0; i<c2.length; i++ ) c2[i]=i;
} else if( cols.length==0 ) {
c2 = new int[0];
} else if( cols[0] >= 0 ) {
c2 = new int[cols.length];
for( int i=0; i<cols.length; i++ )
c2[i] = (int)cols[i]; // Conversion of 1-based cols to 0-based is handled by a 1-based front-end!
} else {
c2 = new int[numCols()-cols.length];
int j=0;
for( int i=0; i<numCols(); i++ ) {
if( j >= cols.length || i < (-(1+cols[j])) ) c2[i-j] = i;
else j++;
}
}
for (int aC2 : c2)
if (aC2 >= numCols())
throw new IllegalArgumentException("Trying to select column " + (aC2 + 1) + " but only " + numCols() + " present.");
if( c2.length==0 )
throw new IllegalArgumentException("No columns selected (did you try to select column 0 instead of column 1?)");
// Do Da Slice
// orows is either a long[] or a Vec
if (numRows() == 0) {
return new MRTask() {
@Override public void map(Chunk[] chks, NewChunk[] nchks) { for (NewChunk nc : nchks) nc.addNA(); }
}.doAll(c2.length, this).outputFrame(names(c2), domains(c2));
}
if (orows == null)
return new DeepSlice(null,c2,vecs()).doAll(c2.length,this).outputFrame(names(c2),domains(c2));
else if (orows instanceof long[]) {
final long CHK_ROWS=1000000;
final long[] rows = (long[])orows;
if (this.numRows() == 0) {
return this;
}
if( rows.length==0 || rows[0] < 0 ) {
if (rows.length != 0 && rows[0] < 0) {
Vec v0 = this.anyVec().makeZero();
Vec v = new MRTask() {
@Override public void map(Chunk cs) {
for (long er : rows) {
if (er >= 0) continue;
er = Math.abs(er);
if (er < cs._start || er > (cs._len + cs._start - 1)) continue;
cs.set((int) (er - cs._start), 1);
}
}
}.doAll(v0).getResult()._fr.anyVec();
Keyed.remove(v0._key);
Frame slicedFrame = new DeepSlice(rows, c2, vecs()).doAll(c2.length, this.add("select_vec", v)).outputFrame(names(c2), domains(c2));
Keyed.remove(v._key);
Keyed.remove(this.remove(this.numCols() - 1)._key);
return slicedFrame;
} else {
return new DeepSlice(rows.length == 0 ? null : rows, c2, vecs()).doAll(c2.length, this).outputFrame(names(c2), domains(c2));
}
}
// Vec'ize the index array
Futures fs = new Futures();
AppendableVec av = new AppendableVec(Vec.newKey(Key.make("rownames_vec")));
int r = 0;
int c = 0;
while (r < rows.length) {
NewChunk nc = new NewChunk(av, c);
long end = Math.min(r+CHK_ROWS, rows.length);
for (; r < end; r++) {
nc.addNum(rows[r]);
}
nc.close(c++, fs);
}
Vec c0 = av.close(fs); // c0 is the row index vec
fs.blockForPending();
Frame ff = new Frame(new String[]{"rownames"}, new Vec[]{c0});
Frame fr2 = new Slice(c2, this).doAll(c2.length,ff)
.outputFrame(names(c2), domains(c2));
Keyed.remove(c0._key);
Keyed.remove(av._key);
ff.delete();
return fr2;
}
Frame frows = (Frame)orows;
Vec vrows = frows.anyVec();
// It's a compatible Vec; use it as boolean selector.
// Build column names for the result.
Vec [] vecs = new Vec[c2.length+1];
String [] names = new String[c2.length+1];
for(int i = 0; i < c2.length; ++i){
vecs[i] = _vecs[c2[i]];
names[i] = _names[c2[i]];
}
vecs[c2.length] = vrows;
names[c2.length] = "predicate";
Frame ff = new Frame(names, vecs);
return new DeepSelect().doAll(c2.length,ff).outputFrame(names(c2),domains(c2));
}
// Slice and return in the form of new chunks.
private static class Slice extends MRTask<Slice> {
final Frame _base; // the base frame to slice from
final int[] _cols;
Slice(int[] cols, Frame base) { _cols = cols; _base = base; }
@Override public void map(Chunk[] ix, NewChunk[] ncs) {
final Vec[] vecs = new Vec[_cols.length];
final Vec anyv = _base.anyVec();
final long nrow = anyv.length();
long r = ix[0].at8(0);
int last_ci = anyv.elem2ChunkIdx(r<nrow?r:0); // memoize the last chunk index
long last_c0 = anyv._espc[last_ci]; // ... last chunk start
long last_c1 = anyv._espc[last_ci + 1]; // ... last chunk end
Chunk[] last_cs = new Chunk[vecs.length]; // ... last chunks
for (int c = 0; c < _cols.length; c++) {
vecs[c] = _base.vecs()[_cols[c]];
last_cs[c] = vecs[c].chunkForChunkIdx(last_ci);
}
for (int i = 0; i < ix[0]._len; i++) {
// select one row
r = ix[0].at8(i); // next row to select
if (r < 0) continue;
if (r >= nrow) {
for (int c = 0; c < vecs.length; c++) ncs[c].addNum(Double.NaN);
} else {
if (r < last_c0 || r >= last_c1) {
last_ci = anyv.elem2ChunkIdx(r);
last_c0 = anyv._espc[last_ci];
last_c1 = anyv._espc[last_ci + 1];
for (int c = 0; c < vecs.length; c++)
last_cs[c] = vecs[c].chunkForChunkIdx(last_ci);
}
for (int c = 0; c < vecs.length; c++)
if( vecs[c].isUUID() ) ncs[c].addUUID(last_cs[c],r);
else ncs[c].addNum (last_cs[c].at_abs(r));
}
}
}
}
// Convert the first 20 rows to a 2-d table
@Override public String toString( ) { return toString(0,20); }
// Convert len rows starting at off to a 2-d ascii table
public String toString( long off, int len ) {
if( off > numRows() ) off = numRows();
if( off+len > numRows() ) len = (int)(numRows()-off);
String[] rowHeaders = new String[len+5];
rowHeaders[0] = "min";
rowHeaders[1] = "mean";
rowHeaders[2] = "stddev";
rowHeaders[3] = "max";
rowHeaders[4] = "missing";
for( int i=0; i<len; i++ ) rowHeaders[i+5]=""+(off+i);
final int ncols = numCols();
final Vec[] vecs = vecs();
String[] coltypes = new String[ncols];
String[][] strCells = new String[len+5][ncols];
double[][] dblCells = new double[len+5][ncols];
for( int i=0; i<ncols; i++ ) {
Vec vec = vecs[i];
dblCells[0][i] = vec.min();
dblCells[1][i] = vec.mean();
dblCells[2][i] = vec.sigma();
dblCells[3][i] = vec.max();
dblCells[4][i] = vec.naCnt();
switch( vec.get_type() ) {
case Vec.T_BAD:
coltypes[i] = "string";
for( int j=0; j<len; j++ ) { strCells[j+5][i] = null; dblCells[j+5][i] = TwoDimTable.emptyDouble; }
break;
case Vec.T_STR :
coltypes[i] = "string";
ValueString vstr = new ValueString();
for( int j=0; j<len; j++ ) { strCells[j+5][i] = vec.isNA(off+j) ? "" : vec.atStr(vstr,off+j).toString(); dblCells[j+5][i] = TwoDimTable.emptyDouble; }
break;
case Vec.T_ENUM:
coltypes[i] = "string";
for( int j=0; j<len; j++ ) { strCells[j+5][i] = vec.isNA(off+j) ? "" : vec.factor(vec.at8(off+j)); dblCells[j+5][i] = TwoDimTable.emptyDouble; }
break;
case Vec.T_TIME:
case Vec.T_TIME+1:
case Vec.T_TIME+2:
coltypes[i] = "string";
DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
for( int j=0; j<len; j++ ) { strCells[j+5][i] = fmt.print(vec.at8(off+j)); dblCells[j+5][i] = TwoDimTable.emptyDouble; }
break;
case Vec.T_NUM:
coltypes[i] = vec.isInt() ? "long" : "double";
for( int j=0; j<len; j++ ) { dblCells[j+5][i] = vec.isNA(off+j) ? TwoDimTable.emptyDouble : vec.at(off + j); strCells[j+5][i] = null; }
break;
case Vec.T_UUID:
throw H2O.unimpl();
default:
System.err.println("bad vector type during debug print: "+vec.get_type());
throw H2O.fail();
}
}
return new TwoDimTable("Frame "+_key+" with "+numRows()+" rows and "+numCols()+" cols",rowHeaders,_names,coltypes,null, "", strCells, dblCells).toString();
}
// Bulk (expensive) copy from 2nd cols into 1st cols.
// Sliced by the given cols & rows
private static class DeepSlice extends MRTask<DeepSlice> {
final int _cols[];
final long _rows[];
final byte _isInt[];
DeepSlice( long rows[], int cols[], Vec vecs[] ) {
_cols=cols;
_rows=rows;
_isInt = new byte[cols.length];
for( int i=0; i<cols.length; i++ )
_isInt[i] = (byte)(vecs[cols[i]].isInt() ? 1 : 0);
}
@Override public boolean logVerbose() { return false; }
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
long rstart = chks[0]._start;
int rlen = chks[0]._len; // Total row count
int rx = 0; // Which row to in/ex-clude
int rlo = 0; // Lo/Hi for this block of rows
int rhi = rlen;
while (true) { // Still got rows to include?
if (_rows != null) { // Got a row selector?
if (rx >= _rows.length) break; // All done with row selections
long r = _rows[rx++];// Next row selector
if (r < rstart) continue;
rlo = (int) (r - rstart);
rhi = rlo + 1; // Stop at the next row
while (rx < _rows.length && (_rows[rx] - rstart) == rhi && rhi < rlen) {
rx++;
rhi++; // Grab sequential rows
}
}
// Process this next set of rows
// For all cols in the new set
for (int i = 0; i < _cols.length; i++) {
Chunk oc = chks[_cols[i]];
NewChunk nc = nchks[i];
if (_isInt[i] == 1) { // Slice on integer columns
for (int j = rlo; j < rhi; j++)
if (oc._vec.isUUID()) nc.addUUID(oc, j);
else if (oc.isNA(j)) nc.addNA();
else nc.addNum(oc.at8(j), 0);
} else if (oc._vec.isString()) {
for (int j = rlo; j < rhi; j++)
nc.addStr(oc.atStr(new ValueString(), j));
} else {// Slice on double columns
for (int j = rlo; j < rhi; j++)
nc.addNum(oc.atd(j));
}
}
rlo = rhi;
if (_rows == null) break;
}
}
}
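// Usage sketch (hypothetical values): copy rows {0,1,2} of columns 1 and 3
// into a new Frame, mirroring the calls in the slice code above:
//   int[] cols = {1, 3};
//   Frame out = new DeepSlice(new long[]{0, 1, 2}, cols, fr.vecs())
//       .doAll(cols.length, fr).outputFrame(null, null);
// (The null names/domains are placeholders here; the caller above passes
// names(c2) and domains(c2).)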
/**
* Last column is a bit vec indicating whether or not to take the row.
*/
private static class DeepSelect extends MRTask<DeepSelect> {
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
Chunk pred = chks[chks.length-1];
for(int i = 0; i < pred._len; ++i) {
if( pred.atd(i) != 0 && !pred.isNA(i) ) {
for( int j = 0; j < chks.length - 1; j++ ) {
Chunk chk = chks[j];
if( chk._vec.isUUID() ) nchks[j].addUUID(chk, i);
else if(chk._vec.isString()) nchks[j].addStr((chk.atStr(new ValueString(), i)));
else nchks[j].addNum(chk.atd(i));
}
}
}
}
}
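// Semantics: a row is copied iff its predicate value is non-zero and not NA,
// i.e. boolean row selection of the form fr[mask, ] (hypothetical front-end
// syntax; this task only ever sees the materialized predicate column).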
private String[][] domains(int [] cols){
Vec[] vecs = vecs();
String[][] res = new String[cols.length][];
for(int i = 0; i < cols.length; ++i)
res[i] = vecs[cols[i]].domain();
return res;
}
private String [] names(int [] cols){
if(_names == null)return null;
String [] res = new String[cols.length];
for(int i = 0; i < cols.length; ++i)
res[i] = _names[cols[i]];
return res;
}
/** Return Frame 'f' if 'f' is compatible with 'this', else return a new
 * Frame compatible with 'this' containing a copy of 'f's data. Note that
 * this can, in the worst case, copy all of {@code f}'s data.
 * @return {@code f}'s data in a Frame that is compatible with {@code this}. */
public Frame makeCompatible( Frame f) {
// Small data frames are always "compatible"
if (anyVec() == null) // Or it is small
return f; // Then must be compatible
// Same VectorGroup is also compatible
Vec v1 = anyVec();
Vec v2 = f.anyVec();
if (v2 == null) return f;
if (v1.length() != v2.length())
throw new IllegalArgumentException("Cannot make vectors of different lengths compatible!");
if (v1.checkCompatible(v2))
return f;
// Ok, here make some new Vecs with compatible layout
Key k = Key.make();
H2O.submitTask(new RebalanceDataSet(this, f, k)).join();
Frame f2 = (Frame)k.get();
DKV.remove(k);
return f2;
}
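// Usage sketch: align a second frame's chunk layout with this one before a
// bulk per-row task over both, e.g. (hypothetical frames)
//   Frame aligned = train.makeCompatible(test);
// 'aligned' is either 'test' itself or a rebalanced copy of its data; in the
// latter case the caller is responsible for deleting it when done.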
/** Convert this Frame to a CSV (in an {@link InputStream}), that optionally
* is compatible with R 3.1's recent change to read.csv()'s behavior.
* @return An InputStream containing this Frame as a CSV */
public InputStream toCSV(boolean headers, boolean hex_string) {
return new CSVStream(headers, hex_string);
}
private class CSVStream extends InputStream {
private final boolean _hex_string;
byte[] _line;
int _position;
long _row;
CSVStream(boolean headers, boolean hex_string) {
_hex_string = hex_string;
StringBuilder sb = new StringBuilder();
Vec vs[] = vecs();
if( headers ) {
sb.append('"').append(_names[0]).append('"');
for(int i = 1; i < vs.length; i++)
sb.append(',').append('"').append(_names[i]).append('"');
sb.append('\n');
}
_line = sb.toString().getBytes();
}
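// Streaming design note: available() lazily renders one CSV row at a time
// into _line, so the whole Frame is never materialized as text in memory;
// read() and read(byte[],int,int) simply drain the current line buffer.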
@Override public int available() throws IOException {
if(_position == _line.length) {
if(_row == numRows())
return 0;
StringBuilder sb = new StringBuilder();
Vec vs[] = vecs();
for( int i = 0; i < vs.length; i++ ) {
if(i > 0) sb.append(',');
if(!vs[i].isNA(_row)) {
if( vs[i].isEnum() ) sb.append('"').append(vs[i].factor(vs[i].at8(_row))).append('"');
else if( vs[i].isUUID() ) sb.append(PrettyPrint.UUID(vs[i].at16l(_row), vs[i].at16h(_row)));
else if( vs[i].isInt() ) sb.append(vs[i].at8(_row));
else if (vs[i].isString()) sb.append(vs[i].atStr(new ValueString(), _row));
else {
double d = vs[i].at(_row);
// R 3.1 unfortunately changed the behavior of read.csv()
// (really type.convert()): numeric values with too much
// precision now trigger a type conversion into a factor.
String s = _hex_string ? Double.toHexString(d) : Double.toString(d);
sb.append(s);
}
}
}
sb.append('\n');
_line = sb.toString().getBytes();
_position = 0;
_row++;
}
return _line.length - _position;
}
@Override public void close() throws IOException {
super.close();
_line = null;
}
@Override public int read() throws IOException {
return available() == 0 ? -1 : _line[_position++];
}
@Override public int read(byte[] b, int off, int len) throws IOException {
int n = available();
if(n > 0) {
n = Math.min(n, len);
System.arraycopy(_line, _position, b, off, n);
_position += n;
}
return n;
}
}
}
|
package com.billybyte.clientserver.httpserver;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.billybyte.commoninterfaces.QueryInterface;
import com.billybyte.commonstaticmethods.CollectionsStaticMethods;
import com.billybyte.commonstaticmethods.Utils;
import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
@SuppressWarnings("restriction")
/**
* Generalized http server which calls a QueryInterface<String,List<String[]>
* instance to get its csv data, and then sends it back to the http caller
* @author bill perlman
*
*/
public class HttpCsvQueryServer {
private final QueryInterface<String, List<String[]>> csvQuery;
private final int httpPort;
private final HttpServer server;
private final String httpPath;
private final int timeoutValue;
private final TimeUnit timeUnitType;
private final String returnFileName;
/**
* @param httpPort
* @param httpPath
* @param csvQuery
* @param timeoutValue
* @param timeUnitType
* @param returnFileName
* @throws IOException
*/
public HttpCsvQueryServer(int httpPort, String httpPath,
QueryInterface<String, List<String[]>> csvQuery,
int timeoutValue,
TimeUnit timeUnitType,
String returnFileName) throws IOException {
super();
this.timeoutValue = timeoutValue;
this.timeUnitType = timeUnitType;
this.httpPort = httpPort;
this.httpPath = httpPath;
this.csvQuery = csvQuery;
this.returnFileName = returnFileName;
server = HttpServer.create(new InetSocketAddress(httpPort), 0);
server.createContext(httpPath, new MyHandler());
server.setExecutor(null); // creates a default executor
}
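// Note: with a null executor the built-in HttpServer handles requests on
// its own dispatch thread, so they are effectively served one at a time;
// pass a thread pool to setExecutor(...) if concurrent handling is needed.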
/**
*
* @param httpPort
* @param httpPath
* @param csvQuery
* @param timeoutValue
* @param timeUnitType
* @throws IOException
*/
public HttpCsvQueryServer(int httpPort, String httpPath,
QueryInterface<String, List<String[]>> csvQuery,
int timeoutValue,
TimeUnit timeUnitType) throws IOException {
this(httpPort,httpPath,csvQuery,timeoutValue,timeUnitType,"myFileName.csv");
}
public void start(){
server.start();
}
private class MyHandler implements HttpHandler {
public void handle(HttpExchange t) throws IOException {
String q = t.getRequestURI().getQuery();
String response = "";
List<String[]> csvList =
csvQuery.get(q,timeoutValue,timeUnitType);
// turn list of csv into a string
for(String[] csvLine: csvList){
String line = "";
for(String token: csvLine){
line += token+",";
}
line = line.substring(0,line.length()-1);
response += line + "\n";
}
// if a returnFileName has been specified, then add header info to
// the repsonse that will cause a file download rather than a display
// in the browser.
Headers headers = t.getResponseHeaders();
if(returnFileName!=null){
// This is a header to permit the download of the csv
headers.add("Content-Type", "text/csv");
headers.add("Content-Disposition", "attachment;filename="+returnFileName);
}
headers.add("Access-Control-Allow-Origin", "*");
// send the length in bytes, not chars (they differ for non-ASCII text)
byte[] payload = response.getBytes();
t.sendResponseHeaders(200, payload.length);
OutputStream os=t.getResponseBody();
Utils.prt(response);
os.write(payload);
os.close();
}
}
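// Example request against the test server in main() below:
//   curl -o quotes.csv "http://127.0.0.1:8888/dummyCrude?p1=data"
// ("quotes.csv" is an arbitrary local name; the Content-Disposition header
// set above is what prompts browsers to download rather than display.)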
public int getHttpPort() {
return httpPort;
}
public String getHttpPath() {
return httpPath;
}
/**
* test HttpCsvQueryServer
* @param args
*/
public static void main(String[] args) {
int httpPort = 8888;
String httpPath = "/dummyCrude";
QueryInterface<String, List<String[]>> csvQuery =
new TestQuery();
int timeoutValue = 1;
TimeUnit timeUnitType = TimeUnit.SECONDS;
try {
HttpCsvQueryServer csvs =
new HttpCsvQueryServer(
httpPort, httpPath,
csvQuery, timeoutValue, timeUnitType,null);
csvs.start();
Utils.prtObErrMess(HttpCsvQueryServer.class, "server started on port 8888.");
Utils.prtObErrMess(HttpCsvQueryServer.class, "Enter http://127.0.0.1:8888/dummyCrude?p1=data");
CollectionsStaticMethods.prtCsv(Utils.getCSVData("http://127.0.0.1:8888/dummyCrude?p1=data"));
} catch (IOException e) {
e.printStackTrace();
}
}
private static final class TestQuery
implements QueryInterface<String, List<String[]>> {
private final String[][] dummyCsv = {
{"shortName","bid","bidsize","ask","asksize"},
{"CL.FUT.NYMEX.USD.201601","65.25","100","65.30","105"},
{"CL.FUT.NYMEX.USD.201602","66.25","100","66.30","105"},
{"CL.FUT.NYMEX.USD.201603","67.25","100","67.30","105"},
{"CL.FUT.NYMEX.USD.201604","68.25","100","68.30","105"},
{"CL.FUT.NYMEX.USD.201605","69.25","100","69.30","105"},
};
private final String[][] badRet = {{"bad key"}};
@Override
public List<String[]> get(String key, int timeoutValue,
TimeUnit timeUnitType) {
String[] tokens = key.split("=");
if(tokens.length>1 && tokens[1].compareTo("data")==0){
List<String[]> ret =
Arrays.asList(dummyCsv);
return ret;
}
List<String[]> ret =
Arrays.asList(badRet);
return ret;
}
}
}
|
package com.conveyal.r5.point_to_point.builder;
import com.conveyal.r5.analyst.cluster.TaskStatistics;
import com.conveyal.r5.api.ProfileResponse;
import com.conveyal.r5.api.util.LegMode;
import com.conveyal.r5.api.util.ProfileOption;
import com.conveyal.r5.api.util.StreetSegment;
import com.conveyal.r5.profile.*;
import com.conveyal.r5.streets.StreetRouter;
import com.conveyal.r5.streets.VertexStore;
import com.conveyal.r5.transit.RouteInfo;
import com.conveyal.r5.transit.TransportNetwork;
import com.conveyal.r5.transit.TripPattern;
import gnu.trove.iterator.TIntIntIterator;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.TIntObjectMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.ZoneId;
import java.util.*;
public class PointToPointQuery {
private static final Logger LOG = LoggerFactory.getLogger(PointToPointQuery.class);
/**
* The largest number of stops to consider boarding at. If there are 1000 stops within 2km, only consider boarding at the closest 200.
*
* It's not clear this has a major effect on speed, so we could consider removing it.
*/
private static final int MAX_ACCESS_STOPS = 200;
private final TransportNetwork transportNetwork;
// Interpretation of the parameters below: if biking is less than BIKE_PENALTY seconds faster than walking, we prefer to walk
/** how many seconds worse biking to transit is than walking */
private static final int BIKE_PENALTY = 600;
/** how many seconds worse bikeshare is than just walking */
private static final int BIKESHARE_PENALTY = 300;
/** How many seconds worse driving to transit is than just walking */
private static final int CAR_PENALTY = 1200;
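// Worked example: a 9-minute bike ride to a stop (540 s) is weighted
// 540 + BIKE_PENALTY = 1140, so a 15-minute walk (900 s) still "wins"
// that stop in combineMultimodalRoutingAccessTimes() below.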
private static final EnumSet<LegMode> currentlyUnsupportedModes = EnumSet.of(LegMode.CAR_PARK);
/** Time to rent a bike in seconds */
private static final int BIKE_RENTAL_PICKUP_TIME_S = 60;
/**
* Cost of renting a bike. The cost is a bit more than actual time to model the associated cost and trouble.
*/
private static final int BIKE_RENTAL_PICKUP_COST = 120;
/** Time to drop-off a rented bike in seconds */
private static final int BIKE_RENTAL_DROPOFF_TIME_S = 30;
/** Cost of dropping-off a rented bike */
private static final int BIKE_RENTAL_DROPOFF_COST = 30;
/** Time to park car in P+R in seconds **/
private static final int CAR_PARK_DROPOFF_TIME_S = 120;
private static final int CAR_PARK_DROPOFF_COST = 120;
public PointToPointQuery(TransportNetwork transportNetwork) {
this.transportNetwork = transportNetwork;
}
public ZoneId getTimezone() {
return this.transportNetwork.getTimeZone();
}
//Does point to point routing with data from request
public ProfileResponse getPlan(ProfileRequest request) {
long startRouting = System.currentTimeMillis();
request.zoneId = transportNetwork.getTimeZone();
//Do the query and return result
ProfileResponse profileResponse = new ProfileResponse();
boolean transit = request.hasTransit();
TaskStatistics ts = new TaskStatistics();
EnumSet<LegMode> modes = transit ? request.accessModes : request.directModes;
ProfileOption option = new ProfileOption();
Map<LegMode, StreetRouter> accessRouter = new HashMap<>(modes.size());
Map<LegMode, StreetRouter> egressRouter = new HashMap<>(request.egressModes.size());
//Records which access mode was used to reach each stop
TIntObjectMap<LegMode> stopModeAccessMap = new TIntObjectHashMap<>();
//Records which egress mode was used to reach each stop
TIntObjectMap<LegMode> stopModeEgressMap = new TIntObjectHashMap<>();
//Routes all direct (if no transit)/access modes
for(LegMode mode: modes) {
long initialStopStartTime = System.currentTimeMillis();
StreetRouter streetRouter = new StreetRouter(transportNetwork.streetLayer);
StreetPath streetPath;
streetRouter.profileRequest = request;
if (!transit) {
//All direct modes have same time limit
streetRouter.timeLimitSeconds = request.streetTime * 60;
}
if (mode == LegMode.CAR_PARK && !transit) {
LOG.warn("Can't search for P+R without transit");
continue;
}
if (mode == LegMode.CAR_PARK) {
streetRouter = findParkRidePath(request, streetRouter);
if (streetRouter != null) {
accessRouter.put(LegMode.CAR_PARK, streetRouter);
ts.initialStopSearch += (int) (System.currentTimeMillis() - initialStopStartTime);
} else {
LOG.warn(
"MODE:{}, Edge near the origin coordinate wasn't found. Routing didn't start!",
mode);
}
continue;
} else if (mode == LegMode.BICYCLE_RENT) {
if (!transportNetwork.streetLayer.bikeSharing) {
LOG.warn("Bike sharing trip requested but no bike sharing stations in the streetlayer");
continue;
}
streetRouter = findBikeRentalPath(request, streetRouter, !transit);
if (streetRouter != null) {
if (transit) {
accessRouter.put(LegMode.BICYCLE_RENT, streetRouter);
continue;
} else {
StreetRouter.State lastState = streetRouter.getState(request.toLat, request.toLon);
if (lastState != null) {
streetPath = new StreetPath(lastState, streetRouter, LegMode.BICYCLE_RENT, transportNetwork);
} else {
LOG.warn("MODE:{}, Edge near the destination coordinate wasn't found. Routing didn't start!", mode);
continue;
}
}
} else {
LOG.warn("Not found path from cycle to end");
continue;
}
} else {
//TODO: add support for bike sharing and park and ride
streetRouter.streetMode = StreetMode.valueOf(mode.toString());
// TODO add time and distance limits to routing, not just weight.
// TODO apply walk and bike speeds and maxBike time.
//streetRouter.distanceLimitMeters = transit ? 2000 : 100_000; // FIXME arbitrary, and account for bike or car access mode
if (transit) {
//Gets the correct max car/bike/walk time limit (in seconds) for the access leg; the limit depends on the mode
streetRouter.timeLimitSeconds = request.getTimeLimit(mode);
} else {
//Time in direct search doesn't depend on mode
streetRouter.timeLimitSeconds = request.streetTime * 60;
}
if(streetRouter.setOrigin(request.fromLat, request.fromLon)) {
streetRouter.route();
//Searching for access paths
if (transit) {
//TIntIntMap stops = streetRouter.getReachedStops();
//reachedTransitStops.putAll(stops);
//LOG.info("Added {} stops for mode {}",stops.size(), mode);
accessRouter.put(mode, streetRouter);
ts.initialStopSearch += (int) (System.currentTimeMillis() - initialStopStartTime);
continue;
//Searching for direct paths
} else{
StreetRouter.State lastState = streetRouter.getState(request.toLat, request.toLon);
if (lastState != null) {
streetPath = new StreetPath(lastState, transportNetwork);
} else {
LOG.warn("MODE:{}, Edge near the end coordinate wasn't found. Routing didn't start!", mode);
continue;
}
}
} else {
LOG.warn("MODE:{}, Edge near the origin coordinate wasn't found. Routing didn't start!", mode);
continue;
}
}
StreetSegment streetSegment = new StreetSegment(streetPath, mode,
transportNetwork.streetLayer);
option.addDirect(streetSegment, request.getFromTimeDateZD());
}
if (transit) {
//For direct modes
for(LegMode mode: request.directModes) {
StreetRouter streetRouter = new StreetRouter(transportNetwork.streetLayer);
StreetPath streetPath;
streetRouter.profileRequest = request;
if (mode == LegMode.BICYCLE_RENT) {
if (!transportNetwork.streetLayer.bikeSharing) {
LOG.warn("Bike sharing trip requested but no bike sharing stations in the streetlayer");
continue;
}
streetRouter = findBikeRentalPath(request, streetRouter, true);
if (streetRouter != null) {
StreetRouter.State lastState = streetRouter.getState(request.toLat, request.toLon);
if (lastState != null) {
streetPath = new StreetPath(lastState, streetRouter, LegMode.BICYCLE_RENT, transportNetwork);
} else {
LOG.warn("MODE:{}, Edge near the destination coordinate wasn't found. Routing didn't start!", mode);
continue;
}
} else {
LOG.warn("Not found path from cycle to end");
continue;
}
} else {
streetRouter.streetMode = StreetMode.valueOf(mode.toString());
streetRouter.timeLimitSeconds = request.streetTime * 60;
if(streetRouter.setOrigin(request.fromLat, request.fromLon)) {
streetRouter.setDestination(request.toLat, request.toLon);
streetRouter.route();
StreetRouter.State lastState = streetRouter.getState(streetRouter.getDestinationSplit());
if (lastState == null) {
LOG.warn("Direct mode {} last state wasn't found", mode);
continue;
}
streetPath = new StreetPath(lastState, transportNetwork);
} else {
LOG.warn("Direct mode {} origin wasn't found!", mode);
continue;
}
}
StreetSegment streetSegment = new StreetSegment(streetPath, mode,
transportNetwork.streetLayer);
option.addDirect(streetSegment, request.getFromTimeDateZD());
}
//For egress
//TODO: this must be reverse search
for(LegMode mode: request.egressModes) {
StreetRouter streetRouter = new StreetRouter(transportNetwork.streetLayer);
if (currentlyUnsupportedModes.contains(mode)) {
continue;
}
//TODO: add support for bike sharing
streetRouter.streetMode = StreetMode.valueOf(mode.toString());
streetRouter.profileRequest = request;
// TODO add time and distance limits to routing, not just weight.
// TODO apply walk and bike speeds and maxBike time.
//streetRouter.distanceLimitMeters = 2000; // FIXME arbitrary, and account for bike or car access mode
streetRouter.timeLimitSeconds = request.getTimeLimit(mode);
if(streetRouter.setOrigin(request.toLat, request.toLon)) {
streetRouter.route();
TIntIntMap stops = streetRouter.getReachedStops();
egressRouter.put(mode, streetRouter);
LOG.info("Added {} edgres stops for mode {}",stops.size(), mode);
} else {
LOG.warn("MODE:{}, Edge near the origin coordinate wasn't found. Routing didn't start!", mode);
}
}
option.summary = option.generateSummary();
profileResponse.addOption(option);
// fold access and egress times into single maps
TIntIntMap accessTimes = combineMultimodalRoutingAccessTimes(accessRouter, stopModeAccessMap, request);
TIntIntMap egressTimes = combineMultimodalRoutingAccessTimes(egressRouter, stopModeEgressMap, request);
McRaptorSuboptimalPathProfileRouter router = new McRaptorSuboptimalPathProfileRouter(transportNetwork, request, accessTimes, egressTimes);
List<PathWithTimes> usefulPathList = new ArrayList<>();
// getPaths actually returns a set, which is important so that things are deduplicated. However we need a list
// so we can sort it below.
usefulPathList.addAll(router.getPaths());
// This sort is only needed for the text debug output below; it can be removed once debugging is done.
// Order: fewest transfers first, then by first trip (board stop, alight stop,
// alight time), then the same fields for the transfer trip if there is one.
usefulPathList.sort((o1, o2) -> {
int c;
c = Integer.compare(o1.patterns.length, o2.patterns.length);
if (c==0) {
c = Integer.compare(o1.boardStops[0], o2.boardStops[0]);
}
if (c==0) {
c = Integer.compare(o1.alightStops[0], o2.alightStops[0]);
}
if (c==0) {
c = Integer.compare(o1.alightTimes[0], o2.alightTimes[0]);
}
if (c==0 && o1.patterns.length == 2) {
c = Integer.compare(o1.boardStops[1], o2.boardStops[1]);
if (c==0) {
c = Integer.compare(o1.alightStops[1], o2.alightStops[1]);
}
if (c==0) {
c = Integer.compare(o1.alightTimes[1], o2.alightTimes[1]);
}
}
return c;
});
LOG.info("Usefull paths:{}", usefullpathList.size());
int seen_paths = 0;
int boardStop =-1, alightStop = -1;
for (PathWithTimes path : usefulPathList) {
profileResponse.addTransitPath(accessRouter, egressRouter, stopModeAccessMap, stopModeEgressMap, path, transportNetwork, request.getFromTimeDateZD());
//LOG.info("Num patterns:{}", path.patterns.length);
//ProfileOption transit_option = new ProfileOption();
/*if (path.patterns.length == 1) {
continue;
}*/
/*if (seen_paths > 20) {
break;
}*/
if (LOG.isDebugEnabled()) {
LOG.debug(" ");
for (int i = 0; i < path.patterns.length; i++) {
//TransitSegment transitSegment = new TransitSegment(transportNetwork.transitLayer, path.boardStops[i], path.alightStops[i], path.patterns[i]);
if (!(((boardStop == path.boardStops[i] && alightStop == path.alightStops[i])))) {
LOG.debug(" BoardStop: {} pattern: {} allightStop: {}", path.boardStops[i],
path.patterns[i], path.alightStops[i]);
}
TripPattern pattern = transportNetwork.transitLayer.tripPatterns.get(path.patterns[i]);
if (pattern.routeIndex >= 0) {
RouteInfo routeInfo = transportNetwork.transitLayer.routes.get(pattern.routeIndex);
LOG.debug(" Pattern:{} on route:{} ({}) with {} stops", path.patterns[i],
routeInfo.route_long_name, routeInfo.route_short_name, pattern.stops.length);
}
LOG.debug(" {}->{} ({}:{})", transportNetwork.transitLayer.stopNames.get(path.boardStops[i]),
transportNetwork.transitLayer.stopNames.get(path.alightStops[i]),
path.alightTimes[i] / 3600, path.alightTimes[i] % 3600 / 60);
//transit_option.addTransit(transitSegment);
}
boardStop = path.boardStops[0];
alightStop = path.alightStops[0];
}
seen_paths++;
}
profileResponse.generateStreetTransfers(transportNetwork, request);
} else {
option.summary = option.generateSummary();
profileResponse.addOption(option);
}
LOG.info("Returned {} options", profileResponse.getOptions().size());
LOG.info("Took {} ms", System.currentTimeMillis() - startRouting);
return profileResponse;
}
/**
 * Uses 2 street searches to get a P+R path.
 *
 * First a CAR search from fromLat/fromLon to all car parks, then a WALK search from the car parks that were found.
 *
 * The result is then used as the access part, since P+R in direct mode is useless.
 * @param request profileRequest from which from/to destination is used
 * @param streetRouter router on which profileRequest was already set
 * @return the walking router, or null if no path is found
 */
private StreetRouter findParkRidePath(ProfileRequest request, StreetRouter streetRouter) {
streetRouter.streetMode = StreetMode.CAR;
streetRouter.timeLimitSeconds = request.maxCarTime * 60;
if(streetRouter.setOrigin(request.fromLat, request.fromLon)) {
streetRouter.route();
TIntObjectMap<StreetRouter.State> carParks = streetRouter.getReachedVertices(VertexStore.VertexFlag.PARK_AND_RIDE);
LOG.info("CAR PARK: Found {} car parks", carParks.size());
StreetRouter walking = new StreetRouter(transportNetwork.streetLayer);
walking.streetMode = StreetMode.WALK;
walking.profileRequest = request;
walking.timeLimitSeconds = request.maxCarTime * 60;
walking.setOrigin(carParks, CAR_PARK_DROPOFF_TIME_S, CAR_PARK_DROPOFF_COST, LegMode.CAR_PARK);
walking.route();
walking.previousRouter = streetRouter;
return walking;
} else {
return null;
}
}
/**
 * Uses 3 street searches: first from fromLat/fromLon to all bike rental stations in
 * WALK mode, then from those stations to all other bike rental stations in BICYCLE
 * mode, and finally from the reached stations onward in WALK mode.
 *
 * This can then be used as the streetRouter for access paths, or as a direct search to a specific destination.
 *
 * The last streetRouter (WALK from the bike rentals) is returned.
 * @param request profileRequest from which from/to destination is used
 * @param streetRouter router on which profileRequest was already set
 * @param direct true when this is a direct (non-transit) search
 * @return the final walking router, or null if no path is found
 */
private StreetRouter findBikeRentalPath(ProfileRequest request, StreetRouter streetRouter,
boolean direct) {
streetRouter.streetMode = StreetMode.WALK;
// TODO add time and distance limits to routing, not just weight.
streetRouter.timeLimitSeconds = request.maxWalkTime * 60;
if(streetRouter.setOrigin(request.fromLat, request.fromLon)) {
streetRouter.route();
//This finds all the nearest bicycle rent stations when walking
TIntObjectMap<StreetRouter.State> bikeStations = streetRouter.getReachedVertices(VertexStore.VertexFlag.BIKE_SHARING);
LOG.info("BIKE RENT: Found {} bike stations which are {} minutes away", bikeStations.size(), streetRouter.timeLimitSeconds/60);
/*LOG.info("Start to bike share:");
bikeStations.forEachEntry((idx, state) -> {
LOG.info(" {} ({}m)", idx, state.distance);
return true;
});*/
//This finds best cycling path from best start bicycle station to end bicycle station
StreetRouter bicycle = new StreetRouter(transportNetwork.streetLayer);
bicycle.previousRouter = streetRouter;
bicycle.streetMode = StreetMode.BICYCLE;
bicycle.profileRequest = request;
//Longer bike part if this is direct search
if (direct) {
bicycle.timeLimitSeconds = request.streetTime * 60;
} else {
bicycle.timeLimitSeconds = request.maxBikeTime * 60;
}
bicycle.setOrigin(bikeStations, BIKE_RENTAL_PICKUP_TIME_S, BIKE_RENTAL_PICKUP_COST, LegMode.BICYCLE_RENT);
bicycle.route();
TIntObjectMap<StreetRouter.State> cycledStations = bicycle.getReachedVertices(VertexStore.VertexFlag.BIKE_SHARING);
LOG.info("BIKE RENT: Found {} cycled stations which are {} minutes away", cycledStations.size(), bicycle.timeLimitSeconds/60);
/*LOG.info("Bike share to bike share:");
cycledStations.retainEntries((idx, state) -> {
if (bikeStations.containsKey(idx)) {
LOG.warn(" MM:{} ({}m)", idx, state.distance/1000);
return false;
} else {
LOG.info(" {} ({}m)", idx, state.distance / 1000);
return true;
}
});*/
//This searches for walking path from end bicycle station to end point
StreetRouter end = new StreetRouter(transportNetwork.streetLayer);
end.streetMode = StreetMode.WALK;
end.profileRequest = request;
end.timeLimitSeconds = request.maxBikeTime * 60;
end.setOrigin(cycledStations, BIKE_RENTAL_DROPOFF_TIME_S, BIKE_RENTAL_DROPOFF_COST, LegMode.BICYCLE_RENT);
end.route();
end.previousRouter = bicycle;
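// The three routers are chained via previousRouter (end -> bicycle ->
// initial walk), so a final state can later be unwound across all three
// legs of the rental trip when the StreetPath is built.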
return end;
} else {
return null;
}
}
/** Combine the results of several street searches using different modes into a single map.
 * It also records in stopModeMap which mode reached each stop; that map is later
 * used to build the itineraries in the response. */
private TIntIntMap combineMultimodalRoutingAccessTimes(Map<LegMode, StreetRouter> routers,
TIntObjectMap<LegMode> stopModeMap, ProfileRequest request) {
// times at transit stops
TIntIntMap times = new TIntIntHashMap();
// weights at transit stops
TIntIntMap weights = new TIntIntHashMap();
for (Map.Entry<LegMode, StreetRouter> entry : routers.entrySet()) {
int maxTime = 30;
int minTime = 0;
int penalty = 0;
LegMode mode = entry.getKey();
switch (mode) {
case BICYCLE:
maxTime = request.maxBikeTime;
minTime = request.minBikeTime;
penalty = BIKE_PENALTY;
break;
case BICYCLE_RENT:
// TODO this is not strictly correct, bike rent is partly walking
maxTime = request.maxBikeTime;
minTime = request.minBikeTime;
penalty = BIKESHARE_PENALTY;
break;
case WALK:
maxTime = request.maxWalkTime;
break;
case CAR:
//TODO this is not strictly correct, CAR PARK is partly walking
case CAR_PARK:
maxTime = request.maxCarTime;
minTime = request.minCarTime;
penalty = CAR_PENALTY;
break;
}
maxTime *= 60; // convert to seconds
minTime *= 60; // convert to seconds
final int maxTimeFinal = maxTime;
final int minTimeFinal = minTime;
final int penaltyFinal = penalty;
StreetRouter router = entry.getValue();
router.getReachedStops().forEachEntry((stop, time) -> {
if (time > maxTimeFinal || time < minTimeFinal) return true;
//Skip stops that can't be used with wheelchairs if wheelchair routing is requested
if (request.wheelchair && !transportNetwork.transitLayer.stopsWheelchair.get(stop)) {
return true;
}
int weight = time + penaltyFinal;
// There are penalties for using certain modes, to avoid bike/car trips that are only marginally faster
// than walking, so we use weights to decide which mode "wins" to access a particular stop.
if (!weights.containsKey(stop) || weight < weights.get(stop)) {
times.put(stop, time);
weights.put(stop, weight);
stopModeMap.put(stop, mode);
}
return true; // iteration should continue
});
}
// we don't want to explore a boatload of access/egress stops. Pick only the closest several hundred.
// What this means is that in urban environments you'll get on the bus nearby, in suburban environments
// you may walk/bike/drive a very long way.
// NB in testing it's not clear this actually does a lot for performance, maybe 1-1.5s
int stopsFound = times.size();
if (stopsFound > MAX_ACCESS_STOPS) {
TIntList timeList = new TIntArrayList();
times.forEachValue(timeList::add);
timeList.sort();
// The cutoff is the travel time of the (MAX_ACCESS_STOPS+1)-th closest stop;
// index MAX_ACCESS_STOPS is valid because the list has more than MAX_ACCESS_STOPS entries here.
int cutoff = timeList.get(MAX_ACCESS_STOPS);
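// e.g. with 350 reachable stops the cutoff is the time of the 201st-closest
// stop; only strictly slower stops are removed below, so ties at the cutoff
// can leave slightly more than MAX_ACCESS_STOPS entries in the map.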
for (TIntIntIterator it = times.iterator(); it.hasNext();) {
it.advance();
if (it.value() > cutoff) it.remove();
}
LOG.warn("{} stops found, using {} nearest", stopsFound, times.size());
}
LOG.info("{} stops found", stopsFound);
// return the times, not the weights
return times;
}
}
|
package cn.cerc.ui.vcl;
import java.util.ArrayList;
import java.util.List;
import cn.cerc.ui.core.HtmlContent;
import cn.cerc.ui.core.HtmlWriter;
import cn.cerc.ui.core.UICustomComponent;
import cn.cerc.ui.parts.UIComponent;
public class UIDiv extends UICustomComponent {
private List<HtmlContent> contents = new ArrayList<>();
private String text = null;
public UIDiv() {
super();
}
public UIDiv(UIComponent owner) {
super(owner);
}
@Override
public void output(HtmlWriter html) {
html.print("<div");
super.outputCss(html);
html.print(">");
super.output(html);
if (text != null)
html.print(text);
for (HtmlContent content : contents) {
content.output(html);
}
html.println("</div>");
}
public UIDiv setText(String text) {
this.text = text;
return this;
}
public UIDiv setText(String format, Object... args) {
this.text = String.format(format, args);
return this;
}
public String getText() {
return text;
}
}
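// Usage sketch (hypothetical parent component):
//   new UIDiv(parent).setText("Hello %s", name);
// output() then emits a single <div> containing the child components,
// followed by the text and any attached HtmlContent items.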
|
package com.conveyal.r5.point_to_point.builder;
import com.conveyal.r5.api.ProfileResponse;
import com.conveyal.r5.api.util.LegMode;
import com.conveyal.r5.api.util.ProfileOption;
import com.conveyal.r5.api.util.StreetSegment;
import com.conveyal.r5.profile.*;
import com.conveyal.r5.streets.Split;
import com.conveyal.r5.streets.StreetRouter;
import com.conveyal.r5.streets.VertexStore;
import com.conveyal.r5.transit.RouteInfo;
import com.conveyal.r5.transit.TransportNetwork;
import com.conveyal.r5.transit.TripPattern;
import gnu.trove.iterator.TIntIntIterator;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.TIntObjectMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.ZoneId;
import java.util.*;
public class PointToPointQuery {
private static final Logger LOG = LoggerFactory.getLogger(PointToPointQuery.class);
/**
* The largest number of stops to consider boarding at.
* If there are 1000 stops within 2km, only consider boarding at the closest 200.
* It's not clear this has a major effect on speed, so we could consider removing it.
*/
public static final int MAX_ACCESS_STOPS = 200;
private final TransportNetwork transportNetwork;
// Interpretation of the parameters below: if biking is less than BIKE_PENALTY seconds faster than walking, we prefer to walk
/** how many seconds worse biking to transit is than walking */
private static final int BIKE_PENALTY = 600;
/** how many seconds worse bikeshare is than just walking */
private static final int BIKESHARE_PENALTY = 300;
/** How many seconds worse driving to transit is than just walking */
private static final int CAR_PENALTY = 1200;
private static final EnumSet<LegMode> egressUnsupportedModes = EnumSet.of(LegMode.CAR_PARK);
/** Time to rent a bike in seconds */
private static final int BIKE_RENTAL_PICKUP_TIME_S = 60;
/**
* Cost of renting a bike. The cost is a bit more than actual time to model the associated cost and trouble.
*/
private static final int BIKE_RENTAL_PICKUP_COST = 120;
/** Time to drop-off a rented bike in seconds */
private static final int BIKE_RENTAL_DROPOFF_TIME_S = 30;
/** Cost of dropping-off a rented bike */
private static final int BIKE_RENTAL_DROPOFF_COST = 30;
/** Time to park car in P+R in seconds **/
private static final int CAR_PARK_DROPOFF_TIME_S = 120;
private static final int CAR_PARK_DROPOFF_COST = 120;
public PointToPointQuery(TransportNetwork transportNetwork) {
this.transportNetwork = transportNetwork;
}
public ZoneId getTimezone() {
return this.transportNetwork.getTimeZone();
}
//Does point to point routing with data from request
public ProfileResponse getPlan(ProfileRequest request) {
long startRouting = System.currentTimeMillis();
request.zoneId = transportNetwork.getTimeZone();
//Do the query and return result
ProfileResponse profileResponse = new ProfileResponse();
ProfileOption option = new ProfileOption();
findDirectPaths(request, option);
option.summary = option.generateSummary();
profileResponse.addOption(option);
if (request.hasTransit()) {
Map<LegMode, StreetRouter> accessRouter = new HashMap<>(request.accessModes.size());
Map<LegMode, StreetRouter> egressRouter = new HashMap<>(request.egressModes.size());
//Records which access mode was used to reach each stop
TIntObjectMap<LegMode> stopModeAccessMap = new TIntObjectHashMap<>();
//Records which egress mode was used to reach each stop
TIntObjectMap<LegMode> stopModeEgressMap = new TIntObjectHashMap<>();
findAccessPaths(request, accessRouter);
findEgressPaths(request, egressRouter);
// fold access and egress times into single maps
TIntIntMap accessTimes = combineMultimodalRoutingAccessTimes(accessRouter, stopModeAccessMap, request);
TIntIntMap egressTimes = combineMultimodalRoutingAccessTimes(egressRouter, stopModeEgressMap, request);
McRaptorSuboptimalPathProfileRouter router = new McRaptorSuboptimalPathProfileRouter(transportNetwork, request, accessTimes, egressTimes);
List<PathWithTimes> usefulPathList = new ArrayList<>();
// getPaths actually returns a set, which is important so that things are deduplicated. However we need a list
// so we can sort it below.
usefulPathList.addAll(router.getPaths());
// This sort is only needed for the text debug output below; it can be removed once debugging is done.
// Order: fewest transfers first, then by first trip (board stop, alight stop,
// alight time), then the same fields for the transfer trip if there is one.
usefulPathList.sort((o1, o2) -> {
int c;
c = Integer.compare(o1.patterns.length, o2.patterns.length);
if (c==0) {
c = Integer.compare(o1.boardStops[0], o2.boardStops[0]);
}
if (c==0) {
c = Integer.compare(o1.alightStops[0], o2.alightStops[0]);
}
if (c==0) {
c = Integer.compare(o1.alightTimes[0], o2.alightTimes[0]);
}
if (c==0 && o1.patterns.length == 2) {
c = Integer.compare(o1.boardStops[1], o2.boardStops[1]);
if (c==0) {
c = Integer.compare(o1.alightStops[1], o2.alightStops[1]);
}
if (c==0) {
c = Integer.compare(o1.alightTimes[1], o2.alightTimes[1]);
}
}
return c;
});
LOG.info("Usefull paths:{}", usefullpathList.size());
int seen_paths = 0;
int boardStop =-1, alightStop = -1;
for (PathWithTimes path : usefulPathList) {
profileResponse.addTransitPath(accessRouter, egressRouter, stopModeAccessMap, stopModeEgressMap, path, transportNetwork, request.getFromTimeDateZD());
//LOG.info("Num patterns:{}", path.patterns.length);
//ProfileOption transit_option = new ProfileOption();
/*if (path.patterns.length == 1) {
continue;
}*/
/*if (seen_paths > 20) {
break;
}*/
if (LOG.isDebugEnabled()) {
LOG.debug(" ");
for (int i = 0; i < path.patterns.length; i++) {
//TransitSegment transitSegment = new TransitSegment(transportNetwork.transitLayer, path.boardStops[i], path.alightStops[i], path.patterns[i]);
if (!(((boardStop == path.boardStops[i] && alightStop == path.alightStops[i])))) {
LOG.debug(" BoardStop: {} pattern: {} allightStop: {}", path.boardStops[i],
path.patterns[i], path.alightStops[i]);
}
TripPattern pattern = transportNetwork.transitLayer.tripPatterns.get(path.patterns[i]);
if (pattern.routeIndex >= 0) {
RouteInfo routeInfo = transportNetwork.transitLayer.routes.get(pattern.routeIndex);
LOG.debug(" Pattern:{} on route:{} ({}) with {} stops", path.patterns[i],
routeInfo.route_long_name, routeInfo.route_short_name, pattern.stops.length);
}
LOG.debug(" {}->{} ({}:{})", transportNetwork.transitLayer.stopNames.get(path.boardStops[i]),
transportNetwork.transitLayer.stopNames.get(path.alightStops[i]),
path.alightTimes[i] / 3600, path.alightTimes[i] % 3600 / 60);
//transit_option.addTransit(transitSegment);
}
boardStop = path.boardStops[0];
alightStop = path.alightStops[0];
}
seen_paths++;
}
profileResponse.generateStreetTransfers(transportNetwork, request);
}
LOG.info("Returned {} options", profileResponse.getOptions().size());
LOG.info("Took {} ms", System.currentTimeMillis() - startRouting);
return profileResponse;
}
/**
 * Finds all egress paths from the 'to' coordinate to transit stops and adds the routers to egressRouter
 * @param request the profile request
 * @param egressRouter map from egress mode to its street router
 */
private void findEgressPaths(ProfileRequest request, Map<LegMode, StreetRouter> egressRouter) {
//For egress
//TODO: this must be reverse search
for(LegMode mode: request.egressModes) {
StreetRouter streetRouter = new StreetRouter(transportNetwork.streetLayer);
streetRouter.transitStopSearch = true;
streetRouter.dominanceVariable = StreetRouter.State.RoutingVariable.DURATION_SECONDS;
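// Assumption about StreetRouter semantics: transitStopSearch makes the
// router collect reached transit stops, and dominating on DURATION_SECONDS
// compares states by elapsed time rather than by generalized weight.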
if (egressUnsupportedModes.contains(mode)) {
continue;
}
//TODO: add support for bike sharing
streetRouter.streetMode = StreetMode.valueOf(mode.toString());
streetRouter.profileRequest = request;
streetRouter.timeLimitSeconds = request.getTimeLimit(mode);
if(streetRouter.setOrigin(request.toLat, request.toLon)) {
streetRouter.route();
TIntIntMap stops = streetRouter.getReachedStops();
egressRouter.put(mode, streetRouter);
LOG.info("Added {} edgres stops for mode {}",stops.size(), mode);
} else {
LOG.warn("MODE:{}, Edge near the origin coordinate wasn't found. Routing didn't start!", mode);
}
}
}
/**
 * Finds direct paths between the from and to coordinates in the request and adds them to the option
 * @param request the profile request
 * @param option the profile option to which direct street segments are added
 */
private void findDirectPaths(ProfileRequest request, ProfileOption option) {
//For direct modes
for(LegMode mode: request.directModes) {
StreetRouter streetRouter = new StreetRouter(transportNetwork.streetLayer);
StreetPath streetPath;
streetRouter.profileRequest = request;
if (mode == LegMode.BICYCLE_RENT) {
if (!transportNetwork.streetLayer.bikeSharing) {
LOG.warn("Bike sharing trip requested but no bike sharing stations in the streetlayer");
continue;
}
streetRouter = findBikeRentalPath(request, streetRouter, true);
if (streetRouter != null) {
StreetRouter.State lastState = streetRouter.getState(request.toLat, request.toLon);
if (lastState != null) {
streetPath = new StreetPath(lastState, streetRouter, LegMode.BICYCLE_RENT, transportNetwork);
} else {
LOG.warn("MODE:{}, Edge near the destination coordinate wasn't found. Routing didn't start!", mode);
continue;
}
} else {
LOG.warn("Not found path from cycle to end");
continue;
}
} else {
streetRouter.streetMode = StreetMode.valueOf(mode.toString());
streetRouter.timeLimitSeconds = request.streetTime * 60;
if(streetRouter.setOrigin(request.fromLat, request.fromLon)) {
if(!streetRouter.setDestination(request.toLat, request.toLon)) {
LOG.warn("Direct mode {} destination wasn't found!", mode);
continue;
}
streetRouter.route();
StreetRouter.State lastState = streetRouter.getState(streetRouter.getDestinationSplit());
if (lastState == null) {
LOG.warn("Direct mode {} last state wasn't found", mode);
continue;
}
streetPath = new StreetPath(lastState, transportNetwork);
} else {
LOG.warn("Direct mode {} origin wasn't found!", mode);
continue;
}
}
StreetSegment streetSegment = new StreetSegment(streetPath, mode,
transportNetwork.streetLayer);
option.addDirect(streetSegment, request.getFromTimeDateZD());
}
}
/**
 * Finds access paths from the 'from' coordinate in the request and adds all routers with paths to the accessRouter map
 * @param request the profile request
 * @param accessRouter map from access mode to its street router
 */
private void findAccessPaths(ProfileRequest request, Map<LegMode, StreetRouter> accessRouter) {
//Routes all access modes
for(LegMode mode: request.accessModes) {
StreetRouter streetRouter = new StreetRouter(transportNetwork.streetLayer);
streetRouter.profileRequest = request;
if (mode == LegMode.CAR_PARK) {
streetRouter = findParkRidePath(request, streetRouter);
if (streetRouter != null) {
accessRouter.put(LegMode.CAR_PARK, streetRouter);
} else {
LOG.warn(
"MODE:{}, Edge near the origin coordinate wasn't found. Routing didn't start!",
mode);
}
} else if (mode == LegMode.BICYCLE_RENT) {
if (!transportNetwork.streetLayer.bikeSharing) {
LOG.warn("Bike sharing trip requested but no bike sharing stations in the streetlayer");
continue;
}
streetRouter = findBikeRentalPath(request, streetRouter, false);
if (streetRouter != null) {
accessRouter.put(LegMode.BICYCLE_RENT, streetRouter);
} else {
LOG.warn("Not found path from cycle to end");
}
} else {
streetRouter.streetMode = StreetMode.valueOf(mode.toString());
//Gets the correct max car/bike/walk time limit (in seconds) for the access leg; the limit depends on the mode
streetRouter.timeLimitSeconds = request.getTimeLimit(mode);
streetRouter.transitStopSearch = true;
streetRouter.dominanceVariable = StreetRouter.State.RoutingVariable.DURATION_SECONDS;
if(streetRouter.setOrigin(request.fromLat, request.fromLon)) {
streetRouter.route();
//Searching for access paths
accessRouter.put(mode, streetRouter);
} else {
LOG.warn("MODE:{}, Edge near the origin coordinate wasn't found. Routing didn't start!", mode);
}
}
}
}
/**
 * Uses 2 street searches to get a P+R path.
 *
 * First a CAR search from fromLat/fromLon to all car parks, then a WALK search from the car parks that were found.
 *
 * The result is then used as the access part, since P+R in direct mode is useless.
 * @param request profileRequest from which from/to destination is used
 * @param streetRouter router on which profileRequest was already set
 * @return the walking router, or null if no path is found
 */
private StreetRouter findParkRidePath(ProfileRequest request, StreetRouter streetRouter) {
streetRouter.streetMode = StreetMode.CAR;
streetRouter.timeLimitSeconds = request.maxCarTime * 60;
streetRouter.flagSearch = VertexStore.VertexFlag.PARK_AND_RIDE;
streetRouter.dominanceVariable = StreetRouter.State.RoutingVariable.DURATION_SECONDS;
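// Assumption: flagSearch steers the CAR leg toward vertices flagged
// PARK_AND_RIDE, which are read back via getReachedVertices() below.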
if(streetRouter.setOrigin(request.fromLat, request.fromLon)) {
streetRouter.route();
TIntObjectMap<StreetRouter.State> carParks = streetRouter.getReachedVertices(VertexStore.VertexFlag.PARK_AND_RIDE);
LOG.info("CAR PARK: Found {} car parks", carParks.size());
StreetRouter walking = new StreetRouter(transportNetwork.streetLayer);
walking.streetMode = StreetMode.WALK;
walking.profileRequest = request;
walking.timeLimitSeconds = request.maxCarTime * 60;
walking.transitStopSearch = true;
walking.setOrigin(carParks, CAR_PARK_DROPOFF_TIME_S, CAR_PARK_DROPOFF_COST, LegMode.CAR_PARK);
walking.dominanceVariable = StreetRouter.State.RoutingVariable.DURATION_SECONDS;
walking.route();
walking.previousRouter = streetRouter;
return walking;
} else {
return null;
}
}
/**
 * Uses 3 street searches: first from fromLat/fromLon to all bike rental stations in
 * WALK mode, then from those stations to all other bike rental stations in BICYCLE
 * mode, and finally from the reached stations onward in WALK mode.
 *
 * This can then be used as the streetRouter for access paths, or as a direct search to a specific destination.
 *
 * The last streetRouter (WALK from the bike rentals) is returned.
 * @param request profileRequest from which from/to destination is used
 * @param streetRouter router on which profileRequest was already set
 * @param direct true when this is a direct (non-transit) search
 * @return the final walking router, or null if no path is found
 */
private StreetRouter findBikeRentalPath(ProfileRequest request, StreetRouter streetRouter,
boolean direct) {
streetRouter.streetMode = StreetMode.WALK;
// TODO add time and distance limits to routing, not just weight.
streetRouter.timeLimitSeconds = request.maxWalkTime * 60;
if (!direct) {
streetRouter.dominanceVariable = StreetRouter.State.RoutingVariable.DURATION_SECONDS;
}
if(streetRouter.setOrigin(request.fromLat, request.fromLon)) {
//if we can't find the destination we can stop the search before even trying
if (direct && !streetRouter.setDestination(request.toLat, request.toLon)) {
return null;
}
Split destinationSplit = streetRouter.getDestinationSplit();
//remember the destination split and clear the router's destination; it is only needed for the last part of the routing
streetRouter.setDestination(null);
streetRouter.route();
//This finds all the nearest bicycle rent stations when walking
TIntObjectMap<StreetRouter.State> bikeStations = streetRouter.getReachedVertices(VertexStore.VertexFlag.BIKE_SHARING);
LOG.info("BIKE RENT: Found {} bike stations which are {} minutes away", bikeStations.size(), streetRouter.timeLimitSeconds/60);
/*LOG.info("Start to bike share:");
bikeStations.forEachEntry((idx, state) -> {
LOG.info(" {} ({}m)", idx, state.distance);
return true;
});*/
//This finds best cycling path from best start bicycle station to end bicycle station
StreetRouter bicycle = new StreetRouter(transportNetwork.streetLayer);
bicycle.previousRouter = streetRouter;
bicycle.streetMode = StreetMode.BICYCLE;
bicycle.profileRequest = request;
//Longer bike part if this is direct search
if (direct) {
bicycle.timeLimitSeconds = request.streetTime * 60;
} else {
bicycle.timeLimitSeconds = request.maxBikeTime * 60;
bicycle.dominanceVariable = StreetRouter.State.RoutingVariable.DURATION_SECONDS;
}
bicycle.setOrigin(bikeStations, BIKE_RENTAL_PICKUP_TIME_S, BIKE_RENTAL_PICKUP_COST, LegMode.BICYCLE_RENT);
bicycle.route();
TIntObjectMap<StreetRouter.State> cycledStations = bicycle.getReachedVertices(VertexStore.VertexFlag.BIKE_SHARING);
LOG.info("BIKE RENT: Found {} cycled stations which are {} minutes away", cycledStations.size(), bicycle.timeLimitSeconds/60);
/*LOG.info("Bike share to bike share:");
cycledStations.retainEntries((idx, state) -> {
if (bikeStations.containsKey(idx)) {
LOG.warn(" MM:{} ({}m)", idx, state.distance/1000);
return false;
} else {
LOG.info(" {} ({}m)", idx, state.distance / 1000);
return true;
}
});*/
//This searches for walking path from end bicycle station to end point
StreetRouter end = new StreetRouter(transportNetwork.streetLayer);
end.streetMode = StreetMode.WALK;
end.profileRequest = request;
end.timeLimitSeconds = request.maxBikeTime * 60;
if (!direct) {
end.transitStopSearch = true;
end.dominanceVariable = StreetRouter.State.RoutingVariable.DURATION_SECONDS;
}
end.setOrigin(cycledStations, BIKE_RENTAL_DROPOFF_TIME_S, BIKE_RENTAL_DROPOFF_COST, LegMode.BICYCLE_RENT);
end.route();
end.previousRouter = bicycle;
return end;
} else {
return null;
}
}
/** Combine the results of several street searches using different modes into a single map.
 * It also records in stopModeMap which mode reached each stop; that map is later
 * used to build the itineraries in the response. */
private TIntIntMap combineMultimodalRoutingAccessTimes(Map<LegMode, StreetRouter> routers,
TIntObjectMap<LegMode> stopModeMap, ProfileRequest request) {
// times at transit stops
TIntIntMap times = new TIntIntHashMap();
// weights at transit stops
TIntIntMap weights = new TIntIntHashMap();
for (Map.Entry<LegMode, StreetRouter> entry : routers.entrySet()) {
int maxTime = 30;
int minTime = 0;
int penalty = 0;
LegMode mode = entry.getKey();
switch (mode) {
case BICYCLE:
maxTime = request.maxBikeTime;
minTime = request.minBikeTime;
penalty = BIKE_PENALTY;
break;
case BICYCLE_RENT:
// TODO this is not strictly correct, bike rent is partly walking
maxTime = request.maxBikeTime;
minTime = request.minBikeTime;
penalty = BIKESHARE_PENALTY;
break;
case WALK:
maxTime = request.maxWalkTime;
break;
case CAR:
//TODO this is not strictly correct, CAR PARK is partly walking
case CAR_PARK:
maxTime = request.maxCarTime;
minTime = request.minCarTime;
penalty = CAR_PENALTY;
break;
}
maxTime *= 60; // convert to seconds
minTime *= 60; // convert to seconds
final int maxTimeFinal = maxTime;
final int minTimeFinal = minTime;
final int penaltyFinal = penalty;
StreetRouter router = entry.getValue();
router.getReachedStops().forEachEntry((stop, time) -> {
if (time > maxTimeFinal || time < minTimeFinal) return true;
//Skip stops that can't be used with wheelchairs if wheelchair routing is requested
if (request.wheelchair && !transportNetwork.transitLayer.stopsWheelchair.get(stop)) {
return true;
}
int weight = time + penaltyFinal;
// There are penalties for using certain modes, to avoid bike/car trips that are only marginally faster
// than walking, so we use weights to decide which mode "wins" to access a particular stop.
if (!weights.containsKey(stop) || weight < weights.get(stop)) {
times.put(stop, time);
weights.put(stop, weight);
stopModeMap.put(stop, mode);
}
return true; // iteration should continue
});
}
// we don't want to explore a boatload of access/egress stops. Pick only the closest several hundred.
// What this means is that in urban environments you'll get on the bus nearby, in suburban environments
// you may walk/bike/drive a very long way.
// NB in testing it's not clear this actually does a lot for performance, maybe 1-1.5s
int stopsFound = times.size();
if (stopsFound > MAX_ACCESS_STOPS) {
TIntList timeList = new TIntArrayList();
times.forEachValue(timeList::add);
timeList.sort();
// The cutoff is the travel time of the (MAX_ACCESS_STOPS+1)-th closest stop;
// index MAX_ACCESS_STOPS is valid because the list has more than MAX_ACCESS_STOPS entries here.
int cutoff = timeList.get(MAX_ACCESS_STOPS);
for (TIntIntIterator it = times.iterator(); it.hasNext();) {
it.advance();
if (it.value() > cutoff) it.remove();
}
LOG.warn("{} stops found, using {} nearest", stopsFound, times.size());
} else {
LOG.info("{} stops found", stopsFound);
}
// return the times, not the weights
return times;
}
}
|
package com.creativemd.littletiles.common.entity;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import javax.annotation.Nullable;
import javax.vecmath.Vector3d;
import com.creativemd.creativecore.common.utils.math.RotationUtils;
import com.creativemd.creativecore.common.utils.math.box.BoxPlane;
import com.creativemd.creativecore.common.utils.math.box.BoxUtils;
import com.creativemd.creativecore.common.utils.math.box.OrientatedBoundingBox;
import com.creativemd.creativecore.common.utils.math.collision.CollidingPlane;
import com.creativemd.creativecore.common.utils.math.collision.CollidingPlane.PushCache;
import com.creativemd.creativecore.common.utils.math.collision.CollisionCoordinator;
import com.creativemd.creativecore.common.utils.math.vec.ChildVecOrigin;
import com.creativemd.creativecore.common.utils.math.vec.IVecOrigin;
import com.creativemd.creativecore.common.utils.math.vec.VecUtils;
import com.creativemd.creativecore.common.world.CreativeWorld;
import com.creativemd.creativecore.common.world.FakeWorld;
import com.creativemd.creativecore.common.world.SubWorld;
import com.creativemd.littletiles.client.render.world.LittleRenderChunkSuppilier;
import com.creativemd.littletiles.common.action.LittleAction;
import com.creativemd.littletiles.common.action.LittleActionException;
import com.creativemd.littletiles.common.action.block.LittleActionPlaceStack;
import com.creativemd.littletiles.common.action.block.LittleActionPlaceStack.LittlePlaceResult;
import com.creativemd.littletiles.common.blocks.BlockTile;
import com.creativemd.littletiles.common.events.LittleDoorHandler;
import com.creativemd.littletiles.common.items.ItemLittleWrench;
import com.creativemd.littletiles.common.structure.IAnimatedStructure;
import com.creativemd.littletiles.common.structure.LittleStructure;
import com.creativemd.littletiles.common.structure.relative.StructureAbsolute;
import com.creativemd.littletiles.common.tileentity.TileEntityLittleTiles;
import com.creativemd.littletiles.common.tiles.LittleTile;
import com.creativemd.littletiles.common.tiles.place.PlacePreviewTile;
import com.creativemd.littletiles.common.tiles.place.PlacePreviews;
import com.creativemd.littletiles.common.tiles.preview.LittleAbsolutePreviewsStructure;
import com.creativemd.littletiles.common.tiles.vec.LittleTileBox;
import com.creativemd.littletiles.common.tiles.vec.LittleTileIdentifierStructureAbsolute;
import com.creativemd.littletiles.common.tiles.vec.LittleTilePos;
import com.creativemd.littletiles.common.tiles.vec.LittleTileVec;
import com.creativemd.littletiles.common.utils.animation.AnimationState;
import com.creativemd.littletiles.common.utils.placing.PlacementMode;
import com.creativemd.littletiles.common.utils.vec.LittleRayTraceResult;
import com.creativemd.littletiles.common.utils.vec.LittleTransformation;
import com.google.common.base.Predicate;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.MoverType;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumFacing.Axis;
import net.minecraft.util.EnumFacing.AxisDirection;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
public class EntityAnimation extends Entity {
protected static final Predicate<Entity> noAnimation = (x) -> !(x instanceof EntityAnimation);
public EntityAnimation(World worldIn) {
super(worldIn);
}
public EntityAnimation(World world, CreativeWorld fakeWorld, EntityAnimationController controller, BlockPos absolutePreviewPos, UUID uuid, StructureAbsolute center, LittleTileIdentifierStructureAbsolute identifier) {
this(world);
this.structureIdentifier = identifier;
try {
if (identifier == null)
this.structure = null;
else
this.structure = LittleAction.getTile(fakeWorld, identifier).connection.getStructureWithoutLoading();
} catch (LittleActionException e) {
throw new RuntimeException(e);
}
this.controller = controller;
this.controller.setParent(this);
this.absolutePreviewPos = absolutePreviewPos;
setFakeWorld(fakeWorld);
this.entityUniqueID = uuid;
this.cachedUniqueIdString = this.entityUniqueID.toString();
setCenter(center);
updateWorldCollision();
setPosition(center.baseOffset.getX(), center.baseOffset.getY(), center.baseOffset.getZ());
addDoor();
preventPush = true;
updateTickState();
updateBoundingBox();
this.initalOffX = origin.offX();
this.initalOffY = origin.offY();
this.initalOffZ = origin.offZ();
this.initalRotX = origin.rotX();
this.initalRotY = origin.rotY();
this.initalRotZ = origin.rotZ();
preventPush = false;
origin.tick();
}
public void setFakeWorld(CreativeWorld fakeWorld) {
this.fakeWorld = fakeWorld;
this.fakeWorld.parent = this;
if (world.isRemote && this.fakeWorld.renderChunkSupplier == null)
this.fakeWorld.renderChunkSupplier = new LittleRenderChunkSuppilier();
}
public boolean shouldAddDoor() {
return !(world instanceof FakeWorld) && !(world instanceof SubWorld && ((SubWorld) world).getRealWorld() instanceof FakeWorld);
}
public World getRealWorld() {
if (world instanceof SubWorld)
return ((SubWorld) world).getParent();
return world;
}
public boolean isDoorAdded() {
return addedDoor;
}
public void addDoor() {
if (!shouldAddDoor())
return;
if (!addedDoor) {
LittleDoorHandler.getHandler(world).createDoor(this);
addedDoor = true;
}
}
public void markRemoved() {
isDead = true;
addedDoor = false;
if (fakeWorld == null || fakeWorld.loadedEntityList == null)
return;
for (Entity entity : fakeWorld.loadedEntityList)
if (entity instanceof EntityAnimation)
((EntityAnimation) entity).markRemoved();
}
@Override
protected void entityInit() {
addDoor();
}
public double initalOffX;
public double initalOffY;
public double initalOffZ;
public double initalRotX;
public double initalRotY;
public double initalRotZ;
public CreativeWorld fakeWorld;
public IVecOrigin origin;
public EntityAnimationController controller;
public boolean enteredAsChild = false;
public void setCenter(StructureAbsolute center) {
this.center = center;
this.fakeWorld.setOrigin(center.rotationCenter);
this.origin = this.fakeWorld.getOrigin();
if (fakeWorld.loadedEntityList.isEmpty())
return;
for (Entity entity : fakeWorld.loadedEntityList)
if (entity instanceof EntityAnimation)
((ChildVecOrigin) ((EntityAnimation) entity).origin).parent = this.origin;
}
public void setCenterVec(LittleTilePos axis, LittleTileVec additional) {
setCenter(new StructureAbsolute(axis, additional));
}
public void setParentWorld(World world) {
this.enteredAsChild = this.world instanceof CreativeWorld && !(world instanceof CreativeWorld);
this.world = world;
if (fakeWorld instanceof SubWorld)
((SubWorld) fakeWorld).parentWorld = world;
this.fakeWorld.setOrigin(center.rotationCenter);
this.origin = this.fakeWorld.getOrigin();
hasOriginChanged = true;
}
public LittleStructure structure;
public LittleTileIdentifierStructureAbsolute structureIdentifier;
public StructureAbsolute center;
public BlockPos absolutePreviewPos;
public boolean preventPush = false;
/** True while this animation is pushing other entities */
public boolean noCollision = false;
public AABBCombiner collisionBoxWorker;
/** Static; not affected by direction or entity offset */
public List<OrientatedBoundingBox> worldCollisionBoxes;
/** Static; not affected by direction or entity offset */
public OrientatedBoundingBox worldBoundingBox;
/** Should be called whenever the animation's world is modified (currently not possible) */
public void updateWorldCollision() {
double minX = Double.MAX_VALUE;
double minY = Double.MAX_VALUE;
double minZ = Double.MAX_VALUE;
double maxX = -Double.MAX_VALUE;
double maxY = -Double.MAX_VALUE;
double maxZ = -Double.MAX_VALUE;
worldCollisionBoxes = new ArrayList<>();
for (Iterator<TileEntity> iterator = fakeWorld.loadedTileEntityList.iterator(); iterator.hasNext();) {
TileEntity tileEntity = iterator.next();
if (tileEntity instanceof TileEntityLittleTiles) {
TileEntityLittleTiles te = (TileEntityLittleTiles) tileEntity;
if (te.isEmpty())
continue;
AxisAlignedBB bb = te.getSelectionBox();
minX = Math.min(minX, bb.minX);
minY = Math.min(minY, bb.minY);
minZ = Math.min(minZ, bb.minZ);
maxX = Math.max(maxX, bb.maxX);
maxY = Math.max(maxY, bb.maxY);
maxZ = Math.max(maxZ, bb.maxZ);
ArrayList<AxisAlignedBB> boxes = new ArrayList<>();
for (LittleTile tile : te) {
List<LittleTileBox> tileBoxes = tile.getCollisionBoxes();
for (LittleTileBox box : tileBoxes) {
boxes.add(box.getBox(te.getContext(), te.getPos()));
}
}
// BoxUtils.compressBoxes(boxes, 0.0F);
for (AxisAlignedBB box : boxes) {
worldCollisionBoxes.add(new OrientatedBoundingBox(origin, box));
}
}
}
fakeWorld.hasChanged = false;
hasOriginChanged = true;
collisionBoxWorker = new AABBCombiner(worldCollisionBoxes, 0);
if (minX == Double.MAX_VALUE)
worldBoundingBox = new OrientatedBoundingBox(origin, 0, 0, 0, 1, 1, 1);
else
worldBoundingBox = new OrientatedBoundingBox(origin, minX, minY, minZ, maxX, maxY, maxZ);
}
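// The two helpers below compare by absolute magnitude: minIgnore returns the argument closer to zero,
// maxIgnore the one farther from zero (signs are preserved).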
private static double minIgnore(double par1, double par2) {
if (Math.abs(par2) < Math.abs(par1))
return par2;
return par1;
}
private static double maxIgnore(double par1, double par2) {
if (Math.abs(par2) > Math.abs(par1))
return par2;
return par1;
}
public void moveAndRotateAnimation(double x, double y, double z, double rotX, double rotY, double rotZ) {
if (x == 0 && y == 0 && z == 0 && rotX == 0 && rotY == 0 && rotZ == 0)
return;
CollisionCoordinator coordinator = new CollisionCoordinator(x, y, z, rotX, rotY, rotZ, origin, origin);
moveAndRotateAnimation(coordinator);
coordinator.move();
}
public void moveAndRotateAnimation(CollisionCoordinator coordinator) {
if (preventPush)
return;
noCollision = true;
World world = getRealWorld();
List<Entity> entities = world.getEntitiesWithinAABB(Entity.class, coordinator.computeSurroundingBox(worldBoundingBox), EntityAnimation.noAnimation);
if (!entities.isEmpty()) {
// PHASE ONE
// Gather all affected boxes
List<AxisAlignedBB> surroundingBoxes = new ArrayList<>(worldCollisionBoxes.size());
for (OrientatedBoundingBox box : worldCollisionBoxes) {
if (box.cache == null)
box.buildCache();
box.cache.reset();
surroundingBoxes.add(coordinator.computeSurroundingBox(box));
}
// PHASE TWO
// Move entities by their center
PushCache[] caches = new PushCache[entities.size()];
for (int j = 0; j < entities.size(); j++) {
Entity entity = entities.get(j);
AxisAlignedBB entityBB = entity.getEntityBoundingBox();
Vector3d center = new Vector3d(entityBB.minX + (entityBB.maxX - entityBB.minX) * 0.5D, entityBB.minY + (entityBB.maxY - entityBB.minY) * 0.5D, entityBB.minZ + (entityBB.maxZ - entityBB.minZ) * 0.5D);
double radius = VecUtils.distanceToSquared(entityBB.minX, entityBB.minY, entityBB.minZ, center);
Double t = null;
OrientatedBoundingBox pushingBox = null;
EnumFacing facing = null;
checking_all_boxes: for (int i = 0; i < surroundingBoxes.size(); i++) {
if (surroundingBoxes.get(i).intersects(entityBB)) {
// Check for earliest hit
OrientatedBoundingBox box = worldCollisionBoxes.get(i);
if (!box.cache.isCached())
box.cache.planes = CollidingPlane.getPlanes(box, box.cache, coordinator);
// Binary search
for (CollidingPlane plane : box.cache.planes) {
Double tempT = plane.binarySearch(t, entityBB, radius, center, coordinator);
if (tempT != null) {
t = tempT;
pushingBox = box;
facing = plane.facing;
if (t == 0)
break checking_all_boxes;
}
}
}
}
// Applying found t
if (t != null) {
PushCache cache = new PushCache();
cache.facing = facing;
Vector3d newCenter = new Vector3d(center);
coordinator.transform(newCenter, 1 - t);
origin.transformPointToWorld(center);
origin.transformPointToWorld(newCenter);
cache.pushBox = pushingBox;
cache.entityBox = entityBB.offset(newCenter.x - center.x, newCenter.y - center.y, newCenter.z - center.z);
caches[j] = cache;
}
}
coordinator.move();
// PHASE THREE
for (int i = 0; i < entities.size(); i++) {
Entity entity = entities.get(i);
PushCache cache = caches[i];
boolean cached = cache != null;
if (!cached) {
cache = new PushCache();
cache.entityBox = entity.getEntityBoundingBox();
}
OrientatedBoundingBox fakeBox = coordinator.origin.getOrientatedBox(cache.entityBox);
Vector3d center = new Vector3d(cache.entityBox.minX + (cache.entityBox.maxX - cache.entityBox.minX) * 0.5D, cache.entityBox.minY + (cache.entityBox.maxY - cache.entityBox.minY) * 0.5D, cache.entityBox.minZ + (cache.entityBox.maxZ - cache.entityBox.minZ) * 0.5D);
//coordinator.origin.transformPointToFakeWorld(center);
Axis one = cached ? RotationUtils.getDifferentAxisFirst(cache.facing.getAxis()) : null;
Axis two = cached ? RotationUtils.getDifferentAxisSecond(cache.facing.getAxis()) : null;
boolean ignoreOne = false;
Boolean positiveOne = null;
boolean ignoreTwo = false;
Boolean positiveTwo = null;
double maxVolume = 0;
List<OrientatedBoundingBox> intersecting = new ArrayList<>();
List<EnumFacing> intersectingFacing = new ArrayList<>();
if (cached) {
intersecting.add(cache.pushBox);
intersectingFacing.add(cache.facing);
}
for (OrientatedBoundingBox box : worldCollisionBoxes) {
if ((!cached || box != cache.pushBox) && box.intersects(fakeBox)) {
box.cache.planes = CollidingPlane.getPlanes(box, box.cache, coordinator);
EnumFacing facing = CollidingPlane.getDirection(box, box.cache.planes, center);
if (facing == null || (!coordinator.hasRotation && (!coordinator.hasTranslation || RotationUtils.get(facing.getAxis(), coordinator.translation) == 0)))
continue;
boolean add = false;
if (cached) {
if (facing == cache.facing)
add = true;
else if (!ignoreOne && facing.getAxis() == one) {
add = true;
if (positiveOne == null)
positiveOne = facing.getAxisDirection() == AxisDirection.POSITIVE;
else if (facing.getAxisDirection() == AxisDirection.POSITIVE != positiveOne) {
ignoreOne = true;
add = false;
}
} else if (!ignoreTwo && facing.getAxis() == two) {
add = true;
if (positiveTwo == null)
positiveTwo = facing.getAxisDirection() == AxisDirection.POSITIVE;
else if (facing.getAxisDirection() == AxisDirection.POSITIVE != positiveTwo) {
ignoreTwo = true;
add = false;
}
}
} else
add = true;
if (add) {
double intersectingVolume = box.getIntersectionVolume(fakeBox);
if (intersectingVolume > maxVolume) {
cache.pushBox = box;
maxVolume = intersectingVolume;
cache.facing = facing;
}
intersecting.add(box);
intersectingFacing.add(facing);
}
}
}
if (intersecting.isEmpty())
continue;
if (!cached) {
one = RotationUtils.getDifferentAxisFirst(cache.facing.getAxis());
two = RotationUtils.getDifferentAxisSecond(cache.facing.getAxis());
positiveOne = null;
positiveTwo = null;
for (EnumFacing facing : intersectingFacing) {
if (!ignoreOne && facing.getAxis() == one) {
if (positiveOne == null)
positiveOne = facing.getAxisDirection() == AxisDirection.POSITIVE;
else if (facing.getAxisDirection() == AxisDirection.POSITIVE != positiveOne)
ignoreOne = true;
} else if (!ignoreTwo && facing.getAxis() == two) {
if (positiveTwo == null)
positiveTwo = facing.getAxisDirection() == AxisDirection.POSITIVE;
else if (facing.getAxisDirection() == AxisDirection.POSITIVE != positiveTwo)
ignoreTwo = true;
}
if (ignoreOne && ignoreTwo)
break;
}
}
// Now things are ready. Go through all intersecting ones and push the box out
Vector3d pushVec = new Vector3d();
RotationUtils.setValue(pushVec, cache.facing.getAxisDirection().getOffset(), cache.facing.getAxis());
if (!ignoreOne && positiveOne != null)
RotationUtils.setValue(pushVec, positiveOne ? 1 : -1, one);
if (!ignoreTwo && positiveTwo != null)
RotationUtils.setValue(pushVec, positiveTwo ? 1 : -1, two);
Vector3d pushInv = new Vector3d(-pushVec.x, -pushVec.y, -pushVec.z);
Vector3d rotatedVec = new Vector3d(pushVec);
origin.rotation().transform(rotatedVec);
Vector3d[] corners = BoxUtils.getRotatedCorners(cache.entityBox, coordinator.origin);
BoxPlane xPlane = BoxPlane.createOppositePlane(Axis.X, rotatedVec, corners);
BoxPlane yPlane = BoxPlane.createOppositePlane(Axis.Y, rotatedVec, corners);
BoxPlane zPlane = BoxPlane.createOppositePlane(Axis.Z, rotatedVec, corners);
double scale = 0;
for (int j = 0; j < intersecting.size(); j++) {
EnumFacing facing = intersectingFacing.get(j);
if ((ignoreOne && facing.getAxis() == one) || (ignoreTwo && facing.getAxis() == two))
continue;
scale = intersecting.get(j).getPushOutScale(scale, fakeBox, cache.entityBox, pushVec, pushInv, xPlane, yPlane, zPlane);
}
boolean collidedHorizontally = entity.collidedHorizontally;
boolean collidedVertically = entity.collidedVertically;
boolean onGround = entity.onGround;
AxisAlignedBB originalBox = entity.getEntityBoundingBox();
double moveX = cache.entityBox.minX - originalBox.minX + rotatedVec.x * scale;
double moveY = cache.entityBox.minY - originalBox.minY + rotatedVec.y * scale;
double moveZ = cache.entityBox.minZ - originalBox.minZ + rotatedVec.z * scale;
entity.move(MoverType.PISTON, moveX, moveY, moveZ);
if (entity instanceof EntityPlayerMP)
LittleDoorHandler.setPushedByDoor((EntityPlayerMP) entity);
/* entity.motionX += moveX; entity.motionY += moveY; entity.motionZ += moveZ; motion used to be applied here, but that is broken for doors because they can move far too fast */
if (moveX != 0 || moveZ != 0)
collidedHorizontally = true;
if (moveY != 0) {
collidedVertically = true;
onGround = true;
}
entity.collidedHorizontally = collidedHorizontally;
entity.collidedVertically = collidedVertically;
entity.onGround = onGround;
entity.collided = collidedHorizontally || collidedVertically;
}
for (OrientatedBoundingBox box : worldCollisionBoxes)
box.cache.reset();
}
for (int i = 0; i < fakeWorld.loadedEntityList.size(); i++) {
Entity entity = fakeWorld.loadedEntityList.get(i);
if (entity instanceof EntityAnimation) {
coordinator.reset(((EntityAnimation) entity).origin);
((EntityAnimation) entity).moveAndRotateAnimation(coordinator);
}
}
noCollision = false;
}
@SideOnly(Side.CLIENT)
public boolean spawnedInWorld;
@SideOnly(Side.CLIENT)
public void createClient() {
}
public LittleRenderChunkSuppilier getRenderChunkSuppilier() {
return (LittleRenderChunkSuppilier) fakeWorld.renderChunkSupplier;
}
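/** Animations are not affected by gravity or other external forces; motion is zeroed every tick. */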
protected void handleForces() {
motionX = 0;
motionY = 0;
motionZ = 0;
}
protected boolean hasOriginChanged = false;
protected void markOriginChange() {
hasOriginChanged = true;
if (fakeWorld.loadedEntityList.isEmpty())
return;
for (Entity entity : fakeWorld.loadedEntityList)
if (entity instanceof EntityAnimation)
((EntityAnimation) entity).markOriginChange();
}
public void updateBoundingBox() {
if (worldBoundingBox == null || fakeWorld == null)
return;
if (origin.hasChanged() || hasOriginChanged) {
markOriginChange();
setEntityBoundingBox(origin.getAxisAlignedBox(worldBoundingBox));
hasOriginChanged = false;
}
}
public void updateTickState() {
if (controller == null)
return;
AnimationState state = controller.getTickingState();
Vector3d offset = state.getOffset();
Vector3d rotation = state.getRotation();
moveAndRotateAnimation(offset.x - origin.offX(), offset.y - origin.offY(), offset.z - origin.offZ(), rotation.x - origin.rotX(), rotation.y - origin.rotY(), rotation.z - origin.rotZ());
origin.tick();
hasOriginChanged = true;
}
public void onTick() {
if (controller == null)
return;
AnimationState state = controller.tick();
Vector3d offset = state.getOffset();
Vector3d rotation = state.getRotation();
moveAndRotateAnimation(offset.x - origin.offX(), offset.y - origin.offY(), offset.z - origin.offZ(), rotation.x - origin.rotX(), rotation.y - origin.rotY(), rotation.z - origin.rotZ());
}
private boolean addedDoor;
@Override
public void onUpdate() {
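// intentionally empty: vanilla ticking is bypassed; updates run through onUpdateForReal() instead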
}
public void onUpdateForReal() {
if (fakeWorld == null && !world.isRemote)
isDead = true;
if (fakeWorld == null)
return;
if (fakeWorld.hasChanged)
updateWorldCollision();
if (collisionBoxWorker != null) {
collisionBoxWorker.work();
if (collisionBoxWorker.hasFinished())
collisionBoxWorker = null;
}
origin.tick();
handleForces();
super.onUpdate();
for (int i = 0; i < fakeWorld.loadedEntityList.size(); i++) {
Entity entity = fakeWorld.loadedEntityList.get(i);
if (entity instanceof EntityAnimation)
((EntityAnimation) entity).onUpdateForReal();
}
fakeWorld.loadedEntityList.removeIf((x) -> {
if (x.isDead) {
if (x instanceof EntityAnimation)
((EntityAnimation) x).markRemoved();
return true;
}
return false;
});
onTick();
updateBoundingBox();
for (Iterator<TileEntity> iterator = fakeWorld.loadedTileEntityList.iterator(); iterator.hasNext();) {
TileEntity te = iterator.next();
List<LittleTile> tickingTiles = ((TileEntityLittleTiles) te).getTickingTiles();
if (!tickingTiles.isEmpty())
for (LittleTile tile : tickingTiles)
tile.updateEntity();
}
prevPosX = center.baseOffset.getX() + origin.offXLast();
prevPosY = center.baseOffset.getY() + origin.offYLast();
prevPosZ = center.baseOffset.getZ() + origin.offZLast();
posX = center.baseOffset.getX() + origin.offX();
posY = center.baseOffset.getY() + origin.offY();
posZ = center.baseOffset.getZ() + origin.offZ();
}
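// The next four overrides are intentionally no-ops: the animation's position is derived from its
// origin in onUpdateForReal(), so vanilla teleport/interpolation updates must not interfere.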
@Override
@SideOnly(Side.CLIENT)
public void setPositionAndRotationDirect(double x, double y, double z, float yaw, float pitch, int posRotationIncrements, boolean teleport) {
}
@Override
public void setPositionAndRotation(double x, double y, double z, float yaw, float pitch) {
}
@Override
public void setPositionAndUpdate(double x, double y, double z) {
}
@Override
public void setLocationAndAngles(double x, double y, double z, float yaw, float pitch) {
}
public void setInitialPosition(double x, double y, double z) {
setPosition(x, y, z);
}
@Override
public void setPosition(double x, double y, double z) {
this.posX = x;
this.posY = y;
this.posZ = z;
updateBoundingBox();
}
@Override
public void setDead() {
if (!this.isDead && (!world.isRemote || controller == null || !controller.isWaitingForRender())) {
this.isDead = true;
}
}
public void destroyAnimation() {
this.isDead = true;
}
@Override
public boolean canBeCollidedWith() {
return true;
}
@Override
public AxisAlignedBB getCollisionBox(Entity entityIn) {
return null;
}
@Override
public AxisAlignedBB getCollisionBoundingBox() {
return null;
}
@Override
public boolean processInitialInteract(EntityPlayer player, EnumHand hand) {
return true;
}
public LittleRayTraceResult getRayTraceResult(Vec3d pos, Vec3d look) {
return getTarget(fakeWorld, origin.transformPointToFakeWorld(pos), origin.transformPointToFakeWorld(look), pos, look);
}
private static LittleRayTraceResult getTarget(CreativeWorld world, Vec3d pos, Vec3d look, Vec3d originalPos, Vec3d originalLook) {
LittleRayTraceResult result = null;
double distance = 0;
if (!world.loadedEntityList.isEmpty()) {
for (Entity entity : world.loadedEntityList) {
if (entity instanceof EntityAnimation) {
EntityAnimation animation = (EntityAnimation) entity;
Vec3d newPos = animation.origin.transformPointToFakeWorld(originalPos);
Vec3d newLook = animation.origin.transformPointToFakeWorld(originalLook);
if (animation.worldBoundingBox.intersects(new AxisAlignedBB(newPos, newLook))) {
LittleRayTraceResult tempResult = getTarget(animation.fakeWorld, newPos, newLook, originalPos, originalLook);
if (tempResult == null)
continue;
double tempDistance = newPos.distanceTo(tempResult.getHitVec());
if (result == null || tempDistance < distance) {
result = tempResult;
distance = tempDistance;
}
}
}
}
}
RayTraceResult tempResult = world.rayTraceBlocks(pos, look);
if (tempResult == null || tempResult.typeOfHit != RayTraceResult.Type.BLOCK)
return result;
tempResult.hitInfo = world;
if (result == null || pos.distanceTo(tempResult.hitVec) < distance)
return new LittleRayTraceResult(tempResult, world);
return result;
}
public boolean onRightClick(@Nullable EntityPlayer player, Vec3d pos, Vec3d look) {
if (player != null && player.getHeldItemMainhand().getItem() instanceof ItemLittleWrench) {
ItemLittleWrench.rightClickAnimation(this, player);
return true;
}
LittleRayTraceResult result = getRayTraceResult(pos, look);
if (result == null)
return false;
TileEntity te = result.world.getTileEntity(result.getBlockPos());
IBlockState state = result.world.getBlockState(result.getBlockPos());
Vec3d hit = result.getHitVec();
return state.getBlock().onBlockActivated(fakeWorld, result.getBlockPos(), state, player, EnumHand.MAIN_HAND, result.result.sideHit, (float) hit.x, (float) hit.y, (float) hit.z);
}
@Override
public boolean isBurning() {
return false;
}
@Override
@SideOnly(Side.CLIENT)
public boolean canRenderOnFire() {
return false;
}
public void transformWorld(LittleTransformation transformation) {
if (!structure.loadTiles() || !structure.loadChildren() || !structure.loadParent())
return;
LittleAbsolutePreviewsStructure previews = structure.getAbsolutePreviewsSameWorldOnly(transformation.center);
transformation.transform(previews);
List<BlockPos> positions = new ArrayList<>();
for (TileEntity te : fakeWorld.loadedTileEntityList) {
if (te instanceof TileEntityLittleTiles) {
((TileEntityLittleTiles) te).updateTilesSecretly((x) -> x.clear());
positions.add(te.getPos());
}
}
for (BlockPos pos : positions) {
fakeWorld.setBlockToAir(pos);
fakeWorld.removeTileEntity(pos);
}
if (world.isRemote)
getRenderChunkSuppilier().unloadRenderCache();
List<PlacePreviewTile> placePreviews = new ArrayList<>();
previews.getPlacePreviews(placePreviews, null, true, LittleTileVec.ZERO);
HashMap<BlockPos, PlacePreviews> splitted = LittleActionPlaceStack.getSplittedTiles(previews.context, placePreviews, previews.pos);
int childId = this.structure.parent.getChildID();
LittleStructure parentStructure = this.structure.parent.getStructure(fakeWorld);
LittlePlaceResult result = LittleActionPlaceStack.placeTilesWithoutPlayer(fakeWorld, previews.context, splitted, previews.getStructure(), PlacementMode.all, previews.pos, null, null, null, null);
this.structure = result.parentStructure;
((IAnimatedStructure) this.structure).setAnimation(this);
parentStructure.updateChildConnection(childId, this.structure);
this.structure.updateParentConnection(childId, parentStructure);
this.structure.transformAnimation(transformation);
this.controller.transform(transformation);
absolutePreviewPos = transformation.transform(absolutePreviewPos);
updateWorldCollision();
updateBoundingBox();
updateTickState();
}
@Deprecated
private LittleStructure searchForParent() {
for (TileEntity te : fakeWorld.loadedTileEntityList) {
if (te instanceof TileEntityLittleTiles) {
for (LittleTile tile : (TileEntityLittleTiles) te) {
if (!tile.connection.isLink()) {
LittleStructure structure = tile.connection.getStructureWithoutLoading();
if (structure.parent == null || structure.parent.isLinkToAnotherWorld())
return structure;
}
}
}
}
throw new RuntimeException("Could not find parent structure!");
}
@Override
public void onRemovedFromWorld() {
super.onRemovedFromWorld();
markRemoved();
}
@Override
protected void readEntityFromNBT(NBTTagCompound compound) {
setFakeWorld(compound.getBoolean("subworld") ? SubWorld.createFakeWorld(world) : FakeWorld.createFakeWorld(getCachedUniqueIdString(), world.isRemote));
this.initalOffX = compound.getDouble("initOffX");
this.initalOffY = compound.getDouble("initOffY");
this.initalOffZ = compound.getDouble("initOffZ");
this.initalRotX = compound.getDouble("initRotX");
this.initalRotY = compound.getDouble("initRotY");
this.initalRotZ = compound.getDouble("initRotZ");
if (compound.hasKey("axis"))
setCenterVec(new LittleTilePos("axis", compound), new LittleTileVec("additional", compound));
else
setCenter(new StructureAbsolute("center", compound));
NBTTagList list = compound.getTagList("tileEntity", 10);
for (int i = 0; i < list.tagCount(); i++) {
NBTTagCompound nbt = list.getCompoundTagAt(i);
BlockPos pos = new BlockPos(nbt.getInteger("x"), nbt.getInteger("y"), nbt.getInteger("z"));
fakeWorld.setBlockState(pos, BlockTile.getState(nbt.getInteger("stateId")));
TileEntityLittleTiles te = (TileEntityLittleTiles) fakeWorld.getTileEntity(pos);
te.readFromNBT(nbt);
if (world.isRemote)
te.updateCustomRenderer();
}
fakeWorld.loadedTileEntityList.removeIf(x -> x.isInvalid());
int[] array = compound.getIntArray("previewPos");
if (array.length == 3)
absolutePreviewPos = new BlockPos(array[0], array[1], array[2]);
else
absolutePreviewPos = center.baseOffset;
if (compound.hasKey("identifier")) {
structureIdentifier = new LittleTileIdentifierStructureAbsolute(compound.getCompoundTag("identifier"));
try {
this.structure = LittleAction.getTile(fakeWorld, structureIdentifier).connection.getStructureWithoutLoading();
} catch (LittleActionException e) {
throw new RuntimeException(e);
}
} else {
structure = searchForParent();
structureIdentifier = structure.getAbsoluteIdentifier();
}
controller = EntityAnimationController.parseController(this, compound.getCompoundTag("controller"));
if (compound.hasKey("subEntities")) {
NBTTagList subEntities = compound.getTagList("subEntities", 10);
for (int i = 0; i < subEntities.tagCount(); i++) {
Entity entity = EntityList.createEntityFromNBT(subEntities.getCompoundTagAt(i), fakeWorld);
if (entity != null)
fakeWorld.spawnEntity(entity);
}
}
updateWorldCollision();
updateBoundingBox();
}
@Override
protected void writeEntityToNBT(NBTTagCompound compound) {
center.writeToNBT("center", compound);
compound.setDouble("initOffX", initalOffX);
compound.setDouble("initOffY", initalOffY);
compound.setDouble("initOffZ", initalOffZ);
compound.setDouble("initRotX", initalRotX);
compound.setDouble("initRotY", initalRotY);
compound.setDouble("initRotZ", initalRotZ);
compound.setBoolean("subworld", fakeWorld.hasParent());
NBTTagList list = new NBTTagList();
for (Iterator<TileEntity> iterator = fakeWorld.loadedTileEntityList.iterator(); iterator.hasNext();) {
TileEntity te = iterator.next();
if (te instanceof TileEntityLittleTiles) {
NBTTagCompound nbt = new NBTTagCompound();
nbt.setInteger("stateId", BlockTile.getStateId((TileEntityLittleTiles) te));
list.appendTag(te.writeToNBT(nbt));
}
}
compound.setTag("controller", controller.writeToNBT(new NBTTagCompound()));
compound.setTag("tileEntity", list);
compound.setIntArray("previewPos", new int[] { absolutePreviewPos.getX(), absolutePreviewPos.getY(), absolutePreviewPos.getZ() });
compound.setTag("identifier", structureIdentifier.writeToNBT(new NBTTagCompound()));
if (!fakeWorld.loadedEntityList.isEmpty()) {
NBTTagList subEntities = new NBTTagList();
for (Entity entity : fakeWorld.loadedEntityList) {
NBTTagCompound nbt = new NBTTagCompound();
entity.writeToNBTAtomically(nbt);
subEntities.appendTag(nbt);
}
compound.setTag("subEntities", subEntities);
}
}
}
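/*
 * Hedged sketch (illustrative, not part of the mod): a 1-D analogue of the
 * "binary search for the earliest hit" used in PHASE TWO of
 * moveAndRotateAnimation above. A wall moves linearly from x0 to x1 over
 * t in [0, 1]; we search for the smallest t at which it reaches point p.
 * Linear motion could be solved in closed form, but the real code rotates
 * boxes as well, where no convenient closed form exists, hence the search.
 */
class CollisionTimeSketch {
    static Double earliestHit(double x0, double x1, double p) {
        if (x0 == x1)
            return null; // stationary wall never newly reaches p
        boolean movingRight = x1 > x0;
        if (movingRight ? x1 < p : x1 > p)
            return null; // never reaches p during this step
        if (movingRight ? x0 >= p : x0 <= p)
            return 0.0; // already touching when the step starts
        double lo = 0, hi = 1; // invariant: no hit at lo, hit at hi
        for (int i = 0; i < 32; i++) { // fixed depth instead of an epsilon
            double mid = (lo + hi) / 2;
            double wall = x0 + (x1 - x0) * mid; // wall position at t = mid
            if (movingRight ? wall >= p : wall <= p)
                hi = mid;
            else
                lo = mid;
        }
        return hi;
    }
}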
|
package com.github.agomezmoron.multimedia.recorder;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.Toolkit;
import java.io.File;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import javax.media.MediaLocator;
import com.github.agomezmoron.multimedia.capture.ScreenCapture;
import com.github.agomezmoron.multimedia.external.JpegImagesToMovie;
/**
* It models the video recorder.
*
* @author Alejandro Gomez <agommor@gmail.com>
*
*/
public class VideoRecorder {
/**
* Status of the recorder.
*/
private static boolean recording = false;
/**
* Interval at which images will be captured (in milliseconds).
*/
private static int captureInterval = 100;
/**
* Screen Width.
*/
private static int width = (int) Toolkit.getDefaultToolkit().getScreenSize().getWidth();
/**
* Screen Height.
*/
private static int height = (int) Toolkit.getDefaultToolkit().getScreenSize().getHeight();
/**
* Associated frames.
*/
private static List<String> frames;
/**
* Temporary directory to be used.
*/
private static File tempDirectory = (System.getProperty("java.io.tmpdir") != null)
? new File(System.getProperty("java.io.tmpdir")) : new File(".");
/**
* Flag indicating whether the user wants to keep the frames.
*/
private static boolean keepFrames = false;
/**
* Video name.
*/
private static String videoName = "output.mov";
/**
* Video path where the video will be saved.
*/
private static File videoPath = (System.getProperty("java.io.tmpdir") != null)
? new File(System.getProperty("java.io.tmpdir")) : new File(".");
/**
* Strategy to record using {@link Thread}.
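* Note: a Thread instance can only be started once, so this recorder supports a single start/stop cycle per JVM.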
*/
private static final Thread recordThread = new Thread() {
@Override
public void run() {
Robot rt;
ScreenCapture capture;
try {
rt = new Robot();
do {
capture = new ScreenCapture(rt.createScreenCapture(new Rectangle(width, height)));
frames.add(VideoRecorderUtil.saveIntoDirectory(capture, new File(
tempDirectory.getAbsolutePath() + File.separatorChar + videoName.replace(".mov", ""))));
Thread.sleep(captureInterval);
} while (recording);
} catch (Exception e) {
recording = false;
}
}
};
/**
* Instances of this class are not allowed.
*/
private VideoRecorder() {
}
/**
* It stops the recording and creates the video.
* @return a {@link String} with the path where the video was created or null if the video couldn't be created.
* @throws MalformedURLException
*/
public static String stop() throws MalformedURLException {
String videoPathString = null;
if (recording) {
recording = false;
if (recordThread.isAlive()) {
recordThread.interrupt();
}
videoPathString = createVideo();
frames.clear();
if (!keepFrames) {
deleteDirectory(new File(
tempDirectory.getAbsolutePath() + File.separatorChar + videoName.replace(".mov", "")));
}
}
return videoPathString;
}
/**
* It deletes recursively a directory.
* @param directory to be deleted.
* @return true if the directory was deleted successfully.
*/
private static boolean deleteDirectory(File directory) {
if (directory.exists()) {
File[] files = directory.listFiles();
if (null != files) {
for (int i = 0; i < files.length; i++) {
if (files[i].isDirectory()) {
deleteDirectory(files[i]);
} else {
files[i].delete();
}
}
}
}
return (directory.delete());
}
/**
* It creates the video.
* @return a {@link String} with the path where the video was created or null if the video couldn't be created.
* @throws MalformedURLException
*/
private static String createVideo() throws MalformedURLException {
String videoPathString = null;
JpegImagesToMovie jpegImagesToMovie = new JpegImagesToMovie();
MediaLocator oml;
if ((oml = JpegImagesToMovie
.createMediaLocator(videoPath.getAbsolutePath() + File.separatorChar + videoName)) == null) {
System.exit(0);
}
if (jpegImagesToMovie.doIt(width, height, (1000 / captureInterval), new Vector<String>(frames), oml)) {
videoPathString = videoPath.getAbsolutePath() + File.separatorChar + videoName;
}
return videoPathString;
}
/**
* It starts recording (if it wasn't started before).
* @param newVideoName name of the output video file.
*/
public static void start(String newVideoName) {
if (!recording) {
videoName = newVideoName;
if (!videoName.endsWith(".mov")) {
videoName += ".mov";
}
recording = true;
frames = new ArrayList<String>();
recordThread.start();
}
}
/**
* @return the captureInterval.
*/
public static int getCaptureInterval() {
return captureInterval;
}
/**
* @param captureInterval the captureInterval to set.
*/
public static void setCaptureInterval(int captureInterval) {
VideoRecorder.captureInterval = captureInterval;
}
/**
* @return the width.
*/
public static int getWidth() {
return width;
}
/**
* @param width the width to set.
*/
public static void setWidth(int width) {
VideoRecorder.width = width;
}
/**
* @return the height.
*/
public static int getHeight() {
return height;
}
/**
* @param height the height to set.
*/
public static void setHeight(int height) {
VideoRecorder.height = height;
}
/**
* It sets the directory where the video will be created.
* @param path where the video will be created.
*/
public static void setVideoDirectory(String path) {
File f = new File(path);
if (!f.exists()) {
f.mkdirs();
}
if (f.exists() && f.canWrite()) {
videoPath = f;
}
}
/**
* It sets the temporary directory to be used.
* @param path to be used as the temporary directory.
*/
public static void setTempDirectory(String path) {
File f = new File(path);
if (!f.exists()) {
f.mkdirs();
}
if (f.exists() && f.canWrite()) {
tempDirectory = f;
}
}
/**
* It enables or disables keeping the frames after making the video.
* @param keep true to keep the frames, false to delete them afterwards.
*/
public static void keepFramesInTempDirectory(boolean keep) {
keepFrames = keep;
}
/**
* It sets the width and height value to the full screen size, ignoring previous setWidth and setHeight calls.
* @param useFullScreen true to record the full screen. If false, the previously configured width and height are kept.
*/
public static void fullScreenMode(boolean useFullScreen) {
if (useFullScreen) {
Dimension size = Toolkit.getDefaultToolkit().getScreenSize();
width = (int) size.getWidth();
height = (int) size.getHeight();
}
}
}
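/*
 * Illustrative usage of the recorder above (a sketch, not shipped code):
 * records the full screen at ~10 frames per second for five seconds. It
 * assumes JMF (javax.media) and a writable /tmp directory are available.
 */
class VideoRecorderUsageSketch {
    public static void main(String[] args) throws Exception {
        VideoRecorder.setCaptureInterval(100);   // one frame every 100 ms
        VideoRecorder.fullScreenMode(true);      // capture the whole screen
        VideoRecorder.setVideoDirectory("/tmp"); // where the .mov will be written
        VideoRecorder.start("demo");             // ".mov" is appended automatically
        Thread.sleep(5000);
        String path = VideoRecorder.stop();      // null if nothing was recorded
        System.out.println("Video written to: " + path);
    }
}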
|
package com.github.anba.es6draft.runtime.objects.reflect;
import static com.github.anba.es6draft.runtime.AbstractOperations.IsExtensible;
import static com.github.anba.es6draft.runtime.AbstractOperations.ToObject;
import static com.github.anba.es6draft.runtime.AbstractOperations.ToPropertyKey;
import static com.github.anba.es6draft.runtime.internal.Errors.throwTypeError;
import static com.github.anba.es6draft.runtime.internal.Properties.createProperties;
import static com.github.anba.es6draft.runtime.types.Null.NULL;
import static com.github.anba.es6draft.runtime.types.PropertyDescriptor.FromPropertyDescriptor;
import static com.github.anba.es6draft.runtime.types.PropertyDescriptor.ToPropertyDescriptor;
import com.github.anba.es6draft.runtime.ExecutionContext;
import com.github.anba.es6draft.runtime.Realm;
import com.github.anba.es6draft.runtime.internal.Initialisable;
import com.github.anba.es6draft.runtime.internal.Messages;
import com.github.anba.es6draft.runtime.internal.Properties.Function;
import com.github.anba.es6draft.runtime.internal.Properties.Optional;
import com.github.anba.es6draft.runtime.internal.Properties.Prototype;
import com.github.anba.es6draft.runtime.modules.Module;
import com.github.anba.es6draft.runtime.types.IntegrityLevel;
import com.github.anba.es6draft.runtime.types.Intrinsics;
import com.github.anba.es6draft.runtime.types.Property;
import com.github.anba.es6draft.runtime.types.PropertyDescriptor;
import com.github.anba.es6draft.runtime.types.ScriptObject;
import com.github.anba.es6draft.runtime.types.Symbol;
import com.github.anba.es6draft.runtime.types.Type;
import com.github.anba.es6draft.runtime.types.builtins.OrdinaryObject;
/**
* 15.17 The Reflect Module
* <p>
* 15.17.1 Exported Function Properties Reflecting the Essential Internal Methods<br>
*
* TODO: remove representation as ordinary object
*/
public class Reflect extends OrdinaryObject implements Initialisable, Module {
public Reflect(Realm realm) {
super(realm);
}
@Override
public void initialise(ExecutionContext cx) {
createProperties(this, cx, ReflectedFunctions.class);
setIntegrity(cx, IntegrityLevel.NonExtensible);
}
/**
* 15.17.1 Exported Function Properties Reflecting the Essential Internal Methods
*/
public enum ReflectedFunctions {
;
@Prototype
public static final Intrinsics __proto__ = null;
/**
* 15.17.1.1 Reflect.getPrototypeOf (target)
*/
@Function(name = "getPrototypeOf", arity = 1)
public static Object getPrototypeOf(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
ScriptObject proto = obj.getInheritance(cx);
return (proto != null ? proto : NULL);
}
/**
* 15.17.1.2 Reflect.setPrototypeOf (target, proto)
*/
@Function(name = "setPrototypeOf", arity = 2)
public static Object setPrototypeOf(ExecutionContext cx, Object thisValue, Object target,
Object proto) {
ScriptObject obj = ToObject(cx, target);
if (!(Type.isObject(proto) || Type.isNull(proto))) {
throw throwTypeError(cx, Messages.Key.NotObjectOrNull);
}
ScriptObject p = Type.isObject(proto) ? Type.objectValue(proto) : null;
return obj.setInheritance(cx, p);
}
/**
* 15.17.1.3 Reflect.isExtensible (target)
*/
@Function(name = "isExtensible", arity = 1)
public static Object isExtensible(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
return IsExtensible(cx, obj);
}
/**
* 15.17.1.4 Reflect.preventExtensions (target)
*/
@Function(name = "preventExtensions", arity = 1)
public static Object preventExtensions(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
return obj.setIntegrity(cx, IntegrityLevel.NonExtensible);
}
/**
* 15.17.1.5 Reflect.has (target, propertyKey)
*/
@Function(name = "has", arity = 2)
public static Object has(ExecutionContext cx, Object thisValue, Object target,
Object propertyKey) {
ScriptObject obj = ToObject(cx, target);
Object key = ToPropertyKey(cx, propertyKey);
if (key instanceof String) {
return obj.hasProperty(cx, (String) key);
} else {
assert key instanceof Symbol;
return obj.hasProperty(cx, (Symbol) key);
}
}
/**
* 15.17.1.6 Reflect.hasOwn (target, propertyKey)
*/
@Function(name = "hasOwn", arity = 2)
public static Object hasOwn(ExecutionContext cx, Object thisValue, Object target,
Object propertyKey) {
ScriptObject obj = ToObject(cx, target);
Object key = ToPropertyKey(cx, propertyKey);
if (key instanceof String) {
return obj.hasOwnProperty(cx, (String) key);
} else {
assert key instanceof Symbol;
return obj.hasOwnProperty(cx, (Symbol) key);
}
}
/**
* 15.17.1.7 Reflect.getOwnPropertyDescriptor(target, propertyKey)
*/
@Function(name = "getOwnPropertyDescriptor", arity = 2)
public static Object getOwnPropertyDescriptor(ExecutionContext cx, Object thisValue,
Object target, Object propertyKey) {
ScriptObject obj = ToObject(cx, target);
Object key = ToPropertyKey(cx, propertyKey);
Property desc;
if (key instanceof String) {
desc = obj.getOwnProperty(cx, (String) key);
} else {
assert key instanceof Symbol;
desc = obj.getOwnProperty(cx, (Symbol) key);
}
return FromPropertyDescriptor(cx, desc);
}
/**
* 15.17.1.8 Reflect.get (target, propertyKey, receiver=target)
*/
@Function(name = "get", arity = 3)
public static Object get(ExecutionContext cx, Object thisValue, Object target,
Object propertyKey, @Optional(Optional.Default.NONE) Object receiver) {
if (receiver == null) {
receiver = target;
}
ScriptObject obj = ToObject(cx, target);
Object key = ToPropertyKey(cx, propertyKey);
if (key instanceof String) {
return obj.get(cx, (String) key, receiver);
} else {
assert key instanceof Symbol;
return obj.get(cx, (Symbol) key, receiver);
}
}
/**
* 15.17.1.9 Reflect.set (target, propertyKey, V, receiver=target)
*/
@Function(name = "set", arity = 4)
public static Object set(ExecutionContext cx, Object thisValue, Object target,
Object propertyKey, Object value, @Optional(Optional.Default.NONE) Object receiver) {
if (receiver == null) {
receiver = target;
}
ScriptObject obj = ToObject(cx, target);
Object key = ToPropertyKey(cx, propertyKey);
if (key instanceof String) {
return obj.set(cx, (String) key, value, receiver);
} else {
assert key instanceof Symbol;
return obj.set(cx, (Symbol) key, value, receiver);
}
}
/**
* 15.17.1.10 Reflect.deleteProperty (target, propertyKey)
*/
@Function(name = "deleteProperty", arity = 2)
public static Object deleteProperty(ExecutionContext cx, Object thisValue, Object target,
Object propertyKey) {
ScriptObject obj = ToObject(cx, target);
Object key = ToPropertyKey(cx, propertyKey);
if (key instanceof String) {
return obj.delete(cx, (String) key);
} else {
assert key instanceof Symbol;
return obj.delete(cx, (Symbol) key);
}
}
/**
* 15.17.1.11 Reflect.defineProperty(target, propertyKey, Attributes)
*/
@Function(name = "defineProperty", arity = 3)
public static Object defineProperty(ExecutionContext cx, Object thisValue, Object target,
Object propertyKey, Object attributes) {
ScriptObject obj = ToObject(cx, target);
Object key = ToPropertyKey(cx, propertyKey);
PropertyDescriptor desc = ToPropertyDescriptor(cx, attributes);
if (key instanceof String) {
return obj.defineOwnProperty(cx, (String) key, desc);
} else {
assert key instanceof Symbol;
return obj.defineOwnProperty(cx, (Symbol) key, desc);
}
}
/**
* 15.17.1.12 Reflect.enumerate (target)
*/
@Function(name = "enumerate", arity = 1)
public static Object enumerate(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
ScriptObject itr = obj.enumerate(cx);
return itr;
}
/**
* 15.17.1.13 Reflect.ownKeys (target)
*/
@Function(name = "ownKeys", arity = 1)
public static Object ownKeys(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
ScriptObject keys = obj.ownPropertyKeys(cx);
// FIXME: spec bug (algorithm ends at step 4 without a return)
return keys;
}
/**
* 15.17.1.14 Reflect.freeze (target)
*/
@Function(name = "freeze", arity = 1)
public static Object freeze(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
return obj.setIntegrity(cx, IntegrityLevel.Frozen);
}
/**
* 15.17.1.15 Reflect.seal (target)
*/
@Function(name = "seal", arity = 1)
public static Object seal(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
return obj.setIntegrity(cx, IntegrityLevel.Sealed);
}
/**
* 15.17.1.16 Reflect.isFrozen (target)
*/
@Function(name = "isFrozen", arity = 1)
public static Object isFrozen(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
return obj.hasIntegrity(cx, IntegrityLevel.Frozen);
}
/**
* 15.17.1.17 Reflect.isSealed (target)
*/
@Function(name = "isSealed", arity = 1)
public static Object isSealed(ExecutionContext cx, Object thisValue, Object target) {
ScriptObject obj = ToObject(cx, target);
return obj.hasIntegrity(cx, IntegrityLevel.Sealed);
}
}
}
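/*
 * Hedged sketch (illustrative, not part of the module): every function above
 * repeats the same String/Symbol dispatch after ToPropertyKey, because ES6
 * property keys are either strings or symbols. The helper below factors that
 * pattern out; KeyOps and its methods are made-up stand-ins for the paired
 * ScriptObject overloads.
 */
class PropertyKeyDispatchSketch {
    interface KeyOps<R> {
        R withString(String key);
        R withSymbol(Symbol key);
    }

    /** Dispatches a ToPropertyKey result to the overload matching its type. */
    static <R> R dispatch(Object key, KeyOps<R> ops) {
        if (key instanceof String) {
            return ops.withString((String) key);
        }
        assert key instanceof Symbol;
        return ops.withSymbol((Symbol) key);
    }
}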
|
package com.github.theholywaffle.lolchatapi;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import org.jdom2.JDOMException;
import org.jivesoftware.smack.RosterListener;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.packet.Presence;
import org.jivesoftware.smack.util.StringUtils;
import com.github.theholywaffle.lolchatapi.listeners.FriendListener;
import com.github.theholywaffle.lolchatapi.wrapper.Friend;
import com.github.theholywaffle.lolchatapi.wrapper.Friend.FriendStatus;
public class LeagueRosterListener implements RosterListener {
private final HashMap<String, Presence.Type> typeUsers = new HashMap<>();
private final HashMap<String, Presence.Mode> modeUsers = new HashMap<>();
private final HashMap<String, LolStatus> statusUsers = new HashMap<>();
private final HashMap<String, FriendStatus> friendStatusUsers = new HashMap<>();
private final LolChat api;
private boolean added;
private final XMPPConnection connection;
public LeagueRosterListener(LolChat api, XMPPConnection connection) {
this.api = api;
this.connection = connection;
}
public void entriesAdded(Collection<String> e) {
for (final String s : e) {
final Friend f = api.getFriendById(s);
if (!added && !api.isLoaded()) {
if (f.isOnline()) {
typeUsers.put(s, Presence.Type.available);
modeUsers.put(s, f.getChatMode().mode);
statusUsers.put(s, f.getStatus());
} else {
typeUsers.put(s, Presence.Type.unavailable);
}
}
if (f.getGroup() == null) {
api.getDefaultFriendGroup().addFriend(f);
}
if (f.getFriendStatus() != FriendStatus.MUTUAL_FRIENDS) {
friendStatusUsers.put(s, f.getFriendStatus());
}
}
added = true;
}
public void entriesDeleted(Collection<String> entries) {
for (final String s : entries) {
friendStatusUsers.put(s, null);
for (final FriendListener l : api.getFriendListeners()) {
String name = null;
if (api.getRiotApi() != null) {
try {
name = api.getRiotApi().getName(s);
} catch (final IOException e) {
e.printStackTrace();
}
}
l.onRemoveFriend(s, name);
}
}
}
public void entriesUpdated(Collection<String> e) {
for (final String s : e) {
final Friend f = api.getFriendById(s);
final FriendStatus previous = friendStatusUsers.get(s);
if (((previous != null && previous != FriendStatus.MUTUAL_FRIENDS)
|| previous == null || !api.isLoaded())
&& f.getFriendStatus() == FriendStatus.MUTUAL_FRIENDS) {
onNewFriend(f);
}
friendStatusUsers.put(s, f.getFriendStatus());
}
}
public boolean isLoaded() {
return added;
}
private void onNewFriend(Friend f) {
for (final FriendListener l : api.getFriendListeners()) {
l.onNewFriend(f);
}
}
public void presenceChanged(Presence p) {
String from = p.getFrom();
if (from != null) {
p = connection.getRoster().getPresence(from);
from = StringUtils.parseBareAddress(from);
final Friend friend = api.getFriendById(from);
if (friend != null) {
for (final FriendListener l : api.getFriendListeners()) {
final Presence.Type previousType = typeUsers.get(from);
if (p.getType() == Presence.Type.available
&& (previousType == null || previousType != Presence.Type.available)) {
l.onFriendJoin(friend);
} else if (p.getType() == Presence.Type.unavailable
&& (previousType == null || previousType != Presence.Type.unavailable)) {
l.onFriendLeave(friend);
}
final Presence.Mode previousMode = modeUsers.get(from);
if (p.getMode() == Presence.Mode.chat
&& (previousMode == null || previousMode != Presence.Mode.chat)) {
l.onFriendAvailable(friend);
} else if (p.getMode() == Presence.Mode.away
&& (previousMode == null || previousMode != Presence.Mode.away)) {
l.onFriendAway(friend);
} else if (p.getMode() == Presence.Mode.dnd
&& (previousMode == null || previousMode != Presence.Mode.dnd)) {
l.onFriendBusy(friend);
}
if (p.getStatus() != null) {
try {
final LolStatus previousStatus = statusUsers
.get(from);
final LolStatus newStatus = new LolStatus(
p.getStatus());
if (previousStatus != null
&& !newStatus.equals(previousStatus)) {
l.onFriendStatusChange(friend);
}
} catch (JDOMException | IOException e) {
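// the status XML could not be parsed; skip the change notification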
}
}
}
typeUsers.put(from, p.getType());
modeUsers.put(from, p.getMode());
if (p.getStatus() != null) {
try {
statusUsers.put(from, new LolStatus(p.getStatus()));
} catch (JDOMException | IOException e) {
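// the status XML could not be parsed; leave the cached status untouched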
}
}
}
}
}
}
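/*
 * Hedged sketch (illustrative, not part of the API): the compare-then-cache
 * delta detection presenceChanged() performs above, reduced to its core.
 * A callback only fires when the cached Presence.Type actually changes;
 * chat mode and status follow the same pattern. Names are made up.
 */
class PresenceDeltaSketch {
    private final HashMap<String, Presence.Type> lastType = new HashMap<>();

    void onPresence(String user, Presence.Type newType, Runnable onJoin, Runnable onLeave) {
        final Presence.Type previous = lastType.get(user); // null on first sighting
        if (newType == Presence.Type.available && previous != Presence.Type.available) {
            onJoin.run();
        } else if (newType == Presence.Type.unavailable && previous != Presence.Type.unavailable) {
            onLeave.run();
        }
        lastType.put(user, newType); // cache for the next delta
    }
}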
|
package com.lenis0012.bukkit.loginsecurity.session;
import com.lenis0012.bukkit.loginsecurity.LoginSecurity;
import com.lenis0012.bukkit.loginsecurity.events.AuthActionEvent;
import com.lenis0012.bukkit.loginsecurity.events.AuthModeChangedEvent;
import com.lenis0012.bukkit.loginsecurity.session.action.ActionCallback;
import com.lenis0012.bukkit.loginsecurity.session.action.ActionResponse;
import com.lenis0012.bukkit.loginsecurity.session.exceptions.ProfileRefreshException;
import com.lenis0012.bukkit.loginsecurity.storage.PlayerProfile;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import java.sql.SQLException;
import java.util.UUID;
import java.util.logging.Level;
/**
* Player session
*/
public class PlayerSession {
private PlayerProfile profile;
private AuthMode mode;
protected PlayerSession(PlayerProfile profile, AuthMode mode) {
this.profile = profile;
this.mode = mode;
}
/**
* Get session player's profile.
*
* @return Profile
*/
public PlayerProfile getProfile() {
return profile;
}
/**
* Save the profile on a separate thread.
*/
public void saveProfileAsync() {
if(!isRegistered()) {
throw new IllegalStateException("Can't save profile when not registered!");
}
LoginSecurity.getDatastore().getProfileRepository().update(profile, result -> {
if(!result.isSuccess()) LoginSecurity.getInstance().getLogger().log(Level.SEVERE, "Failed to save user profile", result.getError());
});
}
/**
* Refreshes player's profile.
*/
public void refreshProfile() throws ProfileRefreshException {
PlayerProfile newProfile;
try {
newProfile = LoginSecurity.getDatastore().getProfileRepository().findByUniqueUserIdBlocking(UUID.fromString(profile.getUniqueUserId()));
} catch (SQLException e) {
throw new ProfileRefreshException("Failed to load profile from database", e);
}
if(newProfile != null && !isRegistered()) {
throw new ProfileRefreshException("Profile was registered while in database!");
}
if(newProfile == null && isRegistered()) {
throw new ProfileRefreshException("Profile was not found, even though it should be there!");
}
if(newProfile == null) {
// Player isn't registered, nothing to update.
return;
}
this.profile = newProfile;
}
/**
* Reset the player's profile to a blank profile.
*/
public void resetProfile() {
String lastName = profile.getLastName();
this.profile = LoginSecurity.getSessionManager().createBlankProfile(UUID.fromString(profile.getUniqueUserId()));
profile.setLastName(lastName);
}
/**
* Check whether the player has an account and is logged in.
* Note: You're probably looking for {@link #isAuthorized() isAuthorized}.
*
* @return Logged in
*/
public boolean isLoggedIn() {
return isAuthorized() && profile.getPassword() != null;
}
/**
* Check whether or not the player's auth mode is "AUTHENTICATED".
* This means they're allowed to move etc.
* Returns true when player is logged in OR password is not required and player has no account.
*
* @return Authorized
*/
public boolean isAuthorized() {
return mode == AuthMode.AUTHENTICATED;
}
/**
* Check whether or not player is registered.
*
* @return True if registered, False otherwise
*/
public boolean isRegistered() {
return profile.getPassword() != null;
}
/**
* Get the player's current auth mode.
*
* @return Auth mode
*/
public AuthMode getAuthMode() {
return mode;
}
/**
* Get the player for this session if player is online.
*
* @return Player
*/
public Player getPlayer() {
return Bukkit.getPlayer(profile.getLastName());
}
/**
* Perform an action in an async task.
* Runs callback when action is finished.
*
* @param action Action to perform
* @param callback To run when action has been performed.
*/
public void performActionAsync(final AuthAction action, final ActionCallback callback) {
LoginSecurity.getExecutorService().execute(() -> {
final ActionResponse response = performAction(action);
Bukkit.getScheduler().runTask(LoginSecurity.getInstance(), () -> callback.call(response));
});
}
/**
* Perform an action on this session.
*
* @param action to perform
*/
private ActionResponse performAction(AuthAction action) {
AuthActionEvent event = new AuthActionEvent(this, action, true);
Bukkit.getPluginManager().callEvent(event);
if(event.isCancelled()) {
return new ActionResponse(false, event.getCancelledMessage());
}
// Run
final ActionResponse response = new ActionResponse();
AuthMode previous = mode;
AuthMode current = action.run(this, response);
if(current == null || !response.isSuccess()) return response; // Something went wrong
this.mode = current;
// If auth mode changed, run event
if(previous != mode) {
final boolean isAsync = !Bukkit.isPrimaryThread(); // performAction normally runs on the executor thread
AuthModeChangedEvent event1 = new AuthModeChangedEvent(this, previous, mode, isAsync);
Bukkit.getPluginManager().callEvent(event1);
}
// Complete
return response;
}
}
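/*
 * Hedged sketch (illustrative, not part of the plugin): the threading shape
 * of performActionAsync() above, without Bukkit. Work runs on a background
 * executor and the callback is handed back to a designated "main" thread via
 * a queue, mirroring scheduler.runTask(). All names are made up.
 */
class AsyncActionSketch {
    private final java.util.concurrent.ExecutorService background =
        java.util.concurrent.Executors.newSingleThreadExecutor();
    private final java.util.concurrent.BlockingQueue<Runnable> mainThreadTasks =
        new java.util.concurrent.LinkedBlockingQueue<>();

    /** Runs work off-thread, then queues the callback for the main loop. */
    <T> void performAsync(java.util.concurrent.Callable<T> work, java.util.function.Consumer<T> callback) {
        background.execute(() -> {
            try {
                T result = work.call();
                mainThreadTasks.add(() -> callback.accept(result)); // like scheduler.runTask(...)
            } catch (Exception e) {
                e.printStackTrace(); // a real plugin would log this properly
            }
        });
    }

    /** Called from the main loop each tick to drain queued callbacks. */
    void drainMainThreadTasks() {
        Runnable task;
        while ((task = mainThreadTasks.poll()) != null)
            task.run();
    }
}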
|
/**
* Illustrates using counters and broadcast variables for chapter 6
*/
package com.oreilly.learningsparkexamples.java;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.regex.*;
import java.util.Scanner;
import java.util.Iterator;
import java.io.File;
import scala.Tuple2;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.DeserializationFeature;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.Accumulator;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.SparkFiles;
import org.eclipse.jetty.client.ContentExchange;
import org.eclipse.jetty.client.HttpClient;
public class ChapterSixExample {
public static void main(String[] args) throws Exception {
if (args.length != 4) {
throw new Exception("Usage AccumulatorExample sparkMaster inputFile outDirectory");
}
String sparkMaster = args[0];
String inputFile = args[1];
String inputFile2 = args[2];
String outputDir = args[3];
JavaSparkContext sc = new JavaSparkContext(
sparkMaster, "ChapterSixExample", System.getenv("SPARK_HOME"), System.getenv("JARS"));
JavaRDD<String> rdd = sc.textFile(inputFile);
// Count the number of lines with KK6JKQ
final Accumulator<Integer> count = sc.accumulator(0);
rdd.foreach(new VoidFunction<String>(){ public void call(String line) {
if (line.contains("KK6JKQ")) {
count.add(1);
}
}});
System.out.println("Lines with 'KK6JKQ': " + count.value());
// Create Accumulators initialized at 0
final Accumulator<Integer> blankLines = sc.accumulator(0);
JavaRDD<String> callSigns = rdd.flatMap(
new FlatMapFunction<String, String>() { public Iterable<String> call(String line) {
if (line.equals("")) {
blankLines.add(1);
}
return Arrays.asList(line.split(" "));
}});
callSigns.saveAsTextFile(outputDir + "/callsigns");
System.out.println("Blank lines: "+ blankLines.value());
// Start validating the call signs
final Accumulator<Integer> validSignCount = sc.accumulator(0);
final Accumulator<Integer> invalidSignCount = sc.accumulator(0);
JavaRDD<String> validCallSigns = callSigns.filter(
new Function<String, Boolean>(){ public Boolean call(String callSign) {
Pattern p = Pattern.compile("\\A\\d?\\p{Alpha}{1,2}\\d{1,4}\\p{Alpha}{1,3}\\Z");
Matcher m = p.matcher(callSign);
boolean b = m.matches();
if (b) {
validSignCount.add(1);
} else {
invalidSignCount.add(1);
}
return b;
}
});
JavaPairRDD<String, Integer> contactCount = validCallSigns.mapToPair(
new PairFunction<String, String, Integer>() {
public Tuple2<String, Integer> call(String callSign) {
return new Tuple2(callSign, 1);
}}).reduceByKey(new Function2<Integer, Integer, Integer>() {
public Integer call(Integer x, Integer y) {
return x + y;
}});
// Force evaluation so the counters are populated
contactCount.count();
if (invalidSignCount.value() < 0.1 * validSignCount.value()) {
contactCount.saveAsTextFile(outputDir + "/contactCount");
} else {
System.out.println("Too many errors " + invalidSignCount.value() + " for " + validSignCount.value());
System.exit(1);
}
// Read in the call sign table
Scanner callSignTbl = new Scanner(new File("./files/callsign_tbl_sorted"));
ArrayList<String> callSignList = new ArrayList<String>();
while (callSignTbl.hasNextLine()) {
callSignList.add(callSignTbl.nextLine());
}
final Broadcast<String[]> callSignsMap = sc.broadcast(callSignList.toArray(new String[0]));
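// Lookup sketch for the step below: the broadcast array holds sorted "prefix,country"
// rows, so each executor can binary-search it locally instead of shipping the table
// with every task. Arrays.binarySearch returns -(insertionPoint) - 1 on a miss, which
// the code below converts back to the insertion point with pos = -pos - 1.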
JavaPairRDD<String, Integer> countryContactCount = contactCount.mapToPair(
new PairFunction<Tuple2<String, Integer>, String, Integer> (){
public Tuple2<String, Integer> call(Tuple2<String, Integer> callSignCount) {
String[] callSignInfo = callSignsMap.value();
String sign = callSignCount._1();
Integer pos = java.util.Arrays.binarySearch(callSignInfo, sign);
if (pos < 0) {
pos = -pos-1;
}
return new Tuple2(callSignInfo[pos].split(",")[1], callSignCount._2());
}}).reduceByKey(new Function2<Integer, Integer, Integer>() {
public Integer call(Integer x, Integer y) {
return x + y;
}});
countryContactCount.saveAsTextFile(outputDir + "/countries");
// use mapPartitions to re-use setup work
JavaPairRDD<String, QSO[]> contactsContactList = validCallSigns.mapPartitionsToPair(
new PairFlatMapFunction<Iterator<String>, String, QSO[]>() {
public Iterable<Tuple2<String, QSO[]>> call(Iterator<String> input) {
ArrayList<Tuple2<String, QSO[]>> callsignQsos = new ArrayList<Tuple2<String, QSO[]>>();
ArrayList<Tuple2<String, ContentExchange>> ccea = new ArrayList<Tuple2<String, ContentExchange>>();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
HttpClient client = new HttpClient();
client.setMaxConnectionsPerAddress(10);
client.setTimeout(30000); // 30 seconds timeout; if no server reply, the request expires
try {
client.start();
while (input.hasNext()) {
ContentExchange exchange = new ContentExchange(true);
String sign = input.next();
// Use the sign just read; calling input.next() again here would skip every other element
exchange.setURL("http://new73s.herokuapp.com/qsos/" + sign + ".json");
client.send(exchange);
ccea.add(new Tuple2(sign, exchange));
}
for (Tuple2<String, ContentExchange> signExchange : ccea) {
String sign = signExchange._1();
ContentExchange exchange = signExchange._2();
exchange.waitForDone();
String responseJson = exchange.getResponseContent();
QSO[] qsos = mapper.readValue(responseJson, QSO[].class);
callsignQsos.add(new Tuple2(sign, qsos));
}
} catch (Exception e) {
// Swallow fetch/parse failures for this partition and return whatever was collected so far
}
return callsignQsos;
}});
System.out.println(StringUtils.join(contactsContactList.collect(), ","));
// Compute the distance of each call using an external R program
// adds our script to a list of files for each node to download with this job
String distScript = "/home/holden/repos/learning-spark-examples/src/R/finddistance.R";
sc.addFile(distScript);
JavaRDD<String> pipeInputs = contactsContactList.values().flatMap(
new FlatMapFunction<QSO[], String>() { public Iterable<String> call(QSO[] calls) {
ArrayList<String> latLons = new ArrayList<String>();
if (calls == null) {
return latLons;
}
for (QSO call: calls) {
if (call != null && call.mylat != null && call.mylong != null
&& call.contactlat != null && call.contactlong != null) {
latLons.add(call.mylat+","+call.mylong+","+call.contactlat+","+call.contactlong);
}
}
return latLons;
}
});
HashMap<String, String> argMap = new HashMap<String, String>();
argMap.put("SEPARATOR", ",");
ArrayList<String> command = new ArrayList<String>();
// in local mode just use the script, in distributed mode get the file
command.add(distScript);
// command.add(SparkFiles.get(distScript));
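// pipe() semantics: each RDD element is written as one line to the subprocess's stdin,
// each line the script prints to stdout becomes one element of the result, and the
// entries of argMap are exported as environment variables (here SEPARATOR=",").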
JavaRDD<String> distance = pipeInputs.pipe(command,
argMap);
System.out.println(StringUtils.join(distance.collect(), ","));
}
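// A minimal sketch (not part of the book example): the same accumulator pattern as the
// KK6JKQ count above, factored into a helper. Updates happen inside foreach(), an
// action, where Spark applies them exactly once per record; accumulators bumped inside
// transformations such as the filter above may be re-applied if a task is retried, so
// they are only reliable there for debugging-grade counts.
private static int countMatches(JavaSparkContext sc, JavaRDD<String> lines, final String needle) {
final Accumulator<Integer> matches = sc.accumulator(0);
lines.foreach(new VoidFunction<String>() { public void call(String line) {
if (line.contains(needle)) {
matches.add(1);
}
}});
return matches.value();
}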
}
|
package com.sonicjumper.enhancedvisuals.event;
import java.awt.Color;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.client.Minecraft;
import net.minecraft.client.audio.PositionedSound;
import net.minecraft.client.audio.PositionedSoundRecord;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.monster.EntityCreeper;
import net.minecraft.entity.monster.EntityEnderman;
import net.minecraft.entity.monster.EntityGolem;
import net.minecraft.entity.monster.EntitySkeleton;
import net.minecraft.entity.monster.EntitySpider;
import net.minecraft.entity.monster.EntityZombie;
import net.minecraft.entity.passive.EntityOcelot;
import net.minecraft.entity.passive.EntityWolf;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.projectile.EntityArrow;
import net.minecraft.entity.projectile.EntityPotion;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.potion.PotionHelper;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.BlockPos;
import net.minecraft.util.DamageSource;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.event.entity.living.LivingAttackEvent;
import net.minecraftforge.event.entity.living.LivingDeathEvent;
import net.minecraftforge.event.entity.living.LivingHurtEvent;
import net.minecraftforge.event.world.ExplosionEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.gameevent.TickEvent.Phase;
import net.minecraftforge.fml.common.gameevent.TickEvent.PlayerTickEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.lwjgl.input.Keyboard;
import com.google.common.base.Predicates;
import com.sonicjumper.enhancedvisuals.Base;
import com.sonicjumper.enhancedvisuals.ConfigCore;
import com.sonicjumper.enhancedvisuals.environment.BaseEnvironmentEffect;
import com.sonicjumper.enhancedvisuals.environment.EyeSensitivityHandler;
import com.sonicjumper.enhancedvisuals.environment.PotionSplashHandler;
import com.sonicjumper.enhancedvisuals.environment.TemperatureHandler;
import com.sonicjumper.enhancedvisuals.environment.WetnessHandler;
import com.sonicjumper.enhancedvisuals.render.RenderShaderBlurFade;
import com.sonicjumper.enhancedvisuals.util.SplatUtil;
import com.sonicjumper.enhancedvisuals.visuals.Blur;
import com.sonicjumper.enhancedvisuals.visuals.BoxBlur;
import com.sonicjumper.enhancedvisuals.visuals.Shader;
import com.sonicjumper.enhancedvisuals.visuals.ShaderBlurFade;
import com.sonicjumper.enhancedvisuals.visuals.Splat;
import com.sonicjumper.enhancedvisuals.visuals.Visual;
import com.sonicjumper.enhancedvisuals.visuals.VisualManager;
import com.sonicjumper.enhancedvisuals.visuals.VisualType;
public class VisualEventHandler {
private ArrayList<BaseEnvironmentEffect> environmentalEffects;
public WetnessHandler wetnessHandler = new WetnessHandler(this);
public EyeSensitivityHandler eyeSensitivityHandler = new EyeSensitivityHandler(this);
public TemperatureHandler temperatureHandler = new TemperatureHandler(this);
public PotionSplashHandler potionSplashHandler = new PotionSplashHandler(this);
public Minecraft mc = Minecraft.getMinecraft();
// Swords and Axes are slashing
// Shovels and Pickaxes are impact
// Hoes and Arrows are piercing
private ArrayList<Item> sharpList;
private ArrayList<Item> bluntList;
private ArrayList<Item> pierceList;
//private HashMap<EntityLivingBase, Float> entityHealthMap;
private Random rand;
//private float playerWetness = 0.5F;
//private float playerTemp = 1.0F;
//private float eyeAdjustment = 0.4F;
//private int glowBuffer;
private int lowHealthBuffer;
public VisualEventHandler() {
sharpList = new ArrayList<Item>();
bluntList = new ArrayList<Item>();
pierceList = new ArrayList<Item>();
sharpList.add(Items.iron_sword);
sharpList.add(Items.wooden_sword);
sharpList.add(Items.stone_sword);
sharpList.add(Items.diamond_sword);
sharpList.add(Items.golden_sword);
sharpList.add(Items.iron_axe);
sharpList.add(Items.wooden_axe);
sharpList.add(Items.stone_axe);
sharpList.add(Items.diamond_axe);
sharpList.add(Items.golden_axe);
bluntList.add(Items.iron_pickaxe);
bluntList.add(Items.wooden_pickaxe);
bluntList.add(Items.stone_pickaxe);
bluntList.add(Items.diamond_pickaxe);
bluntList.add(Items.golden_pickaxe);
bluntList.add(Items.iron_shovel);
bluntList.add(Items.wooden_shovel);
bluntList.add(Items.stone_shovel);
bluntList.add(Items.diamond_shovel);
bluntList.add(Items.golden_shovel);
pierceList.add(Items.iron_hoe);
pierceList.add(Items.wooden_hoe);
pierceList.add(Items.stone_hoe);
pierceList.add(Items.diamond_hoe);
pierceList.add(Items.golden_hoe);
pierceList.add(Items.arrow);
//entityHealthMap = new HashMap<EntityLivingBase, Float>();
rand = new Random();
environmentalEffects = new ArrayList<BaseEnvironmentEffect>();
environmentalEffects.add(wetnessHandler);
environmentalEffects.add(eyeSensitivityHandler);
environmentalEffects.add(temperatureHandler);
environmentalEffects.add(potionSplashHandler);
}
/*@SubscribeEvent
public void onPlayerDamage(LivingHurtEvent e) {
if (e.source.equals(na.i)) {
return;
}
if (((e.entityLiving instanceof ue)) && (((ue)e.entityLiving).equals(this.mc.h)))
{
ue p = (ue)e.entityLiving;
if ((e.source.i() != null) && ((e.source.i() instanceof oe)))
{
oe elb = (oe)e.source.i();
if (((elb.aY() != null) && (isSharp(elb.aY().b()))) || ((elb instanceof tv))) {
VisualManager.createVisualFromDamage(VisualType.slash, e.ammount, e.entityLiving);
} else if (((elb.aY() != null) && (isBlunt(elb.aY().b()))) || ((elb instanceof ru))) {
VisualManager.createVisualFromDamage(VisualType.impact, e.ammount, e.entityLiving);
} else if (((elb.aY() != null) && (isPierce(elb.aY().b()))) || ((elb instanceof ts)) || ((elb instanceof se))) {
VisualManager.createVisualFromDamage(VisualType.pierce, e.ammount, e.entityLiving);
} else {
VisualManager.createVisualFromDamage(VisualType.splatter, e.ammount, e.entityLiving);
}
}
if (((e.source.i() != null) && ((e.source.i() instanceof ug))) || (e.source.equals(na.g))) {
VisualManager.createVisualFromDamage(VisualType.pierce, e.ammount, e.entityLiving);
}
if ((e.source.equals(na.h)) || (e.source.equals(na.n))) {
VisualManager.createVisualFromDamage(VisualType.impact, e.ammount, e.entityLiving);
}
if (e.source.c()) {
if ((e.source.h() != null) && (e.source.h().d(this.mc.h) < 16.0D))
{
VisualManager.createVisualFromDamageAndDistance(VisualType.dust, e.ammount, e.entityLiving, e.source.h().e(this.mc.h));
Blur b = new BoxBlur(VisualType.blur, (int)(e.ammount * 10.0F), new Color(1.0F, 1.0F, 1.0F, 0.8F), 10, 1, true, ConfigCore.blurQuality);
VisualManager.addVisual(b);
}
else
{
VisualManager.createVisualFromDamage(VisualType.dust, e.ammount, e.entityLiving);
Blur b = new BoxBlur(VisualType.blur, (int)(e.ammount * 10.0F), new Color(1.0F, 1.0F, 1.0F, 0.8F), 10, 1, true, ConfigCore.blurQuality);
VisualManager.addVisual(b);
}
}
if (getOverlayFromSource(e.source) != null) {
VisualManager.addVisualsWithColor(getOverlayFromSource(e.source), 1, (int)(e.ammount * 10.0F), (int)(e.ammount * 15.0F), new Color(1.0F, 1.0F, 1.0F, 0.6F));
}
if (e.source.equals(na.e)) {
VisualManager.addRandomNumVisualsWithColor(VisualType.waterS, 4, 8, (int)(e.ammount * 10.0F), (int)(e.ammount * 15.0F), new Color(1.0F, 1.0F, 1.0F, 1.0F));
}
}
else if (this.mc.h != null)
{
na ds = e.source;
if ((ds.equals(na.m)) || (ds.equals(na.h)) || (ds.equals(na.n)) || (ds.n().equals("mob")) || (ds.n().equals("player"))) {
if (e.entityLiving.d(this.mc.h) < 8.0D) {
VisualManager.createVisualFromDamageAndDistance(VisualType.splatter, e.ammount, e.entityLiving, e.entityLiving.e(this.mc.h));
}
}
}
}*/
@SideOnly(Side.CLIENT)
@SubscribeEvent
public void onPlayerDamage(LivingAttackEvent event)
{
if(event.entity instanceof EntityPlayer)
{
entityDamaged(event.entityLiving, event.source, event.ammount);
}
}
/*@SubscribeEvent
public void onEntityReceiveDamage(LivingHurtEvent e) {
EntityLivingBase entity = e.entityLiving;
float ammount = e.ammount;
DamageSource source = e.source;
Base.log.info("LivingHurtEvent thrown; Entity: " + entity.getName() + "; Source: " + source.getDamageType() + "; Ammount: " + ammount);
if(e.source.getEntity() != null) {
Base.log.info("Source Entity: " + e.source.getEntity().getName());
} else {
Base.log.info("Source Entity: null source entity");
}
}*/
@SubscribeEvent
public void playerTickEvent(PlayerTickEvent event) {
if(event.phase.equals(Phase.END)) {
onTickInGame();
/*boolean doesBlurExist = false;
for(Shader s : Base.instance.manager.getActiveShaders()) {
if(s instanceof ShaderBlurFade) {
doesBlurExist = true;
}
}
if(!doesBlurExist) {
Base.log.info("Creating new blur shader");
Shader s = new ShaderBlurFade(VisualType.blur, 100, 10.0F);
Base.instance.manager.addVisualDirect(s);
}
*/
}
}
@SubscribeEvent
public void onPlayerDeath(LivingDeathEvent e) {
if(e.entityLiving.equals(mc.thePlayer)) {
Base.instance.manager.clearAllVisuals();
//this.playerWetness = 0.5F;
//this.playerTemp = 1.0F;
//this.eyeAdjustment = 0.4F;
}
}
/*@SubscribeEvent
public void onEntityTakeDamage(LivingHurtEvent event) {
//System.out.println("Entity took damage: " + event.toString());
//TODO See if this event fires on multiplayer damage events. Historically it hasn't, but maybe it's updated
}*/
/*@SubscribeEvent
public void onExplosion(ExplosionEvent event) {
//System.out.println("Explosion occurred: " + event.toString());
//TODO See if this event fires on multiplayer explosions. If it does, then use this for explosion dust control
}*/
private void entityDamaged(EntityLivingBase entity, DamageSource source, float damage) {
//Base.log.info("Damage amount:" + damage + " called for entity " + entity.toString());
/*for(Point2D point : SplatUtil.generateRandomSplatStreak(25)) {
Splat s = new Splat(VisualType.splatter, 200, Color.WHITE, (float) point.getX(), (float) point.getY());
Base.instance.manager.addVisualDirect(s);
}*/
if(source == DamageSource.outOfWorld)
return;
// Check distance to player and use that as splat distance pattern
double distanceSq = Minecraft.getMinecraft().thePlayer.getDistanceSqToEntity(entity);
if(distanceSq > 64.0D) {
return;
}
if(entity instanceof EntityPlayer)
{
Entity attacker = source.getSourceOfDamage();
if(attacker instanceof EntityLivingBase) {
EntityLivingBase lastAttacker = (EntityLivingBase) attacker;
// Check weapons
if(lastAttacker.getHeldItem() != null) {
if(isSharp(lastAttacker.getHeldItem().getItem())) {
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.slash, damage, entity, distanceSq);
} else if(isBlunt(lastAttacker.getHeldItem().getItem())) {
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.impact, damage, entity, distanceSq);
} else if(isPierce(lastAttacker.getHeldItem().getItem())) {
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.pierce, damage, entity, distanceSq);
} else {
// Default to splatter type
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.splatter, damage, entity, distanceSq);
}
} else {
if(source.getEntity() != null && source.getEntity() instanceof EntityArrow) {
Base.instance.manager.createVisualFromDamage(VisualType.pierce, damage, entity);
}
// No weapon in hand: pick the visual type from the attacker's mob class
if(lastAttacker instanceof EntityZombie || lastAttacker instanceof EntitySkeleton || lastAttacker instanceof EntityOcelot) {
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.slash, damage, entity, distanceSq);
} else if(lastAttacker instanceof EntityGolem || lastAttacker instanceof EntityPlayer) {
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.impact, damage, entity, distanceSq);
} else if(lastAttacker instanceof EntityWolf || lastAttacker instanceof EntitySpider) {
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.pierce, damage, entity, distanceSq);
}
}
}
if(source == DamageSource.cactus) {
Base.instance.manager.createVisualFromDamage(VisualType.pierce, damage, entity);
}
if(source == DamageSource.fall || source == DamageSource.fallingBlock) {
Base.instance.manager.createVisualFromDamage(VisualType.impact, damage, entity);
}
if(source.isExplosion()) {
if(source.getSourceOfDamage() != null && source.getSourceOfDamage().getDistanceToEntity(mc.thePlayer) < 16.0D) {
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.dust, damage, entity, source.getSourceOfDamage().getDistanceSqToEntity(mc.thePlayer));
Blur b = new BoxBlur(VisualType.blur, (int) (damage * 10), new Color(1.0F, 1.0F, 1.0F, 0.8F), true, ConfigCore.blurQuality, 10, 1);
Base.instance.manager.addVisualDirect(b);
} else {
Base.instance.manager.createVisualFromDamage(VisualType.dust, damage, entity);
Blur b = new BoxBlur(VisualType.blur, (int) (damage * 10), new Color(1.0F, 1.0F, 1.0F, 0.8F), true, ConfigCore.blurQuality, 10, 1);
Base.instance.manager.addVisualDirect(b);
}
}
if(source.equals(DamageSource.drown)) {
Base.instance.manager.addRandomNumVisualsWithColor(VisualType.waterS, 4, 8, (int) (damage * 10), (int) (damage * 15), new Color(1.0F, 1.0F, 1.0F, 1.0F));
}
if(source.isFireDamage() || source == DamageSource.onFire)
{
Base.instance.manager.addVisualsWithShading(VisualType.fire, (int) damage, 100, 1000, new Color(1, 1, 1));
//if (event.source.n() == "lava") {
//    burnOverlay = new Overlay(Overlay.OverlayType.Burn, 0.5F, 25 + rand.nextInt(25));
//}
}
// end of the "entity is the player" branch; the else below handles other nearby entities
}else{
if(mc.thePlayer.isBurning())
Base.instance.manager.addVisualsWithShading(VisualType.fire, (int) damage, 100, 1000, new Color(1, 1, 1));
// For now, just assume damage came from another source (falling, drowning, cactus, etc.) and use splatter
if(source == DamageSource.anvil || source == DamageSource.fall || source == DamageSource.fallingBlock
|| source.getDamageType().equals("mob") || source.getDamageType().equals("player"))
if(source.getEntity() != null && source.getEntity().getDistanceToEntity(mc.thePlayer) < 8.0D)
Base.instance.manager.createVisualFromDamageAndDistance(VisualType.splatter, damage, entity, distanceSq);
}
}
public void onTickInGame() {
EntityPlayer player = Minecraft.getMinecraft().thePlayer;
if(player == null)
return;
// Tick all visuals
boolean hasBlurShader = false;
ArrayList<Visual> vList = Base.instance.manager.getActiveVisuals();
for (int i = 0; i < vList.size(); i++) {
Visual v = vList.get(i);
if(v instanceof ShaderBlurFade)
hasBlurShader = true;
v.tickUpdate();
}
if(!hasBlurShader && RenderShaderBlurFade.lastBlurRadius != 1)
RenderShaderBlurFade.resetBlur();
/*// Sample health values of all entities, if an entity has lost health, then throw a damage event
try {
ArrayList<EntityLivingBase> entitiesInWorld = (ArrayList<EntityLivingBase>) Minecraft.getMinecraft().theWorld.getEntities(EntityLivingBase.class, Predicates.alwaysTrue());
for(int i = 0; i < entitiesInWorld.size(); i++) {
EntityLivingBase entity = entitiesInWorld.get(i);
if(!entityHealthMap.containsKey(entity)) {
if(entity instanceof EntityPlayer) {
entityHealthMap.put(entity, 0.0F);
} else {
entityHealthMap.put(entity, entity.getHealth());
}
} else {
if(entityHealthMap.get(entity) > entity.getHealth()) {
entityDamaged(entity, entityHealthMap.get(entity) - entity.getHealth());
}
entityHealthMap.put(entity, entity.getHealth());
}
}
// A little cleanup for the entityHealthMap, removing missing or dead entities
Iterator it = entityHealthMap.entrySet().iterator();
while(it.hasNext()) {
Map.Entry pair = (Entry) it.next();
EntityLivingBase entity = (EntityLivingBase) pair.getKey();
if(!entitiesInWorld.contains(entity) || entity.getHealth() <= 0.0F || entity.isDead) {
it.remove();
}
}
} catch(ConcurrentModificationException e) {
System.out.println("Caught a possible concurrent modification exception, maybe the client lagged?");
}*/
for(BaseEnvironmentEffect ee : environmentalEffects) {
// Keep track of Player's "wetness"
// Adjust the player's vision depending on sudden increases in light
// Keep track of Player's "temperature"
ee.onTick();
}
// Check if player has splashed in water
if(hasSplashed(player)) {
Shader s = new ShaderBlurFade(VisualType.blur, 100 + rand.nextInt(100), 10.0F + rand.nextFloat() * 5.0F);
Base.instance.manager.addVisualDirect(s);
}
// Check if player is in water, then wash certain splats away
if(player.isInWater()) {
for(Visual v : Base.instance.manager.getActiveVisuals()) {
if(v.getType().substractByTime) {
v.subtractTickPercent(2.5F);
}
}
}
// Check if player has less than three hearts, then play heartbeat sound and flash lowhealth screen
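// Pacing sketch: lowHealthBuffer counts down once per tick. A fresh effect (overlay,
// blur, "out" heartbeat) starts only when it reaches 0, the matching "in" heartbeat
// fires when it hits 5, and the reset value scales with remaining health so the
// heartbeat speeds up as the player gets closer to death.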
if (player.getHealth() <= 6.0F) {
if(this.lowHealthBuffer <= 0) {
float f1 = (7.0F - player.getHealth()) * 0.2F;
//Base.instance.manager.addVisualsWithShading(VisualType.lowhealth, 1, (int)(6.0F * (6.0F - player.getHealth())), (int)(10.0F * (6.0F - player.getHealth())), new Color(1.0F, 1.0F, 1.0F, f1 <= 1.0F ? f1 : 1.0F));
this.lowHealthBuffer = (int) (player.getHealth() * 10 + 15);
Base.instance.manager.addVisualsWithShading(VisualType.lowhealth, 1, this.lowHealthBuffer - 5, this.lowHealthBuffer - 5, new Color(1.0F, 1.0F, 1.0F, Math.min(0.7F, f1)));
Shader s = new ShaderBlurFade(VisualType.blur, 10, Math.min(0.7F, f1)*50F);
Base.instance.manager.addVisualDirect(s);
mc.getSoundHandler().playSound(PositionedSoundRecord.create(new ResourceLocation("sonicvisuals:heartbeatOut"), (float)player.posX, (float)player.posY, (float)player.posZ));
//Minecraft.getMinecraft().theWorld.playSoundEffect(player.posX, player.posY, player.posZ, "sonicvisuals:heartbeatOut", 1, 1);
} else if(this.lowHealthBuffer == 5) {
mc.getSoundHandler().playSound(PositionedSoundRecord.create(new ResourceLocation("sonicvisuals:heartbeatIn"), (float)player.posX, (float)player.posY, (float)player.posZ));
Shader s = new ShaderBlurFade(VisualType.blur, 10, 50F);
Base.instance.manager.addVisualDirect(s);
//Minecraft.getMinecraft().theWorld.playSoundEffect(player.posX, player.posY, player.posZ, "sonicvisuals:heartbeatIn", 1, 1);
this.lowHealthBuffer -= 1;
} else {
this.lowHealthBuffer -= 1;
}
}
// Check to see if potions are splashing
checkRecentPotions();
// Check surrounding light values and "adjust" eyes(TODO)
// Check surrounding temperatures and show cold or heat overlays(TODO)
//Slender
checkSlender();
//Sand
addSandSplatFromTick();
}
public void addSandSplatFromTick()
{
float modifier = 0.5F;
boolean sand = false;
boolean onSand = isOnSand(mc.thePlayer);
if (mc.thePlayer.isSprinting())
{
modifier *= 1.5F;
sand = true;
}
if (mc.thePlayer.onGround)
{
modifier *= 1.5F;
sand = true;
}
//if (mc.thePlayer.)
// modifier *= 2.0F;
// sand = true;
if ((sand) && (onSand))
{
int small = (int)(rand.nextInt(3) * modifier); // small/medium/large feed the commented-out addSplat call below
int medium = 0;
int large = 0;
if (rand.nextInt(16) == 1) {
medium = (int)(1.0F * modifier);
}
if (rand.nextInt(32) == 1) {
large = (int)(1.0F * modifier);
}
Base.instance.manager.addVisuals(VisualType.sand, (int) modifier, 100, 100);
//addSplat(small, medium, large, Splat.SplatType.Sand)
}
}
private boolean isOnSand(EntityPlayer entityPlayer)
{
int posX = (int)entityPlayer.posX;
int posY = (int)(entityPlayer.posY - 2.0D);
int posZ = (int)entityPlayer.posZ;
if (mc.theWorld.getBlockState(new BlockPos(posX, posY, posZ)).getBlock() == Blocks.sand && mc.theWorld.getBlockState(new BlockPos(posX, posY+1, posZ)).getBlock() == Blocks.sand) {
return true;
}
return false;
}
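// Intensity falloff used below: with d the distance to the nearest Enderman, the
// modifier is 1 / (d / 3)^2, an inverse-square curve that is ~1.0 at 3 blocks and is
// clamped to 3.5 up close; the overlay intensity is then 0.25 * modifier capped at 0.6.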
private void checkSlender()
{
boolean angryNearby = false;
double modifier = 0.0D;
double d0 = mc.thePlayer.posX;
double d1 = mc.thePlayer.posY;
double d2 = mc.thePlayer.posZ;
AxisAlignedBB box = mc.thePlayer.getEntityBoundingBox();
box = box.expand(16, 16, 16);
EntityEnderman mob = (EntityEnderman) mc.theWorld.findNearestEntityWithinAABB(EntityEnderman.class, box, mc.thePlayer);
if(mob != null)
{
angryNearby = true;
double distModifier = 1.0D / Math.pow(Math.sqrt(Math.pow(d0 - mob.posX, 2) + Math.pow(d1 - mob.posY, 2) + Math.pow(d2 - mob.posZ, 2)) / 3.0D, 2);
if (distModifier > modifier)
{
modifier = distModifier;
if (modifier > 3.5D) {
modifier = 3.5D;
}
}
}
if (angryNearby) {
//Base.instance.manager.slenderOverlay.setActive(true);
// The decompiler constant 0.6000000238418579D is just (double) 0.6F: clamp intensity at 0.6
Base.instance.manager.slenderOverlay.intensity = (float) Math.min(0.25D * modifier, 0.6D);
} else {
Base.instance.manager.slenderOverlay.intensity = 0;
}
}
private void checkRecentPotions() {
EntityPlayer player = Minecraft.getMinecraft().thePlayer;
AxisAlignedBB axisBox = AxisAlignedBB.fromBounds(Math.floor(player.posX) - 4.5D, Math.floor(player.posY) - 5.0D, Math.floor(player.posZ) - 4.5D, Math.floor(player.posX) + 4.5D, Math.floor(player.posY) + 2.0D, Math.floor(player.posZ) + 4.5D);
for (EntityPotion entityPotion : (ArrayList<EntityPotion>)Minecraft.getMinecraft().theWorld.getEntitiesWithinAABB(EntityPotion.class, axisBox)) {
if (entityPotion.isDead) {
double distance = Math.sqrt(Math.pow(Math.floor(player.posX) - entityPotion.posX, 2) + Math.pow(Math.floor(player.posY + player.eyeHeight) - entityPotion.posY, 2) + Math.pow(Math.floor(player.posZ) - entityPotion.posZ, 2));
double modifier = 1.0D / distance;
int bitColor = PotionHelper.getLiquidColor(entityPotion.getPotionDamage(), false);
float r = (bitColor >> 16 & 0xFF) / 255.0F;
float g = (bitColor >> 8 & 0xFF) / 255.0F;
float b = (bitColor & 0xFF) / 255.0F;
float f1 = (float)(modifier * 2.0D);
Base.instance.manager.addVisualsWithShading(VisualType.potion, 1, 30, 60, new Color(r, g, b, f1 <= 1.0F ? f1 : 1.0F));
}
}
}
/*private VisualType getOverlayFromSource(DamageSource ds) {
if (ds.equals(DamageSource.lava)) {
return VisualType.lavaO;
}
if ((ds.equals(DamageSource.cactus)) || (ds.equals(DamageSource.drown)) || (ds.equals(DamageSource.fall)) || (ds.equals(DamageSource.generic)) || (ds.getDamageType().equals("mob")) || (ds.getDamageType().equals("player"))) {
return VisualType.damaged;
}
return null;
}*/
private boolean isSharp(Item item)
{
return sharpList.contains(item);
}
private boolean isBlunt(Item item)
{
return bluntList.contains(item);
}
private boolean isPierce(Item item)
{
return pierceList.contains(item);
}
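// Splash detection sketch for hasSplashed() below: it samples the block at eye level
// for this tick and the previous one, and XORs the "is water" tests. The XOR is true
// only when exactly one of the two samples is water, i.e. on the tick the player's
// eyes enter or leave water, so the blur fires once per splash instead of every tick
// spent submerged.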
private boolean hasSplashed(EntityPlayer entityPlayer) {
int x = (int)Math.floor(entityPlayer.posX);
int y = (int)(entityPlayer.posY + entityPlayer.getDefaultEyeHeight());
int z = (int)Math.floor(entityPlayer.posZ);
int prevX = (int)Math.floor(entityPlayer.prevPosX);
int prevY = (int)(entityPlayer.prevPosY + entityPlayer.getDefaultEyeHeight());
int prevZ = (int)Math.floor(entityPlayer.prevPosZ);
if (Minecraft.getMinecraft().theWorld != null) {
Block currentBlockEyesIn = Minecraft.getMinecraft().theWorld.getBlockState(new BlockPos(x, y, z)).getBlock();
Block pastBlockEyesIn = Minecraft.getMinecraft().theWorld.getBlockState(new BlockPos(prevX, prevY, prevZ)).getBlock();
return (currentBlockEyesIn.equals(Blocks.flowing_water) ^ pastBlockEyesIn.equals(Blocks.flowing_water)) || (currentBlockEyesIn.equals(Blocks.water) ^ pastBlockEyesIn.equals(Blocks.water));
}
return false;
}
}
|
package com.tinkerpop.rexster.config;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.configuration.HierarchicalConfiguration;
import org.apache.log4j.Logger;
import com.tinkerpop.blueprints.pgm.Graph;
import com.tinkerpop.rexster.RexsterApplicationGraph;
import com.tinkerpop.rexster.Tokens;
public class GraphConfigurationContainer {
protected static final Logger logger = Logger.getLogger(GraphConfigurationContainer.class);
private Map<String, RexsterApplicationGraph> graphs = new HashMap<String, RexsterApplicationGraph>();
private List<HierarchicalConfiguration> failedConfigurations = new ArrayList<HierarchicalConfiguration>();
public GraphConfigurationContainer(List<HierarchicalConfiguration> configurations) throws GraphConfigurationException {
if (configurations == null){
throw new GraphConfigurationException("No graph configurations");
}
// create one graph for each <graph> configuration element
Iterator<HierarchicalConfiguration> it = configurations.iterator();
while (it.hasNext()) {
HierarchicalConfiguration graphConfig = it.next();
String graphName = graphConfig.getString(Tokens.REXSTER_GRAPH_NAME, "");
if (graphName.equals("")) {
// all graphs must have a graph name
logger.warn("Could not load graph " + graphName + ". The graph-name element was not set.");
this.failedConfigurations.add(graphConfig);
} else {
// check for duplicate graph configuration
if (!this.graphs.containsKey(graphName)) {
boolean enabled = graphConfig.getBoolean(Tokens.REXSTER_GRAPH_ENABLED, true);
if (enabled) {
// a failure to initialize one graph will not prevent the others
// from being created
try {
Graph graph = getGraphFromConfiguration(graphConfig);
RexsterApplicationGraph rag = new RexsterApplicationGraph(graphName, graph);
rag.loadPackageNames(graphConfig.getString(Tokens.REXSTER_PACKAGES_ALLOWED));
this.graphs.put(rag.getGraphName(), rag);
logger.info("Graph " + graphName + " - " + graph + " loaded");
} catch (GraphConfigurationException gce) {
logger.warn("Could not load graph " + graphName + ". Please check the XML configuration.");
failedConfigurations.add(graphConfig);
} catch (Exception e) {
logger.warn("Could not load graph " + graphName + ".", e);
failedConfigurations.add(graphConfig);
}
} else {
logger.info("Graph " + graphName + " - " + " not enabled and not loaded.");
}
} else {
logger.warn("A graph with the name " + graphName + " was already configured. Please check the XML configuration.");
failedConfigurations.add(graphConfig);
}
}
}
}
public Map<String, RexsterApplicationGraph> getApplicationGraphs(){
return this.graphs;
}
public List<HierarchicalConfiguration> getFailedConfigurations() {
return this.failedConfigurations;
}
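// Configuration shape sketch (element names inferred from the Tokens constants used
// in this class; the values are illustrative, not taken from the source):
//   <graph>
//     <graph-name>mygraph</graph-name>
//     <graph-type>tinkergraph</graph-type>
//     <graph-enabled>true</graph-enabled>
//   </graph>
// getGraphFromConfiguration() below expands the short aliases "neo4j", "orientdb" and
// "tinkergraph" to concrete GraphConfiguration class names and treats any other value
// as a fully-qualified class to load reflectively.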
private Graph getGraphFromConfiguration(HierarchicalConfiguration graphConfiguration) throws GraphConfigurationException {
String graphConfigurationType = graphConfiguration.getString(Tokens.REXSTER_GRAPH_TYPE);
if (graphConfigurationType.equals("neo4j")) {
graphConfigurationType = Neo4jGraphConfiguration.class.getName();
} else if (graphConfigurationType.equals("orientdb")) {
graphConfigurationType = OrientGraphConfiguration.class.getName();
} else if (graphConfigurationType.equals("tinkergraph")) {
graphConfigurationType = TinkerGraphGraphConfiguration.class.getName();
}
Graph graph = null;
Class clazz = null;
GraphConfiguration graphConfigInstance = null;
try {
clazz = Class.forName(graphConfigurationType, true, Thread.currentThread().getContextClassLoader());
graphConfigInstance = (GraphConfiguration) clazz.newInstance();
graph = graphConfigInstance.configureGraphInstance(graphConfiguration);
} catch (Exception ex) {
throw new GraphConfigurationException(
"GraphConfiguration could not be found or otherwise instantiated: " + graphConfigurationType, ex);
}
return graph;
}
}
|
package com.whizzosoftware.hobson.api.variable;
/**
* A class that defines static constants for common device variable names.
*
* @author Dan Noguerol
*/
public class VariableConstants {
public static final String COLOR = "color";
public static final String ENERGY_CONSUMPTION_WATTS = "ecw";
public static final String FIRMWARE_VERSION = "firmwareVersion";
public static final String IMAGE_STATUS_URL = "imageStatusUrl";
public static final String LEVEL = "level";
public static final String ON = "on";
public static final String TARGET_TEMP_C = "targetTempC";
public static final String TARGET_TEMP_F = "targetTempF";
public static final String TARGET_HEAT_TEMP_C = "targetHeatTempC";
public static final String TARGET_HEAT_TEMP_F = "targetHeatTempF";
public static final String TARGET_COOL_TEMP_C = "targetCoolTempC";
public static final String TARGET_COOL_TEMP_F = "targetCoolTempF";
public static final String TEMP_C = "tempC";
public static final String TEMP_F = "tempF";
public static final String TSTAT_FAN_MODE = "tstatFanMode";
public static final String TSTAT_MODE = "tstatMode";
public static final String VIDEO_STATUS_URL = "videoStatusUrl";
}
|
package com.youtube.gaming.mightybot.modules;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Random;
import java.util.Set;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.util.DateTime;
import com.google.api.services.youtube.YouTube;
import com.google.api.services.youtube.model.LiveBroadcast;
import com.google.api.services.youtube.model.LiveChatMessage;
import com.google.api.services.youtube.model.LiveChatMessageSnippet;
import com.google.api.services.youtube.model.LiveChatTextMessageDetails;
import com.google.api.services.youtube.model.Subscription;
import com.google.api.services.youtube.model.SubscriptionListResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.youtube.gaming.mightybot.MightyContext;
import com.youtube.gaming.mightybot.Module;
import com.youtube.gaming.mightybot.exceptions.InvalidConfigurationException;
import com.youtube.gaming.mightybot.properties.MightyProperty;
/**
* Posts a message in chat on behalf of the user when someone subscribes to the channel. Saves the
* names of the new subscribers while the bot is running to avoid announcing the same name several
* times. This list is reset when the bot is stopped.
*/
public class NewSubChatAnnouncer extends Module {
private static final Logger logger = LoggerFactory.getLogger(NewSubChatAnnouncer.class);
private static final Random r = new Random();
private static final String IGNORE_PERSISTENT_BROADCASTS = "ignorePersistentBroadcasts";
private static final String INTERVAL = "interval";
private static final int MINIMUM_INTERVAL = 5;
private static final String MESSAGES_PREFIX = "message";
private Set<String> alreadySubscribedCache = new HashSet<String>();
private List<String> messages;
private DateTime lastPublishedAt;
private ImmutableList<String> liveChatIds = ImmutableList.of();
@Override
public void checkProperties() throws InvalidConfigurationException {
getProperties().throwIfNullOrEmpty(INTERVAL, "Interval can't be empty");
if (getProperties().getInt(INTERVAL) < MINIMUM_INTERVAL) {
throw new InvalidConfigurationException(getProperties().addPrefix(INTERVAL),
"Interval can't be less than 5s");
}
getProperties().throwIfNoneByPrefix(MESSAGES_PREFIX);
List<String> messages = getProperties().getByPrefix(MESSAGES_PREFIX);
for (String message : messages) {
int firstMatch = message.indexOf("{name}");
if (firstMatch == -1 || firstMatch != message.lastIndexOf("{name}")) {
throw new InvalidConfigurationException(String.format(
"There must be one and only one occurrence of '{name}' in your message: '%s'",
message));
}
}
}
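// Example of the rule enforced above (values illustrative; keys share the "message"
// prefix that getByPrefix() scans for):
//   message1=Welcome aboard, {name}!        -> valid: exactly one {name}
//   message2=Thanks {name} and {name}!      -> rejected: {name} appears twice
//   message3=Thanks for subscribing!        -> rejected: no {name} placeholder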
@Override
public void init() {
messages = getProperties().getByPrefix(MESSAGES_PREFIX);
}
@Override
public long getIntervalSecond() {
return getProperties().getInt(INTERVAL);
}
@Override
@Nullable
public Set<String> getRequiredOauthScopes() {
return ImmutableSet.of("https:
}
@Override
public void run(MightyContext context) throws Exception {
List<String> newSubscribers = getNewSubscribers(context);
if (!newSubscribers.isEmpty()) {
updateActiveLiveChatIds(context);
if (liveChatIds.isEmpty()) {
if (shouldIgnorePersistentBroadcasts()) {
logger.info(
"No active chat where to announce new subs, are you live streaming to a persistent "
+ "broadcast?");
} else {
logger.info(
"No active chat where to announce new subs");
}
return;
}
// For each new subscriber, post to all chats
for (String newSubscriber : newSubscribers) {
if (!alreadySubscribedCache.contains(newSubscriber)) {
for (String liveChatId : liveChatIds) {
postNewSubscriberMessage(liveChatId, newSubscriber, context);
}
alreadySubscribedCache.add(newSubscriber);
}
}
}
}
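// getNewSubscribers() keeps a high-water mark rather than a full history: on the first
// poll it only records the publishedAt of the most recent subscription, and on later
// polls it walks the page oldest-to-newest, reporting every entry newer than the mark
// and advancing the mark as it goes. Names are still deduplicated through
// alreadySubscribedCache in run(), since the feed can resurface recent subscribers.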
private List<String> getNewSubscribers(MightyContext context) throws IOException {
YouTube.Subscriptions.List request =
context.youTube().subscriptions().list("snippet,subscriberSnippet");
request.setKey(getProperties().get(MightyProperty.API_KEY));
request.setMyRecentSubscribers(true);
if (lastPublishedAt == null) {
request.setMaxResults(2L);
} else {
request.setMaxResults(20L);
}
SubscriptionListResponse response = request.execute();
List<String> newSubscribers = new ArrayList<>();
if (lastPublishedAt == null) {
if (response.getPageInfo().getTotalResults() > 1) {
lastPublishedAt = response.getItems().get(0).getSnippet().getPublishedAt();
}
} else if (response.getItems().size() > 0) {
ListIterator<Subscription> iterator =
response.getItems().listIterator(response.getItems().size());
while (iterator.hasPrevious()) {
Subscription subscription = iterator.previous();
if (subscription.getSnippet().getPublishedAt().getValue() > lastPublishedAt.getValue()) {
newSubscribers.add(subscription.getSubscriberSnippet().getTitle());
lastPublishedAt = subscription.getSnippet().getPublishedAt();
}
}
}
return newSubscribers;
}
private void updateActiveLiveChatIds(MightyContext context) {
try {
List<String> activeLiveChatIds = new ArrayList<>();
if (!shouldIgnorePersistentBroadcasts()) {
// Persistent broadcasts
List<LiveBroadcast> activePermanentBroadcasts =
context.youTubeHelper().getActivePersistentBroadcasts();
activeLiveChatIds.addAll(getLiveChatIds(activePermanentBroadcasts));
}
// Active broadcasts
List<LiveBroadcast> activeBroadcasts = context.youTubeHelper().getActiveBroadcasts();
activeLiveChatIds.addAll(getLiveChatIds(activeBroadcasts));
liveChatIds = ImmutableList.copyOf(activeLiveChatIds);
} catch (IOException e) {
logger.error("Could not refresh live chat ids", e);
}
}
private List<String> getLiveChatIds(List<LiveBroadcast> liveBroadcasts) {
List<String> recordingLiveChatIds = new ArrayList<>();
for (LiveBroadcast liveBroadcast : liveBroadcasts) {
recordingLiveChatIds.add(liveBroadcast.getSnippet().getLiveChatId());
}
return recordingLiveChatIds;
}
private void postNewSubscriberMessage(String liveChatId, String subscriberName,
MightyContext context) throws IOException {
String message = messages.get(r.nextInt(messages.size()));
LiveChatMessage content = new LiveChatMessage()
.setSnippet(new LiveChatMessageSnippet()
.setLiveChatId(liveChatId)
.setType("textMessageEvent")
.setTextMessageDetails(new LiveChatTextMessageDetails()
.setMessageText(message.replace("{name}", subscriberName))));
YouTube.LiveChatMessages.Insert request =
context.youTube().liveChatMessages().insert("snippet", content);
request.execute();
}
private boolean shouldIgnorePersistentBroadcasts() {
return "true".equalsIgnoreCase(getProperties().get(IGNORE_PERSISTENT_BROADCASTS));
}
}
|
package de.is24.deadcode4j.analyzer;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.Sets;
import de.is24.deadcode4j.AnalysisContext;
import de.is24.deadcode4j.analyzer.javassist.ClassPoolAccessor;
import de.is24.javaparser.FixedVoidVisitorAdapter;
import japa.parser.ast.CompilationUnit;
import japa.parser.ast.ImportDeclaration;
import japa.parser.ast.body.*;
import japa.parser.ast.expr.*;
import japa.parser.ast.stmt.*;
import japa.parser.ast.type.ClassOrInterfaceType;
import javassist.CtClass;
import javassist.CtField;
import javassist.Modifier;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Deque;
import java.util.HashSet;
import java.util.Set;
import static com.google.common.base.Optional.absent;
import static com.google.common.base.Optional.of;
import static com.google.common.base.Predicates.and;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.*;
import static com.google.common.collect.Lists.newLinkedList;
import static com.google.common.collect.Sets.newHashSet;
import static de.is24.deadcode4j.Utils.emptyIfNull;
import static de.is24.deadcode4j.analyzer.javassist.ClassPoolAccessor.classPoolAccessorFor;
import static de.is24.deadcode4j.analyzer.javassist.CtClasses.*;
import static de.is24.javaparser.ImportDeclarations.isAsterisk;
import static de.is24.javaparser.ImportDeclarations.isStatic;
import static de.is24.javaparser.Nodes.getTypeName;
public class ReferenceToConstantsAnalyzer extends JavaFileAnalyzer {
@Nonnull
private static String getFirstElement(@Nonnull FieldAccessExpr fieldAccessExpr) {
return getFirstNode(fieldAccessExpr).getName();
}
@Nonnull
private static NameExpr getFirstNode(@Nonnull FieldAccessExpr fieldAccessExpr) {
Expression scope = fieldAccessExpr.getScope();
if (NameExpr.class.isInstance(scope)) {
return NameExpr.class.cast(scope);
} else if (FieldAccessExpr.class.isInstance(scope)) {
return getFirstNode(FieldAccessExpr.class.cast(scope));
}
throw new RuntimeException("Should not have reached this point!");
}
private static boolean isRegularFieldAccessExpr(@Nonnull FieldAccessExpr fieldAccessExpr) {
Expression scope = fieldAccessExpr.getScope();
if (NameExpr.class.isInstance(scope)) {
return true;
} else if (FieldAccessExpr.class.isInstance(scope)) {
return isRegularFieldAccessExpr(FieldAccessExpr.class.cast(scope));
}
return false;
}
/**
* This is not entirely correct: while we want to filter calls like
* <code>org.slf4j.LoggerFactory.getLogger("foo")</code>, we want to analyze
* <code>foo.bar.FOO.substring(1)</code>.
*/
private static boolean isScopeOfAMethodCall(@Nonnull Expression expression) {
return MethodCallExpr.class.isInstance(expression.getParentNode())
&& expression == MethodCallExpr.class.cast(expression.getParentNode()).getScope();
}
private static boolean isScopeOfThisExpression(@Nonnull Expression expression) {
return ThisExpr.class.isInstance(expression.getParentNode());
}
private static boolean isTargetOfAnAssignment(@Nonnull Expression expression) {
return AssignExpr.class.isInstance(expression.getParentNode())
&& expression == AssignExpr.class.cast(expression.getParentNode()).getTarget();
}
private static Predicate<? super ImportDeclaration> refersTo(final String name) {
return new Predicate<ImportDeclaration>() {
@Override
public boolean apply(@Nullable ImportDeclaration input) {
return input != null && input.getName().getName().equals(name);
}
};
}
private static Function<? super ImportDeclaration, ? extends String> toImportedType() {
return new Function<ImportDeclaration, String>() {
@Nullable
@Override
public String apply(@Nullable ImportDeclaration input) {
if (input == null)
return null;
NameExpr name = input.getName();
if (input.isStatic() && !input.isAsterisk()) {
name = QualifiedNameExpr.class.cast(name).getQualifier();
}
return name.toString();
}
};
}
private static boolean isConstant(CtField ctField) {
return Modifier.isStatic(ctField.getModifiers()) && Modifier.isFinal(ctField.getModifiers());
}
@Override
protected void analyzeCompilationUnit(@Nonnull final AnalysisContext analysisContext,
@Nonnull final CompilationUnit compilationUnit) {
compilationUnit.accept(new LocalVariableRecordingVisitor<Void>() {
private final ClassPoolAccessor classPoolAccessor = classPoolAccessorFor(analysisContext);
@Override
public void visit(FieldAccessExpr n, Void arg) {
if (isTargetOfAnAssignment(n)
|| isScopeOfAMethodCall(n)
|| isScopeOfThisExpression(n)) {
return;
}
if (!isRegularFieldAccessExpr(n)) {
super.visit(n, arg);
return;
}
if (aLocalVariableExists(getFirstElement(n))) {
return;
}
resolveFieldReference(n);
}
@Override
public void visit(NameExpr n, Void arg) {
if (isTargetOfAnAssignment(n)
|| isScopeOfThisExpression(n)
|| aLocalVariableExists(n.getName())) {
return;
}
resolveNameReference(n);
}
private Optional<String> resolveClass(String qualifier) {
return this.classPoolAccessor.resolveClass(qualifier);
}
private void resolveFieldReference(FieldAccessExpr fieldAccessExpr) {
Optional<String> resolvedClass = resolveType(analysisContext, qualifierFor(fieldAccessExpr));
String referencingType = getTypeName(fieldAccessExpr);
if (resolvedClass.isPresent()) {
analysisContext.addDependencies(referencingType, resolvedClass.get());
} else {
logger.debug("Could not resolve reference [{}] found within [{}].", fieldAccessExpr, referencingType);
}
}
private Qualifier qualifierFor(FieldAccessExpr fieldAccessExpr) {
Expression scope = fieldAccessExpr.getScope();
return NameExpr.class.isInstance(scope)
? new NameExprQualifier(NameExpr.class.cast(scope))
: new FieldAccessExprQualifier(FieldAccessExpr.class.cast(scope));
}
private void resolveNameReference(NameExpr reference) {
if (refersToInheritedField(reference)
|| refersToStaticImport(reference)
|| refersToAsteriskStaticImport(reference)) {
return;
}
logger.debug("Could not resolve name reference [{}] found within [{}].",
reference, getTypeName(reference));
}
private boolean refersToInheritedField(NameExpr reference) {
CtClass referencingClazz = getCtClass(classPoolAccessor.getClassPool(), getTypeName(reference));
if (referencingClazz == null) {
return false;
}
for (CtClass declaringClazz : getDeclaringClassesOf(referencingClazz)) {
if (refersToInheritedField(referencingClazz, declaringClazz, reference)) {
return true;
}
}
return false;
}
private boolean refersToInheritedField(@Nonnull final CtClass referencingClazz,
@Nullable CtClass clazz,
@Nonnull final NameExpr reference) {
if (clazz == null || isJavaLangObject(clazz)) {
return false;
}
for (CtField ctField : clazz.getDeclaredFields()) {
if (ctField.getName().equals(reference.getName()) && ctField.visibleFrom(referencingClazz)) {
if (isConstant(ctField)) {
analysisContext.addDependencies(referencingClazz.getName(), clazz.getName());
}
return true;
}
}
if (refersToInheritedField(referencingClazz, getSuperclassOf(clazz), reference)) {
return true;
}
for (CtClass interfaceClazz : getInterfacesOf(clazz)) {
if (refersToInheritedField(referencingClazz, interfaceClazz, reference)) {
return true;
}
}
return false;
}
private boolean refersToStaticImport(NameExpr reference) {
String referenceName = reference.getName();
String staticImport = getStaticImport(referenceName);
if (staticImport == null) {
return false;
}
String typeName = getTypeName(reference);
Optional<String> resolvedClass = resolveClass(staticImport);
if (resolvedClass.isPresent()) {
analysisContext.addDependencies(typeName, resolvedClass.get());
} else {
logger.warn("Could not resolve static import [{}.{}] found within [{}]!",
staticImport, referenceName, typeName);
}
return true;
}
private boolean refersToAsteriskStaticImport(NameExpr reference) {
CtClass referencingClazz = getCtClass(classPoolAccessor.getClassPool(), getTypeName(reference));
if (referencingClazz == null) {
return false;
}
for (String asteriskImport : getStaticAsteriskImports()) {
Optional<String> resolvedClass = resolveClass(asteriskImport);
if (!resolvedClass.isPresent()) {
String typeName = getTypeName(reference);
logger.warn("Could not resolve static import [{}.*] found within [{}]!",
asteriskImport, typeName);
continue;
}
CtClass potentialTarget = getCtClass(classPoolAccessor.getClassPool(), resolvedClass.get());
if (refersToInheritedField(referencingClazz, potentialTarget, reference)) {
return true;
}
}
return false;
}
@Nullable
@SuppressWarnings("unchecked")
private String getStaticImport(String referenceName) {
return getOnlyElement(transform(filter(emptyIfNull(compilationUnit.getImports()),
and(refersTo(referenceName), not(isAsterisk()), isStatic())), toImportedType()), null);
}
@Nonnull
private Iterable<String> getStaticAsteriskImports() {
return transform(filter(emptyIfNull(compilationUnit.getImports()),
and(isAsterisk(), isStatic())), toImportedType());
}
@Override
public void visit(ClassOrInterfaceType n, Void arg) {
// performance
}
@Override
public void visit(CompilationUnit n, Void arg) {
// performance
for (final TypeDeclaration typeDeclaration : emptyIfNull(n.getTypes())) {
typeDeclaration.accept(this, arg);
}
}
@Override
public void visit(MarkerAnnotationExpr n, Void arg) {
// performance
}
@Override
public void visit(NormalAnnotationExpr n, Void arg) {
// performance
for (final MemberValuePair m : emptyIfNull(n.getPairs())) {
m.accept(this, arg);
}
}
@Override
public void visit(SingleMemberAnnotationExpr n, Void arg) {
// performance
n.getMemberValue().accept(this, arg);
}
}, null);
}
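// LocalVariableRecordingVisitor maintains a stack of name scopes: every construct that
// introduces names (type body, constructor, method, catch clause, block, for loops,
// try-with-resources) pushes a set on entry and pops it in a finally block, and
// aLocalVariableExists() searches the concatenated stack. The visitor above uses it to
// skip NameExpr/FieldAccessExpr nodes that are shadowed by a local variable, parameter
// or field rather than referring to a constant.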
private static class LocalVariableRecordingVisitor<A> extends FixedVoidVisitorAdapter<A> {
@Nonnull
private final Deque<Set<String>> localVariables = newLinkedList();
@Override
public void visit(ClassOrInterfaceDeclaration n, A arg) {
HashSet<String> fields = newHashSet();
this.localVariables.addLast(fields);
try {
addFieldVariables(n, fields);
super.visit(n, arg);
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(EnumDeclaration n, A arg) {
HashSet<String> fieldsAndEnums = newHashSet();
this.localVariables.addLast(fieldsAndEnums);
try {
for (EnumConstantDeclaration enumConstantDeclaration : emptyIfNull(n.getEntries())) {
fieldsAndEnums.add(enumConstantDeclaration.getName());
}
addFieldVariables(n, fieldsAndEnums);
super.visit(n, arg);
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(ObjectCreationExpr n, A arg) {
HashSet<String> fields = newHashSet();
this.localVariables.addLast(fields);
try {
addFieldVariables(n.getAnonymousClassBody(), fields);
super.visit(n, arg);
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(ConstructorDeclaration n, A arg) {
HashSet<String> blockVariables = newHashSet();
this.localVariables.addLast(blockVariables);
try {
for (Parameter parameter : emptyIfNull(n.getParameters())) {
blockVariables.add(parameter.getId().getName());
}
for (AnnotationExpr annotationExpr : emptyIfNull(n.getAnnotations())) {
annotationExpr.accept(this, arg);
}
BlockStmt body = n.getBlock();
if (body != null) {
visit(body, arg);
}
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(MethodDeclaration n, A arg) {
HashSet<String> blockVariables = newHashSet();
this.localVariables.addLast(blockVariables);
try {
for (Parameter parameter : emptyIfNull(n.getParameters())) {
blockVariables.add(parameter.getId().getName());
}
for (AnnotationExpr annotationExpr : emptyIfNull(n.getAnnotations())) {
annotationExpr.accept(this, arg);
}
BlockStmt body = n.getBody();
if (body != null) {
visit(body, arg);
}
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(CatchClause n, A arg) {
MultiTypeParameter multiTypeParameter = n.getExcept();
HashSet<String> blockVariables = newHashSet();
this.localVariables.addLast(blockVariables);
try {
blockVariables.add(multiTypeParameter.getId().getName());
for (AnnotationExpr annotationExpr : emptyIfNull(multiTypeParameter.getAnnotations())) {
annotationExpr.accept(this, arg);
}
BlockStmt body = n.getCatchBlock();
if (body != null) {
visit(body, arg);
}
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(BlockStmt n, A arg) {
this.localVariables.addLast(Sets.<String>newHashSet());
try {
super.visit(n, arg);
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(ForeachStmt n, A arg) {
HashSet<String> blockVariables = newHashSet();
this.localVariables.addLast(blockVariables);
try {
for (VariableDeclarator variableDeclarator : emptyIfNull(n.getVariable().getVars())) {
blockVariables.add(variableDeclarator.getId().getName());
}
n.getIterable().accept(this, arg);
n.getBody().accept(this, arg);
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(ForStmt n, A arg) {
this.localVariables.addLast(Sets.<String>newHashSet());
try {
super.visit(n, arg);
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(TryStmt n, A arg) {
HashSet<String> blockVariables = newHashSet();
this.localVariables.addLast(blockVariables);
try {
for (VariableDeclarationExpr variableDeclarationExpr : emptyIfNull(n.getResources())) {
for (VariableDeclarator variableDeclarator : variableDeclarationExpr.getVars()) {
blockVariables.add(variableDeclarator.getId().getName());
}
}
super.visit(n, arg);
} finally {
this.localVariables.removeLast();
}
}
@Override
public void visit(VariableDeclarationExpr n, A arg) {
for (AnnotationExpr annotationExpr : emptyIfNull(n.getAnnotations())) {
annotationExpr.accept(this, arg);
}
n.getType().accept(this, arg);
Set<String> blockVariables = this.localVariables.getLast();
for (VariableDeclarator variableDeclarator : n.getVars()) {
Expression expr = variableDeclarator.getInit();
if (expr != null) {
expr.accept(this, arg);
}
blockVariables.add(variableDeclarator.getId().getName());
}
}
protected final boolean aLocalVariableExists(@Nonnull String name) {
return contains(concat(this.localVariables), name);
}
private void addFieldVariables(@Nonnull TypeDeclaration typeDeclaration, @Nonnull Set<String> variables) {
addFieldVariables(typeDeclaration.getMembers(), variables);
}
private void addFieldVariables(@Nullable Iterable<? extends BodyDeclaration> declarations, @Nonnull Set<String> variables) {
for (BodyDeclaration bodyDeclaration : emptyIfNull(declarations)) {
if (FieldDeclaration.class.isInstance(bodyDeclaration)) {
for (VariableDeclarator variableDeclarator : FieldDeclaration.class.cast(bodyDeclaration).getVariables()) {
variables.add(variableDeclarator.getId().getName());
}
}
}
}
}
private static class NameExprQualifier extends Qualifier<NameExpr> {
public NameExprQualifier(NameExpr nameExpr) {
super(nameExpr);
}
public NameExprQualifier(NameExpr nameExpr, FieldAccessExprQualifier parent) {
super(nameExpr, parent);
}
@Nullable
@Override
protected Qualifier getScopeQualifier(@Nonnull NameExpr reference) {
return null;
}
@Nonnull
@Override
protected String getName(@Nonnull NameExpr reference) {
return reference.getName();
}
@Nonnull
@Override
protected String getFullQualifier(@Nonnull NameExpr reference) {
return reference.getName();
}
@Override
public boolean allowsPartialResolving() {
return true;
}
@Nonnull
@Override
public Optional<String> examineInheritedType(@Nonnull CtClass referencingClazz,
@Nonnull CtClass inheritedClazz) {
for (CtField ctField : inheritedClazz.getDeclaredFields()) {
if (ctField.getName().equals(getName()) && ctField.visibleFrom(referencingClazz)) {
if (isConstant(ctField)) {
return of(inheritedClazz.getName());
}
// we want no reference to be established, so we refer to ourselves
return of(referencingClazz.getName());
}
}
return absent();
}
}
private static class FieldAccessExprQualifier extends Qualifier<FieldAccessExpr> {
public FieldAccessExprQualifier(FieldAccessExpr fieldAccessExpr) {
super(fieldAccessExpr);
}
private FieldAccessExprQualifier(FieldAccessExpr fieldAccessExpr, FieldAccessExprQualifier parent) {
super(fieldAccessExpr, parent);
}
@Nullable
@Override
protected Qualifier<?> getScopeQualifier(@Nonnull FieldAccessExpr reference) {
Expression scope = reference.getScope();
return NameExpr.class.isInstance(scope)
? new NameExprQualifier(NameExpr.class.cast(scope), this)
: new FieldAccessExprQualifier(FieldAccessExpr.class.cast(scope), this);
}
@Nonnull
@Override
protected String getName(@Nonnull FieldAccessExpr reference) {
return reference.getField();
}
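// getFullQualifier() below rebuilds the dotted name from a nested FieldAccessExpr: for
// a.b.C.D the AST nests as (((a).b).C).D, so the loop walks the scopes outward-in,
// prepending each segment until it reaches the NameExpr root, yielding "a.b.C.D".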
@Nonnull
@Override
protected String getFullQualifier(@Nonnull FieldAccessExpr reference) {
StringBuilder buffy = new StringBuilder(reference.getField());
for (FieldAccessExpr loop = reference; loop != null; ) {
Expression scope = loop.getScope();
final String qualifier;
if (NameExpr.class.isInstance(scope)) {
loop = null;
qualifier = NameExpr.class.cast(scope).getName();
} else {
loop = FieldAccessExpr.class.cast(scope);
qualifier = loop.getField();
}
buffy.insert(0, '.');
buffy.insert(0, qualifier);
}
return buffy.toString();
}
@Override
public boolean allowsPartialResolving() {
return true;
}
}
}
|
package eu.bellettini.eventstore4j.write;
import java.sql.*;
import java.util.UUID;
public class PostgresEventRepository implements EventRepository {
private final Connection connection;
public PostgresEventRepository(Connection connection) {
this.connection = connection;
}
@Override
public long count() {
try (
Statement stmt = connection.createStatement();
ResultSet result = stmt.executeQuery("SELECT COUNT(*) FROM events")
) {
result.next();
return result.getLong(1);
} catch (SQLException e) {
throw new EventStoreException(e);
}
}
@Override
public void clean() {
// Use try-with-resources so the statement is closed even if TRUNCATE fails
try (Statement stmt = connection.createStatement()) {
stmt.execute("TRUNCATE TABLE events");
} catch (SQLException e) {
throw new EventStoreException(e);
}
}
@Override
public void store(EventDTO... events) {
if (events.length == 0) {
return;
}
ensureSubsequentAggregateVersions(events);
final String sql = "INSERT INTO events " +
"(event_id, aggregate_id, aggregate_version, created_at, source," +
"type, type_version, payload, received_at) " +
"VALUES (?::uuid,?,?,?,?,?,?,?::json,?)";
try (PreparedStatement stmt = connection.prepareStatement(sql)) {
for (EventDTO event: events) {
int i = 0;
stmt.setString(++i, event.getId().toString());
stmt.setString(++i, event.getAggregateId());
stmt.setLong(++i, event.getAggregateVersion());
stmt.setTimestamp(++i, Timestamp.from(event.getCreatedAt()));
stmt.setString(++i, event.getSource());
stmt.setString(++i, event.getType());
stmt.setInt(++i, event.getTypeVersion());
stmt.setString(++i, event.getPayload().toString());
stmt.setTimestamp(++i, Timestamp.from(event.getReceivedAt()));
stmt.execute();
}
} catch (SQLException e) {
throw new EventStoreException(e);
}
}
@Override
public EventDTO findOneById(UUID id) {
final String sql = "SELECT " +
"event_id::text, aggregate_id, aggregate_version, created_at, source, type, type_version, " +
"payload::text, received_at " +
"FROM events WHERE event_id = ?::uuid";
try (PreparedStatement stmt = connection.prepareStatement(sql)) {
stmt.setString(1, id.toString());
ResultSet resultSet = stmt.executeQuery();
resultSet.next();
return dtoFromResultSet(resultSet);
} catch (SQLException e) {
throw new EventStoreException(e);
}
}
private EventDTO dtoFromResultSet(ResultSet resultSet) throws SQLException {
return new EventDTO(
UUID.fromString(resultSet.getString(1)),
resultSet.getString(2),
resultSet.getLong(3),
resultSet.getTimestamp(4).toInstant(),
resultSet.getString(5),
resultSet.getString(6),
resultSet.getInt(7),
resultSet.getString(8),
resultSet.getTimestamp(9).toInstant()
);
}
private void ensureSubsequentAggregateVersions(EventDTO... events)
{
long start = events[0].getAggregateVersion();
for (int i = 0; i < events.length; ++i) {
if (events[i].getAggregateVersion() != i + start) {
                throw new ConsistencyException("Non-subsequent aggregate versions");
}
}
}
}
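/*
 * Illustrative usage sketch (not part of the original file). It assumes a reachable
 * PostgreSQL instance with an `events` table matching the columns used above; the
 * JDBC URL, the credentials and the pre-built EventDTO are placeholders.
 *
 *     try (Connection connection = DriverManager.getConnection(
 *             "jdbc:postgresql://localhost:5432/eventstore", "user", "secret")) {
 *         EventRepository repository = new PostgresEventRepository(connection);
 *         long before = repository.count();
 *         repository.store(event);                        // a pre-built EventDTO
 *         EventDTO loaded = repository.findOneById(event.getId());
 *         assert repository.count() == before + 1;
 *     }
 */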
|
package io.github.lukehutch.fastclasspathscanner.utils;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.Paths;
public class FileUtils {
/**
* Get current dir (without resolving symlinks), and normalize path by calling FastPathResolver.resolve().
*/
public static String getCurrDirPathStr() {
String currDirPathStr = "";
try {
// The result is moved to currDirPathStr after each step, so we can provide fine-grained debug info and
// a best guess at the path, if the current dir doesn't exist (#109), or something goes wrong while
// trying to get the current dir path.
Path currDirPath = Paths.get("").toAbsolutePath();
currDirPathStr = currDirPath.toString();
currDirPath = currDirPath.normalize();
currDirPathStr = currDirPath.toString();
currDirPath = currDirPath.toRealPath(LinkOption.NOFOLLOW_LINKS);
currDirPathStr = currDirPath.toString();
currDirPathStr = FastPathResolver.resolve(currDirPathStr);
} catch (final IOException e) {
throw new RuntimeException("Could not resolve current directory: " + currDirPathStr, e);
}
return currDirPathStr;
}
/** Read all the bytes in an InputStream. */
public static byte[] readAllBytes(final InputStream inputStream, final long fileSize, final LogNode log)
throws IOException {
// Java arrays can only currently have 32-bit indices
if (fileSize > Integer.MAX_VALUE
// ZipEntry#getSize() can wrap around to negative for files larger than 2GB
|| (fileSize < 0
// ZipEntry#getSize() can return -1 for unknown size
&& fileSize != -1L)) {
throw new IOException("File larger that 2GB, cannot read contents into a Java array");
}
// We can't always trust the fileSize, unfortunately, so we just use it as a hint
final ByteArrayOutputStream baos = new ByteArrayOutputStream(fileSize <= 0 ? 16384 : (int) fileSize);
// N.B. there is a better solution for this in Java 9, byte[] bytes = inputStream.readAllBytes()
final byte[] buf = new byte[4096];
int totBytesRead = 0;
for (int bytesRead; (bytesRead = inputStream.read(buf)) != -1;) {
baos.write(buf, 0, bytesRead);
totBytesRead += bytesRead;
}
if (totBytesRead != fileSize) {
if (log != null) {
log.log("File length expected to be " + fileSize + " bytes, but read " + totBytesRead + " bytes");
}
}
return baos.toByteArray();
}
/** Returns true if path has a .class extension, ignoring case. */
public static boolean isClassfile(final String path) {
final int len = path.length();
return len > 6 && path.regionMatches(true, len - 6, ".class", 0, 6);
}
public static class ByteBufferBackedInputStream extends InputStream {
private final ByteBuffer buf;
public ByteBufferBackedInputStream(final ByteBuffer buf) {
this.buf = buf;
}
@Override
public int read() throws IOException {
if (!buf.hasRemaining()) {
return -1;
}
return buf.get() & 0xFF;
}
@Override
public int read(final byte[] bytes, final int off, int len) throws IOException {
if (!buf.hasRemaining()) {
return -1;
}
int minLen = Math.min(len, buf.remaining());
buf.get(bytes, off, minLen);
return minLen;
}
}
}
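/*
 * Illustrative usage sketch (not part of the original file): wraps an in-memory
 * ByteBuffer as an InputStream and drains it with readAllBytes(). Passing -1L marks
 * the length as unknown, and a null LogNode disables the size-mismatch warning.
 *
 *     ByteBuffer buf = ByteBuffer.wrap(new byte[] { 1, 2, 3 });
 *     InputStream in = new FileUtils.ByteBufferBackedInputStream(buf);
 *     byte[] bytes = FileUtils.readAllBytes(in, -1L, null);   // bytes.length == 3
 */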
|
package io.github.mzmine.gui.chartbasics.gui.javafx;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jfree.chart.ChartPanel;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.axis.ValueAxis;
import org.jfree.chart.fx.ChartViewer;
import org.jfree.chart.fx.interaction.MouseHandlerFX;
import org.jfree.chart.plot.CombinedDomainXYPlot;
import org.jfree.chart.plot.CombinedRangeXYPlot;
import org.jfree.chart.plot.Plot;
import org.jfree.chart.plot.XYPlot;
import org.jfree.data.Range;
import org.jfree.data.RangeType;
import org.jfree.data.xy.XYDataset;
import org.jfree.data.xy.XYZDataset;
import io.github.mzmine.gui.chartbasics.gestures.ChartGestureHandler;
import io.github.mzmine.gui.chartbasics.gestures.interf.GestureHandlerFactory;
import io.github.mzmine.gui.chartbasics.graphicsexport.GraphicsExportModule;
import io.github.mzmine.gui.chartbasics.graphicsexport.GraphicsExportParameters;
import io.github.mzmine.gui.chartbasics.gui.javafx.menu.MenuExportToClipboard;
import io.github.mzmine.gui.chartbasics.gui.javafx.menu.MenuExportToExcel;
import io.github.mzmine.gui.chartbasics.gui.swing.ChartGestureMouseAdapter;
import io.github.mzmine.gui.chartbasics.gui.wrapper.ChartViewWrapper;
import io.github.mzmine.gui.chartbasics.listener.AxesRangeChangedListener;
import io.github.mzmine.gui.chartbasics.listener.AxisRangeChangedListener;
import io.github.mzmine.gui.chartbasics.listener.ZoomHistory;
import io.github.mzmine.main.MZmineCore;
import io.github.mzmine.util.SaveImage;
import io.github.mzmine.util.SaveImage.FileType;
/*
import io.github.mzmine.util.dialogs.AxesSetupDialog;
*/
import io.github.mzmine.util.io.XSSFExcelWriterReader;
import javafx.embed.swing.SwingFXUtils;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.print.PrinterJob;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuItem;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.Clipboard;
import javafx.scene.input.ClipboardContent;
import javafx.stage.FileChooser;
import javafx.stage.FileChooser.ExtensionFilter;
import javafx.stage.Stage;
/**
 * This is an extended version of the ChartViewer (JFreeChartFX). It adds ChartGestures (with a
 * set of standard chart gestures), ZoomHistory, AxesRangeChangedListener, data export and
 * graphics export.
*
* @author Robin Schmid (robinschmid@uni-muenster.de)
*/
public class EChartViewer extends ChartViewer {
private Logger logger = Logger.getLogger(this.getClass().getName());
// one history for each plot/subplot
protected ZoomHistory zoomHistory;
protected List<AxesRangeChangedListener> axesRangeListener;
protected boolean isMouseZoomable = true;
protected boolean stickyZeroForRangeAxis = false;
protected boolean standardGestures = true;
// only for XYData (not for categoryPlots)
protected boolean addZoomHistory = true;
private ChartGestureMouseAdapterFX mouseAdapter;
private Menu exportMenu;
/**
* Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export<br>
* stickyZeroForRangeAxis = false <br>
* Graphics and data export menu are added
*
*/
public EChartViewer() {
this(null, true, true, true, true, false);
}
/**
* Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export<br>
* stickyZeroForRangeAxis = false <br>
* Graphics and data export menu are added
*
* @param chart
*/
public EChartViewer(JFreeChart chart) {
this(chart, true, true, true, true, false);
}
/**
* Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export<br>
* stickyZeroForRangeAxis = false
*
* @param chart
* @param graphicsExportMenu adds graphics export menu
* @param standardGestures adds the standard ChartGestureHandlers
* @param dataExportMenu adds data export menu
*/
public EChartViewer(JFreeChart chart, boolean graphicsExportMenu, boolean dataExportMenu,
boolean standardGestures) {
this(chart, graphicsExportMenu, dataExportMenu, standardGestures, false);
}
/**
* Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export
*
* @param chart
* @param graphicsExportMenu adds graphics export menu
* @param dataExportMenu adds data export menu
* @param standardGestures adds the standard ChartGestureHandlers
* @param stickyZeroForRangeAxis
*/
public EChartViewer(JFreeChart chart, boolean graphicsExportMenu, boolean dataExportMenu,
boolean standardGestures, boolean stickyZeroForRangeAxis) {
this(chart, graphicsExportMenu, dataExportMenu, standardGestures, true, stickyZeroForRangeAxis);
}
/**
* Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export
*
* @param chart
* @param graphicsExportMenu adds graphics export menu
* @param dataExportMenu adds data export menu
* @param standardGestures adds the standard ChartGestureHandlers
* @param stickyZeroForRangeAxis
*/
public EChartViewer(JFreeChart chart, boolean graphicsExportMenu, boolean dataExportMenu,
boolean standardGestures, boolean addZoomHistory, boolean stickyZeroForRangeAxis) {
super(null);
this.stickyZeroForRangeAxis = stickyZeroForRangeAxis;
this.standardGestures = standardGestures;
this.addZoomHistory = addZoomHistory;
// Add chart and configure
if (chart != null)
setChart(chart);
exportMenu = (Menu) getContextMenu().getItems().get(0);
// Add Export to Excel and graphics export menu
if (graphicsExportMenu || dataExportMenu) {
addExportMenu(graphicsExportMenu, dataExportMenu);
}
addMenuItem(getContextMenu(), "Reset Zoom", event -> {
ValueAxis xAxis = getChart().getXYPlot().getDomainAxis();
      ValueAxis yAxis = getChart().getXYPlot().getRangeAxis();
xAxis.setAutoRange(true);
yAxis.setAutoRange(true);
});
// TODO:
/*
addMenuItem(getContextMenu(), "Set Range on Axis", event -> {
AxesSetupDialog dialog =
new AxesSetupDialog((Stage) this.getScene().getWindow(), chart.getXYPlot());
dialog.show();
});
*/
addMenuItem(exportMenu, "EPS..", event -> handleSave("EMF Image", "EMF", ".emf", FileType.EMF));
addMenuItem(exportMenu, "EMF..", event -> handleSave("EPS Image", "EPS", ".eps", FileType.EPS));
addMenuItem(getContextMenu(), "Copy chart to clipboard", event -> {
BufferedImage bufferedImage =
getChart().createBufferedImage((int) this.getWidth(), (int) this.getHeight());
Image image = SwingFXUtils.toFXImage(bufferedImage, null);
ClipboardContent content = new ClipboardContent();
content.putImage(image);
Clipboard.getSystemClipboard().setContent(content);
});
addMenuItem(getContextMenu(), "Print", event -> {
BufferedImage bufferedImage =
getChart().createBufferedImage((int) this.getWidth(), (int) this.getHeight());
Image image = SwingFXUtils.toFXImage(bufferedImage, null);
ImageView imageView = new ImageView(image);
PrinterJob job = PrinterJob.createPrinterJob();
if (job != null) {
boolean doPrint = job.showPrintDialog(this.getScene().getWindow());
if (doPrint) {
job.printPage(imageView);
job.endJob();
}
} else {
MZmineCore.getDesktop().displayErrorMessage("No Printing Service Found");
}
});
}
private void handleSave(String description, String extensions, String extension,
FileType filetype) {
FileChooser chooser = new FileChooser();
chooser.getExtensionFilters().add(new ExtensionFilter(description, extensions));
File file = chooser.showSaveDialog(null);
if (file != null) {
String filepath = file.getPath();
if (!filepath.toLowerCase().endsWith(extension)) {
filepath += extension;
}
int width = (int) this.getWidth();
int height = (int) this.getHeight();
// Save image
SaveImage SI = new SaveImage(getChart(), filepath, width, height, filetype);
new Thread(SI).start();
}
}
protected void addMenuItem(Menu parent, String title, EventHandler<ActionEvent> al) {
MenuItem pngItem = new MenuItem(title);
pngItem.setOnAction(al);
parent.getItems().add(pngItem);
}
protected void addMenuItem(ContextMenu parent, String title, EventHandler<ActionEvent> al) {
MenuItem pngItem = new MenuItem(title);
pngItem.setOnAction(al);
parent.getItems().add(pngItem);
}
@Override
public void setChart(JFreeChart chart) {
super.setChart(chart);
// If no chart, end here
if (chart == null)
return;
final EChartViewer chartPanel = this;
// apply the theme here, let's see how that works
MZmineCore.getConfiguration().getDefaultChartTheme().apply(chart);
// remove old init
if (mouseAdapter != null) {
this.getCanvas().removeMouseHandler(mouseAdapter);
}
if (chartPanel.getChart().getPlot() instanceof XYPlot) {
// set sticky zero
if (stickyZeroForRangeAxis) {
ValueAxis rangeAxis = chartPanel.getChart().getXYPlot().getRangeAxis();
if (rangeAxis instanceof NumberAxis) {
NumberAxis axis = (NumberAxis) rangeAxis;
axis.setAutoRangeIncludesZero(true);
axis.setAutoRange(true);
axis.setAutoRangeStickyZero(true);
axis.setRangeType(RangeType.POSITIVE);
}
}
Plot p = getChart().getPlot();
if (addZoomHistory && p instanceof XYPlot
&& !(p instanceof CombinedDomainXYPlot || p instanceof CombinedRangeXYPlot)) {
// zoom history
zoomHistory = new ZoomHistory(this, 20);
// axis range changed listener for zooming and more
ValueAxis rangeAxis = this.getChart().getXYPlot().getRangeAxis();
ValueAxis domainAxis = this.getChart().getXYPlot().getDomainAxis();
if (rangeAxis != null) {
rangeAxis.addChangeListener(new AxisRangeChangedListener(new ChartViewWrapper(this)) {
@Override
public void axisRangeChanged(ChartViewWrapper chart, ValueAxis axis, Range lastR,
Range newR) {
// notify listeners of changed range
if (axesRangeListener != null) {
for (AxesRangeChangedListener l : axesRangeListener) {
l.axesRangeChanged(chart, axis, lastR, newR);
}
}
}
});
}
if (domainAxis != null) {
domainAxis.addChangeListener(new AxisRangeChangedListener(new ChartViewWrapper(this)) {
@Override
public void axisRangeChanged(ChartViewWrapper chart, ValueAxis axis, Range lastR,
Range newR) {
// notify listeners of changed range
if (axesRangeListener != null) {
for (AxesRangeChangedListener l : axesRangeListener) {
l.axesRangeChanged(chart, axis, lastR, newR);
}
}
}
});
}
}
// mouse adapter for scrolling and zooming
mouseAdapter = new ChartGestureMouseAdapterFX("gestures", this);
addMouseHandler(mouseAdapter);
// add gestures
if (standardGestures) {
addStandardGestures();
}
// mouseAdapter.addDebugHandler();
}
}
public void addMouseHandler(MouseHandlerFX handler) {
this.getCanvas().addAuxiliaryMouseHandler(handler);
}
/**
* Adds all standard gestures defined in {@link ChartGestureHandler#getStandardGestures()}
*/
public void addStandardGestures() {
// add ChartGestureHandlers
ChartGestureMouseAdapterFX m = getGestureAdapter();
if (m != null) {
m.clearHandlers();
for (GestureHandlerFactory f : ChartGestureHandler.getStandardGestures()) {
m.addGestureHandler(f.createHandler());
}
logger.log(Level.FINEST, "Added standard gestures: " + m.getGestureHandlers().size());
}
}
/**
* Adds the GraphicsExportDialog menu and the data export menu
*/
protected void addExportMenu(boolean graphics, boolean data) {
if (graphics) {
// Graphics Export
addMenuItem(getContextMenu(), "Export graphics...", e -> {
GraphicsExportParameters parameters = (GraphicsExportParameters) MZmineCore
.getConfiguration().getModuleParameters(GraphicsExportModule.class);
MZmineCore.getModuleInstance(GraphicsExportModule.class).openDialog(getChart(), parameters);
});
}
if (data) {
// General data export
Menu export = new Menu("Export data ...");
// Excel XY
MenuExportToExcel exportXY =
new MenuExportToExcel(new XSSFExcelWriterReader(), "to Excel", this);
export.getItems().add(exportXY);
// clip board
MenuExportToClipboard exportXYClipboard = new MenuExportToClipboard("to Clipboard", this);
export.getItems().add(exportXYClipboard);
// add to panel
getContextMenu().getItems().add(export);
}
}
/**
   * By default, tries to extract all series from an XYDataset or XYZDataset<br>
   * Series 1 | Series 2 <br>
   * x y | x y (for an XYZDataset: x y z | x y z)
*
* @return Data array[columns][rows]
*/
public Object[][] getDataArrayForExport() {
if (getChart().getPlot() instanceof XYPlot && getChart().getXYPlot() != null
/*&& getChart().getXYPlot().getDataset() != null*/) { // getDataset() may be null if the
// first dataset was removed, but the plot may still hold other datasets
try {
List<Object[]> modelList = new ArrayList<>();
for (int d = 0; d < getChart().getXYPlot().getDatasetCount(); d++) {
XYDataset data = getChart().getXYPlot().getDataset(d);
if (data instanceof XYZDataset) {
XYZDataset xyz = (XYZDataset) data;
int series = data.getSeriesCount();
Object[][] model = new Object[series * 3][];
for (int s = 0; s < series; s++) {
int size = 2 + xyz.getItemCount(s);
Object[] x = new Object[size];
Object[] y = new Object[size];
Object[] z = new Object[size];
// create new Array model[row][col]
// Write header
            Comparable title = data.getSeriesKey(s);
x[0] = title;
y[0] = "";
z[0] = "";
x[1] = getChart().getXYPlot().getDomainAxis().getLabel();
y[1] = getChart().getXYPlot().getRangeAxis().getLabel();
z[1] = "z-axis";
// write data
for (int i = 0; i < xyz.getItemCount(s); i++) {
x[i + 2] = xyz.getX(s, i);
y[i + 2] = xyz.getY(s, i);
z[i + 2] = xyz.getZ(s, i);
}
model[s * 3] = x;
model[s * 3 + 1] = y;
model[s * 3 + 2] = z;
}
for (Object[] o : model) {
modelList.add(o);
}
} else if (data != null) {
int series = data.getSeriesCount();
Object[][] model = new Object[series * 2][];
for (int s = 0; s < series; s++) {
int size = 2 + data.getItemCount(s);
Object[] x = new Object[size];
Object[] y = new Object[size];
// create new Array model[row][col]
// Write header
Comparable title = data.getSeriesKey(s);
x[0] = title;
y[0] = "";
x[1] = getChart().getXYPlot().getDomainAxis().getLabel();
y[1] = getChart().getXYPlot().getRangeAxis().getLabel();
// write data
for (int i = 0; i < data.getItemCount(s); i++) {
x[i + 2] = data.getX(s, i);
y[i + 2] = data.getY(s, i);
}
model[s * 2] = x;
model[s * 2 + 1] = y;
}
for (Object[] o : model) {
modelList.add(o);
}
}
}
return modelList.toArray(new Object[modelList.size()][]);
} catch (Exception ex) {
logger.log(Level.WARNING, "Cannot retrieve data for export", ex);
return null;
}
}
return null;
}
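  // Layout example (comment added for clarity): an XYDataset with two series of three
  // items each yields four rows, two per series:
  //   { "Series 1", <domain label>, x0, x1, x2 }
  //   { "",         <range label>,  y0, y1, y2 }
  //   { "Series 2", <domain label>, x0, x1, x2 }
  //   { "",         <range label>,  y0, y1, y2 }
  // XYZ datasets add a third row per series holding the z values.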
public void addAxesRangeChangedListener(AxesRangeChangedListener l) {
if (axesRangeListener == null) {
axesRangeListener = new ArrayList<AxesRangeChangedListener>(1);
}
axesRangeListener.add(l);
}
public void removeAxesRangeChangedListener(AxesRangeChangedListener l) {
if (axesRangeListener != null) {
axesRangeListener.remove(l);
}
}
public void clearAxesRangeChangedListeners() {
if (axesRangeListener != null) {
axesRangeListener.clear();
}
}
public void setMouseZoomable(boolean flag) {
setDomainZoomable(flag);
setRangeZoomable(flag);
isMouseZoomable = flag;
// TODO find better solution
// clear handler to stop zoom rectangle (hacky solution)
getCanvas().clearLiveHandler();
}
public void setRangeZoomable(boolean flag) {
getCanvas().setRangeZoomable(flag);
}
public void setDomainZoomable(boolean flag) {
getCanvas().setDomainZoomable(flag);
}
public boolean isMouseZoomable() {
return isMouseZoomable;
}
public boolean isDomainZoomable() {
return getCanvas().isDomainZoomable();
}
public boolean isRangeZoomable() {
return getCanvas().isRangeZoomable();
}
public ZoomHistory getZoomHistory() {
return zoomHistory;
}
public void setZoomHistory(ZoomHistory h) {
zoomHistory = h;
}
/**
   * Returns the {@link ChartGestureMouseAdapter}. For other ChartPanel classes, use:
*
* <pre>
* this.getCanvas().addAuxiliaryMouseHandler(handler);
* </pre>
*
   * @return the gesture mouse adapter
*/
public ChartGestureMouseAdapterFX getGestureAdapter() {
return mouseAdapter;
}
public void setGestureAdapter(ChartGestureMouseAdapterFX mouseAdapter) {
this.mouseAdapter = mouseAdapter;
}
}
|
package io.github.mzmine.modules.io.export_scans;
import io.github.msdk.MSDKException;
import io.github.msdk.datamodel.FileType;
import io.github.msdk.datamodel.MsScan;
import io.github.msdk.datamodel.SimpleRawDataFile;
import io.github.msdk.io.mzml.MzMLFileExportMethod;
import io.github.msdk.io.mzml.data.MzMLCompressionType;
import io.github.mzmine.datamodel.DataPoint;
import io.github.mzmine.datamodel.MassList;
import io.github.mzmine.datamodel.Scan;
import io.github.mzmine.datamodel.impl.MZmineToMSDKMsScan;
import io.github.mzmine.parameters.ParameterSet;
import io.github.mzmine.taskcontrol.AbstractTask;
import io.github.mzmine.taskcontrol.TaskStatus;
import io.github.mzmine.util.files.FileAndPathUtil;
import io.github.mzmine.util.scans.ScanUtils;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.time.Instant;
import java.util.Objects;
import java.util.Optional;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Exports a spectrum to a file.
*/
public class ExportScansTask extends AbstractTask {
// Logger
private static final Logger logger = Logger.getLogger(ExportScansTask.class.getName());
private final File exportFile;
private final Scan[] scans;
private final String extension;
private int progress;
private int progressMax;
private boolean useMassList;
private MzMLFileExportMethod method;
public ExportScansTask(Scan[] scans, ParameterSet parameters) {
super(null, Instant.now()); // no new data stored -> null, date irrelevant (not used in batch)
progress = 0;
progressMax = 0;
this.scans = scans;
useMassList = parameters.getParameter(ExportScansParameters.export_masslist).getValue();
extension = parameters.getParameter(ExportScansParameters.formats).getValue().toString();
this.exportFile = FileAndPathUtil
.getRealFilePath(parameters.getParameter(ExportScansParameters.file).getValue(), extension);
}
@Override
public String getTaskDescription() {
if (scans == null) {
return "";
}
if (scans.length == 1) {
return "Exporting spectrum # " + scans[0].getScanNumber() + " for "
+ scans[0].getDataFile().getName();
} else {
return "Exporting " + scans.length + " spectra";
}
}
@Override
public double getFinishedPercentage() {
if(method != null) {
return Objects.requireNonNullElse(method.getFinishedPercentage(), 0f);
} else if(progressMax != 0) {
return (double) progress / (double) progressMax;
}
return 0;
}
@Override
public void run() {
// Update the status of this task
setStatus(TaskStatus.PROCESSING);
    // Dispatch on the requested output format
try {
// Handle mzML export
if (extension.equalsIgnoreCase("mzML")) {
exportmzML();
} else {
// Handle text export
exportText();
}
// Success
logger.info("Export of spectra finished");
setStatus(TaskStatus.FINISHED);
} catch (Throwable t) {
logger.log(Level.SEVERE, "Spectrum export error", t);
setStatus(TaskStatus.ERROR);
setErrorMessage(t.getMessage());
}
}
/**
   * Export the scans - text formats
*
* @throws IOException if there are i/o problems.
*/
public void exportText() throws IOException {
// Open the writer - append data if file already exists
final BufferedWriter writer = new BufferedWriter(new FileWriter(exportFile, true));
try {
for (Scan scan : scans) {
logger.info("Exporting scan #" + scan.getScanNumber() + " of raw file: "
+ scan.getDataFile().getName());
// Write Header row
switch (extension) {
case "txt":
writer.write("Name: Scan#: " + scan.getScanNumber() + ", RT: " + scan.getRetentionTime()
+ " min");
writer.newLine();
break;
case "mgf":
writer.write("BEGIN IONS");
writer.newLine();
writer.write("PEPMASS=" + Objects.requireNonNullElse(scan.getPrecursorMz(), 0));
writer.newLine();
writer.write("CHARGE=" + Objects.requireNonNullElse(scan.getPrecursorCharge(), 0));
writer.newLine();
writer.write("MSLEVEL=" + scan.getMSLevel());
writer.newLine();
writer.write("Title: Scan#: " + scan.getScanNumber() + ", RT: "
+ scan.getRetentionTime() + " min");
writer.newLine();
break;
case "msp":
break;
}
// Write the data points
DataPoint[] dataPoints = null;
if (useMassList) {
MassList list = scan.getMassList();
if (list != null) {
dataPoints = list.getDataPoints();
}
}
if (dataPoints == null) {
dataPoints = ScanUtils.extractDataPoints(scan);
}
final int itemCount = dataPoints.length;
progressMax = itemCount;
for (int i = 0; i < itemCount; i++) {
// Write data point row
writer.write(dataPoints[i].getMZ() + " " + dataPoints[i].getIntensity());
writer.newLine();
progress = i + 1;
}
// Write footer row
if (extension.equals("mgf")) {
writer.write("END IONS");
writer.newLine();
}
writer.newLine();
}
} catch (Exception e) {
      throw new IOException(e);
} finally {
// Close
writer.close();
}
}
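  // Sample "mgf" output as written above (comment added for clarity; the values are
  // illustrative placeholders only):
  //
  //   BEGIN IONS
  //   PEPMASS=445.12
  //   CHARGE=2
  //   MSLEVEL=2
  //   Title: Scan#: 101, RT: 12.3 min
  //   132.1015 10400.0
  //   ...
  //   END IONS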
/**
   * Export the scans - mzML format
*
* @throws IOException if there are i/o problems.
*/
public void exportmzML() throws MSDKException {
progressMax = scans.length;
// Initialize objects
SimpleRawDataFile msdkRawFile =
new SimpleRawDataFile("MZmine mzML export", Optional.empty(), FileType.MZML);
for (Scan scan : scans) {
MsScan MSDKscan = new MZmineToMSDKMsScan(scan);
msdkRawFile.addScan(MSDKscan);
}
// Actually write to disk
method = new MzMLFileExportMethod(msdkRawFile, exportFile,
MzMLCompressionType.ZLIB, MzMLCompressionType.ZLIB);
method.execute();
}
}
|
package me.ryleykimmel.brandywine.network.game.frame;
import java.util.LinkedList;
import java.util.List;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Bytes;
import me.ryleykimmel.brandywine.common.Assertions;
import me.ryleykimmel.brandywine.common.util.ByteBufUtil;
import me.ryleykimmel.brandywine.network.game.frame.FrameBuffer.ReadingFrameBuffer;
/**
 * An implementation of a {@link ReadingFrameBuffer} which reads data from a Frame's payload.
*
* @author Graham
* @author Ryley Kimmel <ryley.kimmel@live.com>
*/
public final class FrameReader extends ReadingFrameBuffer {
/**
* Constructs a new {@link FrameReader} with the specified Frame.
*
* @param frame The Frame to read.
*/
public FrameReader(Frame frame) {
super(frame.content());
}
/**
* Reads a single String if and only if this buffer is in {@link AccessMode#BYTE_ACCESS byte
* access}.
*
* @return The read String.
*/
public String getString() {
checkByteAccess();
return ByteBufUtil.readJString(buffer);
}
/**
* Reads a single signed smart if and only if this buffer is in {@link AccessMode#BYTE_ACCESS byte
* access}.
*
* @return The read smart.
*/
public int getSignedSmart() {
checkByteAccess();
int peek = buffer.getUnsignedByte(buffer.readerIndex());
    // the two-byte form must be read unsigned, otherwise the set high bit corrupts the value
    int value = peek > Byte.MAX_VALUE ? buffer.readUnsignedShort() - 49152 : buffer.readByte() - 64;
return value;
}
/**
* Reads a single unsigned smart if and only if this buffer is in {@link AccessMode#BYTE_ACCESS
* byte access}.
*
   * @return The read smart.
*/
public int getUnsignedSmart() {
checkByteAccess();
int peek = buffer.getUnsignedByte(buffer.readerIndex());
    int value = peek > Byte.MAX_VALUE ? buffer.readUnsignedShort() + Short.MIN_VALUE : buffer.readByte();
return value;
}
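  // Encoding note (comment added for clarity): a "smart" stores values 0-127 in a
  // single byte and larger values in two bytes with the high bit of the first byte
  // set. Unsigned example: the bytes 0x81 0x2C decode as 0x812C - 0x8000 = 300,
  // whereas the single byte 0x2C decodes directly as 44.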
/**
* Reads a single signed number of the specified DataType in the {@link DataOrder#BIG big}
* DataOrder if and only if this buffer is in {@link AccessMode#BYTE_ACCESS byte access} .
*
* @param type The type of the number to read.
* @return The read number, signed.
*/
public long getSigned(DataType type) {
return getSigned(type, DataOrder.BIG, DataTransformation.NONE);
}
/**
* Reads a single signed number of the specified DataType in the specified DataOrder if and only
* if this buffer is in {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param type The type of the number to read.
* @param order The DataOrder to read the number in.
* @return The read number, signed.
*/
public long getSigned(DataType type, DataOrder order) {
return getSigned(type, order, DataTransformation.NONE);
}
/**
* Reads a single signed number of the specified DataType in the {@link DataOrder#BIG big}
* DataOrder and performs the specified DataTransformation on the number if and only if this
* buffer is in {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param type The type of the number to read.
* @param transformation The DataTransformation to perform on the number.
* @return The read number, signed.
*/
public long getSigned(DataType type, DataTransformation transformation) {
return getSigned(type, DataOrder.BIG, transformation);
}
/**
* Reads a single signed number of the specified DataType in the specified DataOrder and performs
* the specified DataTransformation on the number if and only if this buffer is in
* {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param type The type of the number to read.
* @param order The DataOrder to read the number in.
* @param transformation The DataTransformation to perform on the number.
* @return The read number, signed.
*/
public long getSigned(DataType type, DataOrder order, DataTransformation transformation) {
long longValue = get(type, order, transformation);
if (type != DataType.LONG) {
int max = (int) (Math.pow(2, type.getBytes() * 8 - 1) - 1);
if (longValue > max) {
longValue -= (max + 1) * 2;
}
}
return longValue;
}
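  // Worked example (comment added for clarity): reading a SHORT whose raw bytes are
  // 0xFF 0xFE gives longValue = 65534. Here max = 2^15 - 1 = 32767, so 65534 > max
  // and we subtract (max + 1) * 2 = 65536, producing the signed result -2.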
/**
* Reads a single unsigned number of the specified DataType in the {@link DataOrder#BIG big}
* DataOrder if and only if this buffer is in {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param type The type of the number to read.
* @return The read number, unsigned.
*/
public long getUnsigned(DataType type) {
return getUnsigned(type, DataOrder.BIG, DataTransformation.NONE);
}
/**
* Reads a single unsigned number of the specified DataType in the specified DataOrder if and only
* if this buffer is in {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param type The type of the number to read.
* @param order The DataOrder to read the number in.
* @return The read number, unsigned.
*/
public long getUnsigned(DataType type, DataOrder order) {
return getUnsigned(type, order, DataTransformation.NONE);
}
/**
* Reads a single unsigned number of the specified DataType in the {@link DataOrder#BIG big}
* DataOrder and performs the specified DataTransformation on the number if and only if this
* buffer is in {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param type The type of the number to read.
* @param transformation The DataTransformation to perform on the number.
* @return The read number, unsigned.
*/
public long getUnsigned(DataType type, DataTransformation transformation) {
return getUnsigned(type, DataOrder.BIG, transformation);
}
/**
* Reads a single unsigned number of the specified DataType in the specified DataOrder and
* performs the specified DataTransformation on the number if and only if this buffer is in
* {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param type The type of the number to read.
* @param order The DataOrder to read the number in.
* @param transformation The DataTransformation to perform on the number.
* @return The read number, unsigned.
*/
public long getUnsigned(DataType type, DataOrder order, DataTransformation transformation) {
Preconditions.checkArgument(type != DataType.LONG, "Longs must be read as signed types.");
return get(type, order, transformation) & 0xFFFFFFFFFFFFFFFFL;
}
/**
* Reads a single number of the specified DataType in the specified DataOrder and performs the
* specified DataTransformation on the number if and only if this buffer is in
* {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param type The type of the number to read.
* @param order The DataOrder to read the number in.
* @param transformation The DataTransformation to perform on the number.
* @return The read number.
*/
private long get(DataType type, DataOrder order, DataTransformation transformation) {
checkByteAccess();
long longValue = 0;
int length = type.getBytes();
switch (order) {
case LITTLE:
for (int i = 0; i < length; i++) {
if (i == 0 && transformation != DataTransformation.NONE) {
switch (transformation) {
case ADD:
longValue |= buffer.readByte() - 128 & 0xFFL;
break;
case SUBTRACT:
longValue |= 128 - buffer.readByte() & 0xFFL;
break;
case NEGATE:
longValue |= -buffer.readByte() & 0xFFL;
break;
default:
throw new UnsupportedOperationException(transformation + " is not supported!");
}
} else {
longValue |= (buffer.readByte() & 0xFFL) << i * 8;
}
}
break;
case BIG:
        for (int i = length - 1; i >= 0; i--) {
if (i == 0 && transformation != DataTransformation.NONE) {
switch (transformation) {
case ADD:
longValue |= buffer.readByte() - 128 & 0xFFL;
break;
case SUBTRACT:
longValue |= 128 - buffer.readByte() & 0xFFL;
break;
case NEGATE:
longValue |= -buffer.readByte() & 0xFFL;
break;
default:
throw new UnsupportedOperationException(transformation + " is not supported!");
}
} else {
longValue |= (buffer.readByte() & 0xFFL) << i * 8;
}
}
break;
case MIDDLE:
Preconditions.checkArgument(transformation == DataTransformation.NONE,
"middle endian cannot be transformed");
Preconditions.checkArgument(type == DataType.INT,
"middle endian can only be used with an integer");
longValue |= (buffer.readByte() & 0xFF) << 8;
longValue |= buffer.readByte() & 0xFF;
longValue |= (buffer.readByte() & 0xFF) << 24;
longValue |= (buffer.readByte() & 0xFF) << 16;
break;
case INVERSED_MIDDLE:
Preconditions.checkArgument(transformation == DataTransformation.NONE,
"inversed middle endian cannot be transformed");
Preconditions.checkArgument(type == DataType.INT,
"inversed middle endian can only be used with an integer");
longValue |= (buffer.readByte() & 0xFF) << 16;
longValue |= (buffer.readByte() & 0xFF) << 24;
longValue |= buffer.readByte() & 0xFF;
longValue |= (buffer.readByte() & 0xFF) << 8;
break;
default:
throw new UnsupportedOperationException(order + " is not supported!");
}
return longValue;
}
/**
* Reads the specified amount of bits if and only if this buffer is in
* {@link AccessMode#BIT_ACCESS bit access}.
*
* @param amount The amount of bits to read.
* @return The value of the bits.
*/
public int getBits(int amount) {
Assertions.checkWithin(1, 32, amount, "Number of bits must be between 1 and 32 inclusive.");
checkBitAccess();
int bytePos = bitIndex >> 3;
int bitOffset = 8 - (bitIndex & 7);
int value = 0;
bitIndex += amount;
for (; amount > bitOffset; bitOffset = 8) {
value += (buffer.getByte(bytePos++) & BIT_MASKS[bitOffset]) << amount - bitOffset;
amount -= bitOffset;
}
if (amount == bitOffset) {
value += buffer.getByte(bytePos) & BIT_MASKS[bitOffset];
} else {
value += buffer.getByte(bytePos) >> bitOffset - amount & BIT_MASKS[amount];
}
return value;
}
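  // Worked example (comment added for clarity): with bitIndex = 3 and amount = 7, the
  // read starts in byte 0 at bitOffset = 5. The loop takes the low 5 bits of byte 0
  // (shifted left by 2), then the final step takes the top 2 bits of byte 1, so the
  // 7-bit value straddles the byte boundary.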
/**
* Reads {@code length} bytes into a byte array if and only if this buffer is in
* {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param length The amount of bytes to read.
* @return The byte array.
*/
public byte[] getBytes(int length) {
return getBytes(DataTransformation.NONE, length);
}
/**
* Reads {@code length} bytes into a byte array if and only if this buffer is in
* {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param transformation The DataTransformation to perform on the bytes.
* @param length The amount of bytes to read.
* @return The byte array.
*/
public byte[] getBytes(DataTransformation transformation, int length) {
Assertions.checkNonNegative(length, "Length: [" + length + "] may not be negative!");
byte[] bytes = new byte[length];
for (int i = 0; i < length && buffer.isReadable(); i++) {
bytes[i] = (byte) getSigned(DataType.BYTE, transformation);
}
return bytes;
}
/**
* Reads {@code length} bytes into a byte array, in reverse, if and only if this buffer is in
* {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param length The amount of bytes to read.
* @return The byte array.
*/
public byte[] getBytesReverse(int length) {
return getBytesReverse(DataTransformation.NONE, length);
}
/**
* Reads {@code length} bytes into a byte array, in reverse, if and only if this buffer is in
* {@link AccessMode#BYTE_ACCESS byte access}.
*
* @param transformation The DataTransformation to perform on the bytes.
* @param length The amount of bytes to read.
* @return The byte array.
*/
public byte[] getBytesReverse(DataTransformation transformation, int length) {
Assertions.checkNonNegative(length, "Length: [" + length + "] may not be negative!");
byte[] bytes = new byte[length];
    for (int i = length - 1; i >= 0 && buffer.isReadable(); i--) {
bytes[i] = (byte) getSigned(DataType.BYTE, transformation);
}
return bytes;
}
}
|
package ml.duncte123.skybot.commands.essentials.eval;
import groovy.lang.GroovyShell;
import ml.duncte123.skybot.commands.essentials.eval.filter.EvalFilter;
import ml.duncte123.skybot.exceptions.VRCubeException;
import ml.duncte123.skybot.objects.command.Command;
import ml.duncte123.skybot.objects.delegate.JDADelegate;
import ml.duncte123.skybot.utils.AirUtils;
import ml.duncte123.skybot.utils.Settings;
import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.kohsuke.groovy.sandbox.SandboxTransformer;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.*;
public class EvalCommand extends Command {
private GroovyShell protected_;
private ScriptEngine engine;
private List<String> packageImports;
private ScheduledExecutorService service = Executors.newScheduledThreadPool(1, r -> new Thread(r, "Eval-Thread"));
private EvalFilter filter = new EvalFilter();
/**
* This initialises the engine
*/
public EvalCommand() {
//the GroovyShell is for the public eval
protected_ = new GroovyShell(
new CompilerConfiguration()
.addCompilationCustomizers(new SandboxTransformer()));
engine = new ScriptEngineManager(protected_.getClassLoader()).getEngineByName("groovy");
packageImports = Arrays.asList(
"java.io",
"java.lang",
"java.util",
"net.dv8tion.jda.core",
"net.dv8tion.jda.core.entities",
"net.dv8tion.jda.core.entities.impl",
"net.dv8tion.jda.core.managers",
"net.dv8tion.jda.core.managers.impl",
"net.dv8tion.jda.core.utils",
"ml.duncte123.skybot.utils");
}
@Override
public void executeCommand(String invoke, String[] args, GuildMessageReceivedEvent event) {
boolean isRanByBotOwner = Arrays.asList(Settings.wbkxwkZPaG4ni5lm8laY).contains(
event.getAuthor().getId()) ||
event.getAuthor().getId().equals(Settings.wbkxwkZPaG4ni5lm8laY[0]);
ScheduledFuture<Object> future = null;
try {
StringBuilder importStringBuilder = new StringBuilder();
for (final String s : packageImports) {
importStringBuilder.append("import ").append(s).append(".*;\n");
}
String script = importStringBuilder.toString() +
event.getMessage().getRawContent()
.substring(event.getMessage().getRawContent()
.split(" ")[0].length());
int timeout = 5;
if(isRanByBotOwner) {
timeout = 60;
engine.put("commandmanager", AirUtils.commandManager);
engine.put("message", event.getMessage());
engine.put("channel", event.getMessage().getTextChannel());
engine.put("guild", event.getGuild());
engine.put("member", event.getMember());
engine.put("jda", event.getJDA());
engine.put("shardmanager", event.getJDA().asBot().getShardManager());
engine.put("event", event);
engine.put("args", args);
future = service.schedule(() -> {
return engine.eval(script);
}, 0, TimeUnit.MILLISECONDS);
} else {
if(filter.filterArrays(script))
throw new VRCubeException("Arrays are not allowed");
if(filter.filterLoops(script))
throw new VRCubeException("Loops are not allowed");
future = service.schedule(() -> {
filter.register();
return protected_.evaluate(script);
}, 0, TimeUnit.MILLISECONDS);
}
Object out = future.get(timeout, TimeUnit.SECONDS);
            if (out != null && !String.valueOf(out).isEmpty()) {
sendMsg(event, (!isRanByBotOwner ? "**" + event.getAuthor().getName() + ":** " : "") + out.toString());
} else {
sendSuccess(event.getMessage());
}
}
catch (ExecutionException e1) {
event.getChannel().sendMessage("ERROR: " + e1.getCause().toString()).queue();
//e.printStackTrace();
sendError(event.getMessage());
}
catch (TimeoutException | InterruptedException e2) {
future.cancel(true);
event.getChannel().sendMessage("ERROR: " + e2.toString()).queue();
//e.printStackTrace();
if(!future.isCancelled()) future.cancel(true);
sendError(event.getMessage());
}
catch (IllegalArgumentException | VRCubeException e3) {
sendMsg(event, "ERROR: " + e3.getClass().getName() + ": " + e3.getMessage());
sendError(event.getMessage());
} finally {
// Clear variables in owner??
//Unregister the filter
filter.unregister();
}
System.gc();
}
public void shutdown() {
service.shutdownNow();
}
@Override
public String help() {
return "A simple eval command";
}
@Override
public String getName() {
return "eval";
}
@Override
public String[] getAliases() {
return new String[] {"eval™", "evaluate"};
}
}
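/*
 * Design note (added for clarity): owner evals run on a plain ScriptEngine with full
 * bindings, while everyone else goes through the sandboxed GroovyShell. A minimal
 * sketch of that sandbox pattern, assuming groovy-sandbox's GroovyValueFilter API:
 *
 *     GroovyShell shell = new GroovyShell(new CompilerConfiguration()
 *             .addCompilationCustomizers(new SandboxTransformer()));
 *     GroovyValueFilter filter = new GroovyValueFilter();  // or a stricter subclass
 *     filter.register();                // intercept calls made on this thread
 *     try {
 *         Object result = shell.evaluate("1 + 1");
 *     } finally {
 *         filter.unregister();
 *     }
 */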
|
package net.wizardsoflua.lua.classes.event;
import net.minecraft.item.ItemStack;
import net.minecraftforge.event.entity.living.LivingEntityUseItemEvent;
import net.wizardsoflua.annotation.GenerateLuaClass;
import net.wizardsoflua.annotation.GenerateLuaDoc;
import net.wizardsoflua.annotation.LuaProperty;
import net.wizardsoflua.lua.classes.DelegatorLuaClass;
@GenerateLuaClass(name = UseItemFinishEventApi.NAME)
@GenerateLuaDoc(subtitle = "When an Entity finishes using an Item")
public class UseItemFinishEventApi<D extends LivingEntityUseItemEvent.Finish>
extends UseItemEventApi<D> {
public static final String NAME = "UseItemFinishEvent";
public UseItemFinishEventApi(DelegatorLuaClass<?, ?> luaClass, D delegate) {
super(luaClass, delegate);
}
@LuaProperty
public ItemStack getResultItem() {
return delegate.getResultStack();
}
@LuaProperty
public void setResultItem(ItemStack result) {
delegate.setResultStack(result);
}
}
|
package org.hive2hive.processframework.processes;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.hive2hive.processframework.Process;
import org.hive2hive.processframework.ProcessComponent;
import org.hive2hive.processframework.ProcessState;
import org.hive2hive.processframework.RollbackReason;
import org.hive2hive.processframework.decorators.AsyncComponent;
import org.hive2hive.processframework.exceptions.InvalidProcessStateException;
import org.hive2hive.processframework.exceptions.ProcessExecutionException;
import org.hive2hive.processframework.exceptions.ProcessRollbackException;
/**
* A {@link Process} that traverses its components in preorder (i.e., left-to-right).
*
 * @author Christian Lüthold
*
*/
public class PreorderProcess extends Process {
private List<ProcessComponent> components = new ArrayList<ProcessComponent>();
private List<Future<RollbackReason>> asyncHandles = new ArrayList<Future<RollbackReason>>();
private ProcessExecutionException exception = null;
private int executionIndex = 0;
private int rollbackIndex = 0;
@Override
protected void doExecute() throws InvalidProcessStateException, ProcessExecutionException,
ProcessRollbackException {
// execute all child components
while (!components.isEmpty() && executionIndex < components.size()
&& getState() == ProcessState.EXECUTING) {
checkAsyncComponentsForFail(asyncHandles);
rollbackIndex = executionIndex;
ProcessComponent next = components.get(executionIndex);
next.start();
executionIndex++;
if (next instanceof AsyncComponent) {
asyncHandles.add(((AsyncComponent) next).getHandle());
}
}
// wait for async child components
awaitAsync();
}
/**
* Handles the rollback of this {@code PreorderProcess}.
* If this {@code PreorderProcess} is part (child) of a higher level {@link Process}, the rollback order
* of the (sibling) {@link ProcessComponent}s is delegated to this {@link Process}.
*/
@Override
protected void doRollback(RollbackReason reason) throws InvalidProcessStateException,
ProcessRollbackException {
// inform parent (if exists and not informed yet)
Process parent = getParent();
if (parent != null && parent.getState() != ProcessState.ROLLBACKING) {
getParent().cancel(reason);
} else {
// no parent, or called from parent
while (!components.isEmpty() && rollbackIndex >= 0 && getState() == ProcessState.ROLLBACKING) {
ProcessComponent last = components.get(rollbackIndex);
last.cancel(reason);
                rollbackIndex--;
}
}
}
@Override
protected void doAdd(ProcessComponent component) {
components.add(component);
}
@Override
protected void doAdd(int index, ProcessComponent component) {
components.add(index, component);
}
@Override
protected void doRemove(ProcessComponent component) {
components.remove(component);
}
@Override
public List<ProcessComponent> getComponents() {
return Collections.unmodifiableList(components);
}
@Override
public ProcessComponent getComponent(int index) {
// TODO implement
throw new UnsupportedOperationException("Operation not implemented.");
}
private void awaitAsync() throws ProcessExecutionException {
if (asyncHandles.isEmpty())
return;
if (getState() != ProcessState.EXECUTING)
return;
// logger.debug("Awaiting async components for completion.");
final CountDownLatch latch = new CountDownLatch(1);
ScheduledExecutorService executor = new ScheduledThreadPoolExecutor(1);
ScheduledFuture<?> handle = executor.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
// assure still in running state
if (getState() != ProcessState.EXECUTING) {
latch.countDown();
return;
}
// check for potential fails
try {
checkAsyncComponentsForFail(asyncHandles);
} catch (ProcessExecutionException e) {
exception = e;
latch.countDown();
return;
}
// check for completion
for (Future<RollbackReason> handle : asyncHandles) {
if (!handle.isDone())
return;
}
latch.countDown();
}
}, 1, 1, TimeUnit.SECONDS);
// blocking wait for completion or potential fail
try {
latch.await();
} catch (InterruptedException e) {
// logger.error("Exception while waiting for async components.", e);
}
handle.cancel(true);
if (exception != null) {
throw exception;
}
}
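    // Design note (comment added for clarity): rather than blocking on Future.get() for
    // each handle in turn (which would hide a failure in a later component while an
    // earlier one is still running), a one-second poll scans all handles and trips the
    // latch as soon as any component fails or all of them complete.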
private static void checkAsyncComponentsForFail(List<Future<RollbackReason>> handles)
throws ProcessExecutionException {
if (handles.isEmpty())
return;
for (Future<RollbackReason> handle : handles) {
if (!handle.isDone())
continue;
RollbackReason result = null;
try {
result = handle.get();
} catch (InterruptedException e) {
// logger.error("Error while checking async component.", e);
} catch (ExecutionException e) {
throw new ProcessExecutionException("AsyncComponent threw an exception.", e.getCause());
}
// initiate rollback if necessary
if (result != null) {
throw new ProcessExecutionException(result);
}
}
}
}
|
package org.jenkinsci.plugins.graniteclient;
import hudson.FilePath;
import hudson.model.Result;
import hudson.model.TaskListener;
import hudson.remoting.VirtualChannel;
import net.adamcin.granite.client.packman.*;
import java.io.File;
import java.io.IOException;
/**
* Implementation of {@link hudson.FilePath.FileCallable} used by the {@link org.jenkinsci.plugins.graniteclient.BuildPackageBuilder}
*/
public class BuildPackageCallable implements FilePath.FileCallable<Result> {
private static final long serialVersionUID = 1329103722879551699L;
private final GraniteClientConfig clientConfig;
private final TaskListener listener;
private final PackId packId;
private final WspFilter wspFilter;
private final boolean download;
private final ResponseProgressListener progressListener;
public BuildPackageCallable(GraniteClientConfig clientConfig, TaskListener listener,
PackId packId, WspFilter wspFilter, boolean download) {
this.clientConfig = clientConfig;
this.listener = listener;
this.packId = packId;
this.wspFilter = wspFilter;
this.download = download;
this.progressListener = new JenkinsResponseProgressListener(listener);
}
private class Execution implements PackageManagerClientCallable<Result> {
final File toDirectory;
private Execution(File toDirectory) {
this.toDirectory = toDirectory;
}
public Result doExecute(PackageManagerClient client) throws Exception {
Result result = Result.SUCCESS;
client.setRequestTimeout(clientConfig.getRequestTimeout());
client.setServiceTimeout(clientConfig.getServiceTimeout());
client.waitForService();
listener.getLogger().printf(
"Checking for package %s on server %s%n", packId, clientConfig.getBaseUrl()
);
// first, create the package if it doesn't exist.
if (client.existsOnServer(packId)) {
listener.getLogger().printf("Found package: %s%n", client.getConsoleUiUrl(packId));
} else {
listener.getLogger().printf("Creating package.%n");
SimpleResponse r_create = client.create(packId);
if (r_create.isSuccess()) {
listener.getLogger().println(r_create.getMessage());
} else {
listener.fatalError(r_create.getMessage());
return Result.FAILURE;
}
}
// next, update the workspace filter if it is defined
if (wspFilter != null) {
SimpleResponse r_updateFilter = client.updateFilter(packId, wspFilter);
if (r_updateFilter.isSuccess()) {
listener.getLogger().println(r_updateFilter.getMessage());
} else {
listener.fatalError(r_updateFilter.getMessage());
return Result.FAILURE;
}
}
// next, build the package
listener.getLogger().printf("Building package %s.%n", packId);
DetailedResponse r_rebuild = client.build(packId, progressListener);
if (r_rebuild.isSuccess()) {
if (r_rebuild.hasErrors()) {
result = result.combine(Result.UNSTABLE);
}
listener.getLogger().println(r_rebuild.getMessage());
listener.getLogger().printf("Package location: %s%n", client.getConsoleUiUrl(packId));
} else {
listener.fatalError(r_rebuild.getMessage());
return Result.FAILURE;
}
// finally, download the package if requested
if (download) {
listener.getLogger().printf("Downloading %s to %s%n", packId, toDirectory);
DownloadResponse response = client.downloadToDirectory(packId, toDirectory);
listener.getLogger().printf("Downloaded %d bytes to file %s.%n", response.getLength(), response.getContent());
listener.getLogger().printf("Verifying downloaded package...%n");
PackId reId = PackId.identifyPackage(response.getContent());
if (packId.equals(reId)) {
listener.getLogger().printf("Package verified as %s.%n", packId);
} else {
throw new Exception("Package verification failed: " + response.getContent());
}
}
return result;
}
}
public Result invoke(File toDirectory, VirtualChannel channel) throws IOException, InterruptedException {
try {
return GraniteClientExecutor.execute(new Execution(toDirectory), clientConfig, listener);
} catch (Exception e) {
e.printStackTrace(listener.fatalError("Failed to build package.", e.getMessage()));
return Result.FAILURE;
}
}
}
|
package org.nishen.alma.toolkit.tasks;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.xml.bind.JAXBElement;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.nishen.alma.toolkit.entity.partners.Address;
import org.nishen.alma.toolkit.entity.partners.Address.AddressTypes;
import org.nishen.alma.toolkit.entity.partners.Address.Country;
import org.nishen.alma.toolkit.entity.partners.Addresses;
import org.nishen.alma.toolkit.entity.partners.ContactInfo;
import org.nishen.alma.toolkit.entity.partners.Email;
import org.nishen.alma.toolkit.entity.partners.Email.EmailTypes;
import org.nishen.alma.toolkit.entity.partners.Emails;
import org.nishen.alma.toolkit.entity.partners.IsoDetails;
import org.nishen.alma.toolkit.entity.partners.Notes;
import org.nishen.alma.toolkit.entity.partners.ObjectFactory;
import org.nishen.alma.toolkit.entity.partners.Partner;
import org.nishen.alma.toolkit.entity.partners.PartnerDetails;
import org.nishen.alma.toolkit.entity.partners.PartnerDetails.LocateProfile;
import org.nishen.alma.toolkit.entity.partners.PartnerDetails.SystemType;
import org.nishen.alma.toolkit.entity.partners.Partners;
import org.nishen.alma.toolkit.entity.partners.Phone;
import org.nishen.alma.toolkit.entity.partners.Phone.PhoneTypes;
import org.nishen.alma.toolkit.entity.partners.Phones;
import org.nishen.alma.toolkit.entity.partners.ProfileDetails;
import org.nishen.alma.toolkit.entity.partners.ProfileType;
import org.nishen.alma.toolkit.entity.partners.RequestExpiryType;
import org.nishen.alma.toolkit.entity.partners.Status;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gargoylesoftware.htmlunit.TextPage;
import com.gargoylesoftware.htmlunit.WebClient;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import com.gargoylesoftware.htmlunit.html.HtmlTable;
import com.gargoylesoftware.htmlunit.html.HtmlTableRow;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.name.Named;
/**
 * A task that synchronises Alma resource partner records with the LADD and Te Puna
 * partner registries.
 *
 * <p>
 * Current partners are fetched from Alma and from both registries; Alma partners are
 * then created or updated as required, using a small thread pool for the API calls.
*
* @author nishen
*/
public class TaskUpdateResourcePartners implements Task
{
private static final Logger log = LoggerFactory.getLogger(TaskUpdateResourcePartners.class);
private static final String TASKNAME = "updateResourcePartners";
private static final int PARTNERS_LIMIT = 100;
private static final ObjectFactory of = new ObjectFactory();
private Properties config;
private Provider<WebTarget> webTargetProviderAlma;
private Provider<WebClient> webClientProvider;
private String laddUrl;
private String tepunaUrl;
@Inject
private TaskUpdateResourcePartners(@Named("app.cmdline") final String[] args,
@Named("app.config") final Properties config,
@Named("ws.url.alma") Provider<WebTarget> webTargetProviderAlma,
Provider<WebClient> webClientProvider)
{
this.config = config;
this.laddUrl = config.getProperty("ws.url.ladd");
this.tepunaUrl = config.getProperty("ws.url.tepuna");
this.webTargetProviderAlma = webTargetProviderAlma;
this.webClientProvider = webClientProvider;
log.debug("initialised taskupdateresourcepartners");
}
@Override
public void run()
{
log.info("executing task: {}", this.getClass().getSimpleName());
ConcurrentMap<String, Partner> almaPartners = getAlmaPartners();
ConcurrentMap<String, Partner> partners = getLaddPartners();
partners.putAll(getTepunaPartners());
try
{
WebTarget t = webTargetProviderAlma.get().path("partners");
ExecutorService executor = Executors.newFixedThreadPool(6);
for (String s : partners.keySet())
{
Partner p = partners.get(s);
Partner ap = almaPartners.get(s);
if (ap == null)
{
log.debug("starting thread for create partner: {}", p.getPartnerDetails().getCode());
executor.execute(new UpdatePartnerTask(t, p, false));
}
else if (!isEqual(p, ap))
{
log.debug("starting thread for update partner: {}", p.getPartnerDetails().getCode());
executor.execute(new UpdatePartnerTask(t, p, true));
}
}
executor.shutdown();
executor.awaitTermination(1L, TimeUnit.HOURS);
}
catch (InterruptedException ie)
{
log.error("executor awaiting termination was interrupted: {}", ie);
log.debug("{}", ie);
}
catch (Exception e)
{
log.error("execution failure: {}", e);
log.debug("{}", e);
}
}
public Partner getAlmaPartner(String nuc)
{
WebTarget t = webTargetProviderAlma.get().path("partners").path(nuc);
Partner result = t.request(MediaType.APPLICATION_XML).get(Partner.class);
return result;
}
public ConcurrentMap<String, Partner> getAlmaPartners()
{
ConcurrentMap<String, Partner> partnerMap = new ConcurrentHashMap<String, Partner>();
long offset = 0;
long total = -1;
long count = 0;
WebTarget target = webTargetProviderAlma.get().path("partners");
try
{
ExecutorService executor = Executors.newFixedThreadPool(6);
log.debug("getAlmaPartners [count/total/offset]: {}/{}/{}", count, total, offset);
Future<Partners> initial = executor.submit(new FetchResourcePartners(target, offset));
Partners partners = initial.get();
total = partners.getTotalRecordCount();
offset += PARTNERS_LIMIT;
count += partners.getPartner().size();
for (Partner p : partners.getPartner())
partnerMap.put(p.getPartnerDetails().getCode(), p);
List<Future<Partners>> partial = new ArrayList<Future<Partners>>();
while (count < total)
{
log.debug("getAlmaPartners [count/total/offset]: {}/{}/{}", count, total, offset);
partial.add(executor.submit(new FetchResourcePartners(target, offset)));
offset += PARTNERS_LIMIT;
				count += PARTNERS_LIMIT; // each remaining fetch requests a full page
}
for (Future<Partners> future : partial)
{
partners = future.get();
for (Partner p : partners.getPartner())
partnerMap.put(p.getPartnerDetails().getCode(), p);
}
		}
		catch (ExecutionException ee)
		{
			log.error("execution failed: {}", ee.getMessage(), ee);
		}
		catch (InterruptedException ie)
		{
			log.error("execution interrupted: {}", ie.getMessage(), ie);
		}
		finally
		{
			// release worker threads even when a fetch fails
			executor.shutdown();
		}
return partnerMap;
}
public ConcurrentMap<String, Partner> getLaddPartners()
{
String prefix = "NLA";
String institutionCode = config.getProperty("ladd.institution.code");
ConcurrentMap<String, Partner> result = new ConcurrentHashMap<String, Partner>();
WebClient webClient = webClientProvider.get();
HtmlPage page = null;
try
{
page = webClient.getPage(laddUrl);
}
catch (IOException e)
{
log.error("unable to acquire page: {}", laddUrl);
return result;
}
		HtmlTable table = (HtmlTable) page.getElementById("suspension");
		if (table == null)
		{
			log.error("suspension table not found at: {}", laddUrl);
			return result;
		}
for (HtmlTableRow row : table.getRows())
{
String nuc = row.getCell(0).asText();
if ("NUC symbol".equals(nuc) || institutionCode.equals(nuc))
{
log.debug("skipping nuc: {}", nuc);
continue;
}
String org = row.getCell(1).asText();
boolean suspended = "Suspended".equals(row.getCell(3).asText());
Partner partner = new Partner();
partner.setLink("https://api-ap.hosted.exlibrisgroup.com/almaws/v1/partners/" + nuc);
PartnerDetails partnerDetails = new PartnerDetails();
partner.setPartnerDetails(partnerDetails);
ProfileDetails profileDetails = new ProfileDetails();
partnerDetails.setProfileDetails(profileDetails);
profileDetails.setProfileType(ProfileType.ISO);
RequestExpiryType requestExpiryType = new RequestExpiryType();
requestExpiryType.setValue("INTEREST_DATE");
requestExpiryType.setDesc("Expire by interest date");
IsoDetails isoDetails = new IsoDetails();
profileDetails.setIsoDetails(isoDetails);
isoDetails.setAlternativeDocumentDelivery(false);
isoDetails.setIllServer(config.getProperty("alma.ill.server"));
isoDetails.setIllPort(Integer.parseInt(config.getProperty("alma.ill.port")));
isoDetails.setIsoSymbol(prefix + ":" + nuc);
isoDetails.setSendRequesterInformation(false);
isoDetails.setSharedBarcodes(true);
isoDetails.setRequestExpiryType(requestExpiryType);
SystemType systemType = new SystemType();
systemType.setValue("LADD");
systemType.setDesc("LADD");
LocateProfile locateProfile = new LocateProfile();
locateProfile.setValue("LADD");
locateProfile.setDesc("LADD Locate Profile");
partnerDetails.setStatus(suspended ? Status.INACTIVE : Status.ACTIVE);
partnerDetails.setCode(nuc);
partnerDetails.setName(org);
partnerDetails.setSystemType(systemType);
partnerDetails.setAvgSupplyTime(4);
partnerDetails.setDeliveryDelay(4);
partnerDetails.setCurrency("AUD");
partnerDetails.setBorrowingSupported(true);
partnerDetails.setBorrowingWorkflow("LADD_Borrowing");
partnerDetails.setLendingSupported(true);
partnerDetails.setLendingWorkflow("LADD_Lending");
partnerDetails.setLocateProfile(locateProfile);
partnerDetails.setHoldingCode(nuc);
ContactInfo contactInfo = new ContactInfo();
partner.setContactInfo(contactInfo);
Addresses addresses = new Addresses();
contactInfo.setAddresses(addresses);
Emails emails = new Emails();
contactInfo.setEmails(emails);
Phones phones = new Phones();
contactInfo.setPhones(phones);
Notes notes = new Notes();
partner.setNotes(notes);
result.put(partner.getPartnerDetails().getCode(), partner);
}
return result;
}
public ConcurrentMap<String, Partner> getTepunaPartners()
{
String prefix = "NLNZ";
String institutionCode = config.getProperty("ladd.institution.code");
ConcurrentMap<String, Partner> result = new ConcurrentHashMap<String, Partner>();
WebClient webClient = webClientProvider.get();
TextPage page = null;
try
{
log.debug("tepuna url: {}", tepunaUrl);
page = webClient.getPage(tepunaUrl);
}
catch (IOException e)
{
log.error("unable to acquire page: {}", tepunaUrl);
return result;
}
log.debug("{}", page.getContent());
try (CSVParser parser = CSVParser.parse(page.getContent(), CSVFormat.DEFAULT.withHeader()))
{
for (CSVRecord record : parser)
{
String nuc = record.get(0);
if ("NUC symbol".equals(nuc) || institutionCode.equals(nuc))
{
log.debug("skipping nuc: {}", nuc);
continue;
}
nuc = prefix + ":" + nuc;
String org = record.get(2);
Partner partner = new Partner();
partner.setLink("https://api-ap.hosted.exlibrisgroup.com/almaws/v1/partners/" + nuc);
PartnerDetails partnerDetails = new PartnerDetails();
partner.setPartnerDetails(partnerDetails);
ProfileDetails profileDetails = new ProfileDetails();
partnerDetails.setProfileDetails(profileDetails);
profileDetails.setProfileType(ProfileType.ISO);
RequestExpiryType requestExpiryType = new RequestExpiryType();
requestExpiryType.setValue("INTEREST_DATE");
requestExpiryType.setDesc("Expire by interest date");
IsoDetails isoDetails = new IsoDetails();
profileDetails.setIsoDetails(isoDetails);
isoDetails.setAlternativeDocumentDelivery(false);
isoDetails.setIllServer(config.getProperty("alma.ill.server"));
isoDetails.setIllPort(Integer.parseInt(config.getProperty("alma.ill.port")));
isoDetails.setIsoSymbol(nuc);
isoDetails.setSendRequesterInformation(false);
isoDetails.setSharedBarcodes(true);
isoDetails.setRequestExpiryType(requestExpiryType);
SystemType systemType = new SystemType();
systemType.setValue("LADD");
systemType.setDesc("LADD");
LocateProfile locateProfile = new LocateProfile();
locateProfile.setValue("LADD");
locateProfile.setDesc("LADD Locate Profile");
partnerDetails.setStatus(Status.ACTIVE);
partnerDetails.setCode(nuc);
partnerDetails.setName(org);
partnerDetails.setSystemType(systemType);
partnerDetails.setAvgSupplyTime(4);
partnerDetails.setDeliveryDelay(4);
partnerDetails.setCurrency("AUD");
partnerDetails.setBorrowingSupported(true);
partnerDetails.setBorrowingWorkflow("LADD_Borrowing");
partnerDetails.setLendingSupported(true);
partnerDetails.setLendingWorkflow("LADD_Lending");
partnerDetails.setLocateProfile(locateProfile);
partnerDetails.setHoldingCode(nuc);
ContactInfo contactInfo = new ContactInfo();
partner.setContactInfo(contactInfo);
Addresses addresses = new Addresses();
contactInfo.setAddresses(addresses);
String s = record.get(5);
if (s == null || "".equals(s.trim()))
s = record.get(4);
if (s != null && !"".equals(s.trim()))
{
Address address = getAddress(s);
address.setPreferred(true);
address.setAddressTypes(new AddressTypes());
address.getAddressTypes().getAddressType().add("ALL");
addresses.getAddress().add(address);
log.debug("nuc/address [{}]: {}", nuc, address);
}
Emails emails = new Emails();
contactInfo.setEmails(emails);
s = record.get(6);
if (s != null && !"".equals(s.trim()))
{
Email email = new Email();
email.setEmailTypes(new EmailTypes());
email.setEmailAddress(s);
email.setPreferred(true);
email.setDescription("Primary Email Address");
email.getEmailTypes().getEmailType().add("ALL");
emails.getEmail().add(email);
log.debug("nuc/email1 [{}]: {}", nuc, email);
}
s = record.get(13);
if (s != null && !"".equals(s.trim()))
{
Email email = new Email();
email.setEmailTypes(new EmailTypes());
email.setEmailAddress(s);
email.setPreferred(true);
String m = record.get(12);
if (m != null && !"".equals(m))
email.setDescription("Manager Email Address: " + m);
else
email.setDescription("Manager Email Address");
email.getEmailTypes().getEmailType().add("ALL");
emails.getEmail().add(email);
log.debug("nuc/email2 [{}]: {}", nuc, email);
}
Phones phones = new Phones();
contactInfo.setPhones(phones);
s = record.get(15);
if (s == null || "".equals(s.trim()))
s = record.get(7);
if (s != null && !"".equals(s.trim()))
{
Phone phone = new Phone();
phone.setPhoneTypes(new PhoneTypes());
phone.setPhoneNumber(s);
phone.setPreferred(true);
phone.setPreferredSMS(false);
phone.getPhoneTypes().getPhoneType().add("ALL");
phones.getPhone().add(phone);
log.debug("nuc/phone [{}]: {}", nuc, phone);
}
Notes notes = new Notes();
partner.setNotes(notes);
result.put(partner.getPartnerDetails().getCode(), partner);
}
}
catch (IOException ioe)
{
log.error("unable to parse data: {}", tepunaUrl);
}
return result;
}
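	/**
	 * Parses a comma-separated address string into an Alma Address, reading the
	 * trailing components as country, postcode and city. For example, a
	 * hypothetical input of "Level 2, 1 Example St, Wellington, 6011, New Zealand"
	 * yields line1="Level 2", line2="1 Example St", city="Wellington",
	 * postalCode="6011" and country=NZL.
	 */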
public Address getAddress(String s)
{
Address address = new Address();
if (s == null || s.trim().length() == 0)
return address;
List<String> tmpl = Arrays.asList(s.split(" *, *"));
List<String> addr = new ArrayList<String>();
for (String tli : tmpl)
if (tli != null && !"".equals(tli))
addr.add(0, tli);
if (addr.size() == 0)
return address;
address.setLine1(addr.get(addr.size() - 1));
Country country = new Country();
switch (addr.get(0))
{
case "Australia":
country.setValue("AUS");
country.setDesc("Australia");
addr.remove(0);
address.setCountry(country);
break;
case "New Zealand":
country.setValue("NZL");
country.setDesc("New Zealand");
addr.remove(0);
address.setCountry(country);
break;
default:
}
if (addr.size() == 0)
return address;
if (addr.get(0).matches("\\d{4}"))
{
address.setPostalCode(addr.get(0));
addr.remove(0);
if (addr.size() == 0)
return address;
address.setCity(addr.get(0));
addr.remove(0);
if (addr.size() == 0)
return address;
}
else
{
address.setCity(addr.get(0));
addr.remove(0);
if (addr.size() == 0)
return address;
}
Collections.reverse(addr);
address.setLine1(addr.get(0));
addr.remove(0);
if (addr.size() == 0)
return address;
address.setLine2(addr.get(0));
addr.remove(0);
if (addr.size() == 0)
return address;
address.setLine3(addr.get(0));
addr.remove(0);
if (addr.size() == 0)
return address;
address.setLine4(addr.get(0));
addr.remove(0);
if (addr.size() == 0)
return address;
address.setLine5(addr.get(0));
addr.remove(0);
return address;
}
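	/**
	 * Compares two partners for equality, ignoring address start dates (which
	 * Alma stamps on save). Note that, as a side effect, the start dates on the
	 * addresses of both arguments are cleared.
	 */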
public boolean isEqual(Partner a, Partner b)
{
if (a == null && b == null)
return true;
if (a == null || b == null)
return false;
if (a.getContactInfo() != null)
if (a.getContactInfo().getAddresses() != null)
for (Address address : a.getContactInfo().getAddresses().getAddress())
address.setStartDate(null);
if (b.getContactInfo() != null)
if (b.getContactInfo().getAddresses() != null)
for (Address address : b.getContactInfo().getAddresses().getAddress())
address.setStartDate(null);
return a.equals(b);
}
@Override
public Map<String, String> getUsageOptions()
{
Map<String, String> options = new HashMap<String, String>();
return options;
}
public static String getTaskName()
{
return TASKNAME;
}
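	/**
	 * Runnable that pushes a single partner to the Alma partners API: a PUT to
	 * partners/{code} when replacing an existing record, otherwise a POST to
	 * create a new one.
	 */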
private class UpdatePartnerTask implements Runnable
{
private WebTarget target;
private Partner partner;
private boolean replace;
public UpdatePartnerTask(WebTarget target, Partner partner, boolean replace)
{
this.target = target;
this.partner = partner;
this.replace = replace;
}
@Override
public void run()
{
String m = MediaType.APPLICATION_XML;
String action = replace ? "Updating" : "Creating";
log.info("{} partner[{}]: {}", action, partner.getPartnerDetails().getCode(),
partner.getPartnerDetails().getName());
Partner result = null;
JAXBElement<Partner> p = of.createPartner(partner);
String code = partner.getPartnerDetails().getCode();
try
{
if (replace)
{
result = target.path(code).request(m).put(Entity.entity(p, m), Partner.class);
}
else
{
result = target.request(m).post(Entity.entity(p, m), Partner.class);
}
}
catch (Exception e)
{
log.error("error adding partner:\n{}", result, e);
}
log.debug("result:\n{}", result);
}
}
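	/**
	 * Callable that fetches one page of partners from Alma at the given offset,
	 * using PARTNERS_LIMIT as the page size.
	 */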
private class FetchResourcePartners implements Callable<Partners>
{
private WebTarget target;
private long offset;
public FetchResourcePartners(WebTarget target, long offset)
{
this.target = target;
this.offset = offset;
}
@Override
public Partners call() throws Exception
{
WebTarget t = target.queryParam("limit", PARTNERS_LIMIT).queryParam("offset", offset);
Partners partners = t.request(MediaType.APPLICATION_XML).get(Partners.class);
log.debug("fetchResourcePartners [offset]: {}", offset);
return partners;
}
}
}
|
package org.openlmis.referencedata.dto;
import java.util.UUID;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import org.openlmis.referencedata.domain.Program;
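/**
 * Object-reference DTO for a Program. The custom constructor builds the
 * reference from the service URL and the "programs" resource path; the
 * remaining fields carry the exported Program attributes.
 */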
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
public final class ProgramObjectReferenceDto extends ObjectReferenceDto
implements Program.Exporter {
private static final String PROGRAMS = "programs";
private String code;
private String name;
private String description;
private Boolean active;
private Boolean periodsSkippable;
private Boolean skipAuthorization;
private Boolean showNonFullSupplyTab;
private Boolean enableDatePhysicalStockCountCompleted;
public ProgramObjectReferenceDto(UUID id, String serviceUrl) {
super(serviceUrl, PROGRAMS, id);
}
}
|
package org.quartzpowered.protocol.codec.v1_8_R1;
import org.quartzpowered.network.protocol.codec.NoopCodec;
import org.quartzpowered.protocol.codec.indentifier.IdentifierProtocol;
import org.quartzpowered.protocol.codec.indentifier.common.client.KickCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.common.client.CompressionCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.login.client.EncryptionRequestCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.login.client.LoginResponseCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.login.server.EncryptionResponseCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.login.server.LoginRequestCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.play.client.*;
import org.quartzpowered.protocol.codec.v1_8_R1.play.server.*;
import org.quartzpowered.protocol.codec.v1_8_R1.play.shared.ConfirmTransactionCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.play.shared.KeepAliveCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.play.shared.PluginMessageCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.status.client.PongCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.status.client.StatusResponseCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.status.server.PingCodec;
import org.quartzpowered.protocol.codec.v1_8_R1.status.server.StatusRequestCodec;
import org.quartzpowered.protocol.packet.common.client.CompressionPacket;
import org.quartzpowered.protocol.packet.common.client.KickPacket;
import org.quartzpowered.protocol.packet.login.client.EncryptionRequestPacket;
import org.quartzpowered.protocol.packet.login.client.LoginResponsePacket;
import org.quartzpowered.protocol.packet.login.server.EncryptionResponsePacket;
import org.quartzpowered.protocol.packet.login.server.LoginRequestPacket;
import org.quartzpowered.protocol.packet.play.client.*;
import org.quartzpowered.protocol.packet.play.server.*;
import org.quartzpowered.protocol.packet.play.shared.ConfirmTransactionPacket;
import org.quartzpowered.protocol.packet.play.shared.HeldItemChangePacket;
import org.quartzpowered.protocol.packet.play.shared.KeepAlivePacket;
import org.quartzpowered.protocol.packet.play.shared.PluginMessagePacket;
import org.quartzpowered.protocol.packet.status.client.PongPacket;
import org.quartzpowered.protocol.packet.status.client.StatusResponsePacket;
import org.quartzpowered.protocol.packet.status.server.PingPacket;
import org.quartzpowered.protocol.packet.status.server.StatusRequestPacket;
import static org.quartzpowered.network.protocol.ProtocolState.*;
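/**
 * Packet and codec registrations for Minecraft protocol version 47
 * (client version 1.8). Commented-out lines mark packet ids that are not
 * wired to a codec here.
 */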
public class ProtocolV1_8_R1 extends IdentifierProtocol {
@Override
public String getName() {
return "1.8";
}
@Override
public int getVersion() {
return 47;
}
@Override
protected void registerPackets() {
super.registerPackets();
serverBoundPacket(STATUS, 0x00, StatusRequestPacket.class, new StatusRequestCodec());
serverBoundPacket(STATUS, 0x01, PingPacket.class, new PingCodec());
clientBoundPacket(STATUS, 0x00, StatusResponsePacket.class, new StatusResponseCodec());
clientBoundPacket(STATUS, 0x01, PongPacket.class, new PongCodec());
serverBoundPacket(LOGIN, 0x00, LoginRequestPacket.class, new LoginRequestCodec());
serverBoundPacket(LOGIN, 0x01, EncryptionResponsePacket.class, new EncryptionResponseCodec());
clientBoundPacket(LOGIN, 0x01, EncryptionRequestPacket.class, new EncryptionRequestCodec());
clientBoundPacket(LOGIN, 0x02, LoginResponsePacket.class, new LoginResponseCodec());
clientBoundPacket(LOGIN, 0x03, CompressionPacket.class, new CompressionCodec());
serverBoundPacket(PLAY, 0x00, KeepAlivePacket.class, new KeepAliveCodec());
serverBoundPacket(PLAY, 0x01, PlayerChatMessagePacket.class, new PlayerChatMessageCodec());
serverBoundPacket(PLAY, 0x02, UseEntityPacket.class, new UseEntityCodecIn());
serverBoundPacket(PLAY, 0x03, PlayerPacket.class, new PlayerCodec());
serverBoundPacket(PLAY, 0x04, PlayerPositionPacket.class, new PlayerPositionCodec());
serverBoundPacket(PLAY, 0x05, PlayerLookPacket.class, new PlayerLookCodec());
serverBoundPacket(PLAY, 0x06, PlayerPositionLookPacket.class, new PlayerPositionLookCodec());
serverBoundPacket(PLAY, 0x07, PlayerActionPacket.class, new PlayerActionCodec());
// serverBoundPacket(PLAY, 0x08, PlayerBlockPlacementPacket.class, new PlayerBlockPlacementCodec());
serverBoundPacket(PLAY, 0x09, HeldItemChangePacket.class, new HeldItemChangeCodec());
serverBoundPacket(PLAY, 0x0A, PlayerAnimationPacket.class, new NoopCodec<>());
serverBoundPacket(PLAY, 0x0B, EntityActionPacket.class, new EntityActionCodec());
// serverBoundPacket(PLAY, 0x0C, SteerVehiclePacket.class, new SteerVehicleCodec());
serverBoundPacket(PLAY, 0x0D, CloseWindowPacket.class, new CloseWindowCodec());
// serverBoundPacket(PLAY, 0x0E, ClickWindowPacket.class, new ClickWindowCodec());
serverBoundPacket(PLAY, 0x0F, ConfirmTransactionPacket.class, new ConfirmTransactionCodec());
// serverBoundPacket(PLAY, 0x10, CreativeInventoryActionPacket.class, new CreativeInventoryActionCodec());
// serverBoundPacket(PLAY, 0x11, EnchantItemPacket.class, new EnchantItemCodec());
// serverBoundPacket(PLAY, 0x12, UpdateSignPacket.class, new UpdateSignCodec());
serverBoundPacket(PLAY, 0x13, PlayerAbilitiesPacket.class, new PlayerAbilitiesCodec());
// serverBoundPacket(PLAY, 0x14, TabCompletePacket.class, new TabCompleteCodec());
serverBoundPacket(PLAY, 0x15, ClientSettingsPacket.class, new ClientSettingsCodec());
serverBoundPacket(PLAY, 0x16, ClientStatusPacket.class, new ClientStatusCodec());
serverBoundPacket(PLAY, 0x17, PluginMessagePacket.class, new PluginMessageCodec());
// serverBoundPacket(PLAY, 0x18, SpectatePacket.class, new SpectateCodec());
serverBoundPacket(PLAY, 0x19, ResourcePackStatusPacket.class, new ResourcePackStatusCodec());
clientBoundPacket(PLAY, 0x00, KeepAlivePacket.class, new KeepAliveCodec());
clientBoundPacket(PLAY, 0x01, JoinGamePacket.class, new JoinGameCodec());
clientBoundPacket(PLAY, 0x02, ChatMessagePacket.class, new ChatMessageCodec());
clientBoundPacket(PLAY, 0x03, TimeUpdatePacket.class, new TimeUpdateCodec());
// clientBoundPacket(PLAY, 0x04, EntityEquipmentPacket.class, new EntityEquipmentCodec());
clientBoundPacket(PLAY, 0x05, SpawnPositionPacket.class, new SpawnPositionCodec());
clientBoundPacket(PLAY, 0x06, UpdateHealthPacket.class, new UpdateHealthCodec());
clientBoundPacket(PLAY, 0x07, RespawnPacket.class, new RespawnCodec());
clientBoundPacket(PLAY, 0x08, PlayerTeleportPacket.class, new PlayerTeleportCodec());
clientBoundPacket(PLAY, 0x09, HeldItemChangePacket.class, new PlayerHeldItemChangeCodec());
// clientBoundPacket(PLAY, 0x0A, UseBedPacket.class, new UseBedCodec());
clientBoundPacket(PLAY, 0x0B, AnimationPacket.class, new AnimationCodec());
clientBoundPacket(PLAY, 0x0C, SpawnPlayerPacket.class, new SpawnPlayerCodec());
clientBoundPacket(PLAY, 0x0D, CollectItemPacket.class, new CollectItemCodec());
// clientBoundPacket(PLAY, 0x0E, SpawnObjectPacket.class, new SpawnObjectCodec());
// clientBoundPacket(PLAY, 0x0F, SpawnMobPacket.class, new SpawnMobCodec());
// clientBoundPacket(PLAY, 0x10, SpawnPaintingPacket.class, new SpawnPaintingCodec());
// clientBoundPacket(PLAY, 0x11, SpawnExperiencePacket.class, new SpawnExperienceCodec());
clientBoundPacket(PLAY, 0x12, EntityVelocityPacket.class, new EntityVelocityCodec());
clientBoundPacket(PLAY, 0x13, EntityDestroyPacket.class, new EntityDestroyCodec());
clientBoundPacket(PLAY, 0x14, EntityPacket.class, new EntityCodec());
clientBoundPacket(PLAY, 0x15, EntityMovePacket.class, new EntityMoveCodec());
clientBoundPacket(PLAY, 0x16, EntityLookPacket.class, new EntityLookCodec());
clientBoundPacket(PLAY, 0x17, EntityLookMovePacket.class, new EntityLookMoveCodec());
clientBoundPacket(PLAY, 0x18, EntityTeleportPacket.class, new EntityTeleportCodec());
clientBoundPacket(PLAY, 0x19, EntityHeadLookPacket.class, new EntityHeadLookCodec());
clientBoundPacket(PLAY, 0x1A, EntityStatusPacket.class, new EntityStatusCodec());
clientBoundPacket(PLAY, 0x1B, AttachEntityPacket.class, new AttachEntityCodec());
clientBoundPacket(PLAY, 0x1C, EntityMetadataPacket.class, new EntityMetadataCodec());
// clientBoundPacket(PLAY, 0x1D, EntityEffectPacket.class, new EntityEffectCodec());
clientBoundPacket(PLAY, 0x1E, RemoveEntityEffectPacket.class, new RemoveEntityEffectCodec());
// clientBoundPacket(PLAY, 0x1F, PlayerExperiencePacket.class, new PlayerExperienceCodec());
// clientBoundPacket(PLAY, 0x20, EntityPropertiesPacket.class, new EntityPropertiesCodec());
clientBoundPacket(PLAY, 0x21, ChunkPacket.class, new ChunkCodec());
// clientBoundPacket(PLAY, 0x22, MultiBlockChangePacket.class, new MultiBlockChangeCodec());
// clientBoundPacket(PLAY, 0x23, BlockChangePacket.class, new BlockChangeCodec());
// clientBoundPacket(PLAY, 0x24, BlockActionPacket.class, new BlockActionCodec());
//        clientBoundPacket(PLAY, 0x25, BlockBreakAnimationPacket.class, new BlockBreakAnimationCodec());
clientBoundPacket(PLAY, 0x26, ChunkBulkPacket.class, new ChunkBulkCodec());
// clientBoundPacket(PLAY, 0x27, ExplosionPacket.class, new ExplosionCodec());
// clientBoundPacket(PLAY, 0x28, EffectPacket.class, new EffectCodec());
// clientBoundPacket(PLAY, 0x29, SoundEffectPacket.class, new SoundEffectCodec());
clientBoundPacket(PLAY, 0x2A, ParticlePacket.class, new ParticleCodec());
// clientBoundPacket(PLAY, 0x2B, ChangeGameStatePacket.class, new ChangeGameStateCodec());
// clientBoundPacket(PLAY, 0x2C, SpawnGlobalEntityPacket.class, new SpawnGlobalEntityCodec());
clientBoundPacket(PLAY, 0x2D, OpenWindowPacket.class, new OpenWindowCodec());
clientBoundPacket(PLAY, 0x2E, CloseWindowPacket.class, new CloseWindowCodec());
clientBoundPacket(PLAY, 0x2F, SetExperiencePacket.class, new SetExperienceCodec());
clientBoundPacket(PLAY, 0x30, WindowItemsPacket.class, new WindowItemsCodec());
// clientBoundPacket(PLAY, 0x31, WindowPropertyPacket.class, new WindowPropertyCodec());
clientBoundPacket(PLAY, 0x32, ConfirmTransactionPacket.class, new ConfirmTransactionCodec());
//        clientBoundPacket(PLAY, 0x33, UpdateSignPacket.class, new UpdateSignCodec());
// clientBoundPacket(PLAY, 0x34, MapsPacket.class, new MapsCodec());
// clientBoundPacket(PLAY, 0x35, UpdateBlockEntityPacket.class, new UpdateBlockEntityCodec());
//        clientBoundPacket(PLAY, 0x36, SignEditorOpenPacket.class, new SignEditorOpenCodec());
clientBoundPacket(PLAY, 0x37, StatisticsPacket.class, new StatisticsCodec());
clientBoundPacket(PLAY, 0x38, PlayerInfoPacket.class, new PlayerInfoCodec());
clientBoundPacket(PLAY, 0x39, PlayerAbilitiesPacket.class, new PlayerAbilitiesCodec());
// clientBoundPacket(PLAY, 0x3A, TabCompletePacket.class, new TabCompleteCodec());
// clientBoundPacket(PLAY, 0x3B, ScoreboardObjectivePacket.class, new ScoreboardObjectiveCodec());
// clientBoundPacket(PLAY, 0x3C, UpdateScorePacket.class, new UpdateScoreCodec());
// clientBoundPacket(PLAY, 0x3D, DisplayScoreboardPacket.class, new DisplayScoreboardCodec());
// clientBoundPacket(PLAY, 0x3E, TeamsPacket.class, new TeamsCodec());
clientBoundPacket(PLAY, 0x3F, PluginMessagePacket.class, new PluginMessageCodec());
clientBoundPacket(PLAY, 0x40, KickPacket.class, new KickCodec());
clientBoundPacket(PLAY, 0x41, ServerDifficultyPacket.class, new ServerDifficultyCodec());
// clientBoundPacket(PLAY, 0x42, CombatEventPacket.class, new CombatEventCodec());
// clientBoundPacket(PLAY, 0x43, CameraPacket.class, new CameraCodec());
// clientBoundPacket(PLAY, 0x44, WorldBorderPacket.class, new WorldBorderCodec());
clientBoundPacket(PLAY, 0x45, TitlePacket.class, new TitleCodec());
clientBoundPacket(PLAY, 0x46, CompressionPacket.class, new CompressionCodec());
// clientBoundPacket(PLAY, 0x47, PlayerListHeaderFooterPacket.class, new PlayerListHeaderFooterCodec());
//        clientBoundPacket(PLAY, 0x48, ResourcePackSendPacket.class, new ResourcePackSendCodec());
// clientBoundPacket(PLAY, 0x49, UpdateEntityNBTPacket.class, new UpdateEntityNBTCodec());
}
}
|
package org.sagebionetworks.web.client.widget.entity;
import java.util.List;
import org.sagebionetworks.repo.model.Entity;
import org.sagebionetworks.repo.model.attachment.AttachmentData;
import org.sagebionetworks.schema.adapter.JSONObjectAdapter;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.web.client.DisplayConstants;
import org.sagebionetworks.web.client.DisplayUtils;
import org.sagebionetworks.web.client.EntityTypeProvider;
import org.sagebionetworks.web.client.GlobalApplicationState;
import org.sagebionetworks.web.client.SynapseClientAsync;
import org.sagebionetworks.web.client.events.EntityUpdatedEvent;
import org.sagebionetworks.web.client.security.AuthenticationController;
import org.sagebionetworks.web.client.transform.NodeModelCreator;
import org.sagebionetworks.web.client.widget.SynapseWidgetPresenter;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
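/**
 * Presenter for the entity attachments widget: displays an entity's
 * attachments and supports deleting an attachment by its token id.
 */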
public class Attachments implements AttachmentsView.Presenter,
SynapseWidgetPresenter {
private AttachmentsView view;
private SynapseClientAsync synapseClient;
private GlobalApplicationState globalApplicationState;
private AuthenticationController authenticationController;
private NodeModelCreator nodeModelCreator;
private EntityTypeProvider entityTypeProvider;
private JSONObjectAdapter jsonObjectAdapter;
private EventBus bus;
private Entity entity;
@Inject
public Attachments(AttachmentsView view, SynapseClientAsync synapseClient,
GlobalApplicationState globalApplicationState,
AuthenticationController authenticationController,
NodeModelCreator nodeModelCreator,
EntityTypeProvider entityTypeProvider,
JSONObjectAdapter jsonObjectAdapter,
EventBus bus) {
this.view = view;
this.synapseClient = synapseClient;
this.globalApplicationState = globalApplicationState;
this.authenticationController = authenticationController;
this.nodeModelCreator = nodeModelCreator;
this.entityTypeProvider = entityTypeProvider;
this.jsonObjectAdapter = jsonObjectAdapter;
this.bus = bus;
view.setPresenter(this);
}
public void configure(String baseUrl, Entity entity) {
this.entity = entity;
view.configure(baseUrl, entity.getId(), entity.getAttachments());
}
@Override
public Widget asWidget() {
return view.asWidget();
}
@Override
public void deleteAttachment(final String tokenId) {
List<AttachmentData> attachments = entity.getAttachments();
if(tokenId != null) {
// find attachment via token and remove it
AttachmentData found = null;
for(AttachmentData data : attachments) {
if(tokenId.equals(data.getTokenId())) {
found = data;
}
}
if(found != null) {
// save name and remove from entity
final String deletedName = found.getName();
attachments.remove(found);
JSONObjectAdapter adapter = jsonObjectAdapter.createNew();
try {
entity.writeToJSONObject(adapter);
} catch (JSONObjectAdapterException e) {
view.showErrorMessage(DisplayConstants.ERROR_DELETING_ATTACHMENT);
return;
}
// update entity minus attachment
synapseClient.createOrUpdateEntity(adapter.toJSONString(), null, false, new AsyncCallback<String>() {
@Override
public void onSuccess(String result) {
view.attachmentDeleted(tokenId, deletedName);
bus.fireEvent(new EntityUpdatedEvent());
}
@Override
public void onFailure(Throwable caught) {
if(!DisplayUtils.handleServiceException(caught, globalApplicationState.getPlaceChanger(), authenticationController.getLoggedInUser())) {
view.showErrorMessage(DisplayConstants.ERROR_DELETING_ATTACHMENT);
}
}
});
} else {
view.showErrorMessage(DisplayConstants.ERROR_DELETING_ATTACHMENT);
}
} else {
view.showErrorMessage(DisplayConstants.ERROR_DELETING_ATTACHMENT);
}
}
}
|
package org.sagebionetworks.web.server.servlet;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.entity.ContentType;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document.OutputSettings;
import org.jsoup.safety.Whitelist;
import org.sagebionetworks.StackConfiguration;
import org.sagebionetworks.client.AsynchJobType;
import org.sagebionetworks.client.exceptions.SynapseClientException;
import org.sagebionetworks.client.exceptions.SynapseException;
import org.sagebionetworks.client.exceptions.SynapseNotFoundException;
import org.sagebionetworks.client.exceptions.SynapseResultNotReadyException;
import org.sagebionetworks.client.exceptions.SynapseTableUnavailableException;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.ACTAccessRequirement;
import org.sagebionetworks.repo.model.AccessApproval;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.AccessRequirement;
import org.sagebionetworks.repo.model.Annotations;
import org.sagebionetworks.repo.model.Entity;
import org.sagebionetworks.repo.model.EntityBundle;
import org.sagebionetworks.repo.model.EntityHeader;
import org.sagebionetworks.repo.model.EntityPath;
import org.sagebionetworks.repo.model.EntityType;
import org.sagebionetworks.repo.model.FileEntity;
import org.sagebionetworks.repo.model.JoinTeamSignedToken;
import org.sagebionetworks.repo.model.LogEntry;
import org.sagebionetworks.repo.model.MembershipInvitation;
import org.sagebionetworks.repo.model.MembershipInvtnSubmission;
import org.sagebionetworks.repo.model.MembershipRequest;
import org.sagebionetworks.repo.model.MembershipRqstSubmission;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.ProjectHeader;
import org.sagebionetworks.repo.model.ProjectListSortColumn;
import org.sagebionetworks.repo.model.ProjectListType;
import org.sagebionetworks.repo.model.Reference;
import org.sagebionetworks.repo.model.ResponseMessage;
import org.sagebionetworks.repo.model.RestrictableObjectDescriptor;
import org.sagebionetworks.repo.model.RestrictableObjectType;
import org.sagebionetworks.repo.model.SignedTokenInterface;
import org.sagebionetworks.repo.model.Team;
import org.sagebionetworks.repo.model.TeamMember;
import org.sagebionetworks.repo.model.TeamMembershipStatus;
import org.sagebionetworks.repo.model.TrashedEntity;
import org.sagebionetworks.repo.model.UserGroupHeaderResponsePage;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.repo.model.VersionInfo;
import org.sagebionetworks.repo.model.Versionable;
import org.sagebionetworks.repo.model.asynch.AsynchronousRequestBody;
import org.sagebionetworks.repo.model.asynch.AsynchronousResponseBody;
import org.sagebionetworks.repo.model.auth.NewUserSignedToken;
import org.sagebionetworks.repo.model.dao.WikiPageKey;
import org.sagebionetworks.repo.model.dao.WikiPageKeyHelper;
import org.sagebionetworks.repo.model.discussion.DiscussionFilter;
import org.sagebionetworks.repo.model.discussion.Forum;
import org.sagebionetworks.repo.model.doi.Doi;
import org.sagebionetworks.repo.model.entity.query.Condition;
import org.sagebionetworks.repo.model.entity.query.EntityFieldName;
import org.sagebionetworks.repo.model.entity.query.EntityQuery;
import org.sagebionetworks.repo.model.entity.query.EntityQueryResults;
import org.sagebionetworks.repo.model.entity.query.EntityQueryUtils;
import org.sagebionetworks.repo.model.entity.query.Operator;
import org.sagebionetworks.repo.model.entity.query.Sort;
import org.sagebionetworks.repo.model.entity.query.SortDirection;
import org.sagebionetworks.repo.model.file.BatchFileHandleCopyRequest;
import org.sagebionetworks.repo.model.file.BatchFileHandleCopyResult;
import org.sagebionetworks.repo.model.file.BatchFileRequest;
import org.sagebionetworks.repo.model.file.BatchFileResult;
import org.sagebionetworks.repo.model.file.ExternalFileHandle;
import org.sagebionetworks.repo.model.file.FileHandle;
import org.sagebionetworks.repo.model.file.FileHandleCopyRequest;
import org.sagebionetworks.repo.model.file.FileHandleCopyResult;
import org.sagebionetworks.repo.model.file.FileHandleResults;
import org.sagebionetworks.repo.model.file.UploadDestination;
import org.sagebionetworks.repo.model.message.MessageToUser;
import org.sagebionetworks.repo.model.message.NotificationSettingsSignedToken;
import org.sagebionetworks.repo.model.principal.AddEmailInfo;
import org.sagebionetworks.repo.model.principal.AliasCheckRequest;
import org.sagebionetworks.repo.model.principal.AliasCheckResponse;
import org.sagebionetworks.repo.model.principal.AliasType;
import org.sagebionetworks.repo.model.principal.PrincipalAliasRequest;
import org.sagebionetworks.repo.model.principal.PrincipalAliasResponse;
import org.sagebionetworks.repo.model.project.ProjectSettingsType;
import org.sagebionetworks.repo.model.project.StorageLocationSetting;
import org.sagebionetworks.repo.model.project.UploadDestinationListSetting;
import org.sagebionetworks.repo.model.provenance.Activity;
import org.sagebionetworks.repo.model.quiz.PassingRecord;
import org.sagebionetworks.repo.model.quiz.Quiz;
import org.sagebionetworks.repo.model.quiz.QuizResponse;
import org.sagebionetworks.repo.model.request.ReferenceList;
import org.sagebionetworks.repo.model.search.SearchResults;
import org.sagebionetworks.repo.model.search.query.SearchQuery;
import org.sagebionetworks.repo.model.subscription.Etag;
import org.sagebionetworks.repo.model.table.ColumnChange;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnModelPage;
import org.sagebionetworks.repo.model.table.FacetColumnRequest;
import org.sagebionetworks.repo.model.table.RowReferenceSet;
import org.sagebionetworks.repo.model.table.RowSelection;
import org.sagebionetworks.repo.model.table.SortItem;
import org.sagebionetworks.repo.model.table.TableFileHandleResults;
import org.sagebionetworks.repo.model.table.TableSchemaChangeRequest;
import org.sagebionetworks.repo.model.table.TableUpdateRequest;
import org.sagebionetworks.repo.model.table.TableUpdateTransactionRequest;
import org.sagebionetworks.repo.model.table.ViewScope;
import org.sagebionetworks.repo.model.table.ViewType;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiHeader;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiHistorySnapshot;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiOrderHint;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage;
import org.sagebionetworks.repo.model.versionInfo.SynapseVersionInfo;
import org.sagebionetworks.repo.model.wiki.WikiPage;
import org.sagebionetworks.schema.adapter.AdapterFactory;
import org.sagebionetworks.schema.adapter.JSONArrayAdapter;
import org.sagebionetworks.schema.adapter.JSONEntity;
import org.sagebionetworks.schema.adapter.JSONObjectAdapter;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.AdapterFactoryImpl;
import org.sagebionetworks.schema.adapter.org.json.JSONArrayAdapterImpl;
import org.sagebionetworks.schema.adapter.org.json.JSONObjectAdapterImpl;
import org.sagebionetworks.table.query.ParseException;
import org.sagebionetworks.table.query.TableQueryParser;
import org.sagebionetworks.table.query.util.TableSqlProcessor;
import org.sagebionetworks.util.SerializationUtils;
import org.sagebionetworks.web.client.SynapseClient;
import org.sagebionetworks.web.client.view.TeamRequestBundle;
import org.sagebionetworks.web.shared.AccessRequirementUtils;
import org.sagebionetworks.web.shared.AccessRequirementsTransport;
import org.sagebionetworks.web.shared.EntityBundlePlus;
import org.sagebionetworks.web.shared.EntityConstants;
import org.sagebionetworks.web.shared.MembershipRequestBundle;
import org.sagebionetworks.web.shared.OpenTeamInvitationBundle;
import org.sagebionetworks.web.shared.OpenUserInvitationBundle;
import org.sagebionetworks.web.shared.PaginatedResults;
import org.sagebionetworks.web.shared.ProjectDisplayBundle;
import org.sagebionetworks.web.shared.ProjectPagedResults;
import org.sagebionetworks.web.shared.SerializableWhitelist;
import org.sagebionetworks.web.shared.TeamBundle;
import org.sagebionetworks.web.shared.TeamMemberBundle;
import org.sagebionetworks.web.shared.TeamMemberPagedResults;
import org.sagebionetworks.web.shared.WebConstants;
import org.sagebionetworks.web.shared.asynch.AsynchType;
import org.sagebionetworks.web.shared.exceptions.BadRequestException;
import org.sagebionetworks.web.shared.exceptions.ConflictException;
import org.sagebionetworks.web.shared.exceptions.ExceptionUtil;
import org.sagebionetworks.web.shared.exceptions.NotFoundException;
import org.sagebionetworks.web.shared.exceptions.RestServiceException;
import org.sagebionetworks.web.shared.exceptions.ResultNotReadyException;
import org.sagebionetworks.web.shared.exceptions.TableQueryParseException;
import org.sagebionetworks.web.shared.exceptions.TableUnavilableException;
import org.sagebionetworks.web.shared.exceptions.UnauthorizedException;
import org.sagebionetworks.web.shared.exceptions.UnknownErrorException;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.gwt.core.server.StackTraceDeobfuscator;
import com.google.gwt.thirdparty.guava.common.base.Supplier;
import com.google.gwt.thirdparty.guava.common.base.Suppliers;
public class SynapseClientImpl extends SynapseClientBase implements
SynapseClient, TokenProvider {
private static final Integer MAX_LIMIT = 300;
public static final Integer ZERO_OFFSET = 0;
public static final String DEFAULT_STORAGE_ID_PROPERTY_KEY = "org.sagebionetworks.portal.synapse_storage_id";
public static final String HTML_TEAM_ID_PROPERTY_KEY = "org.sagebionetworks.portal.html_team_id";
public static final String SYN_PREFIX = "syn";
public static final int MAX_LOG_ENTRY_LABEL_SIZE = 200;
public static final Charset MESSAGE_CHARSET = Charset.forName("UTF-8");
public static final ContentType HTML_MESSAGE_CONTENT_TYPE = ContentType
.create("text/html", MESSAGE_CHARSET);
public static final ContentType PLAIN_MESSAGE_CONTENT_TYPE = ContentType
.create("text/plain", MESSAGE_CHARSET);
public static final long LIMIT_50 = 50;
static private Log log = LogFactory.getLog(SynapseClientImpl.class);
private static StackTraceDeobfuscator deobfuscator = null;
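	// Server-side cache of wiki pages keyed by wiki-page key and version,
	// bounded to 35 entries and expired an hour after last access to cut
	// repeated repository calls.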
private Cache<MarkdownCacheRequest, WikiPage> wiki2Markdown = CacheBuilder
.newBuilder().maximumSize(35).expireAfterAccess(1, TimeUnit.HOURS)
.build(new CacheLoader<MarkdownCacheRequest, WikiPage>() {
@Override
public WikiPage load(MarkdownCacheRequest key) throws Exception {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
WikiPage returnPage = null;
						if (key.getVersion() == null)
							returnPage = synapseClient.getWikiPage(key.getWikiPageKey());
						else
							returnPage = synapseClient.getWikiPageForVersion(key.getWikiPageKey(), key.getVersion());
return returnPage;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
});
private final Supplier<Set<String>> htmlTeamMembersCache = Suppliers.memoizeWithExpiration(teamMembersSupplier(), 1, TimeUnit.HOURS);
public Set<String> getHtmlTeamMembers() {
return htmlTeamMembersCache.get();
}
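	// Pages through the configured HTML team's membership, LIMIT_50 members
	// at a time, until an empty page is returned.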
private Supplier<Set<String>> teamMembersSupplier() {
return new Supplier<Set<String>>() {
public Set<String> get() {
Set<String> userIdSet = new HashSet<String>();
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
long currentOffset = 0;
List<TeamMember> teamMembers = null;
do {
org.sagebionetworks.reflection.model.PaginatedResults<TeamMember> teamMembersPaginatedResults = synapseClient.getTeamMembers(htmlTeamId, null, LIMIT_50, currentOffset);
teamMembers = teamMembersPaginatedResults.getResults();
for (TeamMember teamMember : teamMembers) {
userIdSet.add(teamMember.getMember().getOwnerId());
}
currentOffset += LIMIT_50;
} while (teamMembers != null && !teamMembers.isEmpty());
} catch (SynapseException e) {
logError(e.getMessage());
}
return userIdSet;
}
};
}
AdapterFactory adapterFactory = new AdapterFactoryImpl();
private volatile HashMap<String, org.sagebionetworks.web.shared.WikiPageKey> pageName2WikiKeyMap;
private volatile HashSet<String> wikiBasedEntities;
public void setMarkdownCache(Cache<MarkdownCacheRequest, WikiPage> wikiToMarkdown) {
this.wiki2Markdown = wikiToMarkdown;
}
/*
* SynapseClient Service Methods
*/
@Override
public Entity getEntity(String entityId) throws RestServiceException {
return getEntityForVersion(entityId, null);
}
public Project getProject(String projectId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return (Project) synapseClient.getEntityById(projectId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Entity getEntityForVersion(String entityId, Long versionNumber)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
if (versionNumber == null) {
return synapseClient.getEntityById(entityId);
} else {
return synapseClient.getEntityByIdForVersion(entityId,
versionNumber);
}
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public EntityBundle getEntityBundle(String entityId, int partsMask)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.getEntityBundle(entityId, partsMask);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public EntityBundle getEntityBundleForVersion(String entityId,
Long versionNumber, int partsMask) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.getEntityBundle(entityId, versionNumber, partsMask);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public EntityBundlePlus getEntityBundlePlusForVersion(String entityId,
Long versionNumber, int partsMask) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
EntityBundlePlus ebp = new EntityBundlePlus();
EntityBundle eb;
Entity en = synapseClient.getEntityById(entityId);
if (en instanceof Versionable) {
				// Get the correct version, now that we know it's Versionable
Long latestVersionNumber = synapseClient.getEntityVersions(entityId, ZERO_OFFSET, 1)
.getResults().get(0).getVersionNumber();
if (versionNumber == null || latestVersionNumber.equals(versionNumber)) {
versionNumber = latestVersionNumber;
eb = getEntityBundle(entityId, partsMask);
} else {
eb = getEntityBundleForVersion(entityId, versionNumber, partsMask);
}
ebp.setLatestVersionNumber(latestVersionNumber);
} else {
eb = synapseClient.getEntityBundle(entityId, partsMask);
}
ebp.setEntityBundle(eb);
return ebp;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (Throwable e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public PaginatedResults<VersionInfo> getEntityVersions(String entityId, int offset, int limit)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return convertPaginated(synapseClient
.getEntityVersions(entityId, offset, limit));
} catch (SynapseException e) {
log.error(e);
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public EntityPath getEntityPath(String entityId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getEntityPath(entityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public SearchResults search(SearchQuery searchQuery)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.search(searchQuery);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (UnsupportedEncodingException e) {
throw new UnknownErrorException(e.getMessage());
}
}
/*
* Private Methods
*/
private JSONObject query(String query) throws SynapseException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.query(query);
}
public static String createJSONStringFromArray(
List<? extends JSONEntity> list) throws JSONObjectAdapterException {
JSONArrayAdapter aa = new JSONArrayAdapterImpl();
for (int i = 0; i < list.size(); i++) {
JSONObjectAdapter oa = new JSONObjectAdapterImpl();
list.get(i).writeToJSONObject(oa);
aa.put(i, oa);
}
return aa.toJSONString();
}
	@Override
	public SerializableWhitelist junk(SerializableWhitelist l) {
		// no-op: exists only so GWT RPC includes these types in its serialization policy
		return null;
	}
	/**
	 * Helper to convert from the non-GWT-compatible PaginatedResults to the GWT-compatible type.
	 * @param in the reflection-model paginated results
	 * @return a GWT-compatible copy carrying the same results and total count
	 */
public <T extends JSONEntity> PaginatedResults<T> convertPaginated(org.sagebionetworks.reflection.model.PaginatedResults<T> in){
return new PaginatedResults<T>(in.getResults(), in.getTotalNumberOfResults());
}
@Override
public void logDebug(String message) {
log.debug(message);
}
@Override
public void logError(String message) {
log.error(message);
}
public StackTraceDeobfuscator getDeobfuscator() {
//lazy init deobfuscator
if (deobfuscator == null) {
String path = getServletContext().getRealPath("/WEB-INF/");
deobfuscator = StackTraceDeobfuscator.fromFileSystem(path);
}
return deobfuscator;
}
	/**
	 * Deobfuscate a client stack trace.
	 * @param exceptionType the client-side exception class name
	 * @param exceptionMessage the exception message
	 * @param t the obfuscated stack trace elements
	 * @param permutationStrongName the GWT permutation used to select the symbol map
	 * @return the deobfuscated stack trace, rendered as a string
	 */
public String deobfuscateException(String exceptionType, String exceptionMessage, StackTraceElement[] t, String permutationStrongName) {
StackTraceDeobfuscator deobfuscator = getDeobfuscator();
RuntimeException th = new RuntimeException(exceptionType + ":" + exceptionMessage);
th.setStackTrace(t);
deobfuscator.deobfuscateStackTrace(th, permutationStrongName);
return ExceptionUtils.getStackTrace(th).substring("java.lang.RuntimeException: ".length());
}
@Override
public void logErrorToRepositoryServices(String message, String exceptionType, String exceptionMessage, StackTraceElement[] t) throws RestServiceException {
logErrorToRepositoryServices(message, exceptionType, exceptionMessage, t, getPermutationStrongName());
}
//(tested)
public void logErrorToRepositoryServices(String message, String exceptionType, String exceptionMessage, StackTraceElement[] t, String strongName) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
String exceptionString = "";
String outputLabel = "";
if (t != null) {
exceptionString = deobfuscateException(exceptionType, exceptionMessage, t, strongName);
outputLabel = exceptionString.substring(0, Math.min(exceptionString.length(), MAX_LOG_ENTRY_LABEL_SIZE));
}
LogEntry entry = new LogEntry();
			entry.setLabel("SWC/" + PortalVersionHolder.getVersionInfo() + "/" + outputLabel);
String userId = "";
UserProfile profile = synapseClient.getMyProfile();
if (profile != null) {
userId = "userId="+profile.getOwnerId()+" ";
}
String entryMessage = userId+message+"\n"+exceptionString;
entry.setMessage(entryMessage);
synapseClient.logError(entry);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void logInfo(String message) {
log.info(message);
}
/**
* Update an entity.
*/
public Entity updateEntity(Entity toUpdate) throws RestServiceException{
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.putEntity(toUpdate);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Entity moveEntity(String entityId, String newParentEntityId) throws RestServiceException{
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
Entity entity = synapseClient.getEntityById(entityId);
entity.setParentId(newParentEntityId);
return synapseClient.putEntity(entity);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
/**
* Create or update an entity
*
* @param entity
* @param annos
* @param isNew
* @return
* @throws RestServiceException
*/
@Override
public String createOrUpdateEntity(Entity entity, Annotations annos,
boolean isNew) throws RestServiceException {
// First read the entity
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
if (isNew) {
// This is a create
entity = synapseClient.createEntity(entity);
} else {
// This is an update
entity = synapseClient.putEntity(entity);
}
// Update the annotations
if (annos != null) {
annos.setEtag(entity.getEtag());
annos.setId(entity.getId());
synapseClient.updateAnnotations(entity.getId(), annos);
}
return entity.getId();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
/**
* Parse an entity from its json.
*
* @param json
* @return
* @throws JSONObjectAdapterException
*/
public Entity parseEntityFromJson(String json)
throws JSONObjectAdapterException {
if (json == null)
throw new IllegalArgumentException("Entity cannot be null");
// Create an adapter
JSONObjectAdapter adapter = adapterFactory.createNew(json);
		// Extract the entity type.
if (!adapter.has(EntityConstants.ENTITY_TYPE)
|| adapter.isNull(EntityConstants.ENTITY_TYPE)) {
throw new IllegalArgumentException("JSON does not contain: "
+ EntityConstants.ENTITY_TYPE);
}
try {
String entityType = adapter.getString(EntityConstants.ENTITY_TYPE);
Entity entity = (Entity) Class.forName(entityType).newInstance();
entity.initializeFromJSONObject(adapter);
return entity;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public PaginatedResults<EntityHeader> getEntityTypeBatch(List<String> entityIds)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
org.sagebionetworks.reflection.model.PaginatedResults<EntityHeader> results = synapseClient
.getEntityTypeBatch(entityIds);
return new PaginatedResults<EntityHeader>(results.getResults(), results.getTotalNumberOfResults());
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public PaginatedResults<EntityHeader> getEntityHeaderBatch(ReferenceList list)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
org.sagebionetworks.reflection.model.PaginatedResults<EntityHeader> results = synapseClient
.getEntityHeaderBatch(list.getReferences());
return new PaginatedResults<EntityHeader>(results.getResults(), results.getTotalNumberOfResults());
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public ArrayList<EntityHeader> getEntityHeaderBatch(List<String> entityIds)
throws RestServiceException {
try {
List<Reference> list = new ArrayList<Reference>();
for (String entityId : entityIds) {
Reference ref = new Reference();
ref.setTargetId(entityId);
list.add(ref);
}
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
org.sagebionetworks.reflection.model.PaginatedResults<EntityHeader> results = synapseClient
.getEntityHeaderBatch(list);
ArrayList<EntityHeader> returnList = new ArrayList<EntityHeader>();
returnList.addAll(results.getResults());
return returnList;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteEntityById(String entityId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.deleteEntityById(entityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteEntityById(String entityId, Boolean skipTrashCan)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.deleteEntityById(entityId, skipTrashCan);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteEntityVersionById(String entityId, Long versionNumber)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.deleteEntityVersionById(entityId, versionNumber);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public PaginatedResults<TrashedEntity> viewTrashForUser(long offset, long limit)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return convertPaginated(synapseClient
.viewTrashForUser(offset, limit));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void restoreFromTrash(String entityId, String newParentId)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.restoreFromTrash(entityId, newParentId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void moveToTrash(String entityId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.moveToTrash(entityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void purgeTrashForUser() throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.purgeTrashForUser();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void purgeTrashForUser(String entityId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.purgeTrashForUser(entityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void purgeMultipleTrashedEntitiesForUser(Set<String> entityIds) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
for (String entityId : entityIds) {
synapseClient.purgeTrashForUser(entityId);
}
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public UserProfile getUserProfile() throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.getMyProfile();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public UserProfile getUserProfile(String userId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
UserProfile profile;
if (userId == null) {
profile = synapseClient.getMyProfile();
} else {
profile = synapseClient.getUserProfile(userId);
}
return profile;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Team getTeam(String teamId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.getTeam(teamId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public UserGroupHeaderResponsePage getUserGroupHeadersById(ArrayList<String> ids)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.getUserGroupHeadersByIds(ids);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public UserGroupHeaderResponsePage getUserGroupHeadersByPrefix(String prefix, long limit, long offset) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getUserGroupHeadersByPrefix(prefix, limit, offset);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (UnsupportedEncodingException e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public String getUserIdFromUsername(String username) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
PrincipalAliasRequest request = new PrincipalAliasRequest();
request.setAlias(username);
request.setType(AliasType.USER_NAME);
PrincipalAliasResponse response = synapseClient.getPrincipalAlias(request);
return response.getPrincipalId().toString();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public UserProfile getUserProfileFromUsername(String username) throws RestServiceException{
String userId = getUserIdFromUsername(username);
return getUserProfile(userId);
}
@Override
public void updateUserProfile(UserProfile profile)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.updateMyProfile(profile);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void additionalEmailValidation(String userId, String emailAddress,
String callbackUrl) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
Long userIdLong = Long.parseLong(userId);
synapseClient.additionalEmailValidation(userIdLong, emailAddress,
callbackUrl);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void addEmail(String emailValidationToken)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
AddEmailInfo newEmailInfo = new AddEmailInfo();
newEmailInfo.setEmailValidationToken(emailValidationToken);
synapseClient.addEmail(newEmailInfo, true);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String getNotificationEmail() throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getNotificationEmail();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void setNotificationEmail(String email) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.setNotificationEmail(email);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public AccessControlList getBenefactorAcl(String id) throws RestServiceException {
EntityBundle bundle = getEntityBundle(id, EntityBundle.BENEFACTOR_ACL);
return bundle.getBenefactorAcl();
}
@Override
public AccessControlList getEntityBenefactorAcl(String id) throws RestServiceException {
return getBenefactorAcl(id);
}
@Override
public AccessControlList createAcl(AccessControlList acl)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.createACL(acl);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public AccessControlList updateAcl(AccessControlList aclEW)
throws RestServiceException {
return updateAcl(aclEW, false);
}
@Override
public AccessControlList updateAcl(AccessControlList acl, boolean recursive)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.updateACL(acl, recursive);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public AccessControlList updateTeamAcl(AccessControlList acl)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.updateTeamACL(acl);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public AccessControlList getTeamAcl(String teamId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getTeamACL(teamId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public AccessControlList deleteAcl(String ownerEntityId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
// first delete the ACL
synapseClient.deleteACL(ownerEntityId);
// now get the ACL governing this entity, which will be some ancestor's (the benefactor's) ACL
return getBenefactorAcl(ownerEntityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public boolean hasAccess(String ownerEntityId, String accessType)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.canAccess(ownerEntityId,
ACCESS_TYPE.valueOf(accessType));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public boolean hasAccess(String ownerId, String ownerType, String accessType)
throws RestServiceException {
ObjectType type = ObjectType.valueOf(ownerType);
ACCESS_TYPE access = ACCESS_TYPE.valueOf(accessType);
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.canAccess(ownerId, type, access);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public AccessRequirement createAccessRequirement(AccessRequirement ar)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.createAccessRequirement(ar);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public ACTAccessRequirement createLockAccessRequirement(String entityId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.createLockAccessRequirement(entityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public AccessRequirementsTransport getUnmetAccessRequirements(
String entityId, ACCESS_TYPE accessType) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
RestrictableObjectDescriptor subjectId = new RestrictableObjectDescriptor();
subjectId.setId(entityId);
subjectId.setType(RestrictableObjectType.ENTITY);
boolean unmetOnly = true;
List<AccessRequirement> accessRequirements = getAllAccessRequirements(unmetOnly, subjectId, accessType, synapseClient);
AccessRequirementsTransport transport = new AccessRequirementsTransport();
transport.setAccessRequirements(new PaginatedResults<AccessRequirement>(
accessRequirements, accessRequirements.size()));
Entity e = synapseClient.getEntityById(entityId);
transport.setEntity(e);
UserProfile profile = getUserProfile();
transport.setUserProfile(profile);
return transport;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
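/*
 * Helper that pages through the access requirement services LIMIT_50 rows at
 * a time; a page shorter than LIMIT_50 signals the last page. A minimal
 * sketch of the calling pattern (the teamId variable is hypothetical):
 *
 *   RestrictableObjectDescriptor subject = new RestrictableObjectDescriptor();
 *   subject.setId(teamId);
 *   subject.setType(RestrictableObjectType.TEAM);
 *   List<AccessRequirement> all = getAllAccessRequirements(false, subject, ACCESS_TYPE.PARTICIPATE, synapseClient);
 */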
private List<AccessRequirement> getAllAccessRequirements(boolean unmetOnly, RestrictableObjectDescriptor subjectId, ACCESS_TYPE accessType, org.sagebionetworks.client.SynapseClient synapseClient) throws SynapseException {
List<AccessRequirement> allAccessRequirements = new ArrayList<AccessRequirement>();
long offset = ZERO_OFFSET;
boolean isDone = false;
while (!isDone) {
List<AccessRequirement> accessRequirements;
if (unmetOnly) {
accessRequirements = synapseClient.getUnmetAccessRequirements(subjectId, accessType, LIMIT_50, offset).getResults();
} else {
accessRequirements = synapseClient.getAccessRequirements(subjectId, LIMIT_50, offset).getResults();
}
isDone = accessRequirements.size() < LIMIT_50;
allAccessRequirements.addAll(accessRequirements);
offset += LIMIT_50;
}
return allAccessRequirements;
}
@Override
public List<AccessRequirement> getAccessRequirements(RestrictableObjectDescriptor subject, Long limit, Long offset) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getAccessRequirements(subject, limit, offset).getResults();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
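// Pages through the wiki header tree using the same fetch-until-short-page
// pattern as getAllAccessRequirements above.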
private List<V2WikiHeader> getAllWikiHeaderTree(String ownerId, ObjectType ownerType, org.sagebionetworks.client.SynapseClient synapseClient) throws SynapseException {
List<V2WikiHeader> allHeaders = new ArrayList<V2WikiHeader>();
long offset = ZERO_OFFSET;
boolean isDone = false;
while (!isDone) {
List<V2WikiHeader> headers = synapseClient.getV2WikiHeaderTree(ownerId, ownerType, LIMIT_50, offset).getResults();
isDone = headers.size() < LIMIT_50;
allHeaders.addAll(headers);
offset += LIMIT_50;
}
return allHeaders;
}
@Override
public List<AccessRequirement> getTeamAccessRequirements(String teamId)
throws RestServiceException {
return getTeamAccessRequirements(teamId, false);
}
private List<AccessRequirement> getTeamAccessRequirements(String teamId, boolean unmetOnly)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
RestrictableObjectDescriptor subjectId = new RestrictableObjectDescriptor();
subjectId.setId(teamId);
subjectId.setType(RestrictableObjectType.TEAM);
List<AccessRequirement> accessRequirements = getAllAccessRequirements(unmetOnly, subjectId, ACCESS_TYPE.PARTICIPATE, synapseClient);
return accessRequirements;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public PaginatedResults<AccessRequirement> getAllEntityUploadAccessRequirements(String entityId)
throws RestServiceException {
return getEntityAccessRequirements(entityId, false, ACCESS_TYPE.UPLOAD);
}
public PaginatedResults<AccessRequirement> getEntityAccessRequirements(String entityId,
boolean unmetOnly, ACCESS_TYPE targetAccessType)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
RestrictableObjectDescriptor subjectId = new RestrictableObjectDescriptor();
subjectId.setId(entityId);
subjectId.setType(RestrictableObjectType.ENTITY);
List<AccessRequirement> accessRequirements = getAllAccessRequirements(unmetOnly, subjectId, targetAccessType, synapseClient);
// filter to the targetAccessType
if (targetAccessType != null) {
accessRequirements = AccessRequirementUtils.filterAccessRequirements(
accessRequirements, targetAccessType);
}
return new PaginatedResults<AccessRequirement>(accessRequirements, accessRequirements.size());
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public AccessApproval createAccessApproval(AccessApproval aaEW)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.createAccessApproval(aaEW);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteAccessApproval(Long approvalId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.deleteAccessApproval(approvalId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteAccessApprovals(String accessRequirement, String accessorId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.deleteAccessApprovals(accessRequirement, accessorId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Entity updateExternalFile(String entityId, String externalUrl, String name, String contentType, Long fileSize, String md5, Long storageLocationId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
Entity entity = synapseClient.getEntityById(entityId);
if (!(entity instanceof FileEntity)) {
throw new RuntimeException("Upload failed. Entity id: "
+ entity.getId() + " is not a File.");
}
ExternalFileHandle clone = createExternalFileHandle(externalUrl, name, contentType, fileSize, md5, storageLocationId, synapseClient);
((FileEntity) entity).setDataFileHandleId(clone.getId());
Entity updatedEntity = synapseClient.putEntity(entity);
return updatedEntity;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
private boolean isManuallySettingExternalName(String name) {
return name != null && name.trim().length() > 0;
}
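/**
 * Extracts the file name from an external URL by taking the text between
 * the last '/' and the following '?' (if any). For example (URL is
 * illustrative), "http://example.com/path/data.csv?token=abc" yields
 * "data.csv"; a path with no '/' yields the empty string.
 */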
public static String getFileNameFromExternalUrl(String path) {
// grab the text between the last '/' and the following '?'
String fileName = "";
if (path != null) {
int lastSlash = path.lastIndexOf("/");
if (lastSlash > -1) {
int firstQuestionMark = path.indexOf("?", lastSlash);
if (firstQuestionMark > -1) {
fileName = path.substring(lastSlash+1, firstQuestionMark);
} else {
fileName = path.substring(lastSlash+1);
}
}
}
return fileName;
}
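/*
 * Builds an ExternalFileHandle for the given URL and metadata. If a
 * non-blank name was supplied it is used as the file name; otherwise the
 * name is derived from the URL via getFileNameFromExternalUrl().
 */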
private ExternalFileHandle createExternalFileHandle(
String externalUrl,
String name,
String contentType,
Long fileSize,
String md5,
Long storageLocationId,
org.sagebionetworks.client.SynapseClient synapseClient
) throws SynapseException {
boolean isManuallySettingName = isManuallySettingExternalName(name);
ExternalFileHandle efh = new ExternalFileHandle();
efh.setExternalURL(externalUrl.trim());
efh.setContentMd5(md5);
efh.setContentSize(fileSize);
efh.setContentType(contentType);
String fileName;
if (isManuallySettingName) {
fileName = name;
} else {
fileName = getFileNameFromExternalUrl(externalUrl);
}
efh.setFileName(fileName);
efh.setStorageLocationId(storageLocationId);
return synapseClient.createExternalFileHandle(efh);
}
@Override
public Entity createExternalFile(String parentEntityId, String externalUrl,
String name, String contentType, Long fileSize, String md5, Long storageLocationId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
FileEntity newEntity = new FileEntity();
ExternalFileHandle clone = createExternalFileHandle(externalUrl, name, contentType, fileSize, md5, storageLocationId, synapseClient);
newEntity.setDataFileHandleId(clone.getId());
newEntity.setParentId(parentEntityId);
newEntity.setName(clone.getFileName());
return synapseClient.createEntity(newEntity);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Activity getActivityForEntity(String entityId)
throws RestServiceException {
return getActivityForEntityVersion(entityId, null);
}
@Override
public Activity getActivityForEntityVersion(String entityId,
Long versionNumber) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getActivityForEntityVersion(
entityId, versionNumber);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Activity getActivity(String activityId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getActivity(activityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
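/*
 * Get-or-create semantics: try to fetch the Activity that generated this
 * entity version; on SynapseNotFoundException, create an empty Activity
 * and attach it to the entity via putEntity(entity, activityId).
 */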
@Override
public Activity getOrCreateActivityForEntityVersion(String entityId,
Long versionNumber) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getActivityForEntityVersion(
entityId, versionNumber);
} catch (SynapseNotFoundException ex) {
// not found, so create
Activity newActivity;
try {
newActivity = synapseClient.createActivity(new Activity());
synapseClient.putEntity(synapseClient.getEntityById(entityId), newActivity.getId());
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
return newActivity;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void putActivity(Activity update) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.putActivity(update);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public PaginatedResults<Reference> getEntitiesGeneratedBy(String activityId, Integer limit,
Integer offset) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return convertPaginated(synapseClient
.getEntitiesGeneratedBy(activityId, limit, offset));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String getJSONEntity(String repoUri) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
JSONObject entity = synapseClient.getEntity(repoUri);
return entity.toString();
} catch (SynapseTableUnavailableException e) {
handleTableUnavailableException(e);
// TableUnavailableException is thrown in the line above, so we should
// never reach the next line
return null;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
private String getRootWikiId(
org.sagebionetworks.client.SynapseClient synapseClient,
String ownerId, ObjectType ownerType) throws RestServiceException {
try {
WikiPageKey key = synapseClient.getRootWikiPageKey(ownerId, ownerType);
return key.getWikiPageId();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String getFileEndpoint() throws RestServiceException {
// org.sagebionetworks.client.SynapseClient synapseClient =
// createSynapseClient();
// return synapseClient.getFileEndpoint();
return StackConfiguration.getFileServiceEndpoint();
}
@Override
public String getRootWikiId(String ownerObjectId, String ownerObjectType) throws RestServiceException{
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
// asking for the root. find the root id first
String rootWikiPageId = getRootWikiId(synapseClient,
ownerObjectId,
ObjectType.valueOf(ownerObjectType));
return rootWikiPageId;
}
@Override
public FileHandleResults getWikiAttachmentHandles(
org.sagebionetworks.web.shared.WikiPageKey key)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
if (key.getWikiPageId() == null) {
// asking for the root. find the root id first
String rootWikiPage = getRootWikiId(synapseClient,
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()));
key.setWikiPageId(rootWikiPage);
}
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
FileHandleResults results = synapseClient
.getWikiAttachmenthHandles(properKey);
return results;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
// V2 Wiki crud
@Override
public V2WikiPage createV2WikiPage(String ownerId, String ownerType,
V2WikiPage page) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.createV2WikiPage(ownerId,
ObjectType.valueOf(ownerType), page);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public V2WikiPage getV2WikiPage(org.sagebionetworks.web.shared.WikiPageKey key)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
if (key.getWikiPageId() == null) {
// asking for the root. find the root id first
String rootWikiPage = getRootWikiId(synapseClient,
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()));
key.setWikiPageId(rootWikiPage);
}
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
return synapseClient.getV2WikiPage(properKey);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public V2WikiPage getVersionOfV2WikiPage(
org.sagebionetworks.web.shared.WikiPageKey key, Long version)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
if (key.getWikiPageId() == null) {
// asking for the root. find the root id first
String rootWikiPage = getRootWikiId(synapseClient,
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()));
key.setWikiPageId(rootWikiPage);
}
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
return synapseClient.getVersionOfV2WikiPage(
properKey, version);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public V2WikiPage updateV2WikiPage(String ownerId, String ownerType,
V2WikiPage page) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.updateV2WikiPage(ownerId,
ObjectType.valueOf(ownerType), page);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public V2WikiPage restoreV2WikiPage(String ownerId, String ownerType,
String wikiId, Long versionToUpdate) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.restoreV2WikiPage(ownerId,
ObjectType.valueOf(ownerType), wikiId, versionToUpdate);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteV2WikiPage(org.sagebionetworks.web.shared.WikiPageKey key)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
synapseClient.deleteV2WikiPage(properKey);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public List<V2WikiHeader> getV2WikiHeaderTree(String ownerId, String ownerType)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return getAllWikiHeaderTree(ownerId, ObjectType.valueOf(ownerType), synapseClient);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public V2WikiOrderHint getV2WikiOrderHint(org.sagebionetworks.web.shared.WikiPageKey key)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
try {
V2WikiOrderHint orderHint = synapseClient.getV2OrderHint(properKey);
return orderHint;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public V2WikiOrderHint updateV2WikiOrderHint(V2WikiOrderHint toUpdate) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
V2WikiOrderHint orderHint = synapseClient.updateV2WikiOrderHint(toUpdate);
return orderHint;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public FileHandleResults getV2WikiAttachmentHandles(
org.sagebionetworks.web.shared.WikiPageKey key)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
if (key.getWikiPageId() == null) {
// asking for the root. find the root id first
String rootWikiPage = getRootWikiId(synapseClient,
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()));
key.setWikiPageId(rootWikiPage);
}
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
return synapseClient
.getV2WikiAttachmentHandles(properKey);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public FileHandleResults getVersionOfV2WikiAttachmentHandles(
org.sagebionetworks.web.shared.WikiPageKey key, Long version)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
if (key.getWikiPageId() == null) {
// asking for the root. find the root id first
String rootWikiPage = getRootWikiId(synapseClient,
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()));
key.setWikiPageId(rootWikiPage);
}
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
return synapseClient
.getVersionOfV2WikiAttachmentHandles(properKey, version);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public PaginatedResults<V2WikiHistorySnapshot> getV2WikiHistory(
org.sagebionetworks.web.shared.WikiPageKey key, Long limit,
Long offset) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
return convertPaginated(synapseClient
.getV2WikiHistory(properKey, limit, offset));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String getMarkdown(org.sagebionetworks.web.shared.WikiPageKey key)
throws IOException, RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
String wikiPageKeyId = getWikiKeyId(synapseClient, key);
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()), wikiPageKeyId);
try {
return synapseClient.downloadV2WikiMarkdown(properKey);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String getVersionOfMarkdown(
org.sagebionetworks.web.shared.WikiPageKey key, Long version)
throws IOException, RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
key.getWikiPageId());
try {
return synapseClient.downloadVersionOfV2WikiMarkdown(properKey,
version);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public WikiPage createV2WikiPageWithV1(String ownerId, String ownerType,
WikiPage page) throws IOException, RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.createWikiPage(ownerId, ObjectType.valueOf(ownerType), page);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public WikiPage updateV2WikiPageWithV1(String ownerId, String ownerType,
WikiPage page) throws IOException, RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.updateWikiPage(ownerId, ObjectType.valueOf(ownerType), page);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
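/*
 * A null wiki page id in the shared key is the convention for "the root
 * wiki page"; this helper resolves it to the actual root id.
 */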
private String getWikiKeyId(
org.sagebionetworks.client.SynapseClient synapseClient,
org.sagebionetworks.web.shared.WikiPageKey key)
throws RestServiceException {
String wikiPageId = key.getWikiPageId();
if (wikiPageId == null) {
// asking for the root. find the root id first
wikiPageId = getRootWikiId(synapseClient, key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()));
}
return wikiPageId;
}
@Override
public WikiPage getV2WikiPageAsV1(
org.sagebionetworks.web.shared.WikiPageKey key)
throws RestServiceException, IOException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
getWikiKeyId(synapseClient, key));
String etag = null;
try {
V2WikiPage page = synapseClient.getV2WikiPage(properKey);
etag = page.getEtag();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
MarkdownCacheRequest request = new MarkdownCacheRequest(properKey,
etag, null);
return processMarkdownRequest(request);
}
@Override
public WikiPage getVersionOfV2WikiPageAsV1(
org.sagebionetworks.web.shared.WikiPageKey key, Long version)
throws RestServiceException, IOException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
WikiPageKey properKey = WikiPageKeyHelper.createWikiPageKey(
key.getOwnerObjectId(),
ObjectType.valueOf(key.getOwnerObjectType()),
getWikiKeyId(synapseClient, key));
String etag = null;
try {
V2WikiPage page = synapseClient.getVersionOfV2WikiPage(properKey,
version);
etag = page.getEtag();
key.setWikiPageId(page.getId());
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
MarkdownCacheRequest request = new MarkdownCacheRequest(properKey,
etag, version);
return processMarkdownRequest(request);
}
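/*
 * Resolves the request through the wiki2Markdown cache (declared elsewhere
 * in this class) and unwraps any SynapseException hidden inside the
 * cache's ExecutionException.
 */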
private WikiPage processMarkdownRequest(MarkdownCacheRequest request)
throws RestServiceException {
try {
return wiki2Markdown.get(request);
} catch (ExecutionException e) {
// instanceof is null-safe, so no separate null check is needed
if (e.getCause() instanceof SynapseException) {
throw ExceptionUtil.convertSynapseException((SynapseException) e.getCause());
} else {
throw new RestServiceException(e.getMessage());
}
}
}
@Override
public EntityHeader addFavorite(String entityId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.addFavorite(entityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void removeFavorite(String entityId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.removeFavorite(entityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public List<EntityHeader> getFavorites()
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
org.sagebionetworks.reflection.model.PaginatedResults<EntityHeader> favorites = synapseClient
.getFavorites(MAX_LIMIT, ZERO_OFFSET);
List<EntityHeader> headers = favorites.getResults();
//sort by name
Collections.sort(headers, new Comparator<EntityHeader>() {
@Override
public int compare(EntityHeader o1, EntityHeader o2) {
return o1.getName().compareToIgnoreCase(o2.getName());
}
});
return headers;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public String createTeam(String teamName) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
Team t = new Team();
t.setName(teamName);
t.setCanPublicJoin(false);
t = synapseClient.createTeam(t);
return t.getId();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public void deleteTeam(String teamId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.deleteTeam(teamId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public long getTeamMemberCount(String teamId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.countTeamMembers(teamId, null);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public TeamMemberPagedResults getTeamMembers(String teamId, String fragment, Integer limit,
Integer offset) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
org.sagebionetworks.reflection.model.PaginatedResults<TeamMember> members = synapseClient
.getTeamMembers(teamId, fragment, limit, offset);
List<TeamMember> teamMembers = members.getResults();
//gather user ids to ask for all user profiles in bulk
List<Long> userIds = new ArrayList<Long>();
for (TeamMember member : members.getResults()) {
userIds.add(Long.parseLong(member.getMember().getOwnerId()));
}
List<UserProfile> profiles = synapseClient.listUserProfiles(userIds);
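// Note: pairing by index below assumes listUserProfiles returns the
// profiles in the same order as the requested user ids.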
List<TeamMemberBundle> teamMemberBundles = new ArrayList<TeamMemberBundle>();
for (int i = 0; i < userIds.size(); i++) {
teamMemberBundles.add(new TeamMemberBundle(profiles.get(i), teamMembers.get(i).getIsAdmin(), teamMembers.get(i).getTeamId()));
}
TeamMemberPagedResults results = new TeamMemberPagedResults();
results.setResults(teamMemberBundles);
results.setTotalNumberOfResults(members.getTotalNumberOfResults());
return results;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public List<UserProfile> listUserProfiles(List<String> userIds) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
List<Long> userIdsLong = new LinkedList<Long>();
for (String idString : userIds) {
userIdsLong.add(Long.parseLong(idString));
}
return synapseClient.listUserProfiles(userIdsLong);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public List<TeamRequestBundle> getTeamsForUser(String userId, boolean includeOpenRequests)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
org.sagebionetworks.reflection.model.PaginatedResults<Team> teams = synapseClient.getTeamsForUser(
userId, MAX_LIMIT, ZERO_OFFSET);
List<Team> teamList = teams.getResults();
Collections.sort(teamList, new Comparator<Team>() {
@Override
public int compare(Team o1, Team o2) {
return o1.getName().compareToIgnoreCase(o2.getName());
}
});
List<TeamRequestBundle> bundle = new ArrayList<TeamRequestBundle>(teamList.size());
for (Team team: teamList) {
if (includeOpenRequests) {
Long openRequestCount = getOpenRequestCount(userId, team.getId());
bundle.add(new TeamRequestBundle(team, openRequestCount == null ? 0L : openRequestCount));
} else {
bundle.add(new TeamRequestBundle(team, 0L));
}
}
return bundle;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public PaginatedResults<Team> getTeams(String userId, Integer limit, Integer offset)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return convertPaginated(synapseClient.getTeamsForUser(
userId, limit, offset));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public PaginatedResults<Team> getTeamsBySearch(String searchTerm, Integer limit,
Integer offset) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
if (searchTerm != null && searchTerm.trim().length() == 0) {
searchTerm = null;
}
if (offset == null) {
offset = ZERO_OFFSET.intValue();
}
return convertPaginated(synapseClient.getTeams(searchTerm,
limit, offset));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public TeamMembershipStatus getTeamMembershipState(String currentUserId, String teamId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient
.getTeamMembershipStatus(teamId, currentUserId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public TeamMembershipStatus requestMembership(String currentUserId, String teamId,
String message, String hostPageBaseURL, Date expiresOn) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
TeamMembershipStatus membershipStatus = synapseClient
.getTeamMembershipStatus(teamId, currentUserId);
// if we can join the team without creating the request (like if we
// are a team admin, or there is an open invitation), then just do
// that!
String settingsEndpoint = getNotificationEndpoint(NotificationTokenType.Settings, hostPageBaseURL);
if (membershipStatus.getCanJoin()) {
synapseClient.addTeamMember(teamId, currentUserId, getTeamEndpoint(hostPageBaseURL), settingsEndpoint);
} else if (!membershipStatus.getHasOpenRequest()) {
// otherwise, create the request
MembershipRqstSubmission membershipRequest = new MembershipRqstSubmission();
membershipRequest.setMessage(message);
membershipRequest.setTeamId(teamId);
membershipRequest.setUserId(currentUserId);
if (expiresOn != null) {
membershipRequest.setExpiresOn(expiresOn);
}
// make new Synapse call
String joinTeamEndpoint = getNotificationEndpoint(NotificationTokenType.JoinTeam, hostPageBaseURL);
synapseClient.createMembershipRequest(membershipRequest, joinTeamEndpoint, settingsEndpoint);
}
return synapseClient.getTeamMembershipStatus(teamId, currentUserId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void inviteMember(String userGroupId, String teamId, String message, String hostPageBaseURL)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
TeamMembershipStatus membershipStatus = synapseClient
.getTeamMembershipStatus(teamId, userGroupId);
String settingsEndpoint = getNotificationEndpoint(NotificationTokenType.Settings, hostPageBaseURL);
// if we can join the team without creating the invite (like if we
// are a team admin, or there is an open membership request), then
// just do that!
if (membershipStatus.getCanJoin()) {
synapseClient.addTeamMember(teamId, userGroupId, getTeamEndpoint(hostPageBaseURL), settingsEndpoint);
} else if (!membershipStatus.getHasOpenInvitation()) {
// otherwise, create the invitation
MembershipInvtnSubmission membershipInvite = new MembershipInvtnSubmission();
membershipInvite.setMessage(message);
membershipInvite.setTeamId(teamId);
membershipInvite.setInviteeId(userGroupId);
// make new Synapse call
String joinTeamEndpoint = getNotificationEndpoint(NotificationTokenType.JoinTeam, hostPageBaseURL);
synapseClient.createMembershipInvitation(membershipInvite, joinTeamEndpoint, settingsEndpoint);
}
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String getCertifiedUserPassingRecord(String userId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
PassingRecord passingRecord = synapseClient
.getCertifiedUserPassingRecord(userId);
// This method only returns the PassingRecord if the user actually
// passed (the portal does not currently care about failed attempts).
if (passingRecord.getPassed() == null || !passingRecord.getPassed()) {
throw new NotFoundException(
"The user has not passed the certification quiz.");
}
JSONObjectAdapter passingRecordJson = passingRecord
.writeToJSONObject(adapterFactory.createNew());
return passingRecordJson.toJSONString();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (JSONObjectAdapterException e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public String getCertificationQuiz() throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
Quiz quiz = synapseClient.getCertifiedUserTest();
JSONObjectAdapter quizJson = quiz.writeToJSONObject(adapterFactory
.createNew());
return quizJson.toJSONString();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (JSONObjectAdapterException e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public PassingRecord submitCertificationQuizResponse(QuizResponse response)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient
.submitCertifiedUserTestResponse(response);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Boolean isTeamMember(String userId, Long groupPrincipalId)
throws RestServiceException {
Boolean isMember = null;
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
TeamMembershipStatus membershipStatus = synapseClient
.getTeamMembershipStatus(groupPrincipalId.toString(),
userId);
isMember = membershipStatus.getIsMember();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
return isMember;
}
@Override
public TeamBundle getTeamBundle(String userId, String teamId,
boolean isLoggedIn) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
long memberCount = synapseClient.countTeamMembers(teamId, null);
boolean isAdmin = false;
Team team = synapseClient.getTeam(teamId);
TeamMembershipStatus membershipStatus = null;
// get membership state for the current user
if (isLoggedIn) {
membershipStatus = synapseClient
.getTeamMembershipStatus(teamId, userId);
if (membershipStatus.getIsMember()) {
TeamMember teamMember = synapseClient.getTeamMember(teamId,
userId);
isAdmin = teamMember.getIsAdmin();
}
}
return new TeamBundle(team, memberCount,
membershipStatus, isAdmin);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public List<MembershipRequestBundle> getOpenRequests(String teamId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
org.sagebionetworks.reflection.model.PaginatedResults<MembershipRequest> requests = synapseClient
.getOpenMembershipRequests(teamId, null, MAX_LIMIT,
ZERO_OFFSET);
// ask for the user profile for each request, and fill that into the
// bundle
ArrayList<MembershipRequestBundle> returnList = new ArrayList<MembershipRequestBundle>();
// now go through and create a MembershipRequestBundle for each pair
for (MembershipRequest request : requests.getResults()) {
UserProfile profile = synapseClient.getUserProfile(request.getUserId());
MembershipRequestBundle b = new MembershipRequestBundle(profile, request);
returnList.add(b);
}
return returnList;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
private boolean isTeamAdmin(String currentUserId, String teamId, org.sagebionetworks.client.SynapseClient synapseClient) throws SynapseException {
TeamMember member = synapseClient.getTeamMember(teamId, currentUserId);
return member.getIsAdmin();
}
@Override
public Long getOpenRequestCount(String currentUserId, String teamId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
// only a team admin may view the team's open membership requests;
// determining admin status requires the caller to be a member
if (isTeamAdmin(currentUserId, teamId, synapseClient)) {
org.sagebionetworks.reflection.model.PaginatedResults<MembershipRequest> requests = synapseClient
.getOpenMembershipRequests(teamId, null, 1, ZERO_OFFSET);
// only the total count is needed, so a single row was fetched
return requests.getTotalNumberOfResults();
} else {
return null;
}
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public ArrayList<OpenUserInvitationBundle> getOpenInvitations(String userId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
org.sagebionetworks.reflection.model.PaginatedResults<MembershipInvitation> invitations = synapseClient
.getOpenMembershipInvitations(userId, null, MAX_LIMIT,
ZERO_OFFSET);
// and ask for the team info for each invite, and fill that in the
// bundle
ArrayList<OpenUserInvitationBundle> returnList = new ArrayList<OpenUserInvitationBundle>();
// now go through and create an OpenUserInvitationBundle for each pair
for (MembershipInvitation invite : invitations.getResults()) {
Team team = synapseClient.getTeam(invite.getTeamId());
OpenUserInvitationBundle b = new OpenUserInvitationBundle(team, invite);
returnList.add(b);
}
return returnList;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public ArrayList<OpenTeamInvitationBundle> getOpenTeamInvitations(
String teamId, Integer limit, Integer offset)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
org.sagebionetworks.reflection.model.PaginatedResults<MembershipInvtnSubmission> invitations = synapseClient
.getOpenMembershipInvitationSubmissions(teamId, null,
limit, offset);
// ask for the invitee's user profile for each invitation, and fill that
// into the bundle
ArrayList<OpenTeamInvitationBundle> returnList = new ArrayList<OpenTeamInvitationBundle>();
// now go through and create an OpenTeamInvitationBundle for each pair
for (MembershipInvtnSubmission invite : invitations.getResults()) {
UserProfile profile = synapseClient.getUserProfile(invite
.getInviteeId());
OpenTeamInvitationBundle b = new OpenTeamInvitationBundle(invite,
profile);
returnList.add(b);
}
return returnList;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteMembershipInvitation(String invitationId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.deleteMembershipInvitation(invitationId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void setIsTeamAdmin(String currentUserId, String targetUserId,
String teamId, boolean isTeamAdmin) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.setTeamMemberPermissions(teamId, targetUserId,
isTeamAdmin);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteTeamMember(String currentUserId, String targetUserId,
String teamId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.removeTeamMember(teamId, targetUserId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Team updateTeam(Team team, AccessControlList teamAcl) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
updateTeamAcl(teamAcl);
return synapseClient.updateTeam(team);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Doi getEntityDoi(String entityId, Long versionNumber)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getEntityDoi(entityId, versionNumber);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (Exception e) {
throw ExceptionUtil
.convertSynapseException(new SynapseNotFoundException());
}
}
@Override
public void createDoi(String entityId, Long versionNumber)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.createEntityDoi(entityId, versionNumber);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String getFileEntityTemporaryUrlForVersion(String entityId,
Long versionNumber) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
URL url = synapseClient.getFileEntityTemporaryUrlForVersion(
entityId, versionNumber);
return url.toString();
} catch (Exception e) {
throw new UnknownErrorException(e.getMessage());
}
}
/**
* Gets the ID of the file entity with the given name whose parent has the given ID.
*
* @param fileName The name of the entity to find.
* @param parentEntityId The ID of the parent that the found entity must have.
* @return The ID of the file entity with the given name and parent ID.
* @throws NotFoundException If no file with the given name and parent ID was found.
* @throws ConflictException If an entity with the given name and parent ID was found, but that
* entity was not a File Entity.
*/
@Override
public String getFileEntityIdWithSameName(String fileName, String parentEntityId) throws RestServiceException, SynapseException {
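// Uses the legacy entity query service. WebConstants.AND_NAME_EQUALS is
// assumed to close the quote opened after "parentId ==" and to quote the
// file name; LIMIT_ONE caps the result set at a single row.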
String queryString = "select * from entity where parentId == '" + parentEntityId +
WebConstants.AND_NAME_EQUALS + fileName + WebConstants.LIMIT_ONE;
JSONObject query = query(queryString);
if (query == null) {
throw new SynapseClientException("Query service call returned null");
}
if (!query.has("totalNumberOfResults")) {
throw new SynapseClientException("Query results did not have totalNumberOfResults");
}
try {
if (query.getLong("totalNumberOfResults") != 0) {
JSONObject result = query.getJSONArray("results").getJSONObject(0);
// Get types associated with found entity.
JSONArray typeArray = result.getJSONArray("entity.concreteType");
Set<String> types = new HashSet<String>();
for (int i = 0; i < typeArray.length(); i++) {
types.add(typeArray.getString(i));
}
if (types.contains(FileEntity.class.getName())) {
// The found entity is a File Entity.
return result.getString("entity.id");
} else {
// The found entity is not a File Entity.
throw new ConflictException("A non-file entity with name " + fileName + " and parentId " + parentEntityId + " already exists.");
}
} else {
throw new NotFoundException("An entity with name " + fileName + " and parentId " + parentEntityId + " was not found.");
}
} catch (JSONException e) {
throw new SynapseClientException(e);
}
}
@Override
public String setFileEntityFileHandle(String fileHandleId, String entityId, String parentEntityId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
// create the entity if we have to
FileEntity fileEntity = null;
FileHandle newHandle = synapseClient.getRawFileHandle(fileHandleId);
if (entityId == null) {
fileEntity = FileHandleServlet.getNewFileEntity(parentEntityId, fileHandleId, newHandle.getFileName(), synapseClient);
} else {
// get the file entity to update
fileEntity = (FileEntity) synapseClient.getEntityById(entityId);
// update the data file handle id
fileEntity.setDataFileHandleId(fileHandleId);
fileEntity = (FileEntity) synapseClient.putEntity(fileEntity);
}
return fileEntity.getId();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
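/*
 * If isRestricted is set, places a lock access requirement on the entity,
 * but only when no access requirements exist yet (checked by fetching a
 * single AR for the entity).
 */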
public static void lockDown(String entityId, boolean isRestricted, org.sagebionetworks.client.SynapseClient client) throws SynapseException {
// now lock down restricted data
if (isRestricted) {
// we only proceed if there aren't currently any access restrictions
RestrictableObjectDescriptor subjectId = new RestrictableObjectDescriptor();
subjectId.setId(entityId);
subjectId.setType(RestrictableObjectType.ENTITY);
org.sagebionetworks.reflection.model.PaginatedResults<AccessRequirement> currentARs = client.getAccessRequirements(subjectId, 1L, ZERO_OFFSET.longValue());
if (currentARs.getResults().isEmpty()) {
client.createLockAccessRequirement(entityId);
}
}
}
@Override
public String getSynapseVersions() throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createAnonymousSynapseClient();
try {
SynapseVersionInfo versionInfo = synapseClient.getVersionInfo();
// referencing PortalVersionHolder below triggers its static initializer
return PortalVersionHolder.getVersionInfo() + ","
+ versionInfo.getVersion();
} catch (Exception e) {
throw new UnknownErrorException(e.getMessage());
}
}
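/*
 * Initialization-on-demand holder: the version string is read from
 * version-info.properties once, when the nested class is first referenced.
 */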
private static class PortalVersionHolder {
private static String versionInfo = "";
static {
InputStream s = SynapseClientImpl.class
.getResourceAsStream("/version-info.properties");
Properties prop = new Properties();
try {
prop.load(s);
} catch (IOException e) {
throw new RuntimeException(
"version-info.properties file not found", e);
}
versionInfo = prop
.getProperty("org.sagebionetworks.portal.version");
}
private static String getVersionInfo() {
return versionInfo;
}
}
private String getSynapseProperty(String key) {
return PortalPropertiesHolder.getProperty(key);
}
@Override
public HashMap<String, String> getSynapseProperties(){
return PortalPropertiesHolder.getPropertiesMap();
}
public static class PortalPropertiesHolder {
private static Properties props;
private static HashMap<String, String> propsMap;
static {
InputStream s = SynapseClientImpl.class
.getResourceAsStream("/portal.properties");
props = new Properties();
try {
props.load(s);
} catch (IOException e) {
throw new RuntimeException("portal.properties file not found",
e);
}
}
public static String getProperty(String key) {
return props.getProperty(key);
}
public static HashMap<String, String> getPropertiesMap() {
if (propsMap == null) {
propsMap = new HashMap<String, String>();
for (Entry<Object, Object> entry : props.entrySet()) {
propsMap.put(entry.getKey().toString(), entry.getValue().toString());
}
}
return propsMap;
}
}
public static Long defaultStorageLocation = Long.parseLong(PortalPropertiesHolder.getProperty(DEFAULT_STORAGE_ID_PROPERTY_KEY));
public static String htmlTeamId = PortalPropertiesHolder.getProperty(HTML_TEAM_ID_PROPERTY_KEY);
@Override
public ResponseMessage handleSignedToken(SignedTokenInterface signedToken, String hostPageBaseURL) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
if (signedToken instanceof JoinTeamSignedToken) {
JoinTeamSignedToken joinTeamSignedToken = (JoinTeamSignedToken) signedToken;
String settingsEndpoint = getNotificationEndpoint(NotificationTokenType.Settings, hostPageBaseURL);
return synapseClient.addTeamMember(joinTeamSignedToken, getTeamEndpoint(hostPageBaseURL), settingsEndpoint);
} else if (signedToken instanceof NotificationSettingsSignedToken) {
NotificationSettingsSignedToken notificationSignedToken = (NotificationSettingsSignedToken) signedToken;
return synapseClient.updateNotificationSettings(notificationSignedToken);
} else if (signedToken instanceof NewUserSignedToken) {
//TODO
throw new BadRequestException("Not yet implemented");
} else {
throw new BadRequestException("token not supported: " + signedToken.getClass().getName());
}
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public SignedTokenInterface hexDecodeAndDeserialize(String tokenTypeName, String signedTokenString) throws RestServiceException {
if (!isValidEnum(NotificationTokenType.class, tokenTypeName)) {
//error interpreting the token type, respond with a bad request
throw new BadRequestException("Invalid notification token type: " + tokenTypeName);
}
NotificationTokenType tokenType = NotificationTokenType.valueOf(tokenTypeName);
SignedTokenInterface signedToken = null;
try {
signedToken = SerializationUtils.hexDecodeAndDeserialize(signedTokenString, tokenType.classType);
} catch (Exception e) {
//error decoding, respond with a bad request
throw new BadRequestException(e.getMessage());
}
return signedToken;
}
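/**
 * Returns true if enumName names a constant of enumClass, e.g.
 * isValidEnum(NotificationTokenType.class, "JoinTeam") returns true for the
 * JoinTeam constant used above. Null-safe: a null name returns false.
 */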
public static <E extends Enum<E>> boolean isValidEnum(Class<E> enumClass,
String enumName) {
if (enumName == null) {
return false;
}
try {
Enum.valueOf(enumClass, enumName);
return true;
} catch (IllegalArgumentException ex) {
return false;
}
}
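/*
 * These endpoint helpers build GWT place tokens from the host page URL; for
 * example (illustrative URL, and assuming the enum's default toString(),
 * which returns the constant name), a hostPageBaseURL of
 * "https://www.synapse.org/" yields
 * "https://www.synapse.org/#!SignedToken:JoinTeam/" for the JoinTeam type.
 */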
public static String getTeamEndpoint(String hostPageBaseURL) {
return hostPageBaseURL + "#!Team:";
}
public static String getNotificationEndpoint(NotificationTokenType type, String hostPageBaseURL) {
return hostPageBaseURL + "#!SignedToken:"+ type.toString() + "/";
}
public static String getChallengeEndpoint(String hostPageBaseURL) {
return hostPageBaseURL + "#!Synapse:";
}
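/*
 * Example endpoint construction (illustrative base URL only): with
 * hostPageBaseURL = "https://www.example.org/Portal.html" the helpers above produce
 *   getTeamEndpoint(...)      -> "https://www.example.org/Portal.html#!Team:"
 *   getNotificationEndpoint(NotificationTokenType.Settings, ...)
 *                             -> "https://www.example.org/Portal.html#!SignedToken:Settings/"
 *   getChallengeEndpoint(...) -> "https://www.example.org/Portal.html#!Synapse:"
 */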
@Override
public LogEntry hexDecodeLogEntry(String encodedLogEntry) {
return SerializationUtils.hexDecodeAndDeserialize(encodedLogEntry, LogEntry.class);
}
@Override
public String hexEncodeLogEntry(LogEntry logEntry) {
return SerializationUtils.serializeAndHexEncode(logEntry);
}
@Override
public String getAPIKey() throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.retrieveApiKey();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String createColumnModel(String columnModelJson)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
ColumnModel column = new ColumnModel(
adapterFactory.createNew(columnModelJson));
ColumnModel createdColumn = synapseClient.createColumnModel(column);
return createdColumn.writeToJSONObject(adapterFactory.createNew())
.toJSONString();
} catch (Exception e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public List<ColumnModel> getColumnModelsForTableEntity(String tableEntityId)
throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient
.getColumnModelsForTableEntity(tableEntityId);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public List<ColumnModel> getDefaultColumnsForView(ViewType type) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
List<ColumnModel> defaultColumns = synapseClient.getDefaultColumnsForView(type);
return defaultColumns;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String sendMessage(
Set<String> recipients,
String subject,
String messageBody,
String hostPageBaseURL) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
MessageToUser message = new MessageToUser();
message.setRecipients(recipients);
message.setSubject(subject);
String settingsEndpoint = getNotificationEndpoint(NotificationTokenType.Settings, hostPageBaseURL);
message.setNotificationUnsubscribeEndpoint(settingsEndpoint);
String cleanedMessageBody = Jsoup.clean(messageBody, "", Whitelist.simpleText().addTags("br"), new OutputSettings().prettyPrint(false));
MessageToUser sentMessage = synapseClient.sendStringMessage(message, cleanedMessageBody);
JSONObjectAdapter sentMessageJson = sentMessage
.writeToJSONObject(adapterFactory.createNew());
return sentMessageJson.toJSONString();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (JSONObjectAdapterException e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public String sendMessageToEntityOwner(
String entityId,
String subject,
String messageBody,
String hostPageBaseURL) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
MessageToUser message = new MessageToUser();
message.setSubject(subject);
String settingsEndpoint = getNotificationEndpoint(NotificationTokenType.Settings, hostPageBaseURL);
message.setNotificationUnsubscribeEndpoint(settingsEndpoint);
String cleanedMessageBody = Jsoup.clean(messageBody, Whitelist.none());
MessageToUser sentMessage = synapseClient.sendStringMessage(message, entityId, cleanedMessageBody);
JSONObjectAdapter sentMessageJson = sentMessage
.writeToJSONObject(adapterFactory.createNew());
return sentMessageJson.toJSONString();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (JSONObjectAdapterException e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public Boolean isAliasAvailable(String alias, String aliasType)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createAnonymousSynapseClient();
try {
AliasType type = AliasType.valueOf(aliasType);
AliasCheckRequest request = new AliasCheckRequest();
request.setAlias(alias);
request.setType(type);
AliasCheckResponse response = synapseClient
.checkAliasAvailable(request);
return response.getAvailable();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
private void handleTableUnavailableException(
SynapseTableUnavailableException e) throws TableUnavilableException {
try {
throw new TableUnavilableException(e.getStatus()
.writeToJSONObject(adapterFactory.createNew())
.toJSONString());
} catch (JSONObjectAdapterException e1) {
throw new TableUnavilableException(e.getMessage());
}
}
@Override
public HashMap<String, org.sagebionetworks.web.shared.WikiPageKey> getPageNameToWikiKeyMap()
throws RestServiceException {
initHelpPagesMap();
return pageName2WikiKeyMap;
}
private void initHelpPagesMap() {
if (pageName2WikiKeyMap == null) {
HashMap<String, org.sagebionetworks.web.shared.WikiPageKey> tempMap = new HashMap<String, org.sagebionetworks.web.shared.WikiPageKey>();
HashMap<String, String> properties = getSynapseProperties();
for (String key : properties.keySet()) {
if (key.startsWith(WebConstants.WIKI_PROPERTIES_PACKAGE)) {
String value = properties.get(key);
String[] tokens = value.split("/");
String synId = null;
String wikiId = null;
if (tokens.length == 2) {
synId = tokens[0];
wikiId = tokens[1];
} else if (tokens.length == 1) {
synId = value;
}
tempMap.put(key.substring(WebConstants.WIKI_PROPERTIES_PACKAGE.length()),
new org.sagebionetworks.web.shared.WikiPageKey(synId, ObjectType.ENTITY.toString(), wikiId));
}
}
//Workshop
addHelpPageMapping(tempMap, WebConstants.COLLABORATORIUM, WebConstants.COLLABORATORIUM_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_I, WebConstants.STAGE_I_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_II, WebConstants.STAGE_II_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_III, WebConstants.STAGE_III_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_IV, WebConstants.STAGE_IV_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_V, WebConstants.STAGE_V_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_VI, WebConstants.STAGE_VI_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_VII, WebConstants.STAGE_VII_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_VIII, WebConstants.STAGE_VIII_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_IX, WebConstants.STAGE_IX_ENTITY_ID_PROPERTY, null);
addHelpPageMapping(tempMap, WebConstants.STAGE_X, WebConstants.STAGE_X_ENTITY_ID_PROPERTY, null);
pageName2WikiKeyMap = tempMap;
}
}
private void addHelpPageMapping(HashMap<String, org.sagebionetworks.web.shared.WikiPageKey> mapping, String token, String entityIdPropertyKey, String wikiIdPropertyKey) {
String wikiIdProperty = wikiIdPropertyKey != null ? getSynapseProperty(wikiIdPropertyKey) : "";
mapping.put(
token,
new org.sagebionetworks.web.shared.WikiPageKey(
getSynapseProperty(entityIdPropertyKey),
ObjectType.ENTITY.toString(),
wikiIdProperty));
}
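/*
 * Property value format assumed by initHelpPagesMap() above (hypothetical key
 * shown): either "synId/wikiId" or just "synId" with no wiki id, e.g.
 *   <WIKI_PROPERTIES_PACKAGE>.SomePage=syn1234567/56789
 * yields pageName2WikiKeyMap entry "SomePage" -> WikiPageKey(syn1234567, ENTITY, 56789).
 */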
public Set<String> getWikiBasedEntities() throws RestServiceException {
initWikiEntities();
return wikiBasedEntities;
}
private void initWikiEntities() {
if (wikiBasedEntities == null) {
HashSet<String> tempSet = new HashSet<String>();
tempSet.add(getSynapseProperty(WebConstants.GETTING_STARTED_GUIDE_ENTITY_ID_PROPERTY));
tempSet.add(getSynapseProperty(WebConstants.CREATE_PROJECT_ENTITY_ID_PROPERTY));
tempSet.add(getSynapseProperty(WebConstants.R_CLIENT_ENTITY_ID_PROPERTY));
tempSet.add(getSynapseProperty(WebConstants.PYTHON_CLIENT_ENTITY_ID_PROPERTY));
tempSet.add(getSynapseProperty(WebConstants.FORMATTING_GUIDE_ENTITY_ID_PROPERTY));
// because wikiBasedEntities is volatile, current state will be
// reflected in all threads
wikiBasedEntities = tempSet;
}
}
@Override
public String deleteApiKey() throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
synapseClient.invalidateApiKey();
return getAPIKey();
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String deleteRowsFromTable(String toDelete)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
RowSelection toDeleteSet = new RowSelection(
adapterFactory.createNew(toDelete));
RowReferenceSet responseSet = synapseClient
.deleteRowsFromTable(toDeleteSet);
return responseSet.writeToJSONObject(adapterFactory.createNew())
.toJSONString();
} catch (SynapseTableUnavailableException e) {
try {
throw new TableUnavilableException(e.getStatus()
.writeToJSONObject(adapterFactory.createNew())
.toJSONString());
} catch (JSONObjectAdapterException e1) {
throw new TableUnavilableException(e.getMessage());
}
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (JSONObjectAdapterException e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public TableUpdateTransactionRequest getTableUpdateTransactionRequest(String tableId, List<ColumnModel> oldSchema, List<ColumnModel> proposedSchema)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
// Create any models that do not have an ID, or that have changed
Map<String, ColumnModel> oldColumnModelId2Model = new HashMap<String, ColumnModel>();
for (ColumnModel columnModel : oldSchema) {
oldColumnModelId2Model.put(columnModel.getId(), columnModel);
}
List<ColumnModel> newSchema = new ArrayList<ColumnModel>();
for (ColumnModel m : proposedSchema) {
// copy column model
ColumnModel copy = new ColumnModel();
JSONObjectAdapter adapter = adapterFactory.createNew();
m.writeToJSONObject(adapter);
copy.initializeFromJSONObject(adapter);
if (copy.getId() != null) {
// any changes to the existing column model?
ColumnModel oldColumnModel = oldColumnModelId2Model.get(copy.getId());
if (oldColumnModel != null && !oldColumnModel.equals(copy)) {
copy.setId(null);
}
}
newSchema.add(copy);
}
newSchema = synapseClient.createColumnModels(newSchema);
List<ColumnChange> changes = new ArrayList<ColumnChange>();
// now that all columns have been created, figure out the column changes (create, update, and no-op)
// keep track of column ids to figure out what columns were deleted.
Set<String> columnIds = new HashSet<String>();
for (int i = 0; i < proposedSchema.size(); i++) {
String oldColumnId = proposedSchema.get(i).getId();
String newColumnId = newSchema.get(i).getId();
columnIds.add(oldColumnId);
columnIds.add(newColumnId);
if (!Objects.equals(oldColumnId, newColumnId)) {
changes.add(createNewColumnChange(oldColumnId, newColumnId));
}
}
// delete columns that are not represented in the changes already (create or update)
for (ColumnModel oldColumnModel : oldSchema) {
String oldColumnId = oldColumnModel.getId();
if (!columnIds.contains(oldColumnId)) {
changes.add(createNewColumnChange(oldColumnId, null));
}
}
TableUpdateTransactionRequest request = new TableUpdateTransactionRequest();
request.setEntityId(tableId);
List<TableUpdateRequest> requestChangeList = new ArrayList<TableUpdateRequest>();
if (!changes.isEmpty()) {
TableSchemaChangeRequest newTableSchemaChangeRequest = new TableSchemaChangeRequest();
newTableSchemaChangeRequest.setEntityId(tableId);
newTableSchemaChangeRequest.setChanges(changes);
requestChangeList.add(newTableSchemaChangeRequest);
}
request.setChanges(requestChangeList);
return request;
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (JSONObjectAdapterException e) {
throw new UnknownErrorException(e.getMessage());
}
}
private ColumnChange createNewColumnChange(String oldColumnId, String newColumnId) {
ColumnChange columnChange = new ColumnChange();
columnChange.setOldColumnId(oldColumnId);
columnChange.setNewColumnId(newColumnId);
return columnChange;
}
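/*
 * Sketch of the schema diff built by getTableUpdateTransactionRequest() above
 * (illustrative ids): proposed columns whose content changed have their id
 * cleared, so createColumnModels() assigns fresh ids; pairing old and new ids
 * index-by-index then yields
 *   ("1" -> "7")  update (column 1 was edited and re-created as 7)
 *   (null -> "8") create (brand-new column)
 *   ("2" -> null) delete (old column 2 absent from the proposed schema)
 */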
@Override
public void validateTableQuery(String sql) throws RestServiceException {
try {
TableQueryParser.parserQuery(sql);
} catch (ParseException e) {
throw new TableQueryParseException(e.getMessage());
}
}
@Override
public String toggleSortOnTableQuery(String sql, String header) throws RestServiceException {
try {
return TableSqlProcessor.toggleSort(sql, header);
} catch (ParseException e) {
throw new TableQueryParseException(e.getMessage());
}
}
@Override
public List<SortItem> getSortFromTableQuery(String sql)
throws RestServiceException {
try {
return TableSqlProcessor.getSortingInfo(sql);
} catch (ParseException e) {
throw new TableQueryParseException(e.getMessage());
}
}
@Override
public String startAsynchJob(AsynchType type, AsynchronousRequestBody body)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try{
return synapseClient.startAsynchJob(AsynchJobType.valueOf(type.name()), body);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public AsynchronousResponseBody getAsynchJobResults(AsynchType type, String jobId, AsynchronousRequestBody body)
throws RestServiceException, ResultNotReadyException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try{
return synapseClient.getAsyncResult(AsynchJobType.valueOf(type.name()), jobId, body);
} catch (SynapseResultNotReadyException e){
// This occurs when the job is not ready.
// Re-throw the ResultNotReadyException with the status JSON.
throw new ResultNotReadyException(e.getJobStatus());
}catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public EntityQueryResults executeEntityQuery(EntityQuery query) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try{
return synapseClient.entityQuery(query);
}catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Entity createEntity(Entity entity) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try{
return synapseClient.createEntity(entity);
}catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public FileHandle getFileHandle(String fileHandleId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try{
return synapseClient.getRawFileHandle(fileHandleId);
}catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String createFileHandleURL(String fileHandleId)
throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try{
URL url = synapseClient.getFileHandleTemporaryUrl(fileHandleId);
return url.toString();
}catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
} catch (IOException e) {
throw new UnknownErrorException(e.getMessage());
}
}
@Override
public List<ColumnModel> createTableColumns(List<ColumnModel> models) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try{
return synapseClient.createColumnModels(models);
}catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public List<UploadDestination> getUploadDestinations(String parentEntityId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try{
return synapseClient.getUploadDestinations(parentEntityId);
}catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public ProjectPagedResults getMyProjects(ProjectListType projectListType, int limit, int offset, ProjectListSortColumn sortBy, SortDirection sortDir) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
org.sagebionetworks.reflection.model.PaginatedResults<ProjectHeader> paginatedResults = synapseClient.getMyProjects(projectListType, sortBy, sortDir, limit, offset);
List<ProjectHeader> headers = (List<ProjectHeader>)paginatedResults.getResults();
List<String> lastModifiedByList = new LinkedList<String>();
for (ProjectHeader header: headers) {
if (header.getModifiedBy() != null)
lastModifiedByList.add(header.getModifiedBy().toString());
}
return new ProjectPagedResults(headers, safeLongToInt(paginatedResults.getTotalNumberOfResults()), listUserProfiles(lastModifiedByList));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public ProjectPagedResults getProjectsForTeam(String teamId, int limit, int offset, ProjectListSortColumn sortBy, SortDirection sortDir) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
Long teamIdLong = Long.parseLong(teamId);
org.sagebionetworks.reflection.model.PaginatedResults<ProjectHeader> paginatedResults = synapseClient.getProjectsForTeam(teamIdLong, sortBy, sortDir, limit, offset);
List<ProjectHeader> headers = (List<ProjectHeader>)paginatedResults.getResults();
List<String> lastModifiedByList = new LinkedList<String>();
for (ProjectHeader header: headers) {
if (header.getModifiedBy() != null)
lastModifiedByList.add(header.getModifiedBy().toString());
}
return new ProjectPagedResults(headers, safeLongToInt(paginatedResults.getTotalNumberOfResults()), listUserProfiles(lastModifiedByList));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public ProjectPagedResults getUserProjects(String userId, int limit, int offset, ProjectListSortColumn sortBy, SortDirection sortDir) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
Long userIdLong = Long.parseLong(userId);
org.sagebionetworks.reflection.model.PaginatedResults<ProjectHeader> paginatedResults = synapseClient.getProjectsFromUser(userIdLong, sortBy, sortDir, limit, offset);
List<ProjectHeader> headers = (List<ProjectHeader>)paginatedResults.getResults();
List<String> lastModifiedByList = new LinkedList<String>();
for (ProjectHeader header: headers) {
if (header.getModifiedBy() != null)
lastModifiedByList.add(header.getModifiedBy().toString());
}
return new ProjectPagedResults(headers, safeLongToInt(paginatedResults.getTotalNumberOfResults()), listUserProfiles(lastModifiedByList));
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public static int safeLongToInt(long l) {
if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) {
throw new IllegalArgumentException
("Cannot safely cast "+l+" to int without changing the value.");
}
return (int) l;
}
public String getHost(String urlString) throws RestServiceException {
if (urlString == null || urlString.length() == 0) {
throw new IllegalArgumentException("url is required");
}
if (urlString.toLowerCase().startsWith(WebConstants.SFTP_PREFIX)) {
urlString = "http://" + urlString.substring(WebConstants.SFTP_PREFIX.length());
}
try {
URL url = new URL(urlString);
return url.getHost();
} catch (MalformedURLException e) {
throw new BadRequestException(e.getMessage());
}
}
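/*
 * Note on the SFTP special case above: java.net.URL has no protocol handler
 * for "sftp", so the prefix is swapped for "http://" purely to reuse URL's
 * host parsing. Illustrative input:
 *   getHost("sftp://files.example.org/upload") -> "files.example.org"
 */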
@Override
public TableFileHandleResults getTableFileHandle(RowReferenceSet set) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
try {
return synapseClient.getFileHandlesFromTable(set);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
public void updateAnnotations(String entityId, Annotations annotations) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
synapseClient.updateAnnotations(entityId, annotations);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void deleteOpenMembershipRequests(String currentUserId, String teamId)
throws RestServiceException {
// Intentionally left as a no-op in this implementation.
}
@Override
public List<String> getMyLocationSettingBanners() throws RestServiceException{
try {
Set<String> banners = new HashSet<String>();
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
List<StorageLocationSetting> existingStorageLocations = synapseClient.getMyStorageLocationSettings();
for (StorageLocationSetting storageLocationSetting : existingStorageLocations) {
if (storageLocationSetting.getBanner() != null) {
banners.add(storageLocationSetting.getBanner());
}
}
return new ArrayList<String>(banners);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public StorageLocationSetting getStorageLocationSetting(String parentEntityId) throws RestServiceException{
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
UploadDestination uploadDestination = synapseClient.getDefaultUploadDestination(parentEntityId);
if (uploadDestination == null || defaultStorageLocation.equals(uploadDestination.getStorageLocationId())) {
//default storage location
return null;
}
//else
return synapseClient.getMyStorageLocationSetting(uploadDestination.getStorageLocationId());
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public void createStorageLocationSetting(String parentEntityId, StorageLocationSetting setting) throws RestServiceException{
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
//first, try to find a matching storage location setting for this user, and reuse
List<StorageLocationSetting> existingStorageLocations = synapseClient.getMyStorageLocationSettings();
Long locationId = null;
if (setting != null) {
for (StorageLocationSetting existingStorageLocationSetting : existingStorageLocations) {
Long existingLocationId = existingStorageLocationSetting.getStorageLocationId();
existingStorageLocationSetting.setCreatedOn(null);
existingStorageLocationSetting.setEtag(null);
existingStorageLocationSetting.setStorageLocationId(null);
existingStorageLocationSetting.setCreatedBy(null);
existingStorageLocationSetting.setDescription(null);
if (setting.equals(existingStorageLocationSetting)) {
//found matching storage location setting
locationId = existingLocationId;
break;
}
}
if (locationId == null) {
//not found, create a new one
locationId = synapseClient.createStorageLocationSetting(setting).getStorageLocationId();
}
} else {
locationId = defaultStorageLocation;
}
ArrayList<Long> locationIds = new ArrayList<Long>();
locationIds.add(locationId);
//update existing upload destination project/folder setting
UploadDestinationListSetting projectSetting = (UploadDestinationListSetting)synapseClient.getProjectSetting(parentEntityId, ProjectSettingsType.upload);
if (projectSetting != null) {
projectSetting.setLocations(locationIds);
synapseClient.updateProjectSetting(projectSetting);
} else {
//create new upload destination project/folder setting
projectSetting = new UploadDestinationListSetting();
projectSetting.setProjectId(parentEntityId);
projectSetting.setSettingsType(ProjectSettingsType.upload);
projectSetting.setLocations(locationIds);
synapseClient.createProjectSetting(projectSetting);
}
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Etag getEtag(String objectId, ObjectType objectType) throws RestServiceException{
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.getEtag(objectId, objectType);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Entity updateFileEntity(FileEntity toUpdate, FileHandleCopyRequest copyRequest) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
BatchFileHandleCopyRequest batchRequest = new BatchFileHandleCopyRequest();
batchRequest.setCopyRequests(Collections.singletonList(copyRequest));
BatchFileHandleCopyResult batchCopyResults = synapseClient.copyFileHandles(batchRequest);
List<FileHandleCopyResult> copyResults = batchCopyResults.getCopyResults();
// sanity check
if (copyResults.size() != 1) {
throw new UnknownErrorException("Copy file handle resulted in unexpected response list size.");
}
FileHandleCopyResult copyResult = copyResults.get(0);
if (copyResult.getFailureCode() != null) {
switch(copyResult.getFailureCode()) {
case NOT_FOUND:
throw new NotFoundException();
case UNAUTHORIZED:
throw new UnauthorizedException();
default:
throw new UnknownErrorException();
}
} else {
FileHandle newFileHandle = copyResult.getNewFileHandle();
toUpdate.setDataFileHandleId(newFileHandle.getId());
return synapseClient.putEntity(toUpdate);
}
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public BatchFileResult getFileHandleAndUrlBatch(BatchFileRequest request) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.getFileHandleAndUrlBatch(request);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public String generateSqlWithFacets(String basicSql, List<FacetColumnRequest> selectedFacets, List<ColumnModel> schema) throws RestServiceException {
try {
return TableSqlProcessor.generateSqlWithFacets(basicSql, selectedFacets, schema);
} catch (Exception e) {
throw new BadRequestException(e.getMessage());
}
}
@Override
public ColumnModelPage getPossibleColumnModelsForViewScope(ViewScope scope, String nextPageToken) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return synapseClient.getPossibleColumnModelsForViewScope(scope, nextPageToken);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public Boolean isUserAllowedToRenderHTML(String userId) throws RestServiceException {
return getHtmlTeamMembers().contains(userId);
}
@Override
public ProjectDisplayBundle getCountsForTabs(String projectId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
boolean isWiki = isWiki(projectId, synapseClient);
boolean isChallenge = isChallenge(projectId, synapseClient);
boolean isFile = isFileOrFolder(projectId, synapseClient);
boolean isTable = isTable(projectId, synapseClient);
boolean isDocker = isDocker(projectId, synapseClient);
boolean isForum = isForum(projectId, synapseClient);
return new ProjectDisplayBundle(isWiki, isFile, isTable, isChallenge, isForum, isDocker);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public boolean isChallenge(String projectId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return isChallenge(projectId, synapseClient);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public boolean isForum(String projectId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return isForum(projectId, synapseClient);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public boolean isDocker(String projectId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return isDocker(projectId, synapseClient);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public boolean isFileOrFolder(String projectId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return isFileOrFolder(projectId, synapseClient);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public boolean isTable(String projectId) throws RestServiceException {
try {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return isTable(projectId, synapseClient);
} catch (SynapseException e) {
throw ExceptionUtil.convertSynapseException(e);
}
}
@Override
public boolean isWiki(String projectId) throws RestServiceException {
org.sagebionetworks.client.SynapseClient synapseClient = createSynapseClient();
return isWiki(projectId, synapseClient);
}
private boolean isForum(String projectId, org.sagebionetworks.client.SynapseClient synapseClient) throws SynapseException {
Forum forum = synapseClient.getForumByProjectId(projectId);
return synapseClient.getThreadCountForForum(forum.getId(), DiscussionFilter.NO_FILTER).getCount() > 0;
}
private boolean isDocker(String projectId, org.sagebionetworks.client.SynapseClient synapseClient) throws SynapseException {
EntityQuery query = getQuery(projectId, new String[] {EntityType.dockerrepo.toString()});
EntityQueryResults results = synapseClient.entityQuery(query);
return results.getEntities().size() > 0;
}
private boolean isFileOrFolder(String projectId, org.sagebionetworks.client.SynapseClient synapseClient) throws SynapseException {
EntityQuery query = getQuery(projectId, new String[] {EntityType.file.toString(), EntityType.folder.toString()});
EntityQueryResults results = synapseClient.entityQuery(query);
return results.getEntities().size() > 0;
}
private boolean isTable(String projectId, org.sagebionetworks.client.SynapseClient synapseClient) throws SynapseException {
EntityQuery query = getQuery(projectId, new String[] {EntityType.table.toString()});
EntityQueryResults results = synapseClient.entityQuery(query);
return results.getEntities().size() > 0;
}
private EntityQuery getQuery(String parentId, String[] entityTypes) {
EntityQuery newQuery = new EntityQuery();
Sort sort = new Sort();
sort.setColumnName(EntityFieldName.name.name());
sort.setDirection(SortDirection.ASC);
newQuery.setSort(sort);
Condition parentCondition = EntityQueryUtils.buildCondition(
EntityFieldName.parentId, Operator.EQUALS, parentId);
Condition typeCondition = EntityQueryUtils.buildCondition(
EntityFieldName.nodeType, Operator.IN, entityTypes);
newQuery.setConditions(Arrays.asList(parentCondition, typeCondition));
newQuery.setLimit(1L);
newQuery.setOffset(0L);
return newQuery;
}
private boolean isWiki(String projectId, org.sagebionetworks.client.SynapseClient synapseClient) throws RestServiceException {
try {
getRootWikiId(synapseClient, projectId, ObjectType.ENTITY);
return true;
} catch (NotFoundException ex) {
return false;
}
}
private boolean isChallenge(String projectId, org.sagebionetworks.client.SynapseClient synapseClient) throws SynapseException {
// are there any evaluations that the current user can edit?
return ChallengeClientImpl.getShareableEvaluations(projectId, synapseClient).size() > 0;
}
}
|
package org.spongepowered.asm.mixin.transformer;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.UUID;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.spongepowered.asm.lib.tree.ClassNode;
import org.spongepowered.asm.mixin.MixinEnvironment;
import org.spongepowered.asm.mixin.MixinEnvironment.Option;
import org.spongepowered.asm.mixin.MixinEnvironment.Phase;
import org.spongepowered.asm.mixin.Mixins;
import org.spongepowered.asm.mixin.extensibility.IMixinConfig;
import org.spongepowered.asm.mixin.extensibility.IMixinConfigPlugin;
import org.spongepowered.asm.mixin.extensibility.IMixinErrorHandler;
import org.spongepowered.asm.mixin.extensibility.IMixinErrorHandler.ErrorAction;
import org.spongepowered.asm.mixin.extensibility.IMixinInfo;
import org.spongepowered.asm.mixin.throwables.ClassAlreadyLoadedException;
import org.spongepowered.asm.mixin.throwables.MixinApplyError;
import org.spongepowered.asm.mixin.throwables.MixinException;
import org.spongepowered.asm.mixin.throwables.MixinPrepareError;
import org.spongepowered.asm.mixin.transformer.MixinConfig.IListener;
import org.spongepowered.asm.mixin.transformer.MixinTransformerModuleCheckClass.ValidationFailedException;
import org.spongepowered.asm.mixin.transformer.debug.IDecompiler;
import org.spongepowered.asm.mixin.transformer.debug.IHotSwap;
import org.spongepowered.asm.mixin.transformer.meta.MixinMerged;
import org.spongepowered.asm.mixin.transformer.throwables.InvalidMixinException;
import org.spongepowered.asm.mixin.transformer.throwables.MixinTransformerError;
import org.spongepowered.asm.transformers.TreeTransformer;
import org.spongepowered.asm.util.Constants;
import org.spongepowered.asm.util.PrettyPrinter;
import net.minecraft.launchwrapper.IClassTransformer;
import net.minecraft.launchwrapper.Launch;
/**
* Transformer which manages the mixin configuration and application process
*/
public class MixinTransformer extends TreeTransformer {
/**
* Phase during which an error occurred, delegates to functionality in
* available handler
*/
static enum ErrorPhase {
/**
* Error during initialisation of a MixinConfig
*/
PREPARE {
@Override
ErrorAction onError(IMixinErrorHandler handler, String context, InvalidMixinException ex, IMixinInfo mixin, ErrorAction action) {
try {
return handler.onPrepareError(mixin.getConfig(), ex, mixin, action);
} catch (AbstractMethodError ame) {
// Catch if error handler is pre-0.5.4
return action;
}
}
@Override
protected String getContext(IMixinInfo mixin, String context) {
return String.format("preparing %s in %s", mixin.getName(), context);
}
},
/**
* Error during application of a mixin to a target class
*/
APPLY {
@Override
ErrorAction onError(IMixinErrorHandler handler, String context, InvalidMixinException ex, IMixinInfo mixin, ErrorAction action) {
try {
return handler.onApplyError(context, ex, mixin, action);
} catch (AbstractMethodError ame) {
// Catch if error handler is pre-0.5.4
return action;
}
}
@Override
protected String getContext(IMixinInfo mixin, String context) {
return String.format("%s -> %s", mixin, context);
}
};
/**
* Human-readable name
*/
private final String text;
private ErrorPhase() {
this.text = this.name().toLowerCase();
}
abstract ErrorAction onError(IMixinErrorHandler handler, String context, InvalidMixinException ex, IMixinInfo mixin, ErrorAction action);
protected abstract String getContext(IMixinInfo mixin, String context);
public String getLogMessage(String context, InvalidMixinException ex, IMixinInfo mixin) {
return String.format("Mixin %s failed %s: %s %s", this.text, this.getContext(mixin, context), ex.getClass().getName(), ex.getMessage());
}
public String getErrorMessage(IMixinInfo mixin, IMixinConfig config, Phase phase) {
return String.format("Mixin [%s] from phase [%s] in config [%s] FAILED during %s", mixin, phase, config, this.name());
}
}
/**
* Proxy transformer for the mixin transformer. These transformers are used
* to allow the mixin transformer to be re-registered in the transformer
* chain at a later stage in startup without having to fully re-initialise
* the mixin transformer itself. Only the latest proxy to be instantiated
* will actually provide callbacks to the underlying mixin transformer.
*/
public static class Proxy implements IClassTransformer {
/**
* All existing proxies
*/
private static List<Proxy> proxies = new ArrayList<Proxy>();
/**
* Actual mixin transformer instance
*/
private static MixinTransformer transformer = new MixinTransformer();
/**
* True if this is the active proxy, newer proxies disable their older
* siblings
*/
private boolean isActive = true;
public Proxy() {
for (Proxy hook : Proxy.proxies) {
hook.isActive = false;
}
Proxy.proxies.add(this);
LogManager.getLogger("mixin").debug("Adding new mixin transformer proxy #{}", Proxy.proxies.size());
}
@Override
public byte[] transform(String name, String transformedName, byte[] basicClass) {
if (this.isActive) {
return Proxy.transformer.transform(name, transformedName, basicClass);
}
return basicClass;
}
}
/**
* Re-entrance semaphore used to share re-entrance data with the TreeInfo
*/
class ReEntranceState {
/**
* Max valid depth
*/
private final int maxDepth;
/**
* Re-entrance depth
*/
private int depth = 0;
/**
* Semaphore set once the re-entrance depth exceeds the configured max depth
*/
private boolean semaphore = false;
public ReEntranceState(int maxDepth) {
this.maxDepth = maxDepth;
}
/**
* Get max depth
*/
public int getMaxDepth() {
return this.maxDepth;
}
/**
* Get current depth
*/
public int getDepth() {
return this.depth;
}
/**
* Increase the re-entrance depth counter and set the semaphore if depth
* exceeds max depth
*
* @return fluent interface
*/
ReEntranceState push() {
this.depth++;
this.checkAndSet();
return this;
}
/**
* Decrease the re-entrance depth
*
* @return fluent interface
*/
ReEntranceState pop() {
if (this.depth == 0) {
throw new IllegalStateException("ReEntranceState pop() with zero depth");
}
this.depth--;
return this;
}
/**
* Run the depth check but do not set the semaphore
*
* @return true if depth has exceeded max
*/
boolean check() {
return this.depth > this.maxDepth;
}
/**
* Run the depth check and set the semaphore if depth is exceeded
*
* @return true if semaphore is set
*/
boolean checkAndSet() {
return this.semaphore |= this.check();
}
/**
* Set the semaphore
*
* @return fluent interface
*/
ReEntranceState set() {
this.semaphore = true;
return this;
}
/**
* Get whether the semaphore is set
*/
boolean isSet() {
return this.semaphore;
}
/**
* Clear the semaphore
*
* @return fluent interface
*/
ReEntranceState clear() {
this.semaphore = false;
return this;
}
}
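/*
 * Illustrative use of ReEntranceState, mirroring transform() below: with
 * maxDepth = 1 the outermost transform call pushes to depth 1 (check passes);
 * a nested class load inside that call pushes to depth 2, so check() returns
 * true and the semaphore latches, letting the caller detect re-entrance.
 *
 *   boolean locked = lock.push().check(); // true only when re-entered
 *   try { ... } finally { lock.pop(); }
 */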
/**
* Debug exporter
*/
static class Exporter {
/**
* Directory to export classes to when debug.export is enabled
*/
private final File classExportDir = new File(MixinTransformer.DEBUG_OUTPUT, "class");
/**
* Runtime decompiler for exported classes
*/
private final IDecompiler decompiler;
Exporter() {
this.decompiler = this.initDecompiler(new File(MixinTransformer.DEBUG_OUTPUT, "java"));
try {
FileUtils.deleteDirectory(this.classExportDir);
} catch (IOException ex) {
MixinTransformer.logger.warn("Error cleaning class output directory: {}", ex.getMessage());
}
}
private IDecompiler initDecompiler(File outputPath) {
MixinEnvironment env = MixinEnvironment.getCurrentEnvironment();
if (!env.getOption(Option.DEBUG_EXPORT_DECOMPILE)) {
return null;
}
try {
boolean as = env.getOption(Option.DEBUG_EXPORT_DECOMPILE_THREADED);
MixinTransformer.logger.info("Attempting to load Fernflower decompiler{}", as ? " (Threaded mode)" : "");
String className = "org.spongepowered.asm.mixin.transformer.debug.RuntimeDecompiler" + (as ? "Async" : "");
@SuppressWarnings("unchecked")
Class<? extends IDecompiler> clazz = (Class<? extends IDecompiler>)Class.forName(className);
Constructor<? extends IDecompiler> ctor = clazz.getDeclaredConstructor(File.class);
IDecompiler decompiler = ctor.newInstance(outputPath);
MixinTransformer.logger.info("Fernflower decompiler was successfully initialised, exported classes will be decompiled{}",
as ? " in a separate thread" : "");
return decompiler;
} catch (Throwable th) {
MixinTransformer.logger.info("Fernflower could not be loaded, exported classes will not be decompiled. {}: {}",
th.getClass().getSimpleName(), th.getMessage());
}
return null;
}
private String prepareFilter(String filter) {
filter = "^\\Q" + filter.replace("**", "\201").replace("*", "\202").replace("?", "\203") + "\\E$";
return filter.replace("\201", "\\E.*\\Q").replace("\202", "\\E[^\\.]+\\Q").replace("\203", "\\E.\\Q").replace("\\Q\\E", "");
}
private boolean applyFilter(String filter, String subject) {
return Pattern.compile(this.prepareFilter(filter), Pattern.CASE_INSENSITIVE).matcher(subject).matches();
}
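/*
 * Glob-to-regex translation performed by prepareFilter(), derived from the
 * replacements above: "**" matches across package separators, "*" matches a
 * single package segment, and "?" matches one character. For example:
 *   applyFilter("com.example.**", "com.example.sub.Foo") -> true
 *   applyFilter("com.example.*",  "com.example.sub.Foo") -> false (dots excluded)
 */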
void export(String transformedName, boolean force, byte[] bytes) {
// Export transformed class for debugging purposes
MixinEnvironment environment = MixinEnvironment.getCurrentEnvironment();
if (force || environment.getOption(Option.DEBUG_EXPORT)) {
String filter = environment.getOptionValue(Option.DEBUG_EXPORT_FILTER);
if (force || filter == null || this.applyFilter(filter, transformedName)) {
File outputFile = this.dumpClass(transformedName.replace('.', '/'), bytes);
if (this.decompiler != null) {
this.decompiler.decompile(outputFile);
}
}
}
}
File dumpClass(String fileName, byte[] bytes) {
File outputFile = new File(this.classExportDir, fileName + ".class");
try {
FileUtils.writeByteArrayToFile(outputFile, bytes);
} catch (IOException ex) {
// don't care
}
return outputFile;
}
}
static final File DEBUG_OUTPUT = new File(Constants.DEBUG_OUTPUT_PATH);
/**
* Log all the things
*/
static final Logger logger = LogManager.getLogger("mixin");
/**
* All mixin configuration bundles
*/
private final List<MixinConfig> configs = new ArrayList<MixinConfig>();
/**
* Uninitialised mixin configuration bundles
*/
private final List<MixinConfig> pendingConfigs = new ArrayList<MixinConfig>();
/**
* Transformer modules
*/
private final List<IMixinTransformerModule> modules = new ArrayList<IMixinTransformerModule>();
/**
* Re-entrance detector
*/
private final ReEntranceState lock = new ReEntranceState(1);
/**
* Session ID, used as a check when parsing {@link MixinMerged} annotations
* to prevent them from being applied at compile time by people trying to
* circumvent mixin application
*/
private final String sessionId = UUID.randomUUID().toString();
/**
* Export manager
*/
private final Exporter exporter;
/**
* Hot-Swap agent
*/
private final IHotSwap hotSwapper;
/**
* Postprocessor for passthrough
*/
private final MixinPostProcessor postProcessor;
/**
* Current environment
*/
private MixinEnvironment currentEnvironment;
/**
* Logging level for verbose messages
*/
private Level verboseLoggingLevel = Level.DEBUG;
/**
* Handling an error state; do not process further mixins
*/
private boolean errorState = false;
/**
* Number of classes transformed in the current phase
*/
private int transformedCount = 0;
/**
* ctor
*/
MixinTransformer() {
MixinEnvironment environment = MixinEnvironment.getCurrentEnvironment();
Object globalMixinTransformer = environment.getActiveTransformer();
if (globalMixinTransformer instanceof IClassTransformer) {
throw new MixinException("Terminating MixinTransformer instance " + this);
}
// I am a leaf on the wind
environment.setActiveTransformer(this);
TreeInfo.setLock(this.lock);
this.exporter = new Exporter();
this.hotSwapper = this.initHotSwapper();
this.postProcessor = new MixinPostProcessor();
}
private IHotSwap initHotSwapper() {
if (!MixinEnvironment.getCurrentEnvironment().getOption(Option.HOT_SWAP)) {
return null;
}
try {
MixinTransformer.logger.info("Attempting to load Hot-Swap agent");
@SuppressWarnings("unchecked")
Class<? extends IHotSwap> clazz =
(Class<? extends IHotSwap>)Class.forName("org.spongepowered.tools.agent.MixinAgent");
Constructor<? extends IHotSwap> ctor = clazz.getDeclaredConstructor(MixinTransformer.class);
return ctor.newInstance(this);
} catch (Throwable th) {
MixinTransformer.logger.info("Hot-swap agent could not be loaded, hot swapping of mixins won't work. {}: {}",
th.getClass().getSimpleName(), th.getMessage());
}
return null;
}
/**
* Force-load all classes targeted by mixins but not yet applied
*/
public void audit() {
Set<String> unhandled = new HashSet<String>();
for (MixinConfig config : this.configs) {
unhandled.addAll(config.getUnhandledTargets());
}
Logger auditLogger = LogManager.getLogger("mixin/audit");
for (String target : unhandled) {
try {
auditLogger.info("Force-loading class {}", target);
Class.forName(target, true, Launch.classLoader);
} catch (ClassNotFoundException ex) {
auditLogger.error("Could not force-load " + target, ex);
}
}
for (MixinConfig config : this.configs) {
for (String target : config.getUnhandledTargets()) {
ClassAlreadyLoadedException ex = new ClassAlreadyLoadedException(target + " was already classloaded");
auditLogger.error("Could not force-load " + target, ex);
}
}
}
/* (non-Javadoc)
* @see net.minecraft.launchwrapper.IClassTransformer
* #transform(java.lang.String, java.lang.String, byte[])
*/
@Override
public synchronized byte[] transform(String name, String transformedName, byte[] basicClass) {
if (basicClass == null || transformedName == null || this.errorState) {
return basicClass;
}
boolean locked = this.lock.push().check();
MixinEnvironment environment = MixinEnvironment.getCurrentEnvironment();
if (!locked) {
try {
this.checkSelect(environment);
} catch (Exception ex) {
this.lock.pop();
throw new MixinException(ex);
}
}
try {
if (this.postProcessor.canTransform(transformedName)) {
byte[] bytes = this.postProcessor.transform(name, transformedName, basicClass);
this.exporter.export(transformedName, false, bytes);
return bytes;
}
SortedSet<MixinInfo> mixins = null;
boolean invalidRef = false;
for (MixinConfig config : this.configs) {
if (config.packageMatch(transformedName)) {
invalidRef = true;
continue;
}
if (config.hasMixinsFor(transformedName)) {
if (mixins == null) {
mixins = new TreeSet<MixinInfo>();
}
// Get and sort mixins for the class
mixins.addAll(config.getMixinsFor(transformedName));
}
}
if (invalidRef) {
throw new NoClassDefFoundError(String.format("%s is a mixin class and cannot be referenced directly", transformedName));
}
if (mixins != null) {
// Re-entrance is "safe" as long as we don't need to apply any mixins, if there are mixins then we need to panic now
if (locked) {
MixinTransformer.logger.warn("Re-entrance detected, this will cause serious problems.", new MixinException());
throw new MixinApplyError("Re-entrance error.");
}
if (this.hotSwapper != null) {
this.hotSwapper.registerTargetClass(transformedName, basicClass);
}
try {
// Tree for target class
ClassNode targetClassNode = this.readClass(basicClass, true);
TargetClassContext context = new TargetClassContext(this.sessionId, transformedName, targetClassNode, mixins);
basicClass = this.applyMixins(context);
this.transformedCount++;
} catch (InvalidMixinException th) {
this.dumpClassOnFailure(transformedName, basicClass, environment);
this.handleMixinApplyError(transformedName, th, environment);
}
}
return basicClass;
} catch (Throwable th) {
th.printStackTrace();
this.dumpClassOnFailure(transformedName, basicClass, environment);
throw new MixinTransformerError("An unexpected critical error was encountered", th);
} finally {
this.lock.pop();
}
}
/**
* Update a mixin class with new bytecode.
*
* @param mixinClass Name of the mixin
* @param bytes New bytecode
* @return List of classes that need to be updated
*/
public List<String> reload(String mixinClass, byte[] bytes) {
if (this.lock.getDepth() > 0) {
throw new MixinApplyError("Cannot reload mixin if re-entrant lock entered");
}
List<String> targets = new ArrayList<String>();
for (MixinConfig config : this.configs) {
targets.addAll(config.reloadMixin(mixinClass, bytes));
}
return targets;
}
private void checkSelect(MixinEnvironment environment) {
if (this.currentEnvironment != environment) {
this.select(environment);
return;
}
int unvisitedCount = Mixins.getUnvisitedCount();
if (unvisitedCount > 0 && this.transformedCount == 0) {
this.select(environment);
}
}
private void select(MixinEnvironment environment) {
this.verboseLoggingLevel = (environment.getOption(Option.DEBUG_VERBOSE)) ? Level.INFO : Level.DEBUG;
if (this.transformedCount > 0) {
MixinTransformer.logger.log(this.verboseLoggingLevel, "Ending {}, applied {} mixins", this.currentEnvironment, this.transformedCount);
}
String action = this.currentEnvironment == environment ? "Checking for additional" : "Preparing";
MixinTransformer.logger.log(this.verboseLoggingLevel, "{} mixins for {}", action, environment);
long startTime = System.currentTimeMillis();
this.selectConfigs(environment);
this.selectModules(environment);
int totalMixins = this.prepareConfigs(environment);
this.currentEnvironment = environment;
this.transformedCount = 0;
double elapsedTime = (System.currentTimeMillis() - startTime) * 0.001D;
if (elapsedTime > 0.25D) {
String elapsed = new DecimalFormat("###0.000").format(elapsedTime);
String perMixinTime = new DecimalFormat("###0.0").format((elapsedTime / totalMixins) * 1000.0);
MixinTransformer.logger.log(this.verboseLoggingLevel, "Prepared {} mixins in {} sec ({} msec avg.)", totalMixins, elapsed, perMixinTime);
}
}
/**
* Add configurations from the supplied mixin environment to the configs set
*
* @param environment Environment to query
*/
private void selectConfigs(MixinEnvironment environment) {
for (Iterator<Config> iter = Mixins.getConfigs().iterator(); iter.hasNext();) {
Config handle = iter.next();
try {
MixinConfig config = handle.get();
if (config.select(environment)) {
iter.remove();
MixinTransformer.logger.log(this.verboseLoggingLevel, "Selecting config {}", config);
config.onSelect();
this.pendingConfigs.add(config);
}
} catch (Exception ex) {
MixinTransformer.logger.warn(String.format("Failed to select mixin config: %s", handle), ex);
}
}
Collections.sort(this.pendingConfigs);
}
/**
* Set up this transformer using options from the supplied environment
*
* @param environment Environment to query
*/
private void selectModules(MixinEnvironment environment) {
this.modules.clear();
// Run CheckClassAdapter on the mixin bytecode if debug option is enabled
if (environment.getOption(Option.DEBUG_VERIFY)) {
this.modules.add(new MixinTransformerModuleCheckClass());
}
// Run implementation checker if option is enabled
if (environment.getOption(Option.CHECK_IMPLEMENTS)) {
this.modules.add(new MixinTransformerModuleInterfaceChecker());
}
}
/**
* Prepare mixin configs
*
* @param environment Environment
* @return total number of mixins initialised
*/
private int prepareConfigs(MixinEnvironment environment) {
int totalMixins = 0;
final IHotSwap hotSwapper = this.hotSwapper;
for (MixinConfig config : this.pendingConfigs) {
config.addListener(this.postProcessor);
if (hotSwapper != null) {
config.addListener(new IListener() {
@Override
public void onPrepare(MixinInfo mixin) {
hotSwapper.registerMixinClass(mixin.getClassName());
}
@Override
public void onInit(MixinInfo mixin) {
}
});
}
}
for (MixinConfig config : this.pendingConfigs) {
try {
MixinTransformer.logger.log(this.verboseLoggingLevel, "Preparing {} ({})", config, config.getDeclaredMixinCount());
config.prepare();
totalMixins += config.getMixinCount();
} catch (InvalidMixinException ex) {
this.handleMixinPrepareError(config, ex, environment);
} catch (Exception ex) {
String message = ex.getMessage();
MixinTransformer.logger.error("Error encountered whilst initialising mixin config '" + config.getName() + "': " + message, ex);
}
}
for (MixinConfig config : this.pendingConfigs) {
IMixinConfigPlugin plugin = config.getPlugin();
if (plugin == null) {
continue;
}
Set<String> otherTargets = new HashSet<String>();
for (MixinConfig otherConfig : this.pendingConfigs) {
if (!otherConfig.equals(config)) {
otherTargets.addAll(otherConfig.getTargets());
}
}
plugin.acceptTargets(config.getTargets(), Collections.unmodifiableSet(otherTargets));
}
for (MixinConfig config : this.pendingConfigs) {
try {
config.postInitialise();
} catch (InvalidMixinException ex) {
this.handleMixinPrepareError(config, ex, environment);
} catch (Exception ex) {
String message = ex.getMessage();
MixinTransformer.logger.error("Error encountered during mixin config postInit step'" + config.getName() + "': " + message, ex);
}
}
this.configs.addAll(this.pendingConfigs);
Collections.sort(this.configs);
this.pendingConfigs.clear();
return totalMixins;
}
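/*
 * Config lifecycle summary for prepareConfigs() above: every pending config is
 * prepare()d, each config's plugin (if any) receives its own targets plus the
 * union of all other configs' targets via acceptTargets(), postInitialise() is
 * invoked, and finally the pending configs are promoted into the active,
 * sorted config list.
 */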
/**
* Apply mixins for specified target class to the class described by the
* supplied byte array.
*
* @param context target class context
* @return class bytecode after application of mixins
*/
private byte[] applyMixins(TargetClassContext context) {
this.preApply(context);
this.apply(context);
try {
this.postApply(context);
} catch (ValidationFailedException ex) {
MixinTransformer.logger.info(ex.getMessage());
// If verify is enabled and failed, write out the bytecode to allow us to inspect it
if (context.isExportForced() || MixinEnvironment.getCurrentEnvironment().getOption(Option.DEBUG_EXPORT)) {
this.writeClass(context);
}
}
return this.writeClass(context);
}
/**
* Process tasks before mixin application
*
* @param context Target class context
*/
private void preApply(TargetClassContext context) {
for (IMixinTransformerModule module : this.modules) {
module.preApply(context);
}
}
/**
* Apply the mixins to the target class
*
* @param context Target class context
*/
private void apply(TargetClassContext context) {
context.applyMixins();
}
/**
* Process tasks after mixin application
*
* @param context Target class context
*/
private void postApply(TargetClassContext context) {
for (IMixinTransformerModule module : this.modules) {
module.postApply(context);
}
}
private void handleMixinPrepareError(MixinConfig config, InvalidMixinException ex, MixinEnvironment environment) throws MixinPrepareError {
this.handleMixinError(config.getName(), ex, environment, ErrorPhase.PREPARE);
}
private void handleMixinApplyError(String targetClass, InvalidMixinException ex, MixinEnvironment environment) throws MixinApplyError {
this.handleMixinError(targetClass, ex, environment, ErrorPhase.APPLY);
}
private void handleMixinError(String context, InvalidMixinException ex, MixinEnvironment environment, ErrorPhase errorPhase) throws Error {
this.errorState = true;
IMixinInfo mixin = ex.getMixin();
if (mixin == null) {
MixinTransformer.logger.error("InvalidMixinException has no mixin!", ex);
throw ex;
}
IMixinConfig config = mixin.getConfig();
Phase phase = mixin.getPhase();
ErrorAction action = config.isRequired() ? ErrorAction.ERROR : ErrorAction.WARN;
if (environment.getOption(Option.DEBUG_VERBOSE)) {
new PrettyPrinter()
.add("Invalid Mixin").centre()
.hr('-')
.kvWidth(10)
.kv("Action", errorPhase.name())
.kv("Mixin", mixin.getClassName())
.kv("Config", config.getName())
.kv("Phase", phase)
.hr('-')
.add(" %s", ex.getClass().getName())
.hr('-')
.addWrapped(" %s", ex.getMessage())
.hr('-')
.add(ex, 8)
.trace(action.logLevel);
}
for (IMixinErrorHandler handler : this.getErrorHandlers(mixin.getPhase())) {
ErrorAction newAction = errorPhase.onError(handler, context, ex, mixin, action);
if (newAction != null) {
action = newAction;
}
}
MixinTransformer.logger.log(action.logLevel, errorPhase.getLogMessage(context, ex, mixin), ex);
this.errorState = false;
if (action == ErrorAction.ERROR) {
throw new MixinApplyError(errorPhase.getErrorMessage(mixin, config, phase), ex);
}
}
private List<IMixinErrorHandler> getErrorHandlers(Phase phase) {
List<IMixinErrorHandler> handlers = new ArrayList<IMixinErrorHandler>();
for (String handlerClassName : Mixins.getErrorHandlerClasses()) {
try {
MixinTransformer.logger.info("Instancing error handler class {}", handlerClassName);
Class<?> handlerClass = Class.forName(handlerClassName, true, Launch.classLoader);
IMixinErrorHandler handler = (IMixinErrorHandler)handlerClass.newInstance();
if (handler != null) {
handlers.add(handler);
}
} catch (Throwable th) {
// skip bad handlers
}
}
return handlers;
}
private byte[] writeClass(TargetClassContext context) {
return this.writeClass(context.getClassName(), context.getClassNode(), context.isExportForced());
}
private byte[] writeClass(String transformedName, ClassNode targetClass, boolean forceExport) {
// Collapse tree to bytes
byte[] bytes = this.writeClass(targetClass);
this.exporter.export(transformedName, forceExport, bytes);
return bytes;
}
private void dumpClassOnFailure(String className, byte[] bytes, MixinEnvironment env) {
if (env.getOption(Option.DUMP_TARGET_ON_FAILURE)) {
this.exporter.dumpClass(className.replace('.', '/') + ".target", bytes);
}
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package sba.lib.grid;
import java.awt.Point;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.RowSorter;
import javax.swing.SortOrder;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableRowSorter;
import sba.lib.DLibConsts;
import sba.lib.DLibRpnArgument;
import sba.lib.DLibRpnArgumentType;
import sba.lib.DLibRpnUtils;
import sba.lib.DLibUtils;
import sba.lib.db.DDbConsts;
import sba.lib.grid.xml.DXmlColumnView;
import sba.lib.grid.xml.DXmlGridXml;
import sba.lib.grid.xml.DXmlRpnArgument;
import sba.lib.grid.xml.DXmlSortKey;
import sba.lib.gui.DGuiClient;
import sba.lib.gui.DGuiConsts;
import sba.lib.gui.DGuiParams;
import sba.lib.gui.DGuiUserGui;
import sba.lib.gui.DGuiUtils;
import sba.lib.xml.DXmlElement;
/**
*
* @author Sergio Flores
*/
public abstract class DGridPaneView extends JPanel implements DGridPane, ListSelectionListener {
protected DGuiClient miClient;
protected int mnGridType;
protected int mnGridSubtype;
protected int mnGridMode;
protected int mnGridSubmode;
protected int mnGridViewType;
protected int mnModuleType;
protected int mnModuleSubtype;
protected String msTitle;
protected String msSql;
protected DGuiParams moPaneParams;
protected DGuiParams moFormParams;
protected DGridModel moModel;
protected DGridSeeker moSeeker;
protected DGridPaneSettings moPaneSettings;
protected HashSet<Integer> moSuscriptionsSet;
protected List<RowSorter.SortKey> miSortKeysList;
protected HashMap<Integer, Object> moFiltersMap;
protected HashMap<Integer, Integer> moColumnComplementsMap;
protected int mnListSelectionModel;
protected int[] manUserGuiKey;
protected DGuiUserGui miUserGui;
/** Creates new form DGridPaneView */
public DGridPaneView(DGuiClient client, int viewType, int gridType, int gridSubtype, String title) {
this(client, viewType, gridType, gridSubtype, title, null);
}
/** Creates new form DGridPaneView */
public DGridPaneView(DGuiClient client, int viewType, int gridType, int gridSubtype, String title, DGuiParams params) {
miClient = client;
mnGridType = gridType;
mnGridSubtype = gridSubtype;
mnGridMode = params == null ? DLibConsts.UNDEFINED : params.getType();
mnGridSubmode = params == null ? DLibConsts.UNDEFINED : params.getSubtype();
mnGridViewType = viewType;
mnModuleType = miClient.getSession().getModuleUtils().getModuleTypeByType(gridType);
mnModuleSubtype = miClient.getSession().getModuleUtils().getModuleSubtypeBySubtype(gridType, gridSubtype);
msTitle = title;
msSql = "";
moPaneParams = params;
moFormParams = null;
initComponents();
initComponentsCustom();
}
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jLabel1 = new javax.swing.JLabel();
jpCommands = new javax.swing.JPanel();
jpCommandsSys = new javax.swing.JPanel();
jpCommandsSysLeft = new javax.swing.JPanel();
jbRowNew = new javax.swing.JButton();
jbRowEdit = new javax.swing.JButton();
jbRowCopy = new javax.swing.JButton();
jbRowDisable = new javax.swing.JButton();
jbRowDelete = new javax.swing.JButton();
jpCommandsSysCenter = new javax.swing.JPanel();
jtbFilterDeleted = new javax.swing.JToggleButton();
jpCommandsSysRight = new javax.swing.JPanel();
jbGridSaveCsv = new javax.swing.JButton();
jbGridClearSettings = new javax.swing.JButton();
jbGridReload = new javax.swing.JButton();
jpCommandsCustom = new javax.swing.JPanel();
jpCommandsCustomLeft = new javax.swing.JPanel();
jpCommandsCustomCenter = new javax.swing.JPanel();
jpCommandsCustomRight = new javax.swing.JPanel();
jspScrollPane = new javax.swing.JScrollPane();
jtTable = new javax.swing.JTable();
jpStatus = new javax.swing.JPanel();
jpStatusLeft = new javax.swing.JPanel();
jtfRows = new javax.swing.JTextField();
jpStatusCenter = new javax.swing.JPanel();
jtfGridSearch = new javax.swing.JTextField();
jbGridSearchNext = new javax.swing.JButton();
jpStatusRight = new javax.swing.JPanel();
jtbAutoReload = new javax.swing.JToggleButton();
jLabel1.setText("jLabel1");
setBorder(javax.swing.BorderFactory.createEmptyBorder(5, 0, 5, 0));
setLayout(new java.awt.BorderLayout());
jpCommands.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 5, 0));
jpCommands.setLayout(new java.awt.BorderLayout());
jpCommandsSys.setLayout(new java.awt.BorderLayout());
jpCommandsSysLeft.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jbRowNew.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_std_new.gif"))); // NOI18N
jbRowNew.setToolTipText("Nuevo (Ctrl+N)");
jbRowNew.setPreferredSize(new java.awt.Dimension(23, 23));
jbRowNew.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbRowNewActionPerformed(evt);
}
});
jpCommandsSysLeft.add(jbRowNew);
jbRowEdit.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_std_edit.gif"))); // NOI18N
jbRowEdit.setToolTipText("Modificar (Ctrl+M)");
jbRowEdit.setPreferredSize(new java.awt.Dimension(23, 23));
jbRowEdit.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbRowEditActionPerformed(evt);
}
});
jpCommandsSysLeft.add(jbRowEdit);
jbRowCopy.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_std_copy.gif"))); // NOI18N
jbRowCopy.setToolTipText("Duplicar (Ctrl+D)");
jbRowCopy.setPreferredSize(new java.awt.Dimension(23, 23));
jbRowCopy.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbRowCopyActionPerformed(evt);
}
});
jpCommandsSysLeft.add(jbRowCopy);
jbRowDisable.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_std_disable.gif"))); // NOI18N
jbRowDisable.setToolTipText("Inhabilitar (Ctrl+I)");
jbRowDisable.setPreferredSize(new java.awt.Dimension(23, 23));
jbRowDisable.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbRowDisableActionPerformed(evt);
}
});
jpCommandsSysLeft.add(jbRowDisable);
jbRowDelete.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_std_delete.gif"))); // NOI18N
jbRowDelete.setToolTipText("Eliminar (Ctrl+E)");
jbRowDelete.setPreferredSize(new java.awt.Dimension(23, 23));
jbRowDelete.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbRowDeleteActionPerformed(evt);
}
});
jpCommandsSysLeft.add(jbRowDelete);
jpCommandsSys.add(jpCommandsSysLeft, java.awt.BorderLayout.WEST);
jpCommandsSysCenter.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jtbFilterDeleted.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/swi_filter_off.gif"))); // NOI18N
jtbFilterDeleted.setToolTipText("Filtrar eliminados");
jtbFilterDeleted.setPreferredSize(new java.awt.Dimension(23, 23));
jtbFilterDeleted.setSelectedIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/swi_filter_on.gif"))); // NOI18N
jtbFilterDeleted.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jtbFilterDeletedActionPerformed(evt);
}
});
jpCommandsSysCenter.add(jtbFilterDeleted);
jpCommandsSys.add(jpCommandsSysCenter, java.awt.BorderLayout.CENTER);
jpCommandsSysRight.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jbGridSaveCsv.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_grid_save.gif"))); // NOI18N
jbGridSaveCsv.setToolTipText("Guardar como CSV (Ctrl+G)");
jbGridSaveCsv.setPreferredSize(new java.awt.Dimension(23, 23));
jbGridSaveCsv.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbGridSaveCsvActionPerformed(evt);
}
});
jpCommandsSysRight.add(jbGridSaveCsv);
jbGridClearSettings.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_grid_reset.gif"))); // NOI18N
jbGridClearSettings.setToolTipText("Limpiar preferencias (Ctrl+L)");
jbGridClearSettings.setPreferredSize(new java.awt.Dimension(23, 23));
jbGridClearSettings.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbGridClearSettingsActionPerformed(evt);
}
});
jpCommandsSysRight.add(jbGridClearSettings);
jbGridReload.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_grid_reload.gif"))); // NOI18N
jbGridReload.setToolTipText("Refrescar (Ctrl+R)");
jbGridReload.setPreferredSize(new java.awt.Dimension(23, 23));
jbGridReload.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbGridReloadActionPerformed(evt);
}
});
jpCommandsSysRight.add(jbGridReload);
jpCommandsSys.add(jpCommandsSysRight, java.awt.BorderLayout.EAST);
jpCommands.add(jpCommandsSys, java.awt.BorderLayout.PAGE_START);
jpCommandsCustom.setLayout(new java.awt.BorderLayout());
jpCommandsCustomLeft.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jpCommandsCustom.add(jpCommandsCustomLeft, java.awt.BorderLayout.WEST);
jpCommandsCustomCenter.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jpCommandsCustom.add(jpCommandsCustomCenter, java.awt.BorderLayout.CENTER);
jpCommandsCustomRight.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jpCommandsCustom.add(jpCommandsCustomRight, java.awt.BorderLayout.EAST);
jpCommands.add(jpCommandsCustom, java.awt.BorderLayout.CENTER);
add(jpCommands, java.awt.BorderLayout.NORTH);
jtTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{},
{},
{},
{}
},
new String [] {
}
));
jspScrollPane.setViewportView(jtTable);
add(jspScrollPane, java.awt.BorderLayout.CENTER);
jpStatus.setBorder(javax.swing.BorderFactory.createEmptyBorder(5, 0, 0, 0));
jpStatus.setLayout(new java.awt.BorderLayout());
jpStatusLeft.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jtfRows.setEditable(false);
jtfRows.setText("000,000/000,000");
jtfRows.setToolTipText("Renglón actual");
jtfRows.setFocusable(false);
jtfRows.setPreferredSize(new java.awt.Dimension(100, 23));
jpStatusLeft.add(jtfRows);
jpStatus.add(jpStatusLeft, java.awt.BorderLayout.WEST);
jpStatusCenter.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jtfGridSearch.setToolTipText("Buscar (Ctrl+B)");
jtfGridSearch.setPreferredSize(new java.awt.Dimension(100, 23));
jtfGridSearch.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jtfGridSearchActionPerformed(evt);
}
});
jpStatusCenter.add(jtfGridSearch);
jbGridSearchNext.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/cmd_grid_next.gif"))); // NOI18N
jbGridSearchNext.setToolTipText("Siguiente (F3)");
jbGridSearchNext.setPreferredSize(new java.awt.Dimension(23, 23));
jbGridSearchNext.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jbGridSearchNextActionPerformed(evt);
}
});
jpStatusCenter.add(jbGridSearchNext);
jpStatus.add(jpStatusCenter, java.awt.BorderLayout.CENTER);
jpStatusRight.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT, 5, 0));
jtbAutoReload.setIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/swi_action_off.gif"))); // NOI18N
jtbAutoReload.setToolTipText("Refrescar automáticamente");
jtbAutoReload.setPreferredSize(new java.awt.Dimension(23, 23));
jtbAutoReload.setSelectedIcon(new javax.swing.ImageIcon(getClass().getResource("/sba/lib/img/swi_action_on.gif"))); // NOI18N
jtbAutoReload.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jtbAutoReloadActionPerformed(evt);
}
});
jpStatusRight.add(jtbAutoReload);
jpStatus.add(jpStatusRight, java.awt.BorderLayout.EAST);
add(jpStatus, java.awt.BorderLayout.SOUTH);
}// </editor-fold>//GEN-END:initComponents
private void jbRowNewActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbRowNewActionPerformed
actionRowNew();
}//GEN-LAST:event_jbRowNewActionPerformed
private void jbRowEditActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbRowEditActionPerformed
actionRowEdit();
}//GEN-LAST:event_jbRowEditActionPerformed
private void jbRowCopyActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbRowCopyActionPerformed
actionRowCopy();
}//GEN-LAST:event_jbRowCopyActionPerformed
private void jbRowDisableActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbRowDisableActionPerformed
actionRowDisable();
}//GEN-LAST:event_jbRowDisableActionPerformed
private void jbRowDeleteActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbRowDeleteActionPerformed
actionRowDelete();
}//GEN-LAST:event_jbRowDeleteActionPerformed
private void jbGridSaveCsvActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbGridSaveCsvActionPerformed
actionGridSaveCsv();
}//GEN-LAST:event_jbGridSaveCsvActionPerformed
private void jbGridClearSettingsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbGridClearSettingsActionPerformed
actionGridClearSettings();
}//GEN-LAST:event_jbGridClearSettingsActionPerformed
private void jbGridReloadActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbGridReloadActionPerformed
actionGridReload();
}//GEN-LAST:event_jbGridReloadActionPerformed
private void jtbFilterDeletedActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jtbFilterDeletedActionPerformed
actionToggleFilterDeleted();
}//GEN-LAST:event_jtbFilterDeletedActionPerformed
private void jtbAutoReloadActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jtbAutoReloadActionPerformed
actionToggleAutoReload();
}//GEN-LAST:event_jtbAutoReloadActionPerformed
private void jtfGridSearchActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jtfGridSearchActionPerformed
actionGridSearchValue();
}//GEN-LAST:event_jtfGridSearchActionPerformed
private void jbGridSearchNextActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jbGridSearchNextActionPerformed
actionGridSearchNextValue();
}//GEN-LAST:event_jbGridSearchNextActionPerformed
private void initComponentsCustom() {
moModel = new DGridModel();
moSeeker = new DGridSeeker(miClient.getFrame());
moPaneSettings = null;
moSuscriptionsSet = new HashSet<Integer>();
miSortKeysList = new ArrayList<RowSorter.SortKey>();
moFiltersMap = new HashMap<Integer, Object>();
moColumnComplementsMap = new HashMap<Integer, Integer>();
defineSuscriptions();
switch (mnGridViewType) {
case DGridConsts.GRID_VIEW_TAB:
mnListSelectionModel = ListSelectionModel.SINGLE_INTERVAL_SELECTION;
manUserGuiKey = new int[] { miClient.getSession().getUser().getPkUserId(), DGuiConsts.GUI_COMP_VIEW_TAB, mnGridType, mnGridSubtype, mnGridMode, mnGridSubmode };
jbRowNew.setEnabled(true);
jbRowEdit.setEnabled(true);
jbRowCopy.setEnabled(true);
jbRowDisable.setEnabled(true);
jbRowDelete.setEnabled(true);
jbGridSaveCsv.setEnabled(true);
jbGridClearSettings.setEnabled(true);
jbGridReload.setEnabled(true);
jtbFilterDeleted.setEnabled(true);
jtbAutoReload.setEnabled(true);
jtbFilterDeleted.setSelected(true);
jtbAutoReload.setSelected(true);
moFiltersMap.put(DGridConsts.FILTER_DELETED, true);
break;
case DGridConsts.GRID_VIEW_OPTION_PICKER:
mnListSelectionModel = ListSelectionModel.SINGLE_SELECTION;
manUserGuiKey = new int[] { miClient.getSession().getUser().getPkUserId(), DGuiConsts.GUI_COMP_VIEW_OPTION_PICKER, mnGridType, mnGridSubtype, mnGridMode, mnGridSubmode };
jbRowNew.setEnabled(true);
jbRowEdit.setEnabled(true);
jbRowCopy.setEnabled(true);
jbRowDisable.setEnabled(false);
jbRowDelete.setEnabled(false);
jbGridSaveCsv.setEnabled(true);
jbGridClearSettings.setEnabled(true);
jbGridReload.setEnabled(true);
jtbFilterDeleted.setEnabled(false);
jtbAutoReload.setEnabled(false);
break;
default:
miClient.showMsgBoxError(DLibConsts.ERR_MSG_OPTION_UNKNOWN);
}
miUserGui = miClient.readUserGui(manUserGuiKey);
DGuiUtils.createActionMap(this, this, "actionRowNew", "rowNew", KeyEvent.VK_N, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionRowEdit", "rowEdit", KeyEvent.VK_M, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionRowCopy", "rowCopy", KeyEvent.VK_D, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionRowDisable", "rowDisable", KeyEvent.VK_I, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionRowDelete", "rowDelete", KeyEvent.VK_E, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionGridSaveCsv", "gridSaveCsv", KeyEvent.VK_G, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionGridClearSettings", "gridClearSettings", KeyEvent.VK_L, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionGridReload", "gridReload", KeyEvent.VK_R, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionGridSeekValue", "gridSeekValue", KeyEvent.VK_S, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionGridSearchValue", "gridSearchValue", KeyEvent.VK_B, KeyEvent.CTRL_DOWN_MASK);
DGuiUtils.createActionMap(this, this, "actionGridSearchNextValue", "gridSearchNextValue", KeyEvent.VK_F3, 0);
}
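/**
* Restores the user's persisted grid preferences from XML: rebuilds the column
* views (including any RPN arguments) into moModel and the sort keys into
* miSortKeysList. On a parse failure the stored preferences are discarded and
* the grid is repopulated with its defaults.
*/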
protected void computeUserGui() {
DXmlGridXml gridXml = new DXmlGridXml(DGridConsts.GRID_PANE_VIEW);
miSortKeysList.clear();
try {
gridXml.processXml(miUserGui.getGui());
for (DXmlElement element : gridXml.getXmlElements()) {
if (element instanceof DXmlColumnView) {
// Columns:
DXmlColumnView xmlColumn = (DXmlColumnView) element;
DGridColumnView gridColumn = new DGridColumnView(
(Integer) xmlColumn.getAttribute(DXmlColumnView.ATT_COLUMN_TYPE).getValue(),
(String) xmlColumn.getAttribute(DXmlColumnView.ATT_FIELD_NAME).getValue(),
(String) xmlColumn.getAttribute(DXmlColumnView.ATT_COLUMN_TITLE).getValue(),
(Integer) xmlColumn.getAttribute(DXmlColumnView.ATT_COLUMN_WIDTH).getValue());
gridColumn.setSumApplying((Boolean) xmlColumn.getAttribute(DXmlColumnView.ATT_IS_SUM_APPLYING).getValue());
if (!xmlColumn.getXmlElements().isEmpty()) {
for (DXmlElement child : xmlColumn.getXmlElements()) {
if (child.getName().compareTo(DXmlRpnArgument.NAME) == 0) {
DXmlRpnArgument xmlRpnArgument = (DXmlRpnArgument) child;
DLibRpnArgument rpnArgument = null;
switch (DLibRpnUtils.getArgumentType((String) xmlRpnArgument.getAttribute(DXmlRpnArgument.ATT_ARGUMENT_TYPE).getValue())) {
case OPERAND:
rpnArgument = new DLibRpnArgument(
(String) xmlRpnArgument.getAttribute(DXmlRpnArgument.ATT_ARGUMENT).getValue(),
DLibRpnArgumentType.OPERAND);
break;
case OPERATOR:
rpnArgument = new DLibRpnArgument(
DLibRpnUtils.getOperator((String) xmlRpnArgument.getAttribute(DXmlRpnArgument.ATT_ARGUMENT).getValue()),
DLibRpnArgumentType.OPERATOR);
break;
default:
}
if (rpnArgument != null) {
gridColumn.getRpnArguments().add(rpnArgument);
}
}
}
}
moModel.getGridColumns().add(gridColumn);
}
else if (element instanceof DXmlSortKey) {
// Sort keys:
DXmlSortKey xmlSortKey = (DXmlSortKey) element;
RowSorter.SortKey sortKey = null;
SortOrder sortOrder = null;
String sortOrderValue = (String) xmlSortKey.getAttribute(DXmlSortKey.ATT_SORT_ORDER).getValue();
if (sortOrderValue.compareTo(SortOrder.ASCENDING.toString()) == 0) {
sortOrder = SortOrder.ASCENDING;
}
else if (sortOrderValue.compareTo(SortOrder.DESCENDING.toString()) == 0) {
sortOrder = SortOrder.DESCENDING;
}
else {
sortOrder = SortOrder.UNSORTED;
}
sortKey = new RowSorter.SortKey(
(Integer) xmlSortKey.getAttribute(DXmlSortKey.ATT_COLUMN).getValue(),
sortOrder);
miSortKeysList.add(sortKey);
}
}
}
catch (Exception e) {
DLibUtils.printException(this, e);
miClient.showMsgBoxWarning(DGridConsts.ERR_MSG_PREFS_VIEW);
miUserGui = null; // reset grid's user preferences
populateGrid(DGridConsts.REFRESH_MODE_RESET);
}
}
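/**
* Persists the current grid layout as XML: one element per visible column
* (type, field name, title, current width, sum flag and RPN arguments) plus
* the active sort keys, saved through miClient.saveUserGui().
*/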
protected void preserveUserGui() {
if (jtTable != null && jtTable.getRowSorter() != null) {
String xml = "";
DXmlGridXml gridXml = new DXmlGridXml(DGridConsts.GRID_PANE_VIEW);
@SuppressWarnings("unchecked")
List<RowSorter.SortKey> sortKeys = (List<RowSorter.SortKey>) jtTable.getRowSorter().getSortKeys();
// Columns:
for (int i = 0; i < jtTable.getColumnCount(); i++) {
DXmlColumnView xmlColumn = new DXmlColumnView();
DGridColumnView gridColumn = (DGridColumnView) (moModel.getGridColumns().get(jtTable.convertColumnIndexToModel(i)));
xmlColumn.getAttribute(DXmlColumnView.ATT_COLUMN_TYPE).setValue(gridColumn.getColumnType());
xmlColumn.getAttribute(DXmlColumnView.ATT_FIELD_NAME).setValue(gridColumn.getFieldName());
xmlColumn.getAttribute(DXmlColumnView.ATT_COLUMN_TITLE).setValue(gridColumn.getColumnTitle());
xmlColumn.getAttribute(DXmlColumnView.ATT_COLUMN_WIDTH).setValue(jtTable.getColumnModel().getColumn(i).getWidth());
xmlColumn.getAttribute(DXmlColumnView.ATT_IS_SUM_APPLYING).setValue(gridColumn.isSumApplying());
for (DLibRpnArgument argument : gridColumn.getRpnArguments()) {
DXmlRpnArgument xmlArgument = new DXmlRpnArgument();
xmlArgument.getAttribute(DXmlRpnArgument.ATT_ARGUMENT_TYPE).setValue(argument.getArgumentType());
xmlArgument.getAttribute(DXmlRpnArgument.ATT_ARGUMENT).setValue(argument.getArgument());
xmlColumn.getXmlElements().add(xmlArgument);
}
gridXml.getXmlElements().add(xmlColumn);
}
// Sort keys:
if (sortKeys.isEmpty()) {
sortKeys = new ArrayList<RowSorter.SortKey>();
sortKeys.add(new RowSorter.SortKey(0, SortOrder.ASCENDING)); // fall back to a default key when no sort keys are set
}
for (RowSorter.SortKey sortKey : sortKeys) {
DXmlSortKey xmlSortKey = new DXmlSortKey();
xmlSortKey.getAttribute(DXmlSortKey.ATT_COLUMN).setValue(jtTable.convertColumnIndexToView(sortKey.getColumn()));
xmlSortKey.getAttribute(DXmlSortKey.ATT_SORT_ORDER).setValue(sortKey.getSortOrder().toString());
gridXml.getXmlElements().add(xmlSortKey);
}
xml = gridXml.getXmlString();
miUserGui = miClient.saveUserGui(manUserGuiKey, xml);
}
}
protected void refreshGridWithRefresh() {
preserveUserGui();
populateGrid(DGridConsts.REFRESH_MODE_RESET);
}
protected void refreshGridWithReload() {
preserveUserGui();
populateGrid(DGridConsts.REFRESH_MODE_RELOAD);
}
protected void resetGrid() {
moModel.clearGrid();
if (jtTable != null) {
jtTable.invalidate();
validate();
}
}
protected void resetGridRows() {
moModel.clearGridRows();
if (jtTable != null) {
jtTable.invalidate();
validate();
}
}
protected void createGridView() {
DGridColumnView column = null;
// Create columns in table model:
clearGrid();
if (miUserGui != null) {
computeUserGui(); // customized columns added into moModel and sort keys into miSortKeysList
}
else {
initSortKeys();
createGridColumns(); // default columns added into moModel
}
// Create table:
jtTable = new JTable(moModel);
jtTable.setAutoResizeMode(JTable.AUTO_RESIZE_OFF);
jtTable.setSelectionMode(mnListSelectionModel);
jtTable.setColumnSelectionAllowed(false);
jtTable.setRowSorter(new TableRowSorter<AbstractTableModel>(moModel));
jtTable.setTableHeader(new DGridHeader(jtTable.getColumnModel(), moModel.getColumnNames()));
jtTable.getSelectionModel().addListSelectionListener(this);
jtTable.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
if (e.getClickCount() == 2) {
actionMouseClicked();
}
}
});
jtTable.addKeyListener(new java.awt.event.KeyAdapter() {
@Override
public void keyPressed(java.awt.event.KeyEvent evt) {
moSeeker.handleKeyPressedEvent(evt, getSeekerLocation());
if (moSeeker.isSeekRequested()) {
DGridUtils.seekValue(DGridPaneView.this, moSeeker.getText());
}
}
});
for (int i = 0; i < moModel.getColumnCount(); i++) {
column = (DGridColumnView) moModel.getGridColumns().get(i);
jtTable.getColumnModel().getColumn(i).setPreferredWidth(column.getColumnWidth());
if (column.getCellRenderer() != null) {
jtTable.getColumnModel().getColumn(i).setCellRenderer(column.getCellRenderer());
}
else {
jtTable.getColumnModel().getColumn(i).setCellRenderer(DGridUtils.getCellRenderer(column.getColumnType()));
}
}
jtfRows.setText("0/0");
jbGridSaveCsv.setEnabled(false);
jspScrollPane.setViewportView(jtTable);
}
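/**
* Builds the grid view, runs the SQL query and populates the model: for each
* row it reads the primary/type keys and whatever audit fields are enabled in
* moPaneSettings, then the column values (RPN columns get a placeholder that
* is recomputed afterwards by DGridUtils.computeRpn()), then any column
* complements, and finally appends a sum row for columns flagged as
* sum-applying.
*/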
protected void readGridData() {
int i = 0;
int col = 0;
int row = 0;
int dataType = DLibConsts.UNDEFINED;
int[] key = null;
int sumInt = 0;
long sumLng = 0;
double sumDbl = 0;
boolean rpnApplying = false;
boolean sumApplying = false;
boolean dataAvailable = false;
boolean[] colsWithRpn = null;
boolean[] colsWithSum = null;
Class<?> colClass = null;
ResultSet resultSet = null;
DGridColumnView gridColumnView = null;
DGridRowView gridRowView = null;
try {
createGridView();
prepareSqlQuery();
colsWithRpn = new boolean[moModel.getColumnCount()];
for (col = 0; col < moModel.getColumnCount(); col++) {
if (!moModel.getGridColumns().get(col).getRpnArguments().isEmpty()) {
dataType = DGridUtils.getDataType(moModel.getGridColumns().get(col).getColumnType());
if (dataType != DLibConsts.DATA_TYPE_INT && dataType != DLibConsts.DATA_TYPE_DEC) {
throw new Exception(DLibConsts.ERR_MSG_WRONG_TYPE);
}
else {
rpnApplying = true;
colsWithRpn[col] = true;
}
}
}
colsWithSum = new boolean[moModel.getColumnCount()];
for (col = 0; col < moModel.getColumnCount(); col++) {
if (((DGridColumnView) moModel.getGridColumns().get(col)).isSumApplying()) {
dataType = DGridUtils.getDataType(moModel.getGridColumns().get(col).getColumnType());
if (dataType != DLibConsts.DATA_TYPE_INT && dataType != DLibConsts.DATA_TYPE_DEC) {
throw new Exception(DLibConsts.ERR_MSG_WRONG_TYPE);
}
else {
sumApplying = true;
colsWithSum[col] = true;
}
}
}
resultSet = miClient.getSession().getStatement().executeQuery(msSql);
while (resultSet.next()) {
key = new int[moPaneSettings.getPrimaryKeyLength()];
for (i = 0; i < moPaneSettings.getPrimaryKeyLength(); i++) {
key[i] = resultSet.getInt(DDbConsts.FIELD_ID + (i + 1));
}
gridRowView = new DGridRowView(key, resultSet.getString(DDbConsts.FIELD_CODE), resultSet.getString(DDbConsts.FIELD_NAME));
if (moPaneSettings.getTypeKeyLength() == 0) {
key = null;
}
else {
key = new int[moPaneSettings.getTypeKeyLength()];
for (i = 0; i < moPaneSettings.getTypeKeyLength(); i++) {
key[i] = resultSet.getInt(DDbConsts.FIELD_TYPE_ID + (i + 1));
}
gridRowView.setRowRegistryTypeKey(key);
gridRowView.setRowRegistryType(resultSet.getString(DDbConsts.FIELD_TYPE));
}
if (moPaneSettings.isDateApplying()) {
gridRowView.setRowDate(resultSet.getDate(DDbConsts.FIELD_DATE));
}
if (moPaneSettings.isUpdatableApplying()) {
gridRowView.setUpdatable(resultSet.getBoolean(DDbConsts.FIELD_CAN_UPD));
}
if (moPaneSettings.isDisableableApplying()) {
gridRowView.setDisableable(resultSet.getBoolean(DDbConsts.FIELD_CAN_DIS));
gridRowView.setDisabled(resultSet.getBoolean(DDbConsts.FIELD_IS_DIS));
}
if (moPaneSettings.isDeletableApplying()) {
gridRowView.setDeletable(resultSet.getBoolean(DDbConsts.FIELD_CAN_DEL));
gridRowView.setDeleted(resultSet.getBoolean(DDbConsts.FIELD_IS_DEL));
}
if (moPaneSettings.isDisabledApplying()) {
gridRowView.setDisabled(resultSet.getBoolean(DDbConsts.FIELD_IS_DIS));
}
if (moPaneSettings.isDeletedApplying()) {
gridRowView.setDeleted(resultSet.getBoolean(DDbConsts.FIELD_IS_DEL));
}
if (moPaneSettings.isSystemApplying()) {
gridRowView.setRowSystem(resultSet.getBoolean(DDbConsts.FIELD_IS_SYS));
}
if (moPaneSettings.isUserApplying()) {
gridRowView.setFkUserId(resultSet.getInt(DDbConsts.FIELD_USER_USR_ID));
gridRowView.setTsUser(resultSet.getDate(DDbConsts.FIELD_USER_USR_TS));
gridRowView.setUser(resultSet.getString(DDbConsts.FIELD_USER_USR_NAME));
}
if (moPaneSettings.isUserInsertApplying()) {
gridRowView.setFkUserInsertId(resultSet.getInt(DDbConsts.FIELD_USER_INS_ID));
gridRowView.setTsUserInsert(resultSet.getDate(DDbConsts.FIELD_USER_INS_TS));
gridRowView.setUserInsert(resultSet.getString(DDbConsts.FIELD_USER_INS_NAME));
}
if (moPaneSettings.isUserUpdateApplying()) {
gridRowView.setFkUserUpdateId(resultSet.getInt(DDbConsts.FIELD_USER_UPD_ID));
gridRowView.setTsUserUpdate(resultSet.getDate(DDbConsts.FIELD_USER_UPD_TS));
gridRowView.setUserUpdate(resultSet.getString(DDbConsts.FIELD_USER_UPD_NAME));
}
// Read grid row values:
for (col = 0; col < moModel.getColumnCount(); col++) {
gridColumnView = (DGridColumnView) moModel.getGridColumns().get(col);
if (colsWithRpn[col]) {
gridRowView.getValues().add(0.0); // placeholder; the real value is computed later by DGridUtils.computeRpn()
}
else {
colClass = DGridUtils.getDataTypeClass(gridColumnView.getColumnType());
if (colClass == Long.class) {
gridRowView.getValues().add(resultSet.getLong(gridColumnView.getFieldName()));
}
else if (colClass == Integer.class) {
gridRowView.getValues().add(resultSet.getInt(gridColumnView.getFieldName()));
}
else if (colClass == Double.class) {
gridRowView.getValues().add(resultSet.getDouble(gridColumnView.getFieldName()));
}
else if (colClass == Float.class) {
gridRowView.getValues().add(resultSet.getFloat(gridColumnView.getFieldName()));
}
else if (colClass == Boolean.class) {
gridRowView.getValues().add(resultSet.getBoolean(gridColumnView.getFieldName()));
}
else if (colClass == String.class) {
gridRowView.getValues().add(resultSet.getString(gridColumnView.getFieldName()));
}
else if (colClass == Date.class) {
switch (gridColumnView.getColumnType()) {
case DGridConsts.COL_TYPE_DATE:
gridRowView.getValues().add(resultSet.getDate(gridColumnView.getFieldName()));
break;
case DGridConsts.COL_TYPE_DATE_DATETIME:
gridRowView.getValues().add(resultSet.getTimestamp(gridColumnView.getFieldName()));
break;
case DGridConsts.COL_TYPE_DATE_TIME:
gridRowView.getValues().add(resultSet.getTime(gridColumnView.getFieldName()));
break;
default:
throw new Exception(DLibConsts.ERR_MSG_OPTION_UNKNOWN);
}
}
else {
throw new Exception(DLibConsts.ERR_MSG_OPTION_UNKNOWN);
}
}
}
// Also read grid row complements, if any:
if (moColumnComplementsMap.size() > 0) {
for (Integer complementKey : moColumnComplementsMap.keySet()) {
colClass = DGridUtils.getDataTypeClass(moColumnComplementsMap.get(complementKey));
if (colClass == Long.class) {
gridRowView.getComplementsMap().put(complementKey, resultSet.getLong(DDbConsts.FIELD_COMP + complementKey));
}
else if (colClass == Integer.class) {
gridRowView.getComplementsMap().put(complementKey, resultSet.getInt(DDbConsts.FIELD_COMP + complementKey));
}
else if (colClass == Double.class) {
gridRowView.getComplementsMap().put(complementKey, resultSet.getDouble(DDbConsts.FIELD_COMP + complementKey));
}
else if (colClass == Float.class) {
gridRowView.getComplementsMap().put(complementKey, resultSet.getFloat(DDbConsts.FIELD_COMP + complementKey));
}
else if (colClass == Boolean.class) {
gridRowView.getComplementsMap().put(complementKey, resultSet.getBoolean(DDbConsts.FIELD_COMP + complementKey));
}
else if (colClass == String.class) {
gridRowView.getComplementsMap().put(complementKey, resultSet.getString(DDbConsts.FIELD_COMP + complementKey));
}
else if (colClass == Date.class) {
switch (moColumnComplementsMap.get(complementKey)) {
case DGridConsts.COL_TYPE_DATE:
gridRowView.getComplementsMap().put(complementKey, resultSet.getDate(DDbConsts.FIELD_COMP + complementKey));
break;
case DGridConsts.COL_TYPE_DATE_DATETIME:
gridRowView.getComplementsMap().put(complementKey, resultSet.getTimestamp(DDbConsts.FIELD_COMP + complementKey));
break;
case DGridConsts.COL_TYPE_DATE_TIME:
gridRowView.getComplementsMap().put(complementKey, resultSet.getTime(DDbConsts.FIELD_COMP + complementKey));
break;
default:
throw new Exception(DLibConsts.ERR_MSG_OPTION_UNKNOWN);
}
}
else {
throw new Exception(DLibConsts.ERR_MSG_OPTION_UNKNOWN);
}
}
}
moModel.getGridRows().add(gridRowView);
dataAvailable = true;
}
if (rpnApplying) {
DGridUtils.computeRpn(moModel);
}
if (sumApplying && dataAvailable) {
gridRowView = new DGridRowView(null, "", "");
gridRowView.setRowType(DGridConsts.ROW_TYPE_SUM);
for (col = 0; col < moModel.getColumnCount(); col++) {
if (!colsWithSum[col]) {
gridRowView.getValues().add(null);
}
else {
switch (DGridUtils.getDataType(moModel.getGridColumns().get(col).getColumnType())) {
case DLibConsts.DATA_TYPE_INT:
if (DGridUtils.getDataTypeClass(moModel.getGridColumns().get(col).getColumnType()) == Long.class) {
sumLng = 0;
for (row = 0; row < moModel.getRowCount(); row++) {
sumLng += ((Number) moModel.getValueAt(row, col)).longValue();
}
gridRowView.getValues().add(sumLng);
}
else {
sumInt = 0;
for (row = 0; row < moModel.getRowCount(); row++) {
sumInt += ((Number) moModel.getValueAt(row, col)).intValue();
}
gridRowView.getValues().add(sumInt);
}
break;
case DLibConsts.DATA_TYPE_DEC:
sumDbl = 0;
for (row = 0; row < moModel.getRowCount(); row++) {
sumDbl += ((Number) moModel.getValueAt(row, col)).doubleValue();
}
gridRowView.getValues().add(sumDbl);
break;
default:
}
}
}
moModel.getGridRows().add(gridRowView);
}
}
catch (SQLException e) {
DLibUtils.showException(this, e);
DLibUtils.printSqlQuery(this, msSql);
}
catch (Exception e) {
DLibUtils.showException(this, e);
}
finally {
try {
jtTable.getRowSorter().setSortKeys(miSortKeysList);
}
catch (Exception e) {
DLibUtils.printException(this, e);
miClient.showMsgBoxWarning(DGridConsts.ERR_MSG_PREFS_VIEW);
miUserGui = null; // reset grid's user preferences
populateGrid(DGridConsts.REFRESH_MODE_RESET);
}
if (dataAvailable) {
jbGridSaveCsv.setEnabled(true);
setSelectedGridRow(0);
}
}
}
protected Point getSeekerLocation() {
Point point = jspScrollPane.getLocationOnScreen();
point.y += jtTable.getTableHeader().getHeight();
return point;
}
public void actionMouseClicked() {
actionRowEdit();
}
public void actionRowNew() {
if (jbRowNew.isEnabled()) {
miClient.getSession().getModule(mnModuleType, mnModuleSubtype).showForm(mnGridType, mnGridSubtype, moFormParams);
moFormParams = null;
}
}
public void actionRowEdit() {
actionRowEdit(false);
}
public void actionRowEdit(boolean showSystemRegistries) {
if (jbRowEdit.isEnabled()) {
if (jtTable.getSelectedRowCount() != 1) {
miClient.showMsgBoxInformation(DGridConsts.MSG_SELECT_ROW);
}
else {
DGridRowView gridRow = (DGridRowView) getSelectedGridRow();
DGuiParams params = null;
if (gridRow.getRowType() != DGridConsts.ROW_TYPE_DATA) {
miClient.showMsgBoxWarning(DGridConsts.ERR_MSG_ROW_TYPE_DATA);
}
else if (!showSystemRegistries && gridRow.isRowSystem()) {
miClient.showMsgBoxWarning(DDbConsts.MSG_REG_ + gridRow.getRowName() + DDbConsts.MSG_REG_IS_SYSTEM);
}
else if (!gridRow.isUpdatable()) {
miClient.showMsgBoxWarning(DDbConsts.MSG_REG_ + gridRow.getRowName() + DDbConsts.MSG_REG_NON_UPDATABLE);
}
else {
params = moFormParams != null ? moFormParams : new DGuiParams();
params.setKey(gridRow.getRowPrimaryKey());
miClient.getSession().getModule(mnModuleType, mnModuleSubtype).showForm(mnGridType, mnGridSubtype, params);
moFormParams = null;
}
}
}
}
public void actionRowCopy() {
if (jbRowCopy.isEnabled()) {
if (jtTable.getSelectedRowCount() != 1) {
miClient.showMsgBoxInformation(DGridConsts.MSG_SELECT_ROW);
}
else {
DGridRowView gridRow = (DGridRowView) getSelectedGridRow();
DGuiParams params = null;
if (gridRow.getRowType() != DGridConsts.ROW_TYPE_DATA) {
miClient.showMsgBoxWarning(DGridConsts.ERR_MSG_ROW_TYPE_DATA);
}
else {
params = new DGuiParams(getSelectedGridRow().getRowPrimaryKey(), true);
miClient.getSession().getModule(mnModuleType, mnModuleSubtype).showForm(mnGridType, mnGridSubtype, params);
}
}
}
}
public void actionRowDisable() {
if (jbRowDisable.isEnabled()) {
if (jtTable.getSelectedRowCount() == 0) {
miClient.showMsgBoxInformation(DGridConsts.MSG_SELECT_ROWS);
}
else if (miClient.showMsgBoxConfirm(DGridConsts.MSG_CONFIRM_REG_DIS) == JOptionPane.YES_OPTION) {
boolean updates = false;
DGridRow[] gridRows = getSelectedGridRows();
for (DGridRow gridRow : gridRows) {
if (((DGridRowView) gridRow).getRowType() != DGridConsts.ROW_TYPE_DATA) {
miClient.showMsgBoxWarning(DGridConsts.ERR_MSG_ROW_TYPE_DATA);
}
else if (((DGridRowView) gridRow).isRowSystem()) {
miClient.showMsgBoxWarning(DDbConsts.MSG_REG_ + gridRow.getRowName() + DDbConsts.MSG_REG_IS_SYSTEM);
}
else if (!((DGridRowView) gridRow).isDisableable()) {
miClient.showMsgBoxWarning(DDbConsts.MSG_REG_ + gridRow.getRowName() + DDbConsts.MSG_REG_NON_DISABLEABLE);
}
else {
if (miClient.getSession().getModule(mnModuleType, mnModuleSubtype).disableRegistry(mnGridType, gridRow.getRowPrimaryKey()) == DDbConsts.SAVE_OK) {
updates = true;
}
}
}
if (updates) {
miClient.getSession().notifySuscriptors(mnGridType);
}
}
}
}
public void actionRowDelete() {
if (jbRowDelete.isEnabled()) {
if (jtTable.getSelectedRowCount() == 0) {
miClient.showMsgBoxInformation(DGridConsts.MSG_SELECT_ROWS);
}
else if (miClient.showMsgBoxConfirm(DGridConsts.MSG_CONFIRM_REG_DEL) == JOptionPane.YES_OPTION) {
boolean updates = false;
DGridRow[] gridRows = getSelectedGridRows();
for (DGridRow gridRow : gridRows) {
if (((DGridRowView) gridRow).getRowType() != DGridConsts.ROW_TYPE_DATA) {
miClient.showMsgBoxWarning(DGridConsts.ERR_MSG_ROW_TYPE_DATA);
}
else if (((DGridRowView) gridRow).isRowSystem()) {
miClient.showMsgBoxWarning(DDbConsts.MSG_REG_ + gridRow.getRowName() + DDbConsts.MSG_REG_IS_SYSTEM);
}
else if (!((DGridRowView) gridRow).isDeletable()) {
miClient.showMsgBoxWarning(DDbConsts.MSG_REG_ + gridRow.getRowName() + DDbConsts.MSG_REG_NON_DELETABLE);
}
else {
if (miClient.getSession().getModule(mnModuleType, mnModuleSubtype).deleteRegistry(mnGridType, gridRow.getRowPrimaryKey()) == DDbConsts.SAVE_OK) {
updates = true;
}
}
}
if (updates) {
miClient.getSession().notifySuscriptors(mnGridType);
}
}
}
}
public void actionGridSaveCsv() {
if (jbGridSaveCsv.isEnabled()) {
DGridUtils.saveCsv(this, msTitle);
}
}
public void actionGridClearSettings() {
if (jbGridClearSettings.isEnabled()) {
if (miClient.showMsgBoxConfirm(DGridConsts.MSG_CONFIRM_RESET_SETTINGS) == JOptionPane.YES_OPTION) {
miUserGui = null;
populateGrid(DGridConsts.REFRESH_MODE_RESET);
}
}
}
public void actionGridReload() {
if (jbGridReload.isEnabled()) {
refreshGridWithRefresh();
}
}
public void actionGridSeekValue() {
if (jtTable.getRowCount() > 0) {
moSeeker.openSeeker(getSeekerLocation());
if (moSeeker.isSeekRequested()) {
DGridUtils.seekValue(this, moSeeker.getText());
}
}
}
public void actionGridSearchValue() {
if (jtTable.getRowCount() > 0) {
DGridUtils.searchValue(this, jtfGridSearch.getText(), true);
}
}
public void actionGridSearchNextValue() {
if (jtTable.getRowCount() > 0) {
DGridUtils.searchValue(this, jtfGridSearch.getText(), false);
}
}
public void actionToggleFilterDeleted() {
if (jtbFilterDeleted.isEnabled()) {
moFiltersMap.put(DGridConsts.FILTER_DELETED, jtbFilterDeleted.isSelected());
refreshGridWithRefresh();
}
}
public void actionToggleAutoReload() {
if (jtbAutoReload.isEnabled()) {
// Intentionally empty for now: the toggle's selected state alone controls auto-reload (checked in triggerSuscription())
}
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel jLabel1;
private javax.swing.JButton jbGridClearSettings;
private javax.swing.JButton jbGridReload;
private javax.swing.JButton jbGridSaveCsv;
private javax.swing.JButton jbGridSearchNext;
protected javax.swing.JButton jbRowCopy;
protected javax.swing.JButton jbRowDelete;
protected javax.swing.JButton jbRowDisable;
protected javax.swing.JButton jbRowEdit;
protected javax.swing.JButton jbRowNew;
private javax.swing.JPanel jpCommands;
private javax.swing.JPanel jpCommandsCustom;
private javax.swing.JPanel jpCommandsCustomCenter;
private javax.swing.JPanel jpCommandsCustomLeft;
private javax.swing.JPanel jpCommandsCustomRight;
private javax.swing.JPanel jpCommandsSys;
private javax.swing.JPanel jpCommandsSysCenter;
private javax.swing.JPanel jpCommandsSysLeft;
private javax.swing.JPanel jpCommandsSysRight;
private javax.swing.JPanel jpStatus;
private javax.swing.JPanel jpStatusCenter;
private javax.swing.JPanel jpStatusLeft;
private javax.swing.JPanel jpStatusRight;
protected javax.swing.JScrollPane jspScrollPane;
protected javax.swing.JTable jtTable;
protected javax.swing.JToggleButton jtbAutoReload;
protected javax.swing.JToggleButton jtbFilterDeleted;
private javax.swing.JTextField jtfGridSearch;
private javax.swing.JTextField jtfRows;
// End of variables declaration//GEN-END:variables
/*
* Abstract methods
*/
public abstract void prepareSqlQuery();
public abstract void createGridColumns();
public abstract void defineSuscriptions();
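/*
* Minimal sketch of a concrete subclass (hypothetical names; the actual grid
* type constants, SQL and columns depend on the module):
*
* public class DMyCatalogView extends DGridPaneView {
* public DMyCatalogView(DGuiClient client) {
* super(client, DGridConsts.GRID_VIEW_TAB, MY_GRID_TYPE, MY_GRID_SUBTYPE, "My catalog");
* }
* @Override public void prepareSqlQuery() { msSql = "SELECT ..."; }
* @Override public void createGridColumns() { moModel.getGridColumns().add(new DGridColumnView(...)); }
* @Override public void defineSuscriptions() { moSuscriptionsSet.add(MY_GRID_TYPE); }
* }
*
* Note that defineSuscriptions() is invoked from the constructor (via
* initComponentsCustom()), so it must not rely on subclass fields initialized
* after the super() call; prepareSqlQuery() and createGridColumns() run on
* every populateGrid() instead.
*/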
/*
* Public methods
*/
public void setFormParams(DGuiParams params) { moFormParams = params; }
public int getGridViewType() { return mnGridViewType; }
public String getTitle() { return msTitle; }
public String getSql() { return msSql; }
public HashMap<Integer, Integer> getColumnComplementsMap() { return moColumnComplementsMap; }
public HashMap<Integer, Object> getFiltersMap() { return moFiltersMap; }
public void populateGrid(final int refreshMode) {
int index = -1;
int posVer = 0;
int posHor = 0;
if (refreshMode == DGridConsts.REFRESH_MODE_RELOAD && jtTable != null) {
index = jtTable.getSelectedRow();
posVer = jspScrollPane.getVerticalScrollBar().getValue();
posHor = jspScrollPane.getHorizontalScrollBar().getValue();
}
readGridData();
if (refreshMode == DGridConsts.REFRESH_MODE_RELOAD) {
jspScrollPane.validate();
jspScrollPane.getVerticalScrollBar().setValue(
posVer < jspScrollPane.getVerticalScrollBar().getMaximum() ? posVer : jspScrollPane.getVerticalScrollBar().getMaximum());
jspScrollPane.getHorizontalScrollBar().setValue(
posHor < jspScrollPane.getHorizontalScrollBar().getMaximum() ? posHor : jspScrollPane.getHorizontalScrollBar().getMaximum());
if (index != -1) {
if (index >= jtTable.getRowCount()) { // clamp: valid row indices are 0 .. getRowCount() - 1
index = jtTable.getRowCount() - 1;
}
setSelectedGridRow(index);
}
}
}
public void triggerSuscription(final int suscription) {
if (jtbAutoReload.isSelected()) {
if (moSuscriptionsSet.contains(suscription)) {
refreshGridWithReload();
}
}
}
public void paneViewClosed() {
preserveUserGui();
}
public void setRowButtonsEnabled(boolean enabled) {
setRowButtonsEnabled(enabled, enabled, enabled, enabled, enabled);
}
public void setRowButtonsEnabled(boolean newEnabled, boolean editEnabled, boolean copyEnabled, boolean disableEnabled, boolean deleteEnabled) {
jbRowNew.setEnabled(newEnabled);
jbRowEdit.setEnabled(editEnabled);
jbRowCopy.setEnabled(copyEnabled);
jbRowDisable.setEnabled(disableEnabled);
jbRowDelete.setEnabled(deleteEnabled);
}
public void setSelectedGridRowInterval(final int row0, final int row1) {
int value = 0;
if (row0 >= 0 && row0 < jtTable.getRowCount() && row1 >= 0 && row1 < jtTable.getRowCount() && row0 <= row1) {
jtTable.setRowSelectionInterval(row0, row1);
value = row0 * jtTable.getRowHeight();
if (value < jspScrollPane.getVerticalScrollBar().getValue() || value > jspScrollPane.getVerticalScrollBar().getValue() + jspScrollPane.getVerticalScrollBar().getVisibleAmount()) {
jspScrollPane.getVerticalScrollBar().setValue(value);
}
}
}
public DGridRow[] getSelectedGridRows() {
int[] rows = jtTable.getSelectedRows();
DGridRow[] gridRows = null;
if (rows != null) {
gridRows = new DGridRow[rows.length];
for (int i = 0; i < rows.length; i++) {
gridRows[i] = moModel.getGridRows().get(jtTable.convertRowIndexToModel(rows[i]));
}
}
return gridRows;
}
public JPanel getPanelCommandsSys(final int guiPanel) {
JPanel panel = null;
switch(guiPanel) {
case DGuiConsts.PANEL_LEFT:
panel = jpCommandsSysLeft;
break;
case DGuiConsts.PANEL_CENTER:
panel = jpCommandsSysCenter;
break;
case DGuiConsts.PANEL_RIGHT:
panel = jpCommandsSysRight;
break;
default:
}
return panel;
}
public JPanel getPanelCommandsCustom(final int guiPanel) {
JPanel panel = null;
switch(guiPanel) {
case DGuiConsts.PANEL_LEFT:
panel = jpCommandsCustomLeft;
break;
case DGuiConsts.PANEL_CENTER:
panel = jpCommandsCustomCenter;
break;
case DGuiConsts.PANEL_RIGHT:
panel = jpCommandsCustomRight;
break;
default:
}
return panel;
}
public JPanel getPanelStatus(final int guiPanel) {
JPanel panel = null;
switch(guiPanel) {
case DGuiConsts.PANEL_LEFT:
panel = jpStatusLeft;
break;
case DGuiConsts.PANEL_CENTER:
panel = jpStatusCenter;
break;
case DGuiConsts.PANEL_RIGHT:
panel = jpStatusRight;
break;
default:
}
return panel;
}
/*
* Overridden methods
*/
@Override
public DGuiClient getClient() {
return miClient;
}
@Override
public int getGridPaneType() {
return DGridConsts.GRID_PANE_VIEW;
}
@Override
public int getGridType() {
return mnGridType;
}
@Override
public int getGridSubtype() {
return mnGridSubtype;
}
@Override
public int getGridMode() {
return mnGridMode;
}
@Override
public int getGridSubmode() {
return mnGridSubmode;
}
@Override
public DGridModel getModel() {
return moModel;
}
@Override
public JTable getTable() {
return jtTable;
}
@Override
public JScrollPane getScrollPane() {
return jspScrollPane;
}
@Override
public void clearGrid() {
resetGrid();
renderGrid();
}
@Override
public void clearGridRows() {
resetGridRows();
renderGridRows();
}
@Override
public void renderGrid() {
moModel.renderGrid();
}
@Override
public void renderGridRows() {
moModel.renderGridRows();
}
@Override
public void initSortKeys() {
miSortKeysList.clear();
miSortKeysList.add(new RowSorter.SortKey(0, SortOrder.ASCENDING));
}
@Override
public void putFilter(final int filterType, final Object filterValue) {
moFiltersMap.put(filterType, filterValue);
refreshGridWithRefresh();
}
@Override
public void setGridRow(final DGridRow gridRow, final int row) {
moModel.getGridRows().setElementAt(gridRow, jtTable.convertRowIndexToModel(row));
}
@Override
public void setGridColumn(final DGridColumn gridColumn, final int col) {
moModel.getGridColumns().setElementAt(gridColumn, jtTable.convertColumnIndexToModel(col));
}
@Override
public DGridRow getGridRow(final int row) {
return moModel.getGridRows().get(jtTable.convertRowIndexToModel(row));
}
@Override
public DGridColumn getGridColumn(final int col) {
return moModel.getGridColumns().get(jtTable.convertColumnIndexToModel(col));
}
@Override
public void addGridRow(final DGridRow gridRow) {
moModel.getGridRows().add(gridRow);
}
@Override
public void addGridColumn(final DGridColumn gridColumn) {
moModel.getGridColumns().add(gridColumn);
}
@Override
public void insertGridRow(final DGridRow gridRow, final int row) {
moModel.getGridRows().insertElementAt(gridRow, jtTable.convertRowIndexToModel(row));
}
@Override
public void insertGridColumn(final DGridColumn gridColumn, final int col) {
moModel.getGridColumns().insertElementAt(gridColumn, jtTable.convertColumnIndexToModel(col));
}
@Override
public DGridRow removeGridRow(final int row) {
return moModel.getGridRows().remove(jtTable.convertRowIndexToModel(row));
}
@Override
public DGridColumn removeGridColumn(final int col) {
return moModel.getGridColumns().remove(jtTable.convertColumnIndexToModel(col));
}
@Override
public void setSelectedGridColumn(final int col) {
if (col >= 0 && col < jtTable.getColumnCount()) {
jtTable.setColumnSelectionInterval(col, col);
}
}
@Override
public void setSelectedGridRow(final int row) {
int value = 0;
if (row >= 0 && row < jtTable.getRowCount()) {
jtTable.setRowSelectionInterval(row, row);
value = row * jtTable.getRowHeight();
if (value < jspScrollPane.getVerticalScrollBar().getValue() || value > jspScrollPane.getVerticalScrollBar().getValue() + jspScrollPane.getVerticalScrollBar().getVisibleAmount()) {
jspScrollPane.getVerticalScrollBar().setValue(value);
}
}
}
@Override
public DGridRow getSelectedGridRow() {
return jtTable.getSelectedRow() == -1 ? null : moModel.getGridRows().get(jtTable.convertRowIndexToModel(jtTable.getSelectedRow()));
}
@Override
public void setRowValueAtFieldName(final Object value, final int row, final String fieldName) {
int modelColumnIndex = moModel.getColumnIndexAtFieldName(fieldName);
if (modelColumnIndex != -1) {
moModel.setValueAt(value, row, modelColumnIndex);
jtTable.setValueAt(value, row, jtTable.convertColumnIndexToView(modelColumnIndex));
}
}
@Override
public Object getRowValueAtFieldName(final int row, final String fieldName) {
return moModel.getValueAtFieldName(row, fieldName);
}
@Override
public void valueChanged(ListSelectionEvent e) {
jtfRows.setText(DLibUtils.DecimalFormatInteger.format(jtTable.getSelectedRow() + 1) + "/" + DLibUtils.DecimalFormatInteger.format(jtTable.getRowCount()));
}
}
|
package se.raddo.raddose3D;
/**
* This is a Dose Decay Model class that calculates the Relative
* Diffraction Efficiency (RDE) according to the model from the
* Leal et al. (2012) paper. The paper describes the loss of
* scattering power of a crystal as a product of the expected
* intensity, the Debye-Waller factor and an empirically derived
* scale factor.
*/
public class DDMLeal implements DDM {
/**
* Decay parameters used in Leal et al. (2012) (eqn 4).
* The values were found for a cubic crystal of bovine
* pancreatic insulin (unpublished) at cryotemperature (100 K).
*/
/**
* Decay Parameter beta.
*/
private static final double BETA = 0.316928538944095;
/**
* Decay Parameter b0.
*/
private static final double B0 = 13.854805547210105;
/**
* Decay Parameter gamma.
*/
private static final double GAMMA = 0.029790991953658;
/**
* Print string to tell user the type of dose decay model being used.
*
* @return Informative string about the dose decay model being used.
*/
@Override
public String toString() {
return "Dose Decay Model from Leal et al. 2012 is being used.";
}
/**
* Method to calculate the Relative Diffraction Efficiency (RDE).
* The model used is from the Leal et al. 2012 paper that describes
* the loss of scattering power of a crystal as a product of the
* expected intensity, the Debye-Waller factor and an empirically
* derived scale factor.
*
* @param dose
* This is the absorbed dose within the crystal voxel
*
* @return The Relative Diffraction Efficiency
*/
@Override
public double calcDecay(final double dose) {
/** Relative intensity is the integrated intensity calculated
* using the current dose divided by the integrated intensity
* at dose = 0 MGy.
*/
return getIntegratedIntensity(dose) / getIntegratedIntensity(0);
}
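/*
* Minimal usage sketch (hypothetical caller; DDM is the interface implemented
* by this class):
*
* DDM ddm = new DDMLeal();
* double rde = ddm.calcDecay(10.0); // RDE after an absorbed dose of 10 MGy
*
* By construction calcDecay(0) returns 1.0, and the RDE decreases with dose
* because every term of the underlying integral shrinks as the dose grows.
*/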
/**
* Method to calculate the expected integrated intensity.
* The integrated intensity can be found in the Leal et al. 2012
* paper equation 4.
*
* @param dose
* This is the absorbed dose within the crystal voxel
*
* @return The integrated intensity
*/
public double getIntegratedIntensity(final double dose) {
// TODO Validate the dose argument (e.g. reject negative or NaN doses).
/**
* The integrated intensity according to Leal et al. 2012 (eqn 4)
*/
double integratedIntensity;
/**
* Array that stores the BEST intensity data
*/
final double[][] BEST_DATA = getBESTData();
/**
* Array containing the differences between each resolution value
* in the BEST intensity data.
*/
double[] dh = new double[BEST_DATA.length - 1];
/**
* Calculate the dh values, i.e. the differences between each resolution
* from the BEST data
*/
for (int i = 0; i < BEST_DATA.length - 1; i++) {
dh[i] = Math.sqrt(BEST_DATA[i+1][0]) - Math.sqrt(BEST_DATA[i][0]);
}
/**
* Calculate the integral of eqn 4 of Leal et al. 2012
*/
double integralSum = 0;
double eachTerm;
for (int j = 0; j < dh.length; j++) {
// Midpoint rule per bin: mean h^2, mean intensity J, the dose-dependent
// Debye-Waller style decay factor, times the resolution step dh.
eachTerm = ((BEST_DATA[j + 1][0] + BEST_DATA[j][0]) / 2)
* ((BEST_DATA[j + 1][1] + BEST_DATA[j][1]) / 2)
* Math.exp(-0.5 * ((BEST_DATA[j + 1][0] + BEST_DATA[j][0]) / 2) * (B0 + BETA * dose))
* dh[j];
integralSum = integralSum + eachTerm;
}
/**
* Calculate the integrated intensity of eqn 4 of Leal et al. 2012
*/
integratedIntensity = Math.exp(-Math.pow(GAMMA * dose, 2)) * integralSum;
return integratedIntensity;
}
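/*
* For reference, the quantity approximated above (Leal et al. 2012, eqn 4) is
*
* I(D) = exp(-(GAMMA * D)^2) * integral of h^2 * J(h) * exp(-0.5 * h^2 * (B0 + BETA * D)) dh
*
* evaluated numerically over the 300 BEST resolution bins: h^2 and J(h) are
* averaged between adjacent bins (midpoint rule) and multiplied by the step
* dh = h(i+1) - h(i).
*/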
/**
* Method to extract the BEST intensity data (Popov & Bourenkov 2003)
* The intensity data is stored in a csv file in 2 columns:
* column 1 are h^2 values (h = 1/d and d is the resolution in Angstroms)
* column 2 are the expected intensity values (denoted J in the file).
* The file contains intensity values for each of the 300 resolution bins
* (i.e. 300 rows)
*
* @return An array containing the BEST intensity data
*/
public static double[][] getBESTData() {
/** Array to contain the BEST intensity data */
final double[][] BEST_DATA = new double[][]{
{0.009000,117970.000000},
{0.013100,100512.023438},
{0.017200,80882.992188},
{0.021300,62948.515625},
{0.025400,57507.757813},
{0.029500,61357.429688},
{0.033500,72234.062500},
{0.037600,89858.945313},
{0.041700,109460.929688},
{0.045800,126917.039063},
{0.049900,137405.062500},
{0.054000,139655.375000},
{0.058100,137483.218750},
{0.062200,133394.875000},
{0.066300,129394.328125},
{0.070400,125762.617188},
{0.074500,121035.289063},
{0.078600,116051.804688},
{0.082600,110836.078125},
{0.086700,104613.296875},
{0.090800,97322.054688},
{0.094900,89836.304688},
{0.099000,83216.187500},
{0.103100,78146.273438},
{0.107200,73459.671875},
{0.111300,69471.023438},
{0.115400,65299.644531},
{0.119500,61581.441406},
{0.123600,58510.613281},
{0.127700,55865.179688},
{0.131800,53658.789063},
{0.135800,52101.019531},
{0.139900,51070.417969},
{0.144000,50092.042969},
{0.148100,49350.722656},
{0.152200,48151.910156},
{0.156300,47058.906250},
{0.160400,46675.406250},
{0.164500,46597.675781},
{0.168600,45924.046875},
{0.172700,46080.671875},
{0.176800,45937.621094},
{0.180900,46096.023438},
{0.184900,45896.964844},
{0.189000,45990.093750},
{0.193100,46123.292969},
{0.197200,46343.515625},
{0.201300,45936.539063},
{0.205400,45715.695313},
{0.209500,45109.164063},
{0.213600,44549.132813},
{0.217700,43634.820313},
{0.221800,43566.085938},
{0.225900,43451.015625},
{0.230000,42696.292969},
{0.234100,41173.980469},
{0.238100,39972.753906},
{0.242200,39166.628906},
{0.246300,38020.367188},
{0.250400,36810.992188},
{0.254500,35497.308594},
{0.258600,34194.906250},
{0.262700,32992.742188},
{0.266800,31585.996094},
{0.270900,30211.492188},
{0.275000,29119.816406},
{0.279100,28151.564453},
{0.283200,27386.414063},
{0.287300,26232.775391},
{0.291300,25235.693359},
{0.295400,24318.244141},
{0.299500,23707.949219},
{0.303600,22821.910156},
{0.307700,22182.095703},
{0.311800,21694.740234},
{0.315900,21236.888672},
{0.320000,20733.123047},
{0.324100,20323.289063},
{0.328200,20073.404297},
{0.332300,19932.156250},
{0.336400,19631.480469},
{0.340400,19223.189453},
{0.344500,18920.273438},
{0.348600,18557.662109},
{0.352700,18134.789063},
{0.356800,17926.917969},
{0.360900,17909.144531},
{0.365000,17908.371094},
{0.369100,17781.652344},
{0.373200,17634.251953},
{0.377300,17607.757813},
{0.381400,17273.970703},
{0.385500,17132.121094},
{0.389600,16953.238281},
{0.393600,16883.560547},
{0.397700,16615.091797},
{0.401800,16435.376953},
{0.405900,16423.140625},
{0.410000,16351.833008},
{0.414100,16278.805664},
{0.418200,15998.300781},
{0.422300,15795.753906},
{0.426400,15589.185547},
{0.430500,15561.383789},
{0.434600,15467.072266},
{0.438700,15476.588867},
{0.442800,15331.998047},
{0.446800,15028.963867},
{0.450900,14745.987305},
{0.455000,14509.141602},
{0.459100,14445.925781},
{0.463200,14254.642578},
{0.467300,14111.920898},
{0.471400,13900.478516},
{0.475500,13785.526367},
{0.479600,13686.092773},
{0.483700,13464.845703},
{0.487800,13304.157227},
{0.491900,13084.092773},
{0.495900,13114.880859},
{0.500000,13089.595703},
{0.504100,13244.094727},
{0.508200,13117.398438},
{0.512300,13140.625977},
{0.516400,13031.726563},
{0.520500,12999.481445},
{0.524600,12835.458008},
{0.528700,12954.440430},
{0.532800,12937.747070},
{0.536900,12936.303711},
{0.541000,12825.827148},
{0.545100,12995.077148},
{0.549100,12994.031250},
{0.553200,13036.256836},
{0.557300,13006.765625},
{0.561400,13057.585938},
{0.565500,13010.015625},
{0.569600,12891.707031},
{0.573700,12966.081055},
{0.577800,13114.422852},
{0.581900,13119.473633},
{0.586000,13065.753906},
{0.590100,13052.747070},
{0.594200,13214.619141},
{0.598200,13376.884766},
{0.602300,13386.037109},
{0.606400,13244.183594},
{0.610500,13225.625000},
{0.614600,13203.177734},
{0.618700,13157.918945},
{0.622800,13058.344727},
{0.626900,13089.546875},
{0.631000,13236.269531},
{0.635100,13356.927734},
{0.639200,13294.084961},
{0.643300,13322.505859},
{0.647400,13356.877930},
{0.651400,13574.700195},
{0.655500,13741.788086},
{0.659600,13988.012695},
{0.663700,14126.933594},
{0.667800,14226.778320},
{0.671900,14096.913086},
{0.676000,14083.927734},
{0.680100,14170.342773},
{0.684200,14351.646484},
{0.688300,14494.584961},
{0.692400,14485.082031},
{0.696500,14514.433594},
{0.700600,14622.690430},
{0.704600,14725.596680},
{0.708700,14840.912109},
{0.712800,14869.136719},
{0.716900,14947.928711},
{0.721000,15039.328125},
{0.725100,15069.899414},
{0.729200,15058.230469},
{0.733300,14892.115234},
{0.737400,14829.183594},
{0.741500,14854.609375},
{0.745600,14911.042969},
{0.749700,14950.721680},
{0.753700,15113.783203},
{0.757800,15211.773438},
{0.761900,15205.695313},
{0.766000,15024.023438},
{0.770100,14926.859375},
{0.774200,14948.205078},
{0.778300,14968.500000},
{0.782400,14961.653320},
{0.786500,14880.744141},
{0.790600,14853.396484},
{0.794700,14715.400391},
{0.798800,14625.747070},
{0.802900,14476.197266},
{0.806900,14315.362305},
{0.811000,14115.835938},
{0.815100,14177.434570},
{0.819200,14214.168945},
{0.823300,13756.127930},
{0.827400,13478.938477},
{0.831500,13409.521484},
{0.835600,13313.304688},
{0.839700,13191.076172},
{0.843800,13068.227539},
{0.847900,13143.240234},
{0.852000,13034.021484},
{0.856100,12844.786133},
{0.860100,12565.625977},
{0.864200,12494.125977},
{0.868300,12431.333008},
{0.872400,12224.258789},
{0.876500,12045.228516},
{0.880600,11934.916992},
{0.884700,11999.309570},
{0.888800,12092.721680},
{0.892900,12073.926758},
{0.897000,12000.385742},
{0.901100,11492.284180},
{0.905200,11340.666016},
{0.909200,11261.278320},
{0.913300,11170.411133},
{0.917400,11033.553711},
{0.921500,10920.555664},
{0.925600,10805.260742},
{0.929700,10749.541992},
{0.933800,10633.936523},
{0.937900,10553.670898},
{0.942000,10396.851563},
{0.946100,10345.898438},
{0.950200,10439.532227},
{0.954300,10444.083984},
{0.958400,10338.727539},
{0.962400,10137.357422},
{0.966500,10024.374023},
{0.970600,9960.443359},
{0.974700,9843.068359},
{0.978800,9813.852539},
{0.982900,9774.963867},
{0.987000,9722.901367},
{0.991100,9668.754883},
{0.995200,9489.758789},
{0.999300,9437.469727},
{1.003400,9337.846680},
{1.007500,9232.355469},
{1.011500,9143.000977},
{1.015600,8946.202148},
{1.019700,9061.576172},
{1.023800,8927.707031},
{1.027900,8833.817383},
{1.032000,8559.502930},
{1.036100,8737.791016},
{1.040200,8741.252930},
{1.044300,8734.716797},
{1.048400,8730.012695},
{1.052500,8553.071289},
{1.056600,8567.203125},
{1.060700,8448.906250},
{1.064700,8348.450195},
{1.068800,8372.744141},
{1.072900,8420.621094},
{1.077000,8534.404297},
{1.081100,8515.739258},
{1.085200,8391.372070},
{1.089300,8376.128906},
{1.093400,8364.005859},
{1.097500,8370.607422},
{1.101600,8053.081055},
{1.105700,7885.546875},
{1.109800,7949.569824},
{1.113900,8098.683105},
{1.117900,8009.884766},
{1.122000,7884.853027},
{1.126100,7912.110840},
{1.130200,7977.089844},
{1.134300,8038.597168},
{1.138400,7984.880859},
{1.142500,7943.616211},
{1.146600,8002.785156},
{1.150700,7840.146973},
{1.154800,7771.714844},
{1.158900,7704.839844},
{1.163000,7606.397949},
{1.167000,7499.033203},
{1.171100,7380.200195},
{1.175200,7353.042969},
{1.179300,7373.826172},
{1.183400,7386.295898},
{1.187500,7445.311035},
{1.191600,7298.761230},
{1.195700,7163.548828},
{1.199800,6936.292969},
{1.203900,6920.410156},
{1.208000,6888.470215},
{1.212100,7020.129883},
{1.216200,6991.485840},
{1.220200,6970.270020},
{1.224300,6894.088867},
{1.228400,6915.407227},
{1.232500,6934.170898}
};
// Return the BEST intensity data
return BEST_DATA;
}
}
|
package se.raddo.raddose3D;
import java.util.HashMap;
import java.util.Map;
import se.raddo.raddose3D.ElementDatabase.DatabaseFields;
/**
* The Element class contains physical constants of an element associated with
* x-ray cross sections.
*/
public class Element {
/**
* Element name.
*/
private final String elementName;
/**
* Atomic number.
*/
private final int atomicNumber;
/**
* Stored information about the chemical element.
*/
private final Map<ElementDatabase.DatabaseFields, Double> elementData;
/**
* Stored absorption edge coefficients.
*/
private final Map<AbsorptionEdge, Double[]> coefficients;
/**
* List of absorption edges.
*/
private enum AbsorptionEdge {
/** K edge: innermost electron shell (n = 1). */
K,
/** L edge: second electron shell (n = 2). */
L,
/** M edge: third electron shell (n = 3). */
M,
/** N edge: fourth electron shell (n = 4). */
N,
/** Coherent scattering polynomial coefficients. */
C,
/** Incoherent scattering polynomial coefficients. */
I
}
/** Atomic mass unit in grams. */
private static final double ATOMIC_MASS_UNIT = 1.66E-24;
/** LJ_1 variable from the original Fortran code, used to correct cross-sections of light elements (Z <= 29). */
private static final double LJ_1 = 1.160;
/** LJ_2 variable from the original Fortran code, used to correct cross-sections of light elements (Z <= 29). */
private static final double LJ_2 = 1.41;
/** Light/heavy element threshold; atomic numbers up to and including 29 are treated as light atoms. */
public static final int LIGHT_ATOM_MAX_NUM = 29;
/** Tolerance (in keV) used to warn when an energy lies close to an absorption edge. */
private static final double ABSORPTION_EDGE_TOLERANCE = 0.001;
/** Number of terms in the polynomial expansion. */
private static final int POLYNOMIAL_EXPANSION = 4;
/** Conversion factor [Barns/Atom] = C * [cm^2/g]. */
public static final double C = 53.2400017;
/** Different types of calculated cross-sections. */
public enum CrossSection {
/**
* Cross-section for the photoelectric effect. This does not contribute to
* scattering.
*/
PHOTOELECTRIC,
/**
* Cross-section for coherent (elastic) scattering.
*/
COHERENT,
/**
* Attenuation cross-section.
*/
TOTAL
}
/**
* Create a new element with name, atomic number and associated information.
*
* @param element
* element name
* @param protons
* atomic number
* @param elementInformation
* Map containing the associated element information
*/
public Element(final String element, final int protons,
final Map<ElementDatabase.DatabaseFields, Double> elementInformation) {
elementName = element;
atomicNumber = protons;
elementData = new HashMap<ElementDatabase.DatabaseFields, Double>(
elementInformation);
coefficients = edgeCoefficients(elementInformation);
}
/**
* Converts the edge coefficients into easier-to-handle arrays.
*
* @param elementInformation
* The database fields of the current element
* @return
* Map containing all edge coefficients as Double[] arrays.
*/
private Map<AbsorptionEdge, Double[]> edgeCoefficients(
final Map<ElementDatabase.DatabaseFields, Double> elementInformation) {
Map<AbsorptionEdge, Double[]> coeffMap =
new HashMap<AbsorptionEdge, Double[]>();
Double[] coeff;
coeff = new Double[POLYNOMIAL_EXPANSION];
coeff[0] = elementInformation
.get(ElementDatabase.DatabaseFields.K_COEFF_0);
coeff[1] = elementInformation
.get(ElementDatabase.DatabaseFields.K_COEFF_1);
coeff[2] = elementInformation
.get(ElementDatabase.DatabaseFields.K_COEFF_2);
coeff[3] = elementInformation
.get(ElementDatabase.DatabaseFields.K_COEFF_3);
coeffMap.put(AbsorptionEdge.K, coeff);
coeff = new Double[POLYNOMIAL_EXPANSION];
coeff[0] = elementInformation
.get(ElementDatabase.DatabaseFields.L_COEFF_0);
coeff[1] = elementInformation
.get(ElementDatabase.DatabaseFields.L_COEFF_1);
coeff[2] = elementInformation
.get(ElementDatabase.DatabaseFields.L_COEFF_2);
coeff[3] = elementInformation
.get(ElementDatabase.DatabaseFields.L_COEFF_3);
coeffMap.put(AbsorptionEdge.L, coeff);
coeff = new Double[POLYNOMIAL_EXPANSION];
coeff[0] = elementInformation
.get(ElementDatabase.DatabaseFields.M_COEFF_0);
coeff[1] = elementInformation
.get(ElementDatabase.DatabaseFields.M_COEFF_1);
coeff[2] = elementInformation
.get(ElementDatabase.DatabaseFields.M_COEFF_2);
coeff[3] = elementInformation
.get(ElementDatabase.DatabaseFields.M_COEFF_3);
coeffMap.put(AbsorptionEdge.M, coeff);
coeff = new Double[POLYNOMIAL_EXPANSION];
coeff[0] = elementInformation
.get(ElementDatabase.DatabaseFields.N_COEFF_0);
coeff[1] = elementInformation
.get(ElementDatabase.DatabaseFields.N_COEFF_1);
coeff[2] = elementInformation
.get(ElementDatabase.DatabaseFields.N_COEFF_2);
coeff[3] = elementInformation
.get(ElementDatabase.DatabaseFields.N_COEFF_3);
coeffMap.put(AbsorptionEdge.N, coeff);
coeff = new Double[POLYNOMIAL_EXPANSION];
coeff[0] = elementInformation
.get(ElementDatabase.DatabaseFields.COHERENT_COEFF_0);
coeff[1] = elementInformation
.get(ElementDatabase.DatabaseFields.COHERENT_COEFF_1);
coeff[2] = elementInformation
.get(ElementDatabase.DatabaseFields.COHERENT_COEFF_2);
coeff[3] = elementInformation
.get(ElementDatabase.DatabaseFields.COHERENT_COEFF_3);
coeffMap.put(AbsorptionEdge.C, coeff);
coeff = new Double[POLYNOMIAL_EXPANSION];
coeff[0] = elementInformation
.get(ElementDatabase.DatabaseFields.INCOHERENT_COEFF_0);
coeff[1] = elementInformation
.get(ElementDatabase.DatabaseFields.INCOHERENT_COEFF_1);
coeff[2] = elementInformation
.get(ElementDatabase.DatabaseFields.INCOHERENT_COEFF_2);
coeff[3] = elementInformation
.get(ElementDatabase.DatabaseFields.INCOHERENT_COEFF_3);
coeffMap.put(AbsorptionEdge.I, coeff);
return coeffMap;
}
/**
* Calculation of "bax" for corresponding edge and energy.
*
* @param energy
* beam energy in keV
* @param edge
* Selected edge coefficient (K, L, M, N, C, I).
* @return
* value of bax
*/
private double baxForEdge(final double energy, final AbsorptionEdge edge) {
// calculation from logarithmic coefficients in McMaster tables.
double sum = 0;
Double[] coeffs = coefficients.get(edge);
for (int i = 0; i < POLYNOMIAL_EXPANSION; i++) {
if (energy == 1) {
/*
* This is actually wrong, as it causes a discontinuity in the
* cross-section function. However, this is how Pathikrit Bandyopadhyay
* chose to implement it, so it stays. It also only affects values at
* E = 1keV.
*/
sum += coeffs[i];
} else {
sum += coeffs[i] * Math.pow(Math.log(energy), i);
}
}
return Math.exp(sum);
}
/**
* Finds the absorption edge bracketing the given energy and calculates bax
* for that edge. Corrects bax for light elements (Z <= 29) and then uses it
* to calculate the cross-sections.
*
* @param energy
* X-ray photon energy in keV
* @return
* Map structure containing the photoelectric, coherent and total
* cross sections in units Barns/Atom.
*/
public Map<CrossSection, Double> calculateMu(final double energy) {
Double absorptionEdgeK =
elementData.get(ElementDatabase.DatabaseFields.EDGE_K);
Double absorptionEdgeL =
elementData.get(ElementDatabase.DatabaseFields.EDGE_L);
Double absorptionEdgeM =
elementData.get(ElementDatabase.DatabaseFields.EDGE_M);
if ((absorptionEdgeK != null && energy < absorptionEdgeK
&& energy > absorptionEdgeK - ABSORPTION_EDGE_TOLERANCE)
|| (absorptionEdgeL != null && energy < absorptionEdgeL
&& energy > absorptionEdgeL - ABSORPTION_EDGE_TOLERANCE)
|| (absorptionEdgeM != null && energy < absorptionEdgeM
&& energy > absorptionEdgeM - ABSORPTION_EDGE_TOLERANCE)) {
System.err.println("Warning: Energy is close to absorption edge of "
+ elementName);
}
double bax = 0;
if (energy > absorptionEdgeK) {
bax = baxForEdge(energy, AbsorptionEdge.K);
} else if (energy < absorptionEdgeK && energy > absorptionEdgeL) {
bax = baxForEdge(energy, AbsorptionEdge.L);
} else if (energy < absorptionEdgeL && energy > absorptionEdgeM) {
bax = baxForEdge(energy, AbsorptionEdge.M);
} else if (energy < absorptionEdgeM) {
bax = baxForEdge(energy, AbsorptionEdge.N);
}
// Fortran says...
// correct for L-edges since McMaster uses L1 edge.
// Use edge jumps for correct X-sections.
if (atomicNumber <= LIGHT_ATOM_MAX_NUM) {
if (energy > elementData.get(ElementDatabase.DatabaseFields.L3)
&& energy < elementData.get(ElementDatabase.DatabaseFields.L2)) {
bax /= (LJ_1 * LJ_2);
}
if (energy > elementData.get(ElementDatabase.DatabaseFields.L2)
&& energy < absorptionEdgeL) {
bax /= LJ_1;
}
}
double bcox = 0;
double binx = 0;
if (elementData.get(ElementDatabase.DatabaseFields.COHERENT_COEFF_0) != 0) {
bcox = baxForEdge(energy, AbsorptionEdge.C);
}
if (elementData.get(ElementDatabase.DatabaseFields.INCOHERENT_COEFF_0) != 0)
{
binx = baxForEdge(energy, AbsorptionEdge.I);
}
double btox = bax + bcox + binx;
Map<CrossSection, Double> results = new HashMap<CrossSection, Double>();
results.put(CrossSection.COHERENT, bcox); // elastic
results.put(CrossSection.PHOTOELECTRIC, bax); // mu, abs coefficient
results.put(CrossSection.TOTAL, btox); // attenuation
return results;
}
/**
* @return the elementName
*/
public String getElementName() {
return elementName;
}
/**
* @return the atomicNumber
*/
public int getAtomicNumber() {
return atomicNumber;
}
/**
* Return the atomic weight of this element in unified atomic mass units (u).
*
* @return
* the atomic weight in u
*/
public Double getAtomicWeight() {
return elementData.get(DatabaseFields.ATOMIC_WEIGHT);
}
/**
* Return the atomic weight of this element in grams.
*
* @return
* the atomic weight in grams
*/
public Double getAtomicWeightInGrams() {
return getAtomicWeight() * ATOMIC_MASS_UNIT;
}
}
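/*
 * A minimal, self-contained sketch of the McMaster-style cross-section
 * formula evaluated by baxForEdge above: bax = exp(sum_i a_i * ln(E)^i).
 * The coefficient values below are illustrative only and are not taken
 * from the element database.
 */
class McMasterPolynomialSketch {
public static void main(String[] args) {
double[] coeffs = {1.0, -2.5, 0.3, -0.01}; // hypothetical a_0..a_3
double energy = 12.4; // photon energy in keV
double sum = 0;
for (int i = 0; i < coeffs.length; i++) {
// accumulate a_i * ln(E)^i, exactly as baxForEdge does for E != 1
sum += coeffs[i] * Math.pow(Math.log(energy), i);
}
System.out.println("bax = " + Math.exp(sum) + " barns/atom");
}
}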
|
package smp.components.staff;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import javafx.concurrent.Service;
import javafx.concurrent.Task;
import javafx.scene.image.ImageView;
import javafx.scene.layout.HBox;
import javax.sound.midi.InvalidMidiDataException;
import smp.ImageIndex;
import smp.ImageLoader;
import smp.components.Constants;
import smp.components.general.Utilities;
import smp.components.staff.Staff.AnimationService.AnimationTask;
import smp.components.staff.sequences.StaffSequence;
import smp.components.staff.sequences.ams.AMSDecoder;
import smp.components.staff.sequences.mpc.MPCDecoder;
import smp.components.staff.sounds.SMPSequence;
import smp.components.staff.sounds.SMPSequencer;
import smp.stateMachine.StateMachine;
/**
* The staff on which notes go. The staff keeps track of notes
* in terms of discrete StaffNoteLines, placed inside an array.
* @author RehdBlob
* @since 2012.08.13
*/
public class Staff {
/** Milliseconds to delay between updating the play bars. */
private int delayTime = 50;
/** Whether we are playing a song. */
private boolean songPlaying = false;
/**
* The wrapper that holds a series of ImageView objects that are meant to
* display the staff measure lines.
*/
private StaffImages staffImages;
/**
* This is the backend portion of the staff, responsible for keeping track
* of all of the different positions of notes and sequences.
*/
private StaffBackend staffBackend;
/** This holds the notes on the staff. */
private NoteMatrix theMatrix;
/** This is the current sequence that we are displaying on the staff. */
private StaffSequence theSequence;
/**
* The Sequencer object that will be used to play sounds.
*/
private SMPSequencer seq;
/** The song that we are currently editing. */
private SMPSequence currentSong;
/**
* This is a service that will help run the animation and sound of playing a
* song.
*/
private AnimationService theService;
/**
* Creates a new Staff object.
* @param staffExtLines These are the lines that appear under notes for the
* lower and upper portions of the staff.
*/
public Staff(HBox[] staffExtLines) {
seq = new SMPSequencer();
theMatrix = new NoteMatrix(Constants.NOTELINES_IN_THE_WINDOW,
Constants.NOTES_IN_A_LINE, this);
staffBackend = new StaffBackend();
try {
currentSong = new SMPSequence();
} catch (InvalidMidiDataException e) {
// Do nothing
e.printStackTrace();
}
theSequence = new StaffSequence();
staffImages = new StaffImages(staffExtLines);
staffImages.setStaff(this);
staffImages.initialize();
theService = new AnimationService();
}
/**
* Moves the staff and notes left by 1.
*/
public void moveLeft() {
shift(-1);
}
/**
* Moves the staff and notes right by 1.
*/
public void moveRight() {
shift(1);
}
/**
* Shifts the staff by <code>num</code> spaces.
* @param num The number of spaces to shift. Positive
* values indicate an increasing measure number.
*/
public void shift(int num) {
setLocation(num + StateMachine.getMeasureLineNum());
}
/**
* Jumps to a certain position on the staff.
* @param num The first measure line number (usually between 1
* and 375) that is to be displayed.
*/
public synchronized void setLocation(int num) {
for(int i = 0; i < Constants.NOTELINES_IN_THE_WINDOW; i++)
theMatrix.redraw(i);
}
/**
* Force re-draws the staff.
*/
public synchronized void redraw() {
setLocation(StateMachine.getMeasureLineNum());
}
/**
* Begins animation of the Staff.
*/
public void startSong() {
songPlaying = true;
theService.start();
}
/**
* Stops the song that is currently playing.
*/
public void stopSong() {
songPlaying = false;
theService.cancel();
theService.reset();
}
/**
* Loads a Super Mario Paint song.
*/
public void loadSong() {
}
/**
* Saves a Super Mario Paint song.
*/
public void saveSong() {
}
/**
* Imports a Mario Paint Composer song.
*/
public void importMPCSong() {
try {
currentSong = MPCDecoder.decode(Utilities.openFileDialog());
} catch (NullPointerException e) {
e.printStackTrace();
} catch (ParseException e) {
e.printStackTrace();
} catch (InvalidMidiDataException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Imports an Advanced Mario Sequencer song.
*/
public void importAMSSong() {
try {
currentSong = AMSDecoder.decode(Utilities.openFileDialog());
} catch (NullPointerException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* @return The note matrix of the staff that we are working with.
*/
public NoteMatrix getNoteMatrix() {
return theMatrix;
}
/**
* @return The staff backend controller.
*/
public StaffBackend getStaffBackend() {
return staffBackend;
}
/**
* @return The current song that we are displaying.
*/
public StaffSequence getSequence() {
return theSequence;
}
/**
* @return The staff images.
*/
public StaffImages getStaffImages() {
return staffImages;
}
/**
* @param acc The offset that we are deciding upon.
* @return An <code>ImageIndex</code> based on the degree of
* sharp or flat that we want to display.
*/
public static ImageIndex switchAcc(int acc) {
switch (acc) {
case 2:
return ImageIndex.DOUBLESHARP;
case 1:
return ImageIndex.SHARP;
case 0:
return ImageIndex.BLANK;
case -1:
return ImageIndex.FLAT;
case -2:
return ImageIndex.DOUBLEFLAT;
default:
return ImageIndex.BLANK;
}
}
/**
* This is a worker thread that helps run the animation on the staff.
*/
class AnimationService extends Service<Staff> {
@Override
protected Task<Staff> createTask() {
return new AnimationTask();
}
/**
* Bumps the highlight of the notes to the next play bar.
* @param playBars The list of the measure highlights.
* @param index The current index of the measure that we're on.
*/
private void bumpHighlights(ArrayList<ImageView> playBars, int index) {
playBars.get(index).setImage(ImageLoader.getSpriteFX(ImageIndex.NONE));
if (index + 1 >= playBars.size()) {
playBars.get(0).setImage(
ImageLoader.getSpriteFX(ImageIndex.PLAY_BAR1));
} else {
playBars.get(index + 1).setImage(
ImageLoader.getSpriteFX(ImageIndex.PLAY_BAR1));
}
}
/**
* This class keeps track of animation and sound.
*/
class AnimationTask extends Task<Staff> {
/**
* This is the current index of the measure line that we are on
* within the staff window.
*/
private int index = 0;
/** These are the play bars on the staff. */
private ArrayList<ImageView> playBars;
@Override
protected Staff call() throws Exception {
playBars = staffImages.getPlayBars();
do {
playNextLine();
try {
Thread.sleep(350);
} catch (InterruptedException e) {
// Do nothing
}
} while (songPlaying);
return null;
}
/**
* Plays the next line of notes in the queue. For ease-of-programming
* purposes, we'll not care about efficiency and just play things as
* they are.
*/
private void playNextLine() {
if (StateMachine.getMeasureLineNum() >=
Constants.DEFAULT_LINES_PER_SONG
- Constants.NOTELINES_IN_THE_WINDOW) {
songPlaying = false;
}
bumpHighlights(playBars, index);
if (index < Constants.NOTELINES_IN_THE_WINDOW - 1)
index++;
else
index = 0;
}
}
}
}
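/*
 * A minimal sketch of the wrap-around index arithmetic used by
 * bumpHighlights and playNextLine above, reduced to plain integers.
 * The window size of 10 is illustrative.
 */
class PlayBarIndexSketch {
static int next(int index, int size) {
// same wrap-around rule as playNextLine
return (index + 1 >= size) ? 0 : index + 1;
}
public static void main(String[] args) {
int size = 10;
int index = 0;
for (int step = 0; step < 12; step++) {
System.out.print(index + " ");
index = next(index, size);
}
// prints 0 1 2 3 4 5 6 7 8 9 0 1
}
}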
|
package net.sf.mzmine.modules.visualization.histogram;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.text.NumberFormat;
import java.util.Vector;
import java.util.logging.Logger;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JFormattedTextField;
import javax.swing.JPanel;
import net.sf.mzmine.data.ChromatographicPeak;
import net.sf.mzmine.data.Parameter;
import net.sf.mzmine.data.PeakList;
import net.sf.mzmine.data.RawDataFile;
import net.sf.mzmine.data.impl.SimpleParameterSet;
import net.sf.mzmine.main.mzmineclient.MZmineCore;
import net.sf.mzmine.modules.visualization.histogram.histogramdatalabel.HistogramDataType;
import net.sf.mzmine.util.Range;
import net.sf.mzmine.util.components.ExtendedCheckBox;
import net.sf.mzmine.util.dialogs.ParameterSetupDialog;
public class HistogramSetupDialog extends ParameterSetupDialog {
private SimpleParameterSet localParameters;
private JComboBox dataTypeComponent;
private JPanel dataRangeComponent;
private PeakList peakList;
private RawDataFile[] rawDataFiles;
public HistogramSetupDialog(String title, SimpleParameterSet parameters,
PeakList peakList) {
super(title, parameters);
this.localParameters = parameters;
this.peakList = peakList;
Parameter p = localParameters.getParameter("Plotted data type");
dataTypeComponent = (JComboBox) getComponentForParameter(p);
dataTypeComponent.addActionListener(this);
p = localParameters.getParameter("Plotted data range");
dataRangeComponent = (JPanel) getComponentForParameter(p);
}
@Override
public void actionPerformed(ActionEvent event) {
super.actionPerformed(event);
Object source = event.getSource();
if ((source instanceof JComboBox)
|| (source instanceof ExtendedCheckBox)) {
try {
Vector<RawDataFile> dataFiles = new Vector<RawDataFile>();
for (ExtendedCheckBox box : multipleCheckBoxes) {
if (box.isSelected()) {
Object genericObject = box.getObject();
dataFiles.add((RawDataFile) genericObject);
}
}
rawDataFiles = dataFiles.toArray(new RawDataFile[0]);
if (rawDataFiles.length == 0) {
throw (new Exception(
"Please select at least one option from multiple selection parameter"));
}
HistogramDataType dataType = (HistogramDataType) dataTypeComponent
.getSelectedItem();
Range valueRange = calculateRange(dataType);
NumberFormat formatter = getAxisNumberFormat(dataType);
JPanel panel = (JPanel) dataRangeComponent;
JFormattedTextField minField = new JFormattedTextField(
formatter);
minField.setValue(valueRange.getMin());
minField.setPreferredSize(new Dimension(80, minField
.getPreferredSize().height));
minField.setHorizontalAlignment(JFormattedTextField.CENTER);
panel.getComponent(0).setVisible(false);
panel.remove(0);
panel.add(minField, 0);
JFormattedTextField maxField = new JFormattedTextField(
formatter);
maxField.setValue(valueRange.getMax());
maxField.setPreferredSize(new Dimension(80, maxField
.getPreferredSize().height));
maxField.setHorizontalAlignment(JFormattedTextField.CENTER);
panel.getComponent(2).setVisible(false);
panel.remove(2);
panel.add(maxField, 2);
panel.addNotify();
pack();
} catch (Exception e) {
desktop.displayMessage(e.getMessage());
}
}
}
private Range calculateRange(HistogramDataType dataType) {
double minimum = Double.MAX_VALUE, maximum = 0;
ChromatographicPeak[] peaks;
double[] values = null;
for (RawDataFile dataFile : rawDataFiles) {
peaks = peakList.getPeaks(dataFile);
values = new double[peaks.length];
for (int i = 0; i < peaks.length; i++) {
switch (dataType) {
case AREA:
values[i] = peaks[i].getArea();
break;
case HEIGHT:
values[i] = peaks[i].getHeight();
break;
case MASS:
values[i] = peaks[i].getMZ();
break;
case RT:
values[i] = peaks[i].getRT();
break;
}
if (!Double.isNaN(values[i])) {
minimum = Math.min(values[i], minimum);
maximum = Math.max(values[i], maximum);
}
}
}
return new Range(minimum, maximum);
}
private NumberFormat getAxisNumberFormat(HistogramDataType dataType) {
NumberFormat formatter = null;
switch (dataType) {
case AREA:
formatter = MZmineCore.getIntensityFormat();
break;
case MASS:
formatter = MZmineCore.getMZFormat();
break;
case HEIGHT:
formatter = MZmineCore.getIntensityFormat();
break;
case RT:
formatter = MZmineCore.getRTFormat();
break;
}
return formatter;
}
}
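/*
 * A minimal sketch of the NaN-safe minimum/maximum scan performed by
 * calculateRange above, on plain doubles. The values are illustrative.
 */
class RangeScanSketch {
public static void main(String[] args) {
double[] values = {3.2, Double.NaN, 0.5, 7.9};
double minimum = Double.MAX_VALUE, maximum = 0;
for (double v : values) {
if (!Double.isNaN(v)) { // skip NaN entries, as calculateRange does
minimum = Math.min(v, minimum);
maximum = Math.max(v, maximum);
}
}
System.out.println(minimum + " .. " + maximum); // 0.5 .. 7.9
}
}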
|
package org.encog.neural.networks.training.competitive;
import java.util.Collection;
import org.encog.matrix.Matrix;
import org.encog.neural.data.NeuralData;
import org.encog.neural.data.NeuralDataPair;
import org.encog.neural.data.NeuralDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.Layer;
import org.encog.neural.networks.synapse.Synapse;
import org.encog.neural.networks.training.BasicTraining;
import org.encog.neural.networks.training.LearningRate;
import org.encog.neural.networks.training.competitive.neighborhood.NeighborhoodFunction;
import org.encog.util.math.BoundMath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class implements competitive training, which would be used in a
* winner-take-all neural network, such as the self-organizing map (SOM). This
* is an unsupervised training method, so no ideal data is needed in the
* training set. If ideal data is provided, it will be ignored.
*
* A neighborhood function is required to determine the degree to which
* neighboring neurons (to the winning neuron) are updated by each training
* iteration.
*
* @author jheaton
*
*/
public class CompetitiveTraining extends BasicTraining implements LearningRate {
/**
* The neighborhood function to use to determine to what degree a neuron
* should be "trained".
*/
private final NeighborhoodFunction neighborhood;
/**
* The learning rate: the degree to which changes are applied.
*/
private double learningRate;
/**
* The network being trained.
*/
private final BasicNetwork network;
/**
* The input layer.
*/
private final Layer inputLayer;
/**
* The output layer.
*/
private final Layer outputLayer;
/**
* A collection of the synapses being modified.
*/
private final Collection<Synapse> synapses;
/**
* How many neurons in the input layer.
*/
private final int inputNeuronCount;
/**
* How many neurons in the output layer.
*/
private final int outputNeuronCount;
/**
* The worst BMU distance so far; this becomes the error.
*/
private double worstDistance;
/**
* The logging object.
*/
private final Logger logger = LoggerFactory.getLogger(this.getClass());
/**
* Create an instance of competitive training.
*
* @param network
* The network to train.
* @param learningRate
* The learning rate, how much to apply per iteration.
* @param training
* The training set (unsupervised).
* @param neighborhood
* The neighborhood function to use.
*/
public CompetitiveTraining(final BasicNetwork network,
final double learningRate, final NeuralDataSet training,
final NeighborhoodFunction neighborhood) {
this.neighborhood = neighborhood;
setTraining(training);
this.learningRate = learningRate;
this.network = network;
this.inputLayer = network.getInputLayer();
this.outputLayer = network.getOutputLayer();
this.synapses = network.getStructure().getPreviousSynapses(
this.outputLayer);
this.inputNeuronCount = this.inputLayer.getNeuronCount();
this.outputNeuronCount = this.outputLayer.getNeuronCount();
setError(0);
// set the threshold to zero
for (final Synapse synapse : this.synapses) {
final Matrix matrix = synapse.getMatrix();
for (int col = 0; col < matrix.getCols(); col++) {
matrix.set(matrix.getRows() - 1, col, 0);
}
}
}
/**
* Adjusts the weight for a single neuron during a training iteration.
*
* @param weight
* The starting weight.
* @param input
* The input to this neuron.
* @param currentNeuron
* The neuron whose weight is being updated.
* @param bmu
* The neuron that "won", the best matching unit.
* @return The new weight value.
*/
private double adjustWeight(final double weight, final double input,
final int currentNeuron, final int bmu) {
final double delta = this.neighborhood.function(currentNeuron, bmu)
* this.learningRate * (input - weight);
return weight + delta;
}
/**
* Calculate the best matching unit (BMU). This is the output neuron that
* has the lowest Euclidean distance to the input vector.
*
* @param synapse
* The synapse to calculate for.
* @param input
* The input vector.
* @return The output neuron number that is the BMU.
*/
private int calculateBMU(final Synapse synapse, final NeuralData input) {
int result = 0;
double lowestDistance = Double.MAX_VALUE;
for (int i = 0; i < this.outputNeuronCount; i++) {
final double distance = calculateEuclideanDistance(synapse, input,
i);
// Track the lowest distance; this is the BMU.
if (distance < lowestDistance) {
lowestDistance = distance;
result = i;
}
}
// Track the worst distance; this is the error for the entire network.
if (lowestDistance > this.worstDistance) {
this.worstDistance = lowestDistance;
}
return result;
}
/**
* Calculate the Euclidean distance for the specified output neuron and the
* input vector.
*
* @param synapse
* The synapse to get the weights from.
* @param input
* The input vector.
* @param outputNeuron
* The neuron we are calculating the distance for.
* @return The Euclidean distance.
*/
private double calculateEuclideanDistance(final Synapse synapse,
final NeuralData input, final int outputNeuron) {
double result = 0;
for (int i = 0; i < input.size(); i++) {
final double diff = input.getData(i)
- synapse.getMatrix().get(i, outputNeuron);
result += diff * diff;
}
return BoundMath.sqrt(result);
}
/**
* @return The current learning rate.
*/
public double getLearningRate() {
return this.learningRate;
}
/**
* @return The network neighborhood function.
*/
public NeighborhoodFunction getNeighborhood() {
return this.neighborhood;
}
/**
* @return The network being trained.
*/
public BasicNetwork getNetwork() {
return this.network;
}
/**
* Perform one training iteration.
*/
public void iteration() {
if (this.logger.isInfoEnabled()) {
this.logger.info("Performing Competitive Training iteration.");
}
preIteration();
this.worstDistance = Double.MIN_VALUE;
int[] won = new int[this.outputNeuronCount];
int overworkedBMU = -1;
NeuralDataPair overworkedPair = null;
for (final Synapse synapse : this.synapses) {
// Apply competitive training
for (final NeuralDataPair pair : getTraining()) {
final NeuralData input = pair.getInput();
final int bmu = calculateBMU(synapse, input);
won[bmu]++;
// is the BMU "overworked"?
if (won[bmu] > 1) {
// have we found an overworked BMU?
if (overworkedBMU != -1) {
// is this BMU more overworked than the last?
if (won[bmu] > won[overworkedBMU]) {
overworkedBMU = bmu;
overworkedPair = pair;
}
} else {
overworkedBMU = bmu;
overworkedPair = pair;
}
}
train(bmu, synapse, input);
}
// force any non-winning neurons to share the burden somewhat
if (overworkedPair != null) {
forceWinners(synapse, won, overworkedPair);
}
}
// update the error
setError(this.worstDistance);
postIteration();
}
/**
* Force any neurons that did not win to off-load patterns from
* overworked neurons.
* @param won An array that specifies how many times each output
* neuron has "won".
* @param overworkedPair A training pattern from the most
* overworked neuron.
* @param synapse The synapse to modify.
*/
private void forceWinners(final Synapse synapse, final int[] won,
final NeuralDataPair overworkedPair) {
for (int outputNeuron = 0; outputNeuron < won.length; outputNeuron++) {
if (won[outputNeuron] == 0) {
// copy
for (int inputNeuron = 0; inputNeuron < overworkedPair
.getInput().size(); inputNeuron++) {
synapse.getMatrix().set(inputNeuron,
outputNeuron,
overworkedPair.getInput().getData(inputNeuron));
}
break;
}
}
}
/**
* Set the learning rate. This is the rate at which the weights are changed.
*
* @param rate
* The learning rate.
*/
public void setLearningRate(final double rate) {
this.learningRate = rate;
}
/**
* Train for the specified synapse and BMU.
* @param bmu The best matching unit for this input.
* @param synapse The synapse to train.
* @param input The input to train for.
*/
private void train(final int bmu, final Synapse synapse,
final NeuralData input) {
// adjust the weight for the BMU and its neighborhood
for (int outputNeuron = 0; outputNeuron < this.outputNeuronCount;
outputNeuron++) {
for (int inputNeuron = 0; inputNeuron < this.inputNeuronCount;
inputNeuron++) {
final double currentWeight = synapse.getMatrix().get(
inputNeuron, outputNeuron);
final double inputValue = input.getData(inputNeuron);
final double newWeight = adjustWeight(currentWeight,
inputValue, outputNeuron, bmu);
synapse.getMatrix().set(inputNeuron, outputNeuron, newWeight);
}
}
}
}
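/*
 * A minimal, self-contained sketch of the weight update rule applied by
 * adjustWeight/train above, shown on plain arrays. The "winner-only"
 * neighborhood below is a trivial stand-in for NeighborhoodFunction; all
 * values and sizes are illustrative.
 */
class SomUpdateSketch {
public static void main(String[] args) {
double learningRate = 0.4;
double[] input = {0.2, 0.9};
double[][] weights = {{0.5, 0.5}, {0.1, 0.8}}; // [outputNeuron][inputNeuron]
int bmu = 1; // pretend output neuron 1 is the best matching unit
for (int out = 0; out < weights.length; out++) {
double nb = (out == bmu) ? 1.0 : 0.0; // winner-only neighborhood
for (int in = 0; in < input.length; in++) {
// delta = neighborhood * learningRate * (input - weight)
weights[out][in] += nb * learningRate * (input[in] - weights[out][in]);
}
}
System.out.println(java.util.Arrays.deepToString(weights));
}
}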
|
package org.royaldev.royalcommands.rcommands;
import org.bukkit.ChatColor;
import org.bukkit.OfflinePlayer;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Player;
import org.royaldev.royalcommands.PConfManager;
import org.royaldev.royalcommands.RUtils;
import org.royaldev.royalcommands.RoyalCommands;
import java.util.Map;
public class CmdListHome implements CommandExecutor {
private RoyalCommands plugin;
public CmdListHome(RoyalCommands plugin) {
this.plugin = plugin;
}
@Override
public boolean onCommand(CommandSender cs, Command cmd, String label, String[] args) {
if (cmd.getName().equalsIgnoreCase("listhome")) {
if (!plugin.isAuthorized(cs, "rcmds.listhome")) {
RUtils.dispNoPerms(cs);
return true;
}
if (!(cs instanceof Player) && args.length < 1) {
cs.sendMessage(cmd.getDescription());
return false;
}
OfflinePlayer t;
if (args.length < 1) t = (OfflinePlayer) cs;
else {
if (!plugin.isAuthorized(cs, "rcmds.others.listhome")) {
cs.sendMessage(ChatColor.RED + "You cannot list other players' homes!");
return true;
}
t = plugin.getServer().getPlayer(args[0]);
if (t == null) t = plugin.getServer().getOfflinePlayer(args[0]);
if (plugin.isAuthorized(t, "rcmds.exempt.listhome")) {
cs.sendMessage(ChatColor.RED + "You can't list that player's homes!");
return true;
}
}
PConfManager pcm = PConfManager.getPConfManager(t);
if (!pcm.exists()) {
cs.sendMessage(ChatColor.RED + "No such player!");
return true;
}
ConfigurationSection cfgs = pcm.getConfigurationSection("home");
if (cfgs == null) {
cs.sendMessage(ChatColor.RED + "No homes found!");
return true;
}
final Map<String, Object> opts = cfgs.getValues(false);
if (opts.keySet().isEmpty()) {
cs.sendMessage(ChatColor.RED + "No homes found!");
return true;
}
String homes = opts.keySet().toString();
homes = homes.substring(1, homes.length() - 1);
cs.sendMessage(ChatColor.BLUE + "Homes:");
cs.sendMessage(homes);
return true;
}
return false;
}
}
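/*
 * A minimal sketch of the bracket-stripping trick used above to turn a key
 * set like [home, farm, mine] into the comma-separated list "home, farm,
 * mine". The home names are illustrative.
 */
class HomeListFormatSketch {
public static void main(String[] args) {
java.util.Set<String> keys = new java.util.LinkedHashSet<String>(
java.util.Arrays.asList("home", "farm", "mine"));
String homes = keys.toString(); // "[home, farm, mine]"
homes = homes.substring(1, homes.length() - 1); // strip the brackets
System.out.println(homes); // home, farm, mine
}
}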
|
package com.hyperwallet.clientsdk.util;
import com.hyperwallet.clientsdk.HyperwalletException;
import com.nimbusds.jose.EncryptionMethod;
import com.nimbusds.jose.JOSEException;
import com.nimbusds.jose.JWEAlgorithm;
import com.nimbusds.jose.JWSAlgorithm;
import org.apache.commons.io.IOUtils;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URISyntaxException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.testng.Assert.fail;
public class HyperwalletEncryptionTest {
private HyperwalletEncryption hyperwalletEncryption;
private List<String> fieldNames;
@BeforeMethod
public void setUp() {
this.hyperwalletEncryption = new HyperwalletEncryption.HyperwalletEncryptionBuilder()
.clientPrivateKeySetLocation("").hyperwalletKeySetLocation("").build();
this.fieldNames = collectFieldNames();
}
@Test(dataProvider = "fieldNames")
public void testBuilderMethod(String fieldName) throws Exception {
Method getter = findGetter(fieldName);
Method builderMethod = HyperwalletEncryption.HyperwalletEncryptionBuilder.class.getMethod(fieldName, getter.getReturnType());
Object oldValue = getter.invoke(hyperwalletEncryption);
assertThat(oldValue, is(notNullValue()));
HyperwalletEncryption.HyperwalletEncryptionBuilder builder = new HyperwalletEncryption.HyperwalletEncryptionBuilder();
builderMethod.invoke(builder, (Object) null);
assertThat(getter.invoke(builder.build()), is(getDefaultValue(fieldName)));
HyperwalletEncryption.HyperwalletEncryptionBuilder ret = (HyperwalletEncryption.HyperwalletEncryptionBuilder)builderMethod.invoke(builder, oldValue);
assertThat(getter.invoke(ret.build()), is(equalTo(oldValue)));
}
@Test
public void shouldCorrectlyEncryptAndDecryptInputText() throws IOException, ParseException, JOSEException, URISyntaxException {
ClassLoader classLoader = getClass().getClassLoader();
String hyperwalletKeysPath = new File(classLoader.getResource("encryption/public-jwkset").toURI()).getAbsolutePath();
String clientPrivateKeysPath = new File(classLoader.getResource("encryption/private-jwkset").toURI()).getAbsolutePath();
String testPayload = IOUtils.toString(classLoader.getResourceAsStream("encryption/test-payload.json"));
HyperwalletEncryption hyperwalletEncryption = new HyperwalletEncryption.HyperwalletEncryptionBuilder()
.clientPrivateKeySetLocation(clientPrivateKeysPath).hyperwalletKeySetLocation(hyperwalletKeysPath).build();
String encryptedPayload = hyperwalletEncryption.encrypt(testPayload);
String payloadAfterDecryption = hyperwalletEncryption.decrypt(encryptedPayload);
assertThat("Payload text is the same after decryption: " + testPayload,
payloadAfterDecryption, is(testPayload));
}
@Test
public void shouldThrowExceptionWhenDecryptionIsMadeByKeyOtherThanUsedForEncryption()
throws IOException, ParseException, JOSEException, URISyntaxException {
ClassLoader classLoader = getClass().getClassLoader();
String hyperwalletKeysPath = "https://uat-api.paylution.com/jwkset";
String clientPrivateKeysPath = new File(classLoader.getResource("encryption/private-jwkset").toURI()).getAbsolutePath();
String testPayload = IOUtils.toString(classLoader.getResourceAsStream("encryption/test-payload.json"));
HyperwalletEncryption hyperwalletEncryption = new HyperwalletEncryption.HyperwalletEncryptionBuilder()
.clientPrivateKeySetLocation(clientPrivateKeysPath).hyperwalletKeySetLocation(hyperwalletKeysPath).build();
String encryptedPayload = hyperwalletEncryption.encrypt(testPayload);
try {
hyperwalletEncryption.decrypt(encryptedPayload);
fail("Expected JOSEException");
} catch (JOSEException e) {
assertThat(e.getMessage(), anyOf(containsString("Decryption error"),containsString("Message is larger than modulus")));
}
}
@Test
public void shouldThrowExceptionWhenEncryptionAlgorithmIsNotFoundInKeySet()
throws URISyntaxException, IOException, ParseException, JOSEException {
ClassLoader classLoader = getClass().getClassLoader();
String hyperwalletKeysPath = new File(classLoader.getResource("encryption/public-jwkset").toURI()).getAbsolutePath();
String clientPrivateKeysPath = new File(classLoader.getResource("encryption/private-jwkset").toURI()).getAbsolutePath();
String testPayload = IOUtils.toString(classLoader.getResourceAsStream("encryption/test-payload.json"));
HyperwalletEncryption hyperwalletEncryption = new HyperwalletEncryption.HyperwalletEncryptionBuilder()
.clientPrivateKeySetLocation(clientPrivateKeysPath).hyperwalletKeySetLocation(hyperwalletKeysPath)
.encryptionAlgorithm(JWEAlgorithm.A256GCMKW).build();
try {
hyperwalletEncryption.encrypt(testPayload);
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), is(containsString("Algorithm = A256GCMKW is not found in client or Hyperwallet key set")));
}
}
@Test
public void shouldThrowExceptionWhenKeySetFileIsNotFound()
throws URISyntaxException, IOException, ParseException, JOSEException {
ClassLoader classLoader = getClass().getClassLoader();
String hyperwalletKeysPath = new File(classLoader.getResource("encryption/public-jwkset").toURI()).getAbsolutePath();
String clientPrivateKeysPath = "/encryption/public-jwkset/keyset.json";
String testPayload = IOUtils.toString(classLoader.getResourceAsStream("encryption/test-payload.json"));
HyperwalletEncryption hyperwalletEncryption = new HyperwalletEncryption.HyperwalletEncryptionBuilder()
.clientPrivateKeySetLocation(clientPrivateKeysPath).hyperwalletKeySetLocation(hyperwalletKeysPath).build();
try {
hyperwalletEncryption.encrypt(testPayload);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), is(containsString("Wrong client JWK set location")));
}
}
@Test
public void shouldThrowExceptionWhenJWSSignatureExpirationDateIsNull() throws URISyntaxException {
ClassLoader classLoader = getClass().getClassLoader();
String hyperwalletKeysPath = new File(classLoader.getResource("encryption/public-jwkset").toURI()).getAbsolutePath();
String clientPrivateKeysPath = new File(classLoader.getResource("encryption/private-jwkset").toURI()).getAbsolutePath();
HyperwalletEncryption hyperwalletEncryption = new HyperwalletEncryption.HyperwalletEncryptionBuilder()
.clientPrivateKeySetLocation(clientPrivateKeysPath).hyperwalletKeySetLocation(hyperwalletKeysPath).build();
try {
hyperwalletEncryption.verifySignatureExpirationDate(null);
fail("Expected HyperwalletException");
} catch (HyperwalletException e) {
assertThat(e.getMessage(), is(containsString("exp JWS header param was null")));
}
}
@Test
public void shouldThrowExceptionWhenJWSSignatureExpirationDateHasIncorrectType() throws URISyntaxException {
ClassLoader classLoader = getClass().getClassLoader();
String hyperwalletKeysPath = new File(classLoader.getResource("encryption/public-jwkset").toURI()).getAbsolutePath();
String clientPrivateKeysPath = new File(classLoader.getResource("encryption/private-jwkset").toURI()).getAbsolutePath();
HyperwalletEncryption hyperwalletEncryption = new HyperwalletEncryption.HyperwalletEncryptionBuilder()
.clientPrivateKeySetLocation(clientPrivateKeysPath).hyperwalletKeySetLocation(hyperwalletKeysPath).build();
try {
hyperwalletEncryption.verifySignatureExpirationDate("123123");
fail("Expected HyperwalletException");
} catch (HyperwalletException e) {
assertThat(e.getMessage(), is(containsString("exp JWS header must be of type Long")));
}
}
@Test
public void shouldThrowExceptionWhenJWSSignatureExpirationDateIsBeforeCurrentDate() throws URISyntaxException {
ClassLoader classLoader = getClass().getClassLoader();
String hyperwalletKeysPath = new File(classLoader.getResource("encryption/public-jwkset").toURI()).getAbsolutePath();
String clientPrivateKeysPath = new File(classLoader.getResource("encryption/private-jwkset").toURI()).getAbsolutePath();
HyperwalletEncryption hyperwalletEncryption = new HyperwalletEncryption.HyperwalletEncryptionBuilder()
.clientPrivateKeySetLocation(clientPrivateKeysPath).hyperwalletKeySetLocation(hyperwalletKeysPath).build();
try {
hyperwalletEncryption.verifySignatureExpirationDate(0L);
fail("Expected HyperwalletException");
} catch (HyperwalletException e) {
assertThat(e.getMessage(), is(containsString("Response message signature(JWS) has expired")));
}
}
private Method findGetter(String fieldName) throws Exception {
String getterName = "get" + fieldName.substring(0, 1).toUpperCase() + fieldName.substring(1);
return HyperwalletEncryption.class.getMethod(getterName);
}
private List<String> collectFieldNames() {
List<String> fieldNames = new ArrayList<String>();
for (Field field : HyperwalletEncryption.class.getDeclaredFields()) {
if (!Modifier.isPrivate(field.getModifiers()) || Modifier.isFinal(field.getModifiers())) {
continue;
}
fieldNames.add(field.getName());
}
return fieldNames;
}
@DataProvider(name = "fieldNames")
public Iterator<Object[]> createFieldNamesProvider() {
List<String> fieldNames = collectFieldNames();
List<Object[]> objects = new ArrayList<Object[]>(fieldNames.size());
for (String fieldName : fieldNames) {
objects.add(new Object[]{fieldName});
}
return objects.iterator();
}
private Object getDefaultValue(String fieldName) {
switch(fieldName) {
case "encryptionAlgorithm":
return JWEAlgorithm.RSA_OAEP_256;
case "signAlgorithm":
return JWSAlgorithm.RS256;
case "encryptionMethod":
return EncryptionMethod.A256CBC_HS512;
case "jwsExpirationMinutes":
return 5;
default:
return null;
}
}
}
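/*
 * A minimal sketch of the field-name-to-getter reflection used by
 * findGetter above, applied to a tiny local bean instead of
 * HyperwalletEncryption.
 */
class GetterReflectionSketch {
public static class Bean {
public String getTitle() {
return "hello";
}
}
public static void main(String[] args) throws Exception {
String fieldName = "title";
String getterName = "get" + fieldName.substring(0, 1).toUpperCase() + fieldName.substring(1);
java.lang.reflect.Method getter = Bean.class.getMethod(getterName);
System.out.println(getter.invoke(new Bean())); // hello
}
}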
|
package com.ning.http.client.async;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.AsyncHttpClientConfig;
import com.ning.http.client.BodyDeferringAsyncHandler;
import com.ning.http.client.BodyDeferringAsyncHandler.BodyDeferringInputStream;
import com.ning.http.client.Response;
public abstract class BodyDeferringAsyncHandlerTest extends AbstractBasicTest {
// not actually half a gig ;) kept small for the sake of shorter test runs
protected static final int HALF_GIG = 100000;
public static class SlowAndBigHandler extends AbstractHandler {
public void handle(String pathInContext, Request request,
HttpServletRequest httpRequest, HttpServletResponse httpResponse)
throws IOException, ServletException {
// 512MB large download
// 512 * 1024 * 1024 = 536870912
httpResponse.setStatus(200);
httpResponse.setContentLength(HALF_GIG);
httpResponse.setContentType("application/octet-stream");
httpResponse.flushBuffer();
final boolean wantFailure = httpRequest
.getHeader("X-FAIL-TRANSFER") != null;
final boolean wantSlow = httpRequest.getHeader("X-SLOW") != null;
OutputStream os = httpResponse.getOutputStream();
for (int i = 0; i < HALF_GIG; i++) {
os.write(i % 255);
if (wantSlow) {
try {
Thread.sleep(300);
} catch (InterruptedException ex) {
// sleep
}
}
if (wantFailure) {
if (i > HALF_GIG / 2) {
// kaboom
// yes, the response is committed, but Jetty aborts and
// drops the connection
httpResponse.sendError(500);
break;
}
}
}
httpResponse.getOutputStream().flush();
httpResponse.getOutputStream().close();
}
}
// a /dev/null but counting how many bytes it ditched
public static class CountingOutputStream extends OutputStream {
private int byteCount = 0;
@Override
public void write(int b) throws IOException {
// /dev/null
byteCount++;
}
public int getByteCount() {
return byteCount;
}
}
// simple stream copy just to "consume". It closes streams.
public static void copy(InputStream in, OutputStream out)
throws IOException {
byte[] buf = new byte[1024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
out.flush();
out.close();
in.close();
}
public AbstractHandler configureHandler() throws Exception {
return new SlowAndBigHandler();
}
public AsyncHttpClientConfig getAsyncHttpClientConfig() {
// for this test's brevity, request retries are disabled
return new AsyncHttpClientConfig.Builder().setMaxRequestRetry(0)
.setRequestTimeoutInMs(10000).build();
}
@Test(groups = { "standalone", "default_provider" })
public void deferredSimple() throws IOException, ExecutionException,
TimeoutException, InterruptedException {
AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
AsyncHttpClient.BoundRequestBuilder r = client
.prepareGet("http://127.0.0.1:" + port1 + "/deferredSimple");
CountingOutputStream cos = new CountingOutputStream();
BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(cos);
Future<Response> f = r.execute(bdah);
Response resp = bdah.getResponse();
assertNotNull(resp);
assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
assertEquals(
true,
resp.getHeader("content-length").equals(
String.valueOf(HALF_GIG)));
// we got headers only; the body is probably not all here yet (we are
// downloading a BIG file)
assertEquals(true, HALF_GIG >= cos.getByteCount());
// now be polite and wait for body arrival too (otherwise we would be
// dropping the "line" on server)
f.get();
// it all should be here now
assertEquals(true, HALF_GIG == cos.getByteCount());
client.close();
}
@Test(groups = { "standalone", "default_provider" }, enabled = true)
public void deferredSimpleWithFailure() throws IOException,
ExecutionException, TimeoutException, InterruptedException {
AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
AsyncHttpClient.BoundRequestBuilder r = client.prepareGet(
"http://127.0.0.1:" + port1 + "/deferredSimpleWithFailure")
.addHeader("X-FAIL-TRANSFER", Boolean.TRUE.toString());
CountingOutputStream cos = new CountingOutputStream();
BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(cos);
Future<Response> f = r.execute(bdah);
Response resp = bdah.getResponse();
assertNotNull(resp);
assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
assertEquals(
true,
resp.getHeader("content-length").equals(
String.valueOf(HALF_GIG)));
// we got headers only; the body is probably not all here yet (we are
// downloading a BIG file)
assertEquals(true, HALF_GIG >= cos.getByteCount());
// now be polite and wait for body arrival too (otherwise we would be
// dropping the "line" on server)
try {
f.get();
Assert.fail("get() should fail with IOException!");
} catch (Exception e) {
// good
}
// it's incomplete, there was an error
assertEquals(false, HALF_GIG == cos.getByteCount());
client.close();
}
@Test(groups = { "standalone", "default_provider" })
public void deferredInputStreamTrick() throws IOException,
ExecutionException, TimeoutException, InterruptedException {
AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
AsyncHttpClient.BoundRequestBuilder r = client
.prepareGet("http://127.0.0.1:" + port1
+ "/deferredInputStreamTrick");
PipedOutputStream pos = new PipedOutputStream();
PipedInputStream pis = new PipedInputStream(pos);
BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(pos);
Future<Response> f = r.execute(bdah);
BodyDeferringInputStream is = new BodyDeferringInputStream(f, bdah, pis);
Response resp = is.getAsapResponse();
assertNotNull(resp);
assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
assertEquals(
true,
resp.getHeader("content-length").equals(
String.valueOf(HALF_GIG)));
// "consume" the body, but our code needs input stream
CountingOutputStream cos = new CountingOutputStream();
copy(is, cos);
// now we don't need to be polite, since consuming and closing
// BodyDeferringInputStream does all.
// it all should be here now
assertEquals(true, HALF_GIG == cos.getByteCount());
client.close();
}
@Test(groups = { "standalone", "default_provider" })
public void deferredInputStreamTrickWithFailure() throws IOException,
ExecutionException, TimeoutException, InterruptedException {
AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
AsyncHttpClient.BoundRequestBuilder r = client.prepareGet(
"http://127.0.0.1:" + port1
+ "/deferredInputStreamTrickWithFailure").addHeader(
"X-FAIL-TRANSFER", Boolean.TRUE.toString());
PipedOutputStream pos = new PipedOutputStream();
PipedInputStream pis = new PipedInputStream(pos);
BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(pos);
Future<Response> f = r.execute(bdah);
BodyDeferringInputStream is = new BodyDeferringInputStream(f, bdah, pis);
Response resp = is.getAsapResponse();
assertNotNull(resp);
assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
assertEquals(
true,
resp.getHeader("content-length").equals(
String.valueOf(HALF_GIG)));
// "consume" the body, but our code needs input stream
CountingOutputStream cos = new CountingOutputStream();
try {
copy(is, cos);
Assert.fail("InputStream consumption should fail with IOException!");
} catch (IOException e) {
// good!
}
client.close();
}
@Test(groups = { "standalone", "default_provider" })
public void testConnectionRefused() throws IOException, ExecutionException,
TimeoutException, InterruptedException {
int newPortWithoutAnyoneListening = findFreePort();
AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
AsyncHttpClient.BoundRequestBuilder r = client
.prepareGet("http://127.0.0.1:" + newPortWithoutAnyoneListening
+ "/testConnectionRefused");
CountingOutputStream cos = new CountingOutputStream();
BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(cos);
r.execute(bdah);
try {
bdah.getResponse();
Assert.fail("IOException should be thrown here!");
} catch (IOException e) {
// good
}
client.close();
}
}
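/*
 * A minimal sketch showing the copy/CountingOutputStream pair above on an
 * in-memory stream: the tests "consume" a body without buffering it, only
 * counting bytes. The 1234-byte payload is illustrative.
 */
class CountingCopySketch {
public static void main(String[] args) throws java.io.IOException {
byte[] data = new byte[1234];
BodyDeferringAsyncHandlerTest.CountingOutputStream cos =
new BodyDeferringAsyncHandlerTest.CountingOutputStream();
BodyDeferringAsyncHandlerTest.copy(new java.io.ByteArrayInputStream(data), cos);
System.out.println(cos.getByteCount()); // 1234
}
}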
|
package com.wizzardo.http.framework.template.taglib;
import com.wizzardo.http.framework.template.*;
import com.wizzardo.http.framework.template.taglib.g.CheckBox;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
public class CheckBoxTest implements TagTest {
@Before
public void setup() {
TagLib.findTags(Collections.singletonList(CheckBox.class));
}
@Test
public void test_1() {
RenderResult result = prepare("<g:checkBox name=\"myCheckbox\" value=\"${true}\"/>").get(new Model());
Assert.assertEquals("<input type=\"checkbox\" name=\"myCheckbox\" id=\"myCheckbox\" value=\"true\"/>\n", result.toString());
}
@Test
public void test_2() {
RenderResult result = prepare("<g:checkBox name=\"myCheckbox\" id=\"myCheckbox_${i}\" checked=\"${true}\"/>")
.get(new Model().append("i", 2));
Assert.assertEquals("<input type=\"checkbox\" name=\"myCheckbox\" id=\"myCheckbox_2\" checked=\"checked\"/>\n", result.toString());
}
@Test
public void test_3() {
RenderResult result = prepare("<g:checkBox name=\"myCheckbox\" id=\"myCheckbox_${i}\" checked=\"true\"/>")
.get(new Model().append("i", 3));
Assert.assertEquals("<input type=\"checkbox\" name=\"myCheckbox\" id=\"myCheckbox_3\" checked=\"checked\"/>\n", result.toString());
}
}
|
package retrofit.http;
/**
* Server information. Applications may extend this class and return different URLs over time.
* Callers should always consult the Server instance for the latest values rather than caching URLs.
*
* @author Bob Lee (bob@squareup.com)
*/
public class Server {
public static final String DEFAULT_TYPE = "production";
private final String apiUrl;
private final String webUrl;
private final String type;
private final boolean ignoreSslWarnings;
public Server(String apiUrl, String webUrl) {
this(apiUrl, webUrl, false);
}
public Server(String apiUrl, String webUrl, boolean ignoreSslWarnings) {
this(apiUrl, webUrl, DEFAULT_TYPE, ignoreSslWarnings);
}
public Server(String apiUrl, String webUrl, String type, boolean ignoreSslWarnings) {
if (!apiUrl.endsWith("/")) {
apiUrl += "/";
}
this.apiUrl = apiUrl;
if (!webUrl.endsWith("/")) {
webUrl += "/";
}
this.webUrl = webUrl;
this.type = type;
this.ignoreSslWarnings = ignoreSslWarnings;
}
/**
* Gets the base API url. Includes a trailing '/'.
*/
public String apiUrl() {
return apiUrl;
}
/**
* Gets the base URL for Square's web site. Includes a trailing '/'.
*/
public String webUrl() {
return webUrl;
}
/**
* Gets a human-readable server type for differentiating between multiple instances.
*/
public String type() {
return type;
}
/**
* Returns true if we should ignore SSL warnings. Returns false by default.
* Ignored for development servers.
*/
public boolean ignoreSslWarnings() {
return ignoreSslWarnings;
}
}
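/*
 * A minimal usage sketch of the trailing-slash normalization performed by
 * the Server constructor above. The URLs are placeholders.
 */
class ServerUsageSketch {
public static void main(String[] args) {
Server server = new Server("https://api.example.com", "https://www.example.com");
System.out.println(server.apiUrl()); // "https://api.example.com/"
System.out.println(server.webUrl()); // "https://www.example.com/"
}
}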
|
package com.atexpose.dispatcher.channels.tasks;
import org.junit.Test;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
/**
* Extends TaskUtil only for the sake of vanity coverage points.
*/
public class TaskUtilTest extends TaskUtil {
@Test
public void getZoneId_null_UTC() {
assertThat(TaskUtil.getZoneId(null)).isEqualTo(ZoneId.of("UTC"));
}
@Test
public void getZoneId_EmptyString_UTC() {
assertThat(TaskUtil.getZoneId("")).isEqualTo(ZoneId.of("UTC"));
}
@Test
public void getZoneId_AmericaNew_York_AmericaNew_York() {
assertThat(TaskUtil.getZoneId("America/New_York"))
.isEqualTo(ZoneId.of("America/New_York"));
}
@Test
public void getZonedDateTime_MidDayNewYorkInSummerTime_4HoursLaterUtc() {
//Exact time does not matter, only that it is summer time
IWatch watch = Watch.create().setDateTimeUTC(2017, 7, 27, 15, 30);
ZonedDateTime actual = TaskUtil.getZonedDateTime("14:30", IWatch.NEW_YORK, watch);
assertThat(actual).isEqualTo("2017-07-27T18:30:00Z");
assertThat(actual).isEqualTo("2017-07-27T14:30-04:00[America/New_York]");
}
@Test
public void getZonedDateTime_EveningNewYorkInTheSummerTime_NextDayUtc() {
//Exact time does not matter, only that it is summer time
IWatch watch = Watch.create().setDateTimeUTC(2017, 7, 27, 15, 30);
ZonedDateTime actual = TaskUtil.getZonedDateTime("22:30", IWatch.NEW_YORK, watch);
assertThat(actual).isEqualTo("2017-07-28T02:30:00Z");
assertThat(actual).isEqualTo("2017-07-27T22:30-04:00[America/New_York]");
}
@Test
public void getZonedDateTime_EveningNewYorkInTheSummerTimeDayOfMonth14_DayOfMonth15Utc() {
//Exact time does not matter, only that it is summer time
IWatch watch = Watch.create().setDateTimeUTC(2017, 7, 27, 15, 30);
ZonedDateTime actual = TaskUtil.getZonedDateTime("22:30", 14, IWatch.NEW_YORK, watch);
assertThat(actual).isEqualTo("2017-07-15T02:30:00Z");
assertThat(actual).isEqualTo("2017-07-14T22:30-04:00[America/New_York]");
}
@Test
public void validateTimeOfDay_CorrectTime_CorrectTime() {
String actual = TaskUtil.validateTimeOfDay("23:55");
assertThat(actual).isEqualTo("23:55");
}
@Test
public void validateTimeOfDay_IncorrectTime_Exception() {
assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() ->
TaskUtil.validateTimeOfDay("25:55")
).withMessageStartingWith("Incorrect task time: ");
}
@Test
public void validateDayOfMonth_CorrectDayOfMonth_CorrectDayOfMonth() {
int actual = TaskUtil.validateDayOfMonth(13);
assertThat(actual).isEqualTo(13);
}
@Test
    public void validateDayOfMonth_TooLow_Exception() {
assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() ->
TaskUtil.validateDayOfMonth(0)
).withMessageStartingWith("The value 0 in variable 'dayOfMonth' is too small.");
}
@Test
    public void validateDayOfMonth_TooHigh_Exception() {
assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() ->
TaskUtil.validateDayOfMonth(29)
).withMessageStartingWith("The value 29 in variable 'dayOfMonth' is too large.");
}
}
|
package us.kbase.workspace.test.controllers.handle;
import static us.kbase.common.test.controllers.ControllerCommon.findFreePort;
import static us.kbase.common.test.controllers.ControllerCommon.makeTempDirs;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.io.FileUtils;
import org.ini4j.Ini;
import org.ini4j.Profile.Section;
import us.kbase.auth.AuthToken;
import us.kbase.common.test.controllers.mongo.MongoController;
import us.kbase.common.test.controllers.shock.ShockController;
/** Q&D Utility to run Handle Service/Manager servers for the purposes of
* testing from Java.
* @author gaprice@lbl.gov
*
*/
public class HandleServiceController {
private final Process handleService;
private final int handleServicePort;
private final Path tempDir;
private final static String HANDLE_SERVICE_NAME = "handle_service";
public HandleServiceController(
final MongoController mongo,
final String shockHost,
final AuthToken shockAdminToken,
final Path rootTempDir,
final URL authURL,
final String handleAdminRole)
throws Exception {
tempDir = makeTempDirs(rootTempDir, "HandleServiceController-",
new LinkedList<String>());
handleServicePort = findFreePort();
File hsIniFile = createHandleServiceDeployCfg(mongo, shockHost, authURL,
shockAdminToken, handleAdminRole);
String lib_dir = "lib";
downloadSourceFiles(tempDir, lib_dir);
String lib_dir_path = tempDir.resolve(lib_dir).toAbsolutePath().toString();
ProcessBuilder handlepb = new ProcessBuilder("uwsgi", "--http",
":" + handleServicePort, "--wsgi-file",
"AbstractHandle/AbstractHandleServer.py", "--pythonpath", lib_dir_path)
.redirectErrorStream(true)
.redirectOutput(tempDir.resolve("handle_service.log").toFile());
Map<String, String> env = handlepb.environment();
env.put("KB_DEPLOYMENT_CONFIG", hsIniFile.getAbsolutePath().toString());
env.put("KB_SERVICE_NAME", HANDLE_SERVICE_NAME);
env.put("PYTHONPATH", lib_dir_path);
handlepb.directory(new File(lib_dir_path));
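        // Equivalent shell invocation (illustrative; <tempdir> and <port> stand in for
        // the values computed above):
        //   cd <tempdir>/lib && KB_DEPLOYMENT_CONFIG=<tempdir>/handleService.cfg \
        //     KB_SERVICE_NAME=handle_service PYTHONPATH=<tempdir>/lib \
        //     uwsgi --http :<port> --wsgi-file AbstractHandle/AbstractHandleServer.py \
        //     --pythonpath <tempdir>/lib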
handleService = handlepb.start();
Thread.sleep(1000); //let the service start up
}
private void downloadSourceFiles(Path parentDir, String subDir) throws IOException {
// download source files from github repo
Path lib_root = parentDir.resolve(subDir);
Files.createDirectories(lib_root);
Path handle_dir = lib_root.resolve("AbstractHandle");
Files.createDirectories(handle_dir);
String handle_repo_prefix = "https://raw.githubusercontent.com/kbase/handle_service2/develop/lib/AbstractHandle/";
String [] handle_impl_files = {"__init__.py", "AbstractHandleImpl.py",
"AbstractHandleServer.py", "authclient.py", "baseclient.py"};
for (String file_name : handle_impl_files) {
FileUtils.copyURLToFile(new URL(handle_repo_prefix + file_name),
handle_dir.resolve(file_name).toFile());
}
Path handle_utils_dir = handle_dir.resolve("Utils");
Files.createDirectories(handle_utils_dir);
String [] handle_util_files = {"__init__.py", "Handler.py", "MongoUtil.py",
"ShockUtil.py", "TokenCache.py"};
for (String file_name : handle_util_files) {
FileUtils.copyURLToFile(new URL(handle_repo_prefix + "Utils/" + file_name),
handle_utils_dir.resolve(file_name).toFile());
}
Path biokbase_dir = lib_root.resolve("biokbase");
Files.createDirectories(biokbase_dir);
String biokbase_repo_prefix = "https://raw.githubusercontent.com/kbase/kb_sdk/develop/lib/biokbase/";
String [] biokbase_files = {"__init__.py", "log.py"};
for (String file_name : biokbase_files) {
FileUtils.copyURLToFile(new URL(biokbase_repo_prefix + file_name),
biokbase_dir.resolve(file_name).toFile());
}
        // Patch the downloaded sources in place.
        // 1) Make biokbase/log.py import ConfigParser under both Python 2 and 3.
        replaceInFile(biokbase_dir.resolve("log.py"),
                "from ConfigParser import ConfigParser as _ConfigParser",
                "try:\n" +
                "    from ConfigParser import ConfigParser as _ConfigParser\n" +
                "except ImportError:\n" +
                "    from configparser import ConfigParser as _ConfigParser");
        // 2) Decode the request body before JSON parsing in the generated server.
        replaceInFile(handle_dir.resolve("AbstractHandleServer.py"),
                "loads\\(request_body\\)",
                "loads(request_body.decode('utf-8'))");
        // 3) Re-enable commented-out prints in MongoUtil.py to ease debugging.
        replaceInFile(handle_utils_dir.resolve("MongoUtil.py"), "#print", "print");
    }
    /** Applies a line-by-line regex replacement to a file, rewriting it in place. */
    private void replaceInFile(Path file, String regex, String replacement) throws IOException {
        final List<String> replaced;
        try (Stream<String> lines = Files.lines(file)) {
            replaced = lines.map(line -> line.replaceAll(regex, replacement))
                    .collect(Collectors.toList());
        }
        Files.write(file, replaced);
    }
private File createHandleServiceDeployCfg(
final MongoController mongo,
final String shockHost,
final URL authURL,
final AuthToken shockAdminToken,
final String handleAdminRole) throws IOException {
final File iniFile = tempDir.resolve("handleService.cfg").toFile();
if (iniFile.exists()) {
iniFile.delete();
}
final Ini ini = new Ini();
final Section hs = ini.add(HANDLE_SERVICE_NAME);
hs.add("self-url", "http://localhost:" + handleServicePort);
hs.add("service-port", "" + handleServicePort);
hs.add("service-host", "localhost");
URL authServiceURL = new URL(authURL.toString() + "/api/legacy/KBase/Sessions/Login");
hs.add("auth-service-url", authServiceURL.toString());
hs.add("auth-url", authURL.toString());
hs.add("default-shock-server", shockHost);
hs.add("admin-token", shockAdminToken.getToken().toString());
hs.add("admin-roles", handleAdminRole);
hs.add("mongo-host", "127.0.0.1");
hs.add("mongo-port", "" + mongo.getServerPort());
ini.store(iniFile);
return iniFile;
}
public int getHandleServerPort() {
return handleServicePort;
}
public Path getTempDir() {
return tempDir;
}
public void destroy(boolean deleteTempFiles) throws IOException {
if (handleService != null) {
handleService.destroy();
}
if (tempDir != null && deleteTempFiles) {
FileUtils.deleteDirectory(tempDir.toFile());
}
}
public static void main(String[] args) throws Exception {
MongoController monc = new MongoController(
"/kb/runtime/bin/mongod",
Paths.get("workspacetesttemp"), false);
ShockController sc = new ShockController(
"/kb/deployment/bin/shock-server",
"0.9.6",
Paths.get("workspacetesttemp"),
System.getProperty("test.user1"),
"localhost:" + monc.getServerPort(),
"shockdb", "foo", "foo", new URL("http://foo.com"));
HandleServiceController hsc = new HandleServiceController(
monc,
"http://localhost:" + sc.getServerPort(),
                null, // this will break the handle manager; a real token is needed
Paths.get("workspacetesttemp"),
new URL("http://foo.com"),
"KBASE_ADMIN");
System.out.println("handlesrv: " + hsc.getHandleServerPort());
System.out.println(hsc.getTempDir());
Scanner reader = new Scanner(System.in);
System.out.println("any char to shut down");
        //block until the user enters any input
reader.next();
hsc.destroy(false);
sc.destroy(false);
monc.destroy(false);
reader.close();
}
}
|
package org.jboss.as.subsystem.test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.jboss.as.controller.ModelVersion;
/**
*
* @author <a href="kabir.khan@jboss.com">Kabir Khan</a>
*/
class KnownVersions {
static final Map<String, ModelVersion> AS_CORE_MODEL_VERSION_BY_AS_VERSION;
private static final Map<String, Map<ModelVersion, ModelVersion>> KNOWN_SUBSYSTEM_VERSIONS;
static {
Map<String, Map<ModelVersion, ModelVersion>> map = new HashMap<String, Map<ModelVersion,ModelVersion>>();
//At the time of writing the main usage for this is to know if a given subsystem model version belongs to
//the 7.1.x series or above. From 7.2.x the host registration process includes which resources are ignored,
//meaning that resource transformers can fail (e.g. RejectExpressionValuesTransformer). In 7.1.x we will have
//no idea so we need to log a warning instead.
//The core model versions are 1.2.0 for AS 7.1.2 and 1.3.0 for AS 7.1.3
//7.2.x starts on core model version 1.4.0
//Keep this list in alphabetical and subsystem version order
final String CORE_MODEL_7_1_2 = "1.2.0";
final String CORE_MODEL_7_1_3 = "1.3.0";
addSubsystemVersion(map, "configadmin", "1.0.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "ejb3", "1.1.0", CORE_MODEL_7_1_2);
addSubsystemVersion(map, "infinispan", "1.3.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "jacorb", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "jgroups", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "jmx", "1.0.0", CORE_MODEL_7_1_2);
addSubsystemVersion(map, "jmx", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "jpa", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "logging", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "mail", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "messaging", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "modcluster", "1.2.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "naming", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "osgi", "1.0.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "remoting", "1.1.0", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "security", "1.1.0", CORE_MODEL_7_1_2);
addSubsystemVersion(map, "threads", "1.0.0", CORE_MODEL_7_1_2);
addSubsystemVersion(map, "transactions", "1.1.0", CORE_MODEL_7_1_2);
addSubsystemVersion(map, "transactions", "1.1.1", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "web", "1.1.0", CORE_MODEL_7_1_2);
addSubsystemVersion(map, "web", "1.1.1", CORE_MODEL_7_1_3);
addSubsystemVersion(map, "webservices", "1.1.0", CORE_MODEL_7_1_3);
KNOWN_SUBSYSTEM_VERSIONS = Collections.unmodifiableMap(map);
Map<String, ModelVersion> map2 = new HashMap<String, ModelVersion>();
map2.put("7.1.2", ModelVersion.create(1, 2, 0));
map2.put("7.1.3", ModelVersion.create(1, 3, 0));
AS_CORE_MODEL_VERSION_BY_AS_VERSION = Collections.unmodifiableMap(map2);
}
static ModelVersion getCoreModelVersionForSubsystemVersion(String subsystemName, ModelVersion subsystemVersion) {
Map<ModelVersion, ModelVersion> versionMap = KNOWN_SUBSYSTEM_VERSIONS.get(subsystemName);
if (versionMap == null) {
return null;
}
return versionMap.get(subsystemVersion);
}
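    // Example (reading the table above): subsystem "jmx" at model version 1.1.0 maps
    // to core model 1.3.0, i.e. the AS 7.1.3 series:
    //   getCoreModelVersionForSubsystemVersion("jmx", ModelVersion.fromString("1.1.0"))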
private static void addSubsystemVersion(Map<String, Map<ModelVersion, ModelVersion>> map, String subsystem, String subsystemVersion, String coreVersion) {
ModelVersion subsystemModelVersion = ModelVersion.fromString(subsystemVersion);
ModelVersion coreModelVersion = ModelVersion.fromString(coreVersion);
Map<ModelVersion, ModelVersion> versionMap = map.get(subsystem);
if (versionMap == null) {
versionMap = new HashMap<ModelVersion, ModelVersion>();
map.put(subsystem, versionMap);
}
versionMap.put(subsystemModelVersion, coreModelVersion);
}
}
|
package org.tigris.subversion;
public class SubversionException extends Exception
{
public SubversionException()
{
}
}
|
package com.bbn.kbp.events;
import com.bbn.bue.common.Finishable;
import com.bbn.bue.common.HasDocID;
import com.bbn.bue.common.Inspector;
import com.bbn.bue.common.IntIDSequence;
import com.bbn.bue.common.StringUtils;
import com.bbn.bue.common.TextGroupPackageImmutable;
import com.bbn.bue.common.evaluation.AggregateBinaryFScoresInspector;
import com.bbn.bue.common.evaluation.BinaryErrorLogger;
import com.bbn.bue.common.evaluation.BinaryFScoreBootstrapStrategy;
import com.bbn.bue.common.evaluation.BootstrapInspector;
import com.bbn.bue.common.evaluation.EquivalenceBasedProvenancedAligner;
import com.bbn.bue.common.evaluation.EvalPair;
import com.bbn.bue.common.evaluation.InspectionNode;
import com.bbn.bue.common.evaluation.InspectorTreeDSL;
import com.bbn.bue.common.evaluation.InspectorTreeNode;
import com.bbn.bue.common.evaluation.ProvenancedAlignment;
import com.bbn.bue.common.evaluation.ScoringEventObserver;
import com.bbn.bue.common.files.FileUtils;
import com.bbn.bue.common.parameters.Parameters;
import com.bbn.bue.common.symbols.Symbol;
import com.bbn.bue.common.symbols.SymbolUtils;
import com.bbn.kbp.events.ontology.EREToKBPEventOntologyMapper;
import com.bbn.kbp.events.ontology.SimpleEventOntologyMapper;
import com.bbn.kbp.events2014.CharOffsetSpan;
import com.bbn.kbp.events2014.DocumentSystemOutput2015;
import com.bbn.kbp.events2014.KBPRealis;
import com.bbn.kbp.events2014.Response;
import com.bbn.kbp.events2014.ResponseLinking;
import com.bbn.kbp.events2014.ResponseSet;
import com.bbn.kbp.events2014.SystemOutputLayout;
import com.bbn.kbp.events2014.TACKBPEALException;
import com.bbn.kbp.events2014.io.SystemOutputStore;
import com.bbn.kbp.events2014.transformers.QuoteFilter;
import com.bbn.kbp.linking.ExplicitFMeasureInfo;
import com.bbn.kbp.linking.LinkF1;
import com.bbn.nlp.corenlp.CoreNLPDocument;
import com.bbn.nlp.corenlp.CoreNLPParseNode;
import com.bbn.nlp.corenlp.CoreNLPXMLLoader;
import com.bbn.nlp.corpora.ere.EREArgument;
import com.bbn.nlp.corpora.ere.EREDocument;
import com.bbn.nlp.corpora.ere.EREEvent;
import com.bbn.nlp.corpora.ere.EREEventMention;
import com.bbn.nlp.corpora.ere.ERELoader;
import com.bbn.nlp.corpora.ere.ERESpan;
import com.bbn.nlp.corpora.ere.LinkRealis;
import com.bbn.nlp.events.HasEventType;
import com.bbn.nlp.parsing.HeadFinders;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multiset;
import com.google.common.collect.Sets;
import com.google.common.io.CharSink;
import com.google.common.io.Files;
import com.google.common.reflect.TypeToken;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Provides;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.MapBinder;
import org.immutables.func.Functional;
import org.immutables.value.Value;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import javax.annotation.Nullable;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.inspect;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformBoth;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformLeft;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformRight;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformed;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Predicates.compose;
import static com.google.common.base.Predicates.equalTo;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.getFirst;
import static com.google.common.collect.Iterables.transform;
/**
* Scores KBP 2016 event argument output against an ERE gold standard. Scoring is in terms of
* (Event Type, Event Role, Entity) tuples. This program is an experimental rough draft and has a
* number of limitations: <ul> <li>We only handle arguments which are entity mentions; others are
* ignored according to the ERE structure on the gold side and by filtering out a (currently
 * hardcoded) set of argument roles on the system side.</li> <li>We map system responses to entities
 * by looking for an entity which has a mention which shares the character offsets of the base
 * filler exactly either by itself or by its nominal head (given in ERE). In the future we may
 * implement more lenient alignment strategies.</li> <li>Currently system responses which fail to
* align to any entity at all are discarded rather than penalized.</li> </ul>
*/
public final class ScoreKBPAgainstERE {
private static final Logger log = LoggerFactory.getLogger(ScoreKBPAgainstERE.class);
private final EREToKBPEventOntologyMapper ontologyMapper;
private ScoreKBPAgainstERE() {
throw new UnsupportedOperationException();
}
// left over from pre-Guice version
private final Parameters params;
private final ImmutableMap<String, ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>>
scoringEventObservers;
  // we exclude text in quoted regions from scoring
private final QuoteFilter quoteFilter;
@Inject
ScoreKBPAgainstERE(
final Parameters params,
final Map<String, ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers,
final EREToKBPEventOntologyMapper ontologyMapper,
final QuoteFilter quoteFilter) {
this.params = checkNotNull(params);
// we use a sorted map because the binding of plugins may be non-deterministic
this.scoringEventObservers = ImmutableSortedMap.copyOf(scoringEventObservers);
this.ontologyMapper = checkNotNull(ontologyMapper);
this.quoteFilter = checkNotNull(quoteFilter);
}
public void go() throws IOException {
log.info(params.dump());
final ImmutableSet<Symbol> docIDsToScore = ImmutableSet.copyOf(
FileUtils.loadSymbolList(params.getExistingFile("docIDsToScore")));
final ImmutableMap<Symbol, File> goldDocIDToFileMap = FileUtils.loadSymbolToFileMap(
Files.asCharSource(params.getExistingFile("goldDocIDToFileMap"), Charsets.UTF_8));
final File outputDir = params.getCreatableDirectory("ereScoringOutput");
final SystemOutputLayout outputLayout = SystemOutputLayout.ParamParser.fromParamVal(
params.getString("outputLayout"));
final SystemOutputStore outputStore =
outputLayout.open(params.getExistingDirectory("systemOutput"));
final CoreNLPXMLLoader coreNLPXMLLoader =
CoreNLPXMLLoader.builder(HeadFinders.<CoreNLPParseNode>getEnglishPTBHeadFinder()).build();
final boolean relaxUsingCORENLP = params.getBoolean("relaxUsingCoreNLP");
final ImmutableMap<Symbol, File> coreNLPProcessedRawDocs;
if (relaxUsingCORENLP) {
log.info("Relaxing scoring using CoreNLP");
coreNLPProcessedRawDocs = FileUtils.loadSymbolToFileMap(
Files.asCharSource(params.getExistingFile("coreNLPDocIDMap"), Charsets.UTF_8));
} else {
coreNLPProcessedRawDocs = ImmutableMap.of();
}
log.info("Scoring over {} documents", docIDsToScore.size());
// on the gold side we take an ERE document as input
final TypeToken<EREDocument> inputIsEREDoc = new TypeToken<EREDocument>() {
};
// on the test side we take an AnswerKey, but we bundle it with the gold ERE document
// for use in alignment later
final TypeToken<EREDocAndResponses> inputIsEREDocAndAnswerKey =
new TypeToken<EREDocAndResponses>() {
};
final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>>
input = InspectorTreeDSL.pairedInput(inputIsEREDoc, inputIsEREDocAndAnswerKey);
// these will extract the scoring tuples from the KBP system input and ERE docs, respectively
// we create these here because we will call their .finish method()s
// at the end to record some statistics about alignment failures,
// so we need to keep references to them
final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor =
new ResponsesAndLinkingFromKBPExtractor(coreNLPProcessedRawDocs,
coreNLPXMLLoader, relaxUsingCORENLP, ontologyMapper,
Files.asCharSink(new File(outputDir, "alignmentFailures.txt"), Charsets.UTF_8));
final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor =
new ResponsesAndLinkingFromEREExtractor(EREToKBPEventOntologyMapper.create2016Mapping(),
quoteFilter);
// this sets it up so that everything fed to input will be scored in various ways
setupScoring(input, responsesAndLinkingFromKBPExtractor, responsesAndLinkingFromEREExtractor,
scoringEventObservers.values(), outputDir);
// we want globally unique IDs here
final ERELoader loader = ERELoader.builder().prefixDocIDToAllIDs(true).build();
for (final Symbol docID : docIDsToScore) {
final File ereFileName = goldDocIDToFileMap.get(docID);
if (ereFileName == null) {
throw new RuntimeException("Missing key file for " + docID);
}
final EREDocument ereDoc = loader.loadFrom(ereFileName);
      // the LDC provides certain ERE documents with "-kbp" in the name. The -kbp is used by them
      // internally for some form of tracking but is not exposed externally, so we remove it.
if (!ereDoc.getDocId().replace("-kbp", "").equals(docID.asString().replace(".kbp", ""))) {
log.warn("Fetched document ID {} does not equal stored {}", ereDoc.getDocId(), docID);
}
final Iterable<Response>
responses = filter(outputStore.read(docID).arguments().responses(), bannedRolesFilter);
      final ResponseLinking linking =
          ((DocumentSystemOutput2015) outputStore.read(docID)).linking()
              .copyWithFilteredResponses(in(ImmutableSet.copyOf(responses)));
// feed this ERE doc/ KBP output pair to the scoring network
input.inspect(EvalPair.of(ereDoc, new EREDocAndResponses(ereDoc, responses, linking)));
}
// trigger the scoring network to write its summary files
input.finish();
// log alignment failures
responsesAndLinkingFromKBPExtractor.finish();
responsesAndLinkingFromEREExtractor.finish();
}
private static final ImmutableSet<Symbol> BANNED_ROLES =
SymbolUtils.setFrom("Time", "Crime", "Position",
"Fine", "Sentence");
private static final ImmutableSet<Symbol> ROLES_2016 = SymbolUtils
.setFrom("Agent", "Artifact", "Attacker", "Audience", "Beneficiary", "Crime", "Destination",
"Entity", "Giver", "Instrument", "Money", "Origin", "Person", "Place", "Position",
"Recipient", "Target", "Thing", "Time", "Victim");
private static final ImmutableSet<Symbol> ALLOWED_ROLES_2016 =
Sets.difference(ROLES_2016, BANNED_ROLES).immutableCopy();
private static final ImmutableSet<Symbol> linkableRealis = SymbolUtils.setFrom("Other", "Actual");
private static final Predicate<Response> bannedRolesFilter = new Predicate<Response>() {
@Override
public boolean apply(@Nullable final Response response) {
return ALLOWED_ROLES_2016.contains(response.role());
}
};
private static Function<EvalPair<? extends Iterable<? extends DocLevelEventArg>, ? extends Iterable<? extends DocLevelEventArg>>, ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>>
EXACT_MATCH_ALIGNER = EquivalenceBasedProvenancedAligner
.forEquivalenceFunction(Functions.<DocLevelEventArg>identity())
.asFunction();
// this sets up a scoring network which is executed on every input
private static void setupScoring(
final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>> input,
final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor,
final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor,
Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers,
final File outputDir) {
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
inputAsResponsesAndLinking =
transformRight(transformLeft(input, responsesAndLinkingFromEREExtractor),
responsesAndLinkingFromKBPExtractor);
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredFor2016 =
InspectorTreeDSL.transformBoth(
inputAsResponsesAndLinking,
ResponsesAndLinking.filterFunction(ARG_TYPE_IS_ALLOWED_FOR_2016));
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredForLifeDie =
transformed(filteredFor2016, RestrictLifeInjureToLifeDieEvents.INSTANCE);
// set up for event argument scoring in 2015 style
eventArgumentScoringSetup(filteredForLifeDie, scoringEventObservers, outputDir);
// set up for linking scoring in 2015 style
linkingScoringSetup(filteredForLifeDie, outputDir);
}
private static void eventArgumentScoringSetup(
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
inputAsResponsesAndLinking,
Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers,
final File outputDir) {
final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>>
inputAsSetsOfScoringTuples =
transformBoth(inputAsResponsesAndLinking, ResponsesAndLinkingFunctions.args());
final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>>
inputAsSetsOfRealisNeutralizedTuples =
transformBoth(inputAsResponsesAndLinking, NeutralizeRealis.INSTANCE);
argScoringSetup(inputAsSetsOfScoringTuples,
ImmutableList.<ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>>of(),
new File(outputDir, "withRealis"));
// we apply scoring observers only to the realis neutralized version
argScoringSetup(inputAsSetsOfRealisNeutralizedTuples,
scoringEventObservers, new File(outputDir, "noRealis"));
}
private static void argScoringSetup(
final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>> inputAsSetsOfScoringTuples,
final Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers,
final File outputDir) {
// require exact match between the system arguments and the key responses
final InspectorTreeNode<ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>>
alignmentNode = transformed(inputAsSetsOfScoringTuples, EXACT_MATCH_ALIGNER);
// overall F score
final AggregateBinaryFScoresInspector<DocLevelEventArg, DocLevelEventArg>
scoreAndWriteOverallFScore =
AggregateBinaryFScoresInspector.createWithScoringObservers("aggregateF.txt", outputDir,
scoringEventObservers);
inspect(alignmentNode).with(scoreAndWriteOverallFScore);
// "arg" score with weighted TP/FP
final ArgumentScoringInspector argScorer =
ArgumentScoringInspector.createOutputtingTo(outputDir);
inspect(alignmentNode).with(argScorer);
// log errors
final BinaryErrorLogger<HasDocID, HasDocID> logWrongAnswers = BinaryErrorLogger
.forStringifierAndOutputDir(Functions.<HasDocID>toStringFunction(), outputDir);
inspect(alignmentNode).with(logWrongAnswers);
final BinaryFScoreBootstrapStrategy perEventBootstrapStrategy =
BinaryFScoreBootstrapStrategy.createBrokenDownBy("EventType",
HasEventType.ExtractFunction.INSTANCE, outputDir);
final BootstrapInspector breakdownScoresByEventTypeWithBootstrapping =
BootstrapInspector.forStrategy(perEventBootstrapStrategy, 1000, new Random(0));
inspect(alignmentNode).with(breakdownScoresByEventTypeWithBootstrapping);
}
private static void linkingScoringSetup(
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
inputAsResponsesAndLinking, final File outputDir) {
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredForRealis =
transformBoth(inputAsResponsesAndLinking,
ResponsesAndLinking.filterFunction(REALIS_ALLOWED_FOR_LINKING));
// withRealis
{
final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>>
linkingNode = transformBoth(filteredForRealis, ResponsesAndLinkingFunctions.linking());
// we throw out any system responses not found in the key before scoring linking
final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>>
filteredNode = transformed(linkingNode, RestrictToLinking.INSTANCE);
final LinkingInspector linkingInspector =
LinkingInspector.createOutputtingTo(new File(outputDir, "withRealis"));
inspect(filteredNode).with(linkingInspector);
}
// without realis
{
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
neutralizedRealis =
transformBoth(filteredForRealis, transformArgs(LinkingRealisNeutralizer.INSTANCE));
final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>>
linkingNode = transformBoth(neutralizedRealis, ResponsesAndLinkingFunctions.linking());
// we throw out any system responses not found in the key before scoring linking, after neutralizing realis
final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>>
filteredNode = transformed(linkingNode, RestrictToLinking.INSTANCE);
final LinkingInspector linkingInspector =
LinkingInspector.createOutputtingTo(new File(outputDir, "noRealis"));
inspect(filteredNode).with(linkingInspector);
}
}
private static final Predicate<_DocLevelEventArg> ARG_TYPE_IS_ALLOWED_FOR_2016 =
compose(in(ALLOWED_ROLES_2016), DocLevelEventArgFunctions.eventArgumentType());
private static final Predicate<_DocLevelEventArg> REALIS_ALLOWED_FOR_LINKING =
compose(in(linkableRealis), DocLevelEventArgFunctions.realis());
private enum RestrictLifeInjureToLifeDieEvents implements
Function<EvalPair<ResponsesAndLinking, ResponsesAndLinking>, EvalPair<ResponsesAndLinking, ResponsesAndLinking>> {
INSTANCE;
final Symbol LifeDie = Symbol.from("Life.Die");
@Override
public EvalPair<ResponsesAndLinking, ResponsesAndLinking> apply(
final EvalPair<ResponsesAndLinking, ResponsesAndLinking> input) {
// find all Life.Die event arguments
final ImmutableSet<DocLevelEventArg> keyArgs = ImmutableSet.copyOf(filter(input.key().args(),
Predicates.compose(equalTo(LifeDie), DocLevelEventArgFunctions.eventType())));
// get all possible candidate Life.Injure event arguments that could be derived from these Life.Die arguments
final ImmutableSet<DocLevelEventArg> argsToIgnore =
ImmutableSet.copyOf(transform(keyArgs, LifeDieToLifeInjure.INSTANCE));
// filter both the ERE and the system input to ignore these derived arguments.
return EvalPair.of(input.key().filter(not(in(argsToIgnore))),
input.test().filter(not(in(argsToIgnore))));
}
}
private enum LifeDieToLifeInjure implements Function<DocLevelEventArg, DocLevelEventArg> {
INSTANCE {
final Symbol LifeInjure = Symbol.from("Life.Injure");
@Nullable
@Override
public DocLevelEventArg apply(@Nullable final DocLevelEventArg docLevelEventArg) {
checkNotNull(docLevelEventArg);
checkArgument(docLevelEventArg.eventType()
.equalTo(RestrictLifeInjureToLifeDieEvents.INSTANCE.LifeDie));
return docLevelEventArg.withEventType(LifeInjure);
}
}
}
private static Function<? super ResponsesAndLinking, ResponsesAndLinking> transformArgs(
final Function<? super DocLevelEventArg, DocLevelEventArg> transformer) {
return new Function<ResponsesAndLinking, ResponsesAndLinking>() {
@Override
public ResponsesAndLinking apply(final ResponsesAndLinking responsesAndLinking) {
return responsesAndLinking.transform(transformer);
}
};
}
private enum LinkingRealisNeutralizer
implements Function<DocLevelEventArg, DocLevelEventArg> {
INSTANCE;
static final Symbol NEUTRALIZED = Symbol.from("neutralized");
@Override
public DocLevelEventArg apply(final DocLevelEventArg docLevelEventArg) {
return docLevelEventArg.withRealis(NEUTRALIZED);
}
}
private enum NeutralizeRealis
implements Function<ResponsesAndLinking, ImmutableSet<DocLevelEventArg>> {
INSTANCE;
static final Symbol NEUTRALIZED = Symbol.from("neutralized");
@Override
public ImmutableSet<DocLevelEventArg> apply(final ResponsesAndLinking input) {
final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder();
for (final DocLevelEventArg arg : input.args()) {
ret.add(arg.withRealis(NEUTRALIZED));
}
return ret.build();
}
}
private enum RestrictToLinking implements
Function<EvalPair<DocLevelArgLinking, DocLevelArgLinking>, EvalPair<DocLevelArgLinking, DocLevelArgLinking>> {
INSTANCE;
@Override
public EvalPair<DocLevelArgLinking, DocLevelArgLinking> apply(
final EvalPair<DocLevelArgLinking, DocLevelArgLinking> input) {
final DocLevelArgLinking newTest =
input.test().filterArguments(in(input.key().allArguments()));
return EvalPair.of(input.key(), newTest);
}
}
private static final class ArgumentScoringInspector implements
Inspector<ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>> {
// beta as defined by the EAL task guidelines.
private static final double beta = 0.25;
private final File outputDir;
private double scoreAggregator = 0.0;
private int aggregateTPs = 0;
private int aggregateFPs = 0;
private int aggregateFNs = 0;
final ImmutableMap.Builder<Symbol, Integer> truePositives = ImmutableMap.builder();
final ImmutableMap.Builder<Symbol, Integer> falsePositives = ImmutableMap.builder();
final ImmutableMap.Builder<Symbol, Integer> falseNegatives = ImmutableMap.builder();
final ImmutableMap.Builder<Symbol, Double> scores = ImmutableMap.builder();
private ArgumentScoringInspector(final File outputDir) {
this.outputDir = outputDir;
}
public static ArgumentScoringInspector createOutputtingTo(final File outputDir) {
return new ArgumentScoringInspector(outputDir);
}
@Override
public void inspect(
final ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg> evalPair) {
// left is ERE, right is system output.
final Iterable<DocLevelEventArg> args =
concat(evalPair.allLeftItems(), evalPair.allRightItems());
if (Iterables.size(args) == 0) {
log.warn("No output for eval pair {}", evalPair);
return;
}
final Symbol docid = checkNotNull(getFirst(args, null)).docID();
log.info("Gathering arg scores for {}", docid);
int docTPs = evalPair.leftAligned().size();
checkArgument(evalPair.leftAligned().equals(evalPair.rightAligned()));
this.aggregateTPs += docTPs;
int docFPs = evalPair.rightUnaligned().size();
this.aggregateFPs += docFPs;
// scores are clipped at 0.
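      // e.g. docTPs = 10 and docFPs = 8 give max(10 - 0.25 * 8, 0) = 8.0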
double score = Math.max(docTPs - beta * docFPs, 0);
int docFNs = evalPair.leftUnaligned().size();
aggregateFNs += docFNs;
scoreAggregator += score;
truePositives.put(docid, docTPs);
falsePositives.put(docid, docFPs);
falseNegatives.put(docid, docFNs);
scores.put(docid, score);
}
@Override
public void finish() throws IOException {
final String scorePattern = "TP: %d, FP: %d, FN: %d, Score: %f\n";
// see guidelines section 7.3.1.1.4 for aggregating rules:
// sum over per document contributions, divide by total number of TRFRs in the answer key
      // Math.max avoids division-by-zero errors.
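      // e.g. scoreAggregator = 8.0 with 10 aggregate TPs and 2 FNs gives
      // 100 * 8.0 / 12 = 66.67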
final double overAllArgScore =
100 * scoreAggregator / Math.max(0.0 + aggregateFNs + aggregateTPs, 1.0);
final String scoreString =
String.format(scorePattern, aggregateTPs, aggregateFPs, aggregateFNs, overAllArgScore);
Files.asCharSink(new File(outputDir, "argScores.txt"), Charsets.UTF_8).write(scoreString);
final ImmutableMap<Symbol, Double> scores = this.scores.build();
final ImmutableMap<Symbol, Integer> falsePositives = this.falsePositives.build();
final ImmutableMap<Symbol, Integer> truePositives = this.truePositives.build();
final ImmutableMap<Symbol, Integer> falseNegatives = this.falseNegatives.build();
for (final Symbol docid : scores.keySet()) {
final File docDir = new File(outputDir, docid.asString());
docDir.mkdirs();
final File docScore = new File(docDir, "argScores.txt");
// avoid dividing by zero
final double normalizer = Math.max(truePositives.get(docid) + falseNegatives.get(docid), 1);
// see guidelines referenced above
// pretends that the corpus is a single document
Files.asCharSink(docScore, Charsets.UTF_8).write(String
.format(scorePattern, truePositives.get(docid), falsePositives.get(docid),
falseNegatives.get(docid), 100 * scores.get(docid) / normalizer));
}
}
}
private static final class LinkingInspector implements
Inspector<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> {
private final File outputDir;
private final ImmutableMap.Builder<Symbol, ExplicitFMeasureInfo> countsB =
ImmutableMap.builder();
private final ImmutableMap.Builder<Symbol, Integer> predictedCountsB = ImmutableMap.builder();
private final ImmutableMap.Builder<Symbol, Integer> actualCountsB = ImmutableMap.builder();
private final ImmutableMap.Builder<Symbol, Integer> linkingArgsCountB = ImmutableMap.builder();
private LinkingInspector(final File outputDir) {
this.outputDir = outputDir;
}
public static LinkingInspector createOutputtingTo(final File outputFile) {
return new LinkingInspector(outputFile);
}
@Override
public void inspect(
final EvalPair<DocLevelArgLinking, DocLevelArgLinking> item) {
checkArgument(ImmutableSet.copyOf(concat(item.key())).containsAll(
ImmutableSet.copyOf(concat(item.test()))), "Must contain only answers in test set!");
if (!item.key().docID().equalTo(item.test().docID())) {
log.warn("DocIDs do not match: {} vs {}", item.key().docID(), item.test().docID());
}
final ExplicitFMeasureInfo counts = LinkF1.create().score(item.test(), item.key());
final ImmutableSet<DocLevelEventArg> args = ImmutableSet.copyOf(concat(
transform(concat(item.test().eventFrames(), item.key().eventFrames()),
ScoringEventFrameFunctions.arguments())));
final Symbol docid = item.key().docID();
predictedCountsB.put(docid, ImmutableSet.copyOf(concat(item.test().eventFrames())).size());
actualCountsB.put(docid, ImmutableSet.copyOf(concat(item.key().eventFrames())).size());
countsB.put(docid, counts);
linkingArgsCountB.put(docid, args.size());
}
@Override
public void finish() throws IOException {
// copies logic from com.bbn.kbp.events2014.scorer.bin.AggregateResultWriter.computeLinkScores()
final ImmutableMap<Symbol, ExplicitFMeasureInfo> counts = countsB.build();
final ImmutableMap<Symbol, Integer> predictedCounts = predictedCountsB.build();
final ImmutableMap<Symbol, Integer> actualCounts = actualCountsB.build();
final ImmutableMap<Symbol, Integer> linkingArgsCounts = linkingArgsCountB.build();
double precision = 0;
double recall = 0;
double f1 = 0;
double linkNormalizerSum = 0;
checkNotNull(counts, "Inspect must be called before Finish!");
for (final Symbol docid : counts.keySet()) {
final File docOutput = new File(outputDir, docid.asString());
docOutput.mkdirs();
final PrintWriter outputWriter = new PrintWriter(new File(docOutput, "linkingF.txt"));
outputWriter.println(counts.get(docid).toString());
outputWriter.close();
precision += counts.get(docid).precision() * predictedCounts.get(docid);
recall += counts.get(docid).recall() * actualCounts.get(docid);
f1 += counts.get(docid).f1() * actualCounts.get(docid);
linkNormalizerSum += linkingArgsCounts.get(docid);
}
// the normalizer sum can't actually be negative here, but this minimizes divergence with the source logic.
double aggregateLinkScore =
(linkNormalizerSum > 0.0) ? f1 / linkNormalizerSum : 0.0;
double aggregateLinkPrecision =
(linkNormalizerSum > 0.0) ? precision / linkNormalizerSum : 0.0;
double aggregateLinkRecall =
(linkNormalizerSum > 0.0) ? recall / linkNormalizerSum : 0.0;
final ExplicitFMeasureInfo aggregate =
new ExplicitFMeasureInfo(aggregateLinkPrecision, aggregateLinkRecall, aggregateLinkScore);
final PrintWriter outputWriter = new PrintWriter(new File(outputDir, "linkingF.txt"));
outputWriter.println(aggregate);
outputWriter.close();
}
}
private enum ERERealisEnum {
generic,
other,
actual,
}
private enum ArgumentRealis {
Generic,
Actual,
Other
}
private static final class ResponsesAndLinkingFromEREExtractor
implements Function<EREDocument, ResponsesAndLinking>, Finishable {
// for tracking things from the answer key discarded due to not being entity mentions
private final Multiset<String> allGoldArgs = HashMultiset.create();
private final Multiset<String> discarded = HashMultiset.create();
private final Set<Symbol> unknownEventTypes = Sets.newHashSet();
private final Set<Symbol> unknownEventSubtypes = Sets.newHashSet();
private final Set<Symbol> unknownRoles = Sets.newHashSet();
private final SimpleEventOntologyMapper mapper;
private final QuoteFilter quoteFilter;
private ResponsesAndLinkingFromEREExtractor(final SimpleEventOntologyMapper mapper,
final QuoteFilter quoteFilter) {
this.mapper = checkNotNull(mapper);
this.quoteFilter = checkNotNull(quoteFilter);
}
private boolean inQuotedRegion(String docId, ERESpan span) {
// the kbp replacement is a hack to handle dry run docids having additional tracking information on them sometimes.
return quoteFilter.isInQuote(Symbol.from(docId.replaceAll("-kbp", "")),
CharOffsetSpan.of(span.asCharOffsets()));
}
@Override
public ResponsesAndLinking apply(final EREDocument doc) {
final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder();
// every event mention argument within a hopper is linked
final DocLevelArgLinking.Builder linking = DocLevelArgLinking.builder()
.docID(Symbol.from(doc.getDocId()));
for (final EREEvent ereEvent : doc.getEvents()) {
final ScoringEventFrame.Builder eventFrame = ScoringEventFrame.builder();
boolean addedArg = false;
for (final EREEventMention ereEventMention : ereEvent.getEventMentions()) {
// events from quoted regions are invalid
if (!inQuotedRegion(doc.getDocId(), ereEventMention.getTrigger())) {
for (final EREArgument ereArgument : ereEventMention.getArguments()) {
if (!inQuotedRegion(doc.getDocId(), ereArgument.getExtent())) {
// arguments from quoted regions are invalid
final Symbol ereEventMentionType = Symbol.from(ereEventMention.getType());
final Symbol ereEventMentionSubtype = Symbol.from(ereEventMention.getSubtype());
final Symbol ereArgumentRole = Symbol.from(ereArgument.getRole());
final ArgumentRealis argumentRealis =
getRealis(ereEventMention.getRealis(), ereArgument.getRealis().get());
boolean skip = false;
if (!mapper.eventType(ereEventMentionType).isPresent()) {
unknownEventTypes.add(ereEventMentionType);
skip = true;
}
if (!mapper.eventRole(ereArgumentRole).isPresent()) {
unknownRoles.add(ereArgumentRole);
skip = true;
}
if (!mapper.eventSubtype(ereEventMentionSubtype).isPresent()) {
unknownEventSubtypes.add(ereEventMentionSubtype);
skip = true;
}
if (skip) {
continue;
}
// type.subtype is Response format
final String typeRoleKey = mapper.eventType(ereEventMentionType).get() +
"." + mapper.eventSubtype(ereEventMentionSubtype).get() +
"/" + mapper.eventRole(ereArgumentRole).get();
allGoldArgs.add(typeRoleKey);
final DocLevelEventArg arg =
DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId()))
.eventType(Symbol.from(mapper.eventType(ereEventMentionType).get() + "." +
mapper.eventSubtype(ereEventMentionSubtype).get()))
.eventArgumentType(mapper.eventRole(ereArgumentRole).get())
.corefID(ScoringUtils.extractScoringEntity(ereArgument, doc).globalID())
.realis(Symbol.from(argumentRealis.name())).build();
ret.add(arg);
// ban generic responses from ERE linking.
if (!arg.realis().asString().equalsIgnoreCase(ERERealisEnum.generic.name())) {
eventFrame.addArguments(arg);
addedArg = true;
} else {
log.debug("Dropping ERE arg {} from linking in {} due to generic realis", arg,
ereEventMention);
}
} else {
log.info("Ignoring ERE event mention argument {} as within a quoted region",
ereArgument);
}
}
} else {
log.info("Ignoring ERE event mention {} as within a quoted region", ereEventMention);
}
if (addedArg) {
linking.addEventFrames(eventFrame.build());
}
}
}
return ResponsesAndLinking.of(ret.build(), linking.build());
}
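    // Realis resolution summary (mirrors the branches in getRealis below):
    //   ERE realis "generic"                 -> Generic (overrides everything)
    //   link REALIS + ERE realis "actual"    -> Actual
    //   link REALIS + ERE realis "other"     -> Other
    //   link IRREALIS (non-generic ERE)      -> Other (Actual is downgraded)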
private ArgumentRealis getRealis(final String ERERealis, final LinkRealis linkRealis) {
// generic event mention realis overrides everything
if (ERERealis.equals(ERERealisEnum.generic.name())) {
return ArgumentRealis.Generic;
} else {
// if the argument is realis
if (linkRealis.equals(LinkRealis.REALIS)) {
if (ERERealis.equals(ERERealisEnum.other.name())) {
return ArgumentRealis.Other;
} else if (ERERealis.equals(ERERealisEnum.actual.name())) {
return ArgumentRealis.Actual;
} else {
          throw new RuntimeException(
              "Unknown ERERealis of type " + ERERealis);
}
} else {
// if it's irrealis, override Actual with Other, Other is preserved. Generic is handled above.
return ArgumentRealis.Other;
}
}
}
@Override
public void finish() throws IOException {
log.info(
"Of {} gold event arguments, {} were discarded as non-entities",
allGoldArgs.size(), discarded.size());
for (final String errKey : discarded.elementSet()) {
if (discarded.count(errKey) > 0) {
log.info("Of {} gold {} arguments, {} discarded ",
+allGoldArgs.count(errKey), errKey, discarded.count(errKey));
}
}
if (!unknownEventTypes.isEmpty()) {
log.info("The following ERE event types were ignored as outside the ontology: {}",
SymbolUtils.byStringOrdering().immutableSortedCopy(unknownEventTypes));
}
if (!unknownEventSubtypes.isEmpty()) {
log.info("The following ERE event subtypes were ignored as outside the ontology: {}",
SymbolUtils.byStringOrdering().immutableSortedCopy(unknownEventSubtypes));
}
if (!unknownRoles.isEmpty()) {
log.info("The following ERE event argument roles were ignored as outside the ontology: {}",
SymbolUtils.byStringOrdering().immutableSortedCopy(unknownRoles));
}
}
}
private static final class ResponsesAndLinkingFromKBPExtractor
implements Function<EREDocAndResponses, ResponsesAndLinking>,
Finishable {
// each system item which fails to align to any reference item gets put in its own
// coreference class, numbered using this sequence
private IntIDSequence alignmentFailureIDs = IntIDSequence.startingFrom(0);
private ImmutableSetMultimap.Builder<String, String> mentionAlignmentFailuresB =
ImmutableSetMultimap.builder();
private Multiset<String> numResponses = HashMultiset.create();
private final ImmutableMap<Symbol, File> ereMapping;
private final CoreNLPXMLLoader coreNLPXMLLoader;
private final boolean relaxUsingCORENLP;
private final EREToKBPEventOntologyMapper ontologyMapper;
private final CharSink alignmentFailuresSink;
public ResponsesAndLinkingFromKBPExtractor(final Map<Symbol, File> ereMapping,
final CoreNLPXMLLoader coreNLPXMLLoader, final boolean relaxUsingCORENLP,
final EREToKBPEventOntologyMapper ontologyMapper,
final CharSink alignmentFailuresSink) {
this.ereMapping = ImmutableMap.copyOf(ereMapping);
this.coreNLPXMLLoader = coreNLPXMLLoader;
this.relaxUsingCORENLP = relaxUsingCORENLP;
this.ontologyMapper = checkNotNull(ontologyMapper);
this.alignmentFailuresSink = checkNotNull(alignmentFailuresSink);
}
public ResponsesAndLinking apply(final EREDocAndResponses input) {
final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder();
final Iterable<Response> responses = input.responses();
final EREDocument doc = input.ereDoc();
// Work around LDC document ID inconsistency; -kbp is used internally by the LDC as a form of
// document tracking. Externally the difference does not matter so we just normalize the ID
final Symbol ereID = Symbol.from(doc.getDocId().replace("-kbp", ""));
final Optional<CoreNLPDocument> coreNLPDoc;
final EREAligner ereAligner;
try {
coreNLPDoc = Optional.fromNullable(ereMapping.get(ereID)).isPresent() ? Optional
.of(coreNLPXMLLoader.loadFrom(ereMapping.get(ereID)))
: Optional.<CoreNLPDocument>absent();
checkState(coreNLPDoc.isPresent() || !relaxUsingCORENLP, "Must have CoreNLP document "
+ "if using Core NLP relaxation");
ereAligner = EREAligner.create(doc, coreNLPDoc, ontologyMapper);
} catch (IOException e) {
throw new RuntimeException(e);
}
final ImmutableMap.Builder<Response, DocLevelEventArg> responseToDocLevelArg =
ImmutableMap.builder();
for (final Response response : responses) {
final DocLevelEventArg res = resolveToERE(doc, ereAligner, response);
ret.add(res);
responseToDocLevelArg.put(response, res);
}
for (final Response response : input.linking().allResponses()) {
if (response.realis().equals(KBPRealis.Generic)) {
throw new TACKBPEALException("Generic Arguments are not allowed in linking");
}
}
return fromResponses(ImmutableSet.copyOf(input.responses()),
responseToDocLevelArg.build(), input.linking());
}
private DocLevelEventArg resolveToERE(final EREDocument doc, final EREAligner ereAligner,
final Response response) {
numResponses.add(errKey(response));
final Symbol realis = Symbol.from(response.realis().name());
final Optional<ScoringCorefID> alignedCorefIDOpt = ereAligner.argumentForResponse(response);
if (!alignedCorefIDOpt.isPresent()) {
log.info("Alignment failed for {}", response);
mentionAlignmentFailuresB.put(errKey(response), response.toString());
} else if (alignedCorefIDOpt.get().scoringEntityType()
.equals(ScoringEntityType.InsufficientEntityLevel)) {
log.info("Insufficient entity level for {}", response);
}
// this increments the alignment failure ID regardless of success or failure, but
// we don't care
final ScoringCorefID alignedCorefID = alignedCorefIDOpt.or(
// in case of alignment failure, we make a pseudo-entity from the CAS offsets
// it will always be wrong, but will be consistent for the same extent appearing in
// different event roles
ScoringCorefID.of(ScoringEntityType.AlignmentFailure,
response.canonicalArgument().charOffsetSpan().asCharOffsetRange().toString()));
return DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId()))
.eventType(response.type()).eventArgumentType(response.role())
.corefID(alignedCorefID.globalID()).realis(realis).build();
}
ResponsesAndLinking fromResponses(final ImmutableSet<Response> originalResponses,
final ImmutableMap<Response, DocLevelEventArg> responseToDocLevelEventArg,
final ResponseLinking responseLinking) {
final DocLevelArgLinking.Builder linkingBuilder = DocLevelArgLinking.builder()
.docID(responseLinking.docID());
for (final ResponseSet rs : responseLinking.responseSets()) {
final ScoringEventFrame.Builder eventFrameBuilder = ScoringEventFrame.builder();
boolean addedArg = false;
for (final Response response : rs) {
if (responseToDocLevelEventArg.containsKey(response)) {
eventFrameBuilder.addArguments(responseToDocLevelEventArg.get(response));
addedArg = true;
}
}
if (addedArg) {
linkingBuilder.addEventFrames(eventFrameBuilder.build());
}
}
return ResponsesAndLinking.of(responseToDocLevelEventArg.values(), linkingBuilder.build());
}
public String errKey(Response r) {
return r.type() + "/" + r.role();
}
public void finish() throws IOException {
final ImmutableSetMultimap<String, String> mentionAlignmentFailures =
mentionAlignmentFailuresB.build();
log.info(
"Of {} system responses, got {} mention alignment failures",
numResponses.size(), mentionAlignmentFailures.size());
final StringBuilder msg = new StringBuilder();
for (final String errKey : numResponses.elementSet()) {
final ImmutableSet<String> failuresForKey = mentionAlignmentFailures.get(errKey);
      if (!failuresForKey.isEmpty()) {
        msg.append("Of ").append(numResponses.count(errKey)).append(' ').append(errKey)
            .append(" responses, ").append(failuresForKey.size())
            .append(" mention alignment failures:\n")
            .append(StringUtils.unixNewlineJoiner().join(failuresForKey)).append("\n");
}
}
alignmentFailuresSink.write(msg.toString());
}
}
// code for running as a standalone executable
public static void main(String[] argv) {
// we wrap the main method in this way to
// ensure a non-zero return value on failure
try {
trueMain(argv);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
public static void trueMain(String[] argv) throws IOException {
final Parameters params = Parameters.loadSerifStyle(new File(argv[0]));
Guice.createInjector(new ScoreKBPAgainstERE.GuiceModule(params))
.getInstance(ScoreKBPAgainstERE.class).go();
}
// sets up a plugin architecture for additional scoring observers
public static final class GuiceModule extends AbstractModule {
private final Parameters params;
GuiceModule(final Parameters params) {
this.params = checkNotNull(params);
}
@Override
protected void configure() {
bind(Parameters.class).toInstance(params);
// declare that people can provide scoring observer plugins, even though none are
// provided by default
MapBinder.newMapBinder(binder(), TypeLiteral.get(String.class),
new TypeLiteral<ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>>() {
});
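      // A plugin module could contribute an observer through the same MapBinder,
      // e.g. (illustrative sketch; "myObserver" is a hypothetical instance):
      //   MapBinder.newMapBinder(binder(), TypeLiteral.get(String.class),
      //           new TypeLiteral<ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>>() {})
      //       .addBinding("myObserver").toInstance(myObserver);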
try {
bind(EREToKBPEventOntologyMapper.class)
.toInstance(EREToKBPEventOntologyMapper.create2016Mapping());
} catch (IOException ioe) {
throw new TACKBPEALException(ioe);
}
}
@Provides
    QuoteFilter getQuoteFilter(Parameters params) throws IOException {
return QuoteFilter.loadFrom(Files.asByteSource(params.getExistingFile("quoteFilter")));
}
}
}
@Value.Immutable
@Functional
@TextGroupPackageImmutable
abstract class _ResponsesAndLinking {
@Value.Parameter
public abstract ImmutableSet<DocLevelEventArg> args();
@Value.Parameter
public abstract DocLevelArgLinking linking();
@Value.Check
protected void check() {
checkArgument(args().containsAll(ImmutableSet.copyOf(concat(linking()))));
}
public final ResponsesAndLinking filter(Predicate<? super DocLevelEventArg> predicate) {
return ResponsesAndLinking.of(
Iterables.filter(args(), predicate),
linking().filterArguments(predicate));
}
public final ResponsesAndLinking transform(
final Function<? super DocLevelEventArg, DocLevelEventArg> transformer) {
return ResponsesAndLinking
.of(Iterables.transform(args(), transformer), linking().transformArguments(transformer));
}
static final Function<ResponsesAndLinking, ResponsesAndLinking> filterFunction(
final Predicate<? super DocLevelEventArg> predicate) {
return new Function<ResponsesAndLinking, ResponsesAndLinking>() {
@Override
public ResponsesAndLinking apply(final ResponsesAndLinking input) {
return input.filter(predicate);
}
};
}
}
final class EREDocAndResponses {
private final EREDocument ereDoc;
private final Iterable<Response> responses;
private final ResponseLinking linking;
public EREDocAndResponses(final EREDocument ereDoc, final Iterable<Response> responses,
final ResponseLinking linking) {
this.ereDoc = checkNotNull(ereDoc);
this.responses = checkNotNull(responses);
this.linking = checkNotNull(linking);
}
public EREDocument ereDoc() {
return ereDoc;
}
public Iterable<Response> responses() {
return responses;
}
public ResponseLinking linking() {
return linking;
}
}
|
package com.bbn.kbp.events;
import com.bbn.bue.common.Finishable;
import com.bbn.bue.common.HasDocID;
import com.bbn.bue.common.Inspector;
import com.bbn.bue.common.evaluation.AggregateBinaryFScoresInspector;
import com.bbn.bue.common.evaluation.BinaryErrorLogger;
import com.bbn.bue.common.evaluation.BinaryFScoreBootstrapStrategy;
import com.bbn.bue.common.evaluation.BootstrapInspector;
import com.bbn.bue.common.evaluation.EquivalenceBasedProvenancedAligner;
import com.bbn.bue.common.evaluation.EvalPair;
import com.bbn.bue.common.evaluation.InspectionNode;
import com.bbn.bue.common.evaluation.InspectorTreeDSL;
import com.bbn.bue.common.evaluation.InspectorTreeNode;
import com.bbn.bue.common.evaluation.ProvenancedAlignment;
import com.bbn.bue.common.files.FileUtils;
import com.bbn.bue.common.parameters.Parameters;
import com.bbn.bue.common.symbols.Symbol;
import com.bbn.bue.common.symbols.SymbolUtils;
import com.bbn.kbp.events.ontology.EREToKBPEventOntologyMapper;
import com.bbn.kbp.events.ontology.SimpleEventOntologyMapper;
import com.bbn.kbp.events2014.DocumentSystemOutput2015;
import com.bbn.kbp.events2014.Response;
import com.bbn.kbp.events2014.ResponseLinking;
import com.bbn.kbp.events2014.ResponseSet;
import com.bbn.kbp.events2014.SystemOutputLayout;
import com.bbn.kbp.events2014.io.SystemOutputStore;
import com.bbn.kbp.linking.ExplicitFMeasureInfo;
import com.bbn.kbp.linking.LinkF1;
import com.bbn.nlp.corenlp.CoreNLPDocument;
import com.bbn.nlp.corenlp.CoreNLPParseNode;
import com.bbn.nlp.corenlp.CoreNLPXMLLoader;
import com.bbn.nlp.corpora.ere.EREArgument;
import com.bbn.nlp.corpora.ere.EREDocument;
import com.bbn.nlp.corpora.ere.EREEntity;
import com.bbn.nlp.corpora.ere.EREEntityArgument;
import com.bbn.nlp.corpora.ere.EREEntityMention;
import com.bbn.nlp.corpora.ere.EREEvent;
import com.bbn.nlp.corpora.ere.EREEventMention;
import com.bbn.nlp.corpora.ere.EREFillerArgument;
import com.bbn.nlp.corpora.ere.ERELoader;
import com.bbn.nlp.corpora.ere.LinkRealis;
import com.bbn.nlp.events.HasEventType;
import com.bbn.nlp.parsing.HeadFinders;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multiset;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import com.google.common.reflect.TypeToken;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import javax.annotation.Nullable;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.inspect;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformBoth;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformLeft;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformRight;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformed;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.filter;
/**
 * Scores KBP 2015 event argument output against an ERE gold standard. Scoring is in terms of
 * (Event Type, Event Role, Entity) tuples. This program is an experimental rough draft and has a
 * number of limitations: <ul> <li>We only handle arguments which are entity mentions; others are
 * ignored according to the ERE structure on the gold side and by filtering out a (currently
 * hardcoded) set of argument roles on the system side.</li> <li>We map system responses to entities
 * by looking for an entity which has a mention sharing the character offsets of the base filler
 * exactly, either by itself or by its nominal head (given in ERE). In the future we may implement
 * more lenient alignment strategies.</li> <li>System responses which fail to align to any entity
 * are given a unique placeholder alignment so that they are penalized as false positives rather
 * than silently discarded.</li> </ul>
 */
public final class ScoreKBPAgainstERE {
private static final Logger log = LoggerFactory.getLogger(ScoreKBPAgainstERE.class);
private ScoreKBPAgainstERE() {
throw new UnsupportedOperationException();
}
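// A minimal sketch of the scoring tuple described in the class javadoc: what a
// single (Event Type, Event Role, Entity) argument looks like once extracted.
// All field values below are hypothetical and this helper is illustrative only;
// nothing in the scorer references it.
@SuppressWarnings("unused")
private static DocLevelEventArg exampleScoringTuple() {
  return DocLevelEventArg.builder()
      .docID(Symbol.from("ENG_NW_example_doc"))    // hypothetical document ID
      .eventType(Symbol.from("Conflict.Attack"))   // KBP event type.subtype
      .eventArgumentType(Symbol.from("Attacker"))  // event role
      .corefID("entity-1")                         // gold entity or filler ID
      .realis(Symbol.from("Actual"))
      .build();
}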
public static void main(String[] argv) {
// we wrap the main method in this way to
// ensure a non-zero return value on failure
try {
trueMain(argv);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
private static void trueMain(String[] argv) throws IOException {
Parameters params = Parameters.loadSerifStyle(new File(argv[0]));
log.info(params.dump());
final ImmutableSet<Symbol> docIDsToScore = ImmutableSet.copyOf(
FileUtils.loadSymbolList(params.getExistingFile("docIDsToScore")));
final ImmutableMap<Symbol, File> goldDocIDToFileMap = FileUtils.loadSymbolToFileMap(
Files.asCharSource(params.getExistingFile("goldDocIDToFileMap"), Charsets.UTF_8));
final File outputDir = params.getCreatableDirectory("ereScoringOutput");
final SystemOutputLayout outputLayout = SystemOutputLayout.ParamParser.fromParamVal(
params.getString("outputLayout"));
final SystemOutputStore outputStore =
outputLayout.open(params.getExistingDirectory("systemOutput"));
final ImmutableMap<Symbol, File> coreNLPProcessedRawDocs = FileUtils.loadSymbolToFileMap(
Files.asCharSource(params.getExistingFile("coreNLPDocIDMap"), Charsets.UTF_8));
final boolean relaxUsingCORENLP = params.getBoolean("relaxUsingCoreNLP");
final boolean useExactMatchForCoreNLPRelaxation =
relaxUsingCORENLP && params.getBoolean("useExactMatchForCoreNLPRelaxation");
final CoreNLPXMLLoader coreNLPXMLLoader =
CoreNLPXMLLoader.builder(HeadFinders.<CoreNLPParseNode>getEnglishPTBHeadFinder()).build();
log.info("Scoring over {} documents", docIDsToScore.size());
// on the gold side we take an ERE document as input
final TypeToken<EREDocument> inputIsEREDoc = new TypeToken<EREDocument>() {
};
// on the test side we take the system responses, bundled with the gold ERE document
// for use in alignment later
final TypeToken<EREDocAndResponses> inputIsEREDocAndAnswerKey =
new TypeToken<EREDocAndResponses>() {
};
final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>>
input = InspectorTreeDSL.pairedInput(inputIsEREDoc, inputIsEREDocAndAnswerKey);
// these will extract the scoring tuples from the KBP system input and ERE docs, respectively
// we create these here because we will call their finish() methods
// at the end to record some statistics about alignment failures,
// so we need to keep references to them
final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor =
new ResponsesAndLinkingFromKBPExtractor(coreNLPProcessedRawDocs,
coreNLPXMLLoader, relaxUsingCORENLP,
useExactMatchForCoreNLPRelaxation);
final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor =
new ResponsesAndLinkingFromEREExtractor(EREToKBPEventOntologyMapper.create2016Mapping());
// this sets it up so that everything fed to input will be scored in various ways
setupScoring(input, responsesAndLinkingFromKBPExtractor, responsesAndLinkingFromEREExtractor,
outputDir);
final ERELoader loader = ERELoader.create();
for (final Symbol docID : docIDsToScore) {
final File ereFileName = goldDocIDToFileMap.get(docID);
if (ereFileName == null) {
throw new RuntimeException("Missing key file for " + docID);
}
final EREDocument ereDoc = loader.loadFrom(ereFileName);
checkState(ereDoc.getDocId().equals(docID.asString()),
    "Loaded ERE document ID %s does not match requested document ID %s",
    ereDoc.getDocId(), docID);
final DocumentSystemOutput2015 docOutput =
    (DocumentSystemOutput2015) outputStore.read(docID);
final Iterable<Response> responses =
    filter(docOutput.arguments().responses(), bannedRolesFilter);
// keep only links among the responses which survived the role filter
final ResponseLinking linking = docOutput.linking()
    .copyWithFilteredResponses(Predicates.in(ImmutableSet.copyOf(responses)));
// feed this ERE doc/ KBP output pair to the scoring network
input.inspect(EvalPair.of(ereDoc, new EREDocAndResponses(ereDoc, responses, linking)));
}
// trigger the scoring network to write its summary files
input.finish();
// log alignment failures
responsesAndLinkingFromKBPExtractor.finish();
responsesAndLinkingFromEREExtractor.finish();
}
private static final ImmutableSet<Symbol> BANNED_ROLES =
SymbolUtils.setFrom("Time", "Crime", "Position",
"Fine", "Sentence");
private static final ImmutableSet<Symbol> ROLES_2016 = SymbolUtils
.setFrom("Agent", "Artifact", "Attacker", "Audience", "Beneficiary", "Crime", "Destination",
"Entity", "Giver", "Instrument", "Money", "Origin", "Person", "Place", "Position",
"Recipient", "Target", "Thing", "Time", "Victim");
private static final ImmutableSet<Symbol> ALLOWED_ROLES_2016 =
Sets.difference(ROLES_2016, BANNED_ROLES).immutableCopy();
private static final ImmutableSet<Symbol> linkableRealis = SymbolUtils.setFrom("Other", "Actual");
private static final Predicate<Response> bannedRolesFilter = new Predicate<Response>() {
@Override
public boolean apply(@Nullable final Response response) {
return ALLOWED_ROLES_2016.contains(response.role());
}
};
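// aligns gold and system tuples by exact equality: two DocLevelEventArgs align
// iff they are equal, since the equivalence function is the identity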
private static Function<EvalPair<? extends Iterable<? extends DocLevelEventArg>, ? extends Iterable<? extends DocLevelEventArg>>, ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>>
EXACT_MATCH_ALIGNER = EquivalenceBasedProvenancedAligner
.forEquivalenceFunction(Functions.<DocLevelEventArg>identity())
.asFunction();
// this sets up a scoring network which is executed on every input
private static void setupScoring(
final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>> input,
final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor,
final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor,
final File outputDir) {
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
inputAsResponsesAndLinking =
transformRight(transformLeft(input, responsesAndLinkingFromEREExtractor),
responsesAndLinkingFromKBPExtractor);
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredInput =
InspectorTreeDSL.transformBoth(
inputAsResponsesAndLinking, filterFor2016());
// set up for event argument scoring in 2015 style
eventArgumentScoringSetup(filteredInput, outputDir);
// set up for linking scoring in 2015 style
linkingScoringSetup(filteredInput, outputDir);
}
private static void eventArgumentScoringSetup(
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
inputAsResponsesAndLinking, final File outputDir) {
final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>>
inputAsSetsOfScoringTuples =
transformBoth(inputAsResponsesAndLinking, ResponsesAndLinking.argFunction);
final InspectorTreeNode<ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>>
alignmentNode = transformed(inputAsSetsOfScoringTuples, EXACT_MATCH_ALIGNER);
// overall F score
final AggregateBinaryFScoresInspector<Object, Object> scoreAndWriteOverallFScore =
AggregateBinaryFScoresInspector.createOutputtingTo("aggregateF.txt", outputDir);
inspect(alignmentNode).with(scoreAndWriteOverallFScore);
// log errors
final BinaryErrorLogger<HasDocID, HasDocID> logWrongAnswers = BinaryErrorLogger
.forStringifierAndOutputDir(Functions.<HasDocID>toStringFunction(), outputDir);
inspect(alignmentNode).with(logWrongAnswers);
final BinaryFScoreBootstrapStrategy perEventBootstrapStrategy =
BinaryFScoreBootstrapStrategy.createBrokenDownBy("EventType",
HasEventType.ExtractFunction.INSTANCE, outputDir);
final BootstrapInspector breakdownScoresByEventTypeWithBootstrapping =
BootstrapInspector.forStrategy(perEventBootstrapStrategy, 1000, new Random(0));
inspect(alignmentNode).with(breakdownScoresByEventTypeWithBootstrapping);
}
private static void linkingScoringSetup(
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
inputAsResponsesAndLinking, final File outputDir) {
final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredForRealis =
transformBoth(inputAsResponsesAndLinking, filterLinkingFor2016());
final InspectorTreeNode<EvalPair<ImmutableSet<ImmutableSet<DocLevelEventArg>>, ImmutableSet<ImmutableSet<DocLevelEventArg>>>>
    linkingNode = transformBoth(filteredForRealis, ResponsesAndLinking.linkingFunction);
final InspectorTreeNode<EvalPair<ImmutableSet<ImmutableSet<DocLevelEventArg>>, ImmutableSet<ImmutableSet<DocLevelEventArg>>>>
filteredNode =
transformed(linkingNode, ScoreKBPAgainstERE.<DocLevelEventArg>restrictToLinkingFunction());
final LinkingInspector linkingInspector =
LinkingInspector.createOutputtingTo(new File(outputDir, "linkingF.txt"));
inspect(filteredNode).with(linkingInspector);
}
private static Function<ResponsesAndLinking, ResponsesAndLinking> filterFor2016() {
return new Function<ResponsesAndLinking, ResponsesAndLinking>() {
@Nullable
@Override
public ResponsesAndLinking apply(@Nullable final ResponsesAndLinking responsesAndLinking) {
checkNotNull(responsesAndLinking);
final ImmutableSet<DocLevelEventArg> args = FluentIterable.from(responsesAndLinking.args())
.filter(Predicates.compose(Predicates.in(ALLOWED_ROLES_2016),
DocLevelEventArgFunctions.eventArgumentType())).toSet();
final ImmutableSet<ImmutableSet<DocLevelEventArg>> linking =
filterNestedElements(Predicates.in(args)).apply(responsesAndLinking.linking());
return new EREResponsesAndLinking(args, linking);
}
};
}
private static Function<ResponsesAndLinking, ResponsesAndLinking> filterLinkingFor2016() {
return new Function<ResponsesAndLinking, ResponsesAndLinking>() {
@Nullable
@Override
public ResponsesAndLinking apply(@Nullable final ResponsesAndLinking responsesAndLinking) {
checkNotNull(responsesAndLinking);
final ImmutableSet<DocLevelEventArg> args = FluentIterable.from(responsesAndLinking.args())
.filter(Predicates.compose(Predicates.in(linkableRealis),
DocLevelEventArgFunctions.realis())).toSet();
final ImmutableSet<ImmutableSet<DocLevelEventArg>> linking =
filterNestedElements(Predicates.in(args)).apply(responsesAndLinking.linking());
return new EREResponsesAndLinking(args, linking);
}
};
}
private static <T> Function<Iterable<? extends Set<T>>, ImmutableSet<ImmutableSet<T>>> filterNestedElements(
final Predicate<T> filter) {
return new Function<Iterable<? extends Set<T>>, ImmutableSet<ImmutableSet<T>>>() {
@Nullable
@Override
public ImmutableSet<ImmutableSet<T>> apply(@Nullable final Iterable<? extends Set<T>> sets) {
final ImmutableSet.Builder<ImmutableSet<T>> ret = ImmutableSet.builder();
for (final Set<T> s : sets) {
ret.add(ImmutableSet.copyOf(Iterables.filter(s, filter)));
}
return ret.build();
}
};
}
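// restricts the key-side linking to arguments which also appear somewhere in the
// test-side linking, so linking scores are computed only over shared arguments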
private static <T> Function<EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>>, EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>>> restrictToLinkingFunction() {
return new Function<EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>>, EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>>>() {
@Nullable
@Override
public EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>> apply(
@Nullable final EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>> input) {
final ImmutableSet<ImmutableSet<T>> key =
filterNestedElements(Predicates.in(ImmutableSet.copyOf(Iterables.concat(input.test()))))
.apply(input.key());
return EvalPair.of(key, input.test());
}
};
}
private static final class LinkingInspector implements
Inspector<EvalPair<ImmutableSet<ImmutableSet<DocLevelEventArg>>, ImmutableSet<ImmutableSet<DocLevelEventArg>>>> {
private final File outputFile;
private ExplicitFMeasureInfo counts = null;
private LinkingInspector(final File outputFile) {
this.outputFile = outputFile;
}
public static LinkingInspector createOutputtingTo(final File outputFile) {
return new LinkingInspector(outputFile);
}
@Override
public void inspect(
final EvalPair<ImmutableSet<ImmutableSet<DocLevelEventArg>>, ImmutableSet<ImmutableSet<DocLevelEventArg>>> item) {
checkArgument(ImmutableSet.copyOf(concat(item.test())).containsAll(
ImmutableSet.copyOf(concat(item.key()))), "Must contain only answers in test set!");
counts = LinkF1.create().score(item.key(), item.test());
}
@Override
public void finish() throws IOException {
checkNotNull(counts, "inspect() must be called before finish()");
final PrintWriter outputWriter = new PrintWriter(outputFile);
outputWriter.println(counts.toString());
outputWriter.close();
}
}
private enum ERERealisEnum {
generic,
other,
actual,
}
private enum ArgumentRealis {
Generic,
Actual,
Other
}
private static final class ResponsesAndLinkingFromEREExtractor
implements Function<EREDocument, ResponsesAndLinking>, Finishable {
// for tracking things from the answer key discarded due to not being entity mentions
private final Multiset<String> allGoldArgs = HashMultiset.create();
private final Multiset<String> discarded = HashMultiset.create();
private final SimpleEventOntologyMapper mapper;
private ResponsesAndLinkingFromEREExtractor(final SimpleEventOntologyMapper mapper) {
this.mapper = checkNotNull(mapper);
}
@Override
public ResponsesAndLinking apply(final EREDocument doc) {
final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder();
// every event mention argument within a hopper is linked
final ImmutableSet.Builder<ImmutableSet<DocLevelEventArg>> linking = ImmutableSet.builder();
for (final EREEvent ereEvent : doc.getEvents()) {
final ImmutableSet.Builder<DocLevelEventArg> responseSet = ImmutableSet.builder();
for (final EREEventMention ereEventMention : ereEvent.getEventMentions()) {
for (final EREArgument ereArgument : ereEventMention.getArguments()) {
final Symbol ereEventMentionType = Symbol.from(ereEventMention.getType());
final Symbol ereEventMentionSubtype = Symbol.from(ereEventMention.getSubtype());
final Symbol ereArgumentRole = Symbol.from(ereArgument.getRole());
final ArgumentRealis argumentRealis =
getRealis(ereEventMention.getRealis(), ereArgument.getRealis().get());
boolean skip = false;
if (!mapper.eventType(ereEventMentionType).isPresent()) {
log.debug("EventType {} is not known to the KBP ontology", ereEventMentionType);
skip = true;
}
if (!mapper.eventRole(ereArgumentRole).isPresent()) {
log.debug("EventRole {} is not known to the KBP ontology", ereArgumentRole);
skip = true;
}
if (!mapper.eventSubtype(ereEventMentionSubtype).isPresent()) {
log.debug("EventSubtype {} is not known to the KBP ontology", ereEventMentionSubtype);
skip = true;
}
if (skip) {
continue;
}
// key format is type.subtype/role, matching the Response format
final String typeRoleKey = mapper.eventType(ereEventMentionType).get() +
"." + mapper.eventSubtype(ereEventMentionSubtype).get() +
"/" + mapper.eventRole(ereArgumentRole).get();
allGoldArgs.add(typeRoleKey);
if (ereArgument instanceof EREEntityArgument) {
final EREEntityMention entityMention =
((EREEntityArgument) ereArgument).entityMention();
final Optional<EREEntity> containingEntity = doc.getEntityContaining(entityMention);
checkState(containingEntity.isPresent(), "Corrupt ERE key input lacks "
+ "entity for entity mention %s", entityMention);
final DocLevelEventArg arg =
DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId()))
.eventType(Symbol.from(mapper.eventType(ereEventMentionType).get() + "." +
mapper.eventSubtype(ereEventMentionSubtype).get()))
.eventArgumentType(mapper.eventRole(ereArgumentRole).get())
.corefID(containingEntity.get().getID())
.realis(Symbol.from(argumentRealis.name())).build();
ret.add(arg);
responseSet.add(arg);
} else if (ereArgument instanceof EREFillerArgument) {
final EREFillerArgument filler = (EREFillerArgument) ereArgument;
final DocLevelEventArg arg =
DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId()))
.eventType(Symbol.from(mapper.eventType(ereEventMentionType).get() + "." +
mapper.eventSubtype(ereEventMentionSubtype).get()))
.eventArgumentType(mapper.eventRole(ereArgumentRole).get())
.corefID(filler.filler().getID()).realis(Symbol.from(argumentRealis.name()))
.build();
ret.add(arg);
responseSet.add(arg);
} else {
throw new RuntimeException("Unknown ERE argument type " + ereArgument.getClass());
}
}
}
linking.add(responseSet.build());
}
return new EREResponsesAndLinking(ret.build(), linking.build());
}
/*
 * Realis mapping: (event mention realis, argument link realis) -> argument realis
 *   (Generic, *)    -> Generic
 *   (X, True)       -> X
 *   (Actual, False) -> Other
 *   (Other, False)  -> Other
 */
private ArgumentRealis getRealis(final String ERERealis, final LinkRealis linkRealis) {
// generic event mention realis overrides everything
if (ERERealis.equals(ERERealisEnum.generic.name())) {
return ArgumentRealis.Generic;
} else {
// if the argument is realis
if (linkRealis.equals(LinkRealis.REALIS)) {
if (ERERealis.equals(ERERealisEnum.other.name())) {
return ArgumentRealis.Other;
} else if (ERERealis.equals(ERERealisEnum.actual.name())) {
return ArgumentRealis.Actual;
} else {
throw new RuntimeException(
    "Unknown ERERealis " + ERERealis);
}
} else {
// if it's irrealis, override Actual with Other, Other is preserved. Generic is handled above.
return ArgumentRealis.Other;
}
}
}
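// Worked examples of the table above (the irrealis constant name is assumed;
// only LinkRealis.REALIS appears explicitly in this method):
//   getRealis("generic", <any link realis>)  -> Generic
//   getRealis("actual",  LinkRealis.REALIS)  -> Actual
//   getRealis("other",   LinkRealis.REALIS)  -> Other
//   getRealis("actual",  <irrealis link>)    -> Other (Actual is downgraded)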
@Override
public void finish() throws IOException {
log.info(
"Of {} gold event arguments, {} were discarded as non-entities",
allGoldArgs.size(), discarded.size());
for (final String errKey : discarded.elementSet()) {
if (discarded.count(errKey) > 0) {
log.info("Of {} gold {} arguments, {} discarded ",
+allGoldArgs.count(errKey), errKey, discarded.count(errKey));
}
}
}
}
private static final class ResponsesAndLinkingFromKBPExtractor
implements Function<EREDocAndResponses, ResponsesAndLinking>,
Finishable {
private Multiset<String> mentionAlignmentFailures = HashMultiset.create();
private Multiset<String> numResponses = HashMultiset.create();
private final ImmutableMap<Symbol, File> ereMapping;
private final CoreNLPXMLLoader coreNLPXMLLoader;
private final boolean relaxUsingCORENLP;
private final boolean useExactMatchForCoreNLPRelaxation;
public ResponsesAndLinkingFromKBPExtractor(final Map<Symbol, File> ereMapping,
final CoreNLPXMLLoader coreNLPXMLLoader, final boolean relaxUsingCORENLP,
final boolean useExactMatchForCoreNLPRelaxation) {
this.ereMapping = ImmutableMap.copyOf(ereMapping);
this.coreNLPXMLLoader = coreNLPXMLLoader;
this.relaxUsingCORENLP = relaxUsingCORENLP;
this.useExactMatchForCoreNLPRelaxation = useExactMatchForCoreNLPRelaxation;
}
public ResponsesAndLinking apply(final EREDocAndResponses input) {
final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder();
final Iterable<Response> responses = input.responses();
final EREDocument doc = input.ereDoc();
final Symbol ereID = Symbol.from(doc.getDocId());
final Optional<CoreNLPDocument> coreNLPDoc;
final EREAligner ereAligner;
try {
final File coreNLPFile = ereMapping.get(ereID);
coreNLPDoc = (coreNLPFile != null)
    ? Optional.of(coreNLPXMLLoader.loadFrom(coreNLPFile))
    : Optional.<CoreNLPDocument>absent();
ereAligner = EREAligner
.create(relaxUsingCORENLP, useExactMatchForCoreNLPRelaxation, doc, coreNLPDoc);
} catch (IOException e) {
throw new RuntimeException(e);
}
final ImmutableMap.Builder<Response, DocLevelEventArg> responseToDocLevelArg =
ImmutableMap.builder();
for (final Response response : responses) {
numResponses.add(errKey(response));
final Symbol realis = Symbol.from(response.realis().name());
// there are too few instances of these to bother matching on type currently
final ImmutableSet<EREEntity> candidateEntities = ereAligner.entitiesForResponse(response);
if (candidateEntities.size() > 1) {
log.warn(
"Found {} candidate entities for base filler {}, using the first one!",
candidateEntities.size(), response.baseFiller());
}
final EREEntity matchingEntity = Iterables.getFirst(candidateEntities, null);
if (matchingEntity != null) {
final DocLevelEventArg res = DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId()))
.eventType(response.type()).eventArgumentType(response.role())
.corefID(matchingEntity.getID()).realis(realis).build();
ret.add(res);
responseToDocLevelArg.put(response, res);
} else {
final ImmutableSet<EREFillerArgument> fillers = ereAligner.fillersForResponse(response);
final EREFillerArgument filler = Iterables.getFirst(fillers, null);
// there are too few instances of these to bother matching on type currently
if (fillers.size() > 1) {
log.warn("Found multiple {} matching fillers for {}", fillers.size(),
response.baseFiller());
}
if (filler != null) {
final DocLevelEventArg res =
DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId()))
.eventType(response.type()).eventArgumentType(response.role())
.corefID(filler.filler().getID()).realis(realis).build();
ret.add(res);
responseToDocLevelArg.put(response, res);
} else {
// add the response with a fake alignment so we properly penalize answers that don't align
final DocLevelEventArg fake =
DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId()))
.eventType(response.type()).eventArgumentType(response.role())
.corefID("fake " + mentionAlignmentFailures.size()).realis(realis).build();
ret.add(fake);
responseToDocLevelArg.put(response, fake);
mentionAlignmentFailures.add(errKey(response));
log.warn("Neither entity nor filler match found for {}", response.toString());
}
}
}
return new KBPResponsesAndLinking(ImmutableSet.copyOf(input.responses()),
responseToDocLevelArg.build(), input.linking());
}
public String errKey(Response r) {
return r.type() + "/" + r.role();
}
public void finish() {
log.info(
"Of {} system responses, got {} mention alignment failures",
numResponses.size(), mentionAlignmentFailures.size());
for (final String errKey : numResponses.elementSet()) {
if (mentionAlignmentFailures.count(errKey) > 0) {
log.info("Of {} {} responses, {} mention alignment failures",
+numResponses.count(errKey), errKey, mentionAlignmentFailures.count(errKey));
}
}
}
}
}
interface ResponsesAndLinking {
ImmutableSet<DocLevelEventArg> args();
ImmutableSet<ImmutableSet<DocLevelEventArg>> linking();
Function<ResponsesAndLinking, ImmutableSet<DocLevelEventArg>> argFunction =
new Function<ResponsesAndLinking, ImmutableSet<DocLevelEventArg>>() {
@Nullable
@Override
public ImmutableSet<DocLevelEventArg> apply(
@Nullable final ResponsesAndLinking responsesAndLinking) {
return responsesAndLinking.args();
}
};
Function<ResponsesAndLinking, ImmutableSet<ImmutableSet<DocLevelEventArg>>> linkingFunction =
new Function<ResponsesAndLinking, ImmutableSet<ImmutableSet<DocLevelEventArg>>>() {
@Nullable
@Override
public ImmutableSet<ImmutableSet<DocLevelEventArg>> apply(
@Nullable final ResponsesAndLinking responsesAndLinking) {
return responsesAndLinking.linking();
}
};
}
final class KBPResponsesAndLinking implements ResponsesAndLinking {
final ImmutableSet<Response> originalResponses;
final ImmutableMap<Response, DocLevelEventArg> responseToDocLevelEventArg;
final ImmutableSet<ImmutableSet<DocLevelEventArg>> responseSets;
KBPResponsesAndLinking(final ImmutableSet<Response> originalResponses,
final ImmutableMap<Response, DocLevelEventArg> responseToDocLevelEventArg,
final ResponseLinking responseLinking) {
this.originalResponses = originalResponses;
this.responseToDocLevelEventArg = responseToDocLevelEventArg;
final ImmutableSet.Builder<ImmutableSet<DocLevelEventArg>> responseSetsB =
ImmutableSet.builder();
for (final ResponseSet rs : responseLinking.responseSets()) {
final ImmutableSet.Builder<DocLevelEventArg> rsn = ImmutableSet.builder();
for (final Response response : rs) {
if (responseToDocLevelEventArg.containsKey(response)) {
rsn.add(responseToDocLevelEventArg.get(response));
}
}
responseSetsB.add(rsn.build());
}
this.responseSets = responseSetsB.build();
}
@Override
public ImmutableSet<DocLevelEventArg> args() {
return ImmutableSet.copyOf(responseToDocLevelEventArg.values());
}
@Override
public ImmutableSet<ImmutableSet<DocLevelEventArg>> linking() {
return responseSets;
}
}
final class EREResponsesAndLinking implements ResponsesAndLinking {
final ImmutableSet<DocLevelEventArg> args;
final ImmutableSet<ImmutableSet<DocLevelEventArg>> linking;
EREResponsesAndLinking(final Iterable<DocLevelEventArg> args,
final Iterable<ImmutableSet<DocLevelEventArg>> linking) {
this.args = ImmutableSet.copyOf(args);
this.linking = ImmutableSet.copyOf(linking);
checkArgument(this.args.containsAll(ImmutableSet.copyOf(Iterables.concat(this.linking))));
}
@Override
public ImmutableSet<DocLevelEventArg> args() {
return args;
}
@Override
public ImmutableSet<ImmutableSet<DocLevelEventArg>> linking() {
return linking;
}
}
final class EREDocAndResponses {
private final EREDocument ereDoc;
private final Iterable<Response> responses;
private final ResponseLinking linking;
public EREDocAndResponses(final EREDocument ereDoc, final Iterable<Response> responses,
final ResponseLinking linking) {
this.ereDoc = checkNotNull(ereDoc);
this.responses = checkNotNull(responses);
this.linking = checkNotNull(linking);
}
public EREDocument ereDoc() {
return ereDoc;
}
public Iterable<Response> responses() {
return responses;
}
public ResponseLinking linking() {
return linking;
}
}
|
package voldemort.store.pausable;
import voldemort.server.VoldemortConfig;
import voldemort.store.StorageConfiguration;
import voldemort.store.StorageEngine;
import voldemort.store.memory.InMemoryStorageEngine;
import voldemort.utils.ByteArray;
/**
* The storage configuration for the PausableStorageEngine
*
* @author jay
*
*/
public class PausableStorageConfiguration implements StorageConfiguration {
private static final String TYPE_NAME = "pausable";
public PausableStorageConfiguration(@SuppressWarnings("unused") VoldemortConfig config) {}
public void close() {}
public StorageEngine<ByteArray, byte[]> getStore(String name) {
return new PausableStorageEngine<ByteArray, byte[]>(new InMemoryStorageEngine<ByteArray, byte[]>(name));
}
public String getType() {
return TYPE_NAME;
}
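// Usage sketch (illustrative only; `config` stands for an existing VoldemortConfig
// instance and the store name is hypothetical):
//   StorageConfiguration conf = new PausableStorageConfiguration(config);
//   StorageEngine<ByteArray, byte[]> engine = conf.getStore("my-store");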
}
|
package org.voltdb.regressionsuites;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import junit.framework.Test;
import org.voltdb.BackendTarget;
import org.voltdb.DefaultSnapshotDataTarget;
import org.voltdb.VoltDB;
import org.voltdb.VoltTable;
import org.voltdb.VoltTableRow;
import org.voltdb.VoltType;
import org.voltdb.VoltTable.ColumnInfo;
import org.voltdb.catalog.CatalogMap;
import org.voltdb.catalog.Cluster;
import org.voltdb.catalog.Database;
import org.voltdb.catalog.Site;
import org.voltdb.catalog.Table;
import org.voltdb.client.Client;
import org.voltdb.client.ProcCallException;
import org.voltdb.regressionsuites.saverestore.CatalogChangeSingleProcessServer;
import org.voltdb.regressionsuites.saverestore.SaveRestoreTestProjectBuilder;
/**
* Test the SnapshotSave and SnapshotRestore system procedures
*/
public class TestSaveRestoreSysprocSuite extends RegressionSuite {
private static final String TMPDIR = "/tmp";
private static final String TESTNONCE = "testnonce";
private static final int ALLOWELT = 0;
public TestSaveRestoreSysprocSuite(String name) {
super(name);
}
@Override
public void setUp()
{
deleteTestFiles();
super.setUp();
DefaultSnapshotDataTarget.m_simulateFullDiskWritingChunk = false;
DefaultSnapshotDataTarget.m_simulateFullDiskWritingHeader = false;
org.voltdb.sysprocs.SnapshotRegistry.clear();
}
@Override
public void tearDown() throws InterruptedException
{
deleteTestFiles();
super.tearDown();
}
private void deleteTestFiles()
{
FilenameFilter cleaner = new FilenameFilter()
{
public boolean accept(File dir, String file)
{
return file.startsWith(TESTNONCE) || file.endsWith(".vpt") || file.endsWith(".digest");
}
};
File tmp_dir = new File(TMPDIR);
File[] tmp_files = tmp_dir.listFiles(cleaner);
for (File tmp_file : tmp_files)
{
tmp_file.delete();
}
}
private void corruptTestFiles(boolean random) throws Exception
{
FilenameFilter cleaner = new FilenameFilter()
{
public boolean accept(File dir, String file)
{
return file.startsWith(TESTNONCE);
}
};
java.util.Random r;
if (random) {
r = new java.util.Random();
} else {
r = new java.util.Random(0);
}
File tmp_dir = new File(TMPDIR);
File[] tmp_files = tmp_dir.listFiles(cleaner);
int tmpIndex = r.nextInt(tmp_files.length);
int corruptValue = r.nextInt() % 127;
java.io.RandomAccessFile raf = new java.io.RandomAccessFile( tmp_files[tmpIndex], "rw");
final int fileLength = (int)raf.length();
int corruptPosition = r.nextInt(fileLength);
System.out.println("Corrupting file " + tmp_files[tmpIndex].getName() +
" at byte " + corruptPosition + " with value " + corruptPosition);
raf.seek(corruptPosition);
raf.writeByte(corruptValue);
raf.close();
}
private VoltTable createReplicatedTable(int numberOfItems,
int indexBase)
{
VoltTable repl_table =
new VoltTable(new ColumnInfo("RT_ID", VoltType.INTEGER),
new ColumnInfo("RT_NAME", VoltType.STRING),
new ColumnInfo("RT_INTVAL", VoltType.INTEGER),
new ColumnInfo("RT_FLOATVAL", VoltType.FLOAT));
for (int i = indexBase; i < numberOfItems + indexBase; i++) {
Object[] row = new Object[] {i,
"name_" + i,
i,
new Double(i)};
repl_table.addRow(row);
}
return repl_table;
}
private VoltTable createPartitionedTable(int numberOfItems,
int indexBase)
{
VoltTable partition_table =
new VoltTable(new ColumnInfo("PT_ID", VoltType.INTEGER),
new ColumnInfo("PT_NAME", VoltType.STRING),
new ColumnInfo("PT_INTVAL", VoltType.INTEGER),
new ColumnInfo("PT_FLOATVAL", VoltType.FLOAT));
for (int i = indexBase; i < numberOfItems + indexBase; i++)
{
Object[] row = new Object[] {i,
"name_" + i,
i,
new Double(i)};
partition_table.addRow(row);
}
return partition_table;
}
private VoltTable[] loadTable(Client client, String tableName,
VoltTable table)
{
VoltTable[] results = null;
int allowELT = 0;
try
{
client.callProcedure("@LoadMultipartitionTable", tableName,
table, allowELT);
}
catch (Exception ex)
{
ex.printStackTrace();
fail("loadTable exception: " + ex.getMessage());
}
return results;
}
private void loadLargeReplicatedTable(Client client, String tableName,
int itemsPerChunk, int numChunks)
{
for (int i = 0; i < numChunks; i++)
{
VoltTable repl_table =
createReplicatedTable(itemsPerChunk, i * itemsPerChunk);
loadTable(client, tableName, repl_table);
}
}
private void loadLargePartitionedTable(Client client, String tableName,
int itemsPerChunk, int numChunks)
{
for (int i = 0; i < numChunks; i++)
{
VoltTable part_table =
createPartitionedTable(itemsPerChunk, i * itemsPerChunk);
loadTable(client, tableName, part_table);
}
}
private VoltTable[] saveTables(Client client)
{
VoltTable[] results = null;
try
{
results = client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE,
(byte)1);
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotSave exception: " + ex.getMessage());
}
return results;
}
private void checkTable(Client client, String tableName, String orderByCol,
int expectedRows)
{
if (expectedRows > 200000)
{
    System.out.println("Table too large to retrieve with select *");
    System.out.println("Skipping integrity check");
    return;
}
VoltTable result = null;
try
{
result = client.callProcedure("SaveRestoreSelect", tableName)[0];
}
catch (Exception e)
{
    e.printStackTrace();
    fail("SaveRestoreSelect exception: " + e.getMessage());
}
final int rowCount = result.getRowCount();
assertEquals(expectedRows, rowCount);
int i = 0;
while (result.advanceRow())
{
assertEquals(i, result.getLong(0));
assertEquals("name_" + i, result.getString(1));
assertEquals(i, result.getLong(2));
assertEquals(new Double(i), result.getDouble(3));
++i;
}
}
/*
* Also does some basic smoke tests
* of @SnapshotStatus, @SnapshotScan and @SnapshotDelete
*/
public void testSnapshotSave() throws Exception
{
Client client = getClient();
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
VoltTable[] results = null;
results = client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)1);
/*
* Check that snapshot status returns a reasonable result
*/
VoltTable statusResults[] = client.callProcedure("@SnapshotStatus");
assertNotNull(statusResults);
assertEquals( 2, statusResults.length);
assertEquals( 8, statusResults[0].getColumnCount());
assertEquals( 1, statusResults[0].getRowCount());
assertTrue(statusResults[0].advanceRow());
assertTrue(TMPDIR.equals(statusResults[0].getString("PATH")));
assertTrue(TESTNONCE.equals(statusResults[0].getString("NONCE")));
assertFalse( 0 == statusResults[0].getLong("END_TIME"));
assertTrue("SUCCESS".equals(statusResults[0].getString("RESULT")));
VoltTable scanResults[] = client.callProcedure("@SnapshotScan", new Object[] { null });
assertNotNull(scanResults);
assertEquals( 1, scanResults.length);
assertEquals( 1, scanResults[0].getColumnCount());
assertEquals( 1, scanResults[0].getRowCount());
assertTrue( scanResults[0].advanceRow());
assertTrue( "ERR_MSG".equals(scanResults[0].getColumnName(0)));
scanResults = client.callProcedure("@SnapshotScan", "/doesntexist");
assertNotNull(scanResults);
assertEquals( 1, scanResults[1].getRowCount());
assertTrue( scanResults[1].advanceRow());
assertTrue( "FAILURE".equals(scanResults[1].getString("RESULT")));
scanResults = client.callProcedure("@SnapshotScan", TMPDIR);
assertNotNull(scanResults);
assertEquals( 3, scanResults.length);
assertEquals( 8, scanResults[0].getColumnCount());
assertEquals( 1, scanResults[0].getRowCount());
assertTrue( scanResults[0].advanceRow());
assertTrue(TMPDIR.equals(scanResults[0].getString("PATH")));
assertTrue(TESTNONCE.equals(scanResults[0].getString("NONCE")));
assertTrue("TRUE".equals(scanResults[0].getString("COMPLETE")));
FilenameFilter cleaner = new FilenameFilter()
{
public boolean accept(File dir, String file)
{
return file.startsWith(TESTNONCE);
}
};
File tmp_dir = new File(TMPDIR);
File[] tmp_files = tmp_dir.listFiles(cleaner);
tmp_files[0].delete();
scanResults = client.callProcedure("@SnapshotScan", TMPDIR);
assertNotNull(scanResults);
assertEquals( 3, scanResults.length);
assertEquals( 8, scanResults[0].getColumnCount());
assertEquals( 1, scanResults[0].getRowCount());
assertTrue( scanResults[0].advanceRow());
assertTrue(TMPDIR.equals(scanResults[0].getString("PATH")));
assertTrue(TESTNONCE.equals(scanResults[0].getString("NONCE")));
assertTrue("FALSE".equals(scanResults[0].getString("COMPLETE")));
assertTrue(tmp_files[0].getName().contains(scanResults[0].getString("TABLES_MISSING")));
// Instead of something exhaustive, let's just make sure that we get
// the number of result rows corresponding to the number of ExecutionSites
// that did save work
Cluster cluster = VoltDB.instance().getCatalogContext().cluster;
Database database = cluster.getDatabases().get("database");
CatalogMap<Table> tables = database.getTables();
CatalogMap<Site> sites = cluster.getSites();
int num_hosts = cluster.getHosts().size();
int replicated = 0;
int total_tables = 0;
int expected_entries = 0;
for (Table table : tables)
{
// Ignore materialized tables
if (table.getMaterializer() == null)
{
total_tables++;
if (table.getIsreplicated())
{
replicated++;
}
}
}
for (Site s : sites) {
if (s.getIsexec()) {
expected_entries++;
}
}
assertEquals(expected_entries, results[0].getRowCount());
while (results[0].advanceRow())
{
assertEquals(results[0].getString("RESULT"), "SUCCESS");
}
// Now, try the save again and verify that we fail (since all the save
// files will still exist. This will return one entry per table
// per host
expected_entries =
((total_tables - replicated) * num_hosts) + replicated;
try
{
results = client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)1);
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotSave exception: " + ex.getMessage());
}
assertEquals(expected_entries, results[0].getRowCount());
while (results[0].advanceRow())
{
if (!tmp_files[0].getName().contains(results[0].getString("TABLE"))) {
assertEquals(results[0].getString("RESULT"), "FAILURE");
assertTrue(results[0].getString("ERR_MSG").contains("SAVE FILE ALREADY EXISTS"));
}
}
VoltTable deleteResults[] =
client.callProcedure(
"@SnapshotDelete",
new String[] {TMPDIR},
new String[]{TESTNONCE});
assertNotNull(deleteResults);
assertEquals( 1, deleteResults.length);
assertEquals( 8, deleteResults[0].getColumnCount());
assertEquals( 7, deleteResults[0].getRowCount());
tmp_files = tmp_dir.listFiles(cleaner);
assertEquals( 0, tmp_files.length);
}
// public void testIdleOnlineSnapshot() throws Exception
// Client client = getClient();
// int num_replicated_items_per_chunk = 100;
// int num_replicated_chunks = 10;
// int num_partitioned_items_per_chunk = 120;
// int num_partitioned_chunks = 10;
// loadLargeReplicatedTable(client, "REPLICATED_TESTER",
// num_replicated_items_per_chunk,
// num_replicated_chunks);
// loadLargePartitionedTable(client, "PARTITION_TESTER",
// num_partitioned_items_per_chunk,
// num_partitioned_chunks);
// client.callProcedure("@SnapshotSave", TMPDIR,
// TESTNONCE, (byte)0);
// Thread.sleep(300);
// /*
// * Check that snapshot status returns a reasonable result
// */
// VoltTable statusResults[] = client.callProcedure("@SnapshotStatus");
// assertNotNull(statusResults);
// assertEquals( 2, statusResults.length);
// assertEquals( 8, statusResults[0].getColumnCount());
// assertEquals( 1, statusResults[0].getRowCount());
// assertTrue(statusResults[0].advanceRow());
// assertTrue(TMPDIR.equals(statusResults[0].getString("PATH")));
// assertTrue(TESTNONCE.equals(statusResults[0].getString("NONCE")));
// assertFalse( 0 == statusResults[0].getLong("END_TIME"));
// assertTrue("SUCCESS".equals(statusResults[0].getString("RESULT")));
// public void testSaveAndRestoreReplicatedTable()
// throws IOException, InterruptedException, ProcCallException
// int num_replicated_items_per_chunk = 100;
// int num_replicated_chunks = 10;
// Client client = getClient();
// loadLargeReplicatedTable(client, "REPLICATED_TESTER",
// num_replicated_items_per_chunk,
// num_replicated_chunks);
// VoltTable[] results = null;
// results = saveTables(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// m_config.startUp();
// client = getClient();
// try
// client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE, ALLOWELT);
// catch (Exception ex)
// ex.printStackTrace();
// fail("SnapshotRestore exception: " + ex.getMessage());
// checkTable(client, "REPLICATED_TESTER", "RT_ID",
// num_replicated_items_per_chunk * num_replicated_chunks);
// results = client.callProcedure("@Statistics", "table");
// int foundItem = 0;
// while (results[0].advanceRow())
// if (results[0].getString(2).equals("REPLICATED_TESTER"))
// ++foundItem;
// assertEquals((num_replicated_chunks * num_replicated_items_per_chunk),
// results[0].getLong(4));
// // make sure all sites were loaded
// assertEquals(3, foundItem);
// public void testSaveAndRestorePartitionedTable()
// throws IOException, InterruptedException, ProcCallException
// int num_partitioned_items_per_chunk = 120; // divisible by 3
// int num_partitioned_chunks = 10;
// Client client = getClient();
// loadLargePartitionedTable(client, "PARTITION_TESTER",
// num_partitioned_items_per_chunk,
// num_partitioned_chunks);
// VoltTable[] results = null;
// DefaultSnapshotDataTarget.m_simulateFullDiskWritingHeader = true;
// results = saveTables(client);
// deleteTestFiles();
// while (results[0].advanceRow()) {
// assertTrue(results[0].getString("RESULT").equals("FAILURE"));
// DefaultSnapshotDataTarget.m_simulateFullDiskWritingHeader = false;
// results = saveTables(client);
// while (results[0].advanceRow()) {
// if (!results[0].getString("RESULT").equals("SUCCESS")) {
// System.out.println(results[0].getString("ERR_MSG"));
// assertTrue(results[0].getString("RESULT").equals("SUCCESS"));
// try
// results = client.callProcedure("@SnapshotStatus");
// assertTrue(results[0].advanceRow());
// assertTrue(results[0].getString("RESULT").equals("SUCCESS"));
// assertEquals( 1, results[0].getRowCount());
// catch (Exception ex)
// ex.printStackTrace();
// fail("SnapshotRestore exception: " + ex.getMessage());
// // Kill and restart all the execution sites.
// m_config.shutDown();
// m_config.startUp();
// client = getClient();
// try
// results = client.callProcedure("@SnapshotRestore", TMPDIR,
// TESTNONCE, ALLOWELT);
// catch (Exception ex)
// ex.printStackTrace();
// fail("SnapshotRestore exception: " + ex.getMessage());
// checkTable(client, "PARTITION_TESTER", "PT_ID",
// num_partitioned_items_per_chunk * num_partitioned_chunks);
// results = client.callProcedure("@Statistics", "table");
// int foundItem = 0;
// while (results[0].advanceRow())
// if (results[0].getString(2).equals("PARTITION_TESTER"))
// ++foundItem;
// assertEquals((num_partitioned_items_per_chunk * num_partitioned_chunks) / 3,
// results[0].getLong(4));
// // make sure all sites were loaded
// assertEquals(3, foundItem);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// m_config.startUp();
// deleteTestFiles();
// DefaultSnapshotDataTarget.m_simulateFullDiskWritingChunk = true;
// org.voltdb.sysprocs.SnapshotRegistry.clear();
// client = getClient();
// loadLargePartitionedTable(client, "PARTITION_TESTER",
// num_partitioned_items_per_chunk,
// num_partitioned_chunks);
// results = saveTables(client);
// try
// results = client.callProcedure("@SnapshotStatus");
// assertTrue(results[0].advanceRow());
// assertTrue(results[0].getString("RESULT").equals("FAILURE"));
// catch (Exception ex)
// ex.printStackTrace();
// fail("SnapshotRestore exception: " + ex.getMessage());
// DefaultSnapshotDataTarget.m_simulateFullDiskWritingChunk = false;
// deleteTestFiles();
// results = saveTables(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// m_config.startUp();
// client = getClient();
// try
// results = client.callProcedure("@SnapshotRestore", TMPDIR,
// TESTNONCE, ALLOWELT);
// catch (Exception ex)
// ex.printStackTrace();
// fail("SnapshotRestore exception: " + ex.getMessage());
// checkTable(client, "PARTITION_TESTER", "PT_ID",
// num_partitioned_items_per_chunk * num_partitioned_chunks);
// results = client.callProcedure("@Statistics", "table");
// foundItem = 0;
// while (results[0].advanceRow())
// if (results[0].getString(2).equals("PARTITION_TESTER"))
// ++foundItem;
// assertEquals((num_partitioned_items_per_chunk * num_partitioned_chunks) / 3,
// results[0].getLong(4));
// // make sure all sites were loaded
// assertEquals(3, foundItem);
// // Test that we fail properly when there are no savefiles available
// public void testRestoreMissingFiles()
// throws IOException, InterruptedException
// int num_replicated_items = 1000;
// int num_partitioned_items = 126;
// Client client = getClient();
// VoltTable repl_table = createReplicatedTable(num_replicated_items, 0);
// // make a TPCC warehouse table
// VoltTable partition_table =
// createPartitionedTable(num_partitioned_items, 0);
// loadTable(client, "REPLICATED_TESTER", repl_table);
// loadTable(client, "PARTITION_TESTER", partition_table);
// saveTables(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// deleteTestFiles();
// m_config.startUp();
// client = getClient();
// try {
// client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE, ALLOWELT);
// catch (Exception e) {
// assertTrue(e.getMessage().contains("No savefile state to restore"));
// return;
// assertTrue(false);
// // Test that we fail properly when the save files are corrupted
// public void testCorruptedFiles()
// throws Exception
// int num_replicated_items = 1000;
// int num_partitioned_items = 126;
// final int iterations = isValgrind() ? 5 : 100;
// for (int ii = 0; ii < iterations; ii++) {
// Client client = getClient();
// VoltTable repl_table = createReplicatedTable(num_replicated_items, 0);
// // make a TPCC warehouse table
// VoltTable partition_table =
// createPartitionedTable(num_partitioned_items, 0);
// loadTable(client, "REPLICATED_TESTER", repl_table);
// loadTable(client, "PARTITION_TESTER", partition_table);
// VoltTable results[] = saveTables(client);
// while (results[0].advanceRow()) {
// if (results[0].getString("RESULT").equals("FAILURE")) {
// System.out.println(results[0].getString("ERR_MSG"));
// assertTrue(results[0].getString("RESULT").equals("SUCCESS"));
// corruptTestFiles(false);
// releaseClient(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// m_config.startUp();
// client = getClient();
// results = client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE, ALLOWELT);
// assertNotNull(results);
// deleteTestFiles();
// releaseClient(client);
// // Test that a random corruption doesn't mess up the table. Not reproducible but useful for detecting
// // stuff we won't normally find
// public void testCorruptedFilesRandom()
// throws Exception
// int num_replicated_items = 1000;
// int num_partitioned_items = 126;
// final int iterations = isValgrind() ? 5 : 100;
// for (int ii = 0; ii < iterations; ii++) {
// Client client = getClient();
// VoltTable repl_table = createReplicatedTable(num_replicated_items, 0);
// // make a TPCC warehouse table
// VoltTable partition_table =
// createPartitionedTable(num_partitioned_items, 0);
// loadTable(client, "REPLICATED_TESTER", repl_table);
// loadTable(client, "PARTITION_TESTER", partition_table);
// saveTables(client);
// releaseClient(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// corruptTestFiles(true);
// m_config.startUp();
// client = getClient();
// VoltTable results[] = client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE, ALLOWELT);
// assertNotNull(results);
// deleteTestFiles();
// releaseClient(client);
//// public void testRestoreMissingPartitionFile()
//// throws IOException, InterruptedException
//// int num_replicated_items = 1000;
//// int num_partitioned_items = 126;
//// Client client = getClient();
//// VoltTable repl_table = createReplicatedTable(num_replicated_items, 0);
//// // make a TPCC warehouse table
//// VoltTable partition_table =
//// createPartitionedTable(num_partitioned_items, 0);
//// loadTable(client, "REPLICATED_TESTER", repl_table);
//// loadTable(client, "PARTITION_TESTER", partition_table);
//// saveTables(client);
//// // Kill and restart all the execution sites.
//// m_config.shutDown();
//// String filename = TESTNONCE + "-PARTITION_TESTER-host_0";
//// File item_file = new File(TMPDIR, filename);
//// item_file.delete();
//// m_config.startUp();
//// client = getClient();
//// try {
//// client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
//// catch (Exception e) {
//// assertTrue(e.getMessage().
//// contains("PARTITION_TESTER has some inconsistency"));
//// return;
//// assertTrue(false);
// public void testRepartition()
// throws IOException, InterruptedException, ProcCallException
// int num_replicated_items_per_chunk = 100;
// int num_replicated_chunks = 10;
// int num_partitioned_items_per_chunk = 120; // divisible by 3 and 4
// int num_partitioned_chunks = 10;
// Client client = getClient();
// loadLargeReplicatedTable(client, "REPLICATED_TESTER",
// num_replicated_items_per_chunk,
// num_partitioned_chunks);
// loadLargePartitionedTable(client, "PARTITION_TESTER",
// num_partitioned_items_per_chunk,
// num_partitioned_chunks);
// VoltTable[] results = null;
// results = saveTables(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// CatalogChangeSingleProcessServer config =
// (CatalogChangeSingleProcessServer) m_config;
// config.recompile(4);
// m_config.startUp();
// client = getClient();
// try
// results = client.callProcedure("@SnapshotRestore", TMPDIR,
// TESTNONCE, ALLOWELT);
// // XXX Should check previous results for success but meh for now
// catch (Exception ex)
// ex.printStackTrace();
// fail("SnapshotRestore exception: " + ex.getMessage());
// checkTable(client, "PARTITION_TESTER", "PT_ID",
// num_partitioned_items_per_chunk * num_partitioned_chunks);
// checkTable(client, "REPLICATED_TESTER", "RT_ID",
// num_replicated_items_per_chunk * num_replicated_chunks);
// results = client.callProcedure("@Statistics", "table");
// int foundItem = 0;
// while (results[0].advanceRow())
// if (results[0].getString(2).equals("PARTITION_TESTER"))
// ++foundItem;
// assertEquals((num_partitioned_items_per_chunk * num_partitioned_chunks) / 4,
// results[0].getLong(4));
// // make sure all sites were loaded
// assertEquals(4, foundItem);
// config.revertCompile();
// public void testChangeDDL()
// throws IOException, InterruptedException, ProcCallException
// int num_partitioned_items_per_chunk = 120;
// int num_partitioned_chunks = 10;
// Client client = getClient();
// loadLargePartitionedTable(client, "PARTITION_TESTER",
// num_partitioned_items_per_chunk,
// num_partitioned_chunks);
// // Store something in the table which will change columns
// VoltTable change_table =
// new VoltTable(new ColumnInfo("ID", VoltType.INTEGER),
// new ColumnInfo("BYEBYE", VoltType.INTEGER));
// for (int i = 0; i < 10; i++)
// Object[] row = new Object[] {i, i};
// change_table.addRow(row);
// loadTable(client, "CHANGE_COLUMNS", change_table);
// VoltTable[] results = null;
// results = saveTables(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// CatalogChangeSingleProcessServer config =
// (CatalogChangeSingleProcessServer) m_config;
// SaveRestoreTestProjectBuilder project =
// new SaveRestoreTestProjectBuilder();
// project.addDefaultProcedures();
// project.addDefaultPartitioning();
// project.addSchema(SaveRestoreTestProjectBuilder.class.
// getResource("saverestore-altered-ddl.sql"));
// config.recompile(project);
// m_config.startUp();
// client = getClient();
// try
// results = client.callProcedure("@SnapshotRestore", TMPDIR,
// TESTNONCE, ALLOWELT);
// catch (Exception ex)
// ex.printStackTrace();
// fail("SnapshotRestore exception: " + ex.getMessage());
// // XXX consider adding a check that the newly materialized table is
// // not loaded
// results = client.callProcedure("@Statistics", "table");
// boolean found_gets_created = false;
// while (results[0].advanceRow())
// if (results[0].getString(2).equals("GETS_REMOVED"))
// fail("Table GETS_REMOVED got reloaded");
// if (results[0].getString(2).equals("GETS_CREATED"))
// found_gets_created = true;
// // Check the table which changed columns
// VoltTable[] change_results =
// client.callProcedure("SaveRestoreSelect", "CHANGE_COLUMNS");
// assertEquals(3, change_results[0].getColumnCount());
// for (int i = 0; i < 10; i++)
// VoltTableRow row = change_results[0].fetchRow(i);
// assertEquals(i, row.getLong("ID"));
// assertEquals(1234, row.getLong("HASDEFAULT"));
// row.getLong("HASNULL");
// assertTrue(row.wasNull());
// assertTrue(found_gets_created);
// config.revertCompile();
// public void testGoodChangeAttributeTypes()
// throws IOException, InterruptedException, ProcCallException
// Client client = getClient();
// // Store something in the table which will change columns
// VoltTable change_types =
// new VoltTable(new ColumnInfo("ID", VoltType.INTEGER),
// new ColumnInfo("BECOMES_INT", VoltType.TINYINT),
// new ColumnInfo("BECOMES_FLOAT", VoltType.INTEGER),
// new ColumnInfo("BECOMES_TINY", VoltType.INTEGER));
// change_types.addRow(0, 100, 100, 100);
// change_types.addRow(1, VoltType.NULL_TINYINT, VoltType.NULL_INTEGER,
// VoltType.NULL_INTEGER);
// loadTable(client, "CHANGE_TYPES", change_types);
// saveTables(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// CatalogChangeSingleProcessServer config =
// (CatalogChangeSingleProcessServer) m_config;
// SaveRestoreTestProjectBuilder project =
// new SaveRestoreTestProjectBuilder();
// project.addDefaultProcedures();
// project.addDefaultPartitioning();
// project.addSchema(SaveRestoreTestProjectBuilder.class.
// getResource("saverestore-altered-ddl.sql"));
// config.recompile(project);
// m_config.startUp();
// client = getClient();
// try
// client.callProcedure("@SnapshotRestore", TMPDIR,
// TESTNONCE, ALLOWELT);
// catch (Exception ex)
// ex.printStackTrace();
// fail("SnapshotRestore exception: " + ex.getMessage());
// client.callProcedure("@Statistics", "table");
// VoltTable[] change_results =
// client.callProcedure("SaveRestoreSelect", "CHANGE_TYPES");
// VoltTableRow row = change_results[0].fetchRow(0);
// assertEquals(100, row.getLong(1));
// assertEquals(100.0, row.getDouble(2));
// assertEquals(100, row.getLong(3));
// row = change_results[0].fetchRow(1);
// row.getLong(1);
// assertTrue(row.wasNull());
// row.getDouble(2);
// assertTrue(row.wasNull());
// row.getLong(3);
// assertTrue(row.wasNull());
// config.revertCompile();
// public void testBadChangeAttributeTypes()
// throws IOException, InterruptedException, ProcCallException
// Client client = getClient();
// // Store something in the table which will change columns
// VoltTable change_types =
// new VoltTable(new ColumnInfo("ID", VoltType.INTEGER),
// new ColumnInfo("BECOMES_INT", VoltType.TINYINT),
// new ColumnInfo("BECOMES_FLOAT", VoltType.INTEGER),
// new ColumnInfo("BECOMES_TINY", VoltType.INTEGER));
// change_types.addRow(0, 100, 100, 100000);
// loadTable(client, "CHANGE_TYPES", change_types);
// VoltTable[] results = null;
// results = saveTables(client);
// // Kill and restart all the execution sites.
// m_config.shutDown();
// CatalogChangeSingleProcessServer config =
// (CatalogChangeSingleProcessServer) m_config;
// SaveRestoreTestProjectBuilder project =
// new SaveRestoreTestProjectBuilder();
// project.addDefaultProcedures();
// project.addDefaultPartitioning();
// project.addSchema(SaveRestoreTestProjectBuilder.class.
// getResource("saverestore-altered-ddl.sql"));
// config.recompile(project);
// m_config.startUp();
// client = getClient();
// try
// results = client.callProcedure("@SnapshotRestore", TMPDIR,
// TESTNONCE, ALLOWELT);
// catch (Exception ex)
// ex.printStackTrace();
// fail("Unexpected exception from SnapshotRestore");
// boolean type_failure = false;
// while (results[0].advanceRow())
// if (results[0].getString("RESULT").equals("FAILURE"))
// if (results[0].getString("ERR_MSG").contains("would overflow"))
// type_failure = true;
// assertTrue(type_failure);
// config.revertCompile();
/**
* Build a list of the tests to be run. Use the regression suite
* helpers to allow multiple back ends.
* JUnit magic that uses the regression suite helper classes.
*/
static public Test suite() {
VoltServerConfig config = null;
MultiConfigSuiteBuilder builder =
new MultiConfigSuiteBuilder(TestSaveRestoreSysprocSuite.class);
SaveRestoreTestProjectBuilder project =
new SaveRestoreTestProjectBuilder();
project.addAllDefaults();
config =
new CatalogChangeSingleProcessServer("sysproc-threesites.jar", 3,
BackendTarget.NATIVE_EE_JNI);
boolean success = config.compile(project);
assert(success);
builder.addServerConfig(config);
return builder;
}
}
|
package org.voltdb.regressionsuites;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.CountDownLatch;
import java.util.zip.GZIPInputStream;
import org.json_voltpatches.JSONException;
import org.json_voltpatches.JSONObject;
import org.voltcore.logging.VoltLogger;
import org.voltdb.BackendTarget;
import org.voltdb.DefaultSnapshotDataTarget;
import org.voltdb.VoltDB;
import org.voltdb.VoltTable;
import org.voltdb.VoltTable.ColumnInfo;
import org.voltdb.VoltTableRow;
import org.voltdb.VoltType;
import org.voltdb.catalog.CatalogMap;
import org.voltdb.catalog.Cluster;
import org.voltdb.catalog.Database;
import org.voltdb.catalog.Table;
import org.voltdb.client.Client;
import org.voltdb.client.ClientFactory;
import org.voltdb.client.ClientResponse;
import org.voltdb.client.NoConnectionsException;
import org.voltdb.client.ProcCallException;
import org.voltdb.client.SyncCallback;
import org.voltdb.iv2.MpInitiator;
import org.voltdb.iv2.TxnEgo;
import org.voltdb.sysprocs.saverestore.SnapshotUtil;
import org.voltdb.utils.SnapshotConverter;
import org.voltdb.utils.SnapshotVerifier;
import org.voltdb.utils.VoltFile;
import org.voltdb_testprocs.regressionsuites.SaveRestoreBase;
import org.voltdb_testprocs.regressionsuites.saverestore.CatalogChangeSingleProcessServer;
import org.voltdb_testprocs.regressionsuites.saverestore.SaveRestoreTestProjectBuilder;
/**
* Test the SnapshotSave and SnapshotRestore system procedures
*/
public class TestSaveRestoreSysprocSuite extends SaveRestoreBase {
private final static VoltLogger LOG = new VoltLogger("CONSOLE");
public TestSaveRestoreSysprocSuite(String name) {
super(name);
}
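    /**
     * Flip one byte at a random offset in one randomly chosen snapshot file
     * (catalog .jar files are excluded) to a value guaranteed to differ from
     * the byte currently stored there.
     */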
private void corruptTestFiles(java.util.Random r) throws Exception
{
FilenameFilter cleaner = new FilenameFilter()
{
@Override
public boolean accept(File dir, String file)
{
// NOTE: at some point we will be prepared to corrupt
// the catalog. At that point, get rid of the
// .jar exclusion.
return file.startsWith(TESTNONCE) && !file.endsWith(".jar");
}
};
File tmp_dir = new File(TMPDIR);
File[] tmp_files = tmp_dir.listFiles(cleaner);
int tmpIndex = r.nextInt(tmp_files.length);
byte corruptValue[] = new byte[1];
r.nextBytes(corruptValue);
java.io.RandomAccessFile raf = new java.io.RandomAccessFile( tmp_files[tmpIndex], "rw");
int corruptPosition = r.nextInt((int)raf.length());
raf.seek(corruptPosition);
byte currentValue = raf.readByte();
while (currentValue == corruptValue[0]) {
r.nextBytes(corruptValue);
}
System.out.println("Corrupting file " + tmp_files[tmpIndex].getName() +
" at byte " + corruptPosition + " with value " + corruptValue[0]);
raf.seek(corruptPosition);
raf.write(corruptValue);
raf.close();
}
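    /**
     * Build an in-memory REPLICATED_TESTER-shaped table with numberOfItems
     * rows starting at indexBase. When expectedText is non-null, the expected
     * CSV or TSV rendering of each row is added to it so the conversion tests
     * can validate converter output; the CSV values deliberately exercise
     * comma, quote, CR and LF escaping.
     */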
private VoltTable createReplicatedTable(int numberOfItems,
int indexBase,
Set<String> expectedText) {
return createReplicatedTable(numberOfItems, indexBase, expectedText, false);
}
private VoltTable createReplicatedTable(int numberOfItems,
int indexBase,
Set<String> expectedText,
boolean generateCSV)
{
VoltTable repl_table =
new VoltTable(new ColumnInfo("RT_ID", VoltType.INTEGER),
new ColumnInfo("RT_NAME", VoltType.STRING),
new ColumnInfo("RT_INTVAL", VoltType.INTEGER),
new ColumnInfo("RT_FLOATVAL", VoltType.FLOAT));
        char delimiter = generateCSV ? ',' : '\t';
for (int i = indexBase; i < numberOfItems + indexBase; i++) {
String stringVal = null;
String escapedVal = null;
if (expectedText != null) {
if (generateCSV) {
int escapable = i % 5;
switch (escapable) {
case 0:
stringVal = "name_" + i;
escapedVal = "\"name_" + i + "\"";
break;
case 1:
stringVal = "na,me_" + i;
escapedVal = "\"na,me_" + i + "\"";
break;
case 2:
stringVal = "na\"me_" + i;
escapedVal = "\"na\"\"me_" + i + "\"";
break;
case 3:
stringVal = "na\rme_" + i;
escapedVal = "\"na\rme_" + i + "\"";
break;
case 4:
stringVal = "na\nme_" + i;
escapedVal = "\"na\nme_" + i + "\"";
break;
}
} else {
stringVal = "name_" + i;
escapedVal = "name_" + i;
}
} else {
stringVal = "name_" + i;
}
Object[] row = new Object[] {i,
stringVal,
i,
new Double(i)};
if (expectedText != null) {
StringBuilder sb = new StringBuilder(64);
if (generateCSV) {
sb.append('"').append(i).append('"').append(delimeter).append(escapedVal).append(delimeter);
sb.append('"').append(i).append('"').append(delimeter);
sb.append('"').append(new Double(i).toString()).append('"');
} else {
sb.append(i).append(delimeter).append(escapedVal).append(delimeter);
sb.append(i).append(delimeter);
sb.append(new Double(i).toString());
}
expectedText.add(sb.toString());
}
repl_table.addRow(row);
}
return repl_table;
}
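    /** Build an in-memory PARTITION_TESTER-shaped table with numberOfItems rows starting at indexBase. */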
private VoltTable createPartitionedTable(int numberOfItems,
int indexBase)
{
VoltTable partition_table =
new VoltTable(new ColumnInfo("PT_ID", VoltType.INTEGER),
new ColumnInfo("PT_NAME", VoltType.STRING),
new ColumnInfo("PT_INTVAL", VoltType.INTEGER),
new ColumnInfo("PT_FLOATVAL", VoltType.FLOAT));
for (int i = indexBase; i < numberOfItems + indexBase; i++)
{
Object[] row = new Object[] {i,
"name_" + i,
i,
new Double(i)};
partition_table.addRow(row);
}
return partition_table;
}
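    /**
     * Load a VoltTable into the named database table. Replicated tables go
     * through the @LoadMultipartitionTable sysproc in one shot; partitioned
     * tables are inserted row by row through the generated tableName.insert
     * default procedure.
     */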
private VoltTable[] loadTable(Client client, String tableName, boolean replicated,
VoltTable table)
{
VoltTable[] results = null;
try
{
if (replicated) {
client.callProcedure("@LoadMultipartitionTable", tableName,
table);
} else {
ArrayList<SyncCallback> callbacks = new ArrayList<SyncCallback>();
VoltType columnTypes[] = new VoltType[table.getColumnCount()];
for (int ii = 0; ii < columnTypes.length; ii++) {
columnTypes[ii] = table.getColumnType(ii);
}
while (table.advanceRow()) {
SyncCallback cb = new SyncCallback();
callbacks.add(cb);
Object params[] = new Object[table.getColumnCount()];
for (int ii = 0; ii < columnTypes.length; ii++) {
params[ii] = table.get(ii, columnTypes[ii]);
}
client.callProcedure(cb, tableName + ".insert", params);
                }
                // Wait for each row insert to be acknowledged; the callbacks
                // list is dead code without this, and the load could still be
                // in flight when the caller takes a snapshot.
                for (SyncCallback cb : callbacks) {
                    cb.waitForResponse();
                }
}
}
catch (Exception ex)
{
ex.printStackTrace();
fail("loadTable exception: " + ex.getMessage());
}
return results;
}
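    /** Load the replicated table in numChunks chunks of itemsPerChunk rows, optionally recording the expected text output. */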
private void loadLargeReplicatedTable(Client client, String tableName,
int itemsPerChunk, int numChunks) {
loadLargeReplicatedTable(client, tableName, itemsPerChunk, numChunks, false, null);
}
private void loadLargeReplicatedTable(Client client, String tableName,
int itemsPerChunk, int numChunks, boolean generateCSV, Set<String> expectedText)
{
for (int i = 0; i < numChunks; i++)
{
VoltTable repl_table =
createReplicatedTable(itemsPerChunk, i * itemsPerChunk, expectedText, generateCSV);
loadTable(client, tableName, true, repl_table);
}
}
private void loadLargePartitionedTable(Client client, String tableName,
int itemsPerChunk, int numChunks)
{
for (int i = 0; i < numChunks; i++)
{
VoltTable part_table =
createPartitionedTable(itemsPerChunk, i * itemsPerChunk);
loadTable(client, tableName, false, part_table);
}
}
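    /** Snapshot all tables to TMPDIR under TESTNONCE: blocking, native (non-CSV) format. */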
private VoltTable[] saveTablesWithDefaultOptions(Client client)
{
return saveTables(client, TMPDIR, TESTNONCE, true, false);
}
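    /**
     * Invoke @SnapshotSave directly. CSV saves use the JSON parameter form
     * and all other saves use the legacy positional arguments, so both call
     * paths get exercised.
     */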
private VoltTable[] saveTables(Client client, String dir, String nonce, boolean block, boolean csv)
{
VoltTable[] results = null;
try
{
            // For complete coverage, test the JSON parameter form for CSV
            // saves and the legacy positional args otherwise.
if (csv) {
JSONObject jsObj = new JSONObject();
try {
jsObj.put("uripath", String.format("file://%s", dir));
jsObj.put("nonce", nonce);
jsObj.put("block", block);
jsObj.put("format", "csv");
} catch (JSONException e) {
fail("JSON exception" + e.getMessage());
}
results = client.callProcedure("@SnapshotSave", jsObj.toString()).getResults();
}
else {
results = client.callProcedure("@SnapshotSave", dir, nonce, (byte)(block ? 1 : 0))
.getResults();
}
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotSave exception: " + ex.getMessage());
}
return results;
}
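    /**
     * Select the named table back and verify the row count and the generated
     * per-row values. Tables above 200000 rows are skipped because a full
     * select * would be too expensive.
     */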
private void checkTable(Client client, String tableName, String orderByCol,
int expectedRows) throws Exception
{
        if (expectedRows > 200000)
        {
            System.out.println("Table too large to retrieve with select *");
            System.out.println("Skipping integrity check");
            return;
        }
VoltTable result = client.callProcedure("SaveRestoreSelect", tableName).getResults()[0];
final int rowCount = result.getRowCount();
assertEquals(expectedRows, rowCount);
int i = 0;
while (result.advanceRow())
{
assertEquals(i, result.getLong(0));
assertEquals("name_" + i, result.getString(1));
assertEquals(i, result.getLong(2));
assertEquals(new Double(i), result.getDouble(3));
++i;
}
}
private void validateSnapshot(boolean expectSuccess) {
validateSnapshot(expectSuccess, false, TESTNONCE);
}
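    /**
     * Run SnapshotVerifier over TMPDIR with System.out captured, and check
     * that its report starts with "Snapshot valid" or "Snapshot corrupted"
     * as expected. When onlyReportSuccess is true, the outcome is returned
     * instead of failing the test.
     */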
private boolean validateSnapshot(boolean expectSuccess, boolean onlyReportSuccess, String nonce) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
PrintStream original = System.out;
try {
System.setOut(ps);
String args[] = new String[] {
nonce,
"--dir",
TMPDIR
};
SnapshotVerifier.main(args);
ps.flush();
String reportString = baos.toString("UTF-8");
boolean success = false;
if (expectSuccess) {
success = reportString.startsWith("Snapshot valid\n");
} else {
success = reportString.startsWith("Snapshot corrupted\n");
}
if (!onlyReportSuccess) {
if (!success) {
fail(reportString);
}
}
return success;
} catch (UnsupportedEncodingException e) {}
finally {
System.setOut(original);
}
return false;
}
/*
* Test that IV2 transaction ids for inactive partitions are propagated during snapshot restore. This
 * test is sufficient to test restore as well because the transaction ids are published
* to ZK and retrieved by the snapshot daemon for each @SnapshotSave invocation.
*/
@SuppressWarnings("unchecked")
public void testPropagateIV2TransactionIds()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
if (!VoltDB.instance().isIV2Enabled()) return;
System.out.println("Starting testPropagateIV2TransactionIds");
int num_replicated_items = 1000;
int num_partitioned_items = 126;
Client client = getClient();
VoltTable repl_table = createReplicatedTable(num_replicated_items, 0, null);
// make a TPCC warehouse table
VoltTable partition_table =
createPartitionedTable(num_partitioned_items, 0);
loadTable(client, "REPLICATED_TESTER", true, repl_table);
loadTable(client, "PARTITION_TESTER", false, partition_table);
saveTablesWithDefaultOptions(client);
JSONObject digest = SnapshotUtil.CRCCheck(new VoltFile(TMPDIR, TESTNONCE + "-host_0.digest"), LOG);
JSONObject transactionIds = digest.getJSONObject("partitionTransactionIds");
System.out.println("TRANSACTION IDS: " + transactionIds.toString());
assertEquals( 4, transactionIds.length());
Set<Integer> partitions = new HashSet<Integer>();
Iterator<String> keys = transactionIds.keys();
while (keys.hasNext()) {
final long foundTxnId = transactionIds.getLong(keys.next());
//The txnids should be non-zero and there should be one for each partition
partitions.add(TxnEgo.getPartitionId(foundTxnId));
assertTrue(foundTxnId > TxnEgo.makeZero(TxnEgo.getPartitionId(foundTxnId)).getTxnId());
}
assertTrue(partitions.contains(0));
assertTrue(partitions.contains(1));
assertTrue(partitions.contains(2));
assertTrue(partitions.contains(MpInitiator.MP_INIT_PID));
m_config.shutDown();
CatalogChangeSingleProcessServer config =
(CatalogChangeSingleProcessServer) m_config;
config.recompile(1);
try {
m_config.startUp(false);
client = getClient();
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE).getResults();
saveTables(client, TMPDIR, TESTNONCE + 2, true, false);
digest = SnapshotUtil.CRCCheck(new VoltFile(TMPDIR, TESTNONCE + "2-host_0.digest"), LOG);
JSONObject newTransactionIds = digest.getJSONObject("partitionTransactionIds");
assertEquals(transactionIds.length(), newTransactionIds.length());
keys = transactionIds.keys();
while (keys.hasNext()) {
String partitionId = keys.next();
final long txnid = newTransactionIds.getLong(partitionId);
//Because these are no longer part of the cluster they should be unchanged
if (partitionId.equals("2") || partitionId.equals("1")) {
assertEquals(txnid, transactionIds.getLong(partitionId));
                } else if (partitionId.equals(Integer.toString(MpInitiator.MP_INIT_PID)) || partitionId.equals("0")) {
//These should be > than the one from the other snapshot
//because it picked up where it left off on restore and did more work
assertTrue(txnid > transactionIds.getLong(partitionId));
}
}
} finally {
config.revertCompile();
}
}
// Test that a replicated table can be distributed correctly
public void testDistributeReplicatedTable()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testDistributeReplicatedTable");
m_config.shutDown();
int num_replicated_items = 1000;
int num_partitioned_items = 126;
LocalCluster lc = new LocalCluster( JAR_NAME, 2, 3, 0, BackendTarget.NATIVE_EE_JNI);
lc.setHasLocalServer(false);
SaveRestoreTestProjectBuilder project =
new SaveRestoreTestProjectBuilder();
project.addAllDefaults();
lc.compile(project);
lc.startUp();
try {
Client client = ClientFactory.createClient();
client.createConnection(lc.getListenerAddresses().get(0));
try {
VoltTable repl_table = createReplicatedTable(num_replicated_items, 0, null);
// make a TPCC warehouse table
VoltTable partition_table =
createPartitionedTable(num_partitioned_items, 0);
loadTable(client, "REPLICATED_TESTER", true, repl_table);
loadTable(client, "PARTITION_TESTER", false, partition_table);
saveTablesWithDefaultOptions(client);
boolean skipFirst = true;
int deletedFiles = 0;
for (File f : lc.listFiles(new File(TMPDIR))) {
if (f.getName().startsWith(TESTNONCE + "-REPLICATED")) {
if (skipFirst) {
skipFirst = false;
continue;
}
assertTrue(f.delete());
deletedFiles++;
}
}
assertEquals(deletedFiles, 2);
} finally {
client.close();
}
lc.shutDown();
lc.startUp(false);
client = ClientFactory.createClient();
client.createConnection(lc.getListenerAddresses().get(0));
try {
ClientResponse cr = client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
assertTrue(cr.getStatus() == ClientResponse.SUCCESS);
checkTable(client, "REPLICATED_TESTER", "RT_ID", num_replicated_items);
checkTable(client, "PARTITION_TESTER", "PT_ID", num_partitioned_items);
} finally {
client.close();
}
} finally {
lc.shutDown();
}
}
public void testQueueUserSnapshot() throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Staring testQueueUserSnapshot.");
Client client = getClient();
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
Set<String> expectedText = new HashSet<String>();
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks,
false,
expectedText);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
// Take a snapshot that will block snapshots in the system
DefaultSnapshotDataTarget.m_simulateBlockedWrite = new CountDownLatch(1);
client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)0);
org.voltdb.SnapshotDaemon.m_userSnapshotRetryInterval = 1;
// Make sure we can queue a snapshot
ClientResponse r =
client.callProcedure("@SnapshotSave", TMPDIR, TESTNONCE + "2", (byte)0);
VoltTable result = r.getResults()[0];
assertTrue(result.advanceRow());
assertTrue(
result.getString("ERR_MSG").startsWith("SNAPSHOT REQUEST QUEUED"));
//Let it reattempt and fail a few times
Thread.sleep(2000);
// Make sure that attempting to queue a second snapshot save request results
// in a snapshot in progress message
r =
client.callProcedure("@SnapshotSave", TMPDIR, TESTNONCE + "2", (byte)0);
result = r.getResults()[0];
assertTrue(result.advanceRow());
assertTrue(
result.getString("ERR_MSG").startsWith("SNAPSHOT IN PROGRESS"));
// Now make sure it is reattempted and works
DefaultSnapshotDataTarget.m_simulateBlockedWrite.countDown();
DefaultSnapshotDataTarget.m_simulateBlockedWrite = null;
boolean hadSuccess = false;
for (int ii = 0; ii < 5; ii++) {
Thread.sleep(2000);
hadSuccess = validateSnapshot(true, true, TESTNONCE + "2");
if (hadSuccess) break;
}
assertTrue(hadSuccess);
        // Make sure errors are properly forwarded; this is one code path for
        // handling errors. There is another for errors that don't occur right
        // off the bat.
r =
client.callProcedure("@SnapshotSave", TMPDIR, TESTNONCE + "2", (byte)1);
result = r.getResults()[0];
assertTrue(result.advanceRow());
assertTrue(result.getString("ERR_MSG").startsWith("SAVE FILE ALREADY EXISTS"));
}
// Test specific case where a user snapshot is queued
// and then fails while queued. It shouldn't block future snapshots
public void testQueueFailedUserSnapshot() throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Staring testQueueFailedUserSnapshot.");
Client client = getClient();
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
Set<String> expectedText = new HashSet<String>();
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks,
false,
expectedText);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
// Take a snapshot that will block snapshots in the system
DefaultSnapshotDataTarget.m_simulateBlockedWrite = new CountDownLatch(1);
client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)0);
org.voltdb.SnapshotDaemon.m_userSnapshotRetryInterval = 1;
// Make sure we can queue a snapshot
ClientResponse r =
client.callProcedure("@SnapshotSave", TMPDIR, TESTNONCE, (byte)0);
VoltTable result = r.getResults()[0];
assertTrue(result.advanceRow());
assertTrue(
result.getString("ERR_MSG").startsWith("SNAPSHOT REQUEST QUEUED"));
//Let it reattempt a few times
Thread.sleep(2000);
        // Now make sure it is reattempted; it will fail because it has the
        // name of an existing snapshot. No way to tell other than that new
        // snapshots continue to work.
DefaultSnapshotDataTarget.m_simulateBlockedWrite.countDown();
DefaultSnapshotDataTarget.m_simulateBlockedWrite = null;
Thread.sleep(2000);
        // Make sure errors are properly forwarded; this is one code path for
        // handling errors. There is another for errors that don't occur right
        // off the bat.
r =
client.callProcedure("@SnapshotSave", TMPDIR, TESTNONCE + "2", (byte)1);
result = r.getResults()[0];
while (result.advanceRow()) {
assertTrue(result.getString("RESULT").equals("SUCCESS"));
}
validateSnapshot(true, false, TESTNONCE + "2");
}
public void testRestore12Snapshot()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
Client client = getClient();
byte snapshotTarBytes[] = new byte[1024 * 1024 * 3];
InputStream is =
org.voltdb_testprocs.regressionsuites.saverestore.MatView.class.
getResource("voltdb_1.2_snapshot.tar.gz").openConnection().getInputStream();
GZIPInputStream gis = new GZIPInputStream(is);
int totalRead = 0;
int readLastTime = 0;
while (readLastTime != -1 && totalRead != snapshotTarBytes.length) {
readLastTime = gis.read(snapshotTarBytes, totalRead, snapshotTarBytes.length - totalRead);
if (readLastTime == -1) {
break;
}
totalRead += readLastTime;
}
assertTrue(totalRead > 0);
assertFalse(totalRead == snapshotTarBytes.length);
ProcessBuilder pb = new ProcessBuilder(new String[]{ "tar", "--directory", TMPDIR, "-x"});
Process proc = pb.start();
OutputStream os = proc.getOutputStream();
os.write(snapshotTarBytes, 0, totalRead);
os.close();
assertEquals(0, proc.waitFor());
validateSnapshot(true);
byte firstStringBytes[] = new byte[1048576];
java.util.Arrays.fill(firstStringBytes, (byte)'c');
byte secondStringBytes[] = new byte[1048564];
java.util.Arrays.fill(secondStringBytes, (byte)'a');
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
VoltTable results[] = client.callProcedure("JumboSelect", 0).getResults();
assertEquals(results.length, 1);
assertTrue(results[0].advanceRow());
assertTrue(java.util.Arrays.equals( results[0].getStringAsBytes(1), firstStringBytes));
assertTrue(java.util.Arrays.equals( results[0].getStringAsBytes(2), secondStringBytes));
}
public void testRestore2dot8dot4dot1Snapshot()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
Client client = getClient();
byte snapshotTarBytes[] = new byte[1024 * 1024 * 3];
InputStream is =
org.voltdb_testprocs.regressionsuites.saverestore.MatView.class.
getResource("voltdb_2.8.4.1_snapshot.tar.gz").openConnection().getInputStream();
GZIPInputStream gis = new GZIPInputStream(is);
int totalRead = 0;
int readLastTime = 0;
while (readLastTime != -1 && totalRead != snapshotTarBytes.length) {
readLastTime = gis.read(snapshotTarBytes, totalRead, snapshotTarBytes.length - totalRead);
if (readLastTime == -1) {
break;
}
totalRead += readLastTime;
}
assertTrue(totalRead > 0);
assertFalse(totalRead == snapshotTarBytes.length);
ProcessBuilder pb = new ProcessBuilder(new String[]{ "tar", "--directory", TMPDIR, "-x"});
Process proc = pb.start();
OutputStream os = proc.getOutputStream();
os.write(snapshotTarBytes, 0, totalRead);
os.close();
assertEquals(0, proc.waitFor());
validateSnapshot(true);
byte firstStringBytes[] = new byte[1048576];
java.util.Arrays.fill(firstStringBytes, (byte)'c');
byte secondStringBytes[] = new byte[1048564];
java.util.Arrays.fill(secondStringBytes, (byte)'a');
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
VoltTable results[] = client.callProcedure("JumboSelect", 0).getResults();
assertEquals(results.length, 1);
assertTrue(results[0].advanceRow());
assertTrue(java.util.Arrays.equals( results[0].getStringAsBytes(1), firstStringBytes));
assertTrue(java.util.Arrays.equals( results[0].getStringAsBytes(2), secondStringBytes));
}
public void testSaveRestoreJumboRows()
throws IOException, InterruptedException, ProcCallException
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testSaveRestoreJumboRows.");
Client client = getClient();
byte firstStringBytes[] = new byte[1048576];
java.util.Arrays.fill(firstStringBytes, (byte)'c');
String firstString = new String(firstStringBytes, "UTF-8");
byte secondStringBytes[] = new byte[1048564];
java.util.Arrays.fill(secondStringBytes, (byte)'a');
String secondString = new String(secondStringBytes, "UTF-8");
VoltTable results[] = client.callProcedure("JumboInsert", 0, firstString, secondString).getResults();
firstString = null;
secondString = null;
assertEquals(results.length, 1);
assertEquals( 1, results[0].asScalarLong());
results = client.callProcedure("JumboSelect", 0).getResults();
assertEquals(results.length, 1);
assertTrue(results[0].advanceRow());
assertTrue(java.util.Arrays.equals( results[0].getStringAsBytes(1), firstStringBytes));
assertTrue(java.util.Arrays.equals( results[0].getStringAsBytes(2), secondStringBytes));
saveTablesWithDefaultOptions(client);
validateSnapshot(true);
releaseClient(client);
// Kill and restart all the execution sites.
m_config.shutDown();
m_config.startUp();
client = getClient();
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
results = client.callProcedure("JumboSelect", 0).getResults();
assertEquals(results.length, 1);
assertTrue(results[0].advanceRow());
assertTrue(java.util.Arrays.equals( results[0].getStringAsBytes(1), firstStringBytes));
assertTrue(java.util.Arrays.equals( results[0].getStringAsBytes(2), secondStringBytes));
}
public void testTSVConversion() throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Staring testTSVConversion.");
Client client = getClient();
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
Set<String> expectedText = new TreeSet<String>();
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks,
false,
expectedText);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)1);
validateSnapshot(true);
generateAndValidateTextFile( expectedText, false);
}
public void testCSVConversion() throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testCSVConversion");
Client client = getClient();
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
Set<String> expectedText = new TreeSet<String>();
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks,
true,
expectedText);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)1);
validateSnapshot(true);
generateAndValidateTextFile( new TreeSet<String>(expectedText), true);
deleteTestFiles();
client.callProcedure("@SnapshotSave",
"{ uripath:\"file://" + TMPDIR +
"\", nonce:\"" + TESTNONCE + "\", block:true, format:\"csv\" }");
FileInputStream fis = new FileInputStream(
TMPDIR + File.separator + TESTNONCE + "-REPLICATED_TESTER" + ".csv");
validateTextFile(expectedText, true, fis);
}
public void testBadSnapshotParams() throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testBadSnapshotParams");
Client client = getClient();
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
Set<String> expectedText = new TreeSet<String>();
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks,
true,
expectedText);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
boolean threwexception = false;
try {
client.callProcedure("@SnapshotSave",
"{ }");
} catch (Exception e) {
threwexception = true;
}
assertTrue(threwexception);
threwexception = false;
try {
client.callProcedure("@SnapshotSave",
new Object[] {null});
} catch (Exception e) {
threwexception = true;
}
assertTrue(threwexception);
threwexception = false;
try {
client.callProcedure("@SnapshotSave",
"{ uripath:\"file:///tmp\", nonce:\"\", block:true }");
} catch (Exception e) {
threwexception = true;
}
assertTrue(threwexception);
threwexception = false;
try {
client.callProcedure("@SnapshotSave",
"{ uripath:\"file:///tmp\", nonce:\"-\", block:true }");
} catch (Exception e) {
threwexception = true;
}
assertTrue(threwexception);
threwexception = false;
try {
client.callProcedure("@SnapshotSave",
"{ uripath:\"file:///tmp\", nonce:\",\", block:true }");
} catch (Exception e) {
threwexception = true;
}
assertTrue(threwexception);
threwexception = false;
try {
client.callProcedure("@SnapshotSave",
"{ uripath:\"hdfs:///tmp\", nonce:\"foo\", block:true }");
} catch (Exception e) {
threwexception = true;
}
assertTrue(threwexception);
threwexception = false;
try {
client.callProcedure("@SnapshotSave",
"{ uripath:\"/tmp\", nonce:\"foo\", block:true }");
} catch (Exception e) {
threwexception = true;
}
assertTrue(threwexception);
threwexception = false;
try {
client.callProcedure("@SnapshotSave",
"{ uripath:true, nonce:\"foo\", block:true }");
} catch (Exception e) {
threwexception = true;
}
assertTrue(threwexception);
client.callProcedure("@SnapshotSave",
"{ uripath:\"file://" + TMPDIR +
"\", nonce:\"" + TESTNONCE + "\", block:true, format:\"csv\" }");
FileInputStream fis = new FileInputStream(
TMPDIR + File.separator + TESTNONCE + "-REPLICATED_TESTER" + ".csv");
validateTextFile(expectedText, true, fis);
}
// Also does some basic smoke tests
// of @SnapshotStatus, @SnapshotScan and @SnapshotDelete
public void testSnapshotSave() throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testSnapshotSave");
Client client = getClient();
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
VoltTable[] results = null;
results = client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)1).getResults();
validateSnapshot(true);
// Check that snapshot status returns a reasonable result
checkSnapshotStatus(client, TMPDIR, TESTNONCE, null, "SUCCESS", 8);
VoltTable scanResults[] = client.callProcedure("@SnapshotScan", new Object[] { null }).getResults();
assertNotNull(scanResults);
assertEquals( 1, scanResults.length);
assertEquals( 1, scanResults[0].getColumnCount());
assertEquals( 1, scanResults[0].getRowCount());
assertTrue( scanResults[0].advanceRow());
assertTrue( "ERR_MSG".equals(scanResults[0].getColumnName(0)));
scanResults = client.callProcedure("@SnapshotScan", "/doesntexist").getResults();
assertNotNull(scanResults);
assertEquals( 1, scanResults[1].getRowCount());
assertTrue( scanResults[1].advanceRow());
assertTrue( "FAILURE".equals(scanResults[1].getString("RESULT")));
scanResults = client.callProcedure("@SnapshotScan", TMPDIR).getResults();
assertNotNull(scanResults);
assertEquals( 3, scanResults.length);
assertEquals( 9, scanResults[0].getColumnCount());
assertTrue(scanResults[0].getRowCount() >= 1);
assertTrue(scanResults[0].advanceRow());
// We can't assert that all snapshot files are generated by this test.
// There might be leftover snapshot files from other runs.
int count = 0;
String completeStatus = null;
do {
if (TESTNONCE.equals(scanResults[0].getString("NONCE"))) {
assertTrue(TMPDIR.equals(scanResults[0].getString("PATH")));
count++;
completeStatus = scanResults[0].getString("COMPLETE");
}
} while (scanResults[0].advanceRow());
assertEquals(1, count);
assertNotNull(completeStatus);
assertTrue("TRUE".equals(completeStatus));
FilenameFilter cleaner = new FilenameFilter()
{
@Override
public boolean accept(File dir, String file)
{
return file.startsWith(TESTNONCE) && file.endsWith("vpt");
}
};
File tmp_dir = new File(TMPDIR);
File[] tmp_files = tmp_dir.listFiles(cleaner);
tmp_files[0].delete();
scanResults = client.callProcedure("@SnapshotScan", TMPDIR).getResults();
assertNotNull(scanResults);
assertEquals( 3, scanResults.length);
assertEquals( 9, scanResults[0].getColumnCount());
assertTrue(scanResults[0].getRowCount() >= 1);
assertTrue(scanResults[0].advanceRow());
count = 0;
String missingTableName = null;
do {
if (TESTNONCE.equals(scanResults[0].getString("NONCE"))
&& "FALSE".equals(scanResults[0].getString("COMPLETE"))) {
assertTrue(TMPDIR.equals(scanResults[0].getString("PATH")));
count++;
missingTableName = scanResults[0].getString("TABLES_MISSING");
}
} while (scanResults[0].advanceRow());
assertEquals(1, count);
assertNotNull(missingTableName);
assertTrue(tmp_files[0].getName().contains(missingTableName));
// Instead of something exhaustive, let's just make sure that we get
// the number of result rows corresponding to the number of ExecutionSites
// that did save work
Cluster cluster = VoltDB.instance().getCatalogContext().cluster;
Database database = cluster.getDatabases().get("database");
CatalogMap<Table> tables = database.getTables();
int num_hosts = 1;
int replicated = 0;
int total_tables = 0;
int expected_entries = 3;
for (Table table : tables)
{
// Ignore materialized tables
if (table.getMaterializer() == null)
{
total_tables++;
if (table.getIsreplicated())
{
replicated++;
}
}
}
assertEquals(expected_entries, results[0].getRowCount());
while (results[0].advanceRow())
{
assertEquals(results[0].getString("RESULT"), "SUCCESS");
}
        // Now, try the save again and verify that we fail (since all the save
        // files will still exist). This will return one entry per table
        // per host.
expected_entries =
((total_tables - replicated) * num_hosts) + replicated;
try
{
results = client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)1).getResults();
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotSave exception: " + ex.getMessage());
}
assertEquals(expected_entries, results[0].getRowCount());
while (results[0].advanceRow())
{
if (!tmp_files[0].getName().contains(results[0].getString("TABLE"))) {
assertEquals(results[0].getString("RESULT"), "FAILURE");
assertTrue(results[0].getString("ERR_MSG").contains("SAVE FILE ALREADY EXISTS"));
}
}
VoltTable deleteResults[] =
client.callProcedure(
"@SnapshotDelete",
new String[] {TMPDIR},
new String[]{TESTNONCE}).getResults();
assertNotNull(deleteResults);
assertEquals( 1, deleteResults.length);
assertEquals( 9, deleteResults[0].getColumnCount());
//No rows returned right now, because the delete is done in a separate thread
assertEquals( 0, deleteResults[0].getRowCount());
//Give the async thread time to delete the files
boolean hadZeroFiles = false;
for (int ii = 0; ii < 20; ii++) {
Thread.sleep(100);
tmp_files = tmp_dir.listFiles(cleaner);
if (tmp_files.length == 0) {
hadZeroFiles = true;
break;
}
}
assertTrue( hadZeroFiles);
validateSnapshot(false);
try
{
results = client.callProcedure(
"@SnapshotSave",
"{ uripath:\"file://" + TMPDIR +
"\", nonce:\"" + TESTNONCE + "\", block:true, format:\"csv\" }").getResults();
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotSave exception: " + ex.getMessage());
}
System.out.println("Created CSV snapshot");
}
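    /**
     * Run SnapshotConverter over the REPLICATED_TESTER snapshot and validate
     * the CSV or TSV file it writes against expectedText.
     */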
private void generateAndValidateTextFile(Set<String> expectedText, boolean csv) throws Exception {
String args[] = new String[] {
TESTNONCE,
"--dir",
TMPDIR,
"--table",
"REPLICATED_TESTER",
"--type",
csv ? "CSV" : "TSV",
"--outdir",
TMPDIR
};
SnapshotConverter.main(args);
FileInputStream fis = new FileInputStream(
TMPDIR + File.separator + "REPLICATED_TESTER" + (csv ? ".csv" : ".tsv"));
validateTextFile( expectedText, csv, fis);
}
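    /**
     * Stream the converted file character by character, honoring CSV quoting
     * (doubled quotes inside a quoted field) or TSV backslash escapes, and
     * assert that every completed line is removed from expectedText and that
     * nothing is left over at the end.
     */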
private void validateTextFile(Set<String> expectedText, boolean csv, FileInputStream fis) throws Exception {
try {
InputStreamReader isr = new InputStreamReader(fis, "UTF-8");
BufferedReader br = new BufferedReader(isr);
StringBuffer sb = new StringBuffer();
int nextCharInt;
while ((nextCharInt = br.read()) != -1) {
char nextChar = (char)nextCharInt;
if (csv) {
if (nextChar == '"') {
sb.append(nextChar);
int nextNextCharInt = -1;
char prevChar = nextChar;
while ((nextNextCharInt = br.read()) != -1) {
char nextNextChar = (char)nextNextCharInt;
if (nextNextChar == '"') {
if (prevChar == '"') {
sb.append(nextNextChar);
} else {
sb.append(nextNextChar);
break;
}
} else {
sb.append(nextNextChar);
}
prevChar = nextNextChar;
}
} else if (nextChar == '\n' || nextChar == '\r') {
if (!expectedText.contains(sb.toString())) {
System.out.println("Missing string is " + sb);
}
assertTrue(expectedText.remove(sb.toString()));
sb = new StringBuffer();
} else {
sb.append(nextChar);
}
} else {
if (nextChar == '\\') {
sb.append(nextChar);
int nextNextCharInt = br.read();
char nextNextChar = (char)nextNextCharInt;
sb.append(nextNextChar);
} else if (nextChar == '\n' || nextChar == '\r') {
if (!expectedText.contains(sb.toString())) {
System.out.println("Missing string is " + sb);
}
assertTrue(expectedText.remove(sb.toString()));
sb = new StringBuffer();
} else {
sb.append(nextChar);
}
}
}
assertTrue(expectedText.isEmpty());
} finally {
fis.close();
}
}
public void testIdleOnlineSnapshot() throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testIdleOnlineSnapshot");
Client client = getClient();
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
client.callProcedure("@SnapshotSave", TMPDIR,
TESTNONCE, (byte)0);
        // Increased timeout from .7 to 1.2 seconds for the mini. It might not
        // have finished the non-blocking snapshot in time. Later increased to 2.0
        // to get memcheck to stop timing out ENG-1800
Thread.sleep(2000);
// Check that snapshot status returns a reasonable result
checkSnapshotStatus(client, TMPDIR, TESTNONCE, null, "SUCCESS", 8);
validateSnapshot(true);
}
public void testSaveAndRestoreReplicatedTable()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testSaveAndRestoreReplicatedTable");
int num_replicated_items_per_chunk = 200;
int num_replicated_chunks = 10;
Client client = getClient();
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
num_replicated_chunks);
// hacky, need to sleep long enough so the internal server tick
// updates the memory stats
Thread.sleep(1000);
VoltTable orig_mem = null;
try
{
orig_mem = client.callProcedure("@Statistics", "memory", 0).getResults()[0];
System.out.println("STATS: " + orig_mem.toString());
}
catch (Exception ex)
{
ex.printStackTrace();
fail("Statistics exception: " + ex.getMessage());
}
VoltTable[] results = null;
results = saveTablesWithDefaultOptions(client);
// Kill and restart all the execution sites.
m_config.shutDown();
m_config.startUp();
client = getClient();
try
{
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
while (results[0].advanceRow()) {
if (results[0].getString("RESULT").equals("FAILURE")) {
fail(results[0].getString("ERR_MSG"));
}
}
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotRestore exception: " + ex.getMessage());
}
// hacky, need to sleep long enough so the internal server tick
// updates the memory stats
Thread.sleep(1000);
VoltTable final_mem = null;
try
{
final_mem = client.callProcedure("@Statistics", "memory", 0).getResults()[0];
System.out.println("STATS: " + final_mem.toString());
}
catch (Exception ex)
{
ex.printStackTrace();
fail("Statistics exception: " + ex.getMessage());
}
checkTable(client, "REPLICATED_TESTER", "RT_ID",
num_replicated_items_per_chunk * num_replicated_chunks);
results = client.callProcedure("@Statistics", "table", 0).getResults();
System.out.println("@Statistics after restore:");
System.out.println(results[0]);
        // Spin until the stats look complete
        int foundItem = 0;
        while (foundItem != 3) {
            foundItem = 0;
            results = client.callProcedure("@Statistics", "table", 0).getResults();
            while (results[0].advanceRow())
            {
                if (results[0].getString("TABLE_NAME").equals("REPLICATED_TESTER"))
                {
                    ++foundItem;
                }
            }
        }
assertEquals(3, foundItem);
results = client.callProcedure("@Statistics", "table", 0).getResults();
while (results[0].advanceRow())
{
if (results[0].getString("TABLE_NAME").equals("REPLICATED_TESTER"))
{
assertEquals((num_replicated_chunks * num_replicated_items_per_chunk),
results[0].getLong("TUPLE_COUNT"));
}
}
// make sure all sites were loaded
validateSnapshot(true);
}
public void testSaveAndRestorePartitionedTable()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testSaveAndRestorePartitionedTable");
int num_partitioned_items_per_chunk = 120; // divisible by 3
int num_partitioned_chunks = 10;
Client client = getClient();
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
VoltTable[] results = null;
// hacky, need to sleep long enough so the internal server tick
// updates the memory stats
Thread.sleep(1000);
VoltTable orig_mem = null;
try
{
orig_mem = client.callProcedure("@Statistics", "memory", 0).getResults()[0];
System.out.println("STATS: " + orig_mem.toString());
}
catch (Exception ex)
{
ex.printStackTrace();
fail("Statistics exception: " + ex.getMessage());
}
DefaultSnapshotDataTarget.m_simulateFullDiskWritingHeader = true;
results = saveTablesWithDefaultOptions(client);
deleteTestFiles();
while (results[0].advanceRow()) {
assertTrue(results[0].getString("RESULT").equals("FAILURE"));
}
DefaultSnapshotDataTarget.m_simulateFullDiskWritingHeader = false;
validateSnapshot(false);
results = saveTablesWithDefaultOptions(client);
validateSnapshot(true);
while (results[0].advanceRow()) {
if (!results[0].getString("RESULT").equals("SUCCESS")) {
System.out.println(results[0].getString("ERR_MSG"));
}
assertTrue(results[0].getString("RESULT").equals("SUCCESS"));
}
try
{
checkSnapshotStatus(client, TMPDIR, TESTNONCE, null, "SUCCESS", 8);
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotRestore exception: " + ex.getMessage());
}
// Kill and restart all the execution sites.
m_config.shutDown();
m_config.startUp();
client = getClient();
try
{
results = client.callProcedure("@SnapshotRestore", TMPDIR,
TESTNONCE).getResults();
while (results[0].advanceRow()) {
if (results[0].getString("RESULT").equals("FAILURE")) {
fail(results[0].getString("ERR_MSG"));
}
}
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotRestore exception: " + ex.getMessage());
}
boolean threwException = false;
try
{
results = client.callProcedure("@SnapshotRestore", TMPDIR,
TESTNONCE).getResults();
while (results[0].advanceRow()) {
if (results[0].getString("RESULT").equals("FAILURE")) {
fail(results[0].getString("ERR_MSG"));
}
}
}
catch (Exception ex)
{
threwException = true;
}
assertTrue(threwException);
checkTable(client, "PARTITION_TESTER", "PT_ID",
num_partitioned_items_per_chunk * num_partitioned_chunks);
int foundItem = 0;
while (foundItem != 3) {
foundItem = 0;
results = client.callProcedure("@Statistics", "table", 0).getResults();
while (results[0].advanceRow())
{
if (results[0].getString("TABLE_NAME").equals("PARTITION_TESTER"))
{
++foundItem;
}
}
}
results = client.callProcedure("@Statistics", "table", 0).getResults();
while (results[0].advanceRow())
{
if (results[0].getString("TABLE_NAME").equals("PARTITION_TESTER"))
{
assertEquals((num_partitioned_items_per_chunk * num_partitioned_chunks) / 3,
results[0].getLong("TUPLE_COUNT"));
}
}
// Kill and restart all the execution sites.
m_config.shutDown();
m_config.startUp();
deleteTestFiles();
DefaultSnapshotDataTarget.m_simulateFullDiskWritingChunk = true;
org.voltdb.sysprocs.SnapshotRegistry.clear();
client = getClient();
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
results = saveTablesWithDefaultOptions(client);
validateSnapshot(false);
try
{
results = client.callProcedure("@SnapshotStatus").getResults();
boolean hasFailure = false;
while (results[0].advanceRow())
hasFailure |= results[0].getString("RESULT").equals("FAILURE");
assertTrue(hasFailure);
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotRestore exception: " + ex.getMessage());
}
DefaultSnapshotDataTarget.m_simulateFullDiskWritingChunk = false;
deleteTestFiles();
results = saveTablesWithDefaultOptions(client);
validateSnapshot(true);
// Kill and restart all the execution sites.
m_config.shutDown();
m_config.startUp();
client = getClient();
try
{
results = client.callProcedure("@SnapshotRestore", TMPDIR,
TESTNONCE).getResults();
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotRestore exception: " + ex.getMessage());
}
// hacky, need to sleep long enough so the internal server tick
// updates the memory stats
Thread.sleep(1000);
VoltTable final_mem = null;
try
{
final_mem = client.callProcedure("@Statistics", "memory", 0).getResults()[0];
System.out.println("STATS: " + final_mem.toString());
}
catch (Exception ex)
{
ex.printStackTrace();
fail("Statistics exception: " + ex.getMessage());
}
checkTable(client, "PARTITION_TESTER", "PT_ID",
num_partitioned_items_per_chunk * num_partitioned_chunks);
results = client.callProcedure("@Statistics", "table", 0).getResults();
        // Spin until the stats look complete
        foundItem = 0;
        while (foundItem != 3) {
            foundItem = 0;
            results = client.callProcedure("@Statistics", "table", 0).getResults();
            while (results[0].advanceRow())
            {
                if (results[0].getString("TABLE_NAME").equals("PARTITION_TESTER"))
                {
                    ++foundItem;
                }
            }
        }
assertEquals(3, foundItem);
results = client.callProcedure("@Statistics", "table", 0).getResults();
while (results[0].advanceRow())
{
if (results[0].getString("TABLE_NAME").equals("PARTITION_TESTER"))
{
assertEquals((num_partitioned_items_per_chunk * num_partitioned_chunks) / 3,
results[0].getLong("TUPLE_COUNT"));
}
}
}
// Test that we fail properly when there are no savefiles available
public void testRestoreMissingFiles()
throws IOException, InterruptedException
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testRestoreMissingFile");
int num_replicated_items = 1000;
int num_partitioned_items = 126;
Client client = getClient();
VoltTable repl_table = createReplicatedTable(num_replicated_items, 0, null);
// make a TPCC warehouse table
VoltTable partition_table =
createPartitionedTable(num_partitioned_items, 0);
loadTable(client, "REPLICATED_TESTER", true, repl_table);
loadTable(client, "PARTITION_TESTER", false, partition_table);
saveTablesWithDefaultOptions(client);
validateSnapshot(true);
// Kill and restart all the execution sites.
m_config.shutDown();
deleteTestFiles();
m_config.startUp();
client = getClient();
try {
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
}
catch (Exception e) {
assertTrue(e.getMessage().contains("No savefile state to restore"));
return;
}
assertTrue(false);
}
// Test that we fail properly when the save files are corrupted
public void testCorruptedFiles()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testCorruptedFiles");
int num_replicated_items = 1000;
int num_partitioned_items = 126;
java.util.Random r = new java.util.Random(0);
        final int iterations = 5;
for (int ii = 0; ii < iterations; ii++) {
Client client = getClient();
VoltTable repl_table = createReplicatedTable(num_replicated_items, 0, null);
// make a TPCC warehouse table
VoltTable partition_table =
createPartitionedTable(num_partitioned_items, 0);
loadTable(client, "REPLICATED_TESTER", true, repl_table);
loadTable(client, "PARTITION_TESTER", false, partition_table);
VoltTable results[] = saveTablesWithDefaultOptions(client);
validateSnapshot(true);
while (results[0].advanceRow()) {
if (results[0].getString("RESULT").equals("FAILURE")) {
System.out.println(results[0].getString("ERR_MSG"));
}
assertTrue(results[0].getString("RESULT").equals("SUCCESS"));
}
corruptTestFiles(r);
validateSnapshot(false);
releaseClient(client);
// Kill and restart all the execution sites.
m_config.shutDown();
m_config.startUp();
client = getClient();
results = null;
try {
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
fail(); // expect fail
}
catch (ProcCallException e) {
assertEquals(ClientResponse.OPERATIONAL_FAILURE, e.getClientResponse().getStatus());
results = e.getClientResponse().getResults();
}
assertNotNull(results);
assertNotNull(results[0]);
boolean haveFailure = false;
while (results[0].advanceRow()) {
if (results[0].getString("RESULT").equals("FAILURE")) {
haveFailure = true;
break;
}
}
assertTrue(haveFailure);
deleteTestFiles();
releaseClient(client);
// Kill and restart all the execution sites.
m_config.shutDown();
m_config.startUp();
}
}
// Test that a random corruption doesn't mess up the table. Not reproducible but useful for detecting
// stuff we won't normally find
public void testCorruptedFilesRandom()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testCorruptedFilesRandom");
int num_replicated_items = 1000;
int num_partitioned_items = 126;
java.util.Random r = new java.util.Random();
        final int iterations = 5;
for (int ii = 0; ii < iterations; ii++) {
Client client = getClient();
VoltTable repl_table = createReplicatedTable(num_replicated_items, 0, null);
// make a TPCC warehouse table
VoltTable partition_table =
createPartitionedTable(num_partitioned_items, 0);
loadTable(client, "REPLICATED_TESTER", true, repl_table);
loadTable(client, "PARTITION_TESTER", false, partition_table);
saveTablesWithDefaultOptions(client);
validateSnapshot(true);
releaseClient(client);
// Kill and restart all the execution sites.
m_config.shutDown();
corruptTestFiles(r);
validateSnapshot(false);
m_config.startUp();
client = getClient();
VoltTable results[] = null;
try {
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE).getResults();
fail(); // expect fail
}
catch (ProcCallException e) {
assertEquals(ClientResponse.OPERATIONAL_FAILURE, e.getClientResponse().getStatus());
results = e.getClientResponse().getResults();
}
assertNotNull(results);
assertNotNull(results[0]);
boolean haveFailure = false;
while (results[0].advanceRow()) {
if (results[0].getString("RESULT").equals("FAILURE")) {
haveFailure = true;
break;
}
}
            if (!haveFailure) {
                System.out.println("Expected a FAILURE row after restoring a corrupted snapshot, but found none");
            }
assertTrue(haveFailure);
deleteTestFiles();
releaseClient(client);
// Kill and restart all the execution sites.
m_config.shutDown();
m_config.startUp();
}
}
public void testRestoreMissingPartitionFile()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
int num_replicated_items = 1000;
int num_partitioned_items = 126;
Client client = getClient();
VoltTable repl_table = createReplicatedTable(num_replicated_items, 0, null);
// make a TPCC warehouse table
VoltTable partition_table =
createPartitionedTable(num_partitioned_items, 0);
loadTable(client, "REPLICATED_TESTER", true, repl_table);
loadTable(client, "PARTITION_TESTER", false, partition_table);
saveTablesWithDefaultOptions(client);
// Kill and restart all the execution sites.
m_config.shutDown();
String filename = TESTNONCE + "-PARTITION_TESTER-host_0.vpt";
File item_file = new File(TMPDIR, filename);
assertTrue(item_file.delete());
m_config.startUp();
client = getClient();
VoltTable resultTable = null;
try {
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
}
catch (ProcCallException e) {
resultTable = e.getClientResponse().getResults()[0];
assertEquals(ClientResponse.OPERATIONAL_FAILURE, e.getClientResponse().getStatus());
}
assertTrue(resultTable.advanceRow());
assertTrue(resultTable.getString("ERR_MSG").equals("Save data contains no information for table PARTITION_TESTER"));
}
public void testRepartition()
throws Exception
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testRepartition");
int num_replicated_items_per_chunk = 100;
int num_replicated_chunks = 10;
int num_partitioned_items_per_chunk = 120; // divisible by 3 and 4
int num_partitioned_chunks = 10;
Client client = getClient();
loadLargeReplicatedTable(client, "REPLICATED_TESTER",
num_replicated_items_per_chunk,
                                 num_replicated_chunks);
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
VoltTable[] results = null;
results = saveTablesWithDefaultOptions(client);
validateSnapshot(true);
// Kill and restart all the execution sites.
m_config.shutDown();
CatalogChangeSingleProcessServer config =
(CatalogChangeSingleProcessServer) m_config;
config.recompile(4);
m_config.startUp();
client = getClient();
try
{
results = client.callProcedure("@SnapshotRestore", TMPDIR,
TESTNONCE).getResults();
// XXX Should check previous results for success but meh for now
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotRestore exception: " + ex.getMessage());
}
checkTable(client, "PARTITION_TESTER", "PT_ID",
num_partitioned_items_per_chunk * num_partitioned_chunks);
checkTable(client, "REPLICATED_TESTER", "RT_ID",
num_replicated_items_per_chunk * num_replicated_chunks);
// Spin until the stats look complete
int foundItem = 0;
while (foundItem != 4) {
foundItem = 0;
results = client.callProcedure("@Statistics", "table", 0).getResults();
while (results[0].advanceRow()) {
if (results[0].getString("TABLE_NAME").equals("PARTITION_TESTER"))
{
++foundItem;
}
}
}
while (results[0].advanceRow())
{
if (results[0].getString("TABLE_NAME").equals("PARTITION_TESTER"))
{
assertEquals((num_partitioned_items_per_chunk * num_partitioned_chunks) / 4,
results[0].getLong("TUPLE_COUNT"));
}
}
config.revertCompile();
}
public void testChangeDDL()
throws IOException, InterruptedException, ProcCallException
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testChangeDDL");
int num_partitioned_items_per_chunk = 120;
int num_partitioned_chunks = 10;
Client client = getClient();
loadLargePartitionedTable(client, "PARTITION_TESTER",
num_partitioned_items_per_chunk,
num_partitioned_chunks);
// Store something in the table which will change columns
VoltTable change_table =
new VoltTable(new ColumnInfo("ID", VoltType.INTEGER),
new ColumnInfo("BYEBYE", VoltType.INTEGER));
VoltTable eng_2025_table =
new VoltTable(new ColumnInfo("key", VoltType.STRING),
new ColumnInfo("value", VoltType.VARBINARY));
for (int i = 0; i < 10; i++)
{
Object[] row = new Object[] {i, i};
change_table.addRow(row);
eng_2025_table.addRow(new Object[] {Integer.toString(i), new byte[64]});
}
loadTable(client, "CHANGE_COLUMNS", false, change_table);
loadTable(client, "ENG_2025", true, eng_2025_table);
VoltTable[] results = null;
results = saveTablesWithDefaultOptions(client);
validateSnapshot(true);
// Kill and restart all the execution sites.
m_config.shutDown();
CatalogChangeSingleProcessServer config =
(CatalogChangeSingleProcessServer) m_config;
SaveRestoreTestProjectBuilder project =
new SaveRestoreTestProjectBuilder();
project.addDefaultProcedures();
project.addDefaultPartitioning();
project.addSchema(SaveRestoreTestProjectBuilder.class.
getResource("saverestore-altered-ddl.sql"));
config.recompile(project);
m_config.startUp();
client = getClient();
try
{
results = client.callProcedure("@SnapshotRestore", TMPDIR,
TESTNONCE).getResults();
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotRestore exception: " + ex.getMessage());
}
// XXX consider adding a check that the newly materialized table is
// not loaded
results = client.callProcedure("@Statistics", "table", 0).getResults();
boolean found_gets_created = false;
while (results[0].advanceRow())
{
if (results[0].getString("TABLE_NAME").equals("GETS_REMOVED"))
{
fail("Table GETS_REMOVED got reloaded");
}
if (results[0].getString("TABLE_NAME").equals("GETS_CREATED"))
{
found_gets_created = true;
}
}
// Check the table which changed columns
VoltTable[] change_results =
client.callProcedure("SaveRestoreSelect", "CHANGE_COLUMNS").getResults();
assertEquals(3, change_results[0].getColumnCount());
for (int i = 0; i < 10; i++)
{
VoltTableRow row = change_results[0].fetchRow(i);
assertEquals(i, row.getLong("ID"));
assertEquals(1234, row.getLong("HASDEFAULT"));
row.getLong("HASNULL");
assertTrue(row.wasNull());
}
assertTrue(found_gets_created);
config.revertCompile();
}
public void testGoodChangeAttributeTypes()
throws IOException, InterruptedException, ProcCallException
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testGoodChangeAttributeTypes");
Client client = getClient();
// Store something in the table which will change columns
VoltTable change_types =
new VoltTable(new ColumnInfo("ID", VoltType.INTEGER),
new ColumnInfo("BECOMES_INT", VoltType.TINYINT),
new ColumnInfo("BECOMES_FLOAT", VoltType.INTEGER),
new ColumnInfo("BECOMES_TINY", VoltType.INTEGER));
change_types.addRow(0, 100, 100, 100);
change_types.addRow(1, VoltType.NULL_TINYINT, VoltType.NULL_INTEGER,
VoltType.NULL_INTEGER);
loadTable(client, "CHANGE_TYPES", true, change_types);
saveTablesWithDefaultOptions(client);
validateSnapshot(true);
// Kill and restart all the execution sites.
m_config.shutDown();
CatalogChangeSingleProcessServer config =
(CatalogChangeSingleProcessServer) m_config;
SaveRestoreTestProjectBuilder project =
new SaveRestoreTestProjectBuilder();
project.addDefaultProcedures();
project.addDefaultPartitioning();
project.addSchema(SaveRestoreTestProjectBuilder.class.
getResource("saverestore-altered-ddl.sql"));
config.recompile(project);
m_config.startUp();
client = getClient();
try
{
client.callProcedure("@SnapshotRestore", TMPDIR, TESTNONCE);
}
catch (Exception ex)
{
ex.printStackTrace();
fail("SnapshotRestore exception: " + ex.getMessage());
}
client.callProcedure("@Statistics", "table", 0);
VoltTable[] change_results =
client.callProcedure("SaveRestoreSelect", "CHANGE_TYPES").getResults();
VoltTableRow row = change_results[0].fetchRow(0);
assertEquals(100, row.getLong(1));
assertEquals(100.0, row.getDouble(2));
assertEquals(100, row.getLong(3));
row = change_results[0].fetchRow(1);
row.getLong(1);
assertTrue(row.wasNull());
row.getDouble(2);
assertTrue(row.wasNull());
row.getLong(3);
assertTrue(row.wasNull());
config.revertCompile();
}
public void testBadChangeAttributeTypes()
throws IOException, InterruptedException, ProcCallException
{
if (isValgrind()) return; // snapshot doesn't run in valgrind ENG-4034
System.out.println("Starting testBadChangeAttributeTypes");
Client client = getClient();
// Store something in the table which will change columns
VoltTable change_types =
new VoltTable(new ColumnInfo("ID", VoltType.INTEGER),
new ColumnInfo("BECOMES_INT", VoltType.TINYINT),
new ColumnInfo("BECOMES_FLOAT", VoltType.INTEGER),
new ColumnInfo("BECOMES_TINY", VoltType.INTEGER));
change_types.addRow(0, 100, 100, 100000);
loadTable(client, "CHANGE_TYPES", true, change_types);
VoltTable[] results = null;
results = saveTablesWithDefaultOptions(client);
validateSnapshot(true);
// Kill and restart all the execution sites.
m_config.shutDown();
CatalogChangeSingleProcessServer config =
(CatalogChangeSingleProcessServer) m_config;
SaveRestoreTestProjectBuilder project =
new SaveRestoreTestProjectBuilder();
project.addDefaultProcedures();
project.addDefaultPartitioning();
project.addSchema(SaveRestoreTestProjectBuilder.class.
getResource("saverestore-altered-ddl.sql"));
config.recompile(project);
m_config.startUp();
client = getClient();
try
{
results = client.callProcedure("@SnapshotRestore", TMPDIR,
TESTNONCE).getResults();
fail(); // expect failure
}
catch (ProcCallException ex) {
assertEquals(ClientResponse.OPERATIONAL_FAILURE, ex.getClientResponse().getStatus());
results = ex.getClientResponse().getResults();
}
boolean type_failure = false;
while (results[0].advanceRow())
{
if (results[0].getString("RESULT").equals("FAILURE"))
{
if (results[0].getString("ERR_MSG").contains("would overflow"))
{
type_failure = true;
}
}
}
assertTrue(type_failure);
config.revertCompile();
}
public static class SnapshotResult {
Long hostID;
String table;
String path;
String filename;
String nonce;
Long txnID;
Long endTime;
String result;
}
public static SnapshotResult[] checkSnapshotStatus(Client client, String path, String nonce, Integer endTime,
String result, Integer rowCount)
throws NoConnectionsException, IOException, ProcCallException {
// Execute @SnapshotStatus to get raw results.
VoltTable statusResults[] = client.callProcedure("@SnapshotStatus").getResults();
assertNotNull(statusResults);
assertEquals( 1, statusResults.length);
assertEquals( 14, statusResults[0].getColumnCount());
// Validate row count if requested.
Integer resultRowCount = statusResults[0].getRowCount();
if (rowCount != null) {
assertEquals(rowCount, resultRowCount);
}
// Populate status data object list.
SnapshotResult[] results = new SnapshotResult[resultRowCount];
for (int i = 0; i < resultRowCount; i++) {
assertTrue(statusResults[0].advanceRow());
results[i] = new SnapshotResult();
results[i].hostID = statusResults[0].getLong("HOST_ID");
results[i].table = statusResults[0].getString("TABLE");
results[i].path = statusResults[0].getString("PATH");
results[i].filename = statusResults[0].getString("FILENAME");
results[i].nonce = statusResults[0].getString("NONCE");
results[i].txnID = statusResults[0].getLong("TXNID");
results[i].endTime = statusResults[0].getLong("END_TIME");
results[i].result = statusResults[0].getString("RESULT");
// Perform requested validation.
if (path != null) {
assertEquals(path, results[i].path);
}
if (nonce != null) {
assertEquals(nonce, results[i].nonce);
}
if (endTime != null) {
assertEquals(endTime, results[i].endTime);
}
if (result != null) {
assertEquals(result, results[i].result);
}
}
return results;
}
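    // Usage sketch (illustrative, not called by the suite as-is): assert that
    // every status row reports success for our snapshot, without constraining
    // end time or row count:
    //   checkSnapshotStatus(client, TMPDIR, TESTNONCE, null, "SUCCESS", null);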
/**
* Build the list of tests to be run. Uses the regression suite
* helper classes (JUnit magic) to allow multiple back ends.
*/
static public junit.framework.Test suite() {
VoltServerConfig config = null;
MultiConfigSuiteBuilder builder =
new MultiConfigSuiteBuilder(TestSaveRestoreSysprocSuite.class);
SaveRestoreTestProjectBuilder project =
new SaveRestoreTestProjectBuilder();
project.addAllDefaults();
config =
new CatalogChangeSingleProcessServer(JAR_NAME, 3,
BackendTarget.NATIVE_EE_JNI);
boolean success = config.compile(project);
assert(success);
builder.addServerConfig(config);
return builder;
}
}
|
package se.sics.cooja.mspmote.interfaces;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Collection;
import javax.swing.JButton;
import javax.swing.JPanel;
import org.apache.log4j.Logger;
import org.jdom.Element;
import se.sics.cooja.ClassDescription;
import se.sics.cooja.Mote;
import se.sics.cooja.Simulation;
import se.sics.cooja.interfaces.Button;
import se.sics.cooja.mspmote.MspMoteTimeEvent;
import se.sics.cooja.mspmote.SkyMote;
@ClassDescription("Button")
public class SkyButton extends Button {
private static Logger logger = Logger.getLogger(SkyButton.class);
private SkyMote skyMote;
private Simulation sim;
private MspMoteTimeEvent pressButtonEvent;
private MspMoteTimeEvent releaseButtonEvent;
public SkyButton(Mote mote) {
skyMote = (SkyMote) mote;
sim = mote.getSimulation();
pressButtonEvent = new MspMoteTimeEvent((SkyMote)mote, 0) {
public void execute(long t) {
skyMote.skyNode.setButton(true);
}
};
releaseButtonEvent = new MspMoteTimeEvent((SkyMote)mote, 0) {
public void execute(long t) {
skyMote.skyNode.setButton(false);
}
};
}
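/**
 * Simulates a complete click: schedules a button press at the current
 * simulation time and a release one simulated millisecond later, both
 * executed on the simulation thread.
 */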
public void clickButton() {
sim.invokeSimulationThread(new Runnable() {
public void run() {
sim.scheduleEvent(pressButtonEvent, sim.getSimulationTime());
sim.scheduleEvent(releaseButtonEvent, sim.getSimulationTime() + Simulation.MILLISECOND);
}
});
}
public void pressButton() {
sim.invokeSimulationThread(new Runnable() {
public void run() {
sim.scheduleEvent(pressButtonEvent, sim.getSimulationTime());
}
});
}
public void releaseButton() {
sim.invokeSimulationThread(new Runnable() {
public void run() {
sim.scheduleEvent(releaseButtonEvent, sim.getSimulationTime());
}
});
}
public boolean isPressed() {
logger.warn("Not implemented");
return false;
}
public JPanel getInterfaceVisualizer() {
JPanel panel = new JPanel();
final JButton clickButton = new JButton("Click button");
panel.add(clickButton);
clickButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
clickButton();
}
});
return panel;
}
public void releaseInterfaceVisualizer(JPanel panel) {
}
public Collection<Element> getConfigXML() {
return null;
}
public void setConfigXML(Collection<Element> configXML, boolean visAvailable) {
}
}
|
package org.cli;
import org.commcare.util.cli.ApplicationHost;
import org.commcare.util.cli.CliCommand;
import org.commcare.util.engine.CommCareConfigEngine;
import org.javarosa.core.util.externalizable.LivePrototypeFactory;
import org.javarosa.core.util.externalizable.PrototypeFactory;
import org.junit.Assert;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.io.StringReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.StandardCharsets;
import static junit.framework.TestCase.assertTrue;
/**
*
* Tests for the CommCare CLI
*
* Uses a specific, tightly coupled format to drive the CLI's I/O
*
* @author wpride
*/
public class CliTests {
private class CliTestRun<E extends CliTestReader> {
CliTestRun(String applicationPath,
String restoreResource,
Class<E> cliTestReaderClass,
String steps,
String endpointId,
String[] endpointArgs) throws InvocationTargetException, NoSuchMethodException, InstantiationException, IllegalAccessException {
ApplicationHost host = buildApplicationHost(applicationPath, restoreResource, cliTestReaderClass, steps);
boolean passed = false;
try {
host.run(endpointId, endpointArgs);
} catch (TestPassException e) {
passed = true;
}
assertTrue(passed);
}
private ApplicationHost buildApplicationHost(String applicationResource,
String restoreResource,
Class<E> cliTestReaderClass,
String steps) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
ClassLoader classLoader = getClass().getClassLoader();
String applicationPath = new File(classLoader.getResource(applicationResource).getFile()).getAbsolutePath();
PrototypeFactory prototypeFactory = new LivePrototypeFactory();
CommCareConfigEngine engine = CliCommand.configureApp(applicationPath, prototypeFactory);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream outStream = new PrintStream(baos);
Constructor<E> ctor = cliTestReaderClass.getConstructor(String.class, ByteArrayOutputStream.class);
CliTestReader reader = ctor.newInstance(steps, baos);
ApplicationHost host = new ApplicationHost(engine, prototypeFactory, reader, outStream);
File restoreFile = new File(classLoader.getResource(restoreResource).getFile());
String restorePath = restoreFile.getAbsolutePath();
host.setRestoreToLocalFile(restorePath);
return host;
}
}
@Test
public void testConstraintsForm() throws Exception {
// Start a basic form
new CliTestRun<>("basic_app/basic.ccz",
"case_create_basic.xml",
BasicTestReader.class,
"1 0 \n",
null,
null);
}
@Test
public void testCaseSelection() throws Exception {
// Perform case selection
new CliTestRun<>("basic_app/basic.ccz",
"basic_app/restore.xml",
CaseTestReader.class,
"2 1 5 1 \n \n",
null,
null);
}
@Test
public void testSessionEndpoint() throws Exception {
// Run CLI with session endpoint arg
new CliTestRun<>("basic_app/basic.ccz",
"basic_app/restore.xml",
SessionEndpointTestReader.class,
"\n",
"m5_endpoint",
new String[] {"124938b2-c228-4107-b7e6-31a905c3f4ff"});
}
/**
* The CliTestReader overrides the Reader (usually System.in) passed into the CLI
* and so is able to provide input through the readLine() function that the CLI
* reads from. We are also able to get the output at this point and make assertions
* about its content.
*/
static abstract class CliTestReader extends BufferedReader {
private String[] steps;
private int stepIndex;
private ByteArrayOutputStream outStream;
CliTestReader(String steps, ByteArrayOutputStream outStream) {
super(new StringReader("Unused dummy reader"));
this.steps = steps.split(" ");
this.outStream = outStream;
}
@Override
public String readLine() throws IOException {
String output = new String(outStream.toByteArray(), StandardCharsets.UTF_8);
processLine(stepIndex, output);
String ret = steps[stepIndex++];
outStream.reset();
// Return the next input for the CLI to process
return ret;
}
abstract void processLine(int stepIndex, String output);
}
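// Note: step strings are split on single spaces, so a steps argument of
// "1 0 \n" yields the inputs "1", "0" and "\n" -- one readLine() result per
// prompt the CLI emits, with the captured output validated before each one.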
static class BasicTestReader extends CliTestReader {
public BasicTestReader(String args, ByteArrayOutputStream outStream) {
super(args, outStream);
}
void processLine(int stepIndex, String output) {
switch(stepIndex) {
case 0:
Assert.assertTrue(output.contains("Basic Tests"));
Assert.assertTrue(output.contains("0) Basic Form Tests"));
break;
case 1:
Assert.assertTrue(output.contains("0) Constraints"));
break;
case 2:
Assert.assertTrue(output.contains("Press Return to proceed"));
break;
case 3:
Assert.assertTrue(output.contains("This form tests different logic constraints."));
throw new TestPassException();
default:
throw new RuntimeException(String.format("Did not recognize output %s at stepIndex %s", output, stepIndex));
}
}
}
static class CaseTestReader extends CliTestReader {
public CaseTestReader(String args, ByteArrayOutputStream outStream) {
super(args, outStream);
}
void processLine(int stepIndex, String output) {
switch(stepIndex) {
case 0:
Assert.assertTrue(output.contains("Basic Tests"));
Assert.assertTrue(output.contains("0) Basic Form Tests"));
break;
case 1:
Assert.assertTrue(output.contains("0) Create a Case"));
break;
case 2:
// m2_case_short
Assert.assertTrue(output.contains("Case | vl1"));
Assert.assertTrue(output.contains("Date Opened"));
Assert.assertTrue(output.contains("case one"));
break;
case 3:
// Tab 0 of m2_case_long
Assert.assertTrue(output.contains("Phone Number"));
Assert.assertTrue(output.contains("9632580741"));
break;
case 4:
// Tab 1 of m2_case_long
Assert.assertTrue(output.contains("Geodata"));
Assert.assertTrue(output.contains("17.4469641 78.3719456 543.4 24.36"));
break;
case 5:
Assert.assertTrue(output.contains("Form Start: Press Return to proceed"));
break;
case 6:
Assert.assertTrue(output.contains("This form will allow you to add and update"));
throw new TestPassException();
default:
throw new RuntimeException(String.format("Did not recognize output %s at stepIndex %s", output, stepIndex));
}
}
}
static class SessionEndpointTestReader extends CliTestReader {
public SessionEndpointTestReader(String args, ByteArrayOutputStream outStream) {
super(args, outStream);
}
void processLine(int stepIndex, String output) {
switch(stepIndex) {
case 0:
Assert.assertTrue(output.contains("0) Update a Case"));
Assert.assertTrue(output.contains("1) Close a Case"));
throw new TestPassException();
default:
throw new RuntimeException(String.format("Did not recognize output %s at stepIndex %s", output, stepIndex));
}
}
}
// Because the CLI is a REPL that will loop indefinitely unless certain code paths
// are reached, we need a way for tests to exit early. This exception is caught at
// the top level of the CliTestRun and marks the test as passed when thrown.
private static class TestPassException extends RuntimeException {}
}
|
package samples;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import tools.TestBase;
import tools.logging.TestOutput;
import tools.selenium.SeleniumHelper.Locators;
import tools.selenium.SeleniumHelper;
public class SampleIT extends TestBase {
@BeforeClass(alwaysRun = true)
public void beforeClass() throws Exception {
// set the base URL for the tests here
testSite = "http:
// set the author of the tests here
author = "Max Saperstone\n<br/>max.saperstone@coveros.com";
// set the version of the tests or of the software, possibly with a
// dynamic check
version = "0.0.1";
}
@DataProvider(name = "google search terms", parallel = true)
public Object[][] DataSetOptions() {
return new Object[][] { new Object[] { "python" }, new Object[] { "perl" }, new Object[] { "bash" } };
}
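// Each row of this provider is passed to sampleTestWDataProvider below,
// running the search test once per term; parallel = true lets the rows
// execute concurrently.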
@Test(groups = { "sample" }, description = "A sample test to check a title")
public void sampleTest() throws Exception {
// obtain our logger
TestOutput output = this.output.get();
// perform some actions
output.compareTitle("Google");
// verify no issues
finish();
}
@Test(dataProvider = "google search terms", groups = { "sample" },
description = "A sample test using a data provider to perform searches")
public void sampleTestWDataProvider(String searchTerm) throws Exception {
// obtain our browser instance
SeleniumHelper selHelper = this.selHelper.get();
// obtain our logger
TestOutput output = this.output.get();
// perform some actions
selHelper.type(Locators.name, "q", searchTerm);
selHelper.click(Locators.name, "btnG");
selHelper.waitForElementDisplayed(Locators.id, "resultStats");
output.compareTitle(searchTerm + " - Google Search");
// verify no issues
finish();
}
@Test(groups = { "sample" }, description = "A sample test to check a the goToURL method")
public void sampleTestGoToURL() throws Exception {
SeleniumHelper selHelper = this.selHelper.get();
// perform some actions
selHelper.goToURL("https:
selHelper.getCurrentUrl().equals("https:
// verify no issues
finish();
}
@Test(groups = { "sample" }, description = "A sample test to check the waitForElementPresent method")
public void sampleTestWaitForElementPresent() throws Exception {
SeleniumHelper selHelper = this.selHelper.get();
// perform some actions
selHelper.waitForElementPresent(Locators.name, "q");
// verify no issues
finish();
}
@Test(groups = { "sample" }, description = "A sample test to check the waitForElementNotPresent method")
public void sampleTestWaitForElementNotPresent() throws Exception {
SeleniumHelper selHelper = this.selHelper.get();
// perform some actions
selHelper.waitForElementNotPresent(Locators.name, "non-existent-name");
// verify no issues
finish();
}
@Test(groups = { "sample" }, description = "A sample test to check the waitForElementDisplayed method")
public void sampleTestWaitForElementDisplayed() throws Exception {
SeleniumHelper selHelper = this.selHelper.get();
// perform some actions
selHelper.waitForElementDisplayed(Locators.name, "q");
// verify no issues
finish();
}
}
|
package net.sf.mpxj;
import java.nio.charset.Charset;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
/**
* Instances of this class represent enumerated code page values.
*/
public enum CodePage
{
ANSI("ANSI", "Cp1252"),
MAC("MAC", "MacRoman"),
LATIN("850", "Cp850"),
US("437", "Cp437"),
ZH("ZH", "GB2312");
/**
* Private constructor.
*
* @param value MPX code page name
* @param charset Java character set name
*/
private CodePage(String value, String charset)
{
m_value = value;
m_charset = charset;
}
/**
* Retrieve a CodePage instance representing the supplied value.
*
* @param value MPX code page name
* @return CodePage instance
*/
public static CodePage getInstance(String value)
{
CodePage result = NAME_MAP.get(value);
if (result == null)
{
result = ANSI;
}
return (result);
}
/**
* Retrieve the Java character set represented by the codepage.
*
* @return Java Charset instance
*/
public Charset getCharset()
{
return (Charset.forName(m_charset));
}
/**
* Returns the string representation of the codepage.
*
* @return codepage
*/
@Override public String toString()
{
return (m_value);
}
private String m_value;
private String m_charset;
private static final Map<String, CodePage> NAME_MAP = new HashMap<String, CodePage>();
static
{
for (CodePage e : EnumSet.range(CodePage.ANSI, CodePage.ZH))
{
NAME_MAP.put(e.m_value, e);
}
}
}
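// Usage sketch (illustrative):
//   CodePage cp = CodePage.getInstance("850"); // -> CodePage.LATIN
//   Charset cs = cp.getCharset();              // -> Charset.forName("Cp850")
// Unrecognized names fall back to the default:
//   CodePage.getInstance("bogus") == CodePage.ANSI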
|
package ASSET.GUI.CommandLine;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.Iterator;
import java.util.Vector;
import org.w3c.dom.Document;
import ASSET.ScenarioType;
import ASSET.GUI.CommandLine.CommandLine.ASSETProgressMonitor;
import ASSET.Scenario.CoreScenario;
import ASSET.Scenario.LiveScenario.ISimulation;
import ASSET.Scenario.LiveScenario.ISimulationQue;
import ASSET.Scenario.Observers.CoreObserver;
import ASSET.Scenario.Observers.InterScenarioObserverType;
import ASSET.Scenario.Observers.ScenarioObserver;
import ASSET.Scenario.Observers.ScenarioStatusObserver;
import ASSET.Scenario.Observers.TimeObserver;
import ASSET.Util.SupportTesting;
import ASSET.Util.MonteCarlo.ScenarioGenerator;
import ASSET.Util.XML.ASSETReaderWriter;
import ASSET.Util.XML.ASSETReaderWriter.ResultsContainer;
import MWC.Algorithms.LiveData.IAttribute;
public class MultiScenarioCore implements ISimulationQue
{
/**
* success code to prove it ran ok
*/
static int SUCCESS = 0;
/**
* error code to return when we've rx the wrong parameters
*/
static int WRONG_PARAMETERS = 1;
/**
* error code to return when we can't load our data
*/
static int PROBLEM_LOADING = 2;
/**
* error code to indicate we couldn't find one of the files
*/
static int FILE_NOT_FOUND = 3;
/**
* error code to indicate we couldn't create the output files
*/
static int TROUBLE_MAKING_FILES = 4;
/**
* the scenario generator that does all the work
*/
private ScenarioGenerator _myGenny;
/**
* the set of scenarios we're going to run through
*/
protected Vector<Document> _myScenarioDocuments;
private Vector<InterScenarioObserverType> _theInterObservers;
private Vector<ScenarioObserver> _thePlainObservers;
private Vector<ScenarioObserver> _allObservers;
private ResultsContainer _resultsStore;
private Vector<InstanceWrapper> _theScenarios;
private Vector<IAttribute> _myAttributes;
private ScenarioStatusObserver _stateObserver;
/**
* ok, get things up and running. Load the data-files
*
* @param scenario
* the scenario file
* @param control
* the control file
* @param pMon
*          the progress monitor we report to
* @param outputDirectory
* where to write to
* @return null for success, message for failure
*/
private String setup(String scenario, String control,
ASSETProgressMonitor pMon, File outputDirectory)
{
// ok, create our genny
_myGenny = new ScenarioGenerator();
// now create somewhere for the scenarios to go
_myScenarioDocuments = new Vector<Document>(0, 1);
// and now create the list of scenarios
String res = _myGenny.createScenarios(scenario, control,
_myScenarioDocuments, pMon, outputDirectory);
return res;
}
public boolean isMultiScenario(String controlFile)
throws FileNotFoundException
{
return CommandLine.isMultiScenarioFile(controlFile);
}
/**
* write this set of scenarios to disk, for later examination
*
* @param out
* standard out
* @param err
* error out
* @param in
* input (to receive user input)
* @return success code (0) or failure codes
*/
private int writeToDisk(PrintStream out, PrintStream err, InputStream in)
{
int res = 0;
try
{
String failure = _myGenny.writeTheseToFile(_myScenarioDocuments, false);
// just check for any other probs
if (failure != null)
{
res = TROUBLE_MAKING_FILES;
}
}
catch (Exception e)
{
res = TROUBLE_MAKING_FILES;
}
return res;
}
public InstanceWrapper getWrapperFor(ScenarioType scenario)
{
InstanceWrapper res = null;
Iterator<InstanceWrapper> sList = _theScenarios.iterator();
while (sList.hasNext())
{
InstanceWrapper thisWrap = (InstanceWrapper) sList.next();
if (thisWrap.scenario == scenario)
{
res = thisWrap;
break;
}
}
return res;
}
/**
* ok, let's get going...
*
* @param out
* @param err
* @param scenarioRunningListener
*/
private int runAll(OutputStream out, OutputStream err, InputStream in,
Document controlFile, NewScenarioListener listener)
{
int result = SUCCESS;
final int scenarioLen = _myScenarioDocuments.size();
// get the data we're after
String controlStr = ScenarioGenerator.writeToString(_myGenny
.getControlFile());
InputStream controlStream = new ByteArrayInputStream(controlStr.getBytes());
// ok, we've got our scenarios up and running, might as well run through
// them
int ctr = 0;
ScenarioType oldScenario = null;
boolean firstRun = true;
for (Iterator<InstanceWrapper> iterator = _theScenarios.iterator(); iterator
.hasNext();)
{
InstanceWrapper wrapper = iterator.next();
ScenarioType thisS = wrapper.scenario;
// tell the listener what's up
if (listener != null)
listener.newScenario(oldScenario, thisS);
if (firstRun)
{
firstRun = false;
// we don't need to initialise the listeners for the first scenario, it
// gets done in advance.
}
else
{
// get the observers sorted
wrapper.initialise(_allObservers);
}
// now run this one
CommandLine runner = wrapper.commandLine;
System.out.print("Run " + (ctr + 1) + " of " + scenarioLen + " ");
// now set the seed
thisS.setSeed(_resultsStore.randomSeed);
// and get going....
runner.run();
wrapper.terminate(_allObservers);
try
{
// and reset the control stream
controlStream.reset();
}
catch (IOException e)
{
e.printStackTrace();
}
// and remember the scenario
oldScenario = thisS;
ctr++;
}
// ok, everything's finished running. Just have a pass through to
// close any inter-scenario observers
for (int thisObs = 0; thisObs < _theInterObservers.size(); thisObs++)
{
ScenarioObserver scen = _theInterObservers.elementAt(thisObs);
if (scen.isActive())
{
InterScenarioObserverType obs = _theInterObservers.elementAt(thisObs);
obs.finish();
}
}
return result;
}
/**
* member method, effectively to handle "main" processing.
*
* @param controlFile
*          the control file
* @param scenarioFile
*          the scenario file
* @param out
* standard out
* @param err
* error out
* @param in
* input (to receive user input)
* @param pMon
* @param outputDirectory
* - where to put the working files
* @return success code (0) or failure codes
*/
public int prepareFiles(String controlFile, String scenarioFile,
PrintStream out, PrintStream err, InputStream in,
ASSETProgressMonitor pMon, File outputDirectory)
{
int resCode = 0;
// do a little tidying
_myAttributes = null;
_theInterObservers = null;
_thePlainObservers = null;
System.out.println("about to generate scenarios");
// and set it up (including generating the scenarios)
String res = setup(scenarioFile, controlFile, pMon, outputDirectory);
if (res != null)
{
// see what it was, file not found?
if (res.indexOf("not found") >= 0)
{
err.println("Problem finding control file:" + res);
resCode = FILE_NOT_FOUND;
}
else
{
err.println("Problem loading multi-scenario generator:" + res);
resCode = PROBLEM_LOADING;
}
}
else
{
out.println("about to write new scenarios to disk");
pMon.beginTask("Writing generated scenarios to disk", 1);
// ok, now write the scenarios to disk
resCode = writeToDisk(out, err, in);
pMon.worked(1);
// and let our generator ditch redundant data
// _myGenny = null;
// the scenario generator read in a lot of data, and the system won't
// necessarily do a GC just yet - so we'll trigger an artificial one.
System.gc();
if (resCode != SUCCESS)
{
if (resCode == TROUBLE_MAKING_FILES)
{
err.println("Failed to write new scenarios to disk. Is an old copy of an output file currently open?");
err.println(" Alternately, is a file-browser currently looking at the output directory?");
}
}
}
return resCode;
}
public int prepareControllers(ResultsContainer multiRunResultsStore,
ASSETProgressMonitor pMon, NewScenarioListener newScenarioListener)
{
int resCode = 0;
_resultsStore = multiRunResultsStore;
// sort out observers (inter & intra)
_theInterObservers = new Vector<InterScenarioObserverType>(0, 1);
_thePlainObservers = new Vector<ScenarioObserver>();
// start off by generating the time/state observers that we create for
// everybody
_stateObserver = new ScenarioStatusObserver();
_thePlainObservers.add(_stateObserver);
_thePlainObservers.add(new TimeObserver());
// also add those from the file
Vector<ScenarioObserver> theObservers = _resultsStore.observerList;
for (int i = 0; i < theObservers.size(); i++)
{
ScenarioObserver observer = theObservers.elementAt(i);
if (observer instanceof InterScenarioObserverType)
{
_theInterObservers.add((InterScenarioObserverType) observer);
}
else
_thePlainObservers.add(observer);
}
// also collate the collected set of observers
// combine the two sets of observers
_allObservers = new Vector<ScenarioObserver>();
_allObservers.addAll(_theInterObservers);
_allObservers.addAll(_thePlainObservers);
// also read in the collection of scenarios
_theScenarios = new Vector<InstanceWrapper>(0, 1);
pMon.beginTask("Reading in block of scenarios", _myScenarioDocuments.size());
for (Iterator<Document> iterator = _myScenarioDocuments.iterator(); iterator
.hasNext();)
{
Document thisD = iterator.next();
String scenarioStr = ScenarioGenerator.writeToString(thisD);
InputStream scenarioStream = new ByteArrayInputStream(
scenarioStr.getBytes());
CoreScenario newS = new CoreScenario();
ASSETReaderWriter.importThis(newS, null, scenarioStream);
// wrap the scenario
CommandLine runner = new CommandLine(newS);
InstanceWrapper wrapper = new InstanceWrapper(newS, runner);
_theScenarios.add(wrapper);
pMon.worked(1);
}
// ok, everything's loaded. Just have a pass through to
// initialise any inter-scenario observers
for (int thisObs = 0; thisObs < _theInterObservers.size(); thisObs++)
{
ScenarioObserver scen = _theInterObservers.elementAt(thisObs);
if (scen.isActive())
{
InterScenarioObserverType obs = (InterScenarioObserverType) scen;
obs.initialise(_resultsStore.outputDirectory);
}
}
// right, just set up the listeners for the first scenario, so it can be
// controlled from the time controller
if (!_theScenarios.isEmpty())
{
InstanceWrapper firstS = _theScenarios.firstElement();
firstS.initialise(_allObservers);
}
// tell the parent we've got a new scenario
if (newScenarioListener != null)
newScenarioListener.newScenario(null,
_theScenarios.firstElement().scenario);
return resCode;
}
public int nowRun(PrintStream out, PrintStream err, InputStream in,
NewScenarioListener scenarioListener)
{
return runAll(out, err, in, _myGenny.getControlFile(), scenarioListener);
}
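// Typical lifecycle (as exercised by the test below): prepareFiles(...)
// generates the scenario documents and writes them to disk,
// prepareControllers(...) wires up the observers and loads the scenarios,
// and nowRun(...) executes each scenario in turn.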
// testing stuff
public static class MultiServerTest extends SupportTesting
{
public MultiServerTest(final String val)
{
super(val);
}
public void testValidStartup()
{
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ByteArrayOutputStream bes = new ByteArrayOutputStream();
PrintStream out = new PrintStream(bos);
PrintStream err = new PrintStream(bes);
InputStream in = new ByteArrayInputStream(new byte[]
{});
bos.reset();
bes.reset();
String[] args = new String[2];
args[1] = "src/ASSET/Util/MonteCarlo/test_variance_scenario.xml";
args[0] = "src/ASSET/Util/MonteCarlo/test_variance1.xml";
// args[1] =
// "..\\src\\java\\ASSET_SRC\\ASSET\\Util\\MonteCarlo\\test_variance1.xml";
MultiScenarioCore scen = new MultiScenarioCore();
ASSETProgressMonitor pMon = new ASSETProgressMonitor()
{
public void beginTask(String name, int totalWork)
{
}
public void worked(int work)
{
}
};
int res = scen.prepareFiles(args[0], args[1], out, err, in, pMon, null);
assertEquals("ran ok", SUCCESS, res);
// check the contents of the error message
assertEquals("no error reported", 0, bes.size());
// check the scenarios got created
Vector<Document> scenarios = scen._myScenarioDocuments;
assertEquals("scenarios got created", 3, scenarios.size());
}
public void testCommandLineMainProcessing()
{
String[] args = new String[2];
args[0] = "src/ASSET/Util/MonteCarlo/test_variance_scenario.xml";
args[1] = "src/ASSET/Util/MonteCarlo/test_variance_realistic.xml";
CommandLine.main(args);
}
}
// and now the main method
/**
* main method, of course - just runs the embedded self-tests
*
* @param args
*/
public static void main(String[] args)
{
MultiServerTest tm = new MultiServerTest("me");
SupportTesting.callTestMethods(tm);
}
public Vector<IAttribute> getAttributes()
{
if (_myAttributes == null)
{
// look at our observers, find any attributes
_myAttributes = new Vector<IAttribute>();
// start off with the single-scenario observers
for (Iterator<ScenarioObserver> iterator = _thePlainObservers.iterator(); iterator
.hasNext();)
{
ScenarioObserver thisS = iterator.next();
if (thisS instanceof IAttribute)
_myAttributes.add((IAttribute) thisS);
}
// now the multi-scenario observers
for (Iterator<InterScenarioObserverType> iterator = _theInterObservers
.iterator(); iterator.hasNext();)
{
InterScenarioObserverType thisS = iterator.next();
if (thisS instanceof IAttribute)
_myAttributes.add((IAttribute) thisS);
}
}
// done.
return _myAttributes;
}
public Vector<ISimulation> getSimulations()
{
Vector<ISimulation> res = new Vector<ISimulation>();
for (Iterator<InstanceWrapper> iter = _theScenarios.iterator(); iter
.hasNext();)
res.add((ISimulation) iter.next().scenario);
// return my list of simulations
return res;
}
public boolean isRunning()
{
return false;
}
public void startQue(NewScenarioListener listener)
{
// ok, go for it
nowRun(System.out, System.err, System.in, listener);
}
public void stopQue()
{
}
public IAttribute getState()
{
return _stateObserver;
}
public static class InstanceWrapper
{
final ScenarioType scenario;
final CommandLine commandLine;
public InstanceWrapper(ScenarioType theScenario, CommandLine theCommandLine)
{
scenario = theScenario;
commandLine = theCommandLine;
}
public void initialise(Vector<ScenarioObserver> allObservers)
{
// ok, get the scenario, so we can set up our observers
for (int i = 0; i < allObservers.size(); i++)
{
CoreObserver thisObs = (CoreObserver) allObservers.elementAt(i);
// and set it up
thisObs.setup(scenario);
// and add to the runner
commandLine.addObserver(thisObs);
}
}
public void terminate(Vector<ScenarioObserver> allObservers)
{
// ok, get the scenario, so we can set up our observers
for (int i = 0; i < allObservers.size(); i++)
{
CoreObserver thisObs = (CoreObserver) allObservers.elementAt(i);
// and tear it down
thisObs.tearDown(scenario);
}
// and remove all the observers
commandLine.clearObservers();
}
}
public Vector<ScenarioObserver> getObservers()
{
return _allObservers;
}
}
|
package udo.testdriver;
import static org.junit.Assert.*;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.GregorianCalendar;
import org.junit.Test;
import udo.storage.Storage;
import udo.storage.Task;
import udo.storage.Task.TaskType;
public class StorageTest {
private static ArrayList<Task> taskList = new ArrayList<Task>();
private static ArrayList<Task> doneTasks = new ArrayList<Task>();
private static String storageFile = "task.json";
private static Storage st;
public void initialize(){
clearFile(storageFile);
st = new Storage();
taskList.clear();
doneTasks.clear();
}
public void clearFile(String fileName) {
try {
FileWriter fw = new FileWriter(fileName);
BufferedWriter bw = new BufferedWriter(fw);
bw.write("");
bw.close(); // also closes the underlying FileWriter
} catch (IOException ex){
ex.printStackTrace();
}
}
private void addDummyTasks() {
Task task1 = new Task(TaskType.TODO, "fighting", null,null, null,
120, new GregorianCalendar(2011,01,02), "personal", false, false);
Task task2 = new Task(TaskType.EVENT, "reading books", null, new GregorianCalendar(2006,03,01), new GregorianCalendar(2005,04,01),
0, null, "leisure", false, false);
ArrayList<Task> temp = new ArrayList<Task>();
temp.add(task1);
temp.add(task2);
st.add(temp);
}
@Test
public void testAdd() {
initialize();
Task task0 = new Task(TaskType.DEADLINE, "meeting", new GregorianCalendar(2005,01,01), null, null,
0, new GregorianCalendar(2005,01,02), "work",true, false);
Task task1 = new Task(TaskType.TODO, "fighting", null,null, null,
120, new GregorianCalendar(2011,01,02), "personal", false, false);
Task task2 = new Task(TaskType.EVENT, "reading books", null, new GregorianCalendar(2006,03,01), new GregorianCalendar(2005,04,01),
0, null, "leisure", false, false);
st.add(task0);
st.add(task1);
st.add(task2);
taskList.add(task0);
taskList.add(task1);
taskList.add(task2);
assertEquals(taskList, st.query());
}
@Test
public void testDelete() {
initialize();
assertEquals(false, st.delete(0));
st.add(new Task(TaskType.DEADLINE, "meeting", new GregorianCalendar(2005,01,01), null, null,
0, new GregorianCalendar(2005,01,02), "work",true, false));
assertEquals(true, st.delete(0));
assertEquals(0, st.query().size());
Task task0 = new Task(TaskType.DEADLINE, "meeting", new GregorianCalendar(2005,01,01), null, null,
0, new GregorianCalendar(2005,01,02), "work",true, false);
Task task1 = new Task(TaskType.TODO, "fighting", null,null, null,
120, new GregorianCalendar(2011,01,02), "personal", false, false);
Task task2 = new Task(TaskType.EVENT, "reading books", null, new GregorianCalendar(2006,03,01), new GregorianCalendar(2005,04,01),
0, null, "leisure", false, false);
st.add(task0);
st.add(task1);
st.add(task2);
assertEquals(false, st.delete(3));
assertEquals(true, st.delete(0));
taskList.add(task2);
taskList.add(task1);
assertEquals(taskList, st.query());
}
@Test
public void TestModify(){
initialize();
assertEquals(false, st.modify(0, new Task()));
Task task0 = new Task(TaskType.DEADLINE, "meeting", new GregorianCalendar(2005,01,01), null, null,
0, new GregorianCalendar(2005,01,02), "work",true, false);
Task task1 = new Task(TaskType.TODO, "fighting", null,null, null,
120, new GregorianCalendar(2011,01,02), "personal", false, false);
st.add(task0);
assertEquals(true, st.modify(0, task1));
taskList.add(task1);
assertEquals(taskList, st.query());
}
@Test
public void TestQuery(){
initialize();
assertEquals(new ArrayList<Task>(), st.query());
Task task0 = new Task(TaskType.DEADLINE, "meeting", new GregorianCalendar(2005,01,01), null, null,
0, new GregorianCalendar(2005,01,02), "work",true, false);
st.add(task0);
taskList.add(task0);
assertEquals(taskList, st.query());
}
@Test
public void TestUndo(){
initialize();
Task task0 = new Task(TaskType.DEADLINE, "meeting", new GregorianCalendar(2005,01,01), null, null,
0, new GregorianCalendar(2005,01,02), "work",true, false);
Task task1 = new Task(TaskType.TODO, "fighting", null,null, null,
120, new GregorianCalendar(2011,01,02), "personal", false, false);
assertEquals(false,st.undo());
st.add(task0);
assertEquals(true, st.undo());
assertEquals(new ArrayList<Task>(), st.query());
assertEquals(false, st.undo()); //cannot undo multiple times
st.add(task0);
st.modify(0, task1);
assertEquals(true, st.undo());
taskList.add(task1);
assertEquals(taskList, st.query());
st.delete(0);
assertEquals(true, st.undo());
assertEquals(taskList, st.query());
}
@Test
public void TestAddDummy(){
initialize();
assertEquals(false, st.add(new ArrayList<Task>()));
addDummyTasks();
for (int i = 0; i < st.query().size(); i++){
assertEquals(new Integer(1), st.query().get(i).getGroupId());
}
addDummyTasks();
for (int i = 2; i < st.query().size(); i++){
assertEquals(new Integer(2), st.query().get(i).getGroupId());
}
}
@Test
public void TestConfirm(){
initialize();
addDummyTasks();
assertEquals(false, st.confirm(-1));
assertEquals(false, st.confirm(3));
assertEquals(true, st.confirm(0));
Task task1 = new Task(TaskType.TODO, "fighting", null,null, null,
120, new GregorianCalendar(2011,01,02), "personal", false, false);
Task task2 = new Task(TaskType.EVENT, "reading books", null, new GregorianCalendar(2006,03,01), new GregorianCalendar(2005,04,01),
0, null, "leisure", false, false);
task1.setIndex(0);
task1.setGroupId(0);
assertEquals(task1, st.query().get(0));
addDummyTasks();
task2.setIndex(2);
task2.setGroupId(1);
assertEquals(task2, st.query().get(2));
assertEquals(true, st.confirm(2));
task2.setIndex(1);
task2.setGroupId(0);
assertEquals(task2, st.query().get(1));
}
}
|
package com.intellij.uiDesigner.designSurface;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.uiDesigner.CutCopyPasteSupport;
import com.intellij.uiDesigner.FormEditingUtil;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.Util;
import com.intellij.uiDesigner.radComponents.RadComponent;
import com.intellij.uiDesigner.radComponents.RadContainer;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.*;
import java.util.List;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
public final class ResizeProcessor extends EventProcessor {
private static final Logger LOG = Logger.getInstance("#com.intellij.uiDesigner.designSurface.ResizeProcessor");
private RadComponent myComponent;
private int myResizeMask;
private Point myLastPoint;
private Point myPressPoint;
private Rectangle myBounds;
private Rectangle myOriginalBounds;
private RadContainer myOriginalParent;
private final GuiEditor myEditor;
private GridConstraints myOriginalConstraints;
private RadComponent myResizedCopy;
public ResizeProcessor(final GuiEditor editor, final RadComponent component, final int resizeMask){
myEditor = editor;
if (component.getParent() == null) {
throw new IllegalArgumentException("parent is null for " + component);
}
myComponent = component;
myOriginalParent = component.getParent();
myOriginalConstraints = component.getConstraints();
final List<RadComponent> copyList = CutCopyPasteSupport.copyComponents(editor, Collections.singletonList(component));
if (component.getParent().getLayoutManager().isGrid() && copyList != null) {
myComponent.setResizing(true);
Rectangle rc = SwingUtilities.convertRectangle(component.getParent().getDelegee(),
component.getBounds(),
myEditor.getDragLayer());
component.setDragging(true);
component.setSelected(false);
myResizedCopy = copyList.get(0);
myResizedCopy.setBounds(rc);
myResizedCopy.setSelected(true);
editor.getDragLayer().add(myResizedCopy.getDelegee());
}
myResizeMask = resizeMask;
setCursor(getResizeCursor());
}
protected void processKeyEvent(final KeyEvent e){}
protected void processMouseEvent(final MouseEvent e){
if (e.getID() == MouseEvent.MOUSE_PRESSED) {
myLastPoint = e.getPoint();
myPressPoint = myLastPoint;
myBounds = myOriginalParent.getLayoutManager().isGrid() ? myResizedCopy.getBounds() : myComponent.getBounds();
myOriginalBounds = new Rectangle(myBounds);
}
else if(e.getID()==MouseEvent.MOUSE_DRAGGED){
final int dx = e.getX() - myLastPoint.x;
final int dy = e.getY() - myLastPoint.y;
if (myOriginalParent.getLayoutManager().isGrid()) {
final Point point = SwingUtilities.convertPoint(myEditor.getDragLayer(), e.getX(), e.getY(), myOriginalParent.getDelegee());
putGridSpanFeedback(point);
}
else if (myOriginalParent.isXY()) {
myEditor.getActiveDecorationLayer().removeFeedback();
setCursor(getResizeCursor());
}
else {
return;
}
final GridConstraints constraints = myComponent.getConstraints();
if ((myResizeMask & Painter.WEST_MASK) != 0) {
myBounds.x += dx;
myBounds.width -= dx;
}
if ((myResizeMask & Painter.EAST_MASK) != 0) {
myBounds.width += dx;
}
if ((myResizeMask & Painter.NORTH_MASK) != 0) {
myBounds.y += dy;
myBounds.height -= dy;
}
if ((myResizeMask & Painter.SOUTH_MASK) != 0) {
myBounds.height += dy;
}
final Dimension minSize = myComponent.getMinimumSize();
final Rectangle newBounds = myOriginalParent.getLayoutManager().isGrid() ? myResizedCopy.getBounds() : myComponent.getBounds();
// Component's bounds cannot be less the some minimum size
if (myBounds.width >= minSize.width) {
newBounds.x = myBounds.x;
newBounds.width = myBounds.width;
}
else {
if((myResizeMask & Painter.WEST_MASK) != 0){
newBounds.x = newBounds.x+newBounds.width-minSize.width;
newBounds.width = minSize.width;
}
else if ((myResizeMask & Painter.EAST_MASK) != 0) {
newBounds.width = minSize.width;
}
}
if (myBounds.height >= minSize.height) {
newBounds.y = myBounds.y;
newBounds.height = myBounds.height;
}
else {
if ((myResizeMask & Painter.NORTH_MASK) != 0) {
newBounds.y = newBounds.y + newBounds.height - minSize.height;
newBounds.height = minSize.height;
}
else if ((myResizeMask & Painter.SOUTH_MASK) != 0) {
newBounds.height = minSize.height;
}
}
final Dimension size = newBounds.getSize();
Util.adjustSize(myComponent.getDelegee(), constraints, size);
newBounds.width = size.width;
newBounds.height = size.height;
if (myOriginalParent.getLayoutManager().isGrid()) {
myResizedCopy.setBounds(newBounds);
}
else {
if (myEditor.ensureEditable()) {
myComponent.setBounds(newBounds);
}
}
myEditor.refresh();
myLastPoint=e.getPoint();
}
else if (e.getID() == MouseEvent.MOUSE_RELEASED) {
boolean modified = false;
myComponent.getDelegee().setVisible(true);
myComponent.setResizing(false);
myComponent.setSelected(true);
if (myResizedCopy != null) {
myEditor.getDragLayer().remove(myResizedCopy.getDelegee());
}
if (myOriginalParent.getLayoutManager().isGrid() && myEditor.ensureEditable()) {
final Point point = SwingUtilities.convertPoint(myEditor.getDragLayer(), e.getX(), e.getY(), myOriginalParent.getDelegee());
Rectangle rcGrid = getGridSpanGridRect(myOriginalParent, myOriginalConstraints, point, myResizeMask);
if (rcGrid != null && isGridSpanDropAllowed(rcGrid)) {
GridConstraints oldConstraints = (GridConstraints) myOriginalConstraints.clone();
myOriginalConstraints.setColumn(rcGrid.x);
myOriginalConstraints.setRow(rcGrid.y);
myOriginalConstraints.setColSpan(rcGrid.width);
myOriginalConstraints.setRowSpan(rcGrid.height);
myComponent.fireConstraintsChanged(oldConstraints);
modified = true;
}
}
myEditor.getActiveDecorationLayer().removeFeedback();
myComponent.setDragging(false);
if (modified) {
myEditor.refreshAndSave(true);
}
}
}
private Cursor getResizeCursor() {
return Cursor.getPredefinedCursor(Painter.getResizeCursor(myResizeMask));
}
private void putGridSpanFeedback(final Point point) {
Rectangle rcGrid = getGridSpanGridRect(myOriginalParent, myOriginalConstraints, point, myResizeMask);
if (rcGrid != null) {
Rectangle rc = myOriginalParent.getGridLayoutManager().getGridCellRangeRect(myOriginalParent, rcGrid.y, rcGrid.x,
rcGrid.y+rcGrid.height-1, rcGrid.x+rcGrid.width-1);
myEditor.getActiveDecorationLayer().putFeedback(myOriginalParent.getDelegee(), rc);
setCursor(isGridSpanDropAllowed(rcGrid) ? getResizeCursor() : FormEditingUtil.getMoveNoDropCursor());
}
else {
setCursor(getResizeCursor());
myEditor.getActiveDecorationLayer().removeFeedback();
}
}
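/**
 * Computes the proposed grid span for the resize in progress. The returned
 * rectangle is expressed in grid cells, not pixels: (x, y) is the starting
 * (column, row) and (width, height) the column/row span; returns null when
 * the mouse is outside the rows/columns relevant to the resize mask.
 */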
@Nullable
static Rectangle getGridSpanGridRect(final RadContainer grid,
final GridConstraints originalConstraints,
final Point point,
final int resizeMask) {
int rowAtMouse = (resizeMask & (Painter.NORTH_MASK | Painter.SOUTH_MASK)) != 0
? grid.getGridRowAt(point.y)
: -1;
int colAtMouse = (resizeMask & (Painter.WEST_MASK | Painter.EAST_MASK)) != 0
? grid.getGridColumnAt(point.x)
: -1;
if (rowAtMouse != -1 || colAtMouse != -1) {
final int origStartCol = originalConstraints.getColumn();
final int origEndCol = originalConstraints.getColumn() + originalConstraints.getColSpan() - 1;
int startCol = origStartCol;
int endCol = origEndCol;
if (colAtMouse >= 0) {
if ((resizeMask & Painter.WEST_MASK) != 0 && colAtMouse <= endCol) {
// resize to left
startCol = colAtMouse;
}
else if ((resizeMask & Painter.EAST_MASK) != 0 && colAtMouse >= startCol) {
endCol = colAtMouse;
}
}
final int origStartRow = originalConstraints.getRow();
final int origEndRow = originalConstraints.getRow() + originalConstraints.getRowSpan() - 1;
int startRow = origStartRow;
int endRow = origEndRow;
if (rowAtMouse >= 0) {
if ((resizeMask & Painter.NORTH_MASK) != 0 && rowAtMouse <= endRow) {
startRow = rowAtMouse;
}
else if ((resizeMask & Painter.SOUTH_MASK) != 0 && rowAtMouse >= startRow) {
endRow = rowAtMouse;
}
}
return new Rectangle(startCol, startRow, endCol-startCol+1, endRow-startRow+1);
}
return null;
}
protected boolean cancelOperation(){
myComponent.setBounds(myOriginalBounds);
myComponent.setResizing(false);
myComponent.setDragging(false);
if (myResizedCopy != null) {
myEditor.getDragLayer().remove(myResizedCopy.getDelegee());
myResizedCopy = null;
}
myEditor.refresh();
return true;
}
private boolean isGridSpanDropAllowed(final Rectangle rcGrid) {
return myOriginalParent.findComponentInRect(rcGrid.y, rcGrid.x, rcGrid.height, rcGrid.width) == null;
}
}
|
package com.intellij.uiDesigner.designSurface;
import com.intellij.uiDesigner.CutCopyPasteSupport;
import com.intellij.uiDesigner.FormEditingUtil;
import com.intellij.uiDesigner.UIDesignerBundle;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.Util;
import com.intellij.uiDesigner.radComponents.RadComponent;
import com.intellij.uiDesigner.radComponents.RadContainer;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.Collections;
import java.util.List;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
public final class ResizeProcessor extends EventProcessor {
private RadComponent myComponent;
private int myResizeMask;
private Point myLastPoint;
private Point myPressPoint;
private Rectangle myBounds;
private Rectangle myOriginalBounds;
private RadContainer myOriginalParent;
private final GuiEditor myEditor;
private GridConstraints myOriginalConstraints;
private RadComponent myResizedCopy;
public ResizeProcessor(final GuiEditor editor, final RadComponent component, final int resizeMask){
myEditor = editor;
if (component.getParent() == null) {
throw new IllegalArgumentException("parent is null for " + component);
}
myComponent = component;
myOriginalParent = component.getParent();
myOriginalConstraints = component.getConstraints();
final List<RadComponent> copyList = CutCopyPasteSupport.copyComponents(editor, Collections.singletonList(component));
if (component.getParent().getLayoutManager().isGrid() && copyList != null) {
myComponent.setResizing(true);
Rectangle rc = SwingUtilities.convertRectangle(component.getParent().getDelegee(),
component.getBounds(),
myEditor.getDragLayer());
component.setDragging(true);
component.setSelected(false);
myResizedCopy = copyList.get(0);
myResizedCopy.setBounds(rc);
myResizedCopy.setSelected(true);
editor.getDragLayer().add(myResizedCopy.getDelegee());
}
myResizeMask = resizeMask;
setCursor(getResizeCursor());
}
protected void processKeyEvent(final KeyEvent e){}
protected void processMouseEvent(final MouseEvent e){
if (e.getID() == MouseEvent.MOUSE_PRESSED) {
myLastPoint = e.getPoint();
myPressPoint = myLastPoint;
myBounds = myOriginalParent.getLayoutManager().isGrid() ? myResizedCopy.getBounds() : myComponent.getBounds();
myOriginalBounds = new Rectangle(myBounds);
}
else if(e.getID()==MouseEvent.MOUSE_DRAGGED){
final int dx = e.getX() - myLastPoint.x;
final int dy = e.getY() - myLastPoint.y;
if (myOriginalParent.getLayoutManager().isGrid()) {
final Point point = SwingUtilities.convertPoint(myEditor.getDragLayer(), e.getX(), e.getY(), myOriginalParent.getDelegee());
putGridSpanFeedback(point);
}
else if (myOriginalParent.isXY()) {
myEditor.getActiveDecorationLayer().removeFeedback();
setCursor(getResizeCursor());
}
else {
return;
}
final GridConstraints constraints = myComponent.getConstraints();
if ((myResizeMask & Painter.WEST_MASK) != 0) {
myBounds.x += dx;
myBounds.width -= dx;
}
if ((myResizeMask & Painter.EAST_MASK) != 0) {
myBounds.width += dx;
}
if ((myResizeMask & Painter.NORTH_MASK) != 0) {
myBounds.y += dy;
myBounds.height -= dy;
}
if ((myResizeMask & Painter.SOUTH_MASK) != 0) {
myBounds.height += dy;
}
final Dimension minSize = myComponent.getMinimumSize();
final Rectangle newBounds = myOriginalParent.getLayoutManager().isGrid() ? myResizedCopy.getBounds() : myComponent.getBounds();
// Component's bounds cannot be less the some minimum size
if (myBounds.width >= minSize.width) {
newBounds.x = myBounds.x;
newBounds.width = myBounds.width;
}
else {
if((myResizeMask & Painter.WEST_MASK) != 0){
newBounds.x = newBounds.x+newBounds.width-minSize.width;
newBounds.width = minSize.width;
}
else if ((myResizeMask & Painter.EAST_MASK) != 0) {
newBounds.width = minSize.width;
}
}
if (myBounds.height >= minSize.height) {
newBounds.y = myBounds.y;
newBounds.height = myBounds.height;
}
else {
if ((myResizeMask & Painter.NORTH_MASK) != 0) {
newBounds.y = newBounds.y + newBounds.height - minSize.height;
newBounds.height = minSize.height;
}
else if ((myResizeMask & Painter.SOUTH_MASK) != 0) {
newBounds.height = minSize.height;
}
}
final Dimension size = newBounds.getSize();
Util.adjustSize(myComponent.getDelegee(), constraints, size);
newBounds.width = size.width;
newBounds.height = size.height;
if (myOriginalParent.getLayoutManager().isGrid()) {
myResizedCopy.setBounds(newBounds);
}
else {
if (myEditor.ensureEditable()) {
myComponent.setBounds(newBounds);
}
}
myEditor.refresh();
myLastPoint=e.getPoint();
}
else if (e.getID() == MouseEvent.MOUSE_RELEASED) {
boolean modified = false;
myComponent.getDelegee().setVisible(true);
myComponent.setResizing(false);
myComponent.setSelected(true);
if (myResizedCopy != null) {
myEditor.getDragLayer().remove(myResizedCopy.getDelegee());
}
if (myOriginalParent.getLayoutManager().isGrid() && myEditor.ensureEditable()) {
final Point point = SwingUtilities.convertPoint(myEditor.getDragLayer(), e.getX(), e.getY(), myOriginalParent.getDelegee());
Rectangle rcGrid = getGridSpanGridRect(myOriginalParent, myOriginalConstraints, point, myResizeMask);
if (rcGrid != null && isGridSpanDropAllowed(rcGrid)) {
GridConstraints oldConstraints = (GridConstraints) myOriginalConstraints.clone();
myOriginalConstraints.setColumn(rcGrid.x);
myOriginalConstraints.setRow(rcGrid.y);
myOriginalConstraints.setColSpan(rcGrid.width);
myOriginalConstraints.setRowSpan(rcGrid.height);
myComponent.fireConstraintsChanged(oldConstraints);
modified = true;
}
}
else {
modified = true;
}
myEditor.getActiveDecorationLayer().removeFeedback();
myComponent.setDragging(false);
if (modified) {
myEditor.refreshAndSave(true);
}
}
}
private Cursor getResizeCursor() {
return Cursor.getPredefinedCursor(Painter.getResizeCursor(myResizeMask));
}
private void putGridSpanFeedback(final Point point) {
Rectangle rcGrid = getGridSpanGridRect(myOriginalParent, myOriginalConstraints, point, myResizeMask);
if (rcGrid != null) {
Rectangle rc = myOriginalParent.getGridLayoutManager().getGridCellRangeRect(myOriginalParent, rcGrid.y, rcGrid.x,
rcGrid.y+rcGrid.height-1, rcGrid.x+rcGrid.width-1);
String tooltip = UIDesignerBundle.message("resize.feedback", myComponent.getDisplayName(), rcGrid.height, rcGrid.width);
myEditor.getActiveDecorationLayer().putFeedback(myOriginalParent.getDelegee(), rc, tooltip);
setCursor(isGridSpanDropAllowed(rcGrid) ? getResizeCursor() : FormEditingUtil.getMoveNoDropCursor());
}
else {
setCursor(getResizeCursor());
myEditor.getActiveDecorationLayer().removeFeedback();
}
}
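/**
 * As in the sibling version of this class: the result is in grid-cell
 * coordinates ((x, y) = start column/row, (width, height) = span), or null
 * when the mouse is outside the rows/columns relevant to the resize mask.
 */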
@Nullable
static Rectangle getGridSpanGridRect(final RadContainer grid,
final GridConstraints originalConstraints,
final Point point,
final int resizeMask) {
int rowAtMouse = (resizeMask & (Painter.NORTH_MASK | Painter.SOUTH_MASK)) != 0
? grid.getGridRowAt(point.y)
: -1;
int colAtMouse = (resizeMask & (Painter.WEST_MASK | Painter.EAST_MASK)) != 0
? grid.getGridColumnAt(point.x)
: -1;
if (rowAtMouse != -1 || colAtMouse != -1) {
final int origStartCol = originalConstraints.getColumn();
final int origEndCol = originalConstraints.getColumn() + originalConstraints.getColSpan() - 1;
int startCol = origStartCol;
int endCol = origEndCol;
if (colAtMouse >= 0) {
if ((resizeMask & Painter.WEST_MASK) != 0 && colAtMouse <= endCol) {
// resize to left
startCol = colAtMouse;
}
else if ((resizeMask & Painter.EAST_MASK) != 0 && colAtMouse >= startCol) {
endCol = colAtMouse;
}
}
final int origStartRow = originalConstraints.getRow();
final int origEndRow = originalConstraints.getRow() + originalConstraints.getRowSpan() - 1;
int startRow = origStartRow;
int endRow = origEndRow;
if (rowAtMouse >= 0) {
if ((resizeMask & Painter.NORTH_MASK) != 0 && rowAtMouse <= endRow) {
startRow = rowAtMouse;
}
else if ((resizeMask & Painter.SOUTH_MASK) != 0 && rowAtMouse >= startRow) {
endRow = rowAtMouse;
}
}
return new Rectangle(startCol, startRow, endCol-startCol+1, endRow-startRow+1);
}
return null;
}
protected boolean cancelOperation(){
myComponent.setBounds(myOriginalBounds);
myComponent.setResizing(false);
myComponent.setDragging(false);
if (myResizedCopy != null) {
myEditor.getDragLayer().remove(myResizedCopy.getDelegee());
myResizedCopy = null;
}
myEditor.refresh();
return true;
}
private boolean isGridSpanDropAllowed(final Rectangle rcGrid) {
return myOriginalParent.findComponentInRect(rcGrid.y, rcGrid.x, rcGrid.height, rcGrid.width) == null;
}
}
|
package urllistcompare;
import java.io.File;
import java.util.Scanner;
/**
* @author Rocco Barbini (roccobarbi@gmail.com)
*
* This class is tasked with managing the whole reading process:
* - it prompts the user for the information needed to set up the whole process;
* - it creates the URLList;
* - it creates the readers;
* - it actually runs the readers.
*
*/
public class ReadManager {
private final int QTY_READERS = 2;
private final String PROMPT = ">: ";
private CSVReader[] reader;
private URLList dest;
private URLFormat[] format;
/**
* Default constructor: it does almost nothing.
*/
public ReadManager() {
reader = null;
dest = null;
format = null;
}
public boolean userInput(){
boolean done = false, keepAsking = true;
Scanner keyboard = new Scanner(System.in);
// temp variables to store the values needed to create each reader
URLFormat tempFormat = null;
boolean tempHeader = false, tempIsTSep = false;
char tempTSep = 0, tempDSep = 0, tempVSep = 0;
int tempUrlI = 0, tempImpI = 0;
String fileName = "", input = "";
File file = null;
// Allocate the arrays (the constructor leaves them null), then loop the
// formats and create the URLList
format = new URLFormat[QTY_READERS];
reader = new CSVReader[QTY_READERS];
for(int i = 0; i < QTY_READERS; i++){
format[i] = URLFormat.inputFormat(PROMPT);
}
dest = new URLList(format[0], format[1]);
// Loop to create each reader
for(int i = 0; i < QTY_READERS; i++){
// fileName
keepAsking = true;
while(keepAsking){
// Prompt the user
System.out.println("Enter the name of the file number " + i + ",\n" +
"with format " + format[i].getFormatSample() + "\n" +
"then press ENTER!");
System.out.print(PROMPT);
input = keyboard.nextLine();
if(input.length() > 0){ // Otherwise keep looping
file = new File(input);
if(file.exists() && file.canRead()){
keepAsking = false;
} else {
System.out.println("The file does not exist or it can't be read!");
}
}
}
// headers
keepAsking = true;
while(keepAsking){
// Prompt the user
System.out.println("Does the file have a header line? [y/n]");
System.out.print(PROMPT);
input = keyboard.nextLine();
if(input.length() > 0){ // Otherwise keep looping
char selector = input.toLowerCase().charAt(0);
switch(selector){
case 'y':
tempHeader = true;
keepAsking = false;
break;
case 'n':
tempHeader = false;
keepAsking = false;
break;
default:
System.out.println("Invalid selection, please try again!");
}
}
}
// thousand separator
keepAsking = true;
while(keepAsking){
// Prompt the user
System.out.println("Does the file have a thousand separator? [y/n]");
System.out.print(PROMPT);
input = keyboard.nextLine();
if(input.length() > 0){ // Otherwise keep looping
char selector = input.toLowerCase().charAt(0);
switch(selector){
case 'y':
tempIsTSep = true;
keepAsking = false;
break;
case 'n':
tempIsTSep = false;
keepAsking = false;
break;
default:
System.out.println("Invalid selection, please try again!");
}
}
}
            // thousand separator character (only asked for if one is used)
keepAsking = true;
while(keepAsking && tempIsTSep){
// Prompt the user
System.out.println("Choose a thousand separator:");
System.out.println("1 : .");
System.out.println("2 : ,");
System.out.print(PROMPT);
input = keyboard.nextLine();
if(input.length() > 0){ // Otherwise keep looping
char selector = input.toLowerCase().charAt(0);
switch(selector){
case '1':
tempTSep = '.';
keepAsking = false;
break;
                    case '2':
                        tempTSep = ',';
keepAsking = false;
break;
default:
System.out.println("Invalid selection, please try again!");
}
}
}
// decimal separator
keepAsking = true;
while(keepAsking){
// Prompt the user
System.out.println("Choose a decimal separator:");
System.out.println("1 : .");
System.out.println("2 : ,");
System.out.print(PROMPT);
input = keyboard.nextLine();
if(input.length() > 0){ // Otherwise keep looping
char selector = input.toLowerCase().charAt(0);
switch(selector){ //TODO: check that it's not the same as the thousand separator
case '1':
tempDSep = '.';
keepAsking = false;
break;
                    case '2':
                        tempDSep = ',';
keepAsking = false;
break;
default:
System.out.println("Invalid selection, please try again!");
}
}
}
// value separator
keepAsking = true;
while(keepAsking){
// Prompt the user
System.out.println("Choose a value separator:");
System.out.println("1 : ;");
System.out.println("2 : ,");
System.out.println("3 : \t");
System.out.print(PROMPT);
input = keyboard.nextLine();
if(input.length() > 0){ // Otherwise keep looping
char selector = input.toLowerCase().charAt(0);
switch(selector){ //TODO: check that it's not the same as the thousand separator AND use an enumeration
case '1':
tempVSep = ';';
keepAsking = false;
break;
                    case '2':
                        tempVSep = ',';
keepAsking = false;
break;
case '3':
tempVSep = '\t';
keepAsking = false;
break;
default:
System.out.println("Invalid selection, please try again!");
}
}
}
// Choose the columns that will be parsed
System.out.println("Which column is used for the page name/url?");
System.out.print(PROMPT);
tempUrlI = keyboard.nextInt();
keyboard.nextLine();
System.out.println("Which column is used for the page impressions?");
System.out.print(PROMPT);
tempImpI = keyboard.nextInt();
keyboard.nextLine();
// Create the CSVReader
if(tempIsTSep){
reader[i] = new CSVReader(tempHeader, tempUrlI, tempImpI, tempVSep, tempDSep, tempIsTSep, tempTSep, file, format[i]);
} else {
reader[i] = new CSVReader(tempHeader, tempUrlI, tempImpI, tempVSep, tempDSep, tempIsTSep, file, format[i]);
}
}
done = true;
return done;
}
}
|
package imagej.script.editor;
import imagej.script.ScriptLanguage;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JTextArea;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultHighlighter;
import javax.swing.text.Document;
import javax.swing.text.Position;
/**
* TODO
*
* @author Johannes Schindelin
*/
public class ErrorHandler {
protected List<Error> list = new ArrayList<Error>();
protected int current = -1;
protected JTextArea textArea;
protected int currentOffset;
protected Parser parser;
public ErrorHandler(JTextArea textArea) {
this.textArea = textArea;
}
public ErrorHandler(ScriptLanguage language, JTextArea textArea,
int startOffset) {
this(textArea);
String languageName = language == null ? "None" : language.getLanguageName();
if (languageName.equals("Java"))
parser = new JavacErrorParser();
else
return;
currentOffset = startOffset;
try {
parseErrors();
} catch (BadLocationException e) {
handleException(e);
}
}
public int getErrorCount() {
return list.size();
}
public boolean setCurrent(int index) {
if (index < 0 || index >= list.size())
return false;
current = index;
return true;
}
public boolean nextError(boolean forward) {
if (forward) {
if (current + 1 >= list.size())
return false;
current++;
}
else {
if (current - 1 < 0)
return false;
            current--;
}
return true;
}
public String getPath() {
return list.get(current).path;
}
public int getLine() {
return list.get(current).line;
}
public Position getPosition() {
return list.get(current).position;
}
public void markLine() throws BadLocationException {
int offset = getPosition().getOffset();
int line = textArea.getLineOfOffset(offset);
int start = textArea.getLineStartOffset(line);
int end = textArea.getLineEndOffset(line);
textArea.getHighlighter().removeAllHighlights();
textArea.getHighlighter().addHighlight(start, end,
DefaultHighlighter.DefaultPainter);
scrollToVisible(start);
}
public void scrollToVisible(int offset) throws BadLocationException {
if (textArea == null) return;
        // Scroll to the end of the document first so that, after the second call,
        // the requested offset ends up near the top of the viewport.
        textArea.scrollRectToVisible(textArea.modelToView(textArea.getDocument().getLength()));
        textArea.scrollRectToVisible(textArea.modelToView(offset));
}
static class Error {
String path;
int line;
Position position;
public Error(String path, int line) {
this.path = path;
this.line = line;
}
}
public void addError(String path, int line, String text) {
try {
Document document = textArea.getDocument();
int offset = document.getLength();
if (!text.endsWith("\n"))
text += "\n";
textArea.insert(text, offset);
if (path == null || line < 0)
return;
Error error = new Error(path, line);
error.position = document.createPosition(offset + 1);
list.add(error);
} catch (BadLocationException e) {
handleException(e);
}
}
interface Parser {
Error getError(String line);
}
void parseErrors() throws BadLocationException {
int line = textArea.getLineOfOffset(currentOffset);
int lineCount = textArea.getLineCount();
for (;;) {
if (++line >= lineCount)
return;
int start = textArea.getLineStartOffset(line);
int end = textArea.getLineEndOffset(line);
String text = textArea.getText(start, end - start);
Error error = parser.getError(text);
if (error != null) try {
error.position = textArea.getDocument()
.createPosition(start);
list.add(error);
} catch (BadLocationException e) {
handleException(e);
}
}
}
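    /**
     * Parses javac-style error lines. For example, the line
     * <code>Foo.java:12: error: cannot find symbol</code> yields an Error
     * with path <code>Foo.java</code> and line number <code>12</code>.
     */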
class JavacErrorParser implements Parser {
public Error getError(String line) {
int colon = line.indexOf(".java:");
if (colon <= 0)
return null;
colon += 5;
int next = line.indexOf(':', colon + 1);
if (next < colon + 2)
return null;
int lineNumber;
try {
lineNumber = Integer.parseInt(line
.substring(colon + 1, next));
} catch (NumberFormatException e) {
return null;
}
String fileName = line.substring(0, colon);
return new Error(fileName, lineNumber);
}
}
private void handleException(final Throwable e) {
TextEditor.handleException(e, textArea);
}
}
|
package userprofile.model;
import java.util.ArrayList;
/**
*
* @author Michael Kramer
*/
public class UserList {
private ArrayList<User> theListOfUsers = new ArrayList<User>();
/**
* Default Constructor of the UserList, creates an empty UserList
*/
public UserList() {
}
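    /*
     * Hypothetical usage once the stubs below are implemented:
     *
     *   UserList users = new UserList();
     *   users.addUser("alice", "secret".toCharArray());
     *   boolean ok = users.authenticateUserCredentials("alice", "secret".toCharArray());
     */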
/**
     * Goes through the UserList and validates the provided credentials
     * @param username The Username to validate
     * @param password The Password to validate
     * @return true if the provided credentials match a User in the list
*/
public boolean authenticateUserCredentials(String username, char[] password) {
System.err.println("This is a stub.");
        //TODO: Implement authenticateUserCredentials
return true;
}
/**
* Adds a User to the UserList with the provided UserName and Password
* @param username Username of the new User
* @param password Password of the new User
*/
public void addUser(String username, char[] password) {
System.err.println("This is a stub.");
//TODO: Implement addUser
}
/**
* Removes a User from the UserList
* @param username The Username of the User to remove
* @param password The Password of the User to remove
*/
public void deleteUser(String username, char[] password) {
System.err.println("This is a stub.");
//TODO: Implement deleteUser
}
/**
     * Returns whether a User with the provided Username exists
     * @param username The Username to check for
     * @return true if a User with the provided Username is found
*/
public boolean hasUser(String username) {
System.err.println("This is a stub.");
//TODO: Implement hasUser
return false;
}
/**
* Gets the User with Username from the UserList
* @param username The Username of the User to get
     * @return the User with the provided Username
*/
public User getUser(String username) {
System.err.println("This is a stub.");
//TODO: Implement getUser
return new User();
}
/**
* Returns the number of Users in the UserList
     * @return the number of Users in the UserList
*/
public int getUserCount() {
System.err.println("This is a stub.");
        //TODO: Implement getUserCount
return 0;
}
}
|
package wota.gameobjects;
import java.lang.Math;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import wota.gamemaster.AILoader;
import wota.gamemaster.Logger;
import wota.gamemaster.RandomPosition;
import wota.gamemaster.SimulationParameters;
import wota.gamemaster.StatisticsLogger;
import wota.utility.Vector;
/**
* Contains all elements of the game world.
*/
public class GameWorld {
private final List<Player> players = new LinkedList<Player>();
private final LinkedList<SugarObject> sugarObjects = new LinkedList<SugarObject>();
private Logger logger;
private SpacePartitioning spacePartitioning;
private final Parameters parameters;
private int tickCount = 0;
private final RandomPosition randomPosition;
public GameWorld(Parameters parameters, RandomPosition randomPosition) {
this.parameters = parameters;
this.randomPosition = randomPosition;
spacePartitioning = new SpacePartitioning(maximumSight(), parameters);
}
private static double maximumSight() {
double maximum = 0.0;
for (Caste caste : Caste.values()) {
if (caste.SIGHT_RANGE > maximum) {
maximum = caste.SIGHT_RANGE;
}
if (caste.HEARING_RANGE > maximum) {
maximum = caste.HEARING_RANGE;
}
}
return maximum;
}
public void createRandomSugarObject() {
List<Vector> hillPositions = new LinkedList<Vector>();
for (Player player : players) {
hillPositions.add(player.hillObject.getPosition());
}
List<Vector> sugarPositions = new LinkedList<Vector>();
for (SugarObject sugarObject : sugarObjects) {
sugarPositions.add(sugarObject.getPosition());
}
SugarObject sugarObject = new SugarObject(randomPosition.sugarPosition(hillPositions, sugarPositions),
parameters);
addSugarObject(sugarObject);
}
public void addSugarObject(SugarObject sugarObject) {
sugarObjects.add(sugarObject);
spacePartitioning.addSugarObject(sugarObject);
}
/** Do not modify the list! Use addSugarObject instead */
public List<SugarObject> getSugarObjects() {
return sugarObjects;
}
public void addPlayer(Player player) {
players.add(player);
}
private int nextPlayerId = 0;
public class Player {
public final List<AntObject> antObjects = new LinkedList<AntObject>();
public final HillObject hillObject;
public final String name;
public final String creator;
private final int id;
public int id() {
return id;
}
// TODO make this private and change addPlayer
public Player(Vector position, Class<? extends HillAI> hillAIClass) {
hillObject = new HillObject(position, this, hillAIClass, parameters);
spacePartitioning.addHillObject(hillObject);
name = AILoader.getAIName(hillAIClass);
creator = AILoader.getAICreator(hillAIClass);
id = nextPlayerId;
nextPlayerId++;
}
@Override
public String toString() {
return "AI " + (id +1) + " " + name + " written by " + creator;
}
public void addAntObject(AntObject antObject) {
antObjects.add(antObject);
spacePartitioning.addAntObject(antObject);
}
public int numAnts(Caste caste) {
int num = 0;
for (AntObject antObject : antObjects) {
if (antObject.getCaste() == caste) {
num++;
}
}
return num;
}
}
public void tick() {
tickCount++;
        // create Ants for all AntObjects and a Hill for each HillObject, and set them in
        // the AntAI (the latter happens in AntObject.createAnt() )
        // also create Sugar for SugarObjects
for (Player player : players) {
for (AntObject antObject : player.antObjects) {
antObject.createAnt();
}
player.hillObject.createHill();
}
for (SugarObject sugarObject : sugarObjects) {
sugarObject.createSugar();
}
// The MessageObjects don't need a "createMessage", because one can
// construct the Message instance when the
// MessageObject instance is constructed.
// call tick for all AntObjects
for (Player player : players) {
for (AntObject antObject : player.antObjects) {
List<Ant> visibleAnts = new LinkedList<Ant>();
List<Sugar> visibleSugar = new LinkedList<Sugar>();
List<Hill> visibleHills = new LinkedList<Hill>();
List<AntMessage> audibleAntMessages = new LinkedList<AntMessage>();
HillMessage audibleHillMessage = null;
double sightRange = antObject.getCaste().SIGHT_RANGE;
double hearingRange = antObject.getCaste().HEARING_RANGE;
Vector position = antObject.getPosition();
for (AntObject visibleAntObject :
spacePartitioning.antObjectsInsideCircle(sightRange, position)) {
if (visibleAntObject != antObject) {
visibleAnts.add(visibleAntObject.getAnt());
}
}
for (SugarObject visibleSugarObject :
spacePartitioning.sugarObjectsInsideCircle(sightRange, position)) {
visibleSugar.add(visibleSugarObject.getSugar());
}
for (HillObject visibleHillObject :
spacePartitioning.hillObjectsInsideCircle(sightRange, position)) {
visibleHills.add(visibleHillObject.getHill());
}
for (AntMessageObject audibleAntMessageObject :
spacePartitioning.antMessageObjectsInsideCircle(hearingRange, position)) {
if (audibleAntMessageObject.sender.playerID == player.id()) {
audibleAntMessages.add(audibleAntMessageObject.getMessage());
}
}
for (HillMessageObject audibleHillMessageObject :
spacePartitioning.hillMessageObjectsInsideCircle(hearingRange, position)) {
if (audibleHillMessageObject.sender.playerID == player.id()) {
audibleHillMessage = audibleHillMessageObject.getMessage();
}
}
antObject.tick(visibleAnts, visibleSugar, visibleHills, audibleAntMessages, audibleHillMessage);
}
// and now for the hill. Sorry for the awful duplication of code but I couldn't see a way without
// lots of work
List<Ant> visibleAnts = new LinkedList<Ant>();
List<Sugar> visibleSugar = new LinkedList<Sugar>();
List<Hill> visibleHills = new LinkedList<Hill>();
List<AntMessage> audibleAntMessages = new LinkedList<AntMessage>();
HillMessage audibleHillMessage = null;
double sightRange = player.hillObject.caste.SIGHT_RANGE;
double hearingRange = player.hillObject.caste.HEARING_RANGE;
Vector position = player.hillObject.getPosition();
for (AntObject visibleAntObject :
spacePartitioning.antObjectsInsideCircle(sightRange, position)) {
visibleAnts.add(visibleAntObject.getAnt());
}
for (SugarObject visibleSugarObject :
spacePartitioning.sugarObjectsInsideCircle(sightRange, position)) {
visibleSugar.add(visibleSugarObject.getSugar());
}
for (HillObject visibleHillObject :
spacePartitioning.hillObjectsInsideCircle(sightRange, position)) {
if (visibleHillObject != player.hillObject) {
visibleHills.add(visibleHillObject.getHill());
}
}
for (AntMessageObject audibleAntMessageObject :
spacePartitioning.antMessageObjectsInsideCircle(hearingRange, position)) {
if (audibleAntMessageObject.sender.playerID == player.id()) {
audibleAntMessages.add(audibleAntMessageObject.getMessage());
}
}
for (HillMessageObject audibleHillMessageObject :
spacePartitioning.hillMessageObjectsInsideCircle(hearingRange, position)) {
if (audibleHillMessageObject.sender.playerID == player.id()) {
audibleHillMessage = audibleHillMessageObject.getMessage();
}
}
player.hillObject.tick(visibleAnts, visibleSugar, visibleHills, audibleAntMessages, audibleHillMessage);
}
// Only do this now that we used last tick's message objects.
spacePartitioning.discardAntMessageObjects();
spacePartitioning.discardHillMessageObjects();
// execute all actions, ants get created
for (Player player : players) {
for (AntObject antObject : player.antObjects) {
executeActionExceptMovement(antObject);
}
}
for (Player player : players) {
for (AntObject antObject : player.antObjects) {
executeMovement(antObject);
}
// order does matter since the hill creates new ants!
handleHillMessage(player.hillObject, player.hillObject.hillAI.popMessage());
executeAntOrders(player.hillObject);
}
// Needs to go before removing dead ants, because they need to be in
// the correct cell to be removed.
spacePartitioning.update();
// Let ants die!
for (Player player : players) {
for (Iterator<AntObject> antObjectIter = player.antObjects.iterator();
antObjectIter.hasNext();) {
AntObject maybeDead = antObjectIter.next();
if (maybeDead.isDead()) {
antObjectIter.remove();
antDies(maybeDead);
}
}
}
int removedSugarObjects = removeSugarObjects();
for (int i=0; i<removedSugarObjects; i++) {
createRandomSugarObject();
}
}
public void antDies(AntObject almostDead) {
spacePartitioning.removeAntObject(almostDead);
almostDead.die();
logger.antDied(almostDead);
}
/**
* iterates through sugarObjects and
* 1) removes the empty ones
* 2) decreases their ticksToWait by calling tick()
*
* @return
* The number of removed SugarObjects
*/
private int removeSugarObjects() {
int nRemovedSugarObjects = 0;
for (Iterator<SugarObject> sugarObjectIter = sugarObjects.iterator();
sugarObjectIter.hasNext();) {
SugarObject sugarObject = sugarObjectIter.next();
sugarObject.tick();
// make invisible as soon as there is no more sugar
if (sugarObject.getAmount() <= 0 && sugarObject.isInSpacePartitioning()) {
spacePartitioning.removeSugarObject(sugarObject);
sugarObject.setIsInSpacePartitioning(false);
}
if (sugarObject.getAmount() > 0 && !sugarObject.isInSpacePartitioning()) {
spacePartitioning.addSugarObject(sugarObject);
sugarObject.setIsInSpacePartitioning(true);
}
// remove if empty
if (sugarObject.getAmount() <= 0 && sugarObject.getQueueSize() == 0) {
sugarObject.getsRemoved();
sugarObjectIter.remove();
nRemovedSugarObjects++;
}
}
return nRemovedSugarObjects;
}
private void executeAntOrders(HillObject hillObject) {
List<AntOrder> antOrders = hillObject.getAntOrders();
Iterator<AntOrder> iterator = antOrders.iterator();
final Player player = hillObject.getPlayer();
while (iterator.hasNext()) {
AntOrder antOrder = iterator.next();
if (parameters.ANT_COST <= player.hillObject.getStoredFood()) {
player.hillObject.changeStoredFoodBy(-parameters.ANT_COST);
AntObject newAntObject = new AntObject(
hillObject.getPlayer().hillObject.getPosition(),
antOrder.getCaste(), antOrder.getAntAIClass(),
hillObject.getPlayer(), parameters);
createAntObject(hillObject, newAntObject);
}
}
}
/**
* @param hillObject Hill which created this AntObject
* @param newAntObject freshly created AntObject
*/
private void createAntObject(HillObject hillObject, AntObject newAntObject) {
hillObject.getPlayer().addAntObject(newAntObject);
logger.antCreated(newAntObject);
}
private void executeActionExceptMovement(AntObject actor) {
Action action = actor.getAction();
// Attack
Ant targetAnt = action.attackTarget;
if (targetAnt != null
&& parameters.distance(targetAnt.antObject.getPosition(), actor.getPosition())
<= parameters.ATTACK_RANGE) {
AntObject target = targetAnt.antObject;
actor.setAttackTarget(target);
// collateral damage, including damage to target:
// the formula how the damage decreases with distance yields full damage for distance 0.
// the radius of the area of effect equals ATTACK_RANGE
for (AntObject closeAntObject : spacePartitioning.antObjectsInsideCircle(parameters.ATTACK_RANGE, target.getPosition())) {
if (closeAntObject.player != actor.player) {
closeAntObject.takesDamage(actor.getCaste().ATTACK*
fractionOfDamageInDistance(parameters.distance(closeAntObject.getPosition(),target.getPosition())));
}
}
}
else {
actor.setAttackTarget(null);
}
// Drop sugar at the hill and reset ticksToLive if inside the hill.
// Optimization: Use space partitioning for dropping sugar at the hill, don't test for all ants.
if (parameters.distance(actor.player.hillObject.getPosition(), actor.getPosition())
<= parameters.HILL_RADIUS) {
actor.player.hillObject.changeStoredFoodBy(actor.getSugarCarry());
logger.antCollectedFood(actor.player, actor.getSugarCarry());
actor.dropSugar();
}
// or drop sugar if desired
        if (action.dropItem) {
actor.dropSugar();
}
// Pick up sugar
Sugar sugar = action.sugarTarget;
if (sugar != null) {
if (parameters.distance(actor.getPosition(),sugar.sugarObject.getPosition())
<= sugar.sugarObject.getRadius()) {
actor.pickUpSugar(sugar.sugarObject);
}
}
// Messages
handleAntMessages(actor, action);
}
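    /**
     * Linear damage falloff: full damage at distance 0, none at or beyond
     * ATTACK_RANGE. For example, with ATTACK_RANGE = 10, a target at distance
     * 5 takes half of the attacker's ATTACK value.
     */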
private double fractionOfDamageInDistance(double distance) {
double fraction = 1 - distance / parameters.ATTACK_RANGE;
return Math.max(fraction, 0);
}
private void handleHillMessage(HillObject actor, HillMessageObject message) {
if (message != null) {
spacePartitioning.addHillMessageObject(message);
}
}
private void handleAntMessages(AntObject actor, Action action) {
if (action.antMessageObject != null) {
spacePartitioning.addAntMessageObject(action.antMessageObject);
}
}
private static void executeMovement(AntObject actor) {
Action action = actor.getAction();
if (actor.getAttackTarget() != null) {
actor.move(action.movement.boundLengthBy(actor.getCaste().SPEED_WHILE_ATTACKING));
} else if (actor.getSugarCarry() > 0) {
actor.move(action.movement.boundLengthBy(actor.getCaste().SPEED_WHILE_CARRYING_SUGAR));
} else {
actor.move(action.movement.boundLengthBy(actor.getCaste().SPEED));
}
}
public boolean allPlayersDead() {
if (tickCount < parameters.DONT_CHECK_VICTORY_CONDITION_BEFORE) {
return false;
}
int nPlayersAlive = players.size();
for (Player player : players) {
if ( player.antObjects.size() == 0 ) {
                nPlayersAlive--;
}
}
return nPlayersAlive == 0;
}
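    /**
     * A player wins once it owns at least FRACTION_OF_ALL_ANTS_NEEDED_FOR_VICTORY
     * of all living ants. For example, with a fraction of 0.75 and 100 ants in
     * total, a player needs at least 75 ants to be declared the winner.
     */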
public Player getWinner() {
if (tickCount < parameters.DONT_CHECK_VICTORY_CONDITION_BEFORE) {
return null;
}
double totalAnts = 0;
for (Player player : players) {
totalAnts += player.antObjects.size();
}
for (Player player : players) {
if (player.antObjects.size() / totalAnts >= parameters.FRACTION_OF_ALL_ANTS_NEEDED_FOR_VICTORY) {
return player;
}
}
return null;
}
/**
* Gets all players who currently have the most ants.
* @return List of players who currently have the most ants of all players.
*/
public List<Player> getPlayersWithMostAnts() {
List<Player> playersWithMostAnts = new LinkedList<Player>();
for (Player player : players) {
if (playersWithMostAnts.isEmpty()) {
playersWithMostAnts.add(player);
}
else {
int antsPlayer = player.antObjects.size();
int antsNeeded = playersWithMostAnts.get(0).antObjects.size();
if (antsPlayer > antsNeeded) {
playersWithMostAnts.clear();
playersWithMostAnts.add(player);
}
else if (antsPlayer == antsNeeded) {
playersWithMostAnts.add(player);
}
}
}
return playersWithMostAnts;
}
public int totalNumberOfAntObjects() {
int n = 0;
for (Player player : players) {
n += player.antObjects.size();
}
return n;
}
public List<Player> getPlayers() {
return Collections.unmodifiableList(players);
}
/**
* @return number of passed ticks
*/
public int tickCount() {
return tickCount;
}
public void setLogger(Logger logger) {
this.logger = logger;
}
}
|
package wota.gameobjects;
import java.lang.Math;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import wota.gamemaster.AILoader;
import wota.gamemaster.AbstractLogger;
import wota.gameobjects.LeftoverParameters;
import wota.utility.SeededRandomizer;
import wota.utility.Vector;
public class GameWorld {
private final List<Player> players = new LinkedList<Player>();
private final LinkedList<SugarObject> sugarObjects = new LinkedList<SugarObject>();
private List<AbstractLogger> registeredLoggers = new LinkedList<AbstractLogger>();
private SpacePartitioning spacePartitioning;
private final Parameters parameters;
private int tickCount = 0;
public GameWorld(Parameters parameters) {
this.parameters = parameters;
spacePartitioning = new SpacePartitioning(maximumSight(), parameters);
}
private static double maximumSight() {
double maximum = 0.0;
for (Caste caste : Caste.values()) {
if (caste.SIGHT_RANGE > maximum) {
maximum = caste.SIGHT_RANGE;
}
if (caste.HEARING_RANGE > maximum) {
maximum = caste.HEARING_RANGE;
}
}
return maximum;
}
public void createRandomSugarObject() {
SugarObject sugarObject = new SugarObject(parameters.INITIAL_SUGAR,
new Vector(SeededRandomizer.getDouble()*parameters.SIZE_X,
SeededRandomizer.getDouble()*parameters.SIZE_Y),
parameters);
addSugarObject(sugarObject);
}
public void addSugarObject(SugarObject sugarObject) {
sugarObjects.add(sugarObject);
spacePartitioning.addSugarObject(sugarObject);
}
/** Do not modify the list! Use addSugarObject instead */
public List<SugarObject> getSugarObjects() {
return sugarObjects;
}
public void addPlayer(Player player) {
notifyLoggers(AbstractLogger.LogEventType.PLAYER_REGISTERED);
players.add(player);
}
private static int nextPlayerId = 0;
public class Player {
public final List<AntObject> antObjects = new LinkedList<AntObject>();
public final HillObject hillObject;
public final QueenObject queenObject;
public final String name;
private final int id;
public boolean hasLost = false;
public int getId() {
return id;
}
// TODO make this private and change addPlayer
public Player(Vector position, Class<? extends QueenAI> queenAIClass) {
hillObject = new HillObject(position, this, parameters);
spacePartitioning.addHillObject(hillObject);
queenObject = new QueenObject(position, queenAIClass, this, parameters);
antObjects.add(queenObject);
name = AILoader.getAIName(queenAIClass);
id = nextPlayerId;
nextPlayerId++;
}
public void addAntObject(AntObject antObject) {
antObjects.add(antObject);
spacePartitioning.addAntObject(antObject);
}
}
public void tick() {
tickCount++;
notifyLoggers(AbstractLogger.LogEventType.TICK);
// can be removed as soon as SpacePartitioning is well tested!
if (LeftoverParameters.DEBUG) {
System.out.println("SpacePartitioning: " + spacePartitioning.totalNumberOfAntObjects());
System.out.println("Total number: " + totalNumberOfAntObjects());
}
        // create Ants for all AntObjects (including the QueenObject) and set them in
        // the AntAI (the latter happens in AntObject.createAnt() )
        // also create Sugar for SugarObjects
for (Player player : players) {
for (AntObject antObject : player.antObjects) {
antObject.createAnt();
}
player.hillObject.createHill();
}
for (SugarObject sugarObject : sugarObjects) {
sugarObject.createSugar();
}
// The MessageObjects don't need a "createMessage", because one can
// construct the Message instance when the
// MessageObject instance is constructed.
// call tick for all AntObjects
for (Player player : players) {
for (AntObject antObject : player.antObjects) {
List<Ant> visibleAnts = new LinkedList<Ant>();
List<Sugar> visibleSugar = new LinkedList<Sugar>();
List<Hill> visibleHills = new LinkedList<Hill>();
List<Message> audibleMessages = new LinkedList<Message>();
for (AntObject visibleAntObject :
spacePartitioning.antObjectsInsideCircle(antObject.getCaste().SIGHT_RANGE, antObject.getPosition())) {
if (visibleAntObject != antObject) {
visibleAnts.add(visibleAntObject.getAnt());
}
}
for (SugarObject visibleSugarObject :
spacePartitioning.sugarObjectsInsideCircle(antObject.getCaste().SIGHT_RANGE, antObject.getPosition())) {
visibleSugar.add(visibleSugarObject.getSugar());
}
for (HillObject visibleHillObject :
spacePartitioning.hillObjectsInsideCircle(antObject.getCaste().SIGHT_RANGE, antObject.getPosition())) {
visibleHills.add(visibleHillObject.getHill());
}
for (MessageObject audibleMessageObject :
spacePartitioning.messageObjectsInsideCircle(antObject.getCaste().HEARING_RANGE, antObject.getPosition())) {
if (audibleMessageObject.getSender().playerID == antObject.player.id) {
audibleMessages.add(audibleMessageObject.getMessage());
}
}
antObject.tick(visibleAnts, visibleSugar, visibleHills, audibleMessages);
}
}
        // Only do this now that we used last tick's message objects.
spacePartitioning.discardMessageObjects();
// execute all actions, ants get created
for (Player player : players) {
for (AntObject antObject : player.antObjects) {
executeActionExceptMovement(antObject);
}
}
for (Player player : players) {
for (AntObject antObject : player.antObjects) {
executeMovement(antObject);
}
// order does matter since the queen creates new ants!
executeAntOrders(player.queenObject);
}
// Needs to go before removing dead ants, because they need to be in
// the correct cell to be removed.
spacePartitioning.update();
// Let ants die!
for (Player player : players) {
for (Iterator<AntObject> antObjectIter = player.antObjects.iterator();
antObjectIter.hasNext();) {
AntObject maybeDead = antObjectIter.next();
if (maybeDead.isDead()) {
antObjectIter.remove();
spacePartitioning.removeAntObject(maybeDead);
}
}
}
int removedSugarObjects = removeSugarAndDecreaseTicksToWait();
for (int i=0; i<removedSugarObjects; i++) {
createRandomSugarObject();
}
}
/**
* iterates through sugarObjects and
* 1) removes the empty ones
* 2) decreases their ticksToWait by calling tick()
*
* @return
* The number of removed SugarObjects
*/
private int removeSugarAndDecreaseTicksToWait() {
int nRemovedSugarObjects = 0;
for (Iterator<SugarObject> sugarObjectIter = sugarObjects.iterator();
sugarObjectIter.hasNext();) {
SugarObject sugarObject = sugarObjectIter.next();
sugarObject.tick();
// remove if empty
if (sugarObject.getAmount() <= 0) {
sugarObjectIter.remove();
spacePartitioning.removeSugarObject(sugarObject);
nRemovedSugarObjects++;
}
}
return nRemovedSugarObjects;
}
private void executeAntOrders(QueenObject queenObject) {
List<AntOrder> antOrders = queenObject.getAntOrders();
Iterator<AntOrder> iterator = antOrders.iterator();
final Player player = queenObject.player;
while (iterator.hasNext()) {
AntOrder antOrder = iterator.next();
if (parameters.ANT_COST <= player.hillObject.getStoredFood()) {
player.hillObject.changeStoredFoodBy(-parameters.ANT_COST);
AntObject antObject = new AntObject(
queenObject.player.hillObject.getPosition(),
antOrder.getCaste(), antOrder.getAntAIClass(),
queenObject.player, parameters);
queenObject.player.addAntObject(antObject);
}
}
}
private void executeActionExceptMovement(AntObject actor) {
Action action = actor.getAction();
// Attack
Ant targetAnt = action.attackTarget;
if (targetAnt != null
&& parameters.distance(targetAnt.antObject.getPosition(), actor.getPosition())
<= parameters.ATTACK_RANGE) {
AntObject target = targetAnt.antObject;
actor.setAttackTarget(target);
// collateral damage, including damage to target:
// the formula how the damage decreases with distance yields full damage for distance 0.
// the radius of the area of effect equals ATTACK_RANGE
for (AntObject closeAntObject : spacePartitioning.antObjectsInsideCircle(parameters.ATTACK_RANGE, target.getPosition())) {
if (closeAntObject.player != actor.player) {
closeAntObject.takesDamage(actor.getCaste().ATTACK*
fractionOfDamageInDistance(parameters.distance(closeAntObject.getPosition(),target.getPosition())));
}
}
}
else {
actor.setAttackTarget(null);
}
// Drop sugar at the hill and reset ticksToLive if inside the hill.
// Optimization: Use space partitioning for dropping sugar at the hill, don't test for all ants.
if (parameters.distance(actor.player.hillObject.getPosition(), actor.getPosition())
<= parameters.HILL_RADIUS) {
actor.player.hillObject.changeStoredFoodBy(actor.getSugarCarry());
actor.dropSugar();
actor.resetTicksToLive();
}
// or drop sugar if desired
        if (action.dropItem) {
actor.dropSugar();
}
// Pick up sugar
Sugar sugar = action.sugarTarget;
if (sugar != null) {
if (parameters.distance(actor.getPosition(),sugar.sugarObject.getPosition())
<= sugar.sugarObject.getRadius()) {
actor.pickUpSugar(sugar.sugarObject);
}
}
// Messages
handleMessages(actor, action);
}
private double fractionOfDamageInDistance(double distance) {
double fraction = 1 - distance / parameters.ATTACK_RANGE;
return Math.max(fraction, 0);
}
private static void executeMovement(AntObject actor) {
Action action = actor.getAction();
if (actor.getAttackTarget() != null) {
actor.move(action.movement.boundLengthBy(actor.getCaste().SPEED_WHILE_ATTACKING));
} else if (actor.getSugarCarry() > 0) {
actor.move(action.movement.boundLengthBy(actor.getCaste().SPEED_WHILE_CARRYING_SUGAR));
} else {
actor.move(action.movement.boundLengthBy(actor.getCaste().SPEED));
}
}
    /** don't check the victory condition before this many ticks have passed */
private static int DONT_CHECK_VICTORY_CONDITION_BEFORE = 100;
/** tests if victory condition is fulfilled
* @return is the victory condition fulfilled or can nobody win anymore? */
public boolean checkVictoryCondition() {
if (tickCount < DONT_CHECK_VICTORY_CONDITION_BEFORE)
return false;
int nPossibleWinners = players.size();
for (Player player : players) {
if ( (player.antObjects.size() == 1 && !player.queenObject.isDead() ) ||
player.antObjects.size() == 0) {
player.hasLost = true;
                nPossibleWinners--;
}
else {
player.hasLost = false;
}
}
return (nPossibleWinners <= 1);
}
/**
* Assumes that checkVictoryCondition returns true.
* @return the player who won the game or null for draws.
*/
public Player getWinner() {
if (!checkVictoryCondition()) {
System.err.println("getWinner() should only be called if checkVictoryCondition() return true!");
}
for (Player player : players) {
if (player.hasLost == false) {
return player;
}
}
return null;
}
private void handleMessages(AntObject actor, Action action) {
if (action.messageObject != null) {
spacePartitioning.addMessageObject(action.messageObject);
Message message = action.messageObject.getMessage();
if (LeftoverParameters.DEBUG)
System.out.println("\"" + message.content + "\" sagt "
+ message.sender + ".");
}
}
public void registerLogger(AbstractLogger logger) {
registeredLoggers.add(logger);
}
private void notifyLoggers(AbstractLogger.LogEventType event) {
for (AbstractLogger logger : registeredLoggers)
logger.log(event);
}
public int totalNumberOfAntObjects() {
int n = 0;
for (Player player : players) {
n += player.antObjects.size();
}
return n;
}
public List<Player> getPlayers() {
return Collections.unmodifiableList(players);
}
/**
* @return number of passed ticks
*/
public int tickCount() {
return tickCount;
}
}
|
package wyc.builder;
import static wyc.lang.WhileyFile.*;
import static wyil.util.ErrorMessages.INVALID_BINARY_EXPRESSION;
import static wyil.util.ErrorMessages.INVALID_BOOLEAN_EXPRESSION;
import static wyil.util.ErrorMessages.INVALID_SET_OR_LIST_EXPRESSION;
import static wyil.util.ErrorMessages.UNKNOWN_VARIABLE;
import static wyil.util.ErrorMessages.VARIABLE_POSSIBLY_UNITIALISED;
import static wyil.util.ErrorMessages.errorMessage;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import wybs.lang.SyntacticElement;
import wybs.lang.SyntaxError;
import wybs.util.ResolveError;
import wyc.lang.Expr;
import wyc.lang.UnresolvedType;
import wyc.lang.WhileyFile.Context;
import wyil.lang.Attribute;
import wyil.lang.Block;
import wyil.lang.Code;
import wyil.lang.Type;
import wyil.lang.Value;
import wyil.util.Pair;
import wyil.util.Triple;
/**
* <p>
* Responsible for compiling source-level expressions into wyil bytecodes in a
* given context. This includes generating wyil code for constraints as
* necessary using a global generator. For example:
* </p>
*
* <pre>
* define natlist as [nat]
*
* natlist check([int] ls):
* if ls is natlist:
* return ls
* else:
* return []
* </pre>
* <p>
* Here, a local generator will be called to compile the expression
* <code>ls is natlist</code> into wyil bytecode (amongst other expressions). To
* this, it must in turn obtain the bytecodes for the type <code>natlist</code>.
* Since <code>natlist</code> is defined at the global level, the global
* generator will be called to do this (which, in turn, may call a local
* generator again).
* </p>
* <p>
* <b>NOTE:</b> it is currently assumed that all expressions being generated are
* already typed. This restriction may be lifted in the future.
* </p>
*
* @author David J. Pearce
*
*/
public final class LocalGenerator {
private final GlobalGenerator global;
private final Context context;
public LocalGenerator(GlobalGenerator global, Context context) {
this.context = context;
this.global = global;
}
public Context context() {
return context;
}
public Block generateCondition(String target, Expr condition,
HashMap<String, Integer> environment) {
try {
if (condition instanceof Expr.Constant) {
return generateCondition(target, (Expr.Constant) condition, environment);
} else if (condition instanceof Expr.LocalVariable) {
return generateCondition(target, (Expr.LocalVariable) condition, environment);
} else if (condition instanceof Expr.ConstantAccess) {
return generateCondition(target, (Expr.ConstantAccess) condition, environment);
} else if (condition instanceof Expr.BinOp) {
return generateCondition(target, (Expr.BinOp) condition, environment);
} else if (condition instanceof Expr.UnOp) {
return generateCondition(target, (Expr.UnOp) condition, environment);
} else if (condition instanceof Expr.AbstractInvoke) {
return generateCondition(target, (Expr.AbstractInvoke) condition, environment);
} else if (condition instanceof Expr.RecordAccess) {
return generateCondition(target, (Expr.RecordAccess) condition, environment);
} else if (condition instanceof Expr.Record) {
return generateCondition(target, (Expr.Record) condition, environment);
} else if (condition instanceof Expr.Tuple) {
return generateCondition(target, (Expr.Tuple) condition, environment);
} else if (condition instanceof Expr.IndexOf) {
return generateCondition(target, (Expr.IndexOf) condition, environment);
} else if (condition instanceof Expr.Comprehension) {
return generateCondition(target, (Expr.Comprehension) condition, environment);
} else {
syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, condition);
}
} catch (SyntaxError se) {
throw se;
} catch (Exception ex) {
internalFailure(ex.getMessage(), context, condition, ex);
}
return null;
}
private Block generateCondition(String target, Expr.Constant c, HashMap<String,Integer> environment) {
Value.Bool b = (Value.Bool) c.value;
Block blk = new Block(environment.size());
if (b.value) {
blk.append(Code.Goto(target));
} else {
            // do nothing: the condition is always false
}
return blk;
}
private Block generateCondition(String target, Expr.LocalVariable v,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
blk.append(Code.Load(Type.T_BOOL, environment.get(v.var)));
blk.append(Code.Const(Value.V_BOOL(true)),attributes(v));
blk.append(Code.IfGoto(Type.T_BOOL,Code.COp.EQ, target),attributes(v));
return blk;
}
private Block generateCondition(String target, Expr.ConstantAccess v,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
Value val = v.value;
// Obviously, this will be evaluated one way or another.
blk.append(Code.Const(val));
blk.append(Code.Const(Value.V_BOOL(true)),attributes(v));
blk.append(Code.IfGoto(v.result().raw(),Code.COp.EQ, target),attributes(v));
return blk;
}
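    /**
     * Conditions over binary operators compile to short-circuit branching
     * code. For example, <code>a && b</code> is compiled by branching to a
     * fresh exit label when <code>!a</code> holds, branching to the target
     * when <code>b</code> holds, and then emitting the exit label, whereas
     * <code>a || b</code> simply compiles both operands branching to the
     * same target.
     */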
private Block generateCondition(String target, Expr.BinOp v, HashMap<String,Integer> environment) throws Exception {
Expr.BOp bop = v.op;
Block blk = new Block(environment.size());
if (bop == Expr.BOp.OR) {
blk.append(generateCondition(target, v.lhs, environment));
blk.append(generateCondition(target, v.rhs, environment));
return blk;
} else if (bop == Expr.BOp.AND) {
String exitLabel = Block.freshLabel();
blk.append(generateCondition(exitLabel, invert(v.lhs), environment));
blk.append(generateCondition(target, v.rhs, environment));
blk.append(Code.Label(exitLabel));
return blk;
} else if (bop == Expr.BOp.IS) {
return generateTypeCondition(target, v, environment);
}
Code.COp cop = OP2COP(bop,v);
if (cop == Code.COp.EQ && v.lhs instanceof Expr.LocalVariable
&& v.rhs instanceof Expr.Constant
&& ((Expr.Constant) v.rhs).value == Value.V_NULL) {
// this is a simple rewrite to enable type inference.
Expr.LocalVariable lhs = (Expr.LocalVariable) v.lhs;
if (!environment.containsKey(lhs.var)) {
syntaxError(errorMessage(UNKNOWN_VARIABLE), context, v.lhs);
}
int slot = environment.get(lhs.var);
blk.append(Code.IfType(v.srcType.raw(), slot, Type.T_NULL, target), attributes(v));
} else if (cop == Code.COp.NEQ && v.lhs instanceof Expr.LocalVariable
&& v.rhs instanceof Expr.Constant
&& ((Expr.Constant) v.rhs).value == Value.V_NULL) {
// this is a simple rewrite to enable type inference.
String exitLabel = Block.freshLabel();
Expr.LocalVariable lhs = (Expr.LocalVariable) v.lhs;
if (!environment.containsKey(lhs.var)) {
syntaxError(errorMessage(UNKNOWN_VARIABLE), context, v.lhs);
}
int slot = environment.get(lhs.var);
blk.append(Code.IfType(v.srcType.raw(), slot, Type.T_NULL, exitLabel), attributes(v));
blk.append(Code.Goto(target));
blk.append(Code.Label(exitLabel));
} else {
blk.append(generate(v.lhs, environment));
blk.append(generate(v.rhs, environment));
blk.append(Code.IfGoto(v.srcType.raw(), cop, target), attributes(v));
}
return blk;
}
private Block generateTypeCondition(String target, Expr.BinOp v, HashMap<String,Integer> environment) throws Exception {
Block blk;
int slot;
if (v.lhs instanceof Expr.LocalVariable) {
Expr.LocalVariable lhs = (Expr.LocalVariable) v.lhs;
if (!environment.containsKey(lhs.var)) {
syntaxError(errorMessage(UNKNOWN_VARIABLE), context, v.lhs);
}
slot = environment.get(lhs.var);
blk = new Block(environment.size());
} else {
blk = generate(v.lhs, environment);
slot = -1;
}
Expr.TypeVal rhs = (Expr.TypeVal) v.rhs;
Block constraint = global.generate(rhs.unresolvedType, context);
if(constraint != null) {
String exitLabel = Block.freshLabel();
Type glb = Type.intersect(v.srcType.raw(), Type.Negation(rhs.type.raw()));
if(glb != Type.T_VOID) {
// Only put the actual type test in if it is necessary.
String nextLabel = Block.freshLabel();
// FIXME: should be able to just test the glb here and branch to
// exit label directly. However, this currently doesn't work
// because of limitations with intersection of open records.
blk.append(Code.IfType(v.srcType.raw(), slot, rhs.type.raw(), nextLabel),
attributes(v));
blk.append(Code.Goto(exitLabel));
blk.append(Code.Label(nextLabel));
}
// FIXME: I think there's a bug here when slot == -1
constraint = shiftBlockExceptionZero(environment.size()-1,slot,constraint);
blk.append(chainBlock(exitLabel,constraint));
blk.append(Code.Goto(target));
blk.append(Code.Label(exitLabel));
} else {
blk.append(Code.IfType(v.srcType.raw(), slot, rhs.type.raw(), target),
attributes(v));
}
return blk;
}
private Block generateCondition(String target, Expr.UnOp v, HashMap<String,Integer> environment) {
Expr.UOp uop = v.op;
switch (uop) {
case NOT:
String label = Block.freshLabel();
Block blk = generateCondition(label, v.mhs, environment);
blk.append(Code.Goto(target));
blk.append(Code.Label(label));
return blk;
}
syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, v);
return null;
}
private Block generateCondition(String target, Expr.IndexOf v, HashMap<String,Integer> environment) {
Block blk = generate(v, environment);
blk.append(Code.Const(Value.V_BOOL(true)),attributes(v));
blk.append(Code.IfGoto(Type.T_BOOL, Code.COp.EQ, target),attributes(v));
return blk;
}
private Block generateCondition(String target, Expr.RecordAccess v, HashMap<String,Integer> environment) {
Block blk = generate(v, environment);
blk.append(Code.Const(Value.V_BOOL(true)),attributes(v));
blk.append(Code.IfGoto(Type.T_BOOL, Code.COp.EQ, target),attributes(v));
return blk;
}
private Block generateCondition(String target, Expr.AbstractInvoke v, HashMap<String,Integer> environment) throws ResolveError {
Block blk = generate((Expr) v, environment);
blk.append(Code.Const(Value.V_BOOL(true)),attributes(v));
blk.append(Code.IfGoto(Type.T_BOOL, Code.COp.EQ, target),attributes(v));
return blk;
}
private Block generateCondition(String target, Expr.Comprehension e,
HashMap<String,Integer> environment) {
if (e.cop != Expr.COp.NONE && e.cop != Expr.COp.SOME) {
syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, e);
}
// Ok, non-boolean case.
Block blk = new Block(environment.size());
        ArrayList<Triple<Integer,Integer,Type.EffectiveCollection>> slots = new ArrayList<Triple<Integer,Integer,Type.EffectiveCollection>>();
for (Pair<String, Expr> src : e.sources) {
int srcSlot;
int varSlot = allocate(src.first(),environment);
Nominal srcType = src.second().result();
if(src.second() instanceof Expr.LocalVariable) {
// this is a little optimisation to produce slightly better
// code.
Expr.LocalVariable v = (Expr.LocalVariable) src.second();
if(environment.containsKey(v.var)) {
srcSlot = environment.get(v.var);
} else {
// fall-back plan ...
blk.append(generate(src.second(), environment));
srcSlot = allocate(environment);
blk.append(Code.Store(srcType.raw(), srcSlot),attributes(e));
}
} else {
blk.append(generate(src.second(), environment));
srcSlot = allocate(environment);
blk.append(Code.Store(srcType.raw(), srcSlot),attributes(e));
}
slots.add(new Triple(varSlot,srcSlot,srcType.raw()));
}
ArrayList<String> labels = new ArrayList<String>();
String loopLabel = Block.freshLabel();
for (Triple<Integer, Integer, Type.EffectiveCollection> p : slots) {
Type.EffectiveCollection srcType = p.third();
String lab = loopLabel + "$" + p.first();
blk.append(Code.Load((Type) srcType, p.second()), attributes(e));
blk.append(Code
.ForAll(srcType, p.first(), lab, Collections.EMPTY_LIST),
attributes(e));
labels.add(lab);
}
if (e.cop == Expr.COp.NONE) {
String exitLabel = Block.freshLabel();
blk.append(generateCondition(exitLabel, e.condition,
environment));
for (int i = (labels.size() - 1); i >= 0; --i) {
blk.append(Code.End(labels.get(i)));
}
blk.append(Code.Goto(target));
blk.append(Code.Label(exitLabel));
} else { // SOME
blk.append(generateCondition(target, e.condition,
environment));
for (int i = (labels.size() - 1); i >= 0; --i) {
blk.append(Code.End(labels.get(i)));
}
} // ALL, LONE and ONE will be harder
return blk;
}
public Block generate(Expr expression, HashMap<String,Integer> environment) {
try {
if (expression instanceof Expr.Constant) {
return generate((Expr.Constant) expression, environment);
} else if (expression instanceof Expr.LocalVariable) {
return generate((Expr.LocalVariable) expression, environment);
} else if (expression instanceof Expr.ConstantAccess) {
return generate((Expr.ConstantAccess) expression, environment);
} else if (expression instanceof Expr.Set) {
return generate((Expr.Set) expression, environment);
} else if (expression instanceof Expr.List) {
return generate((Expr.List) expression, environment);
} else if (expression instanceof Expr.SubList) {
return generate((Expr.SubList) expression, environment);
} else if (expression instanceof Expr.SubString) {
return generate((Expr.SubString) expression, environment);
} else if (expression instanceof Expr.BinOp) {
return generate((Expr.BinOp) expression, environment);
} else if (expression instanceof Expr.LengthOf) {
return generate((Expr.LengthOf) expression, environment);
} else if (expression instanceof Expr.Dereference) {
return generate((Expr.Dereference) expression, environment);
} else if (expression instanceof Expr.Convert) {
return generate((Expr.Convert) expression, environment);
} else if (expression instanceof Expr.IndexOf) {
return generate((Expr.IndexOf) expression, environment);
} else if (expression instanceof Expr.UnOp) {
return generate((Expr.UnOp) expression, environment);
} else if (expression instanceof Expr.FunctionCall) {
return generate((Expr.FunctionCall) expression, true, environment);
} else if (expression instanceof Expr.MethodCall) {
return generate((Expr.MethodCall) expression, true, environment);
} else if (expression instanceof Expr.IndirectFunctionCall) {
return generate((Expr.IndirectFunctionCall) expression, true, environment);
} else if (expression instanceof Expr.IndirectMethodCall) {
return generate((Expr.IndirectMethodCall) expression, true, environment);
} else if (expression instanceof Expr.IndirectMessageSend) {
return generate((Expr.IndirectMessageSend) expression, true, environment);
} else if (expression instanceof Expr.MessageSend) {
return generate((Expr.MessageSend) expression, true, environment);
} else if (expression instanceof Expr.Comprehension) {
return generate((Expr.Comprehension) expression, environment);
} else if (expression instanceof Expr.RecordAccess) {
return generate((Expr.RecordAccess) expression, environment);
} else if (expression instanceof Expr.Record) {
return generate((Expr.Record) expression, environment);
} else if (expression instanceof Expr.Tuple) {
return generate((Expr.Tuple) expression, environment);
} else if (expression instanceof Expr.Dictionary) {
return generate((Expr.Dictionary) expression, environment);
} else if (expression instanceof Expr.FunctionOrMethodOrMessage) {
return generate((Expr.FunctionOrMethodOrMessage) expression, environment);
} else if (expression instanceof Expr.New) {
return generate((Expr.New) expression, environment);
} else {
// should be dead-code
internalFailure("unknown expression: "
+ expression.getClass().getName(), context, expression);
}
} catch (ResolveError rex) {
syntaxError(rex.getMessage(), context, expression, rex);
} catch (SyntaxError se) {
throw se;
} catch (Exception ex) {
internalFailure(ex.getMessage(), context, expression, ex);
}
return null;
}
public Block generate(Expr.MessageSend fc, boolean retval,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
blk.append(generate(fc.qualification, environment));
for (Expr e : fc.arguments) {
blk.append(generate(e, environment));
}
blk.append(Code.Send(fc.messageType.raw(), fc.nid, fc.synchronous, retval),
attributes(fc));
return blk;
}
public Block generate(Expr.MethodCall fc, boolean retval,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
for (Expr e : fc.arguments) {
blk.append(generate(e, environment));
}
blk.append(Code.Invoke(fc.methodType.raw(), fc.nid(), retval), attributes(fc));
return blk;
}
public Block generate(Expr.FunctionCall fc, boolean retval,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
for (Expr e : fc.arguments) {
blk.append(generate(e, environment));
}
blk.append(Code.Invoke(fc.functionType.raw(), fc.nid(), retval), attributes(fc));
return blk;
}
public Block generate(Expr.IndirectFunctionCall fc, boolean retval,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
blk.append(generate(fc.src,environment));
for (Expr e : fc.arguments) {
blk.append(generate(e, environment));
}
blk.append(Code.IndirectInvoke(fc.functionType.raw(), retval), attributes(fc));
return blk;
}
public Block generate(Expr.IndirectMethodCall fc, boolean retval,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
blk.append(generate(fc.src,environment));
for (Expr e : fc.arguments) {
blk.append(generate(e, environment));
}
blk.append(Code.IndirectInvoke(fc.methodType.raw(), retval), attributes(fc));
return blk;
}
public Block generate(Expr.IndirectMessageSend fc, boolean retval,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
blk.append(generate(fc.src,environment));
blk.append(generate(fc.receiver,environment));
for (Expr e : fc.arguments) {
blk.append(generate(e, environment));
}
blk.append(Code.IndirectSend(fc.messageType.raw(), fc.synchronous, retval), attributes(fc));
return blk;
}
private Block generate(Expr.Constant c, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
blk.append(Code.Const(c.value), attributes(c));
return blk;
}
private Block generate(Expr.FunctionOrMethodOrMessage s, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
blk.append(Code.Const(Value.V_FUN(s.nid, s.type.raw())),
attributes(s));
return blk;
}
private Block generate(Expr.ConstantAccess v, HashMap<String,Integer> environment) throws ResolveError {
Block blk = new Block(environment.size());
Value val = v.value;
blk.append(Code.Const(val),attributes(v));
return blk;
}
private Block generate(Expr.LocalVariable v, HashMap<String,Integer> environment) throws ResolveError {
if (environment.containsKey(v.var)) {
Block blk = new Block(environment.size());
blk.append(Code.Load(v.result().raw(), environment.get(v.var)), attributes(v));
return blk;
} else {
syntaxError(errorMessage(VARIABLE_POSSIBLY_UNITIALISED), context,
v);
}
// must be an error
syntaxError("unknown variable \"" + v.var + "\"", context,v);
return null;
}
private Block generate(Expr.UnOp v, HashMap<String,Integer> environment) {
Block blk = generate(v.mhs, environment);
switch (v.op) {
case NEG:
blk.append(Code.Negate(v.result().raw()), attributes(v));
break;
case INVERT:
blk.append(Code.Invert(v.result().raw()), attributes(v));
break;
case NOT:
String falseLabel = Block.freshLabel();
String exitLabel = Block.freshLabel();
blk = generateCondition(falseLabel, v.mhs, environment);
blk.append(Code.Const(Value.V_BOOL(true)), attributes(v));
blk.append(Code.Goto(exitLabel));
blk.append(Code.Label(falseLabel));
blk.append(Code.Const(Value.V_BOOL(false)), attributes(v));
blk.append(Code.Label(exitLabel));
break;
default:
// should be dead-code
internalFailure("unexpected unary operator encountered", context, v);
return null;
}
return blk;
}
private Block generate(Expr.LengthOf v, HashMap<String,Integer> environment) {
Block blk = generate(v.src, environment);
blk.append(Code.LengthOf(v.srcType.raw()), attributes(v));
return blk;
}
private Block generate(Expr.Dereference v, HashMap<String,Integer> environment) {
Block blk = generate(v.src, environment);
blk.append(Code.Dereference(v.srcType.raw()), attributes(v));
return blk;
}
private Block generate(Expr.IndexOf v, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
blk.append(generate(v.src, environment));
blk.append(generate(v.index, environment));
blk.append(Code.IndexOf(v.srcType.raw()),attributes(v));
return blk;
}
private Block generate(Expr.Convert v, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
blk.append(generate(v.expr, environment));
Type from = v.expr.result().raw();
Type to = v.result().raw();
// TODO: include constraints
blk.append(Code.Convert(from,to),attributes(v));
return blk;
}
private Block generate(Expr.BinOp v, HashMap<String,Integer> environment) throws Exception {
// could probably use a range test for this somehow
if (v.op == Expr.BOp.EQ || v.op == Expr.BOp.NEQ || v.op == Expr.BOp.LT
|| v.op == Expr.BOp.LTEQ || v.op == Expr.BOp.GT || v.op == Expr.BOp.GTEQ
|| v.op == Expr.BOp.SUBSET || v.op == Expr.BOp.SUBSETEQ
|| v.op == Expr.BOp.ELEMENTOF || v.op == Expr.BOp.AND || v.op == Expr.BOp.OR) {
String trueLabel = Block.freshLabel();
String exitLabel = Block.freshLabel();
Block blk = generateCondition(trueLabel, v, environment);
blk.append(Code.Const(Value.V_BOOL(false)), attributes(v));
blk.append(Code.Goto(exitLabel));
blk.append(Code.Label(trueLabel));
blk.append(Code.Const(Value.V_BOOL(true)), attributes(v));
blk.append(Code.Label(exitLabel));
return blk;
}
Expr.BOp bop = v.op;
Block blk = new Block(environment.size());
blk.append(generate(v.lhs, environment));
blk.append(generate(v.rhs, environment));
Type result = v.result().raw();
switch(bop) {
case UNION:
blk.append(Code.SetUnion((Type.EffectiveSet)result,Code.OpDir.UNIFORM),attributes(v));
return blk;
case INTERSECTION:
blk.append(Code.SetIntersect((Type.EffectiveSet)result,Code.OpDir.UNIFORM),attributes(v));
return blk;
case DIFFERENCE:
blk.append(Code.SetDifference((Type.EffectiveSet)result,Code.OpDir.UNIFORM),attributes(v));
return blk;
case LISTAPPEND:
blk.append(Code.ListAppend((Type.EffectiveList)result,Code.OpDir.UNIFORM),attributes(v));
return blk;
case STRINGAPPEND:
Type lhs = v.lhs.result().raw();
Type rhs = v.rhs.result().raw();
Code.OpDir dir;
if(lhs == Type.T_STRING && rhs == Type.T_STRING) {
dir = Code.OpDir.UNIFORM;
} else if(lhs == Type.T_STRING && Type.isSubtype(Type.T_CHAR, rhs)) {
dir = Code.OpDir.LEFT;
} else if(rhs == Type.T_STRING && Type.isSubtype(Type.T_CHAR, lhs)) {
dir = Code.OpDir.RIGHT;
} else {
// this indicates that one operand must be explicitly converted
// into a string.
dir = Code.OpDir.UNIFORM;
}
blk.append(Code.StringAppend(dir),attributes(v));
return blk;
default:
blk.append(Code.BinOp(result, OP2BOP(bop,v)),attributes(v));
return blk;
}
}
private Block generate(Expr.Set v, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
int nargs = 0;
for (Expr e : v.arguments) {
nargs++;
blk.append(generate(e, environment));
}
blk.append(Code.NewSet(v.type.raw(),nargs),attributes(v));
return blk;
}
private Block generate(Expr.List v, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
int nargs = 0;
for (Expr e : v.arguments) {
nargs++;
blk.append(generate(e, environment));
}
blk.append(Code.NewList(v.type.raw(),nargs),attributes(v));
return blk;
}
private Block generate(Expr.SubList v, HashMap<String, Integer> environment) {
Block blk = new Block(environment.size());
blk.append(generate(v.src, environment));
blk.append(generate(v.start, environment));
blk.append(generate(v.end, environment));
blk.append(Code.SubList(v.type.raw()), attributes(v));
return blk;
}
private Block generate(Expr.SubString v, HashMap<String, Integer> environment) {
Block blk = new Block(environment.size());
blk.append(generate(v.src, environment));
blk.append(generate(v.start, environment));
blk.append(generate(v.end, environment));
blk.append(Code.SubString(), attributes(v));
return blk;
}
private Block generate(Expr.Comprehension e, HashMap<String,Integer> environment) {
// First, check for boolean cases which are handled mostly by
// generateCondition.
if (e.cop == Expr.COp.SOME || e.cop == Expr.COp.NONE) {
String trueLabel = Block.freshLabel();
String exitLabel = Block.freshLabel();
int freeSlot = allocate(environment);
Block blk = generateCondition(trueLabel, e, environment);
blk.append(Code.Const(Value.V_BOOL(false)), attributes(e));
blk.append(Code.Store(Type.T_BOOL,freeSlot),attributes(e));
blk.append(Code.Goto(exitLabel));
blk.append(Code.Label(trueLabel));
blk.append(Code.Const(Value.V_BOOL(true)), attributes(e));
blk.append(Code.Store(Type.T_BOOL,freeSlot),attributes(e));
blk.append(Code.Label(exitLabel));
blk.append(Code.Load(Type.T_BOOL,freeSlot),attributes(e));
return blk;
}
// Ok, non-boolean case.
Block blk = new Block(environment.size());
		ArrayList<Triple<Integer, Integer, Type.EffectiveCollection>> slots = new ArrayList<Triple<Integer, Integer, Type.EffectiveCollection>>();
for (Pair<String, Expr> p : e.sources) {
int srcSlot;
int varSlot = allocate(p.first(),environment);
Expr src = p.second();
Type rawSrcType = src.result().raw();
if(src instanceof Expr.LocalVariable) {
// this is a little optimisation to produce slightly better
// code.
Expr.LocalVariable v = (Expr.LocalVariable) src;
if(environment.containsKey(v.var)) {
srcSlot = environment.get(v.var);
} else {
// fall-back plan ...
blk.append(generate(src, environment));
srcSlot = allocate(environment);
blk.append(Code.Store(rawSrcType, srcSlot),attributes(e));
}
} else {
blk.append(generate(src, environment));
srcSlot = allocate(environment);
blk.append(Code.Store(rawSrcType, srcSlot),attributes(e));
}
			slots.add(new Triple<Integer, Integer, Type.EffectiveCollection>(varSlot, srcSlot,
					(Type.EffectiveCollection) rawSrcType));
}
Type resultType;
int resultSlot = allocate(environment);
if (e.cop == Expr.COp.LISTCOMP) {
resultType = e.type.raw();
blk.append(Code.NewList((Type.List) resultType,0), attributes(e));
blk.append(Code.Store((Type.List) resultType,resultSlot),attributes(e));
} else {
resultType = e.type.raw();
blk.append(Code.NewSet((Type.Set) resultType,0), attributes(e));
blk.append(Code.Store((Type.Set) resultType,resultSlot),attributes(e));
}
// At this point, it would be good to determine an appropriate loop
// invariant for a set comprehension. This is easy enough in the case of
// a single variable comprehension, but actually rather difficult for a
// multi-variable comprehension.
// For example, consider <code>{x+y | x in xs, y in ys, x<0 && y<0}</code>
// What is an appropriate loop invariant here?
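		// As a concrete (illustrative) sketch of what the code below emits,
		// <code>{x+y | x in xs, y in ys}</code> compiles roughly to:
		//
		//   newset {}; store %r            // empty result set
		//   load xs
		//   forall x : lab$x               // iterate xs, binding x
		//     load ys
		//     forall y : lab$y             // iterate ys, binding y
		//       load %r; load x; load y; add
		//       union(left); store %r      // %r = %r + {x+y}
		//     end lab$y
		//   end lab$x
		//   load %r                        // value of the comprehension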
String continueLabel = Block.freshLabel();
ArrayList<String> labels = new ArrayList<String>();
String loopLabel = Block.freshLabel();
for (Triple<Integer, Integer, Type.EffectiveCollection> p : slots) {
String target = loopLabel + "$" + p.first();
blk.append(Code.Load((Type) p.third(), p.second()), attributes(e));
blk.append(Code.ForAll(p.third(), p.first(), target,
Collections.EMPTY_LIST), attributes(e));
labels.add(target);
}
if (e.condition != null) {
blk.append(generateCondition(continueLabel, invert(e.condition),
environment));
}
blk.append(Code.Load(resultType,resultSlot),attributes(e));
blk.append(generate(e.value, environment));
		// FIXME: the following is broken for list comprehensions, since a
		// SetUnion is always emitted even when resultType is a list; that
		// case presumably needs Code.ListAppend instead.
blk.append(Code.SetUnion((Type.Set) resultType, Code.OpDir.LEFT),attributes(e));
blk.append(Code.Store(resultType,resultSlot),attributes(e));
if(e.condition != null) {
blk.append(Code.Label(continueLabel));
}
for (int i = (labels.size() - 1); i >= 0; --i) {
blk.append(Code.End(labels.get(i)));
}
blk.append(Code.Load(resultType,resultSlot),attributes(e));
return blk;
}
private Block generate(Expr.Record sg, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
ArrayList<String> keys = new ArrayList<String>(sg.fields.keySet());
Collections.sort(keys);
for (String key : keys) {
blk.append(generate(sg.fields.get(key), environment));
}
blk.append(Code.NewRecord(sg.result().raw()), attributes(sg));
return blk;
}
private Block generate(Expr.Tuple sg, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
for (Expr e : sg.fields) {
blk.append(generate(e, environment));
}
blk.append(Code.NewTuple(sg.result().raw(),sg.fields.size()),attributes(sg));
return blk;
}
private Block generate(Expr.Dictionary sg, HashMap<String,Integer> environment) {
Block blk = new Block(environment.size());
for (Pair<Expr,Expr> e : sg.pairs) {
blk.append(generate(e.first(), environment));
blk.append(generate(e.second(), environment));
}
blk.append(Code.NewDict(sg.result().raw(),sg.pairs.size()),attributes(sg));
return blk;
}
private Block generate(Expr.RecordAccess sg, HashMap<String,Integer> environment) {
Block lhs = generate(sg.src, environment);
lhs.append(Code.FieldLoad(sg.srcType.raw(),sg.name), attributes(sg));
return lhs;
}
private Block generate(Expr.New expr,
HashMap<String, Integer> environment) throws ResolveError {
Block blk = generate(expr.expr,environment);
		blk.append(Code.New(expr.type.raw()), attributes(expr));
return blk;
}
private Code.BOp OP2BOP(Expr.BOp bop, SyntacticElement elem) {
switch (bop) {
case ADD:
return Code.BOp.ADD;
case SUB:
return Code.BOp.SUB;
case MUL:
return Code.BOp.MUL;
case DIV:
return Code.BOp.DIV;
case REM:
return Code.BOp.REM;
case RANGE:
return Code.BOp.RANGE;
case BITWISEAND:
return Code.BOp.BITWISEAND;
case BITWISEOR:
return Code.BOp.BITWISEOR;
case BITWISEXOR:
return Code.BOp.BITWISEXOR;
case LEFTSHIFT:
return Code.BOp.LEFTSHIFT;
case RIGHTSHIFT:
return Code.BOp.RIGHTSHIFT;
}
syntaxError(errorMessage(INVALID_BINARY_EXPRESSION), context, elem);
return null;
}
private Code.COp OP2COP(Expr.BOp bop, SyntacticElement elem) {
switch (bop) {
case EQ:
return Code.COp.EQ;
case NEQ:
return Code.COp.NEQ;
case LT:
return Code.COp.LT;
case LTEQ:
return Code.COp.LTEQ;
case GT:
return Code.COp.GT;
case GTEQ:
return Code.COp.GTEQ;
case SUBSET:
return Code.COp.SUBSET;
case SUBSETEQ:
return Code.COp.SUBSETEQ;
case ELEMENTOF:
return Code.COp.ELEMOF;
}
syntaxError(errorMessage(INVALID_BOOLEAN_EXPRESSION), context, elem);
return null;
}
private static int allocate(HashMap<String,Integer> environment) {
return allocate("$" + environment.size(),environment);
}
private static int allocate(String var, HashMap<String,Integer> environment) {
// this method is a bit of a hack
Integer r = environment.get(var);
if(r == null) {
int slot = environment.size();
environment.put(var, slot);
return slot;
} else {
return r;
}
}
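	// Note: the no-argument allocate() manufactures the name "$" + size.
	// Since each fresh allocation grows the environment by one, successive
	// temporaries get distinct names; and since (presumably) '$' cannot
	// begin a source-level identifier, they cannot collide with user
	// variables either.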
private static Expr invert(Expr e) {
if (e instanceof Expr.BinOp) {
Expr.BinOp bop = (Expr.BinOp) e;
Expr.BinOp nbop = null;
switch (bop.op) {
case AND:
nbop = new Expr.BinOp(Expr.BOp.OR, invert(bop.lhs), invert(bop.rhs), attributes(e));
break;
case OR:
nbop = new Expr.BinOp(Expr.BOp.AND, invert(bop.lhs), invert(bop.rhs), attributes(e));
break;
case EQ:
nbop = new Expr.BinOp(Expr.BOp.NEQ, bop.lhs, bop.rhs, attributes(e));
break;
case NEQ:
nbop = new Expr.BinOp(Expr.BOp.EQ, bop.lhs, bop.rhs, attributes(e));
break;
case LT:
nbop = new Expr.BinOp(Expr.BOp.GTEQ, bop.lhs, bop.rhs, attributes(e));
break;
case LTEQ:
nbop = new Expr.BinOp(Expr.BOp.GT, bop.lhs, bop.rhs, attributes(e));
break;
case GT:
nbop = new Expr.BinOp(Expr.BOp.LTEQ, bop.lhs, bop.rhs, attributes(e));
break;
case GTEQ:
nbop = new Expr.BinOp(Expr.BOp.LT, bop.lhs, bop.rhs, attributes(e));
break;
}
if(nbop != null) {
nbop.srcType = bop.srcType;
return nbop;
}
} else if (e instanceof Expr.UnOp) {
Expr.UnOp uop = (Expr.UnOp) e;
switch (uop.op) {
case NOT:
return uop.mhs;
}
}
Expr.UnOp r = new Expr.UnOp(Expr.UOp.NOT, e);
r.type = Nominal.T_BOOL;
return r;
}
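	// Example: invert pushes negation inwards using De Morgan's laws, so
	// <code>!(x < y && b)</code> becomes <code>x >= y || !b</code>. Only
	// when no structural inversion applies is an explicit NOT node
	// introduced.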
	/**
	 * The shiftBlockExceptionZero method takes a block and shifts every
	 * slot, except slot zero, a given amount to the right; slot zero is
	 * instead remapped to the given destination slot. The number of inputs
	 * remains the same. This method is used when inlining a block whose
	 * first slot must be rebound to an existing register in the enclosing
	 * block. For example, with amount=3 and zeroDest=7, slots are remapped
	 * as 0->7, 1->4, 2->5.
	 *
	 * @param amount   number of slots to shift by
	 * @param zeroDest slot to which slot zero is remapped
	 * @param blk      block to shift
	 * @return shifted (and relabelled) copy of the block
	 */
private static Block shiftBlockExceptionZero(int amount, int zeroDest, Block blk) {
HashMap<Integer,Integer> binding = new HashMap<Integer,Integer>();
for(int i=1;i!=blk.numSlots();++i) {
binding.put(i,i+amount);
}
binding.put(0, zeroDest);
Block nblock = new Block(blk.numInputs());
for(Block.Entry e : blk) {
Code code = e.code.remap(binding);
nblock.append(code,e.attributes());
}
return nblock.relabel();
}
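	/**
	 * The chainBlock method produces a copy of the given block in which
	 * every explicit Fail bytecode is replaced by an unconditional Goto to
	 * the given target, so that a failed check can be handled at that label
	 * rather than aborting outright.
	 *
	 * @param target label to branch to in place of a failure
	 * @param blk    block to rewrite
	 * @return rewritten (and relabelled) copy of the block
	 */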
private static Block chainBlock(String target, Block blk) {
Block nblock = new Block(blk.numInputs());
for (Block.Entry e : blk) {
if (e.code instanceof Code.Fail) {
nblock.append(Code.Goto(target), e.attributes());
} else {
nblock.append(e.code, e.attributes());
}
}
return nblock.relabel();
}
/**
* The attributes method extracts those attributes of relevance to wyil, and
* discards those which are only used for the wyc front end.
*
* @param elem
* @return
*/
private static Collection<Attribute> attributes(SyntacticElement elem) {
ArrayList<Attribute> attrs = new ArrayList<Attribute>();
attrs.add(elem.attribute(Attribute.Source.class));
return attrs;
}
}
package com.intellij.openapi.diff.impl.patch.formove;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diff.impl.patch.ApplyPatchContext;
import com.intellij.openapi.diff.impl.patch.ApplyPatchStatus;
import com.intellij.openapi.diff.impl.patch.FilePatch;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.ex.FileTypeChooser;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vcs.changes.LocalChangeList;
import com.intellij.openapi.vcs.changes.VcsDirtyScopeManager;
import com.intellij.openapi.vcs.changes.patch.ApplyPatchAction;
import com.intellij.openapi.vcs.changes.patch.RelativePathCalculator;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Applies file patches to a project. Used both when applying patches directly and when unshelving changes.
*/
public class PatchApplier {
private final Project myProject;
private final VirtualFile myBaseDirectory;
private final List<FilePatch> myPatches;
private final CustomBinaryPatchApplier myCustomForBinaries;
private final LocalChangeList myTargetChangeList;
private final List<FilePatch> myRemainingPatches;
private final PathsVerifier myVerifier;
public PatchApplier(final Project project, final VirtualFile baseDirectory, final List<FilePatch> patches,
final LocalChangeList targetChangeList, final CustomBinaryPatchApplier customForBinaries) {
myProject = project;
myBaseDirectory = baseDirectory;
myPatches = patches;
myTargetChangeList = targetChangeList;
myCustomForBinaries = customForBinaries;
myRemainingPatches = new ArrayList<FilePatch>();
myVerifier = new PathsVerifier(myProject, myBaseDirectory, myPatches);
}
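  /**
   * Runs the whole pipeline inside a single write action and command:
   * verify the patch paths, make the target files writable, check that the
   * text patches map to known file types, then apply. Afterwards the
   * affected files are refreshed and marked dirty for the VCS, and, when a
   * target change list was supplied, the resulting changes are moved into
   * it.
   */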
public ApplyPatchStatus execute() {
myRemainingPatches.addAll(myPatches);
final Ref<ApplyPatchStatus> refStatus = new Ref<ApplyPatchStatus>(ApplyPatchStatus.FAILURE);
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
public void run() {
if (! myVerifier.execute()) {
return;
}
if (! makeWritable(myVerifier.getWritableFiles())) {
return;
}
final List<Pair<VirtualFile, FilePatch>> textPatches = myVerifier.getTextPatches();
if (! fileTypesAreOk(textPatches)) {
return;
}
final ApplyPatchStatus status = actualApply(myVerifier);
if (status != null) {
refStatus.set(status);
}
} // end of Command run
}, VcsBundle.message("patch.apply.command"), null);
}
});
showApplyStatus(refStatus.get());
final List<VirtualFile> directlyAffected = myVerifier.getDirectlyAffected();
final List<VirtualFile> indirectlyAffected = myVerifier.getAllAffected();
refreshIndirectlyAffected(indirectlyAffected);
final VcsDirtyScopeManager vcsDirtyScopeManager = VcsDirtyScopeManager.getInstance(myProject);
for (VirtualFile file : directlyAffected) {
vcsDirtyScopeManager.fileDirty(file);
}
if ((myTargetChangeList != null) && (! directlyAffected.isEmpty())) {
ApplyPatchAction.moveChangesOfVsToList(myProject, directlyAffected, myTargetChangeList);
} else {
final ChangeListManager changeListManager = ChangeListManager.getInstance(myProject);
changeListManager.scheduleUpdate();
}
return refStatus.get();
}
@Nullable
private ApplyPatchStatus actualApply(final PathsVerifier verifier) {
final List<Pair<VirtualFile, FilePatch>> textPatches = verifier.getTextPatches();
final ApplyPatchContext context = new ApplyPatchContext(myBaseDirectory, 0, true, true);
ApplyPatchStatus status = null;
try {
status = applyList(textPatches, context, status);
if (myCustomForBinaries == null) {
status = applyList(verifier.getBinaryPatches(), context, status);
} else {
final List<Pair<VirtualFile, FilePatch>> binaryPatches = verifier.getBinaryPatches();
ApplyPatchStatus patchStatus = myCustomForBinaries.apply(binaryPatches);
final List<FilePatch> appliedPatches = myCustomForBinaries.getAppliedPatches();
moveForCustomBinaries(binaryPatches, patchStatus, appliedPatches);
status = ApplyPatchStatus.and(status, patchStatus);
myRemainingPatches.removeAll(appliedPatches);
}
}
catch (IOException e) {
showError(myProject, e.getMessage(), true);
return ApplyPatchStatus.FAILURE;
}
return status;
}
private void moveForCustomBinaries(final List<Pair<VirtualFile, FilePatch>> patches, ApplyPatchStatus status,
final List<FilePatch> appliedPatches) throws IOException {
for (Pair<VirtualFile, FilePatch> patch : patches) {
if (appliedPatches.contains(patch.getSecond())) {
myVerifier.doMoveIfNeeded(patch.getFirst());
}
}
}
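  /**
   * Applies the given patches in order, folding the per-patch results with
   * ApplyPatchStatus.and(). Each success is removed from the remaining
   * list; the first non-success aborts the loop, so unapplied patches stay
   * in myRemainingPatches for the caller to report.
   */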
private ApplyPatchStatus applyList(final List<Pair<VirtualFile, FilePatch>> patches, final ApplyPatchContext context,
ApplyPatchStatus status) throws IOException {
for (Pair<VirtualFile, FilePatch> patch : patches) {
ApplyPatchStatus patchStatus = ApplyPatchAction.applyOnly(myProject, patch.getSecond(), context, patch.getFirst());
myVerifier.doMoveIfNeeded(patch.getFirst());
status = ApplyPatchStatus.and(status, patchStatus);
if (ApplyPatchStatus.SUCCESS.equals(patchStatus)) {
myRemainingPatches.remove(patch.getSecond());
} else {
// interrupt if failure
return status;
}
}
return status;
}
private void showApplyStatus(final ApplyPatchStatus status) {
if (status == ApplyPatchStatus.ALREADY_APPLIED) {
showError(myProject, VcsBundle.message("patch.apply.already.applied"), false);
}
else if (status == ApplyPatchStatus.PARTIAL) {
showError(myProject, VcsBundle.message("patch.apply.partially.applied"), false);
} else if (ApplyPatchStatus.SUCCESS.equals(status)) {
showError(myProject, VcsBundle.message("patch.apply.success.applied.text"), false);
}
}
public List<FilePatch> getRemainingPatches() {
return myRemainingPatches;
}
private boolean makeWritable(final List<VirtualFile> filesToMakeWritable) {
final VirtualFile[] fileArray = filesToMakeWritable.toArray(new VirtualFile[filesToMakeWritable.size()]);
final ReadonlyStatusHandler.OperationStatus readonlyStatus = ReadonlyStatusHandler.getInstance(myProject).ensureFilesWritable(fileArray);
return (! readonlyStatus.hasReadonlyFiles());
}
private boolean fileTypesAreOk(final List<Pair<VirtualFile, FilePatch>> textPatches) {
for (Pair<VirtualFile, FilePatch> textPatch : textPatches) {
final VirtualFile file = textPatch.getFirst();
if (! file.isDirectory()) {
FileType fileType = file.getFileType();
if (fileType == FileTypes.UNKNOWN) {
fileType = FileTypeChooser.associateFileType(file.getPresentableName());
if (fileType == null) {
showError(myProject, "Cannot apply patch. File " + file.getPresentableName() + " type not defined.", true);
return false;
}
}
}
}
return true;
}
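  /**
   * Refreshes each indirectly affected file after sorting with
   * FilePathComparator (presumably so parents are refreshed before their
   * children). The refresh(true, false) call requests an asynchronous,
   * non-recursive refresh in the platform API.
   */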
private void refreshIndirectlyAffected(final List<VirtualFile> files) {
Collections.sort(files, FilePathComparator.getInstance());
for (VirtualFile file : files) {
file.refresh(true, false);
}
}
public static void showError(final Project project, final String message, final boolean error) {
final Application application = ApplicationManager.getApplication();
if (application.isUnitTestMode()) {
return;
}
final String title = VcsBundle.message("patch.apply.dialog.title");
final Runnable messageShower = new Runnable() {
public void run() {
if (error) {
Messages.showErrorDialog(project, message, title);
}
else {
Messages.showInfoMessage(project, message, title);
}
}
};
if (application.isDispatchThread()) {
messageShower.run();
} else {
application.invokeLater(new Runnable() {
public void run() {
messageShower.run();
}
});
}
}
@Nullable
public static VirtualFile getFile(final VirtualFile baseDir, final String path) {
if (path == null) {
return null;
}
final List<String> tail = new ArrayList<String>();
final VirtualFile file = getFile(baseDir, path, tail);
if (tail.isEmpty()) {
return file;
}
return null;
}
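  /**
   * Walks path downwards from baseDir, skipping empty and "." components
   * and treating ".." as a step to the parent. If some component is
   * missing, the deepest existing ancestor is returned and the unresolved
   * components are appended to tail. For example (names hypothetical),
   * resolving "src/missing/File.java" against a base containing only "src"
   * yields the "src" directory with tail = ["missing", "File.java"].
   */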
@Nullable
public static VirtualFile getFile(final VirtualFile baseDir, final String path, final List<String> tail) {
VirtualFile child = baseDir;
final String[] pieces = RelativePathCalculator.split(path);
for (int i = 0; i < pieces.length; i++) {
final String piece = pieces[i];
if (child == null) {
return null;
}
if ("".equals(piece) || ".".equals(piece)) {
continue;
}
if ("..".equals(piece)) {
child = child.getParent();
continue;
}
VirtualFile nextChild = child.findChild(piece);
if (nextChild == null) {
if (tail != null) {
for (int j = i; j < pieces.length; j++) {
final String pieceInner = pieces[j];
tail.add(pieceInner);
}
}
return child;
}
child = nextChild;
}
return child;
}
}
package org.nakedobjects.viewer.skylark.basic;
import org.nakedobjects.object.Naked;
import org.nakedobjects.object.NakedObject;
import org.nakedobjects.object.NakedObjectSpecification;
import org.nakedobjects.object.control.Consent;
import org.nakedobjects.object.control.Veto;
import org.nakedobjects.utility.DebugString;
import org.nakedobjects.utility.NakedObjectRuntimeException;
import org.nakedobjects.viewer.skylark.ObjectContent;
import org.nakedobjects.viewer.skylark.ParameterContent;
/**
* Links an action on an object to a view.
*/
public class ActionContent extends ObjectContent {
private final ActionHelper invocation;
private final ParameterContent[] parameters;
public ActionContent(ActionHelper invocation) {
this.invocation = invocation;
parameters = invocation.createParameters();
}
public Consent canClear() {
return Veto.DEFAULT;
}
public Consent canSet(NakedObject dragSource) {
return Veto.DEFAULT;
}
public void clear() {
throw new NakedObjectRuntimeException("Invalid call");
}
public void debugDetails(DebugString debug) {
debug.appendln(4, "action", getActionName());
debug.appendln(4, "target", getNaked());
String parameterSet = "";
for (int i = 0; i < parameters.length; i++) {
parameterSet += parameters[i];
}
debug.appendln(4, "parameters", parameterSet);
}
public Consent disabled() {
return invocation.disabled();
}
public Naked execute() {
return invocation.invoke();
}
public String getActionName() {
return invocation.getName();
}
public String getIconName() {
return getNaked().getIconName();
}
    /*
    public Image getIconPicture(int iconHeight) {
        NakedObjectSpecification specification = getNaked().getSpecification();
        return ImageFactory.getInstance().loadIcon(specification, "", iconHeight);
    }
    */
    public Naked getNaked() {
return invocation.getTarget();
}
public int getNoParameters() {
return parameters.length;
}
public NakedObject getObject() {
return invocation.getTarget();
}
public ParameterContent getParameterContent(int index) {
return parameters[index];
}
public Naked getParameterObject(int index) {
return invocation.getParameter(index);
}
public NakedObjectSpecification getSpecification() {
return getObject().getSpecification();
}
/**
     * Can't persist actions
*/
public boolean isPersistable() {
return false;
}
public boolean isObject() {
return true;
}
public boolean isTransient() {
return true;
}
public void setObject(NakedObject object) {
throw new NakedObjectRuntimeException("Invalid call");
}
public void setParameter(int number, ParameterContent parameter) {
parameters[number] = parameter;
}
public String title() {
return getNaked().titleString();
}
public String windowTitle() {
return getActionName();
}
public String getName() {
return invocation.getName();
}
public String getDescription() {
return invocation.getDescription();
}
}
package org.nakedobjects.viewer.skylark.value;
import org.nakedobjects.viewer.skylark.Canvas;
import org.nakedobjects.viewer.skylark.Color;
import org.nakedobjects.viewer.skylark.Content;
import org.nakedobjects.viewer.skylark.MenuOptionSet;
import org.nakedobjects.viewer.skylark.Size;
import org.nakedobjects.viewer.skylark.Style;
import org.nakedobjects.viewer.skylark.Text;
import org.nakedobjects.viewer.skylark.ValueContent;
import org.nakedobjects.viewer.skylark.ViewAxis;
import org.nakedobjects.viewer.skylark.ViewSpecification;
import java.awt.event.KeyEvent;
public class PasswordField extends AbstractField {
protected static final Text style = Style.NORMAL;
private boolean isSaved;
private int maxTextWidth;
private String password;
private int width;
public PasswordField(Content content, ViewSpecification design, ViewAxis axis, int width) {
super(content, design, axis);
setMaxTextWidth(width);
this.width = style.charWidth('O') + 2;
// height = style.getTextHeight() + style.getDescent();
password = text();
}
public boolean canFocus() {
return canChangeValue();
}
public void contentMenuOptions(MenuOptionSet options) {
options.add(MenuOptionSet.OBJECT, new ClearValueOption());
        super.contentMenuOptions(options);
options.setColor(Style.VALUE_MENU);
}
    private void delete() {
        if (password.length() == 0) {
            return; // nothing left to delete
        }
        isSaved = false;
        password = password.substring(0, password.length() - 1);
        markDamaged();
    }
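    /**
     * Renders the password as a row of ovals instead of text: one filled
     * oval per character typed so far, overlaid with outlined ovals that
     * include one extra empty outline marking where the next character will
     * appear. Fill and outline colours change while the field has the
     * focus.
     */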
public void draw(Canvas canvas) {
super.draw(canvas);
int length = password.length();
int x = 0;
for (int i = 0; i < length; i++) {
canvas.drawSolidOval(x, 0 + VPADDING + 2, width, width, hasFocus() ? Color.YELLOW : Color.LIGHT_GRAY);
x += width + 2;
}
x = 0;
for (int i = 0; i < length + 1; i++) {
canvas.drawOval(0 + i * (width + 2), 0 + VPADDING + 2, width, width, hasFocus() ? Color.BLACK : Color.GRAY);
x += width + 2;
}
}
public void editComplete() {
if (canChangeValue() && !isSaved) {
isSaved = true;
initiateSave();
}
}
public void escape() {
password = "";
isSaved = false;
markDamaged();
}
public void focusLost() {
editComplete();
}
public Size getRequiredSize() {
int width = HPADDING + maxTextWidth + HPADDING;
int height = style.getLineHeight() + VPADDING * 2;
height = Math.max(height, Style.defaultFieldHeight());
return new Size(width, height);
}
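    /**
     * Minimal key handling: left-arrow, delete and backspace all remove the
     * last character (there is no caret movement within the hidden text);
     * tab and enter commit the entry (enter also forwards to the parent
     * view); escape clears the field.
     */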
public void keyPressed(final int keyCode, final int modifiers) {
if (!canChangeValue()) {
return;
}
if (keyCode == KeyEvent.VK_CONTROL || keyCode == KeyEvent.VK_SHIFT || keyCode == KeyEvent.VK_ALT) {
return;
}
        switch (keyCode) {
        case KeyEvent.VK_LEFT:
        case KeyEvent.VK_DELETE:
        case KeyEvent.VK_BACK_SPACE:
            delete();
            break;
case KeyEvent.VK_TAB:
editComplete();
break;
case KeyEvent.VK_ENTER:
editComplete();
getParent().keyPressed(keyCode, modifiers);
break;
case KeyEvent.VK_ESCAPE:
escape();
break;
default:
break;
}
}
public void keyTyped(char keyCode) {
password += keyCode;
isSaved = false;
markDamaged();
}
protected void save() {
ValueContent content = (ValueContent) getContent();
content.parseTextEntry(password);
}
/**
* Set the maximum width of the field, as a number of characters
*/
public void setMaxTextWidth(int noCharacters) {
maxTextWidth = style.charWidth('o') * noCharacters;
}
/**
* Set the maximum width of the field, as a number of pixels
*/
public void setMaxWidth(int width) {
maxTextWidth = width;
}
private String text() {
String text;
ValueContent content = (ValueContent) getContent();
text = content.getNaked().titleString();
return text;
}
}