answer stringlengths 17 10.2M |
|---|
package de.comeight.crystallogy.gui.bookOfKnowledge.pages.credits;
import de.comeight.crystallogy.gui.bookOfKnowledge.GuiBookUtilities;
import de.comeight.crystallogy.gui.bookOfKnowledge.pages.GuiBookPage;
import de.comeight.crystallogy.util.RGBColor;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class GuiBookCredits extends GuiBookPage {
public GuiBookCredits() {
super("Credits:");
}
@Override
public void drawScreen(int mouseX, int mouseY, float partialTicks) {
super.drawScreen(mouseX, mouseY, partialTicks);
drawText();
drawVersion();
}
private void drawText(){
GuiBookUtilities.drawTextBox(xPosBook + BORDER_LEFT, yPosBook + BORDER_TOP + 20, WRAPWIDTH, 1.0F, "First of all THANKS for reading this book.\n"
+ "\n"
+ "This book was written and developed by COM8.\n"
+ "\n"
+ "I know that there will be spelling mistakes and or grammatical problems that I was unable to sort out. "
+ "If you find one, feel free to report it as a bug at:\n");
GuiBookUtilities.drawTextBox(xPosBook + BORDER_LEFT, yPosBook + BORDER_TOP + 130, WRAPWIDTH, 1.0F, new RGBColor(0.0F, 0.0F, 1.0F).toInt(), "https://github.com/COM8/Crystallogy/issues");
GuiBookUtilities.drawTextBox(xPosBook + BORDER_LEFT, yPosBook + BORDER_TOP + 160, WRAPWIDTH, 1.0F, "If you have a suggestion how I could improve this book, you can create an \"Issue\" under the link above with the label \"enhancement\".\n");
}
private void drawVersion(){
GuiBookUtilities.drawTextBox(xPosBook + xSize - 35, yPosBook + BORDER_TOP, WRAPWIDTH, 1.0F, new RGBColor(1.0F, 0.0F, 0.0F).toInt(), "v.1.1");
}
} |
package net.cgt.weixin.activity;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.Window;
import android.view.View.OnClickListener;
/**
 * Base activity: shared click handling plus action-bar overflow-menu tweaks.
 *
 * @author lijian
 * @date 2015-01-08 22:03:31
 */
public class BaseActivity extends Activity implements OnClickListener {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public void onClick(View v) {
}
/**
* overflow<br>
* onMenuOpened()overflowAction
*
* @param featureId
* @param menu
* @return
*/
@Override
public boolean onMenuOpened(int featureId, Menu menu) {
//MenuBuildersetOptionalIconsVisibletrue
if (featureId == Window.FEATURE_ACTION_BAR && menu != null) {
if (menu.getClass().getSimpleName().equals("MenuBuilder")) {
try {
Method m = menu.getClass().getDeclaredMethod("setOptionalIconsVisible", Boolean.TYPE);
m.setAccessible(true);
m.invoke(menu, true);
} catch (Exception e) {
}
}
}
return super.onMenuOpened(featureId, menu);
}
/**
* AndroidMenu<br>
* setOverflowShowingAlways()MenuMenuoverflow
*/
protected void setOverflowShowingAlways() {
try {
ViewConfiguration config = ViewConfiguration.get(this);
Field menuKeyField = ViewConfiguration.class.getDeclaredField("sHasPermanentMenuKey");
menuKeyField.setAccessible(true);
menuKeyField.setBoolean(config, false);
} catch (Exception e) {
e.printStackTrace();
}
}
} |
package org.commcare.session;
import org.commcare.modern.util.Pair;
import org.commcare.suite.model.ComputedDatum;
import org.commcare.suite.model.Detail;
import org.commcare.suite.model.EntityDatum;
import org.commcare.suite.model.Entry;
import org.commcare.suite.model.FormEntry;
import org.commcare.suite.model.FormIdDatum;
import org.commcare.suite.model.Menu;
import org.commcare.suite.model.RemoteQueryDatum;
import org.commcare.suite.model.SessionDatum;
import org.commcare.suite.model.StackFrameStep;
import org.commcare.suite.model.StackOperation;
import org.commcare.suite.model.Suite;
import org.commcare.suite.model.SyncEntry;
import org.commcare.util.CommCarePlatform;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.DataInstance;
import org.javarosa.core.model.instance.ExternalDataInstance;
import org.javarosa.core.model.instance.InstanceInitializationFactory;
import org.javarosa.core.services.locale.Localizer;
import org.javarosa.core.util.OrderedHashtable;
import org.javarosa.core.util.externalizable.DeserializationException;
import org.javarosa.core.util.externalizable.ExtUtil;
import org.javarosa.xpath.XPathException;
import org.javarosa.xpath.XPathParseTool;
import org.javarosa.xpath.expr.XPathExpression;
import org.javarosa.xpath.expr.XPathFuncExpr;
import org.javarosa.xpath.parser.XPathSyntaxException;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Stack;
import java.util.Vector;
/**
* Before arriving at the Form Entry phase, CommCare applications
* need to determine what form to enter, and with what pre-requisites.
*
* A CommCare Session helps to encapsulate this information by identifying
* the set of possible entry operations (Every piece of data needed to begin
* entry) and specifying the operation which would most quickly filter our
* the set of operations.
*
* NOTE: Currently horribly coupled to the platform.
*
* @author ctsims
*/
public class CommCareSession {
private final CommCarePlatform platform;
private StackFrameStep popped;
private String currentCmd;
/**
* A table of all datums (id --> value) that are currently on the session stack
*/
private final OrderedHashtable<String, String> collectedDatums;
private String currentXmlns;
/**
* The current session frame data
*/
private SessionFrame frame;
/**
* The stack of pending Frames
*/
private final Stack<SessionFrame> frameStack;
/**
* Used by touchforms
*/
@SuppressWarnings("unused")
    public CommCareSession() {
        // Cast disambiguates from the copy constructor overload.
        this((CommCarePlatform)null);
    }

    /**
     * Creates an empty session for the given platform: no collected datums,
     * a fresh live frame, and an empty stack of pending frames.
     */
    public CommCareSession(CommCarePlatform platform) {
        this.platform = platform;
        this.collectedDatums = new OrderedHashtable<>();
        this.frame = new SessionFrame();
        this.frameStack = new Stack<>();
    }
/**
* Copy constructor
*/
    public CommCareSession(CommCareSession oldCommCareSession) {
        // NOTE: 'platform' is being copied in a shallow manner
        this.platform = oldCommCareSession.platform;
        // Deep-copy the last popped step, if any.
        if (oldCommCareSession.popped != null) {
            this.popped = new StackFrameStep(oldCommCareSession.popped);
        }
        this.currentCmd = oldCommCareSession.currentCmd;
        this.currentXmlns = oldCommCareSession.currentXmlns;
        this.frame = new SessionFrame(oldCommCareSession.frame);
        collectedDatums = new OrderedHashtable<>();
        // Copy entries in order; keys/values are Strings so a shallow copy suffices.
        for (Enumeration e = oldCommCareSession.collectedDatums.keys(); e.hasMoreElements(); ) {
            String key = (String)e.nextElement();
            collectedDatums.put(key, oldCommCareSession.collectedDatums.get(key));
        }
        this.frameStack = new Stack<>();
        // NOTE: can't use for/each due to J2ME build issues w/ Stack
        // Pending frames are shared (shallow) with the old session.
        for (int i = 0; i < oldCommCareSession.frameStack.size(); i++) {
            frameStack.addElement(oldCommCareSession.frameStack.elementAt(i));
        }
    }
    /** Convenience overload: entries possible for {@code commandId} with no datums collected yet. */
    public Vector<Entry> getEntriesForCommand(String commandId) {
        return getEntriesForCommand(commandId, new OrderedHashtable<String, String>());
    }
/**
* @param commandId the current command id
* @param currentSessionData all of the datums already on the stack
* @return A list of all of the form entry actions that are possible with the given commandId
* and the given list of already-collected datums
*/
private Vector<Entry> getEntriesForCommand(String commandId,
OrderedHashtable<String, String> currentSessionData) {
for (Suite s : platform.getInstalledSuites()) {
for (Menu m : s.getMenus()) {
// We need to see if everything in this menu can be matched
if (commandId.equals(m.getId())) {
return getEntriesFromMenu(m, currentSessionData);
}
}
if (s.getEntries().containsKey(commandId)) {
Vector<Entry> entries = new Vector<>();
entries.addElement(s.getEntries().get(commandId));
return entries;
}
}
return new Vector<>();
}
/**
* Get all entries that correspond to commands listed in the menu provided.
* Excludes entries whose data requirements aren't met by the 'currentSessionData'
*/
private Vector<Entry> getEntriesFromMenu(Menu menu,
OrderedHashtable<String, String> currentSessionData) {
Vector<Entry> entries = new Vector<>();
Hashtable<String, Entry> map = platform.getMenuMap();
//We're in a menu we have a set of requirements which
//need to be fulfilled
for (String cmd : menu.getCommandIds()) {
Entry e = map.get(cmd);
if (e == null) {
throw new RuntimeException("No entry found for menu command [" + cmd + "]");
}
if (entryRequirementsSatsified(e, currentSessionData)) {
entries.addElement(e);
}
}
return entries;
}
    /** @return the live table of collected datums (id -> value); not a defensive copy. */
    public OrderedHashtable<String, String> getData() {
        return collectedDatums;
    }

    // NOTE(review): method name carries a typo ("Satsified"); it is private, so a
    // rename would be safe, but is left untouched here.
    private static boolean entryRequirementsSatsified(Entry entry,
            OrderedHashtable<String, String> currentSessionData) {
        Vector<SessionDatum> requirements = entry.getSessionDataReqs();
        // Compatible when every already-collected datum matches, positionally, the
        // entry's declared requirement ids. If MORE datums have been collected than
        // the entry requires, the check is skipped entirely and the entry is treated
        // as satisfied -- TODO confirm that asymmetry is intended.
        if (requirements.size() >= currentSessionData.size()) {
            for (int i = 0; i < currentSessionData.size(); ++i) {
                if (!requirements.elementAt(i).getDataId().equals(currentSessionData.keyAt(i))) {
                    return false;
                }
            }
        }
        return true;
    }

    /** @return the platform this session runs against; may be null (touchforms). */
    public CommCarePlatform getPlatform() {
        return this.platform;
    }
/**
* Based on the current state of the session, determine what information is needed next to
* proceed
*
* @return One of the session SessionFrame.STATE_* strings, or null if
* the session does not need anything else to proceed
*/
    public String getNeededData(EvaluationContext evalContext) {
        // No command selected yet: the UI must pick one first.
        if (currentCmd == null) {
            return SessionFrame.STATE_COMMAND_ID;
        }
        Vector<Entry> entries = getEntriesForCommand(currentCmd, collectedDatums);
        // If every candidate entry agrees on its next missing datum, request that datum.
        String needDatum = getDataNeededByAllEntries(entries);
        if (needDatum != null) {
            return needDatum;
        } else if (entries.size() == 1
                && entries.elementAt(0) instanceof SyncEntry
                && ((SyncEntry)entries.elementAt(0)).getSyncPost().isRelevant(evalContext)) {
            return SessionFrame.STATE_SYNC_REQUEST;
        } else if (entries.size() > 1 || !entries.elementAt(0).getCommandId().equals(currentCmd)) {
            //the only other thing we can need is a form command. If there's
            //still more than one applicable entry, we need to keep going
            // NOTE(review): elementAt(0) assumes 'entries' is non-empty here --
            // TODO confirm the current command always resolves to at least one entry.
            return SessionFrame.STATE_COMMAND_ID;
        } else {
            return null;
        }
    }
/**
* Checks that all entries have the same id for their first required data,
* and if so, returns the data's associated session state. Otherwise,
* returns null.
*/
    private String getDataNeededByAllEntries(Vector<Entry> entries) {
        String datumNeededByAllEntriesSoFar = null;
        String neededDatumId = null;
        for (Entry e : entries) {
            SessionDatum datumNeededForThisEntry =
                    getFirstMissingDatum(collectedDatums, e.getSessionDataReqs());
            if (datumNeededForThisEntry != null) {
                if (neededDatumId == null) {
                    // First entry with a missing datum: record its id and map the
                    // datum's concrete type to the session state that collects it.
                    neededDatumId = datumNeededForThisEntry.getDataId();
                    if (datumNeededForThisEntry instanceof EntityDatum) {
                        datumNeededByAllEntriesSoFar = SessionFrame.STATE_DATUM_VAL;
                    } else if (datumNeededForThisEntry instanceof ComputedDatum) {
                        datumNeededByAllEntriesSoFar = SessionFrame.STATE_DATUM_COMPUTED;
                    } else if (datumNeededForThisEntry instanceof RemoteQueryDatum) {
                        datumNeededByAllEntriesSoFar = SessionFrame.STATE_QUERY_REQUEST;
                    }
                } else if (!neededDatumId.equals(datumNeededForThisEntry.getDataId())) {
                    // data needed from the first entry isn't consistent with
                    // the current entry
                    return null;
                }
            } else {
                // we don't need any data, or the first data needed isn't
                // consistent across entries
                return null;
            }
        }
        return datumNeededByAllEntriesSoFar;
    }
    /**
     * Builds display titles for each step on the live frame: menu or entry
     * display text for command steps; datum steps are currently left null (TODO).
     */
    public String[] getHeaderTitles() {
        // Map menu id -> evaluated display name across all installed suites.
        Hashtable<String, String> menus = new Hashtable<>();
        for (Suite s : platform.getInstalledSuites()) {
            for (Menu m : s.getMenus()) {
                menus.put(m.getId(), m.getName().evaluate());
            }
        }
        Vector<StackFrameStep> steps = frame.getSteps();
        String[] returnVal = new String[steps.size()];
        Hashtable<String, Entry> entries = platform.getMenuMap();
        int i = 0;
        for (StackFrameStep step : steps) {
            if (SessionFrame.STATE_COMMAND_ID.equals(step.getType())) {
                //Menu or form.
                if (menus.containsKey(step.getId())) {
                    returnVal[i] = menus.get(step.getId());
                } else if (entries.containsKey(step.getId())) {
                    returnVal[i] = entries.get(step.getId()).getText().evaluate();
                }
            } else if (SessionFrame.STATE_DATUM_VAL.equals(step.getType())) {
                //TODO: Grab the name of the case
            } else if (SessionFrame.STATE_DATUM_COMPUTED.equals(step.getType())) {
                //Nothing to do here
            }
            if (returnVal[i] != null) {
                //Menus contain a potential argument listing where that value is on the screen,
                //clear it out if it exists
                returnVal[i] = Localizer.processArguments(returnVal[i], new String[]{""}).trim();
            }
            ++i;
        }
        return returnVal;
    }
/**
* @return The next relevant datum for the current entry. Requires there to be
* an entry on the stack
*/
    public SessionDatum getNeededDatum() {
        // Assumes the current command resolves to at least one entry (see javadoc).
        Entry entry = getEntriesForCommand(getCommand()).elementAt(0);
        return getNeededDatum(entry);
    }
    /**
     * @param entry An entry which is consistent as a step on the stack
     * @return A session datum definition if one is pending. Null otherwise.
     */
    public SessionDatum getNeededDatum(Entry entry) {
        return getFirstMissingDatum(collectedDatums, entry.getSessionDataReqs());
    }
/**
* Return the first SessionDatum that is in allDatumsNeeded, but is not represented in
* datumsCollectedSoFar
*/
private SessionDatum getFirstMissingDatum(OrderedHashtable datumsCollectedSoFar,
Vector<SessionDatum> allDatumsNeeded) {
for (SessionDatum datum : allDatumsNeeded) {
if (!datumsCollectedSoFar.containsKey(datum.getDataId())) {
return datum;
}
}
return null;
}
public Detail getDetail(String id) {
for (Suite s : platform.getInstalledSuites()) {
Detail d = s.getDetail(id);
if (d != null) {
return d;
}
}
return null;
}
public Menu getMenu(String id) {
for (Suite suite : platform.getInstalledSuites()) {
for (Menu m : suite.getMenus()) {
if (id.equals(m.getId())) {
return m;
}
}
}
return null;
}
public void stepBack(EvaluationContext evalContext) {
// Pop the first thing off of the stack frame, no matter what
popSessionFrameStack();
// Keep popping things off until the value of needed data indicates that we are back to
// somewhere where we are waiting for user-provided input
while (this.getNeededData(evalContext) == null ||
this.getNeededData(evalContext).equals(SessionFrame.STATE_DATUM_COMPUTED)) {
popSessionFrameStack();
}
}
    /** Pops steps until the session again reports something it needs (non-null needed data). */
    public void popStep(EvaluationContext evalContext) {
        popSessionFrameStack();
        while (getNeededData(evalContext) == null) {
            popSessionFrameStack();
        }
    }
    /** Pops the top step off the live frame, resyncs derived state, and records the popped step. */
    private void popSessionFrameStack() {
        StackFrameStep recentPop = frame.popStep();
        //TODO: Check the "base state" of the frame after popping to see if we invalidated the stack
        // syncState() clears 'popped', so the popped step must be recorded afterwards.
        syncState();
        popped = recentPop;
    }
    /** Pushes a datum value step (id -> value) onto the live frame and resyncs. */
    public void setDatum(String keyId, String value) {
        frame.pushStep(new StackFrameStep(SessionFrame.STATE_DATUM_VAL, keyId, value));
        syncState();
    }
/**
* Set a (xml) data instance as the result to a session query datum.
* The instance is available in session's evaluation context until the corresponding query frame is removed
*/
    public void setQueryDatum(ExternalDataInstance queryResultInstance) {
        SessionDatum datum = getNeededDatum();
        if (datum instanceof RemoteQueryDatum) {
            // The result instance rides along on the step; it stays visible to the
            // evaluation context until this step is popped (see addInstancesFromFrame).
            StackFrameStep step =
                    new StackFrameStep(SessionFrame.STATE_QUERY_REQUEST,
                            datum.getDataId(), datum.getValue(), queryResultInstance);
            frame.pushStep(step);
            syncState();
        } else {
            throw new RuntimeException("Trying to set query successful when one isn't needed.");
        }
    }
public void setComputedDatum(EvaluationContext ec) throws XPathException {
SessionDatum datum = getNeededDatum();
XPathExpression form;
try {
form = XPathParseTool.parseXPath(datum.getValue());
} catch (XPathSyntaxException e) {
e.printStackTrace();
throw new RuntimeException(e.getMessage());
}
if (datum instanceof FormIdDatum) {
setXmlns(XPathFuncExpr.toString(form.eval(ec)));
setDatum("", "awful");
} else if (datum instanceof ComputedDatum) {
setDatum(datum.getDataId(), XPathFuncExpr.toString(form.eval(ec)));
}
}
    /** Records the form namespace as a STATE_FORM_XMLNS step on the live frame and resyncs. */
    public void setXmlns(String xmlns) {
        frame.pushStep(new StackFrameStep(SessionFrame.STATE_FORM_XMLNS, xmlns, null));
        syncState();
    }
    /** Records a command selection as a STATE_COMMAND_ID step on the live frame and resyncs. */
    public void setCommand(String commandId) {
        frame.pushStep(new StackFrameStep(SessionFrame.STATE_COMMAND_ID, commandId, null));
        syncState();
    }
    /**
     * Rebuilds all derived session state (collected datums, current command,
     * current xmlns) from scratch by replaying the live frame's steps.
     * Also clears the record of the last popped step.
     */
    public void syncState() {
        this.collectedDatums.clear();
        this.currentCmd = null;
        this.currentXmlns = null;
        this.popped = null;
        for (StackFrameStep step : frame.getSteps()) {
            if (SessionFrame.STATE_DATUM_VAL.equals(step.getType())) {
                String key = step.getId();
                String value = step.getValue();
                // Skip malformed datum steps rather than inserting nulls into the table.
                if (key != null && value != null) {
                    collectedDatums.put(key, value);
                }
            } else if (SessionFrame.STATE_QUERY_REQUEST.equals(step.getType())) {
                collectedDatums.put(step.getId(), step.getValue());
            } else if (SessionFrame.STATE_COMMAND_ID.equals(step.getType())) {
                // Later steps win: the current command/xmlns is the last one replayed.
                this.currentCmd = step.getId();
            } else if (SessionFrame.STATE_FORM_XMLNS.equals(step.getType())) {
                this.currentXmlns = step.getId();
            }
        }
    }
    /** @return the step most recently popped off the frame; null after a syncState(). */
    public StackFrameStep getPoppedStep() {
        return popped;
    }
    /**
     * @return the xmlns of the form to enter, or null when there is no current
     * command or the command is a view/sync entry (no form to enter).
     */
    public String getForm() {
        if (this.currentXmlns != null) {
            return this.currentXmlns;
        }
        String command = getCommand();
        if (command == null) {
            return null;
        }
        // NOTE(review): assumes 'command' exists in the menu map; 'e' would be null
        // otherwise -- TODO confirm callers guarantee this.
        Entry e = platform.getMenuMap().get(command);
        if (e.isView() || e.isSync()) {
            return null;
        } else {
            return ((FormEntry)e).getXFormNamespace();
        }
    }
    /** @return the id of the currently selected command, or null if none. */
    public String getCommand() {
        return this.currentCmd;
    }
/**
* Clear the current stack frame and release any pending
* stack frames (completely clearing the session)
*/
    public void clearAllState() {
        // Replace the live frame, drop all pending frames, and rebuild derived state.
        frame = new SessionFrame();
        frameStack.removeAllElements();
        syncState();
    }
    /**
     * Retrieve an evaluation context in which to evaluate expressions in the
     * current session state
     *
     * @param iif the instance initializer for the current platform
     * @return Evaluation context for current session state
     */
    public EvaluationContext getEvaluationContext(InstanceInitializationFactory iif) {
        return this.getEvaluationContext(iif, getCommand());
    }
/**
* Retrieve an evaluation context in which to evaluate expressions in the context of a given
* command in the installed app
*
* @param iif the instance initializer for the current platform
* @return Evaluation context for a command in the installed app
*/
    public EvaluationContext getEvaluationContext(InstanceInitializationFactory iif, String command) {
        if (command == null) {
            // No command: a bare context with no instances in scope.
            return new EvaluationContext(null);
        }
        // Assumes the command resolves to at least one entry -- TODO confirm.
        Entry entry = getEntriesForCommand(command).elementAt(0);
        Hashtable<String, DataInstance> instancesInScope = entry.getInstances();
        // Initialize each instance for this platform, replacing entries in place.
        // NOTE(review): this mutates the table returned by entry.getInstances();
        // assumes that is a fresh copy per call -- verify.
        for (Enumeration en = instancesInScope.keys(); en.hasMoreElements(); ) {
            String key = (String)en.nextElement();
            instancesInScope.put(key, instancesInScope.get(key).initialize(iif, key));
        }
        addInstancesFromFrame(instancesInScope);
        return new EvaluationContext(null, instancesInScope);
    }
    /** Adds any xml instances attached to the frame's steps (e.g. query results) to the map. */
    private void addInstancesFromFrame(Hashtable<String, DataInstance> instanceMap) {
        for (StackFrameStep step : frame.getSteps()) {
            if (step.hasXmlInstance()) {
                instanceMap.put(step.getId(), step.getXmlInstance());
            }
        }
    }
    /** @return the live session frame (not a defensive copy). */
    public SessionFrame getFrame() {
        //TODO: Type safe copy
        return frame;
    }
/**
* Executes a set of stack operations against the current session environment.
*
* The context data and session data provided will consistently match the live frame
* when the operations began executing, although frame operations will be executed
* against the most recent frame. (IE: If a new frame is pushed here, xpath expressions
* calculated within it will be evaluated against the starting, but <push> actions
* will happen against the newly pushed frame)
*/
    public boolean executeStackOperations(Vector<StackOperation> ops, EvaluationContext ec) {
        // The on deck frame is the frame that is the target of operations that execute
        // as part of this stack update. If at the end of the stack ops the frame on deck
        // doesn't match the current (living) frame, it will become the current frame
        SessionFrame onDeck = frame;
        // Whether the current frame is on the stack (we wanna treat it as a "phantom" bottom element
        // at first, basically.
        boolean currentFramePushed = false;
        for (StackOperation op : ops) {
            // Is there a frame with a matching ID for this op?
            String frameId = op.getFrameId();
            SessionFrame matchingFrame = updateMatchingFrame(frameId);
            switch (op.getOp()) {
                case StackOperation.OPERATION_CREATE:
                    // Ensure no frames exist with this ID
                    if (matchingFrame == null) {
                        // performPush returns null when the op's trigger condition was false.
                        Boolean currentFramePushedOrNull =
                                performPush(op, new SessionFrame(frameId), true, currentFramePushed, onDeck, ec);
                        if (currentFramePushedOrNull != null) {
                            currentFramePushed = currentFramePushedOrNull;
                        }
                    }
                    break;
                case StackOperation.OPERATION_PUSH:
                    Boolean currentFramePushedOrNull =
                            performPush(op, matchingFrame, false, currentFramePushed, onDeck, ec);
                    if (currentFramePushedOrNull != null) {
                        currentFramePushed = currentFramePushedOrNull;
                    }
                    break;
                case StackOperation.OPERATION_CLEAR:
                    performClearOperation(matchingFrame, op, ec);
                    break;
                default:
                    throw new RuntimeException("Undefined stack operation: " + op.getOp());
            }
        }
        // Load the top pending frame if new frames were pushed; otherwise just resync.
        return popOrSync(onDeck);
    }
    /**
     * Runs a push/create operation: defines the op's steps against the target frame
     * (falling back to the on-deck frame when no id matched) and records the frame
     * on the pending stack if it is new.
     *
     * @return the updated 'currentFramePushed' flag, or null when the op's trigger
     * condition was false and nothing was done.
     */
    private Boolean performPush(StackOperation op, SessionFrame matchingFrame,
            boolean isNewFrame, boolean currentFramePushed,
            SessionFrame onDeck, EvaluationContext ec) {
        if (op.isOperationTriggered(ec)) {
            // If we don't have a frame yet, this push is targeting the
            // frame on deck
            if (matchingFrame == null) {
                matchingFrame = onDeck;
            }
            for (StackFrameStep step : op.getStackFrameSteps()) {
                matchingFrame.pushStep(step.defineStep(ec));
            }
            return pushNewFrame(matchingFrame, isNewFrame, currentFramePushed);
        }
        return null;
    }
private SessionFrame updateMatchingFrame(String frameId) {
if (frameId != null) {
// TODO: This is correct, right? We want to treat the current frame
// as part of the "environment" and not let people create a new frame
// with the same id? Possibly this should only be true if the current
// frame is live?
if (frameId.equals(frame.getFrameId())) {
return frame;
} else {
// Otherwise, peruse the stack looking for another
// frame with a matching ID.
for (Enumeration e = frameStack.elements(); e.hasMoreElements(); ) {
SessionFrame stackFrame = (SessionFrame)e.nextElement();
if (frameId.equals(stackFrame.getFrameId())) {
return stackFrame;
}
}
}
}
return null;
}
private boolean pushNewFrame(SessionFrame matchingFrame, boolean newFrame, boolean currentFramePushed) {
// ok, frame should be appropriately modified now.
// we also need to push this frame if it's new
if (newFrame) {
// Before we can push a frame onto the stack, we need to
// make sure the stack is clean. This means that if the
// current frame has a snapshot, we've gotta make sure
// the existing frames are still valid.
// TODO: We might want to handle this differently in the future,
// so that we can account for the invalidated frames in the ui
// somehow.
cleanStack();
// OK, now we want to take the current frame and put it up on the frame stack unless
// this frame is dead (IE: We're closing it out). then we'll push the new frame
// on top of it.
if (!frame.isDead() && !currentFramePushed) {
frameStack.push(frame);
return true;
}
frameStack.push(matchingFrame);
}
return currentFramePushed;
}
private void performClearOperation(SessionFrame matchingFrame,
StackOperation op,
EvaluationContext ec) {
if (matchingFrame != null) {
if (op.isOperationTriggered(ec)) {
frameStack.removeElement(matchingFrame);
}
}
}
    /**
     * If stack ops replaced the live frame (it is no longer on deck and not dead),
     * loads the top pending frame; otherwise just resyncs derived state.
     *
     * @return true if a new frame was loaded (or state was fully reset)
     */
    private boolean popOrSync(SessionFrame onDeck) {
        if (!frame.isDead() && frame != onDeck) {
            // If the current frame isn't dead, and isn't on deck, that means we've pushed
            // in new frames and need to load up the correct one
            if (!finishAndPop()) {
                // Somehow we didn't end up with any frames after that? that's incredibly weird, I guess
                // we should just start over.
                clearAllState();
            }
            return true;
        } else {
            syncState();
            return false;
        }
    }
/**
* Checks to see if the current frame has a clean snapshot. If
* not, clears the stack and the snapshot (since the snapshot can
* only be relevant to the existing frames)
*/
    private void cleanStack() {
        //See whether the current frame was incompatible with its start
        //state.
        if (frame.isSnapshotIncompatible()) {
            //If it is, our frames can no longer make sense.
            // Drop every pending frame and the now-meaningless snapshot.
            this.frameStack.removeAllElements();
            frame.clearSnapshot();
        }
    }
/**
* Called after a session has been completed. Executes and pending stack operations
* from the current session, completes the session, and pops the top of any pending
* frames into execution.
*
* @return True if there was a pending frame and it has been
* popped into the current session. False if the stack was empty
* and the session is over.
*/
    public boolean finishExecuteAndPop(EvaluationContext ec) {
        Vector<StackOperation> ops = getCurrentEntry().getPostEntrySessionOperations();
        //Let the session know that the current frame shouldn't work its way back onto the stack
        markCurrentFrameForDeath();
        //First, see if we have operations to run
        if(ops.size() > 0) {
            executeStackOperations(ops, ec);
        }
        // Then pop any pending frame into the live session (or report the session over).
        return finishAndPop();
    }
/**
* Complete the current session (and perform any cleanup), then
* check the stack for any pending frames, and load the top one
* into the current session if so.
*
* @return True if there was a pending frame and it has been
* popped into the current session. False if the stack was empty
* and the session is over.
*/
    private boolean finishAndPop() {
        cleanStack();
        if (frameStack.empty()) {
            // Nothing pending: the session is over.
            return false;
        } else {
            frame = frameStack.pop();
            //Ok, so if _after_ popping from the stack, we still have
            //stack members, we need to be careful about making sure
            //that they won't get triggered if we abandon the current
            //frame
            if (!frameStack.isEmpty()) {
                frame.captureSnapshot();
            }
            syncState();
            return true;
        }
    }
/**
* Retrieve the single valid entry for the current session, should be called only
* when the current request is fully built
*
* @return The unique valid entry built on this session. Will throw an exception if there isn't
* a unique entry.
*/
public Entry getCurrentEntry() {
Vector<Entry> e = getEntriesForCommand(getCommand());
if (e.size() > 1) {
throw new IllegalStateException("The current session does not contain a single valid entry");
}
if (e.size() == 0) {
throw new IllegalStateException("The current session has no valid entry");
}
return e.elementAt(0);
}
/**
* Retrieves a valid datum definition in the current session's history
* which contains a selector for the datum Id provided.
*
* Can be used to resolve the context about an item that
* has been selected in this session.
*
* @param datumId The ID of a session datum in the session history
* @return An Entry object which contains a selector for that datum
* which is in this session history
*/
public EntityDatum findDatumDefinition(String datumId) {
//We're performing a walk down the entities in this session here,
//we should likely generalize this to make it easier to do it for other
//operations
Vector<StackFrameStep> steps = frame.getSteps();
int stepId = -1;
//walk to our datum
for (int i = 0; i < steps.size(); ++i) {
if (SessionFrame.STATE_DATUM_VAL.equals(steps.elementAt(i).getType()) &&
steps.elementAt(i).getId().equals(datumId)) {
stepId = i;
break;
}
}
if (stepId == -1) {
System.out.println("I don't think this should be possible...");
return null;
}
//ok, so now we have our step, we want to walk backwards until we find the entity
//associated with our ID
for (int i = stepId; i >= 0; i
if (steps.elementAt(i).getType().equals(SessionFrame.STATE_COMMAND_ID)) {
Vector<Entry> entries = this.getEntriesForCommand(steps.elementAt(i).getId());
//TODO: Don't we know the right entry? What if our last command is an actual entry?
for (Entry entry : entries) {
for (SessionDatum datum : entry.getSessionDataReqs()) {
if (datum.getDataId().equals(datumId) && datum instanceof EntityDatum) {
return (EntityDatum)datum;
}
}
}
}
}
return null;
}
    /** Flags the live frame as dead so it won't return to the stack when ops execute. */
    private void markCurrentFrameForDeath() {
        frame.kill();
    }
    /**
     * Does the command only have a view entry, and no other actions available
     * to take?
     */
    public boolean isViewCommand(String command) {
        Vector<Entry> entries = this.getEntriesForCommand(command);
        return entries.size() == 1 && entries.elementAt(0).isView();
    }
    /** Does the command resolve to exactly one entry, which is a sync entry? */
    public boolean isSyncCommand(String command) {
        Vector<Entry> entries = this.getEntriesForCommand(command);
        return entries.size() == 1 && entries.elementAt(0).isSync();
    }
    /** Attaches an extra key/value pair to the frame's top step. */
    public void addExtraToCurrentFrameStep(String key, Object value) {
        frame.addExtraTopStep(key, value);
    }
    /** Reads an extra previously attached to the frame's top step. */
    public Object getCurrentFrameStepExtra(String key) {
        return frame.getTopStepExtra(key);
    }
/**
* Builds a session from by restoring serialized SessionFrame and syncing
* from that. Doesn't support restoring the frame stack
*/
    public static CommCareSession restoreSessionFromStream(CommCarePlatform ccPlatform,
            DataInputStream inputStream)
            throws DeserializationException, IOException {
        // Read the serialized frame, attach it to a fresh session, and rebuild
        // derived state from its steps. The pending frame stack is not restored.
        SessionFrame restoredFrame = new SessionFrame();
        restoredFrame.readExternal(inputStream, ExtUtil.defaultPrototypes());
        CommCareSession restoredSession = new CommCareSession(ccPlatform);
        restoredSession.frame = restoredFrame;
        restoredSession.syncState();
        return restoredSession;
    }
    /** Serializes only the live frame (not the pending frame stack). */
    public void serializeSessionState(DataOutputStream outputStream) throws IOException {
        frame.writeExternal(outputStream);
    }
} |
package org.commcare.suite.model.graph;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.commcare.suite.model.DetailTemplate;
import org.commcare.suite.model.Text;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.TreeReference;
import org.javarosa.core.util.externalizable.DeserializationException;
import org.javarosa.core.util.externalizable.ExtUtil;
import org.javarosa.core.util.externalizable.ExtWrapList;
import org.javarosa.core.util.externalizable.ExtWrapListPoly;
import org.javarosa.core.util.externalizable.ExtWrapMap;
import org.javarosa.core.util.externalizable.Externalizable;
import org.javarosa.core.util.externalizable.PrototypeFactory;
import org.javarosa.xpath.parser.XPathSyntaxException;
/**
* Defines a graph: type, set of series, set of text annotations, and key-value-based configuration.
*
* @author jschweers
*/
public class Graph implements Externalizable, DetailTemplate, Configurable {
public static final String TYPE_XY = "xy";
public static final String TYPE_BUBBLE = "bubble";
public static final String TYPE_TIME = "time";
private String mType;
private Vector<XYSeries> mSeries;
private Hashtable<String, Text> mConfiguration;
private Vector<Annotation> mAnnotations;
    /** Creates an empty graph; type and contents are filled in later (parse or deserialize). */
    public Graph() {
        mSeries = new Vector<XYSeries>();
        mConfiguration = new Hashtable<String, Text>();
        mAnnotations = new Vector<Annotation>();
    }
    /** @return the graph type (e.g. TYPE_XY, TYPE_BUBBLE, TYPE_TIME). */
    public String getType() {
        return mType;
    }
    /** Sets the graph type (expected to be one of the TYPE_* constants). */
    public void setType(String type) {
        mType = type;
    }
    /** Appends a data series to the graph. */
    public void addSeries(XYSeries s) {
        mSeries.addElement(s);
    }
    /** Appends a text annotation to the graph. */
    public void addAnnotation(Annotation a) {
        mAnnotations.addElement(a);
    }
    /** @return the (unevaluated) Text for the given configuration key, or null. */
    public Text getConfiguration(String key) {
        return mConfiguration.get(key);
    }
    /** Stores an (unevaluated) configuration value under the given key. */
    public void setConfiguration(String key, Text value) {
        mConfiguration.put(key, value);
    }
    /** @return an enumeration over all configuration keys. */
    public Enumeration getConfigurationKeys() {
        return mConfiguration.keys();
    }
/*
* (non-Javadoc)
* @see org.javarosa.core.util.externalizable.Externalizable#readExternal(java.io.DataInputStream, org.javarosa.core.util.externalizable.PrototypeFactory)
*/
public void readExternal(DataInputStream in, PrototypeFactory pf) throws IOException, DeserializationException {
ExtUtil.readString(in);
mConfiguration = (Hashtable<String, Text>)ExtUtil.read(in, new ExtWrapMap(String.class, Text.class), pf);
mSeries = (Vector<XYSeries>)ExtUtil.read(in, new ExtWrapListPoly(), pf);
mAnnotations = (Vector<Annotation>)ExtUtil.read(in, new ExtWrapList(Annotation.class), pf);
}
/*
* (non-Javadoc)
* @see org.javarosa.core.util.externalizable.Externalizable#writeExternal(java.io.DataOutputStream)
*/
    public void writeExternal(DataOutputStream out) throws IOException {
        // Write order must stay in sync with readExternal: type, config, series, annotations.
        ExtUtil.writeString(out, mType);
        ExtUtil.write(out, new ExtWrapMap(mConfiguration));
        ExtUtil.write(out, new ExtWrapListPoly(mSeries));
        ExtUtil.write(out, new ExtWrapList(mAnnotations));
    }
/*
* (non-Javadoc)
* @see org.commcare.suite.model.DetailTemplate#evaluate(org.javarosa.core.model.condition.EvaluationContext)
*/
public GraphData evaluate(EvaluationContext context) {
GraphData data = new GraphData();
data.setType(mType);
evaluateConfiguration(this, data, context);
evaluateSeries(data, context);
evaluateAnnotations(data, context);
return data;
}
/*
* Helper for evaluate. Looks at annotations only.
*/
private void evaluateAnnotations(GraphData graphData, EvaluationContext context) {
for (Annotation a : mAnnotations) {
graphData.addAnnotation(new AnnotationData(
a.getX().evaluate(context),
a.getY().evaluate(context),
a.getAnnotation().evaluate(context)
));
}
}
/*
* Helper for evaluate. Looks at configuration only.
*/
private void evaluateConfiguration(Configurable template, ConfigurableData data, EvaluationContext context) {
Enumeration e = template.getConfigurationKeys();
Vector<String> nonvariables = new Vector<String>();
String prefix = "var-";
while (e.hasMoreElements()) {
String key = (String)e.nextElement();
if (key.startsWith(prefix)) {
String value = template.getConfiguration(key).evaluate(context);
context.setVariable(key.substring(prefix.length()), value);
}
else {
nonvariables.addElement(key);
}
}
for (String key : nonvariables) {
String value = template.getConfiguration(key).evaluate(context);
data.setConfiguration(key, value);
}
}
/*
* Helper for evaluate. Looks at a single series.
*/
private void evaluateSeries(GraphData graphData, EvaluationContext context) {
try {
for (XYSeries s : mSeries) {
Vector<TreeReference> refList = context.expandReference(s.getNodeSet());
SeriesData seriesData = new SeriesData();
EvaluationContext seriesContext = new EvaluationContext(context, context.getContextRef());
evaluateConfiguration(s, seriesData, seriesContext);
for (TreeReference ref : refList) {
EvaluationContext refContext = new EvaluationContext(seriesContext, ref);
String x = s.evaluateX(refContext);
String y = s.evaluateY(refContext);
if (x != null && y != null) {
if (graphData.getType().equals(Graph.TYPE_BUBBLE)) {
String radius = ((BubbleSeries)s).evaluateRadius(refContext);
seriesData.addPoint(new BubblePointData(x, y, radius));
} else {
seriesData.addPoint(new XYPointData(x, y));
}
}
}
graphData.addSeries(seriesData);
}
} catch (XPathSyntaxException e) {
e.printStackTrace();
}
}
} |
package com.productions.pieter.notificationanalyzer;
import android.app.Fragment;
import android.content.Intent;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import com.j256.ormlite.android.apptools.OpenHelperManager;
import com.productions.pieter.notificationanalyzer.Models.DatabaseHelper;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.LinkedList;
import java.util.List;
/**
 * Fragment showing a bar chart of daily notification totals with, below it,
 * a per-app breakdown list for the day currently selected in the chart.
 */
public class HistoryFragment extends Fragment {
    // Lazily-created ORMLite helper; released again in onDestroy()
    private DatabaseHelper databaseHelper = null;
    // Day currently selected in the chart, or null when nothing is selected
    private Date currentSelectedDate = null;
    // Bar index of the current selection, or -1 when nothing is selected
    private int currentSelectedBarPosition = -1;
    private BarChart barChart = null;
    // Short day label, e.g. "3 Jan".
    // NOTE(review): uses the device default locale implicitly — confirm intended.
    private SimpleDateFormat dateFormat = new SimpleDateFormat("d MMM");
    // List header row showing the total notification count for the selected day
    private View headerDayCount = null;

    /** Lazily create (and cache) the ORMLite database helper. */
    public DatabaseHelper getDatabaseHelper() {
        if (databaseHelper == null) {
            databaseHelper = OpenHelperManager.getHelper(this.getActivity(), DatabaseHelper.class);
        }
        return databaseHelper;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        View view = inflater.inflate(R.layout.fragment_history, container, false);
        // The chart is added as a list header so it scrolls with the list
        View viewListHeader = inflater.inflate(R.layout.list_header_chart, null);
        barChart = (BarChart) viewListHeader.findViewById(R.id.bar_chart);
        barChart.setBarChartListener(new BarChartListener() {
            /** Bar tapped: remember the selection and show that day's data. */
            @Override
            public void onBarClick(Date date, int position) {
                currentSelectedDate = date;
                currentSelectedBarPosition = position;
                showDayLabelChart(date, position);
                showDayListView(date);
            }

            /** Visible date interval changed: refresh the start/end labels. */
            @Override
            public void onIntervalChanged(Date first, Date end) {
                TextView chartDateStart = (TextView) getActivity().findViewById(R.id.chart_date_start);
                TextView chartDateEnd = (TextView) getActivity().findViewById(R.id.chart_date_end);
                chartDateStart.setText(first != null ? dateFormat.format(first) : "");
                chartDateEnd.setText(end != null ? dateFormat.format(end) : "");
            }

            /**
             * Called when the chart is finished drawing.
             * Toggles between the history list and the "no data" placeholder.
             */
            @Override
            public void onChartDraw() {
                ListView listView = (ListView) getActivity().findViewById(R.id.list_view_history);
                TextView textView = (TextView) getActivity().findViewById(R.id.history_empty);
                if (barChart.isEmpty()) {
                    listView.setVisibility(View.GONE);
                    textView.setVisibility(View.VISIBLE);
                } else {
                    listView.setVisibility(View.VISIBLE);
                    textView.setVisibility(View.GONE);
                }
            }
        });
        ListView listHistory = (ListView) view.findViewById(R.id.list_view_history);
        // Header views are non-selectable (data = null, isSelectable = false)
        listHistory.addHeaderView(viewListHeader, null, false);
        headerDayCount = inflater.inflate(R.layout.list_header_day_count, null);
        // Hidden until a day is selected
        headerDayCount.setVisibility(View.GONE);
        listHistory.addHeaderView(headerDayCount, null, false);
        listHistory.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            /** Open the per-app detail screen for the tapped row. */
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                Intent intent = new Intent(getActivity(), AppDetail.class);
                NotificationAppView clickedApp = (NotificationAppView) adapterView.getAdapter().getItem(i);
                // The app name travels to AppDetail via EXTRA_SUBJECT
                intent.putExtra(Intent.EXTRA_SUBJECT, clickedApp.AppName);
                startActivity(intent);
            }
        });
        return view;
    }

    @Override
    public void onResume() {
        super.onResume();
        ListView listHistory = (ListView) this.getActivity().findViewById(R.id.list_view_history);
        if (currentSelectedDate != null) {
            Calendar calSelected = new GregorianCalendar();
            calSelected.setTime(currentSelectedDate);
            Calendar calToday = Calendar.getInstance();
            // Refresh the list only when "today" is selected, since only
            // today's counts can have changed while the fragment was paused.
            // NOTE(review): Calendar.DATE compares day-of-month only, so a
            // selection from a different month with the same day number would
            // also match — confirm selections can never be that old.
            if (calSelected.get(Calendar.DATE) == calToday.get(Calendar.DATE)) {
                showDayListView(currentSelectedDate);
            }
            showDayLabelChart(currentSelectedDate, currentSelectedBarPosition);
        } else {
            // No selection: show an empty list and hide the selection widgets
            listHistory.setAdapter(new NotificationAdapter(this.getActivity(), new LinkedList<NotificationAppView>()));
            TextView chartDateCurrent = (TextView) getActivity().findViewById(R.id.chart_date_current);
            chartDateCurrent.setVisibility(View.INVISIBLE);
            headerDayCount.setVisibility(View.GONE);
        }
        barChart.update();
    }

    /** Populate the list with the per-app notification counts for one day. */
    private void showDayListView(Date date) {
        ListView listView = (ListView) getActivity().findViewById(R.id.list_view_history);
        try {
            List<NotificationAppView> objects = getDatabaseHelper().getNotificationDao().getOverviewDay(date);
            listView.setAdapter(new NotificationAdapter(getActivity(), objects));
            // Sum the per-app counts for the day-total header
            int totalCount = 0;
            for (int i = 0; i < objects.size(); i++) {
                totalCount += objects.get(i).Notifications;
            }
            TextView titleCounter = (TextView) headerDayCount.findViewById(R.id.title_counter);
            titleCounter.setText(Integer.toString(totalCount));
            TextView titleCounterSuffix = (TextView) headerDayCount.findViewById(R.id.title_counter_suffix);
            // Singular vs. plural suffix for the total
            if (totalCount == 1) {
                titleCounterSuffix.setText(R.string.title_counter_suffix_single);
            } else {
                titleCounterSuffix.setText(R.string.title_counter_suffix_plural);
            }
            headerDayCount.setVisibility(View.VISIBLE);
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Show the selected-day label and center it under the selected bar. */
    private void showDayLabelChart(Date date, int position) {
        TextView chartDateCurrent = (TextView) getActivity().findViewById(R.id.chart_date_current);
        chartDateCurrent.setText(dateFormat.format(date));
        chartDateCurrent.setVisibility(View.VISIBLE);
        // Left margin = bars to the left + half a bar - half the label width
        // + the chart's side margin, so the label is centered under the bar
        LinearLayout.LayoutParams layoutParams = (LinearLayout.LayoutParams) chartDateCurrent.getLayoutParams();
        int marginLeft = getResources().getDimensionPixelOffset(R.dimen.bar_chart_width_bar) * position
                + getResources().getDimensionPixelOffset(R.dimen.bar_chart_width_bar) / 2 - chartDateCurrent.getWidth() / 2
                + getResources().getDimensionPixelOffset(R.dimen.barchart_marginSides);
        layoutParams.setMargins(marginLeft, 0, 0, 0);
        chartDateCurrent.setLayoutParams(layoutParams);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Release the helper reference acquired in getDatabaseHelper()
        if (databaseHelper != null) {
            OpenHelperManager.releaseHelper();
            databaseHelper = null;
        }
    }
}
package org.csstudio.archive.engine.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.csstudio.archive.engine.Activator;
import org.csstudio.archive.engine.scanner.ScanThread;
import org.csstudio.archive.engine.scanner.Scanner;
import org.csstudio.archive.rdb.ChannelConfig;
import org.csstudio.archive.rdb.RDBArchive;
import org.csstudio.archive.rdb.engineconfig.ChannelGroupConfig;
import org.csstudio.archive.rdb.engineconfig.SampleEngineConfig;
import org.csstudio.platform.data.ITimestamp;
import org.csstudio.platform.data.IValue;
import org.csstudio.platform.data.TimestampFactory;
import org.csstudio.platform.data.ValueFactory;
import org.csstudio.platform.logging.CentralLogger;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.preferences.IPreferencesService;
/** Data model of the archive engine.
 *  <p>
 *  Owns the group/channel configuration, the sample scanner and the
 *  write thread, and tracks the engine lifecycle {@link State}.
 *  @author Kay Kasemir
 */
public class EngineModel
{
    /** Version code. See also webroot/version.html */
    final public static String VERSION = "1.2.3"; //$NON-NLS-1$

    /** Name of this model */
    private String name = "Archive Engine"; //$NON-NLS-1$

    /** RDB Archive to which samples are written.
     * <p>
     * <b>NOTE Thread Usage:</b>
     * During startup, <code>addChannel</code> might
     * access the archive, but later on only the <code>WriteThread</code>
     * touches the archive to avoid thread issues.
     */
    final private RDBArchive archive;

    /** Thread that writes to the <code>archive</code> */
    final private WriteThread writer;

    /** All the channels.
     * <p>
     * Accessed by HTTPD and main thread, so lock on <code>this</code>
     */
    final List<ArchiveChannel> channels = new ArrayList<ArchiveChannel>();

    /** Channels mapped by name.
     * <p>
     * @see channels about thread safety
     */
    final Map<String, ArchiveChannel> channel_by_name = new HashMap<String, ArchiveChannel>();

    /** Groups of archived channels
     * <p>
     * @see channels about thread safety
     */
    final List<ArchiveGroup> groups = new ArrayList<ArchiveGroup>();

    /** Scanner for scanned channels */
    final Scanner scanner = new Scanner();

    /** Thread that runs the scanner */
    final ScanThread scan_thread = new ScanThread(scanner);

    /** Engine states */
    public enum State
    {
        /** Initial model state before <code>start()</code> */
        IDLE,
        /** Running model, state after <code>start()</code> */
        RUNNING,
        /** State after <code>requestStop()</code>; still running. */
        SHUTDOWN_REQUESTED,
        /** State after <code>requestRestart()</code>; still running. */
        RESTART_REQUESTED,
        /** State while in <code>stop()</code>; will then be IDLE again. */
        STOPPING
    }

    /** Engine state.
     *  NOTE(review): written from the HTTPD thread (requestStop/requestRestart)
     *  and read by the main thread without synchronization or volatile —
     *  confirm the resulting visibility delay is acceptable.
     */
    private State state = State.IDLE;

    /** Start time of the model */
    private ITimestamp start_time = null;

    /** Write period in seconds */
    private int write_period = 30;

    /** Maximum number of repeat counts for scanned channels */
    private int max_repeats = 60;

    /** Write batch size */
    private int batch_size = 500;

    /** Buffer reserve (N times what's ideally needed) */
    private double buffer_reserve = 2.0;

    /** Construct model that writes to archive */
    public EngineModel(final RDBArchive archive)
    {
        this.archive = archive;
        applyPreferences();
        writer = new WriteThread(archive);
    }

    /** Read preference settings */
    @SuppressWarnings("nls")
    private void applyPreferences()
    {
        final IPreferencesService prefs = Platform.getPreferencesService();
        // No preference service (e.g. unit test): keep built-in defaults
        if (prefs == null)
            return;
        write_period = prefs.getInt(Activator.ID, "write_period", write_period, null);
        max_repeats = prefs.getInt(Activator.ID, "max_repeats", max_repeats, null);
        batch_size = prefs.getInt(Activator.ID, "batch_size", batch_size, null);
        buffer_reserve = prefs.getDouble(Activator.ID, "buffer_reserve", buffer_reserve, null);
    }

    /** @return Name (description) */
    final public String getName()
    {
        return name;
    }

    /** @return Seconds into the future that should be ignored */
    public static long getIgnoredFutureSeconds()
    {
        // TODO make configurable
        // 1 day
        return 24*60*60;
    }

    /** @return Write period in seconds */
    final public int getWritePeriod()
    {
        return write_period;
    }

    /** @return Write batch size */
    final public int getBatchSize()
    {
        return batch_size;
    }

    /** @return Current model state */
    final public State getState()
    {
        return state;
    }

    /** @return Start time of the engine or <code>null</code> if not running */
    final public ITimestamp getStartTime()
    {
        return start_time;
    }

    /** Get existing or add new group.
     *  @param name Name of the group to find or add.
     *  @return ArchiveGroup
     *  @throws Exception on error (wrong state)
     */
    final public ArchiveGroup addGroup(final String name) throws Exception
    {
        if (state != State.IDLE)
            throw new Exception("Cannot add group while " + state); //$NON-NLS-1$
        // Avoid duplicates
        synchronized (this)
        {
            ArchiveGroup group = getGroup(name);
            if (group != null)
                return group;
            // Add new group
            group = new ArchiveGroup(name);
            groups.add(group);
            return group;
        }
    }

    /** @return Number of groups */
    final synchronized public int getGroupCount()
    {
        return groups.size();
    }

    /** Get one archive group.
     *  @param group_index 0...<code>getGroupCount()-1</code>
     *  @return group
     *  @see #getGroupCount()
     */
    final synchronized public ArchiveGroup getGroup(final int group_index)
    {
        return groups.get(group_index);
    }

    /** @return Group by that name or <code>null</code> if not found */
    final synchronized public ArchiveGroup getGroup(final String name)
    {
        for (ArchiveGroup group : groups)
            if (group.getName().equals(name))
                return group;
        return null;
    }

    /** @return Number of channels */
    final synchronized public int getChannelCount()
    {
        return channels.size();
    }

    /** @param i Channel index, 0 ... <code>getChannelCount()-1</code> */
    final synchronized public ArchiveChannel getChannel(int i)
    {
        return channels.get(i);
    }

    /** @return Channel by that name or <code>null</code> if not found */
    final synchronized public ArchiveChannel getChannel(final String name)
    {
        return channel_by_name.get(name);
    }

    /** Add a channel to the engine under given group.
     *  @param name Channel name
     *  @param group Name of the group to which to add
     *  @param enablement How channel acts on the group
     *  @param monitor Monitor or scan?
     *  @param sample_val Sample mode configuration value: 'delta' for monitor
     *  @param period Estimated update period [seconds]
     *  @return {@link ArchiveChannel}
     *  @throws Exception on error from channel creation
     */
    @SuppressWarnings("nls")
    final public ArchiveChannel addChannel(final String name,
            final ArchiveGroup group,
            final Enablement enablement, final boolean monitor,
            final double sample_val,
            final double period) throws Exception
    {
        if (state != State.IDLE)
            throw new Exception("Cannot add channel while " + state); //$NON-NLS-1$
        // Is this an existing channel?
        ArchiveChannel channel = getChannel(name);
        // For the engine, channels can be in more than one group
        // if configuration matches.
        if (channel != null)
        {
            // Channel already known: only allow re-use when enablement and
            // sample mechanism match exactly; otherwise flag the conflict.
            final String gripe = String.format(
                    "Group '%s': Channel '%s' already in group '%s'",
                    group.getName(), name, channel.getGroup(0).getName());
            if (channel.getEnablement() != enablement)
                throw new Exception(gripe + " with different enablement");
            if (// Now monitor, but not before?
                (monitor && (channel instanceof ScannedArchiveChannel))
                ||
                // Or now scanned, but before monitor, or other scan rate?
                (!monitor
                 && ((channel instanceof MonitoredArchiveChannel)
                     || ((ScannedArchiveChannel)channel).getPeriod() != period)
                ))
                throw new Exception(gripe + " with different sample mechanism");
        }
        else
        {   // Channel is new to this engine.
            // See if there's already a sample in the archive,
            // because we won't be able to go back-in-time before that sample.
            IValue last_sample = null;
            final ChannelConfig channel_id = archive.getChannel(name);
            if (channel_id != null)
            {
                final ITimestamp last_stamp = channel_id.getLastTimestamp();
                if (last_stamp != null)
                    // Create fake string sample with that time
                    last_sample = ValueFactory.createStringValue(last_stamp,
                            ValueFactory.createOKSeverity(),
                            "", IValue.Quality.Original,
                            new String [] { "Last timestamp in archive" });
            }
            // Determine buffer capacity
            // NOTE(review): assumes period > 0; period == 0 would make this
            // division yield Infinity — confirm configurations guarantee a
            // positive period.
            int buffer_capacity = (int) (write_period / period * buffer_reserve);
            // When scan or update period exceeds write period,
            // simply use the reserve for the capacity
            if (buffer_capacity < buffer_reserve)
                buffer_capacity = (int)buffer_reserve;
            // Create new channel
            if (monitor)
            {
                // sample_val > 0 selects 'delta' sampling over plain monitoring
                if (sample_val > 0)
                    channel = new DeltaArchiveChannel(name, enablement,
                            buffer_capacity, last_sample, period, sample_val);
                else
                    channel = new MonitoredArchiveChannel(name, enablement,
                                                 buffer_capacity, last_sample,
                                                 period);
            }
            else
            {
                channel = new ScannedArchiveChannel(name, enablement,
                                        buffer_capacity, last_sample, period,
                                        max_repeats);
                scanner.add((ScannedArchiveChannel)channel, period);
            }
            synchronized (this)
            {
                channels.add(channel);
                channel_by_name.put(channel.getName(), channel);
            }
            writer.addChannel(channel);
        }
        // Connect new or old channel to group
        channel.addGroup(group);
        group.add(channel);
        return channel;
    }

    /** Start processing all channels and writing to archive. */
    final public void start() throws Exception
    {
        start_time = TimestampFactory.now();
        state = State.RUNNING;
        writer.start(write_period, batch_size);
        for (ArchiveGroup group : groups)
        {
            group.start();
            // Check for stop request.
            // Unfortunately, we don't check inside group.start(),
            // which could have run for some time....
            if (state == State.SHUTDOWN_REQUESTED)
                break;
        }
        scan_thread.start();
    }

    /** @return Timestamp of end of last write run */
    public ITimestamp getLastWriteTime()
    {
        return writer.getLastWriteTime();
    }

    /** @return Average number of values per write run */
    public double getWriteCount()
    {
        return writer.getWriteCount();
    }

    /** @return  Average duration of write run in seconds */
    public double getWriteDuration()
    {
        return writer.getWriteDuration();
    }

    /** @see Scanner#getIdlePercentage() */
    final public double getIdlePercentage()
    {
        return scanner.getIdlePercentage();
    }

    /** Ask the model to stop.
     *  Merely updates the model state.
     *  @see #getState()
     */
    final public void requestStop()
    {
        state = State.SHUTDOWN_REQUESTED;
    }

    /** Ask the model to restart.
     *  Merely updates the model state.
     *  @see #getState()
     */
    final public void requestRestart()
    {
        state = State.RESTART_REQUESTED;
    }

    /** Reset engine statistics */
    public void reset()
    {
        writer.reset();
        scanner.reset();
        synchronized (this)
        {
            for (ArchiveChannel channel : channels)
                channel.reset();
        }
    }

    /** Stop monitoring the channels, flush the write buffers. */
    @SuppressWarnings("nls")
    final public void stop() throws Exception
    {
        state = State.STOPPING;
        CentralLogger.getInstance().getLogger(this).info("Stopping scanner");
        // Stop scanning
        scan_thread.stop();
        // Assert that scanning has stopped before we add 'off' events
        scan_thread.join();
        // Disconnect from network
        CentralLogger.getInstance().getLogger(this).info("Stopping archive groups");
        for (ArchiveGroup group : groups)
            group.stop();
        // Flush all values out
        CentralLogger.getInstance().getLogger(this).info("Stopping writer");
        writer.shutdown();
        // Update state
        state = State.IDLE;
        start_time = null;
    }

    /** Read configuration of model from RDB.
     *  @param name Name of engine in RDB
     *  @param port Current HTTPD port
     */
    @SuppressWarnings("nls")
    final public void readConfig(final String name, final int port) throws Exception
    {
        this.name = name;
        final SampleEngineConfig engine = archive.findEngine(name);
        if (engine == null)
            throw new Exception("Unknown engine '" + name + "'");
        // Is the configuration consistent?
        if (engine.getUrl().getPort() != port)
            throw new Exception("Engine running on port " + port +
                " while configuration requires " + engine.getUrl().toString());
        // Get groups
        final ChannelGroupConfig[] engine_groups = engine.getGroups();
        for (ChannelGroupConfig group_config : engine_groups)
        {
            final ArchiveGroup group = addGroup(group_config.getName());
            // Add channels to group
            final ChannelConfig[] channel_configs = group_config.getChannels();
            for (ChannelConfig channel_config : channel_configs)
            {
                // The group's designated enabling channel controls the group
                Enablement enablement = Enablement.Passive;
                if (group_config.getEnablingChannelId() == channel_config.getId())
                    enablement = Enablement.Enabling;
                addChannel(channel_config.getName(), group, enablement,
                           channel_config.getSampleMode().isMonitor(),
                           channel_config.getSampleValue(),
                           channel_config.getSamplePeriod());
            }
        }
    }

    /** Remove all channels and groups. */
    @SuppressWarnings("nls")
    final public void clearConfig()
    {
        if (state != State.IDLE)
            throw new IllegalStateException("Only allowed in IDLE state");
        synchronized (this)
        {
            groups.clear();
            channel_by_name.clear();
            channels.clear();
        }
        scanner.clear();
    }

    /** Write debug info to stdout */
    @SuppressWarnings("nls")
    public void dumpDebugInfo()
    {
        System.out.println(TimestampFactory.now().toString() + ": Debug info");
        for (int c=0; c<getChannelCount(); ++c)
        {
            final ArchiveChannel channel = getChannel(c);
            StringBuilder buf = new StringBuilder();
            buf.append("'" + channel.getName() + "' (");
            for (int i=0; i<channel.getGroupCount(); ++i)
            {
                if (i > 0)
                    buf.append(", ");
                buf.append(channel.getGroup(i).getName());
            }
            buf.append("): ");
            buf.append(channel.getMechanism());
            buf.append(channel.isEnabled() ? ", enabled" : ", DISABLED");
            buf.append(channel.isConnected() ? ", connected (" : ", DISCONNECTED (");
            buf.append(channel.getInternalState() + ")");
            buf.append(", value " + channel.getCurrentValue());
            buf.append(", last stored " + channel.getLastArchivedValue());
            System.out.println(buf.toString());
        }
    }
}
package exm.stc.ic.opt;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import org.apache.log4j.Logger;
import exm.stc.common.Settings;
import exm.stc.common.exceptions.UserException;
import exm.stc.common.lang.Arg;
import exm.stc.common.lang.Types;
import exm.stc.common.lang.Var;
import exm.stc.common.lang.Var.Alloc;
import exm.stc.common.util.HierarchicalSet;
import exm.stc.common.util.Pair;
import exm.stc.ic.opt.InitVariables.InitState;
import exm.stc.ic.opt.OptimizerPass.FunctionOptimizerPass;
import exm.stc.ic.tree.ICContinuations.Continuation;
import exm.stc.ic.tree.ICInstructions.Instruction;
import exm.stc.ic.tree.ICTree.Block;
import exm.stc.ic.tree.ICTree.BlockType;
import exm.stc.ic.tree.ICTree.Function;
import exm.stc.ic.tree.ICTree.Statement;
import exm.stc.ic.tree.ICTree.StatementType;
import exm.stc.ic.tree.Opcode;
import exm.stc.ic.tree.TurbineOp;
/**
* Try to merge multiple array inserts into a single build instruction.
* TODO: optimise multisets
*/
public class ArrayBuild extends FunctionOptimizerPass {
@Override
public String getPassName() {
    // Human-readable name used in optimizer logging
    final String passName = "Array build";
    return passName;
}
@Override
public String getConfigEnabledKey() {
    // This pass is toggled by the array-build optimizer setting
    return Settings.OPT_ARRAY_BUILD;
}
@Override
public void optimize(Logger logger, Function f) throws UserException {
    // Phase 1: analyse where each candidate array is modified.
    // Phase 2: rewrite eligible insert sequences into single builds.
    final InfoMap analysis = buildInfo(logger, f);
    optimize(logger, f, analysis);
}
/**
 * Map from (variable, block) to the variable's modification info in that
 * block, with lazy creation of empty records.
 */
private static class InfoMap extends HashMap<Pair<Var, Block>, BlockVarInfo> {
    /** Prevent warnings by providing version */
    private static final long serialVersionUID = 1L;

    /** Fetch the record for (arr, block), creating it on first access. */
    BlockVarInfo getEntry(Block block, Var arr) {
        final Pair<Var, Block> key = Pair.create(arr, block);
        BlockVarInfo existing = this.get(key);
        if (existing != null) {
            return existing;
        }
        BlockVarInfo fresh = new BlockVarInfo();
        this.put(key, fresh);
        return fresh;
    }
}
/** Per-(variable, block) record of how an array candidate is used. */
static class BlockVarInfo {
    /** True if the variable was declared in this block. */
    boolean declaredHere = false;
    /** True if an immediate-insert instruction occurs in this block. */
    boolean insertImmHere = false;
    /** True if some other modifying instruction occurs in this block. */
    boolean otherModHere = false;
    /** True if an immediate insert occurs in this block or a descendant. */
    boolean insertImmRec = false;
    /** True if another modification occurs in this block or a descendant. */
    boolean otherModRec = false;
    /** True iff, for any possible execution, insertImm is called at most
     *  once on this variable in a single block of this subtree; false when
     *  insertImm never appears in the subtree at all. */
    boolean insertImmOnce = false;

    public boolean isModifiedHere() {
        return otherModHere || insertImmHere;
    }

    public boolean isModifiedInSubtree() {
        return insertImmRec || otherModRec;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("insertImmHere: ").append(insertImmHere).append(" ");
        sb.append("otherModHere: ").append(otherModHere).append(" ");
        sb.append("insertImmRec: ").append(insertImmRec).append(" ");
        sb.append("otherModRec: ").append(otherModRec).append(" ");
        sb.append("insertImmOnce: ").append(insertImmOnce);
        return sb.toString();
    }

    /** True when no modification of any kind was seen here or below. */
    public boolean noInserts() {
        return !(insertImmHere || otherModHere || insertImmRec || otherModRec);
    }
}
/**
 * Walk every block of the function and record how each candidate array
 * is modified there.
 * @param logger
 * @param f function to analyse
 * @return map from (variable, block) to modification info
 */
private InfoMap buildInfo(Logger logger, Function f) {
    InfoMap result = new InfoMap();
    // Tracks which candidate arrays are in scope during the walk
    HierarchicalSet<Var> inScope = new HierarchicalSet<Var>();
    buildInfoRec(logger, f, f.mainBlock(), result, inScope);
    return result;
}
/** A candidate for this optimization is a non-alias array variable. */
private boolean isValidCandidate(Var var) {
    if (!Types.isArray(var)) {
        return false;
    }
    return var.storage() != Alloc.ALIAS;
}
/**
 * Register every valid candidate among vars as in scope, and mark it
 * as declared in this block.
 */
private void addBlockCandidates(Block block,
    InfoMap info, Collection<Var> candidates,
    Collection<Var> vars) {
    for (Var v: vars) {
        if (!isValidCandidate(v)) {
            continue;
        }
        candidates.add(v);
        info.getEntry(block, v).declaredHere = true;
    }
}
/**
 * Recursive worker for buildInfo: record this block's own modifications,
 * recurse into nested blocks, then roll child results up into this
 * block's recursive properties.
 */
private void buildInfoRec(Logger logger, Function f,
    Block block, InfoMap info, HierarchicalSet<Var> candidates) {
    addBlockCandidates(f, block, info, candidates);

    // Record modifications made by instructions directly in this block;
    // conditionals are handled with the other continuations below
    for (Statement stmt: block.getStatements()) {
        if (stmt.type() == StatementType.INSTRUCTION) {
            updateInfo(block, info, stmt.instruction(), candidates);
        }
    }

    // Recurse into every nested block with a child candidate scope
    for (Continuation c: block.allComplexStatements()) {
        for (Block child: c.getBlocks()) {
            buildInfoRec(logger, f, child, info, candidates.makeChild());
        }
    }

    // Derive the bottom-up (recursive) properties for this block
    updateInfoBottomUp(logger, block, info, candidates);

    if (logger.isTraceEnabled()) {
        logger.trace("Collected info on block: " +
            System.identityHashCode(block) + " " + block.getType());
        for (Var cand: candidates) {
            logger.trace(cand + " => " + info.getEntry(block, cand));
        }
    }
}
/**
 * Add candidates newly in scope in this block.  The function's main
 * block additionally brings the output variables into scope.
 * @param f
 * @param block
 * @param candidates
 */
private void addBlockCandidates(Function f, Block block, InfoMap info,
    Set<Var> candidates) {
    boolean isMainBlock = (block.getType() == BlockType.MAIN_BLOCK);
    if (isMainBlock) {
        addBlockCandidates(block, info, candidates, f.getOutputList());
    }
    addBlockCandidates(block, info, candidates, block.getVariables());
}
/**
 * Classify one instruction's effect on candidate arrays: an ARR_STORE
 * is an immediate insert; any other write counts as an unoptimizable
 * modification.
 */
private void updateInfo(Block block, InfoMap info, Instruction inst,
    Set<Var> candidates) {
    if (inst.op != Opcode.ARR_STORE) {
        for (Var output: inst.getOutputs()) {
            if (candidates.contains(output) && isValidCandidate(output)) {
                info.getEntry(block, output).otherModHere = true;
            }
        }
        return;
    }
    Var target = inst.getOutput(0);
    if (candidates.contains(target)) {
        info.getEntry(block, target).insertImmHere = true;
    }
}
/**
 * Update recursively defined properties (insertImmRec, otherModRec and
 * insertImmOnce) for this block from the block's own flags plus the
 * already-computed info of its immediate child blocks (the caller
 * recurses bottom-up, so child entries are final by now).
 * @param logger
 * @param block
 * @param info
 * @param candidates update info for these vars
 */
private void updateInfoBottomUp(Logger logger, Block block,
    InfoMap info, Set<Var> candidates) {
    for (Var candidate: candidates) {
        BlockVarInfo ci = info.getEntry(block, candidate);
        // Seed the recursive flags with this block's own flags
        ci.insertImmRec = ci.insertImmHere;
        ci.otherModRec = ci.otherModHere;
        // Count number of blocks in subtree that are "valid"
        int insertImmOnceCounter = ci.insertImmHere ? 1 : 0;
        // True if insert immediate might happen on multiple blocks
        boolean insertImmOnceInvalid = false;
        for (Continuation cont: block.allComplexStatements()) {
            int iiBlockCount = 0; // Count subblocks with insertImm
            for (Block contBlock: cont.getBlocks()) {
                BlockVarInfo ciInner = info.getEntry(contBlock, candidate);
                ci.insertImmRec = ci.insertImmRec || ciInner.insertImmRec;
                ci.otherModRec = ci.otherModRec || ciInner.otherModRec;
                if (ciInner.insertImmOnce) {
                    iiBlockCount++;
                } else if (ciInner.insertImmRec) {
                    // Inserts occur in the child subtree but not exactly once
                    insertImmOnceInvalid = true;
                }
            }
            if (cont.isLoop() && iiBlockCount > 0) {
                // Invalid: insert within loop
                insertImmOnceInvalid = true;
            } else if (cont.isConditional() && iiBlockCount > 0) {
                // We might be able to optimize if it only happens on branches
                // of this conditional
                insertImmOnceCounter++;
            } else {
                // Assume each block executes once
                if (iiBlockCount == 1) {
                    insertImmOnceCounter++;
                } else if (iiBlockCount > 1) {
                    insertImmOnceInvalid = true;
                }
            }
        }
        // If we determined that there was once place (this block or subtree)
        // where insertImmOnce was valid, and there were no disqualifiers
        ci.insertImmOnce = (insertImmOnceCounter == 1) && !insertImmOnceInvalid;
    }
}
/** Rewrite phase: walk the function and replace eligible insert runs. */
private void optimize(Logger logger, Function f, InfoMap info) {
    InitState initState = InitState.enterFunction(f);
    HierarchicalSet<Var> candidates = new HierarchicalSet<Var>();
    HierarchicalSet<Var> disqualified = new HierarchicalSet<Var>();
    optRecurseOnBlock(logger, f, f.mainBlock(), info, initState,
        candidates, disqualified);
}
/**
 * Recursive rewrite worker: for each candidate array, decide whether its
 * inserts can be collapsed into a single arrayBuild in this block, in a
 * descendant block, or not at all; then walk the block's statements,
 * keeping variable-initialization state up to date, and recurse into
 * nested blocks.
 *
 * @param cands candidate arrays in scope (child scopes are layered)
 * @param invalid candidates already ruled out for this subtree
 */
private void optRecurseOnBlock(Logger logger, Function f, Block block,
    InfoMap info, InitState init,
    HierarchicalSet<Var> cands, HierarchicalSet<Var> invalid) {
    addBlockCandidates(f, block, info, cands);

    for (Var cand: cands) {
        if (!invalid.contains(cand)) {
            BlockVarInfo vi = info.getEntry(block, cand);
            if (logger.isTraceEnabled()) {
                logger.trace("Candidate: " + cand + " in block " +
                    System.identityHashCode(block) + " " + block.getType());
                logger.trace(vi);
            }
            if (vi.otherModRec) {
                // Some non-insertImm modification in the subtree:
                // the rewrite is unsafe everywhere for this variable
                logger.trace("Can't optimize due other other inserts!");
                invalid.add(cand);
            } else if ((vi.insertImmOnce && vi.insertImmHere) ||
                       (vi.noInserts() && vi.declaredHere)) {
                // Criteria 1: declared here && no inserts here or in children
                // TODO
                // non-mutually-exclusive path
                // Optimize here: cases where only inserted in this block,
                // or no inserts at all
                logger.trace("Can optimize!");
                replaceInserts(logger, block, init, cand);
                invalid.add(cand); // Don't try to opt in descendants
            } else if (vi.insertImmOnce) {
                logger.trace("Try to optimize in descendant block!");
                // Do nothing: handle in child block
            } else {
                logger.trace("Optimization not valid!");
                // Invalid: can't do optimization anywhere
                invalid.add(cand);
            }
        }
    }
    for (Statement stmt: block.getStatements()) {
        switch (stmt.type()) {
            case INSTRUCTION:
                // Update which variables are initialized
                InitVariables.updateInitVars(logger, stmt, init, false);
                break;
            case CONDITIONAL:
                // Recurse and optimize, plus also update init vars
                optRecurseOnCont(logger, f, stmt.conditional(), info, init,
                    cands, invalid);
                break;
        }
    }
    // Non-conditional continuations (loops etc.) are not statements, so
    // they are traversed separately here
    for (Continuation cont: block.getContinuations()) {
        optRecurseOnCont(logger, f, cont, info, init, cands, invalid);
    }
}
/**
 * Recurse into every block of a continuation with child scopes, then
 * merge the per-branch initialization states where branches rejoin.
 */
private void optRecurseOnCont(Logger logger, Function f,
    Continuation cont, InfoMap info, InitState init,
    HierarchicalSet<Var> cands, HierarchicalSet<Var> invalid) {
    InitState contInit = init.enterContinuation(cont);
    List<InitState> branchStates = new ArrayList<InitState>();
    for (Block child: cont.getBlocks()) {
        InitState childState = contInit.enterBlock(child);
        optRecurseOnBlock(logger, f, child, info, childState,
            cands.makeChild(), invalid.makeChild());
        branchStates.add(childState);
    }
    if (InitState.canUnifyBranches(cont)) {
        init.unifyBranches(cont, branchStates);
    }
}
/**
 * Replace all arrayInsertImm instructions on arr in this block with a
 * single arrayBuild instruction.
 * @param block
 * @param arr
 * @param init initialized state from outside. Not modified
 */
private void replaceInserts(Logger logger, Block block,
    InitState init, Var arr) {
    // Strip out the old insert instructions, collecting their keys/values
    Pair<List<Arg>, List<Arg>> removed = removeOldInserts(block, arr);
    List<Arg> keys = removed.val1;
    List<Arg> vals = removed.val2;
    // Emit one build instruction once all of its inputs are available
    ListIterator<Statement> pos =
        findArrayBuildPos(logger, block, init, arr, keys, vals);
    pos.add(TurbineOp.arrayBuild(arr, keys, vals));
}
/**
 * Select the location to insert the array build instruction: the
 * earliest point in the block at which the array plus every key and
 * value argument is known to be initialized/assigned.
 * @param block
 * @param outerInit initialization state on entry to block; not modified
 * @param array
 * @param keys
 * @param vals
 * @return list iterator positioned where the arrayBuild belongs
 */
private ListIterator<Statement> findArrayBuildPos(Logger logger,
    Block block, InitState outerInit,
    Var array, List<Arg> keys, List<Arg> vals) {
    // Place the array build instruction as early as possible, once all
    // inputs are initialized
    Set<Var> needsInit = new HashSet<Var>();

    // array variable may need to be initialized
    if (InitVariables.varMustBeInitialized(array, true)) {
        if (!outerInit.initVars.contains(array)) {
            needsInit.add(array);
        }
    }
    for (Arg key: keys) {
        if (key.isVar()) {
            // Assert to check assumptions match init var analysis
            assert (InitVariables.assignBeforeRead(key.getVar()));
            // Key must be assigned
            if (!outerInit.assignedVals.contains(key.getVar())) {
                needsInit.add(key.getVar());
            }
        }
    }
    for (Arg val: vals) {
        if (val.isVar()) {
            Var var = val.getVar();
            if (InitVariables.assignBeforeRead(var) &&
                !outerInit.assignedVals.contains(var)) {
                // Must assign value
                needsInit.add(var);
            } else if (InitVariables.varMustBeInitialized(var, false) &&
                !outerInit.initVars.contains(var)) {
                // Must init alias
                needsInit.add(var);
            }
        }
    }
    InitState blockInit = outerInit.enterBlock(block);
    // Move forward until all variables are initialized
    ListIterator<Statement> insertPos = block.statementIterator();
    while (insertPos.hasNext() && !needsInit.isEmpty()) {
        Statement stmt = insertPos.next();
        InitVariables.updateInitVars(logger, stmt, blockInit, false);
        // Check to see if everything is ready now
        // TODO: iterating over this every time is inefficient, but probably
        // good enough
        Iterator<Var> it = needsInit.iterator();
        while (it.hasNext()) {
            Var v = it.next();
            if (InitVariables.assignBeforeRead(v)) {
                if (blockInit.assignedVals.contains(v)) {
                    it.remove();
                }
            } else if (InitVariables.varMustBeInitialized(v, false)) {
                if (blockInit.initVars.contains(v)) {
                    it.remove();
                }
            }
        }
    }
    if (!needsInit.isEmpty()) {
        // Falls through to inserting at the end of the block: warn rather
        // than abort, since the analysis may simply be conservative
        logger.warn("STC internal warning: wasn't able to determine that "
            + needsInit + " were initialized");
    }
    return insertPos;
}
private Pair<List<Arg>, List<Arg>> removeOldInserts(Block block, Var arr) {
List<Arg> keys = new ArrayList<Arg>();
List<Arg> vals = new ArrayList<Arg>();
ListIterator<Statement> it = block.statementIterator();
while (it.hasNext()) {
Statement stmt = it.next();
if (stmt.type() == StatementType.INSTRUCTION) {
Instruction inst = stmt.instruction();
if (inst.op == Opcode.ARR_STORE) {
if (inst.getOutput(0).equals(arr)) {
it.remove();
Arg key = inst.getInput(0);
Arg val = inst.getInput(1);
assert(Types.isArrayKeyVal(arr, key));
assert(Types.isElemValType(arr, val));
keys.add(key);
vals.add(val);
}
}
}
}
return Pair.create(keys, vals);
}
} |
package ueb04.a2;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class Comm2 {
    /** Routing table of this node; null until init() has run. */
    private static RoutingTable Routing = null;

    /** Well-known port the first node of the network listens on. */
    private static int FirstPort = 5000;

    /**
     * Initializes this node.  If nobody is listening on the well-known
     * first port yet, this node becomes the first one and starts a fresh
     * routing table; otherwise it should join the existing network.
     *
     * @param name  symbolic name of this node
     * @param peers candidate peer ports to probe when joining (unused so far)
     */
    public static void init(String name, int[] peers){
        // Are we the first node?  If the well-known port can still be bound,
        // nobody is listening there, so nobody came before us.
        // (Admittedly, that is a weak argument..)
        // NOTE(review): the original tested !available(FirstPort), which
        // contradicts its own comment — a *free* port means no existing node.
        if (available(FirstPort)){
            // We are the first node!
            Routing = new RoutingTable(name);
        } else{
            // We are not the first node!
            // TODO: find the others and update the network (not implemented)
        }
    }

    /**
     * Serializes {@code m} and sends it as a single UDP datagram to
     * {@code port} on the local host.  Errors are printed and swallowed
     * (best effort).
     */
    private static void send(int port, Message m){
        try {
            InetAddress local = InetAddress.getLocalHost();
            DatagramSocket toSocket = new DatagramSocket();
            try {
                byte[] payload = m.toString().getBytes("UTF-8");
                // Use the (buf, length, address, port) constructor.  The old
                // code accidentally hit the (buf, offset, length) overload, so
                // the packet had a bogus length and no destination at all.
                DatagramPacket packet =
                        new DatagramPacket(payload, payload.length, local, port);
                toSocket.send(packet);
            } finally {
                toSocket.close(); // don't leak the socket
            }
        } catch (IOException e) {
            // SocketException, UnknownHostException and
            // UnsupportedEncodingException are all IOExceptions
            e.printStackTrace();
        }
    }

    /**
     * Probes whether {@code port} can currently be bound for both TCP
     * and UDP on this host.
     *
     * @return true if the port is free, false if it is in use
     */
    public static boolean available(int port) {
        ServerSocket ss = null;
        DatagramSocket ds = null;
        try {
            ss = new ServerSocket(port);
            ss.setReuseAddress(true);
            ds = new DatagramSocket(port);
            ds.setReuseAddress(true);
            return true;
        } catch (IOException e) {
            // fall through: port is in use
        } finally {
            if (ds != null) {
                ds.close();
            }
            if (ss != null) {
                try {
                    ss.close();
                } catch (IOException e) {
                    /* should not be thrown */
                }
            }
        }
        return false;
    }

    /**
     * Distance-vector routing table.
     * String form:
     * "NAME,PORT,DISTANCE;NAME2,PORT2,DISTANCE2"
     */
    private static class RoutingTable {
        private int[] distances;
        private int[] ports;
        private String[] names;
        private HashMap<String, Integer> nameToPosition = new HashMap<String, Integer>();

        /** Creates a one-entry table containing only this node (distance 0). */
        public RoutingTable(String name){
            this.nameToPosition.put(name,0);
            this.distances = new int[1];
            this.ports = new int[1];
            this.names = new String[1];
            // The original left names[0] null, which broke toString()
            this.names[0] = name;
        }

        /**
         * Builds a table by merging the tables advertised by peers,
         * keeping for each entry the route via the peer with the lowest
         * distance, plus one hop.
         */
        public RoutingTable(List<Message> messages, String name){
            List<RoutingTable> peers = new ArrayList<RoutingTable>();
            for(Message m : messages){
                peers.add(new RoutingTable(m));
            }
            RoutingTable o = peers.get(0);
            // Allocate the backing arrays first — the original code
            // dereferenced them while they were still null (NPE).
            this.distances = new int[o.length()];
            this.ports = new int[o.length()];
            this.names = new String[o.length()];
            // TODO(review): the 'name' parameter (this node itself) is never
            // entered into the table — confirm whether that is intended.
            if(peers.size() == 1){
                for(int i = 0; i < o.length();i++){
                    this.distances[i] = o.distance(i) + 1;
                    this.ports[i] = o.port(i);
                    this.names[i] = o.name(i);
                    this.nameToPosition.put(o.name(i),i);
                }
            }else{
                for(int i = 0; i < o.length();i++){
                    // Pick the peer advertising the shortest route to entry i
                    RoutingTable lowest = o;
                    for(RoutingTable t : peers){
                        if(t.distance(i) < lowest.distance(i)){
                            lowest = t;
                        }
                    }
                    this.distances[i] = lowest.distance(i) + 1;
                    this.ports[i] = lowest.port(i);
                    this.names[i] = lowest.name(i);
                    this.nameToPosition.put(lowest.name(i),i);
                }
            }
        }

        /** Parses a table from its wire form (see class comment). */
        private RoutingTable(Message m){
            String[] t = m.Message.split(";");
            this.distances = new int[t.length];
            this.ports = new int[t.length];
            this.names = new String[t.length];
            for(int i =0; i < t.length; i++){
                String[]r = t[i].split(",");
                this.distances[i] = Integer.parseInt(r[2]);
                this.ports[i] = Integer.parseInt(r[1]);
                this.names[i] = r[0];
                this.nameToPosition.put(r[0],i);
            }
        }

        /** Number of entries in the table. */
        public int length(){
            return this.distances.length;
        }

        public int distance(int i){
            return this.distances[i];
        }

        public int port(int i){
            return this.ports[i];
        }

        public String name(int i){
            return this.names[i];
        }

        public int distance(String name){
            int i = this.nameToPosition.get(name);
            return this.distances[i];
        }

        public int port(String name){
            int i = this.nameToPosition.get(name);
            return this.ports[i];
        }

        public String name(String name){
            int i = this.nameToPosition.get(name);
            return this.names[i];
        }

        /** Serializes as "NAME,PORT,DISTANCE;..." — inverse of RoutingTable(Message). */
        @Override
        public String toString(){
            StringBuilder sb = new StringBuilder();
            for(int i = 0; i < this.distances.length;i++){
                if(sb.length() > 0){
                    sb.append(";");
                }
                sb.append(this.names[i]);
                sb.append(",");
                sb.append(this.ports[i]);
                sb.append(",");
                // Third field is the distance; the old code appended the
                // name a second time, producing an unparseable string.
                sb.append(this.distances[i]);
            }
            return sb.toString();
        }

        /** Extends all three parallel arrays by one slot (values preserved). */
        private void grow(){
            int[] d = new int[this.distances.length + 1];
            int[] p = new int[this.ports.length + 1];
            String[] n = new String[this.names.length + 1];
            for(int i = 0; i < this.distances.length;i++){
                d[i]=this.distances[i];
                p[i]=this.ports[i];
                n[i]=this.names[i];
            }
            this.distances = d;
            this.ports = p;
            this.names = n;
        }
    }

    /** Wire message types; the ordinal-like codes are fixed by toString()/parse. */
    private enum MessageType{
        Message,
        Update,
        Connect,
        ConnectAccept
    }

    /**
     * A protocol message.  Wire form: "TYPE|PORT|NAME|BODY" where TYPE is
     * 0=Message, 1=Update, 2=Connect (no body), 3=ConnectAccept.
     */
    private static class Message {
        public final int Port;
        public String Message;
        public String Name;
        public MessageType Type;
        public List<Integer> Peers = new ArrayList<Integer>();

        /** Parses a message from its UTF-8 wire form. */
        public Message(byte [] data){
            String[] t = null;
            try {
                // '|' is a regex metacharacter and must be escaped, otherwise
                // split() splits between every single character.  Limit 4 so
                // a body that itself contains '|' stays intact.
                t = new String(data,"UTF-8").split("\\|", 4);
            } catch (UnsupportedEncodingException e) {
                System.out.println("nop wtf");
            }
            Name = t[2];
            Port = Integer.parseInt(t[1]);
            switch (Integer.parseInt(t[0])){
            case 0:
                Type = MessageType.Message;
                Message = t[3];
                break;
            case 1:
                Type = MessageType.Update;
                for(String p : t[3].split(",")){
                    Peers.add(Integer.parseInt(p));
                }
                break;
            case 2:
                Type = MessageType.Connect;
                break;
            case 3:
                // ConnectAccept was serialized by toString() but never
                // handled here before — the table payload was dropped.
                Type = MessageType.ConnectAccept;
                Message = t[3];
                break;
            }
        }

        /** ConnectAccept carrying our routing table. */
        public Message(int port, String name, RoutingTable t){
            this.Port = port;
            this.Name = name;
            this.Type = MessageType.ConnectAccept;
            this.Message = t.toString();
        }

        /** Plain chat/data message. */
        public Message(int port, String name,String message){
            this.Port = port;
            this.Name = name;
            this.Message = message;
            this.Type = MessageType.Message;
        }

        /** Update message listing known peer ports, comma separated. */
        public Message(int port, String name,List<Integer> peers){
            this.Port = port;
            this.Name = name;
            this.Message = "";
            for(int peer : peers){
                if (this.Message.length() != 0){
                    this.Message += ",";
                }
                this.Message += peer;
            }
            this.Type = MessageType.Update;
        }

        /** Serializes to the wire form parsed by Message(byte[]). */
        @Override
        public String toString(){
            switch (this.Type){
            case ConnectAccept:
                return "3|" + this.Port + "|" + this.Name + "|" + this.Message;
            case Connect:
                return "2|" + this.Port + "|" + this.Name;
            case Update:
                return "1|" + this.Port + "|" + this.Name + "|" + this.Message;
            case Message:
                return "0|" + this.Port + "|" + this.Name + "|" + this.Message;
            }
            return null;
        }
    }
}
package ui;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.event.Event;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Parent;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.PasswordField;
import javafx.scene.control.TextField;
import javafx.scene.layout.ColumnConstraints;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Region;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import javafx.stage.WindowEvent;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.eclipse.egit.github.core.IRepositoryIdProvider;
import org.eclipse.egit.github.core.RepositoryId;
import service.ServiceManager;
import storage.DataManager;
import ui.components.Dialog;
import ui.components.StatusBar;
import ui.issuecolumn.ColumnControl;
import util.DialogMessage;
public class LoginDialog extends Dialog<Boolean> {
private static final String DIALOG_TITLE = "GitHub Login";
private static final int DIALOG_HEIGHT = 200;
private static final int DIALOG_WIDTH = 470;
private static final String LABEL_REPO_NAME = "Repository:";
private static final String FIELD_DEFAULT_REPO_OWNER = "<owner/organization>";
private static final String FIELD_DEFAULT_REPO_NAME = "<repository>";
private static final String PASSWORD_LABEL = "Password:";
private static final String USERNAME_LABEL = "Username:";
private static final String BUTTON_SIGN_IN = "Sign in";
private static final Logger logger = LogManager.getLogger(LoginDialog.class.getName());
private TextField repoOwnerField;
private TextField repoNameField;
private TextField usernameField;
private PasswordField passwordField;
private ColumnControl columns;
private Button loginButton;
public LoginDialog(Stage parentStage, ColumnControl columns) {
super(parentStage);
this.columns = columns;
}
@Override
protected void onClose(WindowEvent e) {
completeResponse(false);
}
@Override
protected Parent content() {
setTitle(DIALOG_TITLE);
setSize(DIALOG_WIDTH, DIALOG_HEIGHT);
setStageStyle(StageStyle.UTILITY);
GridPane grid = new GridPane();
setupGridPane(grid);
Label repoNameLabel = new Label(LABEL_REPO_NAME);
grid.add(repoNameLabel, 0, 0);
repoOwnerField = new TextField(FIELD_DEFAULT_REPO_OWNER);
repoOwnerField.setPrefWidth(140);
grid.add(repoOwnerField, 1, 0);
Label slash = new Label("/");
grid.add(slash, 2, 0);
repoNameField = new TextField(FIELD_DEFAULT_REPO_NAME);
repoNameField.setPrefWidth(250);
grid.add(repoNameField, 3, 0);
Label usernameLabel = new Label(USERNAME_LABEL);
grid.add(usernameLabel, 0, 1);
usernameField = new TextField();
grid.add(usernameField, 1, 1, 3, 1);
Label passwordLabel = new Label(PASSWORD_LABEL);
grid.add(passwordLabel, 0, 2);
passwordField = new PasswordField();
grid.add(passwordField, 1, 2, 3, 1);
populateSavedFields();
repoOwnerField.setOnAction(this::login);
repoNameField.setOnAction(this::login);
usernameField.setOnAction(this::login);
passwordField.setOnAction(this::login);
HBox buttons = new HBox(10);
buttons.setAlignment(Pos.BOTTOM_RIGHT);
loginButton = new Button(BUTTON_SIGN_IN);
loginButton.setOnAction(this::login);
buttons.getChildren().add(loginButton);
grid.add(buttons, 3, 3);
return grid;
}
/**
* Fills in fields which have values at this point.
*/
private void populateSavedFields() {
Optional<RepositoryId> lastViewed = DataManager.getInstance().getLastViewedRepository();
if (lastViewed.isPresent()) {
repoOwnerField.setText(lastViewed.get().getOwner());
repoNameField.setText(lastViewed.get().getName());
}
String lastLoginName = DataManager.getInstance().getLastLoginUsername();
if (!lastLoginName.isEmpty()) {
usernameField.setText(lastLoginName);
}
// Change focus depending on what fields are present
Platform.runLater(() -> {
if (!lastLoginName.isEmpty()) {
passwordField.requestFocus();
} else if (lastViewed.isPresent()) {
usernameField.requestFocus();
}
});
}
/**
* Configures the central grid pane before it's used.
* @param grid
*/
private static void setupGridPane(GridPane grid) {
grid.setAlignment(Pos.CENTER);
grid.setHgap(7);
grid.setVgap(10);
grid.setPadding(new Insets(25));
grid.setPrefSize(390, 100);
grid.setMaxSize(Region.USE_COMPUTED_SIZE, Region.USE_COMPUTED_SIZE);
applyColumnConstraints(grid, 20, 39, 2, 39);
}
/**
* A variadic function that applies percentage-width column constraints to
* the given grid pane.
* @param grid the grid pane to apply column constraints to
* @param values an array of integer values which should add up to 100
*/
private static void applyColumnConstraints(GridPane grid, int... values) {
// The values should sum up to 100%
int sum = 0;
for (int i=0; i<values.length; i++) {
sum += values[i];
}
assert sum == 100 : "Column constraints should sum up to 100%!";
// Apply constraints to grid
ColumnConstraints column;
for (int i=0; i<values.length; i++) {
column = new ColumnConstraints();
column.setPercentWidth(values[i]);
grid.getColumnConstraints().add(column);
}
}
private void login(Event e) {
// Resolve username and password
String owner = repoOwnerField.getText();
String repo = repoNameField.getText();
String username = usernameField.getText();
String password = passwordField.getText();
if (username.isEmpty() && password.isEmpty()) {
BufferedReader reader;
try {
reader = new BufferedReader(new FileReader("credentials.txt"));
String line = null;
while ((line = reader.readLine()) != null) {
if (username.isEmpty()) {
username = line;
} else {
password = line;
}
}
logger.info("Logged in using credentials.txt");
} catch (Exception ex) {
logger.info("Failed to find or open credentials.txt");
}
}
// Save login details
DataManager.getInstance().setLastLoginUsername(username);
// Update UI
enableElements(false);
// Run blocking operations in the background
StatusBar.displayMessage("Signing in at GitHub...");
boolean couldLogIn = ServiceManager.getInstance().login(username, password);
Task<Boolean> task = new Task<Boolean>() {
@Override
protected Boolean call() throws Exception {
StatusBar.displayMessage("Signed in; loading data...");
boolean loadSuccess = loadRepository(owner, repo);
final CountDownLatch latch = new CountDownLatch(1);
Platform.runLater(()->{
columns.resumeColumns();
latch.countDown();
});
try {
latch.await();
} catch (InterruptedException e) {
e.printStackTrace();
}
return loadSuccess;
}
};
task.setOnSucceeded(wse -> {
if (task.getValue()) {
StatusBar.displayMessage("Issues loaded successfully! " + ServiceManager.getInstance().getRemainingRequests() + " requests remaining out of " + ServiceManager.getInstance().getRequestLimit() + ".");
completeResponse(true);
close();
} else {
handleError("Issues failed to load. Please try again.");
}
});
task.setOnFailed(wse -> {
Throwable thrown = task.getException();
logger.error(thrown.getLocalizedMessage(), thrown);
handleError("An error occurred: " + task.getException());
});
if (couldLogIn) {
DialogMessage.showProgressDialog(task, "Loading issues from " + owner + "/" + repo + "...");
Thread th = new Thread(task);
th.setDaemon(true);
th.start();
} else {
handleError("Failed to sign in. Please try again.");
}
}
private void handleError(String message) {
Platform.runLater(()->{
enableElements(true);
StatusBar.displayMessage(message);
DialogMessage.showWarningDialog("Warning", message);
});
}
private void enableElements(boolean enable) {
boolean disable = !enable;
loginButton.setDisable(disable);
repoOwnerField.setDisable(disable);
repoNameField.setDisable(disable);
usernameField.setDisable(disable);
passwordField.setDisable(disable);
}
private boolean loadRepository(String owner, String repoName) throws IOException {
boolean loaded = ServiceManager.getInstance().setupRepository(owner, repoName);
ServiceManager.getInstance().setupAndStartModelUpdate();
IRepositoryIdProvider currRepo = ServiceManager.getInstance().getRepoId();
if (currRepo != null) {
String repoId = currRepo.generateId();
DataManager.getInstance().addToLastViewedRepositories(repoId);
}
return loaded;
}
} |
package org.osgi.impl.service.megcontainer;
import java.util.*;
import org.osgi.framework.*;
import java.lang.reflect.*;
import org.osgi.meglet.Meglet;
import org.osgi.meglet.MegletHandle;
import org.osgi.service.application.*;
import org.osgi.service.log.LogService;
/**
* Specialization of the application descriptor. Represents a Meglet and
* provides generic methods inherited from the application descriptor. It is a
* service.
*/
public final class MegletDescriptor extends ApplicationDescriptor {
private Properties props;
private Hashtable names;
private Hashtable icons;
private BundleContext bc;
private String startClass;
private String pid;
private Bundle bundle;
private MegletContainer megletContainer;
private String defaultLanguage;
/**
* @param bc
* @param props
* @param names
* @param icons
* @param defaultLanguage
* @param startClass
* @param bundle
* @param impl
*/
public MegletDescriptor(BundleContext bc, Properties props, Map names,
Map icons, String defaultLang, String startClass, Bundle bundle, MegletContainer mc ) throws Exception {
super( props.getProperty( APPLICATION_PID ) );
this.bc = bc;
this.props = new Properties();
this.props.putAll(props);
this.names = new Hashtable(names);
this.icons = new Hashtable(icons);
this.startClass = startClass;
this.bundle = bundle;
this.megletContainer = mc;
if (names.size() == 0 || icons.size() == 0
|| !props.containsKey("application.bundle.id")
|| !props.containsKey( APPLICATION_PID )
|| !props.containsKey( APPLICATION_VERSION ))
throw new Exception("Invalid MEG container input!");
if( !names.containsKey( defaultLang ) )
throw new Exception("Invalid default language!");
this.defaultLanguage = defaultLang;
pid = props.getProperty( APPLICATION_PID );
}
/**
* @return
*/
public long getBundleId() {
return Long.parseLong( props.getProperty("application.bundle.id") );
}
/**
* @return
*/
public boolean isSingleton() {
String singleton = props.getProperty( APPLICATION_SINGLETON );
return singleton == null || singleton.equalsIgnoreCase("true");
}
/**
* @return
*/
public String getStartClass() {
return startClass;
}
protected BundleContext getBundleContext() {
return bc;
}
public Map getPropertiesSpecific(String locale) {
Hashtable properties = new Hashtable();
String localizedName = (String) names.get(locale);
if (localizedName == null) {
if( ( localizedName = (String) names.get( defaultLanguage ) ) == null ) {
Enumeration enum = names.keys();
String firstKey = (String) enum.nextElement();
localizedName = (String) names.get(firstKey);
locale = firstKey;
} else
locale = defaultLanguage;
}
properties.put( APPLICATION_NAME, localizedName);
properties.put( APPLICATION_ICON, icons.get(locale) );
properties.put("application.bundle.id", props.getProperty("application.bundle.id"));
properties.put( APPLICATION_VERSION, props.getProperty( APPLICATION_VERSION ));
properties.put( APPLICATION_VENDOR, props.getProperty( APPLICATION_VENDOR ));
String singleton = props.getProperty( APPLICATION_SINGLETON );
if (singleton == null || singleton.equalsIgnoreCase("true"))
properties.put( APPLICATION_SINGLETON, "true");
else
properties.put( APPLICATION_SINGLETON, "false");
String autostart = props.getProperty( APPLICATION_AUTOSTART );
if (autostart != null && autostart.equalsIgnoreCase("true"))
properties.put( APPLICATION_AUTOSTART, "true");
else
properties.put( APPLICATION_AUTOSTART, "false");
String visible = props.getProperty( APPLICATION_VISIBLE );
if (visible != null && visible.equalsIgnoreCase("false"))
properties.put( APPLICATION_VISIBLE, "false");
else
properties.put( APPLICATION_VISIBLE, "true");
boolean launchable = false;
try {
launchable = megletContainer.isLaunchable( this );
}catch (Exception e) {
MegletContainer.log( bc, LogService.LOG_ERROR,
"Exception occurred at searching the Meglet container reference!", e);
}
properties.put("application.locked", (new Boolean(isLocked())).toString());
properties.put("application.launchable", (new Boolean(launchable)).toString());
properties.put("application.type", "MEG");
properties.put( APPLICATION_PID, new String( pid ) );
return properties;
}
void initMeglet( Meglet meglet, MegletHandle handle ) throws Exception {
Class megletClass = Meglet.class;
Method setupMethod = megletClass.getDeclaredMethod( "init", new Class [] {
MegletHandle.class, BundleContext.class } );
setupMethod.setAccessible( true );
setupMethod.invoke( meglet, new Object [] { handle, bc } );
}
/**
* Called by launch() to create and start a new instance in an application
* model specific way. It also creates and registeres the application handle
* to represent the newly created and started instance.
*
* @param arguments
* the startup parameters of the new application instance, may be
* null
*
* @return the service reference to the application model specific
* application handle for the newly created and started instance.
*
* @throws Exception
* if any problem occures.
*/
protected ServiceReference launchSpecific( Map args ) throws Exception {
Meglet meglet = megletContainer.createMegletInstance( this, false );
MegletHandleImpl appHandle = new MegletHandleImpl( megletContainer, meglet, this, bc);
if (meglet == null)
throw new Exception("Cannot create meglet instance!");
initMeglet( meglet, appHandle );
return appHandle.startHandle( args );
}
} |
package org.ovirt.engine.core.common.businessentities;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonProperty;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Type;
import org.hibernate.annotations.TypeDef;
import org.ovirt.engine.core.common.businessentities.mapping.GuidType;
import org.ovirt.engine.core.common.utils.ValidationUtils;
import org.ovirt.engine.core.common.validation.annotation.MTU;
import org.ovirt.engine.core.common.validation.group.CreateEntity;
import org.ovirt.engine.core.common.validation.group.UpdateEntity;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.INotifyPropertyChanged;
import org.ovirt.engine.core.compat.NGuid;
/**
 * Logical network entity, mapped to the "network" table.  Holds the
 * network's addressing details (address, subnet, gateway), VLAN id,
 * STP flag, MTU, owning storage pool and the cluster attachment.
 * Bean-validation annotations enforce the name/address formats on
 * create/update.
 *
 * NOTE(review): the nonstandard lower-case accessor names (getaddr,
 * getname, ...) appear to be relied on by serialization/legacy callers
 * (see the @JsonIgnore/@JsonProperty split on getName/getname) — confirm
 * before renaming them.
 */
@Entity
@Table(name = "network")
@TypeDef(name = "guid", typeClass = GuidType.class)
public class Network extends IVdcQueryable implements INotifyPropertyChanged, Serializable, BusinessEntity<Guid>, Nameable {
    private static final long serialVersionUID = 7357288865938773402L;

    // Primary key, generated by the engine's own GUID generator.
    @Id
    @GeneratedValue(generator = "system-uuid")
    @GenericGenerator(name = "system-uuid", strategy = "org.ovirt.engine.core.dao.GuidGenerator")
    @Column(name = "Id")
    @Type(type = "guid")
    private Guid id;

    // Network name: 1-15 characters of [_a-zA-Z0-9], validated on
    // create and update.
    @Pattern(regexp = "^[_a-zA-Z0-9]{1,15}$", message = "NETWORK_ILEGAL_NETWORK_NAME", groups = { CreateEntity.class,
            UpdateEntity.class })
    @Size(min = 1, max = BusinessEntitiesDefinitions.NETWORK_NAME_SIZE)
    @Column(name = "name")
    private String name;

    @Size(max = BusinessEntitiesDefinitions.GENERAL_MAX_SIZE)
    @Column(name = "description")
    private String description;

    // Opaque numeric network type; semantics defined by callers.
    @Column(name = "type")
    private Integer type;

    // Static IP address of the network (validated as dotted-quad).
    @Pattern(regexp = ValidationUtils.IP_PATTERN, message = "NETWORK_ADDR_IN_STATIC_IP_BAD_FORMAT")
    @Size(max = BusinessEntitiesDefinitions.GENERAL_NETWORK_ADDR_SIZE)
    @Column(name = "addr")
    private String addr;

    @Pattern(regexp = ValidationUtils.IP_PATTERN, message = "NETWORK_ADDR_IN_SUBNET_BAD_FORMAT")
    @Size(max = BusinessEntitiesDefinitions.GENERAL_SUBNET_SIZE)
    @Column(name = "subnet")
    private String subnet;

    @Pattern(regexp = ValidationUtils.IP_PATTERN, message = "NETWORK_ADDR_IN_GATEWAY_BAD_FORMAT")
    @Size(max = BusinessEntitiesDefinitions.GENERAL_GATEWAY_SIZE)
    @Column(name = "gateway")
    private String gateway;

    // Optional VLAN tag; null means untagged.
    @Column(name = "vlan_id")
    private Integer vlanId;

    // Spanning Tree Protocol enabled flag.
    @Column(name = "stp")
    private boolean stp = false;

    @Column(name = "storage_pool_id")
    @Type(type = "guid")
    private NGuid storagePoolId;

    // Cluster attachment via the network_cluster join table.
    @ManyToOne
    @JoinTable(name = "network_cluster", joinColumns = @JoinColumn(name = "network_id"),
            inverseJoinColumns = @JoinColumn(name = "cluster_id"))
    private NetworkCluster cluster;

    // True when VMs may attach to this network (as opposed to a pure
    // management/storage network).
    // NOTE(review): no @Column mapping here, unlike the other persisted
    // fields — confirm whether this is persisted elsewhere or transient.
    private boolean vmNetwork = true;

    @MTU
    private int mtu;

    public Network() {
    }

    public Network(String addr, String description, Guid id, String name, String subnet, String gateway, Integer type,
            Integer vlan_id, boolean stp, int mtu, boolean vmNetwork) {
        this.addr = addr;
        this.description = description;
        this.id = id;
        this.name = name;
        this.subnet = subnet;
        this.gateway = gateway;
        this.type = type;
        this.vlanId = vlan_id;
        this.stp = stp;
        this.mtu = mtu;
        this.vmNetwork = vmNetwork;
    }

    public NetworkCluster getCluster() {
        return cluster;
    }

    public String getaddr() {
        return this.addr;
    }

    public void setaddr(String value) {
        this.addr = value;
    }

    public String getdescription() {
        return this.description;
    }

    public void setdescription(String value) {
        this.description = value;
    }

    @Override
    public Guid getId() {
        return this.id;
    }

    @Override
    public void setId(Guid value) {
        this.id = value;
    }

    // Ignored by Jackson so only the legacy getname() below is serialized.
    @JsonIgnore
    @Override
    public String getName() {
        return this.name;
    }

    // remove this in a cleanup patch
    @JsonProperty
    public String getname() {
        return this.name;
    }

    public void setname(String value) {
        this.name = value;
    }

    public String getsubnet() {
        return this.subnet;
    }

    public void setsubnet(String value) {
        this.subnet = value;
    }

    public String getgateway() {
        return this.gateway;
    }

    public void setgateway(String value) {
        this.gateway = value;
    }

    public Integer gettype() {
        return this.type;
    }

    public void settype(Integer value) {
        this.type = value;
    }

    public Integer getvlan_id() {
        return this.vlanId;
    }

    public void setvlan_id(Integer value) {
        this.vlanId = value;
    }

    public boolean getstp() {
        return this.stp;
    }

    public void setstp(boolean value) {
        this.stp = value;
    }

    public NGuid getstorage_pool_id() {
        return this.storagePoolId;
    }

    public void setstorage_pool_id(NGuid value) {
        this.storagePoolId = value;
    }

    public void setCluster(NetworkCluster cluster) {
        this.cluster = cluster;
    }

    @Override
    public Object getQueryableId() {
        return getId();
    }

    // Property names exposed for change notification / querying.
    private static final java.util.ArrayList<String> _networkProperties = new java.util.ArrayList<String>(
            java.util.Arrays.asList(new String[] { "addr", "description", "name", "subnet", "type", "vlan_id",
                    "Status", "stp", "storage_pool_id", "gateway", "is_display", "mtu" }));

    @Override
    public java.util.ArrayList<String> getChangeablePropertiesList() {
        return _networkProperties;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append(getName())
                .append(" {id=")
                .append(getId())
                .append(", description=")
                .append(getdescription())
                .append(", subnet=")
                .append(getsubnet())
                .append(", gateway=")
                .append(getgateway())
                .append(", type=")
                .append(gettype())
                .append(", vlan_id=")
                .append(getvlan_id())
                .append(", stp=")
                .append(getstp())
                .append(", storage_pool_id=")
                .append(getstorage_pool_id())
                .append(", mtu=")
                .append(getMtu())
                .append(", vmNetwork=")
                .append(isVmNetwork())
                .append(", cluster=")
                .append(getCluster())
                .append("}");
        return builder.toString();
    }

    // hashCode/equals are kept consistent with each other: both skip the
    // 'cluster' field (see FIXME below) and both cover all other fields.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((addr == null) ? 0 : addr.hashCode());
        //FIXME: remove cluster from hashCode calculation - breaks the tests when working in JDBC template mode
        /*
        result = prime * result + ((cluster == null) ? 0 : cluster.hashCode());
        */
        result = prime * result + ((description == null) ? 0 : description.hashCode());
        result = prime * result + ((gateway == null) ? 0 : gateway.hashCode());
        result = prime * result + ((id == null) ? 0 : id.hashCode());
        result = prime * result + ((name == null) ? 0 : name.hashCode());
        result = prime * result + ((storagePoolId == null) ? 0 : storagePoolId.hashCode());
        result = prime * result + (stp ? 1231 : 1237);
        result = prime * result + ((subnet == null) ? 0 : subnet.hashCode());
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        result = prime * result + ((vlanId == null) ? 0 : vlanId.hashCode());
        result = prime * result + (mtu);
        result = prime * result + ((vmNetwork) ? 11 : 13);
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Network other = (Network) obj;
        if (addr == null) {
            if (other.addr != null)
                return false;
        } else if (!addr.equals(other.addr))
            return false;
        //FIXME: currently removing cluster from equals, tests are failing
        /*
        if (cluster == null) {
            if (other.cluster != null)
                return false;
        } else if (!cluster.equals(other.cluster))
            return false;
        */
        if (description == null) {
            if (other.description != null)
                return false;
        } else if (!description.equals(other.description))
            return false;
        if (gateway == null) {
            if (other.gateway != null)
                return false;
        } else if (!gateway.equals(other.gateway))
            return false;
        if (id == null) {
            if (other.id != null)
                return false;
        } else if (!id.equals(other.id))
            return false;
        if (name == null) {
            if (other.name != null)
                return false;
        } else if (!name.equals(other.name))
            return false;
        if (storagePoolId == null) {
            if (other.storagePoolId != null)
                return false;
        } else if (!storagePoolId.equals(other.storagePoolId))
            return false;
        if (stp != other.stp)
            return false;
        if (subnet == null) {
            if (other.subnet != null)
                return false;
        } else if (!subnet.equals(other.subnet))
            return false;
        if (type == null) {
            if (other.type != null)
                return false;
        } else if (!type.equals(other.type))
            return false;
        if (vlanId == null) {
            if (other.vlanId != null)
                return false;
        } else if (!vlanId.equals(other.vlanId))
            return false;
        if (mtu != other.mtu)
            return false;
        if (vmNetwork != other.vmNetwork) {
            return false;
        }
        return true;
    }

    public int getMtu() {
        return mtu;
    }

    public void setMtu(int mtu) {
        this.mtu = mtu;
    }

    public boolean isVmNetwork() {
        return vmNetwork;
    }

    public void setVmNetwork(boolean vmNetwork) {
        this.vmNetwork = vmNetwork;
    }
}
package TobleMiner.MineFight.GameEngine.Match;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.Sign;
import org.bukkit.entity.Arrow;
import org.bukkit.entity.Item;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.event.block.Action;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockDamageEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.entity.EntityChangeBlockEvent;
import org.bukkit.event.entity.EntityCombustEvent;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.entity.EntityExplodeEvent;
import org.bukkit.event.entity.FoodLevelChangeEvent;
import org.bukkit.event.entity.ItemDespawnEvent;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.event.entity.ProjectileHitEvent;
import org.bukkit.event.entity.ProjectileLaunchEvent;
import org.bukkit.event.player.AsyncPlayerChatEvent;
import org.bukkit.event.player.PlayerDropItemEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.player.PlayerPickupItemEvent;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.PlayerInventory;
import org.bukkit.util.Vector;
import TobleMiner.MineFight.Main;
import TobleMiner.MineFight.Configuration.Container.FlagContainer;
import TobleMiner.MineFight.Configuration.Container.Killstreak;
import TobleMiner.MineFight.Configuration.Container.KillstreakConfig;
import TobleMiner.MineFight.Configuration.Container.RadioStationContainer;
import TobleMiner.MineFight.Configuration.Weapon.WeaponDescriptor;
import TobleMiner.MineFight.Configuration.Weapon.WeaponDescriptor.DamageType;
import TobleMiner.MineFight.Configuration.Weapon.WeaponDescriptor.WeaponUseType;
import TobleMiner.MineFight.Configuration.Weapon.WeaponIndex;
import TobleMiner.MineFight.Debug.Debugger;
import TobleMiner.MineFight.ErrorHandling.Error;
import TobleMiner.MineFight.ErrorHandling.ErrorReporter;
import TobleMiner.MineFight.ErrorHandling.ErrorSeverity;
import TobleMiner.MineFight.GameEngine.GameEngine;
import TobleMiner.MineFight.GameEngine.Match.Gamemode.Gamemode;
import TobleMiner.MineFight.GameEngine.Match.Gamemode.Conquest.Flag;
import TobleMiner.MineFight.GameEngine.Match.Gamemode.Rush.RadioStation;
import TobleMiner.MineFight.GameEngine.Match.Spawning.Spawnengine;
import TobleMiner.MineFight.GameEngine.Match.Statistics.StatHandler;
import TobleMiner.MineFight.GameEngine.Match.Team.Team;
import TobleMiner.MineFight.GameEngine.Match.Team.TeamBlue;
import TobleMiner.MineFight.GameEngine.Match.Team.TeamRed;
import TobleMiner.MineFight.GameEngine.Player.PVPPlayer;
import TobleMiner.MineFight.GameEngine.Player.PVPPlayer.HitZone;
import TobleMiner.MineFight.GameEngine.Player.CombatClass.CombatClass;
import TobleMiner.MineFight.GameEngine.Player.Info.InformationSign;
import TobleMiner.MineFight.GameEngine.Player.Resupply.ResupplyStation;
import TobleMiner.MineFight.Protection.Area3D;
import TobleMiner.MineFight.Util.Location.TeleportUtil;
import TobleMiner.MineFight.Util.Protection.ProtectionUtil;
import TobleMiner.MineFight.Util.SyncDerp.EffectSyncCalls;
import TobleMiner.MineFight.Util.SyncDerp.EntitySyncCalls;
import TobleMiner.MineFight.Util.SyncDerp.InventorySyncCalls;
import TobleMiner.MineFight.Weapon.Projectile.Projectile;
import TobleMiner.MineFight.Weapon.Projectile.SimpleProjectile;
import TobleMiner.MineFight.Weapon.Projectile.WeaponProjectile;
import TobleMiner.MineFight.Weapon.TickControlled.IMS;
import TobleMiner.MineFight.Weapon.TickControlled.TickControlledWeapon;
import TobleMiner.MineFight.Weapon.TickControlled.Missile.Missile;
import TobleMiner.MineFight.Weapon.TickControlled.Missile.PlayerSeeker;
public class Match
{
 // --- Immutable match identity ---
 public final Gamemode gmode;   // gamemode this match runs (Conquest, Rush, Teamdeathmatch, ...)
 public final World world;      // Bukkit world the match takes place in
 public final String name;      // match name shown in join/leave messages
 // --- Teams and rosters (reset in endMatch) ---
 private TeamRed teamRed = new TeamRed();
 private TeamBlue teamBlue = new TeamBlue();
 private List<PVPPlayer> playersBlue = new ArrayList<PVPPlayer>();
 private List<PVPPlayer> playersRed = new ArrayList<PVPPlayer>();
 private final boolean hardcore;  // when true, friendly fire kills are allowed (see canKill)
 // --- Spawn/leave areas read from configuration in the constructor ---
 private final Area3D classSelectArea;  // where players pick a class before spawning
 private final Area3D spawnArea;        // generic spawn (Teamdeathmatch fallback)
 private final Area3D spawnAreaRed;
 private final Area3D spawnAreaBlue;
 private final Location matchLeaveLoc;  // players are teleported here on leave/match end
 // --- Tick-driven objects updated from doUpdate ---
 private final List<TickControlledWeapon> ltcw = new ArrayList<TickControlledWeapon>();
 private List<InformationSign> infSs = new ArrayList<InformationSign>();
 private List<Flag> flags = new ArrayList<Flag>();      // Conquest capture points
 private int flagsRed = 0;   // flags currently owned by red (recomputed each tick)
 private int flagsBlue = 0;  // flags currently owned by blue (recomputed each tick)
 private List<RadioStation> radioStations = new ArrayList<RadioStation>();  // Rush objectives
 private Iterator<RadioStation> radioStationIterator;   // walks stations as they are destroyed
 private RadioStation activeRadioStation;               // current Rush objective, null when none left
 private int timer = 1;          // tick counter for the periodic info beacon
 private int beaconInterv;       // beacon interval in seconds (0 or less disables it)
 public final StatHandler sh;
 private final KillstreakConfig kcconf;
 private final HashMap<Item,IMS> imss = new HashMap<Item,IMS>();          // dropped-redstone IMS weapons, keyed by item entity
 private final HashMap<Arrow,Missile> missiles = new HashMap<Arrow,Missile>();  // NOTE(review): not referenced in this chunk — confirm usage elsewhere
 private final List<ResupplyStation> resupplyStations = new ArrayList<ResupplyStation>();
 private final HashMap<Arrow, Projectile> projectiles = new HashMap<Arrow, Projectile>();  // in-flight arrows fired by players
 public final boolean damageEnviron;        // may players damage the environment at all
 private final boolean exploDamageEnviron;  // may explosions damage the environment
 public final WeaponIndex weapons;          // weapon definitions for this match
 private final ProtectionUtil protection = new ProtectionUtil();  // NOTE(review): not referenced in this chunk — confirm usage elsewhere
 private final Spawnengine spawnengine;     // picks safe spawn points
 /**
  * Builds a match for the given world and gamemode, reading all tunables
  * (spawn areas, killstreaks, environment-damage flags, beacon interval)
  * from the global configuration.
  *
  * @param infoSigns     signs that will display live match information
  * @param flags         Conquest flag definitions (ignored in other modes)
  * @param radioStations Rush objective definitions (ignored in other modes)
  */
 public Match(World world, Gamemode gmode, String name, boolean hardcore, WeaponIndex weapons, List<Sign> infoSigns, List<FlagContainer> flags, List<RadioStationContainer> radioStations, StatHandler sh)
 {
  this.sh = sh;
  this.world = world;
  this.gmode = gmode;
  this.name = name;
  this.hardcore = hardcore;
  this.weapons = weapons;
  this.matchLeaveLoc = Main.gameEngine.configuration.getRoundEndSpawnForWorld(world);
  this.spawnArea = Main.gameEngine.configuration.getSpawnForWorld(world);
  this.spawnAreaRed = Main.gameEngine.configuration.getSpawnForWorldRed(world);
  this.spawnAreaBlue = Main.gameEngine.configuration.getSpawnForWorldBlue(world);
  this.classSelectArea = Main.gameEngine.configuration.getRespawnForWorld(world);
  this.kcconf = Main.gameEngine.configuration.getKillstreaks(world, gmode);
  // Initial ticket counts depend on the gamemode: Conquest gives both teams
  // the configured budget, Rush gives blue one "ticket" per radio station.
  if(gmode.equals(Gamemode.Conquest))
  {
   this.teamRed.setPoints(Main.gameEngine.configuration.getPointsForGamemodeInWorld(world,gmode));
   this.teamBlue.setPoints(Main.gameEngine.configuration.getPointsForGamemodeInWorld(world,gmode));
  }
  else if(gmode.equals(Gamemode.Rush))
  {
   this.teamRed.setPoints(Main.gameEngine.configuration.getPointsForGamemodeInWorld(world,gmode));
   this.teamBlue.setPoints(radioStations.size());
  }
  else
  {
   // Teamdeathmatch (and anything else) counts points upward from zero.
   this.teamRed.setPoints(0);
   this.teamBlue.setPoints(0);
  }
  // Wrap each configured sign in an InformationSign updated every tick.
  for(Sign s : infoSigns)
  {
   InformationSign infS = new InformationSign(this,s);
   infSs.add(infS);
  }
  // Gamemode-specific objectives.
  if(gmode.equals(Gamemode.Conquest))
  {
   double dist = Main.gameEngine.configuration.getFlagCaptureDistance(world);
   double speed = Main.gameEngine.configuration.getFlagCaptureSpeed(world);
   double accel = Main.gameEngine.configuration.getFlagCaptureAcceleration(world);
   for(FlagContainer fc : flags)
   {
    Flag flag = new Flag(fc,this,dist,speed,accel);
    this.flags.add(flag);
   }
  }
  else if(gmode.equals(Gamemode.Rush))
  {
   double radioStationExploTime = Main.gameEngine.configuration.getRadioStationDestructTime(world);
   for(RadioStationContainer s : radioStations)
   {
    RadioStation rs = new RadioStation(s, radioStationExploTime, this);
    this.radioStations.add(rs);
   }
   // Stations are attacked one at a time, in list order.
   radioStationIterator = this.radioStations.iterator();
   if(radioStationIterator.hasNext())
   {
    activeRadioStation = radioStationIterator.next();
   }
  }
  this.damageEnviron = Main.gameEngine.configuration.canEnvironmentBeDamaged(gmode, world);
  this.exploDamageEnviron = Main.gameEngine.configuration.canExlosionsDamageEnvironment(gmode, world);
  this.beaconInterv = Main.gameEngine.configuration.getInfoBeaconInterval(gmode, world);
  this.spawnengine = new Spawnengine(this);
 }
public boolean canEnvironmentBeDamaged()
{
return this.damageEnviron;
}
public boolean canExplosionsDamageEnvironment()
{
return this.exploDamageEnviron;
}
public World getWorld()
{
return world;
}
 /**
  * Adds a player to this match: broadcasts the join, assigns a team
  * (auto-balancing to the smaller side, random on a tie), wires up
  * name-tag visibility, teleports the player to the class-selection area
  * and stores their previous inventory.
  *
  * @param p the joining Bukkit player
  * @return a colored status message to show to the player
  */
 public String join(Player p)
 {
  // Refuse a double-join across any match.
  if(Main.gameEngine.getPlayerExact(p) != null)
  {
   return ChatColor.DARK_RED+Main.gameEngine.dict.get("alreadyJoined");
  }
  this.sendTeamMessage(null, ChatColor.GOLD+String.format(Main.gameEngine.dict.get("joinMsg"),p.getName()));
  int membersRed = playersRed.size();
  int membersBlue = playersBlue.size();
  Random rand = new Random();
  // Tie-break randomly; note nextInt() % 2 may be -1, 0 or 1, so "== 0" is a
  // roughly 50/50 coin flip.
  Team team = ((rand.nextInt() % 2 == 0) ? teamRed : teamBlue);
  if(membersRed != membersBlue)
  {
   // Auto-balance: always join the smaller team.
   if(membersRed > membersBlue)
   {
    team = teamBlue;
   }
   else
   {
    team = teamRed;
   }
  }
  PVPPlayer player = new PVPPlayer(p, team, this, Bukkit.getServer().createMap(world));
  p.setDisplayName(player.getName());
  p.setCustomName(player.getName());
  p.setCustomNameVisible(true);
  // Exchange name-change packets with everyone already in the match.
  for(PVPPlayer watcher : this.getPlayers())
  {
   Main.plsl.sendNamechange(player, watcher, false);
   Main.plsl.sendNamechange(watcher, player, false);
  }
  if(team == teamRed)
  {
   playersRed.add(player);
  }
  else
  {
   playersBlue.add(player);
  }
  p.teleport(this.classSelectArea.pickRandomPoint());
  Main.plsl.registerPlayer(p, player);
  // Save the pre-match inventory so it can be restored on leave.
  player.storeInventory();
  Main.gameEngine.weaponRegistry.playerJoined(this, player);
  return ChatColor.DARK_GREEN+String.format(Main.gameEngine.dict.get("persJoinMsg"), this.name);
 }
public PVPPlayer getPlayerByName(String name)
{
for(int i=0;i<playersRed.size();i++)
{
PVPPlayer p = playersRed.get(i);
if(p.getName().equals(name))
{
return p;
}
}
for(int i=0;i<playersBlue.size();i++)
{
PVPPlayer p = playersBlue.get(i);
if(p.getName().equals(name))
{
return p;
}
}
return null;
}
public PVPPlayer getPlayerExact(Player player)
{
for(int i=0;i<playersRed.size();i++)
{
PVPPlayer p = playersRed.get(i);
if(p.thePlayer.equals(player))
{
return p;
}
}
for(int i=0;i<playersBlue.size();i++)
{
PVPPlayer p = playersBlue.get(i);
if(p.thePlayer.equals(player))
{
return p;
}
}
return null;
}
public boolean anounceTeamchange(PVPPlayer player,Team from,Team to)
{
if(from == to)
{
return true;
}
for(PVPPlayer watcher : this.getPlayers())
{
Main.plsl.sendNamechange(player, watcher, false);
}
Main.gameEngine.weaponRegistry.playerChangedTeam(this, player);
if(from == teamRed)
{
playersRed.remove(player);
playersBlue.add(player);
return true;
}
if(from == teamBlue)
{
playersBlue.remove(player);
playersRed.add(player);
return true;
}
return false;
}
 /**
  * Removes a player from the match: unregisters them, clears their roster
  * entry, restores their vanilla name/inventory, broadcasts the leave and
  * notifies the weapon registry.
  */
 public void anouncePlayerLeave(PVPPlayer player)
 {
  Main.plsl.unregisterPlayer(player.thePlayer);
  if(player.getTeam() == teamRed)
  {
   playersRed.remove(player);
  }
  else
  {
   playersBlue.remove(player);
  }
  // Restore the vanilla display name and hide the custom name tag.
  player.thePlayer.setDisplayName(player.thePlayer.getName());
  player.thePlayer.setCustomName(player.thePlayer.getName());
  player.thePlayer.setCustomNameVisible(false);
  // Wipe the match kit (including armor) before restoring the stored inventory.
  player.thePlayer.getInventory().clear();
  player.thePlayer.getInventory().setHelmet(null);
  player.thePlayer.getInventory().setChestplate(null);
  player.thePlayer.getInventory().setLeggings(null);
  player.thePlayer.getInventory().setBoots(null);
  player.loadInventory();
  this.sendTeamMessage(null,ChatColor.GOLD+String.format(Main.gameEngine.dict.get("matchLeaveBroad"),player.getName()));
  // Revert the name-change packets for everyone still in the match.
  for(PVPPlayer watcher : this.getPlayers())
  {
   Main.plsl.sendNamechange(player, watcher, true);
   Main.plsl.sendNamechange(watcher, player, true);
  }
  Main.gameEngine.weaponRegistry.playerLeft(this, player);
 }
public boolean isHardcore()
{
return hardcore;
}
 /** Assigns the player to the given team (delegates to the player object). */
 public void setTeam(PVPPlayer p,Team t)
 {
  p.setTeam(t);
 }
 /**
  * Decides whether a kill between these two players counts.
  * Environmental deaths (null killer) always count; otherwise both players
  * must be spawned and either be on different teams or the match must be
  * hardcore (friendly fire allowed).
  */
 public boolean canKill(PVPPlayer killer,PVPPlayer victim)
 {
  if(killer == null)
  {
   Debugger.writeDebugOut("canKill: killer is null");
   return true;
  }
  if(killer.isSpawned() && victim.isSpawned())
  {
   Debugger.writeDebugOut(String.format("\"%s\" canKill \"%s\", teams: %s and %s",killer.getName(),victim.getName(),killer.getTeam().getName(),victim.getTeam().getName()));
   Debugger.writeDebugOut(String.format("\"%s\" canKill \"%s\", teams: %s and %s",killer.getName(),victim.getName(),killer.getTeam().toString(),victim.getTeam().toString()));
   Debugger.writeDebugOut(Boolean.toString(killer.getTeam() != victim.getTeam() || this.hardcore));
   return (killer.getTeam() != victim.getTeam() || this.hardcore);
  }
  return false;
 }
public void kill(PVPPlayer killer, PVPPlayer victim,String weapon, boolean doKill)
{
kill(killer, victim, weapon, doKill, false);
}
 /**
  * Processes a kill: optionally applies the lethal damage, broadcasts the
  * kill message, awards/subtracts team points per gamemode and notifies
  * the weapon registry.
  *
  * @param killer   the killing player, or null for an environmental death
  * @param victim   the player who died
  * @param weapon   weapon name shown in the kill message
  * @param doKill   when true, actually set the victim's health to 0 here
  * @param headshot when true, the kill message is tagged as a headshot
  */
 public void kill(PVPPlayer killer, PVPPlayer victim, String weapon, boolean doKill, boolean headshot)
 {
  if(doKill)
  {
   // Suppress the vanilla death handling while we kill the player ourselves.
   victim.normalDeathBlocked = true;
  }
  if(killer != null)
  {
   if(killer.isSpawned() && victim.isSpawned())
   {
    if(doKill)
    {
     victim.thePlayer.setHealth(0);
    }
    victim.onKill(killer);
    victim.setSpawned(false);
    if(headshot)
    {
     weapon += " - "+Main.gameEngine.dict.get("headshot");
    }
    if(killer == victim)
    {
     this.broadcastMessage(killer.getName()+" ["+weapon+"] "+Main.gameEngine.dict.get("suicide"));
    }
    else
    {
     // Teamdeathmatch awards a point per kill; other modes only drain tickets.
     if(gmode.equals(Gamemode.Teamdeathmatch))
     {
      killer.getTeam().addPoints(1);
     }
     killer.killed();
     this.checkKillstreak(killer);
     this.broadcastMessage(killer.getName()+" ["+weapon+"] "+victim.getName());
    }
   }
  }
  else
  {
   // Environmental death (no killer).
   if(doKill)
   {
    victim.thePlayer.setHealth(0);
   }
   victim.onKill(killer);
   victim.setSpawned(false);
   this.broadcastMessage(victim.getName()+" "+Main.gameEngine.dict.get("died"));
  }
  // Ticket drain: every death costs a ticket in Conquest; in Rush only the
  // attacking (red) team loses tickets.
  if(gmode.equals(Gamemode.Conquest))
  {
   victim.getTeam().subPoints(1);
  }
  else if(gmode.equals(Gamemode.Rush))
  {
   if(victim.getTeam().equals(teamRed))
   {
    victim.getTeam().subPoints(1);
   }
  }
  Main.gameEngine.weaponRegistry.playerKilled(this, killer, victim);
  Main.gameEngine.weaponRegistry.playerDied(this, victim, killer);
  victim.normalDeathBlocked = false;
 }
 /**
  * Per-tick match update: updates every player, tick-controlled weapon,
  * info sign, resupply station and objective; applies Conquest flag ticket
  * drain; checks win conditions; and periodically broadcasts the score
  * beacon. Any exception is caught and reported so one bad tick cannot
  * kill the game loop.
  */
 public void doUpdate()
 {
  try
  {
   // Indexed loops are used throughout so that concurrent removals from the
   // backing lists during the update do not throw.
   for(int i=0;i<playersBlue.size();i++)
   {
    PVPPlayer p = playersBlue.get(i);
    p.doUpdate();
   }
   for(int i=0;i<playersRed.size();i++)
   {
    PVPPlayer p = playersRed.get(i);
    p.doUpdate();
   }
   for(int i=0;i<ltcw.size();i++)
   {
    TickControlledWeapon tcw = ltcw.get(i);
    tcw.doUpdate();
   }
   for(int i=0;i<infSs.size();i++)
   {
    InformationSign infS = infSs.get(i);
    infS.doUpdate();
   }
   try
   {
    for(int i=0;i<resupplyStations.size();i++)
    {
     ResupplyStation rs = resupplyStations.get(i);
     rs.doUpdate();
    }
   }
   catch(Exception ex)
   {
    ex.printStackTrace();
   }
   if(gmode.equals(Gamemode.Conquest))
   {
    // Recount flag ownership, then drain tickets from the team holding
    // fewer flags (or fewer than half, depending on configuration).
    int tmpflagsRed = 0;
    int tmpflagsBlue = 0;
    double pointLossPerFlagPerSecond = Main.gameEngine.configuration.getPointlossPerFlagPerSecond(world);
    boolean needntHaveHalfFlags = Main.gameEngine.configuration.getLosePointsWhenEnemyHasLessThanHalfFlags(world);
    for(int i=0;i<flags.size();i++)
    {
     Flag flag = flags.get(i);
     flag.doUpdate();
     Team flagOwner = flag.getOwner();
     if(flagOwner != null)
     {
      if(flagOwner.equals(teamRed))
      {
       tmpflagsRed++;
      }
      else if(flagOwner.equals(teamBlue))
      {
       tmpflagsBlue++;
      }
     }
    }
    flagsRed = tmpflagsRed;
    flagsBlue = tmpflagsBlue;
    if(flagsRed != flagsBlue)
    {
     int flagDiff = Math.abs(flagsRed-flagsBlue);
     if(needntHaveHalfFlags)
     {
      // NOTE(review): this branch divides by 100 while the branch below
      // divides by GameEngine.tps — confirm that asymmetry is intended.
      if(flagsRed > flagsBlue)
      {
       teamBlue.subPoints(pointLossPerFlagPerSecond/100d*flagDiff);
      }
      else
      {
       teamRed.subPoints(pointLossPerFlagPerSecond/100d*flagDiff);
      }
     }
     else
     {
      if(((double)flagsRed) > ((double)this.getFlagNum())/2d)
      {
       teamBlue.subPoints(pointLossPerFlagPerSecond/GameEngine.tps*(((double)flagsRed)-((double)this.getFlagNum())/2d));
      }
      else if(((double)flagsBlue) > ((double)this.getFlagNum())/2d)
      {
       teamRed.subPoints(pointLossPerFlagPerSecond/GameEngine.tps*(((double)flagsBlue)-((double)this.getFlagNum())/2d));
      }
     }
    }
   }
   else if(gmode.equals(Gamemode.Rush))
   {
    for(int i=0;i<radioStations.size();i++)
    {
     RadioStation rs = radioStations.get(i);
     rs.doUpdate();
    }
   }
   // Win conditions: TDM races up to a point limit; Conquest/Rush end when
   // a team's tickets hit zero.
   if(gmode.equals(Gamemode.Teamdeathmatch))
   {
    double pointLimit = Main.gameEngine.configuration.getPointsForGamemodeInWorld(world, gmode);
    if(teamRed.getPoints() >= pointLimit)
    {
     this.win(teamRed);
    }
    else if(teamBlue.getPoints() >= pointLimit)
    {
     this.win(teamBlue);
    }
   }
   else if(gmode.equals(Gamemode.Conquest) || gmode.equals(Gamemode.Rush))
   {
    if(teamRed.getPoints() <= 0d)
    {
     this.win(teamBlue);
    }
    else if(teamBlue.getPoints() <= 0d)
    {
     this.win(teamRed);
    }
   }
   // Periodic score beacon (beaconInterv is in seconds, timer in ticks).
   if(beaconInterv > 0)
   {
    if(timer > ((double)this.beaconInterv)*GameEngine.tps)
    {
     this.beaconInterv = Main.gameEngine.configuration.getInfoBeaconInterval(gmode, world);
     timer = 0;
     this.sendTeamMessage(null,ChatColor.GOLD+String.format("%s: %s",Main.gameEngine.dict.get("tickets"),teamRed.color+Integer.toString((int)Math.round(teamRed.getPoints()))+ChatColor.RESET+" | "+teamBlue.color+Integer.toString((int)Math.round(teamBlue.getPoints()))));
     if(gmode.equals(Gamemode.Conquest))
     {
      this.sendTeamMessage(null,ChatColor.GOLD+String.format("%s: %s",Main.gameEngine.dict.get("Flags"),teamRed.color+Integer.toString(this.getFlagsRed())+ChatColor.RESET+" | "+teamBlue.color+Integer.toString(this.getFlagsBlue())));
      if(this.getFlagNum() - this.getFlagsRed() - this.getFlagsBlue() > 0)
      {
       this.sendTeamMessage(null,ChatColor.GOLD+String.format(Main.gameEngine.dict.get("uncapped"),Integer.toString(this.getFlagNum() - this.getFlagsRed() - this.getFlagsBlue())));
      }
     }
    }
    timer++;
   }
   Main.gameEngine.weaponRegistry.onTick(this);
  }
  catch(Exception ex)
  {
   Error error = new Error("Internal error!", "Error while performing tick-update: "+ex.getMessage(), "This is most certainly the developers fault. Please report this error to me.", this.getClass().getName(), ErrorSeverity.WARNING);
   ex.printStackTrace();
   ErrorReporter.reportError(error);
  }
 }
public void broadcastMessage(String message)
{
for(int i=0;i<playersRed.size();i++)
{
PVPPlayer player = playersRed.get(i);
player.thePlayer.sendMessage(message);
}
for(int i=0;i<playersBlue.size();i++)
{
PVPPlayer player = playersBlue.get(i);
player.thePlayer.sendMessage(message);
}
}
 /**
  * Handles item drops inside the match. Dropping redstone while holding the
  * IMS killstreak deploys an IMS at the item; any other drop is cancelled
  * when the configuration forbids item drops.
  */
 public void playerDroppedItem(PlayerDropItemEvent event)
 {
  PVPPlayer player = this.getPlayerExact(event.getPlayer());
  Item is = event.getItemDrop();
  if(player != null && player.isSpawned())
  {
   if(is.getItemStack().getType().equals(Material.REDSTONE))
   {
    if(player.killstreaks.contains(Killstreak.IMS))
    {
     // Consume the killstreak and deploy the IMS on the dropped item.
     player.killstreaks.remove(Killstreak.IMS);
     double triggerDist = Main.gameEngine.configuration.getIMSTriggerDist();
     int grenades = Main.gameEngine.configuration.getIMSShots();
     IMS ims = new IMS(this, is, triggerDist, grenades, player);
     this.imss.put(is, ims);
    }
    // NOTE(review): redstone dropped WITHOUT the killstreak is never
    // cancelled here, unlike other items — confirm that is intended.
   }
   else
   {
    event.setCancelled(Main.gameEngine.configuration.getPreventItemDrop(world, gmode));
   }
  }
  else
  {
   event.setCancelled(Main.gameEngine.configuration.getPreventItemDrop(world, gmode));
  }
  Main.gameEngine.weaponRegistry.executeEvent(this, event);
 }
public void playerPickUpItem(PlayerPickupItemEvent event)
{
Item is = event.getItem();
PVPPlayer player = this.getPlayerExact(event.getPlayer());
if(player != null && player.isSpawned())
{
if(is.getItemStack().getType().equals(Material.REDSTONE))
{
IMS ims = imss.get(is);
if(ims != null)
{
event.setCancelled(true);
}
}
}
else
event.setCancelled(true);
Main.gameEngine.weaponRegistry.executeEvent(this, event);
}
 /** Forwards item-despawn events to the weapon registry. */
 public void itemDespawn(ItemDespawnEvent event)
 {
  Main.gameEngine.weaponRegistry.executeEvent(this, event);
 }
private void checkKillstreak(PVPPlayer player)
{
Killstreak ks = this.kcconf.getKillstreak(player.killstreak);
if(ks != null && ks != Killstreak.NONE)
{
player.addKillstreak(ks);
}
}
public void endMatch()
{
for(int i=0;i<playersRed.size();i++)
{
PVPPlayer p = playersRed.get(i);
p.thePlayer.sendMessage(ChatColor.DARK_GREEN+String.format(Main.gameEngine.dict.get("matchend"),p.getPoints()));
p.leaveMatch(matchLeaveLoc);
}
for(int i=0;i<playersBlue.size();i++)
{
PVPPlayer p = playersBlue.get(i);
p.thePlayer.sendMessage(ChatColor.DARK_GREEN+String.format(Main.gameEngine.dict.get("matchend"),p.getPoints()));
p.leaveMatch(matchLeaveLoc);
}
if(this.canEnvironmentBeDamaged() || this.canExplosionsDamageEnvironment())
{
}
teamRed = new TeamRed();
teamBlue = new TeamBlue();
playersBlue = new ArrayList<PVPPlayer>();
playersRed = new ArrayList<PVPPlayer>();
infSs = new ArrayList<InformationSign>();
flags = new ArrayList<Flag>();
imss.clear();
Main.gameEngine.weaponRegistry.matchEnded(this);
Main.gameEngine.removeMatch(this);
}
 /**
  * Handles Bukkit death events for match participants: optionally clears
  * drops, suppresses the vanilla death message and — when the death was not
  * already processed by {@link #kill} — credits the kill to the last
  * attacker.
  */
 public void playerDeath(PlayerDeathEvent event)
 {
  Player entity = event.getEntity();
  List<ItemStack> drops = event.getDrops();
  String deathMessage = event.getDeathMessage();
  PVPPlayer player = this.getPlayerExact(entity);
  if(player != null)
  {
   Debugger.writeDebugOut("Player "+player.getName()+" died.");
   if(Main.gameEngine.configuration.getPreventItemDropOnDeath(this.world, this.gmode))
   {
    if(drops != null)
    {
     drops.clear();
    }
   }
   if(player.normalDeathBlocked)
   {
    // kill() already handled this death — only mute the vanilla message.
    event.setDeathMessage("");
   }
   else
   {
    Player killer = entity.getKiller();
    PVPPlayer PVPkiller = null;
    if(killer != null)
    {
     PVPkiller = this.getPlayerExact(killer);
    }
    String weapon = Main.gameEngine.dict.get("killed");
    // NOTE(review): the lower-cased message is never used afterwards —
    // dead assignment, possibly a leftover from removed parsing logic.
    deathMessage = deathMessage.toLowerCase();
    if(this.canKill(PVPkiller, player))
    {
     this.kill(PVPkiller, player, weapon, false);
    }
    event.setDeathMessage("");
   }
  }
  else
  {
   // Not one of ours — leave the vanilla death message untouched.
   event.setDeathMessage(deathMessage);
  }
  Main.gameEngine.weaponRegistry.executeEvent(this, event);
 }
public boolean playerDamage(Player entity, DamageCause damageCause)
{
PVPPlayer player = this.getPlayerExact(entity);
if(player != null)
{
if(player.isSpawned())
{
if(damageCause.equals(DamageCause.FALL))
{
return !Main.gameEngine.configuration.isFalldamageActive(world, gmode);
}
else if(damageCause.equals(DamageCause.ENTITY_EXPLOSION) || damageCause.equals(DamageCause.BLOCK_EXPLOSION))
{
return true;
}
else
{
return false;
}
}
}
return true;
}
 /**
  * Handles a right-click on a control sign while the player is in the
  * class-selection phase (spawned players are ignored). Line 2 of the sign
  * is either the configured join command (spawns the player) or the name of
  * a combat class to equip.
  */
 public void rightClickSign(Player p, Block clickedBlock)
 {
  PVPPlayer player = this.getPlayerExact(p);
  if(player != null)
  {
   if(player.isSpawned())
   {
    return;
   }
   Block b = clickedBlock;
   Sign sign = (Sign)b.getState();
   String cmd = sign.getLines()[1];
   String joinCmd = Main.gameEngine.configuration.config.getString("GameControl.Sign.joinCmd");
   if(cmd.equalsIgnoreCase(joinCmd))
   {
    player.thePlayer.sendMessage(ChatColor.GOLD+String.format(Main.gameEngine.dict.get("spawnmsg"),player.getTeam().color+player.getTeam().getName().toUpperCase()+ChatColor.RESET));
    this.spawnPlayer(player);
   }
   else
   {
    // Otherwise treat the sign line as a combat class name.
    CombatClass cc = Main.gameEngine.combatClasses.get(cmd.toLowerCase().trim());
    if(cc != null && player.getCombatClass() != cc)
    {
     this.setCombatClass(cc,player);
    }
   }
  }
 }
 /**
  * Equips the player with the given combat class: replaces their inventory
  * with the class kit and armor, and in Rush hands the attackers a compass
  * pointing at the active radio station plus arming torches.
  */
 private void setCombatClass(CombatClass cc, PVPPlayer player)
 {
  String name = cc.name;
  List<ItemStack> lis = cc.kit;
  PlayerInventory pi = player.thePlayer.getInventory();
  InventorySyncCalls.clear(pi);
  player.thePlayer.updateInventory();
  for(ItemStack is : lis)
  {
   InventorySyncCalls.addItemStack(pi, is);
  }
  pi.setHelmet(cc.armor[0]);
  pi.setChestplate(cc.armor[1]);
  pi.setLeggings(cc.armor[2]);
  pi.setBoots(cc.armor[3]);
  if(gmode.equals(Gamemode.Rush) || gmode.equals(Gamemode.Conquest))
  {
   if(gmode.equals(Gamemode.Rush))
   {
    InventorySyncCalls.addItemStack(pi, new ItemStack(Material.COMPASS,1));
    InventorySyncCalls.addItemStack(pi, new ItemStack(Material.REDSTONE_TORCH_ON,64));
    if(this.activeRadioStation != null)
    {
     player.thePlayer.setCompassTarget(activeRadioStation.getLocation());
    }
   }
  }
  player.thePlayer.updateInventory(); //Seems NOT to work without updateInventory
  player.thePlayer.sendMessage(String.format(ChatColor.DARK_GREEN+Main.gameEngine.dict.get("classchange"),name));
  player.setCombatClass(cc);
 }
 /**
  * Spawns the player into the running match: picks a safe spawn location,
  * optionally hands out the minimap, adds killstreak trigger items and
  * teleports the player.
  */
 private void spawnPlayer(PVPPlayer player)
 {
  player.setSpawned(true);
  Location loc = TeleportUtil.getSafeTeleportLocation(this.spawnengine.findSafeSpawn(this.getSpawnLoc(player), player));
  player.hasMap = Main.gameEngine.configuration.isMinimapEnabled(this.world);
  if(player.hasMap)
  {
   // Center the minimap on the spawn point and put the map item in the inventory.
   player.getMapView().setCenterX(loc.getBlockX());
   player.getMapView().setCenterZ(loc.getBlockZ());
   InventorySyncCalls.addItemStack(player.thePlayer.getInventory(), new ItemStack(Material.MAP,1,player.getMapView().getId()));
   player.thePlayer.updateInventory();
  }
  // Earned killstreaks are triggered by dropping/using these marker items.
  for(Killstreak ks : player.killstreaks)
  {
   Inventory i = player.thePlayer.getInventory();
   if(ks == Killstreak.IMS) InventorySyncCalls.addItemStack(i,new ItemStack(Material.REDSTONE,1));
   if(ks == Killstreak.PLAYERSEEKER) InventorySyncCalls.addItemStack(i, new ItemStack(Material.STICK,1));
  }
  player.teleport(loc);
  Main.gameEngine.weaponRegistry.playerRespawned(this, player);
 }
 /**
  * Chooses a spawn location for the player depending on the gamemode:
  * Teamdeathmatch uses the generic spawn area; Rush spawns in a random
  * ring around the active radio station (attacker/defender radii differ);
  * Conquest spawns near a random flag owned by the player's team, falling
  * back to the per-team spawn areas when the team owns no flag. Candidate
  * locations are filtered toward the average objective height to avoid
  * spawning on extreme terrain.
  */
 private Location getSpawnLoc(PVPPlayer p)
 {
  if(this.gmode.equals(Gamemode.Teamdeathmatch))
  {
   return this.spawnArea.pickRandomPoint();
  }
  if(this.gmode.equals(Gamemode.Rush))
  {
   if(activeRadioStation != null)
   {
    Location baseLoc = activeRadioStation.getLocation();
    double inner = 0;
    double outer = 0;
    // Red is the attacking team in Rush; radii come from configuration.
    if(teamRed.equals(p.getTeam()))
    {
     inner = Main.gameEngine.configuration.getAttackerInnerSpawnRadius(world);
     outer = Main.gameEngine.configuration.getAttackerOuterSpawnRadius(world);
    }
    else
    {
     inner = Main.gameEngine.configuration.getDefenderInnerSpawnRadius(world);
     outer = Main.gameEngine.configuration.getDefenderOuterSpawnRadius(world);
    }
    // Pick a uniformly random angle and a radius in [inner, outer].
    Random rand = new Random();
    double angle = rand.nextDouble()*Math.PI*2d;
    double rad = inner+rand.nextDouble()*(outer-inner);
    double x = Math.sin(angle)*rad;
    double z = Math.cos(angle)*rad;
    double avgHeight = 0;
    for(RadioStation rs : radioStations)
    {
     avgHeight += rs.getLocation().getY();
    }
    avgHeight /= radioStations.size();
    Location loc = baseLoc.clone().add(x,5d,z);
    List<Location> locs = TeleportUtil.getSafeTeleportLocations(loc,200,this.activeRadioStation.spawnSky);
    if(locs.size() > 0)
    {
     // Prefer the candidate closest to the average station height.
     double hdiff = Double.MAX_VALUE;
     loc = locs.get(0);
     for(Location l : locs)
     {
      if(Math.abs(l.getY()-avgHeight) < hdiff)
      {
       hdiff = Math.abs(l.getY()-avgHeight);
       loc = l;
      }
     }
    }
    return loc;
   }
  }
  if(this.gmode.equals(Gamemode.Conquest))
  {
   // Collect the flags owned by the player's team.
   List<Flag> teamFlags = new ArrayList<Flag>();
   for(Flag f : flags)
   {
    if(p.getTeam() == f.getOwner())
    {
     teamFlags.add(f);
    }
   }
   if(teamFlags.size() > 0)
   {
    // Spawn in a 4–8 block ring around a random owned flag.
    Random rand = new Random();
    int flagNum = rand.nextInt(teamFlags.size());
    Flag flag = teamFlags.get(flagNum);
    Location baseLoc = flag.getLocation();
    double inner = 4d;
    double outer = 8d;
    double angle = rand.nextDouble()*Math.PI*2d;
    double rad = inner+rand.nextDouble()*(outer-inner);
    double x = Math.sin(angle)*rad;
    double z = Math.cos(angle)*rad;
    double avgHeight = 0;
    for(Flag f : flags)
    {
     avgHeight += f.getLocation().getY();
    }
    avgHeight /= flags.size();
    Location loc = baseLoc.clone().add(x,5d,z);
    List<Location> locs = TeleportUtil.getSafeTeleportLocations(loc, 200, flag.spawnSky);
    if(locs.size() > 0)
    {
     // Prefer the candidate closest to the average flag height.
     double hdiff = Double.MAX_VALUE;
     loc = locs.get(0);
     for(Location l : locs)
     {
      if(Math.abs(l.getY()-avgHeight) < hdiff)
      {
       hdiff = Math.abs(l.getY()-avgHeight);
       loc = l;
      }
     }
    }
    return loc;
   }
   else
   {
    // No owned flag: fall back to the fixed per-team spawn areas.
    if(p.getTeam() == this.teamRed)
     return this.spawnAreaRed.pickRandomPoint();
    else if(p.getTeam() == this.teamBlue)
     return this.spawnAreaBlue.pickRandomPoint();
   }
  }
  return this.spawnArea.pickRandomPoint();
 }
public void playerChangedWorld(Player player)
{
PVPPlayer p = this.getPlayerExact(player);
if(p != null)
{
p.leaveMatch(matchLeaveLoc);
}
}
public void playerQuit(Player p)
{
PVPPlayer player = this.getPlayerExact(p);
if(player != null)
{
player.leaveMatch(matchLeaveLoc);
}
}
 /**
  * Handles block placement by match players.
  *
  * Return codes (interpreted by the caller — NOTE(review): confirm against
  * the event listener): 0 appears to mean "allowed, radio station armed",
  * 1 "allowed", 2 "deny/cancel the placement".
  */
 public int blockPlace(BlockPlaceEvent event)
 {
  Main.gameEngine.weaponRegistry.executeEvent(this, event);
  PVPPlayer player = this.getPlayerExact(event.getPlayer());
  if(player != null && player.isSpawned())
  {
   Block b = event.getBlock();
   // Rush: a redstone torch next to the active station arms it (red/attackers only).
   if(gmode.equals(Gamemode.Rush) && (b.getType().equals(Material.REDSTONE_TORCH_ON) || b.getType().equals(Material.REDSTONE_TORCH_OFF)))
   {
    if(activeRadioStation != null && activeRadioStation.getLocation().distance(b.getLocation()) < 3d)
    {
     if(player.getTeam() == this.teamRed)
     {
      activeRadioStation.armer = player;
      return 0;
     }
     return 2;
    }
   }
   // A wall sign placed against wool becomes an ammo resupply station.
   else if(b.getType().equals(Material.WALL_SIGN))
   {
    Location facing = ResupplyStation.getFacing((Sign)b.getState());
    if(b.getLocation().clone().add(facing).getBlock().getType().equals(Material.WOOL))
    {
     double ammoRefillDist = Main.gameEngine.configuration.getAmmoResupplyRange();
     double ammoRefillSpeed = Main.gameEngine.configuration.getAmmoResupplySpeed();
     int fill = Main.gameEngine.configuration.getAmmoResupplyAmount();
     this.resupplyStations.add(new ResupplyStation((Sign)b.getState(), this, player, ammoRefillDist, ammoRefillSpeed, fill));
    }
   }
   else if(!Main.gameEngine.configuration.canEnvironmentBeDamaged(gmode, world))
   {
    return 2;
   }
   return 1;
  }
  return 2;
 }
 /**
  * Registers an arrow fired by a match player as a projectile. If the
  * player's held item maps to a configured weapon, the arrow becomes a
  * WeaponProjectile with the weapon's speed applied; otherwise a
  * SimpleProjectile is used.
  *
  * @return false when the arrow was registered (handled by the match),
  *         true when the shooter is not a spawned match player
  */
 public boolean arrowLaunchedByPlayer(Player p, Arrow arrow)
 {
  PVPPlayer player = this.getPlayerExact(p);
  if(player != null && player.isSpawned())
  {
   double critpProbab = Main.gameEngine.configuration.getCritProbability(world, gmode);
   Random rand = new Random();
   // Default projectile; replaced below when the held item is a known weapon.
   Projectile sp = new SimpleProjectile(player, rand.nextDouble() < critpProbab, 1.0d, arrow, "{placeholder}");
   if(player.getCombatClass() != null)
   {
    ItemStack is = player.thePlayer.getItemInHand();
    if(is != null)
    {
     WeaponIndex wi = this.weapons.get(DamageType.PROJECTILEHIT);
     if(wi != null)
     {
      WeaponDescriptor wd = wi.get(is.getType());
      if(wd != null)
      {
       Debugger.writeDebugOut(String.format("Projectile '%s' fired.",wd.name));
       sp = new WeaponProjectile(player, arrow, wd, rand.nextDouble() < critpProbab);
       // Scale the arrow velocity by the weapon's configured speed.
       double speed = wd.speed;
       arrow.setVelocity(arrow.getVelocity().clone().multiply(speed));
      }
     }
    }
   }
   this.projectiles.put(arrow, sp);
   return false;
  }
  return true;
 }
 /** Registers a weapon to be updated every tick by {@link #doUpdate}. */
 public void registerTickControlled(TickControlledWeapon tickControlledWeapon)
 {
  ltcw.add(tickControlledWeapon);
 }
 /** Stops updating the given weapon every tick. */
 public void unregisterTickControlled(TickControlledWeapon tickControlledWeapon)
 {
  ltcw.remove(tickControlledWeapon);
 }
public String[] getInformationSignText()
{
String[] info = new String[4];
info[0] = this.gmode.toString();
info[1] = ChatColor.RED+"RED"+ChatColor.RESET+"|"+ChatColor.BLUE+"BLUE";
info[2] = "Points:"+Integer.toString((int)Math.round(teamRed.getPoints()))+"|"+Integer.toString((int)Math.round(teamBlue.getPoints()));
info[3] = "Players: "+Integer.toString(playersRed.size())+" | "+Integer.toString(playersBlue.size());
return info;
}
public List<PVPPlayer> getPlayers()
{
List<PVPPlayer> players = new ArrayList<PVPPlayer>();
for(PVPPlayer p : this.playersRed)
{
players.add(p);
}
for(PVPPlayer p : this.playersBlue)
{
players.add(p);
}
return players;
}
public TeamRed getTeamRed()
{
return teamRed;
}
public TeamBlue getTeamBlue()
{
return teamBlue;
}
public int getFlagsRed()
{
return this.flagsRed;
}
public int getFlagsBlue()
{
return this.flagsBlue;
}
public int getFlagNum()
{
return this.flags.size();
}
public Location getMatchLeaveLoc()
{
return matchLeaveLoc;
}
public void sendTeamMessage(Team team, String message) //Team = null will send a message to all players
{
if(team != teamBlue)
{
for(PVPPlayer p : this.playersRed)
{
p.thePlayer.sendMessage(message);
}
}
if(team != teamRed)
{
for(PVPPlayer p : this.playersBlue)
{
p.thePlayer.sendMessage(message);
}
}
}
 /**
  * Handles an arrow hitting a player: determines the hit zone from the
  * arrow's height relative to the target, applies zone/crit multipliers,
  * deals the damage and either credits the kill or records a kill-assist.
  *
  * @return true when the hit was fully handled here (registered projectile)
  *         or the target is not a match player; false for an unregistered
  *         projectile hitting a match player
  */
 public boolean arrowHitPlayer(Player p, Arrow a, double damage)
 {
  Debugger.writeDebugOut("Player hit by arrow called!");
  PVPPlayer player = this.getPlayerExact(p);
  if(player != null)
  {
   if(player.isSpawned())
   {
    HitZone hitzone = HitZone.TORSO;
    // deltaY measures down from ~head height (player Y + 2) to the arrow:
    // small values are headshots, large values are legshots.
    double deltaY = p.getLocation().clone().add(0d, 2d, 0d).getY()-a.getLocation().getY();
    double multi = 1d;
    if(deltaY > (2d/3d)) //Legshot
    {
     hitzone = HitZone.LEG;
     multi = Main.gameEngine.configuration.getLegshotDamageMultiplier(world, gmode);
    }
    else if(deltaY < (1d/3d)) //Headshot
    {
     hitzone = HitZone.HEAD;
     multi = Main.gameEngine.configuration.getHeadshotDamageMultiplier(world, gmode);
    }
    Projectile sp = this.projectiles.get(a);
    if(sp != null)
    {
     Debugger.writeDebugOut("Projectile is registered!");
     PVPPlayer attacker = sp.shooter;
     if(attacker != null)
     {
      if(this.canKill(attacker, player))
      {
       Debugger.writeDebugOut(String.format("\"%s\" damaging \"%s\", teams: %s and %s",attacker.getName(),player.getName(),attacker.getTeam().getName(),player.getTeam().getName()));
       // Weapon projectiles carry their own zone handling; plain arrows
       // use the configured base damage plus the zone multiplier.
       if(sp instanceof WeaponProjectile)
       {
        multi = 1d;
       }
       else
       {
        damage = Main.gameEngine.configuration.getProjectileDamage(world, gmode);
       }
       if(sp.isCritical)
       {
        attacker.thePlayer.sendMessage(ChatColor.GOLD+Main.gameEngine.dict.get("crit")+"!");
        multi *= Main.gameEngine.configuration.getCritMultiplier(world, gmode);
       }
       if(hitzone == HitZone.HEAD)
       {
        attacker.thePlayer.sendMessage(ChatColor.GOLD+Main.gameEngine.dict.get("headshot")+"!");
       }
       // Block vanilla death handling while we apply scripted damage.
       player.normalDeathBlocked = true;
       player.thePlayer.damage((float)Math.round(sp.getDmg(damage, hitzone, player.thePlayer.getLocation()) * multi * player.thePlayer.getMaxHealth()));
       Debugger.writeDebugOut("Flight distance: "+sp.getFlightDistance(player.thePlayer.getLocation()));
       Debugger.writeDebugOut("Damage_base: "+sp.getDmg(damage, hitzone, player.thePlayer.getLocation()));
       Debugger.writeDebugOut("Damage: "+(sp.getDmg(damage, hitzone, player.thePlayer.getLocation()) * multi * player.thePlayer.getMaxHealth()));
       Debugger.writeDebugOut("Health: "+Double.toString(player.thePlayer.getHealth()));
       if(player.thePlayer.getHealth() <= 0d)
       {
        this.kill(attacker, player, sp.getWpName(), false, hitzone == HitZone.HEAD);
       }
       else
       {
        // Survived: record the damage for a potential kill-assist.
        player.addKillhelper(attacker, sp.getDmg(damage, hitzone, player.thePlayer.getLocation()) * multi * player.thePlayer.getMaxHealth());
       }
       player.normalDeathBlocked = false;
      }
     }
     this.projectiles.remove(a);
     EntitySyncCalls.removeEntity(a);
     return true;
    }
    else
    {
     Debugger.writeDebugOut("Unregistered projectile hit "+p.getName());
    }
    return false;
   }
  }
  return true;
 }
 /**
  * Switches the player to the opposite team, subject to auto-balance (the
  * switch is only allowed when the player leaves the larger team, unless
  * auto-balance is disabled). A living player is killed as part of the
  * switch.
  *
  * @return a colored status message to show to the player
  */
 public String playerChangeTeam(PVPPlayer player)
 {
  Team from = player.getTeam();
  Team to = (from == teamRed ? teamBlue : teamRed);
  int numFrom = (from == teamRed ? playersRed.size() : playersBlue.size());
  int numTo = (to == teamRed ? playersRed.size() : playersBlue.size());
  boolean autoBalance = Main.gameEngine.configuration.getAutobalance(world, gmode);
  if((!autoBalance) || (numFrom > numTo))
  {
   // Kill only if the player is still alive (suicide counts against them).
   this.kill(player, player, "", player.thePlayer.getHealth() > 0);
   this.setTeam(player,to);
   return ChatColor.DARK_GREEN+String.format(Main.gameEngine.dict.get("playerChangeTeam"),to.color+to.getName().toUpperCase()+ChatColor.DARK_GREEN);
  }
  else
  {
   return ChatColor.DARK_RED+Main.gameEngine.dict.get("teamchangeUnbalanced");
  }
 }
public Location playerRespawn(Player p, Location respawnLocation)
{
PVPPlayer player = this.getPlayerExact(p);
if(player != null)
{
return this.classSelectArea.pickRandomPoint();
}
return respawnLocation;
}
public List<PVPPlayer> getSpawnedPlayersNearLocation(Location loc, double dist)
{
List<PVPPlayer> nearPlayers = new ArrayList<PVPPlayer>();
List<PVPPlayer> players = this.getPlayers();
for(PVPPlayer p : players)
{
if(p.thePlayer.getLocation().distance(loc) <= dist && p.isSpawned())
{
nearPlayers.add(p);
}
}
return nearPlayers;
}
public void unregisterIMS(Item item)
{
imss.remove(item);
}
public void radioStationDestroyed(RadioStation radioStation)
{
this.sendTeamMessage(teamRed,ChatColor.GREEN+Main.gameEngine.dict.get("rsDestroyed"));
this.sendTeamMessage(teamBlue,ChatColor.RED+Main.gameEngine.dict.get("rsLost"));
this.teamBlue.subPoints(1.0d);
if(radioStationIterator.hasNext())
{
activeRadioStation = radioStationIterator.next();
List<PVPPlayer> players = this.getPlayers();
for(PVPPlayer p : players)
{
p.thePlayer.setCompassTarget(activeRadioStation.getLocation());
}
}
else
{
this.teamBlue.setPoints(-1d);
}
}
public int getRemainingStations()
{
return (int)Math.round(teamBlue.getPoints());
}
public double getTeamPoints(Team team)
{
if(teamRed.equals(team))
{
return teamRed.getPoints();
}
if(teamBlue.equals(team))
{
if(gmode.equals(Gamemode.Rush))
{
return (double)this.getRemainingStations();
}
return teamBlue.getPoints();
}
return 0d;
}
public int blockBreak(BlockBreakEvent event)
{
Main.gameEngine.weaponRegistry.executeEvent(this, event);
PVPPlayer player = this.getPlayerExact(event.getPlayer());
if(player != null && player.isSpawned())
{
Block b = event.getBlock();
if(gmode.equals(Gamemode.Rush) && (b.getType().equals(Material.REDSTONE_TORCH_ON) || b.getType().equals(Material.REDSTONE_TORCH_OFF)))
{
if(activeRadioStation != null && activeRadioStation.getLocation().distance(b.getLocation()) < 3d)
{
if(player.getTeam() == this.teamBlue)
{
activeRadioStation.defender = player;
return 0;
}
}
return 2;
}
else if(!Main.gameEngine.configuration.canEnvironmentBeDamaged(gmode, world))
{
return 2;
}
return 1;
}
return 2;
}
public void win(Team winner)
{
this.sendTeamMessage(winner,ChatColor.DARK_GREEN+Main.gameEngine.dict.get("won"));
this.sendTeamMessage(winner.equals(teamRed) ? teamBlue : teamRed,ChatColor.DARK_RED+Main.gameEngine.dict.get("lost"));
this.endMatch();
}
public int getPlayerNumRed()
{
return this.playersRed.size();
}
public int getPlayerNumBlue()
{
return this.playersBlue.size();
}
public List<Flag> getFlags()
{
return this.flags;
}
public List<RadioStation> getRadioStations()
{
return this.radioStations;
}
    /**
     * Applies melee damage from one tracked player to another.
     * If the attacker's held item is registered as a HIT weapon, the vanilla
     * damage is replaced by the weapon's damage (a fraction of the victim's
     * max health). Kills and kill-assists are booked accordingly.
     *
     * @param damager the Bukkit player dealing the damage
     * @param damaged the Bukkit player receiving the damage
     * @param d       the vanilla damage amount (may be overridden below)
     * @return always {@code true}; the caller cancels the original event
     */
    public boolean playerDamagePlayer(Player damager, Player damaged, double d)
    {
        Debugger.writeDebugOut("Player damage player called!");
        PVPPlayer attacker = this.getPlayerExact(damager);
        PVPPlayer player = this.getPlayerExact(damaged);
        // Only handle damage when both participants are tracked by this game.
        if(attacker != null && player != null)
        {
            if(this.canKill(attacker, player))
            {
                // Replace vanilla damage with the registered HIT-weapon damage, if any.
                ItemStack inHand = damager.getItemInHand();
                if(inHand != null)
                {
                    WeaponIndex wi = this.weapons.get(WeaponUseType.HIT);
                    if(wi != null)
                    {
                        WeaponDescriptor wd = wi.get(inHand.getType());
                        if(wd != null && wd.dmgType == DamageType.HIT)
                        {
                            d = damaged.getMaxHealth() * wd.getDamage(0d);
                        }
                    }
                }
                Debugger.writeDebugOut(String.format("\"%s\" damaging \"%s\", teams: %s and %s",attacker.getName(),player.getName(),attacker.getTeam().getName(),player.getTeam().getName()));
                // Suppress the normal death handler while we apply damage ourselves,
                // so the kill is attributed through this game's kill pipeline.
                player.normalDeathBlocked = true;
                player.thePlayer.damage(d);
                if(player.thePlayer.getHealth() <= 0d)
                {
                    this.kill(attacker, player, Main.gameEngine.dict.get("killed"), false);
                }
                else
                {
                    // Victim survived: record the attacker as a potential assist.
                    player.addKillhelper(attacker, d);
                }
                player.normalDeathBlocked = false;
            }
        }
        return true;
    }
public void playerChat(AsyncPlayerChatEvent event)
{
PVPPlayer player = this.getPlayerExact(event.getPlayer());
String format = event.getFormat();
if(player != null)
{
format = "<"+player.getName()+"> %2$s";
}
event.setFormat(format);
Main.gameEngine.weaponRegistry.executeEvent(this, event);
}
public void foodLevelChange(FoodLevelChangeEvent event)
{
if(event.getEntity() instanceof Player)
{
PVPPlayer player = this.getPlayerExact((Player)event.getEntity());
if(player != null && player.isSpawned())
{
event.setCancelled(!Main.gameEngine.configuration.isHungerActive(world, gmode));
}
else
{
event.setCancelled(true);
}
}
else
{
event.setCancelled(true);
}
Main.gameEngine.weaponRegistry.executeEvent(this, event);
}
public void unregisterResupply(ResupplyStation resupplyStation)
{
this.resupplyStations.remove(resupplyStation);
}
    /**
     * Shows an explosion effect to every participant and (optionally)
     * applies radius-based damage with kill/assist bookkeeping.
     *
     * @param issuer   the player credited with the explosion; may be null
     * @param loc      the explosion center
     * @param exploStr the explosion strength (scales radius and damage)
     * @param b        flag forwarded to the per-player fake-explosion effect
     *                 (presumably effect-only vs. real — TODO confirm)
     * @param doDamage whether damage should be applied at all
     * @param weapon   the weapon name used for kill messages
     */
    private void createFakeExplosion(PVPPlayer issuer, Location loc, float exploStr, boolean b, boolean doDamage, String weapon)
    {
        List<PVPPlayer> players = this.getPlayers();
        for(PVPPlayer p : players)
        {
            if(issuer != null && doDamage)
            {
                if(this.canKill(issuer, p) || issuer == p) //explosions always damage their creator
                {
                    double dist = loc.distance(p.thePlayer.getLocation());
                    // Damage radius grows linearly with explosion strength.
                    double radius = 1.24d*exploStr;
                    if(dist < radius)
                    {
                        double expo = 0.8d; //Nice constant for pretty fair damage
                        // Quadratic falloff: full impact at the center, zero at the radius edge.
                        double impact = (1d - dist/radius) * expo;
                        double dmg = (Math.pow(impact, 2) + impact) * 8d * exploStr + 1d;
                        // Block the normal death handler so the kill is attributed here.
                        p.normalDeathBlocked = true;
                        p.thePlayer.damage((int)Math.floor(dmg));
                        if(p.thePlayer.getHealth() <= 0d)
                        {
                            this.kill(issuer, p, weapon, false);
                        }
                        else
                        {
                            // Survivor: record the issuer as a potential kill assist.
                            p.addKillhelper(issuer, (int)Math.floor(dmg));
                        }
                        p.normalDeathBlocked = false;
                    }
                }
            }
            // Every participant sees the effect, damaged or not.
            p.createFakeExplosion(loc, exploStr, b);
        }
    }
public void createExplosion(PVPPlayer issuer, Location loc, float exploStr, String weapon)
{
if(!protection.isLocProtected(loc))
{
this.createFakeExplosion(issuer,loc, exploStr, false, true, weapon);
EffectSyncCalls.createExplosion(loc, exploStr);
}
else
{
this.createFakeExplosion(issuer,loc, exploStr, true, true, weapon);
}
}
public boolean itemDamage(Item is, DamageCause cause)
{
boolean explo = ((cause == DamageCause.BLOCK_EXPLOSION) || (cause == DamageCause.ENTITY_EXPLOSION));
if(is.getItemStack().getType().equals(Material.REDSTONE))
{
if(imss.get(is) != null)
{
return true;
}
}
return false;
}
public void addMissile(Missile proj)
{
this.missiles.put(proj.getProjectile(), proj);
}
public void rmMissile(Missile proj)
{
this.missiles.remove(proj.getProjectile());
}
public void rightClickWithStick(Player p)
{
PVPPlayer player = this.getPlayerExact(p);
if(player != null)
{
if(player.killstreaks.contains(Killstreak.PLAYERSEEKER))
{
player.killstreaks.remove(Killstreak.PLAYERSEEKER);
InventorySyncCalls.removeItemStack(player.thePlayer.getInventory(), new ItemStack(Material.STICK,1));
double offset = 2.5d;
Vector dir = player.thePlayer.getLocation().getDirection().clone();
dir = dir.clone().multiply(15d/dir.length());
Arrow arr = this.world.spawnArrow(player.thePlayer.getLocation().clone().add(new Vector(0d, offset, 0d)), dir, 2f, 0.1f);
arr.setVelocity(dir);
new PlayerSeeker(this, arr, player, null, Main.gameEngine.configuration);
}
}
}
public void entityExplosion(EntityExplodeEvent event)
{
for(Block b : event.blockList())
{
if(this.protection.isBlockProtected(b))
{
event.setCancelled(true);;
}
}
Main.gameEngine.weaponRegistry.executeEvent(this, event);
}
public void createWeaponProjectile(PVPPlayer shooter, Location launchLoc, Vector velocity, WeaponDescriptor wd, boolean crit)
{
Arrow arr = this.world.spawnArrow(launchLoc, velocity.clone(), 1f, 1f);
arr.setVelocity(velocity.clone());
this.projectiles.put(arr, new WeaponProjectile(shooter, arr, wd, crit));
}
public void playerInteract(PlayerInteractEvent event)
{
Player p = event.getPlayer();
if(event.getAction().equals(Action.RIGHT_CLICK_BLOCK))
{
Material material = event.getClickedBlock().getType();
if(material.equals(Material.SIGN) || material.equals(Material.SIGN_POST) || material.equals(Material.WALL_SIGN))
{
this.rightClickSign(p, event.getClickedBlock());
}
}
PVPPlayer player = this.getPlayerExact(p);
if(player != null)
{
ItemStack is = p.getInventory().getItemInHand();
if(is != null)
{
if(event.getAction().equals(Action.RIGHT_CLICK_AIR) || event.getAction().equals(Action.RIGHT_CLICK_BLOCK))
{
if(is.getType().equals(Material.STICK))
{
Main.gameEngine.rightClickWithStick(p);
}
}
}
}
Main.gameEngine.weaponRegistry.executeEvent(this, event);
}
public void entityDamage(EntityDamageEvent event)
{
Main.gameEngine.weaponRegistry.executeEvent(this, event);
if(event.getEntity() instanceof Player)
{
if(this.playerDamage((Player)event.getEntity(), event.getCause()))
event.setCancelled(true);
}
}
public void entityCombust(EntityCombustEvent event)
{
Main.gameEngine.weaponRegistry.executeEvent(this, event);
}
public void projectileLaunched(ProjectileLaunchEvent event)
{
org.bukkit.entity.Projectile proj = event.getEntity();
if(proj instanceof Arrow)
{
Arrow arrow = (Arrow)proj;
LivingEntity shooter = arrow.getShooter();
if(shooter instanceof Player)
{
Player p = (Player)shooter;
if(this.arrowLaunchedByPlayer(p,arrow))
event.setCancelled(true);;
}
}
Main.gameEngine.weaponRegistry.executeEvent(this, event);
}
public int blockDamaged(BlockDamageEvent event)
{
Main.gameEngine.weaponRegistry.executeEvent(this, event);
PVPPlayer player = this.getPlayerExact(event.getPlayer());
if(player != null && player.isSpawned())
{
Block b = event.getBlock();
if(gmode.equals(Gamemode.Rush) && (b.getType().equals(Material.REDSTONE_TORCH_ON) || b.getType().equals(Material.REDSTONE_TORCH_OFF)))
{
if(activeRadioStation != null && activeRadioStation.getLocation().distance(b.getLocation()) < 3d)
{
if(player.getTeam() == this.teamBlue)
{
activeRadioStation.defender = player;
return 0;
}
}
return 2;
}
else if(!Main.gameEngine.configuration.canEnvironmentBeDamaged(gmode, world))
{
return 2;
}
return 1;
}
return 2;
}
public int blockChanged(EntityChangeBlockEvent event)
{
Main.gameEngine.weaponRegistry.executeEvent(this, event);
if(event.getEntity() instanceof Player)
{
PVPPlayer player = this.getPlayerExact((Player)event.getEntity());
if(player != null && player.isSpawned())
{
Block b = event.getBlock();
if(gmode.equals(Gamemode.Rush) && (b.getType().equals(Material.REDSTONE_TORCH_ON) || b.getType().equals(Material.REDSTONE_TORCH_OFF)))
{
if(activeRadioStation != null && activeRadioStation.getLocation().distance(b.getLocation()) < 3d)
{
if(player.getTeam() == this.teamBlue)
{
activeRadioStation.defender = player;
return 0;
}
}
return 2;
}
else if(!Main.gameEngine.configuration.canEnvironmentBeDamaged(gmode, world))
{
return 2;
}
return 1;
}
}
return 2;
}
public void entityDamageByEntity(EntityDamageByEntityEvent event)
{
if(event.getDamager() instanceof Arrow && event.getEntity() instanceof Player)
{
if(this.arrowHitPlayer((Player)event.getEntity(), (Arrow)event.getDamager(), event.getDamage()))
event.setCancelled(true);
}
else if(event.getDamager() instanceof Player && event.getEntity() instanceof Player)
{
if(this.playerDamagePlayer((Player)event.getDamager(),(Player)event.getEntity(),event.getDamage()))
event.setCancelled(true);
}
Main.gameEngine.weaponRegistry.executeEvent(this, event);
}
public void projectileHit(ProjectileHitEvent event)
{
Main.gameEngine.weaponRegistry.executeEvent(this, event);
if(event.getEntity() instanceof Arrow)
{
Arrow arr = (Arrow)event.getEntity();
Missile missile = this.missiles.get(arr);
if(missile != null)
{
missile.explode();
return;
}
}
}
} |
package fr.mgargadennec.blossom.core.common.dto;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import java.util.Date;
public abstract class AbstractDTO {
@JsonSerialize(using = ToStringSerializer.class)
private Long id;
private Date creationDate;
private String creationUser;
private Date modificationDate;
private String modificationUser;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Date getCreationDate() {
return creationDate;
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
public String getCreationUser() {
return creationUser;
}
public void setCreationUser(String creationUser) {
this.creationUser = creationUser;
}
public Date getModificationDate() {
return modificationDate;
}
public void setModificationDate(Date modificationDate) {
this.modificationDate = modificationDate;
}
public String getModificationUser() {
return modificationUser;
}
public void setModificationUser(String modificationUser) {
this.modificationUser = modificationUser;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AbstractDTO that = (AbstractDTO) o;
return id == null || that.id == null ? false : id.equals(that.id);
}
@Override
public int hashCode() {
return id != null ? id.hashCode() : 0;
}
} |
package gov.nih.nci.cabig.caaers.web.fields.validators;
import junit.framework.TestCase;
/**
*
* @author Ion C. Olaru
*
*/
public class SignValidatorTest extends TestCase {
SignValidator dv;
SignValidator dv2;
protected void setUp() throws Exception {
super.setUp();
dv = new SignValidator(true);
dv2 = new SignValidator(false);
}
public void testIsValid() {
assertTrue(dv.isValid(123));
assertTrue(dv.isValid(0));
assertFalse(dv2.isValid(123));
assertFalse(dv2.isValid(0));
assertFalse(dv.isValid(-1));
assertFalse(dv.isValid(-7899999.123));
assertTrue(dv2.isValid(-1));
assertTrue(dv2.isValid(-7899999.123));
}
public void testGetMessagePrefix() {
assertEquals("Invalid sign", dv.getMessagePrefix());
}
public void testGetValidatorCSSClassName() {
assertEquals("POSITIVE", dv.getValidatorCSSClassName());
}
} |
package org.camunda.bpm.camel.component.externaltasks;
import static org.camunda.bpm.camel.component.CamundaBpmConstants.CAMUNDA_BPM_PROCESS_DEFINITION_ID;
import static org.camunda.bpm.camel.component.CamundaBpmConstants.CAMUNDA_BPM_PROCESS_DEFINITION_KEY;
import static org.camunda.bpm.camel.component.CamundaBpmConstants.CAMUNDA_BPM_PROCESS_INSTANCE_ID;
import static org.camunda.bpm.camel.component.CamundaBpmConstants.CAMUNDA_BPM_PROCESS_PRIO;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.concurrent.ScheduledExecutorService;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.PollingConsumer;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.impl.ScheduledBatchPollingConsumer;
import org.apache.camel.spi.Synchronization;
import org.apache.camel.util.CastUtils;
import org.camunda.bpm.camel.component.CamundaBpmEndpoint;
import org.camunda.bpm.camel.component.CamundaBpmExternalTaskEndpointImpl;
import org.camunda.bpm.engine.ExternalTaskService;
import org.camunda.bpm.engine.externaltask.LockedExternalTask;
public class BatchConsumer extends ScheduledBatchPollingConsumer {
private final CamundaBpmEndpoint camundaEndpoint;
private int timeout;
private final int retryTimeout;
private final long lockDuration;
private final String topic;
public BatchConsumer(final CamundaBpmEndpoint endpoint, final Processor processor, final int retryTimeout,
final long lockDuration, final String topic) {
super(endpoint, processor);
this.camundaEndpoint = endpoint;
this.retryTimeout = retryTimeout;
this.lockDuration = lockDuration;
this.topic = topic;
}
public BatchConsumer(final CamundaBpmEndpoint endpoint, final Processor processor,
final ScheduledExecutorService executor, final int retryTimeout, final long lockDuration,
final String topic) {
super(endpoint, processor, executor);
this.camundaEndpoint = endpoint;
this.retryTimeout = retryTimeout;
this.lockDuration = lockDuration;
this.topic = topic;
}
@Override
public int processBatch(Queue<Object> exchanges) throws Exception {
int total = exchanges.size();
int answer = total;
for (int index = 0; index < total && isBatchAllowed(); index++) {
// only loop if we are started (allowed to run)
// use poll to remove the head so it does not consume memory even
// after we have processed it
Exchange exchange = (Exchange) exchanges.poll();
// add current index and total as properties
exchange.setProperty(Exchange.BATCH_INDEX, index);
exchange.setProperty(Exchange.BATCH_SIZE, total);
exchange.setProperty(Exchange.BATCH_COMPLETE, index == total - 1);
// update pending number of exchanges
pendingExchanges = total - index - 1;
boolean started = processExchange(exchange);
// if we did not start processing then decrement the counter
if (!started) {
answer
}
}
// drain any in progress files as we are done with this batch
removeExcessiveInProgressTasks(CastUtils.cast((Queue<?>) exchanges, Exchange.class), 0);
return answer;
}
private boolean processExchange(final Exchange exchange) throws Exception {
exchange.addOnCompletion(new Synchronization() {
@Override
public void onFailure(Exchange exchange) {
completeTask(exchange);
}
@Override
public void onComplete(Exchange exchange) {
completeTask(exchange);
}
});
getProcessor().process(exchange);
return true;
}
/**
* Drain any in progress files as we are done with this batch
*
* @param exchanges
* the exchanges
* @param limit
* the limit
*/
protected void removeExcessiveInProgressTasks(Queue<Exchange> exchanges, int limit) {
while (exchanges.size() > limit) {
// must remove last
Exchange exchange = exchanges.poll();
releaseTask(exchange);
}
}
private void releaseTask(final Exchange exchange) {
exchange.setProperty(Exchange.ROLLBACK_ONLY, Boolean.TRUE);
completeTask(exchange);
}
private ExternalTaskService getExternalTaskService() {
return camundaEndpoint.getProcessEngine().getExternalTaskService();
}
@SuppressWarnings("unchecked")
private void completeTask(final Exchange exchange) {
final Message in = exchange.getIn();
if (in == null) {
throw new RuntimeCamelException("Unexpected exchange: in is null!");
}
final LockedExternalTask task = in.getHeader(CamundaBpmExternalTaskEndpointImpl.EXCHANGE_HEADER_TASK,
LockedExternalTask.class);
if (task == null) {
throw new RuntimeCamelException("Unexpected exchange: in-header '"
+ CamundaBpmExternalTaskEndpointImpl.EXCHANGE_HEADER_TASK + "' is null!");
}
final ExternalTaskService externalTaskService = getExternalTaskService();
// rollback
if (exchange.isRollbackOnly()) {
externalTaskService.unlock(task.getId());
} else
// failure
if (exchange.isFailed()) {
final Exception exception = exchange.getException();
externalTaskService.handleFailure(task.getId(),
task.getWorkerId(),
exception.getMessage(),
task.getRetries(),
retryTimeout);
} else
// success
{
final Message out = exchange.getOut();
final Map<String, Object> variablesToBeSet;
if ((out != null) && (out.getBody() != null) && (out.getBody() instanceof Map)) {
variablesToBeSet = out.getBody(Map.class);
} else {
variablesToBeSet = null;
}
if (variablesToBeSet != null) {
externalTaskService.complete(task.getId(), task.getWorkerId(), variablesToBeSet);
} else {
externalTaskService.complete(task.getId(), task.getWorkerId());
}
}
}
private static HashMap<String, Integer> pollCounter = new HashMap<String, Integer>() {
public Integer get(Object key) {
Integer result = super.get(key);
if (result == null) {
result = new Integer(0);
super.put((String) key, result);
}
return result;
};
};
protected int poll() throws Exception {
int messagesPolled = 0;
int pc = pollCounter.get(camundaEndpoint.getEndpointUri());
pc++;
System.err.println("polled for '" + camundaEndpoint.getEndpointUri() + "': " + pc);
pollCounter.put(camundaEndpoint.getEndpointUri(), pc);
PriorityQueue<Exchange> exchanges = new PriorityQueue<Exchange>(new Comparator<Exchange>() {
@Override
public int compare(Exchange o1, Exchange o2) {
Long prio1 = (Long) o1.getProperty(CAMUNDA_BPM_PROCESS_PRIO, 0);
Long prio2 = (Long) o2.getProperty(CAMUNDA_BPM_PROCESS_PRIO, 0);
return prio1.compareTo(prio2);
}
});
if (isPollAllowed()) {
final List<LockedExternalTask> tasks = getExternalTaskService().fetchAndLock(maxMessagesPerPoll,
camundaEndpoint.getEndpointUri(),
true).topic(topic, lockDuration).execute();
messagesPolled = tasks.size();
for (final LockedExternalTask task : tasks) {
Exchange exchange = getEndpoint().createExchange();
exchange.setFromEndpoint(getEndpoint());
exchange.setExchangeId(task.getWorkerId() + "/" + task.getId());
exchange.setProperty(CAMUNDA_BPM_PROCESS_INSTANCE_ID, task.getProcessInstanceId());
exchange.setProperty(CAMUNDA_BPM_PROCESS_DEFINITION_KEY, task.getProcessDefinitionKey());
exchange.setProperty(CAMUNDA_BPM_PROCESS_DEFINITION_ID, task.getProcessDefinitionId());
exchange.setProperty(CAMUNDA_BPM_PROCESS_PRIO, task.getPriority());
// result.setProperty(BatchConsumer.PROPERTY_PRIORITY, ???);
final Message in = exchange.getIn();
in.setHeader(CamundaBpmExternalTaskEndpointImpl.EXCHANGE_HEADER_TASK, task);
// if the result of the polled exchange has output we should
// create
// a new exchange and
// use the output as input to the next processor
if (exchange.hasOut()) {
// lets create a new exchange
Exchange newExchange = getEndpoint().createExchange();
newExchange.getIn().copyFrom(exchange.getOut());
exchange = newExchange;
}
exchanges.add(exchange);
}
}
processBatch(CastUtils.cast(exchanges));
return messagesPolled;
}
public int getTimeout() {
return timeout;
}
/**
* Sets a timeout to use with {@link PollingConsumer}. <br/>
* <br/>
* Use <tt>timeout < 0</tt> for {@link PollingConsumer#receive()}. <br/>
* Use <tt>timeout == 0</tt> for {@link PollingConsumer#receiveNoWait()}.
* <br/>
* Use <tt>timeout > 0</tt> for {@link PollingConsumer#receive(long)}}.
* <br/>
* The default timeout value is <tt>0</tt>
*
* @param timeout
* the timeout value
*/
public void setTimeout(int timeout) {
this.timeout = timeout;
}
} |
package de.larmic.butterfaces.component.renderkit.html_basic.text;
import de.larmic.butterfaces.component.html.text.HtmlTreeBox;
import de.larmic.butterfaces.component.partrenderer.RenderUtils;
import de.larmic.butterfaces.component.renderkit.html_basic.text.model.CachedNodesInitializer;
import de.larmic.butterfaces.component.renderkit.html_basic.text.model.TreeBoxModelType;
import de.larmic.butterfaces.component.renderkit.html_basic.text.model.TreeBoxModelWrapper;
import de.larmic.butterfaces.component.renderkit.html_basic.text.part.TrivialComponentsEntriesNodePartRenderer;
import de.larmic.butterfaces.context.StringHtmlEncoder;
import de.larmic.butterfaces.model.tree.Node;
import de.larmic.butterfaces.resolver.MustacheResolver;
import de.larmic.butterfaces.util.StringUtils;
import javax.faces.component.UIComponent;
import javax.faces.component.UINamingContainer;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import javax.faces.convert.ConverterException;
import javax.faces.render.FacesRenderer;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* @author Lars Michaelis
*/
@FacesRenderer(componentFamily = HtmlTreeBox.COMPONENT_FAMILY, rendererType = HtmlTreeBox.RENDERER_TYPE)
public class TreeBoxRenderer extends AbstractHtmlTagRenderer<HtmlTreeBox> {

    // Fallback trivial-components templates when the component declares none.
    public static final String DEFAULT_SINGLE_LINE_OF_TEXT_USING_TITLE_TEMPLATE = "<div class=\"tr-template-single-line\"> <div class=\"content-wrapper editor-area\"> <div>{{title}}</div> </div></div>";
    public static final String DEFAULT_SINGLE_LINE_OF_TEXT_USING_TOSTRING_TEMPLATE = "<div class=\"tr-template-single-line\"> <div class=\"content-wrapper editor-area\"> <div>{{butterObjectToString}}</div> </div></div>";
    public static final String DEFAULT_SPINNER_TEXT = "Fetching data...";
    public static final String DEFAULT_NO_MATCHING_TEXT = "No matching entries...";

    @Override
    protected boolean encodeReadonly() {
        // Read-only rendering is handled in encodeInnerEnd instead.
        return false;
    }

    @Override
    public void encodeBegin(FacesContext context, UIComponent component) throws IOException {
        super.encodeBegin(context, component, "butter-component-treebox");
    }

    // Emits the inline <script> that initializes the trivial-components
    // combobox/tree-combobox on the rendered input, including the JSON entries.
    @Override
    protected void encodeEnd(HtmlTreeBox treeBox, ResponseWriter writer) throws IOException {
        final TreeBoxModelWrapper treeBoxModelWrapper = new TreeBoxModelWrapper(treeBox);
        final List<Node> nodes = treeBoxModelWrapper.getNodes();
        final TreeBoxModelType treeBoxModelType = treeBoxModelWrapper.getTreeBoxModelType();
        final List<String> mustacheKeys = this.createMustacheKeys(FacesContext.getCurrentInstance(), treeBox);
        final String clientIdSeparator = String.valueOf(UINamingContainer.getSeparatorChar(FacesContext.getCurrentInstance()));
        // Index nodes by a stable integer id; the same map is rebuilt in
        // getConvertedValue to translate the submitted index back to a node.
        final Map<Integer, Node> nodesMap = CachedNodesInitializer.createNodesMap(nodes);
        writer.startElement("script", treeBox);
        writer.writeText("jQuery(function () {\n", null);
        // Client id separators (e.g. ':') are not valid in JS identifiers.
        writer.writeText("var entries_" + treeBox.getClientId().replace(clientIdSeparator, "_") + " = " + new TrivialComponentsEntriesNodePartRenderer().renderEntriesAsJSON(nodes, mustacheKeys, nodesMap) + ";\n", null);
        final String jQueryBySelector = RenderUtils.createJQueryBySelector(treeBox.getClientId(), "input");
        final String pluginCall = createJQueryPluginCallTrivial(treeBox, treeBoxModelType, mustacheKeys, nodesMap);
        writer.writeText("var trivialTree" + treeBox.getClientId().replace(clientIdSeparator, "_") + " = " + jQueryBySelector + pluginCall + ";", null);
        writer.writeText("});", null);
        writer.endElement("script");
    }

    // Extracts the mustache placeholder keys used by the custom "template"
    // facet, if one is declared; otherwise no keys are needed.
    private List<String> createMustacheKeys(FacesContext context, HtmlTreeBox treeBox) throws IOException {
        if (treeBox.getFacet("template") != null) {
            final String encodedTemplate = StringHtmlEncoder.encodeComponentWithSurroundingDiv(context, treeBox.getFacet("template"));
            return MustacheResolver.getMustacheKeysForTreeNode(encodedTemplate);
        }
        return Collections.emptyList();
    }

    // Renders the plain value wrapper when the component is read-only.
    @Override
    protected void encodeInnerEnd(UIComponent component, ResponseWriter writer) throws IOException {
        final HtmlTreeBox treeBox = (HtmlTreeBox) component;
        if (treeBox.isReadonly()) {
            writer.startElement(ELEMENT_DIV, component);
            writer.writeAttribute("class", "butter-component-value", null);
            super.encodeSuperEnd(FacesContext.getCurrentInstance(), component);
            writer.endElement(ELEMENT_DIV);
        }
    }

    /**
     * Converts the submitted value back to the model type: STRINGS models get
     * the raw string, otherwise the submitted value is the integer index into
     * the cached nodes map (OBJECTS models unwrap the node's data).
     */
    @Override
    public Object getConvertedValue(final FacesContext context,
                                    final UIComponent component,
                                    final Object submittedValue) throws ConverterException {
        if (submittedValue == null || "".equals(submittedValue)) {
            return null;
        }
        final String newValue = (String) submittedValue;
        final HtmlTreeBox treeBox = (HtmlTreeBox) component;
        final TreeBoxModelWrapper treeBoxModelWrapper = new TreeBoxModelWrapper(treeBox);
        final TreeBoxModelType treeBoxModelType = treeBoxModelWrapper.getTreeBoxModelType();
        if (treeBoxModelType == TreeBoxModelType.STRINGS) {
            return newValue;
        }
        final List<Node> nodes = treeBoxModelWrapper.getNodes();
        final Map<Integer, Node> nodesMap = CachedNodesInitializer.createNodesMap(nodes);
        final Integer selectedIndex = Integer.valueOf(newValue);
        final Node node = nodesMap.get(selectedIndex);
        return treeBoxModelType == TreeBoxModelType.OBJECTS && node != null ? node.getData() : node;
    }

    // Builds the JS options object for the TrivialComboBox/TrivialTreeComboBox
    // plugin call: templates, selected entry, placeholder and i18n texts.
    private String createJQueryPluginCallTrivial(final HtmlTreeBox treeBox,
                                                 final TreeBoxModelType treeBoxModelType,
                                                 final List<String> mustacheKeys,
                                                 final Map<Integer, Node> nodesMap) throws IOException {
        final StringBuilder jQueryPluginCall = new StringBuilder();
        final FacesContext context = FacesContext.getCurrentInstance();
        final Integer selectedEntryId = this.findValueInCachedNodes(treeBox.getValue(), treeBoxModelType, nodesMap);
        final Node selectedNode = selectedEntryId != null ? nodesMap.get(selectedEntryId) : null;
        final String editable = TrivialComponentsEntriesNodePartRenderer.getEditingMode(treeBox);
        final String noMatchingText = StringUtils.getNotNullValue(treeBox.getNoEntriesText(), DEFAULT_NO_MATCHING_TEXT);
        final String spinnerText = StringUtils.getNotNullValue(treeBox.getSpinnerText(), DEFAULT_SPINNER_TEXT);
        // Flat models use the plain combobox; NODES models the tree variant.
        if (treeBoxModelType == TreeBoxModelType.STRINGS || treeBoxModelType == TreeBoxModelType.OBJECTS) {
            jQueryPluginCall.append("TrivialComboBox({");
        } else {
            jQueryPluginCall.append("TrivialTreeComboBox({");
        }
        jQueryPluginCall.append("\n allowFreeText: false,");
        if (treeBoxModelType == TreeBoxModelType.OBJECTS) {
            jQueryPluginCall.append("\n valueProperty: 'id',");
        }
        jQueryPluginCall.append("\n inputTextProperty: '" + StringUtils.getNotNullValue(treeBox.getInputTextProperty(), "title") + "',");
        // Placeholder: a custom facet wins over the simple placeholder attribute.
        if (treeBox.getFacet("emptyEntryTemplate") != null) {
            jQueryPluginCall.append("\n emptyEntryTemplate: '" + StringHtmlEncoder.encodeComponentWithSurroundingDiv(context, treeBox.getFacet("emptyEntryTemplate"), "editor-area") + "',");
        } else if (StringUtils.isNotEmpty(treeBox.getPlaceholder())) {
            jQueryPluginCall.append("\n emptyEntryTemplate: '<div class=\"defaultEmptyEntry\">" + treeBox.getPlaceholder() + "</div>',");
        }
        jQueryPluginCall.append("\n editingMode: '" + editable + "',");
        if (selectedEntryId != null && selectedNode != null) {
            jQueryPluginCall.append("\n selectedEntry: " + new TrivialComponentsEntriesNodePartRenderer().renderNode(mustacheKeys, nodesMap, selectedEntryId, selectedNode) + ",");
        }
        if (treeBox.getFacet("selectedEntryTemplate") != null) {
            jQueryPluginCall.append("\n selectedEntryTemplate: '" + StringHtmlEncoder.encodeComponentWithSurroundingDiv(context, treeBox.getFacet("selectedEntryTemplate"), "editor-area") + "',");
        }
        // Entry template selection: custom facet, then per-model-type default.
        // Note: the combobox expects 'template', the tree variant 'templates'.
        if (treeBox.getFacet("template") != null) {
            final String encodedTemplate = StringHtmlEncoder.encodeComponentWithSurroundingDiv(context, treeBox.getFacet("template"), "editor-area");
            if (treeBoxModelType == TreeBoxModelType.OBJECTS) {
                jQueryPluginCall.append("\n template: '" + encodedTemplate + "',");
            } else {
                jQueryPluginCall.append("\n templates: ['" + encodedTemplate + "'],");
            }
        } else if (treeBoxModelType == TreeBoxModelType.NODES) {
            jQueryPluginCall.append("\n templates: ['" + TreeRenderer.DEFAULT_NODES_TEMPLATE + "'],");
        } else if (treeBoxModelType == TreeBoxModelType.STRINGS) {
            jQueryPluginCall.append("\n template: '" + DEFAULT_SINGLE_LINE_OF_TEXT_USING_TITLE_TEMPLATE + "',");
        } else if (treeBoxModelType == TreeBoxModelType.OBJECTS) {
            jQueryPluginCall.append("\n template: '" + DEFAULT_SINGLE_LINE_OF_TEXT_USING_TOSTRING_TEMPLATE + "',");
        }
        jQueryPluginCall.append("\n spinnerTemplate: '<div class=\"tr-default-spinner\"><div class=\"spinner\"></div><div>" + spinnerText + "</div></div>',");
        jQueryPluginCall.append("\n noEntriesTemplate: '<div class=\"tr-default-no-data-display\"><div>" + noMatchingText + "</div></div>',");
        jQueryPluginCall.append("\n entries: entries_" + treeBox.getClientId().replace(":", "_"));
        jQueryPluginCall.append("});");
        return jQueryPluginCall.toString();
    }

    // Looks up the cached-node index of the component's current value:
    // STRINGS match by title, OBJECTS by node data, otherwise by node identity.
    private Integer findValueInCachedNodes(final Object treeBoxValue, final TreeBoxModelType treeBoxModelType, final Map<Integer, Node> nodesMap) {
        if (treeBoxModelType == TreeBoxModelType.STRINGS && treeBoxValue instanceof String) {
            for (Integer index : nodesMap.keySet()) {
                final Node node = nodesMap.get(index);
                if (treeBoxValue.equals(node.getTitle())) {
                    return index;
                }
            }
        } else if (treeBoxModelType == TreeBoxModelType.OBJECTS && treeBoxValue != null) {
            for (Integer index : nodesMap.keySet()) {
                final Node node = nodesMap.get(index);
                if (treeBoxValue.equals(node.getData())) {
                    return index;
                }
            }
        } else if (treeBoxValue != null) {
            for (Integer index : nodesMap.keySet()) {
                final Node node = nodesMap.get(index);
                if (treeBoxValue.equals(node)) {
                    return index;
                }
            }
        }
        return null;
    }
}
package com.yahoo.vespa.hosted.controller.deployment;
import com.yahoo.vespa.hosted.controller.Controller;
import com.yahoo.vespa.hosted.controller.api.integration.BuildService;
import com.yahoo.vespa.hosted.controller.maintenance.JobControl;
import com.yahoo.vespa.hosted.controller.maintenance.Maintainer;
import java.time.Duration;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Triggers deployment jobs in an external BuildService.
*
* Triggering is performed by an Executor, as there is no guarantee the BuildService provides a timely response.
*
* @author jvenstad
*/
public class DeploymentJobExecutor extends Maintainer {
private static final Logger log = Logger.getLogger(DeploymentJobExecutor.class.getName());
private static final int triggeringRetries = 5;
private final BuildService buildService;
private final Executor executor;
public DeploymentJobExecutor(Controller controller, Duration triggeringInterval, JobControl jobControl, BuildService buildService) {
this(controller, triggeringInterval, jobControl, buildService, Executors.newFixedThreadPool(20));
}
DeploymentJobExecutor(Controller controller, Duration triggeringInterval, JobControl jobControl,
BuildService buildService, Executor executor) {
super(controller, triggeringInterval, jobControl);
this.buildService = buildService;
this.executor = executor;
}
@Override
protected void maintain() {
controller().applications().deploymentTrigger().deploymentQueue().takeJobsToRun()
.forEach(buildJob -> executor.execute(() -> {
log.log(Level.INFO, "Attempting to trigger " + buildJob + " in Screwdriver.");
for (int i = 0; i < triggeringRetries; i++)
if (buildService.trigger(buildJob))
return;
log.log(Level.WARNING, "Exhausted all " + triggeringRetries + " retries for " + buildJob + " without success.");
}));
}
} |
package com.yahoo.vespa.hosted.controller.deployment;
import com.yahoo.component.Version;
import com.yahoo.config.provision.InstanceName;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.zone.RoutingMethod;
import com.yahoo.config.provision.zone.ZoneId;
import com.yahoo.vespa.flags.PermanentFlags;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.ApplicationVersion;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType;
import com.yahoo.vespa.hosted.controller.api.integration.deployment.RunId;
import com.yahoo.vespa.hosted.controller.application.pkg.ApplicationPackage;
import com.yahoo.vespa.hosted.controller.application.Change;
import com.yahoo.vespa.hosted.controller.versions.VespaVersion;
import org.junit.Assert;
import org.junit.Test;
import java.time.Duration;
import java.time.Instant;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.stream.Collectors;
import static com.yahoo.config.provision.SystemName.main;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionApNortheast1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionApNortheast2;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionApSoutheast1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionAwsUsEast1a;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionCdAwsUsEast1a;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionCdUsEast1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionEuWest1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionUsCentral1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionUsEast3;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionUsWest1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.stagingTest;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.systemTest;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.testApNortheast1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.testApNortheast2;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.testAwsUsEast1a;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.testEuWest1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.testUsCentral1;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.testUsEast3;
import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.testUsWest1;
import static com.yahoo.vespa.hosted.controller.deployment.DeploymentTrigger.ChangesToCancel.ALL;
import static com.yahoo.vespa.hosted.controller.deployment.DeploymentTrigger.ChangesToCancel.PLATFORM;
import static java.util.Collections.emptyList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
/**
* Tests a wide variety of deployment scenarios and configurations
*
* @author bratseth
* @author mpolden
* @author jonmv
*/
public class DeploymentTriggerTest {
private final DeploymentTester tester = new DeploymentTester();
@Test
public void testTriggerFailing() {
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.upgradePolicy("default")
.region("us-west-1")
.build();
// Deploy completely once
var app = tester.newDeploymentContext().submit(applicationPackage).deploy();
// New version is released
Version version = Version.fromString("6.3");
tester.controllerTester().upgradeSystem(version);
tester.upgrader().maintain();
// staging-test fails deployment and is retried
app.failDeployment(stagingTest);
tester.triggerJobs();
assertEquals("Retried dead job", 2, tester.jobs().active().size());
app.assertRunning(stagingTest);
app.runJob(stagingTest);
// system-test is now the only running job -- production jobs haven't started yet, since it is unfinished.
app.assertRunning(systemTest);
assertEquals(1, tester.jobs().active().size());
// system-test fails and is retried
app.timeOutUpgrade(systemTest);
tester.triggerJobs();
assertEquals("Job is retried on failure", 1, tester.jobs().active().size());
app.runJob(systemTest);
tester.triggerJobs();
app.assertRunning(productionUsWest1);
// production-us-west-1 fails, but the app loses its projectId, and the job isn't retried.
tester.applications().lockApplicationOrThrow(app.application().id(), locked ->
tester.applications().store(locked.withProjectId(OptionalLong.empty())));
app.timeOutConvergence(productionUsWest1);
tester.triggerJobs();
assertEquals("Job is not triggered when no projectId is present", 0, tester.jobs().active().size());
}
@Test
public void revisionChangeWhenFailingMakesApplicationChangeWaitForPreviousToComplete() {
DeploymentContext app = tester.newDeploymentContext();
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.revisionChange(null) // separate by default, but we override this in test builder
.region("us-east-3")
.test("us-east-3")
.build();
app.submit(applicationPackage).runJob(systemTest).runJob(stagingTest).runJob(productionUsEast3);
Optional<ApplicationVersion> v0 = app.lastSubmission();
app.submit(applicationPackage);
Optional<ApplicationVersion> v1 = app.lastSubmission();
assertEquals(v0, app.instance().change().application());
// Eager tests still run before new revision rolls out.
app.runJob(systemTest).runJob(stagingTest);
// v0 rolls out completely.
app.runJob(testUsEast3);
assertEquals(Optional.empty(), app.instance().change().application());
// v1 starts rolling when v0 is done.
tester.outstandingChangeDeployer().run();
assertEquals(v1, app.instance().change().application());
// v1 fails, so v2 starts immediately.
app.runJob(productionUsEast3).failDeployment(testUsEast3);
app.submit(applicationPackage);
Optional<ApplicationVersion> v2 = app.lastSubmission();
assertEquals(v2, app.instance().change().application());
}
@Test
public void leadingUpgradeAllowsApplicationChangeWhileUpgrading() {
var applicationPackage = new ApplicationPackageBuilder().region("us-east-3")
.upgradeRollout("leading")
.build();
var app = tester.newDeploymentContext();
app.submit(applicationPackage).deploy();
Change upgrade = Change.of(new Version("7.8.9"));
tester.controllerTester().upgradeSystem(upgrade.platform().get());
tester.upgrader().maintain();
app.runJob(systemTest).runJob(stagingTest);
tester.triggerJobs();
app.assertRunning(productionUsEast3);
assertEquals(upgrade, app.instance().change());
app.submit(applicationPackage);
assertEquals(upgrade.with(app.lastSubmission().get()), app.instance().change());
}
    /**
     * A running production job is aborted when it blocks a newer submission, but a run
     * explicitly re-triggered for the outdated versions, and an upgrade rolling ahead of
     * a revision, are left to finish.
     */
    @Test
    public void abortsJobsOnNewApplicationChange() {
        var app = tester.newDeploymentContext();
        app.submit()
           .runJob(systemTest)
           .runJob(stagingTest);
        tester.triggerJobs();
        RunId id = tester.jobs().last(app.instanceId(), productionUsCentral1).get().id();
        assertTrue(tester.jobs().active(id).isPresent());

        // A new submission alone does not abort the active production run.
        app.submit();
        assertTrue(tester.jobs().active(id).isPresent());

        tester.triggerJobs();
        tester.runner().run();
        assertTrue(tester.jobs().active(id).isPresent()); // old run

        app.runJob(systemTest).runJob(stagingTest).runJob(stagingTest); // outdated run is aborted when otherwise blocking a new run
        tester.triggerJobs();
        app.jobAborted(productionUsCentral1);
        Versions outdated = tester.jobs().last(app.instanceId(), productionUsCentral1).get().versions();

        // Flesh bag re-triggers job, and _that_ is not aborted
        tester.deploymentTrigger().reTrigger(app.instanceId(), productionUsCentral1, "flesh bag");
        tester.triggerJobs();
        app.runJob(productionUsCentral1);
        Versions reTriggered = tester.jobs().last(app.instanceId(), productionUsCentral1).get().versions();
        // The re-triggered run keeps the outdated target versions.
        assertEquals(outdated, reTriggered);

        app.runJob(productionUsCentral1).runJob(productionUsWest1).runJob(productionUsEast3);
        assertEquals(Change.empty(), app.instance().change());

        // A platform upgrade begins rolling out.
        tester.controllerTester().upgradeSystem(new Version("8.9"));
        tester.upgrader().maintain();
        app.runJob(systemTest).runJob(stagingTest);
        tester.clock().advance(Duration.ofMinutes(1));
        tester.triggerJobs();

        // Upgrade is allowed to proceed ahead of revision change, and is not aborted.
        app.submit();
        app.runJob(systemTest).runJob(stagingTest);
        tester.triggerJobs();
        tester.runner().run();
        assertEquals(EnumSet.of(productionUsCentral1), tester.jobs().active().stream()
                                                             .map(run -> run.id().type())
                                                             .collect(Collectors.toCollection(() -> EnumSet.noneOf(JobType.class))));
    }
    /**
     * Re-submitting a package only re-targets the current revision when the new deployment
     * spec actually adds jobs; otherwise no new rollout is started.
     */
    @Test
    public void similarDeploymentSpecsAreNotRolledOut() {
        ApplicationPackage firstPackage = new ApplicationPackageBuilder()
                .region("us-east-3")
                .build();

        // NOTE(review): the second submit argument (5417) appears to be a project id,
        // deliberately kept identical across submissions — confirm against DeploymentContext.
        DeploymentContext app = tester.newDeploymentContext().submit(firstPackage, 5417);
        var version = app.lastSubmission();
        assertEquals(version, app.instance().change().application());
        app.runJob(systemTest)
           .runJob(stagingTest)
           .runJob(productionUsEast3);
        assertEquals(Change.empty(), app.instance().change());

        // A similar application package is submitted. Since a new job is added, the original revision is again a target.
        ApplicationPackage secondPackage = new ApplicationPackageBuilder()
                .systemTest()
                .stagingTest()
                .region("us-east-3")
                .delay(Duration.ofHours(1))
                .test("us-east-3")
                .build();

        app.submit(secondPackage, 5417);
        app.triggerJobs();
        assertEquals(List.of(), tester.jobs().active());
        assertEquals(version, app.instance().change().application());

        // After the declared delay, only the newly added production test needs to run.
        tester.clock().advance(Duration.ofHours(1));
        app.runJob(testUsEast3);
        assertEquals(List.of(), tester.jobs().active());
        assertEquals(Change.empty(), app.instance().change());

        // The original application package is submitted again. No new jobs are added, so no change needs to roll out now.
        app.submit(firstPackage, 5417);
        app.triggerJobs();
        assertEquals(List.of(), tester.jobs().active());
        assertEquals(Change.empty(), app.instance().change());
    }
    /**
     * With revision target "next" and revision change "when-failing", queued revisions roll
     * out one at a time: the next queued revision becomes the target when the current one
     * completes, fails, or is replaced by user action.
     */
    @Test
    public void testOutstandingChangeWithNextRevisionTarget() {
        ApplicationPackage appPackage = new ApplicationPackageBuilder().revisionTarget("next")
                                                                       .revisionChange("when-failing")
                                                                       .region("us-east-3")
                                                                       .build();
        DeploymentContext app = tester.newDeploymentContext()
                                      .submit(appPackage);
        Optional<ApplicationVersion> revision1 = app.lastSubmission();
        app.submit(appPackage);
        Optional<ApplicationVersion> revision2 = app.lastSubmission();
        app.submit(appPackage);
        Optional<ApplicationVersion> revision3 = app.lastSubmission();
        app.submit(appPackage);
        Optional<ApplicationVersion> revision4 = app.lastSubmission();
        app.submit(appPackage);
        Optional<ApplicationVersion> revision5 = app.lastSubmission();

        // 5 revisions submitted; the first is rolling out, and the others are queued.
        tester.outstandingChangeDeployer().run();
        assertEquals(revision1, app.instance().change().application());
        assertEquals(revision2, app.deploymentStatus().outstandingChange(InstanceName.defaultName()).application());

        // The second revision is set as the target by user interaction.
        tester.deploymentTrigger().forceChange(app.instanceId(), Change.of(revision2.get()));
        tester.outstandingChangeDeployer().run();
        assertEquals(revision2, app.instance().change().application());
        assertEquals(revision3, app.deploymentStatus().outstandingChange(InstanceName.defaultName()).application());

        // The second revision deploys completely, and the third starts rolling out.
        app.runJob(systemTest).runJob(stagingTest)
           .runJob(productionUsEast3);
        tester.outstandingChangeDeployer().run();
        tester.outstandingChangeDeployer().run();
        assertEquals(revision3, app.instance().change().application());
        assertEquals(revision4, app.deploymentStatus().outstandingChange(InstanceName.defaultName()).application());

        // The third revision fails, and the fourth is chosen to replace it.
        app.triggerJobs().timeOutConvergence(systemTest);
        tester.outstandingChangeDeployer().run();
        tester.outstandingChangeDeployer().run();
        assertEquals(revision4, app.instance().change().application());
        assertEquals(revision5, app.deploymentStatus().outstandingChange(InstanceName.defaultName()).application());

        // Tests for outstanding change are relevant when current revision completes.
        app.runJob(systemTest).runJob(systemTest)
           .jobAborted(stagingTest).runJob(stagingTest).runJob(stagingTest)
           .runJob(productionUsEast3);
        tester.outstandingChangeDeployer().run();
        tester.outstandingChangeDeployer().run();
        assertEquals(revision5, app.instance().change().application());
        assertEquals(Change.empty(), app.deploymentStatus().outstandingChange(InstanceName.defaultName()));
        app.runJob(productionUsEast3);
    }
    /**
     * Declared delays in the deployment spec are honored: a step triggers only after the
     * summed delay has elapsed since the previous step completed, and delays after the
     * last region have no effect.
     */
    @Test
    public void deploymentSpecWithDelays() {
        ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
                .systemTest()
                .delay(Duration.ofSeconds(30))
                .region("us-west-1")
                .delay(Duration.ofMinutes(2))
                .delay(Duration.ofMinutes(2)) // Multiple delays are summed up
                .region("us-central-1")
                .delay(Duration.ofMinutes(10)) // Delays after last region are valid, but have no effect
                .build();
        var app = tester.newDeploymentContext().submit(applicationPackage);

        // Test jobs pass
        app.runJob(systemTest);
        tester.clock().advance(Duration.ofSeconds(15));
        app.runJob(stagingTest);
        tester.triggerJobs();

        // No jobs have started yet, as 30 seconds have not yet passed.
        assertEquals(0, tester.jobs().active().size());
        tester.clock().advance(Duration.ofSeconds(15));
        tester.triggerJobs();

        // 30 seconds after the declared test, jobs may begin. The implicit test does not affect the delay.
        assertEquals(1, tester.jobs().active().size());
        app.assertRunning(productionUsWest1);

        // 3 minutes pass, delayed trigger does nothing as us-west-1 is still in progress
        tester.clock().advance(Duration.ofMinutes(3));
        tester.triggerJobs();
        assertEquals(1, tester.jobs().active().size());
        app.assertRunning(productionUsWest1);

        // us-west-1 completes
        app.runJob(productionUsWest1);

        // Delayed trigger does nothing as not enough time has passed after us-west-1 completion
        tester.triggerJobs();
        assertTrue("No more jobs triggered at this time", tester.jobs().active().isEmpty());

        // 3 minutes pass, us-central-1 is still not triggered (the two 2-minute delays sum to 4 minutes)
        tester.clock().advance(Duration.ofMinutes(3));
        tester.triggerJobs();
        assertTrue("No more jobs triggered at this time", tester.jobs().active().isEmpty());

        // 4 minutes pass, us-central-1 is triggered
        tester.clock().advance(Duration.ofMinutes(1));
        tester.triggerJobs();
        app.runJob(productionUsCentral1);
        assertTrue("All jobs consumed", tester.jobs().active().isEmpty());

        // Delayed trigger job runs again, with nothing to trigger
        tester.clock().advance(Duration.ofMinutes(10));
        tester.triggerJobs();
        assertTrue("All jobs consumed", tester.jobs().active().isEmpty());
    }
@Test
public void deploymentSpecWithParallelDeployments() {
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.region("us-central-1")
.parallel("us-west-1", "us-east-3")
.region("eu-west-1")
.build();
var app = tester.newDeploymentContext().submit(applicationPackage);
// Test jobs pass
app.runJob(systemTest).runJob(stagingTest);
// Deploys in first region
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app.runJob(productionUsCentral1);
// Deploys in two regions in parallel
tester.triggerJobs();
assertEquals(2, tester.jobs().active().size());
app.assertRunning(productionUsEast3);
app.assertRunning(productionUsWest1);
app.runJob(productionUsWest1);
assertEquals(1, tester.jobs().active().size());
app.assertRunning(productionUsEast3);
app.runJob(productionUsEast3);
// Last region completes
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app.runJob(productionEuWest1);
assertTrue("All jobs consumed", tester.jobs().active().isEmpty());
}
@Test
public void testNoOtherChangesDuringSuspension() {
// Application is deployed in 3 regions:
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.region("us-central-1")
.parallel("us-west-1", "us-east-3")
.build();
var application = tester.newDeploymentContext().submit().deploy();
// The first production zone is suspended:
tester.configServer().setSuspension(application.deploymentIdIn(ZoneId.from("prod", "us-central-1")), true);
// A new change needs to be pushed out, but should not go beyond the suspended zone:
application.submit()
.runJob(systemTest)
.runJob(stagingTest)
.runJob(productionUsCentral1);
tester.triggerJobs();
application.assertNotRunning(productionUsEast3);
application.assertNotRunning(productionUsWest1);
// The zone is unsuspended so jobs start:
tester.configServer().setSuspension(application.deploymentIdIn(ZoneId.from("prod", "us-central-1")), false);
tester.triggerJobs();
application.runJob(productionUsWest1).runJob(productionUsEast3);
assertEquals(Change.empty(), application.instance().change());
}
    /**
     * A revision submitted inside a revision-block window stays outstanding (its tests may
     * still run) and only becomes the target once the window has passed.
     */
    @Test
    public void testBlockRevisionChange() {
        // Tuesday, 17:30
        tester.at(Instant.parse("2017-09-26T17:30:00.00Z"));
        Version version = Version.fromString("6.2");
        tester.controllerTester().upgradeSystem(version);
        ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
                .upgradePolicy("canary")
                // Block application version changes on tuesday in hours 18 and 19
                .blockChange(true, false, "tue", "18-19", "UTC")
                .region("us-west-1")
                .region("us-central-1")
                .region("us-east-3")
                .build();
        var app = tester.newDeploymentContext().submit(applicationPackage).deploy();
        tester.clock().advance(Duration.ofHours(1)); // now 18:30 — inside the block window
        tester.triggerJobs();
        assertEquals(0, tester.jobs().active().size());

        // A new revision is submitted during the window and remains outstanding.
        app.submit(applicationPackage);
        assertTrue(app.deploymentStatus().outstandingChange(app.instance().name()).hasTargets());
        app.runJob(systemTest).runJob(stagingTest);
        tester.outstandingChangeDeployer().run();
        assertTrue(app.deploymentStatus().outstandingChange(app.instance().name()).hasTargets());
        tester.triggerJobs();
        assertEquals(emptyList(), tester.jobs().active());

        tester.clock().advance(Duration.ofHours(2)); // now 20:30 — past the block window
        tester.outstandingChangeDeployer().run();
        assertFalse(app.deploymentStatus().outstandingChange(app.instance().name()).hasTargets());
        tester.triggerJobs(); // Tests already run for the blocked production job.
        app.assertRunning(productionUsWest1);
    }
@Test
public void testCompletionOfPartOfChangeDuringBlockWindow() {
// Tuesday, 17:30
tester.at(Instant.parse("2017-09-26T17:30:00.00Z"));
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.blockChange(true, true, "tue", "18", "UTC")
.region("us-west-1")
.region("us-east-3")
.build();
var app = tester.newDeploymentContext().submit(applicationPackage).deploy();
// Application on (6.1, 1.0.1)
Version v1 = Version.fromString("6.1");
// Application is mid-upgrade when block window begins, and gets an outstanding change.
Version v2 = Version.fromString("6.2");
tester.controllerTester().upgradeSystem(v2);
tester.upgrader().maintain();
app.runJob(stagingTest).runJob(systemTest);
// Entering block window will keep the outstanding change in place.
tester.clock().advance(Duration.ofHours(1));
app.submit(applicationPackage);
app.runJob(productionUsWest1);
assertEquals(1, app.instanceJobs().get(productionUsWest1).lastSuccess().get().versions().targetApplication().buildNumber().getAsLong());
assertEquals(2, app.deploymentStatus().outstandingChange(app.instance().name()).application().get().buildNumber().getAsLong());
tester.triggerJobs();
// Platform upgrade keeps rolling, since it began before block window, and tests for the new revision have also started.
assertEquals(3, tester.jobs().active().size());
app.runJob(productionUsEast3);
assertEquals(2, tester.jobs().active().size());
// Upgrade is done, and outstanding change rolls out when block window ends.
assertEquals(Change.empty(), app.instance().change());
assertTrue(app.deploymentStatus().outstandingChange(app.instance().name()).hasTargets());
app.runJob(stagingTest).runJob(systemTest);
tester.clock().advance(Duration.ofHours(1));
tester.outstandingChangeDeployer().run();
assertTrue(app.instance().change().hasTargets());
assertFalse(app.deploymentStatus().outstandingChange(app.instance().name()).hasTargets());
app.runJob(productionUsWest1).runJob(productionUsEast3);
assertFalse(app.instance().change().hasTargets());
}
    /**
     * Paused jobs do not trigger (or retry) until the pause expires, but a forced trigger
     * runs the job and clears its pause.
     */
    @Test
    public void testJobPause() {
        ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
                .region("us-west-1")
                .region("us-east-3")
                .build();
        var app = tester.newDeploymentContext().submit(applicationPackage).deploy();
        tester.controllerTester().upgradeSystem(new Version("9.8.7"));
        tester.upgrader().maintain();

        // Pause us-west-1 for 1 second and us-east-3 for 3 seconds of (fake) clock time.
        tester.deploymentTrigger().pauseJob(app.instanceId(), productionUsWest1,
                                            tester.clock().instant().plus(Duration.ofSeconds(1)));
        tester.deploymentTrigger().pauseJob(app.instanceId(), productionUsEast3,
                                            tester.clock().instant().plus(Duration.ofSeconds(3)));

        // us-west-1 does not trigger when paused.
        app.runJob(systemTest).runJob(stagingTest);
        tester.triggerJobs();
        app.assertNotRunning(productionUsWest1);

        // us-west-1 triggers when no longer paused, but does not retry when paused again.
        tester.clock().advance(Duration.ofMillis(1500)); // past the 1 s pause
        tester.triggerJobs();
        app.assertRunning(productionUsWest1);
        tester.deploymentTrigger().pauseJob(app.instanceId(), productionUsWest1, tester.clock().instant().plus(Duration.ofSeconds(1)));
        app.failDeployment(productionUsWest1);
        tester.triggerJobs();
        app.assertNotRunning(productionUsWest1);
        tester.clock().advance(Duration.ofMillis(1000)); // new pause expires
        tester.triggerJobs();
        app.runJob(productionUsWest1);

        // us-east-3 does not automatically trigger when paused, but does when forced.
        tester.triggerJobs();
        app.assertNotRunning(productionUsEast3);
        tester.deploymentTrigger().forceTrigger(app.instanceId(), productionUsEast3, "mrTrigger", true, true, false);
        app.assertRunning(productionUsEast3);
        // Forcing the trigger also clears the job's pause.
        assertFalse(app.instance().jobPause(productionUsEast3).isPresent());
        assertEquals(app.deployment(productionUsEast3.zone(tester.controller().system())).version(),
                     tester.jobs().last(app.instanceId(), productionUsEast3).get().versions().targetPlatform());
    }
    /**
     * When a change is cancelled after a partially successful rollout, a later platform
     * upgrade keeps deploying the application version already present in each zone, so the
     * application version is never downgraded.
     */
    @Test
    public void applicationVersionIsNotDowngraded() {
        ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
                .region("us-central-1")
                .region("eu-west-1")
                .build();
        var app = tester.newDeploymentContext().submit(applicationPackage).deploy();

        // productionUsCentral1 fails after deployment, causing a mismatch between deployed and successful state.
        app.submit(applicationPackage)
           .runJob(systemTest)
           .runJob(stagingTest)
           .timeOutUpgrade(productionUsCentral1);
        ApplicationVersion appVersion1 = app.lastSubmission().get();
        assertEquals(appVersion1, app.deployment(ZoneId.from("prod.us-central-1")).applicationVersion());

        // Verify the application change is not removed when platform change is cancelled.
        tester.deploymentTrigger().cancelChange(app.instanceId(), PLATFORM);
        assertEquals(Change.of(appVersion1), app.instance().change());

        // Now cancel the change as is done through the web API.
        tester.deploymentTrigger().cancelChange(app.instanceId(), ALL);
        assertEquals(Change.empty(), app.instance().change());

        // A new version is released, which should now deploy the currently deployed application version to avoid downgrades.
        Version version1 = new Version("6.2");
        tester.controllerTester().upgradeSystem(version1);
        tester.upgrader().maintain();
        app.runJob(systemTest).runJob(stagingTest).failDeployment(productionUsCentral1);

        // The last job has a different target, and the tests need to run again.
        // These may now start, since the first job has been triggered once, and thus is verified already.
        app.runJob(systemTest).runJob(stagingTest);

        // Finally, the two production jobs complete, in order.
        app.runJob(productionUsCentral1).runJob(productionEuWest1);
        assertEquals(appVersion1, app.deployment(ZoneId.from("prod.us-central-1")).applicationVersion());
    }
@Test
public void downgradingApplicationVersionWorks() {
var app = tester.newDeploymentContext().submit().deploy();
ApplicationVersion appVersion0 = app.lastSubmission().get();
assertEquals(Optional.of(appVersion0), app.instance().latestDeployed());
app.submit().deploy();
ApplicationVersion appVersion1 = app.lastSubmission().get();
assertEquals(Optional.of(appVersion1), app.instance().latestDeployed());
// Downgrading application version.
tester.deploymentTrigger().forceChange(app.instanceId(), Change.of(appVersion0));
assertEquals(Change.of(appVersion0), app.instance().change());
app.runJob(stagingTest)
.runJob(productionUsCentral1)
.runJob(productionUsEast3)
.runJob(productionUsWest1);
assertEquals(Change.empty(), app.instance().change());
assertEquals(appVersion0, app.instance().deployments().get(productionUsEast3.zone(tester.controller().system())).applicationVersion());
assertEquals(Optional.of(appVersion0), app.instance().latestDeployed());
}
@Test
public void settingANoOpChangeIsANoOp() {
var app = tester.newDeploymentContext().submit();
assertEquals(Optional.empty(), app.instance().latestDeployed());
app.deploy();
ApplicationVersion appVersion0 = app.lastSubmission().get();
assertEquals(Optional.of(appVersion0), app.instance().latestDeployed());
app.submit().deploy();
ApplicationVersion appVersion1 = app.lastSubmission().get();
assertEquals(Optional.of(appVersion1), app.instance().latestDeployed());
// Triggering a roll-out of an already deployed application is a no-op.
assertEquals(Change.empty(), app.instance().change());
tester.deploymentTrigger().forceChange(app.instanceId(), Change.of(appVersion1));
assertEquals(Change.empty(), app.instance().change());
assertEquals(Optional.of(appVersion1), app.instance().latestDeployed());
}
    /**
     * A production step is considered complete exactly when its deployed versions dominate
     * the current change: a strictly dominated zone is skipped (even while failing), while a
     * dual (platform + revision) change re-triggers it until both parts are deployed.
     */
    @Test
    public void stepIsCompletePreciselyWhenItShouldBe() {
        var app1 = tester.newDeploymentContext("tenant1", "app1", "default");
        var app2 = tester.newDeploymentContext("tenant1", "app2", "default");
        ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
                .region("us-central-1")
                .region("eu-west-1")
                .build();

        // System upgrades to version0 and applications deploy on that version
        Version version0 = Version.fromString("7.0");
        tester.controllerTester().upgradeSystem(version0);
        app1.submit(applicationPackage).deploy();
        app2.submit(applicationPackage).deploy();

        // version1 is released and application1 skips upgrading to that version
        Version version1 = Version.fromString("7.1");
        tester.controllerTester().upgradeSystem(version1);
        tester.upgrader().maintain();

        // Deploy application2 to keep this version present in the system
        app2.deployPlatform(version1);
        tester.deploymentTrigger().cancelChange(app1.instanceId(), ALL);

        // version2 is released and application1 starts upgrading
        Version version2 = Version.fromString("7.2");
        tester.controllerTester().upgradeSystem(version2);
        tester.upgrader().maintain();
        tester.triggerJobs();
        app1.jobAborted(systemTest).jobAborted(stagingTest);
        app1.runJob(systemTest).runJob(stagingTest).timeOutConvergence(productionUsCentral1);
        assertEquals(version2, app1.deployment(productionUsCentral1.zone(main)).version());
        // Remember when us-central-1 was last triggered, to detect later re-triggering.
        Instant triggered = app1.instanceJobs().get(productionUsCentral1).lastTriggered().get().start();
        tester.clock().advance(Duration.ofHours(1));

        // version2 becomes broken and upgrade targets latest non-broken
        tester.upgrader().overrideConfidence(version2, VespaVersion.Confidence.broken);
        tester.controllerTester().computeVersionStatus();
        tester.upgrader().maintain(); // Cancel upgrades to broken version
        assertEquals("Change becomes latest non-broken version", Change.of(version1), app1.instance().change());

        // version1 proceeds 'til the last job, where it fails; us-central-1 is skipped, as current change is strictly dominated by what's deployed there.
        app1.runJob(systemTest).runJob(stagingTest)
            .failDeployment(productionEuWest1);
        assertEquals(triggered, app1.instanceJobs().get(productionUsCentral1).lastTriggered().get().start());

        // Roll out a new application version, which gives a dual change -- this should trigger us-central-1, but only as long as it hasn't yet deployed there.
        ApplicationVersion revision1 = app1.lastSubmission().get();
        app1.submit(applicationPackage);
        ApplicationVersion revision2 = app1.lastSubmission().get();
        app1.runJob(systemTest)   // Tests for new revision on version2
            .runJob(stagingTest)
            .runJob(systemTest)   // Tests for new revision on version1
            .runJob(stagingTest);
        assertEquals(Change.of(version1).with(revision2), app1.instance().change());
        tester.triggerJobs();
        app1.assertRunning(productionUsCentral1);
        assertEquals(version2, app1.instance().deployments().get(productionUsCentral1.zone(main)).version());
        assertEquals(revision1, app1.deployment(productionUsCentral1.zone(main)).applicationVersion());
        assertTrue(triggered.isBefore(app1.instanceJobs().get(productionUsCentral1).lastTriggered().get().start()));

        // Change has a higher application version than what is deployed -- deployment should trigger.
        app1.timeOutUpgrade(productionUsCentral1);
        assertEquals(version2, app1.deployment(productionUsCentral1.zone(main)).version());
        assertEquals(revision2, app1.deployment(productionUsCentral1.zone(main)).applicationVersion());

        // Change is again strictly dominated, and us-central-1 is skipped, even though it is still failing.
        tester.clock().advance(Duration.ofHours(3)); // Enough time for retry
        tester.triggerJobs();
        // Failing job is not retried as change has been deployed
        app1.assertNotRunning(productionUsCentral1);

        // Last job has a different deployment target, so tests need to run again.
        app1.runJob(productionEuWest1)      // Upgrade completes, and revision is the only change.
            .runJob(productionUsCentral1)   // With only revision change, central should run to cover a previous failure.
            .runJob(productionEuWest1);     // Finally, west changes revision.
        assertEquals(Change.empty(), app1.instance().change());
        assertEquals(Optional.of(RunStatus.success), app1.instanceJobs().get(productionUsCentral1).lastStatus());
    }
// When two parallel prod zones end up on different platform versions (the upgrade was
// cancelled mid-way), a new revision must have system and staging tests run against
// EACH of the two deployed platform versions before production jobs may proceed.
@Test
public void eachParallelDeployTargetIsTested() {
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .parallel("eu-west-1", "us-east-3")
            .build();
    // Application version 1 and platform version 6.1.
    var app = tester.newDeploymentContext().submit(applicationPackage).deploy();

    // Success in first prod zone, change cancelled between triggering and completion of eu west job.
    // One of the parallel zones get a deployment, but both fail their jobs.
    Version v1 = new Version("6.1");
    Version v2 = new Version("6.2");
    tester.controllerTester().upgradeSystem(v2);
    tester.upgrader().maintain();
    app.runJob(systemTest).runJob(stagingTest);
    app.timeOutConvergence(productionEuWest1);
    tester.deploymentTrigger().cancelChange(app.instanceId(), PLATFORM);
    // Zones now diverge: eu-west-1 got the new platform, us-east-3 kept the old one.
    assertEquals(v2, app.deployment(productionEuWest1.zone(main)).version());
    assertEquals(v1, app.deployment(productionUsEast3.zone(main)).version());

    // New application version should run system and staging tests against both 6.1 and 6.2, in no particular order.
    app.submit(applicationPackage);
    tester.triggerJobs();
    Version firstTested = app.instanceJobs().get(systemTest).lastTriggered().get().versions().targetPlatform();
    assertEquals(firstTested, app.instanceJobs().get(stagingTest).lastTriggered().get().versions().targetPlatform());

    app.runJob(systemTest).runJob(stagingTest);

    // Test jobs for next production zone can start and run immediately.
    tester.triggerJobs();
    // The second round of tests must target the other platform version.
    assertNotEquals(firstTested, app.instanceJobs().get(systemTest).lastTriggered().get().versions().targetPlatform());
    assertNotEquals(firstTested, app.instanceJobs().get(stagingTest).lastTriggered().get().versions().targetPlatform());
    app.runJob(systemTest).runJob(stagingTest);

    // Finish old run of the aborted production job.
    app.triggerJobs().jobAborted(productionUsEast3);

    // New upgrade is already tested for both jobs.

    // Both jobs fail again, and must be re-triggered -- this is ok, as they are both already triggered on their current targets.
    app.failDeployment(productionEuWest1).failDeployment(productionUsEast3)
       .runJob(productionEuWest1).runJob(productionUsEast3);
    assertFalse(app.instance().change().hasTargets());
    // Both zones ended on the second submitted build (build number 2).
    assertEquals(2, app.instanceJobs().get(productionEuWest1).lastSuccess().get().versions().targetApplication().buildNumber().getAsLong());
    assertEquals(2, app.instanceJobs().get(productionUsEast3).lastSuccess().get().versions().targetApplication().buildNumber().getAsLong());
}
// Verifies the retry back-off policy for failing jobs: one immediate retry, then retries
// spaced by 10 minutes plus half the time since the first failure, growing as time passes.
@Test
public void retriesFailingJobs() {
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .region("us-central-1")
            .build();

    // Deploy completely on default application and platform versions
    var app = tester.newDeploymentContext().submit(applicationPackage).deploy();

    // New application change is deployed and fails in system-test for a while
    app.submit(applicationPackage).runJob(stagingTest).failDeployment(systemTest);

    // Retries immediately once
    app.failDeployment(systemTest);
    tester.triggerJobs();
    app.assertRunning(systemTest);

    // Stops immediate retry when next triggering is considered after first failure
    tester.clock().advance(Duration.ofSeconds(1));
    app.failDeployment(systemTest);
    tester.triggerJobs();
    app.assertNotRunning(systemTest);

    // Retries after 10 minutes since previous completion, plus half the time since the first failure
    tester.clock().advance(Duration.ofMinutes(10).plus(Duration.ofSeconds(1)));
    tester.triggerJobs();
    app.assertRunning(systemTest);

    // Retries less frequently as more time passes
    app.failDeployment(systemTest);
    tester.clock().advance(Duration.ofMinutes(15));
    tester.triggerJobs();
    app.assertNotRunning(systemTest);

    // Retries again when sufficient time has passed
    tester.clock().advance(Duration.ofSeconds(2));
    tester.triggerJobs();
    app.assertRunning(systemTest);

    // Still fails and is not retried
    app.failDeployment(systemTest);
    tester.triggerJobs();
    app.assertNotRunning(systemTest);

    // Another application change is deployed and fixes system-test. Change is triggered immediately as target changes
    app.submit(applicationPackage).deploy();
    assertTrue("Deployment completed", tester.jobs().active().isEmpty());
}
// Application changes must deploy on the platform version already in use (and new regions
// get the oldest deployed platform), while explicit platform upgrades move the version.
@Test
public void testPlatformVersionSelection() {
    // Setup system
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .region("us-west-1")
            .build();
    Version version1 = tester.controller().readSystemVersion();
    var app1 = tester.newDeploymentContext();

    // First deployment: An application change
    app1.submit(applicationPackage).deploy();

    assertEquals("First deployment gets system version", version1, app1.application().oldestDeployedPlatform().get());
    assertEquals(version1, tester.configServer().lastPrepareVersion().get());

    // Application change after a new system version, and a region added
    Version version2 = new Version(version1.getMajor(), version1.getMinor() + 1);
    tester.controllerTester().upgradeSystem(version2);

    applicationPackage = new ApplicationPackageBuilder()
            .region("us-west-1")
            .region("us-east-3")
            .build();
    app1.submit(applicationPackage).deploy();
    assertEquals("Application change preserves version, and new region gets oldest version too",
                 version1, app1.application().oldestDeployedPlatform().get());
    assertEquals(version1, tester.configServer().lastPrepareVersion().get());
    assertFalse("Change deployed", app1.instance().change().hasTargets());

    // Only an actual platform upgrade raises the deployed platform version.
    tester.upgrader().maintain();
    app1.deployPlatform(version2);

    assertEquals("Version upgrade changes version", version2, app1.application().oldestDeployedPlatform().get());
    assertEquals(version2, tester.configServer().lastPrepareVersion().get());
}
// Verifies trigger ordering for queued staging jobs: only one candidate is triggered per
// maintenance pass, node-allocation-failure retries jump the queue, and applications
// carrying an application upgrade are prioritised over pure platform upgrades.
@Test
public void requeueNodeAllocationFailureStagingJob() {
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .region("us-east-3")
            .build();

    var app1 = tester.newDeploymentContext("tenant1", "app1", "default").submit(applicationPackage);
    var app2 = tester.newDeploymentContext("tenant2", "app2", "default").submit(applicationPackage);
    var app3 = tester.newDeploymentContext("tenant3", "app3", "default").submit(applicationPackage);

    // all applications: system-test completes successfully with some time in between, to determine trigger order.
    app2.runJob(systemTest);
    tester.clock().advance(Duration.ofMinutes(1));
    app1.runJob(systemTest);
    tester.clock().advance(Duration.ofMinutes(1));
    app3.runJob(systemTest);

    // all applications: staging test jobs queued
    tester.triggerJobs();
    assertEquals(3, tester.jobs().active().size());

    // Abort all running jobs, so we have three candidate jobs, of which only one should be triggered at a time.
    tester.abortAll();

    assertEquals(List.of(), tester.jobs().active());

    // Each maintenance pass triggers exactly one of the queued candidates.
    tester.readyJobsTrigger().maintain();
    assertEquals(1, tester.jobs().active().size());

    tester.readyJobsTrigger().maintain();
    assertEquals(2, tester.jobs().active().size());

    tester.readyJobsTrigger().maintain();
    assertEquals(3, tester.jobs().active().size());

    // Remove the jobs for app1 and app2, and then let app3 fail node allocation.
    // All three jobs are now eligible, but the one for app3 should trigger first as a nodeAllocationFailure-retry.
    app3.nodeAllocationFailure(stagingTest);
    app1.abortJob(stagingTest);
    app2.abortJob(stagingTest);

    tester.readyJobsTrigger().maintain();
    app3.assertRunning(stagingTest);
    assertEquals(1, tester.jobs().active().size());

    tester.readyJobsTrigger().maintain();
    assertEquals(2, tester.jobs().active().size());

    tester.readyJobsTrigger().maintain();
    assertEquals(3, tester.jobs().active().size());

    // Finish deployment for apps 2 and 3, then release a new version, leaving only app1 with an application upgrade.
    app2.deploy();
    app3.deploy();
    app1.runJob(stagingTest);
    assertEquals(0, tester.jobs().active().size());

    tester.controllerTester().upgradeSystem(new Version("6.2"));
    tester.upgrader().maintain();
    app1.submit(applicationPackage);

    // Tests for app1 trigger before the others since it carries an application upgrade.
    tester.readyJobsTrigger().run();
    app1.assertRunning(systemTest);
    app1.assertRunning(stagingTest);
    assertEquals(2, tester.jobs().active().size());

    // Let the test jobs start, remove everything except system test for app3, which fails node allocation again.
    tester.triggerJobs();
    app3.nodeAllocationFailure(systemTest);
    app1.abortJob(systemTest);
    app1.abortJob(stagingTest);
    app2.abortJob(systemTest);
    app2.abortJob(stagingTest);
    app3.abortJob(stagingTest);
    assertEquals(0, tester.jobs().active().size());

    // Only app1 still carries an application change; app2 and app3 have plain platform upgrades.
    assertTrue(app1.instance().change().application().isPresent());
    assertFalse(app2.instance().change().application().isPresent());
    assertFalse(app3.instance().change().application().isPresent());

    tester.readyJobsTrigger().maintain();
    app1.assertRunning(stagingTest);
    app3.assertRunning(systemTest);
    assertEquals(2, tester.jobs().active().size());

    tester.readyJobsTrigger().maintain();
    app1.assertRunning(systemTest);
    assertEquals(4, tester.jobs().active().size());

    tester.readyJobsTrigger().maintain();
    app3.assertRunning(stagingTest);
    app2.assertRunning(stagingTest);
    app2.assertRunning(systemTest);
    assertEquals(6, tester.jobs().active().size());
}
/** A manually created instance not declared in the deployment spec must not break submission or rollout. */
@Test
public void testUserInstancesNotInDeploymentSpec() {
    var context = tester.newDeploymentContext();
    // Create an extra "user" instance that the deployment spec does not declare.
    var userInstanceId = context.application().id().instance("user");
    tester.controller().applications().createInstance(userInstanceId);
    context.submit().deploy();
}
@Test
public void testMultipleInstancesWithDifferentChanges() {
DeploymentContext i1 = tester.newDeploymentContext("t", "a", "i1");
DeploymentContext i2 = tester.newDeploymentContext("t", "a", "i2");
DeploymentContext i3 = tester.newDeploymentContext("t", "a", "i3");
DeploymentContext i4 = tester.newDeploymentContext("t", "a", "i4");
ApplicationPackage applicationPackage = ApplicationPackageBuilder
.fromDeploymentXml("<deployment version='1'>\n" +
" <upgrade revision-change='when-failing' />\n" +
" <parallel>\n" +
" <instance id='i1'>\n" +
" <prod>\n" +
" <region>us-east-3</region>\n" +
" <delay hours='6' />\n" +
" </prod>\n" +
" </instance>\n" +
" <instance id='i2'>\n" +
" <prod>\n" +
" <region>us-east-3</region>\n" +
" </prod>\n" +
" </instance>\n" +
" </parallel>\n" +
" <instance id='i3'>\n" +
" <prod>\n" +
" <region>us-east-3</region>\n" +
" <delay hours='18' />\n" +
" <test>us-east-3</test>\n" +
" </prod>\n" +
" </instance>\n" +
" <instance id='i4'>\n" +
" <test />\n" +
" <staging />\n" +
" <prod>\n" +
" <region>us-east-3</region>\n" +
" </prod>\n" +
" </instance>\n" +
"</deployment>\n");
// Package is submitted, and change propagated to the two first instances.
i1.submit(applicationPackage);
Optional<ApplicationVersion> v0 = i1.lastSubmission();
tester.outstandingChangeDeployer().run();
assertEquals(v0, i1.instance().change().application());
assertEquals(v0, i2.instance().change().application());
assertEquals(Optional.empty(), i3.instance().change().application());
assertEquals(Optional.empty(), i4.instance().change().application());
// Tests run in i4, as they're declared there, and i1 and i2 get to work
i4.runJob(systemTest).runJob(stagingTest);
i1.runJob(productionUsEast3);
i2.runJob(productionUsEast3);
// Since the post-deployment delay of i1 is incomplete, i3 doesn't yet get the change.
tester.outstandingChangeDeployer().run();
assertEquals(v0, i1.instance().latestDeployed());
assertEquals(v0, i2.instance().latestDeployed());
assertEquals(Optional.empty(), i1.instance().change().application());
assertEquals(Optional.empty(), i2.instance().change().application());
assertEquals(Optional.empty(), i3.instance().change().application());
assertEquals(Optional.empty(), i4.instance().change().application());
// When the delay is done, i3 gets the change.
tester.clock().advance(Duration.ofHours(6));
tester.outstandingChangeDeployer().run();
assertEquals(Optional.empty(), i1.instance().change().application());
assertEquals(Optional.empty(), i2.instance().change().application());
assertEquals(v0, i3.instance().change().application());
assertEquals(Optional.empty(), i4.instance().change().application());
// v0 begins roll-out in i3, and v1 is submitted and rolls out in i1 and i2 some time later
i3.runJob(productionUsEast3);
tester.clock().advance(Duration.ofHours(12));
i1.submit(applicationPackage);
Optional<ApplicationVersion> v1 = i1.lastSubmission();
i4.runJob(systemTest).runJob(stagingTest);
i1.runJob(productionUsEast3);
i2.runJob(productionUsEast3);
assertEquals(v1, i1.instance().latestDeployed());
assertEquals(v1, i2.instance().latestDeployed());
assertEquals(Optional.empty(), i1.instance().change().application());
assertEquals(Optional.empty(), i2.instance().change().application());
assertEquals(v0, i3.instance().change().application());
assertEquals(Optional.empty(), i4.instance().change().application());
// After some time, v2 also starts rolling out to i1 and i2, but does not complete in i2
tester.clock().advance(Duration.ofHours(3));
i1.submit(applicationPackage);
Optional<ApplicationVersion> v2 = i1.lastSubmission();
i4.runJob(systemTest).runJob(stagingTest);
i1.runJob(productionUsEast3);
tester.clock().advance(Duration.ofHours(3));
// v1 is all done in i1 and i2, but does not yet roll out in i3; v2 is not completely rolled out there yet.
tester.outstandingChangeDeployer().run();
assertEquals(v0, i3.instance().change().application());
// i3 completes v0, which rolls out to i4; v1 is ready for i3, but v2 is not.
i3.runJob(testUsEast3);
assertEquals(Optional.empty(), i3.instance().change().application());
tester.outstandingChangeDeployer().run();
assertEquals(v2, i1.instance().latestDeployed());
assertEquals(v1, i2.instance().latestDeployed());
assertEquals(v0, i3.instance().latestDeployed());
assertEquals(Optional.empty(), i1.instance().change().application());
assertEquals(v2, i2.instance().change().application());
assertEquals(v1, i3.instance().change().application());
assertEquals(v0, i4.instance().change().application());
}
// With 'revision-change=when-clear' in the beta instance, a new revision must wait for
// the rolling platform upgrade; once tested in alpha, the revision joins the upgrade
// ('rollout=simultaneous') and both complete together in beta.
@Test
public void testMultipleInstancesWithRevisionCatchingUpToUpgrade() {
    String spec = "<deployment>\n" +
                  "    <instance id='alpha'>\n" +
                  "        <upgrade rollout=\"simultaneous\" revision-target=\"next\" />\n" +
                  "        <test />\n" +
                  "        <staging />\n" +
                  "    </instance>\n" +
                  "    <instance id='beta'>\n" +
                  "        <upgrade rollout=\"simultaneous\" revision-change=\"when-clear\" revision-target=\"next\" />\n" +
                  "        <prod>\n" +
                  "            <region>us-east-3</region>\n" +
                  "            <test>us-east-3</test>\n" +
                  "        </prod>\n" +
                  "    </instance>\n" +
                  "</deployment>\n";
    ApplicationPackage applicationPackage = ApplicationPackageBuilder.fromDeploymentXml(spec);
    DeploymentContext alpha = tester.newDeploymentContext("t", "a", "alpha");
    DeploymentContext beta = tester.newDeploymentContext("t", "a", "beta");
    alpha.submit(applicationPackage).deploy();
    Optional<ApplicationVersion> revision1 = alpha.lastSubmission();

    Version version1 = new Version("7.1");
    tester.controllerTester().upgradeSystem(version1);
    tester.upgrader().run();
    alpha.runJob(systemTest).runJob(stagingTest);
    assertEquals(Change.empty(), alpha.instance().change());
    assertEquals(Change.empty(), beta.instance().change());

    // Upgrade propagates to beta only after alpha has verified it.
    tester.upgrader().run();
    assertEquals(Change.empty(), alpha.instance().change());
    assertEquals(Change.of(version1), beta.instance().change());

    tester.outstandingChangeDeployer().run();
    beta.triggerJobs();
    tester.runner().run();
    tester.outstandingChangeDeployer().run();
    beta.triggerJobs();
    tester.outstandingChangeDeployer().run();
    beta.assertRunning(productionUsEast3);
    beta.assertNotRunning(testUsEast3);

    alpha.submit(applicationPackage);
    Optional<ApplicationVersion> revision2 = alpha.lastSubmission();
    assertEquals(Change.of(revision2.get()), alpha.instance().change());
    assertEquals(Change.of(version1), beta.instance().change());

    alpha.runJob(systemTest).runJob(stagingTest);
    assertEquals(Change.empty(), alpha.instance().change());
    assertEquals(Change.of(version1), beta.instance().change());

    // With the revision tested in alpha, it may now join the in-flight upgrade in beta.
    tester.outstandingChangeDeployer().run();
    assertEquals(Change.of(version1).with(revision2.get()), beta.instance().change());

    beta.triggerJobs();
    tester.runner().run();
    beta.triggerJobs();

    beta.assertRunning(productionUsEast3);
    beta.assertNotRunning(testUsEast3);

    beta.runJob(productionUsEast3)
        .runJob(testUsEast3);

    assertEquals(Change.empty(), beta.instance().change());
}
/** Both declared instances roll out, each ending with exactly one production deployment. */
@Test
public void testMultipleInstances() {
    // Two instances sharing a single production region.
    ApplicationPackage twoInstancePackage = new ApplicationPackageBuilder()
            .instances("instance1,instance2")
            .region("us-east-3")
            .build();
    var context = tester.newDeploymentContext("tenant1", "application1", "instance1");
    context.submit(twoInstancePackage).completeRollout();
    assertEquals(2, context.application().instances().size());
    int productionDeployments = context.application().productionDeployments().values().stream()
                                       .mapToInt(Collection::size)
                                       .sum();
    assertEquals(2, productionDeployments);
}
// Declared production tests must run after their target zone's deployment, be skipped
// when the change they verify is cancelled, and wait for redeployment when the same
// change is re-attempted.
@Test
public void testDeclaredProductionTests() {
    ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
            .region("us-east-3")
            .delay(Duration.ofMinutes(1))
            .test("us-east-3")
            .region("us-west-1")
            .region("us-central-1")
            .test("us-central-1")
            .test("us-west-1")
            .build();
    var app = tester.newDeploymentContext().submit(applicationPackage);

    app.runJob(systemTest).runJob(stagingTest).runJob(productionUsEast3);
    app.assertNotRunning(productionUsWest1);
    tester.clock().advance(Duration.ofMinutes(1));
    app.runJob(testUsEast3)
       .runJob(productionUsWest1).runJob(productionUsCentral1)
       .runJob(testUsCentral1).runJob(testUsWest1);
    assertEquals(Change.empty(), app.instance().change());

    // Application starts upgrade, but the upgrade is cancelled after the first zone when confidence turns broken. Tests won't run.
    Version version0 = app.application().oldestDeployedPlatform().get();
    Version version1 = Version.fromString("7.7");
    tester.controllerTester().upgradeSystem(version1);
    tester.upgrader().maintain();
    app.runJob(systemTest).runJob(stagingTest).runJob(productionUsEast3);
    tester.clock().advance(Duration.ofMinutes(1));
    app.failDeployment(testUsEast3);
    tester.triggerJobs();
    app.assertRunning(testUsEast3);

    tester.upgrader().overrideConfidence(version1, VespaVersion.Confidence.broken);
    tester.controllerTester().computeVersionStatus();
    tester.upgrader().maintain();
    app.failDeployment(testUsEast3);
    app.assertNotRunning(testUsEast3);
    assertEquals(Change.empty(), app.instance().change());

    // Application is pinned to previous version, and downgrades to that. Tests are re-run.
    tester.deploymentTrigger().triggerChange(app.instanceId(), Change.of(version0).withPin());
    app.runJob(stagingTest).runJob(productionUsEast3);
    tester.clock().advance(Duration.ofMinutes(1));
    app.failDeployment(testUsEast3);
    tester.clock().advance(Duration.ofMinutes(11)); // Job is cooling down after consecutive failures.
    app.runJob(testUsEast3);
    assertEquals(Change.empty().withPin(), app.instance().change());

    // Same upgrade is attempted, and production tests wait for redeployment.
    tester.deploymentTrigger().cancelChange(app.instanceId(), ALL);
    tester.upgrader().overrideConfidence(version1, VespaVersion.Confidence.high);
    tester.controllerTester().computeVersionStatus();
    tester.upgrader().maintain();

    app.triggerJobs();
    app.assertRunning(productionUsEast3);
    app.assertNotRunning(testUsEast3);
    app.runJob(productionUsEast3);
    tester.clock().advance(Duration.ofMinutes(1));
    app.runJob(testUsEast3).runJob(productionUsWest1).triggerJobs();
    app.assertRunning(productionUsCentral1);
    tester.runner().run();
    app.triggerJobs();
    // testUsCentral1 must not trigger before its zone deployment is on the new target.
    app.assertNotRunning(testUsCentral1);
}
@Test
public void testDeployComplicatedDeploymentSpec() {
String complicatedDeploymentSpec =
"<deployment version='1.0' athenz-domain='domain' athenz-service='service'>\n" +
" <parallel>\n" +
" <instance id='instance' athenz-service='in-service'>\n" +
" <staging />\n" +
" <prod>\n" +
" <parallel>\n" +
" <region active='true'>us-west-1</region>\n" +
" <steps>\n" +
" <region active='true'>us-east-3</region>\n" +
" <delay hours='2' />\n" +
" <region active='true'>eu-west-1</region>\n" +
" <delay hours='2' />\n" +
" </steps>\n" +
" <steps>\n" +
" <delay hours='3' />\n" +
" <region active='true'>aws-us-east-1a</region>\n" +
" <parallel>\n" +
" <region active='true' athenz-service='no-service'>ap-northeast-1</region>\n" +
" <region active='true'>ap-northeast-2</region>\n" +
" <test>aws-us-east-1a</test>\n" +
" </parallel>\n" +
" </steps>\n" +
" <delay hours='3' minutes='30' />\n" +
" </parallel>\n" +
" <parallel>\n" +
" <test>ap-northeast-2</test>\n" +
" <test>ap-northeast-1</test>\n" +
" </parallel>\n" +
" <test>us-east-3</test>\n" +
" <region active='true'>ap-southeast-1</region>\n" +
" </prod>\n" +
" <endpoints>\n" +
" <endpoint id='foo' container-id='bar'>\n" +
" <region>us-east-3</region>\n" +
" </endpoint>\n" +
" <endpoint id='nalle' container-id='frosk' />\n" +
" <endpoint container-id='quux' />\n" +
" </endpoints>\n" +
" </instance>\n" +
" <instance id='other'>\n" +
" <upgrade policy='conservative' />\n" +
" <test />\n" +
" <block-change revision='true' version='false' days='sat' hours='0-23' time-zone='CET' />\n" +
" <prod>\n" +
" <region active='true'>eu-west-1</region>\n" +
" <test>eu-west-1</test>\n" +
" </prod>\n" +
" <notifications when='failing'>\n" +
" <email role='author' />\n" +
" <email address='john@dev' when='failing-commit' />\n" +
" <email address='jane@dev' />\n" +
" </notifications>\n" +
" </instance>\n" +
" </parallel>\n" +
" <instance id='last'>\n" +
" <upgrade policy='conservative' />\n" +
" <prod>\n" +
" <region active='true'>eu-west-1</region>\n" +
" </prod>\n" +
" </instance>\n" +
"</deployment>\n";
tester.atMondayMorning();
ApplicationPackage applicationPackage = ApplicationPackageBuilder.fromDeploymentXml(complicatedDeploymentSpec);
var app1 = tester.newDeploymentContext("t", "a", "instance").submit(applicationPackage);
var app2 = tester.newDeploymentContext("t", "a", "other");
var app3 = tester.newDeploymentContext("t", "a", "last");
// Verify that the first submission rolls out as per the spec.
tester.triggerJobs();
assertEquals(2, tester.jobs().active().size());
app1.runJob(stagingTest);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app2.runJob(systemTest);
app1.runJob(productionUsWest1);
tester.triggerJobs();
assertEquals(2, tester.jobs().active().size());
app1.runJob(productionUsEast3);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
tester.clock().advance(Duration.ofHours(2));
app1.runJob(productionEuWest1);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app2.assertNotRunning(testEuWest1);
app2.runJob(productionEuWest1);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app2.runJob(testEuWest1);
tester.triggerJobs();
assertEquals(List.of(), tester.jobs().active());
tester.clock().advance(Duration.ofHours(1));
app1.runJob(productionAwsUsEast1a);
tester.triggerJobs();
assertEquals(3, tester.jobs().active().size());
app1.runJob(testAwsUsEast1a);
tester.triggerJobs();
assertEquals(2, tester.jobs().active().size());
app1.runJob(productionApNortheast2);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app1.runJob(productionApNortheast1);
tester.triggerJobs();
assertEquals(List.of(), tester.jobs().active());
tester.clock().advance(Duration.ofMinutes(30));
tester.triggerJobs();
assertEquals(List.of(), tester.jobs().active());
tester.clock().advance(Duration.ofMinutes(30));
app1.runJob(testApNortheast1);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app1.runJob(testApNortheast2);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app1.runJob(testUsEast3);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app1.runJob(productionApSoutheast1);
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
app3.runJob(productionEuWest1);
tester.triggerJobs();
assertEquals(List.of(), tester.jobs().active());
tester.atMondayMorning().clock().advance(Duration.ofDays(5)); // Inside revision block window for second, conservative instance.
Version version = Version.fromString("8.1");
tester.controllerTester().upgradeSystem(version);
tester.upgrader().maintain();
assertEquals(Change.of(version), app1.instance().change());
assertEquals(Change.empty(), app2.instance().change());
assertEquals(Change.empty(), app3.instance().change());
// Upgrade instance 1; upgrade rolls out first, with revision following.
// The new platform won't roll out to the conservative instance until the normal one is upgraded.
app1.submit(applicationPackage);
assertEquals(Change.of(version).with(app1.application().latestVersion().get()), app1.instance().change());
// Upgrade platform.
app2.runJob(systemTest);
app1.runJob(stagingTest)
.runJob(productionUsWest1)
.runJob(productionUsEast3);
// Upgrade revision
tester.clock().advance(Duration.ofSeconds(1)); // Ensure we see revision as rolling after upgrade.
app2.runJob(systemTest);
app1.runJob(stagingTest)
.runJob(productionUsWest1);
// productionUsEast3 won't change revision before its production test has completed for the upgrade, which is one of the last jobs!
tester.clock().advance(Duration.ofHours(2));
app1.runJob(productionEuWest1);
tester.clock().advance(Duration.ofHours(1));
app1.runJob(productionAwsUsEast1a);
app1.runJob(testAwsUsEast1a);
tester.clock().advance(Duration.ofSeconds(1));
app1.runJob(productionAwsUsEast1a);
app1.runJob(testAwsUsEast1a);
app1.runJob(productionApNortheast2);
app1.runJob(productionApNortheast1);
tester.clock().advance(Duration.ofHours(1));
app1.runJob(testApNortheast1);
app1.runJob(testApNortheast2);
app1.runJob(productionApNortheast2);
app1.runJob(productionApNortheast1);
app1.runJob(testUsEast3);
app1.runJob(productionApSoutheast1);
tester.clock().advance(Duration.ofSeconds(1));
app1.runJob(productionUsEast3);
tester.clock().advance(Duration.ofHours(2));
app1.runJob(productionEuWest1);
tester.clock().advance(Duration.ofMinutes(330));
app1.runJob(testApNortheast1);
app1.runJob(testApNortheast2);
app1.runJob(testUsEast3);
app1.runJob(productionApSoutheast1);
app1.runJob(stagingTest); // Tests with only the outstanding application change.
app2.runJob(systemTest); // Tests with only the outstanding application change.
// Confidence rises to 'high', for the new version, and instance 2 starts to upgrade.
tester.controllerTester().computeVersionStatus();
tester.upgrader().maintain();
tester.outstandingChangeDeployer().run();
tester.triggerJobs();
assertEquals(tester.jobs().active().toString(), 1, tester.jobs().active().size());
assertEquals(Change.empty(), app1.instance().change());
assertEquals(Change.of(version), app2.instance().change());
assertEquals(Change.empty(), app3.instance().change());
app2.runJob(productionEuWest1)
.failDeployment(testEuWest1);
// Instance 2 failed the last job, and now exits block window, letting application change roll out with the upgrade.
tester.clock().advance(Duration.ofDays(1)); // Leave block window for revisions.
tester.upgrader().maintain();
tester.outstandingChangeDeployer().run();
assertEquals(0, tester.jobs().active().size());
tester.triggerJobs();
assertEquals(1, tester.jobs().active().size());
assertEquals(Change.empty(), app1.instance().change());
assertEquals(Change.of(version).with(app1.application().latestVersion().get()), app2.instance().change());
app2.runJob(productionEuWest1)
.runJob(testEuWest1);
assertEquals(Change.empty(), app2.instance().change());
assertEquals(Change.empty(), app3.instance().change());
tester.upgrader().maintain();
tester.outstandingChangeDeployer().run();
assertEquals(Change.of(version).with(app1.lastSubmission().get()), app3.instance().change());
tester.deploymentTrigger().cancelChange(app3.instanceId(), ALL);
tester.outstandingChangeDeployer().run();
tester.upgrader().maintain();
assertEquals(Change.of(app1.lastSubmission().get()), app3.instance().change());
app3.runJob(productionEuWest1);
tester.upgrader().maintain();
app1.runJob(stagingTest);
app3.runJob(productionEuWest1);
tester.triggerJobs();
assertEquals(List.of(), tester.jobs().active());
assertEquals(Change.empty(), app3.instance().change());
}
// With 'upgrade rollout=separate', a revision trails the platform upgrade; when the
// upgrade fails in a zone, the trailing revision is allowed to join it there, after
// which both proceed together.
@Test
public void testRevisionJoinsUpgradeWithSeparateRollout() {
    var appPackage = new ApplicationPackageBuilder().region("us-central-1")
                                                    .region("us-east-3")
                                                    .region("us-west-1")
                                                    .upgradeRollout("separate")
                                                    .build();
    var app = tester.newDeploymentContext().submit(appPackage).deploy();

    // Platform rolls through first production zone.
    var version0 = tester.controller().readSystemVersion();
    var version1 = new Version("7.1");
    tester.controllerTester().upgradeSystem(version1);
    tester.upgrader().maintain();
    app.runJob(systemTest).runJob(stagingTest).runJob(productionUsCentral1);
    tester.clock().advance(Duration.ofMinutes(1));

    // Revision starts rolling, but stays behind.
    var revision0 = app.lastSubmission();
    app.submit(appPackage);
    var revision1 = app.lastSubmission();
    assertEquals(Change.of(version1).with(revision1.get()), app.instance().change());
    app.runJob(systemTest).runJob(stagingTest).runJob(productionUsCentral1);

    // Upgrade got here first, so attempts to proceed alone, but the upgrade fails.
    app.triggerJobs();
    assertEquals(new Versions(version1, revision0.get(), Optional.of(version0), revision0),
                 tester.jobs().last(app.instanceId(), productionUsEast3).get().versions());
    app.timeOutConvergence(productionUsEast3);

    // Revision is allowed to join.
    app.triggerJobs();
    assertEquals(new Versions(version1, revision1.get(), Optional.of(version1), revision0),
                 tester.jobs().last(app.instanceId(), productionUsEast3).get().versions());
    app.runJob(productionUsEast3);

    // Platform and revision now proceed together.
    app.runJob(stagingTest);
    app.triggerJobs();
    assertEquals(new Versions(version1, revision1.get(), Optional.of(version0), revision0),
                 tester.jobs().last(app.instanceId(), productionUsWest1).get().versions());
    app.runJob(productionUsWest1);
    assertEquals(Change.empty(), app.instance().change());

    // New upgrade fails in staging-test, and revision to fix it is submitted.
    var version2 = new Version("7.2");
    tester.controllerTester().upgradeSystem(version2);
    tester.upgrader().maintain();
    app.runJob(systemTest).failDeployment(stagingTest);
    tester.clock().advance(Duration.ofMinutes(30));
    app.failDeployment(stagingTest);
    app.submit(appPackage);

    app.runJob(systemTest).runJob(stagingTest)    // Tests run with combined upgrade.
       .runJob(productionUsCentral1)              // Combined upgrade stays together.
       .runJob(productionUsEast3).runJob(productionUsWest1);
    assertEquals(Map.of(), app.deploymentStatus().jobsToRun());
    assertEquals(Change.empty(), app.instance().change());
}
// With 'rollout=separate', a declared production test blocks the next zone's deployment
// until it verifies the upgrade -- unless the upgrade fails between deployment and test,
// in which case the test is abandoned and the revision is unblocked.
@Test
public void testProductionTestBlockingDeploymentWithSeparateRollout() {
    var appPackage = new ApplicationPackageBuilder().region("us-east-3")
                                                    .region("us-west-1")
                                                    .delay(Duration.ofHours(1))
                                                    .test("us-east-3")
                                                    .upgradeRollout("separate")
                                                    .build();
    var app = tester.newDeploymentContext().submit(appPackage)
                    .runJob(systemTest).runJob(stagingTest)
                    .runJob(productionUsEast3).runJob(productionUsWest1);
    tester.clock().advance(Duration.ofHours(1));
    app.runJob(testUsEast3);
    assertEquals(Change.empty(), app.instance().change());

    // Platform rolls through first production zone.
    var version0 = tester.controller().readSystemVersion();
    var version1 = new Version("7.1");
    tester.controllerTester().upgradeSystem(version1);
    tester.upgrader().maintain();
    app.runJob(systemTest).runJob(stagingTest).runJob(productionUsEast3);

    // Revision starts rolling, but waits for production test to verify the upgrade.
    var revision0 = app.lastSubmission();
    app.submit(appPackage);
    var revision1 = app.lastSubmission();
    assertEquals(Change.of(version1).with(revision1.get()), app.instance().change());
    app.runJob(systemTest).runJob(stagingTest).triggerJobs();
    app.assertRunning(productionUsWest1);
    app.assertNotRunning(productionUsEast3);

    // Upgrade got here first, so attempts to proceed alone, but the upgrade fails.
    app.triggerJobs();
    assertEquals(new Versions(version1, revision0.get(), Optional.of(version0), revision0),
                 tester.jobs().last(app.instanceId(), productionUsWest1).get().versions());
    app.timeOutConvergence(productionUsWest1).triggerJobs();

    // Upgrade now fails between us-east-3 deployment and test, so test is abandoned, and revision unblocked.
    app.assertRunning(productionUsEast3);
    assertEquals(new Versions(version1, revision1.get(), Optional.of(version1), revision0),
                 tester.jobs().last(app.instanceId(), productionUsEast3).get().versions());
    app.runJob(productionUsEast3).triggerJobs()
       .jobAborted(productionUsWest1).runJob(productionUsWest1);
    tester.clock().advance(Duration.ofHours(1));
    app.runJob(testUsEast3);
    assertEquals(Change.empty(), app.instance().change());
}
/**
 * With "simultaneous" upgrade rollout, a production test must not hold back a newer
 * revision: the revision aborts and replaces the upgrade's runs in production zones,
 * and the delayed production tests later gate only the zones whose tested versions
 * still require verification.
 */
@Test
public void testProductionTestNotBlockingDeploymentWithSimultaneousRollout() {
// Three prod zones, a 1h delay, two prod tests, and simultaneous upgrade rollout.
var appPackage = new ApplicationPackageBuilder().region("us-east-3")
.region("us-central-1")
.region("us-west-1")
.delay(Duration.ofHours(1))
.test("us-east-3")
.test("us-west-1")
.upgradeRollout("simultaneous")
.build();
// Initial submission deploys everywhere, then both prod tests pass after the delay.
var app = tester.newDeploymentContext().submit(appPackage)
.runJob(systemTest).runJob(stagingTest)
.runJob(productionUsEast3).runJob(productionUsCentral1).runJob(productionUsWest1);
tester.clock().advance(Duration.ofHours(1));
app.runJob(testUsEast3).runJob(testUsWest1);
assertEquals(Change.empty(), app.instance().change());
// Platform rolls through first production zone.
var version0 = tester.controller().readSystemVersion();
var version1 = new Version("7.1");
tester.controllerTester().upgradeSystem(version1);
tester.upgrader().maintain();
app.runJob(systemTest).runJob(stagingTest).runJob(productionUsEast3);
// Revision starts rolling, and causes production test to abort when it reaches deployment.
var revision0 = app.lastSubmission();
app.submit(appPackage);
var revision1 = app.lastSubmission();
assertEquals(Change.of(version1).with(revision1.get()), app.instance().change());
app.runJob(systemTest).runJob(stagingTest).triggerJobs();
app.assertRunning(productionUsCentral1);
app.assertRunning(productionUsEast3);
// Revision deploys to first prod zone.
app.triggerJobs();
assertEquals(new Versions(version1, revision1.get(), Optional.of(version1), revision0),
             tester.jobs().last(app.instanceId(), productionUsEast3).get().versions());
tester.clock().advance(Duration.ofSeconds(1));
app.runJob(productionUsEast3);
// Revision catches up in second prod zone.
app.runJob(systemTest).runJob(stagingTest).runJob(stagingTest).triggerJobs();
app.jobAborted(productionUsCentral1).triggerJobs();
assertEquals(new Versions(version1, revision1.get(), Optional.of(version0), revision0),
             tester.jobs().last(app.instanceId(), productionUsCentral1).get().versions());
app.runJob(productionUsCentral1).triggerJobs();
// Revision proceeds alone in third prod zone, making test targets different for the two prod tests.
assertEquals(new Versions(version0, revision1.get(), Optional.of(version0), revision0),
             tester.jobs().last(app.instanceId(), productionUsWest1).get().versions());
app.runJob(productionUsWest1);
app.triggerJobs();
app.assertNotRunning(testUsEast3);
tester.clock().advance(Duration.ofHours(1));
// Test lets revision proceed alone, and us-west-1 is blocked until tested.
app.runJob(testUsEast3).triggerJobs();
app.assertNotRunning(productionUsWest1);
app.runJob(testUsWest1).runJob(productionUsWest1).runJob(testUsWest1); // Test for us-east-3 is not re-run.
assertEquals(Change.empty(), app.instance().change());
}
/**
 * Exercises the revision-change policies across three chained instances:
 * alpha rolls every revision ("always"), beta takes a new revision only when failing
 * ("when-failing"), and gamma only when it has no change in progress, one revision at
 * a time ("when-clear" with revision-target "next").
 */
@Test
public void testVeryLengthyPipelineRevisions() {
String lengthyDeploymentSpec =
"<deployment version='1.0'>\n" +
"   <instance id='alpha'>\n" +
"      <test />\n" +
"      <staging />\n" +
"      <upgrade revision-change='always' />\n" +
"      <prod>\n" +
"         <region>us-east-3</region>\n" +
"         <test>us-east-3</test>\n" +
"      </prod>\n" +
"   </instance>\n" +
"   <instance id='beta'>\n" +
"      <upgrade revision-change='when-failing' />\n" +
"      <prod>\n" +
"         <region>us-east-3</region>\n" +
"         <test>us-east-3</test>\n" +
"      </prod>\n" +
"   </instance>\n" +
"   <instance id='gamma'>\n" +
"      <upgrade revision-change='when-clear' revision-target='next' />\n" +
"      <prod>\n" +
"         <region>us-east-3</region>\n" +
"         <test>us-east-3</test>\n" +
"      </prod>\n" +
"   </instance>\n" +
"</deployment>\n";
var appPackage = ApplicationPackageBuilder.fromDeploymentXml(lengthyDeploymentSpec);
var alpha = tester.newDeploymentContext("t", "a", "alpha");
var beta = tester.newDeploymentContext("t", "a", "beta");
var gamma = tester.newDeploymentContext("t", "a", "gamma");
alpha.submit(appPackage).deploy();
// revision2 is submitted, and rolls through alpha.
var revision1 = alpha.lastSubmission();
alpha.submit(appPackage);
var revision2 = alpha.lastSubmission();
alpha.runJob(systemTest).runJob(stagingTest)
.runJob(productionUsEast3).runJob(testUsEast3);
assertEquals(Optional.empty(), alpha.instance().change().application());
// revision3 is submitted when revision2 is half-way.
tester.outstandingChangeDeployer().run();
beta.runJob(productionUsEast3);
alpha.submit(appPackage);
var revision3 = alpha.lastSubmission();
beta.runJob(testUsEast3);
assertEquals(Optional.empty(), beta.instance().change().application());
// revision3 is the target for alpha, beta is done, version1 is the target for gamma.
tester.outstandingChangeDeployer().run();
assertEquals(revision3, alpha.instance().change().application());
assertEquals(Optional.empty(), beta.instance().change().application());
assertEquals(revision2, gamma.instance().change().application());
// revision3 rolls to beta, then a couple of new revisions are submitted to alpha, and the latter is the new target.
alpha.runJob(systemTest).runJob(stagingTest)
.runJob(productionUsEast3).runJob(testUsEast3);
tester.outstandingChangeDeployer().run();
assertEquals(Optional.empty(), alpha.instance().change().application());
assertEquals(revision3, beta.instance().change().application());
// revision5 supersedes revision4
alpha.submit(appPackage);
var revision4 = alpha.lastSubmission();
alpha.runJob(systemTest).runJob(stagingTest)
.runJob(productionUsEast3);
alpha.submit(appPackage);
var revision5 = alpha.lastSubmission();
alpha.runJob(systemTest).runJob(stagingTest)
.runJob(productionUsEast3).runJob(testUsEast3);
tester.outstandingChangeDeployer().run();
assertEquals(Optional.empty(), alpha.instance().change().application());
assertEquals(revision3, beta.instance().change().application());
// revision6 rolls through alpha, and becomes the next target for beta
alpha.submit(appPackage);
var revision6 = alpha.lastSubmission();
alpha.runJob(systemTest).runJob(stagingTest)
.runJob(productionUsEast3)
.runJob(testUsEast3);
beta.runJob(productionUsEast3).runJob(testUsEast3);
tester.outstandingChangeDeployer().run();
assertEquals(Optional.empty(), alpha.instance().change().application());
assertEquals(revision6, beta.instance().change().application());
// revision6 rolls through beta, but revision3 is the next target for gamma with "exclusive" revision upgrades
alpha.jobAborted(stagingTest).runJob(stagingTest);
beta.runJob(productionUsEast3).runJob(testUsEast3);
// revision 2 fails, but this does not bring on revision 3
gamma.failDeployment(productionUsEast3);
tester.outstandingChangeDeployer().run();
assertEquals(Optional.empty(), beta.instance().change().application());
assertEquals(revision2, gamma.instance().change().application());
// revision 2 completes
gamma.runJob(productionUsEast3)
.runJob(testUsEast3);
tester.outstandingChangeDeployer().run();
assertEquals(Optional.empty(), alpha.instance().change().application());
assertEquals(Optional.empty(), beta.instance().change().application());
assertEquals(revision3, gamma.instance().change().application());
// revision 6 is next, once 3 is done
// revision 3 completes
gamma.runJob(productionUsEast3)
.runJob(testUsEast3);
tester.outstandingChangeDeployer().run();
assertEquals(revision6, gamma.instance().change().application());
}
/**
 * Verifies upgrade targeting across chained instances with different rollout settings:
 * when a newer platform version gains confidence mid-rollout, alpha retargets to it,
 * while gamma (with "separate" rollout) continues with the older version already
 * verified upstream.
 */
@Test
public void testVeryLengthyPipelineUpgrade() {
String lengthyDeploymentSpec =
"<deployment version='1.0'>\n" +
"   <instance id='alpha'>\n" +
"      <test />\n" +
"      <staging />\n" +
"      <upgrade rollout='simultaneous' />\n" +
"      <prod>\n" +
"         <region>us-east-3</region>\n" +
"         <test>us-east-3</test>\n" +
"      </prod>\n" +
"   </instance>\n" +
"   <instance id='beta'>\n" +
"      <upgrade rollout='simultaneous' />\n" +
"      <prod>\n" +
"         <region>us-east-3</region>\n" +
"         <test>us-east-3</test>\n" +
"      </prod>\n" +
"   </instance>\n" +
"   <instance id='gamma'>\n" +
"      <upgrade rollout='separate' />\n" +
"      <prod>\n" +
"         <region>us-east-3</region>\n" +
"         <test>us-east-3</test>\n" +
"      </prod>\n" +
"   </instance>\n" +
"</deployment>\n";
var appPackage = ApplicationPackageBuilder.fromDeploymentXml(lengthyDeploymentSpec);
var alpha = tester.newDeploymentContext("t", "a", "alpha");
var beta = tester.newDeploymentContext("t", "a", "beta");
var gamma = tester.newDeploymentContext("t", "a", "gamma");
alpha.submit(appPackage).deploy();
// A version releases, but when the first upgrade has gotten through alpha, beta, and gamma, a newer version has high confidence.
var version0 = tester.controller().readSystemVersion();
var version1 = new Version("7.1");
var version2 = new Version("7.2");
tester.controllerTester().upgradeSystem(version1);
tester.upgrader().maintain();
alpha.runJob(systemTest).runJob(stagingTest)
.runJob(productionUsEast3).runJob(testUsEast3);
assertEquals(Change.empty(), alpha.instance().change());
tester.upgrader().maintain();
beta.runJob(productionUsEast3);
// version2 becomes available while beta is mid-upgrade.
tester.controllerTester().upgradeSystem(version2);
beta.runJob(testUsEast3);
assertEquals(Change.empty(), beta.instance().change());
// alpha retargets to the newest version; gamma still takes the verified version1.
tester.upgrader().maintain();
assertEquals(Change.of(version2), alpha.instance().change());
assertEquals(Change.empty(), beta.instance().change());
assertEquals(Change.of(version1), gamma.instance().change());
}
/**
 * With "leading" upgrade rollout, a revision submitted mid-upgrade joins the platform
 * change: once staging verifies the joint upgrade, the already-triggered upgrade-only
 * job is aborted and replaced, and platform and revision then roll out together.
 */
@Test
public void testRevisionJoinsUpgradeWithLeadingRollout() {
var appPackage = new ApplicationPackageBuilder().region("us-central-1")
.region("us-east-3")
.region("us-west-1")
.upgradeRollout("leading")
.build();
var app = tester.newDeploymentContext().submit(appPackage).deploy();
// Platform rolls through first production zone.
var version0 = tester.controller().readSystemVersion();
var version1 = new Version("7.1");
tester.controllerTester().upgradeSystem(version1);
tester.upgrader().maintain();
app.runJob(systemTest).runJob(stagingTest).runJob(productionUsCentral1);
tester.clock().advance(Duration.ofMinutes(1));
// Revision starts rolling, and catches up.
var revision0 = app.lastSubmission();
app.submit(appPackage);
var revision1 = app.lastSubmission();
assertEquals(Change.of(version1).with(revision1.get()), app.instance().change());
app.runJob(systemTest).runJob(stagingTest).runJob(productionUsCentral1);
// Upgrade got here first, and has triggered, but is now obsolete.
app.triggerJobs();
assertEquals(new Versions(version1, revision0.get(), Optional.of(version0), revision0),
             tester.jobs().last(app.instanceId(), productionUsEast3).get().versions());
assertEquals(RunStatus.running, tester.jobs().last(app.instanceId(), productionUsEast3).get().status());
// Once staging tests verify the joint upgrade, the job is replaced with that.
app.runJob(stagingTest);
app.triggerJobs();
app.jobAborted(productionUsEast3).runJob(productionUsEast3);
assertEquals(new Versions(version1, revision1.get(), Optional.of(version0), revision0),
             tester.jobs().last(app.instanceId(), productionUsEast3).get().versions());
// Platform and revision now proceed together.
app.triggerJobs();
assertEquals(new Versions(version1, revision1.get(), Optional.of(version0), revision0),
             tester.jobs().last(app.instanceId(), productionUsWest1).get().versions());
app.runJob(productionUsWest1);
assertEquals(Change.empty(), app.instance().change());
}
/**
 * With "simultaneous" upgrade rollout, a revision submitted mid-upgrade may overtake
 * the platform change: the revision proceeds alone to the remaining zones, and the
 * upgrade follows behind it.
 */
@Test
public void testRevisionPassesUpgradeWithSimultaneousRollout() {
var appPackage = new ApplicationPackageBuilder().region("us-central-1")
.region("us-east-3")
.region("us-west-1")
.upgradeRollout("simultaneous")
.build();
var app = tester.newDeploymentContext().submit(appPackage).deploy();
// Platform rolls through first production zone.
var version0 = tester.controller().readSystemVersion();
var version1 = new Version("7.1");
tester.controllerTester().upgradeSystem(version1);
tester.upgrader().maintain();
app.runJob(systemTest).runJob(stagingTest).runJob(productionUsCentral1);
tester.clock().advance(Duration.ofMinutes(1));
// Revision starts rolling, and catches up.
var revision0 = app.lastSubmission();
app.submit(appPackage);
var revision1 = app.lastSubmission();
assertEquals(Change.of(version1).with(revision1.get()), app.instance().change());
app.runJob(systemTest).runJob(stagingTest).runJob(productionUsCentral1);
// Upgrade got here first, and has triggered, but is now obsolete.
app.triggerJobs();
app.assertRunning(productionUsEast3);
assertEquals(new Versions(version1, revision0.get(), Optional.of(version0), revision0),
             tester.jobs().last(app.instanceId(), productionUsEast3).get().versions());
assertEquals(RunStatus.running, tester.jobs().last(app.instanceId(), productionUsEast3).get().status());
// Once staging tests verify the joint upgrade, the job is replaced with that.
app.runJob(systemTest).runJob(stagingTest).runJob(stagingTest);
app.triggerJobs();
app.jobAborted(productionUsEast3).runJob(productionUsEast3);
assertEquals(new Versions(version1, revision1.get(), Optional.of(version0), revision0),
             tester.jobs().last(app.instanceId(), productionUsEast3).get().versions());
// Revision now proceeds alone.
app.triggerJobs();
assertEquals(new Versions(version0, revision1.get(), Optional.of(version0), revision0),
             tester.jobs().last(app.instanceId(), productionUsWest1).get().versions());
app.runJob(productionUsWest1);
// Upgrade follows.
app.triggerJobs();
assertEquals(new Versions(version1, revision1.get(), Optional.of(version0), revision1),
             tester.jobs().last(app.instanceId(), productionUsWest1).get().versions());
app.runJob(productionUsWest1);
assertEquals(Change.empty(), app.instance().change());
}
/**
 * In the CD system, manual (direct) production deployments are interleaved with
 * pipeline-driven ones: when manual deployments leave zones on versions the test
 * jobs don't know, the broken test runs are aborted or force-skipped, after which
 * the pipeline completes normally.
 */
@Test
public void mixedDirectAndPipelineJobsInProduction() {
ApplicationPackage cdPackage = new ApplicationPackageBuilder().region("cd-us-east-1")
.region("cd-aws-us-east-1a")
.build();
// Set up a CD system with the zones the package deploys to.
var zones = List.of(ZoneId.from("test.cd-us-west-1"),
ZoneId.from("staging.cd-us-west-1"),
ZoneId.from("prod.cd-us-east-1"),
ZoneId.from("prod.cd-aws-us-east-1a"));
tester.controllerTester()
.setZones(zones, SystemName.cd)
.setRoutingMethod(zones, RoutingMethod.sharedLayer4);
tester.controllerTester().upgradeSystem(Version.fromString("6.1"));
tester.controllerTester().computeVersionStatus();
var app = tester.newDeploymentContext();
// Deploy manually first, then submit through the pipeline.
app.runJob(productionCdUsEast1, cdPackage);
app.submit(cdPackage);
app.runJob(systemTest);
// Staging test requires unknown initial version, and is broken.
tester.controller().applications().deploymentTrigger().forceTrigger(app.instanceId(), productionCdUsEast1, "user", false, true, true);
app.runJob(productionCdUsEast1)
.abortJob(stagingTest) // Complete failing run.
.runJob(stagingTest) // Run staging-test for production zone with no prior deployment.
.runJob(productionCdAwsUsEast1a);
// Manually deploy to east again, then upgrade the system.
app.runJob(productionCdUsEast1, cdPackage);
var version = new Version("7.1");
tester.controllerTester().upgradeSystem(version);
tester.upgrader().maintain();
// System and staging tests both require unknown versions, and are broken.
tester.controller().applications().deploymentTrigger().forceTrigger(app.instanceId(), productionCdUsEast1, "user", false, true, true);
app.runJob(productionCdUsEast1)
.triggerJobs()
.jobAborted(systemTest)
.jobAborted(stagingTest)
.runJob(systemTest) // Run test for aws zone again.
.runJob(stagingTest) // Run test for aws zone again.
.runJob(productionCdAwsUsEast1a);
// Deploy manually again, then submit new package.
app.runJob(productionCdUsEast1, cdPackage);
app.submit(cdPackage);
app.triggerJobs().runJob(systemTest);
// Staging test requires unknown initial version, and is broken.
tester.controller().applications().deploymentTrigger().forceTrigger(app.instanceId(), productionCdUsEast1, "user", false, true, true);
app.runJob(productionCdUsEast1)
.jobAborted(stagingTest)
.runJob(stagingTest)
.runJob(productionCdAwsUsEast1a);
}
/**
 * System and staging tests may live in a separate (canary) instance: the default
 * instance's production jobs only roll after the canary instance has run the tests,
 * for submissions as well as for platform upgrades.
 */
@Test
public void testsInSeparateInstance() {
String deploymentSpec =
"<deployment version='1.0' athenz-domain='domain' athenz-service='service'>\n" +
"   <instance id='canary'>\n" +
"      <upgrade policy='canary' />\n" +
"      <test />\n" +
"      <staging />\n" +
"   </instance>\n" +
"   <instance id='default'>\n" +
"      <prod>\n" +
"         <region>eu-west-1</region>\n" +
"         <test>eu-west-1</test>\n" +
"      </prod>\n" +
"   </instance>\n" +
"</deployment>\n";
ApplicationPackage applicationPackage = ApplicationPackageBuilder.fromDeploymentXml(deploymentSpec);
var canary = tester.newDeploymentContext("t", "a", "canary").submit(applicationPackage);
var conservative = tester.newDeploymentContext("t", "a", "default");
// First submission: canary tests, then the default instance deploys.
canary.runJob(systemTest)
.runJob(stagingTest);
conservative.runJob(productionEuWest1)
.runJob(testEuWest1);
// Second submission follows the same path, via the outstanding-change deployer.
canary.submit(applicationPackage)
.runJob(systemTest)
.runJob(stagingTest);
tester.outstandingChangeDeployer().run();
conservative.runJob(productionEuWest1)
.runJob(testEuWest1);
// A platform upgrade also rolls through the canary tests before production.
tester.controllerTester().upgradeSystem(new Version("7.7.7"));
tester.upgrader().maintain();
canary.runJob(systemTest)
.runJob(stagingTest);
tester.upgrader().maintain();
conservative.runJob(productionEuWest1)
.runJob(testEuWest1);
}
/**
 * While an upgrade is still rolling, a new submission may start its system/staging
 * tests eagerly, assuming the upgrade succeeds; when the outstanding change is then
 * triggered, the already-running, still-relevant test job is allowed to finish.
 */
@Test
public void testEagerTests() {
var app = tester.newDeploymentContext().submit().deploy();
// Start upgrade, then receive new submission.
Version version1 = new Version("7.8.9");
ApplicationVersion build1 = app.lastSubmission().get();
tester.controllerTester().upgradeSystem(version1);
tester.upgrader().maintain();
app.runJob(stagingTest);
app.submit();
ApplicationVersion build2 = app.lastSubmission().get();
assertNotEquals(build1, build2);
// App now free to start system tests eagerly, for new submission. These should run assuming upgrade succeeds.
tester.triggerJobs();
app.assertRunning(stagingTest);
// Last completed staging run tested the upgrade with the old build ...
assertEquals(version1,
             app.instanceJobs().get(stagingTest).lastCompleted().get().versions().targetPlatform());
assertEquals(build1,
             app.instanceJobs().get(stagingTest).lastCompleted().get().versions().targetApplication());
// ... while the eagerly triggered run tests the new build on top of the upgrade.
assertEquals(version1,
             app.instanceJobs().get(stagingTest).lastTriggered().get().versions().sourcePlatform().get());
assertEquals(build1,
             app.instanceJobs().get(stagingTest).lastTriggered().get().versions().sourceApplication().get());
assertEquals(version1,
             app.instanceJobs().get(stagingTest).lastTriggered().get().versions().targetPlatform());
assertEquals(build2,
             app.instanceJobs().get(stagingTest).lastTriggered().get().versions().targetApplication());
// App completes upgrade, and outstanding change is triggered. This should let relevant, running jobs finish.
app.runJob(systemTest)
.runJob(productionUsCentral1)
.runJob(productionUsEast3)
.runJob(productionUsWest1);
tester.outstandingChangeDeployer().run();
assertEquals(RunStatus.running, tester.jobs().last(app.instanceId(), stagingTest).get().status());
app.runJob(stagingTest);
tester.triggerJobs();
app.assertNotRunning(stagingTest);
}
/**
 * When an upgrade is rolled back to a lower target (confidence broken) after the
 * first zone already upgraded past it, the idle test jobs must still trigger so the
 * remaining zones can reach the new, lower target, leaving no jobs to run afterwards.
 */
@Test
public void testTriggeringOfIdleTestJobsWhenFirstDeploymentIsOnNewerVersionThanChange() {
ApplicationPackage applicationPackage = new ApplicationPackageBuilder().systemTest()
.stagingTest()
.region("us-east-3")
.region("us-west-1")
.build();
var app = tester.newDeploymentContext().submit(applicationPackage).deploy();
// Keep version2 referenced by another application, so it is not garbage collected.
var appToAvoidVersionGC = tester.newDeploymentContext("g", "c", "default").submit().deploy();
Version version2 = new Version("7.8.9");
Version version3 = new Version("8.9.10");
tester.controllerTester().upgradeSystem(version2);
tester.deploymentTrigger().triggerChange(appToAvoidVersionGC.instanceId(), Change.of(version2));
appToAvoidVersionGC.deployPlatform(version2);
// app upgrades first zone to version3, and then the other two to version2.
tester.controllerTester().upgradeSystem(version3);
tester.deploymentTrigger().triggerChange(app.instanceId(), Change.of(version3));
app.runJob(systemTest).runJob(stagingTest);
tester.triggerJobs();
// version3 loses confidence; the upgrader retargets the app to version2.
tester.upgrader().overrideConfidence(version3, VespaVersion.Confidence.broken);
tester.controllerTester().computeVersionStatus();
tester.upgrader().run();
assertEquals(Optional.of(version2), app.instance().change().platform());
app.runJob(systemTest)
.runJob(productionUsEast3)
.runJob(stagingTest)
.runJob(productionUsWest1);
// First zone kept version3; the second got version2; nothing is left to run.
assertEquals(version3, app.instanceJobs().get(productionUsEast3).lastSuccess().get().versions().targetPlatform());
assertEquals(version2, app.instanceJobs().get(productionUsWest1).lastSuccess().get().versions().targetPlatform());
assertEquals(Map.of(), app.deploymentStatus().jobsToRun());
assertEquals(Change.empty(), app.instance().change());
assertEquals(List.of(), tester.jobs().active());
}
/**
 * Verifies that re-trigger requests for the same deployment are de-duplicated in the
 * persisted retrigger queue: two reTriggerOrAddToQueue calls for the same deployment
 * leave exactly one queued entry.
 */
@Test
public void testRetriggerQueue() {
var app = tester.newDeploymentContext().submit().deploy();
app.submit();
tester.triggerJobs();
tester.deploymentTrigger().reTrigger(app.instanceId(), productionUsEast3, null);
// Both queue requests target the same deployment; only one entry may remain.
tester.deploymentTrigger().reTriggerOrAddToQueue(app.deploymentIdIn(ZoneId.from("prod", "us-east-3")), null);
tester.deploymentTrigger().reTriggerOrAddToQueue(app.deploymentIdIn(ZoneId.from("prod", "us-east-3")), null);
List<RetriggerEntry> retriggerEntries = tester.controller().curator().readRetriggerEntries();
// Use the statically imported assertEquals, consistent with the rest of this class.
assertEquals(1, retriggerEntries.size());
}
/**
 * When two platform versions are declared incompatible, an application compiled
 * against the old version is not upgraded; only a new submission compiled against
 * the new version moves both platform and application forward together.
 */
@Test
public void testOrchestrationWithIncompatibleVersionPairs() {
Version version1 = new Version("7");
Version version2 = new Version("8");
// Mark major 8 as incompatible with older compile versions.
tester.controllerTester().flagSource().withListFlag(PermanentFlags.INCOMPATIBLE_VERSIONS.id(), List.of("8"), String.class);
tester.controllerTester().upgradeSystem(version1);
DeploymentContext app = tester.newDeploymentContext()
.submit(new ApplicationPackageBuilder().region("us-east-3")
.compileVersion(version1)
.build())
.deploy();
// The upgrader must not target version2 while the app is compiled against version1.
tester.controllerTester().upgradeSystem(version2);
tester.upgrader().run();
assertEquals(Change.empty(), app.instance().change());
// A new submission compiled against version2 carries both changes through together.
app.submit(new ApplicationPackageBuilder().region("us-east-3")
.compileVersion(version2)
.build());
app.deploy();
assertEquals(version2, tester.jobs().last(app.instanceId(), productionUsEast3).get().versions().targetPlatform());
assertEquals(version2, tester.jobs().last(app.instanceId(), productionUsEast3).get().versions().targetApplication().compileVersion().get());
}
} |
package org.jboss.as.controller.transform.description;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.FAILED;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.FAILURE_DESCRIPTION;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OUTCOME;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.SUCCESS;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.descriptions.ModelDescriptionConstants;
import org.jboss.as.controller.registry.Resource;
import org.jboss.as.controller.transform.OperationRejectionPolicy;
import org.jboss.as.controller.transform.OperationResultTransformer;
import org.jboss.as.controller.transform.OperationTransformer;
import org.jboss.dmr.ModelNode;
/**
* @author Emanuel Muckenhuber
*/
class AttributeTransformationRule extends TransformationRule {
private final Map<String, AttributeTransformationDescription> descriptions;
AttributeTransformationRule(Map<String, AttributeTransformationDescription> descriptions) {
this.descriptions = descriptions;
}
@Override
void transformOperation(final ModelNode operation, PathAddress address, ChainedOperationContext context) throws OperationFailedException {
final ModelNode transformed = operation.clone();
final RejectedAttributesLogContext rejectedAttributes = new RejectedAttributesLogContext(context, address, operation);
doTransform(address, transformed, operation, context, rejectedAttributes);
final OperationRejectionPolicy policy;
final OperationResultTransformer resultTransformer;
if (!rejectedAttributes.hasRejections()) {
policy = OperationTransformer.DEFAULT_REJECTION_POLICY;
resultTransformer = OperationResultTransformer.ORIGINAL_RESULT;
} else {
policy = new OperationRejectionPolicy() {
@Override
public boolean rejectOperation(ModelNode preparedResult) {
return true;
}
@Override
public String getFailureDescription() {
return rejectedAttributes.getOperationRejectDescription();
}
};
resultTransformer = new OperationResultTransformer() {
@Override
public ModelNode transformResult(ModelNode result) {
ModelNode res = result;
if (!result.hasDefined(OUTCOME) || SUCCESS.equals(result.get(OUTCOME).asString())) {
res = result.clone();
res.get(OUTCOME).set(FAILED);
res.get(FAILURE_DESCRIPTION).set(policy.getFailureDescription());
}
return res;
}
};
}
context.invokeNext(new OperationTransformer.TransformedOperation(transformed, policy, resultTransformer));
}
@Override
void transformResource(final Resource resource, final PathAddress address, final ChainedResourceContext context) throws OperationFailedException {
final ModelNode model = resource.getModel();
RejectedAttributesLogContext rejectedAttributes = new RejectedAttributesLogContext(context, address, null);
doTransform(address, model, null, context, rejectedAttributes);
if (rejectedAttributes.hasRejections()) {
rejectedAttributes.errorOrWarnOnResourceTransformation();
}
context.invokeNext(resource);
}
private void doTransform(PathAddress address, ModelNode modelOrOp, ModelNode operation, AbstractChainedContext context, RejectedAttributesLogContext rejectedAttributes) {
Map<String, String> renames = new HashMap<String, String>();
Map<String, ModelNode> adds = new HashMap<String, ModelNode>();
Set<String> newAttributes = new HashSet<String>();
Set<String> discardedAttributes = new HashSet<String>();
//Make sure that context.readResourceXXX() returns an unmodifiable Resource
context.setImmutableResource(true);
try {
//Initial setup and discard
for(final Map.Entry<String, AttributeTransformationDescription> entry : descriptions.entrySet()) {
final String attributeName = entry.getKey();
final boolean isNewAttribute = !modelOrOp.has(attributeName);
final ModelNode attributeValue = modelOrOp.get(attributeName);
if (isNewAttribute) {
newAttributes.add(attributeName);
}
AttributeTransformationDescription description = entry.getValue();
//discard what can be discarded
if (description.shouldDiscard(address, TransformationRule.cloneAndProtect(attributeValue), operation, context)) {
modelOrOp.remove(attributeName);
discardedAttributes.add(attributeName);
}
String newName = description.getNewName();
if (newName != null) {
renames.put(attributeName, newName);
}
}
//Check rejections (unless it is a remove operation, in which case we just remove)
if (operation == null || (!operation.get(ModelDescriptionConstants.OP).asString().equals(ModelDescriptionConstants.REMOVE) && !
operation.get(ModelDescriptionConstants.OP).asString().equals(ModelDescriptionConstants.READ_ATTRIBUTE_OPERATION))) {
for(final Map.Entry<String, AttributeTransformationDescription> entry : descriptions.entrySet()) {
final String attributeName = entry.getKey();
if (!discardedAttributes.contains(attributeName)) {
final ModelNode attributeValue = modelOrOp.get(attributeName);
AttributeTransformationDescription description = entry.getValue();
//Check the rest of the model can be transformed
description.rejectAttributes(rejectedAttributes, TransformationRule.cloneAndProtect(attributeValue));
}
}
}
//Do conversions
for(final Map.Entry<String, AttributeTransformationDescription> entry : descriptions.entrySet()) {
final String attributeName = entry.getKey();
if (!discardedAttributes.contains(attributeName)) {
final ModelNode attributeValue = modelOrOp.get(attributeName);
AttributeTransformationDescription description = entry.getValue();
description.convertValue(address, attributeValue, operation, context);
if (!attributeValue.isDefined()) {
modelOrOp.remove(attributeName);
} else if (newAttributes.contains(attributeName)) {
adds.put(attributeName, attributeValue);
}
}
}
} finally {
context.setImmutableResource(false);
}
if (renames.size() > 0) {
for (Map.Entry<String, String> entry : renames.entrySet()) {
if (modelOrOp.has(entry.getKey())) {
ModelNode model = modelOrOp.remove(entry.getKey());
if (model.isDefined()) {
modelOrOp.get(entry.getValue()).set(model);
}
}
}
}
if (adds.size() > 0) {
for (Map.Entry<String, ModelNode> entry : adds.entrySet()) {
modelOrOp.get(entry.getKey()).set(entry.getValue());
}
}
}
} |
package org.ihtsdo.buildcloud.service.identifier.client;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.http.HttpStatus;
import org.ihtsdo.otf.rest.client.RestClientException;
import org.ihtsdo.otf.rest.client.resty.RestyHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import us.monoid.json.JSONArray;
import us.monoid.json.JSONException;
import us.monoid.json.JSONObject;
import us.monoid.web.JSONResource;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
public class IdServiceRestClientImpl implements IdServiceRestClient {
// JSON keys and parameter names used by the ID service REST API.
private static final String TOKEN = "token";
private static final String MESSAGE = "message";
private static final String STATUS = "status";
private static final String SCHEME_ID = "schemeId";
private static final String SCHEME_IDS = "schemeIds";
private static final String SCTIDS = "sctids";
private static final String SRS = "srs";
private static final String SYSTEM_IDS = "systemIds";
private static final String QUANTITY = "quantity";
private static final String COMMENT = "comment";
private static final String GENERATE_LEGACY_IDS = "generateLegacyIds";
private static final String SOFTWARE = "software";
private static final String PARTITION_ID = "partitionId";
private static final String NAMESPACE = "namespace";
private static final String SYSTEM_ID = "systemId";
private static final String SCTID = "sctid";
// Content types used for requests.
private static final String APPLICATION_JSON = "application/json";
public static final String ANY_CONTENT_TYPE = "*/*";
// Endpoint configuration and HTTP/JSON helpers.
private String idServiceUrl;
private RestyHelper resty;
private IdServiceRestUrlHelper urlHelper;
private Gson gson;
// Security token shared across all client instances; access is guarded by LOCK.
private static String token;
private static final Object LOCK = new Object();
private static final Logger LOGGER = LoggerFactory.getLogger(IdServiceRestClientImpl.class);
// Number of logical sessions currently sharing the token (incremented per logIn()).
private static AtomicInteger currentSessions = new AtomicInteger();
private int timeOutInSeconds = 300;
// NOTE(review): maxTries and retryDelaySeconds default to 0 here; presumably set via
// setters or injection outside this chunk — confirm configuration before relying on retries.
private int maxTries;
private int retryDelaySeconds;
// Credentials used when acquiring a new token.
private String userName;
private String password;
/**
 * Creates a client for the given ID service endpoint.
 *
 * @param idServiceUrl base URL of the ID service
 * @param username     account used to obtain security tokens
 * @param password     password for that account
 */
public IdServiceRestClientImpl(String idServiceUrl, String username, String password) {
    this.userName = username;
    this.password = password;
    this.idServiceUrl = idServiceUrl;
    this.urlHelper = new IdServiceRestUrlHelper(idServiceUrl);
    this.resty = new RestyHelper(ANY_CONTENT_TYPE);
    this.gson = new GsonBuilder().setPrettyPrinting().create();
}
/**
 * Pings the ID service's test endpoint.
 *
 * @return true when the endpoint answers with HTTP 200, false otherwise
 */
private boolean isServiceRunning() {
    try {
        JSONResource response = resty.json(urlHelper.getTestServiceUrl());
        return response != null && HttpStatus.SC_OK == response.getHTTPStatus();
    } catch (IOException e) {
        LOGGER.error("Error when testing service", e);
        return false;
    }
}
/**
 * Logs in to the ID service, reusing the shared token when it is still valid and
 * acquiring a new one otherwise; each call counts as one active session.
 *
 * @return the (possibly newly acquired) security token
 * @throws RestClientException when a new token is needed but cannot be obtained
 */
@Override
public String logIn() throws RestClientException {
synchronized (LOCK) {
if ( token != null) {
// NOTE(review): this logs the token value; consider redacting credentials from logs.
LOGGER.info("ID service rest client is already logged in with token:" + token);
}
//validate token
if ( !isTokenValid(token) ) {
//get a new token;
token = accquireToken();
}
// Count this login; presumably balanced by a logout elsewhere — confirm against caller.
currentSessions.getAndIncrement();
}
return token;
}
/**
 * Logs in to the ID service and returns a freshly issued security token.
 * (The method name keeps its historical misspelling since it is called elsewhere.)
 *
 * @return the newly acquired security token
 * @throws RestClientException when the service is down or the login request fails
 */
private String accquireToken() throws RestClientException {
    String securityToken = null;
    if (!isServiceRunning()) {
        throw new RestClientException("Id service is not currently running at URL:" + idServiceUrl);
    }
    LOGGER.info("Id service rest client logs in to get a new security token." );
    try {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("username", this.userName);
        jsonObject.put("password", this.password);
        securityToken = (String) resty.json(urlHelper.getLoginUrl(), RestyHelper.content(jsonObject)).get(TOKEN);
        // Security fix: do not log the token itself — it is a credential.
        LOGGER.info("Security token is acquired successfully.");
    } catch (Exception e) {
        throw new RestClientException("Failed to login for user name:" + this.userName, e);
    }
    return securityToken;
}
/**
 * Checks with the id service whether the given security token is still valid.
 *
 * @param token token to verify; null is treated as invalid without a remote call
 * @return true only when the token-authentication endpoint answers HTTP 200
 */
private boolean isTokenValid(String token) {
    if (token == null) {
        return false;
    }
    boolean isValid = false;
    try {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put(TOKEN, token);
        JSONResource response = resty.json(urlHelper.getTokenAuthenticationUrl(), RestyHelper.content(jsonObject, APPLICATION_JSON));
        if (response != null) {
            if (HttpStatus.SC_OK == response.getHTTPStatus()) {
                isValid = true;
            } else {
                // Typo fixed ("Inavlid" -> "Invalid").
                LOGGER.info("Invalid token with failure reason from id server:" + response.get(MESSAGE));
            }
        }
    } catch (Exception e) {
        // Treat any transport/parse error as "not valid" so the caller acquires
        // a fresh token. Security fix: do not log the token value itself.
        LOGGER.error("Failed to validate token", e);
    }
    return isValid;
}
/**
 * Looks up the current status of each given SCTID, retrying transient
 * failures up to {@code maxTries} times with {@code retryDelaySeconds} delay.
 *
 * @param sctIds ids to query; null or empty returns an empty map
 * @return map of SCTID to its status string
 * @throws RestClientException when the service keeps failing after all attempts
 */
@Override
public Map<Long,String> getStatusForSctIds(Collection<Long> sctIds) throws RestClientException {
    Map<Long,String> result = new HashMap<>();
    if (sctIds == null || sctIds.isEmpty()) {
        return result;
    }
    // Build the comma-separated id list required by the bulk endpoint
    // (checking the builder length replaces the old isFirst flag).
    StringBuilder scdStrList = new StringBuilder();
    for (Long id : sctIds) {
        if (scdStrList.length() > 0) {
            scdStrList.append(",");
        }
        scdStrList.append(id.toString());
    }
    int attempt = 1;
    boolean isDone = false;
    while (!isDone) {
        try {
            JSONObject requestData = new JSONObject();
            requestData.put(SCTIDS, scdStrList.toString());
            JSONResource response = resty.json(urlHelper.getSctIdBulkUrl(token), RestyHelper.content(requestData, APPLICATION_JSON));
            if (HttpStatus.SC_OK == response.getHTTPStatus()) {
                JSONArray items = response.array();
                for (int i = 0; i < items.length(); i++) {
                    // Long.valueOf replaces the deprecated new Long(String) constructor.
                    result.put(Long.valueOf((String) items.getJSONObject(i).get(SCTID)), (String) items.getJSONObject(i).get(STATUS));
                }
            } else {
                throw new RestClientException(getFailureMessage(response));
            }
            isDone = true;
        } catch (Exception e) {
            if (attempt < maxTries) {
                LOGGER.warn("Id service failed on attempt {}. Waiting {} seconds before retrying.", attempt, retryDelaySeconds, e);
                attempt++;
                try {
                    // 1000L avoids int overflow for large delays.
                    Thread.sleep(retryDelaySeconds * 1000L);
                } catch (InterruptedException ie) {
                    // Restore the interrupt flag so callers can observe the interruption,
                    // and log the actual InterruptedException (old code logged e).
                    Thread.currentThread().interrupt();
                    LOGGER.warn("Retry delay interrupted.", ie);
                }
            } else {
                throw new RestClientException("Failed to get sctIds for batch size:" + sctIds.size(), e);
            }
        }
    }
    return result;
}
/**
 * Returns the SCTID registered for the given UUID, asking the id service to
 * generate one when none exists yet. Retries transient failures up to
 * {@code maxTries} times.
 *
 * @param componentUuid system id (UUID) of the component
 * @param namespaceId SNOMED namespace id
 * @param partitionId partition id for a newly generated SCTID
 * @param comment audit comment stored with the request
 * @return the existing or newly generated SCTID
 * @throws RestClientException when all attempts fail
 */
@Override
public Long getOrCreateSctId(UUID componentUuid, Integer namespaceId, String partitionId, String comment) throws RestClientException {
    Long result = null;
    int attempt = 1;
    while (result == null) {
        try {
            JSONObject requestData = new JSONObject();
            requestData.put(NAMESPACE, namespaceId.intValue());
            requestData.put(PARTITION_ID, partitionId);
            requestData.put(SYSTEM_ID, componentUuid.toString());
            requestData.put(SOFTWARE, SRS);
            requestData.put(GENERATE_LEGACY_IDS, "false");
            requestData.put(COMMENT, comment);
            JSONResource response = resty.json(urlHelper.getSctIdGenerateUrl(token), RestyHelper.content(requestData, APPLICATION_JSON));
            if (response != null && HttpStatus.SC_OK == response.getHTTPStatus()) {
                // Long.valueOf replaces the deprecated new Long(String) constructor.
                result = Long.valueOf((String) response.get(SCTID));
            } else {
                throw new RestClientException(getFailureMessage(response));
            }
        } catch (Exception e) {
            if (attempt < maxTries) {
                LOGGER.warn("Id service failed on attempt {}. Waiting {} seconds before retrying.", attempt, retryDelaySeconds, e);
                attempt++;
                try {
                    // 1000L avoids int overflow for large delays.
                    Thread.sleep(retryDelaySeconds * 1000L);
                } catch (InterruptedException ie) {
                    // Restore the interrupt flag; log the actual interruption
                    // (old code logged e with a "dealy" typo).
                    Thread.currentThread().interrupt();
                    LOGGER.warn("Retry delay interrupted.", ie);
                }
            } else {
                throw new RestClientException("Failed to create sctId for uuid:" + componentUuid.toString(), e);
            }
        }
    }
    return result;
}
/**
 * Bulk version of {@link #getOrCreateSctId}: requests an SCTID for each UUID
 * through the id service's bulk job API and waits for the job to finish.
 *
 * @param uuids system ids to register; null or empty returns an empty map
 * @param namespaceId SNOMED namespace id
 * @param partitionId partition id for newly generated SCTIDs
 * @param comment audit comment stored with the request
 * @return map of UUID to its SCTID
 * @throws RestClientException when the bulk job fails or times out
 */
@Override
public HashMap<UUID,Long> getOrCreateSctIds(List<UUID> uuids,Integer namespaceId,String partitionId, String comment) throws RestClientException {
    HashMap<UUID, Long> result = new HashMap<>();
    // Bug fix: check for null/empty BEFORE any dereference — the old code
    // logged uuids.size() first, which threw a NullPointerException for null input.
    if (uuids == null || uuids.isEmpty()) {
        LOGGER.warn("Empty UUIDs submitted for requesting sctIds");
        return result;
    }
    LOGGER.debug("Start creating sctIds with batch size {} for namespace {} and partitionId {}", uuids.size(), namespaceId, partitionId);
    long startTime = new Date().getTime();
    List<String> uuidStrings = new ArrayList<>();
    for (UUID uuid : uuids) {
        uuidStrings.add(uuid.toString());
    }
    try {
        JSONObject requestData = new JSONObject();
        requestData.put(NAMESPACE, namespaceId.intValue());
        requestData.put(PARTITION_ID, partitionId);
        requestData.put(QUANTITY, uuids.size());
        requestData.put(SYSTEM_IDS, uuidStrings.toArray());
        requestData.put(SOFTWARE, SRS);
        requestData.put(GENERATE_LEGACY_IDS, "false");
        requestData.put(COMMENT, comment);
        JSONResource response = resty.json(urlHelper.getSctIdBulkGenerateUrl(token), RestyHelper.content(requestData, APPLICATION_JSON));
        if (HttpStatus.SC_OK == response.getHTTPStatus()) {
            String jobId = response.get("id").toString();
            LOGGER.info("Bulk job id:" + jobId + " with batch size:" + uuids.size());
            if (BULK_JOB_STATUS.COMPLETED_WITH_SUCCESS.getCode() == waitForCompleteStatus(jobId, getTimeOutInSeconds())) {
                JSONArray items = resty.json(urlHelper.getBulkJobResultUrl(jobId, token)).array();
                for (int i = 0; i < items.length(); i++) {
                    // Long.valueOf replaces the deprecated new Long(String) constructor.
                    result.put(UUID.fromString((String) items.getJSONObject(i).get(SYSTEM_ID)), Long.valueOf((String) items.getJSONObject(i).get(SCTID)));
                }
            }
        } else {
            String statusMsg = getFailureMessage(response);
            LOGGER.error(statusMsg);
            throw new RestClientException(statusMsg);
        }
    } catch (Exception e) {
        String message = "Bulk getOrCreateSctIds job failed.";
        LOGGER.error(message, e);
        throw new RestClientException(message, e);
    }
    LOGGER.debug("End creating sctIds with batch size {} for namespace {} and partitionId {}", uuids.size(), namespaceId, partitionId);
    LOGGER.info("Time taken in seconds:" + (new Date().getTime() - startTime) / 1000);
    return result;
}
/**
 * Requests a scheme id of the given type for each UUID through the id
 * service's bulk job API and waits for the job to finish.
 *
 * @param uuids system ids to register; null or empty returns an empty map
 * @param schemeType the scheme id type to generate
 * @param comment audit comment stored with the request
 * @return map of UUID to its scheme id
 * @throws RestClientException when the bulk job fails or times out
 */
@Override
public Map<UUID, String> getOrCreateSchemeIds(List<UUID> uuids, SchemeIdType schemeType, String comment) throws RestClientException {
    HashMap<UUID, String> result = new HashMap<>();
    // Bug fix: check for null/empty BEFORE any dereference — the old code
    // logged uuids.size() first, which threw a NullPointerException for null input.
    if (uuids == null || uuids.isEmpty()) {
        LOGGER.warn("Empty UUIDs submitted for requesting schemeIdType:" + schemeType);
        return result;
    }
    LOGGER.debug("Start creating scheme id {} with batch size {} ", schemeType, uuids.size());
    long startTime = new Date().getTime();
    List<String> uuidStrings = new ArrayList<>();
    for (UUID uuid : uuids) {
        uuidStrings.add(uuid.toString());
    }
    try {
        JSONObject requestData = new JSONObject();
        requestData.put(QUANTITY, uuids.size());
        requestData.put(SYSTEM_IDS, uuidStrings.toArray());
        requestData.put(SOFTWARE, SRS);
        requestData.put(COMMENT, comment);
        JSONResource response = resty.json(urlHelper.getSchemeIdBulkGenerateUrl(token, schemeType), RestyHelper.content(requestData, APPLICATION_JSON));
        if (HttpStatus.SC_OK == response.getHTTPStatus()) {
            String jobId = response.get("id").toString();
            LOGGER.info("Scheme ids bulk job id:" + jobId + " with batch size:" + uuids.size());
            if (BULK_JOB_STATUS.COMPLETED_WITH_SUCCESS.getCode() == waitForCompleteStatus(jobId, getTimeOutInSeconds())) {
                JSONArray items = resty.json(urlHelper.getBulkJobResultUrl(jobId, token)).array();
                for (int i = 0; i < items.length(); i++) {
                    result.put(UUID.fromString((String) items.getJSONObject(i).get(SYSTEM_ID)), (String) items.getJSONObject(i).get(SCHEME_ID));
                }
            }
        } else {
            throw new RestClientException(getFailureMessage(response));
        }
    } catch (Exception e) {
        String message = "Bulk job getOrCreateSchemeIds failed for schemetype:" + schemeType;
        LOGGER.error(message, e);
        throw new RestClientException(message, e);
    }
    LOGGER.debug("End creating scheme id {} with batch size {} ", schemeType, uuids.size());
    LOGGER.info("Time taken in seconds:" + (new Date().getTime() - startTime) / 1000);
    return result;
}
/**
 * Builds a human-readable failure description from an id-service response,
 * combining the HTTP status and the server's message field.
 */
private String getFailureMessage(JSONResource response) throws Exception {
    StringBuilder msg = new StringBuilder("Received Http status from id service:");
    msg.append(response.getHTTPStatus()).append(" message:").append(response.get(MESSAGE));
    return msg.toString();
}
/**
 * Polls the bulk job status endpoint every 10 seconds until the job leaves
 * the PENDING/RUNNING states or the timeout elapses.
 *
 * @param jobId bulk job id to poll
 * @param timeoutInSeconds maximum time to wait for completion
 * @return the final status code; always COMPLETED_WITH_SUCCESS, since any
 *         other terminal status raises an exception
 * @throws RestClientException on polling errors, client timeout, or
 *         non-successful job completion
 * @throws InterruptedException when the polling sleep is interrupted
 */
private int waitForCompleteStatus(String jobId, int timeoutInSeconds)
        throws RestClientException, InterruptedException {
    String url = urlHelper.getBulkJobStatusUrl(token, jobId);
    long startTime = new Date().getTime();
    int status = 0;
    boolean isCompleted = false;
    String logMsg = null;
    while (!isCompleted) {
        try {
            JSONResource response = resty.json(url);
            Object statusObj = response.get(STATUS);
            status = Integer.parseInt(statusObj.toString());
            Object log = response.get("log");
            if (log != null) {
                logMsg = log.toString();
            }
        } catch (Exception e) {
            String msg = "Rest client error while checking bulk job status:" + url;
            LOGGER.error(msg, e);
            throw new RestClientException(msg, e);
        }
        isCompleted = (BULK_JOB_STATUS.PENDING.getCode() != status && BULK_JOB_STATUS.RUNNING.getCode() != status);
        // 1000L avoids int overflow for very large timeouts.
        if (!isCompleted && ((new Date().getTime() - startTime) > timeoutInSeconds * 1000L)) {
            String message = "Client timeout after waiting " + timeoutInSeconds + " seconds for bulk job to complete:" + url;
            LOGGER.warn(message);
            throw new RestClientException(message);
        }
        if (!isCompleted) {
            Thread.sleep(1000 * 10);
        }
    }
    if (BULK_JOB_STATUS.COMPLETED_WITH_SUCCESS.getCode() != status) {
        // Typo fixed in log message ("finsihed" -> "finished").
        LOGGER.error("Bulk job id {} finished with non successful status {} failureReason: {}", jobId, status, logMsg);
        throw new RestClientException("Bulk job :" + jobId + " did not complete successfully with status code:" + status);
    }
    return status;
}
/**
 * @return maximum number of attempts for retryable id-service calls
 */
public int getMaxTries() {
    return maxTries;
}
/**
 * @param maxTries maximum number of attempts for retryable id-service calls
 */
public void setMaxTries(int maxTries) {
    this.maxTries = maxTries;
}
/**
 * @return seconds to wait between retry attempts
 */
public int getRetryDelaySeconds() {
    return retryDelaySeconds;
}
/**
 * @param retryDelaySeconds seconds to wait between retry attempts
 */
public void setRetryDelaySeconds(int retryDelaySeconds) {
    this.retryDelaySeconds = retryDelaySeconds;
}
/**
 * Closes one logical session. When the last session is closed the shared
 * security token is invalidated on the server and cleared locally.
 *
 * @throws RestClientException when the logout request fails
 */
@Override
public void logOut() throws RestClientException {
    synchronized (LOCK) {
        // Concurrency fix: decrement inside the lock so the decrement and the
        // zero check cannot interleave with a concurrent logIn increment.
        currentSessions.getAndDecrement();
        if (token != null) {
            LOGGER.info("Total current sessions:" + currentSessions.get());
            if (currentSessions.get() == 0) {
                try {
                    JSONObject jsonObject = new JSONObject();
                    jsonObject.put(TOKEN, this.token);
                    resty.json(urlHelper.getLogoutUrl(), RestyHelper.content(jsonObject));
                    // Security fix: do not log the token value itself.
                    LOGGER.info("Id service rest client logged out successfully.");
                    token = null;
                } catch (Exception e) {
                    // Message fix: "Failed to login out" -> "Failed to log out".
                    throw new RestClientException("Failed to log out " + this.userName, e);
                }
            }
        }
    }
}
/**
 * @return maximum seconds to wait for a bulk job to complete
 */
public int getTimeOutInSeconds() {
    return timeOutInSeconds;
}
/**
 * @param timeOutInSeconds maximum seconds to wait for a bulk job to complete
 */
public void setTimeOutInSeconds(int timeOutInSeconds) {
    this.timeOutInSeconds = timeOutInSeconds;
}
/**
 * Publishes the given SCTIDs via the id service's bulk publishing job.
 *
 * @param sctIds ids to publish; null or empty is a no-op that returns true
 * @param namespaceId SNOMED namespace id
 * @param comment audit comment stored with the request
 * @return true when the bulk job completed successfully
 * @throws RestClientException when the job fails or times out
 */
@Override
public boolean publishSctIds(List<Long> sctIds, Integer namespaceId, String comment) throws RestClientException {
    // Bug fix: check for null/empty BEFORE any dereference — the old code
    // logged sctIds.size() first, which threw a NullPointerException for null input.
    if (sctIds == null || sctIds.isEmpty()) {
        return true;
    }
    LOGGER.debug("Start publishing sctIds with batch size {} for namespace {}", sctIds.size(), namespaceId);
    boolean isPublished = false;
    long startTime = new Date().getTime();
    List<String> sctIdStringList = new ArrayList<>();
    for (Long sctId : sctIds) {
        sctIdStringList.add(String.valueOf(sctId));
    }
    try {
        JSONObject requestData = new JSONObject();
        requestData.put(SCTIDS, sctIdStringList.toArray());
        requestData.put(NAMESPACE, namespaceId.intValue());
        requestData.put(SOFTWARE, SRS);
        requestData.put(COMMENT, comment);
        JSONResource response = resty.put(urlHelper.getSctIdBulkPublishingUrl(token), requestData, APPLICATION_JSON);
        if (HttpStatus.SC_OK == response.getHTTPStatus()) {
            String jobId = response.get("id").toString();
            LOGGER.info("Bulk job id:" + jobId + " for publishing sctIds with batch size:" + sctIds.size());
            if (BULK_JOB_STATUS.COMPLETED_WITH_SUCCESS.getCode() == waitForCompleteStatus(jobId, getTimeOutInSeconds())) {
                isPublished = true;
            }
        } else {
            String statusMsg = "Received http status code from id service:" + response.getHTTPStatus();
            LOGGER.error(statusMsg);
            throw new RestClientException(statusMsg);
        }
    } catch (Exception e) {
        String message = "Bulk publishSctIds job failed.";
        LOGGER.error(message, e);
        throw new RestClientException(message, e);
    }
    LOGGER.debug("End publishing sctIds with batch size {} for namespace {}", sctIds.size(), namespaceId);
    LOGGER.info("Time taken in seconds:" + (new Date().getTime() - startTime) / 1000);
    return isPublished;
}
/**
 * Publishes the given scheme ids via the id service's bulk publishing job.
 *
 * @param schemeIds ids to publish; null or empty is a no-op that returns true
 * @param schemeType the scheme id type being published
 * @param comment audit comment stored with the request
 * @return true when the bulk job completed successfully
 * @throws RestClientException when the job fails or times out
 */
@Override
public boolean publishSchemeIds(List<String> schemeIds, SchemeIdType schemeType, String comment) throws RestClientException {
    // Bug fix: check for null/empty BEFORE any dereference — the old code
    // logged schemeIds.size() first, which threw a NullPointerException for
    // null input; its debug format also had two placeholders but one argument.
    if (schemeIds == null || schemeIds.isEmpty()) {
        return true;
    }
    LOGGER.debug("Start publishing schemeIds with batch size {} for scheme type {}", schemeIds.size(), schemeType);
    boolean isPublished = false;
    long startTime = new Date().getTime();
    try {
        JSONObject requestData = new JSONObject();
        requestData.put(SCHEME_IDS, schemeIds.toArray());
        requestData.put(SOFTWARE, SRS);
        requestData.put(COMMENT, comment);
        JSONResource response = resty.put(urlHelper.getSchemeIdBulkPublishingUrl(schemeType, token), requestData, APPLICATION_JSON);
        if (HttpStatus.SC_OK == response.getHTTPStatus()) {
            String jobId = response.get("id").toString();
            // Message fix: these are scheme ids, not sctIds.
            LOGGER.info("Bulk job id:" + jobId + " for publishing schemeIds with batch size:" + schemeIds.size());
            if (BULK_JOB_STATUS.COMPLETED_WITH_SUCCESS.getCode() == waitForCompleteStatus(jobId, getTimeOutInSeconds())) {
                isPublished = true;
            }
        } else {
            String statusMsg = "Received http status code from id service:" + response.getHTTPStatus() + " message:" + response.get(MESSAGE);
            LOGGER.error(statusMsg);
            throw new RestClientException(statusMsg);
        }
    } catch (Exception e) {
        String message = "Bulk publishSchemeIds job failed.";
        LOGGER.error(message, e);
        throw new RestClientException(message, e);
    }
    LOGGER.debug("End publishing schemeIds with batch size {}", schemeIds.size());
    LOGGER.info("Time taken in seconds:" + (new Date().getTime() - startTime) / 1000);
    return isPublished;
}
/**
 * Looks up the current status of each given legacy scheme id, retrying
 * transient failures up to {@code maxTries} times.
 *
 * @param schemeType the scheme id type
 * @param legacyIds ids to query; null or empty returns an empty map
 * @return map of scheme id to its status string
 * @throws RestClientException when the service keeps failing after all attempts
 */
@Override
public Map<String, String> getStatusForSchemeIds(SchemeIdType schemeType, Collection<String> legacyIds) throws RestClientException {
    Map<String,String> result = new HashMap<>();
    if (legacyIds == null || legacyIds.isEmpty()) {
        return result;
    }
    int attempt = 1;
    boolean isDone = false;
    while (!isDone) {
        JSONResource response = null;
        try {
            response = resty.json(urlHelper.getSchemeIdBulkUrl(token, schemeType, legacyIds));
            if (HttpStatus.SC_OK == response.getHTTPStatus()) {
                JSONArray items = response.array();
                for (int i = 0; i < items.length(); i++) {
                    result.put((String) items.getJSONObject(i).get(SCHEME_ID), (String) items.getJSONObject(i).get(STATUS));
                }
            } else {
                throw new RestClientException("http status code is:" + response.getHTTPStatus() + " message:" + response.get(MESSAGE));
            }
            isDone = true;
        } catch (Exception e) {
            if (attempt < maxTries) {
                LOGGER.warn("Id service failed on attempt {}. Waiting {} seconds before retrying.", attempt, retryDelaySeconds, e);
                attempt++;
                try {
                    // 1000L avoids int overflow for large delays.
                    Thread.sleep(retryDelaySeconds * 1000L);
                } catch (InterruptedException ie) {
                    // Restore the interrupt flag; log the actual interruption
                    // (old code passed e instead of ie).
                    Thread.currentThread().interrupt();
                    LOGGER.warn("Retry delay interrupted.", ie);
                }
            } else {
                throw new RestClientException("Failed to get scheme Ids for batch size:" + legacyIds.size(), e);
            }
        }
    }
    return result;
}
/**
 * Fetches the full id-service record for each given SCTID, retrying
 * transient failures up to {@code maxTries} times.
 *
 * @param sctIds ids to query; null or empty returns an empty map
 * @return map of SCTID to its raw JSON record
 * @throws RestClientException when the service keeps failing after all attempts
 */
public Map<Long,JSONObject> getSctIdRecords(Collection<Long> sctIds) throws RestClientException {
    Map<Long,JSONObject> result = new HashMap<>();
    if (sctIds == null || sctIds.isEmpty()) {
        return result;
    }
    int attempt = 1;
    boolean isDone = false;
    while (!isDone) {
        JSONResource response = null;
        try {
            response = resty.json(urlHelper.getSctIdBulkUrl(token, sctIds));
            if (HttpStatus.SC_OK == response.getHTTPStatus()) {
                JSONArray items = response.array();
                for (int i = 0; i < items.length(); i++) {
                    // Long.valueOf replaces the deprecated new Long(String) constructor.
                    result.put(Long.valueOf((String) items.getJSONObject(i).get(SCTID)), items.getJSONObject(i));
                }
            } else {
                throw new RestClientException("http status code is:" + response.getHTTPStatus());
            }
            isDone = true;
        } catch (Exception e) {
            if (attempt < maxTries) {
                LOGGER.warn("Id service failed on attempt {}. Waiting {} seconds before retrying.", attempt, retryDelaySeconds, e);
                attempt++;
                try {
                    // 1000L avoids int overflow for large delays.
                    Thread.sleep(retryDelaySeconds * 1000L);
                } catch (InterruptedException ie) {
                    // Restore the interrupt flag; log the actual interruption
                    // (old code passed e instead of ie).
                    Thread.currentThread().interrupt();
                    LOGGER.warn("Retry delay interrupted.", ie);
                }
            } else {
                throw new RestClientException("Failed to get sctIds for batch size:" + sctIds.size(), e);
            }
        }
    }
    return result;
}
/**
 * Fetches the full id-service record for each given legacy scheme id,
 * retrying transient failures up to {@code maxTries} times.
 *
 * @param schemeType the scheme id type
 * @param legacyIds ids to query; null or empty returns an empty map
 * @return map of scheme id to its raw JSON record
 * @throws RestClientException when the service keeps failing after all attempts
 */
public Map<String, JSONObject> getSchemeIds(SchemeIdType schemeType, Collection<String> legacyIds) throws RestClientException {
    Map<String,JSONObject> result = new HashMap<>();
    if (legacyIds == null || legacyIds.isEmpty()) {
        return result;
    }
    int attempt = 1;
    boolean isDone = false;
    while (!isDone) {
        JSONResource response = null;
        try {
            response = resty.json(urlHelper.getSchemeIdBulkUrl(token, schemeType, legacyIds));
            if (HttpStatus.SC_OK == response.getHTTPStatus()) {
                JSONArray items = response.array();
                for (int i = 0; i < items.length(); i++) {
                    result.put((String) items.getJSONObject(i).get(SCHEME_ID), items.getJSONObject(i));
                }
            } else {
                throw new RestClientException("http status code is:" + response.getHTTPStatus());
            }
            isDone = true;
        } catch (Exception e) {
            if (attempt < maxTries) {
                LOGGER.warn("Id service failed on attempt {}. Waiting {} seconds before retrying.", attempt, retryDelaySeconds, e);
                attempt++;
                try {
                    // 1000L avoids int overflow for large delays.
                    Thread.sleep(retryDelaySeconds * 1000L);
                } catch (InterruptedException ie) {
                    // Restore the interrupt flag; log the actual interruption
                    // (old code passed e instead of ie).
                    Thread.currentThread().interrupt();
                    LOGGER.warn("Retry delay interrupted.", ie);
                }
            } else {
                // Message fix: these are scheme ids, not sctIds.
                throw new RestClientException("Failed to get schemeIds for batch size:" + legacyIds.size(), e);
            }
        }
    }
    return result;
}
/**
 * Fetches the system id (UUID) registered for each given SCTID.
 *
 * @param sctIds ids to look up
 * @return map of SCTID to its system UUID
 * @throws RestClientException when the lookup fails or a record lacks a system id
 */
@Override
public Map<Long, UUID> getUuidsForSctIds(Collection<Long> sctIds) throws RestClientException {
    Map<Long,JSONObject> sctIdRecords = getSctIdRecords(sctIds);
    Map<Long, UUID> sctIdUuidMap = new HashMap<>();
    // Iterate entries directly instead of keySet()+get() to avoid a second
    // map lookup per id.
    for (Map.Entry<Long, JSONObject> entry : sctIdRecords.entrySet()) {
        try {
            sctIdUuidMap.put(entry.getKey(), UUID.fromString((String) entry.getValue().get(SYSTEM_ID)));
        } catch (JSONException e) {
            throw new RestClientException("Error when fetching system id for sctId:" + entry.getKey(), e);
        }
    }
    return sctIdUuidMap;
}
} |
package algorithms.imageProcessing.transform;
import algorithms.matrix.MatrixUtil;
import algorithms.util.FormatArray;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import no.uib.cipr.matrix.NotConvergedException;
/**
* utility methods for camera intrinsic and extrinsic matrices.
*
* KIntrinsic is the camera intrinsic parameters
* KExtrinsic is the camera extrinsic parameters, specifically, the camera
* position and orientation in world coordinates.
*
* to project the world coordinate system point into the camera frame
* canonical form,
* multiply it by the extrinsic matrix (which will translate the
* coordinates and rotate them).
*
* to project the camera frame coordinate system into the image frame,
* multiply it by the intrinsic matrix.
*
* TODO: consider implementing tangential distortion.
* see Heikkila, Janne, and Olli Silven 1997. "A four-step camera calibration
* procedure with implicit image correction."
* Computer Vision and Pattern Recognition, 1997.
* Proceedings., 1997 IEEE Computer Society Conference on. IEEE, 1997.
* @author nichole
*/
public class Camera {
// Numerical tolerance shared by camera utilities.
// NOTE(review): public and mutable (not final) — confirm whether any caller
// deliberately reassigns it; otherwise it should be a constant.
public static double eps = 1e-7;
/**
 * Builds a 3x3 pinhole camera intrinsic matrix assuming square pixels and
 * zero skew. Focal length and principal point are in pixel units.
 * Given the field of view (FOV) and image dimensions, the focal length can
 * be roughly estimated as (image width/2) / tan(FOV/2).
 * The sign of the focal length is used exactly as given; pass a negative
 * focal length for a right-handed coordinate system.
 *
 * @param focalLength focal length of the camera in pixels.
 * @param centerX x coordinate of the principal point in pixels, usually the image center.
 * @param centerY y coordinate of the principal point in pixels, usually the image center.
 * @return the 3x3 intrinsic camera matrix in pixel units.
 */
public static double[][] createIntrinsicCameraMatrix(double focalLength,
    double centerX, double centerY) {
    return new double[][] {
        {focalLength, 0, centerX},
        {0, focalLength, centerY},
        {0, 0, 1}
    };
}
/**
 * Builds a 3x3 pinhole camera intrinsic matrix with independent focal
 * lengths per axis and an explicit skew term. Focal lengths and the
 * principal point are in pixel units.
 * Given the field of view (FOV) and image dimensions, a focal length can be
 * roughly estimated as (image width/2) / tan(FOV/2).
 * The sign of each focal length is used exactly as given; pass negative
 * values for a right-handed coordinate system.
 *
 * @param focalLengthX focal length along the x axis in pixels.
 * @param focalLengthY focal length along the y axis in pixels.
 * @param centerX x coordinate of the principal point in pixels, usually the image center.
 * @param centerY y coordinate of the principal point in pixels, usually the image center.
 * @param skew camera skew.
 * @return the 3x3 intrinsic camera matrix in pixel units.
 */
public static double[][] createIntrinsicCameraMatrix(double focalLengthX,
    double focalLengthY, double centerX, double centerY, double skew) {
    return new double[][] {
        {focalLengthX, skew, centerX},
        {0, focalLengthY, centerY},
        {0, 0, 1}
    };
}
/**
 * Builds the inverse of the intrinsic camera matrix for a camera with square
 * pixels and zero skew, using the closed form
 * [[1/f, 0, -cx/f], [0, 1/f, -cy/f], [0, 0, 1]].
 * Focal length and principal point are in pixel units; the sign of the focal
 * length is used exactly as given (pass a negative value for a right-handed
 * coordinate system).
 *
 * @param focalLength focal length of the camera in pixels.
 * @param centerX x coordinate of the principal point in pixels, usually the image center.
 * @param centerY y coordinate of the principal point in pixels, usually the image center.
 * @return the 3x3 inverse intrinsic camera matrix.
 */
public static double[][] createIntrinsicCameraMatrixInverse(double focalLength,
    double centerX, double centerY) {
    double invF = 1./focalLength;
    return new double[][] {
        {invF, 0, -centerX/focalLength},
        {0, invF, -centerY/focalLength},
        {0, 0, 1}
    };
}
/**
 * Builds the inverse of an intrinsic camera matrix from its closed-form
 * expression. The input is treated as upper-triangular with focal lengths
 * fx = k[0][0], fy = k[1][1], skew g = k[0][1] and principal point
 * (k[0][2], k[1][2]).
 *
 * @param kIntr intrinsic camera matrix.
 * @return intrinsic camera matrix inverse.
 */
public static double[][] createIntrinsicCameraMatrixInverse(double[][] kIntr) {
    double fx = kIntr[0][0];
    double fy = kIntr[1][1];
    double g = kIntr[0][1];
    double xc = kIntr[0][2];
    double yc = kIntr[1][2];
    double fxfy = fx*fy;
    // remaining entries stay 0, matching the closed-form inverse
    double[][] inverse = new double[3][3];
    inverse[0][0] = 1./fx;
    inverse[0][1] = -g/fxfy;
    inverse[0][2] = (-xc/fx) + (g*yc/fxfy);
    inverse[1][1] = 1./fy;
    inverse[1][2] = -yc/fy;
    inverse[2][2] = 1;
    return inverse;
}
/**
 * Builds the 3x4 camera projection matrix from intrinsic and extrinsic
 * parameters: P = K * [R | R*t].
 *
 * @param k camera intrinsics matrix of size 3 x 3.
 * @param r camera extrinsic rotation matrix of size 3 x 3.
 * @param t camera extrinsic translation vector of length 3.
 * @return the camera matrix resulting from intrinsic and extrinsic parameters.
 * the size is 3 x 4.
 */
public static double[][] createCamera(double[][] k, double[][] r, double[] t) {
    if (k.length != 3 || k[0].length != 3) {
        throw new IllegalArgumentException("k must be 3 x 3");
    }
    if (r.length != 3 || r[0].length != 3) {
        throw new IllegalArgumentException("r must be 3 x 3");
    }
    if (t.length != 3) {
        throw new IllegalArgumentException("t must be length 3");
    }
    /*
    The conventional decomposition is P = K * R * [I | -t] = K * [R | -R*t].
    NOTE(review): the code below appends +R*t (not -R*t) as the fourth
    column — confirm which sign convention callers expect for t before
    changing anything.
    */
    double[] rt = MatrixUtil.multiplyMatrixByColumnVector(r, t);
    // build [R | R*t] (3x4), then left-multiply by K
    double[][] kExtr = new double[3][4];
    for (int i = 0; i < 3; ++i) {
        kExtr[i] = new double[4];
        System.arraycopy(r[i], 0, kExtr[i], 0, 3);
        kExtr[i][3] = rt[i];
    }
    double[][] p = MatrixUtil.multiply(k, kExtr);
    return p;
}
/**
 * not ready for use. a quick rough method to estimate the 3D homogeneous point
 * from the 2D-homogenous point and this inverse camera matrix, with caveat
 * about missing information on the last dimension.
 * One should use reconstruction methods instead of this method.
 * to use:
 * <pre>
 * double[][] X = MatrixUtil.multiply(cameraInv, x);
 * then divide each column by the 3rd row.
 * </pre>
 *
 * @param k camera intrinsics matrix of size 3 x 3.
 * @param r camera extrinsic rotation matrix of size 3 x 3.
 * @param t camera extrinsic translation vector of length 3.
 * @return the inverse camera matrix resulting from intrinsic and extrinsic parameters.
 * the size is 4x3
 */
public static double[][] createCameraInverse(double[][] k, double[][] r, double[] t) {
    if (k.length != 3 || k[0].length != 3) {
        throw new IllegalArgumentException("k must be 3 x 3");
    }
    if (r.length != 3 || r[0].length != 3) {
        throw new IllegalArgumentException("r must be 3 x 3");
    }
    if (t.length != 3) {
        throw new IllegalArgumentException("t must be length 3");
    }
    /*
    translation matrix: inverse changes the signs of the translation elements, but not the diagonal.
    rotation matrix: inverse is the transpose of rotation matrix.
    scaling matrix: inverse is performed on each element, that is, the reciprocal.
    */
    // NOTE(review): only the x and y components of t are negated below; the z
    // component keeps its sign. Confirm this asymmetry is intentional — the
    // method is explicitly marked "not ready for use".
    double[] tInv = Arrays.copyOf(t, t.length);
    tInv[0] *= -1;
    tInv[1] *= -1;
    double[] rTInv = MatrixUtil.multiplyMatrixByColumnVector(r, tInv);
    double[][] kInv = Camera.createIntrinsicCameraMatrixInverse(k);
    /*
    inverse of K * R * [I | -t]
    is | r | r*tInv ]^T * kInv
    */
    // assemble [R | R*tInv] (3x4), transpose to 4x3, then right-multiply by K^-1
    double[][] cInv = new double[3][4];
    for (int i = 0; i < 3; ++i) {
        cInv[i] = new double[4];
        System.arraycopy(r[i], 0, cInv[i], 0, 3);
        cInv[i][3] = rTInv[i];
    }
    cInv = MatrixUtil.transpose(cInv);
    cInv = MatrixUtil.multiply(cInv, kInv);
    return cInv;
}
/**
 * Projects camera-frame homogeneous coordinates into pixel coordinates,
 * optionally applying radial distortion before the intrinsic projection.
 * The focal length is used as an absolute value here.
 *
 * @param xC camera-frame coordinates in 3XN format for N points.
 * @param rCoeffs radial distortion coefficients k1, k2; null or empty skips distortion.
 * @param focalLength focal length in pixels.
 * @param centerX x coordinate of the principal point in pixels.
 * @param centerY y coordinate of the principal point in pixels.
 * @param useR2R4 distortion-model flag passed through to applyRadialDistortion.
 * @return pixel coordinates in 3XN format.
 */
public static double[][] cameraToPixelCoordinates(double[][] xC, double[] rCoeffs,
    double focalLength, double centerX, double centerY, boolean useR2R4) {
    double[][] projected = MatrixUtil.copy(xC);
    int nPoints = xC[0].length;
    // normalized pinhole projection: divide x and y by z for every point
    for (int col = 0; col < nPoints; ++col) {
        projected[0][col] /= xC[2][col];
        projected[1][col] /= xC[2][col];
    }
    boolean applyDistortion = (rCoeffs != null && rCoeffs.length > 0);
    if (applyDistortion) {
        // distortion operates in the camera reference frame
        projected = CameraCalibration.applyRadialDistortion(projected, rCoeffs[0], rCoeffs[1], useR2R4);
    }
    double[][] cameraIntr = Camera.createIntrinsicCameraMatrix(Math.abs(focalLength), centerX, centerY);
    return MatrixUtil.multiply(cameraIntr, projected);
}
/**
 * Projects camera-frame homogeneous coordinates into pixel coordinates using
 * the given intrinsic parameters, optionally applying radial distortion
 * before the intrinsic projection.
 *
 * @param xC camera-frame coordinates in 3XN format for N points.
 * @param rCoeffs radial distortion coefficients k1, k2; null or empty skips distortion.
 * @param kIntr camera intrinsic parameters.
 * @param useR2R4 distortion-model flag passed through to applyRadialDistortion.
 * @return pixel coordinates in 3XN format.
 */
public static double[][] cameraToPixelCoordinates(double[][] xC, double[] rCoeffs,
    CameraIntrinsicParameters kIntr, boolean useR2R4) {
    double[][] projected = MatrixUtil.copy(xC);
    int nPoints = xC[0].length;
    // normalized pinhole projection: divide x and y by z for every point
    for (int col = 0; col < nPoints; ++col) {
        projected[0][col] /= xC[2][col];
        projected[1][col] /= xC[2][col];
    }
    if (rCoeffs != null && rCoeffs.length > 0) {
        // distortion operates in the camera reference frame
        projected = CameraCalibration.applyRadialDistortion(projected, rCoeffs[0], rCoeffs[1], useR2R4);
    }
    return MatrixUtil.multiply(kIntr.getIntrinsic(), projected);
}
/**
 * Converts pixel coordinates to normalized camera coordinates: each point is
 * transformed by K^-1 and then Lp2-normalized to a unit-length direction,
 * i.e. (K^-1 * x_i) / ||K^-1 * x_i||.
 *
 * @param x pixel coordinates in 3XN format for N points.
 * @param intrinsic camera intrinsic parameters.
 * @return unit-norm direction vectors in 3XN format.
 * @throws no.uib.cipr.matrix.NotConvergedException
 */
public static double[][] pixelToNormalizedCameraCoordinates(double[][] x,
    CameraIntrinsicParameters intrinsic) throws NotConvergedException {
    double[][] kIntrInv = Camera.createIntrinsicCameraMatrixInverse(intrinsic.getIntrinsic());
    // the direction of each point is K^-1 * x
    double[][] xDirection = MatrixUtil.multiply(kIntrInv, x);
    double sum;
    int i;
    for (int col = 0; col < xDirection[0].length; ++col) {
        // Bug fix: normalize by ||K^-1 * x|| (the transformed vector), not by
        // the magnitude of the raw pixel coordinates as the old code did —
        // that divided xDirection by the wrong norm and did not produce unit
        // vectors.
        sum = (xDirection[0][col]*xDirection[0][col])
            + (xDirection[1][col]*xDirection[1][col])
            + (xDirection[2][col]*xDirection[2][col]);
        sum = Math.sqrt(sum);
        for (i = 0; i < 3; ++i) {
            xDirection[i][col] /= sum;
        }
    }
    return xDirection;
}
/**
 * Transforms world coordinates xW into the camera reference frame by
 * applying (rot * xW + trans) to every point.
 *
 * @param xW coordinates of objects in a world reference frame, 3XN format for N points.
 * @param rot the 3X3 rotation matrix to apply.
 * @param trans the translation (x, y, z) to apply; length 3.
 * @return xW transformed to the camera reference frame.
 */
public static double[][] worldToCameraCoordinates(double[][] xW, double[][] rot,
    double[] trans) {
    if (xW.length != 3) {
        throw new IllegalArgumentException("xW.length must be 3 (for x, y, z)");
    }
    if (trans.length != 3) {
        throw new IllegalArgumentException("trans.length must be 3 (for x, y, z)");
    }
    if (rot.length != 3 || rot[0].length != 3) {
        throw new IllegalArgumentException("rot must be 3X3");
    }
    // rotate all points at once, then add the translation column by column
    double[][] xC = MatrixUtil.multiply(rot, MatrixUtil.copy(xW));
    int nPoints = xW[0].length;
    for (int col = 0; col < nPoints; ++col) {
        for (int row = 0; row < 3; ++row) {
            xC[row][col] += trans[row];
        }
    }
    return xC;
}
/**
 * Transforms a single world-frame point into the camera reference frame by
 * applying (rot * xWPt + trans).
 *
 * @param xWPt coordinates of an object in a world reference frame; length 3.
 * @param rot the 3X3 rotation matrix to apply.
 * @param trans the translation (x, y, z) to apply; length 3.
 * @return the point transformed to the camera reference frame.
 */
public static double[] worldToCameraCoordinates(double[] xWPt, double[][] rot,
    double[] trans) {
    if (xWPt.length != 3) {
        throw new IllegalArgumentException("xW.length must be 3 (for x, y, z)");
    }
    if (trans.length != 3) {
        throw new IllegalArgumentException("trans.length must be 3 (for x, y, z)");
    }
    if (rot.length != 3 || rot[0].length != 3) {
        throw new IllegalArgumentException("rot must be 3X3");
    }
    // rotate a copy of the point, then translate each component
    double[] rotated = MatrixUtil.multiplyMatrixByColumnVector(rot, Arrays.copyOf(xWPt, xWPt.length));
    for (int axis = 0; axis < 3; ++axis) {
        rotated[axis] += trans[axis];
    }
    return rotated;
}
/**
 * transform the world coordinate point xWPt to the camera reference frame using
 * (rot * xW + trans).
 * @param xWPt coordinates of an object in a world reference frame.
 * The length is 3.
 * @param rot the rotation matrix to apply to the translated coordinates.
 * the size is 3X3.
 * @param trans the translation to apply to coordinates xW in x, y, and z.
 * The length of the array is 3.
 * @param aux an auxiliary array of length 3 used internally to hold
 * values for a calculation. it's present to help the invoker reuse an
 * object instead of creating another. its values are not used.
 * @param out output array of length 3; receives the point transformed to the
 * camera reference frame.
 */
public static void worldToCameraCoordinates(double[] xWPt, double[][] rot,
    double[] trans, double[] aux, double[] out) {
    if (xWPt.length != 3) {
        throw new IllegalArgumentException("xW.length must be 3 (for x, y, z)");
    }
    if (trans.length != 3) {
        throw new IllegalArgumentException("trans.length must be 3 (for x, y, z)");
    }
    if (rot.length != 3 || rot[0].length != 3) {
        throw new IllegalArgumentException("rot must be 3X3");
    }
    if (out.length != 3) {
        throw new IllegalArgumentException("out.length must be 3 (for x, y, z)");
    }
    //point_camera = (1/x_z) * (R * point_world + translation) is called the normalized coordinate.
    // NOTE(review): this defensive copy is only needed if
    // multiplyMatrixByColumnVector mutates its input vector — confirm against
    // MatrixUtil; otherwise xWPt could be passed directly.
    double[] xC = Arrays.copyOf(xWPt, xWPt.length);
    // rot * xWPt is written into aux, then translated into out
    MatrixUtil.multiplyMatrixByColumnVector(rot, xC, aux);
    int j;
    for (j = 0; j < 3; ++j) {
        out[j] = aux[j] + trans[j];
    }
}
/**
 * Transforms a single world-frame point into the camera reference frame,
 * writing the result into {@code out}: out = rot * xWPt + trans.
 *
 * @param xWPt coordinates of an object in a world reference frame; length 3.
 * @param rot the 3X3 rotation matrix to apply.
 * @param trans the translation (x, y, z) to apply; length 3.
 * @param out output array of length 3; receives the transformed point.
 */
public static void worldToCameraCoordinates(double[] xWPt, double[][] rot,
    double[] trans, double[] out) {
    if (xWPt.length != 3) {
        throw new IllegalArgumentException("xW.length must be 3 (for x, y, z)");
    }
    if (trans.length != 3) {
        throw new IllegalArgumentException("trans.length must be 3 (for x, y, z)");
    }
    if (rot.length != 3 || rot[0].length != 3) {
        throw new IllegalArgumentException("rot must be 3X3");
    }
    double[] x2 = Arrays.copyOf(xWPt, xWPt.length);
    // rot * xWPt is written into out by the helper
    MatrixUtil.multiplyMatrixByColumnVector(rot, x2, out);
    // Bug fix: add the translation to the ROTATED point already in out. The
    // old code did out[j] = x2[j] + trans[j], which overwrote the rotation
    // result with the un-rotated input — inconsistent with every other
    // worldToCameraCoordinates overload, which all compute rot*x + trans.
    int j;
    for (j = 0; j < 3; ++j) {
        out[j] += trans[j];
    }
}
public static double[][] pixelToCameraCoordinates(double[][] x,
CameraIntrinsicParameters kIntr, double[] rCoeffs, boolean useR2R4)
throws NotConvergedException, IOException {
double[][] intr = MatrixUtil.copy(kIntr.getIntrinsic());
return pixelToCameraCoordinates(x, intr, rCoeffs, useR2R4);
}
    /**
     * transform the given pixel coordinates into camera coordinates, optionally
     * removing radial distortion.
     * NOTE(review): this method modifies the given intr matrix in place (the focal
     * lengths are replaced by their absolute values).  Callers that need to keep
     * the original matrix should pass in a copy, as the CameraIntrinsicParameters
     * overload does.
     * @param x the pixel coordinates.  presumably 3XN homogeneous format -- TODO confirm
     * @param intr the 3X3 intrinsic camera matrix.  modified in place (see note above).
     * @param rCoeffs radial distortion coefficients (first two elements are used),
     * or null to skip distortion removal.
     * @param useR2R4 distortion model flag passed through to
     * CameraCalibration.removeRadialDistortion().
     * @return the coordinates transformed into the camera reference frame.
     * @throws NotConvergedException
     * @throws IOException
     */
    public static double[][] pixelToCameraCoordinates(double[][] x,
        double[][] intr, double[] rCoeffs, boolean useR2R4)
        throws NotConvergedException, IOException {
        // use absolute value of focal lengths
        intr[0][0] = Math.abs(intr[0][0]);
        intr[1][1] = Math.abs(intr[1][1]);
        double[][] cameraIntrInv = Camera.createIntrinsicCameraMatrixInverse(
            intr);
        // put x into camera coordinates reference frame:
        double[][] pix = MatrixUtil.multiply(cameraIntrInv, x);
        if (rCoeffs != null) {
            pix = CameraCalibration.removeRadialDistortion(pix, rCoeffs[0],
                rCoeffs[1], useR2R4);
        }
        return pix;
    }
/**
* transform 2D pixel measurements x_i to unit norm 3D directions
* <pre>
* references Szeliski 2010, eqn (6.36)
* </pre>
* @param x 2D pixel measurements in format 3XN where the rows are x, y, and "1"'s
* and the columns are each point in the N points.
* @param k camera intrinsic parameters
* @return
*/
public static double[][] transformToUnitNormDirections(double[][] x,
CameraIntrinsicParameters k) {
if (x.length != 3) {
throw new IllegalArgumentException("x length must be 3");
}
double[][] kInv = Camera.createIntrinsicCameraMatrixInverse(k.intrinsic);
//(K^-1 * x_i)/(||K^-1 * x_i||)
double[][] norm = MatrixUtil.multiply(kInv, x);
int i, j;
double sum = 0;
for (j = 0; j < norm.length; ++j) {
sum = 0;
for (i = 0; i < norm.length; ++i) {
sum += (norm[i][j]*norm[i][j]);
}
sum = Math.sqrt(sum);
for (i = 0; i < norm.length; ++i) {
norm[i][j] /= sum;
}
}
return norm;
}
public static class CameraIntrinsicParameters {
private double[][] intrinsic;
private double lambda;
public CameraIntrinsicParameters(double[][] k) {
this.intrinsic = k;
}
public CameraIntrinsicParameters() {
}
/**
* @return the intrinsic parameters
*/
public double[][] getIntrinsic() {
return intrinsic;
}
/**
* @param intrinsic the intrinsic parameters to set
*/
public void setIntrinsic(double[][] intrinsic) {
this.intrinsic = intrinsic;
}
/**
* @return the lambda the scale factor used in projection
*/
public double getLambda() {
return lambda;
}
/**
* @param lambda the lambda to set for scale factor of projection
*/
public void setLambda(double lambda) {
this.lambda = lambda;
}
}
public static class CameraProjection {
/**
* the projection matrix of a camera which is a 3X4 matrix of
* intrinsic times extrinsic parameter matrices
*/
private double[][] p;
public CameraProjection(double[][] projection) {
this.p = projection;
}
/**
* @return the p
*/
public double[][] getP() {
return p;
}
/**
* @param p the p to set
*/
public void setP(double[][] p) {
this.p = p;
}
}
public static class CameraParameters {
private final CameraIntrinsicParameters intrinsicParameters;
private final CameraExtrinsicParameters extrinsicParameters;
public CameraParameters(CameraIntrinsicParameters intrinsics,
CameraExtrinsicParameters extrinsics) {
this.intrinsicParameters = intrinsics;
this.extrinsicParameters = extrinsics;
}
public double[][] createProjectionMatrix() {
double[][] rt = new double[3][4];
int i, j;
for (i = 0; i < 3; ++i) {
rt[i] = new double[4];
System.arraycopy(extrinsicParameters.rotation[i], 0, rt[i], 0, 3);
rt[i][3] = extrinsicParameters.translation[i];
}
double[][] p = MatrixUtil.multiply(intrinsicParameters.getIntrinsic(), rt);
return p;
}
/**
* @return the intrinsicParameters
*/
public CameraIntrinsicParameters getIntrinsicParameters() {
return intrinsicParameters;
}
/**
* @return the extrinsicParameters
*/
public CameraExtrinsicParameters getExtrinsicParameters() {
return extrinsicParameters;
}
}
public static class CameraExtrinsicParameters {
private double[][] rotation;
private double[] translation;
/**
* @return the rotation
*/
public double[][] getRotation() {
return rotation;
}
/**
* @param rotation the rotation to set
*/
public void setRotation(double[][] rotation) {
this.rotation = rotation;
}
/**
* @return the translation
*/
public double[] getTranslation() {
return translation;
}
/**
* @param translation the translation to set
*/
public void setTranslation(double[] translation) {
this.translation = translation;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("rot=\n");
if (rotation != null) {
sb.append(FormatArray.toString(rotation, "%.4e"));
}
sb.append("trans=\n");
if (translation != null) {
sb.append(FormatArray.toString(translation, "%.4e"));
}
return sb.toString();
}
}
public static class CameraMatrices {
private CameraIntrinsicParameters intrinsics;
private List<CameraExtrinsicParameters> extrinsics = new ArrayList<CameraExtrinsicParameters>();
private double[] radialDistortion;
private boolean useR2R4;
/**
* @return the radialDistortion
*/
public double[] getRadialDistortCoeff() {
return radialDistortion;
}
/**
* @return true if using radial distortion function from Ma et al. 2004
for model #4 in Table 2, f(r) = 1 +k1*r^2 + k2*r^4,
else return false if using model #3 f(r) = 1 +k1*r + k2*r^2.
*/
public boolean useR2R4() {
return useR2R4;
}
/**
* @param radialDistortion the radialDistortion to set
* @param useR2R4 use radial distortion function from Ma et al. 2004 for model #4 in Table 2,
f(r) = 1 +k1*r^2 + k2*r^4 if true,
else use model #3 f(r) = 1 +k1*r + k2*r^2.
*/
public void setRadialDistortion(double[] radialDistortion, boolean useR2R4) {
this.radialDistortion = radialDistortion;
this.useR2R4 = useR2R4;
}
/**
* @return the intrinsics
*/
public CameraIntrinsicParameters getIntrinsics() {
return intrinsics;
}
/**
* @param intrinsics the intrinsics to set
*/
public void setIntrinsics(CameraIntrinsicParameters intrinsics) {
this.intrinsics = intrinsics;
}
/**
* @return the extrinsics
*/
public List<CameraExtrinsicParameters> getExtrinsics() {
return extrinsics;
}
/**
* @param extrinsics the extrinsics to set
*/
public void addExtrinsics(CameraExtrinsicParameters extrinsics) {
this.extrinsics.add(extrinsics);
}
/**
* @param extrinsics the extrinsics to set
*/
public void addExtrinsics(List<CameraExtrinsicParameters> extrinsics) {
this.extrinsics.addAll(extrinsics);
}
}
} |
package io.debezium.connector.mysql;
import static io.debezium.util.Strings.isNullOrEmpty;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.sql.SQLException;
import java.time.Duration;
import java.time.Instant;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Predicate;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.github.shyiko.mysql.binlog.BinaryLogClient.LifecycleListener;
import com.github.shyiko.mysql.binlog.event.DeleteRowsEventData;
import com.github.shyiko.mysql.binlog.event.Event;
import com.github.shyiko.mysql.binlog.event.EventData;
import com.github.shyiko.mysql.binlog.event.EventHeader;
import com.github.shyiko.mysql.binlog.event.EventHeaderV4;
import com.github.shyiko.mysql.binlog.event.EventType;
import com.github.shyiko.mysql.binlog.event.GtidEventData;
import com.github.shyiko.mysql.binlog.event.QueryEventData;
import com.github.shyiko.mysql.binlog.event.RotateEventData;
import com.github.shyiko.mysql.binlog.event.RowsQueryEventData;
import com.github.shyiko.mysql.binlog.event.TableMapEventData;
import com.github.shyiko.mysql.binlog.event.UpdateRowsEventData;
import com.github.shyiko.mysql.binlog.event.WriteRowsEventData;
import com.github.shyiko.mysql.binlog.event.deserialization.EventDataDeserializationException;
import com.github.shyiko.mysql.binlog.event.deserialization.EventDeserializer;
import com.github.shyiko.mysql.binlog.event.deserialization.GtidEventDataDeserializer;
import com.github.shyiko.mysql.binlog.io.ByteArrayInputStream;
import com.github.shyiko.mysql.binlog.network.AuthenticationException;
import com.github.shyiko.mysql.binlog.network.DefaultSSLSocketFactory;
import com.github.shyiko.mysql.binlog.network.SSLMode;
import com.github.shyiko.mysql.binlog.network.SSLSocketFactory;
import com.github.shyiko.mysql.binlog.network.ServerException;
import io.debezium.DebeziumException;
import io.debezium.annotation.SingleThreadAccess;
import io.debezium.config.CommonConnectorConfig.EventProcessingFailureHandlingMode;
import io.debezium.config.Configuration;
import io.debezium.connector.mysql.MySqlConnectorConfig.GtidNewChannelPosition;
import io.debezium.connector.mysql.MySqlConnectorConfig.SecureConnectionMode;
import io.debezium.data.Envelope.Operation;
import io.debezium.function.BlockingConsumer;
import io.debezium.pipeline.ErrorHandler;
import io.debezium.pipeline.EventDispatcher;
import io.debezium.pipeline.source.spi.StreamingChangeEventSource;
import io.debezium.relational.TableId;
import io.debezium.schema.SchemaChangeEvent;
import io.debezium.util.Clock;
import io.debezium.util.Metronome;
import io.debezium.util.Strings;
import io.debezium.util.Threads;
/**
*
* @author Jiri Pechanec
*/
public class MySqlStreamingChangeEventSource implements StreamingChangeEventSource<MySqlPartition, MySqlOffsetContext> {
    private static final Logger LOGGER = LoggerFactory.getLogger(MySqlStreamingChangeEventSource.class);
    private static final String KEEPALIVE_THREAD_NAME = "blc-keepalive";
    // per-event-type handlers consulted by handleEvent(); cleared on failure or interruption
    private final EnumMap<EventType, BlockingConsumer<Event>> eventHandlers = new EnumMap<>(EventType.class);
    private final BinaryLogClient client;
    private final MySqlStreamingChangeEventSourceMetrics metrics;
    private final Clock clock;
    // how to react to event deserialization failures (fail / warn / skip)
    private final EventProcessingFailureHandlingMode eventDeserializationFailureHandlingMode;
    // how to react to events for tables whose schema is unknown (fail / warn / skip)
    private final EventProcessingFailureHandlingMode inconsistentSchemaHandlingMode;
    // restart bookkeeping: rows/events of a partially-processed transaction to skip on resume
    private int startingRowNumber = 0;
    private long initialEventsToSkip = 0L;
    private boolean skipEvent = false;
    // set per GTID transaction when its source UUID is excluded by the DML source filter
    private boolean ignoreDmlEventByGtidSource = false;
    private final Predicate<String> gtidDmlSourceFilter;
    private final AtomicLong totalRecordCounter = new AtomicLong();
    // last offset successfully processed; used for logging diagnostics only
    private volatile Map<String, ?> lastOffset = null;
    // running GTID set built from the binlog, regardless of connection-time filtering
    private com.github.shyiko.mysql.binlog.GtidSet gtidSet;
    // heartbeat interval = keepAliveInterval * this factor, so heartbeats fit within the keep-alive window
    private final float heartbeatIntervalFactor = 0.8f;
    private final Map<String, Thread> binaryLogClientThreads = new ConcurrentHashMap<>(4);
    private final MySqlTaskContext taskContext;
    private final MySqlConnectorConfig connectorConfig;
    private final MySqlConnection connection;
    private final EventDispatcher<MySqlPartition, TableId> eventDispatcher;
    private final ErrorHandler errorHandler;
    // timestamp of the event currently being processed; null for HEARTBEAT events
    @SingleThreadAccess("binlog client thread")
    private Instant eventTimestamp;
public static class BinlogPosition {
final String filename;
final long position;
public BinlogPosition(String filename, long position) {
assert filename != null;
this.filename = filename;
this.position = position;
}
public String getFilename() {
return filename;
}
public long getPosition() {
return position;
}
@Override
public String toString() {
return filename + "/" + position;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + filename.hashCode();
result = prime * result + (int) (position ^ (position >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
BinlogPosition other = (BinlogPosition) obj;
if (!filename.equals(other.filename)) {
return false;
}
if (position != other.position) {
return false;
}
return true;
}
}
    /**
     * Callback used to emit a change record for a single binlog row-event payload.
     *
     * @param <T> the type of the row-event data
     */
    @FunctionalInterface
    private static interface BinlogChangeEmitter<T> {
        void emit(TableId tableId, T data) throws InterruptedException;
    }
    /**
     * Create the streaming source: configures the {@link BinaryLogClient} obtained from the
     * task context (thread factory, server id, SSL, keep-alive and heartbeat intervals) and
     * installs an {@link EventDeserializer} with custom per-event-type deserializers.
     *
     * @param connectorConfig the connector configuration; may not be null
     * @param connection the connection to the MySQL server; may not be null
     * @param dispatcher the dispatcher used to emit change events; may not be null
     * @param errorHandler the handler to which processing failures are propagated; may not be null
     * @param clock the clock used for lag metrics; may not be null
     * @param taskContext the task context providing the schema and the binlog client; may not be null
     * @param metrics the streaming metrics to update; may not be null
     */
    public MySqlStreamingChangeEventSource(MySqlConnectorConfig connectorConfig, MySqlConnection connection,
                                           EventDispatcher<MySqlPartition, TableId> dispatcher, ErrorHandler errorHandler, Clock clock,
                                           MySqlTaskContext taskContext, MySqlStreamingChangeEventSourceMetrics metrics) {
        this.taskContext = taskContext;
        this.connectorConfig = connectorConfig;
        this.connection = connection;
        this.clock = clock;
        this.eventDispatcher = dispatcher;
        this.errorHandler = errorHandler;
        this.metrics = metrics;
        eventDeserializationFailureHandlingMode = connectorConfig.getEventProcessingFailureHandlingMode();
        inconsistentSchemaHandlingMode = connectorConfig.inconsistentSchemaFailureHandlingMode();
        // Set up the log reader ...
        client = taskContext.getBinaryLogClient();
        // BinaryLogClient will overwrite thread names later
        client.setThreadFactory(
                Threads.threadFactory(MySqlConnector.class, connectorConfig.getLogicalName(), "binlog-client", false, false,
                        x -> binaryLogClientThreads.put(x.getName(), x)));
        client.setServerId(connectorConfig.serverId());
        client.setSSLMode(sslModeFor(connectorConfig.sslMode()));
        if (connectorConfig.sslModeEnabled()) {
            SSLSocketFactory sslSocketFactory = getBinlogSslSocketFactory(connectorConfig, connection);
            if (sslSocketFactory != null) {
                client.setSslSocketFactory(sslSocketFactory);
            }
        }
        Configuration configuration = connectorConfig.getConfig();
        client.setKeepAlive(configuration.getBoolean(MySqlConnectorConfig.KEEP_ALIVE));
        final long keepAliveInterval = configuration.getLong(MySqlConnectorConfig.KEEP_ALIVE_INTERVAL_MS);
        client.setKeepAliveInterval(keepAliveInterval);
        // Considering heartbeatInterval should be less than keepAliveInterval, we use the heartbeatIntervalFactor
        // multiplied by keepAliveInterval and set the result value to heartbeatInterval. The default value of heartbeatIntervalFactor
        // is 0.8, and we believe the left time (0.2 * keepAliveInterval) is enough to process the packet received from the MySQL server.
        client.setHeartbeatInterval((long) (keepAliveInterval * heartbeatIntervalFactor));
        boolean filterDmlEventsByGtidSource = configuration.getBoolean(MySqlConnectorConfig.GTID_SOURCE_FILTER_DML_EVENTS);
        gtidDmlSourceFilter = filterDmlEventsByGtidSource ? connectorConfig.gtidSourceFilter() : null;
        // Set up the event deserializer with additional type(s) ...
        final Map<Long, TableMapEventData> tableMapEventByTableId = new HashMap<Long, TableMapEventData>();
        EventDeserializer eventDeserializer = new EventDeserializer() {
            @Override
            public Event nextEvent(ByteArrayInputStream inputStream) throws IOException {
                try {
                    // Delegate to the superclass ...
                    Event event = super.nextEvent(inputStream);
                    // We have to record the most recent TableMapEventData for each table number for our custom deserializers ...
                    if (event.getHeader().getEventType() == EventType.TABLE_MAP) {
                        TableMapEventData tableMapEvent = event.getData();
                        tableMapEventByTableId.put(tableMapEvent.getTableId(), tableMapEvent);
                    }
                    return event;
                }
                // DBZ-217 In case an event couldn't be read we create a pseudo-event for the sake of logging
                catch (EventDataDeserializationException edde) {
                    // DBZ-3095 As of Java 15, when reaching EOF in the binlog stream, the polling loop in
                    // BinaryLogClient#listenForEventPackets() keeps returning values != -1 from peek();
                    // this causes the loop to never finish
                    // Propagating the exception (either EOF or socket closed) causes the loop to be aborted
                    // in this case
                    if (edde.getCause() instanceof IOException) {
                        throw edde;
                    }
                    // Build a synthetic INCIDENT event carrying the failure so it can be logged downstream ...
                    EventHeaderV4 header = new EventHeaderV4();
                    header.setEventType(EventType.INCIDENT);
                    header.setTimestamp(edde.getEventHeader().getTimestamp());
                    header.setServerId(edde.getEventHeader().getServerId());
                    if (edde.getEventHeader() instanceof EventHeaderV4) {
                        header.setEventLength(((EventHeaderV4) edde.getEventHeader()).getEventLength());
                        header.setNextPosition(((EventHeaderV4) edde.getEventHeader()).getNextPosition());
                        header.setFlags(((EventHeaderV4) edde.getEventHeader()).getFlags());
                    }
                    EventData data = new EventDataDeserializationExceptionData(edde);
                    return new Event(header, data);
                }
            }
        };
        // Add our custom deserializers ...
        eventDeserializer.setEventDataDeserializer(EventType.STOP, new StopEventDataDeserializer());
        eventDeserializer.setEventDataDeserializer(EventType.GTID, new GtidEventDataDeserializer());
        eventDeserializer.setEventDataDeserializer(EventType.WRITE_ROWS,
                new RowDeserializers.WriteRowsDeserializer(tableMapEventByTableId));
        eventDeserializer.setEventDataDeserializer(EventType.UPDATE_ROWS,
                new RowDeserializers.UpdateRowsDeserializer(tableMapEventByTableId));
        eventDeserializer.setEventDataDeserializer(EventType.DELETE_ROWS,
                new RowDeserializers.DeleteRowsDeserializer(tableMapEventByTableId));
        eventDeserializer.setEventDataDeserializer(EventType.EXT_WRITE_ROWS,
                new RowDeserializers.WriteRowsDeserializer(
                        tableMapEventByTableId).setMayContainExtraInformation(true));
        eventDeserializer.setEventDataDeserializer(EventType.EXT_UPDATE_ROWS,
                new RowDeserializers.UpdateRowsDeserializer(
                        tableMapEventByTableId).setMayContainExtraInformation(true));
        eventDeserializer.setEventDataDeserializer(EventType.EXT_DELETE_ROWS,
                new RowDeserializers.DeleteRowsDeserializer(
                        tableMapEventByTableId).setMayContainExtraInformation(true));
        client.setEventDeserializer(eventDeserializer);
    }
protected void onEvent(MySqlOffsetContext offsetContext, Event event) {
long ts = 0;
if (event.getHeader().getEventType() == EventType.HEARTBEAT) {
// HEARTBEAT events have no timestamp but are fired only when
// there is no traffic on the connection which means we are caught-up
metrics.setMilliSecondsBehindSource(ts);
return;
}
// MySQL has seconds resolution but mysql-binlog-connector-java returns
// a value in milliseconds
long eventTs = event.getHeader().getTimestamp();
if (eventTs == 0) {
LOGGER.trace("Received unexpected event with 0 timestamp: {}", event);
return;
}
ts = clock.currentTimeInMillis() - eventTs;
LOGGER.trace("Current milliseconds behind source: {} ms", ts);
metrics.setMilliSecondsBehindSource(ts);
}
    /**
     * Fallback handler for event types that have no registered handler; logs and discards the event.
     *
     * @param event the event to be ignored; may not be null
     */
    protected void ignoreEvent(MySqlOffsetContext offsetContext, Event event) {
        LOGGER.trace("Ignoring event due to missing handler: {}", event);
    }
    /**
     * Process a single event read from the binlog: update the offset bookkeeping
     * (event timestamp, server id, binlog position), then dispatch the event to its
     * registered handler, emitting a heartbeat and completing the offset afterwards.
     * Runtime failures are propagated to the error handler and all handlers are
     * cleared so subsequent events are ignored.
     *
     * @param event the binlog event to process; ignored if null
     */
    protected void handleEvent(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) {
        if (event == null) {
            return;
        }
        final EventHeader eventHeader = event.getHeader();
        // Update the source offset info. Note that the client returns the value in *milliseconds*, even though the binlog
        // contains only *seconds* precision ...
        // HEARTBEAT events have no timestamp; only set the timestamp if the event is not a HEARTBEAT
        eventTimestamp = !eventHeader.getEventType().equals(EventType.HEARTBEAT) ? Instant.ofEpochMilli(eventHeader.getTimestamp()) : null;
        offsetContext.setBinlogServerId(eventHeader.getServerId());
        final EventType eventType = eventHeader.getEventType();
        if (eventType == EventType.ROTATE) {
            EventData eventData = event.getData();
            RotateEventData rotateEventData;
            if (eventData instanceof EventDeserializer.EventDataWrapper) {
                rotateEventData = (RotateEventData) ((EventDeserializer.EventDataWrapper) eventData).getInternal();
            }
            else {
                rotateEventData = (RotateEventData) eventData;
            }
            offsetContext.setBinlogStartPoint(rotateEventData.getBinlogFilename(), rotateEventData.getBinlogPosition());
        }
        else if (eventHeader instanceof EventHeaderV4) {
            EventHeaderV4 trackableEventHeader = (EventHeaderV4) eventHeader;
            offsetContext.setEventPosition(trackableEventHeader.getPosition(), trackableEventHeader.getEventLength());
        }
        // If there is a handler for this event, forward the event to it ...
        try {
            // Forward the event to the handler ...
            eventHandlers.getOrDefault(eventType, (e) -> ignoreEvent(offsetContext, e)).accept(event);
            // Generate heartbeat message if the time is right
            eventDispatcher.dispatchHeartbeatEvent(partition, offsetContext);
            // Capture that we've completed another event ...
            offsetContext.completeEvent();
            // update last offset used for logging
            lastOffset = offsetContext.getOffset();
            if (skipEvent) {
                // We're in the mode of skipping events and we just skipped this one, so decrement our skip count ...
                --initialEventsToSkip;
                skipEvent = initialEventsToSkip > 0;
            }
        }
        catch (RuntimeException e) {
            // There was an error in the event handler, so propagate the failure to Kafka Connect ...
            logStreamingSourceState();
            errorHandler.setProducerThrowable(new DebeziumException("Error processing binlog event", e));
            // Do not stop the client, since Kafka Connect should stop the connector on its own
            // (and doing it here may cause problems the second time it is stopped).
            // We can clear the listeners though so that we ignore all future events ...
            eventHandlers.clear();
            LOGGER.info(
                    "Error processing binlog event, and propagating to Kafka Connect so it stops this connector. Future binlog events read before connector is shutdown will be ignored.");
        }
        catch (InterruptedException e) {
            // Most likely because this reader was stopped and our thread was interrupted ...
            Thread.currentThread().interrupt();
            eventHandlers.clear();
            LOGGER.info("Stopped processing binlog events due to thread interruption");
        }
    }
@SuppressWarnings("unchecked")
protected <T extends EventData> T unwrapData(Event event) {
EventData eventData = event.getData();
if (eventData instanceof EventDeserializer.EventDataWrapper) {
eventData = ((EventDeserializer.EventDataWrapper) eventData).getInternal();
}
return (T) eventData;
}
    /**
     * Handle the supplied event that signals that mysqld has stopped. Currently only logged.
     *
     * @param event the server stopped event to be processed; may not be null
     */
    protected void handleServerStop(MySqlOffsetContext offsetContext, Event event) {
        LOGGER.debug("Server stopped: {}", event);
    }
    /**
     * Handle the supplied event that is sent by a primary to a replica to let the replica know that the primary is still alive. Not
     * written to a binary log.
     *
     * @param event the server heartbeat event to be processed; may not be null
     * @throws InterruptedException if the thread is interrupted while dispatching the heartbeat
     */
    protected void handleServerHeartbeat(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
        LOGGER.trace("Server heartbeat: {}", event);
        eventDispatcher.dispatchServerHeartbeatEvent(partition, offsetContext);
    }
    /**
     * Handle the supplied event that signals that an out of the ordinary event that occurred on the master. It notifies the replica
     * that something happened on the primary that might cause data to be in an inconsistent state.
     * For synthetic INCIDENT events carrying a deserialization failure, react per the configured
     * failure handling mode (fail / warn / skip).
     *
     * @param event the server incident event to be processed; may not be null
     */
    protected void handleServerIncident(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) {
        if (event.getData() instanceof EventDataDeserializationExceptionData) {
            metrics.onErroneousEvent(partition, "source = " + event);
            EventDataDeserializationExceptionData data = event.getData();
            EventHeaderV4 eventHeader = (EventHeaderV4) data.getCause().getEventHeader(); // safe cast, instantiated that ourselves
            // logging some additional context but not the exception itself, this will happen in handleEvent()
            if (eventDeserializationFailureHandlingMode == EventProcessingFailureHandlingMode.FAIL) {
                LOGGER.error(
                        "Error while deserializing binlog event at offset {}.{}" +
                                "Use the mysqlbinlog tool to view the problematic event: mysqlbinlog --start-position={} --stop-position={} --verbose {}",
                        offsetContext.getOffset(),
                        System.lineSeparator(),
                        eventHeader.getPosition(),
                        eventHeader.getNextPosition(),
                        offsetContext.getSource().binlogFilename());
                throw new RuntimeException(data.getCause());
            }
            else if (eventDeserializationFailureHandlingMode == EventProcessingFailureHandlingMode.WARN) {
                LOGGER.warn(
                        "Error while deserializing binlog event at offset {}.{}" +
                                "This exception will be ignored and the event be skipped.{}" +
                                "Use the mysqlbinlog tool to view the problematic event: mysqlbinlog --start-position={} --stop-position={} --verbose {}",
                        offsetContext.getOffset(),
                        System.lineSeparator(),
                        System.lineSeparator(),
                        eventHeader.getPosition(),
                        eventHeader.getNextPosition(),
                        offsetContext.getSource().binlogFilename(),
                        data.getCause());
            }
        }
        else {
            LOGGER.error("Server incident: {}", event);
        }
    }
    /**
     * Handle the supplied event with a {@link RotateEventData} that signals the logs are being rotated. This means that either
     * the server was restarted, or the binlog has transitioned to a new file. In either case, subsequent table numbers will be
     * different than those seen to this point, so the cached table-number mappings are cleared.
     *
     * @param event the database change data event to be processed; may not be null
     */
    protected void handleRotateLogsEvent(MySqlOffsetContext offsetContext, Event event) {
        LOGGER.debug("Rotating logs: {}", event);
        RotateEventData command = unwrapData(event);
        assert command != null;
        // table numbers are only valid within a binlog file; drop the stale mappings
        taskContext.getSchema().clearTableMappings();
    }
/**
* Handle the supplied event with a {@link GtidEventData} that signals the beginning of a GTID transaction.
* We don't yet know whether this transaction contains any events we're interested in, but we have to record
* it so that we know the position of this event and know we've processed the binlog to this point.
* <p>
* Note that this captures the current GTID and complete GTID set, regardless of whether the connector is
* {@link MySqlTaskContext#gtidSourceFilter() filtering} the GTID set upon connection. We do this because
* we actually want to capture all GTID set values found in the binlog, whether or not we process them.
* However, only when we connect do we actually want to pass to MySQL only those GTID ranges that are applicable
* per the configuration.
*
* @param event the GTID event to be processed; may not be null
*/
protected void handleGtidEvent(MySqlOffsetContext offsetContext, Event event) {
LOGGER.debug("GTID transaction: {}", event);
GtidEventData gtidEvent = unwrapData(event);
String gtid = gtidEvent.getGtid();
gtidSet.add(gtid);
offsetContext.startGtid(gtid, gtidSet.toString()); // rather than use the client's GTID set
ignoreDmlEventByGtidSource = false;
if (gtidDmlSourceFilter != null && gtid != null) {
String uuid = gtid.trim().substring(0, gtid.indexOf(":"));
if (!gtidDmlSourceFilter.test(uuid)) {
ignoreDmlEventByGtidSource = true;
}
}
metrics.onGtidChange(gtid);
}
/**
* Handle the supplied event with an {@link RowsQueryEventData} by recording the original SQL query
* that generated the event.
*
* @param event the database change data event to be processed; may not be null
*/
protected void handleRowsQuery(MySqlOffsetContext offsetContext, Event event) {
// Unwrap the RowsQueryEvent
final RowsQueryEventData lastRowsQueryEventData = unwrapData(event);
// Set the query on the source
offsetContext.setQuery(lastRowsQueryEventData.getQuery());
}
/**
* Handle the supplied event with an {@link QueryEventData} by possibly recording the DDL statements as changes in the
* MySQL schemas.
*
* @param partition the partition in which the even occurred
* @param event the database change data event to be processed; may not be null
* @throws InterruptedException if this thread is interrupted while recording the DDL statements
*/
protected void handleQueryEvent(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
QueryEventData command = unwrapData(event);
LOGGER.debug("Received query command: {}", event);
String sql = command.getSql().trim();
if (sql.equalsIgnoreCase("BEGIN")) {
// We are starting a new transaction ...
offsetContext.startNextTransaction();
eventDispatcher.dispatchTransactionStartedEvent(partition, offsetContext.getTransactionId(), offsetContext);
offsetContext.setBinlogThread(command.getThreadId());
if (initialEventsToSkip != 0) {
LOGGER.debug("Restarting partially-processed transaction; change events will not be created for the first {} events plus {} more rows in the next event",
initialEventsToSkip, startingRowNumber);
// We are restarting, so we need to skip the events in this transaction that we processed previously...
skipEvent = true;
}
return;
}
if (sql.equalsIgnoreCase("COMMIT")) {
handleTransactionCompletion(partition, offsetContext, event);
return;
}
String upperCasedStatementBegin = Strings.getBegin(sql, 7).toUpperCase();
if (upperCasedStatementBegin.startsWith("XA ")) {
// This is an XA transaction, and we currently ignore these and do nothing ...
return;
}
if (connectorConfig.getDdlFilter().test(sql)) {
LOGGER.debug("DDL '{}' was filtered out of processing", sql);
return;
}
if (upperCasedStatementBegin.equals("INSERT ") || upperCasedStatementBegin.equals("UPDATE ") || upperCasedStatementBegin.equals("DELETE ")) {
if (eventDeserializationFailureHandlingMode == EventProcessingFailureHandlingMode.FAIL) {
throw new DebeziumException(
"Received DML '" + sql + "' for processing, binlog probably contains events generated with statement or mixed based replication format");
}
else if (eventDeserializationFailureHandlingMode == EventProcessingFailureHandlingMode.WARN) {
LOGGER.warn("Warning only: Received DML '" + sql
+ "' for processing, binlog probably contains events generated with statement or mixed based replication format");
return;
}
else {
return;
}
}
if (sql.equalsIgnoreCase("ROLLBACK")) {
// We have hit a ROLLBACK which is not supported
LOGGER.warn("Rollback statements cannot be handled without binlog buffering, the connector will fail. Please check '{}' to see how to enable buffering",
MySqlConnectorConfig.BUFFER_SIZE_FOR_BINLOG_READER.name());
}
final List<SchemaChangeEvent> schemaChangeEvents = taskContext.getSchema().parseStreamingDdl(partition, sql,
command.getDatabase(), offsetContext, clock.currentTimeAsInstant());
try {
for (SchemaChangeEvent schemaChangeEvent : schemaChangeEvents) {
if (taskContext.getSchema().skipSchemaChangeEvent(schemaChangeEvent)) {
continue;
}
final TableId tableId = schemaChangeEvent.getTables().isEmpty() ? null : schemaChangeEvent.getTables().iterator().next().id();
eventDispatcher.dispatchSchemaChangeEvent(tableId, (receiver) -> {
try {
receiver.schemaChangeEvent(schemaChangeEvent);
}
catch (Exception e) {
throw new DebeziumException(e);
}
});
}
}
catch (InterruptedException e) {
LOGGER.info("Processing interrupted");
}
}
    /**
     * Complete the current transaction: emit the transaction-committed event, commit the
     * offset bookkeeping, and reset the per-transaction skip/ignore flags.
     *
     * @param event the event that ended the transaction; may not be null
     * @throws InterruptedException if the thread is interrupted while dispatching the commit event
     */
    private void handleTransactionCompletion(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
        // We are completing the transaction ...
        eventDispatcher.dispatchTransactionCommittedEvent(partition, offsetContext);
        offsetContext.commitTransaction();
        offsetContext.setBinlogThread(-1L);
        skipEvent = false;
        ignoreDmlEventByGtidSource = false;
    }
    /**
     * Handle a change in the table metadata.
     * <p>
     * This method should be called whenever we consume a TABLE_MAP event, and every transaction in the log should include one
     * of these for each table affected by the transaction. Each table map event includes a monotonically-increasing numeric
     * identifier, and this identifier is used within subsequent events within the same transaction. This table identifier can
     * change when:
     * <ol>
     * <li>the table structure is modified (e.g., via an {@code ALTER TABLE ...} command); or</li>
     * <li>MySQL rotates to a new binary log file, even if the table structure does not change.</li>
     * </ol>
     *
     * @param event the update event; never null
     * @throws InterruptedException if the thread is interrupted while reporting an unknown table
     */
    protected void handleUpdateTableMetadata(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
        TableMapEventData metadata = unwrapData(event);
        long tableNumber = metadata.getTableId();
        String databaseName = metadata.getDatabase();
        String tableName = metadata.getTable();
        TableId tableId = new TableId(databaseName, null, tableName);
        if (taskContext.getSchema().assignTableNumber(tableNumber, tableId)) {
            LOGGER.debug("Received update table metadata event: {}", event);
        }
        else {
            // the schema for this table is unknown; react per the configured handling mode
            informAboutUnknownTableIfRequired(partition, offsetContext, event, tableId);
        }
    }
/**
 * If we receive an event for a table that is monitored but whose metadata we
 * don't know, either ignore that event or raise a warning or error as per the
 * {@link MySqlConnectorConfig#INCONSISTENT_SCHEMA_HANDLING_MODE} configuration.
 *
 * @param partition the partition being processed
 * @param offsetContext the current offset (used for diagnostics)
 * @param event the binlog event that referenced the unknown table
 * @param tableId the referenced table, or null if it could not be resolved
 * @param operation the change operation being processed; may be null
 * @throws InterruptedException if dispatching the filtered event is interrupted
 */
private void informAboutUnknownTableIfRequired(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event, TableId tableId,
                                               Operation operation)
        throws InterruptedException {
    // Table is included by the filters but its schema is unknown: this indicates an
    // inconsistency (e.g. incomplete database history), handled per configuration.
    if (tableId != null && connectorConfig.getTableFilters().dataCollectionFilter().isIncluded(tableId)) {
        metrics.onErroneousEvent(partition, "source = " + tableId + ", event " + event, operation);
        EventHeaderV4 eventHeader = event.getHeader();
        if (inconsistentSchemaHandlingMode == EventProcessingFailureHandlingMode.FAIL) {
            // FAIL: log at ERROR with enough positional detail to inspect the event, then abort.
            LOGGER.error(
                    "Encountered change event '{}' at offset {} for table {} whose schema isn't known to this connector. One possible cause is an incomplete database history topic. Take a new snapshot in this case.{}"
                            + "Use the mysqlbinlog tool to view the problematic event: mysqlbinlog --start-position={} --stop-position={} --verbose {}",
                    event, offsetContext.getOffset(), tableId, System.lineSeparator(), eventHeader.getPosition(),
                    eventHeader.getNextPosition(), offsetContext.getSource().binlogFilename());
            throw new DebeziumException("Encountered change event for table " + tableId
                    + " whose schema isn't known to this connector");
        }
        else if (inconsistentSchemaHandlingMode == EventProcessingFailureHandlingMode.WARN) {
            // WARN: same diagnostics at WARN level; the event is dropped.
            LOGGER.warn(
                    "Encountered change event '{}' at offset {} for table {} whose schema isn't known to this connector. One possible cause is an incomplete database history topic. Take a new snapshot in this case.{}"
                            + "The event will be ignored.{}"
                            + "Use the mysqlbinlog tool to view the problematic event: mysqlbinlog --start-position={} --stop-position={} --verbose {}",
                    event, offsetContext.getOffset(), tableId, System.lineSeparator(), System.lineSeparator(),
                    eventHeader.getPosition(), eventHeader.getNextPosition(), offsetContext.getSource().binlogFilename());
        }
        else {
            // SKIP (default): drop silently apart from a DEBUG trace.
            LOGGER.debug(
                    "Encountered change event '{}' at offset {} for table {} whose schema isn't known to this connector. One possible cause is an incomplete database history topic. Take a new snapshot in this case.{}"
                            + "The event will be ignored.{}"
                            + "Use the mysqlbinlog tool to view the problematic event: mysqlbinlog --start-position={} --stop-position={} --verbose {}",
                    event, offsetContext.getOffset(), tableId, System.lineSeparator(), System.lineSeparator(),
                    eventHeader.getPosition(), eventHeader.getNextPosition(), offsetContext.getSource().binlogFilename());
        }
    }
    else {
        // Table is excluded (or id unresolved): record the event as filtered.
        if (tableId == null) {
            // Try to recover a table id for metrics/logging from the row event payload.
            EventData eventData = unwrapData(event);
            if (eventData instanceof WriteRowsEventData) {
                tableId = taskContext.getSchema().getExcludeTableId(((WriteRowsEventData) eventData).getTableId());
            }
            else if (eventData instanceof UpdateRowsEventData) {
                tableId = taskContext.getSchema().getExcludeTableId(((UpdateRowsEventData) eventData).getTableId());
            }
            else if (eventData instanceof DeleteRowsEventData) {
                tableId = taskContext.getSchema().getExcludeTableId(((DeleteRowsEventData) eventData).getTableId());
            }
        }
        LOGGER.trace("Filtered data change event for {}", tableId);
        metrics.onFilteredEvent(partition, "source = " + tableId, operation);
        eventDispatcher.dispatchFilteredEvent(partition, offsetContext);
    }
}
/**
 * Convenience overload of
 * {@code informAboutUnknownTableIfRequired(partition, offsetContext, event, tableId, operation)}
 * for callers that have no specific operation to report (passes null).
 */
private void informAboutUnknownTableIfRequired(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event, TableId tableId)
        throws InterruptedException {
    informAboutUnknownTableIfRequired(partition, offsetContext, event, tableId, null);
}
/**
 * Generate source records for the supplied event with an {@link WriteRowsEventData}.
 *
 * @param partition the partition in which the event occurred
 * @param event the database change data event to be processed; may not be null
 * @throws InterruptedException if this thread is interrupted while blocking
 */
protected void handleInsert(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
    handleChange(partition, offsetContext, event, Operation.CREATE, WriteRowsEventData.class,
            data -> taskContext.getSchema().getTableId(data.getTableId()),
            data -> data.getRows(),
            (table, insertedRow) -> {
                // Inserts carry no "before" image, hence the null old-row value.
                final MySqlChangeRecordEmitter emitter = new MySqlChangeRecordEmitter(partition, offsetContext, clock, Operation.CREATE, null, insertedRow);
                eventDispatcher.dispatchDataChangeEvent(partition, table, emitter);
            });
}
/**
 * Generate source records for the supplied event with an {@link UpdateRowsEventData}.
 *
 * @param partition the partition in which the event occurred
 * @param event the database change data event to be processed; may not be null
 * @throws InterruptedException if this thread is interrupted while blocking
 */
protected void handleUpdate(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
    handleChange(partition, offsetContext, event, Operation.UPDATE, UpdateRowsEventData.class,
            data -> taskContext.getSchema().getTableId(data.getTableId()),
            data -> data.getRows(),
            (table, changedRow) -> {
                // Each update row is a before/after pair: key = old row, value = new row.
                final MySqlChangeRecordEmitter emitter = new MySqlChangeRecordEmitter(partition, offsetContext, clock, Operation.UPDATE, changedRow.getKey(), changedRow.getValue());
                eventDispatcher.dispatchDataChangeEvent(partition, table, emitter);
            });
}
/**
 * Generate source records for the supplied event with an {@link DeleteRowsEventData}.
 *
 * @param partition the partition in which the event occurred
 * @param event the database change data event to be processed; may not be null
 * @throws InterruptedException if this thread is interrupted while blocking
 */
protected void handleDelete(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
    handleChange(partition, offsetContext, event, Operation.DELETE, DeleteRowsEventData.class,
            data -> taskContext.getSchema().getTableId(data.getTableId()),
            data -> data.getRows(),
            (table, deletedRow) -> {
                // Deletes carry no "after" image, hence the null new-row value.
                final MySqlChangeRecordEmitter emitter = new MySqlChangeRecordEmitter(partition, offsetContext, clock, Operation.DELETE, deletedRow, null);
                eventDispatcher.dispatchDataChangeEvent(partition, table, emitter);
            });
}
/**
 * Common implementation for insert/update/delete row events: unwraps the typed
 * event data, resolves the affected table, and emits one change record per row,
 * resuming from {@code startingRowNumber} when restarting mid-event.
 *
 * @param <T> the concrete row-event data type
 * @param <U> the per-row payload type (single row, or before/after pair for updates)
 * @param operation the logical change operation being processed
 * @param eventDataClass the expected event data class (documentation of T)
 * @param tableIdProvider resolves the affected table from the event data
 * @param rowsProvider extracts the list of row payloads from the event data
 * @param changeEmitter emits one change record for a (table, row) pair
 * @throws InterruptedException if emitting a record is interrupted
 */
private <T extends EventData, U> void handleChange(MySqlPartition partition, MySqlOffsetContext offsetContext, Event event, Operation operation,
                                                   Class<T> eventDataClass,
                                                   TableIdProvider<T> tableIdProvider,
                                                   RowsProvider<T, U> rowsProvider, BinlogChangeEmitter<U> changeEmitter)
        throws InterruptedException {
    if (skipEvent) {
        // We can skip this because we should already be at least this far ...
        LOGGER.info("Skipping previously processed row event: {}", event);
        return;
    }
    if (ignoreDmlEventByGtidSource) {
        LOGGER.debug("Skipping DML event because this GTID source is filtered: {}", event);
        return;
    }
    final T data = unwrapData(event);
    final TableId tableId = tableIdProvider.getTableId(data);
    final List<U> rows = rowsProvider.getRows(data);
    String changeType = operation.name();
    if (tableId != null && taskContext.getSchema().schemaFor(tableId) != null) {
        int count = 0;
        int numRows = rows.size();
        // startingRowNumber > 0 means we are resuming inside a partially-processed event.
        if (startingRowNumber < numRows) {
            for (int row = startingRowNumber; row != numRows; ++row) {
                // Record row position in the offset before emitting, so a crash mid-event
                // resumes at the correct row.
                offsetContext.setRowNumber(row, numRows);
                offsetContext.event(tableId, eventTimestamp);
                changeEmitter.emit(tableId, rows.get(row));
                count++;
            }
            if (LOGGER.isDebugEnabled()) {
                if (startingRowNumber != 0) {
                    LOGGER.debug("Emitted {} {} record(s) for last {} row(s) in event: {}",
                            count, changeType, numRows - startingRowNumber, event);
                }
                else {
                    LOGGER.debug("Emitted {} {} record(s) for event: {}", count, changeType, event);
                }
            }
            offsetContext.changeEventCompleted();
        }
        else {
            // All rows were previously processed ...
            LOGGER.debug("Skipping previously processed {} event: {}", changeType, event);
        }
    }
    else {
        // Unknown or filtered table: delegate to the configured inconsistency handling.
        informAboutUnknownTableIfRequired(partition, offsetContext, event, tableId, operation);
    }
    // Resume logic applies only to the first event after restart.
    startingRowNumber = 0;
}
/**
 * Handle a {@link EventType#VIEW_CHANGE} event.
 * Currently a no-op apart from a DEBUG trace.
 *
 * @param event the database change data event to be processed; may not be null
 * @throws InterruptedException if this thread is interrupted while blocking
 */
protected void viewChange(MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
    LOGGER.debug("View Change event: {}", event);
    // do nothing
}
/**
 * Handle a {@link EventType#XA_PREPARE} event.
 * Currently a no-op apart from a DEBUG trace.
 *
 * @param event the database change data event to be processed; may not be null
 * @throws InterruptedException if this thread is interrupted while blocking
 */
protected void prepareTransaction(MySqlOffsetContext offsetContext, Event event) throws InterruptedException {
    LOGGER.debug("XA Prepare event: {}", event);
    // do nothing
}
/**
 * Maps the connector's {@code SecureConnectionMode} setting onto the binlog
 * client's {@code SSLMode}.
 *
 * @param mode the configured secure-connection mode
 * @return the matching SSLMode; null only if an unmapped enum constant is added later
 */
private SSLMode sslModeFor(SecureConnectionMode mode) {
    switch (mode) {
        case DISABLED:
            return SSLMode.DISABLED;
        case PREFERRED:
            return SSLMode.PREFERRED;
        case REQUIRED:
            return SSLMode.REQUIRED;
        case VERIFY_CA:
            return SSLMode.VERIFY_CA;
        case VERIFY_IDENTITY:
            return SSLMode.VERIFY_IDENTITY;
    }
    return null;
}
@Override
public void execute(ChangeEventSourceContext context, MySqlPartition partition, MySqlOffsetContext offsetContext) throws InterruptedException {
if (!connectorConfig.getSnapshotMode().shouldStream()) {
LOGGER.info("Streaming is disabled for snapshot mode {}", connectorConfig.getSnapshotMode());
return;
}
if (connectorConfig.getSnapshotMode() != MySqlConnectorConfig.SnapshotMode.NEVER) {
taskContext.getSchema().assureNonEmptySchema();
}
final Set<Operation> skippedOperations = connectorConfig.getSkippedOperations();
final MySqlOffsetContext effectiveOffsetContext = offsetContext != null
? offsetContext
: MySqlOffsetContext.initial(connectorConfig);
// Register our event handlers ...
eventHandlers.put(EventType.STOP, (event) -> handleServerStop(effectiveOffsetContext, event));
eventHandlers.put(EventType.HEARTBEAT, (event) -> handleServerHeartbeat(partition, effectiveOffsetContext, event));
eventHandlers.put(EventType.INCIDENT, (event) -> handleServerIncident(partition, effectiveOffsetContext, event));
eventHandlers.put(EventType.ROTATE, (event) -> handleRotateLogsEvent(effectiveOffsetContext, event));
eventHandlers.put(EventType.TABLE_MAP, (event) -> handleUpdateTableMetadata(partition, effectiveOffsetContext, event));
eventHandlers.put(EventType.QUERY, (event) -> handleQueryEvent(partition, effectiveOffsetContext, event));
if (!skippedOperations.contains(Operation.CREATE)) {
eventHandlers.put(EventType.WRITE_ROWS, (event) -> handleInsert(partition, effectiveOffsetContext, event));
eventHandlers.put(EventType.EXT_WRITE_ROWS, (event) -> handleInsert(partition, effectiveOffsetContext, event));
}
if (!skippedOperations.contains(Operation.UPDATE)) {
eventHandlers.put(EventType.UPDATE_ROWS, (event) -> handleUpdate(partition, effectiveOffsetContext, event));
eventHandlers.put(EventType.EXT_UPDATE_ROWS, (event) -> handleUpdate(partition, effectiveOffsetContext, event));
}
if (!skippedOperations.contains(Operation.DELETE)) {
eventHandlers.put(EventType.DELETE_ROWS, (event) -> handleDelete(partition, effectiveOffsetContext, event));
eventHandlers.put(EventType.EXT_DELETE_ROWS, (event) -> handleDelete(partition, effectiveOffsetContext, event));
}
eventHandlers.put(EventType.VIEW_CHANGE, (event) -> viewChange(effectiveOffsetContext, event));
eventHandlers.put(EventType.XA_PREPARE, (event) -> prepareTransaction(effectiveOffsetContext, event));
eventHandlers.put(EventType.XID, (event) -> handleTransactionCompletion(partition, effectiveOffsetContext, event));
// Conditionally register ROWS_QUERY handler to parse SQL statements.
if (connectorConfig.includeSqlQuery()) {
eventHandlers.put(EventType.ROWS_QUERY, (event) -> handleRowsQuery(effectiveOffsetContext, event));
}
BinaryLogClient.EventListener listener;
if (connectorConfig.bufferSizeForStreamingChangeEventSource() == 0) {
listener = (event) -> handleEvent(partition, effectiveOffsetContext, event);
}
else {
EventBuffer buffer = new EventBuffer(connectorConfig.bufferSizeForStreamingChangeEventSource(), this, context);
listener = (event) -> buffer.add(partition, effectiveOffsetContext, event);
}
client.registerEventListener(listener);
client.registerLifecycleListener(new ReaderThreadLifecycleListener(effectiveOffsetContext));
client.registerEventListener((event) -> onEvent(effectiveOffsetContext, event));
if (LOGGER.isDebugEnabled()) {
client.registerEventListener((event) -> logEvent(effectiveOffsetContext, event));
}
final boolean isGtidModeEnabled = connection.isGtidModeEnabled();
metrics.setIsGtidModeEnabled(isGtidModeEnabled);
// Get the current GtidSet from MySQL so we can get a filtered/merged GtidSet based off of the last Debezium checkpoint.
String availableServerGtidStr = connection.knownGtidSet();
if (isGtidModeEnabled) {
// The server is using GTIDs, so enable the handler ...
eventHandlers.put(EventType.GTID, (event) -> handleGtidEvent(effectiveOffsetContext, event));
// Now look at the GTID set from the server and what we've previously seen ...
GtidSet availableServerGtidSet = new GtidSet(availableServerGtidStr);
// also take into account purged GTID logs
GtidSet purgedServerGtidSet = connection.purgedGtidSet();
LOGGER.info("GTID set purged on server: {}", purgedServerGtidSet);
GtidSet filteredGtidSet = filterGtidSet(effectiveOffsetContext, availableServerGtidSet, purgedServerGtidSet);
if (filteredGtidSet != null) {
// We've seen at least some GTIDs, so start reading from the filtered GTID set ...
LOGGER.info("Registering binlog reader with GTID set: {}", filteredGtidSet);
String filteredGtidSetStr = filteredGtidSet.toString();
client.setGtidSet(filteredGtidSetStr);
effectiveOffsetContext.setCompletedGtidSet(filteredGtidSetStr);
gtidSet = new com.github.shyiko.mysql.binlog.GtidSet(filteredGtidSetStr);
}
else {
// We've not yet seen any GTIDs, so that means we have to start reading the binlog from the beginning ...
client.setBinlogFilename(effectiveOffsetContext.getSource().binlogFilename());
client.setBinlogPosition(effectiveOffsetContext.getSource().binlogPosition());
gtidSet = new com.github.shyiko.mysql.binlog.GtidSet("");
}
}
else {
// The server is not using GTIDs, so start reading the binlog based upon where we last left off ...
client.setBinlogFilename(effectiveOffsetContext.getSource().binlogFilename());
client.setBinlogPosition(effectiveOffsetContext.getSource().binlogPosition());
}
// We may be restarting in the middle of a transaction, so see how far into the transaction we have already processed...
initialEventsToSkip = effectiveOffsetContext.eventsToSkipUponRestart();
LOGGER.info("Skip {} events on streaming start", initialEventsToSkip);
// Set the starting row number, which is the next row number to be read ...
startingRowNumber = effectiveOffsetContext.rowsToSkipUponRestart();
LOGGER.info("Skip {} rows on streaming start", startingRowNumber);
// Only when we reach the first BEGIN event will we start to skip events ...
skipEvent = false;
try {
// Start the log reader, which starts background threads ...
if (context.isRunning()) {
long timeout = connectorConfig.getConnectionTimeout().toMillis();
long started = clock.currentTimeInMillis();
try {
LOGGER.debug("Attempting to establish binlog reader connection with timeout of {} ms", timeout);
client.connect(timeout);
// Need to wait for keepalive thread to be running, otherwise it can be left orphaned
// The problem is with timing. When the close is called too early after connect then
// the keepalive thread is not terminated
if (client.isKeepAlive()) {
LOGGER.info("Waiting for keepalive thread to start");
final Metronome metronome = Metronome.parker(Duration.ofMillis(100), clock);
int waitAttempts = 50;
boolean keepAliveThreadRunning = false;
while (!keepAliveThreadRunning && waitAttempts
for (Thread t : binaryLogClientThreads.values()) {
if (t.getName().startsWith(KEEPALIVE_THREAD_NAME) && t.isAlive()) {
LOGGER.info("Keepalive thread is running");
keepAliveThreadRunning = true;
}
}
metronome.pause();
}
}
}
catch (TimeoutException e) {
// If the client thread is interrupted *before* the client could connect, the client throws a timeout exception
// The only way we can distinguish this is if we get the timeout exception before the specified timeout has
// elapsed, so we simply check this (within 10%) ...
long duration = clock.currentTimeInMillis() - started;
if (duration > (0.9 * timeout)) {
double actualSeconds = TimeUnit.MILLISECONDS.toSeconds(duration);
throw new DebeziumException("Timed out after " + actualSeconds + " seconds while waiting to connect to MySQL at " +
connectorConfig.hostname() + ":" + connectorConfig.port() + " with user '" + connectorConfig.username() + "'", e);
}
// Otherwise, we were told to shutdown, so we don't care about the timeout exception
}
catch (AuthenticationException e) {
throw new DebeziumException("Failed to authenticate to the MySQL database at " +
connectorConfig.hostname() + ":" + connectorConfig.port() + " with user '" + connectorConfig.username() + "'", e);
}
catch (Throwable e) {
throw new DebeziumException("Unable to connect to the MySQL database at " +
connectorConfig.hostname() + ":" + connectorConfig.port() + " with user '" + connectorConfig.username() + "': " + e.getMessage(), e);
}
}
while (context.isRunning()) {
Thread.sleep(100);
}
}
finally {
try {
client.disconnect();
}
catch (Exception e) {
LOGGER.info("Exception while stopping binary log client", e);
}
}
}
/**
 * Builds an SSL socket factory for the binlog connection, restricted to the TLS
 * version the server session accepts. Key material is taken from the standard
 * {@code javax.net.ssl.keyStore}/{@code keyStorePassword} system properties if set.
 * Returns null when the session reports no accepted TLS version, letting the
 * caller fall back to the binlog client's default factory.
 *
 * @param connectorConfig connector configuration (supplies the SSL mode)
 * @param connection JDBC connection used to query the accepted TLS version
 * @return a configured factory, or null if no TLS version restriction applies
 */
private SSLSocketFactory getBinlogSslSocketFactory(MySqlConnectorConfig connectorConfig, MySqlConnection connection) {
    String acceptedTlsVersion = connection.getSessionVariableForSslVersion();
    if (!isNullOrEmpty(acceptedTlsVersion)) {
        SSLMode sslMode = sslModeFor(connectorConfig.sslMode());
        // Keystore settings can be passed via system properties too so we need to read them
        final String password = System.getProperty("javax.net.ssl.keyStorePassword");
        final String keyFilename = System.getProperty("javax.net.ssl.keyStore");
        KeyManager[] keyManagers = null;
        if (keyFilename != null) {
            final char[] passwordArray = (password == null) ? null : password.toCharArray();
            try {
                KeyStore ks = KeyStore.getInstance("JKS");
                ks.load(new FileInputStream(keyFilename), passwordArray);
                KeyManagerFactory kmf = KeyManagerFactory.getInstance("NewSunX509");
                kmf.init(ks, passwordArray);
                keyManagers = kmf.getKeyManagers();
            }
            catch (KeyStoreException | IOException | CertificateException | NoSuchAlgorithmException | UnrecoverableKeyException e) {
                throw new DebeziumException("Could not load keystore", e);
            }
        }
        // DBZ-1208 Resembles the logic from the upstream BinaryLogClient, only that
        // the accepted TLS version is passed to the constructed factory
        if (sslMode == SSLMode.PREFERRED || sslMode == SSLMode.REQUIRED) {
            final KeyManager[] finalKMS = keyManagers;
            // PREFERRED/REQUIRED: encrypt but do not verify the server certificate,
            // hence the accept-all trust manager below.
            return new DefaultSSLSocketFactory(acceptedTlsVersion) {
                @Override
                protected void initSSLContext(SSLContext sc)
                        throws GeneralSecurityException {
                    sc.init(finalKMS, new TrustManager[]{
                            new X509TrustManager() {
                                @Override
                                public void checkClientTrusted(
                                                               X509Certificate[] x509Certificates,
                                                               String s)
                                        throws CertificateException {
                                }

                                @Override
                                public void checkServerTrusted(
                                                               X509Certificate[] x509Certificates,
                                                               String s)
                                        throws CertificateException {
                                }

                                @Override
                                public X509Certificate[] getAcceptedIssuers() {
                                    return new X509Certificate[0];
                                }
                            }
                    }, null);
                }
            };
        }
        else {
            // VERIFY_CA / VERIFY_IDENTITY (and DISABLED) use the default trust handling.
            return new DefaultSSLSocketFactory(acceptedTlsVersion);
        }
    }
    return null;
}
/**
 * Logs the current streaming-source position at ERROR level; see
 * {@code logStreamingSourceState(Level)} for the message content.
 */
private void logStreamingSourceState() {
    logStreamingSourceState(Level.ERROR);
}
/**
 * TRACE-level dump of every received binlog event; registered as an extra
 * event listener only when debug logging is enabled (see execute()).
 */
protected void logEvent(MySqlOffsetContext offsetContext, Event event) {
    LOGGER.trace("Received event: {}", event);
}
/**
 * Logs the last stored offset and the binlog client's current file/position at
 * the requested severity (WARN, DEBUG, or ERROR for anything else), typically
 * after a processing failure.
 *
 * @param severity the log level to emit at
 */
private void logStreamingSourceState(Level severity) {
    final Object position = (client == null) ? "N/A" : client.getBinlogFilename() + "/" + client.getBinlogPosition();
    final String message = "Error during binlog processing. Last offset stored = {}, binlog reader near position = {}";
    if (severity == Level.WARN) {
        LOGGER.warn(message, lastOffset, position);
    }
    else if (severity == Level.DEBUG) {
        LOGGER.debug(message, lastOffset, position);
    }
    else {
        // Any other severity (including ERROR) is reported as an error.
        LOGGER.error(message, lastOffset, position);
    }
}
/**
 * Derives the GTID set to resume streaming from, based on the previously
 * recorded offset, the server's available GTID set, and its purged GTID set.
 * The recorded set is first narrowed by the configured GTID source filter;
 * depending on {@code gtid.new.channel.position} the result is then either
 * merged with the full server set, or (EARLIEST) restricted to known channels
 * and augmented with purged GTIDs so new channels start at their first
 * available position.
 *
 * @param offsetContext the offset holding the previously recorded GTID set
 * @param availableServerGtidSet GTIDs the server currently knows; never null
 * @param purgedServerGtid GTIDs already purged from the server's logs
 * @return the merged GTID set to connect with, or null if no GTIDs were recorded
 */
public GtidSet filterGtidSet(MySqlOffsetContext offsetContext, GtidSet availableServerGtidSet, GtidSet purgedServerGtid) {
    String gtidStr = offsetContext.gtidSet();
    if (gtidStr == null) {
        // Nothing recorded yet: caller falls back to file/position based resume.
        return null;
    }
    LOGGER.info("Attempting to generate a filtered GTID set");
    LOGGER.info("GTID set from previous recorded offset: {}", gtidStr);
    GtidSet filteredGtidSet = new GtidSet(gtidStr);
    Predicate<String> gtidSourceFilter = connectorConfig.gtidSourceFilter();
    if (gtidSourceFilter != null) {
        filteredGtidSet = filteredGtidSet.retainAll(gtidSourceFilter);
        LOGGER.info("GTID set after applying GTID source includes/excludes to previous recorded offset: {}", filteredGtidSet);
    }
    LOGGER.info("GTID set available on server: {}", availableServerGtidSet);
    GtidSet mergedGtidSet;
    if (connectorConfig.gtidNewChannelPosition() == GtidNewChannelPosition.EARLIEST) {
        final GtidSet knownGtidSet = filteredGtidSet;
        LOGGER.info("Using first available positions for new GTID channels");
        final GtidSet relevantAvailableServerGtidSet = (gtidSourceFilter != null) ? availableServerGtidSet.retainAll(gtidSourceFilter) : availableServerGtidSet;
        LOGGER.info("Relevant GTID set available on server: {}", relevantAvailableServerGtidSet);
        // Keep only server UUIDs we have seen before, then fold in purged and
        // previously recorded GTIDs; order of the .with() calls determines precedence.
        mergedGtidSet = relevantAvailableServerGtidSet
                .retainAll(uuid -> knownGtidSet.forServerWithId(uuid) != null)
                .with(purgedServerGtid)
                .with(filteredGtidSet);
    }
    else {
        mergedGtidSet = availableServerGtidSet.with(filteredGtidSet);
    }
    LOGGER.info("Final merged GTID set to use when connecting to MySQL: {}", mergedGtidSet);
    return mergedGtidSet;
}
/** Package-private accessor for the streaming metrics (used by collaborators/tests). */
MySqlStreamingChangeEventSourceMetrics getMetrics() {
    return metrics;
}
/**
 * Disconnects the binlog client and reconnects it at the given binlog
 * file/position, provided the source is still running. I/O failures during the
 * reconnect are logged but not propagated.
 *
 * @param context source context used to check the running state
 * @param position the binlog file and position to rewind to
 */
void rewindBinaryLogClient(ChangeEventSourceContext context, BinlogPosition position) {
    if (!context.isRunning()) {
        return;
    }
    try {
        LOGGER.debug("Rewinding binlog to position {}", position);
        // The client must be fully disconnected before repositioning it.
        client.disconnect();
        client.setBinlogFilename(position.getFilename());
        client.setBinlogPosition(position.getPosition());
        client.connect();
    }
    catch (IOException e) {
        LOGGER.error("Unexpected error when re-connecting to the MySQL binary log reader", e);
    }
}
/** Snapshot of the binlog client's current file name and position. */
BinlogPosition getCurrentBinlogPosition() {
    return new BinlogPosition(client.getBinlogFilename(), client.getBinlogPosition());
}
/**
 * Wraps the specified exception in a {@link DebeziumException}, enriching the
 * message with error code and SQLSTATE when the cause is a MySQL server error
 * or a {@link SQLException}, so no useful diagnostic state is lost.
 *
 * @param error the exception; may not be null
 * @return the wrapping exception, with {@code error} preserved as its cause
 */
protected DebeziumException wrap(Throwable error) {
    assert error != null;
    String message = error.getMessage();
    if (error instanceof ServerException) {
        final ServerException serverError = (ServerException) error;
        message = message + " Error code: " + serverError.getErrorCode() + "; SQLSTATE: " + serverError.getSqlState() + ".";
    }
    else if (error instanceof SQLException) {
        final SQLException sqlError = (SQLException) error;
        message = sqlError.getMessage() + " Error code: " + sqlError.getErrorCode() + "; SQLSTATE: " + sqlError.getSQLState() + ".";
    }
    return new DebeziumException(message, error);
}
/**
 * Lifecycle callbacks for the binlog reader thread: sets up MDC logging context
 * on connect, reports summary information on disconnect, and routes
 * communication/deserialization failures into the connector's error handler
 * according to the configured failure handling modes.
 */
protected final class ReaderThreadLifecycleListener implements LifecycleListener {
    private final MySqlOffsetContext offsetContext;

    ReaderThreadLifecycleListener(MySqlOffsetContext offsetContext) {
        this.offsetContext = offsetContext;
    }

    @Override
    public void onDisconnect(BinaryLogClient client) {
        if (LOGGER.isInfoEnabled()) {
            // Log within the "binlog" context so the summary lines are attributable.
            taskContext.temporaryLoggingContext(connectorConfig, "binlog", () -> {
                Map<String, ?> offset = lastOffset;
                if (offset != null) {
                    LOGGER.info("Stopped reading binlog after {} events, last recorded offset: {}", totalRecordCounter, offset);
                }
                else {
                    LOGGER.info("Stopped reading binlog after {} events, no new offset was recorded", totalRecordCounter);
                }
            });
        }
    }

    @Override
    public void onConnect(BinaryLogClient client) {
        // Set up the MDC logging context for this thread ...
        taskContext.configureLoggingContext("binlog");
        // The event row number will be used when processing the first event ...
        LOGGER.info("Connected to MySQL binlog at {}:{}, starting at {}", connectorConfig.hostname(), connectorConfig.port(), offsetContext);
    }

    @Override
    public void onCommunicationFailure(BinaryLogClient client, Exception ex) {
        LOGGER.debug("A communication failure event arrived", ex);
        logStreamingSourceState();
        try {
            // Stop BinaryLogClient background threads
            client.disconnect();
        }
        catch (final Exception e) {
            LOGGER.debug("Exception while closing client", e);
        }
        // Surface the failure to the connector task via the error handler.
        errorHandler.setProducerThrowable(wrap(ex));
    }

    @Override
    public void onEventDeserializationFailure(BinaryLogClient client, Exception ex) {
        // FAIL propagates the error; WARN/SKIP only log at decreasing severity.
        if (eventDeserializationFailureHandlingMode == EventProcessingFailureHandlingMode.FAIL) {
            LOGGER.debug("A deserialization failure event arrived", ex);
            logStreamingSourceState();
            errorHandler.setProducerThrowable(wrap(ex));
        }
        else if (eventDeserializationFailureHandlingMode == EventProcessingFailureHandlingMode.WARN) {
            LOGGER.warn("A deserialization failure event arrived", ex);
            logStreamingSourceState(Level.WARN);
        }
        else {
            LOGGER.debug("A deserialization failure event arrived", ex);
            logStreamingSourceState(Level.DEBUG);
        }
    }
}
/** Resolves the affected {@link TableId} from a typed row-event payload. */
@FunctionalInterface
private interface TableIdProvider<E extends EventData> {
    TableId getTableId(E data);
}
/** Extracts the list of per-row payloads from a typed row-event payload. */
@FunctionalInterface
private interface RowsProvider<E extends EventData, U> {
    List<U> getRows(E data);
}
} |
package algorithms.rotationalplanesweep;
/**
 * Array-backed binary min-heap over {@code RPSScanner.Edge} objects, ordered by
 * {@code Edge.isLessThan(other, sx, sy)} relative to a query point (sx, sy)
 * supplied with each mutating call.
 * <p>
 * Invariants: the backing array always contains every edge; the first
 * {@code heapSize} entries form the heap, the rest are "not in the heap".
 * Each edge's {@code heapIndex} mirrors its current array position, which is
 * what lets {@link #delete} and {@link #insert} locate an edge in O(1).
 * NOTE(review): insert/delete assume (sx, sy) is consistent across calls while
 * the heap is non-empty — ordering with a moving reference point would not be
 * maintained; confirm against the caller.
 */
public class RPSEdgeHeap {
    private RPSScanner.Edge[] edges;   // all edges; indices [0, heapSize) are the live heap
    private int heapSize;

    public RPSEdgeHeap(RPSScanner.Edge[] edges) {
        this.edges = edges;
        heapSize = 0;
        // Record each edge's starting array position so it can be found later.
        for (int i=0; i<edges.length; ++i) {
            edges[i].heapIndex = i;
        }
    }

    /** Returns the backing array (heap entries first, then non-heap edges). */
    public final RPSScanner.Edge[] getEdgeList() {
        return edges;
    }

    /** Empties the heap in O(1); the backing array is left untouched. */
    public final void clear() {
        heapSize = 0;
    }

    public final int size() {
        return heapSize;
    }

    public final boolean isEmpty() {
        return heapSize == 0;
    }

    /** Returns the minimum edge. Precondition: heap is non-empty (not checked). */
    public final RPSScanner.Edge getMin() {
        return edges[0];
    }

    /**
     * Removes an edge currently in the heap. The edge is swapped with the last
     * heap entry, the heap shrinks, and the displaced entry is re-sifted in both
     * directions since it may violate the order either way.
     */
    public final void delete(RPSScanner.Edge edge, int sx,int sy) {
        // Safety check for Debugging:
        //if (edge.heapIndex >= heapSize) throw new UnsupportedOperationException("ELEMENT NOT IN HEAP: " + edge);
        int currentIndex = edge.heapIndex;
        swap(currentIndex, heapSize-1);
        --heapSize;
        if (currentIndex >= heapSize) return; // deleted edge was the last entry; nothing to fix
        bubbleUp(currentIndex, sx,sy);
        bubbleDown(currentIndex, sx,sy);
    }

    /**
     * Inserts an edge that is currently outside the heap (at array index >= heapSize):
     * it is swapped into slot heapSize, the heap grows, and the edge sifts up.
     */
    public final void insert(RPSScanner.Edge edge, int sx,int sy) {
        // Safety check for Debugging:
        //if (edge.heapIndex < heapSize) throw new UnsupportedOperationException("ELEMENT ALREADY EXISTS: " + edge);
        swap(edge.heapIndex, heapSize);
        ++heapSize;
        // After the swap, edge.heapIndex is the slot it was placed into.
        bubbleUp(edge.heapIndex, sx,sy);
    }

    /** Sift entry i down until both children are >= it (standard min-heap repair). */
    private final void bubbleDown(int i, int sx,int sy) {
        while (true) {
            int left = 2*i + 1;
            if (left >= heapSize) return;
            int right = 2*i + 2;
            // Swap with the smaller one
            int swapTarget = right;
            if (right >= heapSize || edges[left].isLessThan(edges[right], sx, sy)) {
                swapTarget = left;
            }
            if (edges[swapTarget].isLessThan(edges[i], sx, sy)) {
                swap(i, swapTarget);
                i = swapTarget;
            } else {
                return;
            }
        }
    }

    /** Sift entry i up while it is smaller than its parent. */
    private final void bubbleUp(int i, int sx,int sy) {
        while (i > 0) {
            int parent = (i-1)/2;
            if (edges[i].isLessThan(edges[parent], sx, sy)) {
                swap(i, parent);
                i = parent;
            } else {
                break;
            }
        }
    }

    /** Exchanges two array slots, keeping each edge's heapIndex in sync. */
    private final void swap(int i, int j) {
        RPSScanner.Edge temp = edges[i];
        edges[i] = edges[j];
        edges[j] = temp;
        edges[i].heapIndex = i;
        edges[j].heapIndex = j;
    }

    /** Debug dump of the live heap entries (array order, not sorted order). */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (int i=0; i<heapSize; ++i) {
            RPSScanner.Edge e = edges[i];
            sb.append(e.heapIndex + ": " + e.u.x + ", " + e.u.y + ", " + e.v.x + ", " + e.v.y);
            sb.append("\n");
        }
        return sb.toString();
    }
}
package com.parc.ccn.data.util;
import java.io.IOException;
import java.math.BigInteger;
import java.sql.Timestamp;
import java.util.ArrayList;
import org.bouncycastle.util.encoders.Base64;
import com.parc.ccn.config.SystemConfiguration;
public class DataUtils {
public static <T extends Comparable<T>> int compare(T left, T right) {
int result = 0;
if (null != left) {
if (null == right)
return 1; // sort nothing before something
result = left.compareTo(right);
} else {
if (null != right)
result = -1; // sort nothing before something
// else fall through and compare publishers
else
result = 0; // null == null
}
return result;
}
/**
* Perform a lexigraphical comparison
* @param left
* @param right
* @return < 0 if left comes before right, 0 if they are equal, > 0 if left comes after right
*/
public static int compare(byte [] left, byte [] right) {
int result = 0;
if (null != left) {
if (null == right) {
result = 1;
} else {
// If a is shorter than b then a comes before b
if (left.length < right.length) {
result = -1;
} else if (left.length > right.length) {
result = 1;
} else {
// They have equal lengths - compare byte by byte
for (int i=0; i < left.length; ++i) {
if ((short)(left[i] & 0xff) < (short)(right[i] & 0xff)) {
result = -1;
break;
} else if ((short)(left[i] & 0xff) > (short)(right[i] & 0xff)) {
result = 1;
break;
}
}
}
}
} else {
if (null != right)
result = -1; // sort nothing before something
// else fall through and compare publishers
else
result = 0; // null == null
}
return result;
}
/**
* @see #compare(byte[], byte[])
*/
public static int compare(ArrayList<byte []> left, ArrayList<byte []> right) {
int result = 0;
if (null != left) {
if (null == right) {
result = 1;
} else {
// here we have the comparison.
int minlen = (left.size() < right.size()) ? left.size() : right.size();
for (int i=0; i < minlen; ++i) {
result = compare(left.get(i), right.get(i));
if (0 != result) break;
}
if (result == 0) {
// ok, they're equal up to the minimum length
if (left.size() < right.size()) {
result = -1;
} else if (left.size() > right.size()) {
result = 1;
}
// else they're equal, result = 0
}
}
} else {
if (null != right)
result = -1; // sort nothing before something
// else fall through and compare publishers
else
result = 0; // null == null
}
return result;
}
public static String printBytes(byte [] bytes) {
BigInteger bi = new BigInteger(1, bytes);
return bi.toString(SystemConfiguration.DEBUG_RADIX);
}
public static String printHexBytes(byte [] bytes) {
BigInteger bi = new BigInteger(1, bytes);
return bi.toString(16);
}
/*
* A place to centralize interfaces to base64 encoding/decoding, as the classes
* we use change depending on what ships with Java.
*/
public static byte [] base64Decode(byte [] input) throws IOException {
return Base64.decode(input);
}
public static byte [] base6Encode(byte [] input) {
return Base64.encode(input);
}
public static boolean arrayEquals(byte[] left, byte[] right, int length) {
if (left == null) {
return ((right == null) ? true : false);
}
if (right == null) {
return ((left == null) ? true : false);
}
if (left.length < length || right.length < length)
return false;
for (int i = 0; i < length; i++) {
if (left[i] != right[i])
return false;
}
return true;
}
/**
* Converts a timestamp into a fixed point representation, with 12 bits in the fractional
* component, and adds this to the ContentName as a version field. The timestamp is rounded
* to the nearest value in the fixed point representation.
* <p>
* This allows versions to be recorded as a timestamp with a 1/4096 second accuracy.
*/
public static byte [] timestampToBinaryTime12(Timestamp timestamp) {
long timeVal = timestampToBinaryTime12AsLong(timestamp);
return BigInteger.valueOf(timeVal).toByteArray();
}
public static long timestampToBinaryTime12AsLong(Timestamp timestamp) {
long timeVal = (timestamp.getTime() / 1000) * 4096L + (timestamp.getNanos() * 4096L + 500000000L) / 1000000000L;
return timeVal;
}
/**
 * Decodes a 12-bit fixed point binary time (as produced by
 * {@code timestampToBinaryTime12}) back into a Timestamp.
 *
 * @throws IllegalArgumentException if the input is null or empty, or longer
 *         than 6 bytes (a time unreasonably far in the future)
 */
public static Timestamp binaryTime12ToTimestamp(byte [] binaryTime12) {
    final boolean missing = (null == binaryTime12) || (binaryTime12.length == 0);
    if (missing) {
        throw new IllegalArgumentException("Invalid binary time!");
    }
    if (binaryTime12.length > 6) {
        throw new IllegalArgumentException("Time unacceptably far in the future, can't decode: " + printHexBytes(binaryTime12));
    }
    // Interpreted as a signed big-endian value, matching BigInteger.toByteArray()
    // used on the encoding side.
    return binaryTime12ToTimestamp(new BigInteger(binaryTime12).longValue());
}
/**
 * Decodes a 12-bit fixed point binary time (whole seconds * 4096 plus
 * 1/4096-second ticks) into a Timestamp.
 */
public static Timestamp binaryTime12ToTimestamp(long binaryTime12AsLong) {
    final long wholeSeconds = binaryTime12AsLong / 4096L;
    final long fractionTicks = binaryTime12AsLong % 4096L;
    Timestamp ts = new Timestamp(wholeSeconds * 1000L);
    ts.setNanos((int) ((fractionTicks * 1000000000L) / 4096L));
    return ts;
}
/**
 * Compare timestamps taking into account the resolution lost in the conversion above.
 */
public static boolean timestampEquals(Timestamp t1, Timestamp t2) {
    return toBinaryTime12(t1) == toBinaryTime12(t2);
}
/** Quantizes a timestamp to 12-bit fixed point (1/4096 s), rounding to nearest. */
private static long toBinaryTime12(Timestamp t) {
    return (t.getTime() / 1000) * 4096L + (t.getNanos() * 4096L + 500000000L) / 1000000000L;
}
/**
 * Rounding function for timestamps: quantizes to the 1/4096-second resolution
 * used by the binary time representation, rounding to the nearest tick, so that
 * {@code timestampEquals(orig, roundTimestamp(orig))} holds and the result is
 * stable under repeated rounding.
 * <p>
 * Fixes the previous implementation, which computed {@code nanos % 4096} —
 * mixing nanosecond units with 1/4096-second ticks — and therefore did not
 * round to the binary time grid at all.
 *
 * @param origTimestamp timestamp to round; not modified
 * @return a new Timestamp on the 1/4096-second grid
 */
public static Timestamp roundTimestamp(Timestamp origTimestamp) {
    // Round-trip through the fixed point form. Doing the carry in fixed point
    // also ensures a fraction that rounds up to a whole second lands in the
    // seconds field instead of overflowing setNanos().
    long binaryTime12 = (origTimestamp.getTime() / 1000) * 4096L
            + (origTimestamp.getNanos() * 4096L + 500000000L) / 1000000000L;
    Timestamp newTimestamp = new Timestamp((binaryTime12 / 4096L) * 1000L);
    newTimestamp.setNanos((int) (((binaryTime12 % 4096L) * 1000000000L) / 4096L));
    return newTimestamp;
}
/**
 * Tests whether {@code prefix} is a byte-wise prefix of {@code data}.
 * A null or empty prefix is a prefix of anything (including null data);
 * null or too-short data matches only the empty prefix.
 */
public static boolean isBinaryPrefix(byte [] prefix,
                                     byte [] data) {
    final boolean emptyPrefix = (null == prefix) || (prefix.length == 0);
    if (emptyPrefix) {
        return true;
    }
    if ((null == data) || (data.length < prefix.length)) {
        return false;
    }
    int index = 0;
    while (index < prefix.length) {
        if (data[index] != prefix[index]) {
            return false;
        }
        index++;
    }
    return true;
}
} |
package api.web.gw2.mapping.core;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.logging.Level;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonNumber;
import javax.json.JsonObject;
import javax.json.JsonReader;
import javax.json.JsonValue;
/**
 * DOM-style JSON-P marshaller: reads the whole JSON document into a
 * {@code JsonObject}/{@code JsonArray} tree, then populates instances of the
 * target classes reflectively, field by field.
 * <p>
 * Relies on helpers inherited from {@code JsonpAbstractMarshaller}
 * (createConcreteEmptyInstance, lookupField, defaultValueForField,
 * valueForField, marshallEnumValue, jsonKeyToJavaFieldName, logger, ...)
 * which are not visible in this file.
 */
final class JsonpDOMMarshaller extends JsonpAbstractMarshaller {
    /**
     * Creates a new empty instance.
     */
    public JsonpDOMMarshaller() {
    }
    /**
     * Loads a single object of {@code targetClass} from the JSON document in
     * {@code input}. Reflection and parsing failures are logged at SEVERE and
     * rethrown wrapped in an {@code IOException}.
     */
    @Override
    public <T> T loadObject(Class<T> targetClass, InputStream input) throws NullPointerException, IOException {
        Objects.requireNonNull(targetClass);
        Objects.requireNonNull(input);
        try (final JsonReader jsonReader = Json.createReader(input)) {
            final JsonObject jsonObject = jsonReader.readObject();
            return marshallObject(jsonObject, null, targetClass);
        } catch (NoSuchFieldException | IllegalArgumentException | IllegalAccessException | ClassNotFoundException | MalformedURLException | NoSuchMethodException | InvocationTargetException ex) {
            logger.log(Level.SEVERE, null, ex);
            final IOException exception = new IOException(ex);
            throw exception;
        }
    }
    /**
     * Loads a collection of {@code targetClass} from a JSON array document.
     * NOTE(review): catches broad {@code Exception} here, unlike the narrow
     * multi-catch in loadObject/loadRuntimeObject — presumably intentional but
     * worth unifying.
     */
    @Override
    public <T> Collection<T> loadObjectArray(Class<T> targetClass, InputStream input) throws IOException {
        Objects.requireNonNull(targetClass);
        Objects.requireNonNull(input);
        try (final JsonReader jsonReader = Json.createReader(input)) {
            final JsonArray jsonArray = jsonReader.readArray();
            return marshallArray(jsonArray, null, targetClass);
        } catch (Exception ex) {
            logger.log(Level.SEVERE, null, ex);
            final IOException exception = new IOException(ex);
            throw exception;
        }
    }
    /**
     * Loads an object whose concrete class is chosen at runtime: the value of
     * the {@code selector} key in the document is substituted into
     * {@code pattern} (via String.format) to produce the target class name.
     */
    @Override
    public <T> T loadRuntimeObject(final String selector, final String pattern, final InputStream input) throws IOException {
        Objects.requireNonNull(selector);
        Objects.requireNonNull(pattern);
        Objects.requireNonNull(input);
        try (final JsonReader jsonReader = Json.createReader(input)) {
            final JsonObject jsonObject = jsonReader.readObject();
            final String type = jsonObject.getString(selector);
            final String targetClassName = String.format(pattern, type);
            // Unchecked cast: the caller is trusted to supply a pattern that
            // resolves to a subtype of T.
            final Class<T> targetClass = (Class<T>) Class.forName(targetClassName);
            return marshallObject(jsonObject, null, targetClass);
        } catch (NoSuchFieldException | IllegalArgumentException | IllegalAccessException | ClassNotFoundException | MalformedURLException | NoSuchMethodException | InvocationTargetException ex) {
            logger.log(Level.SEVERE, null, ex);
            final IOException exception = new IOException(ex);
            throw exception;
        }
    }
    /**
     * Recursively populates an instance of {@code targetClass} from
     * {@code jsonObject}: each JSON key is mapped to a Java field name, and the
     * JSON value is converted according to the field's declared generic type
     * and its OptionalValue/SetValue/ListValue/MapValue annotations.
     * Keys with no matching field are logged and skipped.
     */
    private <T> T marshallObject(final JsonObject jsonObject, final Field field, final Class<T> targetClass) throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException, ClassNotFoundException, NullPointerException, MalformedURLException, NoSuchMethodException, InvocationTargetException {
        final T result = createConcreteEmptyInstance(targetClass);
        final Class<T> concreteClass = (Class<T>) result.getClass();
        for (final String key : jsonObject.keySet()) {
            final String childFieldName = jsonKeyToJavaFieldName(key);
            final Field childField = lookupField(childFieldName, concreteClass);
            // If field does not exist, skip key.
            if (childField == null) {
                logWarningMissingField(key, childFieldName, targetClass);
                continue;
            }
            Object valueFromJSON = defaultValueForField(childField);
            final JsonValue jsonValue = jsonObject.get(key);
            final JsonValue.ValueType valueType = jsonValue.getValueType();
            switch (valueType) {
                case TRUE: {
                    valueFromJSON = Boolean.TRUE;
                }
                break;
                case FALSE: {
                    valueFromJSON = Boolean.FALSE;
                }
                break;
                case NUMBER: {
                    final JsonNumber jsonNumber = jsonObject.getJsonNumber(key);
                    valueFromJSON = jsonNumberToJavaNumber(jsonNumber);
                }
                break;
                case STRING: {
                    valueFromJSON = jsonObject.getString(key);
                }
                break;
                case ARRAY:
                case OBJECT: {
                    // Strip the container wrappers (Optional/Set/List/Map) from
                    // the field's generic type name to recover the element
                    // class name(s) to instantiate.
                    String typename = childField.getGenericType().getTypeName();
                    final boolean isOptional = childField.getAnnotation(OptionalValue.class) != null;
                    final boolean isSet = childField.getAnnotation(SetValue.class) != null;
                    final boolean isList = childField.getAnnotation(ListValue.class) != null;
                    final boolean isMap = childField.getAnnotation(MapValue.class) != null;
                    if (isOptional) {
                        typename = typename.replaceAll("java\\.util\\.Optional<", ""); // NOI18N.
                    }
                    if (isSet) {
                        typename = typename.replaceAll("java\\.util\\.Set<", ""); // NOI18N.
                    }
                    if (isList) {
                        typename = typename.replaceAll("java\\.util\\.List<", ""); // NOI18N.
                    }
                    if (isMap) {
                        typename = typename.replaceAll("java\\.util\\.Map<", ""); // NOI18N.
                    }
                    // Remove trailing >.
                    typename = typename.replaceAll(">+", ""); // NOI18N.
                    // Map fields yield two type parameters (key, value); all
                    // others yield one.
                    final String[] subTargetClassNames = typename.split(",\\s*");
                    final Class[] subTargetClasses = new Class[subTargetClassNames.length];
                    for (int index = 0; index < subTargetClassNames.length; index++) {
                        subTargetClasses[index] = Class.forName(subTargetClassNames[index]);
                    }
                    switch (valueType) {
                        case ARRAY: {
                            final JsonArray jsonArray = jsonObject.getJsonArray(key);
                            valueFromJSON = marshallArray(jsonArray, childField, subTargetClasses[0]);
                        }
                        break;
                        case OBJECT:
                        default: {
                            final JsonObject childJsonObject = jsonObject.getJsonObject(key);
                            if (isMap) {
                                valueFromJSON = marshallMap(childJsonObject, childField, subTargetClasses[0], subTargetClasses[1]);
                            } else {
                                valueFromJSON = marshallObject(childJsonObject, childField, subTargetClasses[0]);
                            }
                        }
                    }
                }
                break;
                // JSON null (and anything unexpected) keeps the field's default value.
                case NULL:
                default:
            }
            final Object value = valueForField(childField, valueFromJSON);
            childField.setAccessible(true);
            childField.set(result, value);
        }
        return result;
    }
    // NOTE(review): stub — always returns null, so array-valued fields and
    // loadObjectArray() currently produce null. TODO implement (presumably by
    // marshalling each element of jsonArray into targetClass).
    private <T> List<T> marshallArray(final JsonArray jsonArray, final Field field, final Class<T> targetClass) {
        return null;
    }
    /**
     * Populates an unmodifiable map from a JSON object: each key is converted
     * via marshallEnumValue (using the field's declared key type) and each
     * value is converted according to its JSON value type.
     */
    private <T, V> Map<T, V> marshallMap(final JsonObject jsonObject, final Field field, final Class<T> keyClass, final Class<V> valueClass) throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, NoSuchFieldException, NullPointerException, MalformedURLException {
        final HashMap result = new HashMap();
        for (String key : jsonObject.keySet()) {
            final JsonValue value = jsonObject.get(key);
            final Object keyFromJSON = marshallEnumValue(field, key);
            Object valueFromJSON = null;
            switch (value.getValueType()) {
                case ARRAY: {
                    final JsonArray jsonArray = jsonObject.getJsonArray(key);
                    valueFromJSON = marshallArray(jsonArray, null, valueClass);
                }
                break;
                case OBJECT: {
                    final JsonObject childJsonObject = jsonObject.getJsonObject(key);
                    valueFromJSON = marshallObject(childJsonObject, null, valueClass);
                }
                break;
                case TRUE:
                case FALSE: {
                    valueFromJSON = jsonObject.getBoolean(key);
                }
                break;
                case NUMBER: {
                    final JsonNumber jsonNumber = jsonObject.getJsonNumber(key);
                    valueFromJSON = jsonNumberToJavaNumber(jsonNumber);
                }
                break;
                case STRING: {
                    valueFromJSON = jsonObject.getString(key);
                }
                break;
                // JSON null values are stored as Java null.
                case NULL:
                default:
            }
            result.put(keyFromJSON, valueFromJSON);
        }
        return Collections.unmodifiableMap(result);
    }
    /**
     * Converts the number value returned by the JSON to either {@code Integer} or {@code Double}
     * @param jsonNumber The JSON number.
     * @return A {@code Number}, never {@code null}.
     */
    private Number jsonNumberToJavaNumber(final JsonNumber jsonNumber) {
        Number result = null;
        if (jsonNumber.isIntegral()) {
            result = jsonNumber.intValue();
        } else {
            result = jsonNumber.doubleValue();
        }
        return result;
    }
}
package api.web.gw2.mapping.v2.items;
import api.web.gw2.mapping.core.ImplementationSpecific;
import api.web.gw2.mapping.v2.APIv2;
@APIv2(endpoint = "v2/items") // NOI18N.
public enum ItemWeaponType {
    /**
     * Defines an axe weapon type.
     */
    AXE("Axe"), // NOI18N.
    /**
     * Defines a long bow weapon type.
     */
    LONG_BOW("LongBow"), // NOI18N.
    /**
     * Defines a dagger weapon type.
     */
    DAGGER("Dagger"), // NOI18N.
    /**
     * Defines a focus weapon type.
     */
    FOCUS("Focus"), // NOI18N.
    /**
     * Defines a great sword weapon type.
     */
    GREATSWORD("Greatsword"), // NOI18N.
    /**
     * Defines a hammer weapon type.
     */
    HAMMER("Hammer"), // NOI18N.
    /**
     * Defines a harpoon weapon type.
     */
    HARPOON("Harpoon"), // NOI18N.
    /**
     * Defines a mace weapon type.
     */
    MACE("Mace"), // NOI18N.
    /**
     * Defines a pistol weapon type.
     */
    PISTOL("Pistol"), // NOI18N.
    /**
     * Defines a rifle weapon type.
     */
    RIFLE("Rifle"), // NOI18N.
    /**
     * Defines a scepter weapon type.
     */
    SCEPTER("Scepter"), // NOI18N.
    /**
     * Defines a shield weapon type.
     */
    SHIELD("Shield"), // NOI18N.
    /**
     * Defines a short bow weapon type.
     */
    SHORT_BOW("ShortBow"), // NOI18N.
    /**
     * Defines a speargun weapon type.
     */
    SPEARGUN("Speargun"), // NOI18N.
    /**
     * Defines a staff weapon type.
     */
    STAFF("Staff"), // NOI18N.
    /**
     * Defines a sword weapon type.
     */
    SWORD("Sword"), // NOI18N.
    /**
     * Defines a torch weapon type.
     */
    TORCH("Torch"), // NOI18N.
    /**
     * Defines a trident weapon type.
     */
    TRIDENT("Trident"), // NOI18N.
    /**
     * Defines a warhorn weapon type.
     */
    WARHORN("Warhorn"), // NOI18N.
    /**
     * Fail safe value.
     */
    @ImplementationSpecific
    UNKNOWN(null);
    // Raw value used by the remote API for this weapon type; null for UNKNOWN.
    final String value;
    private ItemWeaponType(final String value) {
        this.value = value;
    }
}
package org.monarchinitiative.exomiser.core.prioritisers;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.monarchinitiative.exomiser.core.phenotype.PhenotypeMatch;
import org.monarchinitiative.exomiser.core.phenotype.PhenotypeTerm;
import org.monarchinitiative.exomiser.core.prioritisers.model.GeneDiseaseModel;
import org.monarchinitiative.exomiser.core.prioritisers.model.GeneModelPhenotypeMatch;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import static java.util.stream.Collectors.toMap;
/**
*
* @author Jules Jacobsen <jules.jacobsen@sanger.ac.uk>
*/
public class HiPhivePriorityResult extends AbstractPriorityResult {
private double humanScore = 0;
private double mouseScore = 0;
private double fishScore = 0;
private final double ppiScore;
private final boolean candidateGeneMatch;
private final List<PhenotypeTerm> queryPhenotypeTerms;
private final List<GeneModelPhenotypeMatch> phenotypeEvidence;
private final List<GeneModelPhenotypeMatch> ppiEvidence;
/**
* @param score The similarity score assigned by the random walk.
*/
public HiPhivePriorityResult(int geneId, String geneSymbol, double score, List<PhenotypeTerm> queryPhenotypeTerms, List<GeneModelPhenotypeMatch> phenotypeEvidence, List<GeneModelPhenotypeMatch> ppiEvidence, double ppiScore, boolean candidateGeneMatch) {
super(PriorityType.HIPHIVE_PRIORITY, geneId, geneSymbol, score);
this.queryPhenotypeTerms = queryPhenotypeTerms;
setPhenotypeEvidenceScores(phenotypeEvidence);
this.phenotypeEvidence = phenotypeEvidence;
this.ppiEvidence = ppiEvidence;
this.ppiScore = ppiScore;
this.candidateGeneMatch = candidateGeneMatch;
}
private void setPhenotypeEvidenceScores(List<GeneModelPhenotypeMatch> phenotypeEvidence) {
if (phenotypeEvidence != null) {
for (GeneModelPhenotypeMatch model : phenotypeEvidence) {
switch (model.getOrganism()) {
case HUMAN:
humanScore = model.getScore();
break;
case MOUSE:
mouseScore = model.getScore();
break;
case FISH:
fishScore = model.getScore();
break;
}
}
}
}
@Override
public String getGeneSymbol() {
return geneSymbol;
}
@Override
public double getScore() {
return score;
}
public List<PhenotypeTerm> getQueryPhenotypeTerms() {
return queryPhenotypeTerms;
}
public List<GeneModelPhenotypeMatch> getPhenotypeEvidence() {
return phenotypeEvidence;
}
public List<GeneModelPhenotypeMatch> getPpiEvidence() {
return ppiEvidence;
}
public double getHumanScore() {
return humanScore;
}
public double getMouseScore() {
return mouseScore;
}
public double getFishScore() {
return fishScore;
}
public double getPpiScore() {
return ppiScore;
}
public boolean isCandidateGeneMatch() {
return candidateGeneMatch;
}
/**
* @return A summary for the text output formats
*/
@JsonIgnore
public String getPhenotypeEvidenceText() {
StringBuilder humanBuilder = new StringBuilder();
StringBuilder mouseBuilder = new StringBuilder();
StringBuilder fishBuilder = new StringBuilder();
for (GeneModelPhenotypeMatch geneModelPhenotypeMatch : phenotypeEvidence) {
Map<PhenotypeTerm, PhenotypeMatch> bestMatchesForModel = getPhenotypeTermPhenotypeMatchMap(geneModelPhenotypeMatch);
switch (geneModelPhenotypeMatch.getOrganism()) {
case HUMAN:
GeneDiseaseModel geneDiseaseModel = (GeneDiseaseModel) geneModelPhenotypeMatch.getModel();
humanBuilder.append(geneDiseaseModel.getDiseaseTerm())
.append(" (").append(geneDiseaseModel.getDiseaseId()).append("): ");
makeBestPhenotypeMatchText(humanBuilder, bestMatchesForModel);
break;
case MOUSE:
makeBestPhenotypeMatchText(mouseBuilder, bestMatchesForModel);
break;
case FISH:
makeBestPhenotypeMatchText(fishBuilder, bestMatchesForModel);
}
}
StringBuilder humanPPIBuilder = new StringBuilder();
StringBuilder mousePPIBuilder = new StringBuilder();
StringBuilder fishPPIBuilder = new StringBuilder();
for (GeneModelPhenotypeMatch geneModelPhenotypeMatch : ppiEvidence) {
Map<PhenotypeTerm, PhenotypeMatch> bestMatchesForModel = getPhenotypeTermPhenotypeMatchMap(geneModelPhenotypeMatch);
String proximityToGeneSymbol = "Proximity to " + geneModelPhenotypeMatch.getHumanGeneSymbol() + " ";
switch (geneModelPhenotypeMatch.getOrganism()) {
case HUMAN:
GeneDiseaseModel geneDiseaseModel = (GeneDiseaseModel) geneModelPhenotypeMatch.getModel();
humanPPIBuilder.append(proximityToGeneSymbol)
.append("associated with ")
.append(geneDiseaseModel.getDiseaseTerm())
.append(" (").append(geneDiseaseModel.getDiseaseId()).append("): ");
makeBestPhenotypeMatchText(humanPPIBuilder, bestMatchesForModel);
break;
case MOUSE:
mousePPIBuilder.append(proximityToGeneSymbol);
makeBestPhenotypeMatchText(mousePPIBuilder, bestMatchesForModel);
break;
case FISH:
fishPPIBuilder.append(proximityToGeneSymbol);
makeBestPhenotypeMatchText(fishPPIBuilder, bestMatchesForModel);
}
}
String human = humanBuilder.toString();
String mouse = mouseBuilder.toString();
String fish = fishBuilder.toString();
String humanPPI = humanPPIBuilder.toString();
String mousePPI = mousePPIBuilder.toString();
String fishPPI = fishPPIBuilder.toString();
return String.format("%s\t%s\t%s\t%s\t%s\t%s", human, mouse, fish, humanPPI, mousePPI, fishPPI);
}
private Map<PhenotypeTerm, PhenotypeMatch> getPhenotypeTermPhenotypeMatchMap(GeneModelPhenotypeMatch geneModelPhenotypeMatch) {
return geneModelPhenotypeMatch
.getBestModelPhenotypeMatches()
.stream()
.collect(toMap(PhenotypeMatch::getQueryPhenotype, Function.identity()));
}
@JsonIgnore
@Override
public String getHTMLCode() {
StringBuilder stringBuilder = new StringBuilder();
for (GeneModelPhenotypeMatch geneModelPhenotypeMatch : phenotypeEvidence) {
switch (geneModelPhenotypeMatch.getOrganism()) {
case HUMAN:
GeneDiseaseModel geneDiseaseModel = (GeneDiseaseModel) geneModelPhenotypeMatch.getModel();
String diseaseLink = makeDiseaseLink(geneDiseaseModel.getDiseaseId(), geneDiseaseModel.getDiseaseTerm());
stringBuilder.append(String.format("<dl><dt>Phenotypic similarity %.3f to %s associated with %s.</dt>", geneModelPhenotypeMatch
.getScore(), diseaseLink, geneModelPhenotypeMatch.getHumanGeneSymbol()));
break;
case MOUSE:
stringBuilder.append(String.format("<dl><dt>Phenotypic similarity %.3f to mouse mutant involving <a href=\"http:
.getScore(), geneModelPhenotypeMatch.getHumanGeneSymbol(), geneModelPhenotypeMatch.getHumanGeneSymbol()));
break;
case FISH:
stringBuilder.append(String.format("<dl><dt>Phenotypic similarity %.3f to zebrafish mutant involving <a href=\"http://zfin.org/action/quicksearch/query?query=%s\">%s</a>.</dt>", geneModelPhenotypeMatch
.getScore(), geneModelPhenotypeMatch.getHumanGeneSymbol(), geneModelPhenotypeMatch.getHumanGeneSymbol()));
break;
}
Map<PhenotypeTerm, PhenotypeMatch> bestMatchesForModel = getPhenotypeTermPhenotypeMatchMap(geneModelPhenotypeMatch);
makeBestPhenotypeMatchHtml(stringBuilder, bestMatchesForModel);
stringBuilder.append("</dl>");
}
for (GeneModelPhenotypeMatch geneModelPhenotypeMatch : ppiEvidence) {
String stringDbLink = "http://string-db.org/newstring_cgi/show_network_section.pl?identifiers=" + geneSymbol + "%0D" + geneModelPhenotypeMatch
.getHumanGeneSymbol() + "&required_score=700&network_flavor=evidence&species=9606&limit=20";
switch (geneModelPhenotypeMatch.getOrganism()) {
case HUMAN:
GeneDiseaseModel geneDiseaseModel = (GeneDiseaseModel) geneModelPhenotypeMatch.getModel();
String diseaseLink = makeDiseaseLink(geneDiseaseModel.getDiseaseId(), geneDiseaseModel.getDiseaseTerm());
stringBuilder.append(String.format("<dl><dt>Proximity score %.3f in <a href=\"%s\">interactome to %s</a> and phenotypic similarity %.3f to %s associated with %s.</dt>", ppiScore, stringDbLink, geneModelPhenotypeMatch
.getHumanGeneSymbol(), geneModelPhenotypeMatch.getScore(), diseaseLink, geneModelPhenotypeMatch
.getHumanGeneSymbol()));
break;
case MOUSE:
stringBuilder.append(String.format("<dl><dt>Proximity score %.3f in <a href=\"%s\">interactome to %s</a> and phenotypic similarity %.3f to mouse mutant of %s.</dt>", ppiScore, stringDbLink, geneModelPhenotypeMatch
.getHumanGeneSymbol(), geneModelPhenotypeMatch.getScore(), geneModelPhenotypeMatch.getHumanGeneSymbol()));
break;
case FISH:
stringBuilder.append(String.format("<dl><dt>Proximity score %.3f in <a href=\"%s\">interactome to %s</a> and phenotypic similarity %.3f to fish mutant of %s.</dt>", ppiScore, stringDbLink, geneModelPhenotypeMatch
.getHumanGeneSymbol(), geneModelPhenotypeMatch.getScore(), geneModelPhenotypeMatch.getHumanGeneSymbol()));
break;
}
Map<PhenotypeTerm, PhenotypeMatch> bestModelPhenotypeMatches = getPhenotypeTermPhenotypeMatchMap(geneModelPhenotypeMatch);
makeBestPhenotypeMatchHtml(stringBuilder, bestModelPhenotypeMatches);
stringBuilder.append("</dl>");
}
String html = stringBuilder.toString();
if (html.isEmpty()) {
return "<dl><dt>No phenotype or PPI evidence</dt></dl>";
}
return html;
}
private void makeBestPhenotypeMatchText(StringBuilder stringBuilder, Map<PhenotypeTerm, PhenotypeMatch> bestModelPhenotypeMatches) {
for (PhenotypeTerm queryTerm : queryPhenotypeTerms) {
if (bestModelPhenotypeMatches.containsKey(queryTerm)) {// && bestModelPhenotypeMatches.get(queryTerm).getScore() > 1.75) {// RESTRICT TO HIGH QUALITY MATCHES
PhenotypeMatch match = bestModelPhenotypeMatches.get(queryTerm);
PhenotypeTerm matchTerm = match.getMatchPhenotype();
stringBuilder.append(String.format("%s (%s)-%s (%s), ", queryTerm.getLabel(), queryTerm.getId(), matchTerm.getLabel(), matchTerm.getId()));
}
}
}
private void makeBestPhenotypeMatchHtml(StringBuilder stringBuilder, Map<PhenotypeTerm, PhenotypeMatch> bestModelPhenotypeMatches) {
stringBuilder.append("<dt>Best Phenotype Matches:</dt>");
for (PhenotypeTerm queryTerm : queryPhenotypeTerms) {
if (bestModelPhenotypeMatches.containsKey(queryTerm)) {
PhenotypeMatch match = bestModelPhenotypeMatches.get(queryTerm);
PhenotypeTerm matchTerm = match.getMatchPhenotype();
stringBuilder.append(String.format("<dd>%s, %s - %s, %s</dd>", queryTerm.getId(), queryTerm.getLabel(), matchTerm.getId(), matchTerm.getLabel()));
} else {
stringBuilder.append(String.format("<dd>%s, %s -</dd>", queryTerm.getId(), queryTerm.getLabel()));
}
}
}
private String makeDiseaseLink(String diseaseId, String diseaseTerm) {
String[] databaseNameAndIdentifier = diseaseId.split(":");
String databaseName = databaseNameAndIdentifier[0];
String id = databaseNameAndIdentifier[1];
if (databaseName.equals("OMIM")) {
return "<a href=\"http:
} else {
return "<a href=\"http:
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
HiPhivePriorityResult that = (HiPhivePriorityResult) o;
return Double.compare(that.humanScore, humanScore) == 0 &&
Double.compare(that.mouseScore, mouseScore) == 0 &&
Double.compare(that.fishScore, fishScore) == 0 &&
Double.compare(that.ppiScore, ppiScore) == 0 &&
candidateGeneMatch == that.candidateGeneMatch;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), humanScore, mouseScore, fishScore, ppiScore, candidateGeneMatch);
}
@Override
public String toString() {
return "HiPhivePriorityResult{" +
"geneId=" + geneId +
", geneSymbol='" + geneSymbol + '\'' +
", score=" + score +
", humanScore=" + humanScore +
", mouseScore=" + mouseScore +
", fishScore=" + fishScore +
", ppiScore=" + ppiScore +
", candidateGeneMatch=" + candidateGeneMatch +
", queryPhenotypeTerms=" + queryPhenotypeTerms +
", phenotypeEvidence=" + phenotypeEvidence +
", ppiEvidence=" + ppiEvidence +
'}';
}
} |
package me.panpf.adapter;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.util.List;
@SuppressWarnings({"unused", "WeakerAccess"})
public class ItemActor {
/*
* ItemStorage AssemblyAdapter
* PagingListAdapter AssemblyRecyclerPageListAdapter getDataCount() getData()
* ItemStorage AssemblyRecyclerAdapter
*/
@NonNull
private AssemblyAdapter adapter;
public ItemActor(@NonNull AssemblyAdapter adapter) {
this.adapter = adapter;
}
public int getItemCount() {
int headerItemCount = adapter.getHeaderItemCount();
int dataCount = adapter.getDataCount();
int footerItemCount = adapter.getFooterItemCount();
if (dataCount > 0) {
return headerItemCount + dataCount + footerItemCount + (adapter.hasLoadMoreFooter() ? 1 : 0);
} else {
return headerItemCount + footerItemCount;
}
}
@Nullable
public Object getItem(int position) {
int headerItemCount = adapter.getHeaderItemCount();
int headerStartPosition = 0;
int headerEndPosition = headerItemCount - 1;
if (position >= headerStartPosition && position <= headerEndPosition && headerItemCount > 0) {
//noinspection UnnecessaryLocalVariable
int positionInHeaderList = position;
return adapter.getHeaderData(positionInHeaderList);
}
int dataCount = adapter.getDataCount();
int dataStartPosition = headerEndPosition + 1;
int dataEndPosition = headerEndPosition + dataCount;
if (position >= dataStartPosition && position <= dataEndPosition && dataCount > 0) {
int positionInDataList = position - headerItemCount;
return adapter.getData(positionInDataList);
}
int footerItemCount = adapter.getFooterItemCount();
int footerStartPosition = dataEndPosition + 1;
int footerEndPosition = dataEndPosition + footerItemCount;
if (position >= footerStartPosition && position <= footerEndPosition && footerItemCount > 0) {
int positionInFooterList = position - headerItemCount - dataCount;
return adapter.getFooterData(positionInFooterList);
}
if (dataCount > 0 && adapter.hasLoadMoreFooter() && position == getItemCount() - 1) {
FixedItemInfo loadMoreFixedItemInfo = adapter.getLoadMoreFixedItemInfo();
return loadMoreFixedItemInfo != null ? loadMoreFixedItemInfo.getData() : null;
}
return null;
}
public int getPositionInPart(int position) {
int headerItemCount = adapter.getHeaderItemCount();
int headerStartPosition = 0;
int headerEndPosition = headerItemCount - 1;
if (position >= headerStartPosition && position <= headerEndPosition && headerItemCount > 0) {
return position;
}
int dataCount = adapter.getDataCount();
int dataStartPosition = headerEndPosition + 1;
int dataEndPosition = headerEndPosition + dataCount;
if (position >= dataStartPosition && position <= dataEndPosition && dataCount > 0) {
return position - headerItemCount;
}
int footerItemCount = adapter.getFooterItemCount();
int footerStartPosition = dataEndPosition + 1;
int footerEndPosition = dataEndPosition + footerItemCount;
if (position >= footerStartPosition && position <= footerEndPosition && footerItemCount > 0) {
return position - headerItemCount - dataCount;
}
if (dataCount > 0 && adapter.hasLoadMoreFooter() && position == adapter.getItemCount() - 1) {
return 0;
}
throw new IllegalArgumentException("Illegal position: " + position);
}
public int getSpanSize(int position) {
int headerItemCount = adapter.getHeaderItemCount();
int headerStartPosition = 0;
int headerEndPosition = headerItemCount - 1;
List<FixedItemInfo> headerItemList = adapter.getHeaderItemList();
if (headerItemList != null && position >= headerStartPosition && position <= headerEndPosition && headerItemCount > 0) {
//noinspection UnnecessaryLocalVariable
int positionInHeaderList = position;
return headerItemList.get(positionInHeaderList).getItemFactory().getSpanSize();
}
int dataCount = adapter.getDataCount();
int dataStartPosition = headerEndPosition + 1;
int dataEndPosition = headerEndPosition + dataCount;
List<ItemFactory> itemFactoryList = adapter.getItemFactoryList();
if (itemFactoryList != null && position >= dataStartPosition && position <= dataEndPosition && dataCount > 0) {
int positionInDataList = position - headerItemCount;
Object dataObject = adapter.getData(positionInDataList);
ItemFactory itemFactory;
for (int w = 0, size = itemFactoryList.size(); w < size; w++) {
itemFactory = itemFactoryList.get(w);
if (itemFactory.isTarget(dataObject)) {
return itemFactory.getSpanSize();
}
}
throw new IllegalStateException(String.format("Didn't find suitable ItemFactory. positionInDataList=%d, dataObject=%s",
positionInDataList, dataObject != null ? dataObject.getClass().getName() : null));
}
int footerItemCount = adapter.getFooterItemCount();
int footerStartPosition = dataEndPosition + 1;
int footerEndPosition = dataEndPosition + footerItemCount;
List<FixedItemInfo> footerItemList = adapter.getHeaderItemList();
if (footerItemList != null && position >= footerStartPosition && position <= footerEndPosition && footerItemCount > 0) {
int positionInFooterList = position - headerItemCount - dataCount;
return footerItemList.get(positionInFooterList).getItemFactory().getSpanSize();
}
FixedItemInfo loadMoreFixedItemInfo = adapter.getLoadMoreFixedItemInfo();
if (loadMoreFixedItemInfo != null && dataCount > 0 && adapter.hasLoadMoreFooter() && position == getItemCount() - 1) {
return loadMoreFixedItemInfo.getItemFactory().getSpanSize();
}
return 1;
}
public int getItemViewType(int position) {
int headerItemCount = adapter.getHeaderItemCount();
int headerStartPosition = 0;
int headerEndPosition = headerItemCount - 1;
List<FixedItemInfo> headerItemList = adapter.getHeaderItemList();
if (headerItemList != null && position >= headerStartPosition && position <= headerEndPosition && headerItemCount > 0) {
//noinspection UnnecessaryLocalVariable
int positionInHeaderList = position;
return headerItemList.get(positionInHeaderList).getItemFactory().getItemType();
}
List<ItemFactory> itemFactoryList = adapter.getItemFactoryList();
int dataCount = adapter.getDataCount();
int dataStartPosition = headerEndPosition + 1;
int dataEndPosition = headerEndPosition + dataCount;
if (itemFactoryList != null && position >= dataStartPosition && position <= dataEndPosition && dataCount > 0) {
int positionInDataList = position - headerItemCount;
Object dataObject = adapter.getData(positionInDataList);
ItemFactory itemFactory;
for (int w = 0, size = itemFactoryList.size(); w < size; w++) {
itemFactory = itemFactoryList.get(w);
if (itemFactory.isTarget(dataObject)) {
return itemFactory.getItemType();
}
}
throw new IllegalStateException(String.format("Didn't find suitable ItemFactory. positionInDataList=%d, dataObject=%s",
positionInDataList, dataObject != null ? dataObject.toString() : null));
}
List<FixedItemInfo> footerItemList = adapter.getFooterItemList();
int footerItemCount = adapter.getFooterItemCount();
int footerStartPosition = dataEndPosition + 1;
int footerEndPosition = dataEndPosition + footerItemCount;
if (footerItemList != null && position >= footerStartPosition && position <= footerEndPosition && footerItemCount > 0) {
int positionInFooterList = position - headerItemCount - dataCount;
return footerItemList.get(positionInFooterList).getItemFactory().getItemType();
}
FixedItemInfo loadMoreFixedItemInfo = adapter.getLoadMoreFixedItemInfo();
if (loadMoreFixedItemInfo != null && dataCount > 0 && adapter.hasLoadMoreFooter() && position == getItemCount() - 1) {
return loadMoreFixedItemInfo.getItemFactory().getItemType();
}
throw new IllegalStateException("Not found match viewType, position: " + position);
}
} |
package com.KST.eCommerce;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author Kent Tsuenchy
*/
public class EcommercePlatform {
private final Session session;
private final PaymentProcessor checkout;
private final ArrayList<User> users;
public EcommercePlatform() {
Database db = new Database("users.data");
this.session = new Session();
this.checkout = new PaymentProcessor();
this.users = db.readUsers();
db.closeDb();
}
public ArrayList<Item> listItems() {
ArrayList<Item> items = new ArrayList<>();
for (User u : users) {
for (Item i : ((Seller) u).getItems()) {
items.add(i);
}
}
return (ArrayList<Item>) items.clone();
}
public Session getSession() {
return this.session;
}
public boolean login(String name, String password) {
try {
return this.session.login(this.users, name, password);
} catch (Exception ex) {
Logger.getLogger(EcommercePlatform.class.getName()).log(Level.SEVERE, null, ex);
}
return false;
}
public PaymentProcessor getPaymentProcessor() {
return this.checkout;
}
public void showInterface() {
EcommerceGUI.showGUI(this);
}
public static void main(String[] args) {
EcommercePlatform platform = new EcommercePlatform();
platform.showInterface();
}
} |
package com.evh98.vision.screens;
import com.evh98.vision.Vision;
import com.evh98.vision.ui.Pane;
import com.evh98.vision.util.Controller;
import com.evh98.vision.util.Graphics;
import com.evh98.vision.util.Palette;
import javafx.event.EventHandler;
import javafx.scene.Scene;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.input.KeyEvent;
/**
 * Top-level 2x2 launcher screen. The cursor position is tracked in (x, y),
 * each in {1, 2}: (1,1)=Games, (2,1)=Media, (1,2)=Apps, (2,2)=System. The
 * selected pane is drawn with renderAlt(); arrow keys move the cursor and the
 * "green" button activates the selection.
 */
public class MainScreen extends Screen {
    // Cursor coordinates; 0 is used as a transient "unset" value the key
    // handlers normalize back into {1, 2}.
    int x = 1;
    int y = 1;
    Pane games = new Pane(Palette.RED, "Games", '\uf2db', -1600, -832);
    Pane media = new Pane(Palette.BLUE, "Media", '\uf19d', 64, -832);
    Pane apps = new Pane(Palette.YELLOW, "Apps", '\uf313', -1600, 48);
    Pane system = new Pane(Palette.GREEN, "System", '\uf1c6', 64, 48);
    public MainScreen(GraphicsContext gc) {
        super(gc);
    }
    /**
     * Draws the background and all four panes, using the highlighted variant
     * for whichever pane the cursor is on.
     */
    @Override
    public void render() {
        Graphics.drawBackground(gc, Graphics.background_colored);
        if (x == 1 && y == 1) {
            games.renderAlt(gc);
        } else {
            games.render(gc);
        }
        if (x == 2 && y == 1) {
            media.renderAlt(gc);
        } else {
            media.render(gc);
        }
        if (x == 1 && y == 2) {
            apps.renderAlt(gc);
        } else {
            apps.render(gc);
        }
        if (x == 2 && y == 2) {
            system.renderAlt(gc);
        } else {
            system.render(gc);
        }
    }
    /**
     * Installs the key handler on the scene: arrows move the cursor (clamping
     * it into the 2x2 grid), green activates the selected pane, red exits.
     * Note: this replaces any previously installed key handler on the scene.
     */
    @Override
    public void update(Scene scene) {
        scene.setOnKeyPressed(new EventHandler<KeyEvent>(){
            @Override
            public void handle(KeyEvent e) {
                if (Controller.isLeft(e)) {
                    if (x == 0) {
                        y = 1;
                        x = 1;
                    } else if (x == 2) {
                        x = 1;
                    }
                }
                if (Controller.isRight(e)) {
                    if (x == 0 || x == 1) {
                        x++;
                        if (y == 0) {
                            y = 1;
                        }
                    }
                }
                if (Controller.isUp(e)) {
                    if (y == 0) {
                        y = 1;
                        x = 1;
                    } else if (y == 2) {
                        y = 1;
                    }
                }
                if (Controller.isDown(e)) {
                    if (y == 0 || y == 1) {
                        y++;
                        if (x == 0) {
                            x = 1;
                        }
                    }
                }
                if (Controller.isGreen(e)) {
                    if (x == 1 && y == 1) {
                        // Vision.setScreen(Vision.game_screen);
                    }
                    else if (x == 2 && y == 1) {
                        Vision.setScreen(Vision.media_screen);
                    }
                    else if (x == 1 && y == 2) {
                        // Vision.setScreen(Vision.app_screen);
                    }
                    else if (x == 2 && y == 2) {
                        // NOTE(review): this re-renders the pane instead of
                        // switching screens — looks like a placeholder (the
                        // sibling branches are commented-out setScreen calls);
                        // confirm intended behavior.
                        system.renderAlt(gc);
                    }
                }
                if (Controller.isRed(e)) {
                    // Red button exits the application immediately.
                    System.exit(0);
                }
            }
        });
    }
}
package com.github.picrazy2.TypingTest;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Random;
/**
 * Console typing test: reads a word bank from bank.txt, shuffles it, then
 * shows five words at a time and scores exact matches typed within the
 * time window. Type "quit" to abort.
 */
public class Main {

    static long startTime = 0;
    static long currentTime = 0;
    static Random random = new Random();
    // Words not yet moved into the shuffled presentation order.
    static ArrayList<String> cont = new ArrayList<String>();
    // Unmodified copy of every word read from bank.txt.
    static ArrayList<String> fullCont = new ArrayList<String>();
    // Words in randomized presentation order.
    static ArrayList<String> order = new ArrayList<String>();

    // Length of one round in milliseconds.
    // NOTE(review): the prompt advertises "1 minute" but the original code used
    // a 10 second window; kept at 10 s to preserve behavior — confirm intent.
    static final long TIME_LIMIT_MS = 10000;

    public static void main(String[] args) {
        int correctCounter = 0;
        File file = new File("bank.txt");
        // try-with-resources: the original never closed the FileReader.
        try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
            String line = reader.readLine();
            while (line != null) {
                cont.add(line);
                fullCont.add(line);
                line = reader.readLine();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("1 minute starts after pressing 1");
        BufferedReader systemIn = new BufferedReader(new InputStreamReader(System.in));
        boolean start = true;
        int counter = 0;
        while (true) {
            try {
                String input = systemIn.readLine();
                // Null means end-of-stream; the original would have thrown NPE.
                if (input == null || input.equals("quit")) {
                    break;
                } else if (input.equals("1") && start) {
                    makeWords();
                    start = false;
                    startTime = System.currentTimeMillis();
                    printBatch(counter);
                } else if (!start) {
                    currentTime = System.currentTimeMillis();
                    if (currentTime - startTime >= TIME_LIMIT_MS) {
                        System.out.println("Time up! Score: " + correctCounter);
                        break;
                    }
                    printBatch(counter);
                    // Compare against the word shown one round earlier.
                    // FIX: the original used order.get(counter % order.size() - 1),
                    // which is index -1 (IndexOutOfBoundsException) whenever
                    // counter is a multiple of the word count.
                    int prev = (counter - 1 + order.size()) % order.size();
                    if (input.equals(order.get(prev))) {
                        correctCounter++;
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            counter++;
        }
    }

    /** Prints the five words starting at index {@code from}, wrapping around. */
    static void printBatch(int from) {
        for (int i = from; i < from + 5; i++) {
            System.out.print(order.get(i % order.size()) + " ");
        }
        System.out.println();
    }

    /** Moves every word from {@code cont} into {@code order} in random sequence. */
    public static void makeWords() {
        while (cont.size() != 0) {
            int temp = random.nextInt(cont.size());
            order.add(cont.get(temp));
            cont.remove(temp);
        }
    }
}
package com.github.uiautomatorstub;
import com.android.uiautomator.core.UiSelector;
public class Selector {
private String _text;
private String _textContains;
private String _textMatches;
private String _textStartsWith;
private String _className;
private String _classNameMatches;
private String _description;
private String _descriptionContains;
private String _descriptionMatches;
private String _descriptionStartsWith;
private boolean _checkable;
private boolean _checked;
private boolean _clickable;
private boolean _longClickable;
private boolean _scrollable;
private boolean _enabled;
private boolean _focusable;
private boolean _focused;
private boolean _selected;
private String _packageName;
private String _packageNameMatches;
private String _resourceId;
private String _resourceIdMatches;
private int _index;
private int _instance;
private Selector[] _childOrSiblingSelector = new Selector[]{};
private String[] _childOrSibling = new String[]{};
private long _mask;
public static final long MASK_TEXT = 0x01;
public static final long MASK_TEXTCONTAINS = 0x02;
public static final long MASK_TEXTMATCHES = 0x04;
public static final long MASK_TEXTSTARTSWITH = 0x08;
public static final long MASK_CLASSNAME = 0x10;
public static final long MASK_CLASSNAMEMATCHES = 0x20;
public static final long MASK_DESCRIPTION = 0x40;
public static final long MASK_DESCRIPTIONCONTAINS = 0x80;
public static final long MASK_DESCRIPTIONMATCHES = 0x0100;
public static final long MASK_DESCRIPTIONSTARTSWITH = 0x0200;
public static final long MASK_CHECKABLE = 0x0400;
public static final long MASK_CHECKED = 0x0800;
public static final long MASK_CLICKABLE = 0x1000;
public static final long MASK_LONGCLICKABLE = 0x2000;
public static final long MASK_SCROLLABLE = 0x4000;
public static final long MASK_ENABLED = 0x8000;
public static final long MASK_FOCUSABLE = 0x010000;
public static final long MASK_FOCUSED = 0x020000;
public static final long MASK_SELECTED = 0x040000;
public static final long MASK_PACKAGENAME = 0x080000;
public static final long MASK_PACKAGENAMEMATCHES = 0x100000;
public static final long MASK_RESOURCEID = 0x200000;
public static final long MASK_RESOURCEIDMATCHES = 0x400000;
public static final long MASK_INDEX = 0x800000;
public static final long MASK_INSTANCE = 0x01000000;
public UiSelector toUiSelector() {
UiSelector s = new UiSelector();
if ((getMask() & Selector.MASK_CHECKABLE) > 0 && android.os.Build.VERSION.SDK_INT >= 18)
s = s.clickable(this.isClickable());
if ((getMask() & Selector.MASK_CHECKED) > 0)
s = s.checked(isChecked());
if ((getMask() & Selector.MASK_CLASSNAME) > 0 && android.os.Build.VERSION.SDK_INT >= 17)
s = s.className(getClassName());
if ((getMask() & Selector.MASK_CLASSNAMEMATCHES) > 0 && android.os.Build.VERSION.SDK_INT >= 17)
s = s.classNameMatches(getClassNameMatches());
if ((getMask() & Selector.MASK_CLICKABLE) > 0)
s = s.clickable(isClickable());
if ((getMask() & Selector.MASK_DESCRIPTION) > 0)
s = s.description(getDescription());
if ((getMask() & Selector.MASK_DESCRIPTIONCONTAINS) > 0)
s = s.descriptionContains(getDescriptionContains());
if ((getMask() & Selector.MASK_DESCRIPTIONMATCHES) > 0 && android.os.Build.VERSION.SDK_INT >= 17)
s = s.descriptionMatches(getDescriptionMatches());
if ((getMask() & Selector.MASK_DESCRIPTIONSTARTSWITH) > 0)
s = s.descriptionStartsWith(getDescriptionStartsWith());
if ((getMask() & Selector.MASK_ENABLED) > 0)
s = s.enabled(isEnabled());
if ((getMask() & Selector.MASK_FOCUSABLE) > 0)
s = s.focusable(isFocusable());
if ((getMask() & Selector.MASK_FOCUSED) > 0)
s = s.focused(isFocused());
if ((getMask() & Selector.MASK_INDEX) > 0)
s = s.index(getIndex());
if ((getMask() & Selector.MASK_INSTANCE) > 0)
s = s .instance(getInstance());
if ((getMask() & Selector.MASK_LONGCLICKABLE) > 0 && android.os.Build.VERSION.SDK_INT >= 17)
s = s.longClickable(isLongClickable());
if ((getMask() & Selector.MASK_PACKAGENAME) > 0)
s = s.packageName(getPackageName());
if ((getMask() & Selector.MASK_PACKAGENAMEMATCHES) > 0 && android.os.Build.VERSION.SDK_INT >= 17)
s = s.packageNameMatches(getPackageNameMatches());
if ((getMask() & Selector.MASK_RESOURCEID) > 0 && android.os.Build.VERSION.SDK_INT >= 18)
s = s.resourceId(getResourceId());
if ((getMask() & Selector.MASK_RESOURCEIDMATCHES) > 0 && android.os.Build.VERSION.SDK_INT >= 18)
s = s.resourceIdMatches(getResourceIdMatches());
if ((getMask() & Selector.MASK_SCROLLABLE) > 0)
s = s.scrollable(isScrollable());
if ((getMask() & Selector.MASK_SELECTED) > 0)
s = s.selected(isSelected());
if ((getMask() & Selector.MASK_TEXT) > 0)
s = s.text(getText());
if ((getMask() & Selector.MASK_TEXTCONTAINS) > 0)
s = s.textContains(getTextContains());
if ((getMask() & Selector.MASK_TEXTSTARTSWITH) > 0)
s = s.textStartsWith(getTextStartsWith());
if ((getMask() & Selector.MASK_TEXTMATCHES) > 0 && android.os.Build.VERSION.SDK_INT >= 17)
s = s.textMatches(getTextMatches());
for (int i = 0; i < this.getChildOrSibling().length && i < this.getChildOrSiblingSelector().length; i++) {
if (this.getChildOrSibling()[i].toLowerCase().equals("child"))
s = s.childSelector(getChildOrSiblingSelector()[i].toUiSelector());
else if (this.getChildOrSibling()[i].toLowerCase().equals("sibling"))
s = s.fromParent((getChildOrSiblingSelector()[i].toUiSelector()));
}
return s;
}
public String getText() {
return _text;
}
public void setText(String text) {
this._text = text;
}
public String getClassName() {
return _className;
}
public void setClassName(String className) {
this._className = className;
}
public String getDescription() {
return _description;
}
public void setDescription(String description) {
this._description = description;
}
public String getTextContains() {
return _textContains;
}
public void setTextContains(String _textContains) {
this._textContains = _textContains;
}
public String getTextMatches() {
return _textMatches;
}
public void setTextMatches(String _textMatches) {
this._textMatches = _textMatches;
}
public String getTextStartsWith() {
return _textStartsWith;
}
public void setTextStartsWith(String _textStartsWith) {
this._textStartsWith = _textStartsWith;
}
public String getClassNameMatches() {
return _classNameMatches;
}
public void setClassNameMatches(String _classNameMatches) {
this._classNameMatches = _classNameMatches;
}
public String getDescriptionContains() {
return _descriptionContains;
}
public void setDescriptionContains(String _descriptionContains) {
this._descriptionContains = _descriptionContains;
}
public String getDescriptionMatches() {
return _descriptionMatches;
}
public void setDescriptionMatches(String _descriptionMatches) {
this._descriptionMatches = _descriptionMatches;
}
public String getDescriptionStartsWith() {
return _descriptionStartsWith;
}
public void setDescriptionStartsWith(String _descriptionStartsWith) {
this._descriptionStartsWith = _descriptionStartsWith;
}
public boolean isCheckable() {
return _checkable;
}
public void setCheckable(boolean _checkable) {
this._checkable = _checkable;
}
public boolean isChecked() {
return _checked;
}
public void setChecked(boolean _checked) {
this._checked = _checked;
}
public boolean isClickable() {
return _clickable;
}
public void setClickable(boolean _clickable) {
this._clickable = _clickable;
}
public boolean isScrollable() {
return _scrollable;
}
public void setScrollable(boolean _scrollable) {
this._scrollable = _scrollable;
}
public boolean isLongClickable() {
return _longClickable;
}
public void setLongClickable(boolean _longClickable) {
this._longClickable = _longClickable;
}
public boolean isEnabled() {
return _enabled;
}
public void setEnabled(boolean _enabled) {
this._enabled = _enabled;
}
public boolean isFocusable() {
return _focusable;
}
public void setFocusable(boolean _focusable) {
this._focusable = _focusable;
}
public boolean isFocused() {
return _focused;
}
public void setFocused(boolean _focused) {
this._focused = _focused;
}
public boolean isSelected() {
return _selected;
}
public void setSelected(boolean _selected) {
this._selected = _selected;
}
public String getPackageName() {
return _packageName;
}
public void setPackageName(String _packageName) {
this._packageName = _packageName;
}
public String getPackageNameMatches() {
return _packageNameMatches;
}
public void setPackageNameMatches(String _packageNameMatches) {
this._packageNameMatches = _packageNameMatches;
}
public String getResourceId() {
return _resourceId;
}
public void setResourceId(String _resourceId) {
this._resourceId = _resourceId;
}
public String getResourceIdMatches() {
return _resourceIdMatches;
}
public void setResourceIdMatches(String _resourceIdMatches) {
this._resourceIdMatches = _resourceIdMatches;
}
public int getIndex() {
return _index;
}
public void setIndex(int _index) {
this._index = _index;
}
public int getInstance() {
return _instance;
}
public void setInstance(int _instance) {
this._instance = _instance;
}
public long getMask() {
return _mask;
}
public void setMask(long _mask) {
this._mask = _mask;
}
public Selector[] getChildOrSiblingSelector() {
return _childOrSiblingSelector;
}
public void setChildOrSiblingSelector(Selector[] _childOrSiblingSelector) {
this._childOrSiblingSelector = _childOrSiblingSelector;
}
public String[] getChildOrSibling() {
return _childOrSibling;
}
public void setChildOrSibling(String[] _childOrSibling) {
this._childOrSibling = _childOrSibling;
}
} |
package org.eclipse.persistence.mappings.converters;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import org.eclipse.persistence.mappings.*;
import org.eclipse.persistence.internal.helper.*;
import org.eclipse.persistence.mappings.foundation.AbstractDirectMapping;
import org.eclipse.persistence.sessions.*;
import org.eclipse.persistence.exceptions.*;
import org.eclipse.persistence.internal.security.PrivilegedAccessHelper;
import org.eclipse.persistence.internal.security.PrivilegedNewInstanceFromClass;
import org.eclipse.persistence.internal.sessions.AbstractSession;
/**
* <b>Purpose</b>: Allows a class name to be converted to and from a new instance of the class.
*
* When using a ClassInstanceConverter, the database will store the Class name and the java object
* model will contain an instance of that class initialized with its default constructor
*
* @author James Sutherland
* @since OracleAS TopLink 10<i>g</i> (10.0.3)
*/
public class ClassInstanceConverter implements Converter {
protected DatabaseMapping mapping;
/**
* PUBLIC:
* Default constructor.
*/
public ClassInstanceConverter() {
}
/**
* INTERNAL:
* Convert the class name to a class, then create an instance of the class.
*/
public Object convertDataValueToObjectValue(Object fieldValue, Session session) {
Object attributeValue = null;
if (fieldValue != null) {
Class attributeClass = (Class)((AbstractSession)session).getDatasourcePlatform().convertObject(fieldValue, ClassConstants.CLASS);
try {
if (PrivilegedAccessHelper.shouldUsePrivilegedAccess()){
try {
attributeValue = AccessController.doPrivileged(new PrivilegedNewInstanceFromClass(attributeClass));
} catch (PrivilegedActionException exception) {
throw ConversionException.couldNotBeConverted(fieldValue, attributeClass, exception.getException());
}
} else {
attributeValue = PrivilegedAccessHelper.newInstanceFromClass(attributeClass);
}
} catch (Exception exception) {
throw ConversionException.couldNotBeConverted(fieldValue, attributeClass, exception);
}
}
return attributeValue;
}
/**
* INTERNAL:
* Convert to the field class.
*/
public Object convertObjectValueToDataValue(Object attributeValue, Session session) {
if (attributeValue == null) {
return null;
}
return attributeValue.getClass().getName();
}
/**
* INTERNAL:
* Set the mapping.
*/
public void initialize(DatabaseMapping mapping, Session session) {
this.mapping = mapping;
// CR#... Mapping must also have the field classification.
if (getMapping().isDirectToFieldMapping()) {
AbstractDirectMapping directMapping = (AbstractDirectMapping)getMapping();
// Allow user to specify field type to override computed value. (i.e. blob, nchar)
if (directMapping.getFieldClassification() == null) {
directMapping.setFieldClassification(ClassConstants.STRING);
}
}
}
/**
* INTERNAL:
* Return the mapping.
*/
protected DatabaseMapping getMapping() {
return mapping;
}
/**
* INTERNAL:
* If the converter converts the value to a non-atomic value, i.e.
* a value that can have its' parts changed without being replaced,
* then it must return false, serialization can be non-atomic.
*/
public boolean isMutable() {
return false;
}
} |
package com.kevin.widgets.views;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Typeface;
import android.text.TextUtils.TruncateAt;
import android.util.AttributeSet;
import android.util.Log;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.Animation.AnimationListener;
import android.view.animation.AnimationSet;
import android.view.animation.AnimationUtils;
import android.view.animation.ScaleAnimation;
import android.view.animation.TranslateAnimation;
import android.widget.ImageView;
import android.widget.ImageView.ScaleType;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.kevin.widgets.R;
/*
* This widget constructed by 10 views, but only [label] and [custom] could
* be configured. Other views are just decorate.
* [up_left_corner] [+/-] [label] [up_right_corner]
* [left] [custom layout/empty] [right]
* [down_left] [down_right]
*
*/
public class LabelLayout extends ViewGroup {
public static final int LABEL_MIDDLE = 0;
public static final int LABEL_LEFT = 1;
public static final int LABEL_RIGHT = 2;
private static final String TAG = "LabelLayout";
static final int INDEX_LEFT_UP = 0;
static final int INDEX_SHOW_CONTROL = 1;
static final int INDEX_TEXT = 2;
static final int INDEX_RIGHT_UP = 3;
static final int INDEX_LEFT = 4;
static final int INDEX_RIGHT = 5;
static final int INDEX_LEFT_BOTTOM = 6;
static final int INDEX_RIGHT_BOTTOM = 7;
static final int INDEX_CONTENT = 8;
static final int INDEX_CLOSED = 9;
static final int DEFAULT_VIEWS_NUM = 10;
float dpToPx = 1;
int mTvHeight = 0;
int mTvWidth = 0;
int mCustomHeight = 0;
int mCustomWidth = 0;
int mDefaultWidth = 12; // at least 12dp to have better looking
int mDefaultTopHeight = 0;
int mDefaultBottomHeight = 0;
int mDefaultContentHeight = 0;
int mLabelSize = 18;
static final int ANIMATION_DURATION = 150;
boolean mContentIsOpen = true;
TextView mTvLabel = null;
TextView mTvHide = null;
ImageView mShowControl = null;
LinearLayout mContent = null;
// unit dip
static final int SIDE_WIDTH = 2;
static final int ROW_TOP_HEIGHT = 6;
static final int ROW_BOTTOM_HEIGHT = 12;
static final int ROW_HIDE_HEIGHT = 15;
int mLabelPos;
int mLabelOffset;
String mLabel = "";
boolean debug = false;
boolean mEnableAnimation = true;
boolean mEnableHideContent = true;
int mChildTop, mChildLeft, mChildRight, mChildBottom;
public LabelLayout(Context context) {
this(context, null);
}
public LabelLayout(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public LabelLayout(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
mTvLabel = new TextView(context, attrs, defStyle);
mShowControl = new ImageView(context);
mContent = new LinearLayout(context, attrs, defStyle);
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.OutlineLayoutConfig);
mLabelPos = a.getInt(R.styleable.OutlineLayoutConfig_label_pos, LABEL_MIDDLE);
mLabelOffset = (int)a.getDimensionPixelSize(R.styleable.OutlineLayoutConfig_label_offset, 0);
debug("mLabelOffset:"+mLabelOffset);
mEnableAnimation = a.getBoolean(R.styleable.OutlineLayoutConfig_animation, true);
mLabelSize *= dpToPx;
canHideContent(a.getBoolean(R.styleable.OutlineLayoutConfig_canHideContent, true));
a.recycle();
dpToPx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 1,
getResources().getDisplayMetrics());
mDefaultWidth = (int) (mDefaultWidth * dpToPx);
mDefaultTopHeight = (int) (ROW_TOP_HEIGHT * dpToPx);
mDefaultBottomHeight = (int) (ROW_BOTTOM_HEIGHT * dpToPx);;
mDefaultContentHeight = (int)(ROW_HIDE_HEIGHT*dpToPx);
mTvLabel.setSingleLine(true);
mTvLabel.setEllipsize(TruncateAt.END);
// mTvLabel.setText(mLabel);
// mTvLabel.setTextSize(TypedValue.COMPLEX_UNIT_PX, mLabelSize);
if(!mTvLabel.getText().toString().equals(""))
mTvLabel.setPadding(4, 0, 4, 0); // reset padding to prevent it got impacted from parent ViewGroup
mContent.setPadding(0, 0, 0, 0); // clean content layout's padding
// mContent.setLayoutParams(new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,ViewGroup.LayoutParams.MATCH_PARENT));
mTvLabel.setGravity(Gravity.CENTER);
mShowControl.setBackgroundResource(R.drawable.collapse_selector);
mShowControl.setScaleType(ScaleType.FIT_XY);
// default is opened, set it selected
mShowControl.setSelected(true);
// setup and add decorate views
View ul = new View(context);
ul.setBackgroundResource(R.drawable.up_left_first);
View ur = new View(context);
ur.setBackgroundResource(R.drawable.up_right_first);
View l = new View(context);
l.setBackgroundResource(R.drawable.left);
View r = new View(context);
r.setBackgroundResource(R.drawable.right);
View bl = new View(context);
bl.setBackgroundResource(R.drawable.down_left);
View br = new View(context);
br.setBackgroundResource(R.drawable.down_right);
addView(ul, INDEX_LEFT_UP);
addView(mShowControl, INDEX_SHOW_CONTROL);
addView(mTvLabel, INDEX_TEXT);
addView(ur, INDEX_RIGHT_UP);
addView(l, INDEX_LEFT);
addView(r, INDEX_RIGHT);
addView(bl, INDEX_LEFT_BOTTOM);
addView(br, INDEX_RIGHT_BOTTOM);
addView(mContent, INDEX_CONTENT);
mTvHide = new TextView(context);
mTvHide.setTypeface(Typeface.DEFAULT_BOLD);
mTvHide.setText("...");
mTvHide.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT,LayoutParams.WRAP_CONTENT));
mTvHide.setVisibility(View.GONE);
mTvHide.setGravity(Gravity.CENTER_HORIZONTAL);
addView(mTvHide, INDEX_CLOSED);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int count = getChildCount();
Log.d(TAG, "child count:"+count);
// Measurement will ultimately be computing these values.
int contentHeight = 0;
int contentWidth = 0;
int childState = 0;
int givenWidth = MeasureSpec.getSize(widthMeasureSpec);
int givenHeight = MeasureSpec.getSize(heightMeasureSpec);
int reportHeight = givenWidth;
int reportWidth = givenHeight;
View v = getChildAt(INDEX_TEXT);
debug(String.format("measure spec w:%s h:%s", MeasureSpec.toString(widthMeasureSpec),MeasureSpec.toString(heightMeasureSpec)));
v.measure(0, 0);
mTvHeight = v.getMeasuredHeight();
mTvWidth = v.getMeasuredWidth();
if(mDefaultTopHeight < mTvHeight)
mDefaultTopHeight = mTvHeight;
contentWidth = mTvWidth + 2*mDefaultWidth;
contentHeight = mDefaultTopHeight + mDefaultBottomHeight + mDefaultContentHeight;
if(mEnableHideContent) {
mShowControl.getLayoutParams().height = mDefaultTopHeight;
mShowControl.getLayoutParams().width = mDefaultTopHeight;
}
int childCount = count-DEFAULT_VIEWS_NUM;
View childs[] = new View[childCount];
for (int i = 0; i <childCount; i++)
childs[i] = getChildAt(i+DEFAULT_VIEWS_NUM);
for (int i=0;i <childs.length;i++) {
this.removeView(childs[i]);
mContent.addView(childs[i]);
}
LayoutParams params = getLayoutParams();
int specW = MeasureSpec.EXACTLY;
int specH = MeasureSpec.EXACTLY;
if(params.height == LayoutParams.WRAP_CONTENT)
specH = MeasureSpec.AT_MOST;
if(params.width == LayoutParams.WRAP_CONTENT)
specH = MeasureSpec.AT_MOST;
mContent.measure(MeasureSpec.makeMeasureSpec(givenWidth-2*mDefaultWidth-getPaddingLeft()-getPaddingRight(), MeasureSpec.getMode(widthMeasureSpec)),
MeasureSpec.makeMeasureSpec(givenHeight-mDefaultTopHeight-mDefaultBottomHeight-getPaddingTop()-getPaddingBottom(), MeasureSpec.getMode(heightMeasureSpec)));
contentHeight = mContent.getMeasuredHeight();
contentWidth = mContent.getMeasuredWidth();
if(mContentIsOpen) {
mCustomHeight = contentHeight;
mCustomWidth = contentWidth;
} else {
mCustomHeight = mDefaultContentHeight;
mCustomWidth = contentWidth;
}
contentHeight = mCustomHeight+ mDefaultTopHeight + mDefaultBottomHeight + getPaddingTop()+getPaddingBottom();
contentWidth = mCustomWidth + 2*mDefaultWidth+ getPaddingLeft()+getPaddingRight();
debug(getLabel()+" maxHeight:" + contentHeight + " maxWidth:" + contentWidth);
if(params.height == LayoutParams.WRAP_CONTENT)
reportHeight = contentHeight;
if(params.width == LayoutParams.WRAP_CONTENT)
reportWidth = contentWidth;
// Report our final dimensions.
setMeasuredDimension(
resolveSizeAndState(reportWidth, widthMeasureSpec, childState),
resolveSizeAndState(reportHeight, heightMeasureSpec,
childState << MEASURED_HEIGHT_STATE_SHIFT));
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
debug(String.format("%s Layout size l:%d, t:%d, r:%d, b:%d",getLabel(), l, t, r, b));
int icon_size = 0;
int width = r - l;
int height = b - t;
int ul_width, ur_width;
ul_width = ur_width = width/2;
int paddingLeft = getPaddingLeft();
int paddingRight = getPaddingRight();
int paddingTop = getPaddingTop();
int paddingBottom = getPaddingBottom();
mChildBottom = height - paddingBottom;
mChildTop = paddingTop;
mChildLeft = paddingLeft;
mChildRight = width - paddingRight;
debug(String.format("padding l:%d t:%d r:%d b:%d", paddingLeft, paddingTop, paddingRight, paddingBottom));
int childWidth = mChildRight - mChildLeft;
int childHeight = mChildBottom - mChildTop;
if(mEnableHideContent)
icon_size = mDefaultTopHeight;
// check text view wider than total width
if(mTvWidth > childWidth - 2 *(mDefaultWidth))
mTvWidth = childWidth - 2 *(mDefaultWidth);
switch (mLabelPos) {
case LABEL_MIDDLE:
ul_width = (childWidth - mTvWidth) /2+mLabelOffset;
ur_width = childWidth - ul_width -mTvWidth;
ul_width -= icon_size/2;
ur_width -= icon_size/2;
break;
case LABEL_LEFT:
ul_width = mDefaultWidth+mLabelOffset;
ur_width = childWidth -ul_width - mTvWidth - icon_size;
break;
case LABEL_RIGHT:
ur_width = mDefaultWidth+mLabelOffset;
ul_width = childWidth -ur_width - mTvWidth - icon_size;
break;
}
View v = getChildAt(INDEX_LEFT_UP);
childLayout(v, 0, mDefaultTopHeight/2, ul_width, mTvHeight);
v = getChildAt(INDEX_RIGHT_UP);
childLayout(v, childWidth - ur_width, mDefaultTopHeight/2, childWidth, mTvHeight);
v = getChildAt(INDEX_SHOW_CONTROL);
if(mEnableHideContent) {
v.setVisibility(View.VISIBLE);
childLayout(v, ul_width, 0, icon_size+ul_width, icon_size);
} else {
v.setVisibility(View.GONE);
}
v = getChildAt(INDEX_TEXT);
childLayout(v, ul_width+icon_size, 0, ul_width+mTvWidth+icon_size, mTvHeight);
if (mCustomHeight > childHeight - mDefaultBottomHeight - mDefaultTopHeight)
mCustomHeight = childHeight - mDefaultBottomHeight - mDefaultTopHeight;
if (mCustomWidth > childWidth - 2 * mDefaultWidth)
mCustomWidth = childWidth - 2 * mDefaultWidth;
View left = getChildAt(INDEX_LEFT);
childLayout(left, 0, mTvHeight, mDefaultWidth, mDefaultTopHeight + mCustomHeight);
View right = getChildAt(INDEX_RIGHT);
right.layout(childWidth - mDefaultWidth, mDefaultTopHeight, childWidth, mDefaultTopHeight
+ mCustomHeight);
childLayout(right, childWidth - mDefaultWidth, mDefaultTopHeight, childWidth, mDefaultTopHeight
+ mCustomHeight);
if(mContentIsOpen) {
// TODO this should according how many child added here
// v = getChildAt(DEFAULT_VIEWS_NUM);
v = getChildAt(INDEX_CONTENT);
if(v != null) {
childLayout(v, mDefaultWidth, mDefaultTopHeight, childWidth - mDefaultWidth,
mDefaultTopHeight + mCustomHeight);
debug(String.format(getLabel()+"content layout at l:%d t:%d, r:%d, b:%d",mDefaultWidth,mDefaultTopHeight, (r - mDefaultWidth), (mDefaultTopHeight + mCustomHeight)));
if(v.getVisibility() != View.VISIBLE) {
fadeInAnimation(v);
v.setVisibility(View.VISIBLE);
}
}
} else {
v = getChildAt(INDEX_CONTENT);
if(v != null) {
if(v.getVisibility() == View.VISIBLE)
fadeOutAnimation(v);
v.setVisibility(View.INVISIBLE);
childLayout(mTvHide, mDefaultWidth, mDefaultTopHeight-mDefaultContentHeight/2, childWidth - mDefaultWidth,
mDefaultTopHeight + mCustomHeight-mDefaultContentHeight/2);
}
}
v = getChildAt(INDEX_LEFT_BOTTOM);
childLayout(v, 0, mDefaultTopHeight + mCustomHeight, childWidth / 2, childHeight);
v = getChildAt(INDEX_RIGHT_BOTTOM);
childLayout(v, childWidth / 2, mDefaultTopHeight + mCustomHeight, childWidth, childHeight);
debug(String.format("%s bottom layout at l:%d t:%d, r:%d, b:%d",getLabel(),childWidth / 2, mDefaultTopHeight + mCustomHeight, r, b));
}
void childLayout(View v, int l, int t, int r, int b) {
v.layout(l+mChildLeft, t+mChildTop, r+mChildLeft, b+mChildTop);
}
void fadeInAnimation(final View ll) {
if(mEnableAnimation) {
AnimationSet set = new AnimationSet(true);
ScaleAnimation anim1 = new ScaleAnimation(0,1,0,1, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.01f);
anim1.setDuration(ANIMATION_DURATION);
anim1.setZAdjustment(TranslateAnimation.ZORDER_BOTTOM);
// anim1.setInterpolator(new AccelerateInterpolator());
Animation anim2 = AnimationUtils.loadAnimation(
getContext(), android.R.anim.fade_in);
anim2.setDuration(ANIMATION_DURATION);
// anim2.setInterpolator(new AccelerateInterpolator());
anim2.setAnimationListener(new AnimationListener() {
@Override
public void onAnimationEnd(Animation animation) {
}
@Override
public void onAnimationRepeat(Animation animation) {
}
@Override
public void onAnimationStart(Animation animation) {
mTvHide.setVisibility(View.GONE);
}
});
set.addAnimation(anim1);
set.addAnimation(anim2);
ll.startAnimation(set);
} else {
mTvHide.setVisibility(View.GONE);
}
}
void fadeOutAnimation(final View ll) {
if(mEnableAnimation) {
AnimationSet set = new AnimationSet(true);
ScaleAnimation anim1 = new ScaleAnimation(1,0,1,0, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.01f);
anim1.setDuration(ANIMATION_DURATION);
anim1.setZAdjustment(TranslateAnimation.ZORDER_BOTTOM);
// anim1.setInterpolator(new DecelerateInterpolator());
Animation anim2 = AnimationUtils.loadAnimation(
getContext(), android.R.anim.fade_out);
anim2.setDuration(ANIMATION_DURATION);
// anim2.setInterpolator(new DecelerateInterpolator());
set.addAnimation(anim1);
set.addAnimation(anim2);
anim1.setAnimationListener(new AnimationListener() {
@Override
public void onAnimationEnd(Animation animation) {
mTvHide.setVisibility(View.VISIBLE);
}
@Override
public void onAnimationRepeat(Animation animation) {
}
@Override
public void onAnimationStart(Animation animation) {
}
});
ll.startAnimation(set);
} else {
mTvHide.setVisibility(View.VISIBLE);
}
}
public void enableAnimation(boolean enable) {
mEnableAnimation = enable;
}
String getLabel() {
return "["+mTvLabel.getText()+"]:";
}
void debug(String msg) {
if(debug)
Log.d(TAG,msg);
}
public void showContent(boolean enable) {
mContentIsOpen = enable;
if(mShowControl != null)
mShowControl.setSelected(mContentIsOpen);
requestLayout();
}
public boolean getContentOpen() {
return mContentIsOpen;
}
public void canHideContent(boolean enable) {
mEnableHideContent = enable;
if(mEnableHideContent) {
mTvLabel.setBackgroundResource(android.R.drawable.list_selector_background);
mTvLabel.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mContentIsOpen = ! mContentIsOpen;
if(mShowControl != null)
mShowControl.setSelected(mContentIsOpen);
requestLayout();
}
});
mShowControl.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mContentIsOpen = ! mContentIsOpen;
if(mShowControl != null)
mShowControl.setSelected(mContentIsOpen);
requestLayout();
}
});
// mTvLabel.setPaintFlags(mTvLabel.getPaintFlags() | Paint.UNDERLINE_TEXT_FLAG);
} else {
mTvLabel.setOnClickListener(null);
// mTvLabel.setPaintFlags(mTvLabel.getPaintFlags() & ~Paint.UNDERLINE_TEXT_FLAG);
}
}
} |
package com.sola.instagram.model;
import java.util.List;
import org.json.JSONException;
import org.json.JSONObject;
import com.sola.instagram.model.Media;
public class VideoMedia extends Media {
/**
* Low resolution version of the media's video
*/
protected Video lowResolutionVideo;
/**
* Standard resolution version of the media's video
*/
protected Video standardResolutionVideo;
public VideoMedia(JSONObject obj, String accessToken) throws JSONException {
super(obj, accessToken);
JSONObject videos = obj.getJSONObject("videos");
this.setLowResolutionVideo(this.new Video(videos.getJSONObject("low_resolution")));
this.setStandardResolutionVideo(this.new Video(videos.getJSONObject("standard_resolution")));
}
public Video getLowResolutionVideo() {
return lowResolutionVideo;
}
protected void setLowResolutionVideo(Video lowResolutionVideo) {
this.lowResolutionVideo = lowResolutionVideo;
}
public Video getStandardResolutionVideo() {
return standardResolutionVideo;
}
protected void setStandardResolutionVideo(Video standardResolutionVideo) {
this.standardResolutionVideo = standardResolutionVideo;
}
/**
* Object for a media video
* with the JSON representation
* <pre>
* {
* "url":"",
* "width":0,
* "height":0
* }
* </pre>
* @author Sola Ogunsakin
* @version 2013-08-17
*/
public class Video {
/**
* Link to this video
*/
String uri;
/**
* Width of this video
*/
int width;
/**
* Height of this video
*/
int heigth;
/**
* Makes a new Video object out of a JSONObject
* @param obj json object used to create this video
* @throws JSONException
*/
public Video(JSONObject obj) throws JSONException {
this.setUri(obj.getString("url"));
this.setWidth(obj.getInt("width"));
this.setHeigth(obj.getInt("height"));
}
/**
* Returns the url link to this video
* @return The url link to this video
*/
public String getUri() {
return uri;
}
/**
* Sets this video's url
* @param url url for this video
*/
protected void setUri(String uri) {
this.uri = uri;
}
/**
* Returns the width of this video
* @return The width of this video
*/
public int getWidth() {
return width;
}
/**
* Sets this video's width
* @param width width of this video
*/
protected void setWidth(int width) {
this.width = width;
}
/**
* Returns the height of this video
* @return The height of this video
*/
public int getHeigth() {
return heigth;
}
/**
* Sets this video's height
* @param height height of this video
*/
protected void setHeigth(int heigth) {
this.heigth = heigth;
}
/**
* Checks if two video objects are equal
* @param o The object to be compared
* @return True of the two objects are equal, false otherwise
*/
public boolean equals(Object o) {
if(o == null) return false;
if(o == this) return true;
if(o.getClass() != this.getClass()) return false;
return ((VideoMedia.Video)o).getUri().equals(getUri());
}
}
} |
package com.bugsnag;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.bugsnag.callbacks.Callback;
import com.bugsnag.delivery.Delivery;
import com.bugsnag.delivery.HttpDelivery;
import com.bugsnag.delivery.OutputStreamDelivery;
import com.bugsnag.serialization.Serializer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public class BugsnagTest {
private Bugsnag bugsnag;
/**
* Create a new test Bugsnag client
*/
@Before
public void initBugsnag() {
bugsnag = new Bugsnag("apikey");
}
/**
* Close test Bugsnag
*/
@After
public void closeBugsnag() {
bugsnag.close();
}
@Test
public void testNoDeliveryFails() {
bugsnag.setDelivery(null);
boolean result = bugsnag.notify(new RuntimeException());
assertFalse(result);
}
@Test
public void testIgnoreClasses() {
bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
// Ignore neither
bugsnag.setIgnoreClasses();
assertTrue(bugsnag.notify(new RuntimeException()));
assertTrue(bugsnag.notify(new TestException()));
// Ignore just RuntimeException
bugsnag.setIgnoreClasses(RuntimeException.class.getName());
assertFalse(bugsnag.notify(new RuntimeException()));
assertTrue(bugsnag.notify(new TestException()));
// Ignore both
bugsnag.setIgnoreClasses(RuntimeException.class.getName(), TestException.class.getName());
assertFalse(bugsnag.notify(new RuntimeException()));
assertFalse(bugsnag.notify(new TestException()));
}
@Test
public void testNotifyReleaseStages() {
bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
bugsnag.setReleaseStage("production");
// Never send
bugsnag.setNotifyReleaseStages();
assertFalse(bugsnag.notify(new Throwable()));
// Ignore 'production'
bugsnag.setNotifyReleaseStages("staging", "development");
assertFalse(bugsnag.notify(new Throwable()));
// Allow 'production'
bugsnag.setNotifyReleaseStages("production");
assertTrue(bugsnag.notify(new Throwable()));
// Allow 'production' and others
bugsnag.setNotifyReleaseStages("production", "staging", "development");
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testProjectPackages() {
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertTrue(report.getExceptions().get(0).getStacktrace().get(0).isInProject());
}
@Override
public void close() {
}
});
bugsnag.setProjectPackages("com.bugsnag");
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testAppVersion() {
bugsnag.setAppVersion("1.2.3");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("1.2.3", report.getApp().get("version"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testAppType() {
bugsnag.setAppType("testtype");
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("testtype", report.getApp().get("type"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSeverity() {
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals(Severity.INFO.getValue(), report.getSeverity());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), Severity.INFO));
}
@Test
public void testFilters() {
bugsnag.setFilters("testfilter1", "testfilter2");
bugsnag.setDelivery(new Delivery() {
@SuppressWarnings("unchecked")
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
Map<String, Object> firstTab =
(Map<String, Object>) report.getMetaData().get("firsttab");
final Map<String, Object> secondTab =
(Map<String, Object>) report.getMetaData().get("secondtab");
assertEquals("[FILTERED]", firstTab.get("testfilter1"));
assertEquals("[FILTERED]", firstTab.get("testfilter2"));
assertEquals("secretpassword", firstTab.get("testfilter3"));
assertEquals("[FILTERED]", secondTab.get("testfilter1"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), new Callback() {
@Override
public void beforeNotify(Report report) {
report.addToTab("firsttab", "testfilter1", "secretpassword");
report.addToTab("firsttab", "testfilter2", "secretpassword");
report.addToTab("firsttab", "testfilter3", "secretpassword");
report.addToTab("secondtab", "testfilter1", "secretpassword");
}
}));
}
@Test
public void testFilterHeaders() {
bugsnag.setDelivery(new Delivery() {
@SuppressWarnings("unchecked")
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
Map<String, Object> requestTab =
(Map<String, Object>) report.getMetaData().get("request");
Map<String, Object> headersMap =
(Map<String, Object>) requestTab.get("headers");
assertEquals("[FILTERED]", headersMap.get("Authorization"));
assertEquals("User:Password", headersMap.get("authorization"));
assertEquals("[FILTERED]", headersMap.get("Cookie"));
assertEquals("123456ABCDEF", headersMap.get("cookie"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), new Callback() {
@Override
public void beforeNotify(Report report) {
Map<String, String> headers = new HashMap<String, String>();
headers.put("Authorization", "User:Password");
headers.put("authorization", "User:Password");
headers.put("Cookie", "123456ABCDEF");
headers.put("cookie", "123456ABCDEF");
report.addToTab("request", "headers", headers);
}
}));
}
@Test
public void testUser() {
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("123", report.getUser().get("id"));
assertEquals("test@example.com", report.getUser().get("email"));
assertEquals("test name", report.getUser().get("name"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), new Callback() {
@Override
public void beforeNotify(Report report) {
report.setUser("123", "test@example.com", "test name");
}
}));
}
@Test
public void testContext() {
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setContext("the context");
}
});
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("the context", report.getContext());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testGroupingHash() {
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setGroupingHash("the grouping hash");
}
});
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("the grouping hash", report.getGroupingHash());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSingleCallback() {
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setApiKey("newapikey");
}
});
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("newapikey", report.getApiKey());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSingleCallbackInNotify() {
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("newapikey", report.getApiKey());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable(), new Callback() {
@Override
public void beforeNotify(Report report) {
report.setApiKey("newapikey");
}
}));
}
@Test
public void testCallbackOrder() {
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setApiKey("newapikey");
}
});
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.setApiKey("secondnewapikey");
}
});
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertEquals("secondnewapikey", report.getApiKey());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testCallbackCancel() {
bugsnag.setDelivery(BugsnagTestUtils.generateDelivery());
bugsnag.addCallback(new Callback() {
@Override
public void beforeNotify(Report report) {
report.cancel();
}
});
// Test the report is not sent
assertFalse(bugsnag.notify(new Throwable()));
}
@SuppressWarnings("deprecation") // ensures deprecated setEndpoint method still works correctly
@Test
public void testEndpoint() {
bugsnag.setDelivery(new HttpDelivery() {
String endpoint;
@Override
public void setEndpoint(String endpoint) {
this.endpoint = endpoint;
}
@Override
public void setTimeout(int timeout) {
}
@Override
public void setProxy(Proxy proxy) {
}
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
assertEquals("https:
}
@Override
public void close() {
}
});
bugsnag.setEndpoints("https:
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testProxy() {
bugsnag.setDelivery(new HttpDelivery() {
Proxy proxy;
@Override
public void setEndpoint(String endpoint) {
}
@Override
public void setTimeout(int timeout) {
}
@Override
public void setProxy(Proxy proxy) {
this.proxy = proxy;
}
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
assertEquals("/127.0.0.1:8080", proxy.address().toString());
}
@Override
public void close() {
}
});
Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", 8080));
bugsnag.setProxy(proxy);
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testSendThreads() {
bugsnag.setSendThreads(true);
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
// There is information about at least one thread
assertTrue(report.getThreads().size() > 0);
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testHandledIncrementNoSession() {
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
assertNull(report.getSession());
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testHandledIncrementWithSession() {
bugsnag.startSession();
bugsnag.setDelivery(new Delivery() {
@Override
public void deliver(Serializer serializer, Object object, Map<String, String> headers) {
Report report = ((Notification) object).getEvents().get(0);
Map<String, Object> session = report.getSession();
assertNotNull(session);
@SuppressWarnings("unchecked")
Map<String, Object> handledCounts = (Map<String, Object>) session.get("events");
assertEquals(1, handledCounts.get("handled"));
assertEquals(0, handledCounts.get("unhandled"));
}
@Override
public void close() {
}
});
assertTrue(bugsnag.notify(new Throwable()));
}
@Test
public void testMultipleHandledIncrementWithSession() {
bugsnag.startSession();
StubNotificationDelivery testDelivery = new StubNotificationDelivery();
bugsnag.setDelivery(testDelivery);
assertTrue(bugsnag.notify(new Throwable()));
assertTrue(bugsnag.notify(new Throwable()));
assertTrue(bugsnag.notify(new Throwable()));
for (int i = 0 ; i < testDelivery.getNotifications().size(); i++) {
Report report = testDelivery.getNotifications().get(i).getEvents().get(0);
Map<String, Object> session = report.getSession();
assertNotNull(session);
@SuppressWarnings("unchecked")
Map<String, Object> handledCounts = (Map<String, Object>) session.get("events");
assertEquals(i + 1, handledCounts.get("handled"));
assertEquals(0, handledCounts.get("unhandled"));
}
}
@Test
public void testSerialization() {
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
bugsnag.setDelivery(new OutputStreamDelivery(byteStream));
bugsnag.notify(new RuntimeException());
// Exact content will vary with stacktrace so just check for some content
assertTrue(new String(byteStream.toByteArray()).length() > 0);
}
@Test(expected = UnsupportedOperationException.class)
public void testUncaughtHandlerModification() {
Set<Bugsnag> bugsnags = Bugsnag.uncaughtExceptionClients();
bugsnags.clear();
}
// Test exception class
private class TestException extends RuntimeException {
private static final long serialVersionUID = -458298914160798211L;
}
} |
package com.telmomenezes.jfastemd;
public class Feature2D implements Feature {
private double x;
private double y;
public Feature2D(double x, double y) {
this.x = x;
this.y = y;
}
public double groundDist(Feature f) {
Feature2D f2d = (Feature2D)f;
double deltaX = x - f2d.x;
double deltaY = y - f2d.y;
return Math.sqrt((deltaX * deltaX) + (deltaY * deltaY));
}
} |
package com.thefloow.challenge;
import java.util.Properties;
public class TextAnalyzer {

    /**
     * Entry point. Currently a stub: it builds a fair semaphore holding ten
     * permits and uninterruptibly acquires one permit before returning.
     *
     * @param args command line arguments (unused)
     */
    public static void main(String[] args) {
        // TODO real text-analysis implementation still pending
        java.util.concurrent.Semaphore permits =
                new java.util.concurrent.Semaphore(10, true);
        permits.acquireUninterruptibly();
    }
}
package com.worizon.jsonrpc;
/**
* High level exception of a remote procedure call. Error codes specified at the JSONRPC spec are mapped as
* a JsonRpcException. Error codes different from the spec are mapped and thrown as a RemoteException.
*
* @author Enric Cecilla
* @since 1.0.0
*/
@SuppressWarnings("serial")
public class RemoteException extends RuntimeException {
private JsonRpcError error = null;
public RemoteException(){
super();
}
public RemoteException( JsonRpcError error ){
super(error.toString());
this.error = error;
}
public RemoteException( String message ){
super( message );
}
public RemoteException( String message, Throwable cause ){
super( message, cause );
}
public RemoteException( Throwable cause ){
super( cause );
}
public int getCode(){
return error.getCode();
}
@Override
public String getMessage(){
return error.getMessage();
}
} |
package com4j.util;
import com4j.Com4jObject;
import com4j.ComObjectListener;
import java.util.Map;
import java.util.WeakHashMap;
/**
* {@link ComObjectListener} implementation that collects all
* newly created {@link Com4jObject}s
*
* <p>
* The intended use of this class is to record objects created
* in a certain block and then dispose them all (except a few marked explicitly)
* at once at some later point.
*
* <p>
* See the following code example for a typical usage:
* <pre>
* <pre class=code>
void foo() {
// we will start using COM objects.
// so we'll register the listener and start keeping
// track of COM objects we create.
ComObjectCollector col = new ComObjectCollector();
COM4J.addListener(col);
try {
// use COM objects as much as you want
IFoo foo = doALotOfComStuff();
// do this to avoid COM objects from disposed by the disposeAll method.
col.remove(foo);
} finally {
// dispose all the COM objects created in this thread
// since the listener is registered.
// But "foo" won't be disposed because of the remove method.
col.disposeAll();
// make sure to remove the listener
COM4J.removeListener(col);
}
}
*</pre>
*
* @author Kohsuke Kawaguchi (kk@kohsuke.org)
*/
public class ComObjectCollector implements ComObjectListener {
protected final Map<Com4jObject,Object> objects = new WeakHashMap<Com4jObject,Object>();
public void onNewObject(Com4jObject obj) {
objects.put(obj,null);
}
/**
* Removes the given object from the list of {@link Com4jObject}s that
* this class keeps.
*
* <p>
* If the application knows certain {@link Com4jObject} needs to live after
* the {@link #disposeAll()} method, this method can be called to avoid the object
* from being disposed.
*
* <p>
* If the object passed in is not known to this {@link ComObjectCollector},
* it is a no-op.
*/
public void remove(Com4jObject obj) {
objects.remove(obj);
}
/**
* Calls the {@link Com4jObject#dispose()} method for all the {@link Com4jObject}s
* known to this {@link ComObjectCollector}.
*
* <p>
* Each time this method is called, it forgets all the disposed objects.
*/
public void disposeAll() {
for( Com4jObject o : objects.keySet() )
o.dispose();
objects.clear();
}
} |
package me.nallar.patched;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import net.minecraftforge.event.Cancelable;
import net.minecraftforge.event.Event;
public abstract class PatchEvent extends Event {
    // Cache of "is this event class cancelable?" answers, keyed by the event
    // class only. Class -> boolean instead of Class -> (Class -> boolean)
    // because forge ignores the annotation type (see hasAnnotation below).
    private static Map<Class, Boolean> annotationMap;

    // NOTE(review): presumably invoked by the patching framework instead of a
    // static initializer — if it is never called, annotationMap stays null and
    // hasAnnotation throws NullPointerException. Confirm the call site.
    public static void staticConstruct() {
        annotationMap = new ConcurrentHashMap<Class, Boolean>();
    }

    // Answers whether this event's class hierarchy carries the annotation;
    // delegates to the cached static lookup.
    @Override
    protected boolean hasAnnotation(Class annotation) {
        return hasAnnotation(annotation, this.getClass());
    }

    // The 'annotation' parameter is deliberately ignored: only @Cancelable is
    // ever checked, replicating Forge's behaviour (see inline comment below).
    private static boolean hasAnnotation(Class annotation, Class cls) {
        // Fast path: result already computed for this event class.
        Boolean cachedResult = annotationMap.get(cls);
        if (cachedResult != null) {
            return cachedResult;
        }
        // Walk the class hierarchy up to (but excluding) Event, looking for
        // @Cancelable, and cache the outcome for the original class.
        Class searchClass = cls;
        while (searchClass != Event.class) {
            if (searchClass.isAnnotationPresent(Cancelable.class)) // Forge bug, not fixed. Buggy behaviour may be required for some mods.
            {
                annotationMap.put(cls, true);
                return true;
            }
            searchClass = searchClass.getSuperclass();
        }
        annotationMap.put(cls, false);
        return false;
    }
}
package powell.cellarium;
import powell.cellarium.block.BlockBlood;
import powell.cellarium.block.BlockBoilingBlood;
import powell.cellarium.block.BlockTearsOfChildren;
import powell.cellarium.block.BloodOre;
import powell.cellarium.block.BloodStone;
import powell.cellarium.block.PresenceOfPowell;
import powell.cellarium.fluids.Blood;
import powell.cellarium.fluids.BoilingBlood;
import powell.cellarium.fluids.TearsOfChildren;
import powell.cellarium.item.BloodIngot;
import powell.cellarium.item.BucketOfBlood;
import powell.cellarium.item.BucketOfBoilingBlood;
import powell.cellarium.item.BucketOfTears;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.FurnaceRecipes;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fluids.BlockFluidClassic;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidContainerRegistry;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fluids.ItemFluidContainer;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.Mod.EventHandler; // used in 1.6.2
//import cpw.mods.fml.common.Mod.PreInit; // used in 1.5.2
//import cpw.mods.fml.common.Mod.Init; // used in 1.5.2
//import cpw.mods.fml.common.Mod.PostInit; // used in 1.5.2
import cpw.mods.fml.common.Mod.Instance;
import cpw.mods.fml.common.SidedProxy;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.network.NetworkMod;
import cpw.mods.fml.common.registry.GameRegistry;
import cpw.mods.fml.common.registry.LanguageRegistry;
@Mod(modid="CellariumID", name="Cellarium", version="0.0.0")
@NetworkMod(clientSideRequired=true)
public class Cellarium
{
    //public static boolean displayclear = true;

    //Custom Fluids/Blocks/Items
    public static Fluid tearsOfChildren;
    public static Fluid blood;
    public static Fluid boilingBlood;
    // Fluid-source blocks backing the three custom fluids above.
    public static BlockFluidClassic blockTearsOfChildren;
    public static BlockFluidClassic blockBlood;
    public static BlockFluidClassic blockBoilingBlood;
    public static Block genericDirt;
    public static Block bloodStone;
    public static Block bloodOre;
    public static Block presenceOfPowell;
    // Bucket items for the custom fluids.
    public static ItemFluidContainer bucketOfTears;
    public static ItemFluidContainer bucketOfBlood;
    public static ItemFluidContainer bucketOfBoilingBlood;
    public static Item bloodIngot;
    //end Custom

    // The instance of your mod that Forge uses.
    @Instance(value = "CellariumID")
    public static Cellarium instance;

    //trying custom tab
    // Creative-mode tab for all Cellarium content; icon is an Eye of Ender.
    public static CreativeTabs tabCellarium = new CreativeTabs("tabCellarium") {
        public ItemStack getIconItemStack() {
            return new ItemStack(Item.eyeOfEnder, 1, 0);
        }
    };
    //end tab

    // Says where the client and server 'proxy' code is loaded.
    @SidedProxy(clientSide="powell.cellarium.client.ClientProxy", serverSide="powell.cellarium.CommonProxy")
    public static CommonProxy proxy;

    /**
     * Pre-initialization: constructs all fluids, blocks and items.
     * Fluids are created before their blocks since the fluid blocks wrap them.
     * NOTE(review): the numeric arguments (777, 775, 773, 500, 790-792, 778,
     * 776, 774, 7777) appear to be hard-coded block/item IDs — confirm they do
     * not collide with other mods.
     */
    @EventHandler // used in 1.6.2
    //@PreInit // used in 1.5.2
    public void preInit(FMLPreInitializationEvent event)
    {
        // Stub Method
        //Create Fluids First
        tearsOfChildren = new TearsOfChildren();
        blood = new Blood();
        boilingBlood = new BoilingBlood();
        //end Fluids
        //create Blocks
        blockTearsOfChildren = (BlockFluidClassic) new BlockTearsOfChildren(777).setUnlocalizedName("blockTearsOfChildren").setCreativeTab(tabCellarium);
        blockBlood = (BlockFluidClassic) new BlockBlood(775).setUnlocalizedName("blockBlood").setCreativeTab(tabCellarium);
        blockBoilingBlood = (BlockFluidClassic) new BlockBoilingBlood(773).setUnlocalizedName("blockBoilingBlood").setCreativeTab(tabCellarium);
        genericDirt = new GenericBlock(500, Material.ground)
                .setHardness(0.5F).setStepSound(Block.soundGravelFootstep)
                .setUnlocalizedName("genericDirt").setCreativeTab(tabCellarium);
        presenceOfPowell = new PresenceOfPowell(790);
        bloodStone = new BloodStone(791);
        bloodOre = new BloodOre(792);
        //end Blocks
        //create Items
        // Each bucket takes (itemID, containedFluidBlockID).
        bucketOfTears = (ItemFluidContainer) new BucketOfTears(778,777).setUnlocalizedName("bucketOfTears").setCreativeTab(tabCellarium);
        bucketOfBlood = (ItemFluidContainer) new BucketOfBlood(776,775).setUnlocalizedName("bucketOfBlood").setCreativeTab(tabCellarium);
        bucketOfBoilingBlood = (ItemFluidContainer) new BucketOfBoilingBlood(774,773).setUnlocalizedName("bucketOfBoilingBlood").setCreativeTab(tabCellarium);
        bloodIngot = new BloodIngot(7777);
        //end Items
    }

    /**
     * Initialization: registers renderers, then registers every block/item
     * with the GameRegistry along with display names, harvest levels and the
     * blood-ore smelting recipe.
     */
    @EventHandler // used in 1.6.2
    //@Init // used in 1.5.2
    public void load(FMLInitializationEvent event)
    {
        proxy.registerRenderers();
        //Sets name for Custom Tab
        LanguageRegistry.instance().addStringLocalization("itemGroup.tabCellarium", "en_US", "Cellarium");
        //LanguageRegistry.addName(tearsOfChildren, "Tears of Children"); //seems to cause a NullPointer Exception
        LanguageRegistry.addName(blockTearsOfChildren, "Tears Of Children");
        LanguageRegistry.addName(blockBlood, "Blood");
        LanguageRegistry.addName(blockBoilingBlood, "Boiling Blood");
        GameRegistry.registerBlock(blockTearsOfChildren, "blockTearsOfChildren");
        GameRegistry.registerBlock(blockBlood, "blockBlood");
        GameRegistry.registerBlock(blockBoilingBlood,"blockBoilingBlood");
        LanguageRegistry.addName(genericDirt, "Generic Dirt");
        MinecraftForge.setBlockHarvestLevel(genericDirt, "shovel", 0);
        GameRegistry.registerBlock(genericDirt, "genericDirt");
        GameRegistry.registerItem(bucketOfTears, "bucketOfTears");
        LanguageRegistry.addName(bucketOfTears, "Bucket of Tears");
        GameRegistry.registerItem(bucketOfBlood, "bucketOfBlood");
        LanguageRegistry.addName(bucketOfBlood, "Bucket Of Blood");
        GameRegistry.registerItem(bucketOfBoilingBlood, "bucketOfBoilingBlood");
        LanguageRegistry.addName(bucketOfBoilingBlood, "Bucket of Boiling Blood");
        GameRegistry.registerBlock(presenceOfPowell, "presenceOfPowell");
        LanguageRegistry.addName(presenceOfPowell, "Presence of Powell");
        GameRegistry.registerBlock(bloodStone,"bloodStone");
        LanguageRegistry.addName(bloodStone, "Blood Stone");
        MinecraftForge.setBlockHarvestLevel(bloodStone, "pickaxe", 3);
        GameRegistry.registerBlock(bloodOre, "bloodOre");
        LanguageRegistry.addName(bloodOre, "Blood Ore");
        MinecraftForge.setBlockHarvestLevel(bloodOre, "pickaxe", 3);
        GameRegistry.registerItem(bloodIngot, "bloodIngot");
        LanguageRegistry.addName(bloodIngot, "Blood Ingot");
        // Smelting blood ore yields one blood ingot with 1.0 XP.
        FurnaceRecipes.smelting().addSmelting(bloodOre.blockID, new ItemStack(bloodIngot), 1.0f);
        //FluidContainerRegistry.registerFluidContainer(FluidRegistry.getFluidStack(Cellarium.tearsOfChildren.getUnlocalizedName(), FluidContainerRegistry.BUCKET_VOLUME), new ItemStack(bucketOfTears), new ItemStack(Item.bucketEmpty));
    }

    /**
     * Post-initialization hook; currently empty.
     */
    @EventHandler // used in 1.6.2
    //@PostInit // used in 1.5.2
    public void postInit(FMLPostInitializationEvent event)
    {
        // Stub Method
    }
}
package org.atlasapi.query;
import static org.atlasapi.annotation.Annotation.AVAILABLE_LOCATIONS;
import static org.atlasapi.annotation.Annotation.BRAND_REFERENCE;
import static org.atlasapi.annotation.Annotation.BRAND_SUMMARY;
import static org.atlasapi.annotation.Annotation.BROADCASTS;
import static org.atlasapi.annotation.Annotation.CHANNEL;
import static org.atlasapi.annotation.Annotation.CHANNELS;
import static org.atlasapi.annotation.Annotation.CHANNEL_GROUP;
import static org.atlasapi.annotation.Annotation.CHANNEL_GROUPS;
import static org.atlasapi.annotation.Annotation.CHANNEL_SUMMARY;
import static org.atlasapi.annotation.Annotation.CLIPS;
import static org.atlasapi.annotation.Annotation.CONTENT_DETAIL;
import static org.atlasapi.annotation.Annotation.CONTENT_SUMMARY;
import static org.atlasapi.annotation.Annotation.DESCRIPTION;
import static org.atlasapi.annotation.Annotation.EXTENDED_DESCRIPTION;
import static org.atlasapi.annotation.Annotation.EXTENDED_ID;
import static org.atlasapi.annotation.Annotation.FIRST_BROADCASTS;
import static org.atlasapi.annotation.Annotation.ID;
import static org.atlasapi.annotation.Annotation.ID_SUMMARY;
import static org.atlasapi.annotation.Annotation.IMAGES;
import static org.atlasapi.annotation.Annotation.KEY_PHRASES;
import static org.atlasapi.annotation.Annotation.LOCATIONS;
import static org.atlasapi.annotation.Annotation.META_ENDPOINT;
import static org.atlasapi.annotation.Annotation.META_MODEL;
import static org.atlasapi.annotation.Annotation.NEXT_BROADCASTS;
import static org.atlasapi.annotation.Annotation.PARENT;
import static org.atlasapi.annotation.Annotation.PEOPLE;
import static org.atlasapi.annotation.Annotation.PLATFORM;
import static org.atlasapi.annotation.Annotation.REGIONS;
import static org.atlasapi.annotation.Annotation.RELATED_LINKS;
import static org.atlasapi.annotation.Annotation.SEGMENT_EVENTS;
import static org.atlasapi.annotation.Annotation.SERIES_REFERENCE;
import static org.atlasapi.annotation.Annotation.SERIES_SUMMARY;
import static org.atlasapi.annotation.Annotation.SUB_ITEMS;
import static org.atlasapi.annotation.Annotation.TOPICS;
import static org.atlasapi.annotation.Annotation.VARIATIONS;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import org.atlasapi.AtlasPersistenceModule;
import org.atlasapi.annotation.Annotation;
import org.atlasapi.application.auth.ApplicationSourcesFetcher;
import org.atlasapi.application.auth.UserFetcher;
import org.atlasapi.channel.Channel;
import org.atlasapi.channel.ChannelGroup;
import org.atlasapi.channel.ChannelGroupResolver;
import org.atlasapi.channel.ChannelResolver;
import org.atlasapi.content.Content;
import org.atlasapi.content.ContentType;
import org.atlasapi.content.ItemAndBroadcast;
import org.atlasapi.content.MediaType;
import org.atlasapi.criteria.attribute.Attributes;
import org.atlasapi.generation.EndpointClassInfoSingletonStore;
import org.atlasapi.generation.ModelClassInfoSingletonStore;
import org.atlasapi.generation.model.EndpointClassInfo;
import org.atlasapi.generation.model.ModelClassInfo;
import org.atlasapi.output.AnnotationRegistry;
import org.atlasapi.output.EntityListWriter;
import org.atlasapi.output.QueryResultWriter;
import org.atlasapi.output.ScrubbablesSegmentRelatedLinkMerger;
import org.atlasapi.output.SegmentRelatedLinkMergingFetcher;
import org.atlasapi.output.annotation.AvailableLocationsAnnotation;
import org.atlasapi.output.annotation.BrandReferenceAnnotation;
import org.atlasapi.output.annotation.BrandSummaryAnnotation;
import org.atlasapi.output.annotation.BroadcastsAnnotation;
import org.atlasapi.output.annotation.ChannelAnnotation;
import org.atlasapi.output.annotation.ChannelGroupAnnotation;
import org.atlasapi.output.annotation.ChannelGroupChannelsAnnotation;
import org.atlasapi.output.annotation.ChannelGroupMembershipAnnotation;
import org.atlasapi.output.annotation.ChannelGroupMembershipListWriter;
import org.atlasapi.output.annotation.ChannelVariationAnnotation;
import org.atlasapi.output.annotation.LegacyChannelAnnotation;
import org.atlasapi.output.annotation.ChannelSummaryWriter;
import org.atlasapi.output.annotation.ChannelsAnnotation;
import org.atlasapi.output.annotation.ClipsAnnotation;
import org.atlasapi.output.annotation.ContentDescriptionAnnotation;
import org.atlasapi.output.annotation.DescriptionAnnotation;
import org.atlasapi.output.annotation.EndpointInfoAnnotation;
import org.atlasapi.output.annotation.ExtendedDescriptionAnnotation;
import org.atlasapi.output.annotation.ExtendedIdentificationAnnotation;
import org.atlasapi.output.annotation.FirstBroadcastAnnotation;
import org.atlasapi.output.annotation.IdentificationAnnotation;
import org.atlasapi.output.annotation.IdentificationSummaryAnnotation;
import org.atlasapi.output.annotation.KeyPhrasesAnnotation;
import org.atlasapi.output.annotation.LocationsAnnotation;
import org.atlasapi.output.annotation.ModelInfoAnnotation;
import org.atlasapi.output.annotation.NextBroadcastAnnotation;
import org.atlasapi.output.annotation.NullWriter;
import org.atlasapi.output.annotation.ParentChannelAnnotation;
import org.atlasapi.output.annotation.PeopleAnnotation;
import org.atlasapi.output.annotation.PlatformAnnotation;
import org.atlasapi.output.annotation.RegionsAnnotation;
import org.atlasapi.output.annotation.RelatedLinksAnnotation;
import org.atlasapi.output.annotation.SegmentEventsAnnotation;
import org.atlasapi.output.annotation.SeriesReferenceAnnotation;
import org.atlasapi.output.annotation.SeriesSummaryAnnotation;
import org.atlasapi.output.annotation.SubItemAnnotation;
import org.atlasapi.output.annotation.TopicsAnnotation;
import org.atlasapi.output.writers.BroadcastWriter;
import org.atlasapi.persistence.output.MongoContainerSummaryResolver;
import org.atlasapi.persistence.output.MongoRecentlyBroadcastChildrenResolver;
import org.atlasapi.persistence.output.MongoUpcomingItemsResolver;
import org.atlasapi.persistence.output.RecentlyBroadcastChildrenResolver;
import org.atlasapi.persistence.output.UpcomingItemsResolver;
import org.atlasapi.query.annotation.AnnotationIndex;
import org.atlasapi.query.annotation.ImagesAnnotation;
import org.atlasapi.query.annotation.ResourceAnnotationIndex;
import org.atlasapi.query.common.AttributeCoercers;
import org.atlasapi.query.common.ContextualQueryContextParser;
import org.atlasapi.query.common.ContextualQueryParser;
import org.atlasapi.query.common.IndexAnnotationsExtractor;
import org.atlasapi.query.common.IndexContextualAnnotationsExtractor;
import org.atlasapi.query.common.QueryAtomParser;
import org.atlasapi.query.common.QueryAttributeParser;
import org.atlasapi.query.common.QueryContextParser;
import org.atlasapi.query.common.QueryExecutor;
import org.atlasapi.query.common.QueryParser;
import org.atlasapi.query.common.Resource;
import org.atlasapi.query.common.StandardQueryParser;
import org.atlasapi.query.v4.channel.ChannelController;
import org.atlasapi.query.v4.channel.ChannelListWriter;
import org.atlasapi.query.v4.channel.ChannelQueryResultWriter;
import org.atlasapi.query.v4.channelgroup.ChannelGroupChannelWriter;
import org.atlasapi.query.v4.channelgroup.ChannelGroupController;
import org.atlasapi.query.v4.channelgroup.ChannelGroupListWriter;
import org.atlasapi.query.v4.channelgroup.ChannelGroupQueryResultWriter;
import org.atlasapi.query.v4.content.ContentController;
import org.atlasapi.query.v4.meta.LinkCreator;
import org.atlasapi.query.v4.meta.MetaApiLinkCreator;
import org.atlasapi.query.v4.meta.endpoint.EndpointController;
import org.atlasapi.query.v4.meta.endpoint.EndpointInfoListWriter;
import org.atlasapi.query.v4.meta.endpoint.EndpointInfoQueryResultWriter;
import org.atlasapi.query.v4.meta.model.ModelController;
import org.atlasapi.query.v4.meta.model.ModelInfoListWriter;
import org.atlasapi.query.v4.meta.model.ModelInfoQueryResultWriter;
import org.atlasapi.query.v4.schedule.LegacyChannelListWriter;
import org.atlasapi.query.v4.schedule.ContentListWriter;
import org.atlasapi.query.v4.schedule.ScheduleController;
import org.atlasapi.query.v4.schedule.ScheduleEntryListWriter;
import org.atlasapi.query.v4.schedule.ScheduleListWriter;
import org.atlasapi.query.v4.schedule.ScheduleQueryResultWriter;
import org.atlasapi.query.v4.search.ContentQueryResultWriter;
import org.atlasapi.query.v4.search.SearchController;
import org.atlasapi.query.v4.topic.PopularTopicController;
import org.atlasapi.query.v4.topic.TopicContentController;
import org.atlasapi.query.v4.topic.TopicContentResultWriter;
import org.atlasapi.query.v4.topic.TopicController;
import org.atlasapi.query.v4.topic.TopicListWriter;
import org.atlasapi.query.v4.topic.TopicQueryResultWriter;
import org.atlasapi.search.SearchResolver;
import org.atlasapi.source.Sources;
import org.atlasapi.topic.PopularTopicIndex;
import org.atlasapi.topic.Topic;
import org.atlasapi.topic.TopicResolver;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.metabroadcast.common.ids.NumberToShortStringCodec;
import com.metabroadcast.common.ids.SubstitutionTableNumberCodec;
import com.metabroadcast.common.persistence.mongo.DatabasedMongo;
import com.metabroadcast.common.query.Selection;
import com.metabroadcast.common.query.Selection.SelectionBuilder;
import com.metabroadcast.common.time.SystemClock;
/**
 * Spring configuration for the v4 HTTP query layer.
 *
 * Wires the controllers (content, topic, channel, channel group, schedule,
 * search, meta-api) together with their query parsers, annotation indices
 * and entity/list writers. Query execution itself is delegated to beans
 * imported from {@link QueryModule}.
 */
@Configuration
@Import({QueryModule.class})
public class QueryWebModule {

    // Externalised configuration values.
    private @Value("${local.host.name}") String localHostName;
    private @Value("${atlas.uri}") String baseAtlasUri;

    // Collaborators provided by other configuration classes.
    private @Autowired DatabasedMongo mongo;
    private @Autowired QueryModule queryModule;
    private @Autowired org.atlasapi.media.channel.ChannelResolver legacyChannelResolver;
    private @Autowired SearchResolver v4SearchResolver;
    private @Autowired TopicResolver topicResolver;
    private @Autowired PopularTopicIndex popularTopicIndex;
    private @Autowired UserFetcher userFetcher;
    private @Autowired ApplicationSourcesFetcher configFetcher;
    private @Autowired AtlasPersistenceModule persistenceModule;
    private @Autowired QueryExecutor<Channel> channelQueryExecutor;
    private @Autowired ChannelResolver channelResolver;
    private @Autowired QueryExecutor<ChannelGroup> channelGroupQueryExecutor;
    private @Autowired ChannelGroupResolver channelGroupResolver;

    /** Codec translating numeric ids to/from their short lower-case string form. */
    @Bean NumberToShortStringCodec idCodec() {
        return SubstitutionTableNumberCodec.lowerCaseOnly();
    }

    /** Pagination defaults: 50 results per page, hard cap of 100. */
    @Bean SelectionBuilder selectionBuilder() {
        return Selection.builder().withDefaultLimit(50).withMaxLimit(100);
    }

    /**
     * Schedule endpoint: writes channel + (item, broadcast) entries, with
     * annotations resolved against a combined channel/content index.
     */
    @Bean
    ScheduleController v4ScheduleController() {
        EntityListWriter<ItemAndBroadcast> entryListWriter =
                new ScheduleEntryListWriter(contentListWriter(), new BroadcastWriter("broadcasts", idCodec()));
        ScheduleListWriter scheduleWriter = new ScheduleListWriter(legacyChannelListWriter(), entryListWriter);
        return new ScheduleController(queryModule.equivalentScheduleStoreScheduleQueryExecutor(),
                configFetcher, new ScheduleQueryResultWriter(scheduleWriter),
                new IndexContextualAnnotationsExtractor(ResourceAnnotationIndex.combination()
                        .addExplicitSingleContext(channelAnnotationIndex())
                        .addExplicitListContext(contentAnnotationIndex())
                        .combine()));
    }

    /** Topic endpoint. */
    @Bean
    TopicController v4TopicController() {
        return new TopicController(topicQueryParser(),
                queryModule.topicQueryExecutor(), new TopicQueryResultWriter(topicListWriter()));
    }

    /** Content endpoint. */
    @Bean
    ContentController contentController() {
        return new ContentController(contentQueryParser(),
                queryModule.contentQueryExecutor(), new ContentQueryResultWriter(contentListWriter()));
    }

    /**
     * Content-for-topic endpoint: a contextual query with the topic as the
     * explicit single context and content as the implicit list resource.
     */
    @Bean
    TopicContentController topicContentController() {
        ContextualQueryContextParser contextParser = new ContextualQueryContextParser(configFetcher,
                userFetcher,
                new IndexContextualAnnotationsExtractor(ResourceAnnotationIndex.combination()
                        .addImplicitListContext(contentAnnotationIndex())
                        .addExplicitSingleContext(topicAnnotationIndex())
                        .combine()
                ), selectionBuilder());

        ContextualQueryParser<Topic, Content> parser = new ContextualQueryParser<Topic, Content>(
                Resource.TOPIC, Attributes.TOPIC_ID, Resource.CONTENT, idCodec(),
                contentQueryAttributeParser(),
                contextParser);

        return new TopicContentController(parser, queryModule.topicContentQueryExecutor(),
                new TopicContentResultWriter(topicListWriter(), contentListWriter()));
    }

    /** Builds meta-api links rooted at the configured Atlas base URI. */
    @Bean
    LinkCreator linkCreator() {
        return new MetaApiLinkCreator(baseAtlasUri);
    }

    /** Meta-api endpoint listing model class info from the generated singleton store. */
    @Bean
    ModelController modelController() {
        QueryResultWriter<ModelClassInfo> resultWriter = new ModelInfoQueryResultWriter(modelListWriter());
        ContextualQueryContextParser contextParser = new ContextualQueryContextParser(
                configFetcher,
                userFetcher,
                new IndexContextualAnnotationsExtractor(ResourceAnnotationIndex.combination()
                        .addImplicitListContext(modelInfoAnnotationIndex())
                        .combine()
                ),
                selectionBuilder()
        );
        return new ModelController(ModelClassInfoSingletonStore.INSTANCE, resultWriter, contextParser);
    }

    /** Meta-api endpoint listing endpoint class info from the generated singleton store. */
    @Bean
    EndpointController endpointController() {
        QueryResultWriter<EndpointClassInfo> resultWriter = new EndpointInfoQueryResultWriter(endpointListWriter());
        ContextualQueryContextParser contextParser = new ContextualQueryContextParser(
                configFetcher,
                userFetcher,
                new IndexContextualAnnotationsExtractor(ResourceAnnotationIndex.combination()
                        .addImplicitListContext(endpointInfoAnnotationIndex())
                        .combine()
                ),
                selectionBuilder()
        );
        return new EndpointController(EndpointClassInfoSingletonStore.INSTANCE, resultWriter, contextParser);
    }

    /** Attribute grammar for content queries: id, type, source, aliases and topic filters. */
    private QueryAttributeParser contentQueryAttributeParser() {
        return new QueryAttributeParser(ImmutableList.of(
                QueryAtomParser.valueOf(Attributes.ID, AttributeCoercers.idCoercer(idCodec())),
                QueryAtomParser.valueOf(Attributes.CONTENT_TYPE, AttributeCoercers.enumCoercer(ContentType.fromKey())),
                QueryAtomParser.valueOf(Attributes.SOURCE, AttributeCoercers.enumCoercer(Sources.fromKey())),
                QueryAtomParser.valueOf(Attributes.ALIASES_NAMESPACE, AttributeCoercers.stringCoercer()),
                QueryAtomParser.valueOf(Attributes.ALIASES_VALUE, AttributeCoercers.stringCoercer()),
                QueryAtomParser.valueOf(Attributes.TOPIC_ID, AttributeCoercers.idCoercer(idCodec())),
                QueryAtomParser.valueOf(Attributes.TOPIC_RELATIONSHIP, AttributeCoercers.stringCoercer()),
                QueryAtomParser.valueOf(Attributes.TOPIC_SUPERVISED, AttributeCoercers.booleanCoercer()),
                QueryAtomParser.valueOf(Attributes.TOPIC_WEIGHTING, AttributeCoercers.floatCoercer())
        ));
    }

    private StandardQueryParser<Content> contentQueryParser() {
        QueryContextParser contextParser = new QueryContextParser(configFetcher,
                userFetcher,
                new IndexAnnotationsExtractor(contentAnnotationIndex()), selectionBuilder());

        return new StandardQueryParser<Content>(Resource.CONTENT,
                contentQueryAttributeParser(),
                idCodec(), contextParser);
    }

    /** Channel endpoint. */
    @Bean
    ChannelController channelController() {
        return new ChannelController(
                channelQueryParser(),
                channelQueryExecutor,
                new ChannelQueryResultWriter(channelListWriter())
        );
    }

    /** Channel-group endpoint. */
    @Bean
    ChannelGroupController channelGroupController() {
        return new ChannelGroupController(
                channelGroupQueryParser(),
                channelGroupQueryExecutor,
                new ChannelGroupQueryResultWriter(channelGroupListWriter())
        );
    }

    private ChannelGroupListWriter channelGroupListWriter() {
        return new ChannelGroupListWriter(AnnotationRegistry.<ChannelGroup>builder()
                .registerDefault(CHANNEL_GROUP, new ChannelGroupAnnotation())
                .register(
                        CHANNELS,
                        new ChannelGroupChannelsAnnotation(
                                new ChannelGroupChannelWriter(channelResolver)
                        ),
                        CHANNEL_GROUP
                )
                .register(REGIONS, new RegionsAnnotation(channelGroupResolver), CHANNEL_GROUP)
                .register(PLATFORM, new PlatformAnnotation(channelGroupResolver), CHANNEL_GROUP)
                .build());
    }

    private QueryParser<ChannelGroup> channelGroupQueryParser() {
        QueryContextParser contextParser = new QueryContextParser(
                configFetcher,
                userFetcher,
                new IndexAnnotationsExtractor(
                        channelGroupAnnotationIndex()
                ),
                selectionBuilder()
        );
        return new StandardQueryParser<>(
                Resource.CHANNEL_GROUP,
                channelGroupQueryAttributeParser(),
                idCodec(),
                contextParser
        );
    }

    private AnnotationIndex channelGroupAnnotationIndex() {
        return ResourceAnnotationIndex.builder(Resource.CHANNEL_GROUP, Annotation.all()).build();
    }

    /** Channel groups are only filterable by id. */
    private QueryAttributeParser channelGroupQueryAttributeParser() {
        return new QueryAttributeParser(
                ImmutableList.of(
                        QueryAtomParser.valueOf(Attributes.ID, AttributeCoercers.idCoercer(idCodec()))
                )
        );
    }

    private StandardQueryParser<Channel> channelQueryParser() {
        QueryContextParser contextParser = new QueryContextParser(
                configFetcher,
                userFetcher,
                new IndexAnnotationsExtractor(
                        channelAnnotationIndex()
                ),
                selectionBuilder()
        );
        return new StandardQueryParser<>(
                Resource.CHANNEL,
                channelQueryAttributeParser(),
                idCodec(),
                contextParser
        );
    }

    private QueryAttributeParser channelQueryAttributeParser() {
        return new QueryAttributeParser(
                ImmutableList.of(
                        QueryAtomParser.valueOf(Attributes.ID, AttributeCoercers.idCoercer(idCodec())),
                        QueryAtomParser.valueOf(Attributes.AVAILABLE_FROM, AttributeCoercers.enumCoercer(Sources.fromKey())),
                        QueryAtomParser.valueOf(Attributes.BROADCASTER, AttributeCoercers.enumCoercer(Sources.fromKey())),
                        QueryAtomParser.valueOf(Attributes.ORDER_BY, AttributeCoercers.stringCoercer()),
                        QueryAtomParser.valueOf(
                                Attributes.MEDIA_TYPE,
                                AttributeCoercers.enumCoercer(
                                        // Adapter: MediaType.fromKey is exposed as a Function for the coercer.
                                        new Function<String, Optional<MediaType>>() {
                                            @Override
                                            public Optional<MediaType> apply(String input) {
                                                return MediaType.fromKey(input);
                                            }
                                        }
                                )
                        )
                )
        );
    }

    private StandardQueryParser<Topic> topicQueryParser() {
        QueryContextParser contextParser = new QueryContextParser(configFetcher, userFetcher,
                new IndexAnnotationsExtractor(topicAnnotationIndex()), selectionBuilder());

        return new StandardQueryParser<Topic>(Resource.TOPIC,
                new QueryAttributeParser(ImmutableList.of(
                        QueryAtomParser.valueOf(Attributes.ID, AttributeCoercers.idCoercer(idCodec())),
                        QueryAtomParser.valueOf(Attributes.TOPIC_TYPE, AttributeCoercers.enumCoercer(Topic.Type.fromKey())),
                        QueryAtomParser.valueOf(Attributes.SOURCE, AttributeCoercers.enumCoercer(Sources.fromKey())),
                        QueryAtomParser.valueOf(Attributes.ALIASES_NAMESPACE, AttributeCoercers.stringCoercer()),
                        QueryAtomParser.valueOf(Attributes.ALIASES_VALUE, AttributeCoercers.stringCoercer())
                )),
                idCodec(), contextParser
        );
    }

    /** Popular-topics endpoint backed by the popular topic index. */
    @Bean
    PopularTopicController popularTopicController() {
        return new PopularTopicController(topicResolver, popularTopicIndex, new TopicQueryResultWriter(topicListWriter()), configFetcher);
    }

    /** Search endpoint backed by the v4 search resolver. */
    @Bean
    SearchController searchController() {
        return new SearchController(v4SearchResolver, configFetcher, new ContentQueryResultWriter(contentListWriter()));
    }

    // --- Annotation indices: declare which annotations each resource supports. ---

    @Bean
    ResourceAnnotationIndex contentAnnotationIndex() {
        return ResourceAnnotationIndex.builder(Resource.CONTENT, Annotation.all())
                .attach(Annotation.TOPICS, topicAnnotationIndex(), Annotation.ID)
                .build();
    }

    @Bean
    ResourceAnnotationIndex topicAnnotationIndex() {
        return ResourceAnnotationIndex.builder(Resource.TOPIC, Annotation.all()).build();
    }

    @Bean
    ResourceAnnotationIndex channelAnnotationIndex() {
        return ResourceAnnotationIndex.builder(Resource.CHANNEL, Annotation.all()).build();
    }

    @Bean
    ResourceAnnotationIndex modelInfoAnnotationIndex() {
        return ResourceAnnotationIndex.builder(Resource.MODEL_INFO, Annotation.all()).build();
    }

    @Bean
    ResourceAnnotationIndex endpointInfoAnnotationIndex() {
        return ResourceAnnotationIndex.builder(Resource.ENDPOINT_INFO, Annotation.all()).build();
    }

    /**
     * Content writer: registers every output annotation plus the implied
     * annotations each one pulls in (e.g. most imply ID_SUMMARY).
     */
    @Bean
    EntityListWriter<Content> contentListWriter() {
        ImmutableSet<Annotation> commonImplied = ImmutableSet.of(ID_SUMMARY);
        // NOTE(review): the two resolvers below are only consumed by the
        // commented-out UPCOMING/RECENTLY_BROADCAST registrations further down;
        // until those are restored they are unused locals.
        RecentlyBroadcastChildrenResolver recentlyBroadcastResolver = new MongoRecentlyBroadcastChildrenResolver(mongo);
        UpcomingItemsResolver upcomingChildrenResolver = new MongoUpcomingItemsResolver(mongo);
        MongoContainerSummaryResolver containerSummaryResolver = new MongoContainerSummaryResolver(mongo, idCodec());
        return new ContentListWriter(AnnotationRegistry.<Content>builder()
                .registerDefault(ID_SUMMARY, new IdentificationSummaryAnnotation(idCodec()))
                .register(ID, new IdentificationAnnotation(), commonImplied)
                .register(EXTENDED_ID, new ExtendedIdentificationAnnotation(idCodec()), ImmutableSet.of(ID))
                .register(SERIES_REFERENCE, new SeriesReferenceAnnotation(idCodec()), commonImplied)
                .register(SERIES_SUMMARY, new SeriesSummaryAnnotation(idCodec(), containerSummaryResolver), commonImplied, ImmutableSet.of(SERIES_REFERENCE))
                .register(BRAND_REFERENCE, new BrandReferenceAnnotation(idCodec()), commonImplied)
                .register(BRAND_SUMMARY, new BrandSummaryAnnotation(idCodec(), containerSummaryResolver), commonImplied, ImmutableSet.of(BRAND_REFERENCE))
                .register(DESCRIPTION, new ContentDescriptionAnnotation(), ImmutableSet.of(ID, SERIES_REFERENCE, BRAND_REFERENCE))
                .register(EXTENDED_DESCRIPTION, new ExtendedDescriptionAnnotation(), ImmutableSet.of(DESCRIPTION, EXTENDED_ID))
                .register(SUB_ITEMS, new SubItemAnnotation(idCodec()), commonImplied)
                .register(CLIPS, new ClipsAnnotation(), commonImplied)
                .register(PEOPLE, new PeopleAnnotation(), commonImplied)
                .register(TOPICS, new TopicsAnnotation(topicResolver, topicListWriter()), commonImplied)
                //.register(CONTENT_GROUPS, new ContentGroupsAnnotation(contentGroupResolver), commonImplied)
                .register(SEGMENT_EVENTS, new SegmentEventsAnnotation(segmentRelatedLinkMergingFetcher()), commonImplied)
                .register(RELATED_LINKS, new RelatedLinksAnnotation(), commonImplied)
                .register(KEY_PHRASES, new KeyPhrasesAnnotation(), commonImplied)
                .register(LOCATIONS, new LocationsAnnotation(), commonImplied)
                .register(BROADCASTS, new BroadcastsAnnotation(idCodec()), commonImplied)
                .register(FIRST_BROADCASTS, new FirstBroadcastAnnotation(idCodec()), commonImplied)
                .register(NEXT_BROADCASTS, new NextBroadcastAnnotation(new SystemClock(), idCodec()), commonImplied)
                .register(AVAILABLE_LOCATIONS, new AvailableLocationsAnnotation(), commonImplied)
                .register(IMAGES, new ImagesAnnotation(), commonImplied)
                //.register(UPCOMING, new UpcomingAnnotation(idCodec(), upcomingChildrenResolver), commonImplied)
                //.register(PRODUCTS, new ProductsAnnotation(productResolver), commonImplied)
                //.register(RECENTLY_BROADCAST, new RecentlyBroadcastAnnotation(idCodec(), recentlyBroadcastResolver), commonImplied)
                .register(CHANNELS, new ChannelsAnnotation(), commonImplied)
                // Composite annotations: no writer of their own, they just imply a bundle.
                .register(CONTENT_SUMMARY, NullWriter.create(Content.class), ImmutableSet.of(DESCRIPTION, BRAND_SUMMARY,
                        SERIES_SUMMARY, BROADCASTS, LOCATIONS))
                .register(CONTENT_DETAIL, NullWriter.create(Content.class), ImmutableSet.of(EXTENDED_DESCRIPTION, SUB_ITEMS, CLIPS,
                        PEOPLE, BRAND_SUMMARY, SERIES_SUMMARY, BROADCASTS, LOCATIONS, KEY_PHRASES, RELATED_LINKS))
                .build());
    }

    @Bean
    protected EntityListWriter<Topic> topicListWriter() {
        return new TopicListWriter(AnnotationRegistry.<Topic>builder()
                .registerDefault(ID_SUMMARY, new IdentificationSummaryAnnotation(idCodec()))
                .register(ID, new IdentificationAnnotation(), ID_SUMMARY)
                .register(EXTENDED_ID, new ExtendedIdentificationAnnotation(idCodec()), ImmutableSet.of(ID))
                .register(DESCRIPTION, new DescriptionAnnotation<Topic>(), ImmutableSet.of(ID))
                .build());
    }

    /** Fetches segment events and merges their related links via the segment store. */
    @Bean
    SegmentRelatedLinkMergingFetcher segmentRelatedLinkMergingFetcher() {
        return new SegmentRelatedLinkMergingFetcher(persistenceModule.segmentStore(), new ScrubbablesSegmentRelatedLinkMerger());
    }

    /** Writer for legacy (pre-v4) channel objects, used by the schedule output. */
    protected EntityListWriter<org.atlasapi.media.channel.Channel> legacyChannelListWriter() {
        return new LegacyChannelListWriter(AnnotationRegistry.<org.atlasapi.media.channel.Channel>builder()
//        .registerDefault(ID_SUMMARY, new IdentificationSummaryAnnotation(idCodec()))
//        .register(ID, new IdentificationAnnotation(), ID_SUMMARY)
//        .register(EXTENDED_ID, new ExtendedIdentificationAnnotation(idCodec()), ImmutableSet.of(ID))
                .registerDefault(CHANNEL_SUMMARY, new ChannelSummaryWriter(idCodec()))
                .register(CHANNEL, new LegacyChannelAnnotation(), ImmutableSet.of(CHANNEL_SUMMARY))
                .build());
    }

    protected EntityListWriter<Channel> channelListWriter() {
        return new ChannelListWriter(
                AnnotationRegistry.<Channel>builder()
                        .registerDefault(CHANNEL, new ChannelAnnotation())
                        .register(
                                CHANNEL_GROUPS,
                                new ChannelGroupMembershipAnnotation(
                                        new ChannelGroupMembershipListWriter(
                                                "channel_groups",
                                                "channel_group",
                                                channelGroupResolver
                                        )
                                ),
                                CHANNEL
                        )
                        .register(PARENT, new ParentChannelAnnotation(channelResolver), CHANNEL)
                        .register(VARIATIONS, new ChannelVariationAnnotation(channelResolver), CHANNEL)
                        .build());
    }

    @Bean
    protected EntityListWriter<ModelClassInfo> modelListWriter() {
        return new ModelInfoListWriter(AnnotationRegistry.<ModelClassInfo>builder()
                .registerDefault(META_MODEL, new ModelInfoAnnotation<>(linkCreator()))
                .build());
    }

    @Bean
    protected EntityListWriter<EndpointClassInfo> endpointListWriter() {
        return new EndpointInfoListWriter(AnnotationRegistry.<EndpointClassInfo>builder()
                .registerDefault(META_ENDPOINT, new EndpointInfoAnnotation<>(linkCreator()))
                .build());
    }
}
/**
* Container for link between two Osm nodes
*
* @author ab
*/
package btools.router;
import java.io.IOException;
import btools.mapaccess.OsmLink;
import btools.mapaccess.OsmLinkHolder;
import btools.mapaccess.OsmNode;
import btools.mapaccess.OsmTransferNode;
import btools.mapaccess.TurnRestriction;
final class OsmPath implements OsmLinkHolder
{
/**
* The cost of that path (a modified distance)
*/
public int cost = 0;
/**
* The elevation-hysteresis-buffer (0-10 m)
*/
private int ehbd; // in micrometer
private int ehbu; // in micrometer
// the elevation assumed for that path can have a value
// if the corresponding node has not
public short selev;
public int airdistance = 0; // distance to endpos
private OsmNode sourceNode;
private OsmNode targetNode;
private OsmLink link;
public OsmPathElement originElement;
public OsmPathElement myElement;
private float traffic;
private OsmLinkHolder nextForLink = null;
public int treedepth = 0;
// the position of the waypoint just before
// this path position (for angle calculation)
public int originLon;
public int originLat;
// the classifier of the segment just before this paths position
public float lastClassifier;
public MessageData message;
public void unregisterUpTree( RoutingContext rc )
{
try
{
OsmPathElement pe = originElement;
while( pe instanceof OsmPathElementWithTraffic && ((OsmPathElementWithTraffic)pe).unregister(rc) )
{
pe = pe.origin;
}
}
catch( IOException ioe )
{
throw new RuntimeException( ioe );
}
}
public void registerUpTree()
{
if ( originElement instanceof OsmPathElementWithTraffic )
{
OsmPathElementWithTraffic ot = (OsmPathElementWithTraffic)originElement;
ot.register();
ot.addTraffic( traffic );
}
}
OsmPath()
{
}
OsmPath( OsmLink link )
{
this();
this.link = link;
targetNode = link.getTarget( null );
selev = targetNode.getSElev();
originLon = -1;
originLat = -1;
}
OsmPath( OsmPath origin, OsmLink link, OsmTrack refTrack, boolean detailMode, RoutingContext rc )
{
this();
if ( origin.myElement == null )
{
origin.myElement = OsmPathElement.create( origin, rc.countTraffic );
}
this.originElement = origin.myElement;
this.link = link;
this.sourceNode = origin.targetNode;
this.targetNode = link.getTarget( sourceNode );
this.cost = origin.cost;
this.ehbd = origin.ehbd;
this.ehbu = origin.ehbu;
this.lastClassifier = origin.lastClassifier;
addAddionalPenalty(refTrack, detailMode, origin, link, rc );
}
private void addAddionalPenalty(OsmTrack refTrack, boolean detailMode, OsmPath origin, OsmLink link, RoutingContext rc )
{
byte[] description = link.descriptionBitmap;
if ( description == null ) throw new IllegalArgumentException( "null description for: " + link );
boolean recordTransferNodes = detailMode || rc.countTraffic;
boolean recordMessageData = detailMode;
rc.nogomatch = false;
// extract the 3 positions of the first section
int lon0 = origin.originLon;
int lat0 = origin.originLat;
OsmNode p1 = sourceNode;
int lon1 = p1.getILon();
int lat1 = p1.getILat();
short ele1 = origin.selev;
int linkdisttotal = 0;
MessageData msgData = recordMessageData ? new MessageData() : null;
boolean isReverse = link.isReverse( sourceNode );
// evaluate the way tags
rc.expctxWay.evaluate( rc.inverseDirection ^ isReverse, description );
// calculate the costfactor inputs
boolean isTrafficBackbone = cost == 0 && rc.expctxWay.getIsTrafficBackbone() > 0.f;
float turncostbase = rc.expctxWay.getTurncost();
float cfup = rc.expctxWay.getUphillCostfactor();
float cfdown = rc.expctxWay.getDownhillCostfactor();
float cf = rc.expctxWay.getCostfactor();
cfup = cfup == 0.f ? cf : cfup;
cfdown = cfdown == 0.f ? cf : cfdown;
float newClassifier = rc.expctxWay.getInitialClassifier();
if ( newClassifier == 0. )
{
newClassifier = (cfup + cfdown + cf)/3;
}
float classifierDiff = newClassifier - lastClassifier;
if ( classifierDiff > 0.0005 || classifierDiff < -0.0005 )
{
lastClassifier = newClassifier;
float initialcost = rc.expctxWay.getInitialcost();
int iicost = (int)initialcost;
if ( recordMessageData )
{
msgData.linkinitcost += iicost;
}
cost += iicost;
}
OsmTransferNode transferNode = link.geometry == null ? null
: rc.geometryDecoder.decodeGeometry( link.geometry, p1, targetNode, isReverse );
boolean isFirstSection = true;
for(;;)
{
originLon = lon1;
originLat = lat1;
int lon2;
int lat2;
short ele2;
if ( transferNode == null )
{
lon2 = targetNode.ilon;
lat2 = targetNode.ilat;
ele2 = targetNode.selev;
}
else
{
lon2 = transferNode.ilon;
lat2 = transferNode.ilat;
ele2 = transferNode.selev;
}
// check turn restrictions: do we have one with that origin?
boolean checkTRs = false;
if ( isFirstSection )
{
isFirstSection = false;
// TODO: TRs for inverse routing would need inverse TR logic,
// inverse routing for now just for target island check, so don't care (?)
// in detail mode (=final pass) no TR to not mess up voice hints
checkTRs = rc.considerTurnRestrictions && !rc.inverseDirection && !detailMode;
}
if ( checkTRs )
{
boolean hasAnyPositive = false;
boolean hasPositive = false;
boolean hasNegative = false;
TurnRestriction tr = sourceNode.firstRestriction;
while( tr != null )
{
boolean trValid = ! (tr.exceptBikes() && rc.bikeMode);
if ( trValid && tr.fromLon == lon0 && tr.fromLat == lat0 )
{
if ( tr.isPositive )
{
hasAnyPositive = true;
}
if ( tr.toLon == lon2 && tr.toLat == lat2 )
{
if ( tr.isPositive )
{
hasPositive = true;
}
else
{
hasNegative = true;
}
}
}
tr = tr.next;
}
if ( !hasPositive && ( hasAnyPositive || hasNegative ) )
{
cost = -1;
return;
}
}
// if recording, new MessageData for each section (needed for turn-instructions)
if ( recordMessageData && msgData.wayKeyValues != null )
{
originElement.message = msgData;
msgData = new MessageData();
}
int dist = rc.calcDistance( lon1, lat1, lon2, lat2 );
boolean stopAtEndpoint = false;
if ( rc.shortestmatch )
{
if ( rc.isEndpoint )
{
stopAtEndpoint = true;
ele2 = interpolateEle( ele1, ele2, rc.wayfraction );
}
else
{
// we just start here, reset cost
cost = 0;
ehbd = 0;
ehbu = 0;
lon0 = -1; // reset turncost-pipe
lat0 = -1;
if ( recordTransferNodes )
{
if ( rc.wayfraction > 0. )
{
ele1 = interpolateEle( ele1, ele2, 1. - rc.wayfraction );
originElement = OsmPathElement.create( rc.ilonshortest, rc.ilatshortest, ele1, null, rc.countTraffic );
}
else
{
originElement = null; // prevent duplicate point
}
}
}
}
if ( recordMessageData )
{
msgData.linkdist += dist;
}
linkdisttotal += dist;
// apply a start-direction if appropriate (by faking the origin position)
if ( lon0 == -1 && lat0 == -1 )
{
double coslat = Math.cos( ( lat1 - 90000000 ) * 0.00000001234134 );
if ( rc.startDirectionValid && coslat > 0. )
{
double dir = rc.startDirection.intValue() / 57.29578;
lon0 = lon1 - (int) ( 1000. * Math.sin( dir ) / coslat );
lat0 = lat1 - (int) ( 1000. * Math.cos( dir ) );
}
}
if ( !isTrafficBackbone && lon0 != -1 && lat0 != -1 )
{
// penalty proportional to direction change
double cos = rc.calcCosAngle( lon0, lat0, lon1, lat1, lon2, lat2 );
int actualturncost = (int)(cos * turncostbase + 0.2 ); // e.g. turncost=90 -> 90 degree = 90m penalty
cost += actualturncost;
if ( recordMessageData )
{
msgData.linkturncost += actualturncost;
msgData.turnangle = (float)rc.calcAngle( lon0, lat0, lon1, lat1, lon2, lat2 );
}
}
// only the part of the descend that does not fit into the elevation-hysteresis-buffer
// leads to an immediate penalty
int elefactor = 250000;
if ( ele2 == Short.MIN_VALUE ) ele2 = ele1;
if ( ele1 != Short.MIN_VALUE )
{
ehbd += (ele1 - ele2)*elefactor - dist * rc.downhillcutoff;
ehbu += (ele2 - ele1)*elefactor - dist * rc.uphillcutoff;
}
float downweight = 0.f;
if ( ehbd > rc.elevationpenaltybuffer )
{
downweight = 1.f;
int excess = ehbd - rc.elevationpenaltybuffer;
int reduce = dist * rc.elevationbufferreduce;
if ( reduce > excess )
{
downweight = ((float)excess)/reduce;
reduce = excess;
}
excess = ehbd - rc.elevationmaxbuffer;
if ( reduce < excess )
{
reduce = excess;
}
ehbd -= reduce;
if ( rc.downhillcostdiv > 0 )
{
int elevationCost = reduce/rc.downhillcostdiv;
cost += elevationCost;
if ( recordMessageData )
{
msgData.linkelevationcost += elevationCost;
}
}
}
else if ( ehbd < 0 )
{
ehbd = 0;
}
float upweight = 0.f;
if ( ehbu > rc.elevationpenaltybuffer )
{
upweight = 1.f;
int excess = ehbu - rc.elevationpenaltybuffer;
int reduce = dist * rc.elevationbufferreduce;
if ( reduce > excess )
{
upweight = ((float)excess)/reduce;
reduce = excess;
}
excess = ehbu - rc.elevationmaxbuffer;
if ( reduce < excess )
{
reduce = excess;
}
ehbu -= reduce;
if ( rc.uphillcostdiv > 0 )
{
int elevationCost = reduce/rc.uphillcostdiv;
cost += elevationCost;
if ( recordMessageData )
{
msgData.linkelevationcost += elevationCost;
}
}
}
else if ( ehbu < 0 )
{
ehbu = 0;
}
// get the effective costfactor (slope dependent)
float costfactor = cfup*upweight + cf*(1.f - upweight - downweight) + cfdown*downweight;
if ( isTrafficBackbone )
{
costfactor = 0.f;
}
float fcost = dist * costfactor + 0.5f;
if ( ( costfactor > 9998. && !detailMode ) || fcost + cost >= 2000000000. )
{
cost = -1;
return;
}
int waycost = (int)(fcost);
cost += waycost;
// calculate traffic
if ( rc.countTraffic )
{
int minDist = (int)rc.trafficSourceMinDist;
int cost2 = cost < minDist ? minDist : cost;
traffic += dist*rc.expctxWay.getTrafficSourceDensity()*Math.pow(cost2/10000.f,rc.trafficSourceExponent);
}
if ( recordMessageData )
{
msgData.costfactor = costfactor;
msgData.priorityclassifier = (int)rc.expctxWay.getPriorityClassifier();
msgData.classifiermask = (int)rc.expctxWay.getClassifierMask();
msgData.lon = lon2;
msgData.lat = lat2;
msgData.ele = ele2;
msgData.wayKeyValues = rc.expctxWay.getKeyValueDescription( isReverse, description );
}
if ( stopAtEndpoint )
{
if ( recordTransferNodes )
{
originElement = OsmPathElement.create( rc.ilonshortest, rc.ilatshortest, ele2, originElement, rc.countTraffic );
originElement.cost = cost;
if ( recordMessageData )
{
originElement.message = msgData;
}
}
if ( rc.nogomatch )
{
cost = -1;
}
return;
}
if ( transferNode == null )
{
if ( refTrack != null && refTrack.containsNode( targetNode ) && refTrack.containsNode( sourceNode ) )
{
int reftrackcost = linkdisttotal;
cost += reftrackcost;
}
selev = ele2;
break;
}
transferNode = transferNode.next;
if ( recordTransferNodes )
{
originElement = OsmPathElement.create( lon2, lat2, ele2, originElement, rc.countTraffic );
originElement.cost = cost;
originElement.addTraffic( traffic );
traffic = 0;
}
lon0 = lon1;
lat0 = lat1;
lon1 = lon2;
lat1 = lat2;
ele1 = ele2;
}
// check for nogo-matches (after the *actual* start of segment)
if ( rc.nogomatch )
{
cost = -1;
return;
}
// finally add node-costs for target node
if ( targetNode.nodeDescription != null )
{
boolean nodeAccessGranted = rc.expctxWay.getNodeAccessGranted() != 0.;
rc.expctxNode.evaluate( nodeAccessGranted , targetNode.nodeDescription );
float initialcost = rc.expctxNode.getInitialcost();
if ( initialcost >= 1000000. )
{
cost = -1;
return;
}
int iicost = (int)initialcost;
cost += iicost;
if ( recordMessageData )
{
msgData.linknodecost += iicost;
msgData.nodeKeyValues = rc.expctxNode.getKeyValueDescription( nodeAccessGranted, targetNode.nodeDescription );
}
}
if ( recordMessageData )
{
message = msgData;
}
}
public short interpolateEle( short e1, short e2, double fraction )
{
if ( e1 == Short.MIN_VALUE || e2 == Short.MIN_VALUE )
{
return Short.MIN_VALUE;
}
return (short)( e1*(1.-fraction) + e2*fraction );
}
public int elevationCorrection( RoutingContext rc )
{
return ( rc.downhillcostdiv > 0 ? ehbd/rc.downhillcostdiv : 0 )
+ ( rc.uphillcostdiv > 0 ? ehbu/rc.uphillcostdiv : 0 );
}
public boolean definitlyWorseThan( OsmPath p, RoutingContext rc )
{
int c = p.cost;
if ( rc.downhillcostdiv > 0 )
{
int delta = p.ehbd - ehbd;
if ( delta > 0 ) c += delta/rc.downhillcostdiv;
}
if ( rc.uphillcostdiv > 0 )
{
int delta = p.ehbu - ehbu;
if ( delta > 0 ) c += delta/rc.uphillcostdiv;
}
return cost > c;
}
public OsmNode getSourceNode()
{
return sourceNode;
}
public OsmNode getTargetNode()
{
return targetNode;
}
public OsmLink getLink()
{
return link;
}
public void setNextForLink( OsmLinkHolder holder )
{
nextForLink = holder;
}
/** @return the next holder in the per-link holder list, or null */
public OsmLinkHolder getNextForLink()
{
    return this.nextForLink;
}
} |
package com.otus.alexeenko.l8;
import com.otus.alexeenko.l8.services.DataBaseService;
import com.otus.alexeenko.l8.services.custom.CustomService;
import com.otus.alexeenko.l8.services.datasets.AddressDataSet;
import com.otus.alexeenko.l8.services.datasets.PhoneDataSet;
import com.otus.alexeenko.l8.services.datasets.UserDataSet;
import java.util.Arrays;
import java.util.List;
public class L8 {
public static void main(String[] args) {
org.apache.log4j.BasicConfigurator.configure();
List<PhoneDataSet> phones = Arrays.asList(new PhoneDataSet(1L, 911, "1"),
new PhoneDataSet(2L, 921, "2"));
UserDataSet dataSet1 = new UserDataSet(1L, "First", 22,
phones, new AddressDataSet(1L, "Kings Row", 90));
UserDataSet dataSet2 = new UserDataSet(2L, "Second", 17,
phones, new AddressDataSet(2L, "Dorado", 200));
DataBaseService db = new CustomService();
try {
db.save(dataSet1);
db.save(dataSet2);
for (int i = 0; i < 10; ++i) {
if (i % 5 == 3)
db.load(2L, UserDataSet.class);
else
db.load(1L, UserDataSet.class);
Thread.sleep(2000);
}
UserDataSet result = db.load(1L, UserDataSet.class);
System.out.println(result.toString());
} catch (Exception e) {
e.printStackTrace();
} finally {
db.dispose();
}
}
} |
package com.atlassian.jira.plugins.dvcs.dao.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import net.java.ao.EntityStreamCallback;
import net.java.ao.Query;
import net.java.ao.RawEntity;
import net.java.ao.schema.PrimaryKey;
import net.java.ao.schema.Table;
import org.apache.commons.lang.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.atlassian.activeobjects.external.ActiveObjects;
import com.atlassian.jira.plugins.dvcs.activeobjects.ActiveObjectsUtils;
import com.atlassian.jira.plugins.dvcs.activeobjects.v3.ChangesetMapping;
import com.atlassian.jira.plugins.dvcs.activeobjects.v3.IssueToChangesetMapping;
import com.atlassian.jira.plugins.dvcs.activeobjects.v3.RepositoryToChangesetMapping;
import com.atlassian.jira.plugins.dvcs.dao.ChangesetDao;
import com.atlassian.jira.plugins.dvcs.dao.impl.transform.ChangesetTransformer;
import com.atlassian.jira.plugins.dvcs.model.Changeset;
import com.atlassian.jira.plugins.dvcs.model.ChangesetFile;
import com.atlassian.jira.plugins.dvcs.model.GlobalFilter;
import com.atlassian.jira.util.json.JSONArray;
import com.atlassian.jira.util.json.JSONException;
import com.atlassian.jira.util.json.JSONObject;
import com.atlassian.sal.api.transaction.TransactionCallback;
/**
 * {@link ChangesetDao} implementation backed by ActiveObjects.
 * Stores changesets ({@link ChangesetMapping}) together with their
 * repository associations ({@link RepositoryToChangesetMapping}) and
 * issue-key associations ({@link IssueToChangesetMapping}).
 */
public class ChangesetDaoImpl implements ChangesetDao
{
    private static final Logger log = LoggerFactory.getLogger(ChangesetDaoImpl.class);

    private final ActiveObjects activeObjects;

    // Converts ChangesetMapping records into Changeset model objects.
    private final ChangesetTransformer transformer = new ChangesetTransformer();

    public ChangesetDaoImpl(ActiveObjects activeObjects)
    {
        this.activeObjects = activeObjects;
    }

    // Transforms a single mapping; the transformer may return several
    // Changesets for one mapping (presumably one per associated repository,
    // see filterByRepository() — verify against ChangesetTransformer).
    private List<Changeset> transform(ChangesetMapping changesetMapping)
    {
        return transformer.transform(changesetMapping);
    }

    // Flattens the transformation of a list of mappings into one list.
    private List<Changeset> transform(List<ChangesetMapping> changesetMappings)
    {
        List<Changeset> changesets = new ArrayList<Changeset>();
        for (ChangesetMapping changesetMapping : changesetMappings)
        {
            changesets.addAll(transform(changesetMapping));
        }
        return changesets;
    }

    /**
     * Removes all changeset data belonging to the given repository:
     * first the repo-changeset links, then issue-changeset links that no
     * longer point at a linked changeset, then the orphaned changesets.
     */
    @Override
    public void removeAllInRepository(final int repositoryId)
    {
        activeObjects.executeInTransaction(new TransactionCallback<Object>()
        {
            @Override
            public Object doInTransaction()
            {
                // todo: transaction: plugin use SalTransactionManager and there is empty implementation of TransactionSynchronisationManager.
                // todo: Therefore there are only entityCache transactions. No DB transactions.
                // delete association repo - changesets
                Query query = Query.select().where(RepositoryToChangesetMapping.REPOSITORY_ID + " = ?", repositoryId);
                log.debug("deleting repo - changesets associations from RepoToChangeset with id = [ {} ]", new String[]{String.valueOf(repositoryId)});
                ActiveObjectsUtils.delete(activeObjects, RepositoryToChangesetMapping.class, query);
                // delete association issues - changeset
                // NOTE(review): column/table names are double-quoted inline here,
                // which is database-dialect sensitive — verify on all supported DBs.
                query = Query.select().where(
                        IssueToChangesetMapping.CHANGESET_ID + " not in " +
                        "(select \"" + RepositoryToChangesetMapping.CHANGESET_ID + "\" from \"" + RepositoryToChangesetMapping.TABLE_NAME + "\")");
                log.debug("deleting orphaned issue-changeset associations");
                ActiveObjectsUtils.delete(activeObjects, IssueToChangesetMapping.class, query);
                // delete orphaned changesets
                query = Query.select().where(
                        "ID not in " +
                        "(select \"" + RepositoryToChangesetMapping.CHANGESET_ID + "\" from \"" + RepositoryToChangesetMapping.TABLE_NAME + "\")");
                log.debug("deleting orphaned changesets");
                ActiveObjectsUtils.delete(activeObjects, ChangesetMapping.class, query);
                return null;
            }
        });
    }

    /**
     * Creates the changeset record if it does not yet exist (matched by node),
     * then (re-)associates it with its repository and the extracted issue keys.
     * Sets the generated id back onto the passed-in model object.
     */
    @Override
    public Changeset create(final Changeset changeset, final Set<String> extractedIssues)
    {
        ChangesetMapping changesetMapping = activeObjects.executeInTransaction(new TransactionCallback<ChangesetMapping>()
        {
            @Override
            public ChangesetMapping doInTransaction()
            {
                ChangesetMapping chm = getChangesetMapping(changeset);
                if (chm == null)
                {
                    chm = activeObjects.create(ChangesetMapping.class);
                    fillProperties(changeset, chm);
                    chm.save();
                }
                associateRepositoryToChangeset(chm, changeset.getRepositoryId());
                if (extractedIssues != null)
                {
                    associateIssuesToChangeset(chm, extractedIssues);
                }
                return chm;
            }
        });
        changeset.setId(changesetMapping.getID());
        return changeset;
    }

    /**
     * Updates an existing changeset record (matched by node); logs a warning
     * and changes nothing if no record is found.
     */
    @Override
    public Changeset update(final Changeset changeset)
    {
        activeObjects.executeInTransaction(new TransactionCallback<ChangesetMapping>()
        {
            @Override
            public ChangesetMapping doInTransaction()
            {
                ChangesetMapping chm = getChangesetMapping(changeset);
                if (chm != null)
                {
                    fillProperties(changeset, chm);
                    chm.save();
                } else
                {
                    log.warn("Changest with node {} is not exists.", changeset.getNode());
                }
                return chm;
            }
        });
        return changeset;
    }

    // Looks up the stored record for this changeset by its node hash.
    // If several records share the node (should not happen), the first
    // one is returned and a warning is logged.
    private ChangesetMapping getChangesetMapping(Changeset changeset)
    {
        ChangesetMapping[] mappings = activeObjects.find(ChangesetMapping.class,
                ChangesetMapping.NODE + " = ? ", changeset.getNode());
        if (mappings.length > 1)
        {
            log.warn("More changesets with same Node. Same changesets count: {}, Node: {}, Repository: {}",
                    new Object[]{mappings.length, changeset.getNode(), changeset.getRepositoryId()});
        }
        return (ArrayUtils.isNotEmpty(mappings)) ? mappings[0] : null;
    }

    /**
     * Copies all model fields onto the AO record, serialising parents and
     * file details to JSON. Parents data longer than 255 chars is replaced
     * by the TOO_MANY_PARENTS marker. Saves the record at the end
     * (callers also call save(); the second save is redundant but harmless).
     */
    public void fillProperties(Changeset changeset, ChangesetMapping chm)
    {
        // we need to remove null characters '\u0000' because PostgreSQL cannot store String values with such
        // characters
        // todo: remove NULL Chars before call setters
        chm.setNode(changeset.getNode());
        chm.setRawAuthor(changeset.getRawAuthor());
        chm.setAuthor(changeset.getAuthor());
        chm.setDate(changeset.getDate());
        chm.setRawNode(changeset.getRawNode());
        chm.setBranch(changeset.getBranch());
        chm.setMessage(changeset.getMessage());
        chm.setAuthorEmail(changeset.getAuthorEmail());
        chm.setSmartcommitAvailable(changeset.isSmartcommitAvaliable());
        JSONArray parentsJson = new JSONArray();
        for (String parent : changeset.getParents())
        {
            parentsJson.put(parent);
        }
        String parentsData = parentsJson.toString();
        if (parentsData.length() > 255)
        {
            parentsData = ChangesetMapping.TOO_MANY_PARENTS;
        }
        chm.setParentsData(parentsData);
        JSONObject filesDataJson = new JSONObject();
        JSONArray filesJson = new JSONArray();
        try
        {
            List<ChangesetFile> files = changeset.getFiles();
            int count = changeset.getAllFileCount();
            filesDataJson.put("count", count);
            // Only the first MAX_VISIBLE_FILES files are serialised in detail.
            for (int i = 0; i < Math.min(count, Changeset.MAX_VISIBLE_FILES); i++)
            {
                ChangesetFile changesetFile = files.get(i);
                JSONObject fileJson = new JSONObject();
                fileJson.put("filename", changesetFile.getFile());
                fileJson.put("status", changesetFile.getFileAction().getAction());
                fileJson.put("additions", changesetFile.getAdditions());
                fileJson.put("deletions", changesetFile.getDeletions());
                filesJson.put(fileJson);
            }
            filesDataJson.put("files", filesJson);
            chm.setFilesData(filesDataJson.toString());
        } catch (JSONException e)
        {
            log.error("Creating files JSON failed!", e);
        }
        chm.setVersion(ChangesetMapping.LATEST_VERSION);
        chm.save();
    }

    // Picks the changeset belonging to the given repository out of the
    // transformer result, or null if none matches.
    private Changeset filterByRepository(List<Changeset> changesets, int repositoryId)
    {
        for (Changeset changeset : changesets)
        {
            if (changeset.getRepositoryId() == repositoryId)
            {
                return changeset;
            }
        }
        return null;
    }

    // Replaces all issue associations of the mapping with the given issue keys.
    private void associateIssuesToChangeset(ChangesetMapping changesetMapping, Set<String> extractedIssues)
    {
        // remove all assoc issues-changeset
        Query query = Query.select().where(IssueToChangesetMapping.CHANGESET_ID + " = ? ", changesetMapping);
        ActiveObjectsUtils.delete(activeObjects, IssueToChangesetMapping.class, query);
        // insert all
        for (String extractedIssue : extractedIssues)
        {
            final Map<String, Object> map = new MapRemovingNullCharacterFromStringValues();
            map.put(IssueToChangesetMapping.ISSUE_KEY, extractedIssue);
            map.put(IssueToChangesetMapping.PROJECT_KEY, parseProjectKey(extractedIssue));
            map.put(IssueToChangesetMapping.CHANGESET_ID, changesetMapping.getID());
            activeObjects.create(IssueToChangesetMapping.class, map);
        }
    }

    // Creates the repo-changeset link if it does not already exist (idempotent).
    private void associateRepositoryToChangeset(ChangesetMapping changesetMapping, int repositoryId)
    {
        RepositoryToChangesetMapping[] mappings = activeObjects.find(RepositoryToChangesetMapping.class,
                RepositoryToChangesetMapping.REPOSITORY_ID + " = ? and " +
                RepositoryToChangesetMapping.CHANGESET_ID + " = ? ",
                repositoryId,
                changesetMapping);
        if (ArrayUtils.isEmpty(mappings))
        {
            final Map<String, Object> map = new MapRemovingNullCharacterFromStringValues();
            map.put(RepositoryToChangesetMapping.REPOSITORY_ID, repositoryId);
            map.put(RepositoryToChangesetMapping.CHANGESET_ID, changesetMapping);
            activeObjects.create(RepositoryToChangesetMapping.class, map);
        }
    }

    // Extracts the project key prefix from an issue key, e.g. "ABC-1" -> "ABC".
    // NOTE(review): throws StringIndexOutOfBoundsException if the key contains
    // no '-'; callers appear to pass only extracted issue keys — verify.
    public static String parseProjectKey(String issueKey)
    {
        return issueKey.substring(0, issueKey.indexOf("-"));
    }

    /**
     * Finds a changeset by node within one repository, or null if absent.
     */
    @Override
    public Changeset getByNode(final int repositoryId, final String changesetNode)
    {
        final ChangesetMapping changesetMapping = activeObjects.executeInTransaction(new TransactionCallback<ChangesetMapping>()
        {
            @Override
            public ChangesetMapping doInTransaction()
            {
                Query query = Query.select()
                        .alias(ChangesetMapping.class, "chm")
                        .alias(RepositoryToChangesetMapping.class, "rtchm")
                        .join(RepositoryToChangesetMapping.class, "chm.ID = rtchm." + RepositoryToChangesetMapping.CHANGESET_ID)
                        .where("chm." + ChangesetMapping.NODE + " = ? AND rtchm." + RepositoryToChangesetMapping.REPOSITORY_ID + " = ? ", changesetNode, repositoryId);
                ChangesetMapping[] mappings = activeObjects.find(ChangesetMapping.class, query);
                return mappings.length != 0 ? mappings[0] : null;
            }
        });
        final List<Changeset> changesets = transform(changesetMapping);
        return changesets != null ? filterByRepository(changesets, repositoryId) : null;
    }

    /**
     * Returns all changesets linked to the given issue key, ordered by date.
     */
    @Override
    public List<Changeset> getByIssueKey(final String issueKey)
    {
        final List<ChangesetMapping> changesetMappings = activeObjects.executeInTransaction(new TransactionCallback<List<ChangesetMapping>>()
        {
            @Override
            public List<ChangesetMapping> doInTransaction()
            {
                ChangesetMapping[] mappings = activeObjects.find(ChangesetMapping.class,
                        Query.select()
                                .alias(ChangesetMapping.class, "chm")
                                .alias(IssueToChangesetMapping.class, "itchm")
                                .join(IssueToChangesetMapping.class, "chm.ID = itchm." + IssueToChangesetMapping.CHANGESET_ID)
                                .where("itchm." + IssueToChangesetMapping.ISSUE_KEY + " = ?", issueKey)
                                .order(ChangesetMapping.DATE));
                return Arrays.asList(mappings);
            }
        });
        return transform(changesetMappings);
    }

    /**
     * Returns up to {@code maxResults} newest changesets matching the global
     * filter; empty list for non-positive maxResults.
     */
    @Override
    public List<Changeset> getLatestChangesets(final int maxResults, final GlobalFilter gf)
    {
        if (maxResults <= 0)
        {
            return Collections.emptyList();
        }
        final List<ChangesetMapping> changesetMappings = activeObjects.executeInTransaction(new TransactionCallback<List<ChangesetMapping>>()
        {
            @Override
            public List<ChangesetMapping> doInTransaction()
            {
                String baseWhereClause = new GlobalFilterQueryWhereClauseBuilder(gf).build();
                Query query = Query.select()
                        .alias(ChangesetMapping.class, "CHANGESET")
                        .alias(IssueToChangesetMapping.class, "ISSUE")
                        .join(IssueToChangesetMapping.class, "CHANGESET.ID = ISSUE." + IssueToChangesetMapping.CHANGESET_ID)
                        .where(baseWhereClause).limit(maxResults).order(ChangesetMapping.DATE + " DESC");
                ChangesetMapping[] mappings = activeObjects.find(ChangesetMapping.class, query);
                return Arrays.asList(mappings);
            }
        });
        return transform(changesetMappings);
    }

    /**
     * Streams all smartcommit-enabled changesets of a repository (newest
     * first) through the given closure, without loading them all into memory.
     */
    @Override
    public void forEachLatestChangesetsAvailableForSmartcommitDo(final int repositoryId, final ForEachChangesetClosure closure)
    {
        Query query = createLatestChangesetsAvailableForSmartcommitQuery(repositoryId);
        activeObjects.stream(ChangesetMapping.class, query, new EntityStreamCallback<ChangesetMapping, Integer>()
        {
            @Override
            public void onRowRead(ChangesetMapping mapping)
            {
                closure.execute(mapping);
            }
        });
    }

    // Builds the query used by forEachLatestChangesetsAvailableForSmartcommitDo.
    private Query createLatestChangesetsAvailableForSmartcommitQuery(int repositoryId)
    {
        return Query.select("*")
                .from(ChangesetMapping.class)
                .alias(ChangesetMapping.class, "chm")
                .alias(RepositoryToChangesetMapping.class, "rtchm")
                .join(RepositoryToChangesetMapping.class, "chm.ID = rtchm." + RepositoryToChangesetMapping.CHANGESET_ID)
                .where("rtchm." + RepositoryToChangesetMapping.REPOSITORY_ID + " = ? and chm."+ChangesetMapping.SMARTCOMMIT_AVAILABLE+" = ? " , repositoryId, Boolean.TRUE)
                .order(ChangesetMapping.DATE + " DESC");
    }

    /**
     * Returns the distinct set of JIRA project keys referenced by changesets
     * of the given repository, via a streamed projection on the
     * issue-to-changeset table.
     */
    @Override
    public Set<String> findReferencedProjects(int repositoryId)
    {
        Query query = Query.select(IssueToChangesetMapping.PROJECT_KEY).distinct()
                .alias(ProjectKey.class, "pk")
                .alias(ChangesetMapping.class, "chm")
                .alias(RepositoryToChangesetMapping.class, "rtchm")
                .join(ChangesetMapping.class, "chm.ID = pk." + IssueToChangesetMapping.CHANGESET_ID)
                .join(RepositoryToChangesetMapping.class, "chm.ID = rtchm." + RepositoryToChangesetMapping.CHANGESET_ID)
                .where("rtchm." + RepositoryToChangesetMapping.REPOSITORY_ID + " = ?", repositoryId)
                .order(IssueToChangesetMapping.PROJECT_KEY);
        final Set<String> projectKeys = new HashSet<String>();
        activeObjects.stream(ProjectKey.class, query, new EntityStreamCallback<ProjectKey, String>()
        {
            @Override
            public void onRowRead(ProjectKey mapping)
            {
                projectKeys.add(mapping.getProjectKey());
            }
        });
        return projectKeys;
    }

    // Lightweight projection entity over the IssueToChangeset table, used only
    // to stream the PROJECT_KEY column in findReferencedProjects.
    // NOTE(review): setProjectKey() declares no parameter — looks like a
    // vestigial accessor required by the RawEntity contract; verify it is
    // never invoked.
    @Table("IssueToChangeset")
    static interface ProjectKey extends RawEntity<String>
    {
        @PrimaryKey(IssueToChangesetMapping.PROJECT_KEY)
        String getProjectKey();
        void setProjectKey();
    }

    /**
     * Sets the smartcommit-available flag on one changeset record.
     * NOTE(review): the entity is loaded and mutated outside the transaction;
     * only save() runs inside it — verify this is intended.
     */
    @Override
    public void markSmartcommitAvailability(int id, boolean available)
    {
        final ChangesetMapping changesetMapping = activeObjects.get(ChangesetMapping.class, id);
        changesetMapping.setSmartcommitAvailable(available);
        activeObjects.executeInTransaction(new TransactionCallback<Void>()
        {
            @Override
            public Void doInTransaction()
            {
                changesetMapping.save();
                return null;
            }
        });
    }
}
package com.atlassian.jira.plugins.dvcs.dao.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.atlassian.jira.plugins.dvcs.dao.ChangesetDao;
import com.atlassian.jira.plugins.dvcs.util.ActiveObjectsUtils;
import net.java.ao.EntityStreamCallback;
import net.java.ao.Query;
import net.java.ao.RawEntity;
import net.java.ao.schema.PrimaryKey;
import net.java.ao.schema.Table;
import org.apache.commons.lang.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.atlassian.activeobjects.external.ActiveObjects;
import com.atlassian.jira.plugins.dvcs.activeobjects.QueryHelper;
import com.atlassian.jira.plugins.dvcs.activeobjects.v3.ChangesetMapping;
import com.atlassian.jira.plugins.dvcs.activeobjects.v3.IssueToChangesetMapping;
import com.atlassian.jira.plugins.dvcs.activeobjects.v3.RepositoryToChangesetMapping;
import com.atlassian.jira.plugins.dvcs.dao.impl.transform.ChangesetTransformer;
import com.atlassian.jira.plugins.dvcs.model.Changeset;
import com.atlassian.jira.plugins.dvcs.model.ChangesetFile;
import com.atlassian.jira.plugins.dvcs.model.GlobalFilter;
import com.atlassian.jira.util.json.JSONArray;
import com.atlassian.jira.util.json.JSONException;
import com.atlassian.jira.util.json.JSONObject;
import com.atlassian.sal.api.transaction.TransactionCallback;
/**
 * {@link ChangesetDao} implementation backed by ActiveObjects.
 * This variant uses a {@link QueryHelper} for dialect-safe quoting of
 * identifiers in raw sub-selects, and matches changesets by raw node
 * (falling back to a short-node prefix match for legacy records).
 */
public class ChangesetDaoImpl implements ChangesetDao
{
    private static final Logger log = LoggerFactory.getLogger(ChangesetDaoImpl.class);

    private final ActiveObjects activeObjects;

    // Converts ChangesetMapping records into Changeset model objects.
    private final ChangesetTransformer transformer = new ChangesetTransformer();

    // Supplies database-dialect-aware SQL identifier quoting.
    private QueryHelper queryHelper;

    public ChangesetDaoImpl(ActiveObjects activeObjects, QueryHelper queryHelper)
    {
        this.activeObjects = activeObjects;
        this.queryHelper = queryHelper;
    }

    // Transforms a single mapping; the transformer may return several
    // Changesets for one mapping (presumably one per associated repository,
    // see filterByRepository() — verify against ChangesetTransformer).
    private List<Changeset> transform(ChangesetMapping changesetMapping)
    {
        return transformer.transform(changesetMapping);
    }

    // Flattens the transformation of a list of mappings into one list.
    private List<Changeset> transform(List<ChangesetMapping> changesetMappings)
    {
        List<Changeset> changesets = new ArrayList<Changeset>();
        for (ChangesetMapping changesetMapping : changesetMappings)
        {
            changesets.addAll(transform(changesetMapping));
        }
        return changesets;
    }

    /**
     * Removes all changeset data belonging to the given repository:
     * first the repo-changeset links, then issue-changeset links that no
     * longer point at a linked changeset, then the orphaned changesets.
     * Identifier quoting in the sub-selects is delegated to queryHelper.
     */
    @Override
    public void removeAllInRepository(final int repositoryId)
    {
        activeObjects.executeInTransaction(new TransactionCallback<Object>()
        {
            @Override
            public Object doInTransaction()
            {
                // todo: transaction: plugin use SalTransactionManager and there is empty implementation of TransactionSynchronisationManager.
                // todo: Therefore there are only entityCache transactions. No DB transactions.
                // delete association repo - changesets
                Query query = Query.select().where(RepositoryToChangesetMapping.REPOSITORY_ID + " = ?", repositoryId);
                log.debug("deleting repo - changesets associations from RepoToChangeset with id = [ {} ]", new String[]{String.valueOf(repositoryId)});
                ActiveObjectsUtils.delete(activeObjects, RepositoryToChangesetMapping.class, query);
                // delete association issues - changeset
                query = Query.select().where(
                        IssueToChangesetMapping.CHANGESET_ID + " not in " +
                        "(select " + queryHelper.getSqlColumnName(RepositoryToChangesetMapping.CHANGESET_ID) + " from " + queryHelper.getSqlTableName(RepositoryToChangesetMapping.TABLE_NAME) + ")");
                log.debug("deleting orphaned issue-changeset associations");
                ActiveObjectsUtils.delete(activeObjects, IssueToChangesetMapping.class, query);
                // delete orphaned changesets
                query = Query.select().where(
                        "ID not in " +
                        "(select " + queryHelper.getSqlColumnName(RepositoryToChangesetMapping.CHANGESET_ID) + " from " + queryHelper.getSqlTableName(RepositoryToChangesetMapping.TABLE_NAME) + ")");
                log.debug("deleting orphaned changesets");
                ActiveObjectsUtils.delete(activeObjects, ChangesetMapping.class, query);
                return null;
            }
        });
    }

    /**
     * Creates the changeset record if it does not yet exist (matched by
     * raw node / short node), then (re-)associates it with its repository
     * and the extracted issue keys. Sets the generated id back onto the
     * passed-in model object.
     */
    @Override
    public Changeset create(final Changeset changeset, final Set<String> extractedIssues)
    {
        ChangesetMapping changesetMapping = activeObjects.executeInTransaction(new TransactionCallback<ChangesetMapping>()
        {
            @Override
            public ChangesetMapping doInTransaction()
            {
                ChangesetMapping chm = getChangesetMapping(changeset);
                if (chm == null)
                {
                    chm = activeObjects.create(ChangesetMapping.class);
                    fillProperties(changeset, chm);
                    chm.save();
                }
                associateRepositoryToChangeset(chm, changeset.getRepositoryId());
                if (extractedIssues != null)
                {
                    associateIssuesToChangeset(chm, extractedIssues);
                }
                return chm;
            }
        });
        changeset.setId(changesetMapping.getID());
        return changeset;
    }

    /**
     * Updates an existing changeset record (matched by raw node / short
     * node); logs a warning and changes nothing if no record is found.
     */
    @Override
    public Changeset update(final Changeset changeset)
    {
        activeObjects.executeInTransaction(new TransactionCallback<ChangesetMapping>()
        {
            @Override
            public ChangesetMapping doInTransaction()
            {
                ChangesetMapping chm = getChangesetMapping(changeset);
                if (chm != null)
                {
                    fillProperties(changeset, chm);
                    chm.save();
                } else
                {
                    log.warn("Changest with node {} is not exists.", changeset.getNode());
                }
                return chm;
            }
        });
        return changeset;
    }

    // Looks up the stored record for this changeset, coping with historical
    // inconsistencies in how NODE/RAW_NODE were populated (see inline notes).
    // NOTE(review): substring(0, 12) assumes the node string has at least 12
    // characters — verify for all repository types.
    private ChangesetMapping getChangesetMapping(Changeset changeset)
    {
        // A Query is little bit more complicated, but:
        // 1. previous implementation did not properly fill RAW_NODE, in some cases it is null, in some other cases it is empty string
        String hasRawNode = "( " + ChangesetMapping.RAW_NODE + " is not null AND " + ChangesetMapping.RAW_NODE + " != '') ";
        // 2. Latest implementation is using full RAW_NODE, but not all records contains it!
        String matchRawNode = ChangesetMapping.RAW_NODE + " = ? ";
        // 3. Previous implementation has used NODE, but it is mix in some cases it is short version, in some cases it is full version
        String matchNode = ChangesetMapping.NODE + " like ? ";
        String shortNode = changeset.getNode().substring(0, 12) + "%";
        ChangesetMapping[] mappings = activeObjects.find(ChangesetMapping.class, "(" + hasRawNode + " AND " + matchRawNode + " ) OR ( NOT "
                + hasRawNode + " AND " + matchNode + " ) ", changeset.getRawNode(), shortNode);
        if (mappings.length > 1)
        {
            log.warn("More changesets with same Node. Same changesets count: {}, Node: {}, Repository: {}", new Object[] { mappings.length,
                    changeset.getNode(), changeset.getRepositoryId() });
        }
        return (ArrayUtils.isNotEmpty(mappings)) ? mappings[0] : null;
    }

    /**
     * Copies all model fields onto the AO record, serialising parents and
     * file details to JSON. Parents data longer than 255 chars is replaced
     * by the TOO_MANY_PARENTS marker. Saves the record at the end
     * (callers also call save(); the second save is redundant but harmless).
     */
    public void fillProperties(Changeset changeset, ChangesetMapping chm)
    {
        // we need to remove null characters '\u0000' because PostgreSQL cannot store String values with such
        // characters
        // todo: remove NULL Chars before call setters
        chm.setNode(changeset.getNode());
        chm.setRawAuthor(changeset.getRawAuthor());
        chm.setAuthor(changeset.getAuthor());
        chm.setDate(changeset.getDate());
        chm.setRawNode(changeset.getRawNode());
        chm.setBranch(changeset.getBranch());
        chm.setMessage(changeset.getMessage());
        chm.setAuthorEmail(changeset.getAuthorEmail());
        chm.setSmartcommitAvailable(changeset.isSmartcommitAvaliable());
        JSONArray parentsJson = new JSONArray();
        for (String parent : changeset.getParents())
        {
            parentsJson.put(parent);
        }
        String parentsData = parentsJson.toString();
        if (parentsData.length() > 255)
        {
            parentsData = ChangesetMapping.TOO_MANY_PARENTS;
        }
        chm.setParentsData(parentsData);
        JSONObject filesDataJson = new JSONObject();
        JSONArray filesJson = new JSONArray();
        try
        {
            List<ChangesetFile> files = changeset.getFiles();
            int count = changeset.getAllFileCount();
            filesDataJson.put("count", count);
            // Only the first MAX_VISIBLE_FILES files are serialised in detail.
            for (int i = 0; i < Math.min(count, Changeset.MAX_VISIBLE_FILES); i++)
            {
                ChangesetFile changesetFile = files.get(i);
                JSONObject fileJson = new JSONObject();
                fileJson.put("filename", changesetFile.getFile());
                fileJson.put("status", changesetFile.getFileAction().getAction());
                fileJson.put("additions", changesetFile.getAdditions());
                fileJson.put("deletions", changesetFile.getDeletions());
                filesJson.put(fileJson);
            }
            filesDataJson.put("files", filesJson);
            chm.setFilesData(filesDataJson.toString());
        } catch (JSONException e)
        {
            log.error("Creating files JSON failed!", e);
        }
        chm.setVersion(ChangesetMapping.LATEST_VERSION);
        chm.save();
    }

    // Picks the changeset belonging to the given repository out of the
    // transformer result, or null if none matches.
    private Changeset filterByRepository(List<Changeset> changesets, int repositoryId)
    {
        for (Changeset changeset : changesets)
        {
            if (changeset.getRepositoryId() == repositoryId)
            {
                return changeset;
            }
        }
        return null;
    }

    // Replaces all issue associations of the mapping with the given issue keys.
    private void associateIssuesToChangeset(ChangesetMapping changesetMapping, Set<String> extractedIssues)
    {
        // remove all assoc issues-changeset
        Query query = Query.select().where(IssueToChangesetMapping.CHANGESET_ID + " = ? ", changesetMapping);
        ActiveObjectsUtils.delete(activeObjects, IssueToChangesetMapping.class, query);
        // insert all
        for (String extractedIssue : extractedIssues)
        {
            final Map<String, Object> map = new MapRemovingNullCharacterFromStringValues();
            map.put(IssueToChangesetMapping.ISSUE_KEY, extractedIssue);
            map.put(IssueToChangesetMapping.PROJECT_KEY, parseProjectKey(extractedIssue));
            map.put(IssueToChangesetMapping.CHANGESET_ID, changesetMapping.getID());
            activeObjects.create(IssueToChangesetMapping.class, map);
        }
    }

    // Creates the repo-changeset link if it does not already exist (idempotent).
    private void associateRepositoryToChangeset(ChangesetMapping changesetMapping, int repositoryId)
    {
        RepositoryToChangesetMapping[] mappings = activeObjects.find(RepositoryToChangesetMapping.class,
                RepositoryToChangesetMapping.REPOSITORY_ID + " = ? and " +
                RepositoryToChangesetMapping.CHANGESET_ID + " = ? ",
                repositoryId,
                changesetMapping);
        if (ArrayUtils.isEmpty(mappings))
        {
            final Map<String, Object> map = new MapRemovingNullCharacterFromStringValues();
            map.put(RepositoryToChangesetMapping.REPOSITORY_ID, repositoryId);
            map.put(RepositoryToChangesetMapping.CHANGESET_ID, changesetMapping);
            activeObjects.create(RepositoryToChangesetMapping.class, map);
        }
    }

    // Extracts the project key prefix from an issue key, e.g. "ABC-1" -> "ABC".
    // NOTE(review): throws StringIndexOutOfBoundsException if the key contains
    // no '-'; callers appear to pass only extracted issue keys — verify.
    public static String parseProjectKey(String issueKey)
    {
        return issueKey.substring(0, issueKey.indexOf("-"));
    }

    /**
     * Finds a changeset by node within one repository, or null if absent.
     */
    @Override
    public Changeset getByNode(final int repositoryId, final String changesetNode)
    {
        final ChangesetMapping changesetMapping = activeObjects.executeInTransaction(new TransactionCallback<ChangesetMapping>()
        {
            @Override
            public ChangesetMapping doInTransaction()
            {
                Query query = Query.select()
                        .alias(ChangesetMapping.class, "chm")
                        .alias(RepositoryToChangesetMapping.class, "rtchm")
                        .join(RepositoryToChangesetMapping.class, "chm.ID = rtchm." + RepositoryToChangesetMapping.CHANGESET_ID)
                        .where("chm." + ChangesetMapping.NODE + " = ? AND rtchm." + RepositoryToChangesetMapping.REPOSITORY_ID + " = ? ", changesetNode, repositoryId);
                ChangesetMapping[] mappings = activeObjects.find(ChangesetMapping.class, query);
                return mappings.length != 0 ? mappings[0] : null;
            }
        });
        final List<Changeset> changesets = transform(changesetMapping);
        return changesets != null ? filterByRepository(changesets, repositoryId) : null;
    }

    /**
     * Returns all changesets linked to any of the given issue keys, ordered
     * by date. The issue filter is expressed through a GlobalFilter so the
     * where-clause builder can be reused.
     */
    @Override
    public List<Changeset> getByIssueKey(final Iterable<String> issueKeys)
    {
        final GlobalFilter gf = new GlobalFilter();
        gf.setInIssues(issueKeys);
        final String baseWhereClause = new GlobalFilterQueryWhereClauseBuilder(gf).build();
        final List<ChangesetMapping> changesetMappings = activeObjects.executeInTransaction(new TransactionCallback<List<ChangesetMapping>>()
        {
            @Override
            public List<ChangesetMapping> doInTransaction()
            {
                ChangesetMapping[] mappings = activeObjects.find(ChangesetMapping.class,
                        Query.select()
                                .alias(ChangesetMapping.class, "CHANGESET")
                                .alias(IssueToChangesetMapping.class, "ISSUE")
                                .join(IssueToChangesetMapping.class, "CHANGESET.ID = ISSUE." + IssueToChangesetMapping.CHANGESET_ID)
                                .where(baseWhereClause)
                                .order(ChangesetMapping.DATE));
                return Arrays.asList(mappings);
            }
        });
        return transform(changesetMappings);
    }

    /**
     * Returns up to {@code maxResults} newest changesets matching the global
     * filter; empty list for non-positive maxResults.
     */
    @Override
    public List<Changeset> getLatestChangesets(final int maxResults, final GlobalFilter gf)
    {
        if (maxResults <= 0)
        {
            return Collections.emptyList();
        }
        final List<ChangesetMapping> changesetMappings = activeObjects.executeInTransaction(new TransactionCallback<List<ChangesetMapping>>()
        {
            @Override
            public List<ChangesetMapping> doInTransaction()
            {
                String baseWhereClause = new GlobalFilterQueryWhereClauseBuilder(gf).build();
                Query query = Query.select()
                        .alias(ChangesetMapping.class, "CHANGESET")
                        .alias(IssueToChangesetMapping.class, "ISSUE")
                        .join(IssueToChangesetMapping.class, "CHANGESET.ID = ISSUE." + IssueToChangesetMapping.CHANGESET_ID)
                        .where(baseWhereClause).limit(maxResults).order(ChangesetMapping.DATE + " DESC");
                ChangesetMapping[] mappings = activeObjects.find(ChangesetMapping.class, query);
                return Arrays.asList(mappings);
            }
        });
        return transform(changesetMappings);
    }

    /**
     * Streams all smartcommit-enabled changesets of a repository (newest
     * first) through the given closure. Unlike the mapping-level variant,
     * each row is transformed to a Changeset model object and filtered to
     * the requested repository before being passed to the closure.
     */
    @Override
    public void forEachLatestChangesetsAvailableForSmartcommitDo(final int repositoryId, final ForEachChangesetClosure closure)
    {
        Query query = createLatestChangesetsAvailableForSmartcommitQuery(repositoryId);
        activeObjects.stream(ChangesetMapping.class, query, new EntityStreamCallback<ChangesetMapping, Integer>()
        {
            @Override
            public void onRowRead(ChangesetMapping mapping)
            {
                final List<Changeset> changesets = transform(mapping);
                Changeset changeset = changesets != null ? filterByRepository(changesets, repositoryId) : null;
                closure.execute(changeset);
            }
        });
    }

    // Builds the query used by forEachLatestChangesetsAvailableForSmartcommitDo.
    private Query createLatestChangesetsAvailableForSmartcommitQuery(int repositoryId)
    {
        return Query.select("*")
                .from(ChangesetMapping.class)
                .alias(ChangesetMapping.class, "chm")
                .alias(RepositoryToChangesetMapping.class, "rtchm")
                .join(RepositoryToChangesetMapping.class, "chm.ID = rtchm." + RepositoryToChangesetMapping.CHANGESET_ID)
                .where("rtchm." + RepositoryToChangesetMapping.REPOSITORY_ID + " = ? and chm."+ChangesetMapping.SMARTCOMMIT_AVAILABLE+" = ? " , repositoryId, Boolean.TRUE)
                .order(ChangesetMapping.DATE + " DESC");
    }

    /**
     * Returns the distinct set of JIRA project keys referenced by changesets
     * of the given repository, via a streamed projection on the
     * issue-to-changeset table.
     */
    @Override
    public Set<String> findReferencedProjects(int repositoryId)
    {
        Query query = Query.select(IssueToChangesetMapping.PROJECT_KEY).distinct()
                .alias(ProjectKey.class, "pk")
                .alias(ChangesetMapping.class, "chm")
                .alias(RepositoryToChangesetMapping.class, "rtchm")
                .join(ChangesetMapping.class, "chm.ID = pk." + IssueToChangesetMapping.CHANGESET_ID)
                .join(RepositoryToChangesetMapping.class, "chm.ID = rtchm." + RepositoryToChangesetMapping.CHANGESET_ID)
                .where("rtchm." + RepositoryToChangesetMapping.REPOSITORY_ID + " = ?", repositoryId)
                .order(IssueToChangesetMapping.PROJECT_KEY);
        final Set<String> projectKeys = new HashSet<String>();
        activeObjects.stream(ProjectKey.class, query, new EntityStreamCallback<ProjectKey, String>()
        {
            @Override
            public void onRowRead(ProjectKey mapping)
            {
                projectKeys.add(mapping.getProjectKey());
            }
        });
        return projectKeys;
    }

    // Lightweight projection entity over the IssueToChangeset table, used only
    // to stream the PROJECT_KEY column in findReferencedProjects.
    // NOTE(review): setProjectKey() declares no parameter — looks like a
    // vestigial accessor required by the RawEntity contract; verify it is
    // never invoked.
    @Table("IssueToChangeset")
    static interface ProjectKey extends RawEntity<String>
    {
        @PrimaryKey(IssueToChangesetMapping.PROJECT_KEY)
        String getProjectKey();
        void setProjectKey();
    }

    /**
     * Sets the smartcommit-available flag on one changeset record.
     * NOTE(review): the entity is loaded and mutated outside the transaction;
     * only save() runs inside it — verify this is intended.
     */
    @Override
    public void markSmartcommitAvailability(int id, boolean available)
    {
        final ChangesetMapping changesetMapping = activeObjects.get(ChangesetMapping.class, id);
        changesetMapping.setSmartcommitAvailable(available);
        activeObjects.executeInTransaction(new TransactionCallback<Void>()
        {
            @Override
            public Void doInTransaction()
            {
                changesetMapping.save();
                return null;
            }
        });
    }

    /**
     * Counts how many changesets are associated with the given repository
     * (counts repo-changeset link rows, not distinct changesets).
     */
    @Override
    public int getChangesetCount(final int repositoryId)
    {
        return activeObjects.executeInTransaction(new TransactionCallback<Integer>()
        {
            @Override
            public Integer doInTransaction()
            {
                Query query = Query.select().where(RepositoryToChangesetMapping.REPOSITORY_ID + " = ?", repositoryId);
                return activeObjects.count(RepositoryToChangesetMapping.class, query);
            }
        });
    }
}
package dml.runtime.matrix.io;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import java.util.Map.Entry;
import dml.runtime.functionobjects.Builtin;
import dml.runtime.functionobjects.Multiply;
import dml.runtime.functionobjects.Plus;
import dml.runtime.functionobjects.SwapIndex;
import dml.runtime.instructions.CPInstructions.KahanObject;
import dml.runtime.instructions.MRInstructions.SelectInstruction.IndexRange;
import dml.runtime.matrix.operators.AggregateBinaryOperator;
import dml.runtime.matrix.operators.AggregateOperator;
import dml.runtime.matrix.operators.AggregateUnaryOperator;
import dml.runtime.matrix.operators.BinaryOperator;
import dml.runtime.matrix.operators.LeftScalarOperator;
import dml.runtime.matrix.operators.Operator;
import dml.runtime.matrix.operators.ReorgOperator;
import dml.runtime.matrix.operators.ScalarOperator;
import dml.runtime.matrix.operators.UnaryOperator;
import dml.runtime.util.UtilFunctions;
import dml.utils.DMLRuntimeException;
import dml.utils.DMLUnsupportedOperationException;
/**
 * Matrix block supporting both dense (flat double array) and sparse
 * (per-row SparseRow) storage; the representation is chosen by estimated
 * density relative to SPARCITY_TURN_POINT.
 */
public class MatrixBlockDSM extends MatrixValue{
    //protected static final Log LOG = LogFactory.getLog(MatrixBlock1D.class);

    private int rlen;  // number of rows in this block
    private int clen;  // number of columns in this block
    // largest row/column index observed; meaningful for sparse blocks only
    // (see getMaxRow/getMaxColumn)
    private int maxrow, maxcolumn;
    private boolean sparse;  // true -> values in sparseRows, false -> denseBlock
    private double[] denseBlock=null;  // dense storage, allocated lazily
    private int nonZeros=0;  // tracked count of nonzero entries
    // density threshold below which the sparse representation is preferred
    public static final double SPARCITY_TURN_POINT=0.4;
    private SparseRow[] sparseRows=null;  // per-row sparse storage, allocated lazily
/**
 * Decides whether the result of multiplying an n x k matrix by a k x m
 * matrix should be sparse. With per-cell densities p = nz1/(n*k) and
 * q = nz2/(k*m), a result cell is nonzero with probability
 * 1 - (1 - p*q)^k; sparse is chosen when that estimate is below the
 * turn point.
 */
public static boolean checkSparcityOnAggBinary(MatrixBlockDSM m1, MatrixBlockDSM m2)
{
    double n = m1.getNumRows();
    double k = m1.getNumColumns();
    double m = m2.getNumColumns();
    double nz1 = m1.getNonZeros();
    double nz2 = m2.getNonZeros();
    double pq = nz1*nz2/n/k/k/m;
    double estimated = 1-Math.pow(1-pq, k);
    return estimated < SPARCITY_TURN_POINT;
}
/**
 * Decides whether the result of a cell-wise binary operation should be sparse.
 * Uses the union estimate 1-(1-p)(1-q) of the two input densities.
 */
private static boolean checkSparcityOnBinary(MatrixBlockDSM m1, MatrixBlockDSM m2)
{
    double rows = m1.getNumRows();
    double cols = m1.getNumColumns();
    double densityLeft = m1.getNonZeros() / rows / cols;
    double densityRight = m2.getNonZeros() / rows / cols;
    // probability that a cell is non-zero in at least one input
    double unionDensity = 1 - (1 - densityLeft) * (1 - densityRight);
    return unionDensity < SPARCITY_TURN_POINT;
}
/** True when m's actual density (nonZeros / (rows*cols)) is below the sparse threshold. */
private static boolean checkRealSparcity(MatrixBlockDSM m)
{
    double density = (double)m.getNonZeros() / (double)m.getNumRows() / (double)m.getNumColumns();
    return density < SPARCITY_TURN_POINT;
}
// Creates an empty 0x0 block in sparse format.
public MatrixBlockDSM()
{
rlen=0;
clen=0;
sparse=true;
nonZeros=0;
maxrow = maxcolumn = 0;
}
// Creates an empty rl x cl block; sp selects the initial storage format.
public MatrixBlockDSM(int rl, int cl, boolean sp)
{
rlen=rl;
clen=cl;
sparse=sp;
nonZeros=0;
maxrow = maxcolumn = 0;
}
// Copy constructor: deep-copies dimensions, format, and cell data from 'that'.
public MatrixBlockDSM(MatrixBlockDSM that)
{
this.copy(that);
}
// Declared number of rows of this block.
public int getNumRows()
{
return rlen;
}
// Declared number of columns of this block.
public int getNumColumns()
{
return clen;
}
// Return the maximum row encountered WITHIN the current block
// (dense blocks are considered fully populated, so the declared size is returned).
public int getMaxRow() {
if (!sparse)
return getNumRows();
else {
return maxrow;
}
}
// Return the maximum column encountered WITHIN the current block
// (dense blocks are considered fully populated, so the declared size is returned).
public int getMaxColumn() {
if (!sparse)
return getNumColumns();
else {
return maxcolumn;
}
}
// Records the largest row index observed (sparse tracking only; does not resize storage).
public void setMaxRow(int _r) {
maxrow = _r;
}
// Records the largest column index observed (sparse tracking only; does not resize storage).
public void setMaxColumn(int _c) {
maxcolumn = _c;
}
// NOTE: setNumRows() and setNumColumns() are used only in tertiaryInstruction (for contingency tables)
// They change the declared shape WITHOUT touching the underlying storage.
public void setNumRows(int _r) {
rlen = _r;
}
public void setNumColumns(int _c) {
clen = _c;
}
/**
 * Debug helper: prints the storage-format flag, the non-zero count (dense case
 * only), and then every cell value, tab separated, to stdout.
 */
public void print()
{
    // fixed: the label was corrupted ("spathanks" + "rse? = ") — restore "sparse? = "
    System.out.println("sparse? = "+sparse);
    if(!sparse)
        System.out.println("nonzeros = "+nonZeros);
    for(int i=0; i<rlen; i++)
    {
        for(int j=0; j<clen; j++)
        {
            System.out.print(getValue(i, j)+"\t");
        }
        System.out.println();
    }
}
// True when the block currently stores its data in sparseRows rather than denseBlock.
public boolean isInSparseFormat()
{
return sparse;
}
/** Clears every allocated sparse row in place; row objects are kept for reuse. */
private void resetSparse()
{
    if(sparseRows==null)
        return;
    for(SparseRow row : sparseRows)
    {
        if(row!=null)
            row.reset();
    }
}
/**
 * Clears the block's contents in its current representation without changing
 * dimensions. A dense buffer too small for rlen*clen is dropped and will be
 * reallocated lazily on the next write.
 */
public void reset()
{
    if(sparse)
    {
        resetSparse();
    }
    else if(denseBlock!=null)
    {
        int limit=rlen*clen;
        if(denseBlock.length<limit)
            denseBlock=null;
        else
            Arrays.fill(denseBlock, 0, limit, 0);
    }
    nonZeros=0;
}
// Clears the block and sets new dimensions, keeping the current storage format.
public void reset(int rl, int cl) {
rlen=rl;
clen=cl;
nonZeros=0;
reset();
}
// Clears the block, sets new dimensions, and switches to the given storage format.
public void reset(int rl, int cl, boolean sp)
{
sparse=sp;
reset(rl, cl);
}
/**
 * Reinitializes this block as a rl x cl dense block with every cell set to v.
 * A zero fill degenerates to a plain reset (empty block).
 */
public void resetDenseWithValue(int rl, int cl, double v) {
    rlen=rl;
    clen=cl;
    sparse=false;
    if(v==0)
    {
        reset();
        return;
    }
    int cells=rlen*clen;
    if(denseBlock==null || denseBlock.length<cells)
        denseBlock=new double[cells];
    Arrays.fill(denseBlock, 0, cells, v);
    nonZeros=cells; // every cell is the (non-zero) constant v
}
// Re-examines the current density and converts between representations when the
// block is on the wrong side of the SPARCITY_TURN_POINT threshold.
public void examSparsity()
{
if(sparse)
{
if(nonZeros>rlen*clen*SPARCITY_TURN_POINT)
sparseToDense();
}else
{
if(nonZeros<rlen*clen*SPARCITY_TURN_POINT)
denseToSparse();
}
}
// Copies 'that' (sparse) into this block (sparse). Assumes this.rlen/clen were
// already set to match 'that' by the caller (copy()).
private void copySparseToSparse(MatrixBlockDSM that)
{
this.nonZeros=that.nonZeros;
if(that.sparseRows==null)
{
// source has no allocated rows: just clear ours
resetSparse();
return;
}
adjustSparseRows(that.rlen);
// NOTE(review): if that.sparseRows.length exceeds this.sparseRows.length the loop
// below would index out of bounds — appears to assume equal row-array lengths; verify.
for(int i=0; i<that.sparseRows.length; i++)
{
if(that.sparseRows[i]!=null)
{
if(sparseRows[i]==null)
sparseRows[i]=new SparseRow(that.sparseRows[i]);
else
sparseRows[i].copy(that.sparseRows[i]);
}else if(this.sparseRows[i]!=null)
this.sparseRows[i].reset();
}
}
/** Copies 'that' (dense) into this block (dense); dimensions already match. */
private void copyDenseToDense(MatrixBlockDSM that)
{
    this.nonZeros=that.nonZeros;
    if(that.denseBlock==null)
    {
        // source is an all-zero block: just clear our buffer if present
        if(denseBlock!=null)
            Arrays.fill(denseBlock, 0);
        return;
    }
    int cells=rlen*clen;
    if(denseBlock==null || denseBlock.length<cells)
        denseBlock=new double[cells];
    System.arraycopy(that.denseBlock, 0, this.denseBlock, 0, cells);
}
// Copies 'that' (sparse) into this block (dense): zero-fills the dense buffer,
// then scatters the sparse entries into their row-major positions.
private void copySparseToDense(MatrixBlockDSM that)
{
this.nonZeros=that.nonZeros;
if(that.sparseRows==null)
{
// source is all zeros
if(denseBlock!=null)
Arrays.fill(denseBlock, 0);
return;
}
int limit=rlen*clen;
if(denseBlock==null || denseBlock.length<limit)
denseBlock=new double[limit];
else
Arrays.fill(denseBlock, 0, limit, 0);
int start=0;
for(int r=0; r<that.sparseRows.length; r++, start+=clen)
{
if(that.sparseRows[r]==null) continue;
double[] values=that.sparseRows[r].getValueContainer();
int[] cols=that.sparseRows[r].getIndexContainer();
for(int i=0; i<that.sparseRows[r].size(); i++)
{
denseBlock[start+cols[i]]=values[i];
}
}
}
// Copies 'that' (dense) into this block (sparse), appending only non-zero cells.
private void copyDenseToSparse(MatrixBlockDSM that)
{
this.nonZeros=that.nonZeros;
if(that.denseBlock==null)
{
// source is all zeros
resetSparse();
return;
}
adjustSparseRows(rlen-1);
int n=0; // linear (row-major) read position in that.denseBlock
for(int r=0; r<rlen; r++)
{
if(sparseRows[r]==null)
sparseRows[r]=new SparseRow();
else
sparseRows[r].reset();
for(int c=0; c<clen; c++)
{
if(that.denseBlock[n]!=0)
sparseRows[r].append(c, that.denseBlock[n]);
n++;
}
}
}
// Deep-copies thatValue into this block. The target format is chosen from the
// source's ACTUAL density (checkRealSparcity), not its current format, so the
// copy may convert representations on the fly.
public void copy(MatrixValue thatValue)
{
MatrixBlockDSM that;
try {
that = checkType(thatValue);
} catch (DMLUnsupportedOperationException e) {
// interface has no throws clause; surface type mismatch as unchecked
throw new RuntimeException(e);
}
this.rlen=that.rlen;
this.clen=that.clen;
this.sparse=checkRealSparcity(that);
if(this.sparse && that.sparse)
copySparseToSparse(that);
else if(this.sparse && !that.sparse)
copyDenseToSparse(that);
else if(!this.sparse && that.sparse)
copySparseToDense(that);
else
copyDenseToDense(that);
}
/** Returns the backing row-major dense array, or null when in sparse format. */
public double[] getDenseArray()
{
    return sparse ? null : denseBlock;
}
//TODO: this function is used in many places, but may not be the right api to expose sparse cells.
/**
 * Materializes the non-zero cells as a CellIndex -> value map. Returns null when
 * the block is dense or has no sparse rows allocated.
 */
public HashMap<CellIndex, Double> getSparseMap()
{
    if(!sparse || sparseRows==null)
        return null;
    HashMap<CellIndex, Double> cells=new HashMap<CellIndex, Double>(nonZeros);
    for(int r=0; r<sparseRows.length; r++)
    {
        SparseRow row=sparseRows[r];
        if(row==null)
            continue;
        double[] vals=row.getValueContainer();
        int[] idx=row.getIndexContainer();
        for(int p=0; p<row.size(); p++)
            cells.put(new CellIndex(r, idx[p]), vals[p]);
    }
    return cells;
}
// Current count of non-zero cells (maintained incrementally by the mutators).
public int getNonZeros()
{
return nonZeros;
}
//only apply to non zero cells
// In-place scalar operation for sparse-safe operators (op(0)==0). In the sparse
// case each row is compacted in place: surviving (non-zero) results are written
// back at 'pos' and the row truncated, so entries that became zero are dropped.
public void sparseScalarOperationsInPlace(ScalarOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
if(sparse)
{
if(sparseRows==null)
return;
nonZeros=0;
for(int r=0; r<sparseRows.length; r++)
{
if(sparseRows[r]==null) continue;
double[] values=sparseRows[r].getValueContainer();
int[] cols=sparseRows[r].getIndexContainer();
int pos=0; // write cursor for the in-place compaction
for(int i=0; i<sparseRows[r].size(); i++)
{
double v=op.executeScalar(values[i]);
if(v!=0)
{
values[pos]=v;
cols[pos]=cols[i];
pos++;
nonZeros++;
}
}
sparseRows[r].truncate(pos);
}
}else
{
if(denseBlock==null)
return;
int limit=rlen*clen;
nonZeros=0;
for(int i=0; i<limit; i++)
{
denseBlock[i]=op.executeScalar(denseBlock[i]);
if(denseBlock[i]!=0)
nonZeros++;
}
}
}
//only apply to non zero cells
// In-place unary operation for sparse-safe operators (op(0)==0). Mirrors
// sparseScalarOperationsInPlace: sparse rows are compacted in place and
// truncated; nonZeros is recounted from scratch.
public void sparseUnaryOperationsInPlace(UnaryOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
if(sparse)
{
if(sparseRows==null)
return;
nonZeros=0;
for(int r=0; r<sparseRows.length; r++)
{
if(sparseRows[r]==null) continue;
double[] values=sparseRows[r].getValueContainer();
int[] cols=sparseRows[r].getIndexContainer();
int pos=0; // write cursor for the in-place compaction
for(int i=0; i<sparseRows[r].size(); i++)
{
double v=op.fn.execute(values[i]);
if(v!=0)
{
values[pos]=v;
cols[pos]=cols[i];
pos++;
nonZeros++;
}
}
sparseRows[r].truncate(pos);
}
}else
{
if(denseBlock==null)
return;
int limit=rlen*clen;
nonZeros=0;
for(int i=0; i<limit; i++)
{
denseBlock[i]=op.fn.execute(denseBlock[i]);
if(denseBlock[i]!=0)
nonZeros++;
}
}
}
/**
 * Applies op to EVERY cell (including zeros) in place. Used for operators that
 * are not sparse-safe, i.e. op(0) may be non-zero.
 */
public void denseScalarOperationsInPlace(ScalarOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
    for(int r=0; r<rlen; r++)
    {
        for(int c=0; c<clen; c++)
        {
            double updated=op.executeScalar(getValue(r, c));
            setValue(r, c, updated);
        }
    }
}
/**
 * Applies the unary op to EVERY cell (including zeros) in place. Used for
 * operators that are not sparse-safe, i.e. op(0) may be non-zero.
 */
public void denseUnaryOperationsInPlace(UnaryOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
    for(int r=0; r<rlen; r++)
    {
        for(int c=0; c<clen; c++)
        {
            double updated=op.fn.execute(getValue(r, c));
            setValue(r, c, updated);
        }
    }
}
/** Downcasts to MatrixBlockDSM (null passes through); rejects other MatrixValue types. */
private static MatrixBlockDSM checkType(MatrixValue block) throws DMLUnsupportedOperationException
{
    if(block==null || block instanceof MatrixBlockDSM)
        return (MatrixBlockDSM) block;
    throw new DMLUnsupportedOperationException("the Matrix Value is not MatrixBlockDSM!");
}
// Out-of-place scalar operation: copies this block into result, then applies op
// in place, choosing the sparse-safe or full-scan path based on op.sparseSafe.
public MatrixValue scalarOperations(ScalarOperator op, MatrixValue result)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
checkType(result);
if(result==null)
result=new MatrixBlockDSM(rlen, clen, sparse);
result.copy(this);
if(op.sparseSafe)
((MatrixBlockDSM)result).sparseScalarOperationsInPlace(op);
else
((MatrixBlockDSM)result).denseScalarOperationsInPlace(op);
// System.out.println(result);
return result;
}
/** In-place scalar operation; dispatches on whether op is sparse-safe. */
public void scalarOperationsInPlace(ScalarOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
    if(op.sparseSafe)
        sparseScalarOperationsInPlace(op);
    else
        denseScalarOperationsInPlace(op);
}
// Out-of-place unary operation: copies this block into result, then applies op
// in place, choosing the sparse-safe or full-scan path based on op.sparseSafe.
public MatrixValue unaryOperations(UnaryOperator op, MatrixValue result)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
checkType(result);
if(result==null)
result=new MatrixBlockDSM(rlen, clen, sparse);
result.copy(this);
if(op.sparseSafe)
((MatrixBlockDSM)result).sparseUnaryOperationsInPlace(op);
else
((MatrixBlockDSM)result).denseUnaryOperationsInPlace(op);
return result;
}
/** In-place unary operation; dispatches on whether op is sparse-safe. */
public void unaryOperationsInPlace(UnaryOperator op)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
    if(op.sparseSafe)
        sparseUnaryOperationsInPlace(op);
    else
        denseUnaryOperationsInPlace(op);
}
// Cell-wise binary operation over ALL cells (needed when op is not sparse-safe,
// i.e. op(0,0) may be non-zero). Result format is estimated from input densities.
private MatrixBlockDSM denseBinaryHelp(BinaryOperator op, MatrixBlockDSM that, MatrixBlockDSM result)
throws DMLRuntimeException
{
boolean resultSparse=checkSparcityOnBinary(this, that);
if(result==null)
result=new MatrixBlockDSM(rlen, clen, resultSparse);
else
result.reset(rlen, clen, resultSparse);
//double st = System.nanoTime();
double v;
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
v=op.fn.execute(this.getValue(r, c), that.getValue(r, c));
result.appendValue(r, c, v);
}
//double en = System.nanoTime();
//System.out.println("denseBinaryHelp()-new: " + (en-st)/Math.pow(10, 9) + " sec.");
return result;
}
/*
 * like a merge sort
 * Merges two sorted sparse rows (column-index order) applying op cell-wise:
 * op(v1,0) where only the left has an entry, op(0,v2) where only the right does,
 * op(v1,v2) where both do. Leftovers past either end are flushed by the append helpers.
 */
private static void mergeForSparseBinary(BinaryOperator op, double[] values1, int[] cols1, int size1,
double[] values2, int[] cols2, int size2, int resultRow, MatrixBlockDSM result)
throws DMLRuntimeException
{
int p1=0, p2=0, column;
double v;
//merge
while(p1<size1 && p2< size2)
{
if(cols1[p1]<cols2[p2])
{
v=op.fn.execute(values1[p1], 0);
column=cols1[p1];
p1++;
}else if(cols1[p1]==cols2[p2])
{
v=op.fn.execute(values1[p1], values2[p2]);
column=cols1[p1];
p1++;
p2++;
}else
{
v=op.fn.execute(0, values2[p2]);
column=cols2[p2];
p2++;
}
result.appendValue(resultRow, column, v);
}
//add left over
appendLeftForSparseBinary(op, values1, cols1, size1, p1, resultRow, result);
appendRightForSparseBinary(op, values2, cols2, size2, p2, resultRow, result);
}
/** Flushes op(v,0) for the remaining left-row entries starting at startPosition. */
private static void appendLeftForSparseBinary(BinaryOperator op, double[] values1, int[] cols1, int size1,
int startPosition, int resultRow, MatrixBlockDSM result)
throws DMLRuntimeException
{
    for(int p=startPosition; p<size1; p++)
    {
        double merged=op.fn.execute(values1[p], 0);
        result.appendValue(resultRow, cols1[p], merged);
    }
}
/** Flushes op(0,v) for the remaining right-row entries starting at startPosition. */
private static void appendRightForSparseBinary(BinaryOperator op, double[] values2, int[] cols2, int size2,
int startPosition, int resultRow, MatrixBlockDSM result) throws DMLRuntimeException
{
    for(int p=startPosition; p<size2; p++)
    {
        double merged=op.fn.execute(0, values2[p]);
        result.appendValue(resultRow, cols2[p], merged);
    }
}
/**
 * Cell-wise binary operation for sparse-safe operators (op(0,0)==0): only
 * positions that are non-zero in at least one input are visited. Falls back to
 * a full scan (skipping positions zero in both) when either input is dense.
 */
private MatrixBlockDSM sparseBinaryHelp(BinaryOperator op, MatrixBlockDSM that, MatrixBlockDSM result)
throws DMLRuntimeException
{
    boolean resultSparse=checkSparcityOnBinary(this, that);
    if(result==null)
        result=new MatrixBlockDSM(rlen, clen, resultSparse);
    else
        result.reset(rlen, clen, resultSparse);
    if(this.sparse && that.sparse)
    {
        //special case, if both matrices are all 0s, just return
        if(this.sparseRows==null && that.sparseRows==null)
            return result;
        if(result.sparse)
            result.adjustSparseRows(result.rlen-1);
        if(this.sparseRows!=null)
            this.adjustSparseRows(rlen-1);
        if(that.sparseRows!=null)
            that.adjustSparseRows(that.rlen-1);
        if(this.sparseRows!=null && that.sparseRows!=null)
        {
            // both sides have allocated rows: merge row by row
            for(int r=0; r<rlen; r++)
            {
                if(this.sparseRows[r]==null && that.sparseRows[r]==null)
                    continue;
                if(result.sparse)
                {
                    // presize the result row to the (clen-capped) sum of both input rows
                    int estimateSize=0;
                    if(this.sparseRows[r]!=null)
                        estimateSize+=this.sparseRows[r].size();
                    if(that.sparseRows[r]!=null)
                        estimateSize+=that.sparseRows[r].size();
                    estimateSize=Math.min(clen, estimateSize);
                    if(result.sparseRows[r]==null)
                        result.sparseRows[r]=new SparseRow(estimateSize);
                    else if(result.sparseRows[r].capacity()<estimateSize)
                        result.sparseRows[r].recap(estimateSize);
                }
                if(this.sparseRows[r]!=null && that.sparseRows[r]!=null)
                {
                    mergeForSparseBinary(op, this.sparseRows[r].getValueContainer(),
                            this.sparseRows[r].getIndexContainer(), this.sparseRows[r].size(),
                            that.sparseRows[r].getValueContainer(),
                            that.sparseRows[r].getIndexContainer(), that.sparseRows[r].size(), r, result);
                }else if(this.sparseRows[r]==null)
                {
                    appendRightForSparseBinary(op, that.sparseRows[r].getValueContainer(),
                            that.sparseRows[r].getIndexContainer(), that.sparseRows[r].size(), 0, r, result);
                }else
                {
                    appendLeftForSparseBinary(op, this.sparseRows[r].getValueContainer(),
                            this.sparseRows[r].getIndexContainer(), this.sparseRows[r].size(), 0, r, result);
                }
            }
        }else if(this.sparseRows==null)
        {
            // left input is all zeros: result rows come from 'that' alone
            for(int r=0; r<rlen; r++)
            {
                if(that.sparseRows[r]==null)
                    continue;
                if(result.sparse)
                {
                    if(result.sparseRows[r]==null)
                        result.sparseRows[r]=new SparseRow(that.sparseRows[r].size());
                    else if(result.sparseRows[r].capacity()<that.sparseRows[r].size())
                        result.sparseRows[r].recap(that.sparseRows[r].size());
                }
                appendRightForSparseBinary(op, that.sparseRows[r].getValueContainer(),
                        that.sparseRows[r].getIndexContainer(), that.sparseRows[r].size(), 0, r, result);
            }
        }else
        {
            // right input is all zeros: result rows come from 'this' alone
            for(int r=0; r<rlen; r++)
            {
                if(this.sparseRows[r]==null)
                    continue;
                if(result.sparse)
                {
                    if(result.sparseRows[r]==null)
                        result.sparseRows[r]=new SparseRow(this.sparseRows[r].size());
                    // BUG FIX: the capacity test dereferenced that.sparseRows[r], which is
                    // guaranteed null in this branch (NPE); compare against this row's size.
                    else if(result.sparseRows[r].capacity()<this.sparseRows[r].size())
                        result.sparseRows[r].recap(this.sparseRows[r].size());
                }
                appendLeftForSparseBinary(op, this.sparseRows[r].getValueContainer(),
                        this.sparseRows[r].getIndexContainer(), this.sparseRows[r].size(), 0, r, result);
            }
        }
    }
    else
    {
        // at least one dense input: scan every cell, skipping positions zero in both
        double thisvalue, thatvalue, resultvalue;
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                thisvalue=this.getValue(r, c);
                thatvalue=that.getValue(r, c);
                if(thisvalue==0 && thatvalue==0)
                    continue;
                resultvalue=op.fn.execute(thisvalue, thatvalue);
                result.appendValue(r, c, resultvalue);
            }
    }
    return result;
}
// Cell-wise binary operation entry point. Requires identical dimensions; picks
// the sparse-optimized path when op is sparse-safe (op(0,0)==0), else a full scan.
public MatrixValue binaryOperations(BinaryOperator op, MatrixValue thatValue, MatrixValue result)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
MatrixBlockDSM that=checkType(thatValue);
checkType(result);
if(this.rlen!=that.rlen || this.clen!=that.clen)
throw new RuntimeException("block sizes are not matched for binary " +
"cell operations: "+this.rlen+"*"+this.clen+" vs "+ that.rlen+"*"
+that.clen);
if(op.sparseSafe)
return sparseBinaryHelp(op, that, (MatrixBlockDSM)result);
else
return denseBinaryHelp(op, that, (MatrixBlockDSM)result);
}
/**
 * Folds newWithCorrection into this block with aggOp, keeping Kahan-style
 * correction terms (and running counts, for mean) in the separate 'correction'
 * block. correctionLocation encodes where newWithCorrection stores its extras:
 * 1 = extra row, 2 = extra column, 0 = correction has same shape as data,
 * 3 = two extra rows (count + correction, for mean), 4 = two extra columns.
 */
public void incrementalAggregate(AggregateOperator aggOp, MatrixValue correction,
MatrixValue newWithCorrection)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
    assert(aggOp.correctionExists);
    MatrixBlockDSM cor=checkType(correction);
    MatrixBlockDSM newWithCor=checkType(newWithCorrection);
    KahanObject buffer=new KahanObject(0, 0);
    if(aggOp.correctionLocation==1)
    {
        // correction kept in row 0 of 'cor'; incoming correction in row r+1 of newWithCor
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.getValue(r, c);
                buffer._correction=cor.getValue(0, c);
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.getValue(r, c),
                        newWithCor.getValue(r+1, c));
                setValue(r, c, buffer._sum);
                cor.setValue(0, c, buffer._correction);
            }
    }else if(aggOp.correctionLocation==2)
    {
        // correction kept in column 0 of 'cor'; incoming correction in column c+1
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.getValue(r, c);
                buffer._correction=cor.getValue(r, 0); // fixed: stray empty statement (';;') removed
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.getValue(r, c),
                        newWithCor.getValue(r, c+1));
                setValue(r, c, buffer._sum);
                cor.setValue(r, 0, buffer._correction);
            }
    }else if(aggOp.correctionLocation==0)
    {
        // per-cell correction: 'cor' mirrors the data block's shape
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.getValue(r, c);
                buffer._correction=cor.getValue(r, c);
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.getValue(r, c));
                setValue(r, c, buffer._sum);
                cor.setValue(r, c, buffer._correction);
            }
    }else if(aggOp.correctionLocation==3)
    {
        // mean with column-wise count (cor row 0) and correction (cor row 1)
        double n, n2, mu2;
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.getValue(r, c);
                n=cor.getValue(0, c);
                buffer._correction=cor.getValue(1, c);
                mu2=newWithCor.getValue(r, c);
                n2=newWithCor.getValue(r+1, c);
                n=n+n2;
                double toadd=(mu2-buffer._sum)*n2/n; // incremental-mean update term
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
                setValue(r, c, buffer._sum);
                cor.setValue(0, c, n);
                cor.setValue(1, c, buffer._correction);
            }
    }else if(aggOp.correctionLocation==4)
    {
        // mean with row-wise count (cor col 0) and correction (cor col 1)
        double n, n2, mu2;
        for(int r=0; r<rlen; r++)
            for(int c=0; c<clen; c++)
            {
                buffer._sum=this.getValue(r, c);
                n=cor.getValue(r, 0);
                buffer._correction=cor.getValue(r, 1);
                mu2=newWithCor.getValue(r, c);
                n2=newWithCor.getValue(r, c+1);
                n=n+n2;
                double toadd=(mu2-buffer._sum)*n2/n; // incremental-mean update term
                buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
                setValue(r, c, buffer._sum);
                cor.setValue(r, 0, n);
                cor.setValue(r, 1, buffer._correction);
            }
    }
    else
        throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}
// Variant of incrementalAggregate where the correction terms (and counts, for
// mean) are stored INSIDE this block itself, in the extra rows/columns selected
// by correctionLocation, rather than in a separate correction block.
public void incrementalAggregate(AggregateOperator aggOp, MatrixValue newWithCorrection)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
assert(aggOp.correctionExists);
MatrixBlockDSM newWithCor=checkType(newWithCorrection);
KahanObject buffer=new KahanObject(0, 0);
if(aggOp.correctionLocation==1)
{
// last row of this block holds the correction
for(int r=0; r<rlen-1; r++)
for(int c=0; c<clen; c++)
{
buffer._sum=this.getValue(r, c);
buffer._correction=this.getValue(r+1, c);
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.getValue(r, c),
newWithCor.getValue(r+1, c));
setValue(r, c, buffer._sum);
setValue(r+1, c, buffer._correction);
}
}else if(aggOp.correctionLocation==2)
{
// last column of this block holds the correction
for(int r=0; r<rlen; r++)
for(int c=0; c<clen-1; c++)
{
buffer._sum=this.getValue(r, c);
buffer._correction=this.getValue(r, c+1);
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.getValue(r, c),
newWithCor.getValue(r, c+1));
setValue(r, c, buffer._sum);
setValue(r, c+1, buffer._correction);
}
}/*else if(aggOp.correctionLocation==0)
{
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
//buffer._sum=this.getValue(r, c);
//buffer._correction=0;
//buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newWithCor.getValue(r, c));
setValue(r, c, this.getValue(r, c)+newWithCor.getValue(r, c));
}
}*/else if(aggOp.correctionLocation==3)
{
// mean: last two rows hold the running count and the correction
double n, n2, mu2;
for(int r=0; r<rlen-2; r++)
for(int c=0; c<clen; c++)
{
buffer._sum=this.getValue(r, c);
n=this.getValue(r+1, c);
buffer._correction=this.getValue(r+2, c);
mu2=newWithCor.getValue(r, c);
n2=newWithCor.getValue(r+1, c);
n=n+n2;
double toadd=(mu2-buffer._sum)*n2/n;
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
setValue(r, c, buffer._sum);
setValue(r+1, c, n);
setValue(r+2, c, buffer._correction);
}
}else if(aggOp.correctionLocation==4)
{
// mean: last two columns hold the running count and the correction
double n, n2, mu2;
for(int r=0; r<rlen; r++)
for(int c=0; c<clen-2; c++)
{
buffer._sum=this.getValue(r, c);
n=this.getValue(r, c+1);
buffer._correction=this.getValue(r, c+2);
mu2=newWithCor.getValue(r, c);
n2=newWithCor.getValue(r, c+1);
n=n+n2;
double toadd=(mu2-buffer._sum)*n2/n;
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
setValue(r, c, buffer._sum);
setValue(r, c+1, n);
setValue(r, c+2, buffer._correction);
}
}
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}
//allocate space if sparseRows[r] doesnot exist
/**
 * Ensures sparseRows exists and can be indexed at r; individual rows stay null
 * until first written. Grows to at least max(rlen, r+1) entries, preserving
 * existing rows. (The old code always allocated exactly rlen entries, which
 * under-allocated for callers that pass r == rlen, e.g. copySparseToSparse.)
 */
private void adjustSparseRows(int r)
{
    int needed=Math.max(rlen, r+1);
    if(sparseRows==null)
        sparseRows=new SparseRow[needed];
    else if(sparseRows.length<=r)
    {
        SparseRow[] oldSparseRows=sparseRows;
        sparseRows=new SparseRow[needed];
        System.arraycopy(oldSparseRows, 0, sparseRows, 0, Math.min(oldSparseRows.length, needed));
    }
}
@Override
/*
* If (r,c) \in Block, add v to existing cell
* If not, add a new cell with index (r,c)
*/
public void addValue(int r, int c, double v) {
if(sparse)
{
adjustSparseRows(r);
if(sparseRows[r]==null)
sparseRows[r]=new SparseRow();
double curV=sparseRows[r].get(c);
if(curV==0)
nonZeros++;
curV+=v;
if(curV==0)
nonZeros
else
sparseRows[r].set(c, curV);
}else
{
int limit=rlen*clen;
if(denseBlock==null)
{
denseBlock=new double[limit];
Arrays.fill(denseBlock, 0, limit, 0);
}
int index=r*clen+c;
if(denseBlock[index]==0)
nonZeros++;
denseBlock[index]+=v;
if(denseBlock[index]==0)
nonZeros
}
}
@Override
public void setValue(int r, int c, double v) {
if(r>rlen || c > clen)
throw new RuntimeException("indexes ("+r+","+c+") out of range ("+rlen+","+clen+")");
if(sparse)
{
if( (sparseRows==null || sparseRows.length<=r || sparseRows[r]==null) && v==0.0)
return;
adjustSparseRows(r);
if(sparseRows[r]==null)
sparseRows[r]=new SparseRow();
if(sparseRows[r].set(c, v))
nonZeros++;
}else
{
if(denseBlock==null && v==0.0)
return;
int limit=rlen*clen;
if(denseBlock==null)
{
denseBlock=new double[limit];
Arrays.fill(denseBlock, 0, limit, 0);
}
int index=r*clen+c;
if(denseBlock[index]==0)
nonZeros++;
denseBlock[index]=v;
if(v==0)
nonZeros
}
}
/*
 * append value is only used when values are appended at the end of each row for the sparse representation
 * This can only be called, when the caller knows the access pattern of the block
 */
public void appendValue(int r, int c, double v)
{
    if(v==0)
        return; // zeros are never materialized
    if(!sparse)
    {
        setValue(r, c, v);
        return;
    }
    adjustSparseRows(r);
    if(sparseRows[r]==null)
        sparseRows[r]=new SparseRow();
    sparseRows[r].append(c, v);
    nonZeros++;
}
@Override
// Convenience overload: delegates to the (row, column) variant.
public void setValue(CellIndex index, double v) {
setValue(index.row, index.column, v);
}
@Override
/**
 * Returns the value at (r,c); unallocated storage reads as 0.
 */
public double getValue(int r, int c) {
    // fixed off-by-one: valid indexes are 0..rlen-1 / 0..clen-1, the old '>' let r==rlen
    // slip through and silently read a wrong cell from an oversized dense buffer
    if(r>=rlen || c>=clen)
        throw new RuntimeException("indexes ("+r+","+c+") out of range ("+rlen+","+clen+")");
    if(sparse)
    {
        if(sparseRows==null || sparseRows.length<=r || sparseRows[r]==null)
            return 0;
        // NOTE(review): boxing kept as-is — unclear whether SparseRow.get can return null
        Double d=sparseRows[r].get(c);
        if(d!=null)
            return d;
        else
            return 0;
    }else
    {
        if(denseBlock==null)
            return 0;
        return denseBlock[r*clen+c];
    }
}
@Override
/**
 * Appends every cell value (zeros included) to ret; emits exactly rlen*clen
 * values regardless of representation. Order of non-zeros vs. padded zeros in
 * the sparse case is not positional.
 */
public void getCellValues(Collection<Double> ret) {
    int limit=rlen*clen;
    if(sparse)
    {
        if(sparseRows==null)
        {
            for(int i=0; i<limit; i++)
                ret.add(0.0);
        }else
        {
            // fixed: the zero padding was computed from ret.size(), which is wrong when
            // the caller passes a non-empty or deduplicating collection; count what we add
            int added=0;
            for(int r=0; r<sparseRows.length; r++)
            {
                if(sparseRows[r]==null) continue;
                double[] container=sparseRows[r].getValueContainer();
                for(int j=0; j<sparseRows[r].size(); j++)
                {
                    ret.add(container[j]);
                    added++;
                }
            }
            int zeros=limit-added;
            for(int i=0; i<zeros; i++)
                ret.add(0.0);
        }
    }else
    {
        if(denseBlock==null)
        {
            for(int i=0; i<limit; i++)
                ret.add(0.0);
        }else
        {
            for(int i=0; i<limit; i++)
                ret.add(denseBlock[i]);
        }
    }
}
@Override
/**
 * Builds a value -> occurrence-count histogram of all rlen*clen cells in ret,
 * zeros included.
 */
public void getCellValues(Map<Double, Integer> ret) {
    int limit=rlen*clen;
    if(sparse)
    {
        if(sparseRows==null)
        {
            ret.put(0.0, limit);
        }else
        {
            // fixed: the zero count was computed as limit-ret.size(), but ret.size() is the
            // number of DISTINCT values, not the number of cells seen; count cells explicitly
            int cellCount=0;
            for(int r=0; r<sparseRows.length; r++)
            {
                if(sparseRows[r]==null) continue;
                double[] container=sparseRows[r].getValueContainer();
                for(int j=0; j<sparseRows[r].size(); j++)
                {
                    Double v=container[j];
                    Integer old=ret.get(v);
                    if(old!=null)
                        ret.put(v, old+1);
                    else
                        ret.put(v, 1);
                    cellCount++;
                }
            }
            int zeros=limit-cellCount;
            Integer old=ret.get(0.0);
            if(old!=null)
                ret.put(0.0, old+zeros);
            else
                ret.put(0.0, zeros);
        }
    }else
    {
        if(denseBlock==null)
        {
            ret.put(0.0, limit);
        }else
        {
            for(int i=0; i<limit; i++)
            {
                double v=denseBlock[i];
                Integer old=ret.get(v);
                if(old!=null)
                    ret.put(v, old+1);
                else
                    ret.put(v, 1);
            }
        }
    }
}
@Override
// Deserializes a block written by write(): dimensions, format flag, then the
// payload in whichever representation the flag announces.
public void readFields(DataInput in) throws IOException {
rlen=in.readInt();
clen=in.readInt();
sparse=in.readBoolean();
if(sparse)
readSparseBlock(in);
else
readDenseBlock(in);
}
/** Reads rlen*clen doubles in row-major order, recounting nonZeros as it goes. */
private void readDenseBlock(DataInput in) throws IOException {
    int cells=rlen*clen;
    if(denseBlock==null || denseBlock.length < cells )
        denseBlock=new double[cells];
    nonZeros=0;
    for(int i=0; i<cells; i++)
    {
        double value=in.readDouble();
        denseBlock[i]=value;
        if(value!=0)
            nonZeros++;
    }
}
// Reads the sparse wire format: for each row an entry count, then (col, value)
// pairs. Existing SparseRow objects are reused where possible.
private void readSparseBlock(DataInput in) throws IOException {
this.adjustSparseRows(rlen-1);
nonZeros=0;
for(int r=0; r<rlen; r++)
{
int nr=in.readInt();
nonZeros+=nr;
if(nr==0)
{
// empty row: clear any stale entries and move on
if(sparseRows[r]!=null)
sparseRows[r].reset();
continue;
}
if(sparseRows[r]==null)
sparseRows[r]=new SparseRow(nr);
else
sparseRows[r].reset();
for(int j=0; j<nr; j++)
sparseRows[r].append(in.readInt(), in.readDouble());
}
}
@Override
// Serializes the block. The on-wire format is chosen from the ACTUAL density,
// not the in-memory representation, so a mis-formatted block is converted while
// being written (writeSparseToDense / writeDenseToSparse).
public void write(DataOutput out) throws IOException {
out.writeInt(rlen);
out.writeInt(clen);
if(sparse)
{
if(sparseRows==null)
writeEmptyBlock(out);
//if it should be dense, then write to the dense format
else if(nonZeros>rlen*clen*SPARCITY_TURN_POINT)
writeSparseToDense(out);
else
writeSparseBlock(out);
}else
{
if(denseBlock==null)
writeEmptyBlock(out);
//if it should be sparse
else if(nonZeros<rlen*clen*SPARCITY_TURN_POINT)
writeDenseToSparse(out);
else
writeDenseBlock(out);
}
}
/** Serializes an all-zero block in sparse form: sparse flag plus a zero count per row. */
private void writeEmptyBlock(DataOutput out) throws IOException
{
    out.writeBoolean(true); // sparse format marker
    for(int r=0; r<rlen; r++)
        out.writeInt(0); // no entries in this row
}
/** Serializes the dense payload: format flag followed by rlen*clen doubles. */
private void writeDenseBlock(DataOutput out) throws IOException {
    out.writeBoolean(sparse); // false here: dense format marker
    int cells=rlen*clen;
    for(int i=0; i<cells; i++)
        out.writeDouble(denseBlock[i]);
}
// Serializes the sparse payload: per row an entry count then (col, value) pairs.
// Rows beyond sparseRows.length are written as empty so readSparseBlock always
// sees exactly rlen row records.
private void writeSparseBlock(DataOutput out) throws IOException {
out.writeBoolean(sparse);
int r=0;
for(;r<sparseRows.length; r++)
{
if(sparseRows[r]==null)
out.writeInt(0);
else
{
int nr=sparseRows[r].size();
out.writeInt(nr);
int[] cols=sparseRows[r].getIndexContainer();
double[] values=sparseRows[r].getValueContainer();
for(int j=0; j<nr; j++)
{
out.writeInt(cols[j]);
out.writeDouble(values[j]);
}
}
}
// pad remaining rows (r carries over from the loop above)
for(;r<rlen; r++)
out.writeInt(0);
}
/** Writes a (too dense) sparse block out in the dense wire format. */
private void writeSparseToDense(DataOutput out) throws IOException {
    out.writeBoolean(false); // dense format marker
    for(int i=0; i<rlen; i++)
    {
        for(int j=0; j<clen; j++)
            out.writeDouble(getValue(i, j)); // getValue handles missing rows/entries as 0
    }
}
// Writes a (too sparse) dense block out in the sparse wire format: per row, a
// pre-counted number of non-zeros followed by (col, value) pairs.
private void writeDenseToSparse(DataOutput out) throws IOException {
if(denseBlock==null)
{
writeEmptyBlock(out);
return;
}
out.writeBoolean(true);
int start=0; // row-major offset of the current row's first cell
for(int r=0; r<rlen; r++)
{
//count nonzeros
int nr=0;
for(int i=start; i<start+clen; i++)
if(denseBlock[i]!=0.0)
nr++;
out.writeInt(nr);
for(int c=0; c<clen; c++)
{
if(denseBlock[start]!=0.0)
{
out.writeInt(c);
out.writeDouble(denseBlock[start]);
}
start++;
}
}
// if(num!=nonZeros)
// throw new IOException("nonZeros = "+nonZeros+", but should be "+num);
}
@Override
// Blocks are deliberately not ordered; every comparison reports equality.
public int compareTo(Object arg0) {
// don't compare blocks
return 0;
}
@Override
// Index-reorganization operation (e.g. transpose, diag): op.fn maps each input
// cell index to its output position. Result dimensions come from
// op.fn.computeDimension via the shared tempCellIndex scratch field.
public MatrixValue reorgOperations(ReorgOperator op, MatrixValue result,
int startRow, int startColumn, int length)
throws DMLUnsupportedOperationException, DMLRuntimeException {
checkType(result);
boolean reducedDim=op.fn.computeDimension(rlen, clen, tempCellIndex);
boolean sps;
if(reducedDim)
sps=false; // dimension-reducing reorgs produce small, dense-friendly results
else
sps=checkRealSparcity(this);
if(result==null)
result=new MatrixBlockDSM(tempCellIndex.row, tempCellIndex.column, sps);
else
result.reset(tempCellIndex.row, tempCellIndex.column, sps);
CellIndex temp = new CellIndex(0, 0);
if(sparse)
{
if(sparseRows!=null)
{
for(int r=0; r<sparseRows.length; r++)
{
if(sparseRows[r]==null) continue;
int[] cols=sparseRows[r].getIndexContainer();
double[] values=sparseRows[r].getValueContainer();
for(int i=0; i<sparseRows[r].size(); i++)
{
tempCellIndex.set(r, cols[i]);
op.fn.execute(tempCellIndex, temp);
result.setValue(temp.row, temp.column, values[i]);
}
}
}
}else
{
if(denseBlock!=null)
{
int limit=rlen*clen;
int r,c;
for(int i=0; i<limit; i++)
{
// recover 2D coordinates from the row-major offset
r=i/clen;
c=i%clen;
temp.set(r, c);
op.fn.execute(temp, temp);
result.setValue(temp.row, temp.column, denseBlock[i]);
}
}
}
return result;
}
@Override
/**
 * Copies the cells inside 'range' (inclusive bounds) into result, keeping them
 * at their original (r,c) positions; everything outside the range stays zero.
 */
public MatrixValue selectOperations(MatrixValue result, IndexRange range)
throws DMLUnsupportedOperationException, DMLRuntimeException {
    checkType(result);
    // estimate result sparsity: current density scaled by the selected fraction
    boolean sps;
    if((double)nonZeros/(double)rlen/(double)clen*(double)(range.rowEnd-range.rowStart+1)*(double)(range.colEnd-range.colStart+1)
            /(double)rlen/(double)clen< SPARCITY_TURN_POINT)
        sps=true;
    else sps=false;
    // BUG FIX: result dimensions were taken from the stale shared tempCellIndex, which
    // this method never sets; values are written at their original positions, so the
    // result must span the full rlen x clen.
    if(result==null)
        result=new MatrixBlockDSM(rlen, clen, sps);
    else
        result.reset(rlen, clen, sps);
    if(sparse)
    {
        if(sparseRows!=null)
        {
            // rows past the allocated array are implicitly empty
            int lastRow=(int)Math.min(range.rowEnd, sparseRows.length-1);
            for(int r=(int)range.rowStart; r<=lastRow; r++)
            {
                if(sparseRows[r]==null) continue; // BUG FIX: empty rows used to NPE
                int[] cols=sparseRows[r].getIndexContainer();
                double[] values=sparseRows[r].getValueContainer();
                int start=sparseRows[r].searchIndexesFirstGTE((int)range.colStart);
                int end=sparseRows[r].searchIndexesFirstLTE((int)range.colEnd);
                for(int i=start; i<=end; i++)
                    result.setValue(r, cols[i], values[i]);
            }
        }
    }else
    {
        if(denseBlock!=null)
        {
            // BUG FIX: the read offset started at 0 regardless of rowStart, so values
            // from the wrong rows were copied; start at the first selected row.
            int i=(int)range.rowStart*clen;
            for(int r=(int) range.rowStart; r<=range.rowEnd; r++)
            {
                for(int c=(int) range.colStart; c<=range.colEnd; c++)
                    result.setValue(r, c, denseBlock[i+c]);
                i+=clen;
            }
        }
    }
    return result;
}
// Aggregates this block's contribution to trace(): finds which global diagonal
// cells fall inside this block (using its block indexes and the blocking
// factors) and folds them into result cell (0,0), with a Kahan correction cell
// when the operator carries one.
private void traceHelp(AggregateUnaryOperator op, MatrixBlockDSM result,
int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn)
throws DMLUnsupportedOperationException, DMLRuntimeException
{
//test whether this block contains any cell in the diag
long topRow=UtilFunctions.cellIndexCalculation(indexesIn.getRowIndex(), blockingFactorRow, 0);
long bottomRow=UtilFunctions.cellIndexCalculation(indexesIn.getRowIndex(), blockingFactorRow, this.rlen-1);
long leftColumn=UtilFunctions.cellIndexCalculation(indexesIn.getColumnIndex(), blockingFactorCol, 0);
long rightColumn=UtilFunctions.cellIndexCalculation(indexesIn.getColumnIndex(), blockingFactorCol, this.clen-1);
long start=Math.max(topRow, leftColumn);
long end=Math.min(bottomRow, rightColumn);
if(start>end)
return; // the global diagonal does not cross this block
if(op.aggOp.correctionExists)
{
KahanObject buffer=new KahanObject(0,0);
for(long i=start; i<=end; i++)
{
buffer=(KahanObject) op.aggOp.increOp.fn.execute(buffer,
getValue(UtilFunctions.cellInBlockCalculation(i, blockingFactorRow), UtilFunctions.cellInBlockCalculation(i, blockingFactorCol)));
}
result.setValue(0, 0, buffer._sum);
if(op.aggOp.correctionLocation==1)//extra row
result.setValue(1, 0, buffer._correction);
else if(op.aggOp.correctionLocation==2)
result.setValue(0, 1, buffer._correction);
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+op.aggOp.correctionLocation);
}else
{
double newv=0;
for(long i=start; i<=end; i++)
{
newv+=op.aggOp.increOp.fn.execute(newv,
getValue(UtilFunctions.cellInBlockCalculation(i, blockingFactorRow), UtilFunctions.cellInBlockCalculation(i, blockingFactorCol)));
}
result.setValue(0, 0, newv);
}
}
//change to a column vector
// Extracts the part of the global matrix diagonal that falls inside this block
// and writes it into column 0 of result (diag(M) -> vector).
private void diagM2VHelp(AggregateUnaryOperator op, MatrixBlockDSM result,
int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn) throws DMLUnsupportedOperationException, DMLRuntimeException
{
//test whether this block contains any cell in the diag
long topRow=UtilFunctions.cellIndexCalculation(indexesIn.getRowIndex(), blockingFactorRow, 0);
long bottomRow=UtilFunctions.cellIndexCalculation(indexesIn.getRowIndex(), blockingFactorRow, this.rlen-1);
long leftColumn=UtilFunctions.cellIndexCalculation(indexesIn.getColumnIndex(), blockingFactorCol, 0);
long rightColumn=UtilFunctions.cellIndexCalculation(indexesIn.getColumnIndex(), blockingFactorCol, this.clen-1);
long start=Math.max(topRow, leftColumn);
long end=Math.min(bottomRow, rightColumn);
if(start>end)
return; // the global diagonal does not cross this block
for(long i=start; i<=end; i++)
{
int cellRow=UtilFunctions.cellInBlockCalculation(i, blockingFactorRow);
int cellCol=UtilFunctions.cellInBlockCalculation(i, blockingFactorCol);
result.setValue(cellRow, 0, getValue(cellRow, cellCol));
}
}
// Folds one value into result cell (row,column) for a unary aggregate, keeping
// the operator's auxiliary cells next to it: correctionLocation 1/2 store a
// Kahan correction in the adjacent row/column; 3/4 (mean) additionally keep a
// running count, with the correction two cells away.
private void incrementalAggregateUnaryHelp(AggregateOperator aggOp, MatrixBlockDSM result, int row, int column,
double newvalue, KahanObject buffer) throws DMLRuntimeException
{
if(aggOp.correctionExists)
{
if(aggOp.correctionLocation==1 || aggOp.correctionLocation==2)
{
int corRow=row, corCol=column;
if(aggOp.correctionLocation==1)//extra row
corRow++;
else if(aggOp.correctionLocation==2)
corCol++;
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
buffer._sum=result.getValue(row, column);
buffer._correction=result.getValue(corRow, corCol);
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, newvalue);
result.setValue(row, column, buffer._sum);
result.setValue(corRow, corCol, buffer._correction);
}else if(aggOp.correctionLocation==0)
{
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
}else// for mean
{
int corRow=row, corCol=column;
int countRow=row, countCol=column;
if(aggOp.correctionLocation==3)//extra row
{
countRow++;
corRow+=2;
}
else if(aggOp.correctionLocation==4)
{
countCol++;
corCol+=2;
}
else
throw new DMLRuntimeException("unrecognized correctionLocation: "+aggOp.correctionLocation);
buffer._sum=result.getValue(row, column);
buffer._correction=result.getValue(corRow, corCol);
double count=result.getValue(countRow, countCol)+1.0;
// incremental mean: shift the running mean by (x - mean)/n
double toadd=(newvalue-buffer._sum)/count;
buffer=(KahanObject) aggOp.increOp.fn.execute(buffer, toadd);
result.setValue(row, column, buffer._sum);
result.setValue(corRow, corCol, buffer._correction);
result.setValue(countRow, countCol, count);
}
}else
{
newvalue=aggOp.increOp.fn.execute(result.getValue(row, column), newvalue);
result.setValue(row, column, newvalue);
}
}
/**
 * Aggregate-unary helper for the sparse-safe path: only non-zero cells are
 * visited, since zeros contribute nothing beyond the initial value. Works on
 * either representation of this block.
 */
private void sparseAggregateUnaryHelp(AggregateUnaryOperator op, MatrixBlockDSM result,
int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn) throws DMLRuntimeException
{
//initialize result
if(op.aggOp.initialValue!=0)
result.resetDenseWithValue(result.rlen, result.clen, op.aggOp.initialValue);
// scratch Kahan buffer shared across all folds
KahanObject buffer=new KahanObject(0,0);
int r = 0, c = 0;
if(sparse)
{
if(sparseRows!=null)
{
for(r=0; r<sparseRows.length; r++)
{
if(sparseRows[r]==null) continue;
int[] cols=sparseRows[r].getIndexContainer();
double[] values=sparseRows[r].getValueContainer();
for(int i=0; i<sparseRows[r].size(); i++)
{
// map the cell index through the reduction's index function, then fold
result.tempCellIndex.set(r, cols[i]);
op.indexFn.execute(result.tempCellIndex, result.tempCellIndex);
incrementalAggregateUnaryHelp(op.aggOp, result, result.tempCellIndex.row, result.tempCellIndex.column, values[i], buffer);
}
}
}
}else
{
if(denseBlock!=null)
{
int limit=rlen*clen;
for(int i=0; i<limit; i++)
{
// linearized index -> (r, c) in row-major order
r=i/clen;
c=i%clen;
result.tempCellIndex.set(r, c);
op.indexFn.execute(result.tempCellIndex, result.tempCellIndex);
incrementalAggregateUnaryHelp(op.aggOp, result, result.tempCellIndex.row, result.tempCellIndex.column, denseBlock[i], buffer);
}
}
}
}
/**
 * Aggregate-unary helper for the sparse-unsafe path: every cell (zeros
 * included) is mapped through op.indexFn and folded into the result.
 */
private void denseAggregateUnaryHelp(AggregateUnaryOperator op, MatrixBlockDSM result,
		int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn) throws DMLRuntimeException
{
	// Pre-fill the result when the aggregate's neutral element is not 0.
	if(op.aggOp.initialValue != 0)
		result.resetDenseWithValue(result.rlen, result.clen, op.aggOp.initialValue);
	// Scratch Kahan buffer reused across all cells.
	KahanObject kahanBuf = new KahanObject(0, 0);
	for(int row = 0; row < rlen; row++)
	{
		for(int col = 0; col < clen; col++)
		{
			result.tempCellIndex.set(row, col);
			op.indexFn.execute(result.tempCellIndex, result.tempCellIndex);
			incrementalAggregateUnaryHelp(op.aggOp, result,
					result.tempCellIndex.row, result.tempCellIndex.column,
					getValue(row, col), kahanBuf);
		}
	}
}
/**
 * Applies an aggregate-unary operation (sum, row/col sums, trace, diag, ...)
 * to this block and returns the (possibly reallocated) result.
 *
 * The output dimension is derived from op.indexFn; when the aggregate carries
 * a correction (Kahan sum, mean), extra rows/columns are reserved for it.
 *
 * @param op                 aggregate-unary operator
 * @param result             output block to reuse, or null to allocate
 * @param blockingFactorRow  global row blocking factor (used by trace/diag)
 * @param blockingFactorCol  global column blocking factor (used by trace/diag)
 * @param indexesIn          block indexes of this block in the global matrix
 * @return the result block (same object as {@code result} when non-null)
 * @throws DMLRuntimeException on an unrecognized correction location
 */
public MatrixValue aggregateUnaryOperations(AggregateUnaryOperator op, MatrixValue result,
		int blockingFactorRow, int blockingFactorCol, MatrixIndexes indexesIn)
		throws DMLUnsupportedOperationException, DMLRuntimeException
{
	// Output dimension implied by the index function (e.g. reduce-all -> 1x1).
	op.indexFn.computeDimension(rlen, clen, tempCellIndex);
	if(op.aggOp.correctionExists)
	{
		// Reserve space for the correction (and count, for mean).
		switch(op.aggOp.correctionLocation)
		{
		case 1: tempCellIndex.row++; break;       // correction in an extra row
		case 2: tempCellIndex.column++; break;    // correction in an extra column
		case 3: tempCellIndex.row+=2; break;      // mean: count + correction rows
		case 4: tempCellIndex.column+=2; break;   // mean: count + correction columns
		default:
			throw new DMLRuntimeException("unrecognized correctionLocation: "+op.aggOp.correctionLocation);
		}
	}
	if(result==null)
		result=new MatrixBlockDSM(tempCellIndex.row, tempCellIndex.column, false);
	else
		result.reset(tempCellIndex.row, tempCellIndex.column, false);

	//TODO: this code is hack to support trace, and should be removed when selection is supported
	if(op.isTrace)
		traceHelp(op, (MatrixBlockDSM)result, blockingFactorRow, blockingFactorCol, indexesIn);
	else if(op.isDiagM2V)
		diagM2VHelp(op, (MatrixBlockDSM)result, blockingFactorRow, blockingFactorCol, indexesIn);
	else if(op.sparseSafe)
		sparseAggregateUnaryHelp(op, (MatrixBlockDSM)result, blockingFactorRow, blockingFactorCol, indexesIn);
	else
		denseAggregateUnaryHelp(op, (MatrixBlockDSM)result, blockingFactorRow, blockingFactorCol, indexesIn);
	return result;
}
/**
 * Dispatches an aggregate-binary (matrix-multiply-like) operation to the
 * implementation matching the sparsity of the two operands.
 */
private static void sparseAggregateBinaryHelp(MatrixBlockDSM m1, MatrixBlockDSM m2,
		MatrixBlockDSM result, AggregateBinaryOperator op) throws DMLRuntimeException
{
	if(m1.sparse)
	{
		if(m2.sparse)
			aggBinSparse(m1, m2, result, op);
		else
			aggBinSparseDense(m1, m2, result, op);
	}
	else
	{
		if(m2.sparse)
			aggBinDenseSparse(m1, m2, result, op);
		else
			aggBinDense(m1, m2, result, op);
	}
}
/**
 * Aggregate-binary operation (generalized matrix multiply):
 * result = agg_k op.binaryFn(m1[i,k], m2[k,j]).
 *
 * @param m1Value left operand (its clen must equal m2's rlen)
 * @param m2Value right operand
 * @param result  output block to reuse, or null to allocate
 * @param op      binary function plus aggregation operator
 * @return the result block
 * @throws RuntimeException if the inner dimensions do not match
 */
public MatrixValue aggregateBinaryOperations(MatrixValue m1Value, MatrixValue m2Value,
		MatrixValue result, AggregateBinaryOperator op)
		throws DMLUnsupportedOperationException, DMLRuntimeException
{
	MatrixBlockDSM m1=checkType(m1Value);
	MatrixBlockDSM m2=checkType(m2Value);
	checkType(result);
	if(m1.clen!=m2.rlen)
		// fixed typo in the message: "dimenstions" -> "dimensions"
		throw new RuntimeException("dimensions do not match for matrix multiplication");
	int rl=m1.rlen;
	int cl=m2.clen;
	// Estimate whether the product should be materialized sparse or dense.
	boolean sp=checkSparcityOnAggBinary(m1, m2);
	if(result==null)
		result=new MatrixBlockDSM(rl, cl, sp);
	else
		result.reset(rl, cl, sp);
	// Sparse-safe ops can skip zero cells; others must evaluate every cell.
	if(op.sparseSafe)
		sparseAggregateBinaryHelp(m1, m2, (MatrixBlockDSM)result, op);
	else
		aggBinSparseUnsafe(m1, m2, (MatrixBlockDSM)result, op);
	return result;
}
/*
 * to perform aggregateBinary when the first matrix is dense and the second is sparse:
 * for every non-zero b[k,j], its contribution op.binaryFn(a[i,k], b[k,j]) is
 * folded into result[i,j] for all rows i of m1.
 */
private static void aggBinDenseSparse(MatrixBlockDSM m1, MatrixBlockDSM m2,
MatrixBlockDSM result, AggregateBinaryOperator op) throws DMLRuntimeException
{
// m2 entirely zero: nothing to contribute
if(m2.sparseRows==null)
return;
for(int k=0; k<m2.sparseRows.length; k++)
{
if(m2.sparseRows[k]==null) continue;
int[] cols=m2.sparseRows[k].getIndexContainer();
double[] values=m2.sparseRows[k].getValueContainer();
for(int p=0; p<m2.sparseRows[k].size(); p++)
{
int j=cols[p];
// stream column k of m1 against the single non-zero b[k,j]
for(int i=0; i<m1.rlen; i++)
{
double old=result.getValue(i, j);
double aik=m1.getValue(i, k);
double addValue=op.binaryFn.execute(aik, values[p]);
double newvalue=op.aggOp.increOp.fn.execute(old, addValue);
result.setValue(i, j, newvalue);
}
}
}
}
/*
 * to perform aggregateBinary when the first matrix is sparse and the second is dense:
 * for each row i of m1, each output cell result[i,j] is computed by folding
 * op.binaryFn over the non-zeros of that row against column j of m2.
 */
private static void aggBinSparseDense(MatrixBlockDSM m1, MatrixBlockDSM m2,
MatrixBlockDSM result, AggregateBinaryOperator op) throws DMLRuntimeException
{
// m1 entirely zero: nothing to contribute
if(m1.sparseRows==null)
return;
for(int i=0; i<m1.sparseRows.length; i++)
{
if(m1.sparseRows[i]==null) continue;
int[] cols=m1.sparseRows[i].getIndexContainer();
double[] values=m1.sparseRows[i].getValueContainer();
for(int j=0; j<m2.clen; j++)
{
// accumulate row i of m1 (non-zeros only) against column j of m2
double aij=0;
for(int p=0; p<m1.sparseRows[i].size(); p++)
{
int k=cols[p];
double addValue=op.binaryFn.execute(values[p], m2.getValue(k, j));
aij=op.aggOp.increOp.fn.execute(aij, addValue);
}
result.appendValue(i, j, aij);
}
}
}
/*
 * to perform aggregateBinary when both matrices are sparse.
 * For a sparse result, partial sums of one output row are accumulated in a
 * sorted map so they can be appended to the result row in column order;
 * for a dense result, they are folded into the result cells directly.
 */
public static void aggBinSparse(MatrixBlockDSM m1, MatrixBlockDSM m2,
		MatrixBlockDSM result, AggregateBinaryOperator op) throws DMLRuntimeException
{
	// either operand entirely zero: product is zero, nothing to do
	if(m1.sparseRows==null || m2.sparseRows==null)
		return;
	TreeMap<Integer, Double> cache=null;
	if(result.isInSparseFormat())
		cache=new TreeMap<Integer, Double>();
	for(int i=0; i<m1.sparseRows.length; i++)
	{
		if(m1.sparseRows[i]==null) continue;
		int[] cols1=m1.sparseRows[i].getIndexContainer();
		double[] values1=m1.sparseRows[i].getValueContainer();
		for(int p=0; p<m1.sparseRows[i].size(); p++)
		{
			// non-zero a[i,k] selects row k of m2
			int k=cols1[p];
			if(m2.sparseRows[k]==null) continue;
			int[] cols2=m2.sparseRows[k].getIndexContainer();
			double[] values2=m2.sparseRows[k].getValueContainer();
			for(int q=0; q<m2.sparseRows[k].size(); q++)
			{
				int j=cols2[q];
				double addValue=op.binaryFn.execute(values1[p], values2[q]);
				if(result.isInSparseFormat())
				{
					// accumulate the partial sum for output cell (i, j)
					Double old=cache.get(j);
					if(old==null)
						old=0.0;
					cache.put(j, op.aggOp.increOp.fn.execute(old, addValue));
				}else
				{
					double old=result.getValue(i, j);
					double newvalue=op.aggOp.increOp.fn.execute(old, addValue);
					result.setValue(i, j, newvalue);
				}
			}
		}
		if(result.isInSparseFormat())
		{
			// flush row i to the result in ascending column order, then reuse the map
			for(Entry<Integer, Double> e: cache.entrySet())
			{
				result.appendValue(i, e.getKey(), e.getValue());
			}
			cache.clear();
		}
	}
}
public static void matrixMult(MatrixBlockDSM matrixA, MatrixBlockDSM matrixB,
MatrixBlockDSM resultMatrix)
{
int l, i, j, aIndex, bIndex, cIndex;
double temp;
double[] a = matrixA.getDenseArray();
double[] b = matrixB.getDenseArray();
if(a==null || b==null)
return;
if(resultMatrix.denseBlock==null)
resultMatrix.denseBlock = new double[resultMatrix.rlen * resultMatrix.clen];
Arrays.fill(resultMatrix.denseBlock, 0, resultMatrix.denseBlock.length, 0);
double[] c=resultMatrix.denseBlock;
int m = matrixA.rlen;
int n = matrixB.clen;
int k = matrixA.clen;
int nnzs=0;
for(l = 0; l < k; l++)
{
aIndex = l;
cIndex = 0;
for(i = 0; i < m; i++)
{
// aIndex = i * k + l => a[i, l]
temp = a[aIndex];
if(temp != 0)
{
bIndex = l * n;
for(j = 0; j < n; j++)
{
// bIndex = l * n + j => b[l, j]
// cIndex = i * n + j => c[i, j]
if(c[cIndex]==0)
nnzs++;
c[cIndex] = c[cIndex] + temp * b[bIndex];
if(c[cIndex]==0)
nnzs
cIndex++;
bIndex++;
}
}else
cIndex+=n;
aIndex += k;
}
}
resultMatrix.nonZeros=nnzs;
}
/*
 * Fallback for sparse-unsafe aggregate-binary ops: every (i,j) output cell is
 * evaluated by folding op over the full inner dimension, zeros included.
 */
private static void aggBinSparseUnsafe(MatrixBlockDSM m1, MatrixBlockDSM m2, MatrixBlockDSM result,
		AggregateBinaryOperator op) throws DMLRuntimeException
{
	for(int row = 0; row < m1.rlen; row++)
	{
		for(int col = 0; col < m2.clen; col++)
		{
			double acc = op.aggOp.initialValue;
			for(int idx = 0; idx < m1.clen; idx++)
			{
				double left = m1.getValue(row, idx);
				double right = m2.getValue(idx, col);
				acc = op.aggOp.increOp.fn.execute(acc, op.binaryFn.execute(left, right));
			}
			result.appendValue(row, col, acc);
		}
	}
}
/*
 * to perform aggregateBinary when both matrices are dense.
 * The Multiply/Plus combination is routed to the optimized matrixMult;
 * everything else goes through the generic fold below.
 */
private static void aggBinDense(MatrixBlockDSM m1, MatrixBlockDSM m2, MatrixBlockDSM result, AggregateBinaryOperator op) throws DMLRuntimeException
{
	if(op.binaryFn instanceof Multiply && (op.aggOp.increOp.fn instanceof Plus) && !result.sparse)
	{
		matrixMult(m1, m2, result);
	} else
	{
		double[] a = m1.getDenseArray();
		double[] b = m2.getDenseArray();
		if(a==null || b==null)
			return;
		for(int l = 0; l < m1.clen; l++)
		{
			int aIndex = l;
			for(int i = 0; i < m1.rlen; i++)
			{
				// aIndex = i * m1.clen + l => a[i, l]
				double temp = a[aIndex];
				// BUGFIX: row l of b starts at l * m2.clen (b has m2.clen columns);
				// the previous code used l * m1.rlen, which is only correct when
				// m1.rlen happens to equal m2.clen (square blocks).
				int bIndex = l * m2.clen;
				for(int j = 0; j < m2.clen; j++)
				{
					// bIndex = l * m2.clen + j => b[l, j]
					double v = op.aggOp.increOp.fn.execute(result.getValue(i, j),
							op.binaryFn.execute(temp, b[bIndex]));
					result.setValue(i, j, v);
					bIndex++;
				}
				aIndex += m1.clen;
			}
		}
	}
}
@Override
/*
 * D = ctable(A,v2,W)
 * this <- A; scalarThat <- v2; that2 <- W; result <- D
 */
public void tertiaryOperations(Operator op, double scalarThat,
		MatrixValue that2, HashMap<CellIndex, Double> ctableResult)
		throws DMLUnsupportedOperationException, DMLRuntimeException {
	/*
	 * (i1,j1,v1) from input1 (this)
	 * (v2) from scalar_input2 (scalarThat)
	 * (i3,j3,w) from input3 (that2)
	 */
	double v1;
	double v2 = scalarThat;
	double w;
	if(sparse)
	{
		if(sparseRows!=null)
		{
			for(int r=0; r<sparseRows.length; r++)
			{
				// BUGFIX: all-zero rows may be unallocated (null); guard added,
				// consistent with every other sparse iteration in this class.
				if(sparseRows[r]==null) continue;
				int[] cols=sparseRows[r].getIndexContainer();
				double[] values=sparseRows[r].getValueContainer();
				for(int i=0; i<sparseRows[r].size(); i++)
				{
					// output (v1,v2,w)
					v1 = values[i];
					w = that2.getValue(r, cols[i]);
					updateCtable(v1, v2, w, ctableResult);
				}
			}
		}
	}else
	{
		if(denseBlock!=null)
		{
			int limit=rlen*clen;
			int r,c;
			for(int i=0; i<limit; i++)
			{
				r=i/clen;
				c=i%clen;
				v1 = this.getValue(r, c);
				w = that2.getValue(r, c);
				updateCtable(v1, v2, w, ctableResult);
			}
		}
	}
}
/*
 * D = ctable(A,v2,w)
 * this <- A; scalar_that <- v2; scalar_that2 <- w; result <- D
 */
@Override
public void tertiaryOperations(Operator op, double scalarThat,
		double scalarThat2, HashMap<CellIndex, Double> ctableResult)
		throws DMLUnsupportedOperationException, DMLRuntimeException {
	/*
	 * (i1,j1,v1) from input1 (this)
	 * (v2) from scalar_input2 (scalarThat)
	 * (w) from scalar_input3 (scalarThat2)
	 */
	double v1;
	double v2 = scalarThat;
	double w = scalarThat2;
	if(sparse)
	{
		if(sparseRows!=null)
		{
			for(int r=0; r<sparseRows.length; r++)
			{
				// BUGFIX: all-zero rows may be unallocated (null); guard added,
				// consistent with every other sparse iteration in this class.
				if(sparseRows[r]==null) continue;
				int[] cols=sparseRows[r].getIndexContainer();
				double[] values=sparseRows[r].getValueContainer();
				for(int i=0; i<sparseRows[r].size(); i++)
				{
					// output (v1,v2,w)
					v1 = values[i];
					updateCtable(v1, v2, w, ctableResult);
				}
			}
		}
	}else
	{
		if(denseBlock!=null)
		{
			int limit=rlen*clen;
			int r,c;
			for(int i=0; i<limit; i++)
			{
				r=i/clen;
				c=i%clen;
				v1 = this.getValue(r, c);
				updateCtable(v1, v2, w, ctableResult);
			}
		}
	}
}
/*
 * D = ctable(A,B,w)
 * this <- A; that <- B; scalar_that2 <- w; result <- D
 */
@Override
public void tertiaryOperations(Operator op, MatrixValue that,
		double scalarThat2, HashMap<CellIndex, Double> ctableResult)
		throws DMLUnsupportedOperationException, DMLRuntimeException {
	/*
	 * (i1,j1,v1) from input1 (this)
	 * (i1,j1,v2) from input2 (that)
	 * (w) from scalar_input3 (scalarThat2)
	 */
	double v1, v2;
	double w = scalarThat2;
	if(sparse)
	{
		if(sparseRows!=null)
		{
			for(int r=0; r<sparseRows.length; r++)
			{
				// BUGFIX: all-zero rows may be unallocated (null); guard added,
				// consistent with every other sparse iteration in this class.
				if(sparseRows[r]==null) continue;
				int[] cols=sparseRows[r].getIndexContainer();
				double[] values=sparseRows[r].getValueContainer();
				for(int i=0; i<sparseRows[r].size(); i++)
				{
					// output (v1,v2,w)
					v1 = values[i];
					v2 = that.getValue(r, cols[i]);
					updateCtable(v1, v2, w, ctableResult);
				}
			}
		}
	}else
	{
		if(denseBlock!=null)
		{
			int limit=rlen*clen;
			int r,c;
			for(int i=0; i<limit; i++)
			{
				r=i/clen;
				c=i%clen;
				v1 = this.getValue(r, c);
				v2 = that.getValue(r, c);
				updateCtable(v1, v2, w, ctableResult);
			}
		}
	}
}
/*
 * D = ctable(A,B,W)
 * this <- A; that <- B; that2 <- W; result <- D
 */
public void tertiaryOperations(Operator op, MatrixValue that, MatrixValue that2, HashMap<CellIndex, Double> ctableResult)
		throws DMLUnsupportedOperationException, DMLRuntimeException
{
	/*
	 * (i1,j1,v1) from input1 (this)
	 * (i1,j1,v2) from input2 (that)
	 * (i1,j1,w) from input3 (that2)
	 */
	double v1, v2, w;
	if(sparse)
	{
		if(sparseRows!=null)
		{
			for(int r=0; r<sparseRows.length; r++)
			{
				// BUGFIX: all-zero rows may be unallocated (null); guard added,
				// consistent with every other sparse iteration in this class.
				if(sparseRows[r]==null) continue;
				int[] cols=sparseRows[r].getIndexContainer();
				double[] values=sparseRows[r].getValueContainer();
				for(int i=0; i<sparseRows[r].size(); i++)
				{
					// output (v1,v2,w)
					v1 = values[i];
					v2 = that.getValue(r, cols[i]);
					w = that2.getValue(r, cols[i]);
					updateCtable(v1, v2, w, ctableResult);
				}
			}
		}
	}else
	{
		if(denseBlock!=null)
		{
			int limit=rlen*clen;
			int r,c;
			for(int i=0; i<limit; i++)
			{
				r=i/clen;
				c=i%clen;
				v1 = this.getValue(r, c);
				v2 = that.getValue(r, c);
				w = that2.getValue(r, c);
				updateCtable(v1, v2, w, ctableResult);
			}
		}
	}
}
/**
 * In-place binary cell-wise operation: this = op(this, thatValue).
 * Dead commented-out debug prints removed.
 *
 * @param op        binary cell operator
 * @param thatValue second operand; must have the same dimensions as this block
 * @throws RuntimeException if the block dimensions differ
 */
public void binaryOperationsInPlace(BinaryOperator op, MatrixValue thatValue)
		throws DMLUnsupportedOperationException, DMLRuntimeException
{
	MatrixBlockDSM that=checkType(thatValue);
	if(this.rlen!=that.rlen || this.clen!=that.clen)
		throw new RuntimeException("block sizes are not matched for binary " +
				"cell operations: "+this.rlen+"*"+this.clen+" vs "+ that.rlen+"*"
				+that.clen);
	// Sparse-safe ops (op(0,0)==0) can exploit sparsity; others visit all cells.
	if(op.sparseSafe)
		sparseBinaryInPlaceHelp(op, that);
	else
		denseBinaryInPlaceHelp(op, that);
}
/**
 * Converts this block from dense to sparse representation in place,
 * rebuilding the sparse rows from the dense buffer and recounting non-zeros.
 */
public void denseToSparse() {
	sparse = true;
	adjustSparseRows(rlen - 1);
	reset();
	// No dense data means the block is all zeros: nothing to transfer.
	if(denseBlock == null)
		return;
	for(int r = 0; r < rlen; r++)
	{
		if(sparseRows[r] == null)
			sparseRows[r] = new SparseRow();
		int rowOffset = r * clen;
		for(int c = 0; c < clen; c++)
		{
			double val = denseBlock[rowOffset + c];
			if(val != 0)
			{
				sparseRows[r].append(c, val);
				nonZeros++;
			}
		}
	}
}
/**
 * Converts this block from sparse to dense representation in place,
 * scattering the sparse rows into a zero-filled dense buffer and recounting
 * non-zeros (explicitly stored zeros are dropped from the count).
 */
public void sparseToDense() {
	sparse = false;
	int limit = rlen * clen;
	// Reuse the existing dense buffer when it is large enough.
	if(denseBlock == null || denseBlock.length < limit)
		denseBlock = new double[limit];
	Arrays.fill(denseBlock, 0, limit, 0);
	nonZeros = 0;
	if(sparseRows == null)
		return;
	for(int r = 0; r < sparseRows.length; r++)
	{
		SparseRow sRow = sparseRows[r];
		if(sRow == null) continue;
		int[] indexes = sRow.getIndexContainer();
		double[] vals = sRow.getValueContainer();
		for(int p = 0; p < sRow.size(); p++)
		{
			if(vals[p] == 0) continue;
			denseBlock[r * clen + indexes[p]] = vals[p];
			nonZeros++;
		}
	}
}
/**
 * In-place helper for sparse-unsafe binary ops: converts this block to the
 * representation expected for the output, then applies op to every cell pair.
 */
private void denseBinaryInPlaceHelp(BinaryOperator op, MatrixBlockDSM that) throws DMLRuntimeException
{
	boolean expectSparse = checkSparcityOnBinary(this, that);
	// Normalize the storage format before writing results.
	if(expectSparse && !this.sparse)
		denseToSparse();
	else if(!expectSparse && this.sparse)
		sparseToDense();
	for(int row = 0; row < rlen; row++)
	{
		for(int col = 0; col < clen; col++)
		{
			double combined = op.fn.execute(this.getValue(row, col), that.getValue(row, col));
			setValue(row, col, combined);
		}
	}
}
/**
 * In-place helper for sparse-safe binary ops. Converts this block to the
 * output representation, then:
 *  - both sparse: merges the two sparse rows per row index (rebuilding this
 *    block's row), handling null rows on either side;
 *  - otherwise: falls back to a dense cell-by-cell application.
 */
private void sparseBinaryInPlaceHelp(BinaryOperator op, MatrixBlockDSM that) throws DMLRuntimeException
{
boolean resultSparse=checkSparcityOnBinary(this, that);
if(resultSparse && !this.sparse)
denseToSparse();
else if(!resultSparse && this.sparse)
sparseToDense();
if(this.sparse && that.sparse)
{
//special case, if both matrices are all 0s, just return
if(this.sparseRows==null && that.sparseRows==null)
return;
// make sure both row arrays span all rlen rows before merging
if(this.sparseRows!=null)
adjustSparseRows(rlen-1);
if(that.sparseRows!=null)
that.adjustSparseRows(rlen-1);
if(this.sparseRows!=null && that.sparseRows!=null)
{
for(int r=0; r<rlen; r++)
{
if(this.sparseRows[r]==null && that.sparseRows[r]==null)
continue;
if(that.sparseRows[r]==null)
{
// only this side has values: apply op(value, 0) in place
double[] values=this.sparseRows[r].getValueContainer();
for(int i=0; i<this.sparseRows[r].size(); i++)
values[i]=op.fn.execute(values[i], 0);
}else
{
// both sides may have values: rebuild this row by merging
int estimateSize=0;
if(this.sparseRows[r]!=null)
estimateSize+=this.sparseRows[r].size();
if(that.sparseRows[r]!=null)
estimateSize+=that.sparseRows[r].size();
estimateSize=Math.min(clen, estimateSize);
//temp: keep the old row aside while this.sparseRows[r] is rebuilt
SparseRow thisRow=this.sparseRows[r];
this.sparseRows[r]=new SparseRow(estimateSize);
if(thisRow!=null)
{
// merge discards the old row's non-zeros, so uncount them first
nonZeros-=thisRow.size();
mergeForSparseBinary(op, thisRow.getValueContainer(),
thisRow.getIndexContainer(), thisRow.size(),
that.sparseRows[r].getValueContainer(),
that.sparseRows[r].getIndexContainer(), that.sparseRows[r].size(), r, this);
}else
{
// this row empty: result is op(0, thatValue) for that's non-zeros
appendRightForSparseBinary(op, that.sparseRows[r].getValueContainer(),
that.sparseRows[r].getIndexContainer(), that.sparseRows[r].size(), 0, r, this);
}
}
}
}else if(this.sparseRows==null)
{
// this block all zeros: build rows from that's non-zeros via op(0, v)
this.sparseRows=new SparseRow[rlen];
for(int r=0; r<rlen; r++)
{
if(that.sparseRows[r]==null)
continue;
this.sparseRows[r]=new SparseRow(that.sparseRows[r].size());
appendRightForSparseBinary(op, that.sparseRows[r].getValueContainer(),
that.sparseRows[r].getIndexContainer(), that.sparseRows[r].size(), 0, r, this);
}
}else
{
// that block all zeros: apply op(v, 0) to this block's non-zeros
for(int r=0; r<rlen; r++)
{
if(this.sparseRows[r]==null)
continue;
appendLeftForSparseBinary(op, this.sparseRows[r].getValueContainer(),
this.sparseRows[r].getIndexContainer(), this.sparseRows[r].size(), 0, r, this);
}
}
}else
{
// dense fallback: apply op to every cell pair
double thisvalue, thatvalue, resultvalue;
for(int r=0; r<rlen; r++)
for(int c=0; c<clen; c++)
{
thisvalue=this.getValue(r, c);
thatvalue=that.getValue(r, c);
resultvalue=op.fn.execute(thisvalue, thatvalue);
this.setValue(r, c, resultvalue);
}
}
}
/**
 * Builds a rows x cols sparse MatrixBlockDSM whose cells are independently
 * non-zero with probability {@code sparsity}; values are uniform in [0,1).
 * The same seed always yields the same matrix.
 */
public static MatrixBlockDSM getRandomSparseMatrix(int rows, int cols, double sparsity, long seed)
{
	Random rng = new Random(seed);
	MatrixBlockDSM block = new MatrixBlockDSM(rows, cols, true);
	block.sparseRows = new SparseRow[rows];
	for(int r = 0; r < rows; r++)
	{
		block.sparseRows[r] = new SparseRow();
		for(int c = 0; c < cols; c++)
		{
			// First draw decides presence, second draw is the value;
			// this order keeps the seeded sequence identical to before.
			if(rng.nextDouble() <= sparsity)
			{
				block.sparseRows[r].append(c, rng.nextDouble());
				block.nonZeros++;
			}
		}
	}
	return block;
}
/**
 * Builds a rows x cols sparse MatrixBlock1D whose cells are independently
 * non-zero with probability {@code sparsity}; values are uniform in [0,1).
 * The same seed always yields the same matrix.
 */
public static MatrixBlock1D getRandomSparseMatrix1D(int rows, int cols, double sparsity, long seed)
{
	Random rng = new Random(seed);
	MatrixBlock1D block = new MatrixBlock1D(rows, cols, true);
	for(int r = 0; r < rows; r++)
	{
		for(int c = 0; c < cols; c++)
		{
			// First draw decides presence, second draw is the value;
			// this order keeps the seeded sequence identical to before.
			if(rng.nextDouble() <= sparsity)
				block.addValue(r, c, rng.nextDouble());
		}
	}
	return block;
}
/**
 * Debug rendering of this block: header (sparsity flag, non-zero count,
 * dimensions) followed by either the sparse rows or a tab-separated dense
 * dump. If the block contains no non-zero value, only the short header is
 * returned. Rewritten with StringBuilder to avoid O(n^2) string concatenation;
 * output is byte-identical to the original.
 */
public String toString()
{
	StringBuilder sb = new StringBuilder();
	sb.append("sparse? = ").append(sparse).append("\n");
	sb.append("nonzeros = ").append(nonZeros).append("\n");
	sb.append("size: ").append(rlen).append(" X ").append(clen).append("\n");
	boolean toprint = false;
	if(sparse)
	{
		int len = (sparseRows != null) ? sparseRows.length : 0;
		int i = 0;
		for(; i < len; i++)
		{
			// append(Object) prints "null" for unallocated rows, as before
			sb.append("row +").append(i).append(": ").append(sparseRows[i]).append("\n");
			if(sparseRows[i] != null)
			{
				for(int j = 0; j < sparseRows[i].size(); j++)
					if(sparseRows[i].getValueContainer()[j] != 0.0)
						toprint = true;
			}
		}
		for(; i < rlen; i++)
		{
			sb.append("row +").append(i).append(": null\n");
		}
	}else
	{
		// all-zero dense block: header only
		if(this.denseBlock == null)
			return sb.toString();
		int start = 0;
		for(int i = 0; i < rlen; i++)
		{
			for(int j = 0; j < clen; j++)
			{
				if(this.denseBlock[start + j] != 0.0)
					toprint = true;
				sb.append(this.denseBlock[start + j]).append("\t");
			}
			sb.append("\n");
			start += clen;
		}
	}
	if(!toprint)
		return "sparse? = " + sparse + "\nnonzeros = " + nonZeros + "\n";
	return sb.toString();
}
/**
 * Compares two matrix implementations cell by cell with absolute tolerance
 * 1e-10, printing every mismatch; returns false if any cell differs.
 */
public static boolean equal(MatrixBlock1D m1, MatrixBlockDSM m2)
{
	boolean matches = true;
	for(int r = 0; r < m1.getNumRows(); r++)
	{
		for(int c = 0; c < m1.getNumColumns(); c++)
		{
			double diff = Math.abs(m1.getValue(r, c) - m2.getValue(r, c));
			if(diff > 0.0000000001)
			{
				System.out.println(m1.getValue(r, c) + " vs " + m2.getValue(r, c) + ":" + diff);
				matches = false;
			}
		}
	}
	return matches;
}
/** ObjectFactory producing random sparse MatrixBlock1D instances for memory benchmarks. */
static class Factory1D implements ObjectFactory
{
// matrix dimensions and expected fraction of non-zeros
int rows, cols;
double sparsity;
public Factory1D(int rows, int cols, double sparsity) {
this.rows=rows;
this.cols=cols;
this.sparsity=sparsity;
}
// Fixed seed (1) keeps the produced matrices reproducible across runs.
public Object makeObject() {
return getRandomSparseMatrix1D(rows, cols, sparsity, 1);
}
}
/** ObjectFactory producing random sparse MatrixBlockDSM instances for memory benchmarks. */
static class FactoryDSM implements ObjectFactory
{
// matrix dimensions and expected fraction of non-zeros
int rows, cols;
double sparsity;
public FactoryDSM(int rows, int cols, double sparsity) {
this.rows=rows;
this.cols=cols;
this.sparsity=sparsity;
}
// Fixed seed (1) keeps the produced matrices reproducible across runs.
public Object makeObject() {
return getRandomSparseMatrix(rows, cols, sparsity, 1);
}
}
/**
 * Prints the old/new speedup ratio for one benchmark. The {@code info} label
 * is currently unused (only the bare ratio is printed).
 */
public static void printResults(String info, long oldtime, long newtime)
{
	double ratio = (double) oldtime / (double) newtime;
	System.out.println(ratio);
}
public static void onerun(int rows, int cols, double sparsity, int runs) throws Exception
{
// MemoryTestBench bench=new MemoryTestBench();
// bench.showMemoryUsage(new Factory1D(rows, cols, sparsity));
// bench.showMemoryUsage(new FactoryDSM(rows, cols, sparsity));
System.out.println("
// System.out.println("rows: "+rows+", cols: "+cols+", sparsity: "+sparsity+", runs: "+runs);
System.out.println(sparsity);
MatrixBlock1D m_old=getRandomSparseMatrix1D(rows, cols, sparsity, 1);
//m_old.examSparsity();
MatrixBlock1D m_old2=getRandomSparseMatrix1D(rows, cols, sparsity, 2);
//m_old2.examSparsity();
MatrixBlock1D m_old3=new MatrixBlock1D(rows, cols, true);
//System.out.println(m_old);
MatrixBlockDSM m_new=getRandomSparseMatrix(rows, cols, sparsity, 1);
//m_new.examSparsity();
MatrixBlockDSM m_new2=getRandomSparseMatrix(rows, cols, sparsity, 2);
// m_new2.examSparsity();
MatrixBlockDSM m_new3=new MatrixBlockDSM(rows, cols, true);
// System.out.println(m_new);
long start, oldtime, newtime;
//Operator op;
UnaryOperator op=new UnaryOperator(Builtin.getBuiltinFnObject("round"));
/* start=System.nanoTime();
for(int i=0; i<runs; i++)
m_old.unaryOperationsInPlace(op);
oldtime=System.nanoTime()-start;
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_new.unaryOperationsInPlace(op);
newtime=System.nanoTime()-start;
if(!equal(m_old, m_new))
System.err.println("result doesn't match!");
printResults("unary inplace op: ", oldtime, newtime);
// System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
*/
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_old.unaryOperations(op, m_old3);
oldtime=System.nanoTime()-start;
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_new.unaryOperations(op, m_new3);
newtime=System.nanoTime()-start;
if(!equal(m_old3, m_new3))
System.err.println("result doesn't match!");
//System.out.println("unary op: "+oldtime+", "+newtime+", "+((double)oldtime/(double)newtime));
printResults("unary op: ", oldtime, newtime);
// System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
LeftScalarOperator op1=new LeftScalarOperator(Multiply.getMultiplyFnObject(), 2);
/* start=System.nanoTime();
for(int i=0; i<runs; i++)
m_old.scalarOperationsInPlace(op1);
oldtime=System.nanoTime()-start;
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_new.scalarOperationsInPlace(op1);
newtime=System.nanoTime()-start;
if(!equal(m_old, m_new))
System.err.println("result doesn't match!");
printResults("scalar inplace op: ", oldtime, newtime);
// System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
*/
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_old.scalarOperations(op1, m_old3);
oldtime=System.nanoTime()-start;
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_new.scalarOperations(op1, m_new3);
newtime=System.nanoTime()-start;
if(!equal(m_old3, m_new3))
System.err.println("result doesn't match!");
// System.out.println("scalar op: "+oldtime+", "+newtime+", "+((double)oldtime/(double)newtime));
printResults("scalar op: ", oldtime, newtime);
// System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
BinaryOperator op11=new BinaryOperator(Plus.getPlusFnObject());
/* start=System.nanoTime();
for(int i=0; i<runs; i++)
{
long begin=System.nanoTime();
m_old.binaryOperationsInPlace(op11, m_old2);
System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
// System.out.println(System.nanoTime()-begin);
}
oldtime=System.nanoTime()-start;
start=System.nanoTime();
// System.out.println("~~~");
for(int i=0; i<runs; i++)
{
long begin=System.nanoTime();
m_new.binaryOperationsInPlace(op11, m_new2);
System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
// System.out.println(System.nanoTime()-begin);
}
newtime=System.nanoTime()-start;
if(!equal(m_old, m_new))
System.err.println("result doesn't match!");
//System.out.println("binary op: "+oldtime+", "+newtime+", "+((double)oldtime/(double)newtime));
printResults("binary op inplace: ", oldtime, newtime);
System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
*/
start=System.nanoTime();
for(int i=0; i<runs; i++)
{
// long begin=System.nanoTime();
m_old.binaryOperations(op11, m_old2, m_old3);
// System.out.println(System.nanoTime()-begin);
}
oldtime=System.nanoTime()-start;
// System.out.println("~~~");
start=System.nanoTime();
for(int i=0; i<runs; i++)
{
// long begin=System.nanoTime();
m_new.binaryOperations(op11, m_new2, m_new3);
// System.out.println(System.nanoTime()-begin);
}
newtime=System.nanoTime()-start;
if(!equal(m_old3, m_new3))
System.err.println("result doesn't match!");
//System.out.println("binary op: "+oldtime+", "+newtime+", "+((double)oldtime/(double)newtime));
printResults("binary op: ", oldtime, newtime);
// System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
ReorgOperator op12=new ReorgOperator(SwapIndex.getSwapIndexFnObject());
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_old.reorgOperations(op12, m_old3, 0, 0, m_old.getNumRows());
oldtime=System.nanoTime()-start;
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_new.reorgOperations(op12, m_new3, 0, 0, m_old.getNumRows());
newtime=System.nanoTime()-start;
if(!equal(m_old3, m_new3))
System.err.println("result doesn't match!");
//System.out.println("unary op: "+oldtime+", "+newtime+", "+((double)oldtime/(double)newtime));
printResults("reorg op: ", oldtime, newtime);
// System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
/* AggregateBinaryOperator op13=new AggregateBinaryOperator(Multiply.getMultiplyFnObject(), new AggregateOperator(0, Plus.getPlusFnObject()));
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_old.aggregateBinaryOperations(m_old, m_old2, m_old3, op13);
oldtime=System.nanoTime()-start;
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_new.aggregateBinaryOperations(m_new, m_new2, m_new3, op13);
newtime=System.nanoTime()-start;
if(!equal(m_old3, m_new3))
System.err.println("result doesn't match!");
//System.out.println("binary op: "+oldtime+", "+newtime+", "+((double)oldtime/(double)newtime));
printResults("aggregate binary op: ", oldtime, newtime);
// System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
*/
/* AggregateUnaryOperator op14=new AggregateUnaryOperator(new AggregateOperator(0, Plus.getPlusFnObject()), ReduceAll.getReduceAllFnObject());
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_old.aggregateUnaryOperations(op14, m_old3, m_old.getNumRows(), m_old.getNumColumns(), new MatrixIndexes(1, 1));
oldtime=System.nanoTime()-start;
start=System.nanoTime();
for(int i=0; i<runs; i++)
m_new.aggregateUnaryOperations(op14, m_new3, m_old.getNumRows(), m_old.getNumColumns(), new MatrixIndexes(1, 1));
newtime=System.nanoTime()-start;
if(!equal(m_old3, m_new3))
System.err.println("result doesn't match!");
// System.out.println("scalar op: "+oldtime+", "+newtime+", "+((double)oldtime/(double)newtime));
printResults("aggregate unary op: ", oldtime, newtime);
// System.out.println("sparsity of m_mew: "+m_new.isInSparseFormat()+"\t sparsity of m_old: "+m_old.isInSparseFormat());
*/
}
/** Runs the benchmark suite over a fixed sweep of sparsities at 1000x1000. */
public static void main(String[] args) throws Exception
{
	final int rows = 1000, cols = 1000, runs = 10;
	final double[] sparsities = {0.005, 0.01, 0.02, 0.04, 0.06, 0.08, 0.1};
	for(int i = 0; i < sparsities.length; i++)
		onerun(rows, cols, sparsities[i], runs);
}
} |
package com.bbn.kbp.events2014.bin.QA.Warnings;
import com.bbn.bue.common.StringUtils;
import com.bbn.bue.common.annotations.MoveToBUECommon;
import com.bbn.bue.common.files.FileUtils;
import com.bbn.bue.common.symbols.Symbol;
import com.bbn.kbp.events2014.Response;
import com.bbn.kbp.events2014.TypeRoleFillerRealis;
import com.bbn.kbp.events2014.bin.QA.AssessmentQA;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.collect.Table;
import com.google.common.io.Files;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
public class ConflictingTypeWarningRule extends OverlapWarningRule {
private static final Logger log = LoggerFactory.getLogger(ConflictingTypeWarningRule.class);
private final ImmutableTable<Symbol, Symbol, ImmutableSet<Symbol>> eventToRoleToFillerType;
/**
 * Private constructor; instances are built via {@link #create}.
 *
 * @param eventToRoleToFillerType maps (event type, role) to the set of
 *        filler types permitted for that role
 */
private ConflictingTypeWarningRule(final Table<Symbol, Symbol, ImmutableSet<Symbol>> eventToRoleToFillerType) {
super();
// defensive immutable copy of the caller's table
this.eventToRoleToFillerType = ImmutableTable.copyOf(eventToRoleToFillerType);
}
@Override
protected boolean warningAppliesTo(TypeRoleFillerRealis fst, TypeRoleFillerRealis snd) {
  // The warning is only relevant for two distinct TRFRs whose event types differ.
  return fst != snd && !fst.type().equals(snd.type());
}
/**
 * Flags responses in {@code second} whose canonical-argument string matches a
 * response in {@code first} but whose (event type, role) shares no permitted
 * filler type with the matching response's — i.e. the same string is used in
 * type-incompatible slots. Responses with blank CAS strings are skipped.
 */
@Override
protected Multimap<? extends Response, ? extends Warning> findOverlap(
    final TypeRoleFillerRealis fst, final Iterable<Response> first,
    final TypeRoleFillerRealis snd, final Iterable<Response> second) {
  final ImmutableMultimap.Builder<Response, Warning> result =
      ImmutableMultimap.builder();
  for (final Response a : first) {
    // NOTE(review): Table.get returns null for an unknown (type, role) pair;
    // this code assumes the table covers every pair seen in responses —
    // confirm, otherwise retainAll/newHashSet below would NPE.
    final Set<Symbol> aTypes = eventToRoleToFillerType.get(a.type(), a.role());
    for (final Response b : second) {
      if (a == b || b.canonicalArgument().string().trim().isEmpty()) {
        continue;
      }
      if (a.canonicalArgument().equals(b.canonicalArgument())) {
        // mutable copy so we can intersect with aTypes in place
        final Set<Symbol> commonTypes =
            Sets.newHashSet(eventToRoleToFillerType.get(b.type(), b.role()));
        commonTypes.retainAll(aTypes);
        if (commonTypes.isEmpty()) {
          result.put(b, Warning.create(String
              .format(
                  "%s has same string as %s but mismatched types %s/%s and %s/%s in trfr %s",
                  a.canonicalArgument().string(),
                  b.canonicalArgument().string(), a.type().asString(), a.role().asString(),
                  b.type().asString(), b.role().asString(), AssessmentQA.readableTRFR(snd)),
              Warning.SEVERITY.MINIOR));
        }
      }
    }
  }
  return result.build();
}
public static ConflictingTypeWarningRule create(File argsFile, File rolesFile)
throws IOException {
final ImmutableMultimap<Symbol, Symbol> roleToTypes = FileUtils.loadSymbolMultimap(
Files.asCharSource(rolesFile, Charsets.UTF_8));
final ImmutableMultimap<Symbol, Symbol> eventTypesToRoles = FileUtils.loadSymbolMultimap(
Files.asCharSource(argsFile, Charsets.UTF_8));
final ImmutableTable<Symbol, Symbol, ImmutableSet<Symbol>> eventToRoleToFillerType =
composeToTableOfSets(roleToTypes, eventTypesToRoles);
log.info("Role to type mapping: {}",
StringUtils.NewlineJoiner.withKeyValueSeparator(" -> ").join(roleToTypes.asMap()));
for (Symbol r : eventToRoleToFillerType.rowKeySet()) {
for (Symbol c : eventToRoleToFillerType.columnKeySet()) {
if (c != null) {
log.info("{}.{}: {}", r, c, eventToRoleToFillerType.get(r, c));
}
}
}
return new ConflictingTypeWarningRule(ImmutableTable.copyOf(eventToRoleToFillerType));
}
@MoveToBUECommon
private static <R,C,V> ImmutableTable<R, C, ImmutableSet<V>> composeToTableOfSets(
final Multimap<R, C> first,
final Multimap<C, V> second) {
final ImmutableTable.Builder<R,C,ImmutableSet<V>> ret = ImmutableTable.builder();
for (final Map.Entry<R, C> firstEntry : first.entries()) {
final R rowKey = firstEntry.getKey();
final C colKey = firstEntry.getValue();
ret.put(rowKey, colKey, ImmutableSet.copyOf(second.get(colKey)));
}
return ret.build();
}
} |
package org.safehaus.subutai.ui.pig.wizard;
import com.vaadin.server.FileResource;
import com.vaadin.shared.ui.label.ContentMode;
import com.vaadin.ui.*;
import org.safehaus.subutai.shared.protocol.FileUtil;
import org.safehaus.subutai.ui.pig.PigUI;
/**
* @author dilshat
*/
public class WelcomeStep extends Panel {
public WelcomeStep(final Wizard wizard) {
setSizeFull();
GridLayout grid = new GridLayout(10, 6);
grid.setSpacing(true);
grid.setMargin(true);
grid.setSizeFull();
Label welcomeMsg = new Label("<center><h2>Welcome to Pig Installation Wizard!</h2>");
welcomeMsg.setContentMode(ContentMode.HTML);
grid.addComponent(welcomeMsg, 3, 1, 6, 2);
Label logoImg = new Label();
logoImg.setIcon(new FileResource(FileUtil.getFile(PigUI.MODULE_IMAGE, this)));
logoImg.setContentMode(ContentMode.HTML);
logoImg.setHeight(200, Unit.PIXELS);
logoImg.setWidth(180, Unit.PIXELS);
grid.addComponent(logoImg, 1, 3, 2, 5);
Button next = new Button("Start");
next.addStyleName("default");
next.setWidth(100, Unit.PIXELS);
grid.addComponent(next, 6, 4, 6, 4);
grid.setComponentAlignment(next, Alignment.BOTTOM_RIGHT);
next.addClickListener(new Button.ClickListener() {
@Override
public void buttonClick(Button.ClickEvent clickEvent) {
wizard.init();
wizard.next();
}
});
setContent(grid);
}
} |
package org.geneontology.minerva.server.external;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.semanticweb.owlapi.model.IRI;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ExecutionError;
import com.google.common.util.concurrent.UncheckedExecutionException;
public class CachingExternalLookupService implements ExternalLookupService {
private final LoadingCache<IRI, List<LookupEntry>> cache;
private final ExternalLookupService service;
public CachingExternalLookupService(ExternalLookupService service, int size, long duration, TimeUnit unit) {
this.service = service;
cache = CacheBuilder.newBuilder()
.expireAfterWrite(duration, unit)
.maximumSize(size)
.build(new CacheLoader<IRI, List<LookupEntry>>() {
@Override
public List<LookupEntry> load(IRI key) throws Exception {
List<LookupEntry> lookup = CachingExternalLookupService.this.service.lookup(key);
if (lookup == null || lookup.isEmpty()) {
throw new Exception("No legal value for key.");
}
return lookup;
}
});
}
public CachingExternalLookupService(Iterable<ExternalLookupService> services, int size, long duration, TimeUnit unit) {
this(new CombinedExternalLookupService(services), size, duration, unit);
}
public CachingExternalLookupService(int size, long duration, TimeUnit unit, ExternalLookupService...services) {
this(Arrays.asList(services), size, duration, unit);
}
@Override
public List<LookupEntry> lookup(IRI id) {
try {
return cache.get(id);
} catch (ExecutionException e) {
return null;
} catch (UncheckedExecutionException e) {
return null;
} catch (ExecutionError e) {
return null;
}
}
@Override
public LookupEntry lookup(IRI id, String taxon) {
LookupEntry entry = null;
List<LookupEntry> list = cache.getUnchecked(id);
for (LookupEntry current : list) {
if (taxon.equals(current.taxon)) {
entry = current;
break;
}
}
return entry;
}
@Override
public String toString() {
return "Caching("+service.toString()+")";
}
} |
package hudson.util;
import com.thoughtworks.xstream.converters.Converter;
import com.thoughtworks.xstream.converters.MarshallingContext;
import com.thoughtworks.xstream.converters.UnmarshallingContext;
import com.thoughtworks.xstream.converters.collections.AbstractCollectionConverter;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import com.thoughtworks.xstream.mapper.Mapper;
import hudson.model.Describable;
import hudson.model.Saveable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
* Collection whose change is notified to the parent object for persistence.
*
* @author Kohsuke Kawaguchi
* @since 1.MULTISOURCE
*/
public class PersistedList<T> implements Iterable<T> {
    /** Backing store; copy-on-write, so iteration sees a stable snapshot. */
    protected final CopyOnWriteList<T> data = new CopyOnWriteList<T>();
    /** Object persisted (via {@link Saveable#save()}) whenever this list is mutated. */
    protected Saveable owner = Saveable.NOOP;

    protected PersistedList() {
    }

    protected PersistedList(Collection<? extends T> initialList) {
        data.replaceBy(initialList);
    }

    public PersistedList(Saveable owner) {
        setOwner(owner);
    }

    public void setOwner(Saveable owner) {
        this.owner = owner;
    }

    /**
     * Adds an item and persists the owner.
     *
     * @throws IOException if saving the owner fails
     */
    public void add(T item) throws IOException {
        data.add(item);
        onModified();
    }

    /**
     * Adds all items and persists the owner once.
     *
     * @throws IOException if saving the owner fails
     */
    public void addAll(Collection<? extends T> items) throws IOException {
        data.addAll(items);
        onModified();
    }

    /**
     * Replaces the entire contents and persists the owner.
     *
     * @throws IOException if saving the owner fails
     */
    public void replaceBy(Collection<? extends T> col) throws IOException {
        data.replaceBy(col);
        onModified();
    }

    public T get(int index) {
        return data.get(index);
    }

    /**
     * Returns the first element that is an instance of the given type, or null.
     */
    public <U extends T> U get(Class<U> type) {
        for (T t : data)
            if(type.isInstance(t))
                return type.cast(t);
        return null;
    }

    /**
     * Gets all instances that matches the given type.
     */
    public <U extends T> List<U> getAll(Class<U> type) {
        List<U> r = new ArrayList<U>();
        for (T t : data)
            if(type.isInstance(t))
                r.add(type.cast(t));
        return r;
    }

    public int size() {
        return data.size();
    }

    /**
     * Removes an instance by its type.
     */
    public void remove(Class<? extends T> type) throws IOException {
        // exact class match (==), not isInstance; removes at most one element
        for (T t : data) {
            if(t.getClass()==type) {
                data.remove(t);
                onModified();
                return;
            }
        }
    }

    /**
     * Removes the given element; persists the owner only if something was removed.
     *
     * @return true if the list contained the element
     * @throws IOException if saving the owner fails
     */
    public boolean remove(T o) throws IOException {
        boolean b = data.remove(o);
        if (b) onModified();
        return b;
    }

    /**
     * Removes every element whose class is exactly the given type, then saves once.
     *
     * @throws IOException if saving the owner fails
     */
    public void removeAll(Class<? extends T> type) throws IOException {
        boolean modified=false;
        // safe to remove while iterating: iteration runs over a copy-on-write snapshot
        for (T t : data) {
            if(t.getClass()==type) {
                data.remove(t);
                modified=true;
            }
        }
        if(modified)
            onModified();
    }

    /**
     * Empties the list.
     *
     * NOTE(review): unlike the other mutators this does not call onModified(), so the
     * owner is not saved — presumably intentional; confirm before relying on it.
     */
    public void clear() {
        data.clear();
    }

    public Iterator<T> iterator() {
        return data.iterator();
    }

    /**
     * Called when a list is mutated.
     */
    protected void onModified() throws IOException {
        owner.save();
    }

    /**
     * Returns the snapshot view of instances as list.
     */
    public List<T> toList() {
        return data.getView();
    }

    /**
     * Gets all the {@link Describable}s in an array.
     */
    public T[] toArray(T[] array) {
        return data.toArray(array);
    }

    public void addAllTo(Collection<? super T> dst) {
        data.addAllTo(dst);
    }

    public boolean isEmpty() {
        return data.isEmpty();
    }

    /**
     * {@link Converter} implementation for XStream.
     *
     * Serializaion form is compatible with plain {@link List}.
     */
    public static class ConverterImpl extends AbstractCollectionConverter {
        // delegate that knows how to read the underlying list representation
        CopyOnWriteList.ConverterImpl copyOnWriteListConverter;

        public ConverterImpl(Mapper mapper) {
            super(mapper);
            copyOnWriteListConverter = new CopyOnWriteList.ConverterImpl(mapper());
        }

        public boolean canConvert(Class type) {
            // handle subtypes in case the onModified method is overridden.
            return PersistedList.class.isAssignableFrom(type);
        }

        public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
            // serialize elements only; owner is re-attached by the enclosing object
            for (Object o : (PersistedList) source)
                writeItem(o, context, writer);
        }

        public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
            CopyOnWriteList core = copyOnWriteListConverter.unmarshal(reader, context);

            try {
                // instantiate the concrete PersistedList subtype via its no-arg constructor
                PersistedList r = (PersistedList)context.getRequiredType().newInstance();
                r.data.replaceBy(core);
                return r;
            } catch (InstantiationException e) {
                // rethrown as Error to abort deserialization while preserving the cause
                InstantiationError x = new InstantiationError();
                x.initCause(e);
                throw x;
            } catch (IllegalAccessException e) {
                IllegalAccessError x = new IllegalAccessError();
                x.initCause(e);
                throw x;
            }
        }
    }
}
package org.wildfly.extension.mod_cluster;
import static org.wildfly.extension.mod_cluster.ModClusterLogger.ROOT_LOGGER;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.jboss.as.network.OutboundSocketBinding;
import org.jboss.as.network.SocketBinding;
import org.jboss.as.network.SocketBindingManager;
import org.jboss.modcluster.ModClusterService;
import org.jboss.modcluster.config.ProxyConfiguration;
import org.jboss.modcluster.config.impl.ModClusterConfig;
import org.jboss.modcluster.load.LoadBalanceFactorProvider;
import org.jboss.msc.inject.Injector;
import org.jboss.msc.inject.MapInjector;
import org.jboss.msc.service.Service;
import org.jboss.msc.service.ServiceName;
import org.jboss.msc.service.StartContext;
import org.jboss.msc.service.StopContext;
import org.jboss.msc.value.InjectedValue;
import org.jboss.msc.value.Value;
/**
* Service configuring and starting mod_cluster.
*
* @author Jean-Frederic Clere
* @author Radoslav Husar
*/
public class ContainerEventHandlerService implements Service<ModClusterService> {
public static final ServiceName SERVICE_NAME = ServiceName.JBOSS.append(ModClusterExtension.SUBSYSTEM_NAME);
public static final ServiceName CONFIG_SERVICE_NAME = SERVICE_NAME.append("config");
private LoadBalanceFactorProvider load;
private ModClusterConfig config;
private final Value<SocketBindingManager> bindingManager;
private final InjectedValue<SocketBinding> binding = new InjectedValue<>();
private final Map<String, OutboundSocketBinding> outboundSocketBindings = new HashMap<>();
private volatile ModClusterService eventHandler;
ContainerEventHandlerService(ModClusterConfig config, LoadBalanceFactorProvider load, Value<SocketBindingManager> bindingManager) {
this.config = config;
this.load = load;
this.bindingManager = bindingManager;
}
@Override
public ModClusterService getValue() throws IllegalStateException, IllegalArgumentException {
return this.eventHandler;
}
@Override
public void start(StartContext context) {
ROOT_LOGGER.debugf("Starting mod_cluster extension");
boolean isMulticast = isMulticastEnabled(bindingManager.getValue().getDefaultInterfaceBinding().getNetworkInterfaces());
// Resolve and configure proxies
if (outboundSocketBindings.size() > 0) {
List<ProxyConfiguration> proxies = new LinkedList<>();
for (final OutboundSocketBinding binding : outboundSocketBindings.values()) {
proxies.add(new ProxyConfiguration() {
@Override
public InetSocketAddress getRemoteAddress() {
// Both host and port may not be null in the model, no need to validate here
// Don't do resolving here, let mod_cluster deal with it
return new InetSocketAddress(binding.getUnresolvedDestinationAddress(), binding.getDestinationPort());
}
@Override
public InetSocketAddress getLocalAddress() {
if (binding.getOptionalSourceAddress() != null) {
return new InetSocketAddress(binding.getOptionalSourceAddress(), binding.getAbsoluteSourcePort() == null ? 0 : binding.getAbsoluteSourcePort());
} else if (binding.getAbsoluteSourcePort() != null) {
// Bind to port only if source address is not configured
return new InetSocketAddress(binding.getAbsoluteSourcePort());
}
// No binding configured so don't bind
return null;
}
});
}
config.setProxyConfigurations(proxies);
}
// Set advertise if no proxies are configured
if (config.getProxyConfigurations().isEmpty()) {
config.setAdvertise(isMulticast);
}
// Read node to set configuration.
if (config.getAdvertise()) {
// There should be a socket-binding.... Well no it needs an advertise socket :-(
final SocketBinding binding = this.binding.getOptionalValue();
if (binding != null) {
config.setAdvertiseSocketAddress(binding.getMulticastSocketAddress());
config.setAdvertiseInterface(binding.getSocketAddress().getAddress());
if (!isMulticast) {
ROOT_LOGGER.multicastInterfaceNotAvailable();
}
}
}
this.eventHandler = new ModClusterService(config, load);
}
private boolean isMulticastEnabled(Collection<NetworkInterface> ifaces) {
for (NetworkInterface iface : ifaces) {
try {
if (iface.isUp() && (iface.supportsMulticast() || iface.isLoopback())) {
return true;
}
} catch (SocketException e) {
// Ignore
}
}
return false;
}
@Override
public void stop(StopContext context) {
this.eventHandler.shutdown();
this.eventHandler = null;
}
Injector<SocketBinding> getSocketBindingInjector() {
return this.binding;
}
Injector<OutboundSocketBinding> getOutboundSocketBindingInjector(String name) {
return new MapInjector<>(outboundSocketBindings, name);
}
} |
package lucee.commons.sql;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.regex.Pattern;
import lucee.print;
import lucee.commons.date.TimeZoneUtil;
import lucee.commons.io.SystemUtil;
import lucee.commons.lang.ExceptionUtil;
import lucee.commons.lang.ParserString;
import lucee.commons.lang.StringUtil;
import lucee.runtime.config.Config;
import lucee.runtime.db.driver.ConnectionProxy;
import lucee.runtime.engine.ThreadLocalPageContext;
import lucee.runtime.exp.PageException;
import lucee.runtime.op.Caster;
import lucee.runtime.type.sql.BlobImpl;
import lucee.runtime.type.sql.ClobImpl;
public class SQLUtil {
private static final String ESCAPE_CHARS = "\\{}[]^$*.?+";
public static Pattern pattern(String pstr, boolean ignoreCase) {
char[] carr = pstr.toCharArray();
char c;
StringBuilder sb = new StringBuilder();
for (int i = 0; i < carr.length; i++) {
c = carr[i];
if (ESCAPE_CHARS.indexOf(c) != -1) {
sb.append('\\');
sb.append(c);
}
else if (c == '%') {
sb.append(".*");
}
else if (c == '_') {
sb.append(".");
}
else {
if (ignoreCase) {
sb.append('[');
sb.append(Character.toLowerCase(c));
sb.append('|');
sb.append(Character.toUpperCase(c));
sb.append(']');
}
else sb.append(c);
}
}
return Pattern.compile(sb.toString());
}
public static boolean match(Pattern pattern, String string) {
return pattern.matcher(string).matches();
}
public static String removeLiterals(String sql) {
if (StringUtil.isEmpty(sql)) return sql;
return removeLiterals(new ParserString(sql), true);
}
private static String removeLiterals(ParserString ps, boolean escapeMysql) {
StringBuilder sb = new StringBuilder();
char c, p = (char) 0;
boolean inside = false;
do {
c = ps.getCurrent();
if (c == '\'') {
if (inside) {
if (escapeMysql && p == '\\') {}
else if (ps.hasNext() && ps.getNext() == '\'') ps.next();
else inside = false;
}
else {
inside = true;
}
}
else {
if (!inside && c != '*' && c != '=' && c != '?') sb.append(c);
}
p = c;
ps.next();
}
while (!ps.isAfterLast());
if (inside && escapeMysql) {
ps.setPos(0);
return removeLiterals(ps, false);
}
return sb.toString();
}
/**
* create a blog Object
*
* @param conn
* @param value
* @return
* @throws PageException
* @throws SQLException
*/
public static Blob toBlob(Connection conn, Object value) throws PageException, SQLException {
if (value instanceof Blob) return (Blob) value;
// Java >= 1.6
if (SystemUtil.JAVA_VERSION >= SystemUtil.JAVA_VERSION_6) {
try {
Blob blob = conn.createBlob();
blob.setBytes(1, Caster.toBinary(value));
return blob;
}
catch (Throwable t) {
ExceptionUtil.rethrowIfNecessary(t);
return BlobImpl.toBlob(value);
}
}
// Java < 1.6
if (isOracle(conn)) {
Blob blob = OracleBlob.createBlob(conn, Caster.toBinary(value), null);
if (blob != null) return blob;
}
return BlobImpl.toBlob(value);
}
/**
* create a clob Object
*
* @param conn
* @param value
* @return
* @throws PageException
* @throws SQLException
*/
public static Clob toClob(Connection conn, Object value) throws PageException, SQLException {
if (value instanceof Clob) return (Clob) value;
// Java >= 1.6
if (SystemUtil.JAVA_VERSION >= SystemUtil.JAVA_VERSION_6) {
Clob clob = conn.createClob();
clob.setString(1, Caster.toString(value));
return clob;
}
// Java < 1.6
if (isOracle(conn)) {
Clob clob = OracleClob.createClob(conn, Caster.toString(value), null);
if (clob != null) return clob;
}
return ClobImpl.toClob(value);
}
public static boolean isOracle(Connection conn) {
if (conn instanceof ConnectionProxy) conn = ((ConnectionProxy) conn).getConnection();
return StringUtil.indexOfIgnoreCase(conn.getClass().getName(), "oracle") != -1;
}
public static void closeEL(Statement stat) {
if (stat != null) {
try {
stat.close();
}
catch (SQLException e) {}
}
}
public static void closeEL(Connection conn) {
if (conn != null) {
try {
conn.close();
}
catch (SQLException e) {}
}
}
public static void closeEL(ResultSet rs) {
if (rs != null) {
try {
rs.close();
}
catch (SQLException e) {}
}
}
public static String connectionStringTranslatedPatch(Config config, String connStr) {
print.e("1 ->" + connStr);
if (connStr.equalsIgnoreCase("null?USEUNICODE=true")) print.ds();
if (connStr == null || !StringUtil.startsWithIgnoreCase(connStr, "jdbc:mysql://")) return connStr;
// MySQL
if (StringUtil.indexOfIgnoreCase(connStr, "serverTimezone=") != -1) {
print.e("1.1 ->" + connStr);
return connStr;
}
char del = connStr.indexOf('?') != -1 ? '&' : '?';
print.e("2 ->" + (connStr + del + "serverTimezone=" + TimeZoneUtil.toString(ThreadLocalPageContext.getTimeZone(config))));
return connStr + del + "serverTimezone=" + TimeZoneUtil.toString(ThreadLocalPageContext.getTimeZone(config));
}
} |
package roart.indicator;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.math3.util.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import roart.config.MyMyConfig;
import roart.db.DbAccess;
import roart.db.DbDao;
import roart.model.ResultItemTable;
import roart.model.ResultItemTableRow;
import roart.model.StockItem;
import roart.pipeline.Pipeline;
import roart.pipeline.PipelineConstants;
import roart.util.Constants;
import roart.util.MarketData;
import roart.util.PeriodData;
import roart.util.TaUtil;
public abstract class Indicator {
protected static Logger log = LoggerFactory.getLogger(Indicator.class);
protected String title;
protected MyMyConfig conf;
protected int category;
protected String key;
protected int fieldSize = 0;
protected Map<String, MarketData> marketdatamap;
protected Map<String, PeriodData> periodDataMap;
protected Map<String, Integer>[] periodmap;
protected Object[] emptyField;
protected Map<String, Double[][]> listMap;
protected Map<String, double[][]> truncListMap;
// TODO save and return this map
// TODO need getters for this and not? buy/sell
protected Map<String, Object[]> objectMap;
protected Map<String, Object[]> objectFixedMap;
//Map<String, Double> resultMap;
protected Map<String, Double[]> calculatedMap;
protected Map<String, Object[]> resultMap;
protected Map<String, Map<String, Object[]>> marketObjectMap;
protected Map<String, Map<String, Object[]>> marketResultMap;
protected Map<String, Map<String, Double[]>> marketCalculatedMap;
public Indicator(MyMyConfig conf, String string, int category) {
this.title = string;
this.conf = conf;
this.category = category;
}
abstract public boolean isEnabled();
protected abstract Double[] getCalculated(MyMyConfig conf, Map<String, Object[]> objectMap, String id);
protected abstract void getFieldResult(MyMyConfig conf, TaUtil tu, Double[] momentum, Object[] fields);
public Object[] getResultItemTitle() {
Object[] titleArray = new Object[1];
titleArray[0] = title;
return titleArray;
}
/*
public Object calculate(double[] array) {
return null;
}
*/
public Object calculate(double[][] array) {
return null;
}
/*
public Object calculate(Double[] array) {
return calculate(ArrayUtils.toPrimitive(array));
}
*/
public Object calculate(Double[][] array) {
double[][] newArray = new double[array.length][];
for (int i = 0; i < array.length; i ++) {
newArray[i] = ArrayUtils.toPrimitive(array[i]);
}
return calculate(newArray);
}
public Object calculate(scala.collection.Seq[] objArray) {
System.out.println(objArray);
double[][] newArray = new double[objArray.length][];
for (int i = 0; i < objArray.length; i++) {
List list = scala.collection.JavaConversions.seqAsJavaList(objArray[0]);
Double[] array = new Double[list.size()];
System.out.println(list.toArray(array));
array = (Double[]) list.toArray(array);
newArray[i] = ArrayUtils.toPrimitive(array);
}
return calculate(newArray);
}
public List<Integer> getTypeList() {
return null;
}
public Map<Integer, String> getMapTypes() {
return null;
}
public Map<Integer, List<ResultItemTableRow>> otherTables() {
return null;
}
public Map<String, Object> getResultMap() {
return null;
}
public Object[] getDayResult(Object[] objs, int offset) {
return null;
}
public Map<String, Object> getLocalResultMap() {
Map<String, Object> map = new HashMap<>();
map.put(PipelineConstants.RESULT, calculatedMap);
map.put(PipelineConstants.OBJECT, objectMap);
map.put(PipelineConstants.OBJECTFIXED, objectFixedMap);
map.put(PipelineConstants.LIST, listMap);
map.put(PipelineConstants.TRUNCLIST, truncListMap);
map.put(PipelineConstants.RESULT, calculatedMap);
map.put(PipelineConstants.MARKETOBJECT, marketObjectMap);
map.put(PipelineConstants.MARKETCALCULATED, marketCalculatedMap);
map.put(PipelineConstants.MARKETRESULT, marketResultMap);
try {
System.out.println("vix " + Arrays.asList(listMap.get("VIX")[0]));
System.out.println("vix " + Arrays.asList(truncListMap.get("VIX")[0]));
System.out.println("vix " + Arrays.asList(objectMap.get("VIX")[2]));
Object[] o = (Object[]) objectMap.get("VIX");
System.out.println("vix " + Arrays.asList((double[])o[2]) + " " + o[2]);
double[] i = (double[]) o[2];
double[] j = (double[]) o[1];
double[] k = (double[]) o[0];
System.out.println("i " + i.getClass().getName() + Arrays.asList(i));
} catch (Exception e) {
System.out.println(e.getMessage());
int jj = 0;
}
return map;
}
public int getResultSize() {
return 0;
}
public String indicatorName() {
return null;
}
public int getCategory() {
return category;
}
public boolean wantForExtras() {
return false;
}
// TODO make an oo version of this
protected void calculateAll(MyMyConfig conf, Map<String, MarketData> marketdatamap,
Map<String, PeriodData> periodDataMap, int category, Pipeline[] datareaders) throws Exception {
DbAccess dbDao = DbDao.instance(conf);
SimpleDateFormat dt = new SimpleDateFormat(Constants.MYDATEFORMAT);
String dateme = dt.format(conf.getdate());
Map<String, Pipeline> pipelineMap = IndicatorUtils.getPipelineMap(datareaders);
Pipeline datareader = pipelineMap.get("" + category);
this.listMap = (Map<String, Double[][]>) datareader.getLocalResultMap().get(PipelineConstants.LIST);
this.truncListMap = (Map<String, double[][]>) datareader.getLocalResultMap().get(PipelineConstants.TRUNCLIST);
if (!anythingHere(listMap)) {
System.out.println("empty"+key);
return;
}
List<Map> resultList = getMarketCalcResults(conf, dbDao, truncListMap);
objectMap = resultList.get(0);
calculatedMap = resultList.get(1);
resultMap = resultList.get(2);
}
private boolean anythingHere(Map<String, Double[][]> listMap2) {
for (Double[][] array : listMap2.values()) {
for (int i = 0; i < array[0].length; i++) {
if (array[0][i] != null) {
return true;
}
}
}
return false;
}
protected void calculateForExtras(Pipeline[] datareaders) {
Map<String, Pipeline> pipelineMap = IndicatorUtils.getPipelineMap(datareaders);
Pipeline extrareader = pipelineMap.get(PipelineConstants.EXTRAREADER);
if (extrareader == null) {
return;
}
Map<String, Object> localResults = extrareader.getLocalResultMap();
Map<Pair, List<StockItem>> pairStockMap = (Map<Pair, List<StockItem>>) localResults.get(PipelineConstants.PAIRSTOCK);
Map<Pair, Map<Date, StockItem>> pairDateMap = (Map<Pair, Map<Date, StockItem>>) localResults.get(PipelineConstants.PAIRDATE);
Map<Pair, String> pairCatMap = (Map<Pair, String>) localResults.get(PipelineConstants.PAIRCAT);
Map<Pair, Double[][]> pairListMap = (Map<Pair, Double[][]>) localResults.get(PipelineConstants.PAIRLIST);
Map<Pair, List<Date>> pairDateListMap = (Map<Pair, List<Date>>) localResults.get(PipelineConstants.PAIRDATELIST);
Map<Pair, double[][]> pairTruncListMap = (Map<Pair, double[][]>) localResults.get(PipelineConstants.PAIRTRUNCLIST);
System.out.println("lockeys" + localResults.keySet());
Map<Pair, List<StockItem>> pairMap = pairStockMap;
Map<String, Map<String, double[][]>> marketListMap = new HashMap<>();
for(Pair pair : pairMap.keySet()) {
String market = (String) pair.getFirst();
Map<String, double[][]> aListMap = marketListMap.get(market);
if (aListMap == null) {
aListMap = new HashMap<>();
marketListMap.put(market, aListMap);
}
String id = (String) pair.getSecond();
double[][] truncList = pairTruncListMap.get(pair);
if (truncList == null) {
System.out.println("bl");
}
//double[] aTruncList = truncListMap.get(id);
if (truncList != null) {
aListMap.put(id, truncList);
}
}
Pipeline datareader = pipelineMap.get("" + category);
marketObjectMap = new HashMap<>();
marketCalculatedMap = new HashMap<>();
marketResultMap = new HashMap<>();
//this.listMap = (Map<String, Double[]>) datareader.getLocalResultMap().get(PipelineConstants.LIST);
//this.truncListMap = (Map<String, double[]>) datareader.getLocalResultMap().get(PipelineConstants.TRUNCLIST);
DbAccess dbDao = DbDao.instance(conf);
for (String market : marketListMap.keySet()) {
Map<String, double[][]> truncListMap = marketListMap.get(market);
List<Map> resultList = getMarketCalcResults(conf, dbDao, truncListMap);
if (resultList == null || resultList.isEmpty()) {
continue;
}
Map anObjectMap = resultList.get(0);
Map aCalculatedMap = resultList.get(1);
Map aResultMap = resultList.get(2);
marketObjectMap.put(market, anObjectMap);
marketCalculatedMap.put(market, aCalculatedMap);
marketResultMap.put(market, aResultMap);
}
}
protected List getMarketCalcResults(MyMyConfig conf, DbAccess dbDao, Map<String, double[][]> truncListMap) {
List<Map> resultList = new ArrayList<>();
if (truncListMap == null || truncListMap.isEmpty()) {
return resultList;
}
long time0 = System.currentTimeMillis();
log.info("time0 " + (System.currentTimeMillis() - time0));
long time2 = System.currentTimeMillis();
Map<String, Object[]> objectMap = dbDao.doCalculationsArr(conf, truncListMap, key, this, conf.wantPercentizedPriceIndex());
log.info("time2 " + (System.currentTimeMillis() - time2));
long time1 = System.currentTimeMillis();
TaUtil tu = new TaUtil();
//PeriodData perioddata = periodDataMap.get(periodstr);
log.info("listmap " + truncListMap.size() + " " + truncListMap.keySet());
Map<String, Double[]> calculatedMap = getCalculatedMap(conf, tu, objectMap, truncListMap);
Map<String, Object[]> resultMap = getResultMap(conf, tu, objectMap, calculatedMap);
log.info("time1 " + (System.currentTimeMillis() - time1));
resultList.add(objectMap);
resultList.add(calculatedMap);
resultList.add(resultMap);
return resultList;
}
protected Map<String, Double[]> getCalculatedMap(MyMyConfig conf, TaUtil tu, Map<String, Object[]> objectMap, Map<String, double[][]> truncListMap) {
Map<String, Double[]> result = new HashMap<>();
for (String id : truncListMap.keySet()) {
Double[] calculated = getCalculated(conf, objectMap, id);
if (calculated != null) {
result.put(id, calculated);
// TODO and continue?
} else {
System.out.println("nothing for id" + id);
}
}
return result;
}
protected Map<String, Object[]> getResultMap(MyMyConfig conf, TaUtil tu, Map<String, Object[]> objectMap, Map<String, Double[]> momMap) {
Map<String, Object[]> result = new HashMap<>();
if (listMap == null) {
return result;
}
for (String id : listMap.keySet()) {
Double[] momentum = momMap.get(id);
Object[] fields = new Object[fieldSize];
result.put(id, fields);
if (momentum == null) {
System.out.println("zero mom for id " + id);
}
getFieldResult(conf, tu, momentum, fields);
}
return result;
}
public Object[] getResultItem(StockItem stock) {
TaUtil tu = new TaUtil();
String market = conf.getMarket();
String id = stock.getId();
Pair<String, String> pair = new Pair<>(market, id);
Set<Pair<String, String>> ids = new HashSet<>();
ids.add(pair);
String periodstr = key;
PeriodData perioddata = periodDataMap.get(periodstr);
if (perioddata == null) {
//System.out.println("key " + key + " : " + periodDataMap.keySet());
log.info("key " + key + " : " + periodDataMap.keySet());
}
if (resultMap == null) {
return emptyField;
}
Object[] result = resultMap.get(id);
if (result == null) {
result = emptyField;
}
return result;
}
} |
package com.yahoo.vespa.hosted.provision.maintenance;
import com.yahoo.config.provision.Deployer;
import com.yahoo.config.provision.NodeResources;
import com.yahoo.jdisc.Metric;
import com.yahoo.vespa.hosted.provision.Node;
import com.yahoo.vespa.hosted.provision.NodeList;
import com.yahoo.vespa.hosted.provision.NodeRepository;
import com.yahoo.vespa.hosted.provision.maintenance.MaintenanceDeployment.Move;
import com.yahoo.vespa.hosted.provision.node.Agent;
import com.yahoo.vespa.hosted.provision.provisioning.HostCapacity;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
/**
* A maintainer which attempts to ensure there is spare capacity available in chunks which can fit
* all node resource configuration in use, such that the system is able to quickly replace a failed node
* if necessary.
*
* This also emits the following metrics:
* - Overcommitted hosts: Hosts whose capacity is less than the sum of its children's
* - Spare host capacity, or how many hosts the repository can stand to lose without ending up in a situation where it's
* unable to find a new home for orphaned tenants.
*
* @author mgimle
* @author bratseth
*/
public class SpareCapacityMaintainer extends NodeRepositoryMaintainer {
private final int maxIterations;
private final Deployer deployer;
private final Metric metric;
public SpareCapacityMaintainer(Deployer deployer,
NodeRepository nodeRepository,
Metric metric,
Duration interval) {
this(deployer, nodeRepository, metric, interval,
10_000 // Should take less than a few minutes
);
}
public SpareCapacityMaintainer(Deployer deployer,
NodeRepository nodeRepository,
Metric metric,
Duration interval,
int maxIterations) {
super(nodeRepository, interval);
this.deployer = deployer;
this.metric = metric;
this.maxIterations = maxIterations;
}
@Override
protected void maintain() {
if ( ! nodeRepository().zone().getCloud().allowHostSharing()) return;
CapacityChecker capacityChecker = new CapacityChecker(nodeRepository());
List<Node> overcommittedHosts = capacityChecker.findOvercommittedHosts();
if (overcommittedHosts.size() != 0) {
log.log(Level.WARNING, String.format("%d nodes are overcommitted! [ %s ]",
overcommittedHosts.size(),
overcommittedHosts.stream().map(Node::hostname).collect(Collectors.joining(", "))));
}
metric.set("overcommittedHosts", overcommittedHosts.size(), null);
Optional<CapacityChecker.HostFailurePath> failurePath = capacityChecker.worstCaseHostLossLeadingToFailure();
if (failurePath.isPresent()) {
int spareHostCapacity = failurePath.get().hostsCausingFailure.size() - 1;
if (spareHostCapacity == 0) {
List<Move> mitigation = findMitigation(failurePath.get());
if (execute(mitigation)) {
// We succeeded or are in the process of taking a step to mitigate.
// Report with the assumption this will eventually succeed to avoid alerting before we're stuck
spareHostCapacity++;
}
}
metric.set("spareHostCapacity", spareHostCapacity, null);
}
}
private boolean execute(List<Move> mitigation) {
if (mitigation.isEmpty()) {
log.warning("Out of spare capacity. No mitigation could be found");
return false;
}
Move firstMove = mitigation.get(0);
if (firstMove.node().allocation().get().membership().retired()) return true; // Already in progress
boolean success = firstMove.execute(false, Agent.SpareCapacityMaintainer, deployer, metric, nodeRepository());
log.info("Out of spare capacity. Mitigation plan: " + mitigation + ". First move successful: " + success);
return success;
}
private List<Move> findMitigation(CapacityChecker.HostFailurePath failurePath) {
Optional<Node> nodeWhichCantMove = failurePath.failureReason.tenant;
if (nodeWhichCantMove.isEmpty()) return List.of();
Node node = nodeWhichCantMove.get();
NodeList allNodes = nodeRepository().list();
// Allocation will assign the two most empty nodes as "spares", which will not be allocated on
// unless needed for node failing. Our goal here is to make room on these spares for the given node
HostCapacity hostCapacity = new HostCapacity(allNodes, nodeRepository().resourcesCalculator());
Set<Node> spareHosts = hostCapacity.findSpareHosts(allNodes.hosts().satisfies(node.resources()).asList(), 2);
List<Node> hosts = allNodes.hosts().except(spareHosts).asList();
CapacitySolver capacitySolver = new CapacitySolver(hostCapacity, maxIterations);
List<Move> shortestMitigation = null;
for (Node spareHost : spareHosts) {
List<Move> mitigation = capacitySolver.makeRoomFor(node, spareHost, hosts, List.of(), List.of());
if (mitigation == null) continue;
if (shortestMitigation == null || shortestMitigation.size() > mitigation.size())
shortestMitigation = mitigation;
}
if (shortestMitigation == null || shortestMitigation.isEmpty()) return List.of();
return shortestMitigation;
}
private static class CapacitySolver {
private final HostCapacity hostCapacity;
private final int maxIterations;
private int iterations = 0;
CapacitySolver(HostCapacity hostCapacity, int maxIterations) {
this.hostCapacity = hostCapacity;
this.maxIterations = maxIterations;
}
/** The map of subproblem solutions already found. The value is null when there is no solution. */
private Map<SolutionKey, List<Move>> solutions = new HashMap<>();
/**
* Finds the shortest sequence of moves which makes room for the given node on the given host,
* assuming the given moves already made over the given hosts' current allocation.
*
* @param node the node to make room for
* @param host the target host to make room on
* @param hosts the hosts onto which we can move nodes
* @param movesConsidered the moves already being considered to add as part of this scenario
* (after any moves made by this)
* @param movesMade the moves already made in this scenario
* @return the list of movesMade with the moves needed for this appended, in the order they should be performed,
* or null if no sequence could be found
*/
List<Move> makeRoomFor(Node node, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
SolutionKey solutionKey = new SolutionKey(node, host, movesConsidered, movesMade);
List<Move> solution = solutions.get(solutionKey);
if (solution == null) {
solution = findRoomFor(node, host, hosts, movesConsidered, movesMade);
solutions.put(solutionKey, solution);
}
return solution;
}
private List<Move> findRoomFor(Node node, Node host, List<Node> hosts,
List<Move> movesConsidered, List<Move> movesMade) {
if (iterations++ > maxIterations)
return null;
if ( ! host.resources().satisfies(node.resources())) return null;
NodeResources freeCapacity = freeCapacityWith(movesMade, host);
if (freeCapacity.satisfies(node.resources())) return List.of();
List<Move> shortest = null;
for (var i = subsets(hostCapacity.allNodes().childrenOf(host), 5); i.hasNext(); ) {
List<Node> childrenToMove = i.next();
if ( ! addResourcesOf(childrenToMove, freeCapacity).satisfies(node.resources())) continue;
List<Move> moves = move(childrenToMove, host, hosts, movesConsidered, movesMade);
if (moves == null) continue;
if (shortest == null || moves.size() < shortest.size())
shortest = moves;
}
if (shortest == null) return null;
return append(movesMade, shortest);
}
private List<Move> move(List<Node> nodes, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
List<Move> moves = new ArrayList<>();
for (Node childToMove : nodes) {
List<Move> childMoves = move(childToMove, host, hosts, movesConsidered, append(movesMade, moves));
if (childMoves == null) return null;
moves.addAll(childMoves);
}
return moves;
}
private List<Move> move(Node node, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
if (contains(node, movesConsidered)) return null;
if (contains(node, movesMade)) return null;
List<Move> shortest = null;
for (Node target : hosts) {
if (target.equals(host)) continue;
Move move = new Move(node, host, target);
List<Move> childMoves = makeRoomFor(node, target, hosts, append(movesConsidered, move), movesMade);
if (childMoves == null) continue;
if (shortest == null || shortest.size() > childMoves.size() + 1) {
shortest = new ArrayList<>(childMoves);
shortest.add(move);
}
}
return shortest;
}
private boolean contains(Node node, List<Move> moves) {
return moves.stream().anyMatch(move -> move.node().equals(node));
}
private NodeResources addResourcesOf(List<Node> nodes, NodeResources resources) {
for (Node node : nodes)
resources = resources.add(node.resources());
return resources;
}
private Iterator<List<Node>> subsets(NodeList nodes, int maxSize) {
return new SubsetIterator(nodes.asList(), maxSize);
}
private List<Move> append(List<Move> a, List<Move> b) {
List<Move> list = new ArrayList<>();
list.addAll(a);
list.addAll(b);
return list;
}
private List<Move> append(List<Move> moves, Move move) {
List<Move> list = new ArrayList<>(moves);
list.add(move);
return list;
}
private NodeResources freeCapacityWith(List<Move> moves, Node host) {
NodeResources resources = hostCapacity.freeCapacityOf(host);
for (Move move : moves) {
if ( ! move.toHost().equals(host)) continue;
resources = resources.subtract(move.node().resources());
}
for (Move move : moves) {
if ( ! move.fromHost().equals(host)) continue;
resources = resources.add(move.node().resources());
}
return resources;
}
}
private static class SolutionKey {
private final Node node;
private final Node host;
private final List<Move> movesConsidered;
private final List<Move> movesMade;
private final int hash;
public SolutionKey(Node node, Node host, List<Move> movesConsidered, List<Move> movesMade) {
this.node = node;
this.host = host;
this.movesConsidered = movesConsidered;
this.movesMade = movesMade;
hash = Objects.hash(node, host, movesConsidered, movesMade);
}
@Override
public int hashCode() { return hash; }
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (o == null || o.getClass() != this.getClass()) return false;
SolutionKey other = (SolutionKey)o;
if ( ! other.node.equals(this.node)) return false;
if ( ! other.host.equals(this.host)) return false;
if ( ! other.movesConsidered.equals(this.movesConsidered)) return false;
if ( ! other.movesMade.equals(this.movesMade)) return false;
return true;
}
}
private static class SubsetIterator implements Iterator<List<Node>> {
private final List<Node> nodes;
private final int maxLength;
// A number whose binary representation determines which items of list we'll include
private int i = 0; // first "previous" = 0 -> skip the empty set
private List<Node> next = null;
public SubsetIterator(List<Node> nodes, int maxLength) {
this.nodes = new ArrayList<>(nodes.subList(0, Math.min(nodes.size(), 31)));
this.maxLength = maxLength;
}
@Override
public boolean hasNext() {
if (next != null) return true;
// find next
while (++i < 1<<nodes.size()) {
int ones = Integer.bitCount(i);
if (ones > maxLength) continue;
next = new ArrayList<>(ones);
for (int position = 0; position < nodes.size(); position++) {
if (hasOneAtPosition(position, i))
next.add(nodes.get(position));
}
return true;
}
return false;
}
@Override
public List<Node> next() {
if ( ! hasNext()) throw new IllegalStateException("No more elements");
var current = next;
next = null;
return current;
}
private boolean hasOneAtPosition(int position, int number) {
return (number & (1 << position)) > 0;
}
}
} |
package foundation.omni.rest.omniwallet.mjdk;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.json.JsonMapper;
import foundation.omni.CurrencyID;
import foundation.omni.netapi.omniwallet.OmniwalletAbstractClient;
import foundation.omni.netapi.omniwallet.json.AddressVerifyInfo;
import foundation.omni.netapi.omniwallet.json.OmniwalletAddressBalance;
import foundation.omni.netapi.omniwallet.json.OmniwalletClientModule;
import foundation.omni.netapi.omniwallet.json.OmniwalletPropertiesListResponse;
import foundation.omni.netapi.omniwallet.json.RevisionInfo;
import org.bitcoinj.core.Address;
import org.bitcoinj.core.NetworkParameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.net.http.HttpResponse.BodyHandlers;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* {@link OmniwalletAbstractClient} implementation using JDK 11+ {@link HttpClient}
*/
public class OmniwalletModernJDKClient extends OmniwalletAbstractClient {
private static final Logger log = LoggerFactory.getLogger(OmniwalletModernJDKClient.class);
final HttpClient client;
private final JsonMapper objectMapper = new JsonMapper();
public OmniwalletModernJDKClient(URI baseURI) {
this(baseURI, true, false, null);
}
/**
*
* @param baseURI Base URL of server
* @param debug Enable debugging, logging, etc.
* @param strictMode Only accept valid amounts from server
* @param netParams Specify active Bitcoin network (used for Address validation)
*/
public OmniwalletModernJDKClient(URI baseURI, boolean debug, boolean strictMode, NetworkParameters netParams) {
super(baseURI, true, false, netParams);
log.info("OmniwalletModernJDKClient opened for: {}", baseURI);
this.client = HttpClient.newBuilder()
.connectTimeout(Duration.ofMinutes(2))
.build();
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
objectMapper.registerModule(new OmniwalletClientModule(netParams));
}
@Override
protected CompletableFuture<OmniwalletPropertiesListResponse> propertiesList() {
HttpRequest request = buildGetRequest("/v1/properties/list");
return sendAsync(request, OmniwalletPropertiesListResponse.class);
}
@Override
public CompletableFuture<RevisionInfo> revisionInfo() {
HttpRequest request = buildGetRequest("/v1/system/revision.json");
return sendAsync(request, RevisionInfo.class);
}
@Override
protected CompletableFuture<Map<Address, OmniwalletAddressBalance>> balanceMapForAddress(Address address) {
return balanceMapForAddresses(Collections.singletonList(address));
}
@Override
protected CompletableFuture<Map<Address, OmniwalletAddressBalance>> balanceMapForAddresses(List<Address> addresses) {
TypeReference<Map<Address, OmniwalletAddressBalance>> typeRef = new TypeReference<>() {};
String addressesFormEnc = formEncodeAddressList(addresses);
log.info("Addresses are: {}", addressesFormEnc);
HttpRequest request = buildPostRequest("/v2/address/addr/", addressesFormEnc);
return sendAsync(request, typeRef);
}
@Override
protected CompletableFuture<List<AddressVerifyInfo>> verifyAddresses(CurrencyID currencyID) {
TypeReference<List<AddressVerifyInfo>> typeRef = new TypeReference<>() {};
HttpRequest request = buildGetRequest("/v1/mastercoin_verify/addresses?currency_id=" + currencyID.toString());
return sendAsync(request, typeRef);
}
private <R> CompletableFuture<R> sendAsync(HttpRequest request, Class<R> clazz) {
log.debug("Send aysnc: {}", request);
return sendAsyncCommon(request)
.thenApply(mappingFunc(clazz));
}
private <R> CompletableFuture<R> sendAsync(HttpRequest request, TypeReference<R> typeReference) {
log.debug("Send aysnc: {}", request);
return sendAsyncCommon(request)
.thenApply(mappingFunc(typeReference));
}
private CompletableFuture<String> sendAsyncCommon(HttpRequest request) {
log.debug("Send aysnc: {}", request);
return client.sendAsync(request, BodyHandlers.ofString())
.thenApply(HttpResponse::body)
.whenComplete(OmniwalletModernJDKClient::log);
}
private <R> MappingFunction<R> mappingFunc(Class<R> clazz) {
return s -> objectMapper.readValue(s, clazz);
}
private <R> MappingFunction<R> mappingFunc(TypeReference<R> typeReference) {
return s -> objectMapper.readValue(s, typeReference);
}
private HttpRequest buildGetRequest(String uriPath) {
return HttpRequest
.newBuilder(baseURI.resolve(uriPath))
.header("Accept", "application/json")
.build();
}
private HttpRequest buildPostRequest(String uriPath, String postData) {
return HttpRequest
.newBuilder(baseURI.resolve(uriPath))
.header("Content-Type", "application/x-www-form-urlencoded")
.header("Accept", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(postData))
.build();
}
/**
* Convert an address list containing 1 or more entries
* @param addressList A list of addresses
* @return a form-encoded string containing the list of addresses
*/
static String formEncodeAddressList(List<Address> addressList) {
return addressList.stream()
.map(Address::toString) // Convert to string
.map(a -> URLEncoder.encode(a, StandardCharsets.UTF_8)) // URL Encode as UTF-8
.map(a -> "addr=" + a) // Form encode
.collect(Collectors.joining("&"));
}
private static void log(String s, Throwable t) {
if ((s != null)) {
log.debug(s.substring(0 ,Math.min(100, s.length())));
} else {
log.error("exception: ", t);
}
}
@FunctionalInterface
interface MappingFunction<R> extends ThrowingFunction<String, R> {}
@FunctionalInterface
interface ThrowingFunction<T,R> extends Function<T, R> {
/**
* Gets a result wrapping checked Exceptions with {@link RuntimeException}
* @return a result
*/
@Override
default R apply(T t) {
try {
return applyThrows(t);
} catch (final Exception e) {
throw new CompletionException(e);
}
}
/**
* Gets a result.
*
* @return a result
* @throws Exception Any checked Exception
*/
R applyThrows(T t) throws Exception;
}
} |
package org.eclipse.dawnsci.analysis.dataset.impl;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.text.Format;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.complex.Complex;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.commons.math3.stat.descriptive.moment.Variance;
import org.eclipse.dawnsci.analysis.api.dataset.IDataset;
import org.eclipse.dawnsci.analysis.api.dataset.ILazyDataset;
import org.eclipse.dawnsci.analysis.api.dataset.Slice;
import org.eclipse.dawnsci.analysis.api.dataset.SliceND;
import org.eclipse.dawnsci.analysis.api.metadata.ErrorMetadata;
import org.eclipse.dawnsci.analysis.api.metadata.MetadataType;
import org.eclipse.dawnsci.analysis.api.monitor.IMonitor;
import org.eclipse.dawnsci.analysis.dataset.metadata.ErrorMetadataImpl;
/**
* Generic container class for data
* <p/>
* Each subclass has an array of primitive types, elements of this array are grouped or
* compounded to make items
* <p/>
* Data items can be boolean, integer, float, complex float, vector float, etc
*/
public abstract class AbstractDataset extends LazyDatasetBase implements Dataset {
/**
 * Boolean
 */
public static final int BOOL = Dataset.BOOL;
/**
 * Signed 8-bit integer
 */
public static final int INT8 = Dataset.INT8;
/**
 * Signed 16-bit integer
 */
public static final int INT16 = Dataset.INT16;
/**
 * Signed 32-bit integer
 */
public static final int INT32 = Dataset.INT32;
/**
 * Integer (same as signed 32-bit integer)
 */
public static final int INT = Dataset.INT;
/**
 * Signed 64-bit integer
 */
public static final int INT64 = Dataset.INT64;
/**
 * 32-bit floating point
 */
public static final int FLOAT32 = Dataset.FLOAT32;
/**
 * 64-bit floating point
 */
public static final int FLOAT64 = Dataset.FLOAT64;
/**
 * Floating point (same as 64-bit floating point)
 */
public static final int FLOAT = Dataset.FLOAT;
/**
 * 64-bit complex floating point (real and imaginary parts are 32-bit floats)
 */
public static final int COMPLEX64 = Dataset.COMPLEX64;
/**
 * 128-bit complex floating point (real and imaginary parts are 64-bit floats)
 */
public static final int COMPLEX128 = Dataset.COMPLEX128;
/**
 * Complex floating point — presumably an alias of the 128-bit complex type
 * (the previous text said "same as 64-bit floating point", which looks like a
 * copy/paste slip; TODO confirm against Dataset.COMPLEX)
 */
public static final int COMPLEX = Dataset.COMPLEX;
/**
 * Date
 */
public static final int DATE = Dataset.DATE;
/**
 * String
 */
public static final int STRING = Dataset.STRING;
/**
 * Object
 */
public static final int OBJECT = Dataset.OBJECT;
/**
 * Array of signed 8-bit integers
 */
public static final int ARRAYINT8 = Dataset.ARRAYINT8;
/**
 * Array of signed 16-bit integers
 */
public static final int ARRAYINT16 = Dataset.ARRAYINT16;
/**
 * Array of three signed 16-bit integers for RGB values
 */
public static final int RGB = Dataset.RGB;
/**
 * Array of signed 32-bit integers
 */
public static final int ARRAYINT32 = Dataset.ARRAYINT32;
/**
 * Array of signed 64-bit integers
 */
public static final int ARRAYINT64 = Dataset.ARRAYINT64;
/**
 * Array of 32-bit floating points
 */
public static final int ARRAYFLOAT32 = Dataset.ARRAYFLOAT32;
/**
 * Array of 64-bit floating points
 */
public static final int ARRAYFLOAT64 = Dataset.ARRAYFLOAT64;
/** Returns whether the dataset type is elemental: any scalar type up to COMPLEX128, plus RGB. */
protected static boolean isDTypeElemental(int dtype) {
    if (dtype == RGB) {
        return true;
    }
    return dtype <= COMPLEX128;
}
/** Returns whether the dataset type is floating point: real, complex or compound float types. */
protected static boolean isDTypeFloating(int dtype) {
    switch (dtype) {
    case FLOAT32:
    case FLOAT64:
    case COMPLEX64:
    case COMPLEX128:
    case ARRAYFLOAT32:
    case ARRAYFLOAT64:
        return true;
    default:
        return false;
    }
}
/** Returns whether the dataset type is one of the two complex scalar types. */
protected static boolean isDTypeComplex(int dtype) {
    switch (dtype) {
    case COMPLEX64:
    case COMPLEX128:
        return true;
    default:
        return false;
    }
}
protected int size; // number of items
transient protected AbstractDataset base; // is null when not a view
protected int[] stride; // can be null for row-major, contiguous datasets
protected int offset; // index of first item within the backing buffer
/**
 * The data itself, held in a 1D array, but the object will wrap it to appear as possessing as many dimensions as
 * wanted
 */
protected Serializable odata = null;
/**
 * Set aliased data as base data
 */
abstract protected void setData();
/**
 * These members hold cached values. If their values are null, then recalculate, otherwise just use the values
 */
transient protected HashMap<String, Object> storedValues = null;
/**
 * Constructor required for serialisation.
 */
public AbstractDataset() {
}
/** Returns a clone made while holding this dataset's monitor. */
@Override
public synchronized Dataset synchronizedCopy() {
return clone();
}
/**
 * Structural equality: compares class, elements-per-item, total size and shape.
 * NOTE(review): for rank &gt; 0 datasets the actual item values are NOT compared —
 * two same-shaped datasets with different contents are reported equal here;
 * confirm callers perform content comparison elsewhere. Zero-rank datasets are
 * compared via their single item.
 */
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!getClass().equals(obj.getClass())) {
// different concrete class: a zero-rank dataset may still equal a plain object
if (getRank() == 0) // for zero-rank datasets
return obj.equals(getObjectAbs(0));
return false;
}
Dataset other = (Dataset) obj;
if (getElementsPerItem() != other.getElementsPerItem())
return false;
if (size != other.getSize())
return false;
if (!Arrays.equals(shape, other.getShapeRef())) {
return false;
}
if (getRank() == 0) // for zero-rank datasets
return other.getObjectAbs(0).equals(getObjectAbs(0));
return true;
}
/** Hash code delegated to the (possibly cached) dataset hash; see equals() for what is compared. */
@Override
public int hashCode() {
return getHash();
}
/** Subclasses return a deep copy of themselves. */
@Override
abstract public AbstractDataset clone();
// Optional formatter applied when rendering items as strings (null = default formatting)
protected Format stringFormat = null;
/** Sets the format used when converting items to strings. */
@Override
public void setStringFormat(Format format) {
stringFormat = format;
}
/** Casts this dataset to the given type; returns this dataset unchanged when the type already matches. */
@Override
public Dataset cast(final int dtype) {
    return getDtype() == dtype ? this : DatasetUtils.cast(this, dtype);
}
/** Casts this dataset to the given type and item size; returns this dataset unchanged when both already match. */
@Override
public Dataset cast(final boolean repeat, final int dtype, final int isize) {
    final boolean sameType = getDtype() == dtype;
    final boolean sameItemSize = getElementsPerItem() == isize;
    if (sameType && sameItemSize) {
        return this;
    }
    return DatasetUtils.cast(this, repeat, dtype, isize);
}
/** Subclasses return a view of themselves: shared buffer, independent shape/stride bookkeeping. */
@Override
abstract public AbstractDataset getView();
/**
 * Copy fields from original to view
 * @param orig
 * @param view
 * @param clone if true, then clone everything but bulk data
 * @param cloneMetadata if true, clone metadata
 */
protected static void copyToView(Dataset orig, AbstractDataset view, boolean clone, boolean cloneMetadata) {
view.name = orig.getName();
view.size = orig.getSize();
view.odata = orig.getBuffer(); // bulk data is always shared, never copied here
view.offset = orig.getOffset();
// a view of a view shares the root base dataset
view.base = orig instanceof AbstractDataset ? ((AbstractDataset) orig).base : null;
if (clone) {
view.shape = orig.getShape();
copyStoredValues(orig, view, false);
view.stride = orig instanceof AbstractDataset && ((AbstractDataset) orig).stride != null ?
((AbstractDataset) orig).stride.clone() : null;
} else {
// share shape and stride references with the original
view.shape = orig.getShapeRef();
view.stride = orig instanceof AbstractDataset ? ((AbstractDataset) orig).stride : null;
}
view.metadata = getMetadataMap(orig, cloneMetadata);
int odtype = orig.getDtype();
int vdtype = view.getDtype();
if (getBestDType(odtype, vdtype) != vdtype) {
view.storedValues = null; // as copy is a demotion
}
if (odtype != vdtype && view.storedValues != null) {
// cached hashes are type-dependent so must be invalidated on any type change
view.storedValues.remove(STORE_SHAPELESS_HASH);
view.storedValues.remove(STORE_HASH);
}
}
/**
 * Groups a dataset's metadata items by their metadata sub-interface.
 * Returns null for a null dataset or when no metadata can be retrieved
 * (retrieval failures are deliberately treated as "no metadata").
 */
protected static Map<Class<? extends MetadataType>, List<MetadataType>> getMetadataMap(Dataset a, boolean clone) {
    if (a == null) {
        return null;
    }
    List<MetadataType> items;
    try {
        items = a.getMetadata(null);
    } catch (Exception ignored) {
        items = null; // best effort: a failing provider is equivalent to no metadata
    }
    if (items == null) {
        return null;
    }
    HashMap<Class<? extends MetadataType>, List<MetadataType>> grouped =
            new HashMap<Class<? extends MetadataType>, List<MetadataType>>();
    for (MetadataType item : items) {
        if (item == null) {
            continue;
        }
        Class<? extends MetadataType> key = findMetadataTypeSubInterfaces(item.getClass());
        List<MetadataType> bucket = grouped.get(key);
        if (bucket == null) {
            bucket = new ArrayList<MetadataType>();
            grouped.put(key, bucket);
        }
        bucket.add(clone ? item.clone() : item);
    }
    return grouped;
}
/** Returns an integer dataset of this dataset's shape holding the item indices, named after this dataset. */
@Override
public IntegerDataset getIndices() {
    final IntegerDataset indices = DatasetUtils.indices(shape);
    final String currentName = getName();
    if (currentName != null) {
        indices.setName("Indices of " + currentName);
    }
    return indices;
}
/**
 * Returns a transposed view sharing this dataset's buffer: axes are permuted by
 * rewriting shape and stride only, so no data is moved. With invalid/absent axes
 * (checkPermutatedAxes returns null) or rank 1, the plain view is returned as-is.
 */
@Override
public Dataset getTransposedView(int... axes) {
axes = checkPermutatedAxes(shape, axes);
AbstractDataset t = getView();
if (axes == null || getRank() == 1)
return t;
int rank = shape.length;
int[] tstride = new int[rank];
int[] toffset = new int[1];
// obtain the current strides/offset; nshape is then overwritten per permuted axis below
int[] nshape = createStrides(new SliceND(shape), this, tstride, toffset);
int[] nstride = new int[rank];
for (int i = 0; i < rank; i++) {
final int ax = axes[i];
nstride[i] = tstride[ax];
nshape[i] = shape[ax];
}
t.shape = nshape;
t.stride = nstride;
t.offset = toffset[0];
t.base = base == null ? this : base; // a view of a view keeps the root base
copyStoredValues(this, t, true);
t.transposeMetadata(axes);
return t;
}
/** Returns a transposed copy: materializes the transposed view (or a clone of this dataset when no view results). */
@Override
public Dataset transpose(int... axes) {
    Dataset view = getTransposedView(axes);
    if (view == null) {
        return clone();
    }
    return view.clone();
}
/**
 * Returns a view with the two given axes exchanged. Negative axis numbers count
 * from the end. Returns this dataset itself when there is nothing to swap.
 */
@Override
public Dataset swapAxes(int axis1, int axis2) {
    final int rank = shape.length;
    // normalize negative axis numbers
    int first = axis1 < 0 ? axis1 + rank : axis1;
    int second = axis2 < 0 ? axis2 + rank : axis2;
    if (first < 0 || first >= rank || second < 0 || second >= rank) {
        logger.error("Axis value invalid - out of range");
        throw new IllegalArgumentException("Axis value invalid - out of range");
    }
    if (rank == 1 || first == second) {
        return this; // nothing to swap
    }
    // identity permutation with the two requested axes exchanged
    final int[] permutation = new int[rank];
    for (int i = 0; i < rank; i++) {
        permutation[i] = i;
    }
    permutation[first] = second;
    permutation[second] = first;
    return getTransposedView(permutation);
}
/** Returns this dataset flattened to 1-D; a strided (non-contiguous) view is copied first. */
@Override
public Dataset flatten() {
    return stride == null ? reshape(size) : clone().flatten();
}
/**
 * Calculate total number of items in given shape
 * @param shape dataset shape (null or zero-length means a zero-rank dataset)
 * @return size (1 for zero-rank; 0 when any dimension is zero)
 * @throws IllegalArgumentException if a dimension is negative or the total exceeds a long
 */
public static long calcLongSize(final int[] shape) {
    if (shape == null || shape.length == 0) // special case of zero-rank shape
        return 1;

    // accumulate in a double so overflow can be detected before casting back to long
    double dsize = 1.0;
    for (int i = 0; i < shape.length; i++) {
        // make sure the indexes isn't zero or negative
        if (shape[i] == 0) {
            return 0;
        } else if (shape[i] < 0) {
            // message fix: the original read "The %d-th is %d ..." (missing noun)
            throw new IllegalArgumentException(String.format(
                    "The %d-th dimension is %d which is an illegal argument as it is negative", i, shape[i]));
        }
        dsize *= shape[i];
    }

    // check to see if the size is larger than a long, i.e. we can't allocate it
    if (dsize > Long.MAX_VALUE) {
        throw new IllegalArgumentException("Size of the dataset is too large to allocate");
    }
    return (long) dsize;
}
/**
 * Calculate total number of items in given shape, as an int.
 * @param shape
 * @return size
 * @throws IllegalArgumentException if the size does not fit in an int
 */
public static int calcSize(final int[] shape) {
    final long total = calcLongSize(shape);
    // check to see if the size is larger than an integer, i.e. we can't allocate it
    if (total > Integer.MAX_VALUE) {
        throw new IllegalArgumentException("Size of the dataset is too large to allocate");
    }
    return (int) total;
}
/**
 * Find dataset type that best fits given types The best type takes into account complex and array datasets
 *
 * @param atype
 *            first dataset type
 * @param btype
 *            second dataset type
 * @return best dataset type
 */
public static int getBestDType(final int atype, final int btype) {
int besttype;
// reduce compound (array) types to their element type for comparison
int a = atype >= ARRAYINT8 ? atype / ARRAYMUL : atype;
int b = btype >= ARRAYINT8 ? btype / ARRAYMUL : btype;
if (isDTypeFloating(a)) {
if (!isDTypeFloating(b)) {
// promote the integer side to the matching float type
b = getBestFloatDType(b);
if (isDTypeComplex(a)) {
// shift the float type into the corresponding complex type
b += COMPLEX64 - FLOAT32;
}
}
} else if (isDTypeFloating(b)) {
a = getBestFloatDType(a);
if (isDTypeComplex(b)) {
a += COMPLEX64 - FLOAT32;
}
}
besttype = a > b ? a : b;
// restore compound-ness if either input was a compound type
if (atype >= ARRAYINT8 || btype >= ARRAYINT8) {
if (besttype >= COMPLEX64) {
throw new IllegalArgumentException("Complex type cannot be promoted to compound type");
}
besttype *= ARRAYMUL;
}
return besttype;
}
/**
 * The largest dataset type suitable for a summation of around a few thousand items without changing from the "kind"
 * of dataset
 *
 * @param otype
 * @return largest dataset type available for given dataset type
 */
public static int getLargestDType(final int otype) {
    // small integers widen to INT32; everything else widens to the largest of its kind
    if (otype == BOOL || otype == INT8 || otype == INT16)
        return INT32;
    if (otype == INT32 || otype == INT64)
        return INT64;
    if (otype == FLOAT32 || otype == FLOAT64)
        return FLOAT64;
    if (otype == COMPLEX64 || otype == COMPLEX128)
        return COMPLEX128;
    if (otype == ARRAYINT8 || otype == ARRAYINT16)
        return ARRAYINT32;
    if (otype == ARRAYINT32 || otype == ARRAYINT64)
        return ARRAYINT64;
    if (otype == ARRAYFLOAT32 || otype == ARRAYFLOAT64)
        return ARRAYFLOAT64;
    throw new IllegalArgumentException("Unsupported dataset type");
}
/**
 * Find floating point dataset type that best fits given types The best type takes into account complex and array
 * datasets
 *
 * @param otype
 *            old dataset type
 * @return best dataset type
 */
public static int getBestFloatDType(final int otype) {
    // single precision suffices for small integers and single-precision inputs (demote, if necessary)
    if (otype == BOOL || otype == INT8 || otype == INT16
            || otype == ARRAYINT8 || otype == ARRAYINT16
            || otype == FLOAT32 || otype == ARRAYFLOAT32 || otype == COMPLEX64) {
        return FLOAT32;
    }
    // wider integers and double-precision inputs need double precision (promote, if necessary)
    if (otype == INT32 || otype == INT64 || otype == ARRAYINT32 || otype == ARRAYINT64
            || otype == FLOAT64 || otype == ARRAYFLOAT64 || otype == COMPLEX128) {
        return FLOAT64;
    }
    return otype; // for other datasets, preserve type
}
/**
 * Find floating point dataset type that best fits given class. The best type takes into account complex and array
 * datasets
 *
 * @param cls
 *            class of an item or element
 * @return best dataset type
 * @throws IllegalArgumentException if the class is not supported (from getDTypeFromClass)
 */
public static int getBestFloatDType(Class<? extends Object> cls) {
return getBestFloatDType(getDTypeFromClass(cls));
}
// Lookup table from Java classes to dataset types (built once, immutable thereafter by convention)
transient private static final Map<Class<?>, Integer> dtypeMap = createDTypeMap();

/** Builds the class-to-dataset-type lookup table: boxed and primitive numerics plus a few value classes. */
private static Map<Class<?>, Integer> createDTypeMap() {
    Map<Class<?>, Integer> map = new HashMap<Class<?>, Integer>();
    // boxed primitives
    map.put(Boolean.class, BOOL);
    map.put(Byte.class, INT8);
    map.put(Short.class, INT16);
    map.put(Integer.class, INT32);
    map.put(Long.class, INT64);
    map.put(Float.class, FLOAT32);
    map.put(Double.class, FLOAT64);
    // primitive types
    map.put(boolean.class, BOOL);
    map.put(byte.class, INT8);
    map.put(short.class, INT16);
    map.put(int.class, INT32);
    map.put(long.class, INT64);
    map.put(float.class, FLOAT32);
    map.put(double.class, FLOAT64);
    // other supported value classes
    map.put(Complex.class, COMPLEX128);
    map.put(String.class, STRING);
    map.put(Date.class, DATE);
    map.put(Object.class, OBJECT);
    return map;
}
/**
 * Get dataset type from a class (assuming one element per item)
 *
 * @param cls item or element class
 * @return dataset type
 * @throws IllegalArgumentException if the class is not supported
 */
public static int getDTypeFromClass(Class<? extends Object> cls) {
return getDTypeFromClass(cls, 1);
}
/**
 * Get dataset type from a class
 *
 * @param cls item or element class
 * @param isize number of elements per item (a value other than 1 selects a compound type)
 * @return dataset type
 * @throws IllegalArgumentException if the class is not supported
 */
public static int getDTypeFromClass(Class<? extends Object> cls, int isize) {
Integer dtype = dtypeMap.get(cls);
if (dtype == null) {
throw new IllegalArgumentException("Class of object not supported");
}
if (isize != 1) {
// NOTE(review): only types strictly below FLOAT64 are promoted to compound here,
// so FLOAT64 is never multiplied to ARRAYFLOAT64 — confirm this is intended
if (dtype < FLOAT64)
dtype *= ARRAYMUL;
}
return dtype;
}
/**
 * Get dataset type from an object. The following are supported: Java Number objects, Apache common math Complex
 * objects, Java arrays and lists
 *
 * @param obj
 * @return dataset type (-1 for null input)
 */
public static int getDTypeFromObject(Object obj) {
int dtype = -1;
if (obj == null) {
return dtype;
}

if (obj instanceof List<?>) {
// recurse over list elements, keeping the widest (maximum) type found
List<?> jl = (List<?>) obj;
int l = jl.size();
for (int i = 0; i < l; i++) {
int ldtype = getDTypeFromObject(jl.get(i));
if (ldtype > dtype) {
dtype = ldtype;
}
}
} else if (obj.getClass().isArray()) {
Class<?> ca = obj.getClass().getComponentType();
if (isComponentSupported(ca)) {
// array of a directly supported component type: no per-element recursion needed
return getDTypeFromClass(ca);
}
// otherwise recurse per element (e.g. nested Object[] arrays)
int l = Array.getLength(obj);
for (int i = 0; i < l; i++) {
Object lo = Array.get(obj, i);
int ldtype = getDTypeFromObject(lo);
if (ldtype > dtype) {
dtype = ldtype;
}
}
} else if (obj instanceof Dataset) {
return ((Dataset) obj).getDtype();
} else if (obj instanceof ILazyDataset) {
dtype = getDTypeFromClass(((ILazyDataset) obj).elementClass(), ((ILazyDataset) obj).getElementsPerItem());
} else {
dtype = getDTypeFromClass(obj.getClass());
}
return dtype;
}
/**
 * @param comp component class
 * @return true if supported as a dataset component type
 */
public static boolean isComponentSupported(Class<? extends Object> comp) {
    // primitives and any Number subtype are supported directly
    if (comp.isPrimitive() || Number.class.isAssignableFrom(comp)) {
        return true;
    }
    // plus a handful of specific value classes
    return comp.equals(Boolean.class) || comp.equals(Complex.class) || comp.equals(String.class);
}
/**
 * Get dataset type from given dataset.
 *
 * @param d dataset
 * @return dataset type
 */
public static int getDType(ILazyDataset d) {
    if (d instanceof LazyDatasetBase) {
        // concrete implementation knows its dtype directly
        return ((LazyDatasetBase) d).getDtype();
    }
    // otherwise derive it from the element class and item size
    return getDTypeFromClass(d.elementClass(), d.getElementsPerItem());
}
/**
 * Get shape from object (array or list supported).
 *
 * @param obj object to inspect (null gives a zero-rank shape)
 * @return shape
 */
public static int[] getShapeFromObject(final Object obj) {
    final ArrayList<Integer> dims = new ArrayList<Integer>();

    getShapeFromObj(dims, obj, 0);
    final int rank = dims.size();
    if (obj != null && rank == 0) {
        return new int[0]; // cope with a single item
    }

    final int[] shape = new int[rank];
    for (int i = 0; i < rank; i++) {
        shape[i] = dims.get(i);
    }

    return shape;
}
/**
 * Get shape from object, recursively descending nested lists/arrays and growing the
 * dimension list to the maximum length seen at each depth.
 *
 * @param ldims accumulated dimension lengths, indexed by depth
 * @param obj current object (null is tolerated)
 * @param depth current nesting depth
 * @return true if there is a possibility of differing lengths
 */
private static boolean getShapeFromObj(final ArrayList<Integer> ldims, Object obj, int depth) {
    if (obj == null)
        return true;

    if (obj instanceof List<?>) {
        List<?> jl = (List<?>) obj;
        int l = jl.size();
        updateShape(ldims, depth, l);
        for (int i = 0; i < l; i++) {
            Object lo = jl.get(i);
            // stop early once a non-container element is found at the next depth
            if (!getShapeFromObj(ldims, lo, depth + 1)) {
                break;
            }
        }
        return true;
    }
    Class<? extends Object> ca = obj.getClass().getComponentType();
    if (ca != null) {
        final int l = Array.getLength(obj);
        updateShape(ldims, depth, l);
        if (isComponentSupported(ca)) {
            // leaf array of primitives/boxed values: its length is the final dimension
            return true;
        }
        for (int i = 0; i < l; i++) {
            Object lo = Array.get(obj, i);
            if (!getShapeFromObj(ldims, lo, depth + 1)) {
                break;
            }
        }
        return true;
    } else if (obj instanceof IDataset) {
        // splice the dataset's own shape in at the current depth
        int[] s = ((IDataset) obj).getShape();
        for (int i = 0; i < s.length; i++) {
            updateShape(ldims, depth++, s[i]);
        }
        return true;
    } else {
        return false; // not an array of any type
    }
}
/**
 * Record length {@code l} for the given depth, extending the list or widening the
 * existing entry if {@code l} is larger.
 */
private static void updateShape(final ArrayList<Integer> ldims, final int depth, final int l) {
    if (depth < ldims.size()) {
        if (l > ldims.get(depth)) {
            ldims.set(depth, l);
        }
    } else {
        ldims.add(l);
    }
}
/**
 * Fill dataset from object at depth dimension. Recursively walks nested lists/arrays,
 * writing leaf values at the position tracked in {@code pos}; null leaves become NaN
 * for floating-point datasets.
 *
 * @param obj source object (list, array, IDataset or scalar); null writes NaN for float types
 * @param depth current dimension being filled
 * @param pos position (mutated in place as the recursion advances, reset per dimension)
 */
protected void fillData(Object obj, final int depth, final int[] pos) {
    if (obj == null) {
        int dtype = getDtype();
        if (dtype == FLOAT32)
            set(Float.NaN, pos);
        else if (dtype == FLOAT64)
            set(Double.NaN, pos);
        // other dtypes silently skip null entries
        return;
    }

    if (obj instanceof List<?>) {
        List<?> jl = (List<?>) obj;
        int l = jl.size();
        for (int i = 0; i < l; i++) {
            Object lo = jl.get(i);
            fillData(lo, depth + 1, pos);
            pos[depth]++;
        }
        pos[depth] = 0; // rewind this dimension for the caller's next iteration
    } else if (obj.getClass().isArray()) {
        int l = Array.getLength(obj);
        for (int i = 0; i < l; i++) {
            Object lo = Array.get(obj, i);
            fillData(lo, depth + 1, pos);
            pos[depth]++;
        }
        pos[depth] = 0;
    } else if (obj instanceof IDataset) {
        // copy the dataset into the remaining (deeper) dimensions as a slice
        boolean[] a = new boolean[shape.length];
        for (int i = depth; i < a.length; i++)
            a[i] = true;
        setSlice(obj, getSliceIteratorFromAxes(pos, a));
    } else {
        set(obj, pos);
    }
}
/**
 * Convert an object to a boolean: non-zero numbers, true booleans, complex values with
 * non-zero real part, and single-item datasets are truthy.
 *
 * @param b object to convert
 * @return boolean value
 * @throws IllegalArgumentException for unsupported classes or multi-item datasets
 */
protected static boolean toBoolean(final Object b) {
    if (b instanceof Number) {
        return ((Number) b).longValue() != 0;
    }
    if (b instanceof Boolean) {
        return ((Boolean) b).booleanValue();
    }
    if (b instanceof Complex) {
        return ((Complex) b).getReal() != 0;
    }
    if (b instanceof Dataset) {
        final Dataset db = (Dataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toBoolean(db.getObjectAbs(0));
    }
    if (b instanceof IDataset) {
        final IDataset db = (IDataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toBoolean(db.getObject(new int[db.getRank()]));
    }
    logger.error("Argument is of unsupported class");
    throw new IllegalArgumentException("Argument is of unsupported class");
}
/**
 * Convert an object to a long: numbers are truncated, booleans map to 0/1, complex
 * values use the truncated real part, single-item datasets are unwrapped.
 *
 * @param b object to convert
 * @return long value
 * @throws IllegalArgumentException for unsupported classes or multi-item datasets
 */
protected static long toLong(final Object b) {
    if (b instanceof Number) {
        return ((Number) b).longValue();
    }
    if (b instanceof Boolean) {
        return ((Boolean) b).booleanValue() ? 1 : 0;
    }
    if (b instanceof Complex) {
        return (long) ((Complex) b).getReal();
    }
    if (b instanceof Dataset) {
        final Dataset db = (Dataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toLong(db.getObjectAbs(0));
    }
    if (b instanceof IDataset) {
        final IDataset db = (IDataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toLong(db.getObject(new int[db.getRank()]));
    }
    logger.error("Argument is of unsupported class");
    throw new IllegalArgumentException("Argument is of unsupported class");
}
/**
 * Convert an object to a double: numbers widen, booleans map to 0/1, complex values use
 * the real part, arrays use their first element (0 when empty), single-item datasets
 * are unwrapped.
 *
 * @param b object to convert
 * @return real (double) value
 * @throws IllegalArgumentException for unsupported classes or multi-item datasets
 */
protected static double toReal(final Object b) {
    if (b instanceof Number) {
        return ((Number) b).doubleValue();
    } else if (b instanceof Boolean) {
        return ((Boolean) b).booleanValue() ? 1 : 0;
    } else if (b instanceof Complex) {
        return ((Complex) b).getReal();
    } else if (b.getClass().isArray()) {
        if (Array.getLength(b) == 0)
            return 0;
        return toReal(Array.get(b, 0));
    } else if (b instanceof Dataset) {
        Dataset db = (Dataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toReal(db.getObjectAbs(0));
    } else if (b instanceof IDataset) {
        // FIX: was "(Dataset) b" which throws ClassCastException for any IDataset
        // implementation that is not a Dataset (this branch only runs for those)
        IDataset db = (IDataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toReal(db.getObject(new int[db.getRank()]));
    } else {
        logger.error("Argument is of unsupported class");
        throw new IllegalArgumentException("Argument is of unsupported class");
    }
}
/**
 * Convert an object to the imaginary part of a double value: plain numbers and booleans
 * have none (0), complex values use their imaginary part, arrays use their second
 * element (0 when shorter), single-item datasets are unwrapped.
 *
 * @param b object to convert
 * @return imaginary (double) value
 * @throws IllegalArgumentException for unsupported classes or multi-item datasets
 */
protected static double toImag(final Object b) {
    if (b instanceof Number) {
        return 0;
    } else if (b instanceof Boolean) {
        return 0;
    } else if (b instanceof Complex) {
        return ((Complex) b).getImaginary();
    } else if (b.getClass().isArray()) {
        if (Array.getLength(b) < 2)
            return 0;
        return toReal(Array.get(b, 1));
    } else if (b instanceof Dataset) {
        Dataset db = (Dataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toImag(db.getObjectAbs(0));
    } else if (b instanceof IDataset) {
        // FIX: was "(Dataset) b" which throws ClassCastException for any IDataset
        // implementation that is not a Dataset (this branch only runs for those)
        IDataset db = (IDataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toImag(db.getObject(new int[db.getRank()]));
    } else {
        logger.error("Argument is of unsupported class");
        throw new IllegalArgumentException("Argument is of unsupported class");
    }
}
@Override
public IndexIterator getIterator(final boolean withPosition) {
    // strided (view) datasets always need a stride-aware iterator
    if (stride != null) {
        return new StrideIterator(shape, stride, offset);
    }
    if (withPosition) {
        return new ContiguousIteratorWithPosition(shape, size);
    }
    return new ContiguousIterator(size);
}

@Override
public IndexIterator getIterator() {
    return getIterator(false); // position tracking not required
}

@Override
public PositionIterator getPositionIterator(final int... axes) {
    return new PositionIterator(shape, axes);
}

@Override
public IndexIterator getSliceIterator(final int[] start, final int[] stop, final int[] step) {
    return getSliceIterator(new SliceND(shape, start, stop, step));
}
/**
 * @param slice
 * @return a slice iterator that operates like an IndexIterator
 */
public IndexIterator getSliceIterator(SliceND slice) {
    // degenerate (zero-sized) slices get a no-op iterator
    if (calcLongSize(slice.getShape()) == 0) {
        return new NullIterator(shape, slice.getShape());
    }

    if (stride != null) {
        return new StrideIterator(getElementsPerItem(), shape, stride, offset, slice);
    }
    return new SliceIterator(shape, size, slice);
}
/**
 * Get an iterator over a sub-space of this dataset: axes flagged true are iterated in
 * full, axes flagged false are pinned at the given position.
 *
 * @param pos starting position (null means all zeros); length must equal the rank
 * @param axes per-dimension flags (null means iterate every axis)
 * @return slice iterator over the selected sub-space
 * @throws IllegalArgumentException if pos or axes length does not match the rank
 */
@Override
public SliceIterator getSliceIteratorFromAxes(final int[] pos, boolean[] axes) {
    int rank = shape.length;
    int[] start;
    int[] stop = new int[rank];
    int[] step = new int[rank];

    if (pos == null) {
        start = new int[rank];
    } else if (pos.length == rank) {
        start = pos.clone();
    } else {
        throw new IllegalArgumentException("pos array length is not equal to rank of dataset");
    }
    if (axes == null) {
        axes = new boolean[rank];
        Arrays.fill(axes, true);
    } else if (axes.length != rank) {
        throw new IllegalArgumentException("axes array length is not equal to rank of dataset");
    }

    for (int i = 0; i < rank; i++) {
        if (axes[i]) {
            stop[i] = shape[i]; // iterate the whole axis
        } else {
            stop[i] = start[i] + 1; // keep axis fixed at the start position
        }
        step[i] = 1;
    }
    return (SliceIterator) getSliceIterator(start, stop, step);
}
@Override
public BooleanIterator getBooleanIterator(Dataset choice) {
    // default: visit items where the choice dataset is true
    return getBooleanIterator(choice, true);
}

@Override
public BooleanIterator getBooleanIterator(Dataset choice, boolean value) {
    return new BooleanIterator(getIterator(), choice, value);
}
/**
 * Gather the items selected by a boolean mask into a new 1D dataset.
 * The selection must be broadcast-compatible with this dataset.
 */
@Override
public Dataset getByBoolean(Dataset selection) {
    checkCompatibility(selection);

    // number of true entries determines the output length
    final int length = ((Number) selection.sum()).intValue();
    final int is = getElementsPerItem();
    Dataset r = DatasetFactory.zeros(is, new int[] { length }, getDtype());

    BooleanIterator biter = getBooleanIterator(selection);

    int i = 0;
    while (biter.hasNext()) {
        r.setObjectAbs(i, getObjectAbs(biter.index));
        i += is; // advance by item size (compound items occupy is elements)
    }
    return r;
}
/**
 * Gather the items at the given flat indices into a new dataset with the same shape as
 * the index dataset.
 */
@Override
public Dataset getBy1DIndex(IntegerDataset index) {
    final int is = getElementsPerItem();
    final Dataset r = DatasetFactory.zeros(is, index.getShape(), getDtype());

    final IntegerIterator iter = new IntegerIterator(index, size, is);

    int i = 0;
    while (iter.hasNext()) {
        r.setObjectAbs(i, getObjectAbs(iter.index));
        i += is; // advance by item size (compound items occupy is elements)
    }
    return r;
}
/**
 * Gather items selected by per-dimension index objects (fancy indexing); the output
 * shape is determined by the broadcast of the index objects.
 */
@Override
public Dataset getByIndexes(final Object... indexes) {
    final IntegersIterator iter = new IntegersIterator(shape, indexes);
    final int is = getElementsPerItem();
    final Dataset r = DatasetFactory.zeros(is, iter.getShape(), getDtype());

    final int[] pos = iter.getPos(); // live position array updated by the iterator

    int i = 0;
    while (iter.hasNext()) {
        r.setObjectAbs(i, getObject(pos));
        i += is; // advance by item size (compound items occupy is elements)
    }
    return r;
}
/**
 * @param dtype
 * @return (boxed) class of constituent element; unknown dtypes fall through to Object
 */
public static Class<?> elementClass(final int dtype) {
    switch (dtype) {
    case BOOL:
        return Boolean.class;
    case INT8:
    case ARRAYINT8:
        return Byte.class;
    case INT16:
    case ARRAYINT16:
    case RGB: // RGB items are triples of shorts
        return Short.class;
    case INT32:
    case ARRAYINT32:
        return Integer.class;
    case INT64:
    case ARRAYINT64:
        return Long.class;
    case FLOAT32:
    case ARRAYFLOAT32:
        return Float.class;
    case FLOAT64:
    case ARRAYFLOAT64:
        return Double.class;
    case COMPLEX64:
        return Float.class; // complex64 = pair of floats
    case COMPLEX128:
        return Double.class; // complex128 = pair of doubles
    case STRING:
        return String.class;
    }
    return Object.class;
}
@Override
public Class<?> elementClass() {
    return elementClass(getDtype());
}

@Override
public boolean hasFloatingPointElements() {
    // true for float/double-backed datasets (including complex variants)
    final Class<?> cls = elementClass();
    return Float.class.equals(cls) || Double.class.equals(cls);
}

@Override
public int getElementsPerItem() {
    return getElementsPerItem(getDtype());
}

@Override
public int getItemsize() {
    return getItemsize(getDtype(), getElementsPerItem());
}
/**
 * @param dtype
 * @return number of elements per item (complex items hold two elements)
 * @throws UnsupportedOperationException for compound (array) types whose element
 *         count is not fixed by the dtype alone
 */
public static int getElementsPerItem(final int dtype) {
    switch (dtype) {
    case COMPLEX64:
    case COMPLEX128:
        return 2;
    case ARRAYINT8:
    case ARRAYINT16:
    case ARRAYINT32:
    case ARRAYINT64:
    case ARRAYFLOAT32:
    case ARRAYFLOAT64:
        throw new UnsupportedOperationException("Multi-element type unsupported");
    default:
        return 1;
    }
}
/**
 * @param dtype
 * @return length of single item in bytes, inferring the element count from the dtype
 */
public static int getItemsize(final int dtype) {
    return getItemsize(dtype, getElementsPerItem(dtype));
}
/**
 * @param dtype
 * @param isize
 *            number of elements in an item
 * @return length of single item in bytes (0 for unknown dtypes such as STRING/OBJECT)
 */
public static int getItemsize(final int dtype, final int isize) {
    int size; // bytes per element

    switch (dtype) {
    case BOOL:
        size = 1; // How is this defined?
        break;
    case INT8:
    case ARRAYINT8:
        size = Byte.SIZE / 8; // SIZE is in bits
        break;
    case INT16:
    case ARRAYINT16:
    case RGB:
        size = Short.SIZE / 8;
        break;
    case INT32:
    case ARRAYINT32:
        size = Integer.SIZE / 8;
        break;
    case INT64:
    case ARRAYINT64:
        size = Long.SIZE / 8;
        break;
    case FLOAT32:
    case ARRAYFLOAT32:
    case COMPLEX64:
        size = Float.SIZE / 8;
        break;
    case FLOAT64:
    case ARRAYFLOAT64:
    case COMPLEX128:
        size = Double.SIZE / 8;
        break;
    default:
        size = 0;
        break;
    }

    return size * isize;
}
@Override
public String getName() {
    return name;
}

@Override
public void setName(final String name) {
    this.name = name;
}

@Override
public int getSize() {
    // guard against use of a dataset whose construction failed or never completed
    if (odata == null) {
        throw new NullPointerException("The data object inside the dataset has not been allocated, "
                + "this suggests a failed or absent construction of the dataset");
    }
    return size;
}

@Override
public int[] getShape() {
    // make a copy of the dimensions data, and put that out
    if (shape == null) {
        logger.warn("Shape is null!!!");
        return new int[] {};
    }
    return shape.clone();
}

@Override
public int getRank() {
    return shape.length;
}

@Override
public int getNbytes() {
    // total bytes = number of items times bytes per item
    return getSize() * getItemsize();
}
/**
 * Check for -1 placeholder in shape and replace if necessary.
 * At most one -1 is allowed; it is replaced by size divided by the product of the
 * remaining dimensions.
 *
 * @param shape proposed shape (modified in place when a placeholder is resolved)
 * @param size required total number of items
 * @throws IllegalArgumentException if more than one -1 is present or the sizes differ
 */
private void checkShape(int[] shape, int size) {
    int rank = shape.length;
    int found = -1; // index of the -1 placeholder, if any
    int nsize = 1; // product of the explicit dimensions
    for (int i = 0; i < rank; i++) {
        int d = shape[i];
        if (d == -1) {
            if (found == -1) {
                found = i;
            } else {
                logger.error("Can only have one -1 placeholder in shape");
                throw new IllegalArgumentException("Can only have one -1 placeholder in shape");
            }
        } else {
            nsize *= d;
        }
    }

    if (found >= 0) {
        shape[found] = size/nsize;
    } else if (nsize != size) {
        logger.error("New shape is not same size as old shape");
        throw new IllegalArgumentException("New size is not same as the old size. Old size is "+size+" new size is "+nsize+" and shape is "+Arrays.toString(shape));
    }
}
@Override
public void setShape(final int... shape) {
int[] nshape = shape.clone();
checkShape(nshape, size);
if (Arrays.equals(this.shape, nshape))
return;
if (stride != null) {
// the only compatible shapes are ones where new dimensions are factors of old dimensions
// or are combined adjacent old dimensions
int[] oshape = this.shape;
int orank = oshape.length;
int nrank = nshape.length;
int diff = nrank - orank;
int[] nstride = new int[nrank];
boolean ones = true;
// work forwards for broadcasting cases
for (int i = 0, j = 0; i < orank || j < nrank;) {
if (j >= diff && i < orank && j < nrank && oshape[i] == nshape[j]) {
nstride[j++] = stride[i++];
} else if (j < nrank && nshape[j] == 1) {
nstride[j++] = 0;
} else if (i < orank && oshape[i] == 1) {
i++;
} else {
if (j < nrank)
j++;
if (i < orank)
i++;
ones = false;
}
}
if (!ones) { // not just ones differ in shapes
int[] ostride = stride;
int ob = 0;
int oe = 1;
int nb = 0;
int ne = 1;
while (ob < orank && nb < nrank) {
int ol = oshape[ob];
int nl = nshape[nb];
if (nl < ol) { // find group of shape dimensions that form common size
do { // case where new shape spreads single dimension over several dimensions
if (ne == nrank) {
break;
}
nl *= nshape[ne++];
} while (nl < ol);
if (nl != ol) {
logger.error("Subshape is incompatible with single dimension");
throw new IllegalArgumentException("Subshape is incompatible with single dimension");
}
int on = ne - 1;
while (nshape[on] == 1) {
on
}
nstride[on] = ostride[ob];
for (int n = on - 1; n >= nb; n
if (nshape[n] == 1)
continue;
nstride[n] = nshape[on] * nstride[on];
on = n;
}
} else if (ol < nl) {
do { // case where new shape combines several dimensions into one dimension
if (oe == orank) {
break;
}
ol *= oshape[oe++];
} while (ol < nl);
if (nl != ol) {
logger.error("Single dimension is incompatible with subshape");
throw new IllegalArgumentException("Single dimension is incompatible with subshape");
}
int oo = oe - 1;
while (oshape[oo] == 1) {
oo
}
int os = ostride[oo];
for (int o = oo - 1; o >= ob; o
if (oshape[o] == 1)
continue;
if (ostride[o] != oshape[oo] * ostride[oo]) {
logger.error("Subshape cannot be a non-contiguous view");
throw new IllegalArgumentException("Subshape cannot be a non-contiguous view");
}
oo = o;
}
nstride[nb] = os;
} else {
nstride[nb] = ostride[ob];
}
ob = oe++;
nb = ne++;
}
}
stride = nstride;
}
reshapeMetadata(this.shape, nshape);
this.shape = nshape;
if (storedValues != null)
filterStoredValues(storedValues); // as it is dependent on shape
}
@Override
public int[] getShapeRef() {
    // NB returns the internal array without copying - callers must not modify it
    return shape;
}

@Override
public int getOffset() {
    return offset;
}

@Override
public int[] getStrides() {
    // null when the dataset is contiguous (no view strides in use)
    return stride;
}

@Override
public Serializable getBuffer() {
    // the primitive array backing this dataset
    return odata;
}
/**
 * Create a stride array from dataset
 * @param a dataset
 * @param offset output offset (single-element array written by this call)
 * @return new strides
 */
public static int[] createStrides(Dataset a, final int[] offset) {
    // delegate with the dataset's own layout parameters
    return createStrides(a.getElementsPerItem(), a.getShapeRef(), a.getStrides(), a.getOffset(), offset);
}
/**
* Create a stride array from dataset
* @param isize
* @param shape
* @param oStride original stride
* @param oOffset original offset (only used if there is an original stride)
* @param offset output offset
* @return new strides
*/
public static int[] createStrides(final int isize, final int[] shape, final int[] oStride, final int oOffset, final int[] offset) {
int rank = shape.length;
final int[] stride;
if (oStride == null) {
offset[0] = 0;
stride = new int[rank];
int s = isize;
for (int j = rank - 1; j >= 0; j
stride[j] = s;
s *= shape[j];
}
} else {
offset[0] = oOffset;
stride = oStride.clone();
}
return stride;
}
/**
 * Create a stride array from slice information and a dataset
 * @param slice
 * @param a dataset
 * @param stride output stride
 * @param offset output offset (single-element array written by this call)
 * @return new shape
 */
public static int[] createStrides(final SliceND slice, final Dataset a, final int[] stride, final int[] offset) {
    // delegate with the dataset's own layout parameters
    return createStrides(slice, a.getElementsPerItem(), a.getShapeRef(), a.getStrides(), a.getOffset(), stride, offset);
}
/**
* Create a stride array from slice and dataset information
* @param slice
* @param isize
* @param shape
* @param oStride original stride
* @param oOffset original offset (only used if there is an original stride)
* @param stride output stride
* @param offset output offset
* @return new shape
*/
public static int[] createStrides(final SliceND slice, final int isize, final int[] shape, final int[] oStride, final int oOffset, final int[] stride, final int[] offset) {
int[] lstart = slice.getStart();
int[] lstep = slice.getStep();
int[] newShape = slice.getShape();
int rank = shape.length;
if (oStride == null) {
int s = isize;
offset[0] = 0;
for (int j = rank - 1; j >= 0; j
stride[j] = s * lstep[j];
offset[0] += s * lstart[j];
s *= shape[j];
}
} else {
offset[0] = oOffset;
for (int j = 0; j < rank; j++) {
int s = oStride[j];
stride[j] = lstep[j] * s;
offset[0] += lstart[j] * s;
}
}
return newShape;
}
/**
 * Create a stride array from a dataset to a broadcast shape
 * @param a dataset
 * @param broadcastShape
 * @return stride array
 */
public static int[] createBroadcastStrides(Dataset a, final int[] broadcastShape) {
    // delegate with the dataset's own layout parameters
    return createBroadcastStrides(a.getElementsPerItem(), a.getShapeRef(), a.getStrides(), broadcastShape);
}
/**
* Create a stride array from a dataset to a broadcast shape
* @param isize
* @param shape
* @param oStride original stride
* @param broadcastShape
* @return stride array
*/
public static int[] createBroadcastStrides(final int isize, final int[] shape, final int[] oStride, final int[] broadcastShape) {
int rank = shape.length;
if (broadcastShape.length != rank) {
throw new IllegalArgumentException("Dataset must have same rank as broadcast shape");
}
int[] stride = new int[rank];
if (oStride == null) {
int s = isize;
for (int j = rank - 1; j >= 0; j
if (broadcastShape[j] == shape[j]) {
stride[j] = s;
s *= shape[j];
} else {
stride[j] = 0;
}
}
} else {
for (int j = 0; j < rank; j++) {
if (broadcastShape[j] == shape[j]) {
stride[j] = oStride[j];
} else {
stride[j] = 0;
}
}
}
return stride;
}
@Override
public Dataset getSliceView(final int[] start, final int[] stop, final int[] step) {
    return getSliceView(new SliceND(shape, start, stop, step));
}

@Override
public Dataset getSliceView(Slice... slice) {
    // no slices means a view over everything
    if (slice == null || slice.length == 0) {
        return getView();
    }
    return getSliceView(new SliceND(shape, slice));
}
/**
 * Get a slice of the dataset. The returned dataset is a view on a selection of items
 * (no data is copied; strides and offset are adjusted instead).
 * @param slice
 * @return slice view
 */
@Override
public Dataset getSliceView(SliceND slice) {
    if (slice.isAll()) {
        return getView();
    }

    final int rank = shape.length;

    int[] sStride = new int[rank];
    int[] sOffset = new int[1];
    int[] sShape = createStrides(slice, this, sStride, sOffset);

    AbstractDataset s = getView();
    s.shape = sShape;
    s.size = calcSize(sShape);
    s.stride = sStride;
    s.offset = sOffset[0];
    s.base = base == null ? this : base; // chain views back to the root dataset

    s.metadata = copyMetadata();
    s.sliceMetadata(true, slice);

    s.setDirty();
    // record the slice in the view's name, e.g. "data[1:3,:]"
    s.setName(name + BLOCK_OPEN + slice + BLOCK_CLOSE);

    return s;
}
/**
 * Get flattened view index of given position
 * @param pos
 *            the integer array specifying the n-D position
 * @return the index on the flattened dataset
 */
private int getFlat1DIndex(final int[] pos) {
    // an empty position addresses the single item of a zero-rank dataset
    return pos.length == 0 ? 0 : get1DIndexFromShape(pos);
}

/**
 * Get flattened view index of given position
 * @param shape
 * @param pos
 *            the integer array specifying the n-D position
 * @return the index on the flattened dataset
 */
public static int getFlat1DIndex(final int[] shape, final int[] pos) {
    // an empty position addresses the single item of a zero-rank dataset
    return pos.length == 0 ? 0 : get1DIndexFromShape(shape, pos);
}
/**
 * Compute the index on the backing buffer for an n-D position; dispatches to the
 * shape-based or stride-based calculation depending on whether this is a view.
 */
@Override
public int get1DIndex(final int... n) {
    final int imax = n.length;
    final int rank = shape.length;
    if (imax == 0) {
        // no indices: only valid for zero-rank or single-item 1D datasets
        if (rank == 0 || (rank == 1 && shape[0] <= 1))
            return stride == null ? 0 : offset;
        throw new IllegalArgumentException("One or more index parameters must be supplied");
    } else if (imax > rank) {
        throw new IllegalArgumentException("No of index parameters is different to the shape of data: " + imax
                + " given " + rank + " required");
    }

    return stride == null ? get1DIndexFromShape(n) : get1DIndexFromStrides(n);
}
// Helper to raise a uniform out-of-range message for dimension d.
private static void throwAIOOBException(int i, int s, int d) {
    throw new ArrayIndexOutOfBoundsException(
            "Index (" + i + ") out of range [-" + s + "," + s + "] in dimension " + d);
}
/**
 * Compute the flat buffer index for a 1D position, allowing negative (end-relative)
 * indices.
 *
 * @param i index (negative counts from the end)
 * @return the index on the data array corresponding to that location
 */
protected int get1DIndex(int i) {
    if (shape.length > 1) {
        logger.debug("This dataset is not 1D but was addressed as such");
        return get1DIndex(new int[] {i});
    }

    int n = i < 0 ? i + shape[0] : i; // wrap negative index
    if (n < 0 || n >= shape[0]) {
        throwAIOOBException(n, shape[0], 0);
    }
    return stride == null ? n : n*stride[0] + offset;
}
/**
 * Compute the flat buffer index for a 2D position, allowing negative (end-relative)
 * indices.
 *
 * @param i row index (negative counts from the end)
 * @param j column index (negative counts from the end)
 * @return the index on the data array corresponding to that location
 */
protected int get1DIndex(int i, int j) {
    if (shape.length != 2) {
        logger.debug("This dataset is not 2D but was addressed as such");
        return get1DIndex(new int[] {i, j});
    }

    if (i < 0) {
        i += shape[0];
    }
    if (i < 0 || i >= shape[0]) {
        throwAIOOBException(i, shape[0], 0);
    }
    if (j < 0) {
        j += shape[1];
    }
    if (j < 0 || j >= shape[1]) {
        throwAIOOBException(j, shape[1], 1); // FIX: report the offending column index j (was i)
    }
    return stride == null ? i*shape[1] + j : i*stride[0] + j*stride[1] + offset;
}
// Convenience wrapper using this dataset's own shape.
protected int get1DIndexFromShape(final int... n) {
    return get1DIndexFromShape(this.shape, n);
}
/**
 * Compute a row-major flat index from a position, allowing negative (end-relative)
 * indices. Fewer indices than dimensions are allowed: missing trailing indices are
 * treated as zero (the partial index is scaled by the remaining dimensions).
 *
 * @param shape dataset shape
 * @param n position (length may be less than the rank)
 * @return flat index
 * @throws ArrayIndexOutOfBoundsException if any index is out of range
 */
protected static int get1DIndexFromShape(final int[] shape, final int... n) {
    final int imax = n.length;
    final int rank = shape.length;
    int index = 0;
    int i = 0;
    for (; i < imax; i++) {
        final int si = shape[i];
        int ni = n[i];
        if (ni < 0) {
            ni += si; // wrap negative index
        }
        if (ni < 0 || ni >= si) {
            throwAIOOBException(ni, si, i);
        }
        index = index * si + ni;
    }
    // scale up for any unspecified trailing dimensions (implicitly index 0)
    for (; i < rank; i++) {
        index *= shape[i];
    }

    return index;
}
// Convenience wrapper using this dataset's own layout.
private int get1DIndexFromStrides(final int... n) {
    return get1DIndexFromStrides(this.shape, this.stride, this.offset, n);
}
/**
 * Compute a flat buffer index from a position using explicit strides and offset,
 * allowing negative (end-relative) indices. Unlike the shape-based variant, the full
 * rank of indices must be supplied.
 *
 * @param shape dataset shape
 * @param stride per-dimension strides
 * @param offset buffer offset of the first item
 * @param n position (length must equal the rank)
 * @return flat index
 * @throws IllegalArgumentException if the number of indices differs from the rank
 * @throws ArrayIndexOutOfBoundsException if any index is out of range
 */
private static int get1DIndexFromStrides(final int[] shape, final int[] stride, final int offset, final int... n) {
    final int rank = shape.length;
    if (rank != n.length) {
        throw new IllegalArgumentException("Number of position indexes must be equal to rank");
    }
    int index = offset;
    for (int i = 0; i < rank; i++) {
        final int si = shape[i];
        int ni = n[i];
        if (ni < 0) {
            ni += si; // wrap negative index
        }
        if (ni < 0 || ni >= si) {
            throwAIOOBException(ni, si, i);
        }
        index += stride[i] * ni;
    }
    return index;
}
/**
 * Convert a flat (buffer) index into an n-D position.
 *
 * @param n flat index
 * @return n-D position
 * @throws IllegalArgumentException if the index is beyond the dataset's extent
 */
@Override
public int[] getNDPosition(final int n) {
    // NB despite its name, isIndexInRange returns true when n is OUT of range
    if (isIndexInRange(n)) {
        // FIX: message previously read "... provided Nis larger then ..." (missing space, typo)
        throw new IllegalArgumentException("Index provided " + n
                + " is larger than the size of the containing array");
    }

    return stride == null ? getNDPositionFromShape(n, shape) : getNDPositionFromStrides(n);
}
// NOTE(review): despite the name, this returns true when the index is OUT of range
// (callers use it as a throw condition) - consider renaming in a wider refactor.
private boolean isIndexInRange(final int n) {
    if (stride == null) {
        return n >= size;
    }
    // strided views can address any index within the underlying buffer
    return n >= getBufferLength();
}

/**
 * @return entire buffer length
 */
abstract protected int getBufferLength();
/**
 * Get n-D position from given index, assuming a contiguous row-major layout.
 * @param n index
 * @param shape
 * @return n-D position
 */
public static int[] getNDPositionFromShape(int n, int[] shape) {
    if (shape == null || shape.length == 0)
        return new int[0];

    int rank = shape.length;
    if (rank == 1) {
        return new int[] { n };
    }

    // peel off each trailing dimension by repeated division
    int[] output = new int[rank];
    for (int k = rank - 1; k > 0; k--) {
        output[k] = n % shape[k];
        n /= shape[k];
    }
    output[0] = n;
    return output;
}
private int[] getNDPositionFromStrides(int n) {
n -= offset;
int rank = shape.length;
if (rank == 1) {
return new int[] { n / stride[0] };
}
int[] output = new int[rank];
int i = 0;
while (i != n) { // TODO find more efficient way than this exhaustive search
int j = rank - 1;
for (; j >= 0; j
output[j]++;
i += stride[j];
if (output[j] >= shape[j]) {
output[j] = 0;
i -= shape[j] * stride[j];
} else {
break;
}
}
if (j == -1) {
logger.error("Index was not found in this strided dataset");
throw new IllegalArgumentException("Index was not found in this strided dataset");
}
}
return output;
}
@Override
public int checkAxis(int axis) {
    return checkAxis(shape.length, axis);
}

/**
 * Check that axis is in range [-rank,rank)
 * 
 * @param rank
 * @param axis
 * @return sanitized axis in range [0, rank)
 */
protected static int checkAxis(int rank, int axis) {
    final int a = axis < 0 ? axis + rank : axis; // wrap negative axis
    if (a < 0 || a >= rank) {
        throw new IndexOutOfBoundsException("Axis " + a + " given is out of range [0, " + rank + ")");
    }
    return a;
}
// delimiters used when rendering shapes and data in toString()
protected static final char BLOCK_OPEN = '[';
protected static final char BLOCK_CLOSE = ']';

@Override
public String toString() {
    return toString(false); // shape summary only, no data
}
/**
 * Render this dataset as a string: either a short "name has shape [..]" summary, or
 * (when showData is true) the full nested-bracket listing of its contents.
 */
@Override
public String toString(boolean showData) {
    final int rank = shape == null ? 0 : shape.length;
    final StringBuilder out = new StringBuilder();

    if (!showData) {
        // summary form: optional name plus bracketed shape
        if (name != null && name.length() > 0) {
            out.append("Dataset '");
            out.append(name);
            out.append("' has shape ");
        } else {
            out.append("Dataset shape is ");
        }

        out.append(BLOCK_OPEN);
        if (rank > 0) {
            out.append(shape[0]);
        }
        for (int i = 1; i < rank; i++) {
            out.append(", " + shape[i]);
        }
        out.append(BLOCK_CLOSE);
        return out.toString();
    }

    if (size == 0) {
        return out.toString();
    }

    if (rank > 0) {
        int[] pos = new int[rank];
        final StringBuilder lead = new StringBuilder();
        printBlocks(out, lead, 0, pos);
    } else {
        out.append(getString()); // zero-rank: single item
    }
    return out.toString();
}
/**
 * Limit to strings output via the toString() method
 */
private static int maxStringLength = 120;

/**
 * Set maximum line length for toString() method
 * @param maxLineLength
 */
public static void setMaxLineLength(int maxLineLength) {
    maxStringLength = maxLineLength;
}

/**
 * @return maximum line length for toString() method
 */
public static int getMaxLineLength() {
    return maxStringLength;
}

/**
 * Limit to number of sub-blocks output via the toString() method
 */
private static final int MAX_SUBBLOCKS = 6;

// tokens used when rendering dataset contents in toString()
private final static String SEPARATOR = ",";
private final static String SPACE = " ";
private final static String ELLIPSIS = "...";
private final static String NEWLINE = "\n";
/**
 * Make a line of output for last dimension of dataset, eliding middle elements (and
 * middle characters) so the result fits within the configured maximum string length.
 *
 * @param end index of the last dimension
 * @param start position of the leading dimensions (padded with zeros if too short)
 * @return line
 */
private StringBuilder makeLine(final int end, final int... start) {
    StringBuilder line = new StringBuilder();
    final int[] pos;
    if (end >= start.length) {
        pos = Arrays.copyOf(start, end + 1); // extend position to cover the last dimension
    } else {
        pos = start;
    }
    pos[end] = 0;
    line.append(BLOCK_OPEN);
    line.append(getString(pos));

    final int length = shape[end];

    // trim elements printed if length exceed estimate of maximum elements
    int excess = length - maxStringLength / 3; // space + number + separator
    if (excess > 0) {
        // print the first and last (length - excess)/2 elements; the middle is implied
        int index = (length - excess) / 2;
        for (int y = 1; y < index; y++) {
            line.append(SEPARATOR + SPACE);
            pos[end] = y;
            line.append(getString(pos));
        }
        index = (length + excess) / 2;
        for (int y = index; y < length; y++) {
            line.append(SEPARATOR + SPACE);
            pos[end] = y;
            line.append(getString(pos));
        }
    } else {
        for (int y = 1; y < length; y++) {
            line.append(SEPARATOR + SPACE);
            pos[end] = y;
            line.append(getString(pos));
        }
    }
    line.append(BLOCK_CLOSE);

    // trim string down to limit
    excess = line.length() - maxStringLength - ELLIPSIS.length() - 1;
    if (excess > 0) {
        // cut at separator boundaries around the middle and splice in an ellipsis
        int index = line.substring(0, (line.length() - excess) / 2).lastIndexOf(SEPARATOR) + 2;
        StringBuilder out = new StringBuilder(line.subSequence(0, index));
        out.append(ELLIPSIS + SEPARATOR);
        index = line.substring((line.length() + excess) / 2).indexOf(SEPARATOR) + (line.length() + excess) / 2 + 1;
        out.append(line.subSequence(index, line.length()));
        return out;
    }

    return line;
}
/**
 * recursive method to print blocks: each level emits a bracketed group of its
 * sub-blocks, eliding middle sub-blocks with an ellipsis when there are more than
 * MAX_SUBBLOCKS, and indenting nested levels via the growing lead string.
 */
private void printBlocks(final StringBuilder out, final StringBuilder lead, final int level, final int[] pos) {
    if (out.length() > 0) {
        char last = out.charAt(out.length() - 1);
        if (last != BLOCK_OPEN) {
            out.append(lead); // indent continuation lines to align under the open bracket
        }
    }
    final int end = getRank() - 1;
    if (level != end) {
        out.append(BLOCK_OPEN);
        int length = shape[level];

        // first sub-block
        pos[level] = 0;
        StringBuilder newlead = new StringBuilder(lead);
        newlead.append(SPACE);
        printBlocks(out, newlead, level + 1, pos);
        if (length < 2) { // escape
            out.append(BLOCK_CLOSE);
            return;
        }

        out.append(SEPARATOR + NEWLINE);
        // extra blank lines separate higher-dimensional groups
        for (int i = level + 1; i < end; i++) {
            out.append(NEWLINE);
        }

        // middle sub-blocks
        if (length < MAX_SUBBLOCKS) {
            for (int x = 1; x < length - 1; x++) {
                pos[level] = x;
                printBlocks(out, newlead, level + 1, pos);
                if (end <= level + 1) {
                    out.append(SEPARATOR + NEWLINE);
                } else {
                    out.append(SEPARATOR + NEWLINE + NEWLINE);
                }
            }
        } else {
            // too many sub-blocks: show leading and trailing halves around an ellipsis
            final int excess = length - MAX_SUBBLOCKS;
            int xmax = (length - excess) / 2;
            for (int x = 1; x < xmax; x++) {
                pos[level] = x;
                printBlocks(out, newlead, level + 1, pos);
                if (end <= level + 1) {
                    out.append(SEPARATOR + NEWLINE);
                } else {
                    out.append(SEPARATOR + NEWLINE + NEWLINE);
                }
            }
            out.append(newlead);
            out.append(ELLIPSIS + SEPARATOR + NEWLINE);
            xmax = (length + excess) / 2;
            for (int x = xmax; x < length - 1; x++) {
                pos[level] = x;
                printBlocks(out, newlead, level + 1, pos);
                if (end <= level + 1) {
                    out.append(SEPARATOR + NEWLINE);
                } else {
                    out.append(SEPARATOR + NEWLINE + NEWLINE);
                }
            }
        }

        // last sub-block
        pos[level] = length - 1;
        printBlocks(out, newlead, level + 1, pos);
        out.append(BLOCK_CLOSE);
    } else {
        // innermost dimension: delegate to the single-line renderer
        out.append(makeLine(end, pos));
    }
}
@Override
public void setDirty() {
    // invalidate any cached derived values (they depend on the data)
    if (storedValues != null) {
        storedValues.clear();
    }
}

@Override
public Dataset squeezeEnds() {
    return squeeze(true); // drop unit dimensions from the ends only
}

@Override
public Dataset squeeze() {
    return squeeze(false); // drop all unit dimensions
}
/**
 * Remove unit dimensions from this dataset in place, keeping strides of the surviving
 * dimensions when this is a view.
 *
 * @param onlyFromEnds if true, only remove unit dimensions at the start/end
 * @return this dataset
 */
@Override
public Dataset squeeze(boolean onlyFromEnds) {
    final int[] tshape = squeezeShape(shape, onlyFromEnds);
    final int[] oshape = shape;
    if (stride == null) {
        shape = tshape;
    } else {
        int rank = shape.length;
        int trank = tshape.length;
        if (trank < rank) {
            // pick out the strides of the dimensions that survive
            int[] tstride = new int[tshape.length];
            if (onlyFromEnds) {
                // surviving dimensions are contiguous: copy their strides in one run
                for (int i = 0; i < rank; i++) {
                    if (shape[i] != 1) {
                        for (int k = 0; k < trank; k++) {
                            tstride[k] = stride[i++];
                        }
                        break;
                    }
                }
            } else {
                int t = 0;
                for (int i = 0; i < rank; i++) {
                    if (shape[i] != 1) {
                        tstride[t++] = stride[i];
                    }
                }
            }
            shape = tshape;
            stride = tstride;
        }
    }

    reshapeMetadata(oshape, shape);
    return this;
}
/**
* Remove dimensions of 1 in given shape - from both ends only, if true
*
* @param oshape
* @param onlyFromEnds
* @return newly squeezed shape (or original if unsqueezed)
*/
public static int[] squeezeShape(final int[] oshape, boolean onlyFromEnds) {
int unitDims = 0;
int rank = oshape.length;
int start = 0;
if (onlyFromEnds) {
int i = rank - 1;
for (; i >= 0; i
if (oshape[i] == 1) {
unitDims++;
} else {
break;
}
}
for (int j = 0; j <= i; j++) {
if (oshape[j] == 1) {
unitDims++;
} else {
start = j;
break;
}
}
} else {
for (int i = 0; i < rank; i++) {
if (oshape[i] == 1) {
unitDims++;
}
}
}
if (unitDims == 0) {
return oshape;
}
int[] newDims = new int[rank - unitDims];
if (unitDims == rank)
return newDims; // zero-rank dataset
if (onlyFromEnds) {
rank = newDims.length;
for (int i = 0; i < rank; i++) {
newDims[i] = oshape[i+start];
}
} else {
int j = 0;
for (int i = 0; i < rank; i++) {
if (oshape[i] > 1) {
newDims[j++] = oshape[i];
if (j >= newDims.length)
break;
}
}
}
return newDims;
}
/**
 * Remove dimension of 1 in given shape
 *
 * @param oshape original shape
 * @param axis dimension to remove (may be negative to count from the end)
 * @return newly squeezed shape
 */
public static int[] squeezeShape(final int[] oshape, int axis) {
    if (oshape == null || oshape.length == 0) {
        return new int[0];
    }
    final int rank = oshape.length;
    if (axis < 0) {
        axis += rank; // negative axes count back from the last dimension
    }
    if (axis < 0 || axis >= rank) {
        logger.error("Axis argument is outside allowed range");
        throw new IllegalArgumentException("Axis argument is outside allowed range");
    }
    // copy everything before and after the removed axis in two bulk moves
    final int[] nshape = new int[rank - 1];
    System.arraycopy(oshape, 0, nshape, 0, axis);
    System.arraycopy(oshape, axis + 1, nshape, axis, rank - 1 - axis);
    return nshape;
}
/**
 * Check if shapes are broadcast compatible
 *
 * @param ashape first shape
 * @param bshape second shape
 * @return true if they are compatible
 */
public static boolean areShapesBroadcastCompatible(final int[] ashape, final int[] bshape) {
    // Align both shapes at their last (fastest) dimension and walk backwards;
    // any missing leading dimensions of the shorter shape broadcast trivially.
    int a = ashape.length - 1;
    int b = bshape.length - 1;
    while (a >= 0 && b >= 0) {
        final int al = ashape[a--];
        final int bl = bshape[b--];
        if (al != bl && al != 1 && bl != 1) {
            return false;
        }
    }
    return true;
}
/**
 * Check if shapes are compatible, ignoring extra axes of length 1
 *
 * @param ashape first shape
 * @param bshape second shape
 * @return true if they are compatible
 */
public static boolean areShapesCompatible(final int[] ashape, final int[] bshape) {
    // Compare the sub-sequences of dimensions greater than one in each shape,
    // consuming them with a cursor instead of materialising a list.
    int ia = 0;
    for (int b : bshape) {
        if (b == 1) {
            continue;
        }
        while (ia < ashape.length && ashape[ia] <= 1) {
            ia++; // skip unit (and degenerate) dimensions of ashape
        }
        if (ia >= ashape.length || ashape[ia] != b) {
            return false;
        }
        ia++;
    }
    // ashape must have no further non-unit dimensions left over
    while (ia < ashape.length && ashape[ia] <= 1) {
        ia++;
    }
    return ia == ashape.length;
}
/**
 * Check if shapes are compatible but skip axis
 *
 * @param ashape first shape
 * @param bshape second shape
 * @param axis dimension to exclude from the comparison
 * @return true if they are compatible
 */
public static boolean areShapesCompatible(final int[] ashape, final int[] bshape, final int axis) {
    if (ashape.length != bshape.length) {
        return false; // ranks must agree exactly here (no unit-dim forgiveness)
    }
    for (int i = ashape.length - 1; i >= 0; i--) {
        if (i == axis) {
            continue; // the skipped axis may differ freely
        }
        if (ashape[i] != bshape[i]) {
            return false;
        }
    }
    return true;
}
@Override
public boolean isCompatibleWith(final ILazyDataset g) {
    // Compatible means shapes match once unit dimensions are ignored
    return areShapesCompatible(shape, g.getShape());
}

@Override
public void checkCompatibility(final ILazyDataset g) throws IllegalArgumentException {
    checkCompatibility(this, g);
}

/**
 * Check that two datasets have compatible shapes (ignoring unit dimensions)
 * @param g first dataset
 * @param h second dataset
 * @throws IllegalArgumentException if shapes do not match
 */
public static void checkCompatibility(final ILazyDataset g, final ILazyDataset h) throws IllegalArgumentException {
    if (!areShapesCompatible(g.getShape(), h.getShape())) {
        throw new IllegalArgumentException("Shapes do not match");
    }
}

@Override
public Dataset reshape(final int... shape) {
    // Try a cheap view first; if the view cannot take the new shape
    // (setShape throws), fall back to reshaping a copy
    Dataset a = getView();
    try {
        a.setShape(shape);
    } catch (IllegalArgumentException e) {
        a = a.clone();
        a.setShape(shape);
    }
    return a;
}
/**
 * Create a dataset from object (automatically detect dataset type)
 *
 * @param obj
 *            can be a Java list, array or Number
 * @return dataset
 */
public static Dataset array(final Object obj) {
    // thin delegate kept for API compatibility
    return DatasetFactory.createFromObject(obj);
}

/**
 * Create a dataset from object (automatically detect dataset type)
 *
 * @param obj
 *            can be a Java list, array or Number
 * @param isUnsigned
 *            if true, interpret integer values as unsigned by increasing element bit width
 * @return dataset
 */
public static Dataset array(final Object obj, boolean isUnsigned) {
    return DatasetFactory.createFromObject(obj, isUnsigned);
}

/**
 * Create a dataset from object
 *
 * @param obj
 *            can be a Java list, array or Number
 * @param dtype dataset type code
 * @return dataset
 */
public static Dataset array(final Object obj, final int dtype) {
    return DatasetFactory.createFromObject(obj, dtype);
}

/**
 * Create dataset of appropriate type from list
 *
 * @param objectList source values
 * @return dataset filled with values from list
 */
public static Dataset createFromList(List<?> objectList) {
    return DatasetFactory.createFromList(objectList);
}

/**
 * @param shape
 * @param dtype
 * @return a new dataset of given shape and type, filled with zeros
 */
public static Dataset zeros(final int[] shape, final int dtype) {
    return DatasetFactory.zeros(shape, dtype);
}

/**
 * @param itemSize
 *            if equal to 1, then non-compound dataset is returned
 * @param shape
 * @param dtype
 * @return a new dataset of given item size, shape and type, filled with zeros
 */
public static Dataset zeros(final int itemSize, final int[] shape, final int dtype) {
    return DatasetFactory.zeros(itemSize, shape, dtype);
}

/**
 * @param dataset
 * @return a new dataset of same shape and type as input dataset, filled with zeros
 */
public static Dataset zeros(final Dataset dataset) {
    return zeros(dataset, dataset.getDtype());
}

/**
 * Create a new dataset of same shape as input dataset, filled with zeros. If dtype is not
 * explicitly compound then an elemental dataset is created
 * @param dataset
 * @param dtype
 * @return a new dataset
 */
public static Dataset zeros(final Dataset dataset, final int dtype) {
    final int[] shape = dataset.getShapeRef();
    // elemental target dtype collapses compound items to a single element
    final int isize = isDTypeElemental(dtype) ? 1 : dataset.getElementsPerItem();
    return zeros(isize, shape, dtype);
}

/**
 * @param dataset
 * @return a new dataset of same shape and type as input dataset, filled with ones
 */
public static Dataset ones(final Dataset dataset) {
    return ones(dataset, dataset.getDtype());
}

/**
 * Create a new dataset of same shape as input dataset, filled with ones. If dtype is not
 * explicitly compound then an elemental dataset is created
 * @param dataset
 * @param dtype
 * @return a new dataset
 */
public static Dataset ones(final Dataset dataset, final int dtype) {
    final int[] shape = dataset.getShapeRef();
    // elemental target dtype collapses compound items to a single element
    final int isize = isDTypeElemental(dtype) ? 1 : dataset.getElementsPerItem();
    return ones(isize, shape, dtype);
}

/**
 * @param shape
 * @param dtype
 * @return a new dataset of given shape and type, filled with ones
 */
public static Dataset ones(final int[] shape, final int dtype) {
    return DatasetFactory.ones(shape, dtype);
}

/**
 * @param itemSize
 *            if equal to 1, then non-compound dataset is returned
 * @param shape
 * @param dtype
 * @return a new dataset of given item size, shape and type, filled with ones
 */
public static Dataset ones(final int itemSize, final int[] shape, final int dtype) {
    return DatasetFactory.ones(itemSize, shape, dtype);
}

/**
 * @param stop exclusive upper bound (start is 0, step is 1)
 * @param dtype
 * @return a new dataset of given shape and type, filled with values determined by parameters
 */
public static Dataset arange(final double stop, final int dtype) {
    return arange(0, stop, 1, dtype);
}

/**
 * @param start
 * @param stop
 * @param step
 * @param dtype
 * @return a new 1D dataset of given type, filled with values determined by parameters
 */
public static Dataset arange(final double start, final double stop, final double step, final int dtype) {
    return DatasetFactory.createRange(start, stop, step, dtype);
}
/**
 * @param start
 * @param stop
 * @param step
 * @return number of steps to take
 */
public static int calcSteps(final double start, final double stop, final double step) {
    // ceil((stop - start) / step) is valid for either sign of step: with a
    // negative step both numerator and denominator change sign, leaving the
    // quotient unchanged. (The previous if/else had two identical branches.)
    return (int) Math.ceil((stop - start) / step);
}
@Override
public boolean isComplex() {
    int type = getDtype();
    return type == COMPLEX64 || type == COMPLEX128;
}

@Override
public Dataset real() {
    // non-complex datasets are their own real part
    return this;
}

@Override
public Dataset realView() {
    return getView();
}

@Override
public Dataset getSlice(final int[] start, final int[] stop, final int[] step) {
    return getSlice(new SliceND(shape, start, stop, step));
}

@Override
public Dataset getSlice(Slice... slice) {
    return getSlice(new SliceND(shape, slice));
}

@Override
public Dataset getSlice(IMonitor monitor, Slice... slice) {
    // monitor ignored: slicing in-memory data is fast
    return getSlice(slice);
}

@Override
public Dataset getSlice(IMonitor monitor, SliceND slice) {
    return getSlice(slice);
}

@Override
public Dataset getSlice(IMonitor monitor, int[] start, int[] stop, int[] step) {
    return getSlice(start, stop, step);
}
/**
 * Get a slice of the dataset. The returned dataset is a copied selection of items
 * @param slice
 * @return The dataset of the sliced data
 */
@Override
public Dataset getSlice(final SliceND slice) {
    SliceIterator it = (SliceIterator) getSliceIterator(slice);
    AbstractDataset s = getSlice(it);
    // carry metadata over to the copy and slice it to match
    s.metadata = copyMetadata();
    s.sliceMetadata(true, slice);
    return s;
}

/**
 * Get a slice of the dataset. The returned dataset is a copied selection of items
 *
 * @param iterator Slice iterator
 * @return The dataset of the sliced data
 */
abstract public AbstractDataset getSlice(final SliceIterator iterator);

@Override
public Dataset setSlice(final Object obj, final SliceND slice) {
    Dataset ds;
    if (obj instanceof Dataset) {
        ds = (Dataset) obj;
    } else if (!(obj instanceof IDataset)) {
        // raw object (Number, array, list): wrap as double dataset, compound
        // when this dataset has multi-element items (and is not complex)
        ds = DatasetFactory.createFromObject(obj, isComplex() || getElementsPerItem() == 1 ? FLOAT64 : ARRAYFLOAT64);
    } else {
        ds = DatasetUtils.convertToDataset((IDataset) obj);
    }
    return setSlicedView(getSliceView(slice), ds);
}

@Override
public Dataset setSlice(final Object obj, final int[] start, final int[] stop, final int[] step) {
    return setSlice(obj, new SliceND(shape, start, stop, step));
}

/**
 * Set a view of current dataset to given dataset with broadcasting
 * @param view destination view over this dataset
 * @param d source data (broadcast to the view's shape)
 * @return this dataset
 */
abstract Dataset setSlicedView(Dataset view, Dataset d);

@Override
public Dataset setSlice(Object obj, Slice... slice) {
    if (slice == null || slice.length == 0) {
        // no slice given: fill the whole dataset
        return setSlice(obj, new SliceND(shape));
    }
    return setSlice(obj, new SliceND(shape, slice));
}
@Override
public boolean all() {
    // true if every element is true (non-zero)
    return Comparisons.allTrue(this);
}

@Override
public BooleanDataset all(final int axis) {
    return Comparisons.allTrue(this, axis);
}

@Override
public boolean any() {
    // true if at least one element is true (non-zero)
    return Comparisons.anyTrue(this);
}

@Override
public BooleanDataset any(final int axis) {
    return Comparisons.anyTrue(this, axis);
}

@Override
public Dataset ifloorDivide(final Object o) {
    // in-place divide then floor
    return idivide(o).ifloor();
}

@Override
public double residual(final Object o) {
    return residual(o, null, false);
}

@Override
public double residual(final Object o, boolean ignoreNaNs) {
    return residual(o, null, ignoreNaNs);
}
// Keys for the storedValues cache of lazily-computed quantities.
// Axis-specific entries append "-" + axis and are purged when the shape changes.
public static final String STORE_HASH = "hash";
protected static final String STORE_SHAPELESS_HASH = "shapelessHash";
public static final String STORE_MAX = "max";
public static final String STORE_MIN = "min";
protected static final String STORE_MAX_POS = "maxPos";
protected static final String STORE_MIN_POS = "minPos";
protected static final String STORE_STATS = "stats";
protected static final String STORE_SUM = "sum";
protected static final String STORE_MEAN = "mean";
protected static final String STORE_VAR = "var";
private static final String STORE_POS_MAX = "+max";
private static final String STORE_POS_MIN = "+min";
protected static final String STORE_COUNT = "count";
private static final String STORE_INDEX = "Index";
protected static final String STORE_BROADCAST = "Broadcast";

/**
 * Get value from store
 *
 * @param key
 * @return value (or null if absent or store not yet created)
 */
public Object getStoredValue(String key) {
    if (storedValues == null) {
        return null;
    }
    return storedValues.get(key);
}

/**
 * Set value in store
 * <p>
 * This is a <b>private method</b>: do not use!
 *
 * @param key
 * @param obj
 */
public void setStoredValue(String key, Object obj) {
    // store is created lazily on first write
    if (storedValues == null) {
        storedValues = new HashMap<String, Object>();
    }
    storedValues.put(key, obj);
}
/**
 * Build a store key prefixed according to the ignore flags
 * (e.g. "infnanmax" when both infinities and NaNs are ignored).
 */
protected static String storeName(boolean ignoreNaNs, String name) {
    return storeName(ignoreNaNs, false, name);
}

protected static String storeName(boolean ignoreNaNs, boolean ignoreInfs, String name) {
    final StringBuilder key = new StringBuilder();
    if (ignoreInfs) {
        key.append("inf");
    }
    if (ignoreNaNs) {
        key.append("nan");
    }
    return key.append(name).toString();
}
/**
 * Copy stored values from original to derived dataset
 * @param orig source dataset (only copied if it is an AbstractDataset with a store)
 * @param derived destination dataset
 * @param shapeChanged if true, drop entries that are only valid for the old shape
 */
protected static void copyStoredValues(IDataset orig, AbstractDataset derived, boolean shapeChanged) {
    if (orig instanceof AbstractDataset && ((AbstractDataset) orig).storedValues != null) {
        derived.storedValues = new HashMap<String, Object>(((AbstractDataset) orig).storedValues);
        if (shapeChanged) {
            filterStoredValues(derived.storedValues);
        }
    }
}
/**
 * Remove shape-dependent entries from a stored-values map: the full hash (which
 * incorporates the shape) and every axis-specific statistic (keys contain "-").
 * @param map stored values, filtered in place
 */
private static void filterStoredValues(Map<String, Object> map) {
    map.remove(STORE_HASH);
    // remove via the key-set view's iterator: one pass, no temporary key list,
    // and safe against ConcurrentModificationException
    final java.util.Iterator<String> it = map.keySet().iterator();
    while (it.hasNext()) {
        if (it.next().contains("-")) { // axis-specific entry
            it.remove();
        }
    }
}
/**
 * Calculate minimum and maximum for a dataset
 * <p>
 * Also computes a shape-independent rolling hash and the positive-only
 * extrema, caching all of them in the store.
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 */
protected void calculateMaxMin(final boolean ignoreNaNs, final boolean ignoreInfs) {
    IndexIterator iter = getIterator();
    double amax = Double.NEGATIVE_INFINITY;
    double amin = Double.POSITIVE_INFINITY;
    // NB Double.MIN_VALUE is the smallest positive double, so any positive
    // value encountered will raise pmax
    double pmax = Double.MIN_VALUE;
    double pmin = Double.POSITIVE_INFINITY;
    double hash = 0;
    boolean hasNaNs = false;
    while (iter.hasNext()) {
        final double val = getElementDoubleAbs(iter.index);
        if (Double.isNaN(val)) {
            // NaN/Inf still stir the hash (without the value) so datasets
            // differing only in NaN placement can share a hash
            hash = (hash * 19) % Integer.MAX_VALUE;
            if (ignoreNaNs)
                continue;
            hasNaNs = true;
        } else if (Double.isInfinite(val)) {
            hash = (hash * 19) % Integer.MAX_VALUE;
            if (ignoreInfs)
                continue;
        } else {
            hash = (hash * 19 + val) % Integer.MAX_VALUE;
        }
        if (val > amax) {
            amax = val;
        }
        if (val < amin) {
            amin = val;
        }
        if (val > 0) {
            if (val < pmin) {
                pmin = val;
            }
            if (val > pmax) {
                pmax = val;
            }
        }
    }
    // fold dtype and item size into the hash so different types never collide trivially
    int ihash = ((int) hash) * 19 + getDtype() * 17 + getElementsPerItem();
    setStoredValue(storeName(ignoreNaNs, ignoreInfs, STORE_SHAPELESS_HASH), ihash);
    // any un-ignored NaN poisons all four extrema
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX), hasNaNs ? Double.NaN : fromDoubleToNumber(amax));
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN), hasNaNs ? Double.NaN : fromDoubleToNumber(amin));
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MAX), hasNaNs ? Double.NaN : fromDoubleToNumber(pmax));
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MIN), hasNaNs ? Double.NaN : fromDoubleToNumber(pmin));
}
/**
 * Calculate summary statistics for a dataset
 * <p>
 * When no hash is cached yet, extrema and the shapeless hash are computed in
 * the same pass and cached too; otherwise only the SummaryStatistics object
 * is built and stored under the given name.
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 * @param name store key for the resulting SummaryStatistics
 */
protected void calculateSummaryStats(final boolean ignoreNaNs, final boolean ignoreInfs, final String name) {
    final IndexIterator iter = getIterator();
    final SummaryStatistics stats = new SummaryStatistics();
    //sum of logs is slow and we dont use it, so blocking its calculation here
    stats.setSumLogImpl(new NullStorelessUnivariateStatistic());
    if (storedValues == null || !storedValues.containsKey(STORE_HASH)) {
        boolean hasNaNs = false;
        double hash = 0;
        // Double.MIN_VALUE is the smallest positive double (see calculateMaxMin)
        double pmax = Double.MIN_VALUE;
        double pmin = Double.POSITIVE_INFINITY;
        while (iter.hasNext()) {
            final double val = getElementDoubleAbs(iter.index);
            if (Double.isNaN(val)) {
                hash = (hash * 19) % Integer.MAX_VALUE;
                if (ignoreNaNs)
                    continue;
                hasNaNs = true;
            } else if (Double.isInfinite(val)) {
                hash = (hash * 19) % Integer.MAX_VALUE;
                if (ignoreInfs)
                    continue;
            } else {
                hash = (hash * 19 + val) % Integer.MAX_VALUE;
            }
            if (val > 0) {
                if (val < pmin) {
                    pmin = val;
                }
                if (val > pmax) {
                    pmax = val;
                }
            }
            stats.addValue(val);
        }
        int ihash = ((int) hash) * 19 + getDtype() * 17 + getElementsPerItem();
        setStoredValue(storeName(ignoreNaNs, ignoreInfs, STORE_SHAPELESS_HASH), ihash);
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX), hasNaNs ? Double.NaN : fromDoubleToNumber(stats.getMax()));
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN), hasNaNs ? Double.NaN : fromDoubleToNumber(stats.getMin()));
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MAX), hasNaNs ? Double.NaN : fromDoubleToNumber(pmax));
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MIN), hasNaNs ? Double.NaN : fromDoubleToNumber(pmin));
        storedValues.put(name, stats);
    } else {
        // hash/extrema already cached: only accumulate the statistics
        while (iter.hasNext()) {
            final double val = getElementDoubleAbs(iter.index);
            if (ignoreNaNs && Double.isNaN(val)) {
                continue;
            }
            if (ignoreInfs && Double.isInfinite(val)) {
                continue;
            }
            stats.addValue(val);
        }
        storedValues.put(name, stats);
    }
}
/**
 * Calculate summary statistics for a dataset along an axis
 * <p>
 * For every position in the reduced (axis-removed) shape this walks the
 * values along the axis, accumulating count, max, min, argmax, argmin, sum,
 * mean and variance, and stores each result dataset under an axis-suffixed key.
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 * @param axis dimension to reduce over
 */
protected void calculateSummaryStats(final boolean ignoreNaNs, final boolean ignoreInfs, final int axis) {
    int rank = getRank();
    int[] oshape = getShape();
    int alen = oshape[axis];
    oshape[axis] = 1;

    // reduced shape with the axis removed
    int[] nshape = new int[rank - 1];
    for (int i = 0; i < axis; i++) {
        nshape[i] = oshape[i];
    }
    for (int i = axis + 1; i < rank; i++) {
        nshape[i - 1] = oshape[i];
    }

    final int dtype = getDtype();
    IntegerDataset count = new IntegerDataset(nshape);
    Dataset max = DatasetFactory.zeros(nshape, dtype);
    Dataset min = DatasetFactory.zeros(nshape, dtype);
    IntegerDataset maxIndex = new IntegerDataset(nshape);
    IntegerDataset minIndex = new IntegerDataset(nshape);
    Dataset sum = DatasetFactory.zeros(nshape, getLargestDType(dtype));
    DoubleDataset mean = new DoubleDataset(nshape);
    DoubleDataset var = new DoubleDataset(nshape);

    IndexIterator qiter = max.getIterator(true);
    int[] qpos = qiter.getPos();
    int[] spos = oshape.clone();

    while (qiter.hasNext()) {
        // map reduced position qpos to a source position spos with 0 at the axis
        int i = 0;
        for (; i < axis; i++) {
            spos[i] = qpos[i];
        }
        spos[i++] = 0;
        for (; i < rank; i++) {
            spos[i] = qpos[i - 1];
        }

        final SummaryStatistics stats = new SummaryStatistics();
        //sum of logs is slow and we dont use it, so blocking its calculation here
        stats.setSumLogImpl(new NullStorelessUnivariateStatistic());
        double amax = Double.NEGATIVE_INFINITY;
        double amin = Double.POSITIVE_INFINITY;
        boolean hasNaNs = false;
        if (ignoreNaNs) {
            for (int j = 0; j < alen; j++) {
                spos[axis] = j;
                final double val = getDouble(spos);
                if (Double.isNaN(val)) {
                    hasNaNs = true;
                    continue;
                } else if (ignoreInfs && Double.isInfinite(val)) {
                    continue;
                }
                if (val > amax) {
                    amax = val;
                }
                if (val < amin) {
                    amin = val;
                }
                stats.addValue(val);
            }
        } else {
            for (int j = 0; j < alen; j++) {
                spos[axis] = j;
                final double val = getDouble(spos);
                if (hasNaNs) {
                    // NOTE(review): once a NaN is seen, later non-NaN values are
                    // added as 0 — apparently to keep the count advancing while
                    // max/min stay NaN; the sum/mean become meaningless. Confirm.
                    if (!Double.isNaN(val))
                        stats.addValue(0);
                    continue;
                }
                if (Double.isNaN(val)) {
                    amax = Double.NaN;
                    amin = Double.NaN;
                    hasNaNs = true;
                } else if (ignoreInfs && Double.isInfinite(val)) {
                    continue;
                } else {
                    if (val > amax) {
                        amax = val;
                    }
                    if (val < amin) {
                        amin = val;
                    }
                }
                stats.addValue(val);
            }
        }

        count.setAbs(qiter.index, (int) stats.getN());
        max.setObjectAbs(qiter.index, amax);
        min.setObjectAbs(qiter.index, amin);

        // second pass along the axis to find the first index of max and min
        boolean fmax = false;
        boolean fmin = false;
        if (hasNaNs) {
            if (ignoreNaNs) {
                for (int j = 0; j < alen; j++) {
                    spos[axis] = j;
                    final double val = getDouble(spos);
                    if (Double.isNaN(val))
                        continue;
                    if (!fmax && val == amax) {
                        maxIndex.setAbs(qiter.index, j);
                        fmax = true;
                        if (fmin)
                            break;
                    }
                    if (!fmin && val == amin) {
                        minIndex.setAbs(qiter.index, j);
                        fmin = true;
                        if (fmax)
                            break;
                    }
                }
            } else {
                // un-ignored NaN: both extrema indices point at the first NaN
                for (int j = 0; j < alen; j++) {
                    spos[axis] = j;
                    final double val = getDouble(spos);
                    if (Double.isNaN(val)) {
                        maxIndex.setAbs(qiter.index, j);
                        minIndex.setAbs(qiter.index, j);
                        break;
                    }
                }
            }
        } else {
            for (int j = 0; j < alen; j++) {
                spos[axis] = j;
                final double val = getDouble(spos);
                if (!fmax && val == amax) {
                    maxIndex.setAbs(qiter.index, j);
                    fmax = true;
                    if (fmin)
                        break;
                }
                if (!fmin && val == amin) {
                    minIndex.setAbs(qiter.index, j);
                    fmin = true;
                    if (fmax)
                        break;
                }
            }
        }
        sum.setObjectAbs(qiter.index, stats.getSum());
        mean.setAbs(qiter.index, stats.getMean());
        var.setAbs(qiter.index, stats.getVariance());
    }
    setStoredValue(storeName(ignoreNaNs, ignoreInfs, STORE_COUNT + "-" + axis), count);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX + "-" + axis), max);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN + "-" + axis), min);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_SUM + "-" + axis), sum);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MEAN + "-" + axis), mean);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_VAR + "-" + axis), var);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX + STORE_INDEX + "-" + axis), maxIndex);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN + STORE_INDEX + "-" + axis), minIndex);
}
/**
 * @param x
 * @return number from given double (boxed in this dataset's natural type)
 */
abstract protected Number fromDoubleToNumber(double x);

// return biggest native primitive if integer (should test for 64bit?)
private static Number fromDoubleToBiggestNumber(double x, int dtype) {
    switch (dtype) {
    case BOOL:
    case INT8:
    case INT16:
    case INT32:
        // cast via long first so out-of-range doubles saturate before narrowing
        return Integer.valueOf((int) (long) x);
    case INT64:
        return Long.valueOf((long) x);
    case FLOAT32:
        return Float.valueOf((float) x);
    case FLOAT64:
        return Double.valueOf(x);
    }
    return null; // unknown or compound dtype
}
// Fetch (or lazily compute and cache) whole-dataset summary statistics
private SummaryStatistics getStatistics(boolean ignoreNaNs) {
    boolean ignoreInfs = false; // TODO
    if (!hasFloatingPointElements()) {
        ignoreNaNs = false; // integer datasets cannot hold NaNs
    }

    String n = storeName(ignoreNaNs, ignoreInfs, STORE_STATS);
    SummaryStatistics stats = (SummaryStatistics) getStoredValue(n);
    if (stats == null) {
        calculateSummaryStats(ignoreNaNs, ignoreInfs, n);
        stats = (SummaryStatistics) getStoredValue(n);
    }

    return stats;
}
@Override
public int[] maxPos() {
    return maxPos(false);
}

@Override
public int[] minPos() {
    return minPos(false);
}

// Fetch (or compute and cache) a hash combining values, dtype, item size and shape
private int getHash() {
    Object value = getStoredValue(STORE_HASH);
    if (value == null) {
        // derive the shape-full hash from the shapeless one
        value = getStoredValue(STORE_SHAPELESS_HASH);
        if (value == null) {
            calculateMaxMin(false, false);
            value = getStoredValue(STORE_SHAPELESS_HASH);
        }

        int ihash = (Integer) value;
        int rank = shape.length;
        for (int i = 0; i < rank; i++) {
            ihash = ihash * 17 + shape[i];
        }
        storedValues.put(STORE_HASH, ihash);
        return ihash;
    }

    return (Integer) value;
}
// Fetch (or lazily compute and cache) an extremum value under the given store key
protected Object getMaxMin(boolean ignoreNaNs, boolean ignoreInfs, String key) {
    if (!hasFloatingPointElements()) {
        // integer datasets cannot hold NaNs or infinities
        ignoreNaNs = false;
        ignoreInfs = false;
    }

    key = storeName(ignoreNaNs, ignoreInfs , key);
    Object value = getStoredValue(key);
    if (value == null) {
        calculateMaxMin(ignoreNaNs, ignoreInfs);
        value = getStoredValue(key);
    }

    return value;
}

// Fetch (or lazily compute and cache) a per-axis statistic under the given store key
private Object getStatistics(boolean ignoreNaNs, int axis, String stat) {
    if (!hasFloatingPointElements())
        ignoreNaNs = false;

    boolean ignoreInfs = false; // TODO
    stat = storeName(ignoreNaNs, ignoreInfs , stat);
    axis = checkAxis(axis);
    Object obj = getStoredValue(stat);

    if (obj == null) {
        calculateSummaryStats(ignoreNaNs, ignoreInfs, axis);
        obj = getStoredValue(stat);
    }

    return obj;
}
@Override
public Number max(boolean... ignoreInvalids) {
    // varargs: [0] = ignore NaNs, [1] = ignore infinities (defaults to [0])
    boolean igNan = ignoreInvalids!=null && ignoreInvalids.length>0 ? ignoreInvalids[0] : false;
    boolean igInf = ignoreInvalids!=null && ignoreInvalids.length>1 ? ignoreInvalids[1] : igNan;
    return (Number) getMaxMin(igNan, igInf, STORE_MAX);
}

@Override
public Number positiveMax(boolean ignoreInvalids) {
    return (Number) getMaxMin(ignoreInvalids, ignoreInvalids, STORE_POS_MAX);
}

@Override
public Number positiveMax(boolean ignoreNaNs, boolean ignoreInfs) {
    return (Number) getMaxMin(ignoreNaNs, ignoreInfs, STORE_POS_MAX);
}

@Override
public Dataset max(int axis) {
    return max(false, axis);
}

@Override
public Dataset max(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_MAX + "-" + axis);
}

@Override
public Number min(boolean... ignoreInvalids) {
    boolean igNan = ignoreInvalids!=null && ignoreInvalids.length>0 ? ignoreInvalids[0] : false;
    boolean igInf = ignoreInvalids!=null && ignoreInvalids.length>1 ? ignoreInvalids[1] : igNan;
    return (Number) getMaxMin(igNan, igInf, STORE_MIN);
}

@Override
public Number positiveMin(boolean ignoreInvalids) {
    return (Number) getMaxMin(ignoreInvalids, ignoreInvalids, STORE_POS_MIN);
}

@Override
public Number positiveMin(boolean ignoreNaNs, boolean ignoreInfs) {
    return (Number) getMaxMin(ignoreNaNs, ignoreInfs, STORE_POS_MIN);
}

@Override
public Dataset min(int axis) {
    return min(false, axis);
}

@Override
public Dataset min(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_MIN + "-" + axis);
}

@Override
public int argMax() {
    return argMax(false);
}

@Override
public int argMax(boolean ignoreInvalids) {
    // flat index of the maximum position
    return getFlat1DIndex(maxPos(ignoreInvalids));
}

@Override
public IntegerDataset argMax(int axis) {
    return argMax(false, axis);
}

@Override
public IntegerDataset argMax(boolean ignoreNaNs, int axis) {
    return (IntegerDataset) getStatistics(ignoreNaNs, axis, STORE_MAX + STORE_INDEX + "-" + axis);
}

@Override
public int argMin() {
    return argMin(false);
}

@Override
public int argMin(boolean ignoreInvalids) {
    return getFlat1DIndex(minPos(ignoreInvalids));
}

@Override
public IntegerDataset argMin(int axis) {
    return argMin(false, axis);
}

@Override
public IntegerDataset argMin(boolean ignoreNaNs, int axis) {
    return (IntegerDataset) getStatistics(ignoreNaNs, axis, STORE_MIN + STORE_INDEX + "-" + axis);
}

@Override
public Number peakToPeak() {
    return fromDoubleToNumber(max().doubleValue() - min().doubleValue());
}

@Override
public Dataset peakToPeak(int axis) {
    return Maths.subtract(max(axis), min(axis));
}
@Override
public long count() {
    return count(false);
}

@Override
public long count(boolean ignoreNaNs) {
    // number of items included in the summary statistics
    return getStatistics(ignoreNaNs).getN();
}

@Override
public Dataset count(int axis) {
    return count(false, axis);
}

@Override
public Dataset count(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_COUNT + "-" + axis);
}

@Override
public Object sum() {
    return sum(false);
}

@Override
public Object sum(boolean ignoreNaNs) {
    return getStatistics(ignoreNaNs).getSum();
}

@Override
public Dataset sum(int axis) {
    return sum(false, axis);
}

@Override
public Dataset sum(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_SUM + "-" + axis);
}

@Override
public Object typedSum() {
    return typedSum(getDtype());
}

@Override
public Object typedSum(int dtype) {
    // sum boxed in the widest primitive appropriate for the dtype
    return fromDoubleToBiggestNumber(getStatistics(false).getSum(), dtype);
}

@Override
public Dataset typedSum(int dtype, int axis) {
    return DatasetUtils.cast(sum(axis), dtype);
}

@Override
public Object product() {
    return Stats.product(this);
}

@Override
public Dataset product(int axis) {
    return Stats.product(this, axis);
}

@Override
public Object typedProduct(int dtype) {
    return Stats.typedProduct(this, dtype);
}

@Override
public Dataset typedProduct(int dtype, int axis) {
    return Stats.typedProduct(this, dtype, axis);
}
@Override
public Object mean(boolean... ignoreNaNs) {
    // varargs: [0] = ignore NaNs (defaults to false)
    boolean ig = ignoreNaNs!=null && ignoreNaNs.length>0 ? ignoreNaNs[0] : false;
    return getStatistics(ig).getMean();
}

@Override
public Dataset mean(int axis) {
    return mean(false, axis);
}

@Override
public Dataset mean(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_MEAN + "-" + axis);
}

@Override
public Number variance() {
    return variance(false);
}

@Override
public Number variance(boolean isDatasetWholePopulation) {
    SummaryStatistics stats = getStatistics(false);

    if (isDatasetWholePopulation) {
        // population variance: recompute without Bessel's (n-1) correction
        Variance newVar = (Variance) stats.getVarianceImpl().copy();
        newVar.setBiasCorrected(false);
        return newVar.getResult();
    }
    return stats.getVariance();
}

@Override
public Dataset variance(int axis) {
    return (Dataset) getStatistics(false, axis, STORE_VAR + "-" + axis);
}

@Override
public Number stdDeviation() {
    return Math.sqrt(variance().doubleValue());
}

@Override
public Number stdDeviation(boolean isDatasetWholePopulation) {
    return Math.sqrt(variance(isDatasetWholePopulation).doubleValue());
}

@Override
public Dataset stdDeviation(int axis) {
    final Dataset v = (Dataset) getStatistics(false, axis, STORE_VAR + "-" + axis);
    return Maths.sqrt(v);
}

@Override
public Number rootMeanSquare() {
    // RMS^2 = variance + mean^2 (NB uses the sample variance from the stats object)
    final SummaryStatistics stats = getStatistics(false);
    final double mean = stats.getMean();
    return Math.sqrt(stats.getVariance() + mean * mean);
}

@Override
public Dataset rootMeanSquare(int axis) {
    Dataset v = (Dataset) getStatistics(false, axis, STORE_VAR + "-" + axis);
    Dataset m = (Dataset) getStatistics(false, axis, STORE_MEAN + "-" + axis);
    Dataset result = Maths.power(m, 2);
    return Maths.sqrt(result.iadd(v));
}
/**
 * Set item from compatible dataset in a direct and speedy way. Remember to setDirty afterwards.
 *
 * @param dindex destination item index
 * @param sindex source item index
 * @param src
 *            is the source data buffer
 */
protected abstract void setItemDirect(final int dindex, final int sindex, final Object src);

@Override
public boolean hasErrors() {
    return super.getError() != null;
}

// Fetch the error dataset (materialising any lazy errors) and ensure a
// BroadcastStride for it is cached so per-index error lookups are cheap
protected Dataset getInternalError() {
    ILazyDataset led = super.getError();
    if (led == null)
        return null;

    Dataset ed = DatasetUtils.sliceAndConvertLazyDataset(led);
    if (!(led instanceof Dataset)) {
        setError(ed); // set back so the slice is not redone next time
    }

    // check for broadcast strides
    Object bs = getStoredValue(STORE_BROADCAST);
    if (bs == null) {
        bs = new BroadcastStride(ed, shape);
        setStoredValue(STORE_BROADCAST, bs);
    }

    return ed;
}
/**
 * Maps positions in this dataset's shape to 1D buffer offsets in a (possibly
 * smaller) error dataset, using broadcast strides so a lower-rank error array
 * can be indexed as if it had this dataset's shape.
 */
class BroadcastStride {
    private int[] bStride; // broadcast strides into the error buffer
    private int[] nShape;  // the shape being broadcast to
    private int bOffset;   // offset of the first element in the buffer

    public BroadcastStride(Dataset d, final int[] newShape) {
        d.setShape(BroadcastIterator.padShape(d.getShapeRef(), newShape.length - d.getRank())); // set to padded shape
        bStride = createBroadcastStrides(d, newShape);
        nShape = newShape.clone();
        bOffset = d.getOffset();
    }

    /**
     * @param i position in dimension 0 (may be negative to count from the end)
     * @return 1D buffer offset
     */
    public int get1DIndex(int i) {
        if (i < 0) {
            i += nShape[0];
        }
        if (i < 0 || i >= nShape[0]) {
            throwAIOOBException(i, nShape[0], 0);
        }
        return i * bStride[0] + bOffset;
    }

    /**
     * @param i position in dimension 0 (may be negative)
     * @param j position in dimension 1 (may be negative)
     * @return 1D buffer offset
     */
    protected int get1DIndex(int i, int j) {
        if (i < 0) {
            i += nShape[0];
        }
        if (i < 0 || i >= nShape[0]) {
            throwAIOOBException(i, nShape[0], 0);
        }
        if (j < 0) {
            j += nShape[1];
        }
        if (j < 0 || j >= nShape[1]) {
            // bug fix: report the offending second index j (previously passed i)
            throwAIOOBException(j, nShape[1], 1);
        }
        return i * bStride[0] + j * bStride[1] + bOffset;
    }

    protected int get1DIndex(int... n) {
        return get1DIndexFromStrides(nShape, bStride, bOffset, n);
    }
}
@Override
public Dataset getError() {
    Dataset ed = getInternalError();
    if (ed == null)
        return null;

    if (ed.getSize() != getSize()) {
        // errors are a broadcastable (smaller) dataset: expand to full shape
        DoubleDataset errors = new DoubleDataset(shape);
        errors.setSlice(ed);
        return errors;
    }
    return ed;
}

@Override
public double getError(final int i) {
    Dataset ed = getInternalError();
    if (ed == null)
        return 0;

    // broadcast stride was cached by getInternalError()
    BroadcastStride bs = (BroadcastStride) getStoredValue(STORE_BROADCAST);
    return ed.getElementDoubleAbs(bs.get1DIndex(i));
}

@Override
public double getError(final int i, final int j) {
    Dataset ed = getInternalError();
    if (ed == null)
        return 0;

    BroadcastStride bs = (BroadcastStride) getStoredValue(STORE_BROADCAST);
    return ed.getElementDoubleAbs(bs.get1DIndex(i, j));
}

@Override
public double getError(int... pos) {
    Dataset ed = getInternalError();
    if (ed == null)
        return 0;

    BroadcastStride bs = (BroadcastStride) getStoredValue(STORE_BROADCAST);
    return ed.getElementDoubleAbs(bs.get1DIndex(pos));
}

@Override
public double[] getErrorArray(final int i) {
    Dataset ed = getInternalError();
    if (ed == null)
        return null;

    // NOTE(review): returns a single-element array even for compound datasets — confirm
    return new double[] {getError(i)};
}

@Override
public double[] getErrorArray(final int i, final int j) {
    Dataset ed = getInternalError();
    if (ed == null)
        return null;

    return new double[] {getError(i, j)};
}

@Override
public double[] getErrorArray(int... pos) {
    Dataset ed = getInternalError();
    if (ed == null)
        return null;

    return new double[] {getError(pos)};
}
/**
 * Fetch the squared-error buffer and ensure a BroadcastStride for it is cached.
 * @return squared errors or null if no error metadata is set
 */
protected Dataset getInternalSquaredError() {
    Dataset sed = getErrorBuffer();
    if (sed == null) {
        // no error metadata: previously fell through and NPE'd in BroadcastStride
        return null;
    }

    // check for broadcast strides
    Object bs = getStoredValue(STORE_BROADCAST);
    if (bs == null) {
        bs = new BroadcastStride(sed, shape);
        setStoredValue(STORE_BROADCAST, bs);
    }

    return sed;
}
/**
 * Get the buffer holding the squared error data, or null when no error metadata is set.
 * Externally-supplied error metadata is materialized and re-wrapped in an
 * {@link ErrorMetadataImpl} first so the squared form can be served.
 */
@Override
public Dataset getErrorBuffer() {
    ErrorMetadata emd = getErrorMetadata();
    if (emd == null)
        return null;
    if (!(emd instanceof ErrorMetadataImpl)) {
        // slice the lazy error data into memory and install it as internal metadata
        ILazyDataset led = emd.getError();
        Dataset ed = DatasetUtils.sliceAndConvertLazyDataset(led);
        emd = new ErrorMetadataImpl();
        setMetadata(emd);
        ((ErrorMetadataImpl) emd).setError(ed);
    }
    return ((ErrorMetadataImpl) emd).getSquaredError();
}
@Override
public void setError(Serializable errors) {
    super.setError(errors);
    // Invalidate any cached broadcast mapping: it refers to the previous error data
    if (getStoredValue(STORE_BROADCAST) != null) {
        setStoredValue(STORE_BROADCAST, null);
    }
}
/**
 * Set a copy of the buffer that backs the (squared) error data
 * @param buffer can be null, anything that can be used to create a DoubleDataset or CompoundDoubleDataset
 */
@Override
public void setErrorBuffer(Serializable buffer) {
    // Invalidate any cached broadcast mapping first — it refers to the old buffer
    Object bs = getStoredValue(STORE_BROADCAST);
    if (bs != null) {
        setStoredValue(STORE_BROADCAST, null);
    }
    if (buffer == null) {
        // null buffer means remove the error metadata entirely
        clearMetadata(ErrorMetadata.class);
        return;
    }
    IDataset d = (IDataset) createFromSerializable(buffer, false);
    ErrorMetadata emd = getErrorMetadata();
    if (!(emd instanceof ErrorMetadataImpl)) {
        // replace foreign/absent metadata with the internal implementation
        emd = new ErrorMetadataImpl();
        setMetadata(emd);
    }
    ((ErrorMetadataImpl) emd).setSquaredError(d);
}
} |
package org.eclipse.dawnsci.analysis.dataset.impl;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.text.Format;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.complex.Complex;
import org.apache.commons.math3.stat.descriptive.StorelessUnivariateStatistic;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.commons.math3.stat.descriptive.moment.Variance;
import org.eclipse.dawnsci.analysis.api.dataset.IDataset;
import org.eclipse.dawnsci.analysis.api.dataset.ILazyDataset;
import org.eclipse.dawnsci.analysis.api.dataset.Slice;
import org.eclipse.dawnsci.analysis.api.dataset.SliceND;
import org.eclipse.dawnsci.analysis.api.metadata.ErrorMetadata;
import org.eclipse.dawnsci.analysis.api.metadata.MetadataType;
import org.eclipse.dawnsci.analysis.api.monitor.IMonitor;
import org.eclipse.dawnsci.analysis.dataset.metadata.ErrorMetadataImpl;
/**
* Generic container class for data
* <p/>
* Each subclass has an array of primitive types, elements of this array are grouped or
* compounded to make items
* <p/>
* Data items can be boolean, integer, float, complex float, vector float, etc
*/
public abstract class AbstractDataset extends LazyDatasetBase implements Dataset {
// Dataset type codes: aliases of the constants declared in the Dataset interface.
/**
 * Boolean
 */
public static final int BOOL = Dataset.BOOL;
/**
 * Signed 8-bit integer
 */
public static final int INT8 = Dataset.INT8;
/**
 * Signed 16-bit integer
 */
public static final int INT16 = Dataset.INT16;
/**
 * Signed 32-bit integer
 */
public static final int INT32 = Dataset.INT32;
/**
 * Integer (same as signed 32-bit integer)
 */
public static final int INT = Dataset.INT;
/**
 * Signed 64-bit integer
 */
public static final int INT64 = Dataset.INT64;
/**
 * 32-bit floating point
 */
public static final int FLOAT32 = Dataset.FLOAT32;
/**
 * 64-bit floating point
 */
public static final int FLOAT64 = Dataset.FLOAT64;
/**
 * Floating point (same as 64-bit floating point)
 */
public static final int FLOAT = Dataset.FLOAT;
/**
 * 64-bit complex floating point (real and imaginary parts are 32-bit floats)
 */
public static final int COMPLEX64 = Dataset.COMPLEX64;
/**
 * 128-bit complex floating point (real and imaginary parts are 64-bit floats)
 */
public static final int COMPLEX128 = Dataset.COMPLEX128;
/**
 * Complex floating point (same as 64-bit floating point)
 */
public static final int COMPLEX = Dataset.COMPLEX;
/**
 * String
 */
public static final int STRING = Dataset.STRING;
/**
 * Object
 */
public static final int OBJECT = Dataset.OBJECT;
/**
 * Array of signed 8-bit integers
 */
public static final int ARRAYINT8 = Dataset.ARRAYINT8;
/**
 * Array of signed 16-bit integers
 */
public static final int ARRAYINT16 = Dataset.ARRAYINT16;
/**
 * Array of three signed 16-bit integers for RGB values
 */
public static final int RGB = Dataset.RGB;
/**
 * Array of signed 32-bit integers
 */
public static final int ARRAYINT32 = Dataset.ARRAYINT32;
/**
 * Array of signed 64-bit integers
 */
public static final int ARRAYINT64 = Dataset.ARRAYINT64;
/**
 * Array of 32-bit floating points
 */
public static final int ARRAYFLOAT32 = Dataset.ARRAYFLOAT32;
/**
 * Array of 64-bit floating points
 */
public static final int ARRAYFLOAT64 = Dataset.ARRAYFLOAT64;
/**
 * @return true if the dataset type is a scalar/complex type (code at or below COMPLEX128) or RGB
 */
protected static boolean isDTypeElemental(int dtype) {
    return dtype == RGB || dtype <= COMPLEX128;
}
/**
 * @return true if the dataset type holds floating-point elements (real, complex or compound)
 */
protected static boolean isDTypeFloating(int dtype) {
    final boolean real = dtype == FLOAT32 || dtype == FLOAT64;
    final boolean cplx = dtype == COMPLEX64 || dtype == COMPLEX128;
    final boolean comp = dtype == ARRAYFLOAT32 || dtype == ARRAYFLOAT64;
    return real || cplx || comp;
}
/**
 * @return true if the dataset type is one of the two complex types
 */
protected static boolean isDTypeComplex(int dtype) {
    return COMPLEX64 == dtype || COMPLEX128 == dtype;
}
protected int size; // number of items
transient protected AbstractDataset base; // is null when not a view
protected int[] stride; // can be null for row-major, contiguous datasets
protected int offset; // absolute index of the first item in the (possibly shared) buffer
/**
 * The data itself, held in a 1D array, but the object will wrap it to appear as possessing as many dimensions as
 * wanted
 */
protected Serializable odata = null;
/**
 * Set aliased data as base data
 */
abstract protected void setData();
/**
 * These members hold cached values. If their values are null, then recalculate, otherwise just use the values
 */
transient protected HashMap<String, Object> storedValues = null;
/**
 * Constructor required for serialisation.
 */
public AbstractDataset() {
}
/**
 * Clone this dataset while holding its monitor, so the copy is made atomically
 * with respect to other synchronized operations on this instance.
 */
@Override
public synchronized Dataset synchronizedCopy() {
    return clone();
}
/**
 * Equality here compares class, elements-per-item, size and shape only;
 * item-by-item contents are NOT compared (except for zero-rank datasets,
 * whose single item is compared).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null) {
        return false;
    }
    if (!getClass().equals(obj.getClass())) {
        if (getRank() == 0) // for zero-rank datasets, compare against the single item
            return obj.equals(getObjectAbs(0));
        return false;
    }
    Dataset other = (Dataset) obj;
    if (getElementsPerItem() != other.getElementsPerItem())
        return false;
    if (size != other.getSize())
        return false;
    if (!Arrays.equals(shape, other.getShapeRef())) {
        return false;
    }
    if (getRank() == 0) // for zero-rank datasets
        return other.getObjectAbs(0).equals(getObjectAbs(0));
    return true;
}
@Override
public int hashCode() {
    return getHash(); // delegate to the dataset-level hash
}
@Override
abstract public AbstractDataset clone();
// optional format applied when rendering items as strings; null means default formatting
protected Format stringFormat = null;
/**
 * Set the format used when converting items to strings.
 * @param format may be null to restore default formatting
 */
@Override
public void setStringFormat(Format format) {
    stringFormat = format;
}
/**
 * Cast to the given dataset type; returns this dataset unchanged when the type already matches.
 */
@Override
public Dataset cast(final int dtype) {
    if (getDtype() == dtype) {
        return this;
    }
    return DatasetUtils.cast(this, dtype);
}
/**
 * Cast to the given dataset type and item size; returns this dataset unchanged
 * when both already match.
 * @param repeat forwarded to DatasetUtils.cast
 */
@Override
public Dataset cast(final boolean repeat, final int dtype, final int isize) {
    if (getDtype() == dtype && getElementsPerItem() == isize) {
        return this;
    }
    return DatasetUtils.cast(this, repeat, dtype, isize);
}
@Override
abstract public AbstractDataset getView();
/**
 * Copy fields from original to view
 * @param orig
 * @param view
 * @param clone if true, then clone everything but bulk data
 * @param cloneMetadata if true, clone metadata
 */
protected static void copyToView(Dataset orig, AbstractDataset view, boolean clone, boolean cloneMetadata) {
    view.name = orig.getName();
    view.size = orig.getSize();
    view.odata = orig.getBuffer(); // bulk data is shared, never cloned
    view.offset = orig.getOffset();
    view.base = orig instanceof AbstractDataset ? ((AbstractDataset) orig).base : null;
    if (clone) {
        view.shape = orig.getShape(); // getShape() returns a defensive copy
        copyStoredValues(orig, view, false);
        view.stride = orig instanceof AbstractDataset && ((AbstractDataset) orig).stride != null ?
                ((AbstractDataset) orig).stride.clone() : null;
    } else {
        view.shape = orig.getShapeRef(); // share the shape array reference
        view.stride = orig instanceof AbstractDataset ? ((AbstractDataset) orig).stride : null;
    }
    view.metadata = getMetadataMap(orig, cloneMetadata);
    int odtype = orig.getDtype();
    int vdtype = view.getDtype();
    if (getBestDType(odtype, vdtype) != vdtype) {
        view.storedValues = null; // as copy is a demotion
    }
    if (odtype != vdtype && view.storedValues != null) {
        // hashes depend on the dataset type, so drop stale cached values
        view.storedValues.remove(STORE_SHAPELESS_HASH);
        view.storedValues.remove(STORE_HASH);
    }
}
/**
 * Build a map of metadata grouped by metadata sub-interface.
 * @param a dataset whose metadata is collected (may be null)
 * @param clone if true, each metadata item is cloned
 * @return map of metadata type to list of items, or null when none
 */
protected static Map<Class<? extends MetadataType>, List<MetadataType>> getMetadataMap(Dataset a, boolean clone) {
    if (a == null)
        return null;
    List<MetadataType> all = null;
    try {
        all = a.getMetadata(null);
    } catch (Exception ignored) {
        // best effort: a dataset that cannot supply metadata is treated as having none
    }
    if (all == null)
        return null;
    HashMap<Class<? extends MetadataType>, List<MetadataType>> map = new HashMap<Class<? extends MetadataType>, List<MetadataType>>();
    for (MetadataType m : all) {
        if (m == null) {
            continue;
        }
        Class<? extends MetadataType> c = findMetadataTypeSubInterfaces(m.getClass());
        List<MetadataType> l = map.get(c);
        if (l == null) {
            l = new ArrayList<MetadataType>();
            map.put(c, l);
        }
        if (clone)
            m = m.clone();
        l.add(m);
    }
    return map;
}
@Override
public IntegerDataset getIndices() {
    // Dataset of the same shape whose items are their own indices
    final IntegerDataset indices = DatasetUtils.indices(shape);
    final String n = getName();
    if (n != null) {
        indices.setName("Indices of " + n);
    }
    return indices;
}
/**
 * Create a view with axes permuted; the view shares this dataset's buffer and
 * expresses the permutation purely through its shape/stride/offset.
 */
@Override
public Dataset getTransposedView(int... axes) {
    axes = checkPermutatedAxes(shape, axes);
    AbstractDataset t = getView();
    if (axes == null || getRank() == 1) // nothing to permute
        return t;
    int rank = shape.length;
    int[] tstride = new int[rank];
    int[] toffset = new int[1];
    // derive strides for the full (un-sliced) extent of this dataset
    int[] nshape = createStrides(new SliceND(shape), this, tstride, toffset);
    int[] nstride = new int[rank];
    for (int i = 0; i < rank; i++) {
        final int ax = axes[i];
        nstride[i] = tstride[ax];
        nshape[i] = shape[ax];
    }
    t.shape = nshape;
    t.stride = nstride;
    t.offset = toffset[0];
    t.base = base == null ? this : base; // views always point at the root dataset
    copyStoredValues(this, t, true);
    t.transposeMetadata(axes);
    return t;
}
@Override
public Dataset transpose(int... axes) {
    final Dataset view = getTransposedView(axes);
    // defensively handle a null view by cloning this dataset instead
    return view != null ? view.clone() : clone();
}
@Override
public Dataset swapAxes(int axis1, int axis2) {
    final int rank = shape.length;
    // negative axes count from the end
    final int a1 = axis1 < 0 ? axis1 + rank : axis1;
    final int a2 = axis2 < 0 ? axis2 + rank : axis2;
    if (a1 < 0 || a2 < 0 || a1 >= rank || a2 >= rank) {
        logger.error("Axis value invalid - out of range");
        throw new IllegalArgumentException("Axis value invalid - out of range");
    }
    if (rank == 1 || a1 == a2) {
        return this; // nothing to swap
    }
    // identity permutation with the two axes exchanged
    final int[] perm = new int[rank];
    for (int k = 0; k < rank; k++) {
        perm[k] = k;
    }
    perm[a1] = a2;
    perm[a2] = a1;
    return getTransposedView(perm);
}
/**
 * @return a 1-D dataset holding all items (via reshape to total size)
 */
@Override
public Dataset flatten() {
    return reshape(size);
}
/**
 * Calculate total number of items in given shape
 * @param shape
 * @return size
 */
public static long calcLongSize(final int[] shape) {
    if (shape == null || shape.length == 0) { // special case of zero-rank shape
        return 1;
    }
    double total = 1.0; // double accumulator so overflow can be detected below
    for (int d = 0; d < shape.length; d++) {
        final int extent = shape[d];
        if (extent == 0) {
            return 0;
        }
        if (extent < 0) { // negative extents are illegal
            throw new IllegalArgumentException(String.format(
                    "The %d-th is %d which is an illegal argument as it is negative", d, extent));
        }
        total *= extent;
    }
    // refuse sizes too large to address with a long
    if (total > Long.MAX_VALUE) {
        throw new IllegalArgumentException("Size of the dataset is too large to allocate");
    }
    return (long) total;
}
/**
 * Calculate total number of items in given shape
 * @param shape
 * @return size
 */
public static int calcSize(final int[] shape) {
    final long total = calcLongSize(shape);
    // refuse sizes too large to address with an int
    if (total > Integer.MAX_VALUE) {
        throw new IllegalArgumentException("Size of the dataset is too large to allocate");
    }
    return (int) total;
}
/**
 * Find dataset type that best fits given types The best type takes into account complex and array datasets
 *
 * @param atype
 *            first dataset type
 * @param btype
 *            second dataset type
 * @return best dataset type
 */
public static int getBestDType(final int atype, final int btype) {
    int besttype;
    // reduce compound types to their elemental type codes
    int a = atype >= ARRAYINT8 ? atype / ARRAYMUL : atype;
    int b = btype >= ARRAYINT8 ? btype / ARRAYMUL : btype;
    if (isDTypeFloating(a)) {
        if (!isDTypeFloating(b)) {
            b = getBestFloatDType(b);
            if (isDTypeComplex(a)) {
                b += COMPLEX64 - FLOAT32; // promote to matching complex type
            }
        }
    } else if (isDTypeFloating(b)) {
        a = getBestFloatDType(a);
        if (isDTypeComplex(b)) {
            a += COMPLEX64 - FLOAT32; // promote to matching complex type
        }
    }
    besttype = a > b ? a : b;
    if (atype >= ARRAYINT8 || btype >= ARRAYINT8) {
        // restore compound-ness when either input was a compound type
        if (besttype >= COMPLEX64) {
            throw new IllegalArgumentException("Complex type cannot be promoted to compound type");
        }
        besttype *= ARRAYMUL;
    }
    return besttype;
}
/**
 * The largest dataset type suitable for a summation of around a few thousand items without changing from the "kind"
 * of dataset
 *
 * @param otype
 * @return largest dataset type available for given dataset type
 */
public static int getLargestDType(final int otype) {
    if (otype == BOOL || otype == INT8 || otype == INT16) {
        return INT32;
    }
    if (otype == INT32 || otype == INT64) {
        return INT64;
    }
    if (otype == FLOAT32 || otype == FLOAT64) {
        return FLOAT64;
    }
    if (otype == COMPLEX64 || otype == COMPLEX128) {
        return COMPLEX128;
    }
    if (otype == ARRAYINT8 || otype == ARRAYINT16) {
        return ARRAYINT32;
    }
    if (otype == ARRAYINT32 || otype == ARRAYINT64) {
        return ARRAYINT64;
    }
    if (otype == ARRAYFLOAT32 || otype == ARRAYFLOAT64) {
        return ARRAYFLOAT64;
    }
    throw new IllegalArgumentException("Unsupported dataset type");
}
/**
 * Find floating point dataset type that best fits given types The best type takes into account complex and array
 * datasets
 *
 * @param otype
 *            old dataset type
 * @return best dataset type
 */
public static int getBestFloatDType(final int otype) {
    int btype;
    switch (otype) {
    case BOOL:
    case INT8:
    case INT16:
    case ARRAYINT8:
    case ARRAYINT16:
    case FLOAT32:
    case ARRAYFLOAT32:
    case COMPLEX64:
        btype = FLOAT32; // demote, if necessary
        break;
    case INT32:
    case INT64:
    case ARRAYINT32:
    case ARRAYINT64:
    case FLOAT64:
    case ARRAYFLOAT64:
    case COMPLEX128:
        btype = FLOAT64; // promote, if necessary
        break;
    default:
        btype = otype; // for array datasets, preserve type
        break;
    }
    return btype;
}
/**
 * Find floating point dataset type that best fits given class The best type takes into account complex and array
 * datasets
 *
 * @param cls
 *            of an item or element
 * @return best dataset type
 */
public static int getBestFloatDType(Class<? extends Object> cls) {
    return getBestFloatDType(getDTypeFromClass(cls));
}
// Lookup from element class (boxed and primitive) to dataset type code.
// NOTE(review): `transient` on a static field has no effect on serialization — confirm intent.
transient private static final Map<Class<?>, Integer> dtypeMap = createDTypeMap();
private static Map<Class<?>, Integer> createDTypeMap() {
    Map<Class<?>, Integer> result = new HashMap<Class<?>, Integer>();
    result.put(Boolean.class, BOOL);
    result.put(Byte.class, INT8);
    result.put(Short.class, INT16);
    result.put(Integer.class, INT32);
    result.put(Long.class, INT64);
    result.put(Float.class, FLOAT32);
    result.put(Double.class, FLOAT64);
    result.put(boolean.class, BOOL);
    result.put(byte.class, INT8);
    result.put(short.class, INT16);
    result.put(int.class, INT32);
    result.put(long.class, INT64);
    result.put(float.class, FLOAT32);
    result.put(double.class, FLOAT64);
    result.put(Complex.class, COMPLEX128);
    result.put(String.class, STRING);
    result.put(Object.class, OBJECT);
    return result;
}
/**
 * Get dataset type from a class (single element per item)
 *
 * @param cls
 * @return dataset type
 */
public static int getDTypeFromClass(Class<? extends Object> cls) {
    return getDTypeFromClass(cls, 1);
}
/**
 * Get dataset type from a class
 *
 * @param cls
 * @param isize number of elements per item
 * @return dataset type
 * @throws IllegalArgumentException if the class is not a supported element class
 */
public static int getDTypeFromClass(Class<? extends Object> cls, int isize) {
    Integer dtype = dtypeMap.get(cls);
    if (dtype == null) {
        throw new IllegalArgumentException("Class of object not supported");
    }
    if (isize != 1) {
        // NOTE(review): `<` excludes FLOAT64 itself from compound promotion, so a
        // multi-element double stays FLOAT64 rather than ARRAYFLOAT64 — confirm this is intended
        if (dtype < FLOAT64)
            dtype *= ARRAYMUL;
    }
    return dtype;
}
/**
 * Get dataset type from an object. The following are supported: Java Number objects, Apache common math Complex
 * objects, Java arrays and lists
 *
 * @param obj
 * @return dataset type (or -1 for null input)
 */
public static int getDTypeFromObject(Object obj) {
    int dtype = -1;
    if (obj == null) {
        return dtype;
    }
    if (obj instanceof List<?>) {
        // take the widest type found among the list's elements
        List<?> jl = (List<?>) obj;
        int l = jl.size();
        for (int i = 0; i < l; i++) {
            int ldtype = getDTypeFromObject(jl.get(i));
            if (ldtype > dtype) {
                dtype = ldtype;
            }
        }
    } else if (obj.getClass().isArray()) {
        Class<?> ca = obj.getClass().getComponentType();
        if (isComponentSupported(ca)) {
            return getDTypeFromClass(ca);
        }
        // nested arrays: recurse and take the widest element type
        int l = Array.getLength(obj);
        for (int i = 0; i < l; i++) {
            Object lo = Array.get(obj, i);
            int ldtype = getDTypeFromObject(lo);
            if (ldtype > dtype) {
                dtype = ldtype;
            }
        }
    } else if (obj instanceof Dataset) {
        return ((Dataset) obj).getDtype();
    } else if (obj instanceof ILazyDataset) {
        dtype = getDTypeFromClass(((ILazyDataset) obj).elementClass(), ((ILazyDataset) obj).getElementsPerItem());
    } else {
        dtype = getDTypeFromClass(obj.getClass());
    }
    return dtype;
}
/**
 * @param comp
 * @return true if the component class is supported as a dataset element
 */
public static boolean isComponentSupported(Class<? extends Object> comp) {
    if (comp.isPrimitive()) {
        return true;
    }
    return Number.class.isAssignableFrom(comp) || Boolean.class.equals(comp)
            || Complex.class.equals(comp) || String.class.equals(comp);
}
/**
 * Get dataset type from given dataset
 * @param d
 * @return dataset type
 */
public static int getDType(ILazyDataset d) {
    if (d instanceof Dataset) // concrete datasets carry their type directly
        return ((Dataset) d).getDtype();
    return getDTypeFromClass(d.elementClass(), d.getElementsPerItem());
}
/**
 * Get shape from object (array or list supported)
 * @param obj
 * @return shape
 */
public static int[] getShapeFromObject(final Object obj) {
    ArrayList<Integer> lshape = new ArrayList<Integer>();
    getShapeFromObj(lshape, obj, 0);
    if (obj != null && lshape.size() == 0) {
        return new int[0]; // cope with a single item
    }
    // unbox the accumulated per-depth lengths into the shape array
    final int rank = lshape.size();
    final int[] shape = new int[rank];
    for (int i = 0; i < rank; i++) {
        shape[i] = lshape.get(i);
    }
    return shape;
}
/**
 * Get shape from object
 * @param ldims
 * @param obj
 * @param depth
 * @return true if there is a possibility of differing lengths
 */
private static boolean getShapeFromObj(final ArrayList<Integer> ldims, Object obj, int depth) {
    if (obj == null)
        return true;
    if (obj instanceof List<?>) {
        List<?> jl = (List<?>) obj;
        int l = jl.size();
        updateShape(ldims, depth, l);
        for (int i = 0; i < l; i++) {
            Object lo = jl.get(i);
            if (!getShapeFromObj(ldims, lo, depth + 1)) {
                break; // stop at first non-array element
            }
        }
        return true;
    }
    Class<? extends Object> ca = obj.getClass().getComponentType();
    if (ca != null) { // obj is an array
        final int l = Array.getLength(obj);
        updateShape(ldims, depth, l);
        if (isComponentSupported(ca)) {
            return true; // leaf array of supported elements: no need to recurse
        }
        for (int i = 0; i < l; i++) {
            Object lo = Array.get(obj, i);
            if (!getShapeFromObj(ldims, lo, depth + 1)) {
                break;
            }
        }
        return true;
    } else if (obj instanceof IDataset) {
        // splice in the dataset's own shape at this depth
        int[] s = ((IDataset) obj).getShape();
        for (int i = 0; i < s.length; i++) {
            updateShape(ldims, depth++, s[i]);
        }
        return true;
    } else {
        return false; // not an array of any type
    }
}
/**
 * Record length l at the given depth, growing the list for a new depth or
 * keeping the maximum length seen so far at an existing depth.
 */
private static void updateShape(final ArrayList<Integer> ldims, final int depth, final int l) {
    if (depth < ldims.size()) {
        if (l > ldims.get(depth)) {
            ldims.set(depth, l);
        }
    } else {
        ldims.add(l);
    }
}
/**
 * Fill dataset from object at depth dimension
 * @param obj
 * @param depth
 * @param pos position
 */
protected void fillData(Object obj, final int depth, final int[] pos) {
    if (obj == null) {
        // missing entries become NaN for floating types and are skipped otherwise
        int dtype = getDtype();
        if (dtype == FLOAT32)
            set(Float.NaN, pos);
        else if (dtype == FLOAT64)
            set(Double.NaN, pos);
        return;
    }
    if (obj instanceof List<?>) {
        List<?> jl = (List<?>) obj;
        int l = jl.size();
        for (int i = 0; i < l; i++) {
            Object lo = jl.get(i);
            fillData(lo, depth + 1, pos);
            pos[depth]++;
        }
        pos[depth] = 0; // reset this axis before returning to caller
    } else if (obj.getClass().isArray()) {
        int l = Array.getLength(obj);
        for (int i = 0; i < l; i++) {
            Object lo = Array.get(obj, i);
            fillData(lo, depth + 1, pos);
            pos[depth]++;
        }
        pos[depth] = 0; // reset this axis before returning to caller
    } else if (obj instanceof IDataset) {
        // copy the dataset into the slice spanning all remaining dimensions
        boolean[] a = new boolean[shape.length];
        for (int i = depth; i < a.length; i++)
            a[i] = true;
        setSlice(obj, getSliceIteratorFromAxes(pos, a));
    } else {
        set(obj, pos); // scalar leaf value
    }
}
/**
 * Convert an object (Number, Boolean, Complex or single-item dataset) to a boolean.
 * @param b object to convert
 * @return false only for zero/false values
 * @throws IllegalArgumentException for unsupported classes or multi-item datasets
 */
protected static boolean toBoolean(final Object b) {
    if (b instanceof Number) {
        return ((Number) b).longValue() != 0;
    } else if (b instanceof Boolean) {
        return ((Boolean) b).booleanValue();
    } else if (b instanceof Complex) {
        return ((Complex) b).getReal() != 0;
    } else if (b instanceof Dataset) {
        Dataset db = (Dataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toBoolean(db.getObjectAbs(0));
    } else if (b instanceof IDataset) {
        // BUG FIX: was cast to Dataset, which always throws here since the
        // `instanceof Dataset` branch above did not match
        IDataset db = (IDataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toBoolean(db.getObject(new int[db.getRank()]));
    } else {
        logger.error("Argument is of unsupported class");
        throw new IllegalArgumentException("Argument is of unsupported class");
    }
}
/**
 * Convert an object (Number, Boolean, Complex or single-item dataset) to a long.
 * NaN and infinite values map to 0; complex values use their real part.
 * @param b object to convert
 * @return long value
 * @throws IllegalArgumentException for unsupported classes or multi-item datasets
 */
protected static long toLong(final Object b) {
    if (b instanceof Number) {
        double t = ((Number) b).doubleValue();
        if (Double.isNaN(t) || Double.isInfinite(t)) {
            return 0; // non-finite values cannot be represented as long
        }
        return ((Number) b).longValue();
    } else if (b instanceof Boolean) {
        return ((Boolean) b).booleanValue() ? 1 : 0;
    } else if (b instanceof Complex) {
        return (long) ((Complex) b).getReal();
    } else if (b instanceof Dataset) {
        Dataset db = (Dataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toLong(db.getObjectAbs(0));
    } else if (b instanceof IDataset) {
        // BUG FIX: was cast to Dataset, which always throws here since the
        // `instanceof Dataset` branch above did not match
        IDataset db = (IDataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toLong(db.getObject(new int[db.getRank()]));
    } else {
        logger.error("Argument is of unsupported class");
        throw new IllegalArgumentException("Argument is of unsupported class");
    }
}
/**
 * Convert an object (Number, Boolean, Complex, array or single-item dataset) to
 * its real (double) value; arrays use their first element (0 when empty).
 * @param b object to convert
 * @return real part as double
 * @throws IllegalArgumentException for unsupported classes or multi-item datasets
 */
protected static double toReal(final Object b) {
    if (b instanceof Number) {
        return ((Number) b).doubleValue();
    } else if (b instanceof Boolean) {
        return ((Boolean) b).booleanValue() ? 1 : 0;
    } else if (b instanceof Complex) {
        return ((Complex) b).getReal();
    } else if (b.getClass().isArray()) {
        if (Array.getLength(b) == 0)
            return 0; // empty array has no real component
        return toReal(Array.get(b, 0));
    } else if (b instanceof Dataset) {
        Dataset db = (Dataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toReal(db.getObjectAbs(0));
    } else if (b instanceof IDataset) {
        // BUG FIX: was cast to Dataset, which always throws here since the
        // `instanceof Dataset` branch above did not match
        IDataset db = (IDataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toReal(db.getObject(new int[db.getRank()]));
    } else {
        logger.error("Argument is of unsupported class");
        throw new IllegalArgumentException("Argument is of unsupported class");
    }
}
/**
 * Convert an object to its imaginary (double) component; real-valued inputs give 0,
 * arrays use their second element (0 when fewer than two elements).
 * @param b object to convert
 * @return imaginary part as double
 * @throws IllegalArgumentException for unsupported classes or multi-item datasets
 */
protected static double toImag(final Object b) {
    if (b instanceof Number) {
        return 0;
    } else if (b instanceof Boolean) {
        return 0;
    } else if (b instanceof Complex) {
        return ((Complex) b).getImaginary();
    } else if (b.getClass().isArray()) {
        if (Array.getLength(b) < 2)
            return 0; // no second element to serve as imaginary part
        return toReal(Array.get(b, 1));
    } else if (b instanceof Dataset) {
        Dataset db = (Dataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toImag(db.getObjectAbs(0));
    } else if (b instanceof IDataset) {
        // BUG FIX: was cast to Dataset, which always throws here since the
        // `instanceof Dataset` branch above did not match
        IDataset db = (IDataset) b;
        if (db.getSize() != 1) {
            logger.error("Given dataset must have only one item");
            throw new IllegalArgumentException("Given dataset must have only one item");
        }
        return toImag(db.getObject(new int[db.getRank()]));
    } else {
        logger.error("Argument is of unsupported class");
        throw new IllegalArgumentException("Argument is of unsupported class");
    }
}
/**
 * @param withPosition if true, the iterator also tracks the n-D position
 * @return an iterator over all items; strided views get a StrideIterator
 */
@Override
public IndexIterator getIterator(final boolean withPosition) {
    if (stride != null)
        return new StrideIterator(shape, stride, offset);
    return withPosition ? new ContiguousIteratorWithPosition(shape, size) : new ContiguousIterator(size);
}
@Override
public IndexIterator getIterator() {
    return getIterator(false); // position tracking not needed
}
/**
 * @param axes axes to omit from iteration
 */
@Override
public PositionIterator getPositionIterator(final int... axes) {
    return new PositionIterator(shape, axes);
}
@Override
public IndexIterator getSliceIterator(final int[] start, final int[] stop, final int[] step) {
    return getSliceIterator(new SliceND(shape, start, stop, step));
}
/**
 * @param slice
 * @return an slice iterator that operates like an IndexIterator
 */
public IndexIterator getSliceIterator(SliceND slice) {
    if (calcLongSize(slice.getShape()) == 0) {
        return new NullIterator(shape, slice.getShape()); // empty slice iterates nothing
    }
    if (stride != null)
        return new StrideIterator(getElementsPerItem(), shape, stride, offset, slice);
    return new SliceIterator(shape, size, slice);
}
/**
 * Build a slice iterator that covers the full extent of the flagged axes and a
 * single position on the remaining axes.
 * @param pos starting position (null means origin)
 * @param axes flags per axis; true iterates the whole axis (null means all axes)
 */
@Override
public SliceIterator getSliceIteratorFromAxes(final int[] pos, boolean[] axes) {
    int rank = shape.length;
    int[] start;
    int[] stop = new int[rank];
    int[] step = new int[rank];
    if (pos == null) {
        start = new int[rank];
    } else if (pos.length == rank) {
        start = pos.clone();
    } else {
        throw new IllegalArgumentException("pos array length is not equal to rank of dataset");
    }
    if (axes == null) {
        axes = new boolean[rank];
        Arrays.fill(axes, true);
    } else if (axes.length != rank) {
        throw new IllegalArgumentException("axes array length is not equal to rank of dataset");
    }
    for (int i = 0; i < rank; i++) {
        if (axes[i]) {
            stop[i] = shape[i]; // iterate the full axis
        } else {
            stop[i] = start[i] + 1; // pin to the single given position
        }
        step[i] = 1;
    }
    return (SliceIterator) getSliceIterator(start, stop, step);
}
/**
 * @return iterator over items where the choice dataset is true
 */
@Override
public BooleanIterator getBooleanIterator(Dataset choice) {
    return getBooleanIterator(choice, true);
}
/**
 * @param value select items where the choice dataset equals this value
 */
@Override
public BooleanIterator getBooleanIterator(Dataset choice, boolean value) {
    return new BooleanIterator(getIterator(), choice, value);
}
/**
 * Select the items flagged true in the selection dataset into a new 1-D dataset.
 * @param selection boolean dataset of compatible shape
 */
@Override
public Dataset getByBoolean(Dataset selection) {
    checkCompatibility(selection);
    // number of selected items equals the number of true flags
    final int length = ((Number) selection.sum()).intValue();
    final int is = getElementsPerItem();
    Dataset r = DatasetFactory.zeros(is, new int[] { length }, getDtype());
    BooleanIterator biter = getBooleanIterator(selection);
    int i = 0; // output element index; advances by elements-per-item
    while (biter.hasNext()) {
        r.setObjectAbs(i, getObjectAbs(biter.index));
        i += is;
    }
    return r;
}
/**
 * Gather items by flattened (1-D) index; the result has the index dataset's shape.
 */
@Override
public Dataset getBy1DIndex(IntegerDataset index) {
    final int is = getElementsPerItem();
    final Dataset r = DatasetFactory.zeros(is, index.getShape(), getDtype());
    final IntegerIterator iter = new IntegerIterator(index, size, is);
    int i = 0; // output element index; advances by elements-per-item
    while (iter.hasNext()) {
        r.setObjectAbs(i, getObjectAbs(iter.index));
        i += is;
    }
    return r;
}
/**
 * Gather items by per-axis index objects (fancy indexing); the result shape is
 * determined by the IntegersIterator.
 */
@Override
public Dataset getByIndexes(final Object... indexes) {
    final IntegersIterator iter = new IntegersIterator(shape, indexes);
    final int is = getElementsPerItem();
    final Dataset r = DatasetFactory.zeros(is, iter.getShape(), getDtype());
    final int[] pos = iter.getPos(); // updated in place by the iterator
    int i = 0; // output element index; advances by elements-per-item
    while (iter.hasNext()) {
        r.setObjectAbs(i, getObject(pos));
        i += is;
    }
    return r;
}
/**
 * @param dtype
 * @return (boxed) class of constituent element
 */
public static Class<?> elementClass(final int dtype) {
    switch (dtype) {
    case BOOL:
        return Boolean.class;
    case INT8:
    case ARRAYINT8:
        return Byte.class;
    case INT16:
    case ARRAYINT16:
    case RGB: // RGB elements are 16-bit
        return Short.class;
    case INT32:
    case ARRAYINT32:
        return Integer.class;
    case INT64:
    case ARRAYINT64:
        return Long.class;
    case FLOAT32:
    case ARRAYFLOAT32:
        return Float.class;
    case FLOAT64:
    case ARRAYFLOAT64:
        return Double.class;
    case COMPLEX64:
        return Float.class; // complex parts are the element type
    case COMPLEX128:
        return Double.class;
    case STRING:
        return String.class;
    }
    return Object.class; // OBJECT and anything unrecognised
}
@Override
public Class<?> elementClass() {
    return elementClass(getDtype());
}
/**
 * @return true if elements are Float or Double
 */
@Override
public boolean hasFloatingPointElements() {
    Class<?> cls = elementClass();
    return cls == Float.class || cls == Double.class;
}
@Override
public int getElementsPerItem() {
    return getElementsPerItem(getDtype());
}
@Override
public int getItemsize() {
    return getItemsize(getDtype(), getElementsPerItem());
}
/**
 * @param dtype
 * @return number of elements per item
 * @throws UnsupportedOperationException for compound types, whose element count
 *         is per-instance and cannot be derived from the type code alone
 */
public static int getElementsPerItem(final int dtype) {
    switch (dtype) {
    case ARRAYINT8:
    case ARRAYINT16:
    case ARRAYINT32:
    case ARRAYINT64:
    case ARRAYFLOAT32:
    case ARRAYFLOAT64:
        throw new UnsupportedOperationException("Multi-element type unsupported");
    case COMPLEX64:
    case COMPLEX128:
        return 2; // real and imaginary parts
    }
    return 1;
}
/**
 * @param dtype
 * @return length of single item in bytes
 */
public static int getItemsize(final int dtype) {
    return getItemsize(dtype, getElementsPerItem(dtype));
}
/**
 * @param dtype
 * @param isize
 *            number of elements in an item
 * @return length of single item in bytes (0 element size for STRING/OBJECT/unknown)
 */
public static int getItemsize(final int dtype, final int isize) {
    int size;
    switch (dtype) {
    case BOOL:
        size = 1; // How is this defined?
        break;
    case INT8:
    case ARRAYINT8:
        size = Byte.SIZE / 8;
        break;
    case INT16:
    case ARRAYINT16:
    case RGB:
        size = Short.SIZE / 8;
        break;
    case INT32:
    case ARRAYINT32:
        size = Integer.SIZE / 8;
        break;
    case INT64:
    case ARRAYINT64:
        size = Long.SIZE / 8;
        break;
    case FLOAT32:
    case ARRAYFLOAT32:
    case COMPLEX64:
        size = Float.SIZE / 8;
        break;
    case FLOAT64:
    case ARRAYFLOAT64:
    case COMPLEX128:
        size = Double.SIZE / 8;
        break;
    default:
        size = 0; // non-numeric types have no fixed byte size
        break;
    }
    return size * isize;
}
@Override
public String getName() {
    return name;
}
@Override
public void setName(final String name) {
    this.name = name;
}
/**
 * @return number of items
 * @throws NullPointerException if the backing buffer was never allocated
 */
@Override
public int getSize() {
    if (odata == null) {
        throw new NullPointerException("The data object inside the dataset has not been allocated, "
                + "this suggests a failed or absent construction of the dataset");
    }
    return size;
}
/**
 * @return a defensive copy of the shape (empty array if shape is null)
 */
@Override
public int[] getShape() {
    // make a copy of the dimensions data, and put that out
    if (shape == null) {
        logger.warn("Shape is null!!!");
        return new int[] {};
    }
    return shape.clone();
}
@Override
public int getRank() {
    return shape.length;
}
/**
 * @return total byte footprint of the item data (items x item size)
 */
@Override
public int getNbytes() {
    return getSize() * getItemsize();
}
/**
 * Check for -1 placeholder in shape and replace if necessary
 * @param shape modified in place: a single -1 is replaced by the inferred extent
 * @param size total number of items the shape must account for
 * @throws IllegalArgumentException for multiple -1s or a size mismatch
 */
private void checkShape(int[] shape, int size) {
    int rank = shape.length;
    int found = -1; // index of the -1 placeholder, if any
    int nsize = 1;  // product of the explicit extents
    for (int i = 0; i < rank; i++) {
        int d = shape[i];
        if (d == -1) {
            if (found == -1) {
                found = i;
            } else {
                logger.error("Can only have one -1 placeholder in shape");
                throw new IllegalArgumentException("Can only have one -1 placeholder in shape");
            }
        } else {
            nsize *= d;
        }
    }
    if (found >= 0) {
        shape[found] = size / nsize; // infer the placeholder extent
    } else if (nsize != size) {
        logger.error("New shape is not same size as old shape");
        throw new IllegalArgumentException("New shape is not same size as old shape");
    }
}
@Override
public void setShape(final int... shape) {
int[] nshape = shape.clone();
checkShape(nshape, size);
if (Arrays.equals(this.shape, nshape))
return;
if (stride != null) {
// the only compatible shapes are ones where new dimensions are factors of old dimensions
// or are combined adjacent old dimensions
int[] oshape = this.shape;
int orank = oshape.length;
int nrank = nshape.length;
int[] nstride = new int[nrank];
boolean ones = true;
for (int i = 0, j = 0; i < orank || j < nrank;) {
if (i < orank && j < nrank && oshape[i] == nshape[j]) {
nstride[j++] = stride[i++];
} else if (j < nrank && nshape[j] == 1) {
nstride[j++] = 0;
} else if (i < orank && oshape[i] == 1) {
i++;
} else {
if (j < nrank)
j++;
if (i < orank)
i++;
ones = false;
}
}
if (!ones) { // not just ones differ in shapes
int[] ostride = stride;
int ob = 0;
int oe = 1;
int nb = 0;
int ne = 1;
while (ob < orank && nb < nrank) {
int ol = oshape[ob];
int nl = nshape[nb];
if (nl < ol) { // find group of shape dimensions that form common size
do { // case where new shape spreads single dimension over several dimensions
if (ne == nrank) {
break;
}
nl *= nshape[ne++];
} while (nl < ol);
if (nl != ol) {
logger.error("Subshape is incompatible with single dimension");
throw new IllegalArgumentException("Subshape is incompatible with single dimension");
}
int on = ne - 1;
while (nshape[on] == 1) {
on
}
nstride[on] = ostride[ob];
for (int n = on - 1; n >= nb; n
if (nshape[n] == 1)
continue;
nstride[n] = nshape[on] * nstride[on];
on = n;
}
} else if (ol < nl) {
do { // case where new shape combines several dimensions into one dimension
if (oe == orank) {
break;
}
ol *= oshape[oe++];
} while (ol < nl);
if (nl != ol) {
logger.error("Single dimension is incompatible with subshape");
throw new IllegalArgumentException("Single dimension is incompatible with subshape");
}
int oo = oe - 1;
while (oshape[oo] == 1) {
oo
}
int os = ostride[oo];
for (int o = oo - 1; o >= ob; o
if (oshape[o] == 1)
continue;
if (ostride[o] != oshape[oo] * ostride[oo]) {
logger.error("Subshape cannot be a non-contiguous view");
throw new IllegalArgumentException("Subshape cannot be a non-contiguous view");
}
oo = o;
}
nstride[nb] = os;
} else {
nstride[nb] = ostride[ob];
}
ob = oe++;
nb = ne++;
}
}
stride = nstride;
}
reshapeMetadata(this.shape, nshape);
this.shape = nshape;
if (storedValues != null)
filterStoredValues(storedValues); // as it is dependent on shape
}
@Override
public int[] getShapeRef() {
	// live reference to the shape array (no defensive copy) - callers must not mutate it
	return shape;
}

@Override
public int getOffset() {
	return offset;
}

@Override
public int[] getStrides() {
	// null when the dataset is contiguous (not a strided view)
	return stride;
}

@Override
public Serializable getBuffer() {
	return odata;
}

/**
 * Create a stride array from dataset
 * @param a dataset
 * @param offset output offset (written to element 0)
 * @return new strides
 */
public static int[] createStrides(Dataset a, final int[] offset) {
	return createStrides(a.getElementsPerItem(), a.getShapeRef(), a.getStrides(), a.getOffset(), offset);
}
/**
* Create a stride array from dataset
* @param isize
* @param shape
* @param oStride original stride
* @param oOffset original offset (only used if there is an original stride)
* @param offset output offset
* @return new strides
*/
public static int[] createStrides(final int isize, final int[] shape, final int[] oStride, final int oOffset, final int[] offset) {
int rank = shape.length;
final int[] stride;
if (oStride == null) {
offset[0] = 0;
stride = new int[rank];
int s = isize;
for (int j = rank - 1; j >= 0; j
stride[j] = s;
s *= shape[j];
}
} else {
offset[0] = oOffset;
stride = oStride.clone();
}
return stride;
}
/**
 * Create a stride array from slice information and a dataset
 * @param slice
 * @param a dataset
 * @param stride output stride (filled in place)
 * @param offset output offset (written to element 0)
 * @return new shape
 */
public static int[] createStrides(final SliceND slice, final Dataset a, final int[] stride, final int[] offset) {
	return createStrides(slice, a.getElementsPerItem(), a.getShapeRef(), a.getStrides(), a.getOffset(), stride, offset);
}
/**
* Create a stride array from slice and dataset information
* @param slice
* @param isize
* @param shape
* @param oStride original stride
* @param oOffset original offset (only used if there is an original stride)
* @param stride output stride
* @param offset output offset
* @return new shape
*/
public static int[] createStrides(final SliceND slice, final int isize, final int[] shape, final int[] oStride, final int oOffset, final int[] stride, final int[] offset) {
int[] lstart = slice.getStart();
int[] lstep = slice.getStep();
int[] newShape = slice.getShape();
int rank = shape.length;
if (oStride == null) {
int s = isize;
offset[0] = 0;
for (int j = rank - 1; j >= 0; j
stride[j] = s * lstep[j];
offset[0] += s * lstart[j];
s *= shape[j];
}
} else {
offset[0] = oOffset;
for (int j = 0; j < rank; j++) {
int s = oStride[j];
stride[j] = lstep[j] * s;
offset[0] += lstart[j] * s;
}
}
return newShape;
}
/**
 * Create a stride array from a dataset to a broadcast shape
 * @param a dataset
 * @param broadcastShape
 * @return stride array (zero stride on broadcast dimensions)
 */
public static int[] createBroadcastStrides(Dataset a, final int[] broadcastShape) {
	return createBroadcastStrides(a.getElementsPerItem(), a.getShapeRef(), a.getStrides(), broadcastShape);
}
/**
* Create a stride array from a dataset to a broadcast shape
* @param isize
* @param shape
* @param oStride original stride
* @param broadcastShape
* @return stride array
*/
public static int[] createBroadcastStrides(final int isize, final int[] shape, final int[] oStride, final int[] broadcastShape) {
int rank = shape.length;
if (broadcastShape.length != rank) {
throw new IllegalArgumentException("Dataset must have same rank as broadcast shape");
}
int[] stride = new int[rank];
if (oStride == null) {
int s = isize;
for (int j = rank - 1; j >= 0; j
if (broadcastShape[j] == shape[j]) {
stride[j] = s;
s *= shape[j];
} else {
stride[j] = 0;
}
}
} else {
for (int j = 0; j < rank; j++) {
if (broadcastShape[j] == shape[j]) {
stride[j] = oStride[j];
} else {
stride[j] = 0;
}
}
}
return stride;
}
@Override
public Dataset getSliceView(final int[] start, final int[] stop, final int[] step) {
	return getSliceView(new SliceND(shape, start, stop, step));
}

@Override
public Dataset getSliceView(Slice... slice) {
	if (slice == null || slice.length == 0) {
		// full view: share the data via strides so nothing is copied
		int[] sOffset = new int[1];
		int[] sStride = createStrides(this, sOffset);
		AbstractDataset s = getView();
		s.stride = sStride;
		s.offset = sOffset[0];
		s.base = base == null ? this : base;
		return s;
	}
	return getSliceView(new SliceND(shape, slice));
}

/**
 * Get a slice of the dataset. The returned dataset is a view on a selection of items
 * @param slice
 * @return slice view
 */
@Override
public Dataset getSliceView(SliceND slice) {
	final int rank = shape.length;

	int[] sStride = new int[rank];
	int[] sOffset = new int[1];
	int[] sShape = createStrides(slice, this, sStride, sOffset);

	AbstractDataset s = getView();
	s.shape = sShape;
	s.size = calcSize(sShape);
	s.stride = sStride;
	s.offset = sOffset[0];
	s.base = base == null ? this : base; // views always reference the root dataset
	s.metadata = copyMetadata();
	s.sliceMetadata(true, slice);
	if (slice.isAll()) {
		s.setName(name);
	} else {
		s.setDirty();
		// encode the slice in the view's name, e.g. "data[1:3]"
		s.setName(name + BLOCK_OPEN + slice + BLOCK_CLOSE);
	}
	return s;
}

/**
 * Get flattened view index of given position
 * @param pos
 *            the integer array specifying the n-D position
 * @return the index on the flattened dataset
 */
private int getFlat1DIndex(final int[] pos) {
	final int imax = pos.length;
	if (imax == 0) { // scalar dataset
		return 0;
	}
	return get1DIndexFromShape(pos);
}

/**
 * Get flattened view index of given position
 * @param shape
 * @param pos
 *            the integer array specifying the n-D position
 * @return the index on the flattened dataset
 */
public static int getFlat1DIndex(final int[] shape, final int[] pos) {
	final int imax = pos.length;
	if (imax == 0) { // scalar dataset
		return 0;
	}
	return get1DIndexFromShape(shape, pos);
}
/**
 * Function that uses the knowledge of the dataset to calculate the index in the data array
 * that corresponds to the n-dimensional position given by the int array. The input values
 * <b>must</b> be inside the arrays, this should be ok as this function is mainly in code which
 * will be run inside the get and set functions
 * 
 * @param n
 *            the integer array specifying the n-D position
 * @return the index on the data array corresponding to that location
 */
public int get1DIndex(final int... n) {
	final int imax = n.length;
	final int rank = shape.length;
	if (imax == 0) {
		// no indexes are only allowed for zero-rank or single-item datasets
		if (rank == 0 || (rank == 1 && shape[0] <= 1))
			return stride == null ? 0 : offset;
		throw new IllegalArgumentException("One or more index parameters must be supplied");
	} else if (imax > rank) {
		throw new IllegalArgumentException("No of index parameters is different to the shape of data: " + imax
				+ " given " + rank + " required");
	}

	// strided views need offset/stride arithmetic; contiguous data uses the shape
	return stride == null ? get1DIndexFromShape(n) : get1DIndexFromStrides(n);
}

// helper to throw a uniform out-of-bounds error for dimension d
private static void throwAIOOBException(int i, int s, int d) {
	throw new ArrayIndexOutOfBoundsException("Index (" + i + ") out of range [-" + s + "," + s
			+ "] in dimension " + d);
}

/**
 * @param i position index (negative values count back from the end)
 * @return the index on the data array corresponding to that location
 */
protected int get1DIndex(int i) {
	if (shape.length > 1) {
		logger.debug("This dataset is not 1D but was addressed as such");
		return get1DIndex(new int[] {i});
	}
	if (i < 0) {
		i += shape[0];
	}
	if (i < 0 || i >= shape[0]) {
		throwAIOOBException(i, shape[0], 0);
	}
	return stride == null ? i : i*stride[0] + offset;
}
/**
 * @param i row index (negative values count back from the end)
 * @param j column index (negative values count back from the end)
 * @return the index on the data array corresponding to that location
 */
protected int get1DIndex(int i, int j) {
	if (shape.length != 2) {
		logger.debug("This dataset is not 2D but was addressed as such");
		return get1DIndex(new int[] {i, j});
	}
	if (i < 0) {
		i += shape[0];
	}
	if (i < 0 || i >= shape[0]) {
		throwAIOOBException(i, shape[0], 0);
	}
	if (j < 0) {
		j += shape[1];
	}
	if (j < 0 || j >= shape[1]) {
		// report the offending column index (was wrongly reporting i)
		throwAIOOBException(j, shape[1], 1);
	}
	return stride == null ? i*shape[1] + j : i*stride[0] + j*stride[1] + offset;
}
protected int get1DIndexFromShape(final int... n) {
	return get1DIndexFromShape(shape, n);
}

// row-major (C-order) index calculation; missing trailing indexes are treated as 0
protected static int get1DIndexFromShape(final int[] shape, final int... n) {
	final int imax = n.length;
	final int rank = shape.length;
	int index = 0;
	int i = 0;
	for (; i < imax; i++) {
		final int si = shape[i];
		int ni = n[i];
		if (ni < 0) {
			ni += si; // negative indexes count back from the end
		}
		if (ni < 0 || ni >= si) {
			throwAIOOBException(ni, si, i);
		}
		index = index * si + ni;
	}
	// scale up by any unspecified trailing dimensions
	for (; i < rank; i++) {
		index *= shape[i];
	}

	return index;
}

private int get1DIndexFromStrides(final int... n) {
	return get1DIndexFromStrides(shape, stride, offset, n);
}

// strided index calculation: offset plus dot product of strides and position
private static int get1DIndexFromStrides(final int[] shape, final int[] stride, final int offset, final int... n) {
	final int rank = shape.length;
	if (rank != n.length) {
		throw new IllegalArgumentException("Number of position indexes must be equal to rank");
	}
	int index = offset;
	for (int i = 0; i < rank; i++) {
		final int si = shape[i];
		int ni = n[i];
		if (ni < 0) {
			ni += si; // negative indexes count back from the end
		}
		if (ni < 0 || ni >= si) {
			throwAIOOBException(ni, si, i);
		}
		index += stride[i] * ni;
	}
	return index;
}
/**
 * Convert a flat (1-D) index into its n-D position.
 * @param n flat index into the dataset
 * @return n-D position array
 * @throws IllegalArgumentException if the index is beyond the addressable data
 */
@Override
public int[] getNDPosition(final int n) {
	if (isIndexInRange(n)) {
		// fixed message: was missing a space after the index and misspelt "than"
		throw new IllegalArgumentException("Index provided " + n
				+ " is larger than the size of the containing array");
	}

	return stride == null ? getNDPositionFromShape(n, shape) : getNDPositionFromStrides(n);
}
// NOTE(review): despite its name, this returns true when n is OUT of range
// (callers use it as a guard before throwing) - consider renaming
private boolean isIndexInRange(final int n) {
	if (stride == null) {
		return n >= size;
	}
	return n >= getBufferLength();
}

/**
 * @return entire buffer length
 */
abstract protected int getBufferLength();
/**
 * Get n-D position from given index
 * <p>
 * Performs the inverse of row-major flattening: peels off each trailing
 * dimension with modulo/divide, leaving the leading coordinate as the
 * remaining quotient.
 * @param n index
 * @param shape
 * @return n-D position (empty array for a null or zero-length shape)
 */
public static int[] getNDPositionFromShape(int n, int[] shape) {
	if (shape == null || shape.length == 0) {
		return new int[0];
	}

	final int rank = shape.length;
	if (rank == 1) {
		return new int[] { n };
	}

	final int[] position = new int[rank];
	int remainder = n;
	for (int d = rank - 1; d > 0; d--) {
		position[d] = remainder % shape[d];
		remainder /= shape[d];
	}
	position[0] = remainder;
	return position;
}
private int[] getNDPositionFromStrides(int n) {
n -= offset;
int rank = shape.length;
if (rank == 1) {
return new int[] { n / stride[0] };
}
int[] output = new int[rank];
int i = 0;
while (i != n) { // TODO find more efficient way than this exhaustive search
int j = rank - 1;
for (; j >= 0; j
output[j]++;
i += stride[j];
if (output[j] >= shape[j]) {
output[j] = 0;
i -= shape[j] * stride[j];
} else {
break;
}
}
if (j == -1) {
logger.error("Index was not found in this strided dataset");
throw new IllegalArgumentException("Index was not found in this strided dataset");
}
}
return output;
}
@Override
public int checkAxis(int axis) {
	return checkAxis(shape.length, axis);
}

/**
 * Check that axis is in range [-rank,rank)
 * 
 * @param rank
 * @param axis
 * @return sanitized axis in range [0, rank)
 */
protected static int checkAxis(int rank, int axis) {
	if (axis < 0) {
		axis += rank; // negative axes count back from the end
	}

	if (axis < 0 || axis >= rank) {
		throw new IndexOutOfBoundsException("Axis " + axis + " given is out of range [0, " + rank + ")");
	}
	return axis;
}

// delimiters used when printing nested blocks of data and slice names
protected static final char BLOCK_OPEN = '[';
protected static final char BLOCK_CLOSE = ']';

@Override
public String toString() {
	return toString(false); // summary only (name and shape, no data)
}
@Override
public String toString(boolean showData) {
	final int rank = shape == null ? 0 : shape.length;
	final StringBuilder out = new StringBuilder();

	if (!showData) {
		// summary form: optional name plus the shape in brackets
		if (name != null && name.length() > 0) {
			out.append("Dataset '");
			out.append(name);
			out.append("' has shape ");
		} else {
			out.append("Dataset shape is ");
		}

		out.append(BLOCK_OPEN);
		if (rank > 0) {
			out.append(shape[0]);
		}
		for (int i = 1; i < rank; i++) {
			out.append(", " + shape[i]);
		}
		out.append(BLOCK_CLOSE);
		return out.toString();
	}

	if (size == 0) {
		return out.toString(); // empty dataset prints nothing
	}

	if (rank > 0) {
		int[] pos = new int[rank];
		final StringBuilder lead = new StringBuilder();
		printBlocks(out, lead, 0, pos);
	} else {
		out.append(getString()); // zero-rank: single item only
	}
	return out.toString();
}

/**
 * Limit to strings output via the toString() method
 */
private static int maxStringLength = 120;

/**
 * Set maximum line length for toString() method
 * @param maxLineLength
 */
public static void setMaxLineLength(int maxLineLength) {
	maxStringLength = maxLineLength;
}

/**
 * @return maximum line length for toString() method
 */
public static int getMaxLineLength() {
	return maxStringLength;
}

/**
 * Limit to number of sub-blocks output via the toString() method
 */
private static final int MAX_SUBBLOCKS = 6;

// tokens used when formatting data output
private final static String SEPARATOR = ",";
private final static String SPACE = " ";
private final static String ELLIPSIS = "...";
private final static String NEWLINE = "\n";
/**
 * Make a line of output for last dimension of dataset
 * 
 * @param end index of the last dimension
 * @param start leading position indexes (extended/reused as the working position)
 * @return line
 */
private StringBuilder makeLine(final int end, final int... start) {
	StringBuilder line = new StringBuilder();
	final int[] pos;
	if (end >= start.length) {
		pos = Arrays.copyOf(start, end + 1);
	} else {
		pos = start;
	}
	pos[end] = 0;
	line.append(BLOCK_OPEN);
	line.append(getString(pos));

	final int length = shape[end];

	// trim elements printed if length exceed estimate of maximum elements
	int excess = length - maxStringLength / 3; // space + number + separator
	if (excess > 0) {
		// print only head and tail halves of the row
		int index = (length - excess) / 2;
		for (int y = 1; y < index; y++) {
			line.append(SEPARATOR + SPACE);
			pos[end] = y;
			line.append(getString(pos));
		}
		index = (length + excess) / 2;
		for (int y = index; y < length; y++) {
			line.append(SEPARATOR + SPACE);
			pos[end] = y;
			line.append(getString(pos));
		}
	} else {
		for (int y = 1; y < length; y++) {
			line.append(SEPARATOR + SPACE);
			pos[end] = y;
			line.append(getString(pos));
		}
	}
	line.append(BLOCK_CLOSE);

	// trim string down to limit
	excess = line.length() - maxStringLength - ELLIPSIS.length() - 1;
	if (excess > 0) {
		// cut at separator boundaries around the middle and splice in an ellipsis
		int index = line.substring(0, (line.length() - excess) / 2).lastIndexOf(SEPARATOR) + 2;
		StringBuilder out = new StringBuilder(line.subSequence(0, index));
		out.append(ELLIPSIS + SEPARATOR);
		index = line.substring((line.length() + excess) / 2).indexOf(SEPARATOR) + (line.length() + excess) / 2 + 1;
		out.append(line.subSequence(index, line.length()));
		return out;
	}

	return line;
}
/**
 * Recursive method to print blocks: each level above the last prints a
 * bracketed list of sub-blocks, the last level prints a line of items.
 */
private void printBlocks(final StringBuilder out, final StringBuilder lead, final int level, final int[] pos) {
	if (out.length() > 0) {
		char last = out.charAt(out.length() - 1);
		if (last != BLOCK_OPEN) {
			out.append(lead); // indent continuation blocks
		}
	}
	final int end = getRank() - 1;
	if (level != end) {
		out.append(BLOCK_OPEN);
		int length = shape[level];

		// first sub-block
		pos[level] = 0;
		StringBuilder newlead = new StringBuilder(lead);
		newlead.append(SPACE);
		printBlocks(out, newlead, level + 1, pos);
		if (length < 2) { // escape
			out.append(BLOCK_CLOSE);
			return;
		}

		out.append(SEPARATOR + NEWLINE);
		for (int i = level + 1; i < end; i++) {
			out.append(NEWLINE);
		}

		// middle sub-blocks
		if (length < MAX_SUBBLOCKS) {
			for (int x = 1; x < length - 1; x++) {
				pos[level] = x;
				printBlocks(out, newlead, level + 1, pos);
				if (end <= level + 1) {
					out.append(SEPARATOR + NEWLINE);
				} else {
					out.append(SEPARATOR + NEWLINE + NEWLINE);
				}
			}
		} else {
			// too many sub-blocks: print leading and trailing few with an ellipsis between
			final int excess = length - MAX_SUBBLOCKS;
			int xmax = (length - excess) / 2;
			for (int x = 1; x < xmax; x++) {
				pos[level] = x;
				printBlocks(out, newlead, level + 1, pos);
				if (end <= level + 1) {
					out.append(SEPARATOR + NEWLINE);
				} else {
					out.append(SEPARATOR + NEWLINE + NEWLINE);
				}
			}
			out.append(newlead);
			out.append(ELLIPSIS + SEPARATOR + NEWLINE);
			xmax = (length + excess) / 2;
			for (int x = xmax; x < length - 1; x++) {
				pos[level] = x;
				printBlocks(out, newlead, level + 1, pos);
				if (end <= level + 1) {
					out.append(SEPARATOR + NEWLINE);
				} else {
					out.append(SEPARATOR + NEWLINE + NEWLINE);
				}
			}
		}

		// last sub-block
		pos[level] = length - 1;
		printBlocks(out, newlead, level + 1, pos);
		out.append(BLOCK_CLOSE);
	} else {
		out.append(makeLine(end, pos));
	}
}

@Override
public void setDirty() {
	// drop cached statistics (hash, max/min, etc.) so they are recalculated
	if (storedValues != null)
		storedValues.clear();
}

@Override
public Dataset squeezeEnds() {
	return squeeze(true);
}

@Override
public Dataset squeeze() {
	return squeeze(false);
}
@Override
public Dataset squeeze(boolean onlyFromEnds) {
	final int[] tshape = squeezeShape(shape, onlyFromEnds);
	final int[] oshape = shape;
	if (stride == null) {
		shape = tshape;
	} else {
		// also squeeze the stride array so it stays aligned with the new shape
		int rank = shape.length;
		int trank = tshape.length;
		if (trank < rank) {
			int[] tstride = new int[tshape.length];
			if (onlyFromEnds) {
				// copy strides of the kept dimensions starting at the first non-unit one
				for (int i = 0; i < rank; i++) {
					if (shape[i] != 1) {
						for (int k = 0; k < trank; k++) {
							tstride[k] = stride[i++];
						}
						break;
					}
				}
			} else {
				int t = 0;
				for (int i = 0; i < rank; i++) {
					if (shape[i] != 1) {
						tstride[t++] = stride[i];
					}
				}
			}
			shape = tshape;
			stride = tstride;
		}
	}

	reshapeMetadata(oshape, shape);
	return this;
}
/**
* Remove dimensions of 1 in given shape - from both ends only, if true
*
* @param oshape
* @param onlyFromEnds
* @return newly squeezed shape (or original if unsqueezed)
*/
public static int[] squeezeShape(final int[] oshape, boolean onlyFromEnds) {
int unitDims = 0;
int rank = oshape.length;
int start = 0;
if (onlyFromEnds) {
int i = rank - 1;
for (; i >= 0; i
if (oshape[i] == 1) {
unitDims++;
} else {
break;
}
}
for (int j = 0; j <= i; j++) {
if (oshape[j] == 1) {
unitDims++;
} else {
start = j;
break;
}
}
} else {
for (int i = 0; i < rank; i++) {
if (oshape[i] == 1) {
unitDims++;
}
}
}
if (unitDims == 0) {
return oshape;
}
int[] newDims = new int[rank - unitDims];
if (unitDims == rank)
return newDims; // zero-rank dataset
if (onlyFromEnds) {
rank = newDims.length;
for (int i = 0; i < rank; i++) {
newDims[i] = oshape[i+start];
}
} else {
int j = 0;
for (int i = 0; i < rank; i++) {
if (oshape[i] > 1) {
newDims[j++] = oshape[i];
if (j >= newDims.length)
break;
}
}
}
return newDims;
}
/**
 * Remove dimension of 1 in given shape
 * <p>
 * NOTE(review): the dimension at the given axis is removed unconditionally -
 * this does not verify that oshape[axis] == 1; confirm callers guarantee it.
 * 
 * @param oshape
 * @param axis (can be negative to count back from the end)
 * @return newly squeezed shape
 */
public static int[] squeezeShape(final int[] oshape, int axis) {
	if (oshape == null || oshape.length == 0) {
		return new int[0];
	}
	int rank = oshape.length;
	if (axis < 0) {
		axis += rank;
	}
	if (axis < 0 || axis >= rank) {
		logger.error("Axis argument is outside allowed range");
		throw new IllegalArgumentException("Axis argument is outside allowed range");
	}
	int[] nshape = new int[rank-1];
	// copy every dimension except the given axis
	for (int i = 0; i < axis; i++) {
		nshape[i] = oshape[i];
	}
	for (int i = axis+1; i < rank; i++) {
		nshape[i-1] = oshape[i];
	}
	return nshape;
}
/**
 * Check if shapes are compatible, ignoring axes of length 1
 * <p>
 * Two shapes are compatible when their non-unit dimensions agree in value
 * and order.
 * 
 * @param ashape
 * @param bshape
 * @return true if they are compatible
 */
public static boolean areShapesCompatible(final int[] ashape, final int[] bshape) {
	// collect the significant (non-unit) dimensions of the first shape
	final List<Integer> significant = new ArrayList<Integer>();
	for (int len : ashape) {
		if (len > 1) {
			significant.add(len);
		}
	}

	// match the second shape's non-unit dimensions against them in order
	final int total = significant.size();
	int matched = 0;
	for (int len : bshape) {
		if (len == 1) {
			continue;
		}
		if (matched >= total || len != significant.get(matched)) {
			return false;
		}
		matched++;
	}

	// all significant dimensions of both shapes must have been consumed
	return matched == total;
}
/**
 * Check if shapes are compatible but skip axis
 * <p>
 * Shapes must have equal rank and agree in every dimension other than the
 * given axis.
 * 
 * @param ashape
 * @param bshape
 * @param axis dimension to ignore when comparing
 * @return true if they are compatible
 */
public static boolean areShapesCompatible(final int[] ashape, final int[] bshape, final int axis) {
	if (ashape.length != bshape.length) {
		return false;
	}

	for (int d = 0; d < ashape.length; d++) {
		if (d == axis) {
			continue; // this dimension is allowed to differ
		}
		if (ashape[d] != bshape[d]) {
			return false;
		}
	}
	return true;
}
@Override
public boolean isCompatibleWith(final ILazyDataset g) {
	return areShapesCompatible(shape, g.getShape());
}

@Override
public void checkCompatibility(final ILazyDataset g) throws IllegalArgumentException {
	checkCompatibility(this, g);
}

/**
 * Check that the shapes of two datasets are compatible (ignoring unit dimensions)
 * @param g
 * @param h
 * @throws IllegalArgumentException if the shapes do not match
 */
public static void checkCompatibility(final ILazyDataset g, final ILazyDataset h) throws IllegalArgumentException {
	if (!areShapesCompatible(g.getShape(), h.getShape())) {
		throw new IllegalArgumentException("Shapes do not match");
	}
}

@Override
public Dataset reshape(final int... shape) {
	Dataset a = getView();
	try {
		a.setShape(shape);
	} catch (IllegalArgumentException e) {
		// shape is incompatible with the view's strides, so fall back to a copy
		a = a.clone();
		a.setShape(shape);
	}
	return a;
}
// Convenience factory methods: all delegate to DatasetFactory

/**
 * Create a dataset from object (automatically detect dataset type)
 * 
 * @param obj
 *            can be a Java list, array or Number
 * @return dataset
 */
public static Dataset array(final Object obj) {
	return DatasetFactory.createFromObject(obj);
}

/**
 * Create a dataset from object (automatically detect dataset type)
 * 
 * @param obj
 *            can be a Java list, array or Number
 * @param isUnsigned
 *            if true, interpret integer values as unsigned by increasing element bit width
 * @return dataset
 */
public static Dataset array(final Object obj, boolean isUnsigned) {
	return DatasetFactory.createFromObject(obj, isUnsigned);
}

/**
 * Create a dataset from object
 * 
 * @param obj
 *            can be a Java list, array or Number
 * @param dtype dataset type code
 * @return dataset
 */
public static Dataset array(final Object obj, final int dtype) {
	return DatasetFactory.createFromObject(obj, dtype);
}

/**
 * Create dataset of appropriate type from list
 * 
 * @param objectList
 * @return dataset filled with values from list
 */
public static Dataset createFromList(List<?> objectList) {
	return DatasetFactory.createFromList(objectList);
}
/**
 * @param shape
 * @param dtype
 * @return a new dataset of given shape and type, filled with zeros
 */
public static Dataset zeros(final int[] shape, final int dtype) {
	return DatasetFactory.zeros(shape, dtype);
}

/**
 * @param itemSize
 *            if equal to 1, then non-compound dataset is returned
 * @param shape
 * @param dtype
 * @return a new dataset of given item size, shape and type, filled with zeros
 */
public static Dataset zeros(final int itemSize, final int[] shape, final int dtype) {
	return DatasetFactory.zeros(itemSize, shape, dtype);
}

/**
 * @param dataset
 * @return a new dataset of same shape and type as input dataset, filled with zeros
 */
public static Dataset zeros(final Dataset dataset) {
	return zeros(dataset, dataset.getDtype());
}

/**
 * Create a new dataset of same shape as input dataset, filled with zeros. If dtype is not
 * explicitly compound then an elemental dataset is created
 * @param dataset
 * @param dtype
 * @return a new dataset
 */
public static Dataset zeros(final Dataset dataset, final int dtype) {
	final int[] shape = dataset.getShapeRef();
	// elemental dtypes force item size 1; compound dtypes keep the source's item size
	final int isize = isDTypeElemental(dtype) ? 1 :dataset.getElementsPerItem();

	return zeros(isize, shape, dtype);
}

/**
 * @param dataset
 * @return a new dataset of same shape and type as input dataset, filled with ones
 */
public static Dataset ones(final Dataset dataset) {
	return ones(dataset, dataset.getDtype());
}

/**
 * Create a new dataset of same shape as input dataset, filled with ones. If dtype is not
 * explicitly compound then an elemental dataset is created
 * @param dataset
 * @param dtype
 * @return a new dataset
 */
public static Dataset ones(final Dataset dataset, final int dtype) {
	final int[] shape = dataset.getShapeRef();
	// elemental dtypes force item size 1; compound dtypes keep the source's item size
	final int isize = isDTypeElemental(dtype) ? 1 :dataset.getElementsPerItem();

	return ones(isize, shape, dtype);
}

/**
 * @param shape
 * @param dtype
 * @return a new dataset of given shape and type, filled with ones
 */
public static Dataset ones(final int[] shape, final int dtype) {
	return DatasetFactory.ones(shape, dtype);
}

/**
 * @param itemSize
 *            if equal to 1, then non-compound dataset is returned
 * @param shape
 * @param dtype
 * @return a new dataset of given item size, shape and type, filled with ones
 */
public static Dataset ones(final int itemSize, final int[] shape, final int dtype) {
	return DatasetFactory.ones(itemSize, shape, dtype);
}

/**
 * @param stop
 * @param dtype
 * @return a new dataset of given shape and type, filled with values determined by parameters
 */
public static Dataset arange(final double stop, final int dtype) {
	return arange(0, stop, 1, dtype);
}

/**
 * @param start
 * @param stop
 * @param step
 * @param dtype
 * @return a new 1D dataset of given type, filled with values determined by parameters
 */
public static Dataset arange(final double start, final double stop, final double step, final int dtype) {
	return DatasetFactory.createRange(start, stop, step, dtype);
}
/**
 * Calculate the number of steps from start towards stop in the given step size.
 * 
 * @param start
 * @param stop
 * @param step step size (may be negative for a descending range)
 * @return number of steps to take
 */
public static int calcSteps(final double start, final double stop, final double step) {
	// ceil((stop - start) / step) is correct for both step signs, as the
	// quotient is positive whenever the range direction matches the step;
	// the original branched on the sign of step but returned the same
	// expression in both branches.
	return (int) Math.ceil((stop - start) / step);
}
@Override
public boolean isComplex() {
	int type = getDtype();
	return type == COMPLEX64 || type == COMPLEX128;
}

@Override
public Dataset real() {
	// a non-complex dataset is its own real part
	return this;
}

@Override
public Dataset realView() {
	return getView();
}

@Override
public Dataset getSlice(final int[] start, final int[] stop, final int[] step) {
	return getSlice(new SliceND(shape, start, stop, step));
}

@Override
public Dataset getSlice(Slice... slice) {
	return getSlice(new SliceND(shape, slice));
}

@Override
public Dataset getSlice(IMonitor monitor, Slice... slice) {
	// monitor is ignored: slicing in-memory data is quick
	return getSlice(slice);
}

@Override
public Dataset getSlice(IMonitor monitor, SliceND slice) {
	return getSlice(slice);
}

@Override
public Dataset getSlice(IMonitor monitor, int[] start, int[] stop, int[] step) {
	return getSlice(start, stop, step);
}

/**
 * Get a slice of the dataset. The returned dataset is a copied selection of items
 * @param slice
 * @return The dataset of the sliced data
 */
@Override
public Dataset getSlice(final SliceND slice) {
	SliceIterator it = (SliceIterator) getSliceIterator(slice);
	AbstractDataset s = getSlice(it);
	// carry the (sliced) metadata over to the copy
	s.metadata = copyMetadata();
	s.sliceMetadata(true, slice);
	return s;
}

/**
 * Get a slice of the dataset. The returned dataset is a copied selection of items
 * 
 * @param iterator Slice iterator
 * @return The dataset of the sliced data
 */
abstract public AbstractDataset getSlice(final SliceIterator iterator);

@Override
public Dataset setSlice(final Object obj, final SliceND slice) {
	Dataset ds;
	if (obj instanceof Dataset) {
		ds = (Dataset) obj;
	} else if (!(obj instanceof IDataset)) {
		// plain Java object: promote to a (compound) double dataset
		ds = DatasetFactory.createFromObject(obj, isComplex() || getElementsPerItem() == 1 ? FLOAT64 : ARRAYFLOAT64);
	} else {
		ds = DatasetUtils.convertToDataset((ILazyDataset) obj);
	}

	return setSlicedView(getSliceView(slice), ds);
}

@Override
public Dataset setSlice(final Object obj, final int[] start, final int[] stop, final int[] step) {
	return setSlice(obj, new SliceND(shape, start, stop, step));
}

/**
 * Set a view of current dataset to given dataset with broadcasting
 * @param view
 * @param d
 * @return this dataset
 */
abstract Dataset setSlicedView(Dataset view, Dataset d);

@Override
public Dataset setSlice(Object obj, Slice... slice) {
	if (slice == null || slice.length == 0) {
		return setSlice(obj, new SliceND(shape)); // whole dataset
	}
	return setSlice(obj, new SliceND(shape, slice));
}
@Override
public boolean all() {
	return Comparisons.allTrue(this);
}

@Override
public BooleanDataset all(final int axis) {
	return Comparisons.allTrue(this, axis);
}

@Override
public boolean any() {
	return Comparisons.anyTrue(this);
}

@Override
public BooleanDataset any(final int axis) {
	return Comparisons.anyTrue(this, axis);
}

@Override
public Dataset ifloorDivide(final Object o) {
	// in-place divide followed by in-place floor
	return idivide(o).ifloor();
}

@Override
public double residual(final Object o) {
	return residual(o, null, false);
}

@Override
public double residual(final Object o, boolean ignoreNaNs) {
	return residual(o, null, ignoreNaNs);
}
// keys for values cached in the storedValues map
public static final String STORE_HASH = "hash";
protected static final String STORE_SHAPELESS_HASH = "shapelessHash";
public static final String STORE_MAX = "max";
public static final String STORE_MIN = "min";
protected static final String STORE_MAX_POS = "maxPos";
protected static final String STORE_MIN_POS = "minPos";
protected static final String STORE_STATS = "stats";
protected static final String STORE_SUM = "sum";
protected static final String STORE_MEAN = "mean";
protected static final String STORE_VAR = "var";
private static final String STORE_POS_MAX = "+max"; // maximum of strictly-positive values
private static final String STORE_POS_MIN = "+min"; // minimum of strictly-positive values
protected static final String STORE_COUNT = "count";
private static final String STORE_INDEX = "Index";
protected static final String STORE_BROADCAST = "Broadcast";

/**
 * Get value from store
 * 
 * @param key
 * @return value (or null when absent)
 */
public Object getStoredValue(String key) {
	if (storedValues == null) {
		return null;
	}

	return storedValues.get(key);
}

/**
 * Set value in store
 * <p>
 * This is a <b>private method</b>: do not use!
 * 
 * @param key
 * @param obj
 */
public void setStoredValue(String key, Object obj) {
	if (storedValues == null) {
		storedValues = new HashMap<String, Object>(); // created lazily
	}

	storedValues.put(key, obj);
}

protected static String storeName(boolean ignoreNaNs, String name) {
	return storeName(ignoreNaNs, false, name);
}

// prefix the store key to distinguish NaN-/infinity-ignoring variants
protected static String storeName(boolean ignoreNaNs, boolean ignoreInfs, String name) {
	return (ignoreInfs ? "inf" : "") + (ignoreNaNs ? "nan" : "") + name;
}

/**
 * Copy stored values from original to derived dataset
 * @param orig
 * @param derived
 * @param shapeChanged if true, drop shape-dependent cached entries
 */
protected static void copyStoredValues(IDataset orig, AbstractDataset derived, boolean shapeChanged) {
	if (orig instanceof AbstractDataset && ((AbstractDataset) orig).storedValues != null) {
		derived.storedValues = new HashMap<String, Object>(((AbstractDataset) orig).storedValues);
		if (shapeChanged) {
			filterStoredValues(derived.storedValues);
		}
	}
}

// remove shape-dependent cached entries: the hash and any axis-specific keys
private static void filterStoredValues(Map<String, Object> map) {
	map.remove(STORE_HASH);
	List<String> keys = new ArrayList<String>();
	for (String n : map.keySet()) {
		if (n.contains("-")) { // remove anything which is axis-specific
			keys.add(n);
		}
	}
	for (String n : keys) {
		map.remove(n);
	}
}
/**
 * Calculate minimum and maximum for a dataset
 * <p>
 * Also accumulates a shapeless hash of the data and the extrema of the
 * strictly-positive values; all results are placed in the stored-values map.
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 */
protected void calculateMaxMin(final boolean ignoreNaNs, final boolean ignoreInfs) {
	IndexIterator iter = getIterator();
	double amax = Double.NEGATIVE_INFINITY;
	double amin = Double.POSITIVE_INFINITY;
	double pmax = Double.MIN_VALUE; // smallest positive double, not most negative
	double pmin = Double.POSITIVE_INFINITY;
	double hash = 0;
	boolean hasNaNs = false;

	while (iter.hasNext()) {
		final double val = getElementDoubleAbs(iter.index);
		if (Double.isNaN(val)) {
			hash = (hash * 19) % Integer.MAX_VALUE; // NaN still contributes to the hash
			if (ignoreNaNs)
				continue;
			hasNaNs = true;
		} else if (Double.isInfinite(val)) {
			hash = (hash * 19) % Integer.MAX_VALUE;
			if (ignoreInfs)
				continue;
		} else {
			hash = (hash * 19 + val) % Integer.MAX_VALUE;
		}

		if (val > amax) {
			amax = val;
		}
		if (val < amin) {
			amin = val;
		}
		if (val > 0) {
			// track extrema of strictly-positive values separately
			if (val < pmin) {
				pmin = val;
			}
			if (val > pmax) {
				pmax = val;
			}
		}
	}

	// mix in dtype and item size so equal-valued datasets of different types differ
	int ihash = ((int) hash) * 19 + getDtype() * 17 + getElementsPerItem();
	setStoredValue(storeName(ignoreNaNs, ignoreInfs, STORE_SHAPELESS_HASH), ihash);
	// any unignored NaN poisons all the extrema
	storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX), hasNaNs ? Double.NaN : fromDoubleToNumber(amax));
	storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN), hasNaNs ? Double.NaN : fromDoubleToNumber(amin));
	storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MAX), hasNaNs ? Double.NaN : fromDoubleToNumber(pmax));
	storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MIN), hasNaNs ? Double.NaN : fromDoubleToNumber(pmin));
}
/**
 * Calculate summary statistics for a dataset
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 * @param name key under which to store the computed statistics
 */
protected void calculateSummaryStats(final boolean ignoreNaNs, final boolean ignoreInfs, final String name) {
	final IndexIterator iter = getIterator();
	final SummaryStatistics stats = new SummaryStatistics();

	// also compute hash and extrema on this pass when they are not cached yet
	if (storedValues == null || !storedValues.containsKey(STORE_HASH)) {
		boolean hasNaNs = false;
		double hash = 0;
		double pmax = Double.MIN_VALUE; // smallest positive double
		double pmin = Double.POSITIVE_INFINITY;

		while (iter.hasNext()) {
			final double val = getElementDoubleAbs(iter.index);
			if (Double.isNaN(val)) {
				hash = (hash * 19) % Integer.MAX_VALUE; // NaN still contributes to the hash
				if (ignoreNaNs)
					continue;
				hasNaNs = true;
			} else if (Double.isInfinite(val)) {
				hash = (hash * 19) % Integer.MAX_VALUE;
				if (ignoreInfs)
					continue;
			} else {
				hash = (hash * 19 + val) % Integer.MAX_VALUE;
			}

			if (val > 0) {
				// track extrema of strictly-positive values separately
				if (val < pmin) {
					pmin = val;
				}
				if (val > pmax) {
					pmax = val;
				}
			}
			stats.addValue(val);
		}

		// mix in dtype and item size as in calculateMaxMin
		int ihash = ((int) hash) * 19 + getDtype() * 17 + getElementsPerItem();
		setStoredValue(storeName(ignoreNaNs, ignoreInfs, STORE_SHAPELESS_HASH), ihash);
		// any unignored NaN poisons all the extrema
		storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX), hasNaNs ? Double.NaN : fromDoubleToNumber(stats.getMax()));
		storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN), hasNaNs ? Double.NaN : fromDoubleToNumber(stats.getMin()));
		storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MAX), hasNaNs ? Double.NaN : fromDoubleToNumber(pmax));
		storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MIN), hasNaNs ? Double.NaN : fromDoubleToNumber(pmin));
		storedValues.put(name, stats);
	} else {
		// extrema already cached: only accumulate the summary statistics
		while (iter.hasNext()) {
			final double val = getElementDoubleAbs(iter.index);
			if (ignoreNaNs && Double.isNaN(val)) {
				continue;
			}
			if (ignoreInfs && Double.isInfinite(val)) {
				continue;
			}

			stats.addValue(val);
		}

		storedValues.put(name, stats);
	}
}
/**
 * Calculate summary statistics for a dataset along an axis and cache the per-axis
 * count, max, min, sum, mean, variance and arg-max/arg-min datasets in the store.
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 * @param axis axis along which values are aggregated (assumed already checked/normalized)
 */
protected void calculateSummaryStats(final boolean ignoreNaNs, final boolean ignoreInfs, final int axis) {
    int rank = getRank();
    int[] oshape = getShape();
    int alen = oshape[axis];
    oshape[axis] = 1;
    // nshape = original shape with the aggregated axis removed
    int[] nshape = new int[rank - 1];
    for (int i = 0; i < axis; i++) {
        nshape[i] = oshape[i];
    }
    for (int i = axis + 1; i < rank; i++) {
        nshape[i - 1] = oshape[i];
    }
    final int dtype = getDtype();
    IntegerDataset count = new IntegerDataset(nshape);
    Dataset max = DatasetFactory.zeros(nshape, dtype);
    Dataset min = DatasetFactory.zeros(nshape, dtype);
    IntegerDataset maxIndex = new IntegerDataset(nshape);
    IntegerDataset minIndex = new IntegerDataset(nshape);
    // sum uses the widest compatible dtype to reduce overflow risk
    Dataset sum = DatasetFactory.zeros(nshape, getLargestDType(dtype));
    DoubleDataset mean = new DoubleDataset(nshape);
    DoubleDataset var = new DoubleDataset(nshape);
    IndexIterator qiter = max.getIterator(true);
    int[] qpos = qiter.getPos();
    int[] spos = oshape.clone();
    while (qiter.hasNext()) {
        // map the reduced-shape position qpos to a full-shape position spos
        // with the aggregated axis zeroed
        int i = 0;
        for (; i < axis; i++) {
            spos[i] = qpos[i];
        }
        spos[i++] = 0;
        for (; i < rank; i++) {
            spos[i] = qpos[i - 1];
        }
        final SummaryStatistics stats = new SummaryStatistics();
        double amax = Double.NEGATIVE_INFINITY;
        double amin = Double.POSITIVE_INFINITY;
        boolean hasNaNs = false;
        if (ignoreNaNs) {
            // skip NaNs (and optionally infinities) entirely
            for (int j = 0; j < alen; j++) {
                spos[axis] = j;
                final double val = getDouble(spos);
                if (Double.isNaN(val)) {
                    hasNaNs = true;
                    continue;
                } else if (ignoreInfs && Double.isInfinite(val)) {
                    continue;
                }
                if (val > amax) {
                    amax = val;
                }
                if (val < amin) {
                    amin = val;
                }
                stats.addValue(val);
            }
        } else {
            for (int j = 0; j < alen; j++) {
                spos[axis] = j;
                final double val = getDouble(spos);
                if (hasNaNs) {
                    // a NaN already poisoned the extrema; keep the count in step by
                    // adding a dummy value for each remaining non-NaN element
                    if (!Double.isNaN(val))
                        stats.addValue(0);
                    continue;
                }
                if (Double.isNaN(val)) {
                    amax = Double.NaN;
                    amin = Double.NaN;
                    hasNaNs = true;
                } else if (ignoreInfs && Double.isInfinite(val)) {
                    continue;
                } else {
                    if (val > amax) {
                        amax = val;
                    }
                    if (val < amin) {
                        amin = val;
                    }
                }
                stats.addValue(val);
            }
        }
        count.setAbs(qiter.index, (int) stats.getN());
        max.setObjectAbs(qiter.index, amax);
        min.setObjectAbs(qiter.index, amin);
        boolean fmax = false;
        boolean fmin = false;
        if (hasNaNs) {
            if (ignoreNaNs) {
                // second pass to locate the first occurrence of the extrema, skipping NaNs
                for (int j = 0; j < alen; j++) {
                    spos[axis] = j;
                    final double val = getDouble(spos);
                    if (Double.isNaN(val))
                        continue;
                    if (!fmax && val == amax) {
                        maxIndex.setAbs(qiter.index, j);
                        fmax = true;
                        if (fmin)
                            break;
                    }
                    if (!fmin && val == amin) {
                        minIndex.setAbs(qiter.index, j);
                        fmin = true;
                        if (fmax)
                            break;
                    }
                }
            } else {
                // NaNs dominate: report the position of the first NaN for both indices
                for (int j = 0; j < alen; j++) {
                    spos[axis] = j;
                    final double val = getDouble(spos);
                    if (Double.isNaN(val)) {
                        maxIndex.setAbs(qiter.index, j);
                        minIndex.setAbs(qiter.index, j);
                        break;
                    }
                }
            }
        } else {
            // no NaNs: locate first occurrence of each extremum
            for (int j = 0; j < alen; j++) {
                spos[axis] = j;
                final double val = getDouble(spos);
                if (!fmax && val == amax) {
                    maxIndex.setAbs(qiter.index, j);
                    fmax = true;
                    if (fmin)
                        break;
                }
                if (!fmin && val == amin) {
                    minIndex.setAbs(qiter.index, j);
                    fmin = true;
                    if (fmax)
                        break;
                }
            }
        }
        sum.setObjectAbs(qiter.index, stats.getSum());
        mean.setAbs(qiter.index, stats.getMean());
        var.setAbs(qiter.index, stats.getVariance());
    }
    setStoredValue(storeName(ignoreNaNs, ignoreInfs, STORE_COUNT + "-" + axis), count);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX + "-" + axis), max);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN + "-" + axis), min);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_SUM + "-" + axis), sum);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MEAN + "-" + axis), mean);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_VAR + "-" + axis), var);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX + STORE_INDEX + "-" + axis), maxIndex);
    storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN + STORE_INDEX + "-" + axis), minIndex);
}
/**
 * Convert a double to a boxed {@link Number} of this dataset's element type.
 * @param x value to convert
 * @return number of the concrete subclass's native type holding the given double
 */
abstract protected Number fromDoubleToNumber(double x);
/**
 * Box a double into the widest native primitive suitable for the given dtype.
 * Returns the biggest native primitive if integer (should test for 64bit?).
 * @param x value to box
 * @param dtype dataset type constant
 * @return boxed number, or null for an unrecognized dtype
 */
private static Number fromDoubleToBiggestNumber(double x, int dtype) {
    if (dtype == BOOL || dtype == INT8 || dtype == INT16 || dtype == INT32) {
        // cast via long first so the truncation matches a two-step narrowing
        return Integer.valueOf((int) (long) x);
    }
    if (dtype == INT64) {
        return Long.valueOf((long) x);
    }
    if (dtype == FLOAT32) {
        return Float.valueOf((float) x);
    }
    if (dtype == FLOAT64) {
        return Double.valueOf(x);
    }
    return null;
}
/**
 * Get (and lazily compute) the cached whole-dataset summary statistics.
 * @param ignoreNaNs if true, ignore NaNs (forced false for non-floating-point datasets)
 * @return cached summary statistics
 */
private SummaryStatistics getStatistics(boolean ignoreNaNs) {
    boolean ignoreInfs = false; // TODO
    if (!hasFloatingPointElements()) {
        ignoreNaNs = false; // integer data cannot contain NaNs
    }
    final String key = storeName(ignoreNaNs, ignoreInfs, STORE_STATS);
    SummaryStatistics stats = (SummaryStatistics) getStoredValue(key);
    if (stats != null) {
        return stats;
    }
    calculateSummaryStats(ignoreNaNs, ignoreInfs, key);
    return (SummaryStatistics) getStoredValue(key);
}
// Default variants: do not ignore invalid (NaN/infinite) values.
@Override
public int[] maxPos() {
    return maxPos(false);
}
@Override
public int[] minPos() {
    return minPos(false);
}
/**
 * Get (and lazily compute) a hash of this dataset's values and shape.
 * The shapeless hash is combined with each shape dimension and cached.
 * @return hash code incorporating values, dtype, item size and shape
 */
private int getHash() {
    Object value = getStoredValue(STORE_HASH);
    if (value == null) {
        value = getStoredValue(STORE_SHAPELESS_HASH);
        if (value == null) {
            // computing max/min also populates the shapeless hash as a side effect
            calculateMaxMin(false, false);
            value = getStoredValue(STORE_SHAPELESS_HASH);
        }
        int ihash = (Integer) value;
        int rank = shape.length;
        for (int i = 0; i < rank; i++) {
            ihash = ihash * 17 + shape[i];
        }
        storedValues.put(STORE_HASH, ihash);
        return ihash;
    }
    return (Integer) value;
}
/**
 * Get (and lazily compute) a cached max/min-type value for the given store key.
 * @param ignoreNaNs if true, ignore NaNs (forced false for non-floating-point data)
 * @param ignoreInfs if true, ignore infinities (forced false for non-floating-point data)
 * @param key base store key (e.g. max, min, positive max/min)
 * @return cached value for the qualified key
 */
protected Object getMaxMin(boolean ignoreNaNs, boolean ignoreInfs, String key) {
    if (!hasFloatingPointElements()) {
        // integer datasets cannot hold NaNs or infinities
        ignoreNaNs = false;
        ignoreInfs = false;
    }
    final String qualified = storeName(ignoreNaNs, ignoreInfs, key);
    Object value = getStoredValue(qualified);
    if (value != null) {
        return value;
    }
    calculateMaxMin(ignoreNaNs, ignoreInfs);
    return getStoredValue(qualified);
}
/**
 * Get (and lazily compute) a cached per-axis statistic dataset.
 * @param ignoreNaNs if true, ignore NaNs (forced false for non-floating-point data)
 * @param axis axis of the requested statistic
 * @param stat base store key including the axis suffix
 * @return cached statistic object for the qualified key
 */
private Object getStatistics(boolean ignoreNaNs, int axis, String stat) {
    if (!hasFloatingPointElements())
        ignoreNaNs = false;
    boolean ignoreInfs = false; // TODO
    final String key = storeName(ignoreNaNs, ignoreInfs, stat);
    axis = checkAxis(axis);
    Object cached = getStoredValue(key);
    if (cached != null) {
        return cached;
    }
    calculateSummaryStats(ignoreNaNs, ignoreInfs, axis);
    return getStoredValue(key);
}
// Max/min accessors: the varargs flags are (ignoreNaNs[, ignoreInfs]); if only one
// flag is given it is used for both. Positive variants consider values > 0 only.
@Override
public Number max(boolean... ignoreInvalids) {
    boolean igNan = ignoreInvalids != null && ignoreInvalids.length > 0 ? ignoreInvalids[0] : false;
    boolean igInf = ignoreInvalids != null && ignoreInvalids.length > 1 ? ignoreInvalids[1] : igNan;
    return (Number) getMaxMin(igNan, igInf, STORE_MAX);
}
@Override
public Number positiveMax(boolean ignoreInvalids) {
    // single flag applies to both NaNs and infinities
    return (Number) getMaxMin(ignoreInvalids, ignoreInvalids, STORE_POS_MAX);
}
@Override
public Number positiveMax(boolean ignoreNaNs, boolean ignoreInfs) {
    return (Number) getMaxMin(ignoreNaNs, ignoreInfs, STORE_POS_MAX);
}
@Override
public Dataset max(int axis) {
    return max(false, axis);
}
@Override
public Dataset max(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_MAX + "-" + axis);
}
@Override
public Number min(boolean... ignoreInvalids) {
    boolean igNan = ignoreInvalids != null && ignoreInvalids.length > 0 ? ignoreInvalids[0] : false;
    boolean igInf = ignoreInvalids != null && ignoreInvalids.length > 1 ? ignoreInvalids[1] : igNan;
    return (Number) getMaxMin(igNan, igInf, STORE_MIN);
}
@Override
public Number positiveMin(boolean ignoreInvalids) {
    return (Number) getMaxMin(ignoreInvalids, ignoreInvalids, STORE_POS_MIN);
}
@Override
public Number positiveMin(boolean ignoreNaNs, boolean ignoreInfs) {
    return (Number) getMaxMin(ignoreNaNs, ignoreInfs, STORE_POS_MIN);
}
@Override
public Dataset min(int axis) {
    return min(false, axis);
}
@Override
public Dataset min(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_MIN + "-" + axis);
}
// Arg-max/arg-min accessors: scalar variants flatten the N-D extremum position to a
// single 1-D index; axis variants return the cached per-axis index datasets.
@Override
public int argMax() {
    return argMax(false);
}
@Override
public int argMax(boolean ignoreInvalids) {
    return getFlat1DIndex(maxPos(ignoreInvalids));
}
@Override
public IntegerDataset argMax(int axis) {
    return argMax(false, axis);
}
@Override
public IntegerDataset argMax(boolean ignoreNaNs, int axis) {
    return (IntegerDataset) getStatistics(ignoreNaNs, axis, STORE_MAX + STORE_INDEX + "-" + axis);
}
@Override
public int argMin() {
    return argMin(false);
}
@Override
public int argMin(boolean ignoreInvalids) {
    return getFlat1DIndex(minPos(ignoreInvalids));
}
@Override
public IntegerDataset argMin(int axis) {
    return argMin(false, axis);
}
@Override
public IntegerDataset argMin(boolean ignoreNaNs, int axis) {
    return (IntegerDataset) getStatistics(ignoreNaNs, axis, STORE_MIN + STORE_INDEX + "-" + axis);
}
// Peak-to-peak (max - min), element count and sum accessors; all delegate to the
// cached SummaryStatistics or per-axis statistic datasets.
@Override
public Number peakToPeak() {
    return fromDoubleToNumber(max().doubleValue() - min().doubleValue());
}
@Override
public Dataset peakToPeak(int axis) {
    return Maths.subtract(max(axis), min(axis));
}
@Override
public long count() {
    return count(false);
}
@Override
public long count(boolean ignoreNaNs) {
    return getStatistics(ignoreNaNs).getN();
}
@Override
public Dataset count(int axis) {
    return count(false, axis);
}
@Override
public Dataset count(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_COUNT + "-" + axis);
}
@Override
public Object sum() {
    return sum(false);
}
@Override
public Object sum(boolean ignoreNaNs) {
    return getStatistics(ignoreNaNs).getSum();
}
@Override
public Dataset sum(int axis) {
    return sum(false, axis);
}
@Override
public Dataset sum(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_SUM + "-" + axis);
}
// Typed sum/product accessors: results are converted/cast to the requested dtype;
// products are delegated to the Stats helper class.
@Override
public Object typedSum() {
    return typedSum(getDtype());
}
@Override
public Object typedSum(int dtype) {
    // NaNs are not ignored here (getStatistics(false))
    return fromDoubleToBiggestNumber(getStatistics(false).getSum(), dtype);
}
@Override
public Dataset typedSum(int dtype, int axis) {
    return DatasetUtils.cast(sum(axis), dtype);
}
@Override
public Object product() {
    return Stats.product(this);
}
@Override
public Dataset product(int axis) {
    return Stats.product(this, axis);
}
@Override
public Object typedProduct(int dtype) {
    return Stats.typedProduct(this, dtype);
}
@Override
public Dataset typedProduct(int dtype, int axis) {
    return Stats.typedProduct(this, dtype, axis);
}
// Mean accessors: varargs flag is an optional ignore-NaNs switch (defaults to false).
@Override
public Object mean(boolean... ignoreNaNs) {
    boolean ig = ignoreNaNs != null && ignoreNaNs.length > 0 ? ignoreNaNs[0] : false;
    return getStatistics(ig).getMean();
}
@Override
public Dataset mean(int axis) {
    return mean(false, axis);
}
@Override
public Dataset mean(boolean ignoreNaNs, int axis) {
    return (Dataset) getStatistics(ignoreNaNs, axis, STORE_MEAN + "-" + axis);
}
// Variance accessors. By default the sample (bias-corrected) variance is returned.
@Override
public Number variance() {
    return variance(false);
}
/**
 * @param isDatasetWholePopulation if true, return the population (uncorrected) variance
 * @return variance of the dataset
 */
@Override
public Number variance(boolean isDatasetWholePopulation) {
    SummaryStatistics stats = getStatistics(false);
    if (isDatasetWholePopulation) {
        // temporarily swap in an uncorrected Variance implementation, then restore it;
        // NOTE(review): this mutates the cached stats object, so a concurrent caller
        // could briefly observe the population variance implementation
        StorelessUnivariateStatistic oldVar = stats.getVarianceImpl();
        stats.setVarianceImpl(new Variance(false));
        Number var = stats.getVariance();
        stats.setVarianceImpl(oldVar);
        return var;
    }
    return stats.getVariance();
}
@Override
public Dataset variance(int axis) {
    return (Dataset) getStatistics(false, axis, STORE_VAR + "-" + axis);
}
// Standard deviation and root-mean-square accessors, derived from the cached variance
// and mean.
@Override
public Number stdDeviation() {
    return Math.sqrt(variance().doubleValue());
}
@Override
public Number stdDeviation(boolean isDatasetWholePopulation) {
    return Math.sqrt(variance(isDatasetWholePopulation).doubleValue());
}
@Override
public Dataset stdDeviation(int axis) {
    final Dataset v = (Dataset) getStatistics(false, axis, STORE_VAR + "-" + axis);
    return Maths.sqrt(v);
}
@Override
public Number rootMeanSquare() {
    // rms = sqrt(var + mean^2)
    final SummaryStatistics stats = getStatistics(false);
    final double mean = stats.getMean();
    return Math.sqrt(stats.getVariance() + mean * mean);
}
@Override
public Dataset rootMeanSquare(int axis) {
    Dataset v = (Dataset) getStatistics(false, axis, STORE_VAR + "-" + axis);
    Dataset m = (Dataset) getStatistics(false, axis, STORE_MEAN + "-" + axis);
    Dataset result = Maths.power(m, 2);
    return Maths.sqrt(result.iadd(v));
}
/**
 * Set item from compatible dataset in a direct and speedy way. Remember to setDirty afterwards.
 * No bounds or compatibility checks are performed by implementations.
 *
 * @param dindex destination (absolute) index
 * @param sindex source (absolute) index
 * @param src
 *            is the source data buffer
 */
protected abstract void setItemDirect(final int dindex, final int sindex, final Object src);
// True when an error (uncertainty) dataset has been attached via metadata.
@Override
public boolean hasErrors() {
    return super.getError() != null;
}
/**
 * Get the error dataset in its stored shape, materializing lazy errors and caching
 * broadcast stride information for fast per-element access.
 * @return error dataset or null if no errors are set
 */
protected Dataset getInternalError() {
    ILazyDataset led = super.getError();
    if (led == null)
        return null;
    Dataset ed = null;
    if (led instanceof IDataset) {
        ed = DatasetUtils.convertToDataset(led);
        if (!(led instanceof Dataset)) {
            setError(ed); // set back
        }
    } else {
        // lazy errors: slice fully into memory and store the concrete dataset back
        ed = DatasetUtils.convertToDataset(led.getSlice());
        setError(ed);
    }
    // check for broadcast strides
    Object bs = getStoredValue(STORE_BROADCAST);
    if (bs == null) {
        bs = new BroadcastStride(ed, shape);
        setStoredValue(STORE_BROADCAST, bs);
    }
    return ed;
}
/**
 * Helper that precomputes strides so a (broadcast-compatible) error dataset can be
 * indexed as if it had this dataset's shape. Negative indices count from the end,
 * as elsewhere in the dataset API.
 */
class BroadcastStride {
    private int[] bStride; // broadcast-adjusted strides
    private int[] nShape;  // target (broadcast-to) shape
    private int bOffset;   // offset into the backing buffer

    /**
     * @param d dataset to broadcast (its shape is padded in place to the new rank)
     * @param newShape shape to broadcast to
     */
    public BroadcastStride(Dataset d, final int[] newShape) {
        d.setShape(BroadcastIterator.padShape(d.getShapeRef(), newShape.length - d.getRank())); // set to padded shape
        bStride = createBroadcastStrides(d, newShape);
        nShape = newShape.clone();
        bOffset = d.getOffset();
    }

    /**
     * @param i index in first dimension (may be negative)
     * @return absolute 1-D buffer index
     */
    public int get1DIndex(int i) {
        if (i < 0) {
            i += nShape[0];
        }
        if (i < 0 || i >= nShape[0]) {
            throwAIOOBException(i, nShape[0], 0);
        }
        return i * bStride[0] + bOffset;
    }

    /**
     * @param i index in first dimension (may be negative)
     * @param j index in second dimension (may be negative)
     * @return absolute 1-D buffer index
     */
    protected int get1DIndex(int i, int j) {
        if (i < 0) {
            i += nShape[0];
        }
        if (i < 0 || i >= nShape[0]) {
            throwAIOOBException(i, nShape[0], 0);
        }
        if (j < 0) {
            j += nShape[1];
        }
        if (j < 0 || j >= nShape[1]) {
            // bug fix: report the offending second-dimension index j (was i)
            throwAIOOBException(j, nShape[1], 1);
        }
        return i * bStride[0] + j * bStride[1] + bOffset;
    }

    /**
     * @param n position (may use negative indices)
     * @return absolute 1-D buffer index
     */
    protected int get1DIndex(int... n) {
        return get1DIndexFromStrides(nShape, bStride, bOffset, n);
    }
}
/**
 * Get the error dataset, broadcast up to this dataset's shape when necessary.
 * @return errors or null if none are set
 */
@Override
public Dataset getError() {
    final Dataset err = getInternalError();
    if (err == null) {
        return null;
    }
    if (err.getSize() == getSize()) {
        return err;
    }
    // smaller (broadcastable) errors: expand into a full-shape double dataset
    final DoubleDataset expanded = new DoubleDataset(shape);
    expanded.setSlice(err);
    return expanded;
}
// Per-element error accessors: return 0 when no errors are set, otherwise look up the
// broadcast-strided value cached by getInternalError().
@Override
public double getError(final int i) {
    Dataset ed = getInternalError();
    if (ed == null)
        return 0;
    BroadcastStride bs = (BroadcastStride) getStoredValue(STORE_BROADCAST);
    return ed.getElementDoubleAbs(bs.get1DIndex(i));
}
@Override
public double getError(final int i, final int j) {
    Dataset ed = getInternalError();
    if (ed == null)
        return 0;
    BroadcastStride bs = (BroadcastStride) getStoredValue(STORE_BROADCAST);
    return ed.getElementDoubleAbs(bs.get1DIndex(i, j));
}
@Override
public double getError(int... pos) {
    Dataset ed = getInternalError();
    if (ed == null)
        return 0;
    BroadcastStride bs = (BroadcastStride) getStoredValue(STORE_BROADCAST);
    return ed.getElementDoubleAbs(bs.get1DIndex(pos));
}
// Per-element error-array accessors: single-element wrappers around getError(...),
// returning null (not an empty array) when no errors are set.
@Override
public double[] getErrorArray(final int i) {
    Dataset ed = getInternalError();
    if (ed == null)
        return null;
    return new double[] {getError(i)};
}
@Override
public double[] getErrorArray(final int i, final int j) {
    Dataset ed = getInternalError();
    if (ed == null)
        return null;
    return new double[] {getError(i, j)};
}
@Override
public double[] getErrorArray(int... pos) {
    Dataset ed = getInternalError();
    if (ed == null)
        return null;
    return new double[] {getError(pos)};
}
/**
 * Get the squared-error buffer with broadcast stride information cached.
 * @return squared error dataset or null if no errors are set
 */
protected Dataset getInternalSquaredError() {
    Dataset sed = getErrorBuffer();
    if (sed == null) {
        // guard added to mirror getInternalError(): without it, a dataset with no
        // errors set would hit an NPE in the BroadcastStride constructor below
        return null;
    }
    // check for broadcast strides
    Object bs = getStoredValue(STORE_BROADCAST);
    if (bs == null) {
        bs = new BroadcastStride(sed, shape);
        setStoredValue(STORE_BROADCAST, bs);
    }
    return sed;
}
/**
 * Get the buffer of squared errors, converting foreign error metadata to the
 * internal implementation (materializing lazy errors) on first access.
 * @return squared error dataset or null if no error metadata is present
 */
@Override
public Dataset getErrorBuffer() {
    ErrorMetadata emd = getErrorMetadata();
    if (emd == null)
        return null;
    if (!(emd instanceof ErrorMetadataImpl)) {
        // foreign metadata implementation: re-wrap its errors in our own impl
        ILazyDataset led = emd.getError();
        Dataset ed = null;
        if (led instanceof IDataset) {
            ed = (Dataset) led;
        } else {
            ed = DatasetUtils.convertToDataset(led.getSlice());
        }
        emd = new ErrorMetadataImpl();
        setMetadata(emd);
        ((ErrorMetadataImpl) emd).setError(ed);
    }
    return ((ErrorMetadataImpl) emd).getSquaredError();
}
/**
 * Set the error dataset and invalidate any cached broadcast stride information
 * computed for the previous errors.
 */
@Override
public void setError(Serializable errors) {
    super.setError(errors);
    // drop stale broadcast strides so they are recomputed for the new errors
    if (getStoredValue(STORE_BROADCAST) != null) {
        setStoredValue(STORE_BROADCAST, null);
    }
}
/**
 * Set a copy of the buffer that backs the (squared) error data.
 * Also invalidates any cached broadcast stride information.
 * @param buffer can be null, anything that can be used to create a DoubleDataset or CompoundDoubleDataset;
 *        null clears all error metadata
 */
@Override
public void setErrorBuffer(Serializable buffer) {
    Object bs = getStoredValue(STORE_BROADCAST);
    if (bs != null) {
        // drop stale broadcast strides computed for the previous errors
        setStoredValue(STORE_BROADCAST, null);
    }
    if (buffer == null) {
        clearMetadata(ErrorMetadata.class);
        return;
    }
    IDataset d = (IDataset) createFromSerializable(buffer, false);
    ErrorMetadata emd = getErrorMetadata();
    if (!(emd instanceof ErrorMetadataImpl)) {
        emd = new ErrorMetadataImpl();
        setMetadata(emd);
    }
    ((ErrorMetadataImpl) emd).setSquaredError(d);
}
} |
package org.eclipse.dawnsci.analysis.dataset.impl;
import org.eclipse.dawnsci.analysis.api.roi.IRectangularROI;
/**
 * Summed-area (integral image) table over a 2D dataset. Each entry of {@code sum}
 * holds the sum of all pixels above and to the left of it (inclusive), allowing
 * constant-time box sums, means, variances and Fano factors.
 */
public class SummedAreaTable {
    private int[] shape; // We cache shape for speed reasons (it is cloned in the dataset on getShape())
    /**
     * Must be declared as Dataset because IDataset can cause the unwanted autoboxing problem.
     */
    private Dataset image;
    private double[] sum, sum2; // Use double[] because faster than Dataset; sum2 holds summed squares
    /**
     * Calls SummedAreaTable(image, false).
     * @param image 2D dataset to summarize
     * @throws Exception if the image is not 2D or has a zero-length side
     */
    public SummedAreaTable(Dataset image) throws Exception {
        this(image, false);
    }
    /**
     * Constructs the summed table.
     * @param image 2D dataset to summarize
     * @param willRequireVariance set to true if you know that you need fano or variance
     *        (precomputes the squared-sum table up front)
     * @throws Exception if the image is not 2D or has a zero-length side
     */
    public SummedAreaTable(Dataset image, boolean willRequireVariance) throws Exception {
        this.image = image;
        this.shape = image.getShape();
        if (image.getRank()!=2) throw new Exception("You may only get sum table of 2D data!");
        if (shape[0]<1) throw new Exception("You may only get sum table with image of side > 0");
        if (shape[1]<1) throw new Exception("You may only get sum table with image of side > 0");
        createSummedTable(image, willRequireVariance);
    }
    /**
     * We mess about here with creating the sums in one pass for speed reasons.
     * The test SummedAreaTableTest should check if logic remains correct.
     * Idempotent: already-built tables are not rebuilt.
     * @param image 2D dataset
     * @param requireSum2 if true, also build the table of summed squares
     * @throws Exception if the image is not 2D
     */
    private void createSummedTable(Dataset image, boolean requireSum2) throws Exception {
        if (image.getRank()!=2) throw new Exception("You may only compute the summed image table of 2D data!");
        if (sum!=null && sum2!=null) return;
        if (sum!=null && !requireSum2) return;
        //Create integral
        boolean requireSum = false;
        if (sum == null) {
            sum = new double[shape[0]*shape[1]];
            requireSum = true;
        }
        if (requireSum2 && sum2==null) {
            sum2 = new double[shape[0]*shape[1]];
        }
        // Create a position iterator
        final int[] pos = new int[]{0,0};
        // single row-major pass: each table entry only depends on already-filled entries
        for(int i=0;i<sum.length;++i) {
            final double value = image.getElementDoubleAbs(i);
            fillNDPositionFromShape(i, shape, pos);
            if (requireSum) fill(i, value, sum, pos, shape);
            if (requireSum2) fill(i, Math.pow(value, 2d), sum2, pos, shape);
        }
    }
    /**
     * Fill one entry of a summed-area table using the standard recurrence.
     * private static final so that compiler will inline it
     * @param index 1-D index of the entry being filled
     * @param value pixel value (or its square) at this position
     * @param sum table being filled
     * @param pos 2-D position corresponding to index
     * @param shape image shape
     */
    private static final void fill(int index, double value, double[] sum, int[] pos, int[] shape) {
        int x = pos[0];
        int y = pos[1];
        // I(x,y) = i(x,y) + I(x-1,y) + I(x,y-1) - I(x-1,y-1)
        //Calculate coefficients
        double sxm = (x > 0) ? sum[get1DIndexFast(x-1,y,shape)] : 0;
        double sym = (y > 0) ? sum[get1DIndexFast(x,y-1,shape)] : 0;
        double sxym = (x > 0 && y > 0) ? sum[get1DIndexFast(x-1,y-1,shape)] : 0;
        double val = value + sxm + sym - sxym;
        sum[index] = val; // Fast
    }
    /**
     * Creates a fano image where each pixel is the fano factor
     * for a given box surrounding it.
     *
     * This operation has improved speed because it uses the summed area table
     * to compute a fast mean and variance for a given box.
     *
     * @param box box dimensions (both must be odd so the box centres on the pixel)
     * @return fano factor image using box passed in.
     * @throws Exception if either box dimension is even
     */
    public Dataset getFanoImage(int... box) throws Exception {
        if (box[0] % 2 == 0) throw new Exception("Box first dim is not odd!");
        if (box[1] % 2 == 0) throw new Exception("Box second dim is not odd!");
        // Compute some things to save FPOs
        // NOTE(review): n is the nominal box size; boxes clipped at the image border
        // still divide by the full n — confirm this is the intended edge behaviour
        int n = box[0]*box[1]; // Save a FPO inside loop.
        final double[] fano = new double[shape[0]*shape[1]];
        int r1 = (int)Math.floor(box[0]/2d); // for instance 3->1, 5->2, 7->3
        int r2 = (int)Math.floor(box[1]/2d); // for instance 3->1, 5->2, 7->3
        int[] radii = new int[]{r1, r2};
        if (sum2==null) createSummedTable(image, true);
        int[] point = new int[]{0,0};
        int[] coords = new int[]{0,0,0,0};
        for (int i = 0; i < fano.length; i++) {
            // Point from the iterator
            fillNDPositionFromShape(i, shape, point);
            // Save FPO by calculating coords once per pixel and
            // passing to getBoxVarianceInternal and getBoxMeanInternal
            fillCoordsInternal(point, shape, radii, coords);
            // Call fano (variance/mean)
            fano[i] = getBoxFanoFactorInternal(coords, n);
        }
        return new DoubleDataset(fano, shape);
    }
    /**
     * Given a point, this will return the sum of a box around it.
     * The box should really be an odd number such that the point is in the center
     * @param box rectangular region of interest
     * @return the sum of a box around point of shape box
     * @throws Exception if either box length is even
     */
    public double getBoxSum(IRectangularROI box) throws Exception {
        if (box.getIntLength(0) % 2 == 0) throw new Exception("Box first dim is not odd!");
        if (box.getIntLength(1) % 2 == 0) throw new Exception("Box second dim is not odd!");
        return getBoxSumInternal(sum, createCoords(box), shape);
    }
    /**
     * Mean of the pixels inside the given ROI.
     * @param box rectangular region of interest
     * @return mean from the summed area table
     * @throws Exception if either box length is even
     */
    public double getBoxMean(IRectangularROI box) throws Exception {
        if (box.getIntLength(0) % 2 == 0) throw new Exception("Box first dim is not odd!");
        if (box.getIntLength(1) % 2 == 0) throw new Exception("Box second dim is not odd!");
        int[] coords = createCoords(box);
        int[] bx = getBox(coords);
        return getBoxSumInternal(sum, coords, shape) / (bx[0]*bx[1]);
    }
    /**
     * Width/height of an inclusive coordinate box.
     * private static final so that compiler will inline it
     * @param coords x1,y1,x2,y2 (inclusive)
     * @return box {width, height}
     */
    private static final int[] getBox(int... coords) {
        int minx = coords[0];
        int miny = coords[1];
        int maxx = coords[2];
        int maxy = coords[3];
        int w = maxx-minx+1;
        int h = maxy-miny+1;
        return new int[]{w,h};
    }
    /**
     * Sum of an odd-sized box centred on the given point (clipped at the image border).
     * @param point centre {x, y}
     * @param box box dimensions (both must be odd)
     * @return sum of box
     * @throws Exception if either box dimension is even
     */
    public double getBoxSum(int[] point, int... box) throws Exception {
        if (box[0] % 2 == 0) throw new Exception("Box first dim is not odd!");
        if (box[1] % 2 == 0) throw new Exception("Box second dim is not odd!");
        return getBoxSumInternal(sum, createCoords(point, box), shape);
    }
    /**
     * Mean of an odd-sized box centred on the given point (clipped at the image border).
     * @param point centre {x, y}
     * @param box box dimensions (both must be odd)
     * @return mean from the summed area table
     * @throws Exception if either box dimension is even
     */
    public double getBoxMean(int[] point, int... box) throws Exception {
        if (box[0] % 2 == 0) throw new Exception("Box first dim is not odd!");
        if (box[1] % 2 == 0) throw new Exception("Box second dim is not odd!");
        int[] coords = createCoords(point, box);
        return getBoxMeanInternal(coords, box[0]*box[1]);
    }
    // mean = box sum divided by nominal pixel count n
    private double getBoxMeanInternal(int[] coords, int n) {
        return getBoxSumInternal(sum, coords, shape) / n;
    }
    /**
     * Constant-time region sum via the four-corner lookup D+A-B-C.
     * private static final so that compiler will inline it
     *
     * @param coords Coordinates of box: x1,y1,x2,y2 (inclusive)
     * @return the sum of a region
     */
    private static final double getBoxSumInternal(double[] sum, int[] coords, int[] shape) {
        int minx = coords[0];
        int miny = coords[1];
        int maxx = coords[2];
        int maxy = coords[3];
        double A = (minx > 0 && miny > 0) ? sum[get1DIndexFast(minx-1, miny-1, shape)] : 0d;
        double B = (miny > 0) ? sum[get1DIndexFast(maxx, miny-1, shape)] : 0d;
        double C = (minx > 0) ? sum[get1DIndexFast(minx-1, maxy, shape)] : 0d;
        double D = sum[get1DIndexFast(maxx, maxy, shape)];
        return (D+A-B-C);
    }
    /**
     * Row-major 2D-to-1D index.
     * private static final so that compiler will inline it
     * @param i row
     * @param j column
     * @param shape image shape
     * @return index
     */
    private final static int get1DIndexFast(int i, int j, int[]shape) {
        return i*shape[1] + j;
    }
    // inverse of get1DIndexFast: 1-D index to row-major {row, column}
    private final static void fillNDPositionFromShape(int n, int[] shape, int[] pos) {
        pos[1] = n % shape[1];
        n /= shape[1];
        pos[0] = n;
    }
    /**
     * Get the variance for a given box.
     *
     * (1/n)(S2 - S1^2/n)
     *
     * Where:
     * S1 is sum of box ( D+A-B-C of sum )
     * S2 is sum^2 of box ( D+A-B-C of sum )
     * n is number of pixels box covers
     *
     * @param box rectangular region of interest
     * @return variance
     * @throws Exception if either box length is even
     */
    public double getBoxVariance(IRectangularROI box) throws Exception {
        if (box.getIntLength(0) % 2 == 0) throw new Exception("Box first dim is not odd!");
        if (box.getIntLength(1) % 2 == 0) throw new Exception("Box second dim is not odd!");
        if (sum2==null) createSummedTable(image, true);
        int[] coords = createCoords(box);
        return getBoxVariance(getBox(coords), coords);
    }
    /**
     * Get the variance for a given box.
     *
     * (1/n)(S2 - S1^2/n)
     *
     * Where:
     * S1 is sum of box ( D+A-B-C of sum )
     * S2 is sum^2 of box ( D+A-B-C of sum2 )
     * n is number of pixels box covers
     * @throws Exception
     *
     **/
    public double getBoxVariance(int[] point, int... box) throws Exception {
        if (sum2==null) createSummedTable(image, true);
        int [] coords = createCoords(point, box);
        return getBoxVarianceInternal(coords, box[0]*box[1]);
    }
    // population variance from the two tables: (1/n)(S2 - S1^2/n)
    private double getBoxVarianceInternal(int[] coords, int n) {
        double s1 = getBoxSumInternal(sum, coords, shape);
        double s2 = getBoxSumInternal(sum2, coords, shape);
        return (1d/n)*(s2 - (Math.pow(s1, 2d)/n));
    }
    /**
     * Get the Fano factor (variance/mean) for a given box.
     *
     * (1/n)(S2 - S1^2/n)
     *
     * Where:
     * S1 is sum of box ( D+A-B-C of sum )
     * S2 is sum^2 of box ( D+A-B-C of sum )
     * n is number of pixels box covers
     *
     * @param box rectangular region of interest
     * @return Fano factor
     * @throws Exception if either box length is even
     */
    public double getBoxFanoFactor(IRectangularROI box) throws Exception {
        if (box.getIntLength(0) % 2 == 0) throw new Exception("Box first dim is not odd!");
        if (box.getIntLength(1) % 2 == 0) throw new Exception("Box second dim is not odd!");
        if (sum2==null) createSummedTable(image, true);
        int[] coords = createCoords(box);
        return getBoxFanoFactor(getBox(coords), coords);
    }
    /**
     * Get the Fano factor (variance/mean) for a given box.
     *
     * (1/n)(S2 - S1^2/n)
     *
     * Where:
     * S1 is sum of box ( D+A-B-C of sum )
     * S2 is sum^2 of box ( D+A-B-C of sum2 )
     * n is number of pixels box covers
     * @throws Exception
     *
     **/
    public double getBoxFanoFactor(int[] point, int... box) throws Exception {
        if (sum2==null) createSummedTable(image, true);
        int [] coords = createCoords(point, box);
        return getBoxFanoFactorInternal(coords, box[0]*box[1]);
    }
    // Fano factor = variance/mean; NaN and infinite results are mapped to 0
    private double getBoxFanoFactorInternal(int[] coords, int n) {
        double variance = getBoxVarianceInternal(coords, n);
        double mean = getBoxMeanInternal(coords, n);
        double fano = variance/mean;
        if (Double.isInfinite(fano)) fano = 0d;
        if (Double.isNaN(fano)) fano = 0d;
        return fano;
    }
    /**
     * Inclusive box coordinates from a rectangular ROI.
     * private static final so that compiler will inline it
     * NOTE(review): uses point+length (not point+length-1) as the inclusive max and,
     * unlike the point-based variant, does not clamp to the image shape — looks like
     * an off-by-one/out-of-bounds risk for ROIs touching the image edge; confirm
     * against SummedAreaTableTest before changing.
     * @param box rectangular region of interest
     * @return coords x1,y1,x2,y2
     */
    private static final int[] createCoords(IRectangularROI box) {
        return new int[]{box.getIntPoint()[0],
                         box.getIntPoint()[1],
                         box.getIntPoint()[0]+box.getIntLength(0),
                         box.getIntPoint()[1]+box.getIntLength(1)};
    }
    // inclusive box coordinates for an odd-sized box centred on point, clamped to the image
    private final int[] createCoords(int[] point, int[] box) {
        int w = box[0];
        int h = box[1];
        int r1 = (int)Math.floor(w/2d); // for instance 3->1, 5->2, 7->3
        int r2 = (int)Math.floor(h/2d); // for instance 3->1, 5->2, 7->3
        final int[] coords = new int[]{0,0,0,0};
        fillCoordsInternal(point, shape, new int[]{r1, r2}, coords);
        return coords;
    }
    // fill coords with the centre-point box clamped to [0, shape)
    private final static void fillCoordsInternal(int[] point, int[] shape, int[] radii, int[] coords) {
        int x = point[0];
        int y = point[1];
        int r1 = radii[0]; // for instance 3->1, 5->2, 7->3
        int r2 = radii[1]; // for instance 3->1, 5->2, 7->3
        int minx = x-r1;
        if (minx<0) minx=0;
        int maxx = x+r1;
        if (maxx>=shape[0]) maxx = shape[0]-1;
        int miny = y-r2;
        if (miny<0) miny=0;
        int maxy = y+r2;
        if (maxy>=shape[1]) maxy = shape[1]-1;
        coords[0] = minx;
        coords[1] = miny;
        coords[2] = maxx;
        coords[3] = maxy;
    }
    // cached image shape (not a copy — callers must not mutate)
    public int[] getShape() {
        return shape;
    }
    // raw summed-table entry at (i, j)
    public double getDouble(int i, int j) {
        return sum[get1DIndexFast(i, j, shape)];
    }
}
package org.ontoware.rdfreactor.runtime;
import org.ontoware.rdf2go.model.Model;
import org.ontoware.rdf2go.model.node.BlankNode;
import org.ontoware.rdf2go.model.node.DatatypeLiteral;
import org.ontoware.rdf2go.model.node.LanguageTagLiteral;
import org.ontoware.rdf2go.model.node.Literal;
import org.ontoware.rdf2go.model.node.Node;
import org.ontoware.rdf2go.model.node.Resource;
import org.ontoware.rdf2go.model.node.URI;
import org.ontoware.rdf2go.vocabulary.RDF;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ReactorRuntimeEntity implements Resource {
private static Logger log = LoggerFactory.getLogger(ReactorRuntimeEntity.class);
/**
* the underlying RDF2Go model in which the triples representing the
* properties of this object are saved
*/
protected Model model;
/**
* the URI of the RDFS class from which this object is an instance
*/
protected URI classURI;
/**
* the identifier of this instance is a URI or a BlankNode. It is used as
* the Subject of all triples representing this instance in the RDF model.
*/
private Resource instanceIdentifier;
/**
* Constructor: create a ReactorBaseImpl for the RDFS/OWL schema class
* identified by classURI, with instanceIdentifier as the identifing URL or
* BlankNode.
*
* @param model,
* the underlying RDF2Go model
* @param classURI,
* URI of the RDFS/OWL class from which this object is an
* instance
* @param instanceIdentifier,
* has to be an URI or URL or BlankNode
* @param write
* if true, the triple (this, rdf:type, classURI) is written to
* the model (in addition to any other triples denoting
* properties of the instance)
*/
public ReactorRuntimeEntity(Model model, URI classURI,
Resource instanceIdentifier, boolean write) {
if( model == null)
throw new IllegalArgumentException("model may not be null");
if( classURI == null)
throw new IllegalArgumentException("classURI may not be null");
if( instanceIdentifier == null)
throw new IllegalArgumentException("instanceIdentifier may not be null");
this.model = model;
this.classURI = classURI;
this.instanceIdentifier = (Resource) instanceIdentifier;
// this can lead to concurrenty exceptions when used in
// iterators on the model
if (write) {
try {
// add type information only if not present
if (!model
.contains(this.instanceIdentifier, RDF.type, classURI)) {
log.debug("adding type information: "
+ this.instanceIdentifier + " a " + classURI);
Base.add(model,instanceIdentifier,RDF.type, classURI);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
/**
* Constructor: create a ReactorBaseImpl for the RDFS/OWL schema class
* identified by classURI, with instanceIdentifier as the identifing URL or
* BlankNode. Don't write (this, rdf:type, classURI) for this instance to the
* model.
*
* @param model,
* the underlying RDF2Go model
* @param classURI,
* URI of the RDFS/OWL class from which this object is an
* instance
* @param instanceIdentifier,
* has to be an URI or URL or BlankNode
*/
public ReactorRuntimeEntity(Model model, URI classURI,
Resource instanceIdentifier) {
// FIXME: default true or false?
this(model, classURI, instanceIdentifier, false);
}
/**
* implements
*
* @see ReactorEntity
*/
public Resource getResource() {
return this.instanceIdentifier;
}
public Model getModel() {
return this.model;
}
/**
* @return the URI of the RDFS schema class of which this object is an
* instance
*/
public URI getRDFSClassURI() {
return this.classURI;
}
// override some java.lang.Object methods
/**
* implement
*
* @see Object methods
*/
public boolean equals(Object other) {
if (other instanceof ReactorRuntimeEntity) {
return ((ReactorRuntimeEntity) other).getResource().equals(
this.getResource());
} else if (other instanceof URI) {
return this.getResource().equals(other);
} else
return false;
}
/**
* implement
*
* @see Object methods
*/
public int hashCode() {
return this.instanceIdentifier.hashCode();
}
/**
* implement
*
* @see Object methods
* @return a string representation of the instance identifier (URI or blank
* node). Representations are dependant on the used RDF2Go adaptor.
*/
public String toString() {
return this.instanceIdentifier.toString();
}
/**
* Cast .this object to the given target Java type.
*
* @param targetType -
* Java type to which to cast this object
* @return converted object
*/
public Object castTo(Class<?> targetType) {
return Base.castTo(model, instanceIdentifier, targetType);
}
public boolean isInstanceof( URI classURI ) {
return Base.hasInstance(model, classURI, instanceIdentifier);
}
    // ----- Node view: all of the following delegate to the instance
    // identifier and throw ClassCastException when it is not of that kind.

    /** @return the identifier as a BlankNode; fails if it is not one. */
    public BlankNode asBlankNode() throws ClassCastException {
        return instanceIdentifier.asBlankNode();
    }

    /** @return the identifier as a DatatypeLiteral; fails if it is not one. */
    public DatatypeLiteral asDatatypeLiteral() throws ClassCastException {
        return instanceIdentifier.asDatatypeLiteral();
    }

    /** @return the identifier as a LanguageTagLiteral; fails if it is not one. */
    public LanguageTagLiteral asLanguageTagLiteral() throws ClassCastException {
        return instanceIdentifier.asLanguageTagLiteral();
    }

    /** @return the identifier as a Literal; fails if it is not one. */
    public Literal asLiteral() throws ClassCastException {
        return instanceIdentifier.asLiteral();
    }

    /** @return the identifier as a Resource. */
    public Resource asResource() throws ClassCastException {
        return instanceIdentifier.asResource();
    }

    /** @return the identifier as a URI; fails if it is a blank node. */
    public URI asURI() throws ClassCastException {
        return instanceIdentifier.asURI();
    }

    /** Delegates Node ordering to the instance identifier. */
    public int compareTo(Node o) {
        return instanceIdentifier.compareTo(o);
    }

    /** @return the SPARQL serialization of the instance identifier. */
    public String toSPARQL() {
        return instanceIdentifier.toSPARQL();
    }
} |
package org.oscm.ui.dialog.mp.subscriptionDetails;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import javax.faces.component.UIComponent;
import javax.faces.component.html.HtmlInputHidden;
import javax.faces.component.html.HtmlSelectOneRadio;
import javax.faces.event.ValueChangeEvent;
import javax.servlet.http.HttpServletRequest;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.oscm.internal.components.response.Response;
import org.oscm.internal.intf.OperatorService;
import org.oscm.internal.intf.SessionService;
import org.oscm.internal.intf.SubscriptionService;
import org.oscm.internal.intf.SubscriptionServiceInternal;
import org.oscm.internal.subscriptiondetails.POSubscriptionDetails;
import org.oscm.internal.subscriptiondetails.SubscriptionDetailsService;
import org.oscm.internal.triggerprocess.TriggerProcessesService;
import org.oscm.internal.types.enumtypes.*;
import org.oscm.internal.types.exception.ConcurrentModificationException;
import org.oscm.internal.types.exception.MandatoryUdaMissingException;
import org.oscm.internal.types.exception.NonUniqueBusinessKeyException;
import org.oscm.internal.types.exception.ObjectNotFoundException;
import org.oscm.internal.types.exception.OperationNotPermittedException;
import org.oscm.internal.types.exception.OperationPendingException;
import org.oscm.internal.types.exception.OrganizationAuthoritiesException;
import org.oscm.internal.types.exception.SaaSApplicationException;
import org.oscm.internal.types.exception.SubscriptionMigrationException;
import org.oscm.internal.types.exception.SubscriptionStateException;
import org.oscm.internal.types.exception.TechnicalServiceNotAliveException;
import org.oscm.internal.types.exception.ValidationException;
import org.oscm.internal.usergroupmgmt.POUserGroup;
import org.oscm.internal.usergroupmgmt.UserGroupService;
import org.oscm.internal.vo.VOBillingContact;
import org.oscm.internal.vo.VOOrganization;
import org.oscm.internal.vo.VOParameter;
import org.oscm.internal.vo.VOParameterDefinition;
import org.oscm.internal.vo.VOPaymentInfo;
import org.oscm.internal.vo.VOPriceModel;
import org.oscm.internal.vo.VOPricedRole;
import org.oscm.internal.vo.VORoleDefinition;
import org.oscm.internal.vo.VOService;
import org.oscm.internal.vo.VOServiceEntry;
import org.oscm.internal.vo.VOSubscription;
import org.oscm.internal.vo.VOSubscriptionDetails;
import org.oscm.internal.vo.VOTriggerProcess;
import org.oscm.internal.vo.VOUda;
import org.oscm.internal.vo.VOUdaDefinition;
import org.oscm.internal.vo.VOUserDetails;
import org.oscm.json.JsonConverter;
import org.oscm.ui.beans.BaseBean;
import org.oscm.ui.beans.BillingContactBean;
import org.oscm.ui.beans.MenuBean;
import org.oscm.ui.beans.OrganizationBean;
import org.oscm.ui.beans.PaymentAndBillingVisibleBean;
import org.oscm.ui.beans.PaymentInfoBean;
import org.oscm.ui.beans.SessionBean;
import org.oscm.ui.beans.UdaBean;
import org.oscm.ui.beans.UserBean;
import org.oscm.ui.common.JSFUtils;
import org.oscm.ui.common.UiDelegate;
import org.oscm.ui.dialog.mp.subscriptionwizard.SubscriptionsHelper;
import org.oscm.ui.dialog.mp.userGroups.SubscriptionUnitCtrl;
import org.oscm.ui.dialog.mp.userGroups.SubscriptionUnitModel;
import org.oscm.ui.model.PricedParameterRow;
import org.oscm.ui.model.Service;
import org.oscm.ui.model.UdaRow;
import org.oscm.ui.model.User;
import org.oscm.ui.stubs.ApplicationStub;
import org.oscm.ui.stubs.FacesContextStub;
import org.oscm.ui.stubs.ResourceBundleStub;
import com.google.common.collect.Sets;
public class ManageSubscriptionCtrlTest {

    // test fixture constants
    private static final long ANY_PARAMETER_KEY = 234523;
    // NOTE(review): the OUTCOME_*, JSON_* and BACK constants below are not
    // referenced in the visible tests — confirm usage before removing.
    private static final String OUTCOME_SUCCESS = "success";
    private static final String OUTCOME_SUBSCRIPTION_NOT_AVAILABLE = "subscriptionNotAccessible";
    private static final String OUTCOME_DEASSIGNED_USER_OR_ERROR = "deassignedUserOrError";
    private static final String JSON_STRING = "someJsonString";
    private static final String JSON_STRING_WITH_QUOTATION = "someJson'String";
    private static final String BACK = "back";
    private static final String OUTCOME_SUBSCRIPTION_NEED_APPROVAL = "subscriptionNeedApproval";
    private final boolean SUBSCRIPTION_FREE = true;

    // controller under test (created as a Mockito spy in setup())
    private ManageSubscriptionCtrl ctrl;
    // mocked collaborators wired into the controller in setup()
    SubscriptionService subscriptionService;
    SubscriptionServiceInternal subscriptionServiceInternal;
    private BillingContactBean billingContactBean;
    private PaymentInfoBean paymentInfoBean;
    private HttpServletRequest httpRequest;
    // subscription returned by the stubbed modifySubscription(...) call
    private VOSubscriptionDetails sub;
    private List<VOTriggerProcess> waitingForApprovalTriggerProcesses;
    private SubscriptionDetailsService subscriptionDetailsService;
    private ManageSubscriptionModel model;
    private TriggerProcessesService triggerProcessService;
    private UserBean userBean;
    private SubscriptionsHelper subscriptionsHelper;
    private JsonConverter jsonConverter;
    private SubscriptionUnitCtrl subscriptionUnitCtrl;
    private SubscriptionUnitModel subscriptionUnitModel;
    private UserGroupService userGroupService;
    // NOTE(review): operatorService is never initialized or used in the
    // visible portion of this class — confirm before removing.
    private OperatorService operatorService;
    private PaymentAndBillingVisibleBean paymentAndBillingVisibleBean;
@Before
public void setup() throws SaaSApplicationException {
ctrl = spy(new ManageSubscriptionCtrl());
SessionBean session = mock(SessionBean.class);
triggerProcessService = mock(TriggerProcessesService.class);
subscriptionServiceInternal = mock(SubscriptionServiceInternal.class);
subscriptionService = mock(SubscriptionService.class);
billingContactBean = mock(BillingContactBean.class);
paymentAndBillingVisibleBean = mock(PaymentAndBillingVisibleBean.class);
ctrl.ui = mock(UiDelegate.class);
subscriptionsHelper = mock(SubscriptionsHelper.class);
paymentInfoBean = mock(PaymentInfoBean.class);
subscriptionDetailsService = mock(SubscriptionDetailsService.class);
httpRequest = mock(HttpServletRequest.class);
model = new ManageSubscriptionModel();
userBean = mock(UserBean.class);
subscriptionsHelper = mock(SubscriptionsHelper.class);
jsonConverter = mock(JsonConverter.class);
subscriptionUnitCtrl = mock(SubscriptionUnitCtrl.class);
subscriptionUnitModel = mock(SubscriptionUnitModel.class);
userGroupService = mock(UserGroupService.class);
ctrl.setModel(model);
ctrl.setTriggerProcessService(triggerProcessService);
ctrl.setSubscriptionService(subscriptionService);
ctrl.setMenuBean(mock(MenuBean.class));
ctrl.setSessionBean(session);
ctrl.setBillingContactBean(billingContactBean);
ctrl.setPaymentInfoBean(paymentInfoBean);
ctrl.setUserBean(userBean);
ctrl.setSubscriptionDetailsService(subscriptionDetailsService);
ctrl.setSubscriptionServiceInternal(subscriptionServiceInternal);
ctrl.setJsonConverter(jsonConverter);
ctrl.setSubscriptionsHelper(subscriptionsHelper);
subscriptionUnitCtrl.setModel(subscriptionUnitModel);
ctrl.setSubscriptionUnitCtrl(subscriptionUnitCtrl);
ctrl.setUserGroupService(userGroupService);
ctrl.setPaymentAndBillingVisibleBean(paymentAndBillingVisibleBean);
waitingForApprovalTriggerProcesses = new ArrayList<>();
when(
triggerProcessService
.getAllWaitingForApprovalTriggerProcessesBySubscriptionId(anyString()))
.thenReturn(new Response(waitingForApprovalTriggerProcesses));
when(session.getSelectedSubscriptionId()).thenReturn("subscriptionId");
when(ctrl.ui.getRequest()).thenReturn(httpRequest);
model.setSubscription(new VOSubscriptionDetails());
model.setService(new Service(new VOService()));
model.getSubscription().setSubscriptionId("test");
VOSubscriptionDetails subscription = givenSubscription(SUBSCRIPTION_FREE);
subscription.setStatus(SubscriptionStatus.ACTIVE);
POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
subscriptionDetails.setSubscription(subscription);
when(
subscriptionDetailsService.getSubscriptionDetails(anyString(),
anyString())).thenReturn(
new Response(subscriptionDetails));
when(subscriptionDetailsService.loadSubscriptionStatus(anyLong()))
.thenReturn(new Response(SubscriptionStatus.ACTIVE));
when(ctrl.ui.getViewLocale()).thenReturn(Locale.GERMAN);
doReturn(Boolean.FALSE).when(subscriptionDetailsService)
.isUserAssignedToTheSubscription(anyLong(), anyLong());
stubMessageBundles();
List<UdaRow> udaRows = new ArrayList<>();
model.setSubscriptionUdaRows(udaRows);
}
private VOSubscriptionDetails givenSubscription(boolean isFree) {
VOSubscriptionDetails subscription = new VOSubscriptionDetails();
subscription.setSubscribedService(new VOService());
if (isFree) {
// set price model to subscription
VOPriceModel priceModel = new VOPriceModel();
priceModel.setType(PriceModelType.FREE_OF_CHARGE);
subscription.setPriceModel(priceModel);
} else {
// set price model to subscription
VOPriceModel priceModel = new VOPriceModel();
priceModel.setType(PriceModelType.PER_UNIT);
subscription.setPriceModel(priceModel);
prepareBillingContact(subscription, 10000);
preparePaymentInfo(subscription, 10001);
}
return subscription;
}
private VOSubscriptionDetails givenSubscriptionWithParameters(long key,
String value) {
// create new service parameter
List<VOParameter> parameters = new LinkedList<>();
VOParameterDefinition parameterDef = new VOParameterDefinition();
parameterDef.setParameterId("paramId");
parameterDef.setValueType(ParameterValueType.PWD);
parameterDef.setModificationType(ParameterModificationType.STANDARD);
VOParameter parameter = new VOParameter(parameterDef);
// STANDARD parameters should be configurable
parameter.setConfigurable(true);
parameter.setKey(key);
parameter.setValue(value);
parameters.add(parameter);
// create service
VOService service = new VOService();
service.setParameters(parameters);
// set service to subscription
VOSubscriptionDetails subscription = new VOSubscriptionDetails();
subscription.setSubscribedService(service);
return subscription;
}
private void preparePaymentInfo(VOSubscriptionDetails subscription, long key) {
VOPaymentInfo paymentInfo = new VOPaymentInfo();
paymentInfo.setKey(key);
subscription.setPaymentInfo(paymentInfo);
}
private void prepareBillingContact(VOSubscriptionDetails subscription,
long key) {
VOBillingContact billingContact = new VOBillingContact();
billingContact.setKey(key);
subscription.setBillingContact(billingContact);
}
private POSubscriptionDetails givenPOSubscriptionDetails() {
List<VOUdaDefinition> udaDefinitions = new LinkedList<>();
VOUdaDefinition subscriptionDefinition = new VOUdaDefinition();
subscriptionDefinition.setTargetType(UdaBean.CUSTOMER_SUBSCRIPTION);
udaDefinitions.add(subscriptionDefinition);
VOUdaDefinition customerDefinition = new VOUdaDefinition();
customerDefinition.setTargetType(UdaBean.CUSTOMER);
udaDefinitions.add(customerDefinition);
POSubscriptionDetails subscriptionDetails = new POSubscriptionDetails();
subscriptionDetails.setUdasDefinitions(udaDefinitions);
subscriptionDetails.setStatus(SubscriptionStatus.ACTIVE);
return subscriptionDetails;
}
private void stubMessageBundles() {
FacesContextStub contextStub = new FacesContextStub(Locale.ENGLISH);
when(ctrl.ui.getFacesContext()).thenReturn(contextStub);
ResourceBundleStub testBundle = new ResourceBundleStub();
((ApplicationStub) contextStub.getApplication())
.setResourceBundleStub(testBundle);
testBundle.addResource(
SubscriptionDetailsCtrlConstants.SUBSCRIPTION_STATE_WARNING,
"Subscription state warning {0}");
testBundle.addResource("SubscriptionStatus.SUSPENDED_UPD",
"suspended update");
testBundle.addResource("SubscriptionStatus.PENDING", "pending");
testBundle.addResource("SubscriptionStatus.PENDING_UPD",
"pending update");
testBundle.addResource("SubscriptionStatus.SUSPENDED",
"suspended - please update your payment information");
}
private void modify_assertRefreshModel(boolean refreshModellExpected)
throws Exception {
int numberExpectedRefreshMethodCall = refreshModellExpected ? 1 : 0;
verify(ctrl, times(numberExpectedRefreshMethodCall)).refreshModel(
any(VOSubscriptionDetails.class));
}
private void modify_assertUISuccessMessage(boolean successMsgExpected) {
int numberExpectedSuccessMethodCall = successMsgExpected ? 1 : 0;
verify(ctrl.ui, times(numberExpectedSuccessMethodCall)).handle(
SubscriptionDetailsCtrlConstants.INFO_SUBSCRIPTION_SAVED,
model.getSubscription().getSubscriptionId());
}
    /**
     * Stubs subscriptionService.modifySubscription(...) to return a
     * subscription (kept in the 'sub' field) with the given status and a
     * single PWD parameter (key/value), and makes the controller's
     * refreshOrgAndSubscriptionUdasInModel(...) a no-op.
     */
    private void modifySubscription(SubscriptionStatus status,
            long parameterKey, String value)
            throws NonUniqueBusinessKeyException, ObjectNotFoundException,
            OperationNotPermittedException, ValidationException,
            SubscriptionMigrationException, ConcurrentModificationException,
            TechnicalServiceNotAliveException, OperationPendingException,
            MandatoryUdaMissingException, SubscriptionStateException,
            OrganizationAuthoritiesException {
        sub = givenSubscriptionWithParameters(parameterKey, value);
        sub.setSubscriptionId("test");
        sub.setStatus(status);
        when(
                subscriptionService.modifySubscription(
                        any(VOSubscriptionDetails.class),
                        anyListOf(VOParameter.class), anyListOf(VOUda.class)))
                .thenReturn(sub);
        // make the UDA refresh a no-op so tests don't hit the service layer
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) {
                return null;
            }
        }).when(ctrl).refreshOrgAndSubscriptionUdasInModel(anyString());
    }
private User prepareSubOwner(String userId, boolean isSelected) {
VOUserDetails userDetails = new VOUserDetails();
userDetails.setUserId(userId);
User owner = new User(userDetails);
owner.setFirstName("FirstName");
owner.setLastName("LastName");
owner.setOwnerSelected(isSelected);
return owner;
}
private void assertSubscriptionUdaRows() {
List<UdaRow> refreshedSubscriptionUdaRows = model
.getSubscriptionUdaRows();
assertEquals(1, refreshedSubscriptionUdaRows.size());
assertEquals(UdaBean.CUSTOMER_SUBSCRIPTION,
refreshedSubscriptionUdaRows.get(0).getUdaDefinition()
.getTargetType());
}
private VOUserDetails prepareVOUserDetails_SubMgr(String userId,
boolean isSubMgr) {
VOUserDetails user = new VOUserDetails();
user.setUserId(userId);
if (isSubMgr) {
user.addUserRole(UserRoleType.SUBSCRIPTION_MANAGER);
}
return user;
}
private VOUserDetails prepareVOUserDetails_OrgAdmin(String userId) {
VOUserDetails user = new VOUserDetails();
user.setUserId(userId);
user.addUserRole(UserRoleType.ORGANIZATION_ADMIN);
return user;
}
private void setSubscriptionOwners(boolean isOwner1Select,
boolean isOwner2Select) {
List<User> subOwners = new ArrayList<>();
subOwners.add(prepareSubOwner("owner1", isOwner1Select));
subOwners.add(prepareSubOwner("owner2", isOwner2Select));
model.setSubscriptionOwners(subOwners);
}
private void checkSubscriptionOwner(String ownerUserId) {
if (ownerUserId != null)
for (User user : model.getSubscriptionOwners()) {
if (user.isOwnerSelected()) {
assertEquals(
Boolean.TRUE,
Boolean.valueOf(user.getUserId().equalsIgnoreCase(
ownerUserId)));
}
}
}
private ValueChangeEvent initChangeOwnerEvent(String userId) {
ValueChangeEvent event = mock(ValueChangeEvent.class);
when(event.getNewValue()).thenReturn("true");
HtmlSelectOneRadio radio = mock(HtmlSelectOneRadio.class);
when(event.getComponent()).thenReturn(radio);
when(radio.getSubmittedValue()).thenReturn("true");
HtmlInputHidden input = mock(HtmlInputHidden.class);
when(input.getValue()).thenReturn(userId);
when(input.getRendererType()).thenReturn("javax.faces.Hidden");
List<UIComponent> componentList = new ArrayList<>();
componentList.add(input);
when(radio.getChildren()).thenReturn(componentList);
return event;
}
private void assertParametersModified(long key, String value) {
assertEquals(model.getServiceParameters().get(0).getParameter()
.getKey(), model.getSubscriptionParameters().get(0)
.getParameter().getKey());
assertEquals(model.getServiceParameters().get(0).getParameter()
.getValue(), model.getSubscriptionParameters().get(0)
.getParameter().getValue());
assertEquals(model.getServiceParameters().get(0).getParameter()
.getKey(), key);
assertEquals(model.getServiceParameters().get(0).getParameter()
.getValue(), value);
assertEquals(
model.getService().getVO().getParameters().get(0).getKey(),
model.getServiceParameters().get(0).getParameter().getKey());
assertEquals(model.getService().getVO().getParameters().get(0)
.getValue(), model.getServiceParameters().get(0).getParameter()
.getValue());
}
    /**
     * modify() must not report success or refresh the model when the service
     * returns no subscription.
     * NOTE(review): the name says "suspended", but the setup only stubs
     * modifySubscription(...) to return null — confirm the intended scenario.
     */
    @Test
    public void modify_subscriptionSuspended() throws Exception {
        // given
        when(
                subscriptionService.modifySubscription(
                        any(VOSubscriptionDetails.class),
                        anyListOf(VOParameter.class), anyListOf(VOUda.class)))
                .thenReturn(null);
        PricedParameterRow param = decorateWithPricedParameterRow();
        // when
        String outcome = ctrl.modify();
        // then
        assertEquals(SubscriptionDetailsCtrlConstants.OUTCOME_SUCCESS, outcome);
        modify_assertRefreshModel(false);
        modify_assertUISuccessMessage(false);
        verify(param).rewriteEncryptedValues();
    }
    /**
     * Synchronous modification (status ACTIVE): a success message is shown,
     * the model is refreshed and the config-dirty flag is cleared.
     */
    @Test
    public void modify_subscriptionModified_SYNC() throws Exception {
        // given
        modifySubscription(SubscriptionStatus.ACTIVE, ANY_PARAMETER_KEY,
                "ANYTHING");
        PricedParameterRow param = decorateWithPricedParameterRow();
        // when
        String outcome = ctrl.modify();
        // then
        verify(ctrl.ui, times(1)).handle(
                SubscriptionDetailsCtrlConstants.INFO_SUBSCRIPTION_SAVED,
                "test");
        assertEquals(SubscriptionDetailsCtrlConstants.OUTCOME_SUCCESS, outcome);
        assertEquals(Boolean.FALSE, Boolean.valueOf(model.isConfigDirty()));
        modify_assertRefreshModel(true);
        modify_assertUISuccessMessage(true);
        verify(param).rewriteEncryptedValues();
    }
@Test
public void modify_subscriptionModified_ASYNC() throws Exception {
// given
modifySubscription(SubscriptionStatus.PENDING_UPD, ANY_PARAMETER_KEY,
"ANYTHING");
PricedParameterRow param = decorateWithPricedParameterRow();
// when
String outcome = ctrl.modify();
// then
verify(ctrl.ui, times(1)).handle(
SubscriptionDetailsCtrlConstants.INFO_SUBSCRIPTION_ASYNC_SAVED,
"test");
verify(ctrl.ui, times(1)).handle(
SubscriptionDetailsCtrlConstants.INFO_SUBSCRIPTION_ASYNC_SAVED,
"test");
assertEquals(Boolean.TRUE, Boolean.valueOf(model.isShowStateWarning()));
assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUsersTabDisabled()));
assertEquals(Boolean.TRUE, Boolean.valueOf(model.isCfgTabDisabled()));
assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUpgTabDisabled()));
assertEquals(Boolean.TRUE, Boolean.valueOf(model.isReadOnlyParams()));
assertEquals(Boolean.TRUE, Boolean.valueOf(model.isAsyncModified()));
assertEquals(SubscriptionDetailsCtrlConstants.OUTCOME_SUCCESS, outcome);
assertEquals(JSFUtils.getText(
SubscriptionDetailsCtrlConstants.SUBSCRIPTION_STATE_WARNING,
new Object[] { "pending update" }), model.getStateWarning());
verify(param).rewriteEncryptedValues();
}
    /**
     * Bug 10821: an asynchronous modification must not reset the
     * configurationChanged flag.
     */
    @Test
    public void modify_ASYNC_bug10821() throws Exception {
        // given
        modifySubscription(SubscriptionStatus.PENDING_UPD, ANY_PARAMETER_KEY,
                "ANYTHING");
        model.setConfigurationChanged(true);
        PricedParameterRow param = decorateWithPricedParameterRow();
        // when
        ctrl.modify();
        // then
        assertEquals(Boolean.TRUE,
                Boolean.valueOf(model.isConfigurationChanged()));
        verify(param).rewriteEncryptedValues();
    }
    /**
     * Modifying without any selected or stored owner keeps the owner empty
     * and still reports success.
     */
    @Test
    public void modify_NoSubscriptionOwnerSelected_withoutStoredOwner()
            throws Exception {
        // given
        modifySubscription(SubscriptionStatus.ACTIVE, ANY_PARAMETER_KEY,
                "ANYTHING");
        model.setSelectedOwner(null);
        model.setStoredOwner(null);
        PricedParameterRow param = decorateWithPricedParameterRow();
        // when
        String outcome = ctrl.modify();
        // then
        assertEquals(SubscriptionDetailsCtrlConstants.OUTCOME_SUCCESS, outcome);
        modify_assertRefreshModel(true);
        modify_assertUISuccessMessage(true);
        assertNull(sub.getOwnerId());
        assertNull(model.getStoredOwner());
        verify(param).rewriteEncryptedValues();
    }
private PricedParameterRow decorateWithPricedParameterRow() {
List<PricedParameterRow> paramList = new ArrayList<>();
PricedParameterRow param = spy(new PricedParameterRow());
doReturn(new VOParameter()).when(param).getParameter();
VOParameterDefinition paramDef = new VOParameterDefinition();
doReturn(paramDef).when(param).getParameterDefinition();
paramDef.setParameterId("parameterId");
paramDef.setValueType(ParameterValueType.PWD);
paramList.add(param);
model.setSubscriptionParameters(paramList);
return param;
}
    /**
     * Modifying with a stored owner but no selection clears the stored owner
     * and deselects them.
     */
    @Test
    public void modify_NoSubscriptionOwnerSelected_withStoredOwner()
            throws Exception {
        // given
        modifySubscription(SubscriptionStatus.ACTIVE, ANY_PARAMETER_KEY,
                "ANYTHING");
        model.setSelectedOwner(null);
        User owner = prepareSubOwner("owner", true);
        model.setStoredOwner(owner);
        PricedParameterRow param = decorateWithPricedParameterRow();
        // when
        String outcome = ctrl.modify();
        // then
        assertEquals(SubscriptionDetailsCtrlConstants.OUTCOME_SUCCESS, outcome);
        modify_assertRefreshModel(true);
        verify(ctrl.ui, times(1)).handle(
                SubscriptionDetailsCtrlConstants.INFO_SUBSCRIPTION_SAVED,
                "test");
        assertNull(sub.getOwnerId());
        assertNull(model.getStoredOwner());
        assertEquals(Boolean.FALSE, Boolean.valueOf(owner.isOwnerSelected()));
        verify(param).rewriteEncryptedValues();
    }
    /**
     * Selecting a new owner in a synchronous modification stores the new
     * owner ("owner2") on both the subscription and the model.
     */
    @Test
    public void modify_SelectSubscriptionOwner_SYNC() throws Exception {
        // given
        modifySubscription(SubscriptionStatus.ACTIVE, ANY_PARAMETER_KEY,
                "ANYTHING");
        model.setStoredOwner(prepareSubOwner("owner1", false));
        model.setSelectedOwner(prepareSubOwner("owner2", true));
        VOSubscriptionDetails subTemp = new VOSubscriptionDetails();
        model.setSubscription(subTemp);
        PricedParameterRow param = decorateWithPricedParameterRow();
        // when
        String outcome = ctrl.modify();
        // then
        assertEquals(SubscriptionDetailsCtrlConstants.OUTCOME_SUCCESS, outcome);
        modify_assertRefreshModel(true);
        modify_assertUISuccessMessage(true);
        assertEquals("owner2", subTemp.getOwnerId());
        assertEquals("owner2", model.getStoredOwner().getUserId());
        assertEquals(Boolean.TRUE,
                Boolean.valueOf(model.getStoredOwner().isOwnerSelected()));
        verify(param).rewriteEncryptedValues();
    }
    /**
     * Selecting a new owner in an asynchronous modification keeps the old
     * stored owner ("owner1") until the pending change is applied.
     */
    @Test
    public void modify_SelectSubscriptionOwner_ASYNC() throws Exception {
        // given
        modifySubscription(SubscriptionStatus.PENDING_UPD, ANY_PARAMETER_KEY,
                "ANYTHING");
        model.setStoredOwner(prepareSubOwner("owner1", false));
        model.setSelectedOwner(prepareSubOwner("owner2", true));
        VOSubscriptionDetails subTemp = new VOSubscriptionDetails();
        subTemp.setSubscriptionId("test");
        model.setSubscription(subTemp);
        PricedParameterRow param = decorateWithPricedParameterRow();
        // when
        String outcome = ctrl.modify();
        // then
        assertEquals(SubscriptionDetailsCtrlConstants.OUTCOME_SUCCESS, outcome);
        verify(ctrl.ui, times(1)).handle(
                SubscriptionDetailsCtrlConstants.INFO_SUBSCRIPTION_ASYNC_SAVED,
                "test");
        assertEquals("owner2", subTemp.getOwnerId());
        assertEquals("owner1", model.getStoredOwner().getUserId());
        assertEquals(Boolean.FALSE,
                Boolean.valueOf(model.getStoredOwner().isOwnerSelected()));
        verify(param).rewriteEncryptedValues();
    }
    /**
     * refreshModel(...) must refresh parameters and UDAs and clear the
     * configuration-changed and config-dirty flags.
     */
    @Test
    public void refreshModel() throws Exception {
        // given
        VOSubscriptionDetails subscription = mock(VOSubscriptionDetails.class);
        VOServiceEntry voService = mock(VOServiceEntry.class);
        doReturn(voService).when(subscription).getSubscribedService();
        when(
                subscriptionService.modifySubscription(
                        any(VOSubscriptionDetails.class),
                        anyListOf(VOParameter.class), anyListOf(VOUda.class)))
                .thenReturn(subscription);
        // make the UDA refresh a no-op so the test stays service-free
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) {
                return null;
            }
        }).when(ctrl).refreshOrgAndSubscriptionUdasInModel(anyString());
        // when
        ctrl.refreshModel(subscription);
        // then
        verify(ctrl, times(1)).refreshSubscriptionParametersInModel(
                subscription);
        verify(ctrl, times(1))
                .refreshOrgAndSubscriptionUdasInModel(anyString());
        assertEquals(Boolean.FALSE,
                Boolean.valueOf(model.isConfigurationChanged()));
        assertEquals(Boolean.FALSE, Boolean.valueOf(model.isConfigDirty()));
    }
@Test
public void refreshSubscriptionParametersInModel() {
// given
VOSubscriptionDetails givenSubscription = givenSubscriptionWithParameters(
ANY_PARAMETER_KEY, "ANY THING");
// when
ctrl.refreshSubscriptionParametersInModel(givenSubscription);
// then
List<VOParameter> givenSubscriptionParameters = givenSubscription
.getSubscribedService().getParameters();
List<PricedParameterRow> refreshedModelServiceParameters = ctrl
.getModel().getServiceParameters();
List<PricedParameterRow> refreshedModelSubscriptionParameters = ctrl
.getModel().getSubscriptionParameters();
assertEquals(givenSubscriptionParameters.size(),
refreshedModelSubscriptionParameters.size());
assertEquals(givenSubscriptionParameters.get(0).getKey(),
refreshedModelSubscriptionParameters.get(0).getParameter()
.getKey());
assertEquals(givenSubscriptionParameters.size(),
refreshedModelServiceParameters.size());
assertEquals(givenSubscriptionParameters.get(0).getKey(),
refreshedModelServiceParameters.get(0).getParameter().getKey());
}
    /**
     * updateRoles() must report a concurrent-modification outcome when the
     * assigned user is no longer assigned to the subscription (see the
     * isUserAssignedToTheSubscription stub returning FALSE in setup()).
     */
    @Test
    public void updateRoles_userNotAssignedToTheSubscription() throws Exception {
        // given
        final String OUTCOME_MODIFICATION_ERROR = "concurrentModificationError";
        ManageSubscriptionModel model = new ManageSubscriptionModel();
        VOUserDetails voUserDetails = new VOUserDetails();
        voUserDetails.setKey(8988L);
        User user = new User(voUserDetails);
        List<User> assignedUsers = new ArrayList<>();
        assignedUsers.add(user);
        model.setAssignedUsers(assignedUsers);
        VOSubscriptionDetails sub = new VOSubscriptionDetails();
        sub.setKey(9988L);
        model.setSubscription(sub);
        ctrl.setModel(model);
        // when
        String result = ctrl.updateRoles();
        // then
        assertEquals(OUTCOME_MODIFICATION_ERROR, result);
    }
@Test
public void refreshOrgAndSubscriptionUdasInModel() throws Exception {
// given
when(ctrl.ui.getViewLocale()).thenReturn(Locale.ENGLISH);
SubscriptionDetailsService subscriptionDetailService = mock(SubscriptionDetailsService.class);
String subscriptionId = anyString();
when(
subscriptionDetailService.getSubscriptionDetails(
subscriptionId, anyString())).thenReturn(
new Response(givenPOSubscriptionDetails()));
// when
ctrl.refreshOrgAndSubscriptionUdasInModel(subscriptionId);
// then
assertSubscriptionUdaRows();
}
    /**
     * Successful initialization: no exception is handled, the subscription is
     * marked existing and parameters stay editable.
     */
    @Test
    public void initialize_succeed() throws Exception {
        // given
        doNothing().when(ctrl).initializeSubscription(anyString());
        // when
        ctrl.initialize();
        // then
        verify(ctrl.ui, never()).handleException(
                any(SaaSApplicationException.class));
        assertTrue(model.isSubscriptionExisting());
        assertFalse(model.isReadOnlyParams());
    }
@Test
public void getInitialize_ObjectNotFoundException() throws Exception {
// given
doThrow(new ObjectNotFoundException()).when(ctrl)
.initializeSubscription(anyInt());
// when
ctrl.initialize();
// then
verify(ctrl.ui, times(1)).handleException(
any(ObjectNotFoundException.class));
}
    /**
     * Bugfix 9921: initializing a chargeable subscription must initialize
     * payment info and must not add any subscription owners.
     */
    @Test
    public void initializeSubscription() throws Exception {
        // given
        // NOTE(review): this local mock is stubbed but never injected into the
        // controller; the controller uses the service stubbed in setup().
        SubscriptionDetailsService ss = mock(SubscriptionDetailsService.class);
        VOSubscriptionDetails subscription = givenSubscription(!SUBSCRIPTION_FREE);
        POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
        subscriptionDetails.setSubscription(subscription);
        when(ss.getSubscriptionDetails(eq("subscription_id"), anyString()))
                .thenReturn(new Response(subscriptionDetails));
        when(ctrl.ui.getViewLocale()).thenReturn(Locale.ENGLISH);
        when(ctrl.getSubscriptionUnitCtrl().getModel()).thenReturn(
                subscriptionUnitModel);
        // when
        ctrl.initializeSubscription("subscription_id");
        // then
        verify(ctrl, times(1)).initPaymentInfo();
        // verify no subscription owner is added
        assertEquals(0, model.getSubscriptionOwners().size());
    }
    /**
     * Bug 10481: a logged-in subscription manager may report issues.
     */
    @Test
    public void initializeSubscription_Bug10481_SubMgr() throws Exception {
        // given
        // NOTE(review): this local mock is stubbed but never injected into the
        // controller; the controller uses the service stubbed in setup().
        SubscriptionDetailsService ss = mock(SubscriptionDetailsService.class);
        VOSubscriptionDetails subscription = givenSubscription(!SUBSCRIPTION_FREE);
        POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
        subscriptionDetails.setSubscription(subscription);
        when(ss.getSubscriptionDetails(eq("subscription_id"), anyString()))
                .thenReturn(new Response(subscriptionDetails));
        when(ctrl.ui.getViewLocale()).thenReturn(Locale.ENGLISH);
        when(Boolean.valueOf(userBean.isLoggedInAndAdmin())).thenReturn(
                Boolean.FALSE);
        when(Boolean.valueOf(userBean.isLoggedInAndSubscriptionManager()))
                .thenReturn(Boolean.TRUE);
        when(ctrl.getSubscriptionUnitCtrl().getModel()).thenReturn(
                subscriptionUnitModel);
        // when
        ctrl.initializeSubscription("subscription_id");
        // then
        assertEquals(Boolean.TRUE,
                Boolean.valueOf(model.getIsReportIssueAllowed()));
    }
    /**
     * Bug 10481: a logged-in organization admin may report issues.
     */
    @Test
    public void initializeSubscription_Bug10481_Admin() throws Exception {
        // given
        // NOTE(review): this local mock is stubbed but never injected into the
        // controller; the controller uses the service stubbed in setup().
        SubscriptionDetailsService ss = mock(SubscriptionDetailsService.class);
        VOSubscriptionDetails subscription = givenSubscription(!SUBSCRIPTION_FREE);
        POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
        subscriptionDetails.setSubscription(subscription);
        when(ss.getSubscriptionDetails(eq("subscription_id"), anyString()))
                .thenReturn(new Response(subscriptionDetails));
        when(ctrl.ui.getViewLocale()).thenReturn(Locale.ENGLISH);
        when(Boolean.valueOf(userBean.isLoggedInAndAdmin())).thenReturn(
                Boolean.TRUE);
        when(Boolean.valueOf(userBean.isLoggedInAndSubscriptionManager()))
                .thenReturn(Boolean.FALSE);
        when(ctrl.getSubscriptionUnitCtrl().getModel()).thenReturn(
                subscriptionUnitModel);
        // when
        ctrl.initializeSubscription("subscription_id");
        // then
        assertEquals(Boolean.TRUE,
                Boolean.valueOf(model.getIsReportIssueAllowed()));
    }
    /**
     * Bug 11075: a waiting trigger process for the subscription disables the
     * unsubscribe button and shows a progress message.
     */
    @Test
    public void initializeSubscription_Bug11075_Admin() throws Exception {
        // given
        // NOTE(review): this local mock is stubbed but never injected into the
        // controller; the controller uses the service stubbed in setup().
        SubscriptionDetailsService ss = mock(SubscriptionDetailsService.class);
        VOSubscriptionDetails subscription = givenSubscription(!SUBSCRIPTION_FREE);
        POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
        subscriptionDetails.setSubscription(subscription);
        when(ss.getSubscriptionDetails(eq("subscription_id"), anyString()))
                .thenReturn(new Response(subscriptionDetails));
        List<VOTriggerProcess> triggerProcessList = new ArrayList<>();
        VOTriggerProcess triggerProcess = new VOTriggerProcess();
        triggerProcessList.add(triggerProcess);
        Response response = mock(Response.class);
        when(
                triggerProcessService
                        .getAllWaitingForApprovalTriggerProcessesBySubscriptionId(any(String.class)))
                .thenReturn(response);
        when(response.getResultList(VOTriggerProcess.class)).thenReturn(
                triggerProcessList);
        when(ctrl.getSubscriptionUnitCtrl().getModel()).thenReturn(
                subscriptionUnitModel);
        // when
        ctrl.initializeSubscription("subscription_id");
        // then
        assertEquals(Boolean.TRUE,
                Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
        verify(ctrl.ui, times(1)).handleProgress();
    }
/**
 * When the subscription has a stored owner, initialization must list all
 * eligible owners (sub-managers flagged as owner candidates plus org
 * admins), preselect the stored owner, and clear the no-owner flag.
 */
@Test
public void initializeSubscription_addSubscriptionOwners_withStoredOwner()
        throws Exception {
    // given
    VOSubscriptionDetails subscription = givenSubscription(!SUBSCRIPTION_FREE);
    subscription.setOwnerId("owner");
    POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
    subscriptionDetails.setSubscription(subscription);
    // three org users: an owner candidate, an org admin, a non-candidate
    List<VOUserDetails> userList = new ArrayList<>();
    userList.add(prepareVOUserDetails_SubMgr("owner", true));
    userList.add(prepareVOUserDetails_OrgAdmin("admin"));
    userList.add(prepareVOUserDetails_SubMgr("notowner", false));
    subscriptionDetails.setUsersForOrganization(userList);
    when(
            subscriptionDetailsService.getSubscriptionDetails(
                    eq("subscription_id"), anyString())).thenReturn(
            new Response(subscriptionDetails));
    when(ctrl.ui.getViewLocale()).thenReturn(Locale.ENGLISH);
    when(ctrl.getSubscriptionUnitCtrl().getModel()).thenReturn(
            subscriptionUnitModel);
    // when
    ctrl.initializeSubscription("subscription_id");
    // then: "notowner" is filtered out, stored/selected owner is "owner"
    assertEquals(2, model.getSubscriptionOwners().size());
    assertEquals("owner", model.getSubscriptionOwners().get(0).getUserId());
    assertEquals("admin", model.getSubscriptionOwners().get(1).getUserId());
    assertEquals("owner", model.getSelectedOwner().getUserId());
    assertEquals("owner", model.getStoredOwner().getUserId());
    assertEquals(Boolean.FALSE,
            Boolean.valueOf(model.isNoSubscriptionOwner()));
}

/**
 * Without a stored owner, candidates are still listed but nothing is
 * selected and the no-owner flag is set.
 */
@Test
public void initializeSubscription_addSubscriptionOwners_withoutStoredOwner()
        throws Exception {
    // given
    VOSubscriptionDetails subscription = givenSubscription(!SUBSCRIPTION_FREE);
    POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
    subscriptionDetails.setSubscription(subscription);
    List<VOUserDetails> userList = new ArrayList<>();
    userList.add(prepareVOUserDetails_SubMgr("owner", true));
    userList.add(prepareVOUserDetails_SubMgr("notowner", false));
    subscriptionDetails.setUsersForOrganization(userList);
    when(
            subscriptionDetailsService.getSubscriptionDetails(
                    eq("subscription_id"), anyString())).thenReturn(
            new Response(subscriptionDetails));
    when(ctrl.ui.getViewLocale()).thenReturn(Locale.ENGLISH);
    when(ctrl.getSubscriptionUnitCtrl().getModel()).thenReturn(
            subscriptionUnitModel);
    // when
    ctrl.initializeSubscription("subscription_id");
    // then
    assertEquals(1, model.getSubscriptionOwners().size());
    assertEquals("owner", model.getSubscriptionOwners().get(0).getUserId());
    assertNull(model.getSelectedOwner());
    assertNull(model.getStoredOwner());
    assertEquals(Boolean.TRUE,
            Boolean.valueOf(model.isNoSubscriptionOwner()));
}
/**
 * Initializing a free subscription must still trigger payment-info
 * initialization exactly once.
 */
@Test
public void initializeSubscription_NoPaymentInfoNoBillingContact()
        throws Exception {
    // given
    // NOTE(review): local mock not injected into ctrl; stubbing likely dead.
    SubscriptionDetailsService ss = mock(SubscriptionDetailsService.class);
    VOSubscriptionDetails subscription = givenSubscription(SUBSCRIPTION_FREE);
    POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
    subscriptionDetails.setSubscription(subscription);
    when(ss.getSubscriptionDetails(eq("subscription_id"), anyString()))
            .thenReturn(new Response(subscriptionDetails));
    when(ctrl.ui.getViewLocale()).thenReturn(Locale.ENGLISH);
    when(ctrl.getSubscriptionUnitCtrl().getModel()).thenReturn(
            subscriptionUnitModel);
    // when
    ctrl.initializeSubscription("subscription_id");
    // then
    verify(ctrl, times(1)).initPaymentInfo();
}
/**
 * For a chargeable subscription, initPaymentInfo must load billing
 * contacts and payment infos and preselect the subscription's current
 * billing contact and payment info keys.
 */
@Test
public void initPaymentInfo() {
    // given
    VOSubscriptionDetails subscription = givenSubscription(!SUBSCRIPTION_FREE);
    model.setSubscription(subscription);
    // when
    ctrl.initPaymentInfo();
    // then
    verify(billingContactBean).getBillingContacts();
    verify(paymentInfoBean).getPaymentInfosForSubscription();
    verify(billingContactBean, times(1)).setSelectedBillingContactKey(
            eq(Long.valueOf(model.getSubscription().getBillingContact()
                    .getKey())));
    verify(paymentInfoBean, times(1))
            .setSelectedPaymentInfoForSubscriptionKey(
                    eq(Long.valueOf(model.getSubscription()
                            .getPaymentInfo().getKey())));
}

/**
 * For a free subscription (no payment info, no billing contact) only the
 * load calls happen; no keys are preselected.
 */
@Test
public void initPaymentInfo_NoPaymentInfoNoBillingContact() {
    // given
    VOSubscriptionDetails subscription = givenSubscription(SUBSCRIPTION_FREE);
    model.setSubscription(subscription);
    // when
    ctrl.initPaymentInfo();
    // then
    verify(billingContactBean).getBillingContacts();
    verify(paymentInfoBean).getPaymentInfosForSubscription();
    verifyNoMoreInteractions(billingContactBean, paymentInfoBean);
}
/**
 * Leaving the payment tab pushes the subscription's billing contact key to
 * the bean, clears the dirty flag, and yields no navigation outcome.
 */
@Test
public void leavePaymentTab_ok() {
    // given a chargeable subscription in the model
    model.setSubscription(givenSubscription(!SUBSCRIPTION_FREE));
    // when
    String outcome = ctrl.leavePaymentTab();
    // then the current billing contact key was propagated exactly once
    long contactKey = model.getSubscription().getBillingContact().getKey();
    verify(billingContactBean, times(1)).setSelectedBillingContactKey(
            eq(Long.valueOf(contactKey)));
    // and the model is clean again with no redirect requested
    assertFalse(model.isDirty());
    assertNull(outcome);
}
/** Without any owner candidates the owner-selection popup must not open. */
@Test
public void setPopupTargetSelectOwners_NoSubscriptionOwners() {
    // given an empty candidate list
    List<User> noOwners = new ArrayList<>();
    model.setSubscriptionOwners(noOwners);
    // when
    String outcome = ctrl.setPopupTargetSelectOwners();
    // then the dialog is suppressed
    assertEquals("dontOpenModalDialog", outcome);
}
/**
 * With candidates but no selected owner, opening the popup must leave
 * every candidate unselected.
 */
@Test
public void setPopupTargetSelectOwners_NoSelectOwner() {
    // given
    model.setSelectedOwner(null);
    setSubscriptionOwners(true, false);
    // when
    String result = ctrl.setPopupTargetSelectOwners();
    // then: nobody is marked selected and no outcome is produced
    for (User user : model.getSubscriptionOwners()) {
        assertEquals(Boolean.FALSE, Boolean.valueOf(user.isSelected()));
    }
    assertEquals(Boolean.FALSE, Boolean.valueOf(ctrl.isOwnerSelected()));
    assertNull(result);
}

/**
 * With a selected owner matching a candidate, the popup must mark that
 * candidate as selected.
 */
@Test
public void setPopupTargetSelectOwners_WithSubscriptionOwner() {
    // given
    model.setSelectedOwner(prepareSubOwner("owner1", true));
    setSubscriptionOwners(false, false);
    // when
    String result = ctrl.setPopupTargetSelectOwners();
    // then
    checkSubscriptionOwner("owner1");
    assertEquals(Boolean.TRUE, Boolean.valueOf(ctrl.isOwnerSelected()));
    assertNull(result);
}

/**
 * Even when the candidate list carries a different pre-marked owner, the
 * model's selected owner wins.
 */
@Test
public void setPopupTargetSelectOwners_WithWrongSubscriptionOwner() {
    // given
    model.setSelectedOwner(prepareSubOwner("owner1", true));
    setSubscriptionOwners(false, true);
    // when
    String result = ctrl.setPopupTargetSelectOwners();
    // then
    checkSubscriptionOwner("owner1");
    assertEquals(Boolean.TRUE, Boolean.valueOf(ctrl.isOwnerSelected()));
    assertNull(result);
}
/**
 * Refreshing with the "no owner" flag set must clear the selected owner.
 */
@Test
public void refreshOwner_ClickNoOwner() {
    // given
    model.setNoSubscriptionOwner(true);
    model.setSelectedOwner(new User(new VOUserDetails()));
    // when
    ctrl.refreshOwner();
    // then
    assertNull(model.getSelectedOwner());
}

/**
 * Refreshing without the "no owner" flag must restore the stored owner as
 * the selected owner.
 */
@Test
public void refreshOwner_ClickOwner() {
    // given
    model.setNoSubscriptionOwner(false);
    model.setStoredOwner(prepareSubOwner("owner1", true));
    // when
    ctrl.refreshOwner();
    // then
    assertNotNull(model.getSelectedOwner());
    assertEquals("owner1", model.getSelectedOwner().getUserId());
}
/** A null user yields the localized "no owner" display name. */
@Test
public void refreshSelectedOwnerName_NoUser() {
    // when
    ctrl.refreshSelectedOwnerName(null);
    // then
    assertEquals(
            JSFUtils.getText("subscription.noOwner", new Object[] { "" }),
            model.getSelectedOwnerName());
}

/** Default locale order renders "First Last(userId)". */
@Test
public void refreshSelectedOwnerName() {
    // given
    doReturn(Boolean.FALSE).when(ctrl.ui).isNameSequenceReversed();
    // when
    ctrl.refreshSelectedOwnerName(prepareSubOwner("owner1", true));
    // then
    assertEquals("FirstName LastName(owner1)", model.getSelectedOwnerName());
}

/** Reversed-name locales render "Last First(userId)". */
@Test
public void refreshSelectedOwnerName_ReverseNameSequence() {
    // given
    doReturn(Boolean.TRUE).when(ctrl.ui).isNameSequenceReversed();
    // when
    ctrl.refreshSelectedOwnerName(prepareSubOwner("owner1", true));
    // then
    assertEquals("LastName FirstName(owner1)", model.getSelectedOwnerName());
}
/**
 * updateSelectedOwner must replace the model's selected owner with the
 * candidate currently marked in the owner list ("owner2" via the helper's
 * second flag).
 */
@Test
public void updateSelectedOwner() {
    // given
    model.setSelectedOwner(prepareSubOwner("owner1", true));
    setSubscriptionOwners(false, true);
    // when
    ctrl.updateSelectedOwner();
    // then
    assertEquals(
            Boolean.TRUE,
            Boolean.valueOf(model.getSelectedOwner().getUserId()
                    .equalsIgnoreCase("owner2")));
}

/** A change event naming a candidate selects exactly that candidate. */
@Test
public void selectedOwnerChanged() {
    // given
    setSubscriptionOwners(false, true);
    ValueChangeEvent event = initChangeOwnerEvent("owner1");
    // when
    ctrl.selectedOwnerChanged(event);
    // then
    checkSubscriptionOwner("owner1");
    assertEquals(Boolean.TRUE, Boolean.valueOf(ctrl.isOwnerSelected()));
}

/**
 * A change event with a blank id deselects every candidate ("no owner").
 */
@Test
public void selectedOwnerChangedToNoOwner() {
    // given
    setSubscriptionOwners(false, true);
    ValueChangeEvent event = initChangeOwnerEvent(" ");
    // when
    ctrl.selectedOwnerChanged(event);
    // then
    for (User user : model.getSubscriptionOwners()) {
        assertEquals(Boolean.FALSE, Boolean.valueOf(user.isOwnerSelected()));
    }
    assertEquals(Boolean.FALSE, Boolean.valueOf(ctrl.isOwnerSelected()));
}
/**
 * An INVALID subscription status must be reported as "status changed" by
 * the helper's validation.
 */
@Test
public void validateSubscriptionAccessible_subscriptionInvalid()
        throws Exception {
    // given
    when(ctrl.getSubscriptionsHelper()).thenReturn(
            new SubscriptionsHelper());
    when(subscriptionDetailsService.loadSubscriptionStatus(anyLong()))
            .thenReturn(new Response(SubscriptionStatus.INVALID));
    // when
    boolean result = ctrl.getSubscriptionsHelper()
            .validateSubscriptionStatus(model.getSubscription(),
                    subscriptionDetailsService);
    // then
    assertEquals(Boolean.TRUE, Boolean.valueOf(result));
}

/** Same as above for a DEACTIVATED subscription. */
@Test
public void validateSubscriptionAccessible_subscriptionDeactivated()
        throws Exception {
    // given
    when(ctrl.getSubscriptionsHelper()).thenReturn(
            new SubscriptionsHelper());
    when(subscriptionDetailsService.loadSubscriptionStatus(anyLong()))
            .thenReturn(new Response(SubscriptionStatus.DEACTIVATED));
    // when
    boolean result = ctrl.getSubscriptionsHelper()
            .validateSubscriptionStatus(model.getSubscription(),
                    subscriptionDetailsService);
    // then
    assertEquals(Boolean.TRUE, Boolean.valueOf(result));
}
/**
 * Every user-facing operation on an INVALID subscription must return the
 * "subscription not available" outcome.
 */
@Test
public void subscriptionDetailsOperations_subscriptionInvalid()
        throws Exception {
    // given
    model.getSubscription().setSubscriptionId("subscriptionId");
    ctrl.setSubscriptionsHelper(new SubscriptionsHelper());
    when(subscriptionDetailsService.loadSubscriptionStatus(anyLong()))
            .thenReturn(new Response(SubscriptionStatus.INVALID));
    // when
    String assignUsers = ctrl.assignUsers();
    String deassignUser = ctrl.deassignUser();
    String modify = ctrl.modify();
    String savePayment = ctrl.savePayment();
    String unsubscribe = ctrl.unsubscribe();
    String updateRoles = ctrl.updateRoles();
    // then
    assertEquals(OUTCOME_SUBSCRIPTION_NOT_AVAILABLE, assignUsers);
    assertEquals(OUTCOME_SUBSCRIPTION_NOT_AVAILABLE, deassignUser);
    assertEquals(OUTCOME_SUBSCRIPTION_NOT_AVAILABLE, modify);
    assertEquals(OUTCOME_SUBSCRIPTION_NOT_AVAILABLE, savePayment);
    assertEquals(OUTCOME_SUBSCRIPTION_NOT_AVAILABLE, unsubscribe);
    assertEquals(OUTCOME_SUBSCRIPTION_NOT_AVAILABLE, updateRoles);
}

/**
 * When the unsubscribe call is not executed immediately (trigger
 * approval pending), the controller must report "needs approval".
 */
@Test
public void subscriptionDetailsOperations_subscriptionNeedApproval()
        throws Exception {
    // given
    model.getSubscription().setSubscriptionId("subscriptionId");
    when(
            Boolean.valueOf(subscriptionService
                    .unsubscribeFromService(anyString()))).thenReturn(
            Boolean.FALSE);
    SessionService sessionService = mock(SessionService.class);
    ctrl.setSessionService(sessionService);
    // when
    String unsubscribe = ctrl.unsubscribe();
    // then
    assertEquals(OUTCOME_SUBSCRIPTION_NEED_APPROVAL, unsubscribe);
}
/** assignUsers on an accessible subscription validates status once. */
@Test
public void assignUsers_subscriptionAccessible() throws Exception {
    // when
    String result = ctrl.assignUsers();
    // then
    assertEquals(BACK, result);
    verify(subscriptionsHelper, times(1)).validateSubscriptionStatus(
            any(VOSubscriptionDetails.class),
            any(SubscriptionDetailsService.class));
}

/** deassignUser on an accessible subscription validates status once. */
@Test
public void deassignUser_subscriptionAccessible() throws Exception {
    // given
    model.getSubscription().setSubscriptionId("subscriptionId");
    // when
    String result = ctrl.deassignUser();
    // then
    assertEquals(OUTCOME_DEASSIGNED_USER_OR_ERROR, result);
    verify(subscriptionsHelper, times(1)).validateSubscriptionStatus(
            model.getSubscription(), subscriptionDetailsService);
}

/** savePayment succeeds and validates status once. */
@Test
public void savePayment_subscriptionAccessible() throws Exception {
    // given
    when(
            subscriptionService.modifySubscriptionPaymentData(
                    any(VOSubscription.class), any(VOBillingContact.class),
                    any(VOPaymentInfo.class))).thenReturn(
            new VOSubscriptionDetails());
    // when
    String result = ctrl.savePayment();
    // then
    assertEquals(OUTCOME_SUCCESS, result);
    verify(subscriptionsHelper, times(1)).validateSubscriptionStatus(
            any(VOSubscriptionDetails.class),
            any(SubscriptionDetailsService.class));
}

/**
 * updateRoles returns BACK, clears the subscription from the model, and
 * validates status once.
 */
@Test
public void updateRoles_subscriptionAccessible() throws Exception {
    // given
    List<User> users = new ArrayList<>();
    model.setAssignedUsers(users);
    // when
    String result = ctrl.updateRoles();
    // then
    assertEquals(BACK, result);
    assertEquals(null, model.getSubscription());
    verify(subscriptionsHelper, times(1)).validateSubscriptionStatus(
            any(VOSubscriptionDetails.class),
            any(SubscriptionDetailsService.class));
}
/** Loading the iframe turns on the external configurator flag. */
@Test
public void actionLoadIframe_setShowExternalConfigurator() throws Exception {
    // given
    List<User> users = new ArrayList<>();
    model.setAssignedUsers(users);
    doReturn(JSON_STRING).when(jsonConverter)
            .getServiceParametersAsJsonString(
                    anyListOf(PricedParameterRow.class), anyBoolean(),
                    anyBoolean());
    // when
    ctrl.actionLoadIframe();
    // then
    assertTrue(model.getShowExternalConfigurator());
}

/**
 * A successful JSON conversion loads the iframe, shows the configurator,
 * and stores the parameter JSON in the model.
 */
@Test
public void actionLoadIframe_success() throws Exception {
    // given
    List<User> users = new ArrayList<>();
    model.setAssignedUsers(users);
    doReturn(JSON_STRING).when(jsonConverter)
            .getServiceParametersAsJsonString(
                    anyListOf(PricedParameterRow.class), anyBoolean(),
                    anyBoolean());
    // when
    String result = ctrl.actionLoadIframe();
    // then
    assertNull(result);
    assertTrue(model.isLoadIframe());
    assertFalse(model.getHideExternalConfigurator());
    assertEquals(JSON_STRING, model.getServiceParametersAsJSONString());
}

/**
 * Single quotes in the JSON payload must be escaped for embedding.
 * NOTE(review): "Sinle" in the method name is a typo for "Single";
 * renaming is left to a dedicated refactoring.
 */
@Test
public void actionLoadIframe_successWithSinleQuotation() throws Exception {
    // given
    List<User> users = new ArrayList<>();
    model.setAssignedUsers(users);
    doReturn(JSON_STRING_WITH_QUOTATION).when(jsonConverter)
            .getServiceParametersAsJsonString(
                    anyListOf(PricedParameterRow.class), anyBoolean(),
                    anyBoolean());
    // when
    ctrl.actionLoadIframe();
    // then: the single quote is backslash-escaped
    assertEquals("someJson\\'String",
            model.getServiceParametersAsJSONString());
}

/**
 * A failed JSON conversion (null) yields the error outcome, hides the
 * configurator, and leaves the model without JSON.
 */
@Test
public void actionLoadIframe_jsonError() throws Exception {
    // given
    List<User> users = new ArrayList<>();
    model.setAssignedUsers(users);
    doReturn(null).when(jsonConverter)
            .getServiceParametersAsJsonString(
                    anyListOf(PricedParameterRow.class), anyBoolean(),
                    anyBoolean());
    // when
    String result = ctrl.actionLoadIframe();
    // then
    assertEquals(SubscriptionDetailsCtrlConstants.OUTCOME_ERROR, result);
    assertFalse(model.isLoadIframe());
    assertTrue(model.getHideExternalConfigurator());
    assertNull(model.getServiceParametersAsJSONString());
}
/** ACTIVE status with no pending trigger: all tabs and unsubscribe enabled. */
@Test
public void setStateWarningAndTabDisabled_ACTIVE() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    doReturn(Boolean.FALSE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertFalse(model.isUsersTabDisabled());
    assertFalse(model.isCfgTabDisabled());
    assertFalse(model.isPayTabDisabled());
    assertFalse(model.isUpgTabDisabled());
    assertEquals(Boolean.FALSE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
}

/** EXPIRED: only the upgrade tab stays enabled. */
@Test
public void setStateWarningAndTabDisabled_EXPIRED() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.EXPIRED);
    doReturn(Boolean.FALSE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertTrue(model.isUsersTabDisabled());
    assertTrue(model.isCfgTabDisabled());
    assertTrue(model.isPayTabDisabled());
    assertFalse(model.isUpgTabDisabled());
    assertEquals(Boolean.FALSE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
}

/** PENDING: warning shown, only the payment tab stays enabled. */
@Test
public void setStateWarningAndTabDisabled_PENDING() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.PENDING);
    doReturn(Boolean.FALSE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertTrue(model.isShowStateWarning());
    assertTrue(model.isUsersTabDisabled());
    assertTrue(model.isCfgTabDisabled());
    assertFalse(model.isPayTabDisabled());
    assertTrue(model.isUpgTabDisabled());
    assertEquals(Boolean.FALSE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
    assertEquals(JSFUtils.getText(
            SubscriptionDetailsCtrlConstants.SUBSCRIPTION_STATE_WARNING,
            new Object[] { "pending" }), model.getStateWarning());
}

/** PENDING_UPD: like PENDING but with the "pending update" warning text. */
@Test
public void setStateWarningAndTabDisabled_PENDING_UPD() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.PENDING_UPD);
    doReturn(Boolean.FALSE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertTrue(model.isShowStateWarning());
    assertTrue(model.isUsersTabDisabled());
    assertTrue(model.isCfgTabDisabled());
    assertFalse(model.isPayTabDisabled());
    assertTrue(model.isUpgTabDisabled());
    assertEquals(Boolean.FALSE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
    assertEquals(JSFUtils.getText(
            SubscriptionDetailsCtrlConstants.SUBSCRIPTION_STATE_WARNING,
            new Object[] { "pending update" }), model.getStateWarning());
}

/** SUSPENDED: payment and upgrade tabs stay enabled. */
@Test
public void setStateWarningAndTabDisabled_SUSPENDED() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.SUSPENDED);
    doReturn(Boolean.FALSE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertTrue(model.isUsersTabDisabled());
    assertTrue(model.isCfgTabDisabled());
    assertFalse(model.isPayTabDisabled());
    assertFalse(model.isUpgTabDisabled());
    assertEquals(Boolean.FALSE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
}

/** SUSPENDED_UPD: warning shown with "suspended update" text. */
@Test
public void setStateWarningAndTabDisabled_SUSPENDED_UPD() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.SUSPENDED_UPD);
    doReturn(Boolean.FALSE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertTrue(model.isShowStateWarning());
    assertTrue(model.isUsersTabDisabled());
    assertTrue(model.isCfgTabDisabled());
    assertFalse(model.isPayTabDisabled());
    assertTrue(model.isUpgTabDisabled());
    assertEquals(Boolean.FALSE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
    assertEquals(JSFUtils.getText(
            SubscriptionDetailsCtrlConstants.SUBSCRIPTION_STATE_WARNING,
            new Object[] { "suspended update" }), model.getStateWarning());
}
/**
 * With a pending trigger process, everything is disabled regardless of
 * status -- ACTIVE variant.
 */
@Test
public void setStateWarningAndTabDisabled_waitingForReply_true_ACTIVE() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    doReturn(Boolean.TRUE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then: all tabs and the unsubscribe button are locked
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUsersTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isCfgTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isPayTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUpgTabDisabled()));
    assertEquals(Boolean.TRUE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
}

/** EXPIRED variant: also fully locked while waiting for approval. */
@Test
public void setStateWarningAndTabDisabled_waitingForReply_true_EXPIRED() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.EXPIRED);
    doReturn(Boolean.TRUE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUsersTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isCfgTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isPayTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUpgTabDisabled()));
    assertEquals(Boolean.TRUE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
}

/** PENDING variant: locked, and the "pending" warning is shown. */
@Test
public void setStateWarningAndTabDisabled_waitingForReply_true_PENDING() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.PENDING);
    doReturn(Boolean.TRUE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isShowStateWarning()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUsersTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isCfgTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isPayTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUpgTabDisabled()));
    assertEquals(Boolean.TRUE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
    assertEquals(JSFUtils.getText(
            SubscriptionDetailsCtrlConstants.SUBSCRIPTION_STATE_WARNING,
            new Object[] { "pending" }), model.getStateWarning());
}

/** PENDING_UPD variant: locked, "pending update" warning shown. */
@Test
public void setStateWarningAndTabDisabled_waitingForReply_true_PENDING_UPD() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.PENDING_UPD);
    doReturn(Boolean.TRUE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isShowStateWarning()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUsersTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isCfgTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isPayTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUpgTabDisabled()));
    assertEquals(Boolean.TRUE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
    assertEquals(JSFUtils.getText(
            SubscriptionDetailsCtrlConstants.SUBSCRIPTION_STATE_WARNING,
            new Object[] { "pending update" }), model.getStateWarning());
}

/** SUSPENDED variant: fully locked while waiting for approval. */
@Test
public void setStateWarningAndTabDisabled_waitingForReply_true_SUSPENDED() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.SUSPENDED);
    doReturn(Boolean.TRUE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUsersTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isCfgTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isPayTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUpgTabDisabled()));
    assertEquals(Boolean.TRUE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
}

/** SUSPENDED_UPD variant: locked, "suspended update" warning shown. */
@Test
public void setStateWarningAndTabDisabled_waitingForReply_true_SUSPENDED_UPD() {
    // given
    POSubscriptionDetails subscriptionDetail = givenPOSubscriptionDetails();
    subscriptionDetail.setStatus(SubscriptionStatus.SUSPENDED_UPD);
    doReturn(Boolean.TRUE).when(ctrl).checkTriggerProcessForSubscription(
            subscriptionDetail.getSubscription());
    // when
    ctrl.setStateWarningAndTabDisabled(subscriptionDetail);
    // then
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isShowStateWarning()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUsersTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isCfgTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isPayTabDisabled()));
    assertEquals(Boolean.TRUE, Boolean.valueOf(model.isUpgTabDisabled()));
    assertEquals(Boolean.TRUE,
            Boolean.valueOf(model.isUnsubscribeButtonDisabled()));
    assertEquals(JSFUtils.getText(
            SubscriptionDetailsCtrlConstants.SUBSCRIPTION_STATE_WARNING,
            new Object[] { "suspended update" }), model.getStateWarning());
}
/**
 * A trigger process in the waiting-for-approval list must be reported for
 * the subscription.
 */
@Test
public void checkTriggerProcessForSubscription_true() {
    // given one process pending approval
    waitingForApprovalTriggerProcesses.add(new VOTriggerProcess());
    VOSubscriptionDetails details = new VOSubscriptionDetails();
    // when
    boolean waiting = ctrl.checkTriggerProcessForSubscription(details);
    // then
    assertTrue(waiting);
}
/**
 * Bug 10504: reloading a subscription that has a stored owner must clear
 * both the no-owner flag and the dirty-config flag.
 */
@Test
public void reload_bug10504() throws Exception {
    // given a subscription owned by "owner1"
    VOSubscriptionDetails subscription = givenSubscription(!SUBSCRIPTION_FREE);
    subscription.setOwnerId("owner1");
    subscription.setSubscriptionId("subscription_id");
    subscription.setKey(10L);
    POSubscriptionDetails subscriptionDetails = givenPOSubscriptionDetails();
    subscriptionDetails.setSubscription(subscription);
    List<VOUserDetails> userList = new ArrayList<>();
    userList.add(prepareVOUserDetails_SubMgr("owner1", true));
    subscriptionDetails.setUsersForOrganization(userList);
    when(
            subscriptionDetailsService.getSubscriptionDetails(eq(10L),
                    anyString())).thenReturn(
            new Response(subscriptionDetails));
    when(ctrl.ui.getViewLocale()).thenReturn(Locale.ENGLISH);
    setSubscriptionOwners(true, false);
    model.setSubscription(subscription);
    // stale state that the reload must correct
    model.setNoSubscriptionOwner(true);
    when(ctrl.getSubscriptionUnitCtrl().getModel()).thenReturn(
            subscriptionUnitModel);
    // when
    ctrl.reload();
    // then
    assertEquals(Boolean.FALSE,
            Boolean.valueOf(model.isNoSubscriptionOwner()));
    assertEquals(Boolean.FALSE, Boolean.valueOf(model.isConfigDirty()));
}
/**
 * Bug 10833: modifying parameters twice in a row must apply both changes
 * and re-encrypt priced parameter values each time.
 */
@Test
public void changeTwiceParameters_bug10833() throws Exception {
    // given: first modification
    modifySubscription(SubscriptionStatus.ACTIVE, 1, "10");
    PricedParameterRow param = decorateWithPricedParameterRow();
    // when
    ctrl.modify();
    // then
    assertParametersModified(1, "10");
    // given: second modification on the same subscription
    modifySubscription(SubscriptionStatus.ACTIVE, 2, "20");
    // when
    ctrl.modify();
    // then
    assertParametersModified(2, "20");
    verify(param).rewriteEncryptedValues();
}
/**
 * An administrator viewing a subscribable service with no usable payment
 * type gets the "no payment type enabled" message key.
 */
@SuppressWarnings("boxing")
@Test
public void getNoPaymentTypeAvailableMSG_Admin() {
    // given
    Service service = new Service(new VOService());
    service.setSubscribable(true);
    model.setService(service);
    when(userBean.isLoggedInAndAdmin()).thenReturn(Boolean.TRUE);
    // when
    String result = ctrl.getNoPaymentTypeAvailableMSG();
    // then -- expected value first (JUnit convention), so a failure
    // message reports expected/actual the right way round
    assertEquals(
            SubscriptionDetailsCtrlConstants.MESSAGE_NO_PAYMENT_TYPE_ENABLED,
            result);
}
/**
 * A non-admin user in the same situation gets the "no payment type
 * available" message key instead.
 */
@SuppressWarnings("boxing")
@Test
public void getNoPaymentTypeAvailableMSG_NonAdmin() {
    // given
    Service service = new Service(new VOService());
    service.setSubscribable(true);
    model.setService(service);
    when(userBean.isLoggedInAndAdmin()).thenReturn(Boolean.FALSE);
    // when
    String result = ctrl.getNoPaymentTypeAvailableMSG();
    // then -- expected value first (JUnit convention), so a failure
    // message reports expected/actual the right way round
    assertEquals(
            SubscriptionDetailsCtrlConstants.MESSAGE_NO_PAYMENT_TYPE_AVAILABLE,
            result);
}
/**
 * Attaches a mocked price model carrying role-specific prices to the
 * given (mocked) subscription.
 */
private void mockPriceModelWithRolePrices(
        VOSubscriptionDetails voSubscription) {
    VOPriceModel pm = mock(VOPriceModel.class);
    doReturn(pm).when(voSubscription).getPriceModel();
    mockRolePrices(pm);
}
/**
 * Stubs the given price model so it reports five role-specific user
 * prices, each priced at one.
 */
private void mockRolePrices(VOPriceModel pm) {
    List<VOPricedRole> pricedRoles = new ArrayList<>();
    int count = 5;
    while (pricedRoles.size() < count) {
        VOPricedRole pricedRole = new VOPricedRole();
        pricedRole.setRole(new VORoleDefinition());
        pricedRole.setPricePerUser(BigDecimal.ONE);
        pricedRoles.add(pricedRole);
    }
    doReturn(pricedRoles).when(pm).getRoleSpecificUserPrices();
}
/**
 * Initializing the price model for a subscription with role prices must
 * populate a non-empty role-specific price list on the model.
 */
@SuppressWarnings("boxing")
@Test
public void initializePriceModel_Subscription() {
    // given a subscription whose price model carries role prices
    VOSubscriptionDetails voSubscription = mock(VOSubscriptionDetails.class);
    mockPriceModelWithRolePrices(voSubscription);
    // when
    ctrl.initializePriceModelForSubscription(voSubscription);
    // then
    assertNotNull(model.getRoleSpecificPrices());
    assertFalse(model.getRoleSpecificPrices().isEmpty());
}
/**
 * A unit administrator owner without a selected unit triggers the
 * "unit not selected" warning.
 */
@Test
public void shouldOwnerWarningBeShownTest_UnitAdministrator() {
    // given
    User owner = prepareSubOwnerWithRole("owner", true,
            UserRoleType.UNIT_ADMINISTRATOR);
    model.setSelectedOwner(owner);
    model.setSelectedOwnerName("owner");
    // when
    boolean showOwnerWarning = ctrl.shouldOwnerWarningBeShown();
    // then
    assertTrue(showOwnerWarning);
    verify(ctrl).setOwnerWarningMessage(
            BaseBean.WARNING_UNIT_NOT_SELECTED_UNIT_ADMIN,
            new Object[] { "owner" });
}

/**
 * A subscription-manager owner of a unit-assigned subscription triggers
 * the "owner is subscription manager" warning.
 */
@Test
public void shouldOwnerWarningBeShownTest_SubscriptionManager() {
    // given
    User owner = prepareSubOwnerWithRole("owner", true,
            UserRoleType.SUBSCRIPTION_MANAGER);
    model.setSelectedOwner(owner);
    model.setSelectedOwnerName("owner");
    model.getSubscription().setUnitName("unit1");
    model.getSubscription().setUnitKey(1L);
    // when
    boolean showOwnerWarning = ctrl.shouldOwnerWarningBeShown();
    // then
    assertTrue(showOwnerWarning);
    verify(ctrl).setOwnerWarningMessage(BaseBean.WARNING_OWNER_IS_SUB_MAN,
            new Object[] { "unit1", "owner" });
}

/** An organization admin owner never triggers a warning. */
@Test
public void shouldOwnerWarningBeShownTest_OrgAdmin() {
    // given
    User owner = prepareSubOwnerWithRole("owner", true,
            UserRoleType.ORGANIZATION_ADMIN);
    model.setSelectedOwner(owner);
    // when
    boolean showOwnerWarning = ctrl.shouldOwnerWarningBeShown();
    // then
    assertFalse(showOwnerWarning);
}

/**
 * A unit admin who administers the subscription's own unit triggers no
 * warning.
 */
@Test
public void shouldOwnerWarningBeShownTest_UnitAdminOfSameUnit() {
    // given
    User owner = prepareSubOwnerWithRole("owner", true,
            UserRoleType.UNIT_ADMINISTRATOR);
    model.setSelectedOwner(owner);
    model.getSubscription().setUnitName("unit1");
    model.getSubscription().setUnitKey(1L);
    List<POUserGroup> usergroups = new ArrayList<>();
    usergroups.add(prepareUserGroup(1L, "unit1"));
    when(
            userGroupService.getUserGroupsForUserWithRole(anyLong(),
                    anyLong())).thenReturn(usergroups);
    // when
    boolean showOwnerWarning = ctrl.shouldOwnerWarningBeShown();
    // then
    assertFalse(showOwnerWarning);
}

/**
 * A unit admin of a different unit triggers the "owner not a unit admin"
 * warning for the subscription's unit.
 */
@Test
public void shouldOwnerWarningBeShownTest_UnitAdminOfAnotherUnit() {
    // given: owner administers unit1, subscription belongs to unit2
    User owner = prepareSubOwnerWithRole("owner", true,
            UserRoleType.UNIT_ADMINISTRATOR);
    model.setSelectedOwner(owner);
    model.setSelectedOwnerName("owner");
    model.getSubscription().setUnitName("unit2");
    model.getSubscription().setUnitKey(2L);
    List<POUserGroup> usergroups = new ArrayList<>();
    usergroups.add(prepareUserGroup(1L, "unit1"));
    when(
            userGroupService.getUserGroupsForUserWithRole(anyLong(),
                    anyLong())).thenReturn(usergroups);
    // when
    boolean showOwnerWarning = ctrl.shouldOwnerWarningBeShown();
    // then
    assertTrue(showOwnerWarning);
    verify(ctrl).setOwnerWarningMessage(
            BaseBean.WARNING_OWNER_NOT_A_UNIT_ADMIN,
            new Object[] { "unit2", "owner" });
}
/**
 * Initializing subscription owners from the operator service succeeds
 * (null outcome) when assignment is allowed.
 */
@Test
public void testInitializeSubscriptionOwners() {
    // given
    UserBean userBean = new UserBean();
    userBean = spy(userBean);
    ctrl.setUserBean(userBean);
    OrganizationBean organizationBean = mock(OrganizationBean.class);
    VOOrganization org = new VOOrganization();
    org.setKey(1L);
    when(organizationBean.getOrganization()).thenReturn(org);
    userBean.setOrganizationBean(organizationBean);
    operatorService = mock(OperatorService.class);
    ctrl.setOperatorService(operatorService);
    List<User> users = new ArrayList<>();
    List<VOUserDetails> voUsers = new ArrayList<>();
    VOUserDetails voUserDetails = new VOUserDetails();
    voUserDetails.setUserRoles(Sets
            .newHashSet(UserRoleType.SUBSCRIPTION_MANAGER));
    voUsers.add(voUserDetails);
    User user = new User(voUserDetails);
    users.add(user);
    model = spy(model);
    ctrl.setModel(model);
    model.setSubscriptionOwners(new ArrayList<User>());
    when(
            ctrl.getOperatorService().getSubscriptionOwnersForAssignment(
                    anyLong())).thenReturn(voUsers);
    // when
    String result = ctrl.initializeSubscriptionOwners();
    // then
    // NOTE(review): this assertion is trivially true -- "user" was added to
    // the local "users" list above and the list is never touched by ctrl.
    // It should probably assert against model.getSubscriptionOwners().
    assertTrue(users.contains(user));
    assertEquals(null, result);
}
@Test
public void testInitializeSubscriptionOwnersNotAllowed() {
    // given: the model already contains a subscription owner, so the
    // assignment dialog must not be opened again
    UserBean spiedUserBean = spy(new UserBean());
    ctrl.setUserBean(spiedUserBean);
    OrganizationBean orgBean = mock(OrganizationBean.class);
    VOOrganization organization = new VOOrganization();
    organization.setKey(1L);
    when(orgBean.getOrganization()).thenReturn(organization);
    spiedUserBean.setOrganizationBean(orgBean);
    operatorService = mock(OperatorService.class);
    ctrl.setOperatorService(operatorService);
    List<User> existingOwners = new ArrayList<>();
    existingOwners.add(mock(User.class));
    model.setSubscriptionOwners(existingOwners);
    // when
    String outcome = ctrl.initializeSubscriptionOwners();
    // then: the controller signals that the modal dialog must stay closed
    assertEquals("dontOpenModalDialog", outcome);
}
@Test
public void testSetPopupTargetAssignUsers_initializeUnassignedUsers() {
    // given: the model holds a stale unassigned-user list; the operator
    // service reports a different user as currently unassigned
    model = spy(model);
    VOUserDetails freshDetails = new VOUserDetails(123456L, 654321);
    VOUserDetails staleDetails = new VOUserDetails(123457L, 654322);
    List<VOUserDetails> freshVOs = new ArrayList<>();
    freshVOs.add(freshDetails);
    User staleUser = new User(staleDetails);
    List<User> staleUsers = new ArrayList<>();
    staleUsers.add(staleUser);
    ctrl.setModel(model);
    model.setUnassignedUsers(staleUsers);
    OrganizationBean orgBean = mock(OrganizationBean.class);
    VOOrganization organization = mock(VOOrganization.class);
    operatorService = mock(OperatorService.class);
    userBean.setOrganizationBean(orgBean);
    ctrl.setOperatorService(operatorService);
    when(userBean.getOrganizationBean()).thenReturn(orgBean);
    when(orgBean.getOrganization()).thenReturn(organization);
    when(organization.getKey()).thenReturn(10000L);
    when(model.getCurrentSubscriptionKey()).thenReturn(20000L);
    when(operatorService.getUnassignedUsersByOrg(anyLong(), anyLong()))
            .thenReturn(freshVOs);
    // when
    ctrl.setPopupTargetAssignUsers();
    // then: the stale entry was replaced by the freshly loaded user
    boolean freshUserPresent = false;
    for (User candidate : model.getUnassignedUsers()) {
        if (candidate.getVOUserDetails().equals(freshDetails)) {
            freshUserPresent = true;
        }
    }
    assertTrue(freshUserPresent);
    assertFalse(model.getUnassignedUsers().contains(staleUser));
}
/**
 * Creates a {@link User} test fixture that can act as a subscription owner.
 *
 * @param userId the user id to set on the underlying value object
 * @param isSelected whether the owner is pre-selected
 * @param userRole the single role to assign to the user
 * @return the prepared user
 */
private User prepareSubOwnerWithRole(String userId, boolean isSelected,
        UserRoleType userRole) {
    VOUserDetails details = new VOUserDetails();
    details.setUserId(userId);
    Set<UserRoleType> roles = new HashSet<>();
    roles.add(userRole);
    details.setUserRoles(roles);
    User subscriptionOwner = new User(details);
    subscriptionOwner.setFirstName("FirstName");
    subscriptionOwner.setLastName("LastName");
    subscriptionOwner.setOwnerSelected(isSelected);
    return subscriptionOwner;
}
/**
 * Builds a {@link POUserGroup} test fixture.
 *
 * @param groupKey the technical key to assign to the group
 * @param groupName the name to assign to the group
 * @return a new group carrying the given key and name
 */
private POUserGroup prepareUserGroup(long groupKey, String groupName) {
    POUserGroup poUserGroup = new POUserGroup();
    // Use the supplied arguments; the previous version ignored both
    // parameters and always produced key 1L / name "unit1". All existing
    // callers pass exactly (1L, "unit1"), so behavior is unchanged for them.
    poUserGroup.setKey(groupKey);
    poUserGroup.setGroupName(groupName);
    return poUserGroup;
}
} |
package org.openoffice;
import share.LogWriter;
import stats.InternalLogWriter;
import lib.TestParameters;
import util.DynamicClassLoader;
import base.TestBase;
import helper.AppProvider;
import helper.ClParser;
import helper.CfgParser;
import com.sun.star.beans.XPropertyAccess;
import com.sun.star.beans.PropertyValue;
import com.sun.star.task.XJob;
import com.sun.star.uno.XInterface;
import com.sun.star.comp.loader.FactoryHelper;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.lang.XServiceInfo;
import com.sun.star.lang.XSingleServiceFactory;
import com.sun.star.lang.XTypeProvider;
import com.sun.star.uno.Type;
import com.sun.star.registry.XRegistryKey;
import com.sun.star.beans.NamedValue;
import java.util.Vector;
/**
* The main class, will call ClParser and CfgParser to <br>
* fill the TestParameters.<br>
* Will then call the appropriate Testbase to run the tests.
*/
public class RunnerService implements XJob, XServiceInfo,
XTypeProvider, XPropertyAccess {
static public final String __serviceName = "org.openoffice.Runner";
static public final String __implName = "org.openoffice.RunnerService";
static private XMultiServiceFactory xMSF = null;
/**
* ct'tor
* Construct an own office provider for tests
*/
public RunnerService(XMultiServiceFactory xMSF) {
}
public Object execute(NamedValue[] args) {
// construct valid arguments from the given stuff
int arg_length=args.length;
String[] arguments = new String[arg_length*2];
for ( int i=0; i< arg_length; i++ ) {
arguments[i*2] = args[i].Name;
Object o = args[i].Value;
arguments[i*2+1] = o.toString();
}
TestParameters param = new TestParameters();
DynamicClassLoader dcl = new DynamicClassLoader();
// take the standard log writer
String standardLogWriter = param.LogWriter;
String standardOutProducer = param.OutProducer;
ClParser cli = new ClParser();
//parse the arguments if an ini-parameter is given
String iniFile = cli.getIniPath(arguments);
//initialize cfgParser with ini-path
CfgParser ini = new CfgParser(iniFile);
//parse ConfigFile
ini.getIniParameters(param);
//parse the commandline arguments
cli.getCommandLineParameter(param,arguments);
// now compare the standard log writer with the parameters:
// if we have a new one, use the new, else use the internal
// log writer
if (((String)param.get("LogWriter")).equals(standardLogWriter))
param.put("LogWriter", "stats.InternalLogWriter");
if (((String)param.get("OutProducer")).equals(standardOutProducer))
param.put("OutProducer", "stats.InternalLogWriter");
LogWriter log = (LogWriter) dcl.getInstance(
(String)param.get("LogWriter"));
param.put("ServiceFactory", xMSF);
param.ServiceFactory = xMSF; //(XMultiServiceFactory)
// appProvider.getManager(param);
log.println("TestJob: "+param.get("TestJob"));
TestBase toExecute = (TestBase)dcl.getInstance("base.java_fat_service");
boolean worked = toExecute.executeTest(param);
if (!worked)
log.println("Test did not execute correctly.");
String returnString = "";
if (log instanceof InternalLogWriter)
returnString = ((InternalLogWriter)log).getLog();
return returnString;
}
/**
* This function provides the service name
* @return the service name
*/
public String getServiceName() {
return __serviceName;
}
/**
* Get all implemented types of this class.
* @return An array of implemented interface types.
* @see com.sun.star.lang.XTypeProvider
*/
public Type[] getTypes() {
Type[] type = new Type[5];
type[0] = new Type(XInterface.class);
type[1] = new Type(XTypeProvider.class);
type[2] = new Type(XJob.class);
type[3] = new Type(XServiceInfo.class);
type[4] = new Type(XPropertyAccess.class);
return type;
}
/**
* Get the implementation id.
* @return An empty implementation id.
* @see com.sun.star.lang.XTypeProvider
*/
public byte[] getImplementationId() {
return new byte[0];
}
/**
* Function for reading the implementation name.
*
* @return the implementation name
* @see com.sun.star.lang.XServiceInfo
*/
public String getImplementationName() {
return __implName;
}
/**
* Does the implementation support this service?
*
* @param serviceName The name of the service in question
* @return true, if service is supported, false otherwise
* @see com.sun.star.lang.XServiceInfo
*/
public boolean supportsService(String serviceName) {
if(serviceName.equals(__serviceName))
return true;
return false;
}
/**
* Function for reading all supported services
*
* @return An aaray with all supported service names
* @see com.sun.star.lang.XServiceInfo
*/
public String[] getSupportedServiceNames() {
String[] supServiceNames = {__serviceName};
return supServiceNames;
}
/**
* Return all valid testcases from the object descriptions
* @return The valid testcases as property values
*/
public PropertyValue[] getPropertyValues() {
PropertyValue[] pVal = null;
java.net.URL url = this.getClass().getResource("/objdsc");
if (url == null) {
pVal = new PropertyValue[1];
pVal[0] = new PropertyValue();
pVal[0].Name = "Error";
pVal[0].Value = "OOoRunner.jar file doesn't contain object " +
"descriptions: don't know what to test.";
return pVal;
}
Vector v = new Vector(600);
try {
// open connection to Jar
java.net.JarURLConnection con =
(java.net.JarURLConnection)url.openConnection();
// get Jar file from connection
java.util.jar.JarFile f = con.getJarFile();
// Enumerate over all entries
java.util.Enumeration aEnum = f.entries();
while (aEnum.hasMoreElements()) {
String entry = aEnum.nextElement().toString();
if (entry.endsWith(".csv")) {
String module = null;
String object = null;
int startIndex = entry.indexOf("objdsc/") + 7;
int endIndex = entry.lastIndexOf('/');
/* int endIndex = entry.indexOf('.');
module = entry.substring(startIndex, endIndex);
startIndex = 0;
endIndex = module.lastIndexOf('/'); */
module = entry.substring(startIndex, endIndex);
// special cases
if (entry.indexOf("/file/") != -1 || entry.indexOf("/xmloff/") != -1) {
endIndex = entry.indexOf(".csv");
object = entry.substring(0, endIndex);
endIndex = object.lastIndexOf('.');
startIndex = object.indexOf('.');
while (startIndex != endIndex) {
object = object.substring(startIndex+1);
startIndex = object.indexOf('.');
endIndex = object.lastIndexOf('.');
}
}
/* else if (entry.indexOf("/xmloff/") != -1) {
endIndex = entry.indexOf(".csv");
object = entry.substring(0, endIndex);
endIndex = entry.lastIndexOf('.');
while (object.indexOf('.') != endIndex) {
object = object.substring(object.indexOf('.')+1);
}
} */
else {
startIndex = 0;
endIndex = entry.indexOf(".csv");
object = entry.substring(startIndex, endIndex);
startIndex = object.lastIndexOf('.');
object = object.substring(startIndex+1);
}
v.add(module+"."+object);
}
}
}
catch(java.io.IOException e) {
e.printStackTrace();
}
int size = v.size();
String[] sTestCases = new String[size];
v.toArray(sTestCases);
java.util.Arrays.sort(sTestCases);
pVal = new PropertyValue[size];
for (int i=0; i<size; i++) {
pVal[i] = new PropertyValue();
pVal[i].Name = "TestCase"+i;
pVal[i].Value = sTestCases[i];
}
return pVal;
}
/**
*
* Gives a factory for creating the service.
* This method is called by the <code>JavaLoader</code>
* <p>
* @return returns a <code>XSingleServiceFactory</code> for creating the component
* @param implName the name of the implementation for which a service is desired
* @param multiFactory the service manager to be used if needed
* @param regKey the registryKey
* @see com.sun.star.comp.loader.JavaLoader
*/
public static XSingleServiceFactory __getServiceFactory(String implName,
XMultiServiceFactory multiFactory, XRegistryKey regKey)
{
XSingleServiceFactory xSingleServiceFactory = null;
if (implName.equals(RunnerService.class.getName()))
xSingleServiceFactory = FactoryHelper.getServiceFactory(
RunnerService.class, __serviceName, multiFactory, regKey);
xMSF = multiFactory;
return xSingleServiceFactory;
}
/**
* Writes the service information into the given registry key.
* This method is called by the <code>JavaLoader</code>
* <p>
* @return returns true if the operation succeeded
* @param regKey the registryKey
* @see com.sun.star.comp.loader.JavaLoader
*/
public static boolean __writeRegistryServiceInfo(XRegistryKey regKey) {
return FactoryHelper.writeRegistryServiceInfo(RunnerService.class.getName(),
__serviceName, regKey);
}
/**
* empty: not needed here.
*/
public void setPropertyValues(PropertyValue[] propertyValue)
throws com.sun.star.beans.UnknownPropertyException,
com.sun.star.beans.PropertyVetoException,
com.sun.star.lang.IllegalArgumentException,
com.sun.star.lang.WrappedTargetException {
// empty implementation
}
} |
package net.leolink.android.twitter4a;
import net.leolink.android.twitter4a.utils.Constants;
import net.leolink.android.twitter4a.widget.LoginDialog;
import net.leolink.android.twitter4a.widget.Spinner;
import twitter4j.Twitter;
import twitter4j.TwitterException;
import twitter4j.TwitterFactory;
import twitter4j.User;
import twitter4j.auth.AccessToken;
import twitter4j.auth.RequestToken;
import twitter4j.conf.ConfigurationBuilder;
import android.app.Activity;
import android.net.Uri;
import android.os.AsyncTask;
import android.util.Log;
import android.view.Window;
import android.webkit.CookieManager;
public abstract class Twitter4A {
public final static String TAG = "twitter4a";
// twitter4j's objects
private Twitter mTwitter;
private RequestToken mTwitterRequestToken;
private AccessToken mTwitterAccessToken;
private User mTwitterUser;
// twitter4a's objects
private String mConsumerKey;
private String mConsumerSecret;
private Activity mContext;
private boolean isLoggedIn = false;
private boolean isLoggingIn = false;
// Constructor
public Twitter4A(Activity context, String consumerKey,
String consumerSecret) {
mContext = context;
mConsumerKey = consumerKey;
mConsumerSecret = consumerSecret;
}
public void login() {
if (!isLoggingIn) {
// run an AsyncTask to get authentication URL, then open login dialog
new AsyncTask<Void, String, Void>() {
@Override
protected Void doInBackground(Void... voids) {
ConfigurationBuilder builder = new ConfigurationBuilder();
builder.setOAuthConsumerKey(mConsumerKey);
builder.setOAuthConsumerSecret(mConsumerSecret);
twitter4j.conf.Configuration configuration = builder.build();
TwitterFactory factory = new TwitterFactory(configuration);
mTwitter = factory.getInstance();
try {
mTwitterRequestToken = mTwitter.getOAuthRequestToken(Constants.TWITTER_CALLBACK_PREFIX);
publishProgress(mTwitterRequestToken.getAuthenticationURL());
} catch (TwitterException e) {
e.printStackTrace();
}
return null;
}
@Override
protected void onProgressUpdate(String... values) {
// open LoginDialog to let user login to Twitter
if (!mContext.isFinishing())
new LoginDialog(mContext, Twitter4A.this, values[0]).show();
}
}.execute();
// Prevent calling login() function consecutively which leads to
// multiple LoginDialogs are opened at the same time
isLoggingIn = true;
}
}
// get data after login successfully
public void handleSuccessfulLogin(String uri) {
final Uri mUri = Uri.parse(uri);
final String verifier = mUri.getQueryParameter(Constants.URL_TWITTER_OAUTH_VERIFIER);
// Because this task need to using network which cannot be run on UI
// thread since Android 3.0 (maybe equivalent to API 11), so I need to
// use AsyncTask here!
new AsyncTask<Void, Void, Void>() {
private Spinner spinner;
protected void onPreExecute() {
spinner = new Spinner(mContext);
spinner.requestWindowFeature(Window.FEATURE_NO_TITLE);
spinner.setCancelable(true);
spinner.show();
}
@Override
protected Void doInBackground(Void... params) {
try {
mTwitterAccessToken = mTwitter.getOAuthAccessToken(mTwitterRequestToken, verifier);
mTwitterUser = mTwitter.showUser(mTwitterAccessToken.getUserId());
} catch (TwitterException e) {
// call loginFailedCallback
loginFailedCallback();
e.printStackTrace();
}
return null;
}
protected void onPostExecute(Void result) {
if (spinner.isShowing()) {
// dismiss the spinner
spinner.dismiss();
// if everything is okay, set isLoggedIn = true
isLoggedIn = true;
// call the callback
loginCallback();
} else { // if spinner is explicitly cancelled by user
// remove everything
mTwitter = null;
mTwitterAccessToken = null;
mTwitterRequestToken = null;
mTwitterUser = null;
// call loginFailedCallback
loginFailedCallback();
}
}
}.execute();
}
// logging out
public void logout() {
// remove all Twitter4J objects
mTwitter = null;
mTwitterAccessToken = null;
mTwitterRequestToken = null;
mTwitterUser = null;
isLoggedIn = false;
// inform programmers :)
Log.d(TAG, "Logged out successfully!");
// call the logout call back
this.logoutCallback();
}
// this method is called after login successfully
protected abstract void loginCallback();
// this method is called after logout successfully
protected abstract void logoutCallback();
// this method is called when login progress couldn't succeed for some reasons
public void loginFailedCallback() {
Log.e(TAG, "Login failed!");
}
public void setLoggingIn(boolean isLoggingIn) {
this.isLoggingIn = isLoggingIn;
}
// Get Twitter4J's objects -> For people who want to more than just a login
/**
* @return Twitter object of Twitter4J library
*/
public Twitter getTwitter4J() {
return mTwitter;
}
/**
* @return AccessToken object of Twitter4J library
*/
public AccessToken getTwitter4JAccessToken() {
return mTwitterAccessToken;
}
/**
* @return User object of Twitter4J library
*/
public User getTwitter4JUser() {
return mTwitterUser;
}
/**
* @return null if not logged in yet, otherwise return token of the current session
*/
public String getToken() {
if (mTwitterAccessToken != null)
return mTwitterAccessToken.getToken();
else
return null;
}
/**
* @return null if not logged in yet, otherwise return secret token of the current session
*/
public String getTokenSecret() {
if (mTwitterAccessToken != null)
return mTwitterAccessToken.getTokenSecret();
else
return null;
}
// Get basic user data -> For people who just want a login to get some basic data
/**
* @return return current login state
*/
public boolean isLoggedIn() {
return isLoggedIn;
}
/**
* @return userID of the current logged in user
*/
public long getUserID() {
if (mTwitterUser != null)
return mTwitterUser.getId();
else
return 0;
}
/**
* @return user name of the current logged in user
*/
public String getUsername() {
if (mTwitterUser != null)
return mTwitterUser.getName();
else
return null;
}
/**
* @return original profile picture URL of the user of the current logged in user
*/
public String getProfilePicURL() {
if (mTwitterUser != null)
return mTwitterUser.getOriginalProfileImageURL();
else
return null;
}
} |
package org.splevo.ui.jobs;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.emf.common.util.URI;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
import org.splevo.extraction.ExtractionService;
import org.splevo.project.SPLevoProject;
import de.uka.ipd.sdq.workflow.AbstractJob;
import de.uka.ipd.sdq.workflow.exceptions.JobFailedException;
import de.uka.ipd.sdq.workflow.exceptions.RollbackFailedException;
import de.uka.ipd.sdq.workflow.exceptions.UserCanceledException;
/**
* Job to extract a software model from an eclipse java project.
*/
public class ExtractionJob extends AbstractJob {
/** The splevo project to store the required data to. */
private SPLevoProject splevoProject;
/** Flag whether the leading or the integration project should be extracted. */
private boolean processLeading;
/**
* Constructor to create an extraction job with the required references.
*
* @param splevoProject
* The splevo project to get and store required information
* @param processLeading
* True/false wether this job is responsible for the leading implementation.
*/
public ExtractionJob(SPLevoProject splevoProject, boolean processLeading) {
this.splevoProject = splevoProject;
this.processLeading = processLeading;
}
/**
* Runs the long running operation.
*
* @param monitor
* the progress monitor
* @return the status of the job. This should be OK or CANCLED
*/
public IStatus run(IProgressMonitor monitor) {
monitor.beginTask("Software Model Extraction", 100);
List<String> projectNames = null;
String variantName = null;
if (processLeading) {
projectNames = splevoProject.getLeadingProjects();
variantName = splevoProject.getVariantNameLeading();
} else {
projectNames = splevoProject.getIntegrationProjects();
variantName = splevoProject.getVariantNameIntegration();
}
IJavaProject mainProject = null;
try {
mainProject = getJavaProject(projectNames.get(0));
} catch (CoreException ce) {
return Status.CANCEL_STATUS;
}
List<IJavaProject> additionalProjects = new ArrayList<IJavaProject>();
List<String> additionalProjectNames = new ArrayList<String>();
for (int i = 1; i < projectNames.size(); i++) {
try {
additionalProjects.add(getJavaProject(projectNames.get(i)));
additionalProjectNames.add(projectNames.get(i));
} catch (CoreException e) {
return Status.CANCEL_STATUS;
}
}
// prepare the target path
URI targetURI = buildTargetURI(variantName);
logger.info("Extraction target: " + targetURI);
logger.info("Main Project: " + mainProject.getElementName());
logger.info("Additional Projects: " + additionalProjectNames);
// check if the process was canceled
if (monitor.isCanceled()) {
return Status.CANCEL_STATUS;
}
// extract model
ExtractionService extractionService = new ExtractionService();
try {
monitor.subTask("Extract Model for project: " + variantName);
extractionService.extractProject(mainProject, additionalProjects, monitor, targetURI);
} catch (Exception e) {
e.printStackTrace();
return Status.CANCEL_STATUS;
}
monitor.subTask("Update SPLevo project information");
if (processLeading) {
splevoProject.setSourceModelPathLeading(targetURI.path());
} else {
splevoProject.setSourceModelPathIntegration(targetURI.path());
}
// finish run
monitor.done();
if (monitor.isCanceled()) {
return Status.CANCEL_STATUS;
}
return Status.OK_STATUS;
}
/**
* Get the JavaProject for a specific project name.
*
* @param projectName
* The name to search for.
* @return The Identified project if a java one is found. Null otherwise.
* @throws CoreException
* Identifies that the project's nature could not be checked.
*/
private IJavaProject getJavaProject(String projectName) throws CoreException {
IJavaProject javaProject = null;
IWorkspace workspace = ResourcesPlugin.getWorkspace();
IProject project = workspace.getRoot().getProject(projectName);
if (project.hasNature(JavaCore.NATURE_ID)) {
javaProject = JavaCore.create(project);
}
return javaProject;
}
/**
* Build the target uri for the model extraction.
*
* @param variantName
* The name of the variant to extract.
* @return The prepared URI.
*/
private URI buildTargetURI(String variantName) {
String basePath = getBasePath(splevoProject);
String targetPath = basePath + variantName + "/" + variantName + "_java2kdm.xmi";
URI targetURI = URI.createURI(targetPath);
return targetURI;
}
/**
* Build the base path for the target models.
*
* @param splevoProject
* The SPLevo project to interact with.
* @return The base path to store the extracted models at.
*/
private String getBasePath(SPLevoProject splevoProject) {
return splevoProject.getWorkspace() + "models/sourcemodels/";
}
@Override
public void execute(IProgressMonitor monitor) throws JobFailedException, UserCanceledException {
run(monitor);
}
@Override
public void rollback(IProgressMonitor monitor) throws RollbackFailedException {
// no rollback possible
}
/**
* Get the name of the extraction job. This depends on whether the leading or the integration
* job should be extracted.
*
* @return The name of the job.
*/
@Override
public String getName() {
if (processLeading) {
return "Model Extraction Job " + splevoProject.getVariantNameLeading();
} else {
return "Model Extraction Job " + splevoProject.getVariantNameIntegration();
}
}
} |
package edu.wustl.catissuecore.query;
import java.io.Serializable;
import javax.swing.tree.DefaultMutableTreeNode;
import edu.wustl.catissuecore.util.global.Constants;
import edu.wustl.catissuecore.vo.TreeNode;
/**
* TreeNodeData represents the node in the query result view tree.
* @author poornima_govindrao
*/
public class TreeNodeData implements TreeNode, Serializable
{
private String identifier;
private String objectName;
private String parentObjectIdentifier;
private String parentObjectName;
private String combinedParentIdentifier;
private String combinedParentObjectName;
/**
* Initializes an empty node.
*/
public TreeNodeData()
{
identifier = null;
objectName = null;
parentObjectIdentifier = null;
parentObjectName = null;
combinedParentIdentifier = null;
combinedParentObjectName = null;
}
/**
* Sets the systemIdentifier of the data this node represents.
* @param identifier the systemIdentifier.
* @see #getId()
*/
public void setIdentifier(String identifier)
{
this.identifier = identifier;
}
/**
* Returns the systemIdentifier of the data this node represents.
* @return the systemIdentifier of the data this node represents.
* @see #setId(long)
*/
public Object getIdentifier()
{
return identifier;
}
/**
* @return Returns the objectName.
*/
public String getObjectName() {
return objectName;
}
/**
* @param objectName The objectName to set.
*/
public void setObjectName(String objectName) {
this.objectName = objectName;
}
/**
* @return Returns the parentObjectName.
*/
public String getParentObjectName() {
return parentObjectName;
}
/**
* @param parentObjectName The parentObjectName to set.
*/
public void setParentObjectName(String parentObjectName) {
this.parentObjectName = parentObjectName;
}
/**
* @param parentIdentifier The parentIdentifier to set.
*/
public void setParentIdentifier(String parentIdentifier) {
this.parentObjectIdentifier = parentIdentifier;
}
public void initialiseRoot() {
this.setObjectName(Constants.ROOT);
}
public TreeNode getParentTreeNode()
{
TreeNodeData node = new TreeNodeData();
node.setIdentifier(this.combinedParentIdentifier);
node.setObjectName(this.getCombinedParentObjectName());
return node;
}
public boolean isChildOf(TreeNode treeNode)
{
TreeNodeData node = (TreeNodeData)treeNode;
if ((this.getCombinedParentIdentifier() != null) && (this.getParentIdentifier() != null))
{
return ((this.parentObjectIdentifier.equals(node.getIdentifier()) && this.parentObjectName.equals(node.getObjectName()))
&& (this.combinedParentIdentifier.equals(node.getCombinedParentIdentifier()) && this.combinedParentObjectName.equals(node.getCombinedParentObjectName())));
}
return (this.parentObjectIdentifier.equals(node.getIdentifier()) && this.parentObjectName.equals(node.getObjectName()));
}
public boolean hasEqualParents(TreeNode treeNode)
{
TreeNodeData node = (TreeNodeData) treeNode;
return this.getIdentifier().equals(node.getCombinedParentIdentifier());
}
public Object getParentIdentifier()
{
return this.parentObjectIdentifier;
}
/**
* @return Returns the combinedParentIdentifier.
*/
public String getCombinedParentIdentifier() {
return combinedParentIdentifier;
}
/**
* @param combinedParentIdentifier The combinedParentIdentifier to set.
*/
public void setCombinedParentIdentifier(String combinedParentIdentifier) {
this.combinedParentIdentifier = combinedParentIdentifier;
}
/**
* @return Returns the combinedParentObjectName.
*/
public String getCombinedParentObjectName() {
return combinedParentObjectName;
}
/**
* @param combinedParentObjectName The combinedParentObjectName to set.
*/
public void setCombinedParentObjectName(String combinedParentObjectName) {
this.combinedParentObjectName = combinedParentObjectName;
}
/* (non-Javadoc)
* @see edu.wustl.catissuecore.vo.TreeNode#isPresentIn(javax.swing.tree.DefaultMutableTreeNode)
*/
public boolean isPresentIn(DefaultMutableTreeNode parentNode)
{
for (int i = 0; i < parentNode.getChildCount(); i++)
{
DefaultMutableTreeNode childNode = (DefaultMutableTreeNode) parentNode.getChildAt(i);
TreeNodeData node = (TreeNodeData)childNode.getUserObject();
if (this.identifier.equals(node.getIdentifier()) )
{
return true;
}
}
return false;
}
public String toString()
{
String nodeName = this.objectName;
if (this.identifier != null)
nodeName = nodeName + ":" + this.identifier;
return nodeName;
}
} |
package io.spine.validate;
import com.google.protobuf.Message;
import com.google.protobuf.ProtocolMessageEnum;
import com.google.protobuf.Value;
import io.spine.annotation.Internal;
import io.spine.base.Error;
import io.spine.type.MessageClass;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
import static java.lang.System.lineSeparator;
/**
* Utility class for working with {@link ConstraintViolation}s.
*
* @author Alexander Yevsyukov
*/
public class ConstraintViolations {
private ConstraintViolations() {
// Prevent instantiation of this utility class.
}
/**
* Returns a formatted string using the format string and parameters from the violation.
*
* @param violation violation which contains the format string and
* arguments referenced by the format specifiers in it
* @return a formatted string
* @see String#format(String, Object...)
*/
public static String toText(ConstraintViolation violation) {
checkNotNull(violation);
String format = violation.getMsgFormat();
List<String> params = violation.getParamList();
String parentViolationFormatted = format(format, params.toArray());
StringBuilder resultBuilder = new StringBuilder(parentViolationFormatted);
if (violation.getViolationCount() > 0) {
resultBuilder.append(toText(violation.getViolationList()));
}
return resultBuilder.toString();
}
/**
* Returns a formatted string using the format string and parameters from each of
* the violations passed.
*
* @param violations violations which contain the format string and
* arguments referenced by the format specifiers in each of them
* @return a formatted string
* @see #toText(ConstraintViolation)
*/
public static String toText(Iterable<ConstraintViolation> violations) {
checkNotNull(violations);
StringBuilder resultBuilder = new StringBuilder("Violation list:");
for (ConstraintViolation childViolation : violations) {
String childViolationFormatted = toText(childViolation);
resultBuilder.append(lineSeparator())
.append(childViolationFormatted);
}
return resultBuilder.toString();
}
/**
* Returns a formatted string using the specified format string and parameters
* from the violation.
*
* @param format a format string
* @param violation violation which contains arguments referenced by the format
* specifiers in the format string
* @return a formatted string
* @see String#format(String, Object...)
*/
public static String toText(String format, ConstraintViolation violation) {
checkNotNull(format);
checkNotNull(violation);
List<String> params = violation.getParamList();
String parentViolationFormatted = format(format, params.toArray());
StringBuilder resultBuilder = new StringBuilder(parentViolationFormatted);
if (violation.getViolationCount() > 0) {
resultBuilder.append(toText(format, violation.getViolationList()));
}
return resultBuilder.toString();
}
/**
* Returns a formatted string using the specified format string and parameters from
* each of the violations passed.
*
* @param format a format string
* @param violations violations which contain the arguments referenced by the format
* specifiers in the format string
* @return a formatted string
* @see String#format(String, Object...)
*/
public static String toText(String format, Iterable<ConstraintViolation> violations) {
checkNotNull(format);
checkNotNull(violations);
StringBuilder resultBuilder = new StringBuilder("Violations:");
for (ConstraintViolation childViolation : violations) {
String childViolationFormatted = toText(format, childViolation);
resultBuilder.append(lineSeparator())
.append(childViolationFormatted);
}
return resultBuilder.toString();
}
/**
* A helper class for building exceptions used to report invalid {@code Message}s,
* which have fields that violate validation constraint(s).
*
* @param <E> type of {@code Exception} to build
* @param <M> type of the {@code Message}
* @param <C> type of the {@linkplain MessageClass} of {@code |M|}.
* @param <R> type of an error code to use for error reporting; must be a Protobuf enum value
*/
@Internal
public abstract static class ExceptionFactory<E extends Exception,
M extends Message,
C extends MessageClass<?>,
R extends ProtocolMessageEnum> {
private final Iterable<ConstraintViolation> constraintViolations;
private final M message;
/**
* Creates an {@code ExceptionFactory} instance for a given message and
* constraint violations.
*
* @param message an invalid event message
* @param constraintViolations constraint violations for the event message
*/
protected ExceptionFactory(M message,
Iterable<ConstraintViolation> constraintViolations) {
this.constraintViolations = constraintViolations;
this.message = message;
}
/**
* Obtains a {@code MessageClass} for an invalid {@code Message}.
*/
protected abstract C getMessageClass();
/**
* Obtains an error code to use for error reporting.
*/
protected abstract R getErrorCode();
/**
* Obtains an error text to use for error reporting.
*
* <p>This text will also be used as a base for an exception message to generate.
*/
protected abstract String getErrorText();
/**
* Obtains the {@code Message}-specific type attributes for error reporting.
*/
protected abstract Map<String, Value> getMessageTypeAttribute(Message message);
/**
* Defines the way to create an instance of exception, basing on the source {@code Message},
* exception text and a generated {@code Error}.
*/
protected abstract E createException(String exceptionMsg, M message, Error error);
private String formatExceptionMessage() {
return format("%s. Message class: %s. " +
"See Error.getValidationError() for details.",
getErrorText(), getMessageClass());
}
private Error createError() {
ValidationError validationError =
ValidationError.newBuilder()
.addAllConstraintViolation(constraintViolations)
.build();
R errorCode = getErrorCode();
String typeName = errorCode.getDescriptorForType()
.getFullName();
String errorTextTemplate = getErrorText();
String errorText = format("%s %s",
errorTextTemplate,
toText(constraintViolations));
Error.Builder error = Error.newBuilder()
.setType(typeName)
.setCode(errorCode.getNumber())
.setValidationError(validationError)
.setMessage(errorText)
.putAllAttributes(getMessageTypeAttribute(message));
return error.build();
}
/**
* Creates an exception instance for an invalid message, which has fields that
* violate validation constraint(s).
*/
public E newException() {
return createException(formatExceptionMessage(), message, createError());
}
}
} |
package de.aima13.whoami.support;
import org.sqlite.JDBC;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.sql.*;
import java.util.Map;
import java.util.TreeMap;
/**
* DataSourceManager der die Verbindungen zu den SQLite Datenbank handelt.
*
* @author Marvin Klose
* @version 2.0
*/
public class DataSourceManager {
// Datenbanken interessieren geht der Zugriff durch die Zuordnung schneller
private static Map<Path, Connection> openConnections = new TreeMap<Path, Connection>();
private Connection dbConnection = null;
public DataSourceManager(Path sqliteDatabase) throws ClassNotFoundException, SQLException {
Class.forName("org.sqlite.JDBC");
System.out.println("Found " + JDBC.class.getName() + "!");
dbConnection = getAlreadyOpenConnection(sqliteDatabase);
if (dbConnection == null) {
if (sqliteDatabase.toString().contains("Chrome")) {
dbConnection = getConnectionFromShadowCopy(sqliteDatabase);
} else {
dbConnection = DriverManager.getConnection
("jdbc:sqlite:" + sqliteDatabase.toString());
}
}
}
/**
* Sind alle Module fertig alle Resourcen wieder freigeben.
*/
public static void closeRemainingOpenConnections() {
for (Map.Entry<Path, Connection> entry : openConnections.entrySet()) {
try {
if (!entry.getValue().isClosed()) {
entry.getValue().close();
openConnections.remove(entry.getKey());
}
} catch (SQLException e) {
e.printStackTrace();
}
}
}
private Connection getConnectionFromShadowCopy(Path source) {
File chromeCopy = null;
try {
chromeCopy = File.createTempFile("chrome", ".sqlite", null);
chromeCopy.deleteOnExit();
} catch (IOException e) {
e.printStackTrace();
}
try {
Files.copy(source, chromeCopy.toPath(), StandardCopyOption.REPLACE_EXISTING);
} catch (IOException e) {
e.printStackTrace();
}
Connection fakedConnection = null;
try {
Class.forName("org.sqlite.JDBC");
fakedConnection = DriverManager.getConnection
("jdbc:sqlite:" + chromeCopy.toString());
openConnections.put(source, fakedConnection);
} catch (SQLException e) {
e.printStackTrace();
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
return fakedConnection;
}
/**
* Zur Abfrage, ob Verbindung noch offen ist bzw von null verschieden.
*
* @return Boolean, der angibt ob die Verbindung zustande gekommen ist, bzw. noch offen ist.
*/
public boolean isConnected() {
try {
return !dbConnection.isClosed();
} catch (SQLException e) {
return false;
}
}
public synchronized ResultSet querySqlStatement(String statement) throws SQLException {
Statement s = dbConnection.createStatement();
ResultSet rs = s.executeQuery(statement);
return rs;
}
private Connection getAlreadyOpenConnection(Path lookUpPath) {
if (openConnections.containsKey(lookUpPath)) {
return openConnections.get(lookUpPath);
}
return null;
}
} |
package io.spine.validate;
import com.google.protobuf.Any;
import com.google.protobuf.Message;
import io.spine.option.DigitsOption;
import io.spine.option.MaxOption;
import io.spine.option.MinOption;
import io.spine.option.OptionsProto;
import java.util.regex.Pattern;
import static io.spine.protobuf.TypeConverter.toAny;
/**
* Validates fields of number types (protobuf: int32, double, etc).
*
* @param <V>
* the type of the field value
*/
abstract class NumberFieldValidator<V extends Number & Comparable<V>> extends FieldValidator<V> {
private static final Pattern PATTERN_DOT = Pattern.compile("\\.");
private final MinOption min;
private final MaxOption max;
private final DigitsOption digitsOption;
/**
* Creates a new validator instance.
*
* @param fieldValue
* the value to validate
*/
NumberFieldValidator(FieldValue fieldValue) {
super(fieldValue, false, false);
this.min = fieldValue.valueOf(OptionsProto.min);
this.max = fieldValue.valueOf(OptionsProto.max);
this.digitsOption = fieldValue.valueOf(OptionsProto.digits);
}
/** Converts a string representation to a number. */
protected abstract V toNumber(String value);
/** Returns an absolute value of the number. */
protected abstract V getAbs(V number);
/**
* Wraps a value to a corresponding message wrapper
* ({@link com.google.protobuf.DoubleValue DoubleValue},
* {@link com.google.protobuf.Int32Value Int32Value}, etc) and {@link Any}.
*/
Any wrap(V value) {
Any result = toAny(value);
return result;
}
@Override
protected void validateOwnRules() {
for (V value : getValues()) {
validateRangeOptions(value);
validateDigitsOption(value);
}
}
/**
* Returns {@code false}.
*
* <p>There's no way to define whether a Protobuf numeric field is {@code 0} or not set.
*/
@Override
protected boolean isNotSet(V value) {
return false;
}
private void validateRangeOptions(V value) {
if (notFitToMin(value)) {
addViolation(minOrMax(value,
min,
min.getMsgFormat(),
min.getExclusive(),
min.getValue()));
}
if (notFitToMax(value)) {
addViolation(minOrMax(value,
max,
max.getMsgFormat(),
max.getExclusive(),
max.getValue()));
}
}
private boolean notFitToMin(V value) {
String minAsString = min.getValue();
if (minAsString.isEmpty()) {
return false;
}
int comparison = compareToValueOf(value, minAsString);
return min.getExclusive()
? comparison <= 0
: comparison < 0;
}
private boolean notFitToMax(V value) {
String maxAsString = max.getValue();
if (maxAsString.isEmpty()) {
return false;
}
int comparison = compareToValueOf(value, maxAsString);
return max.getExclusive()
? comparison >= 0
: comparison > 0;
}
private int compareToValueOf(V value, String number) {
V bound = toNumber(number);
int comparison = value.compareTo(bound);
return comparison;
}
private void validateDigitsOption(V value) {
int intDigitsMax = digitsOption.getIntegerMax();
int fractionDigitsMax = digitsOption.getFractionMax();
if (intDigitsMax < 1 || fractionDigitsMax < 1) {
return;
}
V abs = getAbs(value);
String[] parts = PATTERN_DOT.split(String.valueOf(abs));
int intDigitsCount = parts[0].length();
int fractionDigitsCount = parts[1].length();
boolean isInvalid = (intDigitsCount > intDigitsMax) ||
(fractionDigitsCount > fractionDigitsMax);
if (isInvalid) {
addViolation(digits(value));
}
}
private ConstraintViolation minOrMax(V value,
Message option,
String customMsg,
boolean exclusive,
String constraint) {
String msg = getErrorMsgFormat(option, customMsg);
ConstraintViolation violation = ConstraintViolation
.newBuilder()
.setMsgFormat(msg)
.addParam(exclusive ? "" : "or equal to")
.addParam(constraint)
.setFieldPath(getFieldPath())
.setFieldValue(wrap(value))
.build();
return violation;
}
private ConstraintViolation digits(V value) {
String msg = getErrorMsgFormat(digitsOption, digitsOption.getMsgFormat());
String intMax = String.valueOf(digitsOption.getIntegerMax());
String fractionMax = String.valueOf(digitsOption.getFractionMax());
ConstraintViolation violation = ConstraintViolation
.newBuilder()
.setMsgFormat(msg)
.addParam(intMax)
.addParam(fractionMax)
.setFieldPath(getFieldPath())
.setFieldValue(wrap(value))
.build();
return violation;
}
} |
package uk.ac.ebi.atlas.utils;
import uk.ac.ebi.atlas.resource.DataFileHub;
import uk.ac.ebi.atlas.trader.ConfigurationTrader;
import uk.ac.ebi.atlas.trader.ExperimentTrader;
import com.google.common.collect.Ordering;
import com.google.common.collect.TreeMultimap;
import uk.ac.ebi.atlas.model.ExperimentType;
import javax.inject.Inject;
import java.util.Collections;
import java.util.Set;
public class ExperimentSorter {
private final DataFileHub dataFileHub;
private final ExperimentTrader experimentTrader;
private final ConfigurationTrader configurationTrader;
@Inject
public ExperimentSorter(DataFileHub dataFileHub, ExperimentTrader experimentTrader, ConfigurationTrader configurationTrader) {
this.dataFileHub = dataFileHub;
this.experimentTrader = experimentTrader;
this.configurationTrader = configurationTrader;
}
public TreeMultimap<Long, String> reverseSortAllExperimentsPerSize() {
return reverseSortExperimentsPerSize(
ExperimentType.MICROARRAY_1COLOUR_MRNA_DIFFERENTIAL,
ExperimentType.MICROARRAY_1COLOUR_MICRORNA_DIFFERENTIAL,
ExperimentType.MICROARRAY_2COLOUR_MRNA_DIFFERENTIAL,
ExperimentType.RNASEQ_MRNA_DIFFERENTIAL,
ExperimentType.RNASEQ_MRNA_BASELINE,
ExperimentType.PROTEOMICS_BASELINE);
}
public TreeMultimap<Long, String> reverseSortExperimentsPerSize(ExperimentType... experimentTypes) {
TreeMultimap<Long, String> fileSizeToExperimentsMap = TreeMultimap.create(Collections.reverseOrder(), Ordering.natural());
for (ExperimentType experimentType : experimentTypes) {
for (String experimentAccession : experimentTrader.getPublicExperimentAccessions(experimentType)) {
fileSizeToExperimentsMap.put(estimateSizeOfExperiment(experimentAccession, experimentType), experimentAccession);
}
}
return fileSizeToExperimentsMap;
}
private long estimateSizeOfExperiment(String experimentAccession, ExperimentType experimentType) {
switch (experimentType) {
case RNASEQ_MRNA_BASELINE:
case PROTEOMICS_BASELINE:
return estimateSizeOfBaselineExperiment(experimentAccession);
case RNASEQ_MRNA_DIFFERENTIAL:
return estimateSizeOfDifferentialExperiment(experimentAccession);
case MICROARRAY_1COLOUR_MRNA_DIFFERENTIAL:
case MICROARRAY_1COLOUR_MICRORNA_DIFFERENTIAL:
case MICROARRAY_2COLOUR_MRNA_DIFFERENTIAL:
return estimateSizeOfMicroarrayExperiment(experimentAccession);
default:
return 0;
}
}
private long estimateSizeOfMicroarrayExperiment(String experimentAccession) {
Set<String> arrayDesigns =
configurationTrader.getMicroarrayExperimentConfiguration(experimentAccession).getArrayDesignAccessions();
long n = 0;
for (String arrayDesign : arrayDesigns) {
n += dataFileHub.getMicroarrayExperimentFiles(experimentAccession, arrayDesign).analytics.size();
}
return n;
}
private long estimateSizeOfDifferentialExperiment(String experimentAccession) {
return dataFileHub.getDifferentialExperimentFiles(experimentAccession).analytics.size();
}
private long estimateSizeOfBaselineExperiment(String experimentAccession) {
return dataFileHub.getBaselineExperimentFiles(experimentAccession).main.size();
}
} |
package org.frameworkset.json;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.frameworkset.util.annotations.DateFormateMeta;
import java.io.File;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.lang.reflect.Type;
public class Jackson2ObjectMapper implements JacksonObjectMapper {
protected ObjectMapper mapper = null;
private String dateFormat;
private String locale;
private String timeZone;
private boolean disableTimestamp = false;
boolean failedOnUnknownProperties = false;
@Override
public String getDateFormat() {
return dateFormat;
}
@Override
public void setDateFormat(String dateFormat) {
this.dateFormat = dateFormat;
}
@Override
public String getLocale() {
return locale;
}
@Override
public void setLocale(String locale) {
this.locale = locale;
}
@Override
public String getTimeZone() {
return timeZone;
}
@Override
public void setTimeZone(String timeZone) {
this.timeZone = timeZone;
}
@Override
public boolean isDisableTimestamp() {
return disableTimestamp;
}
@Override
public void setDisableTimestamp(boolean disableTimestamp) {
this.disableTimestamp = disableTimestamp;
}
public boolean isFailedOnUnknownProperties() {
return failedOnUnknownProperties;
}
@Override
public void setFailedOnUnknownProperties(boolean failedOnUnknownProperties) {
this.failedOnUnknownProperties = failedOnUnknownProperties;
}
public Jackson2ObjectMapper(){
mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,false);
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#json2Object(java.lang.String, java.lang.Class)
*/
@Override
public <T> T json2Object(String jsonString,Class<T> toclass) {
// TODO Auto-generated method stub
return json2Object(jsonString,toclass,true);
}
public JavaType getJavaType(Class containerType,Class ... beanClass){
JavaType javaType = mapper.getTypeFactory().constructParametricType(containerType, beanClass);
return javaType;
}
public JavaType getJavaMapType(Class containerType, Class keyClass,Class valueClass){
JavaType javaType = mapper.getTypeFactory().constructMapType(containerType, keyClass,valueClass);
return javaType;
}
public ObjectMapper getObjectMapper(){
return mapper;
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#json2Object(java.lang.String, java.lang.Class, boolean)
*/
@Override
public <T> T json2Object(String jsonString,Class<T> toclass,boolean ALLOW_SINGLE_QUOTES) {
// TODO Auto-generated method stub
// String jsonString = "[{'from_date':'2001-09-21','to_date':'2011-04-02','company':'','department':'xxx','position':'' },{'from_date':'0002-12-01','to_date':'2011-04-02', 'company':'','department':'xxx','position':'' }]";
// mapper.configure(Feature.ALLOW_SINGLE_QUOTES, ALLOW_SINGLE_QUOTES);
try {
T value = mapper.readValue(jsonString, toclass);
return value;
} catch (Exception e) {
throw new IllegalArgumentException(jsonString,e);
}
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#json2Object(java.lang.String, java.lang.Class, boolean)
*/
@Override
public <T> T json2Object(InputStream jsonString,Class<T> toclass,boolean ALLOW_SINGLE_QUOTES) {
// TODO Auto-generated method stub
// String jsonString = "[{'from_date':'2001-09-21','to_date':'2011-04-02','company':'','department':'xxx','position':'' },{'from_date':'0002-12-01','to_date':'2011-04-02', 'company':'','department':'xxx','position':'' }]";
// mapper.configure(Feature.ALLOW_SINGLE_QUOTES, ALLOW_SINGLE_QUOTES);
try {
T value = mapper.readValue(jsonString, toclass);
return value;
} catch (Exception e) {
throw new IllegalArgumentException("",e);
}
}
// public <T> T json2Object(String jsonString,TypeReference<T> ref) {
// return json2Object(jsonString,ref,true);
// public <T> T json2Object(String jsonString,TypeReference<T> ref,boolean ALLOW_SINGLE_QUOTES) {
// // TODO Auto-generated method stub
//// String jsonString = "[{'from_date':'2001-09-21','to_date':'2011-04-02','company':'','department':'xxx','position':'' },{'from_date':'0002-12-01','to_date':'2011-04-02', 'company':'','department':'xxx','position':'' }]";
// ObjectMapper mapper = new ObjectMapper();
// mapper.configure(Feature.ALLOW_SINGLE_QUOTES, ALLOW_SINGLE_QUOTES);
// try {
// T value = mapper.readValue(jsonString, ref);
// return value;
// } catch (Exception e) {
@Override
public <T> T json2ObjectWithType(InputStream jsonString, final JsonTypeReference<T> ref, boolean ALLOW_SINGLE_QUOTES) {
// TODO Auto-generated method stub
TypeReference<T> ref_ = new TypeReference<T>(){
@Override
public Type getType() {
// TODO Auto-generated method stub
return ref.getType();
}
};
try {
T value = mapper.readValue(jsonString, ref_);
return value;
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
}
public <T> T json2ObjectWithType(String jsonString,final JsonTypeReference<T> ref,boolean ALLOW_SINGLE_QUOTES) {
// TODO Auto-generated method stub
// String jsonString = "[{'from_date':'2001-09-21','to_date':'2011-04-02','company':'','department':'xxx','position':'' },{'from_date':'0002-12-01','to_date':'2011-04-02', 'company':'','department':'xxx','position':'' }]";
TypeReference<T> ref_ = new TypeReference<T>(){
@Override
public Type getType() {
// TODO Auto-generated method stub
return ref.getType();
}
};
try {
T value = mapper.readValue(jsonString, ref_);
return value;
} catch (Exception e) {
throw new IllegalArgumentException(jsonString,e);
}
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2json(java.lang.Object)
*/
@Override
public String object2json(Object object) {
return object2json(object,true) ;
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2json(java.lang.Object, boolean)
*/
@Override
public String object2json(Object object,boolean ALLOW_SINGLE_QUOTES) {
// ObjectMapper mapper = new ObjectMapper();
// mapper.configure(Feature.ALLOW_SINGLE_QUOTES, ALLOW_SINGLE_QUOTES);
try {
String value = mapper.writeValueAsString(object);
return value;
} catch (Exception e) {
throw new IllegalArgumentException("json",e);
}
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2json(java.lang.Object, java.io.File)
*/
@Override
public void object2json(Object object,File writer) {
object2json(object,writer,true) ;
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2json(java.lang.Object, java.io.File, boolean)
*/
@Override
public void object2json(Object object,File writer,boolean ALLOW_SINGLE_QUOTES) {
// ObjectMapper mapper = new ObjectMapper();
// mapper.configure(Feature.ALLOW_SINGLE_QUOTES, ALLOW_SINGLE_QUOTES);
try {
mapper.writeValue(writer,object);
} catch (Exception e) {
throw new IllegalArgumentException("json",e);
}
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2json(java.lang.Object, java.io.OutputStream)
*/
@Override
public void object2json(Object object,OutputStream writer) {
object2json(object,writer,true) ;
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2json(java.lang.Object, java.io.OutputStream, boolean)
*/
@Override
public void object2json(Object object,OutputStream writer,boolean ALLOW_SINGLE_QUOTES) {
// ObjectMapper mapper = new ObjectMapper();
// mapper.configure(Feature.ALLOW_SINGLE_QUOTES, ALLOW_SINGLE_QUOTES);
try {
mapper.writeValue(writer,object);
} catch (Exception e) {
throw new IllegalArgumentException("json",e);
}
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2json(java.lang.Object, java.io.Writer)
*/
@Override
public void object2json(Object object,Writer writer) {
object2json(object,writer,true) ;
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2json(java.lang.Object, java.io.Writer, boolean)
*/
@Override
public void object2json(Object object,Writer writer,boolean ALLOW_SINGLE_QUOTES) {
// ObjectMapper mapper = new ObjectMapper();
// mapper.configure(Feature.ALLOW_SINGLE_QUOTES, ALLOW_SINGLE_QUOTES);
try {
mapper.writeValue(writer,object);
} catch (Exception e) {
throw new IllegalArgumentException("json",e);
}
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2jsonAsbyte(java.lang.Object)
*/
@Override
public byte[] object2jsonAsbyte(Object object) {
return object2jsonAsbyte(object,true) ;
}
/* (non-Javadoc)
* @see org.frameworkset.json.JacksonObjectMapper#object2jsonAsbyte(java.lang.Object, boolean)
*/
@Override
public byte[] object2jsonAsbyte(Object object,boolean ALLOW_SINGLE_QUOTES) {
// ObjectMapper mapper = new ObjectMapper();
// mapper.configure(Feature.ALLOW_SINGLE_QUOTES, ALLOW_SINGLE_QUOTES);
try {
return mapper.writeValueAsBytes(object);
} catch (Exception e) {
throw new IllegalArgumentException("json",e);
}
}
@Override
public void init() {
if(dateFormat != null && !dateFormat.equals("")) {
DateFormateMeta dateFormateMeta = DateFormateMeta.buildDateFormateMeta(this.dateFormat, this.locale, this.timeZone);
this.mapper.setDateFormat(dateFormateMeta.toDateFormat());
}
if(this.disableTimestamp){
mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
}
this.mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,failedOnUnknownProperties);
}
} |
package com.opengamma.financial.analytics.conversion;
import java.util.List;
import javax.time.calendar.ZonedDateTime;
import org.apache.commons.lang.Validate;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.instrument.InstrumentDefinition;
import com.opengamma.analytics.financial.instrument.bond.BondFixedSecurityDefinition;
import com.opengamma.analytics.financial.instrument.future.BondFutureDefinition;
import com.opengamma.core.security.SecuritySource;
import com.opengamma.financial.security.bond.BondSecurity;
import com.opengamma.financial.security.future.BondFutureDeliverable;
import com.opengamma.financial.security.future.BondFutureSecurity;
/**
* FIXME CASE - BondFutureDefinition needs a reference price. Without a trade, where will it come from?
*/
public class BondFutureSecurityConverter extends AbstractFutureSecurityVisitor<InstrumentDefinition<?>> {
private final SecuritySource _securitySource;
private final BondSecurityConverter _bondConverter;
public BondFutureSecurityConverter(final SecuritySource securitySource, final BondSecurityConverter bondConverter) {
Validate.notNull(securitySource, "security source");
Validate.notNull(bondConverter, "bond converter");
_securitySource = securitySource;
_bondConverter = bondConverter;
}
@Override
public InstrumentDefinition<?> visitBondFutureSecurity(final BondFutureSecurity bondFuture) {
Validate.notNull(bondFuture);
final ZonedDateTime tradingLastDate = bondFuture.getExpiry().getExpiry();
final ZonedDateTime noticeFirstDate = bondFuture.getFirstDeliveryDate();
final ZonedDateTime noticeLastDate = bondFuture.getLastDeliveryDate();
final double notional = bondFuture.getUnitAmount();
final List<BondFutureDeliverable> basket = bondFuture.getBasket();
final int n = basket.size();
final BondFixedSecurityDefinition[] deliverables = new BondFixedSecurityDefinition[n];
final double[] conversionFactor = new double[n];
for (int i = 0; i < n; i++) {
final BondFutureDeliverable deliverable = basket.get(i);
final BondSecurity bondSecurity = (BondSecurity) _securitySource.getSecurity(deliverable.getIdentifiers());
if (bondSecurity == null) {
throw new OpenGammaRuntimeException("No security found with identifiers " + deliverable.getIdentifiers());
}
deliverables[i] = (BondFixedSecurityDefinition) bondSecurity.accept(_bondConverter); //TODO check type
conversionFactor[i] = deliverable.getConversionFactor();
}
return new BondFutureDefinition(tradingLastDate, noticeFirstDate, noticeLastDate, notional, deliverables, conversionFactor);
}
} |
package com.opengamma.integration.copier.portfolio.rowparser;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.joda.beans.BeanBuilder;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.PropertyReadWrite;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.financial.conversion.JodaBeanConverters;
import com.opengamma.financial.security.equity.EquitySecurity;
import com.opengamma.financial.security.future.EquityFutureSecurity;
import com.opengamma.financial.security.future.InterestRateFutureSecurity;
import com.opengamma.financial.security.option.EquityBarrierOptionSecurity;
import com.opengamma.financial.security.option.EquityOptionSecurity;
import com.opengamma.financial.security.option.IRFutureOptionSecurity;
import com.opengamma.financial.security.option.SwaptionSecurity;
import com.opengamma.financial.security.swap.FixedInterestRateLeg;
import com.opengamma.financial.security.swap.FixedVarianceSwapLeg;
import com.opengamma.financial.security.swap.FloatingGearingIRLeg;
import com.opengamma.financial.security.swap.FloatingInterestRateLeg;
import com.opengamma.financial.security.swap.FloatingSpreadIRLeg;
import com.opengamma.financial.security.swap.FloatingVarianceSwapLeg;
import com.opengamma.financial.security.swap.InterestRateLeg;
import com.opengamma.financial.security.swap.Notional;
import com.opengamma.financial.security.swap.SwapLeg;
import com.opengamma.financial.security.swap.SwapSecurity;
import com.opengamma.financial.security.swap.VarianceSwapLeg;
import com.opengamma.master.position.ManageablePosition;
import com.opengamma.master.position.ManageableTrade;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.ManageableSecurityLink;
import com.opengamma.util.ArgumentChecker;
/**
 * A generic row parser for Joda beans that automatically identifies fields to be persisted to rows/populated from rows
 */
public class JodaBeanRowParser extends RowParser {

    private static final Logger s_logger = LoggerFactory.getLogger(JodaBeanRowParser.class);

    /**
     * Types of swap leg that might be encountered, and for which additional fields are generated
     */
    private static final Class<?>[] SWAP_LEG_CLASSES = {
        SwapLeg.class,
        InterestRateLeg.class,
        FixedInterestRateLeg.class,
        FloatingInterestRateLeg.class,
        FloatingGearingIRLeg.class,
        FloatingSpreadIRLeg.class,
        VarianceSwapLeg.class,
        FixedVarianceSwapLeg.class,
        FloatingVarianceSwapLeg.class
    };

    /**
     * The packages where security classes are to be found
     */
    private static final String[] CLASS_PACKAGES = {
        "com.opengamma.financial.security.bond",
        "com.opengamma.financial.security.capfloor",
        "com.opengamma.financial.security.cash",
        "com.opengamma.financial.security.deposit",
        "com.opengamma.financial.security.equity",
        "com.opengamma.financial.security.forward",
        "com.opengamma.financial.security.fra",
        "com.opengamma.financial.security.future",
        "com.opengamma.financial.security.fx",
        "com.opengamma.financial.security.option",
        "com.opengamma.financial.security.swap",
    };

    /**
     * Security properties to ignore when scanning.
     * (The list previously contained a duplicate "attributes" entry; removed as redundant.)
     */
    private static final String[] IGNORE_METAPROPERTIES = {
        "attributes",
        "uniqueid",
        "objectid",
        "securitylink",
        "trades",
        "gicscode",
        "parentpositionid",
        "providerid",
        "deal"
    };

    /**
     * Column prefixes
     */
    private static final String POSITION_PREFIX = "position";
    private static final String TRADE_PREFIX = "trade";
    private static final String UNDERLYING_PREFIX = "underlying";

    /**
     * Every security class name ends with this
     */
    private static final String CLASS_POSTFIX = "Security";

    /**
     * The security class that this parser is adapted to
     */
    private Class<DirectBean> _securityClass;

    /**
     * The underlying security class(es) for the security class above
     */
    private List<Class<?>> _underlyingSecurityClasses;

    /**
     * Map from column name to the field's Java type
     */
    private SortedMap<String, Class<?>> _columns = new TreeMap<String, Class<?>>();

    static {
        // Register the automatic string converters with Joda Beans
        JodaBeanConverters.getInstance();
        // Force registration of various meta beans that might not have been loaded yet
        ManageablePosition.meta();
        ManageableTrade.meta();
        Notional.meta();
        SwapLeg.meta();
        InterestRateLeg.meta();
        FixedInterestRateLeg.meta();
        FloatingInterestRateLeg.meta();
        FloatingGearingIRLeg.meta();
        FloatingSpreadIRLeg.meta();
        VarianceSwapLeg.meta();
        FixedVarianceSwapLeg.meta();
        FloatingVarianceSwapLeg.meta();
        EquitySecurity.meta();
        SwapSecurity.meta();
    }
/**
 * Creates a row parser adapted to the named security type.
 *
 * <p>Resolves the security class from the name (with the {@code Security} postfix
 * appended), determines its underlying security classes, then builds the column
 * map by scanning the security class, each underlying class (namespaced with the
 * underlying prefix), the position class and the trade class.
 *
 * @param securityName the security type name, without the {@code Security} postfix
 * @throws OpenGammaRuntimeException if no matching security class can be found
 */
protected JodaBeanRowParser(String securityName) throws OpenGammaRuntimeException {
    ArgumentChecker.notEmpty(securityName, "securityName");
    // Find the corresponding security class
    _securityClass = getClass(securityName + CLASS_POSTFIX);
    // Find the underlying(s)
    _underlyingSecurityClasses = getUnderlyingSecurityClasses(_securityClass);
    // Set column map
    _columns = recursiveGetColumnMap(_securityClass, "");
    for (Class<?> securityClass : _underlyingSecurityClasses) {
        _columns.putAll(recursiveGetColumnMap(securityClass, UNDERLYING_PREFIX + securityClass.getSimpleName() + ":"));
    }
    _columns.putAll(recursiveGetColumnMap(ManageablePosition.class, POSITION_PREFIX + ":"));
    _columns.putAll(recursiveGetColumnMap(ManageableTrade.class, TRADE_PREFIX + ":"));
}
/**
 * Determines the underlying security class(es) for the given security class.
 * Equity futures and equity(-barrier) options resolve to an equity underlying,
 * IR future options to an interest-rate future, and swaptions to a swap.
 */
private List<Class<?>> getUnderlyingSecurityClasses(Class<DirectBean> securityClass) {
    List<Class<?>> underlyings = new ArrayList<Class<?>>();
    if (EquityFutureSecurity.class.isAssignableFrom(securityClass)
            || EquityBarrierOptionSecurity.class.isAssignableFrom(securityClass)
            || EquityOptionSecurity.class.isAssignableFrom(securityClass)) {
        // Equity futures and equity(-barrier) options share an equity underlying.
        underlyings.add(EquitySecurity.class);
    } else if (IRFutureOptionSecurity.class.isAssignableFrom(securityClass)) {
        underlyings.add(InterestRateFutureSecurity.class);
    } else if (SwaptionSecurity.class.isAssignableFrom(securityClass)) {
        underlyings.add(SwapSecurity.class);
    }
    return underlyings;
}
/**
 * Creates a new row parser for the specified security type.
 *
 * @param securityName the type of the security for which a row parser is to be created
 * @return the row parser for the specified security type
 * @throws OpenGammaRuntimeException if a suitable parser could not be created
 *         (this method never returns null; any failure is rethrown wrapped)
 */
public static JodaBeanRowParser newJodaBeanRowParser(String securityName) {
    // Now using the JodaBean parser
    ArgumentChecker.notEmpty(securityName, "securityName");
    try {
        return new JodaBeanRowParser(securityName);
    } catch (Throwable e) {
        // NOTE(review): catching Throwable also wraps Errors; presumably deliberate
        // so class-resolution failures surface as OpenGammaRuntimeException — confirm.
        throw new OpenGammaRuntimeException("Could not create a row parser for security type " + securityName, e);
    }
}
// Import routines: construct security(ies), position, trade

/**
 * Builds the security (and any underlying securities) from a row of column values.
 * Returns null if the top-level security could not be populated; missing
 * underlyings are logged and skipped.
 */
@Override
public ManageableSecurity[] constructSecurity(Map<String, String> row) {
    ArgumentChecker.notNull(row, "row");
    ManageableSecurity security = (ManageableSecurity) recursiveConstructBean(row, _securityClass, "");
    if (security == null) {
        return null;
    }
    List<ManageableSecurity> securities = new ArrayList<ManageableSecurity>();
    securities.add(security);
    for (Class<?> underlyingClass : _underlyingSecurityClasses) {
        // NOTE(review): the prefix lower-cases the simple class name while the
        // column map built in the constructor does not — confirm the lookup in
        // recursiveConstructBean is case-insensitive.
        ManageableSecurity underlying = (ManageableSecurity) recursiveConstructBean(row, underlyingClass, UNDERLYING_PREFIX + underlyingClass.getSimpleName().toLowerCase() + ":");
        if (underlying == null) {
            s_logger.warn("Could not populate underlying security of type " + underlyingClass);
        } else {
            securities.add(underlying);
        }
    }
    return securities.toArray(new ManageableSecurity[securities.size()]);
}
/**
 * Builds a position from a row of column values and links it to the given security.
 * Returns null if the position bean could not be populated from the row.
 */
@Override
public ManageablePosition constructPosition(Map<String, String> row, ManageableSecurity security) {
    ArgumentChecker.notNull(row, "row");
    ArgumentChecker.notNull(security, "security");
    ManageablePosition position = (ManageablePosition) recursiveConstructBean(row, ManageablePosition.class, "position:");
    if (position == null) {
        return null;
    }
    position.setSecurityLink(new ManageableSecurityLink(security.getExternalIdBundle()));
    return position;
}
/**
 * Builds a trade from a row of column values and links it to the given security.
 * Returns null if the trade bean could not be populated, or if it carries no
 * trade date (a dateless trade is treated as absent).
 */
@Override
public ManageableTrade constructTrade(Map<String, String> row, ManageableSecurity security, ManageablePosition position) {
    ArgumentChecker.notNull(row, "row");
    ArgumentChecker.notNull(security, "security");
    ArgumentChecker.notNull(position, "position");
    ManageableTrade trade = (ManageableTrade) recursiveConstructBean(row, ManageableTrade.class, "trade:");
    if (trade == null) {
        return null;
    }
    if (trade.getTradeDate() == null) {
        return null;
    }
    trade.setSecurityLink(new ManageableSecurityLink(security.getExternalIdBundle()));
    return trade;
}
// Export routines: construct row from security, position, trade
/**
 * Extracts a row of column values from the supplied securities. The first entry is
 * treated as the main security (unprefixed columns); subsequent entries are
 * underlyings whose columns are prefixed with their type name.
 * @param securities the securities to export; must not be null or empty
 * @return the map of column names to values
 */
@Override
public Map<String, String> constructRow(ManageableSecurity[] securities) {
    // Guard against an empty array as well as null: securities[0] below would
    // otherwise throw an uninformative ArrayIndexOutOfBoundsException
    ArgumentChecker.notEmpty(securities, "securities");
    Map<String, String> result = recursiveConstructRow(securities[0], "");
    for (int i = 1; i < securities.length; i++) {
        result.putAll(recursiveConstructRow(securities[i], UNDERLYING_PREFIX + securities[i].getClass().getSimpleName() + ":"));
    }
    return result;
}
/**
 * Extracts a row of column values from the supplied position, using the "position:" prefix.
 * @param position the position to export, not null
 * @return the map of column names to values
 */
@Override
public Map<String, String> constructRow(ManageablePosition position) {
    ArgumentChecker.notNull(position, "position");
    return recursiveConstructRow(position, "position:");
}
/**
 * Extracts a row of column values from the supplied trade, using the "trade:" prefix.
 * @param trade the trade to export, not null
 * @return the map of column names to values
 */
@Override
public Map<String, String> constructRow(ManageableTrade trade) {
    ArgumentChecker.notNull(trade, "trade");
    return recursiveConstructRow(trade, "trade:");
}
// Utility routines
/**
 * Returns the full set of column names known to this parser, in the (sorted) order
 * of the internal column map.
 */
@Override
public String[] getColumns() {
    return _columns.keySet().toArray(new String[_columns.size()]);
}
/**
 * Computes a hash over the parser's column names and their types, so that two
 * parsers with identical column layouts produce the same code (e.g. for
 * detecting schema changes between an exported file and the current parser).
 */
@Override
public int getSecurityHashCode() {
    HashCodeBuilder hashCodeBuilder = new HashCodeBuilder();
    // _columns is sorted, so iteration order (and hence the hash) is deterministic
    for (Entry<String, Class<?>> entry : _columns.entrySet()) {
        hashCodeBuilder.append(entry.getKey());
        hashCodeBuilder.append(entry.getValue().getCanonicalName());
    }
    return hashCodeBuilder.toHashCode();
}
/**
 * Extract a map of column (field) names and types from the properties of the specified direct bean class.
 * Appropriate member classes (such as swap legs) are recursively traversed and their columns also extracted
 * and added to the map. For a nested bean the parent column holds the concrete class name, and the nested
 * bean's own columns are added under a "parent:child" prefix.
 * @param clazz The bean type from which to extract properties
 * @param prefix The class membership path traced from the top-level bean class to the current class
 * @return A sorted map of the column names and their types
 */
@SuppressWarnings("unchecked")
private SortedMap<String, Class<?>> recursiveGetColumnMap(Class<?> clazz, String prefix) {
    // Scan through and capture the list of relevant properties and their types
    SortedMap<String, Class<?>> columns = new TreeMap<String, Class<?>>();
    for (MetaProperty<?> metaProperty : JodaBeanUtils.metaBean(clazz).metaPropertyIterable()) {
        // Skip any undesired properties, process the rest
        if (!ignoreMetaProperty(metaProperty)) {
            // Add a column for the property (used either for the actual value
            // or for the class name in the case of a non-convertible bean
            columns.put(prefix + metaProperty.name(), metaProperty.propertyType());
            // If this is a bean without a converter recursively extract all
            // columns for the metabean and all its subclasses
            if (isBean(metaProperty.propertyType()) && !isConvertible(metaProperty.propertyType())) {
                // This is the bean (might be an abstract class/subclassed)
                Class<DirectBean> beanClass = (Class<DirectBean>) metaProperty.propertyType().asSubclass(DirectBean.class);
                // Recursively extract this bean's properties
                columns.putAll(recursiveGetColumnMap(beanClass, prefix + metaProperty.name() + ":"));
                // Identify ALL subclasses of this bean and extract all their properties,
                // since the actual type in any given row may be any of the subclasses
                for (Class<?> subClass : getSubClasses(beanClass)) {
                    columns.putAll(recursiveGetColumnMap((Class<DirectBean>) subClass, prefix + metaProperty.name() + ":"));
                }
            }
        }
    }
    return columns;
}
/**
 * Build a bean of the specified type by extracting property values from the supplied map of field names to
 * values, using recursion to construct the member beans in the same manner.
 * @param row The map from property (or column, or field) names to values
 * @param clazz The bean type of which to construct an instance
 * @param prefix The class membership path traced from the top-level bean class to the current class
 * @return The constructed security bean, or null if construction failed (the failure is logged)
 */
private DirectBean recursiveConstructBean(Map<String, String> row, Class<?> clazz, String prefix) {
    try {
        // Get a reference to the meta-bean
        Method metaMethod = clazz.getMethod("meta", (Class<?>[]) null);
        DirectMetaBean metaBean = (DirectMetaBean) metaMethod.invoke(null, (Object[]) null);
        // Get a new builder from the meta-bean
        @SuppressWarnings("unchecked")
        BeanBuilder<? extends DirectBean> builder = (BeanBuilder<? extends DirectBean>) metaBean.builder();
        // Populate the bean from the supplied row using the builder
        for (MetaProperty<?> metaProperty : JodaBeanUtils.metaBean(clazz).metaPropertyIterable()) {
            // Skip any undesired properties, process the rest
            if (!ignoreMetaProperty(metaProperty)) {
                // If this property is itself a bean without a converter, recurse to populate relevant fields
                if (isBean(metaProperty.propertyType()) && !isConvertible(metaProperty.propertyType())) {
                    // Get the actual type of this bean from the relevant column
                    String className = row.get((prefix + metaProperty.name()).trim().toLowerCase());
                    Class<DirectBean> beanClass = getClass(className);
                    // Recursively set properties
                    builder.set(metaProperty.name(),
                            recursiveConstructBean(row, beanClass, prefix + metaProperty.name() + ":"));
                    // If not a bean, or it is a bean for which a converter exists, just set value in builder using joda convert
                } else {
                    // Convert raw value in row to the target property's type
                    String rawValue = row.get((prefix + metaProperty.name()).trim().toLowerCase());
                    if (isConvertible(metaProperty.propertyType())) {
                        // Set property value
                        if (rawValue != null && !rawValue.equals("")) {
                            builder.set(metaProperty.name(),
                                    JodaBeanUtils.stringConverter().convertFromString(metaProperty.propertyType(), rawValue));
                        } else {
                            s_logger.info("Skipping empty or null value for " + prefix + metaProperty.name());
                        }
                    } else if (List.class.isAssignableFrom(metaProperty.propertyType()) &&
                            isConvertible(JodaBeanUtils.collectionType(metaProperty, metaProperty.propertyType()))) {
                        builder.set(metaProperty.name(), stringToList(rawValue, JodaBeanUtils.collectionType(metaProperty, metaProperty.propertyType())));
                    } else {
                        throw new OpenGammaRuntimeException("Property '" + prefix + metaProperty.name() + "' (" + metaProperty.propertyType() + ") cannot be populated from a string");
                    }
                }
            }
        }
        // Actually build the bean
        return builder.build();
    } catch (Throwable ex) {
        // Log the full throwable rather than just its message, so the stack trace
        // (and the root cause of reflective failures) is not lost
        s_logger.error("Could not create a " + clazz.getSimpleName() + ": " + ex.getMessage(), ex);
        return null;
    }
}
/**
 * Extracts a map of column names to values from a supplied security bean's properties, using recursion to
 * extract properties from any member beans. Nested non-convertible beans contribute two things: a column
 * holding their concrete class name (so the correct subclass can be chosen on import) and their own
 * properties under a "parent:child" prefix.
 * @param bean The bean instance from which to extract property values
 * @param prefix The class membership path traced from the top-level bean class to the current class
 * @return A map of extracted column names and values
 */
private Map<String, String> recursiveConstructRow(DirectBean bean, String prefix) {
    Map<String, String> result = new HashMap<String, String>();
    // Populate the row from the bean's properties
    for (MetaProperty<?> metaProperty : bean.metaBean().metaPropertyIterable()) {
        // Skip any undesired properties, process the rest
        if (!ignoreMetaProperty(metaProperty)) {
            // If this property is itself a bean without a converter, recurse to populate relevant columns
            if (isBean(metaProperty.propertyType()) && !isConvertible(metaProperty.propertyType())) {
                // Store the class name in a separate column (to help identify the correct subclass during loading)
                result.put(prefix + metaProperty.name(), metaProperty.get(bean).getClass().getSimpleName());
                // Recursively extract bean's columns
                result.putAll(recursiveConstructRow((DirectBean) metaProperty.get(bean), prefix + metaProperty.name() + ":"));
                // If not a bean, or it is a bean for which a converter exists, just extract its value using joda convert
            } else {
                // Set the column; only properties present in the known column map are exported
                if (_columns.containsKey(prefix + metaProperty.name())) {
                    // Can convert
                    if (isConvertible(metaProperty.propertyType())) {
                        result.put(prefix + metaProperty.name(), metaProperty.getString(bean));
                        // Is list, needs to be decomposed
                    } else if (List.class.isAssignableFrom(metaProperty.propertyType()) &&
                            isConvertible(JodaBeanUtils.collectionType(metaProperty, metaProperty.propertyType()))) {
                        result.put(prefix + metaProperty.name(), listToString((List<?>) metaProperty.get(bean)));
                        // Cannot convert :(
                    } else {
                        throw new OpenGammaRuntimeException("Property '" + prefix + metaProperty.name() + "' (" + metaProperty.propertyType() + ") cannot be converted to a string");
                    }
                } else {
                    s_logger.info("No matching column found for property " + prefix + metaProperty.name());
                }
            }
        }
    }
    return result;
}
/**
 * Converts a list of objects to a |-separated string of their JodaConverted string representations.
 * @param i the list to be converted
 * @return the |-separated string; an empty string for an empty list
 * @throws OpenGammaRuntimeException if an element's type has no registered string converter
 */
private String listToString(List<?> i) {
    // Use a StringBuilder and join with the separator between elements; the original
    // trailing-separator-then-substring approach threw StringIndexOutOfBoundsException
    // on an empty list (lastIndexOf('|') returned -1)
    StringBuilder result = new StringBuilder();
    for (Object o : i) {
        if (isConvertible(o.getClass())) {
            if (result.length() > 0) {
                result.append(" | ");
            }
            result.append(JodaBeanUtils.stringConverter().convertToString(o));
        } else {
            throw new OpenGammaRuntimeException("Cannot convert " + o.getClass() + " contained in list");
        }
    }
    return result.toString();
}
/**
 * Converts a |-separated string to a list of objects using JodaConvert.
 * @param raw the string to parse; a null or blank string yields an empty list
 * @param t the class to convert to
 * @return the list of objects of type t
 */
private List<?> stringToList(String raw, Class<?> t) {
    List<Object> result = new ArrayList<Object>();
    // Guard against null/blank input: the caller passes the raw row value straight
    // through, and a missing column would otherwise cause an NPE in raw.split()
    if (raw == null || raw.trim().isEmpty()) {
        return result;
    }
    for (String s : raw.split("\\|")) {
        result.add(JodaBeanUtils.stringConverter().convertFromString(t, s.trim()));
    }
    return result;
}
/**
 * Given a class name, look for the class in the list of packages specified by CLASS_PACKAGES and return it
 * or throw exception if not found
 * @param className the class name to seek
 * @return the corresponding class
 */
@SuppressWarnings("unchecked")
private Class<DirectBean> getClass(String className) {
    // Probe each candidate package in order and return on the first successful load
    for (String packageName : CLASS_PACKAGES) {
        try {
            return (Class<DirectBean>) Class.forName(packageName + "." + className);
        } catch (Throwable ignored) {
            // Deliberately swallowed: the class simply isn't in this package, try the next one
        }
    }
    throw new OpenGammaRuntimeException("Could not load class " + className);
}
/**
 * Given a bean class, find its subclasses; this is currently hard coded as Java can neither identify the
 * classes within a package, nor identify a class's subclasses. Currently identifies swap legs.
 * @param beanClass the bean class whose subclasses are sought
 * @return the known subclasses of the supplied class; empty if none are registered
 */
private Collection<Class<?>> getSubClasses(Class<?> beanClass) {
    Collection<Class<?>> subClasses = new ArrayList<Class<?>>();
    // This has to be hard-coded since Java can neither identify the classes within a package, nor identify a class's subclasses
    if (SwapLeg.class.isAssignableFrom(beanClass)) {
        for (Class<?> c : SWAP_LEG_CLASSES) {
            subClasses.add(c);
        }
    }
    // No cast needed: subClasses is already declared as Collection<Class<?>>
    return subClasses;
}
/**
 * Checks whether the supplied class has a registered Joda string converter.
 * @param clazz the class to check
 * @return true if a converter is registered, false otherwise
 */
private boolean isConvertible(Class<?> clazz) {
    try {
        // findConverter throws if no converter is registered; we use that as the test
        JodaBeanUtils.stringConverter().findConverter(clazz);
        return true;
    } catch (Throwable ex) {
        // No converter available for this type
        return false;
    }
}
/**
 * Determines whether the supplied class is a direct bean.
 * @param clazz the class in question
 * @return true if the class is assignable to DirectBean
 */
private boolean isBean(Class<?> clazz) {
    // The condition is already a boolean; the previous '? true : false' was redundant
    return DirectBean.class.isAssignableFrom(clazz);
}
/**
 * Checks whether the specified metaproperty is to be ignored when extracting fields.
 * A property is ignored if it is not fully read-write, or if its name appears in the
 * IGNORE_METAPROPERTIES list (compared case-insensitively after trimming).
 * @param mp the metaproperty in question
 * @return true if the property should be skipped
 */
private boolean ignoreMetaProperty(MetaProperty<?> mp) {
    // Only fully read-write properties can round-trip through a row
    if (mp.readWrite() != PropertyReadWrite.READ_WRITE) {
        return true;
    }
    String propertyName = mp.name().trim().toLowerCase();
    for (String ignoredName : IGNORE_METAPROPERTIES) {
        if (propertyName.equals(ignoredName.trim().toLowerCase())) {
            return true;
        }
    }
    return false;
}
} |
package gov.nih.nci.cabig.caaers.domain.expeditedfields;
/**
 * The sections of an expedited report, each with a human-readable display name and a
 * flag indicating whether the section is associated with business rules.
 */
public enum ExpeditedReportSection {
    BASICS_SECTION("Adverse Events", true),
    ADVERSE_EVENT_SECTION("Adverse Events"),
    REPORTER_INFO_SECTION("Reporter"),
    RADIATION_INTERVENTION_SECTION("Radiation"),
    AGENTS_INTERVENTION_SECTION("Agents", true),
    SURGERY_INTERVENTION_SECTION("Surgery"),
    MEDICAL_DEVICE_SECTION("Device", true),
    DESCRIPTION_SECTION("Describe Event", true),
    STUDY_INTERVENTIONS("Study Interventions", true),
    MEDICAL_INFO_SECTION("Subject Details", true),
    TREATMENT_INFO_SECTION("Course", true),
    LABS_SECTION("Labs", true),
    PRIOR_THERAPIES_SECTION("Prior Therapies", true),
    PRE_EXISTING_CONDITION_SECTION("Pre-existing Conditions", true),
    CONCOMITANT_MEDICATION_SECTION("Conmeds"),
    OTHER_CAUSE_SECTION("Other Causes"),
    ATTRIBUTION_SECTION("Attribution", true),
    ADDITIONAL_INFO_SECTION("Additional Info"),
    SUBMIT_REPORT_SECTION("Submit", true),
    OUTCOME_SECTION("Outcome");

    // Fields are final: enum constants are shared singletons and must be immutable
    private final String displayName;
    private final boolean associatedToBusinessRules;

    private ExpeditedReportSection(String displayName) {
        // Default: not associated to business rules
        this(displayName, false);
    }

    private ExpeditedReportSection(String displayName, boolean associatedToBusinessRules) {
        this.displayName = displayName;
        this.associatedToBusinessRules = associatedToBusinessRules;
    }

    /** @return the human-readable name of this section */
    public String getDisplayName() {
        return displayName;
    }

    /** @return true if this section participates in business-rule evaluation */
    public boolean isAssociatedToBusinessRules() {
        return associatedToBusinessRules;
    }

    /**
     * Looks up a section by its display name.
     * Note: display names are not unique (e.g. "Adverse Events"); the first matching
     * constant in declaration order is returned.
     * @param displayName the display name to match exactly
     * @return the first matching section, or null if none matches
     */
    public static ExpeditedReportSection getByDisplayName(String displayName) {
        for (ExpeditedReportSection section : values()) {
            if (section.displayName.equals(displayName)) return section;
        }
        return null;
    }
}
package com.quickblox.sample.groupchatwebrtc.activities;
import android.app.Fragment;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.quickblox.chat.QBChatService;
import com.quickblox.chat.QBSignaling;
import com.quickblox.chat.QBWebRTCSignaling;
import com.quickblox.chat.listeners.QBVideoChatSignalingManagerListener;
import com.quickblox.sample.core.utils.Toaster;
import com.quickblox.sample.groupchatwebrtc.R;
import com.quickblox.sample.groupchatwebrtc.db.QbUsersDbManager;
import com.quickblox.sample.groupchatwebrtc.fragments.BaseConversationFragment;
import com.quickblox.sample.groupchatwebrtc.fragments.AudioConversationFragment;
import com.quickblox.sample.groupchatwebrtc.fragments.VideoConversationFragment;
import com.quickblox.sample.groupchatwebrtc.fragments.ConversationFragmentCallbackListener;
import com.quickblox.sample.groupchatwebrtc.fragments.IncomeCallFragment;
import com.quickblox.sample.groupchatwebrtc.fragments.OnCallEventsController;
import com.quickblox.sample.groupchatwebrtc.fragments.IncomeCallFragmentCallbackListener;
import com.quickblox.sample.groupchatwebrtc.util.NetworkConnectionChecker;
import com.quickblox.sample.groupchatwebrtc.utils.Consts;
import com.quickblox.sample.groupchatwebrtc.utils.FragmentExecuotr;
import com.quickblox.sample.groupchatwebrtc.utils.QBEntityCallbackImpl;
import com.quickblox.sample.groupchatwebrtc.utils.RingtonePlayer;
import com.quickblox.sample.groupchatwebrtc.utils.SettingsUtil;
import com.quickblox.sample.groupchatwebrtc.utils.UsersUtils;
import com.quickblox.sample.groupchatwebrtc.utils.WebRtcSessionManager;
import com.quickblox.users.model.QBUser;
import com.quickblox.videochat.webrtc.AppRTCAudioManager;
import com.quickblox.videochat.webrtc.QBRTCClient;
import com.quickblox.videochat.webrtc.QBRTCConfig;
import com.quickblox.videochat.webrtc.QBRTCSession;
import com.quickblox.videochat.webrtc.QBRTCTypes;
import com.quickblox.videochat.webrtc.QBSignalingSpec;
import com.quickblox.videochat.webrtc.callbacks.QBRTCClientSessionCallbacks;
import com.quickblox.videochat.webrtc.callbacks.QBRTCSessionConnectionCallbacks;
import com.quickblox.videochat.webrtc.callbacks.QBRTCSignalingCallback;
import com.quickblox.videochat.webrtc.exception.QBRTCException;
import com.quickblox.videochat.webrtc.exception.QBRTCSignalException;
import org.jivesoftware.smack.AbstractConnectionListener;
import org.webrtc.VideoCapturerAndroid;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* QuickBlox team
*/
public class CallActivity extends BaseActivity implements QBRTCClientSessionCallbacks, QBRTCSessionConnectionCallbacks, QBRTCSignalingCallback,
OnCallEventsController, IncomeCallFragmentCallbackListener, ConversationFragmentCallbackListener, NetworkConnectionChecker.OnConnectivityChangedListener {
private static final String TAG = CallActivity.class.getSimpleName();
// Fragment tags and intent-extra keys used when wiring up the call UI
public static final String OPPONENTS_CALL_FRAGMENT = "opponents_call_fragment";
public static final String INCOME_CALL_FRAGMENT = "income_call_fragment";
public static final String CONVERSATION_CALL_FRAGMENT = "conversation_call_fragment";
public static final String CALLER_NAME = "caller_name";
public static final String SESSION_ID = "sessionID";
public static final String START_CONVERSATION_REASON = "start_conversation_reason";
// The WebRTC session this activity is currently handling (null once released)
private QBRTCSession currentSession;
public List<QBUser> opponentsList;
// Timeout task that rejects/hangs up an unanswered incoming call
private Runnable showIncomingCallWindowTask;
private Handler showIncomingCallWindowTaskHandler;
// When true, losing Wi-Fi is allowed to close this activity
private boolean closeByWifiStateAllow = true;
private String hangUpReason;
// True when this activity was started for an incoming (rather than outgoing) call
private boolean isInCommingCall;
private QBRTCClient rtcClient;
// Forwarded per-user session events (accept/reject/hang-up); may be null until a fragment registers
private QBRTCSessionUserCallback sessionUserCallback;
// Notifies the conversation UI about speaker/headset changes; may be null until registered
private OnChangeDynamicToggle onChangeDynamicCallback;
private ConnectionListener connectionListener;
private boolean wifiEnabled = true;
private SharedPreferences sharedPref;
private RingtonePlayer ringtonePlayer;
// Popup shown while the connection is interrupted
private LinearLayout connectionView;
private AppRTCAudioManager audioManager;
private NetworkConnectionChecker networkConnectionChecker;
private WebRtcSessionManager sessionManager;
private QbUsersDbManager dbManager;
// Listeners interested in call start/stop and opponent-list updates
private ArrayList<CurrentCallStateCallback> currentCallStateCallbackList = new ArrayList<>();
private List<Integer> opponentsIdsList;
// Set once the first peer connection is established
private boolean callStarted;
private boolean isVideoCall;
// Deadline (epoch millis) after which a lost connection hangs up the call
private long expirationReconnectionTime;
private int reconnectHangUpTimeMillis;
// Tracks the wired-headset state reported by the audio manager
private boolean headsetPlugged;
/**
 * Launches the call screen.
 * @param context        the launching context
 * @param isIncomingCall true when opening for an incoming call, false for an outgoing one
 */
public static void start(Context context,
                         boolean isIncomingCall) {
    context.startActivity(new Intent(context, CallActivity.class)
            .putExtra(Consts.EXTRA_IS_INCOMING_CALL, isIncomingCall));
}
/**
 * Sets up the call screen: aborts early if there is no current session, then wires up
 * preferences, the RTC client, audio routing, connectivity checking and the first fragment.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    parseIntentExtras();
    if (!initFieldsSuccess()) {
        // we have already currentSession == null, so it's no reason to do further initialization
        finish();
        Log.d(TAG, "finish CallActivity");
        return;
    }
    initCurrentSession(currentSession);
    PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
    sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
    initQBRTCClient();
    initAudioManager();
    initWiFiManagerListener();
    ringtonePlayer = new RingtonePlayer(this, R.raw.beep);
    // Inflated detached; added/removed on demand by showNotificationPopUp()
    connectionView = (LinearLayout) View.inflate(this, R.layout.connection_popup, null);
    startSuitableFragment(isInCommingCall);
}
/**
 * Shows the first fragment: the incoming-call screen (with its answer timeout and
 * user preloading) for incoming calls, otherwise goes straight to the conversation.
 */
private void startSuitableFragment(boolean isInComingCall) {
    if (!isInComingCall) {
        addConversationFragment(isInComingCall);
        return;
    }
    initIncomingCallTask();
    startLoadAbsentUsers();
    addIncomeCallFragment();
}
/**
 * Loads from the server any call participants that are not yet in the local user DB,
 * then notifies listeners so the opponents list can be refreshed.
 */
private void startLoadAbsentUsers() {
    ArrayList<QBUser> usersFromDb = dbManager.getAllUsers();
    ArrayList<Integer> allParticipantsOfCall = new ArrayList<>();
    allParticipantsOfCall.addAll(opponentsIdsList);
    if (isInCommingCall) {
        // For incoming calls the caller is also a participant we may need to display
        allParticipantsOfCall.add(currentSession.getCallerID());
    }
    ArrayList<Integer> idsUsersNeedLoad = UsersUtils.getIdsNotLoadedUsers(usersFromDb, allParticipantsOfCall);
    if (!idsUsersNeedLoad.isEmpty()) {
        requestExecutor.loadUsersByIds(idsUsersNeedLoad, new QBEntityCallbackImpl<ArrayList<QBUser>>() {
            @Override
            public void onSuccess(ArrayList<QBUser> result, Bundle params) {
                dbManager.saveAllUsers(result, false);
                needUpdateOpponentsList(result);
            }
        });
    }
}
// Forwards freshly loaded users to the registered call-state listeners.
private void needUpdateOpponentsList(ArrayList<QBUser> newUsers) {
    notifyCallStateListenersNeedUpdateOpponentsList(newUsers);
}
/**
 * Initializes the session manager, user DB and current session.
 * @return true when a current session exists (and the opponents list was captured),
 *         false when the activity has nothing to work with and should finish
 */
private boolean initFieldsSuccess() {
    sessionManager = WebRtcSessionManager.getInstance(this);
    dbManager = QbUsersDbManager.getInstance(getApplicationContext());
    currentSession = sessionManager.getCurrentSession();
    boolean sessionAvailable = currentSession != null;
    if (sessionAvailable) {
        opponentsIdsList = currentSession.getOpponents();
    }
    return sessionAvailable;
}
@Override
protected View getSnackbarAnchorView() {
    // No custom anchor for snackbars on this screen.
    return null;
}

/**
 * Reads the incoming-call flag from the launch intent. Uses getBooleanExtra with a
 * default instead of getExtras().getBoolean(...) so that starting the activity
 * without any extras cannot throw a NullPointerException.
 */
private void parseIntentExtras() {
    isInCommingCall = getIntent().getBooleanExtra(Consts.EXTRA_IS_INCOMING_CALL, false);
}
/**
 * Creates and starts the WebRTC audio manager: picks the default output device
 * (speaker for video calls, earpiece for audio calls) and registers listeners for
 * audio-device switches and wired-headset plug events.
 */
private void initAudioManager() {
    audioManager = AppRTCAudioManager.create(this, new AppRTCAudioManager.OnAudioManagerStateListener() {
        @Override
        public void onAudioChangedState(AppRTCAudioManager.AudioDevice audioDevice) {
            // Only surface device switches once the call is actually running
            if (callStarted) {
                Toaster.shortToast("Audio device switched to " + audioDevice);
            }
        }
    });
    isVideoCall = QBRTCTypes.QBConferenceType.QB_CONFERENCE_TYPE_VIDEO.equals(currentSession.getConferenceType());
    if (isVideoCall) {
        audioManager.setDefaultAudioDevice(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE);
        Log.d(TAG, "AppRTCAudioManager.AudioDevice.SPEAKER_PHONE");
    } else {
        audioManager.setDefaultAudioDevice(AppRTCAudioManager.AudioDevice.EARPIECE);
        Log.d(TAG, "AppRTCAudioManager.AudioDevice.EARPIECE");
    }
    audioManager.setOnWiredHeadsetStateListener(new AppRTCAudioManager.OnWiredHeadsetStateListener() {
        @Override
        public void onWiredHeadsetStateChanged(boolean plugged, boolean hasMicrophone) {
            // Remember the state so sendHeadsetState() can replay it to a late-registering fragment
            headsetPlugged = plugged;
            if (callStarted) {
                Toaster.shortToast("Headset " + (plugged ? "plugged" : "unplugged"));
            }
            if (onChangeDynamicCallback != null) {
                onChangeDynamicCallback.enableDynamicToggle(plugged);
            }
        }
    });
    audioManager.init();
}
/**
 * Configures the QuickBlox RTC client: hooks up signaling, camera-error reporting,
 * global RTC settings, and registers this activity for session callbacks.
 */
private void initQBRTCClient() {
    rtcClient = QBRTCClient.getInstance(this);
    // Add signalling manager
    QBChatService.getInstance().getVideoChatWebRTCSignalingManager().addSignalingManagerListener(new QBVideoChatSignalingManagerListener() {
        @Override
        public void signalingCreated(QBSignaling qbSignaling, boolean createdLocally) {
            // Only remotely-created signalings must be registered; local ones already are
            if (!createdLocally) {
                rtcClient.addSignaling((QBWebRTCSignaling) qbSignaling);
            }
        }
    });
    rtcClient.setCameraErrorHendler(new VideoCapturerAndroid.CameraErrorHandler() {
        @Override
        public void onCameraError(final String s) {
            // Camera errors arrive off the UI thread; show them via the UI thread
            CallActivity.this.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toaster.longToast(s);
                }
            });
        }
    });
    // Configure
    QBRTCConfig.setMaxOpponentsCount(Consts.MAX_OPPONENTS_COUNT);
    SettingsUtil.setSettingsStrategy(opponentsIdsList, sharedPref, CallActivity.this);
    SettingsUtil.configRTCTimers(CallActivity.this);
    QBRTCConfig.setDebugEnabled(true);
    // Add activity as callback to RTCClient
    rtcClient.addSessionCallbacksListener(this);
    // Start mange QBRTCSessions according to VideoCall parser's callbacks
    rtcClient.prepareToProcessCalls();
    connectionListener = new ConnectionListener();
    QBChatService.getInstance().addConnectionListener(connectionListener);
}
/**
 * Computes the deadline after which a dropped connection should hang up the call,
 * based on the user-configurable disconnect interval (stored in seconds).
 */
private void setExpirationReconnectionTime() {
    reconnectHangUpTimeMillis = SettingsUtil.getPreferenceInt(sharedPref, this, R.string.pref_disconnect_time_interval_key,
            R.string.pref_disconnect_time_interval_default_value) * 1000;
    expirationReconnectionTime = System.currentTimeMillis() + reconnectHangUpTimeMillis;
}
// Hangs up the current session if the reconnection deadline has passed.
private void hangUpAfterLongReconnection() {
    if (expirationReconnectionTime < System.currentTimeMillis()) {
        hangUpCurrentSession();
    }
}
/**
 * Connectivity callback: informs the user about network availability changes
 * once the call is in progress.
 */
@Override
public void connectivityChanged(boolean availableNow) {
    if (callStarted) {
        // Fixed: the "unavailable" literal had a leading space, producing a
        // double-spaced message ("connection  unavailable")
        showToast("Internet connection " + (availableNow ? "available" : "unavailable"));
    }
}
/**
 * Shows or hides the connection-status popup inside the fragment container.
 * @param text string resource to display when showing
 * @param show true to attach the popup, false to remove it
 */
private void showNotificationPopUp(final int text, final boolean show) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            if (show) {
                ((TextView) connectionView.findViewById(R.id.notification)).setText(text);
                // Only attach once; the view may already be in the container
                if (connectionView.getParent() == null) {
                    ((ViewGroup) CallActivity.this.findViewById(R.id.fragment_container)).addView(connectionView);
                }
            } else {
                ((ViewGroup) CallActivity.this.findViewById(R.id.fragment_container)).removeView(connectionView);
            }
        }
    });
}
// Creates the checker used to observe network connectivity changes.
private void initWiFiManagerListener() {
    networkConnectionChecker = new NetworkConnectionChecker(getApplication());
}
/**
 * Prepares the timeout task for an unanswered incoming call: if the session is still
 * new it is rejected; otherwise (already being answered) the call is hung up.
 */
private void initIncomingCallTask() {
    showIncomingCallWindowTaskHandler = new Handler(Looper.myLooper());
    showIncomingCallWindowTask = new Runnable() {
        @Override
        public void run() {
            // Session may already have been released by the time the timer fires
            if (currentSession == null) {
                return;
            }
            QBRTCSession.QBRTCSessionState currentSessionState = currentSession.getState();
            if (QBRTCSession.QBRTCSessionState.QB_RTC_SESSION_NEW.equals(currentSessionState)) {
                rejectCurrentSession();
            } else {
                ringtonePlayer.stop();
                hangUpCurrentSession();
            }
            Toaster.longToast("Call was stopped by timer");
        }
    };
}
/** @return the session this activity is currently handling, or null if none */
private QBRTCSession getCurrentSession() {
    return currentSession;
}

/** Rejects the current session (no-op when there is no session). */
public void rejectCurrentSession() {
    QBRTCSession session = getCurrentSession();
    if (session != null) {
        session.rejectCall(new HashMap<String, String>());
    }
}

/** Stops the ringtone and hangs up the current session (no-op when there is no session). */
public void hangUpCurrentSession() {
    ringtonePlayer.stop();
    QBRTCSession session = getCurrentSession();
    if (session != null) {
        session.hangUp(new HashMap<String, String>());
    }
}

/** Toggles the local audio track, if the session's media stream is available. */
private void setAudioEnabled(boolean isAudioEnabled) {
    boolean streamReady = currentSession != null && currentSession.getMediaStreamManager() != null;
    if (streamReady) {
        currentSession.getMediaStreamManager().setAudioEnabled(isAudioEnabled);
    }
}

/** Toggles the local video track, if the session's media stream is available. */
private void setVideoEnabled(boolean isVideoEnabled) {
    boolean streamReady = currentSession != null && currentSession.getMediaStreamManager() != null;
    if (streamReady) {
        currentSession.getMediaStreamManager().setVideoEnabled(isVideoEnabled);
    }
}
// Schedules the incoming-call timeout task to run after the given delay (millis).
private void startIncomeCallTimer(long time) {
    showIncomingCallWindowTaskHandler.postAtTime(showIncomingCallWindowTask, SystemClock.uptimeMillis() + time);
}
// Cancels a pending incoming-call timeout (e.g. once the call connects).
private void stopIncomeCallTimer() {
    Log.d(TAG, "stopIncomeCallTimer");
    showIncomingCallWindowTaskHandler.removeCallbacks(showIncomingCallWindowTask);
}
@Override
protected void onResume() {
    super.onResume();
    // Only observe connectivity while in the foreground
    networkConnectionChecker.registerListener(this);
}
@Override
protected void onPause() {
    super.onPause();
    networkConnectionChecker.unregisterListener(this);
}
@Override
protected void onStop() {
    // No extra work beyond the superclass; kept as an explicit lifecycle hook
    super.onStop();
}
// Once a peer connection is up, losing Wi-Fi should no longer auto-close the activity.
private void forbiddenCloseByWifiState() {
    closeByWifiStateAllow = false;
}
/**
 * Adopts the given session as the current one and registers this activity for its
 * connection and signaling callbacks. Null sessions are ignored.
 */
public void initCurrentSession(QBRTCSession session) {
    if (session != null) {
        Log.d(TAG, "Init new QBRTCSession");
        this.currentSession = session;
        this.currentSession.addSessionCallbacksListener(CallActivity.this);
        this.currentSession.addSignalingCallback(CallActivity.this);
    }
}
/**
 * Detaches this activity from the current session's callbacks and clears the
 * reference. Safe to call when no session is held.
 */
public void releaseCurrentSession() {
    Log.d(TAG, "Release current session");
    if (currentSession != null) {
        this.currentSession.removeSessionCallbacksListener(CallActivity.this);
        this.currentSession.removeSignalingCallback(CallActivity.this);
        rtcClient.removeSessionsCallbacksListener(CallActivity.this);
        this.currentSession = null;
    }
}
/**
 * A new session arrived while this activity is open: if a call is already in
 * progress, the new one is rejected (device busy).
 */
@Override
public void onReceiveNewSession(final QBRTCSession session) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Log.d(TAG, "Session " + session.getSessionID() + " are income");
            if (getCurrentSession() != null) {
                Log.d(TAG, "Stop new session. Device now is busy");
                session.rejectCall(null);
            }
        }
    });
}
/**
 * An opponent did not answer: forward to the registered fragment callback and
 * stop the outgoing ringtone.
 */
@Override
public void onUserNotAnswer(QBRTCSession session, Integer userID) {
    // Ignore events for sessions other than the one we are handling
    if (!session.equals(getCurrentSession())) {
        return;
    }
    if (sessionUserCallback != null) {
        sessionUserCallback.onUserNotAnswer(session, userID);
    }
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            ringtonePlayer.stop();
        }
    });
}
// No user action within the allotted time: fire the timeout task immediately.
@Override
public void onUserNoActions(QBRTCSession qbrtcSession, Integer integer) {
    startIncomeCallTimer(0);
}
@Override
public void onStartConnectToUser(QBRTCSession session, Integer userID) {
    // No-op: nothing to do when a connection attempt begins.
}

/**
 * An opponent accepted the call: forward to the registered fragment callback
 * and stop the ringtone.
 */
@Override
public void onCallAcceptByUser(QBRTCSession session, Integer userId, Map<String, String> userInfo) {
    // Ignore events for sessions other than the current one
    if (!session.equals(getCurrentSession())) {
        return;
    }
    if (sessionUserCallback != null) {
        sessionUserCallback.onCallAcceptByUser(session, userId, userInfo);
    }
    stopRingtoneOnUiThread();
}

/**
 * An opponent rejected the call: forward to the registered fragment callback
 * and stop the ringtone.
 */
@Override
public void onCallRejectByUser(QBRTCSession session, Integer userID, Map<String, String> userInfo) {
    if (!session.equals(getCurrentSession())) {
        return;
    }
    if (sessionUserCallback != null) {
        sessionUserCallback.onCallRejectByUser(session, userID, userInfo);
    }
    stopRingtoneOnUiThread();
}

// Stops the ringtone on the UI thread (RTC callbacks may arrive on other threads).
private void stopRingtoneOnUiThread() {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            ringtonePlayer.stop();
        }
    });
}
/**
 * A per-user connection closed. If the session was torn down because Wi-Fi was
 * disabled, close the activity with the corresponding result code.
 */
@Override
public void onConnectionClosedForUser(QBRTCSession session, Integer userID) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            // Close app after session close of network was disabled
            if (hangUpReason != null && hangUpReason.equals(Consts.WIFI_DISABLED)) {
                Intent returnIntent = new Intent();
                setResult(Consts.CALL_ACTIVITY_CLOSE_WIFI_DISABLED, returnIntent);
                finish();
            }
        }
    });
}
/**
 * First successful peer connection: mark the call as started, notify listeners,
 * disable Wi-Fi-based auto-close, and cancel the incoming-call timeout.
 */
@Override
public void onConnectedToUser(QBRTCSession session, final Integer userID) {
    callStarted = true;
    notifyCallStateListenersCallStarted();
    forbiddenCloseByWifiState();
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            if (isInCommingCall) {
                stopIncomeCallTimer();
            }
            Log.d(TAG, "onConnectedToUser() is started");
        }
    });
}
// Intentionally empty: disconnect timeouts are handled elsewhere (reconnection deadline).
@Override
public void onDisconnectedTimeoutFromUser(QBRTCSession session, Integer userID) {
}
// Intentionally empty: per-user connection failures require no UI action here.
@Override
public void onConnectionFailedWithUser(QBRTCSession session, Integer userID) {
}
// Intentionally empty: session errors are not surfaced from this activity.
@Override
public void onError(QBRTCSession qbrtcSession, QBRTCException e) {
}
/**
 * The session has fully closed: release audio, detach from the session, allow
 * Wi-Fi-based closing again, and finish the activity.
 */
@Override
public void onSessionClosed(final QBRTCSession session) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Log.d(TAG, "Session " + session.getSessionID() + " start stop session");
            // Only react when it is the session this activity is handling
            if (session.equals(getCurrentSession())) {
                Log.d(TAG, "Stop session");
                if (audioManager != null) {
                    audioManager.close();
                }
                releaseCurrentSession();
                closeByWifiStateAllow = true;
                finish();
            }
        }
    });
}
/**
 * The session is beginning to close: detach session callbacks early and tell
 * listeners the call has stopped.
 */
@Override
public void onSessionStartClose(final QBRTCSession session) {
    if (session.equals(getCurrentSession())) {
        session.removeSessionCallbacksListener(CallActivity.this);
        notifyCallStateListenersCallStopped();
    }
}
// Intentionally empty: temporary disconnects need no immediate handling here.
@Override
public void onDisconnectedFromUser(QBRTCSession session, Integer userID) {
}
// Shows a short toast for a string resource, always from the UI thread.
private void showToast(final int message) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Toaster.shortToast(message);
        }
    });
}
// Shows a short toast for a plain string, always from the UI thread.
private void showToast(final String message) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Toaster.shortToast(message);
        }
    });
}
/**
 * An opponent hung up: forward to the fragment callback and show who left,
 * falling back to the numeric user ID when the user is not in the local DB.
 */
@Override
public void onReceiveHangUpFromUser(final QBRTCSession session, final Integer userID, Map<String, String> map) {
    if (session.equals(getCurrentSession())) {
        if (sessionUserCallback != null) {
            sessionUserCallback.onReceiveHangUpFromUser(session, userID);
        }
        QBUser participant = dbManager.getUserById(userID);
        // Fall back to the raw ID when the participant isn't cached locally
        final String participantName = participant != null ? participant.getFullName() : String.valueOf(userID);
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                showToast("User " + participantName + " " + getString(R.string.text_status_hang_up) + " conversation");
            }
        });
    }
}
private Fragment getCurrentFragment() {
return getFragmentManager().findFragmentById(R.id.fragment_container);
}
private void addIncomeCallFragment() {
Log.d(TAG, "QBRTCSession in addIncomeCallFragment is " + currentSession);
if (currentSession != null) {
IncomeCallFragment fragment = new IncomeCallFragment();
FragmentExecuotr.addFragment(getFragmentManager(), R.id.fragment_container, fragment, INCOME_CALL_FRAGMENT);
} else {
Log.d(TAG, "SKIP addIncomeCallFragment method");
}
}
private void addConversationFragment(boolean isIncomingCall) {
BaseConversationFragment conversationFragment = BaseConversationFragment.newInstance(
isVideoCall
? new VideoConversationFragment()
: new AudioConversationFragment(),
isIncomingCall);
FragmentExecuotr.addFragment(getFragmentManager(), R.id.fragment_container, conversationFragment, CONVERSATION_CALL_FRAGMENT);
}
/**
 * @return the shared preferences instance held by this activity.
 */
public SharedPreferences getDefaultSharedPrefs() {
    return sharedPref;
}
@Override
public void onSuccessSendingPacket(QBSignalingSpec.QBSignalCMD qbSignalCMD, Integer integer) {
    // No-op: successful signaling packets require no UI reaction here.
}
@Override
public void onErrorSendingPacket(QBSignalingSpec.QBSignalCMD qbSignalCMD, Integer userId, QBRTCSignalException e) {
    // Surface signaling failures to the user as a toast.
    showToast(R.string.dlg_signal_error);
}
/**
 * Tells the audio manager whether the headset should be managed by default.
 */
public void onUseHeadSet(boolean use) {
    audioManager.setManageHeadsetByDefault(use);
}
/**
 * Pushes the current headset-plugged state to the dynamic-toggle listener.
 * Fix: guard against a null listener — removeOnChangeDynamicToggle() nulls the
 * field, so calling this afterwards used to throw a NullPointerException.
 */
public void sendHeadsetState() {
    if (isInCommingCall && onChangeDynamicCallback != null) {
        onChangeDynamicCallback.enableDynamicToggle(headsetPlugged);
    }
}
////////////////////////////// IncomeCallFragmentCallbackListener ////////////////////////////
@Override
public void onAcceptCurrentSession() {
    // Incoming call accepted: swap in the conversation UI.
    addConversationFragment(true);
}
@Override
public void onRejectCurrentSession() {
    // Incoming call declined by the user.
    rejectCurrentSession();
}
////////////////////////////////////////// end /////////////////////////////////////////////
@Override
public void onBackPressed() {
    // Intentionally empty: overriding with a no-op body suppresses the back
    // button on this screen.
}
@Override
protected void onDestroy() {
    super.onDestroy();
    // Stop listening for chat connection events once this activity is gone.
    QBChatService.getInstance().removeConnectionListener(connectionListener);
}
////////////////////////////// ConversationFragmentCallbackListener ////////////////////////////
@Override
public void addTCClientConnectionCallback(QBRTCSessionConnectionCallbacks clientConnectionCallbacks) {
    // Nothing to attach to without an active session.
    if (currentSession == null) {
        return;
    }
    currentSession.addSessionCallbacksListener(clientConnectionCallbacks);
}
@Override
public void addRTCSessionUserCallback(QBRTCSessionUserCallback sessionUserCallback) {
    // Single-slot registration: the newest listener replaces any previous one.
    this.sessionUserCallback = sessionUserCallback;
}
@Override
public void onSetAudioEnabled(boolean isAudioEnabled) {
    // Delegate microphone mute/unmute to the activity-level helper.
    setAudioEnabled(isAudioEnabled);
}
@Override
public void onHangUpCurrentSession() {
    // User chose to end the call from the conversation fragment.
    hangUpCurrentSession();
}
@Override
public void onSetVideoEnabled(boolean isNeedEnableCam) {
    // Delegate camera on/off to the activity-level helper.
    setVideoEnabled(isNeedEnableCam);
}
@Override
public void onSwitchAudio() {
    // Toggle between loudspeaker output and earpiece/wired-headset output.
    AppRTCAudioManager.AudioDevice current = audioManager.getSelectedAudioDevice();
    boolean onEarpieceOrHeadset = current == AppRTCAudioManager.AudioDevice.WIRED_HEADSET
            || current == AppRTCAudioManager.AudioDevice.EARPIECE;
    audioManager.setAudioDevice(onEarpieceOrHeadset
            ? AppRTCAudioManager.AudioDevice.SPEAKER_PHONE
            : AppRTCAudioManager.AudioDevice.EARPIECE);
}
@Override
public void removeRTCClientConnectionCallback(QBRTCSessionConnectionCallbacks clientConnectionCallbacks) {
    // Nothing registered when there is no active session.
    if (currentSession == null) {
        return;
    }
    currentSession.removeSessionCallbacksListener(clientConnectionCallbacks);
}
@Override
public void removeRTCSessionUserCallback(QBRTCSessionUserCallback sessionUserCallback) {
    // Fix: clear the slot only when the caller unregisters the listener that is
    // actually installed. The old unconditional null-out meant a stale
    // unregister could silently wipe a newer listener registered afterwards.
    if (this.sessionUserCallback == sessionUserCallback) {
        this.sessionUserCallback = null;
    }
}
@Override
public void addCurrentCallStateCallback(CurrentCallStateCallback currentCallStateCallback) {
    // Multiple listeners are supported; they are notified in registration order.
    currentCallStateCallbackList.add(currentCallStateCallback);
}
@Override
public void removeCurrentCallStateCallback(CurrentCallStateCallback currentCallStateCallback) {
    // Removing an unregistered listener is a harmless no-op.
    currentCallStateCallbackList.remove(currentCallStateCallback);
}
@Override
public void addOnChangeDynamicToggle(OnChangeDynamicToggle onChangeDynamicCallback) {
    this.onChangeDynamicCallback = onChangeDynamicCallback;
    // Immediately sync the new listener with the current headset state.
    sendHeadsetState();
}
@Override
public void removeOnChangeDynamicToggle(OnChangeDynamicToggle onChangeDynamicCallback) {
    // Fix: only clear the slot when the currently installed listener is the one
    // being removed, so a stale unregister cannot wipe a newer listener.
    if (this.onChangeDynamicCallback == onChangeDynamicCallback) {
        this.onChangeDynamicCallback = null;
    }
}
////////////////////////////////////////// end /////////////////////////////////////////////
/**
 * Reacts to chat connection state changes: shows/hides the "connection lost"
 * notification and hangs up if reconnection takes too long before call start.
 */
private class ConnectionListener extends AbstractConnectionListener {

    @Override
    public void connectionClosedOnError(Exception e) {
        // Surface the outage and start the reconnection deadline timer.
        showNotificationPopUp(R.string.connection_was_lost, true);
        setExpirationReconnectionTime();
    }

    @Override
    public void reconnectionSuccessful() {
        // Hide the "connection lost" notification again.
        showNotificationPopUp(R.string.connection_was_lost, false);
    }

    @Override
    public void reconnectingIn(int seconds) {
        Log.i(TAG, "reconnectingIn " + seconds);
        if (callStarted) {
            return;
        }
        hangUpAfterLongReconnection();
    }
}
/**
 * Listener notified with the headset-plugged state (see sendHeadsetState()) so
 * the UI can enable or disable the speaker ("dynamic") toggle accordingly.
 */
public interface OnChangeDynamicToggle {
    /** @param plugged true when a wired headset is currently plugged in */
    void enableDynamicToggle(boolean plugged);
}
/**
 * Per-user call events forwarded from the activity to the registered fragment
 * (see addRTCSessionUserCallback / removeRTCSessionUserCallback).
 */
public interface QBRTCSessionUserCallback {
    /** The opponent did not answer the call. */
    void onUserNotAnswer(QBRTCSession session, Integer userId);
    /** The opponent rejected the call. */
    void onCallRejectByUser(QBRTCSession session, Integer userId, Map<String, String> userInfo);
    /** The opponent accepted the call. */
    void onCallAcceptByUser(QBRTCSession session, Integer userId, Map<String, String> userInfo);
    /** The opponent hung up during the call. */
    void onReceiveHangUpFromUser(QBRTCSession session, Integer userId);
}
/**
 * Call lifecycle events broadcast to all registered listeners on the UI thread
 * (see the notifyCallStateListeners* methods).
 */
public interface CurrentCallStateCallback {
    /** The call has started. */
    void onCallStarted();
    /** The call has stopped. */
    void onCallStopped();
    /** The set of call participants changed. */
    void onOpponentsListUpdated(ArrayList<QBUser> newUsers);
}
/**
 * Fans the "call started" event out to all registered listeners on the UI thread.
 */
private void notifyCallStateListenersCallStarted() {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            for (CurrentCallStateCallback listener : currentCallStateCallbackList) {
                listener.onCallStarted();
            }
        }
    });
}
/**
 * Fans the "call stopped" event out to all registered listeners on the UI thread.
 */
private void notifyCallStateListenersCallStopped() {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            for (CurrentCallStateCallback listener : currentCallStateCallbackList) {
                listener.onCallStopped();
            }
        }
    });
}
/**
 * Notifies all registered listeners, on the UI thread, that the opponents list changed.
 */
private void notifyCallStateListenersNeedUpdateOpponentsList(final ArrayList<QBUser> newUsers) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            for (CurrentCallStateCallback listener : currentCallStateCallbackList) {
                listener.onOpponentsListUpdated(newUsers);
            }
        }
    });
}
} |
package com.clockbyte.admobadapter.sampleapp.express;
import android.app.Activity;
import android.os.Bundle;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import com.clockbyte.admobadapter.expressads.AdmobExpressAdapterWrapper;
import com.clockbyte.admobadapter.expressads.ExpressAdPreset;
import com.clockbyte.admobadapter.sampleapp.R;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdSize;
import com.google.android.gms.ads.MobileAds;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Timer;
import java.util.TimerTask;
public class MainActivity_ListView_Express extends Activity {
ListView lvMessages;
AdmobExpressAdapterWrapper adapterWrapper;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main_listview);
//highly-recommended in Firebase docs to initialize things early as possible
//test_admob_app_id is different with unit_id! you could get it in your Admob console
MobileAds.initialize(getApplicationContext(), getString(R.string.test_admob_app_id));
initListViewItems();
}
/**
* Inits an adapter with items, wrapping your adapter with a {@link AdmobExpressAdapterWrapper} and setting the listview to this wrapper
* FIRST OF ALL Please notice that the following code will work on a real devices but emulator!
*/
private void initListViewItems() {
lvMessages = (ListView) findViewById(R.id.lvMessages);
//creating your adapter, it could be a custom adapter as well
ArrayAdapter<String> adapter = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1);
//your test devices' ids
String[] testDevicesIds = new String[]{getString(R.string.testDeviceID),AdRequest.DEVICE_ID_EMULATOR};
//when you'll be ready for release please use another ctor with admobReleaseUnitId instead.
adapterWrapper = new AdmobExpressAdapterWrapper(this, testDevicesIds);
//By default the ad size is set to FULL_WIDTHx150
//To set a custom size you should use an appropriate ctor
//adapterWrapper = new AdmobExpressAdapterWrapper(this, testDevicesIds, new AdSize(AdSize.FULL_WIDTH, 150));
adapterWrapper.setAdapter(adapter); //wrapping your adapter with a AdmobExpressAdapterWrapper.
//Sets the max count of ad blocks per dataset, by default it equals to 3 (according to the Admob's policies and rules)
adapterWrapper.setLimitOfAds(10);
//Sets the number of your data items between ad blocks, by default it equals to 10.
//You should set it according to the Admob's policies and rules which says not to
//display more than one ad block at the visible part of the screen,
// so you should choose this parameter carefully and according to your item's height and screen resolution of a target devices
adapterWrapper.setNoOfDataBetweenAds(10);
adapterWrapper.setFirstAdIndex(2);
lvMessages.setAdapter(adapterWrapper); // setting an AdmobAdapterWrapper to a ListView
//preparing the collection of data
final String sItem = "item
ArrayList<String> lst = new ArrayList<String>(100);
for(int i=1;i<=100;i++)
lst.add(sItem.concat(Integer.toString(i)));
//adding a collection of data to your adapter and rising the data set changed event
adapter.addAll(lst);
adapter.notifyDataSetChanged();
}
/*
* Seems to be a good practice to destroy all the resources you have used earlier :)
*/
@Override
protected void onDestroy() {
super.onDestroy();
adapterWrapper.destroyAds();
}
} |
package org.sagebionetworks.repo.manager.backup;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.manager.backup.migration.MigrationDriver;
import org.sagebionetworks.repo.manager.backup.migration.MigrationDriverImpl;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.Annotations;
import org.sagebionetworks.repo.model.AuthorizationConstants;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.Node;
import org.sagebionetworks.repo.model.NodeBackup;
import org.sagebionetworks.repo.model.NodeRevisionBackup;
import org.sagebionetworks.repo.model.Reference;
import org.sagebionetworks.repo.model.ResourceAccess;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.query.jdo.NodeAliasCache;
import org.sagebionetworks.repo.model.search.Document;
import org.sagebionetworks.repo.model.search.DocumentFields;
import org.sagebionetworks.repo.model.search.DocumentTypeNames;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.EntityFactory;
import org.springframework.beans.factory.annotation.Autowired;
public class SearchDocumentDriverImpl implements SearchDocumentDriver {
/**
* The index field holding the access control list info
*/
public static final String ACL_INDEX_FIELD = "acl";
/**
* No more than 100 values in a field value array
*/
public static final int FIELD_VALUE_SIZE_LIMIT = 100;
private static Log log = LogFactory.getLog(SearchDocumentDriverImpl.class);
private static final String PATH_DELIMITER = "/";
private static final String CATCH_ALL_FIELD = "annotations";
private static final String DISEASE_FIELD = "disease";
private static final String TISSUE_FIELD = "tissue";
private static final String SPECIES_FIELD = "species";
private static final String PLATFORM_FIELD = "platform";
private static final String NUM_SAMPLES_FIELD = "num_samples";
// private static final String INVESTIGATOR = "investigator";
// private static final String INSTITUTION = "institution";
private static final Map<String, String> SEARCHABLE_NODE_ANNOTATIONS;
@Autowired
NodeBackupManager backupManager;
@Autowired
NodeAliasCache aliasCache;
@Autowired
UserManager userManager;
// For now we can just create one of these. We might need to make beans in
// the future.
MigrationDriver migrationDriver = new MigrationDriverImpl();
static {
// These are both node primary annotations and additional annotation
// names
Map<String, String> searchableNodeAnnotations = new HashMap<String, String>();
searchableNodeAnnotations.put("disease", DISEASE_FIELD);
searchableNodeAnnotations.put("Disease", DISEASE_FIELD);
searchableNodeAnnotations.put("Tissue_Tumor", TISSUE_FIELD);
searchableNodeAnnotations.put("sampleSource", TISSUE_FIELD);
searchableNodeAnnotations.put("SampleSource", TISSUE_FIELD);
searchableNodeAnnotations.put("tissueType", TISSUE_FIELD);
searchableNodeAnnotations.put("species", SPECIES_FIELD);
searchableNodeAnnotations.put("Species", SPECIES_FIELD);
searchableNodeAnnotations.put("platform", PLATFORM_FIELD);
searchableNodeAnnotations.put("Platform", PLATFORM_FIELD);
searchableNodeAnnotations.put("platformDesc", PLATFORM_FIELD);
searchableNodeAnnotations.put("platformVendor", PLATFORM_FIELD);
searchableNodeAnnotations.put("number_of_samples", NUM_SAMPLES_FIELD);
searchableNodeAnnotations.put("Number_of_Samples", NUM_SAMPLES_FIELD);
searchableNodeAnnotations.put("Number_of_samples", NUM_SAMPLES_FIELD);
searchableNodeAnnotations.put("numSamples", NUM_SAMPLES_FIELD);
// searchableNodeAnnotations.put("Investigator", INVESTIGATOR);
// searchableNodeAnnotations.put("Institution", INSTITUTION);
SEARCHABLE_NODE_ANNOTATIONS = Collections
.unmodifiableMap(searchableNodeAnnotations);
}
/**
* Used by Spring
*/
public SearchDocumentDriverImpl() {
}
/**
* Used by unit tests.
*
* @param backupManager
*
*/
public SearchDocumentDriverImpl(NodeBackupManager backupManager) {
super();
this.backupManager = backupManager;
}
/**
* @param destination
* @param progress
* @param entitiesToBackup
* @throws IOException
* @throws DatastoreException
* @throws NotFoundException
* @throws InterruptedException
* @throws JSONObjectAdapterException
*/
public void writeSearchDocument(File destination, Progress progress,
Set<String> entitiesToBackup) throws IOException,
DatastoreException, NotFoundException, InterruptedException,
JSONObjectAdapterException {
if (destination == null)
throw new IllegalArgumentException(
"Destination file cannot be null");
if (!destination.exists())
throw new IllegalArgumentException(
"Destination file does not exist: "
+ destination.getAbsolutePath());
if (progress == null)
throw new IllegalArgumentException("Progress cannot be null");
// If the entitiesToBackup is null then include the root
List<String> listToBackup = new ArrayList<String>();
boolean isRecursive = false;
if (entitiesToBackup == null) {
// Just add the root
isRecursive = true;
listToBackup.add(backupManager.getRootId());
} else {
// Add all of the entities from the set.
isRecursive = false;
listToBackup.addAll(entitiesToBackup);
}
log.info("Starting a backup to file: " + destination.getAbsolutePath());
progress.setTotalCount(backupManager.getTotalNodeCount());
// First write to the file
FileOutputStream outputStream = new FileOutputStream(destination);
// DEV NOTE: (1) AwesomeSearch cannot currently accept zipped content so
// we are not making a ZipOutputStream here (2) AwesomeSearch expects a
// raw JSON array so we cannot use something like
// org.sagebionetworks.repo.model.search.DocumentBatch here . . . also
// building up a gigantic DocumentBatch isn't appropriate for the
// streaming we are doing here to help with memory usage when dealing
// with a large batch of entities to send to search so this is better
// anyway
outputStream.write('[');
boolean isFirstEntry = true;
// First write the root node as its own entry
for (String idToBackup : listToBackup) {
// Recursively write each node.
NodeBackup backup = backupManager.getNode(idToBackup);
if (backup == null)
throw new IllegalArgumentException("Cannot backup node: "
+ idToBackup + " because it does not exists");
writeSearchDocumentBatch(outputStream, backup, "", progress,
isRecursive, isFirstEntry);
isFirstEntry = false;
}
outputStream.write(']');
outputStream.flush();
outputStream.close();
}
/**
* This is a recursive method that will write the full tree of node data to
* the search document batch.
*
* @throws JSONObjectAdapterException
*/
private void writeSearchDocumentBatch(OutputStream outputStream,
NodeBackup backup, String path, Progress progress,
boolean isRecursive, boolean isFirstEntry)
throws NotFoundException, DatastoreException, InterruptedException,
IOException, JSONObjectAdapterException {
if (backup == null)
throw new IllegalArgumentException("NodeBackup cannot be null");
if (backup.getNode() == null)
throw new IllegalArgumentException("NodeBackup.node cannot be null");
Node node = backup.getNode();
if (node.getId() == null)
throw new IllegalArgumentException("node.id cannot be null");
// Since this could be called in a tight loop, we need to be
// CPU friendly
Thread.yield();
path = path + node.getId() + PATH_DELIMITER;
// Write this node
// A well-formed JSON array does not end with a final comma, so here's
// how we ensure we add the right commas
if (isFirstEntry) {
isFirstEntry = false;
} else {
outputStream.write(",\n".getBytes());
}
writeSearchDocument(outputStream, backup, path);
progress.setMessage(backup.getNode().getName());
progress.incrementProgress();
log.info(progress.toString());
// Check for termination.
checkForTermination(progress);
if (isRecursive) {
// now write each child
List<String> childList = backup.getChildren();
if (childList != null) {
for (String childId : childList) {
NodeBackup child = backupManager.getNode(childId);
writeSearchDocumentBatch(outputStream, child, path,
progress, isRecursive, false);
}
}
}
}
/**
* @param progress
* @throws InterruptedException
*/
public void checkForTermination(Progress progress)
throws InterruptedException {
// Between each node check to see if we should terminate
if (progress.shouldTerminate()) {
throw new InterruptedException(
"Search document batch terminated by the user");
}
}
/**
* Write a single search document
*
* @throws JSONObjectAdapterException
*/
private void writeSearchDocument(OutputStream outputStream,
NodeBackup backup, String path) throws NotFoundException,
DatastoreException, IOException, JSONObjectAdapterException {
if (backup == null)
throw new IllegalArgumentException("NodeBackup cannot be null");
if (backup.getNode() == null)
throw new IllegalArgumentException("NodeBackup.node cannot be null");
Node node = backup.getNode();
if (node.getId() == null)
throw new IllegalArgumentException("node.id cannot be null");
String benefactorId = backup.getBenefactor();
NodeBackup benefactorBackup = backupManager.getNode(benefactorId);
Long revId = node.getVersionNumber();
NodeRevisionBackup rev = backupManager.getNodeRevision(node.getId(),
revId);
Document document = formulateSearchDocument(node, rev, benefactorBackup
.getAcl());
outputStream.write(cleanSearchDocument(document));
outputStream.flush();
}
static byte[] cleanSearchDocument(Document document)
throws UnsupportedEncodingException, JSONObjectAdapterException {
String serializedDocument = EntityFactory
.createJSONStringForEntity(document);
// AwesomeSearch pukes on control characters. Some descriptions have
// control characters in them for some reason, in any case, just get rid
// of all control characters in the search document
String cleanedDocument = serializedDocument.replaceAll("\\p{Cc}", "");
// Get rid of escaped control characters too
cleanedDocument = cleanedDocument.replaceAll("\\\\u00[0,1][0-9,a-f]",
"");
// AwesomeSearch expects UTF-8
return cleanedDocument.getBytes("UTF-8");
}
@Override
public Document formulateSearchDocument(Node node, NodeRevisionBackup rev,
AccessControlList acl) throws DatastoreException, NotFoundException {
DateTime now = DateTime.now();
Document document = new Document();
DocumentFields fields = new DocumentFields();
document.setFields(fields);
document.setType(DocumentTypeNames.add);
document.setLang("en"); // TODO this should have been set via "default"
// in the schema for this
// Node fields
document.setId(node.getId());
document.setVersion(now.getMillis() / 1000);
fields.setId(node.getId()); // this is redundant because document id
// is returned in search results, but its cleaner to have this also show
// up in the "data" section of AwesomeSearch results
fields.setEtag(node.getETag());
fields.setParent_id(node.getParentId());
fields.setName(node.getName());
fields.setNode_type(aliasCache.getPreferredAlias(node.getNodeType()));
if (null != node.getDescription()) {
fields.setDescription(node.getDescription());
}
fields.setCreated_by(getDisplayNameForUserId(node.getCreatedBy()));
fields.setCreated_on(node.getCreatedOn().getTime() / 1000);
fields.setModified_by(getDisplayNameForUserId(node.getModifiedBy()));
fields.setModified_on(node.getModifiedOn().getTime() / 1000);
// Stuff in this field any extra copies of data that you would like to
// boost in free text search
List<String> boost = new ArrayList<String>();
fields.setBoost(boost);
boost.add(node.getName());
boost.add(node.getName());
boost.add(node.getName());
boost.add(node.getId());
boost.add(node.getId());
boost.add(node.getId());
// Annotations
fields.setAnnotations(new ArrayList<String>());
fields.setDisease(new ArrayList<String>());
fields.setSpecies(new ArrayList<String>());
fields.setTissue(new ArrayList<String>());
fields.setPlatform(new ArrayList<String>());
fields.setNum_samples(new ArrayList<Long>());
addAnnotationsToSearchDocument(fields, rev.getNamedAnnotations()
.getPrimaryAnnotations());
addAnnotationsToSearchDocument(fields, rev.getNamedAnnotations()
.getAdditionalAnnotations());
// Transform the annotations array back to an array containing a single
// string since we often overflow the upper limit on value array length
// for AwesomeSearch
String joinedAnnotations = StringUtils.join(fields.getAnnotations(),
" ");
List<String> annotationsValue = new ArrayList<String>();
annotationsValue.add(joinedAnnotations);
fields.setAnnotations(annotationsValue);
// References, just put the node id to which the reference refers. Not
// currently adding the version or the type of the reference (e.g.,
// code/input/output)
if ((null != node.getReferences()) && (0 < node.getReferences().size())) {
List<String> referenceValues = new ArrayList<String>();
fields.setReferences(referenceValues);
for (Set<Reference> refs : node.getReferences().values()) {
for (Reference ref : refs) {
if (FIELD_VALUE_SIZE_LIMIT > referenceValues.size()) {
referenceValues.add(ref.getTargetId());
} else {
log.warn("Had to leave reference " + ref.getTargetId()
+ " out of search document " + node.getId()
+ " due to AwesomeSearch limits");
}
}
}
}
// READ and UPDATE ACLs
List<String> readAclValues = new ArrayList<String>();
fields.setAcl(readAclValues);
List<String> updateAclValues = new ArrayList<String>();
fields.setUpdate_acl(updateAclValues);
for (ResourceAccess access : acl.getResourceAccess()) {
if (access.getAccessType().contains(
AuthorizationConstants.ACCESS_TYPE.READ)) {
if (FIELD_VALUE_SIZE_LIMIT > readAclValues.size()) {
readAclValues.add(access.getGroupName());
} else {
log.error("Had to leave READ acl " + access.getGroupName()
+ " out of search document " + node.getId()
+ " due to AwesomeSearch limits");
}
}
if (access.getAccessType().contains(
AuthorizationConstants.ACCESS_TYPE.UPDATE)) {
if (FIELD_VALUE_SIZE_LIMIT > updateAclValues.size()) {
updateAclValues.add(access.getGroupName());
} else {
log.error("Had to leave UPDATE acl "
+ access.getGroupName()
+ " out of search document " + node.getId()
+ " due to AwesomeSearch limits");
}
}
}
return document;
}
private String getDisplayNameForUserId(String userId) {
String displayName = userId;
try {
displayName = userManager.getUserInfo(userId).getUser()
.getDisplayName();
} catch (NotFoundException ex) {
// this is a best-effort attempt to fill in the display name and
// this will happen for the 'bootstrap' user and users we may delete
// from our system but are still the creators/modifiers of entities
log.debug("Unable to get display name for user id: " + userId + ",", ex);
} catch (Exception ex) {
log.warn("Unable to get display name for user id: " + userId + ",", ex);
}
return displayName;
}
@SuppressWarnings("unchecked")
static void addAnnotationsToSearchDocument(DocumentFields fields,
Annotations annots) {
for (String key : annots.keySet()) {
Collection values = annots.getAllValues(key);
if (1 > values.size()) {
// no values so nothing to do here
continue;
}
Object objs[] = values.toArray();
if (objs[0] instanceof byte[]) {
// don't add blob annotations to the search index
continue;
}
String searchFieldName = SEARCHABLE_NODE_ANNOTATIONS.get(key);
for (int i = 0; i < objs.length; i++) {
if (null == objs[i])
continue;
if (null != searchFieldName) {
addAnnotationToSearchDocument(fields, searchFieldName,
objs[i]);
}
// Put ALL annotations into the catch-all field even if they are
// also in a facet, this way we can discover them both by free
// text AND faceted search
// TODO dates to epoch time? or skip them?
String catchAllValue = key + ":" + objs[i].toString();
// A multi-word annotation gets underscores so we can
// exact-match find it but I'm not positive this is actually
// true because AwesomeSearch might be splitting free text on
// underscores
catchAllValue = catchAllValue.replaceAll("\\s", "_");
addAnnotationToSearchDocument(fields, CATCH_ALL_FIELD,
catchAllValue);
}
}
}
static void addAnnotationToSearchDocument(DocumentFields fields,
String key, Object value) {
if (CATCH_ALL_FIELD == key) {
// Since the annotations field is a text field, after this we just
// join it into a single string instead of truncating it here since
// there is no need to truncate for free text
fields.getAnnotations().add((String) value);
} else if (DISEASE_FIELD == key
&& FIELD_VALUE_SIZE_LIMIT > fields.getDisease().size()) {
fields.getDisease().add((String) value);
} else if (TISSUE_FIELD == key
&& FIELD_VALUE_SIZE_LIMIT > fields.getTissue().size()) {
fields.getTissue().add((String) value);
} else if (SPECIES_FIELD == key
&& FIELD_VALUE_SIZE_LIMIT > fields.getSpecies().size()) {
fields.getSpecies().add((String) value);
} else if (PLATFORM_FIELD == key
&& FIELD_VALUE_SIZE_LIMIT > fields.getPlatform().size()) {
fields.getPlatform().add((String) value);
} else if (NUM_SAMPLES_FIELD == key
&& FIELD_VALUE_SIZE_LIMIT > fields.getNum_samples().size()) {
if (value instanceof Long) {
fields.getNum_samples().add((Long) value);
} else if (value instanceof String) {
try {
fields.getNum_samples().add(
Long.valueOf(((String) value).trim()));
} catch (NumberFormatException e) {
// swallow this exception, this is just a best-effort
// attempt to push more annotations into search
}
}
} else {
throw new IllegalArgumentException(
"Annotation "
+ key
+ " added to searchable annotations map but not added to addAnnotationToSearchDocument");
}
}
} |
package org.intermine.bio.gbrowse;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
import org.intermine.bio.util.BioQueries;
import org.intermine.bio.util.Constants;
import org.intermine.metadata.Model;
import org.intermine.model.bio.Chromosome;
import org.intermine.model.bio.ChromosomeBand;
import org.intermine.model.bio.Exon;
import org.intermine.model.bio.Gene;
import org.intermine.model.bio.Location;
import org.intermine.model.bio.MRNA;
import org.intermine.model.bio.NcRNA;
import org.intermine.model.bio.Sequence;
import org.intermine.model.bio.SequenceFeature;
import org.intermine.model.bio.Synonym;
import org.intermine.model.bio.Transcript;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.ObjectStoreFactory;
import org.intermine.objectstore.intermine.ObjectStoreInterMineImpl;
import org.intermine.objectstore.proxy.ProxyCollection;
import org.intermine.objectstore.query.ClobAccess;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.objectstore.query.ConstraintSet;
import org.intermine.objectstore.query.ContainsConstraint;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.QueryCollectionReference;
import org.intermine.objectstore.query.QueryField;
import org.intermine.objectstore.query.QueryNode;
import org.intermine.objectstore.query.QueryObjectReference;
import org.intermine.objectstore.query.QueryValue;
import org.intermine.objectstore.query.Results;
import org.intermine.objectstore.query.ResultsRow;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.util.DynamicUtil;
import org.intermine.util.TypeUtil;
/**
* A Task for creating GFF and FASTA files for use by GBrowse. Only those features that are
* located on a Chromosome are written.
* @author Kim Rutherford
*/
public class WriteGFFTask extends Task
{
private static final Logger LOG = Logger.getLogger(WriteGFFTask.class);
// ObjectStore alias naming the database to read from (set via the ant "alias" attribute).
private String alias;
// Directory the generated GFF/FASTA files are written into (set via the ant "dest" attribute).
private File destinationDirectory;
/**
 * Sets the alias of the ObjectStore that the GFF/FASTA data will be read from.
 * @param alias name of the ObjectStore
 */
public void setAlias(String alias) {
    this.alias = alias;
}
/**
 * Sets the directory the generated GFF and FASTA files will be written into.
 * @param destinationDirectory the directory for creating new files in.
 */
public void setDest(File destinationDirectory) {
    this.destinationDirectory = destinationDirectory;
}
/**
 * {@inheritDoc}
 */
@Override
public void execute() {
    // Both attributes are mandatory; fail the build early if either is missing.
    if (destinationDirectory == null) {
        throw new BuildException("dest attribute is not set");
    }
    if (alias == null) {
        throw new BuildException("alias attribute is not set");
    }
    try {
        ObjectStore os = ObjectStoreFactory.getObjectStore(alias);
        writeGFF(os);
    } catch (Exception e) {
        // Ant convention: wrap any failure in a BuildException.
        throw new BuildException(e);
    }
}
// The feature class this task writes; only SequenceFeatures located on a Chromosome are output.
private static final Class<SequenceFeature> LOCATED_SEQUENCE_FEATURE_CLASS =
SequenceFeature.class;
/**
* Create a GFF and FASTA files for the objects in the given ObjectStore, suitable for reading
* by GBrowse.
* @param os the ObjectStore to read from
* @throws ObjectStoreException if the is a problem with the ObjectStore
* @throws IOException if there is a problem while writing
*/
void writeGFF(ObjectStore os)
throws ObjectStoreException, IOException {
Model model = os.getModel();
Results results =
BioQueries.findLocationAndObjects(os, Chromosome.class,
LOCATED_SEQUENCE_FEATURE_CLASS, false, true, false, 2000);
@SuppressWarnings("unchecked") Iterator<ResultsRow> resIter = (Iterator) results.iterator();
PrintWriter gffWriter = null;
// a Map of object classes to counts
Map<String, Integer> objectCounts = null;
// Map from Transcript to Location (on Chromosome)
Map<Transcript, Location> seenTranscripts = new HashMap<Transcript, Location>();
// Map from exon primary identifier to Location (on Chromosome)
Map<String, Location> seenTranscriptParts = new HashMap<String, Location>();
// the last Chromosome seen
Integer currentChrId = null;
Chromosome currentChr = null;
Map<Integer, List<String>> synonymMap = null;
while (resIter.hasNext()) {
ResultsRow<?> rr = resIter.next();
Integer resultChrId = (Integer) rr.get(0);
SequenceFeature feature = (SequenceFeature) rr.get(1);
Location loc = (Location) rr.get(2);
// TODO XXX FIXME - see #628
if (feature instanceof ChromosomeBand) {
continue;
}
try {
if (TypeUtil.isInstanceOf(feature,
"org.intermine.model.bio.ChromosomalDeletion")) {
try {
if (feature.getFieldValue("available") != Boolean.TRUE) {
// write only the available deletions because there are too many
// ChromosomalDeletions for GBrowse to work well
continue;
}
} catch (IllegalAccessException e) {
throw new RuntimeException("can't access 'available' field in: " + feature);
}
}
} catch (ClassNotFoundException e) {
// ignore - ChromosomalDeletion is not in the model
}
if (model.hasClassDescriptor("CDS")) {
if (DynamicUtil.isInstance(feature,
model.getClassDescriptorByName("CDS").getType())) {
// writeTranscriptsAndExons() for use by the processed_transcript
// aggregator
continue;
}
}
if (currentChrId == null || !currentChrId.equals(resultChrId)) {
if (currentChrId != null) {
writeTranscriptsAndExons(gffWriter, currentChr, seenTranscripts,
seenTranscriptParts, synonymMap);
seenTranscripts = new HashMap<Transcript, Location>();
seenTranscriptParts = new HashMap<String, Location>();
}
synonymMap = makeSynonymMap(os, resultChrId);
currentChr = (Chromosome) os.getObjectById(resultChrId);
if (currentChr == null) {
throw new RuntimeException("get null from getObjectById()");
}
if (currentChr.getPrimaryIdentifier() == null) {
LOG.error("chromosome has no identifier: " + currentChr);
continue;
}
if (currentChr.getOrganism() == null) {
LOG.error("chromosome has no organism: " + currentChr);
continue;
}
if (!currentChr.getPrimaryIdentifier().endsWith("_random")) {
writeChromosomeFasta(currentChr);
File gffFile = chromosomeGFFFile(currentChr);
if (gffWriter != null) {
gffWriter.close();
}
gffWriter = new PrintWriter(new FileWriter(gffFile));
List<String> synonymList = synonymMap.get(currentChr.getId());
writeFeature(gffWriter, currentChr, currentChr, null,
chromosomeFileNamePrefix(currentChr),
"chromosome",
"Chromosome", null, synonymList, currentChr.getId());
objectCounts = new HashMap<String, Integer>();
currentChrId = resultChrId;
}
}
if (currentChr == null || synonymMap == null || objectCounts == null) {
throw new RuntimeException("Internal error - failed to set maps");
}
// process Transcripts but not tRNAs
// we can't just check for MRNA because the Transcripts of Pseudogenes aren't MRNAs
if (feature instanceof Transcript && !(feature instanceof NcRNA)) {
seenTranscripts.put((Transcript) feature, loc);
}
String primaryIdentifier = feature.getPrimaryIdentifier();
if (feature instanceof Exon
// || feature instanceof FivePrimeUTR || feature instanceof ThreePrimeUTR
) {
seenTranscriptParts.put(primaryIdentifier, loc);
}
String identifier = primaryIdentifier;
String featureType = getFeatureName(feature);
if (identifier == null) {
identifier = featureType + "_" + objectCounts.get(feature.getClass());
}
List<String> synonymList = synonymMap.get(feature.getId());
Map<String, List<String>> extraAttributes = new HashMap<String, List<String>>();
if (feature instanceof ChromosomeBand) {
ArrayList<String> indexList = new ArrayList<String>();
indexList.add(objectCounts.get(feature.getClass()).toString());
extraAttributes.put("Index", indexList);
}
writeFeature(gffWriter, currentChr, feature, loc, identifier,
featureType.toLowerCase(), featureType, extraAttributes,
synonymList, feature.getId());
incrementCount(objectCounts, feature);
}
if (currentChr == null) {
throw new RuntimeException("no chromosomes found");
}
writeTranscriptsAndExons(gffWriter, currentChr, seenTranscripts, seenTranscriptParts,
synonymMap);
if (gffWriter != null) {
gffWriter.close();
}
}
private String getFeatureName(SequenceFeature feature) {
Class<?> bioEntityClass = feature.getClass();
Set<Class<?>> classes = DynamicUtil.decomposeClass(bioEntityClass);
StringBuffer nameBuffer = new StringBuffer();
for (Class<?> thisClass : classes) {
if (nameBuffer.length() > 0) {
nameBuffer.append("_");
}
nameBuffer.append(TypeUtil.unqualifiedName(thisClass.getName()));
}
return nameBuffer.toString();
}
    /**
     * Write GFF lines for the transcripts (and their exons) collected while scanning the
     * current chromosome.  Transcripts are written as "mRNA" features and each exon is
     * written as a "CDS" line carrying the transcript's identifier - the layout expected
     * by the GBrowse processed_transcript aggregator mentioned in writeGFF().
     *
     * @param gffWriter the writer for the current chromosome's GFF file
     * @param chr the Chromosome the transcripts are located on
     * @param seenTranscripts Map from Transcript to its Location on the chromosome
     * @param seenTranscriptParts Map from exon primaryIdentifier to its Location
     * @param synonymMap Map from feature id to a List of synonym values
     */
    private void writeTranscriptsAndExons(PrintWriter gffWriter, Chromosome chr,
                                          Map<Transcript, Location> seenTranscripts,
                                          Map<String, Location> seenTranscriptParts,
                                          Map<Integer, List<String>> synonymMap) {
        Iterator<Transcript> transcriptIter = seenTranscripts.keySet().iterator();
        while (transcriptIter.hasNext()) {
            // we can't just use MRNA here because the Transcripts of a pseudogene are Transcripts,
            // but aren't MRNAs
            Transcript transcript = transcriptIter.next();

            Gene gene = transcript.getGene();
            if (gene == null) {
                // a transcript with no gene cannot be annotated with a "Gene" attribute; skip it
                continue;
            }

            Location transcriptLocation = seenTranscripts.get(transcript);

            String transcriptFeatureType = "mRNA";

            // the "Gene" attribute groups transcripts of the same gene in GBrowse
            Map<String, List<String>> geneNameAttributeMap = new HashMap<String, List<String>>();

            List<String> geneNameList = new ArrayList<String>();
            geneNameList.add(gene.getSecondaryIdentifier());
            geneNameAttributeMap.put("Gene", geneNameList);

            List<String> synonymList = synonymMap.get(transcript.getId());
            if (synonymList == null) {
                synonymList = new ArrayList<String>();
            }

            if (transcript instanceof MRNA) {
                // special case for CDS objects - display them as MRNA as GBrowse uses the CDS class
                // for displaying MRNAs
                Iterator<CDS> cdsIter = ((MRNA) transcript).getcDSs().iterator();
                while (cdsIter.hasNext()) {
                    CDS cds = cdsIter.next();
                    // expose the CDS's internal id as an extra synonym on the transcript
                    synonymList.add(makeIdString(cds.getId()));
                }
            }

            writeFeature(gffWriter, chr, transcript, transcriptLocation,
                         transcript.getPrimaryIdentifier(),
                         transcriptFeatureType, "mRNA", geneNameAttributeMap, synonymList,
                         transcript.getId());

            Collection<Exon> exons = transcript.getExons();

            ProxyCollection<Exon> exonsResults = (ProxyCollection<Exon>) exons;

            // exon collections are small enough that optimisation just slows things down
            exonsResults.setNoOptimise();
            exonsResults.setNoExplain();

            Iterator<Exon> exonIter = exons.iterator();
            while (exonIter.hasNext()) {
                Exon exon = exonIter.next();
                Location exonLocation = seenTranscriptParts.get(exon.getPrimaryIdentifier());

                List<String> exonSynonymValues = synonymMap.get(exon.getId());

                // exons share the transcript's identifier and are typed "CDS" so the
                // GBrowse aggregator can assemble them into one processed transcript
                writeFeature(gffWriter, chr, exon, exonLocation, transcript.getPrimaryIdentifier(),
                             "CDS", "mRNA", null, exonSynonymValues,
                             transcript.getId());
            }
        }
    }
private void incrementCount(Map<String, Integer> objectCounts, Object object) {
if (objectCounts.containsKey(object.getClass())) {
int oldCount = objectCounts.get(object.getClass()).intValue();
objectCounts.put(object.getClass().toString(), new Integer(oldCount + 1));
} else {
objectCounts.put(object.getClass().toString(), new Integer(1));
}
}
/**
* @param bioEntity the object to write
* @param chromosomeLocation the location of the object on the chromosome
* @param featureType the type (third output column) to be used when writing - null means create
* the featureType automatically from the java class name on the object to write
* @param idType the type tag to use when storing the ID in the attributes Map - null means use
* the featureType
* @param flyMineId
* @param synonymValues a List of synonyms for this feature
* @param evidenceList a List of evidence objects for this feature
*/
private void writeFeature(PrintWriter gffWriter, Chromosome chr, SequenceFeature bioEntity,
Location chromosomeLocation, String identifier, String featureType, String idType,
Map<String, List<String>> extraAttributes, List<String> synonymValues,
Integer flyMineId) {
StringBuffer lineBuffer = new StringBuffer();
lineBuffer.append(chromosomeFileNamePrefix(chr)).append("\t");
String source = ".";
lineBuffer.append(source).append("\t");
lineBuffer.append(featureType).append("\t");
if (chromosomeLocation == null) {
if (bioEntity == chr) {
// special case for Chromosome location
lineBuffer.append(1).append("\t").append(chr.getLength()).append("\t");
} else {
throw new RuntimeException("no chromomsome location for: " + bioEntity);
}
} else {
lineBuffer.append(chromosomeLocation.getStart()).append("\t");
lineBuffer.append(chromosomeLocation.getEnd()).append("\t");
}
lineBuffer.append(0).append("\t");
if (chromosomeLocation == null) {
lineBuffer.append(".");
} else {
if ("1".equals(chromosomeLocation.getStrand())) {
lineBuffer.append("+");
} else {
if ("-1".equals(chromosomeLocation.getStrand())) {
lineBuffer.append("-");
} else {
lineBuffer.append(".");
}
}
}
lineBuffer.append("\t");
lineBuffer.append(".");
lineBuffer.append("\t");
Map<String, List<String>> attributes = new LinkedHashMap<String, List<String>>();
List<String> identifiers = new ArrayList<String>();
identifiers.add(identifier);
attributes.put(idType, identifiers);
String secondaryIdentifier = bioEntity.getSecondaryIdentifier();
if (secondaryIdentifier != null) {
List<String> notes = new ArrayList<String>();
notes.add(secondaryIdentifier);
attributes.put("Note", notes);
}
List<String> allIds = new ArrayList<String>();
if (synonymValues != null) {
Iterator<String> synonymIter = synonymValues.iterator();
while (synonymIter.hasNext()) {
String thisSynonymValue = synonymIter.next();
if (!allIds.contains(thisSynonymValue)) {
allIds.add(thisSynonymValue);
}
}
}
attributes.put("Alias", allIds);
if (extraAttributes != null) {
Iterator<String> extraAttributesIter = extraAttributes.keySet().iterator();
while (extraAttributesIter.hasNext()) {
String key = extraAttributesIter.next();
attributes.put(key, extraAttributes.get(key));
}
}
try {
if (TypeUtil.isInstanceOf(bioEntity, "org.intermine.model.bio.PCRProduct")) {
Boolean fieldValue;
try {
fieldValue = (Boolean) bioEntity.getFieldValue("promoter");
} catch (IllegalAccessException e) {
throw new RuntimeException("can't access 'promoter' field in: " + bioEntity);
}
List<String> promoterFlagList = new ArrayList<String>();
promoterFlagList.add(fieldValue.toString());
attributes.put("promoter", promoterFlagList);
}
} catch (ClassNotFoundException e) {
// ignore - PCRProduct is not in the model
}
lineBuffer.append(stringifyAttributes(attributes));
gffWriter.println(lineBuffer.toString());
}
private String makeIdString(Integer id) {
return "FlyMineInternalID_" + id;
}
/**
* Return a String representation of the attributes Map. Taken from BioJava's
* SimpleGFFRecord.java
* @param attMap the Map of attributes
* @return a String representation of the attributes
*/
static String stringifyAttributes(Map<String, List<String>> attMap) {
StringBuffer sBuff = new StringBuffer();
Iterator<String> ki = attMap.keySet().iterator();
while (ki.hasNext()) {
String key = ki.next();
List<String> values = attMap.get(key);
if (values.size() == 0) {
sBuff.append(key);
sBuff.append(";");
} else {
for (Iterator<String> vi = values.iterator(); vi.hasNext();) {
sBuff.append(key);
String value = vi.next();
sBuff.append(" \"" + value + "\"");
if (ki.hasNext() || vi.hasNext()) {
sBuff.append(";");
}
if (vi.hasNext()) {
sBuff.append(" ");
}
}
}
if (ki.hasNext()) {
sBuff.append(" ");
}
}
return sBuff.toString();
}
    /**
     * Make a Map from SequenceFeature ID to List of Synonym values (Strings) for
     * SequenceFeature objects located on the chromosome with the given ID.
     * @param os the ObjectStore to read from
     * @param chromosomeId the chromosome ID of the SequenceFeature objects to examine
     * @return a Map from id to synonym List
     * @throws ObjectStoreException if there is a problem running the query
     */
    private Map<Integer, List<String>> makeSynonymMap(ObjectStore os, Integer chromosomeId)
        throws ObjectStoreException {
        // build: select distinct feature.id, synonym.value
        //        for features located on the chromosome with the given id
        Query q = new Query();
        q.setDistinct(true);
        QueryClass qcEnt = new QueryClass(SequenceFeature.class);
        QueryField qfEnt = new QueryField(qcEnt, "id");
        q.addFrom(qcEnt);
        q.addToSelect(qfEnt);

        QueryClass qcSyn = new QueryClass(Synonym.class);
        QueryField qfSyn = new QueryField(qcSyn, "value");
        q.addFrom(qcSyn);
        q.addToSelect(qfSyn);

        QueryClass qcLoc = new QueryClass(Location.class);
        q.addFrom(qcLoc);

        QueryClass qcChr = new QueryClass(Chromosome.class);
        QueryField qfChr = new QueryField(qcChr, "id");
        q.addFrom(qcChr);

        ConstraintSet cs = new ConstraintSet(ConstraintOp.AND);

        // feature.synonyms CONTAINS synonym
        QueryCollectionReference col = new QueryCollectionReference(qcEnt, "synonyms");
        ContainsConstraint cc1 = new ContainsConstraint(col, ConstraintOp.CONTAINS, qcSyn);
        cs.addConstraint(cc1);

        // chromosome.id = chromosomeId
        QueryValue chrIdQueryValue = new QueryValue(chromosomeId);
        SimpleConstraint sc = new SimpleConstraint(qfChr, ConstraintOp.EQUALS, chrIdQueryValue);
        cs.addConstraint(sc);

        // location.feature CONTAINS feature
        QueryObjectReference ref1 = new QueryObjectReference(qcLoc, "feature");
        ContainsConstraint cc2 = new ContainsConstraint(ref1, ConstraintOp.CONTAINS, qcEnt);
        cs.addConstraint(cc2);

        // location.locatedOn CONTAINS chromosome
        QueryObjectReference ref2 = new QueryObjectReference(qcLoc, "locatedOn");
        ContainsConstraint cc3 = new ContainsConstraint(ref2, ConstraintOp.CONTAINS, qcChr);
        cs.addConstraint(cc3);

        q.setConstraint(cs);

        // precompute the query so the per-chromosome executions are fast
        Set<QueryNode> indexesToCreate = new HashSet<QueryNode>();
        indexesToCreate.add(qfEnt);
        indexesToCreate.add(qfSyn);
        ((ObjectStoreInterMineImpl) os).precompute(q, indexesToCreate,
                                                   Constants.PRECOMPUTE_CATEGORY);
        Results res = os.execute(q, 50000, true, true, true);

        @SuppressWarnings("unchecked") Iterator<ResultsRow> resIter = (Iterator) res.iterator();

        Map<Integer, List<String>> returnMap = new HashMap<Integer, List<String>>();

        while (resIter.hasNext()) {
            ResultsRow<?> rr = resIter.next();
            Integer bioEntityId = (Integer) rr.get(0);
            String synonymValue = (String) rr.get(1);

            // group the synonym values by feature id
            List<String> synonymValues = returnMap.get(bioEntityId);

            if (synonymValues == null) {
                synonymValues = new ArrayList<String>();
                returnMap.put(bioEntityId, synonymValues);
            }

            synonymValues.add(synonymValue);
        }

        return returnMap;
    }
private void writeChromosomeFasta(Chromosome chr) throws IOException {
Sequence chromosomeSequence = chr.getSequence();
if (chromosomeSequence == null) {
LOG.warn("cannot find any sequence for chromosome " + chr.getPrimaryIdentifier());
} else {
ClobAccess residues = chromosomeSequence.getResidues();
if (residues == null) {
LOG.warn("cannot find any sequence residues for chromosome "
+ chr.getPrimaryIdentifier());
} else {
FileOutputStream fileStream =
new FileOutputStream(chromosomeFastaFile(chr));
PrintStream printStream = new PrintStream(fileStream);
printStream.println(">" + chromosomeFileNamePrefix(chr));
// code from BioJava's FastaFormat class:
int length = residues.length();
for (int pos = 0; pos < length; pos += 60) {
int end = Math.min(pos + 60, length);
printStream.println(residues.subSequence(pos, end).toString());
}
printStream.close();
fileStream.close();
}
}
}
private File chromosomeFastaFile(Chromosome chr) {
return new File(destinationDirectory, chromosomeFileNamePrefix(chr) + ".fa");
}
private File chromosomeGFFFile(Chromosome chr) {
return new File(destinationDirectory, chromosomeFileNamePrefix(chr) + ".gff");
}
private String chromosomeFileNamePrefix(Chromosome chr) {
String orgPrefix;
if (chr.getOrganism().getGenus() == null) {
orgPrefix = "Unknown_organism";
} else {
orgPrefix = chr.getOrganism().getGenus() + "_"
+ chr.getOrganism().getSpecies().replaceAll(" ", "_");
}
return orgPrefix + "_chr_" + chr.getPrimaryIdentifier();
}
} |
package com.algorithms.tree;
import java.util.NoSuchElementException;
import com.algorithms.elementary.ArrayQueue;
import com.algorithms.elementary.Queue;
/**
 * Left-leaning red-black tree: a binary-search-tree encoding of a 2-3-4 tree.
 *
 * @author altro
 */
public class LeftLeaningRedBlackTree <K extends Comparable<K>, V> implements Map<K, V>{
public static void main(String[] args) {
LeftLeaningRedBlackTree<Integer, Integer> tree = new LeftLeaningRedBlackTree<>();
tree.put(10, 10);
tree.put(7, 7);
tree.put(6, 6);
tree.put(8, 8);
tree.put(11, 11);
tree.put(15, 15);
tree.put(17, 17);
tree.put(0, 0);
tree.put(4, 4);
tree.put(19, 19);
System.out.println("tree.get(10) + " + tree.get(10));
System.out.println("tree.get(-1) + " + tree.get(-1));
System.out.println("tree.get(15) + " + tree.get(15));
tree.delete(15);
tree.delete(17);
System.out.println("hah");
}
private Node root = null;
public void put(K k, V v) {
root = put(root, k, v);
root.color = BLACK;
}
private Node put(Node cn, K k, V v) {
if (cn == null) return new Node(k, v, 1, RED);
if(isRed(cn.left) && isRed(cn.right)) split4Node(cn);
int cmp = k.compareTo(cn.k);
if (cmp > 0) cn.right = put(cn.right, k, v); // k > node.k go right
else if (cmp < 0) cn.left = put(cn.left, k, v);
else cn.v = v; //hit
//following code is to fix the tree on the way up
if (isRed(cn.right) && !isRed(cn.left)) cn = rotateLeft(cn);
if (isRed(cn.left) && isRed(cn.left.left)) cn = rotateRight(cn);
cn.size = size(cn.left) + size(cn.right) + 1;
return cn;
}
public V get(K k) {
return get(root, k);
}
//cn means currentNode
private V get(Node cn, K k) {
if (cn == null) return null; // not find the key
int cmp = k.compareTo(cn.k);
if(cmp > 0) return get(cn.right, k);
else if (cmp < 0) return get(cn.left, k);
else return cn.v; // hit
}
@Override
public boolean isEmpty() {
return root == null;
}
@Override
public int size() {
return size(root);
}
@Override
public int size(K lo, K hi) {
return size(root, lo, hi);
}
private int size(Node node, K lo, K hi) {
if (node == null) return 0;
int cmpToLow = node.k.compareTo(lo);
int cmpToHi = node.k.compareTo(hi);
if (cmpToLow < 0) { //node is less than lo
return size(node.right, lo, hi);
} else if (cmpToHi > 0) { // node is large than hi
return size(node.left, lo, hi);
} else { // node is between lo and hi, [lo, hi]
return size(node.right, lo, hi) + size(node.left, lo, hi) + 1;
}
}
@Override
public K min() {
if (isEmpty()) return null;
else return min(root).k;
}
@Override
public K max() {
if (isEmpty()) return null;
else return max(root).k;
}
//get the most max node in the given node
private Node max(Node node) {
while(node.right != null) node = node.right;
return node;
}
@Override
public K floor(K k) {
if (k == null) throw new IllegalArgumentException("argument to floor() is null");
return floor(root, k);
}
private K floor(Node node, K k) {
if (node == null) return null;
int cmp = node.k.compareTo(k);
if (cmp > 0) return floor(node.left, k); //node.k k so we need to find the k in the left tree
else if (cmp < 0) { //node.k is less then k
K returnValue = node.k; //so node.k might be the value .but iam not sure we shall see
if (floor(node.right, k) != null)
returnValue = floor(node.right, k);
return returnValue;
} else {
return node.k;
}
}
@Override
public K ceiling(K k) {
if (k == null) throw new IllegalArgumentException("argument to ceilling() is null");
else return ceiling(root, k);
}
private K ceiling(Node node, K k) {
if (node == null) return null;
int cmp = node.k.compareTo(k);
if (cmp > 0) {
K returnValue = node.k; //so node.k might be the value .but iam not sure we shall see
if (ceiling(node.left, k) != null)
returnValue = ceiling(node.left, k);
return returnValue;
}
else if (cmp < 0) { //node.k is less then k
return ceiling(node.right, k);
} else {
return node.k;
}
}
@Override
public int rank(K k) {
if (k == null) throw new IllegalArgumentException("argument to rank() is null");
return rank(root, k);
}
private int rank(Node node, K k) {
if (node == null) return 0;
int cmp = node.k.compareTo(k);
if (cmp > 0)
return rank(node.left, k);
else if (cmp < 0) {
return size(node.left) + rank(node.right, k) + 1;
} else
return size(node.left);
}
@Override
public K select(int k) {
if (root == null) throw new NoSuchElementException();
if (k > root.size - 1 || k < 0) throw new NoSuchElementException();
return select(root, k);
}
private K select(Node node, int k) {
if (size(node.left) > k) {
return select(node.left, k);
} else if (size(node.left) < k) {
return select(node.right, k - size(node.left) - 1);
} else
return node.k;
}
// keys in [lo , hi] in sorted order
@Override
public Iterable<K> keys(K lo, K hi) {
Queue<K> queue = new ArrayQueue<>();
keys(root, lo, hi, queue);
return queue;
}
private void keys(Node node, K lo, K hi, Queue<K> queue) {
if (node == null) return;
int cmpToLow = node.k.compareTo(lo);
int cmpToHi = node.k.compareTo(hi);
if (cmpToLow < 0) {
keys(node.left, lo, hi, queue);
} else if (cmpToHi > 0) {
keys(node.right, lo, hi, queue);
} else {
keys(node.left, lo, hi, queue);
queue.enqueue(node.k);
keys(node.right, lo, hi, queue);
}
}
@Override
public Iterable<K> keys() {
Queue<K> queue = new ArrayQueue<>();
keys(root, min(), max(), queue);
return queue;
}
public void delete(K k) {
if (k == null) throw new IllegalArgumentException("argument to delete() is null");
if (!contains(k)) return;
if (!isRed(root.left) && !isRed(root.right))
root.color = RED;
root = delete(root, k);
if (root != null)
root.color = BLACK;
}
public boolean contains(K k) {
return get(k) != null;
}
private Node delete(Node cn, K k) {
if (cn == null) return null;
int cmp = k.compareTo(cn.k);
if (cmp < 0) { // k < node.k go left
if (!isRed(cn.left) && !isRed(cn.left.left))
cn = moveRedLeft(cn);
cn.left = delete(cn.left, k);
} else if (cmp > 0) { // k > node.k go right
if (isRed(cn.left) && !isRed(cn.right))
cn = rotateRight(cn);
if (!isRed(cn.right) && !isRed(cn.right.left))
cn = moveRedRight(cn);
cn.right = delete(cn.right, k);
} else { //hit
if (isRed(cn.left) && !isRed(cn.right))
cn = rotateRight(cn);
if (k.compareTo(cn.k) == 0 && (cn.right == null)) //find null just return null
return null;
if (!isRed(cn.right) && !isRed(cn.right.left))
cn = moveRedRight(cn);
if (k.compareTo(cn.k) == 0) {
Node x = min(cn.right);
cn.k = x.k;
cn.v = x.v;
cn.right = deleteMin(cn.right);
} else cn.right = delete(cn.right, k);
}
return fixup(cn);
}
private Node min(Node x) {
if (x.left == null) return x;
else return min(x.left);
}
public void deleteMin() {
if (!isRed(root.left) && !isRed(root.right))
root.color = RED;
root = deleteMin(root);
root.color = BLACK;
}
public Node deleteMin(Node cn) {
if (cn.left == null) return null;
if (!isRed(cn.left) && !isRed(cn.left.left))
cn = moveRedLeft(cn);
cn.left = deleteMin(cn.left);
return fixup(cn);
}
private Node moveRedLeft(Node cn) {
flipColors(cn);
if (isRed(cn.right.left)) {
cn.right = rotateRight(cn.right);
cn = rotateLeft(cn);
flipColors(cn);
}
return cn;
}
public void deleteMax() {
if (!isRed(root.left) && !isRed(root.right))
root.color = RED;
root = deleteMax(root);
root.color = BLACK;
}
//make sure currentNode is not a 2Node by make currentNode is red or currentNode.left is red
private Node deleteMax(Node cn) {
if (isRed(cn.left) && !isRed(cn.right) )
cn = rotateRight(cn);
if (cn.right == null) return null; // approach the end and find the currentNode's childNode is null then just return null;
if (!isRed(cn.right) && !isRed(cn.right.left)) {
cn = moveRedRight(cn);
}
cn.right = deleteMax(cn.right);
return fixup(cn);
}
//used when cn.left is not a 2Node
private Node moveRedRight(Node cn) {
flipColors(cn);
if (isRed(cn.left.left)) {
cn = rotateRight(cn);
flipColors(cn);
}
return cn;
}
private int size(Node node) {
if (node == null)
return 0;
return node.size;
}
private void split4Node(Node cn) {
flipColors(cn);
}
private void flipColors(Node h) {
assert(!isRed(h));
assert(isRed(h.right));
assert(isRed(h.left));
h.color = !h.color;
h.left.color = !h.left.color;
h.right.color = !h.right.color;
}
private Node rotateLeft(Node h) {
assert(isRed(h.right));
Node x = h.right; //changethe pointers
h.right = x.left;
x.left = h;
x.color = h.color; //change the colors
h.color = RED;
x.size = h.size; //change the sizes
h.size = size(h.left) + size(h.right) + 1;
return x;
}
private Node rotateRight(Node h) {
assert(isRed(h.left));
Node x = h.left;
h.left = x.right;
x.right = h;
x.color = h.color;
h.color = RED;
x.size = h.size; //size is the same
h.size = size(h.left) + size(h.right) + 1;
return x;
}
private Node fixup(Node h) {
if (isRed(h.right) && !isRed(h.left)) h = rotateLeft(h);
if (isRed(h.left) && isRed(h.left.left)) h = rotateRight(h);
// on the way up eliminate 4 nodes
h.size = size(h.left) + size(h.right) + 1; //right the size
return h;
}
private boolean isRed(Node x) {
if (x == null) return false;
return x.color == RED;
}
private static final boolean RED = true;
private static final boolean BLACK = false;
private class Node {
private K k;
private V v;
private Node left, right;
private int size;
private boolean color;
Node(K k, V v, int size, boolean color) {
this.k = k;
this.v = v;
this.size = size;
this.color = color;
}
@Override
public String toString() {
if (color) return "red " + v;
else return "black " + v;
}
}
} |
package ru.apermyakov;
import java.util.*;
/**
* Class for sort list of users.
*
* @author apermyakov
* @version 1.0
* @since 24.10.2017
*/
public class SortUser {
/**
* Method for sort users and convert unsorted list into sorted set.
*
* @param list unsorted list
* @return sorted set
*/
public Set<User> sort(List<User> list) {
for (User user : list) {
if (user == null) {
list.remove(user);
}
}
return new TreeSet<>(list);
}
/**
* Method for sort users by name length.
*
* @param users unsorted list of users
* @return sorted list of users
*/
public List<User> sortNameLength(List<User> users) {
users.sort(new Comparator<User>() {
@Override
public int compare(User o1, User o2) {
return o1 == null ? 1
: o2 == null ? -1
: compareLength(o1, o2);
}
});
return users;
}
/**
* Method for sort users by name length and age.
*
* @param users unsorted list of users
* @return sorted list of users
*/
public List<User> sortByAllFields(List<User> users) {
users.sort(new Comparator<User>() {
@Override
public int compare(User o1, User o2) {
return o1 == null ? 1
: o2 == null ? -1
: compareLength(o1, o2) != 0 ? compareLength(o1, o2)
: o1.getAge().compareTo(o2.getAge());
}
});
return users;
}
/**
* Method for compare user's names.
*
* @param o1 first user
* @param o2 second user
* @return sorted names by length
*/
public int compareLength(User o1, User o2) {
return Integer.compare(o1.getName().length(), o2.getName().length());
}
} |
package net.hawkengine.core.components.pipelinescheduler;
import net.hawkengine.model.Job;
import net.hawkengine.model.Pipeline;
import net.hawkengine.model.Stage;
import net.hawkengine.model.enums.JobStatus;
import net.hawkengine.model.enums.StageStatus;
import net.hawkengine.model.enums.Status;
import net.hawkengine.services.PipelineService;
import net.hawkengine.services.interfaces.IPipelineService;
import org.apache.log4j.Logger;
import java.util.*;
import java.util.stream.Collectors;
public class StatusUpdaterService extends Thread {
private static final Logger logger = Logger.getLogger(StatusUpdaterService.class.getName());
private IPipelineService pipelineService;
public StatusUpdaterService() {
this.pipelineService = new PipelineService();
}
public StatusUpdaterService(IPipelineService pipelineService) {
this.pipelineService = pipelineService;
}
public void updateStatuses() {
List<Pipeline> pipelinesInProgress = (List<Pipeline>) this.pipelineService.getAllPipelinesInProgress().getObject();
for (Pipeline pipeline : pipelinesInProgress) {
this.updateAllStatuses(pipeline);
this.pipelineService.update(pipeline);
}
}
public boolean updateAllStatuses(Pipeline pipeline) {
Pipeline pipelineToUpdate = null;
Queue<Object> queue = new LinkedList<>();
queue.add(pipeline);
while (!queue.isEmpty()) {
Object queueNode = queue.poll();
if (queueNode.getClass() == Job.class) {
pipelineToUpdate = pipeline;
this.updatePipelineStatus(pipelineToUpdate);
return true;
}
if (queueNode.getClass() == Pipeline.class) {
pipelineToUpdate = (Pipeline) queueNode;
queue.addAll(pipelineToUpdate.getStages());
this.updateStageStatusesInSequence(pipelineToUpdate.getStages());
} else {
Stage stageNode = (Stage) queueNode;
queue.addAll(stageNode.getJobs());
}
}
return false;
}
public void updateStageStatusesInSequence(List<Stage> stages) {
for (Stage currentStage : stages) {
this.updateStageStatus(currentStage);
if (currentStage.getStatus() == StageStatus.NOT_RUN) {
currentStage.setStatus(StageStatus.IN_PROGRESS);
break;
} else if (currentStage.getStatus() == StageStatus.PASSED) {
continue;
} else {
break;
}
}
}
public void updateStageStatus(Stage stage) {
List<JobStatus> jobStatuses = new ArrayList<>();
List<Job> jobs = stage.getJobs();
for (Job job : jobs) {
JobStatus jobStatus = job.getStatus();
jobStatuses.add(jobStatus);
}
if (jobStatuses.contains(JobStatus.FAILED)) {
stage.setStatus(StageStatus.FAILED);
} else if (this.areAllPassed(jobStatuses)) {
stage.setStatus(StageStatus.PASSED);
}
}
public void updatePipelineStatus(Pipeline pipeline) {
List<Stage> stages = pipeline.getStages();
List<StageStatus> stageStatuses = new ArrayList<>();
for (Stage stage : stages) {
StageStatus stageStatus = stage.getStatus();
stageStatuses.add(stageStatus);
}
if (stageStatuses.contains(StageStatus.FAILED)) {
pipeline.setStatus(Status.FAILED);
} else if (this.areAllPassed(stageStatuses)) {
pipeline.setStatus(Status.PASSED);
}
}
public boolean areAllPassed(List<?> statuses) {
String[] statusesAsString = new String[statuses.size()];
int index = 0;
for (Object status : statuses) {
statusesAsString[index] = status.toString();
index++;
}
for (String aStatusesAsString : statusesAsString) {
if (!aStatusesAsString.equals("PASSED")) {
return false;
}
}
return true;
}
} |
package com.valkryst.VTerminal.misc;
import java.awt.Dimension;
import java.awt.Point;
import java.util.LinkedList;
import java.util.List;
public final class ShapeAlgorithms {
// Prevent users from creating an instance.
private ShapeAlgorithms() {}
/**
* Constructs a list, containing the outline, of an ellipse's points by using the Bresenham algorithm,
*
* @param position
* The x/y-axis (column/row) coordinates of the top-left character.
*
* @param dimension
* The width/height.
*
* @return
* The list of points.
*/
public static List<Point> getEllipse(final Point position, final Dimension dimension) {
final List<Point> points = new LinkedList<>();
final int x = position.x;
final int y = position.y;
final int width = dimension.width;
final int height = dimension.height;
final int a2 = width * width;
final int b2 = height * height;
final int fa2 = 4 * a2;
final int fb2 = 4 * b2;
int dx = 0;
int dy = height;
int sigma = 2 * b2 + a2 * (1 - 2 * height);
while (b2 * dx <= a2 * dy) {
points.add(new Point(x + dx, y + dy));
points.add(new Point(x - dx, y + dy));
points.add(new Point(x + dx, y - dy));
points.add(new Point(x - dx, y - dy));
if (sigma >= 0) {
sigma += fa2 * (1 - dy);
dy
}
sigma += b2 * ((4 * dx) + 6);
dx++;
}
dx = width;
dy = 0;
sigma = 2 * a2 + b2 * (1 - 2 * width);
while (a2 * dy <= b2 * dx) {
points.add(new Point(x + dx, y + dy));
points.add(new Point(x - dx, y + dy));
points.add(new Point(x + dx, y - dy));
points.add(new Point(x - dx, y - dy));
if (sigma >= 0) {
sigma += fb2 * (1 - dx);
dx
}
sigma += a2 * ((4 * dy) + 6);
dy++;
}
return points;
}
/**
* Constructs a list, containing the outline and fill, of an ellipse's points.
*
* @param position
* The x/y-axis (column/row) coordinates of the top-left character.
*
* @param dimension
* The width/height.
*
* @return
* The list of points.
*/
public static List<Point> getFilledEllipse(final Point position, final Dimension dimension) {
final List<Point> points = getEllipse(position, dimension);
final int xCenter = position.x + (dimension.width / 2);
final int yCenter = position.y + (dimension.height / 2);
position.setLocation(xCenter, yCenter);
return recursiveFill(points, position);
}
/**
* Constructs a list, containing the path, of a line's points by using the Bresenham algorithm,
*
* @param fromX
* The x-axis (column) coordinate of the start point of the line.
*
* @param fromY
* The y-axis (row) coordinate of the start point of the line.
*
* @param toX
* The x-axis (column) coordinate of the end point of the line.
*
* @param toY
* The y-axis (row) coordinate of the end point of the line.
*
* @return
* The list of points.
*/
public static List<Point> getLine(int fromX, int fromY, final int toX, final int toY) {
final List<Point> points = new LinkedList<>();
int d = 0;
final int dx = Math.abs(toX - fromX);
final int dy = Math.abs(toY - fromY);
final int dx2 = dx << 1;
final int dy2 = dy << 1;
final int ix = fromX < toX ? 1 : -1;
final int iy = fromY < toY ? 1 : -1;
if (dy <= dx) {
while(true) {
points.add(new Point(fromX, fromY));
if (fromX == toX) {
break;
}
fromX += ix;
d += dy2;
if (d > dx) {
fromY += iy;
d -= dx2;
}
}
} else {
while (true) {
points.add(new Point(fromX, fromY));
if (fromY == toY) {
break;
}
fromY += iy;
d += dx2;
if (d > dy) {
fromX += ix;
d -= dy2;
}
}
}
return points;
}
/**
 * Computes the outline points of a rectangle.
 *
 * The four corners are listed first, followed by the vertical edges and then
 * the horizontal edges (corners excluded from the edge runs).
 *
 * @param position
 *          The x/y-axis (column/row) coordinates of the top-left character.
 *
 * @param dimension
 *          The width/height.
 *
 * @return
 *          The list of outline points.
 */
public static List<Point> getRectangle(final Point position, final Dimension dimension) {
    final List<Point> outline = new LinkedList<>();
    final int left = position.x;
    final int top = position.y;
    final int right = left + dimension.width - 1;
    final int bottom = top + dimension.height - 1;
    // Corners first.
    outline.add(new Point(left, top));
    outline.add(new Point(right, top));
    outline.add(new Point(left, bottom));
    outline.add(new Point(right, bottom));
    // Vertical (left/right) edges, excluding the corners.
    for (int row = top + 1; row < bottom; row++) {
        outline.add(new Point(left, row));
        outline.add(new Point(right, row));
    }
    // Horizontal (top/bottom) edges, excluding the corners.
    for (int col = left + 1; col < right; col++) {
        outline.add(new Point(col, top));
        outline.add(new Point(col, bottom));
    }
    return outline;
}
/**
 * Computes every point of a filled rectangle, column by column.
 *
 * @param position
 *          The x/y-axis (column/row) coordinates of the top-left character.
 *
 * @param dimension
 *          The width/height.
 *
 * @return
 *          The list of all points within the rectangle.
 */
public static List<Point> getFilledRectangle(final Point position, final Dimension dimension) {
    final List<Point> filled = new LinkedList<>();
    final int endX = position.x + dimension.width;
    final int endY = position.y + dimension.height;
    for (int x = position.x; x < endX; x++) {
        for (int y = position.y; y < endY; y++) {
            filled.add(new Point(x, y));
        }
    }
    return filled;
}
/**
 * Flood-fills the area bounded by the given border points, starting at the
 * specified position.
 *
 * Every newly visited point is appended to the input list, so on return the
 * list contains the original border points plus the filled interior.
 *
 * Implementation notes: the previous implementation recursed once per filled
 * point (StackOverflowError on modest areas) and scanned the whole list for
 * each membership test (O(n^2) overall). This version uses an explicit stack
 * and a HashSet mirror of the list for O(1) membership checks. The visit
 * order matches the old recursion (+x, -x, +y, -y, depth-first) because the
 * neighbours are pushed in reverse order. Unlike the recursive version, the
 * {@code position} argument is no longer mutated.
 *
 * If the border does not fully enclose the start position, the fill expands
 * without bound — exactly as the recursive version would have attempted.
 *
 * @param points
 *          The border points; filled points are appended to this list.
 *
 * @param position
 *          The x/y-axis (column/row) coordinates of the start point.
 *
 * @return
 *          The same list, now containing border and filled points.
 */
public static List<Point> recursiveFill(final List<Point> points, final Point position) {
    // Fully-qualified names so no new imports are required at file level.
    final java.util.Set<Point> visited = new java.util.HashSet<>(points);
    final java.util.Deque<Point> stack = new java.util.ArrayDeque<>();
    stack.push(new Point(position.x, position.y));
    while (stack.isEmpty() == false) {
        final Point current = stack.pop();
        // add() returns false for border/already-filled points: skip them.
        if (visited.add(current)) {
            points.add(current);
            // Push neighbours in reverse so they are visited in the original
            // recursive order: +x, -x, +y, -y.
            stack.push(new Point(current.x, current.y - 1));
            stack.push(new Point(current.x, current.y + 1));
            stack.push(new Point(current.x - 1, current.y));
            stack.push(new Point(current.x + 1, current.y));
        }
    }
    return points;
}
} |
package org.broadinstitute.hellbender.tools.walkers.variantutils;
import htsjdk.variant.variantcontext.GenotypesContext;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.variantcontext.VariantContextUtils;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.vcf.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.broadinstitute.barclay.argparser.Argument;
import org.broadinstitute.barclay.argparser.CommandLineProgramProperties;
import org.broadinstitute.barclay.help.DocumentedFeature;
import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import org.broadinstitute.hellbender.cmdline.programgroups.VariantProgramGroup;
import org.broadinstitute.hellbender.engine.*;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.samples.*;
import org.broadinstitute.hellbender.utils.variant.*;
import java.io.File;
import java.util.*;
/**
* Calculate genotype posterior probabilities given family and/or known population genotypes
*
* <p>
* This tool calculates the posterior genotype probability for each sample genotype in a VCF of input variant calls,
* based on the genotype likelihoods from the samples themselves and, optionally, from input VCFs describing allele
* frequencies in related populations. The input variants must possess genotype likelihoods generated by
* HaplotypeCaller, UnifiedGenotyper or another source that provides <b>unbiased</b> genotype likelihoods.</p>
*
* <h4>Statistical notes</h4>
* <p>The AF field is not used in the calculation as it does not provide a way to estimate the confidence
* interval or uncertainty around the allele frequency, unlike AN which does provide this necessary information. This
* uncertainty is modeled by a Dirichlet distribution: that is, the frequency is known up to a Dirichlet distribution
* with parameters AC1+q,AC2+q,...,(AN-AC1-AC2-...)+q, where "q" is the global frequency prior (typically q << 1). The
* genotype priors applied then follow a Dirichlet-Multinomial distribution, where 2 alleles per sample are drawn
* independently. This assumption of independent draws follows from the assumption of Hardy-Weinberg equilibrium (HWE).
* Thus, HWE is imposed on the likelihoods as a result of CalculateGenotypePosteriors.</p>
*
* <h3>Inputs</h3>
* <p>
* <ul>
* <li>A VCF with genotype likelihoods, and optionally genotypes, AC/AN fields, or MLEAC/AN fields.</li>
* <li>(Optional) A PED pedigree file containing the description of the relationships between individuals.</li>
* </ul>
* </p>
*
* <p>
* Optionally, a collection of VCFs can be provided for the purpose of informing allele frequency priors. Each of
* these resource VCFs must satisfy at least one of the following requirement sets:
* </p>
* <ul>
* <li>AC field and AN field</li>
* <li>MLEAC field and AN field</li>
* <li>Genotypes</li>
* </ul>
 *
*
* <h3>Output</h3>
* <p>A new VCF with the following information:</p>
* <ul>
* <li>Genotype posteriors added to the FORMAT fields ("PP")</li>
* <li>Genotypes and GQ assigned according to these posteriors (note that the original genotype and GQ may change)</li>
* <li>Per-site genotype priors added to the INFO field ("PG")</li>
 * <li>(Optional) Per-site, per-trio joint likelihoods (JL) and joint posteriors (JP) given as Phred-scaled probability
* of all genotypes in the trio being correct based on the PLs for JL and the PPs for JP. These annotations are added to
* the FORMAT fields.</li>
* </ul>
*
* <h3>Notes</h3>
* <p>
* By default, priors will be applied to each variant separately, provided each variant features data from at least
* 10 called samples (no-calls do not count). SNP sites in the input callset that have a SNP at the matching site in
* the supporting VCF will have priors applied based on the AC from the supporting samples and the input callset
* unless the --ignoreInputSamples flag is used. If a site is not called in the supporting VCF, priors will be
* applied using the discovered AC from the input samples unless the --discoveredACpriorsOff flag is used.
* For any non-SNP sites in the input callset, flat priors are applied.
* </p>
*
* <h3>Usage examples</h3>
*
* <h4>Refine genotypes based on the discovered allele frequency in an input VCF containing many samples</h4>
* <pre>
* ./gatk-launch CalculateGenotypePosteriors \
* -V multisample_input.vcf \
* -O output.vcf
* </pre>
*
* <h4>Inform the genotype assignment of a single sample using the 1000G_EUR European panel</h4>
* <pre>
* ./gatk-launch CalculateGenotypePosteriors \
* -V sample_input.vcf \
* -O sample_output.1000G_EUR.vcf \
* -supporting 1000G_EUR.genotypes.vcf
* </pre>
*
* <h4>Apply only family priors to a callset</h4>
* <pre>
* ./gatk-launch CalculateGenotypePosteriors \
* -V input.vcf \
* -O output.vcf \
* -ped family.ped \
* --skipPopulationPriors
* </pre>
*
* <h4>Apply frequency and HWE-based priors to the genotypes of a family without including the family allele counts
* in the allele frequency estimates</h4>
* <pre>
* ./gatk-launch CalculateGenotypePosteriors \
* -V input.vcf \
* -O output.vcf \
* --ignoreInputSamples
* </pre>
*
* <h4>Calculate the posterior genotypes of a callset, and impose that a variant *not seen* in the external panel
* is tantamount to being AC=0, AN=100 within that panel</h4>
* <pre>
* ./gatk-launch CalculateGenotypePosteriors \
* -V input.vcf \
* -O output.vcf \
* -supporting external.panel.vcf \
* --numRefSamplesIfNoCall 100
* </pre>
*
*
*/
@CommandLineProgramProperties(
summary = "This tool calculates the posterior genotype probability for each sample genotype in a VCF of input variant calls,\n" +
" based on the genotype likelihoods from the samples themselves and, optionally, from input VCFs describing allele\n" +
" frequencies in related populations. The input variants must possess genotype likelihoods generated by\n" +
" HaplotypeCaller, UnifiedGenotyper or another source that provides *unbiased* genotype likelihoods.",
oneLineSummary = "Calculate genotype posterior probabilities given family and/or known population genotypes",
programGroup = VariantProgramGroup.class
)
@DocumentedFeature
public final class CalculateGenotypePosteriors extends VariantWalker {
private static final Logger logger = LogManager.getLogger(CalculateGenotypePosteriors.class);
/**
* Supporting external panels. Allele counts from these panels (taken from AC,AN or MLEAC,AN or raw genotypes) will
* be used to inform the frequency distribution underlying the genotype priors. These files must be VCF 4.2 spec or later.
*/
@Argument(fullName="supporting", shortName = "supporting", doc="Other callsets to use in generating genotype posteriors", optional=true)
public List<FeatureInput<VariantContext>> supportVariants = new ArrayList<>();
@Argument(doc="File to which variants should be written", fullName = StandardArgumentDefinitions.OUTPUT_LONG_NAME, shortName = StandardArgumentDefinitions.OUTPUT_SHORT_NAME, optional = false)
public String out = null;
/**
* The global prior of a variant site -- i.e. the expected allele frequency distribution knowing only that N alleles
* exist, and having observed none of them. This is the "typical" 1/x trend, modeled here as not varying
* across alleles. The calculation for this parameter is (Effective population size) * (steady state mutation rate)
*
*/
@Argument(fullName="globalPrior",shortName="G",doc="Global Dirichlet prior parameters for the allele frequency",optional=true)
public double globalPrior = HomoSapiensConstants.SNP_HETEROZYGOSITY;
/**
* The mutation prior -- i.e. the probability that a new mutation occurs. Sensitivity analysis on known de novo
* mutations suggests a default value of 10^-6.
*
*/
@Argument(fullName="deNovoPrior",shortName="DNP",doc="Prior for de novo mutations",optional=true)
public double deNovoPrior = 1e-6;
/**
* When a variant is not seen in a panel, this argument controls whether to infer (and with what effective strength)
* that only reference alleles were observed at that site. E.g. "If not seen in 1000Genomes, treat it as AC=0,
* AN=2000". This is applied across all external panels, so if numRefIsMissing = 10, and the variant is absent in
* two panels, this confers evidence of AC=0,AN=20.
*/
@Argument(fullName="numRefSamplesIfNoCall",shortName="nrs",doc="Number of hom-refs sites to infer at sites not present in a panel",optional=true)
public int numRefIfMissing = 0;
/**
* By default the tool looks for MLEAC first, and then falls back to AC if MLEAC is not found. When this
* flag is set, the behavior is flipped and the tool looks first for the AC field and then fall back to MLEAC or
* raw genotypes.
*/
@Argument(fullName="defaultToAC",shortName="useAC",doc="Use AC rather than MLEAC",optional=true)
public boolean defaultToAC = false;
/**
* When this flag is set, only the AC and AN calculated from external sources will be used, and the calculation
* will not use the discovered allele frequency in the callset whose posteriors are being calculated. Useful for
* callsets containing related individuals.
*/
@Argument(fullName="ignoreInputSamples",shortName="ext",doc="Use external information only",optional=true)
public boolean ignoreInputSamples = false;
/**
* Calculate priors for missing external variants from sample data -- default behavior is to apply flat priors
*/
@Argument(fullName="discoveredACpriorsOff",shortName="useACoff",doc="Do not use discovered allele count in the input callset " +
"for variants that do not appear in the external callset. ", optional=true)
public boolean useACoff = false;
/**
* Skip application of population-based priors
*/
@Argument(fullName="skipPopulationPriors",shortName="skipPop",doc="Skip application of population-based priors", optional=true)
public boolean skipPopulationPriors = false;
/**
* Skip application of family-based priors. Note: if pedigree file is absent, family-based priors will always be skipped.
*/
@Argument(fullName="skipFamilyPriors",shortName="skipFam",doc="Skip application of family-based priors", optional=true)
public boolean skipFamilyPriors = false;
@Argument(fullName="pedigree", shortName="ped", doc="Pedigree file for samples", optional=true)
private File pedigreeFile = null;
private FamilyLikelihoods famUtils;
private SampleDB sampleDB = null;
private VariantContextWriter vcfWriter;
@Override
public void onTraversalStart() {
final VariantContextWriterBuilder builder = new VariantContextWriterBuilder().setOutputFile(out).setOutputFileType(VariantContextWriterBuilder.OutputType.VCF);
if (hasReference()){
vcfWriter = builder.setReferenceDictionary(getBestAvailableSequenceDictionary()).setOption(Options.INDEX_ON_THE_FLY).build();
} else {
vcfWriter = builder.unsetOption(Options.INDEX_ON_THE_FLY).build();
logger.info("Can't make an index for output file " + out + " because a reference dictionary is required for creating Tribble indices on the fly");
}
sampleDB = initializeSampleDB();
// Get list of samples to include in the output
final Map<String, VCFHeader> vcfHeaders = Collections.singletonMap(getDrivingVariantsFeatureInput().getName(), getHeaderForVariants());
final Set<String> vcfSamples = VcfUtils.getSortedSampleSet(vcfHeaders, GATKVariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE);
//Get the trios from the families passed as ped
if (!skipFamilyPriors){
final Set<Trio> trios = sampleDB.getTrios();
if(trios.isEmpty()) {
logger.info("No PED file passed or no *non-skipped* trios found in PED file. Skipping family priors.");
skipFamilyPriors = true;
}
}
final VCFHeader header = vcfHeaders.values().iterator().next();
if ( ! header.hasGenotypingData() ) {
throw new UserException("VCF has no genotypes");
}
if ( header.hasInfoLine(GATKVCFConstants.MLE_ALLELE_COUNT_KEY) ) {
final VCFInfoHeaderLine mleLine = header.getInfoHeaderLine(GATKVCFConstants.MLE_ALLELE_COUNT_KEY);
if ( mleLine.getCountType() != VCFHeaderLineCount.A ) {
throw new UserException("VCF does not have a properly formatted MLEAC field: the count type should be \"A\"");
}
if ( mleLine.getType() != VCFHeaderLineType.Integer ) {
throw new UserException("VCF does not have a properly formatted MLEAC field: the field type should be \"Integer\"");
}
}
// Initialize VCF header
final Set<VCFHeaderLine> headerLines = VCFUtils.smartMergeHeaders(vcfHeaders.values(), true);
headerLines.add(GATKVCFHeaderLines.getFormatLine(GATKVCFConstants.PHRED_SCALED_POSTERIORS_KEY));
headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.GENOTYPE_PRIOR_KEY));
if (!skipFamilyPriors) {
headerLines.add(GATKVCFHeaderLines.getFormatLine(GATKVCFConstants.JOINT_LIKELIHOOD_TAG_NAME));
headerLines.add(GATKVCFHeaderLines.getFormatLine(GATKVCFConstants.JOINT_POSTERIOR_TAG_NAME));
}
headerLines.addAll(getDefaultToolVCFHeaderLines());
vcfWriter.writeHeader(new VCFHeader(headerLines, vcfSamples));
final Map<String,Set<Sample>> families = sampleDB.getFamilies(vcfSamples);
famUtils = new FamilyLikelihoods(sampleDB, deNovoPrior, vcfSamples, families);
}
/**
* Entry-point function to initialize the samples database from input data
*/
private SampleDB initializeSampleDB() {
final SampleDBBuilder sampleDBBuilder = new SampleDBBuilder(PedigreeValidationType.STRICT);
if (pedigreeFile != null) {
sampleDBBuilder.addSamplesFromPedigreeFiles(Collections.singletonList(pedigreeFile));
}
return sampleDBBuilder.getFinalSampleDB();
}
@Override
public void apply(final VariantContext variant,
final ReadsContext readsContext,
final ReferenceContext referenceContext,
final FeatureContext featureContext) {
final Collection<VariantContext> vcs = featureContext.getValues(getDrivingVariantsFeatureInput());
final Collection<VariantContext> otherVCs = featureContext.getValues(supportVariants);
final int missing = supportVariants.size() - otherVCs.size();
for ( final VariantContext vc : vcs ) {
final VariantContext vc_familyPriors;
final VariantContext vc_bothPriors;
//do family priors first (if applicable)
final VariantContextBuilder builder = new VariantContextBuilder(vc);
//only compute family priors for biallelelic sites
if (!skipFamilyPriors && vc.isBiallelic()){
final GenotypesContext gc = famUtils.calculatePosteriorGLs(vc);
builder.genotypes(gc);
}
VariantContextUtils.calculateChromosomeCounts(builder, false);
vc_familyPriors = builder.make();
if (!skipPopulationPriors) {
vc_bothPriors = PosteriorProbabilitiesUtils.calculatePosteriorProbs(vc_familyPriors, otherVCs, missing * numRefIfMissing, globalPrior, !ignoreInputSamples, defaultToAC, useACoff);
} else {
final VariantContextBuilder builder2 = new VariantContextBuilder(vc_familyPriors);
VariantContextUtils.calculateChromosomeCounts(builder, false);
vc_bothPriors = builder2.make();
}
vcfWriter.add(vc_bothPriors);
}
}
@Override
public void closeTool(){
vcfWriter.close();
}
} |
package com.aif.language.sentence;
import com.aif.common.FileHelper;
import com.aif.language.common.ISplitter;
import com.aif.language.token.TokenSplitter;
import org.testng.annotations.Test;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import static org.testng.AssertJUnit.assertTrue;
public class SentenceSplitterCharactersExtractorQualityTest {

    // Corpus resource used to measure separator-extraction quality.
    private static final String TEXT_FILE_NAME = "46800-0.txt";

    /**
     * Quality/acceptance test: tokenizes the corpus, runs the statistical
     * sentence-separator extractor on the tokens, and requires the Dice-style
     * overlap (2*|correct| / (|expected|+|actual|)) with the expected separator
     * set to exceed 0.53.
     */
    @Test(groups = { "quality-test", "acceptance-tests" })
    public void testSeparatorExtractionQuality() throws Exception {
        // input arguments
        String inputText;
        try(InputStream modelResource = SentenceSplitterCharactersExtractorQualityTest.class.getResourceAsStream(TEXT_FILE_NAME)) {
            inputText = FileHelper.readAllText(modelResource);
        }
        final TokenSplitter tokenSplitter = new TokenSplitter();
        final List<String> inputToken = tokenSplitter.split(inputText);

        // expected results
        // NOTE(review): this array literal appears corrupted by an encoding/
        // extraction issue — two entries are truncated mid-literal (after '\"'
        // and after ','). Restore the original characters from version control.
        final List<Character> expectedResult = Arrays.asList(new Character[]{
                '.', '(', ')',
                ':', '\"', '
                ';', '‘', '“',
                ',', '
                '%', '\'', '?',
                '!', '[', ']'
        });

        // creating test instance
        final ISentenceSeparatorExtractor testInstance = ISentenceSeparatorExtractor.Type.STAT.getInstance();

        // execution test
        final List<Character> actualResult = testInstance.extract(inputToken).get();

        // result assert
        long correct = actualResult
                .stream()
                .filter(ch -> expectedResult.contains(ch))
                .count();

        double result = (correct * 2.) / (double)(expectedResult.size() + actualResult.size());
        assertTrue(String.format("result is: %f", result), result > 0.53);
    }

}
package com.grayben.riskExtractor.htmlScorer.nodeVisitor;
import com.grayben.riskExtractor.htmlScorer.ScoredText;
import com.grayben.riskExtractor.htmlScorer.ScoredTextElement;
import com.grayben.riskExtractor.htmlScorer.ScoringAndFlatteningNodeVisitor;
import com.grayben.riskExtractor.htmlScorer.partScorers.Scorer;
import com.grayben.riskExtractor.htmlScorer.partScorers.elementScorers.EmphasisElementScorer;
import com.grayben.riskExtractor.htmlScorer.partScorers.elementScorers.SegmentationElementScorer;
import com.grayben.riskExtractor.htmlScorer.partScorers.tagScorers.TagAndAttributeScorer;
import com.grayben.riskExtractor.htmlScorer.partScorers.tagScorers.TagEmphasisScorer;
import com.grayben.riskExtractor.htmlScorer.partScorers.tagScorers.TagSegmentationScorer;
import org.jsoup.nodes.Comment;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.parser.Tag;
import org.jsoup.select.NodeTraversor;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.*;
import static com.grayben.riskExtractor.htmlScorer.nodeVisitor.NodeVisitorOracle.getEmphasisedTargetElementsAndScores;
import static com.grayben.riskExtractor.htmlScorer.nodeVisitor.NodeVisitorOracle.getSegmentedTargetElementsAndScores;
import static junit.framework.Assert.*;
import static junit.framework.TestCase.fail;
@RunWith(MockitoJUnitRunner.class)
public class ScoringAndFlatteningNodeVisitorTest
extends NodeVisitorTest {
private ScoringAndFlatteningNodeVisitor nodeVisitorSUT;
@Before
@Override
public void setUp() throws Exception {
    // Build the emphasis scorer from its two tag-level delegates.
    final TagEmphasisScorer emphasisByTag
            = new TagEmphasisScorer(TagEmphasisScorer.defaultMap());
    final TagAndAttributeScorer emphasisByTagAndAttribute
            = new TagAndAttributeScorer(TagAndAttributeScorer.defaultMap());
    final EmphasisElementScorer emphasisScorer = new EmphasisElementScorer(
            emphasisByTag,
            emphasisByTagAndAttribute
    );

    // Build the segmentation scorer from its tag-level delegate.
    final SegmentationElementScorer segmentationScorer
            = new SegmentationElementScorer(
                    new TagSegmentationScorer(TagSegmentationScorer.defaultMap()));

    // Register both scorers with the system under test.
    final Set<Scorer<Element>> elementScorers = new HashSet<>();
    elementScorers.add(emphasisScorer);
    elementScorers.add(segmentationScorer);

    this.nodeVisitorSUT = new ScoringAndFlatteningNodeVisitor(elementScorers);
    super.setNodeVisitorSUT(nodeVisitorSUT);
    super.setUp();
}
/**
 * Delegates fixture cleanup to the superclass after each test.
 */
@Override
@After
public void tearDown() throws Exception {
    super.tearDown();
}
// Constructor tests //////////////////////////////////////////////////////
/**
 * The constructor must reject a null scorer set.
 */
@Test
public void
test_InitThrowsNullPointerException_WhenElementScorersIsNull
() throws Exception {
    thrown.expect(NullPointerException.class);

    final Set<Scorer<Element>> noScorers = null;
    this.nodeVisitorSUT = new ScoringAndFlatteningNodeVisitor(noScorers);
}
/**
 * The constructor must reject a scorer set containing a null entry.
 */
@Test
public void
test_InitThrowsNullPointerException_WhenAnyElementScorerIsNull
() throws Exception {
    thrown.expect(NullPointerException.class);

    final TagSegmentationScorer tagScorer
            = new TagSegmentationScorer(TagSegmentationScorer.defaultMap());
    final Set<Scorer<Element>> scorers = new HashSet<>();
    scorers.add(new SegmentationElementScorer(tagScorer));
    scorers.add(null);

    this.nodeVisitorSUT = new ScoringAndFlatteningNodeVisitor(scorers);
}
/**
 * A freshly-constructed visitor must expose a non-null flat text.
 */
@Test
public void
test_GetScoredTextReturnsNonNull_ImmediatelyAfterSUTIsInitialised
() throws Exception {
    assertNotNull(nodeVisitorSUT.getFlatText());
}
/**
 * A freshly-constructed visitor must expose an empty flat text.
 */
@Test
public void
test_GetScoredTextReturnsEmpty_ImmediatelyAfterSUTIsInitialised
() throws Exception {
    final ScoredText flatText = nodeVisitorSUT.getFlatText();
    assertTrue(flatText.toString().isEmpty());
}
// Method tests (without NodeTraversor) ///////////////////////////////////
/**
 * The emphasis score starts at zero before any node is visited.
 */
@Test
public void
test_EmphasisScoreIsZero_ImmediatelyAfterInit
() throws Exception {
    final Integer emphasisScore = nodeVisitorSUT.getCurrentScores()
            .get(EmphasisElementScorer.SCORE_LABEL);
    assertEquals(Integer.valueOf(0), emphasisScore);
}
/**
 * Visiting the head of a non-emphasis element leaves the emphasis score at zero.
 */
@Test
public void
test_EmphasisScoreIsZero_AfterHeadOnNonEmphasisElement
() throws Exception {
    final Element plainElement = new Element(Tag.valueOf("foo"), "a-base-uri");
    nodeVisitorSUT.head(plainElement, 1);

    final Integer emphasisScore = nodeVisitorSUT.getCurrentScores()
            .get(EmphasisElementScorer.SCORE_LABEL);
    assertEquals(Integer.valueOf(0), emphasisScore);
}
/**
 * Visiting the tail of a non-emphasis element leaves the emphasis score at zero.
 */
@Test
public void
test_EmphasisScoreIsZero_AfterTailOnNonEmphasisElement
() throws Exception {
    final Element plainElement = new Element(Tag.valueOf("foo"), "a-base-uri");
    nodeVisitorSUT.tail(plainElement, 1);

    final Integer emphasisScore = nodeVisitorSUT.getCurrentScores()
            .get(EmphasisElementScorer.SCORE_LABEL);
    assertEquals(Integer.valueOf(0), emphasisScore);
}
/**
 * Visiting the head of an emphasised element raises the emphasis score to the
 * oracle-provided value.
 */
@Test
public void
test_EmphasisScoreIsGreaterThanZero_AfterHeadOnEmphasisElement
() throws Exception {
    final Map.Entry<Element, Integer> target
            = getEmphasisedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator().next();
    final Integer expected = target.getValue();

    nodeVisitorSUT.head(target.getKey(), 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(EmphasisElementScorer.SCORE_LABEL);
    assert expected > 0;
    assertEquals(expected, returned);
}
/**
 * Entering a non-emphasis child does not disturb the score contributed by an
 * enclosing emphasised element.
 */
@Test
public void
test_EmphasisScoreIsGreaterThanZero_AfterHeadOnEmphasisElementThenNonEmphasisElement
() throws Exception {
    final Iterator<Map.Entry<Element, Integer>> it
            = getEmphasisedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator();
    final Map.Entry<Element, Integer> target = it.next();
    final Element plainElement
            = new Element(Tag.valueOf("foo-bar"), "some-string");

    nodeVisitorSUT.head(target.getKey(), 1);
    nodeVisitorSUT.head(plainElement, 2);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(EmphasisElementScorer.SCORE_LABEL);
    assertEquals(target.getValue(), returned);
}
/**
 * Leaving an emphasised element returns the emphasis score to zero.
 */
@Test
public void
test_EmphasisScoreIsZero_AfterHeadAndTailOnEmphasisElement
() throws Exception {
    final Element emphasised
            = getEmphasisedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator().next().getKey();

    nodeVisitorSUT.head(emphasised, 1);
    nodeVisitorSUT.tail(emphasised, 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(EmphasisElementScorer.SCORE_LABEL);
    assertEquals(Integer.valueOf(0), returned);
}
/**
 * After entering two nested emphasised elements and leaving only the inner one,
 * the outer element's emphasis score remains in effect.
 */
@Test
public void
test_EmphasisScoreIsGreaterThanZero_AfterHeadOn2EmphasisElementsAndTailOnLatterElement
() throws Exception {
    final Iterator<Map.Entry<Element, Integer>> it
            = getEmphasisedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator();
    final Map.Entry<Element, Integer> outer = it.next();
    final Element inner = it.next().getKey();

    nodeVisitorSUT.head(outer.getKey(), 1);
    nodeVisitorSUT.head(inner, 2);
    nodeVisitorSUT.tail(inner, 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(EmphasisElementScorer.SCORE_LABEL);
    assertEquals(outer.getValue(), returned);
}
/**
 * Text from a non-emphasis element is recorded with an emphasis score of zero.
 */
@Test
public void
test_ScoredTextContainsTextWithEmphasisScoreEqualToZero_AfterHeadAndTailOnNonEmphasisElementWithText
() throws Exception {
    helper_ScoredTextContainsTextWithXScoreEqualToZero_AfterHeadAndTailOnNonXElement(
            EmphasisElementScorer.SCORE_LABEL);
}
/**
 * Text from an emphasised element is recorded with that element's emphasis score.
 */
@Test
public void
test_ScoredTextContainsTextWithEmphasisScoreGreaterThanZero_AfterHeadAndTailOnEmphasisElementWithText
() throws Exception {
    final Map.Entry<Element, Integer> target
            = getEmphasisedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator().next();
    final Element emphasised = target.getKey();
    emphasised.text("This is some text contained by the element.");

    nodeVisitorSUT.head(emphasised, 1);
    nodeVisitorSUT.tail(emphasised, 1);

    final ScoredTextElement firstEntry
            = nodeVisitorSUT.getFlatText().getList().iterator().next();
    final Integer returnedScore
            = firstEntry.getScores().get(EmphasisElementScorer.SCORE_LABEL);
    assertEquals(target.getValue(), returnedScore);
}
/**
 * The segmentation score starts at zero before any node is visited.
 */
@Test
public void
test_SegmentationScoreIsZero_ImmediatelyAfterInit
() throws Exception {
    final Integer segmentationScore = nodeVisitorSUT.getCurrentScores()
            .get(SegmentationElementScorer.SCORE_LABEL);
    assertEquals(Integer.valueOf(0), segmentationScore);
}
/**
 * Visiting the head of a non-segmentation element leaves the segmentation
 * score at zero.
 */
@Test
public void
test_SegmentationScoreIsZero_AfterHeadOnNonSegmentationElement
() throws Exception {
    final Element plainElement = new Element(
            Tag.valueOf("foobar"),
            "some string"
    );
    nodeVisitorSUT.head(plainElement, 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(SegmentationElementScorer.SCORE_LABEL);
    assertEquals(Integer.valueOf(0), returned);
}
/**
 * Visiting the tail of a non-segmentation element leaves the segmentation
 * score at zero.
 */
@Test
public void
test_SegmentationScoreIsZero_AfterTailOnNonSegmentationElement
() throws Exception {
    final Element plainElement = new Element(
            Tag.valueOf("foobar"),
            "some string"
    );
    nodeVisitorSUT.tail(plainElement, 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(SegmentationElementScorer.SCORE_LABEL);
    assertEquals(Integer.valueOf(0), returned);
}
/**
 * Visiting the head of a segmentation element raises the segmentation score
 * to the oracle-provided value.
 */
@Test
public void
test_SegmentationScoreIsGreaterThanZero_AfterHeadOnSegmentationElement
() throws Exception {
    final Map.Entry<Element, Integer> target
            = getSegmentedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator().next();
    final Integer expected = target.getValue();
    assert expected > 0;

    nodeVisitorSUT.head(target.getKey(), 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(SegmentationElementScorer.SCORE_LABEL);
    assertEquals(expected, returned);
}
/**
 * Entering a non-segmentation child does not disturb the score contributed by
 * an enclosing segmentation element.
 */
@Test
public void
test_SegmentationScoreIsGreaterThanZero_AfterHeadOnSegmentationElementThenNonSegmentationElement
() throws Exception {
    final Map.Entry<Element, Integer> target
            = getSegmentedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator().next();
    final Integer expected = target.getValue();
    assert expected > 0;

    final Element plainElement = new Element(
            Tag.valueOf("foobar"),
            "some string"
    );

    nodeVisitorSUT.head(target.getKey(), 1);
    nodeVisitorSUT.head(plainElement, 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(SegmentationElementScorer.SCORE_LABEL);
    assertEquals(expected, returned);
}
/**
 * Leaving a segmentation element returns the segmentation score to zero.
 */
@Test
public void
test_SegmentationScoreIsZero_AfterHeadAndTailOnSegmentationElement
() throws Exception {
    final Element segmented
            = getSegmentedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator().next().getKey();

    nodeVisitorSUT.head(segmented, 1);
    nodeVisitorSUT.tail(segmented, 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(SegmentationElementScorer.SCORE_LABEL);
    assertEquals(Integer.valueOf(0), returned);
}
/**
 * After entering two nested segmentation elements and leaving only the inner
 * one, the outer element's segmentation score remains in effect.
 */
@Test
public void
test_SegmentationScoreIsGreaterThanZero_AfterHeadOn2SegmentationElementsAndTailOnLatterElement
() throws Exception {
    final Iterator<Map.Entry<Element, Integer>> it
            = getSegmentedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator();
    final Map.Entry<Element, Integer> outer = it.next();
    final Integer expected = outer.getValue();
    assert expected > 0;
    final Element inner = it.next().getKey();

    nodeVisitorSUT.head(outer.getKey(), 1);
    nodeVisitorSUT.head(inner, 1);
    nodeVisitorSUT.tail(inner, 1);

    final Integer returned = nodeVisitorSUT.getCurrentScores()
            .get(SegmentationElementScorer.SCORE_LABEL);
    assertEquals(expected, returned);
}
/**
 * Visits head and tail of the given element, then asserts that the first
 * recorded text entry carries the expected score under the given label.
 */
private void helper_ScoreElementAndExpectScore
        (Element element, String scoreLabel, Integer expectedScore){
    nodeVisitorSUT.head(element, 1);
    nodeVisitorSUT.tail(element, 1);

    final ScoredTextElement firstEntry
            = nodeVisitorSUT.getFlatText().getList().iterator().next();
    assertEquals(expectedScore, firstEntry.getScores().get(scoreLabel));
}
/**
 * Asserts that text from an element which matches no scorer is recorded with
 * a zero score under the given label.
 */
private void
helper_ScoredTextContainsTextWithXScoreEqualToZero_AfterHeadAndTailOnNonXElement
(String scoreLabel) throws Exception {
    final Element unscored = new Element(Tag.valueOf("foobar"), "some string");
    unscored.text("This is some text contained by the element.");
    helper_ScoreElementAndExpectScore(unscored, scoreLabel, 0);
}
/**
 * Text from a non-segmentation element is recorded with a segmentation score
 * of zero.
 */
@Test
public void
test_ScoredTextContainsTextWithSegmentationScoreEqualToZero_AfterHeadAndTailOnNonSegmentationElementWithText
() throws Exception {
    helper_ScoredTextContainsTextWithXScoreEqualToZero_AfterHeadAndTailOnNonXElement(
            SegmentationElementScorer.SCORE_LABEL);
}
@Test
public void
test_ScoredTextContainsTextWithSegmentationScoreGreaterThanZero_AfterHeadAndTailOnSegmentationElementWithText
() throws Exception {
    String scoreLabel = SegmentationElementScorer.SCORE_LABEL;
    // Take the first (element, expected score) pair from the fixture.
    Map.Entry<Element, Integer> firstEntry =
            getSegmentedTargetElementsAndScores(nodeVisitorSUT)
                    .entrySet().iterator().next();
    Element targetElement = firstEntry.getKey();
    Integer expectedScore = firstEntry.getValue();
    targetElement.text("This is some text contained by the element.");
    // Visit the element, then read the score off the flattened text.
    nodeVisitorSUT.head(targetElement, 1);
    nodeVisitorSUT.tail(targetElement, 1);
    ScoredTextElement firstTextElement =
            nodeVisitorSUT.getFlatText().getList().iterator().next();
    assertEquals(expectedScore, firstTextElement.getScores().get(scoreLabel));
}
// Method tests (with NodeTraversor) //////////////////////////////////////
@Test
public void
test_GetScoredTextReturnsEmpty_AfterSingleVisitToNonElementNode
() throws Exception {
Node nonElementNode = new Comment(
"my-comment",
"http:
NodeTraversor nodeTraversor = new NodeTraversor(nodeVisitorSUT);
nodeTraversor.traverse(nonElementNode);
ScoredText scoredText = nodeVisitorSUT.getFlatText();
assertTrue(scoredText.toString().isEmpty());
}
@Test
public void
test_GetScoredTextReturnsEmpty_AfterSingleVisitToElementWithNoText
() throws Exception {
Tag tag = Tag.valueOf("a-tag-name");
Node elementNode = new Element(tag, "http:
NodeTraversor nodeTraversor = new NodeTraversor(nodeVisitorSUT);
nodeTraversor.traverse(elementNode);
ScoredText scoredText = nodeVisitorSUT.getFlatText();
assertTrue(scoredText.toString().isEmpty());
}
@Test
public void
test_GetScoredTextReturnsNotEmpty_AfterSingleVisitToElementNodeWithText
() throws Exception {
    Element element = new Element(Tag.valueOf("a-tag-name"), "a-base-URI");
    element.text("Some text is here.");
    new NodeTraversor(nodeVisitorSUT).traverse(element);
    // An element carrying text must produce at least one scored entry.
    assertFalse(nodeVisitorSUT.getFlatText().getList().isEmpty());
}
@Test
public void
test_GetScoredTextReturnsExpectedText_AfterSingleVisitToElementWithText
() throws Exception {
    String expectedText = "This is the text we expect to see present in the list.";
    Element element = new Element(Tag.valueOf("a-tag-name"), "a-base-uri");
    element.text(expectedText);
    new NodeTraversor(nodeVisitorSUT).traverse(element);
    // The flattened text must round-trip the element's text verbatim.
    assertEquals(expectedText, nodeVisitorSUT.getFlatText().toString());
}
// Placeholder: ignored until the multi-element expected-text check is written.
@Ignore
@Test
public void
test_GetScoredTextReturnsExpectedText_AfterVisitsToManyElementsWithText
() throws Exception {
fail("Test not implemented");
}
// Placeholder: ignored until the multi-element expected-scores check is written.
@Ignore
@Test
public void
test_GetScoredTextReturnsExpectedScores_AfterVisitsToManyElementsWithText
() throws Exception {
fail("Test not implemented");
}
@Test
public void
test_AllScoresAreZero_AfterTraversalOfManyTargetElements
() throws Exception {
    // Drive the SUT over the oracle's sequential fixture document.
    NodeVisitorOracle oracle = new NodeVisitorOracle(NodeVisitorOracle.Configuration.SEQUENTIAL);
    this.setNodeVisitorSUT(oracle.getSUT());
    Element input = oracle.getInput();
    NodeTraversor nt = new NodeTraversor(this.nodeVisitorSUT);
    nt.traverse(input);
    // After a complete traversal every opened element has been closed,
    // so every running score must have returned to zero.
    Map<String, Integer> sutScores = nodeVisitorSUT.getCurrentScores();
    // entrySet avoids the redundant per-key lookup of keySet()+get(),
    // and Integer.valueOf replaces the deprecated new Integer(int).
    for (Map.Entry<String, Integer> score : sutScores.entrySet()) {
        assertEquals(Integer.valueOf(0), score.getValue());
    }
}
} |
package io.miti.beetle.util;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
/**
* Utility methods.
*
* @author mwallace
* @version 1.0
*/
public final class Utility
{
/**
* Whether to read input files as a stream.
*/
private static boolean readAsStream = false;
/**
* The line separator for this OS.
*/
private static String lineSep = null;
/**
* Default constructor.
*/
private Utility()
{
super();
}
/**
* Return the line separator for this OS.
*
* @return the line separator for this OS
*/
public static String getLineSeparator()
{
// See if it's been initialized
if (lineSep == null)
{
lineSep = System.getProperty("line.separator");
}
return lineSep;
}
/**
* Whether to read content files as a stream. This
* is used when running the program as a standalone
* jar file.
*
* @param useStream whether to read files via a stream
*/
public static void readFilesAsStream(final boolean useStream)
{
readAsStream = useStream;
}
/**
* Whether to read content files as a stream.
*
* @return whether to read content files as a stream
*/
public static boolean readFilesAsStream()
{
return readAsStream;
}
/**
* Sleep for the specified number of milliseconds.
*
* @param time the number of milliseconds to sleep
*/
public static void sleep(final long time)
{
try
{
Thread.sleep(time);
}
catch (InterruptedException e)
{
Logger.error(e);
}
}
/**
* Convert a string into an integer.
*
* @param sInput the input string
* @param defaultValue the default value
* @param emptyValue the value to return for an empty string
* @return the value as an integer
*/
public static int getStringAsInteger(final String sInput,
final int defaultValue,
final int emptyValue)
{
// This is the variable that gets returned
int value = defaultValue;
// Check the input
if (sInput == null)
{
return emptyValue;
}
// Trim the string
final String inStr = sInput.trim();
if (inStr.length() < 1)
{
// The string is empty
return emptyValue;
}
// Convert the number
try
{
value = Integer.parseInt(inStr);
}
catch (NumberFormatException nfe)
{
value = defaultValue;
}
// Return the value
return value;
}
/**
* Compare two strings, handling nulls.
*
* @param str1 the first string to compare
* @param str2 the second string to compare
* @return the numeric value of comparing str1 to str2
*/
public static int compareTwoStrings(final String str1, final String str2)
{
if ((str1 == null) && (str2 == null)) {
return 0;
} else if (str1 == null) {
return -1;
} else if (str2 == null) {
return 1;
}
return str1.compareTo(str2);
}
/**
* Convert a string into a floating point number.
*
* @param sInput the input string
* @param defaultValue the default value
* @param emptyValue the value to return for an empty string
* @return the value as a float
*/
public static float getStringAsFloat(final String sInput,
final float defaultValue,
final float emptyValue)
{
// This is the variable that gets returned
float fValue = defaultValue;
// Check the input
if (sInput == null)
{
return emptyValue;
}
// Trim the string
final String inStr = sInput.trim();
if (inStr.length() < 1)
{
// The string is empty
return emptyValue;
}
// Convert the number
try
{
fValue = Float.parseFloat(inStr);
}
catch (NumberFormatException nfe)
{
fValue = defaultValue;
}
// Return the value
return fValue;
}
/**
* Convert a string into a double.
*
* @param sInput the input string
* @param defaultValue the default value
* @param emptyValue the value to return for an empty string
* @return the value as a double
*/
public static double getStringAsDouble(final String sInput,
final double defaultValue,
final double emptyValue)
{
// This is the variable that gets returned
double value = defaultValue;
// Check the input
if (sInput == null)
{
return emptyValue;
}
// Trim the string
final String inStr = sInput.trim();
if (inStr.length() < 1)
{
// The string is empty
return emptyValue;
}
// Convert the number
try
{
value = Double.parseDouble(inStr);
}
catch (NumberFormatException nfe)
{
value = defaultValue;
}
// Return the value
return value;
}
/**
* Return whether the string is null or has no length.
*
* @param msg the input string
* @return whether the string is null or has no length
*/
public static boolean isStringEmpty(final String msg)
{
return ((msg == null) || (msg.length() == 0));
}
/**
* Make the application compatible with Apple Macs.
*/
public static void makeMacCompatible()
{
// Set the system properties that a Mac uses
System.setProperty("apple.awt.brushMetalLook", "true");
System.setProperty("apple.laf.useScreenMenuBar", "true");
System.setProperty("apple.awt.showGrowBox", "true");
System.setProperty("com.apple.mrj.application.apple.menu.about.name",
"FGServer");
}
/**
* Get the specified date as a string.
*
* @param time the date and time
* @return the date as a string
*/
public static String getDateTimeString(final long time)
{
// Check the input
if (time <= 0)
{
return "Invalid time (" + Long.toString(time) + ")";
}
// Convert the time into a Date object
Date date = new Date(time);
// Declare our formatter
SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
// Return the date/time as a string
return formatter.format(date);
}
/**
* Format the date as a string, using a standard format.
*
* @param date the date to format
* @return the date as a string
*/
public static String getDateString(final Date date)
{
// Declare our formatter
SimpleDateFormat formatter = new SimpleDateFormat("MMMM d, yyyy");
if (date == null)
{
return formatter.format(new Date());
}
// Return the date/time as a string
return formatter.format(date);
}
/**
* Format the date and time as a string, using a standard format.
*
* @return the date as a string
*/
public static String getDateTimeString()
{
// Declare our formatter
SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
// Return the date/time as a string
return formatter.format(new Date());
}
/**
* Initialize the application's Look And Feel with the default
* for this OS.
*/
public static void initLookAndFeel()
{
// Use the default look and feel
try
{
javax.swing.UIManager.setLookAndFeel(
javax.swing.UIManager.getSystemLookAndFeelClassName());
}
catch (Exception e)
{
Logger.error("Exception: " + e.getMessage());
}
}
public static Boolean stringToBoolean(final String val) {
if ((val == null) || val.trim().isEmpty()) {
return null;
}
final Set<String> trues = new HashSet<String>(10);
trues.add("1");
trues.add("TRUE");
trues.add("true");
trues.add("yes");
trues.add("t");
trues.add("y");
return (trues.contains(val));
}
public static Integer getAscii(final String val) {
if ((val == null) || (val.length() != 1)) {
return null;
}
char ch = val.charAt(0);
return Integer.valueOf(((int) ch));
}
public static String formatLong(final long val)
{
return NumberFormat.getInstance().format(val);
}
/**
* Surround the string with single quotes, and backquote any
* single quotes in the string.
*
* @param str the input string
* @return the quoted string
*/
public static String quoteString(final String str)
{
// Check the input
if (str == null)
{
// It's null, so just return that
return "null";
}
String outStr = str.replace("\"", "\\\"");
if (outStr.contains("\n") || outStr.contains(",")) {
outStr = "\"" + outStr + "\"";
}
return outStr;
}
} |
package jade.core;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.rmi.*;
import java.rmi.server.UnicastRemoteObject;
import jade.core.event.PlatformEvent;
import jade.core.event.MTPEvent;
import jade.domain.ams;
import jade.domain.df;
import jade.lang.acl.ACLMessage;
import jade.mtp.MTPException;
class MainContainerImpl extends AgentContainerImpl implements MainContainer, AgentManager {
private ThreadGroup systemAgentsThreads = new ThreadGroup("JADE System Agents");
// The two mandatory system agents.
private ams theAMS;
private df defaultDF;
private List platformListeners = new LinkedList();
private List platformAddresses = new LinkedList();
private ContainerTable containers = new ContainerTable();
private GADT platformAgents = new GADT();
public MainContainerImpl() throws RemoteException {
super();
systemAgentsThreads.setMaxPriority(Thread.NORM_PRIORITY + 1);
}
private void initAMS() {
theAMS = new ams(this);
// Subscribe as a listener for the AMS agent
Agent a = theAMS;
a.setToolkit(this);
// Insert AMS into local agents table
localAgents.put(Agent.AMS, theAMS);
AgentDescriptor desc = new AgentDescriptor();
RemoteProxyRMI rp = new RemoteProxyRMI(this, Agent.AMS);
desc.setContainerID(myID);
desc.setProxy(rp);
platformAgents.put(Agent.AMS, desc);
}
private void initDF() {
defaultDF = new df();
// Subscribe as a listener for the DF agent
Agent a = defaultDF;
a.setToolkit(this);
// Insert DF into local agents table
localAgents.put(Agent.DEFAULT_DF, defaultDF);
AgentDescriptor desc = new AgentDescriptor();
RemoteProxyRMI rp = new RemoteProxyRMI(this, Agent.DEFAULT_DF);
desc.setContainerID(myID);
desc.setProxy(rp);
platformAgents.put(Agent.DEFAULT_DF, desc);
}
// this variable holds a progressive number just used to name new containers
private static int containersProgNo = 0;
public void joinPlatform(String pID, Iterator agentSpecifiers, String[] MTPs, String[] ACLCodecs) {
// This string will be used to build the GUID for every agent on
// this platform.
platformID = pID;
translator = new AIDTranslator(platformID);
String platformRMI = "rmi://" + platformID;
try {
InetAddress netAddr = InetAddress.getLocalHost();
myID = new ContainerID(MAIN_CONTAINER_NAME, netAddr);
}
catch(UnknownHostException uhe) {
uhe.printStackTrace();
}
// Build the Agent IDs for the AMS and for the Default DF.
Agent.initReservedAIDs(globalAID("ams"), globalAID("df"));
initAMS();
initDF();
try {
myPlatform = (MainContainer)Naming.lookup(platformRMI);
}
catch(Exception e) {
// Should never happen
e.printStackTrace();
}
theACC = new acc(this, platformID);
try{
for(int i =0; i<ACLCodecs.length;i++){
String className = ACLCodecs[i];
installACLCodec(className);
}
containers.addContainer(MAIN_CONTAINER_NAME, this);
containersProgNo++;
PrintWriter f = new PrintWriter(new FileWriter("MTPs-" + MAIN_CONTAINER_NAME + ".txt"));
for(int i = 0; i < MTPs.length; i += 2) {
String className = MTPs[i];
String addressURL = MTPs[i+1];
if(addressURL.equals(""))
addressURL = null;
String s = installMTP(addressURL, className);
f.println(s);
System.out.println(s);
}
f.close();
}
catch(RemoteException re) {
// This should never happen...
re.printStackTrace();
}
catch (IOException io) {
io.printStackTrace();
}
catch(MTPException mtpe) {
mtpe.printStackTrace();
System.exit(0);
}catch(jade.lang.acl.ACLCodec.CodecException ce){
ce.printStackTrace();
System.exit(0);
}
// Notify platform listeners
fireAddedContainer(myID);
Agent a = theAMS;
a.powerUp(Agent.AMS, systemAgentsThreads);
a = defaultDF;
a.powerUp(Agent.DEFAULT_DF, systemAgentsThreads);
while(agentSpecifiers.hasNext())
{
Iterator i = ((List)agentSpecifiers.next()).iterator();
String agentName =(String)i.next();
String agentClass = (String)i.next();
List tmp = new ArrayList();
for ( ; i.hasNext(); )
tmp.add((String)i.next());
//verify is possible to use toArray() on tmp
int size = tmp.size();
String arguments[] = new String[size];
Iterator it = tmp.iterator();
for(int n = 0; it.hasNext(); n++)
arguments[n] = (String)it.next();
AID agentID = globalAID(agentName);
try {
createAgent(agentID, agentClass,arguments, START);
}
catch(RemoteException re) { // It should never happen
re.printStackTrace();
}
}
System.out.println("Agent Platform ready to accept new containers...");
}
AgentContainer getContainerFromAgent(AID agentID) throws NotFoundException {
AgentDescriptor ad = platformAgents.get(agentID);
if(ad == null) {
throw new NotFoundException("Agent " + agentID.getName() + " not found in getContainerFromAgent()");
}
ad.lock();
ContainerID cid = ad.getContainerID();
AgentContainer ac = containers.getContainer(cid.getName());
ad.unlock();
return ac;
}
// Inner class to detect agent container failures
private class FailureMonitor implements Runnable {
private AgentContainer target;
private ContainerID targetID;
private boolean active = true;
public FailureMonitor(AgentContainer ac, ContainerID cid) {
target = ac;
targetID = cid;
}
public void run() {
while(active) {
try {
target.ping(true); // Hang on this RMI call
}
catch(RemoteException re1) { // Connection down
try {
target.ping(false); // Try a non blocking ping to check
}
catch(RemoteException re2) { // Object down
containers.removeContainer(targetID.getName());
fireRemovedContainer(targetID);
active = false;
}
}
catch(Throwable t) {
t.printStackTrace();
}
}
}
}
// Private methods to notify platform listeners of a significant event.
private void fireAddedContainer(ContainerID cid) {
PlatformEvent ev = new PlatformEvent(PlatformEvent.ADDED_CONTAINER, cid);
for(int i = 0; i < platformListeners.size(); i++) {
AgentManager.Listener l = (AgentManager.Listener)platformListeners.get(i);
l.addedContainer(ev);
}
}
private void fireRemovedContainer(ContainerID cid) {
PlatformEvent ev = new PlatformEvent(PlatformEvent.REMOVED_CONTAINER, cid);
for(int i = 0; i < platformListeners.size(); i++) {
AgentManager.Listener l = (AgentManager.Listener)platformListeners.get(i);
l.removedContainer(ev);
}
}
private void fireBornAgent(ContainerID cid, AID agentID) {
PlatformEvent ev = new PlatformEvent(PlatformEvent.BORN_AGENT, agentID, cid);
for(int i = 0; i < platformListeners.size(); i++) {
AgentManager.Listener l = (AgentManager.Listener)platformListeners.get(i);
l.bornAgent(ev);
}
}
private void fireDeadAgent(ContainerID cid, AID agentID) {
PlatformEvent ev = new PlatformEvent(PlatformEvent.DEAD_AGENT, agentID, cid);
for(int i = 0; i < platformListeners.size(); i++) {
AgentManager.Listener l = (AgentManager.Listener)platformListeners.get(i);
l.deadAgent(ev);
}
}
private void fireMovedAgent(ContainerID from, ContainerID to, AID agentID) {
PlatformEvent ev = new PlatformEvent(agentID, from, to);
for(int i = 0; i < platformListeners.size(); i++) {
AgentManager.Listener l = (AgentManager.Listener)platformListeners.get(i);
l.movedAgent(ev);
}
}
private void fireAddedMTP(String address, ContainerID cid) {
Channel ch = new Channel("FIXME: missing channel name", "FIXME: missing channel protocol", address);
MTPEvent ev = new MTPEvent(MTPEvent.ADDED_MTP, cid, ch);
for(int i = 0; i < platformListeners.size(); i++) {
AgentManager.Listener l = (AgentManager.Listener)platformListeners.get(i);
l.addedMTP(ev);
}
}
private void fireRemovedMTP(String address, ContainerID cid) {
Channel ch = new Channel("FIXME: missing channel name", "FIXME: missing channel protocol", address);
MTPEvent ev = new MTPEvent(MTPEvent.REMOVED_MTP, cid, ch);
for(int i = 0; i < platformListeners.size(); i++) {
AgentManager.Listener l = (AgentManager.Listener)platformListeners.get(i);
l.removedMTP(ev);
}
}
public String addContainer(AgentContainer ac, ContainerID cid) throws RemoteException {
// Send all platform addresses to the new container
String[] containerNames = containers.names();
for(int i = 0; i < containerNames.length; i++) {
String name = containerNames[i];
try {
AgentContainer cont = containers.getContainer(name);
List addresses = containers.getAddresses(name);
Iterator it = addresses.iterator();
while(it.hasNext()) {
String a = (String)it.next();
ac.updateRoutingTable(ADD_RT, a, cont);
}
}
catch(NotFoundException nfe) {
nfe.printStackTrace();
}
}
String name = AUX_CONTAINER_NAME + containersProgNo;
cid.setName(name);
containers.addContainer(name, ac);
containersProgNo++;
// Spawn a blocking RMI call to the remote container in a separate
// thread. This is a failure notification technique.
Thread t = new Thread(new FailureMonitor(ac, cid));
t.start();
// Notify listeners
fireAddedContainer(cid);
// Return the name given to the new container
return name;
}
public void removeContainer(ContainerID cid) throws RemoteException {
containers.removeContainer(cid.getName());
// Notify listeners
fireRemovedContainer(cid);
}
public AgentContainer lookup(ContainerID cid) throws RemoteException, NotFoundException {
AgentContainer ac = containers.getContainer(cid.getName());
return ac;
}
public void bornAgent(AID name, RemoteProxy rp, ContainerID cid) throws RemoteException, NameClashException {
AgentDescriptor desc = new AgentDescriptor();
desc.setProxy(rp);
desc.setContainerID(cid);
AgentDescriptor old = platformAgents.put(name, desc);
// If there's already an agent with name 'name' throw a name clash
// exception unless the old agent's container is dead.
if(old != null) {
RemoteProxy oldProxy = old.getProxy();
try {
oldProxy.ping(); // Make sure agent is reachable, then raise a name clash exception
platformAgents.put(name, old);
throw new NameClashException("Agent " + name + " already present in the platform ");
}
catch(UnreachableException ue) {
System.out.println("Replacing a dead agent ...");
fireDeadAgent(old.getContainerID(), name);
}
}
// Notify listeners
fireBornAgent(cid, name);
}
public void deadAgent(AID name) throws RemoteException, NotFoundException {
AgentDescriptor ad = platformAgents.get(name);
if(ad == null)
throw new NotFoundException("DeadAgent failed to find " + name);
ContainerID cid = ad.getContainerID();
platformAgents.remove(name);
// Notify listeners
fireDeadAgent(cid, name);
}
public RemoteProxy getProxy(AID agentID) throws RemoteException, NotFoundException {
RemoteProxy rp;
AgentDescriptor ad = platformAgents.get(agentID);
if(ad == null)
throw new NotFoundException("getProxy() failed to find " + agentID.getName());
else {
ad.lock();
rp = ad.getProxy();
ad.unlock();
try {
rp.ping();
}
catch(UnreachableException ue) {
throw new NotFoundException("Container for " + agentID.getName() + " is unreachable");
}
return rp;
}
}
public boolean transferIdentity(AID agentID, ContainerID src, ContainerID dest) throws RemoteException, NotFoundException {
AgentDescriptor ad = platformAgents.get(agentID);
if(ad == null)
throw new NotFoundException("transferIdentity() unable to find agent " + agentID.getName());
AgentContainer srcAC = lookup(src);
AgentContainer destAC = lookup(dest);
try {
srcAC.ping(false);
destAC.ping(false);
}
catch(RemoteException re) {
// Abort transaction
return false;
}
// Commit transaction and notify listeners
ad.lock();
ad.setProxy(new RemoteProxyRMI(destAC, agentID));
ad.setContainerID(dest);
fireMovedAgent(src, dest, agentID);
ad.unlock();
return true;
}
// This method overrides AgentContainerImpl.shutDown(); besides
// behaving like the normal AgentContainer version, it makes all
// other agent containers exit.
public void shutDown() {
// Close all MTP links to the outside world
List l = theACC.getLocalAddresses();
String[] addresses = (String[])l.toArray(new String[0]);
for(int i = 0; i < addresses.length; i++) {
try {
String addr = addresses[i];
uninstallMTP(addr);
}
catch(RemoteException re) {
// It should never happen
System.out.println("ERROR: Remote Exception thrown for a local call.");
}
catch(NotFoundException nfe) {
nfe.printStackTrace();
}
catch(MTPException mtpe) {
mtpe.printStackTrace();
}
}
// Close down the ACC
theACC.shutdown();
// Deregister yourself as a container
containers.removeContainer(MAIN_CONTAINER_NAME);
// Kill every other container
AgentContainer[] allContainers = containers.containers();
for(int i = 0; i < allContainers.length; i++) {
AgentContainer ac = allContainers[i];
try {
APKillContainer(ac); // This call removes 'ac' from 'container' map and from the collection 'c'
}
catch(RemoteException re) {
System.out.println("Container is unreachable. Ignoring...");
}
}
// Kill all non-system agents
AID[] allLocalNames = localAgents.keys();
for(int i = 0; i < allLocalNames.length; i++) {
AID id = allLocalNames[i];
if(id.equals(Agent.AMS) ||
id.equals(Agent.DEFAULT_DF))
continue;
// Kill agent and wait for its termination
Agent a = localAgents.get(id);
if(a != null) {
a.doDelete();
a.join();
}
else // FIXME: Should not happen, but it does when there are sniffers around...
System.out.println("Zombie agent [" + id + "]");
}
// Kill system agents, at last
Agent systemAgent = defaultDF;
systemAgent.doDelete();
systemAgent.join();
systemAgent.resetToolkit();
systemAgent = theAMS;
systemAgent.doDelete();
systemAgent.join();
systemAgent.resetToolkit();
removeListener(theAMS);
}
// These methods dispatch agent management operations to
// appropriate Agent Container through RMI.
public void kill(AID agentID, String password) throws NotFoundException, UnreachableException {
try {
AgentContainer ac = getContainerFromAgent(agentID);
ac.killAgent(agentID); // RMI call
}
catch(RemoteException re) {
throw new UnreachableException(re.getMessage());
}
}
public void APKillContainer(AgentContainer ac) throws RemoteException {
try {
ac.exit(); // RMI call
}
catch(UnmarshalException ue) {
// FIXME: This is ignored, since we'd need oneway calls to
// perform exit() remotely
}
}
public void suspend(AID agentID, String password) throws NotFoundException, UnreachableException {
try {
AgentContainer ac = getContainerFromAgent(agentID);
ac.suspendAgent(agentID); // RMI call
}
catch(RemoteException re) {
throw new UnreachableException(re.getMessage());
}
}
public void activate(AID agentID, String password) throws NotFoundException, UnreachableException {
try {
AgentContainer ac = getContainerFromAgent(agentID);
ac.resumeAgent(agentID); // RMI call
}
catch(RemoteException re) {
throw new UnreachableException(re.getMessage());
}
}
public void wait(AID agentID, String password) throws NotFoundException, UnreachableException {
try {
AgentContainer ac = getContainerFromAgent(agentID);
ac.waitAgent(agentID); // RMI call
}
catch(RemoteException re) {
throw new UnreachableException(re.getMessage());
}
}
public void wake(AID agentID, String password) throws NotFoundException, UnreachableException {
try {
AgentContainer ac = getContainerFromAgent(agentID);
ac.wakeAgent(agentID); // RMI call
}
catch(RemoteException re) {
throw new UnreachableException(re.getMessage());
}
}
public void move(AID agentID, Location where, String password) throws NotFoundException, UnreachableException {
// Retrieve the container for the original agent
AgentContainer src = getContainerFromAgent(agentID);
try {
src.moveAgent(agentID, where);
}
catch(RemoteException re) {
throw new UnreachableException(re.getMessage());
}
}
public void copy(AID agentID, Location where, String newAgentID, String password) throws NotFoundException, UnreachableException {
// Retrieve the container for the original agent
AgentContainer src = getContainerFromAgent(agentID);
try {
src.copyAgent(agentID, where, newAgentID); // RMI call
}
catch(RemoteException re) {
throw new UnreachableException(re.getMessage());
}
}
// Methods for Message Transport Protocols management
public void newMTP(String mtpAddress, ContainerID cid) throws RemoteException {
try {
String containerName = cid.getName();
platformAddresses.add(mtpAddress);
containers.addAddress(containerName, mtpAddress);
AgentContainer target = containers.getContainer(containerName);
// To avoid additions/removals of containers during MTP tables update
synchronized(containers) {
// Add the new MTP to the routing tables of all the containers.
AgentContainer[] allContainers = containers.containers();
for(int i = 0; i < allContainers.length; i++) {
AgentContainer ac = allContainers[i];
// Skip target container
if(ac != target)
ac.updateRoutingTable(ADD_RT, mtpAddress, target);
}
}
// Notify listeners (typically the AMS)
fireAddedMTP(mtpAddress, cid);
}
catch(NotFoundException nfe) {
System.out.println("Error: the container " + cid.getName() + " was not found.");
}
}
public void deadMTP(String mtpAddress, ContainerID cid) throws RemoteException {
try {
String containerName = cid.getName();
platformAddresses.remove(mtpAddress);
containers.removeAddress(containerName, mtpAddress);
AgentContainer target = containers.getContainer(containerName);
// To avoid additions/removals of containers during MTP tables update
synchronized(containers) {
// Remove the dead MTP from the routing tables of all the containers.
AgentContainer[] allContainers = containers.containers();
for(int i = 0; i < allContainers.length; i++) {
AgentContainer ac = allContainers[i];
// Skip target container
if(ac != target)
ac.updateRoutingTable(DEL_RT, mtpAddress, target);
}
}
// Notify listeners (typically the AMS)
fireRemovedMTP(mtpAddress, cid);
}
catch(NotFoundException nfe) {
System.out.println("Error: the container " + cid.getName() + " was not found.");
nfe.printStackTrace();
}
}
public String installMTP(String address, ContainerID cid, String className) throws NotFoundException, UnreachableException, MTPException {
String containerName = cid.getName();
AgentContainer target = containers.getContainer(containerName);
try {
return target.installMTP(address, className);
}
catch(RemoteException re) {
throw new UnreachableException("Container " + containerName + " is unreachable.");
}
}
public void uninstallMTP(String address, ContainerID cid) throws NotFoundException, UnreachableException, MTPException {
String containerName = cid.getName();
AgentContainer target = containers.getContainer(containerName);
try {
target.uninstallMTP(address);
}
catch(RemoteException re) {
throw new UnreachableException("Container " + containerName + " is unreachable.");
}
}
// These methods are to be used only by AMS agent.
public void addListener(AgentManager.Listener l) {
platformListeners.add(l);
}
public void removeListener(AgentManager.Listener l) {
platformListeners.remove(l);
}
// This is used by AMS to obtain the set of all the Agent Containers of the platform.
public ContainerID[] containerIDs() {
String[] names = containers.names();
ContainerID[] ids = new ContainerID[names.length];
for(int i = 0; i < names.length; i++) {
ids[i] = new ContainerID(names[i], null);
}
return ids;
}
// This is used by AMS to obtain the list of all the agents of the platform.
public AID[] agentNames() {
return platformAgents.keys();
}
public String[] platformAddresses() {
Object[] objs = platformAddresses.toArray();
String[] result = new String[objs.length];
System.arraycopy(objs, 0, result, 0, result.length);
return result;
}
// This maps the name of an agent to the ID of the Agent Container the agent lives in.
public ContainerID getContainerID(AID agentID) throws NotFoundException {
AgentDescriptor ad = platformAgents.get(agentID);
if(ad == null)
throw new NotFoundException("Agent " + agentID.getName() + " not found in getContainerID()");
ad.lock();
ContainerID result = ad.getContainerID();
ad.unlock();
return result;
}
// This is called in response to a 'create-agent' action
public void create(String agentName, String className, String args[], ContainerID cid) throws UnreachableException {
try {
String containerName = cid.getName();
AgentContainer ac;
// If no name is given, the agent is started on the MainContainer itself
if(containerName == null)
ac = this;
else {
try {
ac = containers.getContainer(containerName);
}
catch(NotFoundException nfe) {
// If a wrong name is given, then again the agent starts on the MainContainer itself
ac = this;
}
}
AID id = globalAID(agentName);
ac.createAgent(id, className, args,START); // RMI call
}
catch(RemoteException re) {
throw new UnreachableException(re.getMessage());
}
}
public void killContainer(ContainerID cid) {
// This call spawns a separate thread in order to avoid deadlock.
try {
String containerName = cid.getName();
final AgentContainer ac = containers.getContainer(containerName);
final String cName = containerName;
Thread auxThread = new Thread(new Runnable() {
public void run() {
try {
APKillContainer(ac);
}
catch(RemoteException re) {
System.out.println("Container " + cName + " is unreachable.");
containers.removeContainer(cName);
fireRemovedContainer(new ContainerID(cName, null));
}
}
});
auxThread.start();
}
catch(NotFoundException nfe) {
nfe.printStackTrace();
}
}
// Enables the given sniffer on every agent in the list (one RMI call each).
public void sniffOn(AID snifferName, List toBeSniffed) throws NotFoundException, UnreachableException {
    try {
        for (Iterator it = toBeSniffed.iterator(); it.hasNext(); ) {
            AID target = (AID) it.next();
            AgentContainer container = getContainerFromAgent(target);
            container.enableSniffer(snifferName, target); // RMI call
        }
    }
    catch (RemoteException re) {
        throw new UnreachableException(re.getMessage());
    }
}

// Disables the given sniffer on every agent in the list (one RMI call each).
public void sniffOff(AID snifferName, List notToBeSniffed) throws NotFoundException, UnreachableException {
    try {
        for (Iterator it = notToBeSniffed.iterator(); it.hasNext(); ) {
            AID target = (AID) it.next();
            AgentContainer container = getContainerFromAgent(target);
            container.disableSniffer(snifferName, target); // RMI call
        }
    }
    catch (RemoteException re) {
        throw new UnreachableException(re.getMessage());
    }
}
} |
package simpledb;
import java.util.*;
import javax.swing.*;
import javax.swing.tree.*;
/**
* The JoinOptimizer class is responsible for ordering a series of joins
* optimally, and for selecting the best instantiation of a join for a given
* logical plan.
*/
public class JoinOptimizer {
LogicalPlan p;
Vector<LogicalJoinNode> joins;
/**
* Constructor
*
* @param p
* the logical plan being optimized
* @param joins
* the list of joins being performed
*/
public JoinOptimizer(LogicalPlan p, Vector<LogicalJoinNode> joins) {
    // Keep direct references; the optimizer does not copy the join list.
    this.p = p;
    this.joins = joins;
}
/**
* Return best iterator for computing a given logical join, given the
* specified statistics, and the provided left and right subplans. Note that
* there is insufficient information to determine which plan should be the
* inner/outer here -- because DbIterator's don't provide any cardinality
* estimates, and stats only has information about the base tables. For this
* reason, the plan1
*
* @param lj
* The join being considered
* @param plan1
* The left join node's child
* @param plan2
* The right join node's child
*/
public static DbIterator instantiateJoin(LogicalJoinNode lj,
        DbIterator plan1, DbIterator plan2) throws ParsingException {
    int t1id = 0, t2id = 0;
    DbIterator j;
    // Resolve the left join field to a column index in plan1's schema.
    try {
        t1id = plan1.getTupleDesc().fieldNameToIndex(lj.f1QuantifiedName);
    } catch (NoSuchElementException e) {
        throw new ParsingException("Unknown field " + lj.f1QuantifiedName);
    }
    if (lj instanceof LogicalSubplanJoinNode) {
        // Subplan (subquery) join: compares against column 0 of plan2
        // (presumably the subquery's single output column — confirm).
        t2id = 0;
    } else {
        try {
            t2id = plan2.getTupleDesc().fieldNameToIndex(
                    lj.f2QuantifiedName);
        } catch (NoSuchElementException e) {
            throw new ParsingException("Unknown field "
                    + lj.f2QuantifiedName);
        }
    }
    JoinPredicate p = new JoinPredicate(t1id, lj.p, t2id);
    // Always instantiated as a nested-loops Join here; see Javadoc above for
    // why inner/outer cannot be chosen at this point.
    j = new Join(p,plan1,plan2);
    return j;
}
/**
* Estimate the cost of a join.
*
* The cost of the join should be calculated based on the join algorithm (or
* algorithms) that you implemented for Lab 2. It should be a function of
* the amount of data that must be read over the course of the query, as
* well as the number of CPU opertions performed by your join. Assume that
* the cost of a single predicate application is roughly 1.
*
*
* @param j
* A LogicalJoinNode representing the join operation being
* performed.
* @param card1
* Estimated cardinality of the left-hand side of the query
* @param card2
* Estimated cardinality of the right-hand side of the query
* @param cost1
* Estimated cost of one full scan of the table on the left-hand
* side of the query
* @param cost2
* Estimated cost of one full scan of the table on the right-hand
* side of the query
* @return An estimate of the cost of this query, in terms of cost1 and
* cost2
*/
public double estimateJoinCost(LogicalJoinNode j, int card1, int card2,
        double cost1, double cost2) {
    if (!(j instanceof LogicalSubplanJoinNode)) {
        // Nested-loops join: one scan of the outer (cost1), one scan of the
        // inner per outer tuple (card1 * cost2), plus one predicate
        // application per tuple pair (card1 * card2).
        return cost1 + card1 * cost2 + card1 * card2;
    }
    // A LogicalSubplanJoinNode represents a subquery; proper support is not
    // required for Lab 5, so use a simplified estimate.
    return card1 + cost1 + cost2;
}
/**
* Estimate the cardinality of a join. The cardinality of a join is the
* number of tuples produced by the join.
*
* @param j
* A LogicalJoinNode representing the join operation being
* performed.
* @param card1
* Cardinality of the left-hand table in the join
* @param card2
* Cardinality of the right-hand table in the join
* @param t1pkey
* Is the left-hand table a primary-key table?
* @param t2pkey
* Is the right-hand table a primary-key table?
* @param stats
* The table stats, referenced by table names, not alias
* @return The cardinality of the join
*/
public int estimateJoinCardinality(LogicalJoinNode j, int card1, int card2,
        boolean t1pkey, boolean t2pkey, Map<String, TableStats> stats) {
    if (j instanceof LogicalSubplanJoinNode) {
        // A LogicalSubplanJoinNode represents a subquery.
        // You do not need to implement proper support for these for Lab 5,
        // so assume the subquery does not change the cardinality.
        return card1;
    } else {
        // Delegate to the static per-table estimator using the join's
        // aliases, pure field names and the alias->tableId mapping.
        return estimateTableJoinCardinality(j.p, j.t1Alias, j.t2Alias,
                j.f1PureName, j.f2PureName, card1, card2, t1pkey, t2pkey,
                stats, p.getTableAliasToIdMapping());
    }
}
/**
* Estimate the join cardinality of two tables.
* */
public static int estimateTableJoinCardinality(Predicate.Op joinOp,
        String table1Alias, String table2Alias, String field1PureName,
        String field2PureName, int card1, int card2, boolean t1pkey,
        boolean t2pkey, Map<String, TableStats> stats,
        Map<String, Integer> tableAliasToId) {
    // Non-equality predicates: assume 30% of the cross product survives.
    // (NOTE(review): card1 * card2 is an int product and could overflow for
    // very large inputs — behavior kept as-is.)
    if (!joinOp.equals(Predicate.Op.EQUALS)) {
        return (int) (card1 * card2 * 0.3);
    }
    // Equi-join on a primary key: each tuple of the other side matches at
    // most one key tuple, so the key side bounds nothing.
    if (t1pkey) {
        return card2;
    }
    if (t2pkey) {
        return card1;
    }
    // No key information: conservatively take the larger input.
    return Math.max(card1, card2);
}
/**
* Helper method to enumerate all of the subsets of a given size of a
* specified vector.
*
* @param v
* The vector whose subsets are desired
* @param size
* The size of the subsets of interest
* @return a set of all subsets of the specified size
*/
/**
 * Helper method to enumerate all subsets of exactly {@code size} elements
 * of the given vector.
 *
 * Improvement: uses the {@code HashSet} copy constructor instead of
 * {@code clone()} plus an unchecked cast, removing the clone anti-pattern
 * and the need for {@code @SuppressWarnings("unchecked")} while producing
 * exactly the same subsets.
 *
 * @param v
 *            The vector whose subsets are desired
 * @param size
 *            The size of the subsets of interest
 * @return a set of all subsets of the specified size
 */
public <T> Set<Set<T>> enumerateSubsets(Vector<T> v, int size) {
    Set<Set<T>> els = new HashSet<Set<T>>();
    els.add(new HashSet<T>());
    // Grow every subset by one distinct element per round; a candidate whose
    // add() returns false already contained the element and is dropped, so
    // after 'size' rounds only subsets of exactly 'size' elements remain.
    for (int i = 0; i < size; i++) {
        Set<Set<T>> grown = new HashSet<Set<T>>();
        for (Set<T> s : els) {
            for (T t : v) {
                Set<T> candidate = new HashSet<T>(s);
                if (candidate.add(t)) {
                    grown.add(candidate);
                }
            }
        }
        els = grown;
    }
    return els;
}
/**
* Compute a logical, reasonably efficient join on the specified tables. See
* PS4 for hints on how this should be implemented.
*
* @param stats
* Statistics for each table involved in the join, referenced by
* base table names, not alias
* @param filterSelectivities
* Selectivities of the filter predicates on each table in the
* join, referenced by table alias (if no alias, the base table
* name)
* @param explain
* Indicates whether your code should explain its query plan or
* simply execute it
* @return A Vector<LogicalJoinNode> that stores joins in the left-deep
* order in which they should be executed.
* @throws ParsingException
* when stats or filter selectivities is missing a table in the
* join, or or when another internal error occurs
*/
public Vector<LogicalJoinNode> orderJoins(
        HashMap<String, TableStats> stats,
        HashMap<String, Double> filterSelectivities, boolean explain)
        throws ParsingException {
    // Bottom-up dynamic programming over join subsets: for each subset size
    // i, compute the cheapest left-deep plan for every i-element subset and
    // cache it; larger subsets reuse the cached sub-results.
    PlanCache cache = new PlanCache();
    for (int i = 1; i <= joins.size(); i++) {
        for (Set<LogicalJoinNode> s : enumerateSubsets(joins, i)) {
            double bestCost = Double.MAX_VALUE;
            for (LogicalJoinNode j : s) {
                // Returns null when the candidate is not cheaper than
                // bestCost, or when it would require a cross product.
                CostCard subplan = computeCostAndCardOfSubplan(stats, filterSelectivities,
                        j, s, bestCost, cache);
                if (subplan != null) {
                    cache.addPlan(s, subplan.cost, subplan.card, subplan.plan);
                    bestCost = subplan.cost;
                }
            }
        }
    }
    // The cached plan for the full join set is the final left-deep ordering.
    Vector<LogicalJoinNode> order = cache.getOrder(new HashSet<LogicalJoinNode>(joins));
    if (explain) {
        printJoins(order, cache, stats, filterSelectivities);
    }
    return order;
}
/**
* This is a helper method that computes the cost and cardinality of joining
* joinToRemove to joinSet (joinSet should contain joinToRemove), given that
* all of the subsets of size joinSet.size() - 1 have already been computed
* and stored in PlanCache pc.
*
* @param stats
* table stats for all of the tables, referenced by table names
* rather than alias (see {@link #orderJoins})
* @param filterSelectivities
* the selectivities of the filters over each of the tables
* (where tables are indentified by their alias or name if no
* alias is given)
* @param joinToRemove
* the join to remove from joinSet
* @param joinSet
* the set of joins being considered
* @param bestCostSoFar
* the best way to join joinSet so far (minimum of previous
* invocations of computeCostAndCardOfSubplan for this joinSet,
* from returned CostCard)
* @param pc
* the PlanCache for this join; should have subplans for all
* plans of size joinSet.size()-1
* @return A {@link CostCard} objects desribing the cost, cardinality,
* optimal subplan
* @throws ParsingException
* when stats, filterSelectivities, or pc object is missing
* tables involved in join
*/
@SuppressWarnings("unchecked")
private CostCard computeCostAndCardOfSubplan(
        HashMap<String, TableStats> stats,
        HashMap<String, Double> filterSelectivities,
        LogicalJoinNode joinToRemove, Set<LogicalJoinNode> joinSet,
        double bestCostSoFar, PlanCache pc) throws ParsingException {
    LogicalJoinNode j = joinToRemove;
    Vector<LogicalJoinNode> prevBest;
    if (this.p.getTableId(j.t1Alias) == null)
        throw new ParsingException("Unknown table " + j.t1Alias);
    if (this.p.getTableId(j.t2Alias) == null)
        throw new ParsingException("Unknown table " + j.t2Alias);
    String table1Name = Database.getCatalog().getTableName(
            this.p.getTableId(j.t1Alias));
    String table2Name = Database.getCatalog().getTableName(
            this.p.getTableId(j.t2Alias));
    String table1Alias = j.t1Alias;
    String table2Alias = j.t2Alias;
    // Remaining joins once j is removed; cloned so joinSet is untouched.
    Set<LogicalJoinNode> news = (Set<LogicalJoinNode>) ((HashSet<LogicalJoinNode>) joinSet)
            .clone();
    news.remove(j);
    double t1cost, t2cost;
    int t1card, t2card;
    boolean leftPkey, rightPkey;
    if (news.isEmpty()) { // base case -- both are base relations
        prevBest = new Vector<LogicalJoinNode>();
        t1cost = stats.get(table1Name).estimateScanCost();
        t1card = stats.get(table1Name).estimateTableCardinality(
                filterSelectivities.get(j.t1Alias));
        leftPkey = isPkey(j.t1Alias, j.f1PureName);
        // t2Alias may be null (subplan join); treat that side as zero-cost.
        t2cost = table2Alias == null ? 0 : stats.get(table2Name)
                .estimateScanCost();
        t2card = table2Alias == null ? 0 : stats.get(table2Name)
                .estimateTableCardinality(
                        filterSelectivities.get(j.t2Alias));
        rightPkey = table2Alias == null ? false : isPkey(table2Alias,
                j.f2PureName);
    } else {
        // news is not empty -- figure best way to join j to news
        prevBest = pc.getOrder(news);
        // possible that we have not cached an answer, if subset
        // includes a cross product
        if (prevBest == null) {
            return null;
        }
        double prevBestCost = pc.getCost(news);
        int bestCard = pc.getCard(news);
        // estimate cost of right subtree
        if (doesJoin(prevBest, table1Alias)) { // j.t1 is in prevBest
            t1cost = prevBestCost; // left side just has cost of whatever
            // left
            // subtree is
            t1card = bestCard;
            leftPkey = hasPkey(prevBest);
            t2cost = j.t2Alias == null ? 0 : stats.get(table2Name)
                    .estimateScanCost();
            t2card = j.t2Alias == null ? 0 : stats.get(table2Name)
                    .estimateTableCardinality(
                            filterSelectivities.get(j.t2Alias));
            rightPkey = j.t2Alias == null ? false : isPkey(j.t2Alias,
                    j.f2PureName);
        } else if (doesJoin(prevBest, j.t2Alias)) { // j.t2 is in prevbest
            // (both
            // shouldn't be)
            t2cost = prevBestCost; // left side just has cost of whatever
            // left
            // subtree is
            t2card = bestCard;
            rightPkey = hasPkey(prevBest);
            t1cost = stats.get(table1Name).estimateScanCost();
            t1card = stats.get(table1Name).estimateTableCardinality(
                    filterSelectivities.get(j.t1Alias));
            leftPkey = isPkey(j.t1Alias, j.f1PureName);
        } else {
            // don't consider this plan if one of j.t1 or j.t2
            // isn't a table joined in prevBest (cross product)
            return null;
        }
    }
    // case where prevbest is left
    double cost1 = estimateJoinCost(j, t1card, t2card, t1cost, t2cost);
    // Also try the join with inner/outer swapped; keep the cheaper order and
    // swap the pkey flags to match.
    LogicalJoinNode j2 = j.swapInnerOuter();
    double cost2 = estimateJoinCost(j2, t2card, t1card, t2cost, t1cost);
    if (cost2 < cost1) {
        boolean tmp;
        j = j2;
        cost1 = cost2;
        tmp = rightPkey;
        rightPkey = leftPkey;
        leftPkey = tmp;
    }
    // Prune: not better than the best plan already found for this subset.
    if (cost1 >= bestCostSoFar)
        return null;
    CostCard cc = new CostCard();
    cc.card = estimateJoinCardinality(j, t1card, t2card, leftPkey,
            rightPkey, stats);
    cc.cost = cost1;
    cc.plan = (Vector<LogicalJoinNode>) prevBest.clone();
    cc.plan.addElement(j); // prevbest is left -- add new join to end
    return cc;
}
/**
* Return true if the specified table is in the list of joins, false
* otherwise
*/
// Returns true if the given table alias appears on either side of any join
// in the list (t2Alias may be null for subplan joins).
private boolean doesJoin(Vector<LogicalJoinNode> joinlist, String table) {
    for (LogicalJoinNode node : joinlist) {
        boolean onLeft = node.t1Alias.equals(table);
        boolean onRight = node.t2Alias != null && node.t2Alias.equals(table);
        if (onLeft || onRight) {
            return true;
        }
    }
    return false;
}
/**
* Return true if field is a primary key of the specified table, false
* otherwise
*
* @param tableAlias
* The alias of the table in the query
* @param field
* The pure name of the field
*/
/**
 * Return true if field is a primary key of the specified table, false
 * otherwise.
 *
 * Improvement: the original unboxed {@code p.getTableId(tableAlias)} into an
 * {@code int} (NPE for an unknown alias) and called {@code equals} on the
 * primary-key string without a null check (NPE when the table has no primary
 * key — TODO confirm getPrimaryKey may return null). Both cases now simply
 * report "not a primary key".
 *
 * @param tableAlias
 *            The alias of the table in the query
 * @param field
 *            The pure name of the field
 */
private boolean isPkey(String tableAlias, String field) {
    Integer tid1 = p.getTableId(tableAlias);
    if (tid1 == null) {
        return false;
    }
    String pkey1 = Database.getCatalog().getPrimaryKey(tid1);
    return pkey1 != null && pkey1.equals(field);
}
/**
* Return true if a primary key field is joined by one of the joins in
* joinlist
*/
// Returns true if any join in the list joins on a primary-key field of
// either of its tables.
private boolean hasPkey(Vector<LogicalJoinNode> joinlist) {
    for (LogicalJoinNode node : joinlist) {
        if (isPkey(node.t1Alias, node.f1PureName)) {
            return true;
        }
        if (node.t2Alias != null && isPkey(node.t2Alias, node.f2PureName)) {
            return true;
        }
    }
    return false;
}
/**
* Helper function to display a Swing window with a tree representation of
* the specified list of joins. See {@link #orderJoins}, which may want to
* call this when the analyze flag is true.
*
* @param js
* the join plan to visualize
* @param pc
* the PlanCache accumulated whild building the optimal plan
* @param stats
* table statistics for base tables
* @param selectivities
* the selectivities of the filters over each of the tables
* (where tables are indentified by their alias or name if no
* alias is given)
*/
private void printJoins(Vector<LogicalJoinNode> js, PlanCache pc,
        HashMap<String, TableStats> stats,
        HashMap<String, Double> selectivities) {
    JFrame f = new JFrame("Join Plan for " + p.getQuery());
    // Set the default close operation for the window,
    // or else the program won't exit when clicking close button
    f.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
    f.setVisible(true);
    f.setSize(300, 500);
    // Maps each table alias to the subtree node that currently contains it,
    // so later joins attach on top of earlier ones.
    HashMap<String, DefaultMutableTreeNode> m = new HashMap<String, DefaultMutableTreeNode>();
    // int numTabs = 0;
    // int k;
    DefaultMutableTreeNode root = null, treetop = null;
    HashSet<LogicalJoinNode> pathSoFar = new HashSet<LogicalJoinNode>();
    boolean neither;
    System.out.println(js);
    for (LogicalJoinNode j : js) {
        pathSoFar.add(j);
        System.out.println("PATH SO FAR = " + pathSoFar);
        String table1Name = Database.getCatalog().getTableName(
                this.p.getTableId(j.t1Alias));
        String table2Name = Database.getCatalog().getTableName(
                this.p.getTableId(j.t2Alias));
        // Double c = pc.getCost(pathSoFar);
        // 'neither' stays true only if both children are new base tables.
        neither = true;
        root = new DefaultMutableTreeNode("Join " + j + " (Cost ="
                + pc.getCost(pathSoFar) + ", card = "
                + pc.getCard(pathSoFar) + ")");
        DefaultMutableTreeNode n = m.get(j.t1Alias);
        if (n == null) { // never seen this table before
            n = new DefaultMutableTreeNode(j.t1Alias
                    + " (Cost = "
                    + stats.get(table1Name).estimateScanCost()
                    + ", card = "
                    + stats.get(table1Name).estimateTableCardinality(
                            selectivities.get(j.t1Alias)) + ")");
            root.add(n);
        } else {
            // make left child root n
            root.add(n);
            neither = false;
        }
        m.put(j.t1Alias, root);
        n = m.get(j.t2Alias);
        if (n == null) { // never seen this table before
            n = new DefaultMutableTreeNode(
                    j.t2Alias == null ? "Subplan"
                            : (j.t2Alias
                                    + " (Cost = "
                                    + stats.get(table2Name)
                                            .estimateScanCost()
                                    + ", card = "
                                    + stats.get(table2Name)
                                            .estimateTableCardinality(
                                                    selectivities
                                                            .get(j.t2Alias)) + ")"));
            root.add(n);
        } else {
            // make right child root n
            root.add(n);
            neither = false;
        }
        m.put(j.t2Alias, root);
        // unless this table doesn't join with other tables,
        // all tables are accessed from root
        if (!neither) {
            for (String key : m.keySet()) {
                m.put(key, root);
            }
        }
        treetop = root;
    }
    JTree tree = new JTree(treetop);
    JScrollPane treeView = new JScrollPane(tree);
    tree.setShowsRootHandles(true);
    // Set the icon for leaf nodes.
    ImageIcon leafIcon = new ImageIcon("join.jpg");
    DefaultTreeCellRenderer renderer = new DefaultTreeCellRenderer();
    renderer.setOpenIcon(leafIcon);
    renderer.setClosedIcon(leafIcon);
    tree.setCellRenderer(renderer);
    f.setSize(300, 500);
    f.add(treeView);
    // Expand every row so the whole plan is visible at once.
    for (int i = 0; i < tree.getRowCount(); i++) {
        tree.expandRow(i);
    }
    if (js.size() == 0) {
        f.add(new JLabel("No joins in plan."));
    }
    f.pack();
}
} |
package asmcup;
import java.io.*;
import java.nio.file.Files;
import java.util.*;
public class Compiler implements VMConsts {
/**
 * Command-line entry point: assembles the input source file and writes the
 * compiled 256-byte image to the output file.
 *
 * Improvements: the output stream is now opened in try-with-resources so it
 * is closed even if write() throws (the original leaked it on failure), and
 * it is opened only after compilation succeeds, so a compile error no
 * longer creates/truncates the output file.
 */
public static void main(String[] args) throws IOException {
    if (args.length < 2) {
        System.err.println("USAGE: asmcup-compile <in> <out>");
        System.exit(1);
        return;
    }
    File inFile = new File(args[0]);
    File outFile = new File(args[1]);
    List<String> source = Files.readAllLines(inFile.toPath());
    Compiler compiler = new Compiler();
    byte[] program = compiler.compile(source);
    try (FileOutputStream output = new FileOutputStream(outFile)) {
        output.write(program);
    }
}
protected ArrayList<Statement> statements;
protected HashMap<String, Integer> labels;
protected byte[] ram;
protected int pc;
// Writes one byte at the current program counter; pc wraps modulo 256
// because the output image is a single 256-byte RAM page (see compile()).
protected void write8(int value) {
    ram[pc] = (byte)(value & 0xFF);
    pc = (pc + 1) & 0xFF;
}

// Packs a 2-bit opcode with 6 bits of operand data into one byte:
// bits 0-1 = op, bits 2-7 = data. Rejects out-of-range values up front.
protected void writeOp(int op, int data) {
    if ((op >> 2) != 0) {
        throw new IllegalArgumentException("Opcode is greater than 2-bits");
    }
    if ((data >> 6) != 0) {
        throw new IllegalArgumentException("Opcode data is greater than 6-bits");
    }
    write8(op | (data << 2));
}

// Little-endian 16-bit write (low byte first).
protected void write16(int value) {
    write8(value & 0xFF);
    write8(value >> 8);
}

// Little-endian 32-bit write (low half first).
protected void write32(int value) {
    write16(value & 0xFFFF);
    write16(value >> 16);
}

// Stores the raw IEEE-754 bit pattern of the float.
protected void writeFloat(float value) {
    write32(Float.floatToRawIntBits(value));
}
/**
 * Assembles the given source lines into a 256-byte memory image.
 * Three passes: (1) parse every line into Statement objects, (2) measure
 * statement sizes — which advances pc and lets Label.measure() record each
 * label's final address — and (3) emit the machine code.
 */
public byte[] compile(Iterable<String> lines) {
    ram = new byte[256];
    labels = new HashMap<>();
    statements = new ArrayList<>();
    pc = 0;
    // Pass 1: parse (also registers Label statements).
    for (String line : lines) {
        parseLine(line);
    }
    // Pass 2: measure sizes; labels capture their addresses here.
    pc = 0;
    for (Statement statement : statements) {
        pc += statement.measure();
    }
    // Pass 3: emit bytes now that every label address is known.
    pc = 0;
    for (Statement statement : statements) {
        statement.compile();
    }
    // Release per-compilation state so the instance can be reused.
    labels.clear();
    statements.clear();
    labels = null;
    statements = null;
    pc = 0;
    byte[] compiled = ram;
    ram = null;
    return compiled;
}
// Parses one source line: strips comments, peels off leading label
// definitions, then dispatches the remaining mnemonic and arguments.
protected void parseLine(String line) {
    String text = line.trim();
    if (text.isEmpty()) {
        return;
    }
    text = parseLabels(parseComments(text));
    if (text.isEmpty()) {
        return;
    }
    String[] parts = text.split("\\s+", 2);
    if (parts.length <= 0) {
        return;
    }
    String mnemonic = parts[0].toLowerCase().trim();
    parseStatement(mnemonic, parseArgs(parts));
}
// Dispatches a parsed statement: data directives, stack ops and branches are
// handled explicitly; any other mnemonic is assumed to be a VM function name.
protected void parseStatement(String cmd, String[] args) {
    switch (cmd) {
    case "db":
    case "db8":
        db(args);
        break;
    case "dbf":
        dbf(args);
        break;
    case "push8":
        push8(args);
        break;
    case "pop8":
        pop8(args);
        break;
    case "pushf":
        pushf(args);
        break;
    case "popf":
        popf(args);
        break;
    case "jne":
        jne(args);
        break;
    case "jmp":
        jmp(args);
        break;
    default:
        func(cmd, args);
        break;
    }
}

// Shared empty payload for argument-less function opcodes.
final static byte[] NO_DATA = {};

// Emits a FUNC opcode for a named VM function; rejects unknown names.
protected void func(String cmd, String[] args) {
    if (!VMFuncTable.exists(cmd)) {
        throw new IllegalArgumentException("Unknown function " + cmd);
    }
    immediate(OP_FUNC, VMFuncTable.parse(cmd), NO_DATA);
}
// db/db8 directive: emits one raw byte per literal argument.
protected void db(String[] args) {
    statements.add(new Statement() {
        public int measure() {
            return args.length;
        }
        public void compile() {
            for (String s : args) {
                write8(parseLiteral(s));
            }
        }
    });
}

// dbf directive: emits one 4-byte IEEE-754 float per argument.
protected void dbf(String[] args) {
    statements.add(new Statement() {
        public int measure() {
            return args.length * 4;
        }
        public void compile() {
            for (String s : args) {
                writeFloat(Float.parseFloat(s));
            }
        }
    });
}
// push8: dispatch on operand form — a literal ("#...") pushes an immediate
// byte, anything else is treated as a memory reference.
protected void push8(String[] args) {
    String s = expectOne(args);
    if (isLiteral(s)) {
        pushLiteral8(args);
    } else {
        pushMemory8(args);
    }
}

// Immediate byte push: opcode + 1-byte payload.
protected void pushLiteral8(String[] args) {
    immediate(OP_PUSH, MAGIC_PUSH_BYTE_IMMEDIATE, args);
}

// Byte push from a labeled memory address.
protected void pushMemory8(String[] args) {
    reference(OP_PUSH, MAGIC_PUSH_BYTE_MEMORY, args);
}

// Pops a byte into a labeled memory address.
protected void pop8(String[] args) {
    reference(OP_POP, MAGIC_POP_BYTE, args);
}

// pushf: a bare symbol reads a float from memory, anything else is parsed
// as an immediate float constant.
// NOTE(review): push8 dispatches on isLiteral while pushf dispatches on
// isSymbol — confirm both classifications cover all intended operand forms.
protected void pushf(String[] args) {
    String s = expectOne(args);
    if (isSymbol(s)) {
        pushMemoryFloat(s);
    } else {
        pushLiteralFloat(s);
    }
}

// Float push from a labeled memory address.
protected void pushMemoryFloat(String args) {
    reference(OP_PUSH, MAGIC_PUSH_FLOAT_MEMORY, args);
}

// Immediate float push: 1 opcode byte + 4-byte IEEE-754 payload (5 bytes).
protected void pushLiteralFloat(String s) {
    statements.add(new Statement() {
        public int measure() {
            return 5;
        }
        public void compile() {
            writeOp(OP_PUSH, MAGIC_PUSH_FLOAT_IMMEDIATE);
            writeFloat(Float.parseFloat(s));
        }
    });
}
// Pops a float into a labeled memory address.
// BUGFIX: previously emitted OP_BRANCH with MAGIC_POP_FLOAT; a float pop
// must use the POP opcode, mirroring pop8 (OP_POP + MAGIC_POP_BYTE).
protected void popf(String[] args) {
    reference(OP_POP, MAGIC_POP_FLOAT, args);
}
// Conditional branch (jump-if-not-equal).
// BUGFIX: the immediate/indirect magics were swapped — a parenthesized
// "(label)" operand (isIndirect) must emit MAGIC_BRANCH_INDIRECT, and a
// bare label must emit MAGIC_BRANCH_IMMEDIATE.
// NOTE(review): the indirect operand keeps its parentheses when looked up
// in the label table — confirm whether they should be stripped first.
protected void jne(String[] args) {
    String s = expectOne(args);
    if (isIndirect(s)) {
        reference(OP_BRANCH, MAGIC_BRANCH_INDIRECT, s);
    } else {
        reference(OP_BRANCH, MAGIC_BRANCH_IMMEDIATE, s);
    }
}
protected void jmp(String[] args) {
reference(OP_BRANCH, MAGIC_BRANCH_ALWAYS, args);
}
public static boolean isLiteral(String s) {
return s.startsWith("
}
public static boolean isSymbol(String s) {
return s.matches("^[a-zA-Z_]+[a-zA-Z_0-9]*$");
}
public static boolean isIndirect(String s) {
return s.startsWith("(");
}
public static String expectOne(String[] args) {
return args[0];
}
// Strips a trailing ';' comment, returning the trimmed code portion.
protected String parseComments(String line) {
    int semicolon = line.indexOf(';');
    String code = (semicolon < 0) ? line : line.substring(0, semicolon);
    return code.trim();
}
// Peels off leading "name:" label definitions, registering each as a Label
// statement (which records its address during the measuring pass), and
// returns the remainder of the line.
protected String parseLabels(String line) {
    int pos;
    while ((pos = line.indexOf(':')) >= 0) {
        if (pos == 0) {
            throw new IllegalArgumentException("Expected label name");
        }
        String name = line.substring(0, pos);
        statements.add(new Label(name));
        line = line.substring(pos + 1).trim();
    }
    return line.trim();
}

// Shared empty array so argument-less statements allocate nothing.
protected static final String[] EMPTY_ARGS = {};

// Splits the operand part (parts[1]) on commas and trims each argument.
protected String[] parseArgs(String[] parts) {
    if (parts.length <= 1) {
        return EMPTY_ARGS;
    }
    String[] args = parts[1].split(",");
    for (int i=0; i < args.length; i++) {
        args[i] = args[i].trim();
    }
    return args;
}
protected static int parseLiteral(String s) {
if (!s.startsWith("
throw new IllegalArgumentException("Expected
}
s = s.substring(1);
if (RobotConstsTable.contains(s)) {
return RobotConstsTable.get(s);
}
if (s.startsWith("$")) {
return Integer.parseInt(s.substring(1), 16);
}
return Integer.parseInt(s, 10);
}
// Emits a 2-byte instruction: opcode byte + the address of the named label.
// The label lookup is deferred to compile() (pass 3), by which time the
// measuring pass has registered every label's address.
protected void reference(int op, int data, String s) {
    statements.add(new Statement() {
        public int measure() {
            return 2;
        }
        public void compile() {
            if (!labels.containsKey(s)) {
                throw new IllegalArgumentException(String.format("Cannot find label '%s'", s));
            }
            writeOp(op, data);
            write8(labels.get(s));
        }
    });
}

// Convenience overload: single-operand form.
protected void reference(int op, int data, String[] args) {
    reference(op, data, expectOne(args));
}

// Emits an opcode byte followed by an inline payload.
protected void immediate(int op, int data, byte[] payload) {
    statements.add(new Statement() {
        public int measure() {
            return 1 + payload.length;
        }
        public void compile() {
            writeOp(op, data);
            for (byte b : payload) {
                write8(b);
            }
        }
    });
}

// Convenience overload: single-operand form.
protected void immediate(int op, int data, String[] args) {
    immediate(op, data, expectOne(args));
}

// Convenience overload: one literal operand encoded as a one-byte payload.
protected void immediate(int op, int data, String s) {
    byte[] payload = new byte[] { (byte)parseLiteral(s) };
    immediate(op, data, payload);
}
// A parsed source statement. measure() returns its size in bytes (and may
// record state such as label addresses); compile() emits its bytes.
protected abstract class Statement {
    public abstract int measure();
    public abstract void compile();
}

// A label definition: occupies zero bytes; measure() records the current pc
// as the label's address during the measuring pass.
protected class Label extends Statement {
    final String name;
    public Label(String name) {
        this.name = name;
    }
    public int measure() {
        labels.put(name, pc);
        return 0;
    }
    // Nothing to emit for a label.
    public void compile() {
    }
}
} |
package view;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseMotionAdapter;
import javax.swing.JPanel;
import model.AbstractTile;
import model.Board;
import model.Piece;
public class BoardView extends JPanel{
private static final long serialVersionUID = 1L;
Board b;
TileView tiles[];
/** Double Buffering technique requires an offscreen image. */
Image offscreenImage;
Graphics offscreenGraphics;
Graphics canvasGraphics;
//TODO Change Arguments: Board b
public BoardView(Board b){
    // Fixed 384x384 canvas; redraw() iterates a 12x12 tile grid
    // (presumably 32px tiles — confirm against TileView).
    setPreferredSize(new Dimension(384, 384));
    this.b = b;
}
/**
* Redraw recreates everything offscreen. Notice how there is an offscreen image and graphics.
* It even paints all shapes to the offscreen graphics supplied to the paintShape object
* -> It is believed that the offscreenGraphics is an object stored within the offscreenImage
* -> This is the method called BEFORE calling .paint() on this panel
*/
public void redraw() {
    // nothing to draw into? Must stop here.
    if (offscreenImage == null) return;
    if (offscreenGraphics == null) return; // detected during testing
    // clear the image.
    offscreenGraphics.clearRect(0, 0, this.getWidth(), this.getHeight());
    /** Draw all shapes. Note selected shape is not part of the model. */
    //I can assert that the board's graphics is empty at this point
    TileView factory = new TileView();
    // NOTE(review): the board size is hard-coded at 12x12 here (and implied
    // by the 384x384 preferred size) — confirm against Board's dimensions.
    for (int row = 0; row < 12; row++) {
        for(int col = 0; col < 12; col++){
            AbstractTile t = b.getTileAt(row, col);
            factory.drawToBoard(offscreenGraphics, t);
        }
        //I can assert that the board's graphics is FULL of tiles at this point
    }
    //paintShape(offscreenGraphics, s);
}
/**
* Ensure image available prepares the offscreen image for painting if it is currently missing from
* the object (null). It gets called only INSIDE this class and is called in the paintComponent()
* method
*/
// Lazily creates the offscreen buffer (and caches the on-screen Graphics)
// on first paint, then populates it via redraw().
// NOTE(review): Component.createImage can return null while the component
// is not displayable; that case would NPE on getGraphics() — confirm the
// call order guarantees a displayable component here.
void ensureImageAvailable(Graphics g) {
    if (offscreenImage == null) {
        offscreenImage = this.createImage(this.getWidth(), this.getHeight());
        offscreenGraphics = offscreenImage.getGraphics();
        canvasGraphics = g;
        redraw();
    }
}
/**
* This is the method called by .paint(). It supers the constructor and then called ensureImageAvailable
* to make sure there is an offscreen image to bring onto the board. THAT'S why redraw() is called before
* paint(), so that way the offscreen is ready to go before it is brought onscreen.
*
* After that, the paint method draws the image as it is, to this panel with g.drawImage.
* It then draws every one of his shapes over that image.
*
* To Draw within a JPanel, you need to have a protected void method of this name.
* Note that the first operation of this method MUST BE to invoke super.paintComponent(g)
*
* @param g
*/
@Override
protected void paintComponent(Graphics g) {
    super.paintComponent(g);
    // Make sure the offscreen buffer exists, then blit it to the screen
    // scaled to the current component size.
    ensureImageAvailable(g);
    g.drawImage(offscreenImage, 0, 0, getWidth(), getHeight(), this);
    /**
     * TODO Try to draw the pieceOutline here. Call the helper method in the works
     * below. MouseDragged event probably wants to call paint(), NOT redraw since
     * the background image wouldn't change?
     */
    // if(pieceImage != null){
    // paint the piece
    // draw selected on top of offscreen image, since not part of the model.
    // Shape selected = model.getSelected();
    // if (selected != null) {
    // paintShape(g, selected);
}
// /** Paint the shape right to the screen */
// public void paintShape(Shape s) {
// paintShape(canvasGraphics, s);
// /** Paint the shape into the given graphics context using its drawer. */
// public void paintShape(Graphics g, Shape s) {
// if (g == null) { return; }
// ShapeDrawer sd = s.getDrawer();
// sd.drawShape(g, s);
/**
* Method found inside ShapeDrawer. We should have either the model draw itself
* or the factory do this. Basically pass the board around and let everyone draw
* themselves onto the graphics object
*/
/**
* Draws shape of word.
*
* Fill in background and draw outline on top. Finally, draw the text.
* Subclass determines how to draw accordingly.
*
* Strategy design pattern being applied here.
*
* @param g
* @param s
*/
// public void drawShape(Graphics g, Shape s) {
// fillBox(g, s);
// outlineBox(g, s);
// drawText(g, s);
/**
* Need one more draw method that handles drawing a piece at a given mouse object
* or x/y coordinate (which ever the controller wants to pass to this method when
* it is attempting a drag or placement)
*
* It would need to consume the piece attempting to be drawn, to get an idea
* of where each tile is relative to the ORIGIN tile (the origin tile would be
* drawn around the mouse. The rest of the tiles would be extrapolated from that)
*
* The tiles outlines would be red if the entire piece doesn't fit on the board,
* green if they all do. So we would want a way to draw all tiles possible and then
* return true if they all could fit, false if not. That way we know what color to
* set.
*/
// Currently a no-op: intended to render a drag preview of the piece at the
// mouse position (see the design notes in the comments above).
public void drawAtMouse(int x, int y, Piece p){
    /**
     * TODO
     * P is to get the shape. X/Y is to get the mouse position
     * 1) extrapolate the X/Y of the mouse to the topLeft corner of origin tile
     * 2) Convert top left into a row/Col. Ask the board if this piece would collide with an
     * empty tile or another piece on the board. Set the color of outline based on boolean answer
     * 2) Prepare the graphics object offscreen (offscreenPiece)
     * 3) Knowing the top left of the origin tile, draw all 6 tiles to the graphics object w/ correct color
     * 4) Move the offscreen image onto the onscreenGraphics
     * 5) this.paint()
     * 6) reset the pieceImage to be null
     */
}

// No-op stub: intended to register the controller's motion listener.
void setMouseMotionAdapter(MouseMotionAdapter ma){
    //TODO Fill Stub - setMouseMotionAdapter
}

// No-op stub: intended to register the controller's click listener.
void setMouseAdapter(MouseAdapter ma){
    //TODO Fill Stub - setMouseAdapter
}
} |
package org.apereo.cas.config;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.services.web.CasThymeleafLoginFormDirector;
import org.apereo.cas.services.web.CasThymeleafOutputTemplateHandler;
import org.apereo.cas.services.web.CasThymeleafViewResolverConfigurer;
import org.apereo.cas.services.web.ThemeBasedViewResolver;
import org.apereo.cas.services.web.ThemeViewResolver;
import org.apereo.cas.services.web.ThemeViewResolverFactory;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.validation.CasProtocolViewFactory;
import org.apereo.cas.web.view.CasProtocolThymeleafViewFactory;
import org.apereo.cas.web.view.ChainingTemplateViewResolver;
import org.apereo.cas.web.view.RestfulUrlTemplateResolver;
import org.apereo.cas.web.view.ThemeFileTemplateResolver;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.jooq.lambda.Unchecked;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.thymeleaf.ThymeleafAutoConfiguration;
import org.springframework.boot.autoconfigure.thymeleaf.ThymeleafProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.OrderComparator;
import org.springframework.util.ResourceUtils;
import org.springframework.web.servlet.ThemeResolver;
import org.springframework.web.servlet.ViewResolver;
import org.thymeleaf.dialect.IPostProcessorDialect;
import org.thymeleaf.postprocessor.IPostProcessor;
import org.thymeleaf.postprocessor.PostProcessor;
import org.thymeleaf.spring5.SpringTemplateEngine;
import org.thymeleaf.spring5.view.ThymeleafViewResolver;
import org.thymeleaf.templatemode.TemplateMode;
import org.thymeleaf.templateresolver.AbstractConfigurableTemplateResolver;
import org.thymeleaf.templateresolver.AbstractTemplateResolver;
import org.thymeleaf.templateresolver.FileTemplateResolver;
import java.util.List;
import java.util.Set;
/**
* This is {@link CasThymeleafConfiguration}.
*
* @author Misagh Moayyed
* @since 6.2.0
*/
@Configuration(value = "casThymeleafConfiguration", proxyBeanMethods = false)
@EnableConfigurationProperties(CasConfigurationProperties.class)
@ConditionalOnClass(SpringTemplateEngine.class)
@ImportAutoConfiguration(ThymeleafAutoConfiguration.class)
public class CasThymeleafConfiguration {
// CAS settings; supplies view template prefixes and the optional REST template endpoint.
@Autowired
private CasConfigurationProperties casProperties;
@Autowired
private ConfigurableApplicationContext applicationContext;
// Theme resolver bean defined elsewhere; used to build theme-based view resolution.
@Autowired
@Qualifier("themeResolver")
private ObjectProvider<ThemeResolver> themeResolver;
// The Thymeleaf view resolver auto-configured by Spring Boot; used as the template to clone.
@Autowired
@Qualifier("thymeleafViewResolver")
private ObjectProvider<ThymeleafViewResolver> thymeleafViewResolver;
// All registered configurers that customize the cloned Thymeleaf view resolver.
@Autowired
private ObjectProvider<List<CasThymeleafViewResolverConfigurer>> thymeleafViewResolverConfigurers;
@Autowired
private ObjectProvider<SpringTemplateEngine> springTemplateEngine;
@Autowired
private ObjectProvider<ThymeleafProperties> thymeleafProperties;
/**
 * Builds a chain of template resolvers. For every configured template prefix the chain
 * gains, in order: an optional REST-backed resolver (only when a REST url is configured),
 * a theme-aware file resolver rooted at {@code <prefix>/themes/<theme>/}, and a plain
 * file resolver rooted at the prefix itself.
 *
 * @return the initialized chaining template resolver
 */
@Bean
@RefreshScope
public AbstractTemplateResolver chainingTemplateViewResolver() {
val chain = new ChainingTemplateViewResolver();
val templatePrefixes = casProperties.getView().getTemplatePrefixes();
templatePrefixes.forEach(Unchecked.consumer(prefix -> {
// Normalize the prefix into a canonical filesystem path ending with '/'.
val prefixPath = ResourceUtils.getFile(prefix).getCanonicalPath();
val viewPath = StringUtils.appendIfMissing(prefixPath, "/");
val rest = casProperties.getView().getRest();
if (StringUtils.isNotBlank(rest.getUrl())) {
val url = new RestfulUrlTemplateResolver(casProperties);
configureTemplateViewResolver(url);
chain.addResolver(url);
}
// Theme-specific templates; '%s' is substituted with the active theme name.
val theme = new ThemeFileTemplateResolver(casProperties);
configureTemplateViewResolver(theme);
theme.setPrefix(viewPath + "themes/%s/");
chain.addResolver(theme);
// Fallback: plain files directly under the prefix.
val file = new FileTemplateResolver();
configureTemplateViewResolver(file);
file.setPrefix(viewPath);
chain.addResolver(file);
}));
chain.initialize();
return chain;
}
/**
 * Default configurer exposing the CAS settings to all Thymeleaf views as the
 * static variables {@code cas} and {@code casProperties}. Runs at order 0.
 */
@ConditionalOnMissingBean(name = "casPropertiesThymeleafViewResolverConfigurer")
@Bean
@RefreshScope
public CasThymeleafViewResolverConfigurer casPropertiesThymeleafViewResolverConfigurer() {
return new CasThymeleafViewResolverConfigurer() {
@Override
public int getOrder() {
return 0;
}
@Override
public void configureThymeleafViewResolver(final ThymeleafViewResolver thymeleafViewResolver) {
thymeleafViewResolver.addStaticVariable("cas", casProperties);
thymeleafViewResolver.addStaticVariable("casProperties", casProperties);
}
};
}
/**
 * Theme-based view resolver for registered services, ordered just before the
 * Thymeleaf view resolver so theme-specific views take precedence.
 */
@ConditionalOnMissingBean(name = "registeredServiceViewResolver")
@Bean
@Autowired
@RefreshScope
public ViewResolver registeredServiceViewResolver(@Qualifier("themeViewResolverFactory") final ThemeViewResolverFactory themeViewResolverFactory) {
val resolver = new ThemeBasedViewResolver(this.themeResolver.getObject(), themeViewResolverFactory);
resolver.setOrder(thymeleafViewResolver.getObject().getOrder() - 1);
return resolver;
}
@ConditionalOnMissingBean(name = "casThymeleafLoginFormDirector")
@Bean
@RefreshScope
public CasThymeleafLoginFormDirector casThymeleafLoginFormDirector() {
return new CasThymeleafLoginFormDirector();
}
/**
 * Factory producing per-theme view resolvers, backed by a non-caching clone of
 * the Thymeleaf view resolver so theme switches are reflected immediately.
 */
@ConditionalOnMissingBean(name = "themeViewResolverFactory")
@Bean
@RefreshScope
public ThemeViewResolverFactory themeViewResolverFactory() {
val factory = new ThemeViewResolver.Factory(nonCachingThymeleafViewResolver(), thymeleafProperties.getObject());
factory.setApplicationContext(applicationContext);
return factory;
}
@ConditionalOnMissingBean(name = "casProtocolViewFactory")
@Bean
@RefreshScope
public CasProtocolViewFactory casProtocolViewFactory() {
return new CasProtocolThymeleafViewFactory(this.springTemplateEngine.getObject(), thymeleafProperties.getObject());
}
/**
 * Clones the auto-configured Thymeleaf view resolver, copying all of its settings
 * but disabling the view cache, and registers a one-time post-processor dialect
 * that installs {@link CasThymeleafOutputTemplateHandler} on the shared engine.
 * Finally applies every {@link CasThymeleafViewResolverConfigurer} in order.
 */
private ThymeleafViewResolver nonCachingThymeleafViewResolver() {
val r = new ThymeleafViewResolver();
val thymeleafResolver = this.thymeleafViewResolver.getObject();
r.setAlwaysProcessRedirectAndForward(thymeleafResolver.getAlwaysProcessRedirectAndForward());
r.setApplicationContext(thymeleafResolver.getApplicationContext());
r.setCacheUnresolved(thymeleafResolver.isCacheUnresolved());
r.setCharacterEncoding(thymeleafResolver.getCharacterEncoding());
r.setContentType(thymeleafResolver.getContentType());
r.setExcludedViewNames(thymeleafResolver.getExcludedViewNames());
r.setOrder(thymeleafResolver.getOrder());
r.setRedirectContextRelative(thymeleafResolver.isRedirectContextRelative());
r.setRedirectHttp10Compatible(thymeleafResolver.isRedirectHttp10Compatible());
r.setStaticVariables(thymeleafResolver.getStaticVariables());
r.setForceContentType(thymeleafResolver.getForceContentType());
val engine = SpringTemplateEngine.class.cast(thymeleafResolver.getTemplateEngine());
// Only register the dialect before the engine initializes; dialects cannot be
// added afterwards, and this also keeps repeated calls from re-registering it.
if (!engine.isInitialized()) {
engine.addDialect(new IPostProcessorDialect() {
@Override
public int getDialectPostProcessorPrecedence() {
return Integer.MAX_VALUE;
}
@Override
public Set<IPostProcessor> getPostProcessors() {
return CollectionUtils.wrapSet(new PostProcessor(TemplateMode.parse(thymeleafProperties.getObject().getMode()),
CasThymeleafOutputTemplateHandler.class, Integer.MAX_VALUE));
}
@Override
public String getName() {
return CasThymeleafOutputTemplateHandler.class.getSimpleName();
}
});
}
r.setTemplateEngine(engine);
r.setViewNames(thymeleafResolver.getViewNames());
// The whole point of this clone: never cache resolved views.
r.setCache(false);
thymeleafViewResolverConfigurers.getObject().stream()
.sorted(OrderComparator.INSTANCE)
.forEach(configurer -> configurer.configureThymeleafViewResolver(r));
return r;
}
/**
 * Applies the common Spring Boot Thymeleaf settings (cache flag, encoding,
 * existence check, mode, suffix) to a freshly created template resolver.
 */
private void configureTemplateViewResolver(final AbstractConfigurableTemplateResolver resolver) {
val props = thymeleafProperties.getObject();
resolver.setCacheable(props.isCache());
resolver.setCharacterEncoding(props.getEncoding().name());
resolver.setCheckExistence(props.isCheckTemplateLocation());
resolver.setForceTemplateMode(true);
resolver.setOrder(0);
resolver.setSuffix(".html");
resolver.setTemplateMode(props.getMode());
}
}
package de.bitdroid.flooding.ods;
import java.util.ArrayList;
import android.content.ContentProvider;
import android.content.ContentProviderOperation;
import android.content.ContentProviderResult;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.OperationApplicationException;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.os.Bundle;
import de.bitdroid.flooding.utils.Log;
public final class OdsContentProvider extends ContentProvider {
private OdsDatabase odsDatabase;
@Override
public boolean onCreate() {
odsDatabase = new OdsDatabase(getContext());
return true;
}
@Override
public String getType(Uri uri) {
return null;
}
@Override
public Cursor query(
Uri uri,
String[] projection,
String selection,
String[] selectionArgs,
String sortOrder) {
OdsSource source = OdsSource.fromUri(uri);
// check for manual sync reqeust
if (OdsSource.isSyncUri(uri)) {
Bundle settingsBundle = new Bundle();
settingsBundle.putBoolean(ContentResolver.SYNC_EXTRAS_MANUAL, true);
settingsBundle.putBoolean(ContentResolver.SYNC_EXTRAS_EXPEDITED, true);
ContentResolver.requestSync(
OdsSource.ACCOUNT,
OdsSource.AUTHORITY,
settingsBundle);
}
// check if source is being monitored
if (!OdsSourceManager.getInstance(getContext()).isSourceRegisteredForPeriodicSync(source)) {
Log.warning("Requested content for unregistered source");
return null;
}
// create table
String tableName = source.toSqlTableName();
SQLiteDatabase database = odsDatabase.getWritableDatabase();
odsDatabase.addSource(database, tableName, source);
// query db
SQLiteQueryBuilder queryBuilder = new SQLiteQueryBuilder();
queryBuilder.setTables(tableName);
Cursor cursor = queryBuilder.query(
odsDatabase.getReadableDatabase(),
projection,
selection,
selectionArgs,
null, null,
sortOrder);
cursor.setNotificationUri(getContext().getContentResolver(), source.toUri());
return cursor;
}
@Override
public Uri insert(Uri uri, ContentValues data) {
OdsSource source = OdsSource.fromUri(uri);
String tableName = source.toSqlTableName();
SQLiteDatabase database = odsDatabase.getWritableDatabase();
odsDatabase.addSource(database, tableName, source);
long id = database.insert(
tableName,
null,
data);
getContext().getContentResolver().notifyChange(uri, null);
return uri.buildUpon().appendPath(String.valueOf(id)).build();
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
throw new UnsupportedOperationException("under construction");
}
@Override
public int update(
Uri uri,
ContentValues values,
String selection,
String[] selectionArgs) {
throw new UnsupportedOperationException("under construction");
}
@Override
public ContentProviderResult[] applyBatch(ArrayList<ContentProviderOperation> operations)
throws OperationApplicationException {
SQLiteDatabase db = odsDatabase.getWritableDatabase();
db.beginTransaction();
try {
ContentProviderResult[] result = super.applyBatch(operations);
db.setTransactionSuccessful();
return result;
} finally {
db.endTransaction();
}
}
} |
package de.bitdroid.flooding.ods;
import java.util.ArrayList;
import android.content.ContentProvider;
import android.content.ContentProviderOperation;
import android.content.ContentProviderResult;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.OperationApplicationException;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.os.Bundle;
import de.bitdroid.flooding.utils.Log;
/**
 * Content provider exposing locally synced ODS (open data service) sources.
 * Queries are answered from the local SQLite database; querying a "sync" URI
 * additionally triggers a manual sync. Deletes and updates are unimplemented.
 */
public final class OdsContentProvider extends ContentProvider {
// Lazily opened SQLite helper shared by all operations.
private OdsDatabase odsDatabase;
@Override
public boolean onCreate() {
odsDatabase = new OdsDatabase(getContext());
return true;
}
@Override
public String getType(Uri uri) {
// MIME types are not exposed for ODS sources.
return null;
}
@Override
public Cursor query(
Uri uri,
String[] projection,
String selection,
String[] selectionArgs,
String sortOrder) {
OdsSource source = OdsSource.fromUri(uri);
// check for manual sync request: a sync URI triggers an expedited sync
if (OdsSource.isSyncUri(uri)) {
Bundle settingsBundle = new Bundle();
settingsBundle.putBoolean(ContentResolver.SYNC_EXTRAS_MANUAL, true);
settingsBundle.putBoolean(ContentResolver.SYNC_EXTRAS_EXPEDITED, true);
ContentResolver.requestSync(
OdsSource.ACCOUNT,
OdsSource.AUTHORITY,
settingsBundle);
}
// check if source is being monitored; unregistered sources are not served
if (!OdsSourceManager.getInstance(getContext()).isSourceRegisteredForPeriodicSync(source)) {
Log.warning("Requested content for unregistered source");
return null;
}
// check if values have been inserted: probe sqlite_master for the table,
// returning null (instead of failing) when no sync has created it yet
String tableName = source.toSqlTableName();
Cursor tableCheckCursor = null;
try {
tableCheckCursor = odsDatabase.getReadableDatabase() .rawQuery(
"SELECT name FROM sqlite_master WHERE type=? AND name=?",
new String[] { "table", tableName });
if (tableCheckCursor.getCount() < 1) return null;
} finally {
// always release the probe cursor, even on the early return above
if (tableCheckCursor != null) tableCheckCursor.close();
}
// query db
SQLiteQueryBuilder queryBuilder = new SQLiteQueryBuilder();
queryBuilder.setTables(tableName);
Cursor cursor = queryBuilder.query(
odsDatabase.getReadableDatabase(),
projection,
selection,
selectionArgs,
null, null,
sortOrder);
// observers of the queried URI are notified when its data changes
cursor.setNotificationUri(getContext().getContentResolver(), uri);
return cursor;
}
@Override
public Uri insert(Uri uri, ContentValues data) {
OdsSource source = OdsSource.fromUri(uri);
String tableName = source.toSqlTableName();
// create the backing table on demand before inserting
SQLiteDatabase database = odsDatabase.getWritableDatabase();
odsDatabase.addSource(database, tableName, source);
long id = database.insert(
tableName,
null,
data);
getContext().getContentResolver().notifyChange(uri, null);
// returned URI carries the new row id as its last path segment
return uri.buildUpon().appendPath(String.valueOf(id)).build();
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
throw new UnsupportedOperationException("under construction");
}
@Override
public int update(
Uri uri,
ContentValues values,
String selection,
String[] selectionArgs) {
throw new UnsupportedOperationException("under construction");
}
@Override
public ContentProviderResult[] applyBatch(ArrayList<ContentProviderOperation> operations)
throws OperationApplicationException {
// run the whole batch inside one transaction so it applies atomically
SQLiteDatabase db = odsDatabase.getWritableDatabase();
db.beginTransaction();
try {
ContentProviderResult[] result = super.applyBatch(operations);
db.setTransactionSuccessful();
return result;
} finally {
db.endTransaction();
}
}
}
package japsadev.bio.np.phage;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import japsa.bio.np.ErrorCorrection;
import japsa.seq.Alphabet;
import japsa.seq.Sequence;
import japsa.seq.SequenceOutputStream;
import japsa.seq.SequenceReader;
public class CDHitExtract {
public static void main(String[] args) throws IOException, InterruptedException {
// TODO Auto-generated method stub
ArrayList<Sequence> representSeq = SequenceReader.readAll("/home/sonhoanghguyen/Projects/Phage/paper/poa-consensus/allNanopore.fasta", Alphabet.DNA());
HashMap<String,Sequence> map = new HashMap<String,Sequence>();
for(Sequence seq:representSeq){
map.put(seq.getName(), seq);
}
String aligner = "clustal";
BufferedReader pathReader = new BufferedReader(new FileReader("/home/sonhoanghguyen/Projects/Phage/paper/poa-consensus/nanopore.clstr"));
SequenceOutputStream out = SequenceOutputStream.makeOutputStream("/home/sonhoanghguyen/Projects/Phage/paper/poa-consensus/nanopore.fasta");
String s;
//Read contigs from contigs.paths and refer themselves to contigs.fasta
Sequence consensus = new Sequence( Alphabet.DNA(), 10000);
int count = 0;
String seq = "";
ArrayList<Sequence> aGroup=new ArrayList<Sequence>();;
while((s=pathReader.readLine()) != null){
if(s.startsWith(">")){
if(count>0){
//group=map.get(seq);
if(count > 1){
System.out.println("Consensusing group with " + aGroup.size() + " members");
consensus = ErrorCorrection.consensusSequence(aGroup, "grouping", aligner);
}
else
consensus = map.get(seq);
consensus.setName(seq); // name of the CDHit representative sequence, but content is the consensus
consensus.setDesc(aligner+"="+count);
//System.out.println(group.getName() + " : " + group.getDesc());
consensus.writeFasta(out);
}
aGroup = new ArrayList<Sequence>();
count=0;
}else{
count++;
aGroup.add(map.get(s.substring(s.indexOf(">")+1, s.indexOf("..."))));
if(s.contains("*")){
seq = s.substring(s.indexOf(">")+1, s.indexOf("..."));
}
}
}
//last round
if(count>0){
//group=map.get(seq);
if(count > 1){
System.out.println("Consensusing group with " + aGroup.size() + " members");
consensus = ErrorCorrection.consensusSequence(aGroup, "grouping", aligner);
}
else
consensus = map.get(seq);
consensus.setName(seq); // name of the CDHit representative sequence, but content is the consensus
consensus.setDesc(aligner+"="+count);
//System.out.println(group.getName() + " : " + group.getDesc());
consensus.writeFasta(out);
}
pathReader.close();
out.close();
}
} |
package dr.app.beagle.tools;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import beagle.Beagle;
import beagle.BeagleFactory;
import dr.app.beagle.evomodel.sitemodel.BranchSubstitutionModel;
import dr.app.beagle.evomodel.sitemodel.EpochBranchSubstitutionModel;
import dr.app.beagle.evomodel.sitemodel.GammaSiteRateModel;
import dr.app.beagle.evomodel.sitemodel.HomogenousBranchSubstitutionModel;
import dr.app.beagle.evomodel.substmodel.FrequencyModel;
import dr.app.beagle.evomodel.substmodel.HKY;
import dr.app.beagle.evomodel.treelikelihood.BufferIndexHelper;
import dr.evolution.alignment.Alignment;
import dr.evolution.alignment.SimpleAlignment;
import dr.evolution.datatype.Codons;
import dr.evolution.datatype.DataType;
import dr.evolution.datatype.Nucleotides;
import dr.evolution.io.NewickImporter;
import dr.evolution.sequence.Sequence;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evolution.util.Taxon;
import dr.evomodel.branchratemodel.BranchRateModel;
import dr.evomodel.branchratemodel.DefaultBranchRateModel;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.Parameter;
import dr.math.MathUtils;
/**
* @author Filip Bielejec
* @version $Id$
*
*/
public class BeagleSequenceSimulator {
private final boolean DEBUG = false;
// Alignment regions to simulate; each partition carries its own tree/models.
private ArrayList<Partition> partitions;
// Total number of sites (columns) in the final alignment.
private int replications;
// Output alignment, lazily created by the first partition task (see fieldsSet).
private SimpleAlignment simpleAlignment;
// Data type taken from the first partition's frequency model.
private DataType dataType;
private int stateCount;
// Per-taxon simulated states, filled concurrently by the partition tasks.
private ConcurrentHashMap<Taxon, int[]> alignmentMap;
// Guards the one-time initialization of the shared fields above.
// NOTE(review): not synchronized — concurrent tasks could race on it; confirm.
private boolean fieldsSet = false;
/**
 * Creates a simulator over the given partitions.
 *
 * @param partitions   alignment regions to simulate
 * @param replications total number of sites in the resulting alignment
 */
public BeagleSequenceSimulator(ArrayList<Partition> partitions,
int replications
) {
this.partitions = partitions;
this.replications = replications;
alignmentMap = new ConcurrentHashMap<Taxon, int[]>();
}// END: Constructor
// TODO: fix parallel execution
/**
 * Simulates every partition on a fixed thread pool, then assembles the
 * per-taxon site arrays collected in {@code alignmentMap} into one alignment.
 *
 * @return the simulated alignment (empty or partial if a task failed;
 *         exceptions are printed, not rethrown)
 */
public Alignment simulate() {
    try {
        // One worker thread per available core.
        int nThreads = Runtime.getRuntime().availableProcessors();
        ExecutorService executor = Executors.newFixedThreadPool(nThreads);

        List<Callable<Void>> simulatePartitionCallers = new ArrayList<Callable<Void>>();
        int partitionCount = 0;
        for (Partition partition : partitions) {
            if (DEBUG) {
                System.out.println("Simulating for partition " + partitionCount);
            }
            // The original wrapped each partition in a ThreadLocal set/get/remove
            // on this thread, which is a no-op; the partition is passed directly.
            simulatePartitionCallers.add(new simulatePartitionCallable(partition));
            partitionCount++;
        }// END: partitions loop

        // invokeAll blocks until every task has completed, so no busy-wait on
        // isTerminated() is needed afterwards (the old spin loop burned CPU).
        executor.invokeAll(simulatePartitionCallers);
        executor.shutdown();

        if (DEBUG) {
            printHashMap(alignmentMap);
        }

        // Compile the alignment, draining the map as we go.
        Iterator<Entry<Taxon, int[]>> iterator = alignmentMap.entrySet().iterator();
        while (iterator.hasNext()) {
            Entry<Taxon, int[]> pairs = iterator.next();
            simpleAlignment.addSequence(intArray2Sequence(pairs.getKey(), pairs.getValue()));
            iterator.remove();
        }// END: while has next
    } catch (Exception e) {
        e.printStackTrace();
    }// END: try-catch block
    return simpleAlignment;
}// END: simulate
/**
 * Runnable variant of the per-partition simulation task.
 * NOTE(review): duplicates simulatePartitionCallable and simulatePartition
 * almost line for line; consider consolidating into one implementation.
 */
public class simulatePartitionRunnable implements Runnable {
private Partition partition;
private FrequencyModel freqModel;
private BranchSubstitutionModel branchSubstitutionModel;
private Tree treeModel;
private GammaSiteRateModel siteModel;
private int partitionSiteCount;
private NodeRef root;
private simulatePartitionRunnable(Partition partition) {
this.partition = partition;
}// END: Constructor
@Override
public void run() {
try {
treeModel = partition.treeModel;
branchSubstitutionModel = partition.branchSubstitutionModel;
siteModel = partition.siteModel;
freqModel = partition.freqModel;
partitionSiteCount = partition.getPartitionSiteCount();
root = treeModel.getRoot();
// do those only once: lazily create the shared alignment fields.
// NOTE(review): fieldsSet is checked without synchronization — confirm
// this task is not racing another partition task here.
if(!fieldsSet) {
dataType = freqModel.getDataType();
simpleAlignment = new SimpleAlignment();
simpleAlignment.setDataType(dataType);
simpleAlignment.setReportCountStatistics(false);
stateCount = dataType.getStateCount();
fieldsSet = true;
}//END: partitionCount check
// Buffer index helpers for eigen decompositions, matrices and partials
int eigenCount = branchSubstitutionModel.getEigenCount();
BufferIndexHelper eigenBufferHelper = new BufferIndexHelper(eigenCount, 0);
int nodeCount = treeModel.getNodeCount();
BufferIndexHelper matrixBufferHelper = new BufferIndexHelper(nodeCount, 0);
int tipCount = treeModel.getExternalNodeCount();
BufferIndexHelper partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);
// load beagle instance sized for this partition
Beagle beagle = loadBeagleInstance(partition, eigenBufferHelper,
matrixBufferHelper, partialBufferHelper);
// gamma category rates
double[] categoryRates = siteModel.getCategoryRates();
beagle.setCategoryRates(categoryRates);
// weights for gamma category rates
double[] categoryWeights = siteModel.getCategoryProportions();
beagle.setCategoryWeights(0, categoryWeights);
// proportion of sites in each category; draw one category per site
double[] categoryProbs = siteModel.getCategoryProportions();
int[] category = new int[partitionSiteCount];
for (int i = 0; i < partitionSiteCount; i++) {
category[i] = MathUtils.randomChoicePDF(categoryProbs);
}
int[] parentSequence = new int[partitionSiteCount];
// set ancestral sequence for partition if it exists,
// otherwise draw the root states from the equilibrium frequencies
if (partition.hasAncestralSequence) {
parentSequence = sequence2intArray(partition.ancestralSequence);
} else {
double[] frequencies = freqModel.getFrequencies();
for (int i = 0; i < partitionSiteCount; i++) {
parentSequence[i] = MathUtils.randomChoicePDF(frequencies);
}
}// END: ancestral sequence check
// push every eigen decomposition into beagle
for (int i = 0; i < eigenCount; i++) {
eigenBufferHelper.flipOffset(i);
branchSubstitutionModel.setEigenDecomposition(beagle,
i,
eigenBufferHelper,
0
);
}// END: i loop
int categoryCount = siteModel.getCategoryCount();
// simulate states down the tree starting from the root sequence
traverse(beagle, partition, root, parentSequence, category, categoryCount, matrixBufferHelper, eigenBufferHelper);
} catch (RuntimeException e) {
e.printStackTrace();
}// END: try-catch block
}// END: run
}//END: simulatePartitionRunnable class
/**
 * Per-partition simulation task submitted to the executor by simulate().
 * Sets up a BEAGLE instance for the partition, draws per-site rate categories
 * and a root sequence, then recursively simulates states down the tree.
 * NOTE(review): near-duplicate of simulatePartitionRunnable/simulatePartition.
 */
private class simulatePartitionCallable implements Callable<Void> {
private Partition partition;
private FrequencyModel freqModel;
private BranchSubstitutionModel branchSubstitutionModel;
private Tree treeModel;
private GammaSiteRateModel siteModel;
private int partitionSiteCount;
private NodeRef root;
private simulatePartitionCallable(Partition partition) {
this.partition = partition;
}// END: Constructor
public Void call() throws Exception {
treeModel = partition.treeModel;
branchSubstitutionModel = partition.branchSubstitutionModel;
siteModel = partition.siteModel;
freqModel = partition.freqModel;
partitionSiteCount = partition.getPartitionSiteCount();
root = treeModel.getRoot();
// do those only once: lazily create the shared alignment fields.
// NOTE(review): fieldsSet is read/written without synchronization — confirm
// partition tasks cannot race here.
if(!fieldsSet) {
dataType = freqModel.getDataType();
simpleAlignment = new SimpleAlignment();
simpleAlignment.setDataType(dataType);
simpleAlignment.setReportCountStatistics(false);
stateCount = dataType.getStateCount();
fieldsSet = true;
}//END: partitionCount check
// Buffer index helpers for eigen decompositions, matrices and partials
int eigenCount = branchSubstitutionModel.getEigenCount();
BufferIndexHelper eigenBufferHelper = new BufferIndexHelper(eigenCount, 0);
int nodeCount = treeModel.getNodeCount();
BufferIndexHelper matrixBufferHelper = new BufferIndexHelper(nodeCount, 0);
int tipCount = treeModel.getExternalNodeCount();
BufferIndexHelper partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);
// load beagle instance sized for this partition
Beagle beagle = loadBeagleInstance(partition, eigenBufferHelper,
matrixBufferHelper, partialBufferHelper);
// gamma category rates
double[] categoryRates = siteModel.getCategoryRates();
beagle.setCategoryRates(categoryRates);
// weights for gamma category rates
double[] categoryWeights = siteModel.getCategoryProportions();
beagle.setCategoryWeights(0, categoryWeights);
// proportion of sites in each category; draw one category per site
double[] categoryProbs = siteModel.getCategoryProportions();
int[] category = new int[partitionSiteCount];
for (int i = 0; i < partitionSiteCount; i++) {
category[i] = MathUtils.randomChoicePDF(categoryProbs);
}
int[] parentSequence = new int[partitionSiteCount];
// set ancestral sequence for partition if it exists,
// otherwise draw the root states from the equilibrium frequencies
if (partition.hasAncestralSequence) {
parentSequence = sequence2intArray(partition.ancestralSequence);
} else {
double[] frequencies = freqModel.getFrequencies();
for (int i = 0; i < partitionSiteCount; i++) {
parentSequence[i] = MathUtils.randomChoicePDF(frequencies);
}
}// END: ancestral sequence check
// push every eigen decomposition into beagle
for (int i = 0; i < eigenCount; i++) {
eigenBufferHelper.flipOffset(i);
branchSubstitutionModel.setEigenDecomposition(beagle,
i,
eigenBufferHelper,
0
);
}// END: i loop
int categoryCount = siteModel.getCategoryCount();
// simulate states down the tree starting from the root sequence
traverse(beagle, partition, root, parentSequence, category, categoryCount, matrixBufferHelper, eigenBufferHelper);
return null;
}//END: call
}// END: simulatePartitionCallable class
/**
 * Sequential (single-threaded) simulation of one partition; same algorithm as
 * the Callable/Runnable task classes above but invoked directly.
 * NOTE(review): currently unreferenced from simulate(); kept as the serial path.
 */
private void simulatePartition(Partition partition) {
TreeModel treeModel = partition.treeModel;
BranchSubstitutionModel branchSubstitutionModel = partition.branchSubstitutionModel;
GammaSiteRateModel siteModel = partition.siteModel;
FrequencyModel freqModel = partition.freqModel;
int partitionSiteCount = partition.getPartitionSiteCount();
NodeRef root = treeModel.getRoot();
// do those only once: lazily create the shared alignment fields
if(!fieldsSet) {
dataType = freqModel.getDataType();
simpleAlignment = new SimpleAlignment();
simpleAlignment.setDataType(dataType);
simpleAlignment.setReportCountStatistics(false);
stateCount = dataType.getStateCount();
fieldsSet = true;
}//END: partitionCount check
// Buffer index helpers for eigen decompositions, matrices and partials
int eigenCount = branchSubstitutionModel.getEigenCount();
BufferIndexHelper eigenBufferHelper = new BufferIndexHelper(eigenCount, 0);
int nodeCount = treeModel.getNodeCount();
BufferIndexHelper matrixBufferHelper = new BufferIndexHelper(nodeCount, 0);
int tipCount = treeModel.getExternalNodeCount();
BufferIndexHelper partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);
// load beagle instance sized for this partition
Beagle beagle = loadBeagleInstance(partition, eigenBufferHelper,
matrixBufferHelper, partialBufferHelper);
// gamma category rates
double[] categoryRates = siteModel.getCategoryRates();
beagle.setCategoryRates(categoryRates);
// weights for gamma category rates
double[] categoryWeights = siteModel.getCategoryProportions();
beagle.setCategoryWeights(0, categoryWeights);
// proportion of sites in each category; draw one category per site
double[] categoryProbs = siteModel.getCategoryProportions();
int[] category = new int[partitionSiteCount];
for (int i = 0; i < partitionSiteCount; i++) {
category[i] = MathUtils.randomChoicePDF(categoryProbs);
}
int[] parentSequence = new int[partitionSiteCount];
// set ancestral sequence for partition if it exists,
// otherwise draw the root states from the equilibrium frequencies
if (partition.hasAncestralSequence) {
// System.out.println(partition.ancestralSequence.getSequenceString().length());
// System.out.println(partitionSiteCount);
// System.out.println(replications);
parentSequence = sequence2intArray(partition.ancestralSequence);
} else {
double[] frequencies = freqModel.getFrequencies();
for (int i = 0; i < partitionSiteCount; i++) {
parentSequence[i] = MathUtils.randomChoicePDF(frequencies);
}
}// END: ancestral sequence check
// push every eigen decomposition into beagle
for (int i = 0; i < eigenCount; i++) {
eigenBufferHelper.flipOffset(i);
branchSubstitutionModel.setEigenDecomposition(beagle,
i,
eigenBufferHelper,
0
);
}// END: i loop
int categoryCount = siteModel.getCategoryCount();
// simulate states down the tree starting from the root sequence
traverse(beagle, partition, root, parentSequence, category, categoryCount, matrixBufferHelper, eigenBufferHelper);
}// END: simulatePartition
/**
 * Creates a BEAGLE instance sized for one partition: buffers for tip states,
 * partials, eigen decompositions, transition matrices (including any extra
 * buffers the substitution model requires) and scaling.
 *
 * @param partition           partition whose tree/site model dictate the sizes
 * @param eigenBufferHelper   helper tracking eigen decomposition buffers
 * @param matrixBufferHelper  helper tracking transition matrix buffers
 * @param partialBufferHelper helper tracking partial likelihood buffers
 * @return a freshly loaded BEAGLE instance
 */
public Beagle loadBeagleInstance(Partition partition,
        BufferIndexHelper eigenBufferHelper,
        BufferIndexHelper matrixBufferHelper,
        BufferIndexHelper partialBufferHelper
        ) {

    TreeModel tree = partition.treeModel;
    BranchSubstitutionModel substitutionModel = partition.branchSubstitutionModel;
    GammaSiteRateModel sites = partition.siteModel;

    int tips = tree.getExternalNodeCount();
    int patterns = partition.getPartitionSiteCount();  // one pattern per site
    int categories = sites.getCategoryCount();
    int scaleBuffers = tree.getInternalNodeCount() + 1;

    return BeagleFactory.loadBeagleInstance(
            tips,
            partialBufferHelper.getBufferCount(),
            tips,  // compact partials: one buffer per tip
            stateCount,
            patterns,
            eigenBufferHelper.getBufferCount(),
            matrixBufferHelper.getBufferCount() + substitutionModel.getExtraBufferCount(tree),
            categories,
            scaleBuffers,
            new int[] { 0 },  // resource list
            0L,               // preference flags
            0L                // requirement flags
            );
}// END: loadBeagleInstance
/**
 * Recursively simulates child states down the tree. For each child branch the
 * per-category transition matrices are fetched and every site's state is drawn
 * from the row of the parent's state; tip sequences are written into the shared
 * alignmentMap at this partition's site positions (from/to/every).
 */
private void traverse(Beagle beagle,
Partition partition,
NodeRef node,
int[] parentSequence,
int[] category,
int categoryCount,
BufferIndexHelper matrixBufferHelper,
BufferIndexHelper eigenBufferHelper
) {
TreeModel treeModel = partition.treeModel;
int partitionSiteCount = partition.getPartitionSiteCount();
for (int iChild = 0; iChild < treeModel.getChildCount(node); iChild++) {
NodeRef child = treeModel.getChild(node, iChild);
int[] partitionSequence = new int[partitionSiteCount];
// scratch row: transition probabilities out of the parent state
double[] cProb = new double[stateCount];
double[][] probabilities = getTransitionProbabilities(beagle, partition, child, categoryCount, stateCount, matrixBufferHelper, eigenBufferHelper);
// System.out.println(partition.ancestralSequence.getSequenceString().length());
// System.out.println(partitionSiteCount);
// draw each site's child state from the parent state's matrix row,
// using the rate category assigned to that site
for (int i = 0; i < partitionSiteCount; i++) {
System.arraycopy(probabilities[category[i]], parentSequence[i] * stateCount, cProb, 0, stateCount);
partitionSequence[i] = MathUtils.randomChoicePDF(cProb);
}
// leaf node: record the simulated sequence for this taxon
if (treeModel.getChildCount(child) == 0) {
if (DEBUG) {
EpochBranchSubstitutionModel.printArray(partitionSequence);
}
Taxon taxon = treeModel.getNodeTaxon(child);
if (alignmentMap.containsKey(taxon)) {
// merge this partition's sites into the existing full-length sequence
int j = 0;
for (int i = partition.from; i <= partition.to; i += partition.every) {
alignmentMap.get(taxon)[i] = partitionSequence[j];
j++;
}// END: i loop
} else {
// first partition for this taxon: allocate a full-length sequence,
// MAX_VALUE marking positions no partition has filled yet.
// NOTE(review): assumes partition.to < replications — confirm callers.
int[] sequence = new int[replications];
Arrays.fill(sequence, Integer.MAX_VALUE);
int j = 0;
for (int i = partition.from; i <= partition.to; i += partition.every) {
sequence[i] = partitionSequence[j];
j++;
}// END: i loop
alignmentMap.put(taxon, sequence);
}// END: key check
} //END: tip node check
// recurse with the child's freshly simulated sequence as the new parent
traverse(beagle, partition, treeModel.getChild(node, iChild), partitionSequence, category, categoryCount, matrixBufferHelper, eigenBufferHelper);
}// END: child nodes loop
}// END: traverse
/**
 * Computes the per-rate-category transition probability matrices for the branch
 * above {@code node}. BEAGLE returns the matrices concatenated category-major:
 * each category occupies {@code stateCount * stateCount} doubles.
 *
 * @return probabilities[category][fromState * stateCount + toState]
 * @throws RuntimeException if the scaled branch length is negative
 */
private double[][] getTransitionProbabilities(Beagle beagle,
        Partition partition,
        NodeRef node,
        int categoryCount,
        int stateCount,
        BufferIndexHelper matrixBufferHelper,
        BufferIndexHelper eigenBufferHelper
        ) {

    double[][] probabilities = new double[categoryCount][stateCount * stateCount];

    BranchSubstitutionModel branchSubstitutionModel = partition.branchSubstitutionModel;
    TreeModel treeModel = partition.treeModel;
    BranchRateModel branchRateModel = partition.branchRateModel;

    int nodeNum = node.getNumber();
    matrixBufferHelper.flipOffset(nodeNum);
    int branchIndex = matrixBufferHelper.getOffsetIndex(nodeNum);
    int eigenIndex = branchSubstitutionModel.getBranchIndex(treeModel, node, branchIndex);
    int count = 1;

    // Branch length in substitution units: time * rate.
    double branchRate = branchRateModel.getBranchRate(treeModel, node);
    double branchTime = treeModel.getBranchLength(node) * branchRate;
    if (branchTime < 0.0) {
        throw new RuntimeException("Negative branch length: " + branchTime);
    }

    branchSubstitutionModel.updateTransitionMatrices(beagle,
            eigenIndex,
            eigenBufferHelper,
            new int[] { branchIndex },
            null,
            null,
            new double[] { branchTime },
            count
            );

    double transitionMatrix[] = new double[categoryCount * stateCount * stateCount];
    beagle.getTransitionMatrix(branchIndex,
            transitionMatrix
            );

    for (int i = 0; i < categoryCount; i++) {
        // BUG FIX: category i's matrix starts at i * stateCount * stateCount
        // (each category spans stateCount^2 doubles); the previous offset of
        // i * stateCount copied overlapping, wrong slices for every category > 0.
        System.arraycopy(transitionMatrix, i * stateCount * stateCount, probabilities[i], 0, stateCount * stateCount);
    }

    return probabilities;
}// END: getTransitionProbabilities
/**
 * Converts a simulated state array into a Sequence for the given taxon.
 * Codon states expand to their three-letter triplet; all other data types
 * map each state to its single-character code.
 */
private Sequence intArray2Sequence(Taxon taxon, int[] seq) {
    StringBuilder letters = new StringBuilder();
    boolean isCodon = dataType instanceof Codons;
    for (int site = 0; site < replications; site++) {
        if (isCodon) {
            letters.append(dataType.getTriplet(seq[site]));
        } else {
            letters.append(dataType.getCode(seq[site]));
        }
    }
    return new Sequence(taxon, letters.toString());
}// END: intArray2Sequence
/**
 * Converts a Sequence into an array of state indices of length replications.
 * For codon data, every state consumes three consecutive characters; other
 * data types map one character per state.
 */
private int[] sequence2intArray(Sequence sequence) {
    int[] states = new int[replications];
    if (dataType instanceof Codons) {
        Codons codons = (Codons) dataType;
        int offset = 0;
        for (int site = 0; site < replications; site++) {
            states[site] = codons.getState(sequence.getChar(offset),
                    sequence.getChar(offset + 1), sequence.getChar(offset + 2));
            offset += 3;
        }
    } else {
        for (int site = 0; site < replications; site++) {
            states[site] = dataType.getState(sequence.getChar(site));
        }
    }
    return states;
}// END: sequence2intArray
/**
 * Debug helper: prints every taxon in the map followed by its simulated
 * state-code array.
 *
 * @param hashMap map whose keys are {@code Taxon} and values {@code int[]}
 */
public void printHashMap(ConcurrentHashMap<?, ?> hashMap) {
    for (Entry<?, ?> entry : hashMap.entrySet()) {
        Taxon taxon = (Taxon) entry.getKey();
        int[] sequence = (int[]) entry.getValue();
        System.out.println(taxon.toString());
        EpochBranchSubstitutionModel.printArray(sequence);
    }
}// END: printHashMap
/**
 * Entry point for the simulator self-tests. Only the two-partition
 * scenario is enabled by default; uncomment the others to run them.
 */
public static void main(String[] args) {
    // simulateOnePartition();
    simulateTwoPartitions();
    // simulateThreePartitions();
} // END: main
/**
 * Test case 1: simulates data for a single partition covering the whole
 * alignment (10 sites), seeded with a fixed ancestral sequence.
 * Prints the simulated alignment; exits the JVM with -1 on any failure.
 */
static void simulateOnePartition() {
    try {
        System.out.println("Test case 1: simulateOnePartition");
        int sequenceLength = 10;
        ArrayList<Partition> partitionsList = new ArrayList<Partition>();
        // create tree (fixed three-taxon topology with fixed branch lengths)
        NewickImporter importer = new NewickImporter(
                "(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
        Tree tree = importer.importTree(null);
        TreeModel treeModel = new TreeModel(tree);
        // create Frequency Model (uniform nucleotide frequencies)
        Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25,
                0.25, 0.25 });
        FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE,
                freqs);
        // create substitution model (HKY with kappa = 10)
        Parameter kappa = new Parameter.Default(1, 10);
        HKY hky = new HKY(kappa, freqModel);
        HomogenousBranchSubstitutionModel substitutionModel = new HomogenousBranchSubstitutionModel(
                hky, freqModel);
        // create site model
        GammaSiteRateModel siteRateModel = new GammaSiteRateModel(
                "siteModel");
        // create branch rate model
        BranchRateModel branchRateModel = new DefaultBranchRateModel();
        // create partition spanning every site of the alignment
        Partition partition1 = new Partition(treeModel,
                substitutionModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                0, // from
                sequenceLength - 1, // to
                1 // every
        );
        Sequence ancestralSequence = new Sequence();
        ancestralSequence.appendSequenceString("TCAAGTGAGG");
        partition1.setAncestralSequence(ancestralSequence);
        partitionsList.add(partition1);
        // feed to sequence simulator and generate data
        BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList, sequenceLength);
        System.out.println(simulator.simulate().toString());
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    } // END: try-catch block
}// END: simulateOnePartition
/**
 * Test case 2: simulates data for two contiguous partitions (sites 0-4
 * and 5-10) sharing the same tree and substitution model.
 * Prints the simulated alignment; exits the JVM with -1 on any failure.
 */
static void simulateTwoPartitions() {
    try {
        System.out.println("Test case 2: simulateTwoPartitions");
        int sequenceLength = 11;
        ArrayList<Partition> partitionsList = new ArrayList<Partition>();
        // create tree (fixed three-taxon topology with fixed branch lengths)
        NewickImporter importer = new NewickImporter(
                "(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
        Tree tree = importer.importTree(null);
        TreeModel treeModel = new TreeModel(tree);
        // create Frequency Model (uniform nucleotide frequencies)
        Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
        FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE,
                freqs);
        // create substitution model (HKY with kappa = 10)
        Parameter kappa = new Parameter.Default(1, 10);
        HKY hky = new HKY(kappa, freqModel);
        HomogenousBranchSubstitutionModel substitutionModel = new HomogenousBranchSubstitutionModel(
                hky, freqModel);
        // create site model
        GammaSiteRateModel siteRateModel = new GammaSiteRateModel(
                "siteModel");
        // create branch rate model
        BranchRateModel branchRateModel = new DefaultBranchRateModel();
        // create partition covering sites 0..4
        Partition partition1 = new Partition(treeModel,
                substitutionModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                0, // from
                4, // to
                1 // every
        );
        // create partition covering sites 5..sequenceLength-1
        Partition partition2 = new Partition(treeModel,
                substitutionModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                5, // from
                sequenceLength - 1, // to
                1 // every
        );
        // Sequence ancestralSequence = new Sequence();
        // ancestralSequence.appendSequenceString("TCAAGTGAGG");
        // partition2.setAncestralSequence(ancestralSequence);
        partitionsList.add(partition1);
        partitionsList.add(partition2);
        // feed to sequence simulator and generate data
        BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList, sequenceLength);
        System.out.println(simulator.simulate().toString());
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    } // END: try-catch block
}// END: simulateTwoPartitions
/**
 * Test case 3: simulates data for three interleaved partitions, each
 * taking every third site starting at offsets 0, 1 and 2 (codon-position
 * style striping). Prints the simulated alignment; exits the JVM with -1
 * on any failure.
 */
static void simulateThreePartitions() {
    try {
        System.out.println("Test case 3: simulateThreePartitions");
        int sequenceLength = 10;
        ArrayList<Partition> partitionsList = new ArrayList<Partition>();
        // create tree (fixed three-taxon topology with fixed branch lengths)
        NewickImporter importer = new NewickImporter(
                "(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
        Tree tree = importer.importTree(null);
        TreeModel treeModel = new TreeModel(tree);
        // create Frequency Model (uniform nucleotide frequencies)
        Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
        FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE,
                freqs);
        // create substitution model (HKY with kappa = 10)
        Parameter kappa = new Parameter.Default(1, 10);
        HKY hky = new HKY(kappa, freqModel);
        HomogenousBranchSubstitutionModel substitutionModel = new HomogenousBranchSubstitutionModel(
                hky, freqModel);
        // create site model
        GammaSiteRateModel siteRateModel = new GammaSiteRateModel(
                "siteModel");
        // create branch rate model
        BranchRateModel branchRateModel = new DefaultBranchRateModel();
        // create partition: sites 0, 3, 6, 9
        Partition partition1 = new Partition(treeModel,
                substitutionModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                0, // from
                sequenceLength - 1, // to
                3 // every
        );
        // create partition: sites 1, 4, 7
        Partition partition2 = new Partition(treeModel,
                substitutionModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                1, // from
                sequenceLength - 1, // to
                3 // every
        );
        // create partition: sites 2, 5, 8
        Partition partition3 = new Partition(treeModel,
                substitutionModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                2, // from
                sequenceLength - 1, // to
                3 // every
        );
        partitionsList.add(partition1);
        partitionsList.add(partition2);
        partitionsList.add(partition3);
        // feed to sequence simulator and generate data
        BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList, sequenceLength);
        System.out.println(simulator.simulate().toString());
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    } // END: try-catch block
}// END: simulateThreePartitions
} // END: class |
package edu.kit.informatik.literatur_system;
/**
* Class used to represent a journal entity
* @author JoseNote
* @version %I%, %G%
*/
public final class Journal extends Venue {
private final String name;
private final String publisher;
/**
* Creates a new instance
* @param name the name value
* @param publisher the publisher value
*/
public Journal(final String name, final String publisher) {
super();
this.name = name;
this.publisher = publisher;
}
/**
* Creates a new instance with no publisher
* @param name the name value
*/
public Journal(final String name) {
super();
this.name = name;
publisher = null;
}
@Override
public int hashCode() {
return name.hashCode();
}
@Override
public boolean equals(Object obj) {
return obj instanceof Journal
&& ((Journal) obj).name.compareTo(name) == 0;
}
} |
package edu.mit.streamjit.impl.compiler2;
import static com.google.common.base.Preconditions.*;
import com.google.common.base.Predicate;
import com.google.common.collect.ContiguousSet;
import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Range;
import com.google.common.collect.Sets;
import edu.mit.streamjit.util.Combinators;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Compiler IR for a fused group of workers (what used to be called StreamNode).
 * <p>
 * Groups are ordered and identified by the minimum id of their member actors;
 * {@link #equals(Object)} and {@link #hashCode()} are defined on that id.
 * @author Jeffrey Bosboom <jeffreybosboom@gmail.com>
 * @since 9/22/2013
 */
public class ActorGroup implements Comparable<ActorGroup> {
    // Member actors; each actor's back-pointer is kept in sync by the
    // constructor and remove().
    private ImmutableSet<Actor> actors;
    // Iterations each actor executes per group iteration; null until
    // setSchedule() is called.
    private ImmutableMap<Actor, Integer> schedule;
    private ActorGroup(ImmutableSet<Actor> actors) {
        this.actors = actors;
        for (Actor a : actors)
            a.setGroup(this);
    }
    /** Creates a singleton group for an actor not yet assigned to any group. */
    public static ActorGroup of(Actor actor) {
        assert actor.group() == null : actor.group();
        return new ActorGroup(ImmutableSet.of(actor));
    }
    /** Fuses two groups into a new group containing both groups' actors. */
    public static ActorGroup fuse(ActorGroup first, ActorGroup second) {
        return new ActorGroup(ImmutableSet.<Actor>builder().addAll(first.actors()).addAll(second.actors()).build());
    }
    /**
     * Removes an actor from this group, also dropping its schedule entry.
     * NOTE(review): assumes the schedule has already been initialized;
     * calling this before setSchedule() would NPE — confirm callers.
     */
    public void remove(Actor a) {
        assert actors.contains(a) : a;
        actors = ImmutableSet.copyOf(Sets.difference(actors, ImmutableSet.of(a)));
        schedule = ImmutableMap.copyOf(Maps.difference(schedule, ImmutableMap.of(a, 0)).entriesOnlyOnLeft());
    }
    /** Returns the set of actors fused into this group. */
    public ImmutableSet<Actor> actors() {
        return actors;
    }
    /** Returns true if any member actor is a TokenActor (I/O boundary). */
    public boolean isTokenGroup() {
        for (Actor a : actors())
            if (a instanceof TokenActor)
                return true;
        return false;
    }
    /** Group id: the minimum id among member actors. */
    public int id() {
        return Collections.min(actors()).id();
    }
    /** Returns true if any member actor peeks at its input. */
    public boolean isPeeking() {
        for (Actor a : actors())
            if (a.isPeeking())
                return true;
        return false;
    }
    /** Returns true if any member worker's archetype is stateful. */
    public boolean isStateful() {
        for (Actor a : actors())
            if (a instanceof WorkerActor && ((WorkerActor)a).archetype().isStateful())
                return true;
        return false;
    }
    /** Storages whose producer lies outside this group (group inputs). */
    public Set<Storage> inputs() {
        return Sets.filter(allEdges(), new Predicate<Storage>() {
            @Override
            public boolean apply(Storage input) {
                return Iterables.getOnlyElement(input.upstream()).group() != ActorGroup.this;
            }
        });
    }
    /** Storages whose consumer lies outside this group (group outputs). */
    public Set<Storage> outputs() {
        return Sets.filter(allEdges(), new Predicate<Storage>() {
            @Override
            public boolean apply(Storage input) {
                return Iterables.getOnlyElement(input.downstream()).group() != ActorGroup.this;
            }
        });
    }
    /** Storages whose producer and consumer are both inside this group. */
    public Set<Storage> internalEdges() {
        return Sets.filter(allEdges(), new Predicate<Storage>() {
            @Override
            public boolean apply(Storage input) {
                return Iterables.getOnlyElement(input.upstream()).group() == ActorGroup.this &&
                        Iterables.getOnlyElement(input.downstream()).group() == ActorGroup.this;
            }
        });
    }
    /** All storages touched by any member actor (inputs and outputs). */
    private Set<Storage> allEdges() {
        ImmutableSet.Builder<Storage> builder = ImmutableSet.builder();
        for (Actor a : actors) {
            builder.addAll(a.inputs());
            builder.addAll(a.outputs());
        }
        return builder.build();
    }
    /** Groups that produce data this group consumes. */
    public Set<ActorGroup> predecessorGroups() {
        ImmutableSet.Builder<ActorGroup> builder = ImmutableSet.builder();
        for (Actor a : actors)
            for (Storage s : a.inputs())
                for (Actor b : s.upstream())
                    if (b.group() != this)
                        builder.add(b.group());
        return builder.build();
    }
    /** Groups that consume data this group produces. */
    public Set<ActorGroup> successorGroups() {
        ImmutableSet.Builder<ActorGroup> builder = ImmutableSet.builder();
        for (Actor a : actors)
            for (Storage s : a.outputs())
                for (Actor b : s.downstream())
                    if (b.group() != this)
                        builder.add(b.group());
        return builder.build();
    }
    /** Returns the per-actor iteration counts; fails if not yet initialized. */
    public ImmutableMap<Actor, Integer> schedule() {
        checkState(schedule != null, "schedule not yet initialized");
        return schedule;
    }
    /**
     * Initializes the schedule (once). Every member actor must have an entry.
     */
    public void setSchedule(ImmutableMap<Actor, Integer> schedule) {
        checkState(this.schedule == null, "already initialized schedule");
        for (Actor a : actors())
            checkArgument(schedule.containsKey(a), "schedule doesn't contain actor "+a);
        this.schedule = schedule;
    }
    /**
     * Returns a map mapping each output Storage to the set of physical indices
     * read in that Storage during the given ActorGroup iteration.
     * @param iteration the iteration to simulate
     * @return a map of read physical indices
     */
    public Map<Storage, Set<Integer>> reads(int iteration) {
        // NOTE(review): capacity hint uses outputs().size() although the map
        // is keyed by inputs(); harmless (just a sizing hint) but looks like
        // a copy-paste from writes().
        Map<Storage, Set<Integer>> retval = new HashMap<>(outputs().size());
        for (Storage s : inputs())
            retval.put(s, new HashSet<Integer>());
        for (Actor a : actors()) {
            int begin = schedule.get(a) * iteration, end = schedule.get(a) * (iteration + 1);
            for (int input = 0; input < a.inputs().size(); ++input)
                if (!a.inputs().get(input).isInternal()) {
                    //Use pop on all but last iteration, then use max(pop, peek).
                    int pop = a.pop(input);
                    for (int iter = begin; iter < end-1; ++iter)
                        for (int idx = pop * iter; idx < pop * (iter+1); ++idx)
                            retval.get(a.inputs().get(input)).add(a.translateInputIndex(input, idx));
                    int read = Math.max(a.pop(input), a.peek(input));
                    for (int iter = end-1; iter < end; ++iter)
                        for (int idx = read * iter; idx < read * (iter+1); ++idx)
                            retval.get(a.inputs().get(input)).add(a.translateInputIndex(input, idx));
                }
        }
        return retval;
    }
    /**
     * Returns a map mapping each output Storage to the set of physical indices
     * written in that Storage during the given ActorGroup iteration.
     * @param iteration the iteration to simulate
     * @return a map of written physical indices
     */
    public Map<Storage, Set<Integer>> writes(int iteration) {
        Map<Storage, Set<Integer>> retval = new HashMap<>(outputs().size());
        for (Storage s : outputs())
            retval.put(s, new HashSet<Integer>());
        for (Actor a : actors()) {
            int begin = schedule.get(a) * iteration, end = schedule.get(a) * (iteration + 1);
            for (int output = 0; output < a.outputs().size(); ++output)
                if (!a.outputs().get(output).isInternal()) {
                    int push = a.push(output);
                    for (int iter = begin; iter < end; ++iter)
                        for (int idx = push * iter; idx < push * (iter+1); ++idx)
                            retval.get(a.outputs().get(output)).add(a.translateOutputIndex(output, idx));
                }
        }
        return retval;
    }
    /**
     * Returns a void->void MethodHandle that will run this ActorGroup for the
     * given iterations using the given ConcreteStorage instances.
     * @param iterations the range of iterations to run for
     * @param storage the storage being used
     * @return a void->void method handle
     */
    public MethodHandle specialize(Range<Integer> iterations, Map<Storage, ConcreteStorage> storage) {
        //TokenActors are special.
        assert !isTokenGroup() : actors();
        /**
         * Compute the read and write method handles for each Actor. These don't
         * depend on the iteration, so we can bind and reuse them.
         */
        Map<Actor, MethodHandle> withRWHandlesBound = new HashMap<>();
        for (Actor a : actors()) {
            WorkerActor wa = (WorkerActor)a;
            MethodHandle specialized = wa.archetype().specialize(wa);
            assert a.inputs().size() > 0 : a;
            MethodHandle read;
            // Single input: bind its read handle directly; multiple inputs:
            // dispatch through a tableswitch on the input index.
            if (a.inputs().size() == 1)
                read = MethodHandles.filterArguments(storage.get(a.inputs().get(0)).readHandle(),
                        0, a.inputIndexFunctions().get(0));
            else {
                MethodHandle[] table = new MethodHandle[a.inputs().size()];
                for (int i = 0; i < a.inputs().size(); i++)
                    table[i] = MethodHandles.filterArguments(storage.get(a.inputs().get(i)).readHandle(),
                            0, a.inputIndexFunctions().get(i));
                read = Combinators.tableswitch(table);
            }
            assert a.outputs().size() > 0 : a;
            MethodHandle write;
            // Same scheme for writes.
            if (a.outputs().size() == 1)
                write = MethodHandles.filterArguments(storage.get(a.outputs().get(0)).writeHandle(),
                        0, a.outputIndexFunctions().get(0));
            else {
                MethodHandle[] table = new MethodHandle[a.outputs().size()];
                for (int i = 0; i < a.outputs().size(); ++i)
                    table[i] = MethodHandles.filterArguments(storage.get(a.outputs().get(i)).writeHandle(),
                            0, a.outputIndexFunctions().get(i));
                write = Combinators.tableswitch(table);
            }
            withRWHandlesBound.put(wa, specialized.bindTo(read).bindTo(write));
        }
        /**
         * Compute the initial read/write indices for each iteration, then bind
         * them together in sequence. (We could also move the computation
         * inside the handle, but I think leaving everything explicit is better.
         * We could also bytecode these constants and invoke the method handle,
         * if bytecode gives the JVM more visibility.)
         *
         * TODO: maybe for Filters, we can use a loop since the computation is
         * simple and JVMs can decide whether to unroll or not.
         */
        List<MethodHandle> handles = new ArrayList<>();
        for (int iteration : ContiguousSet.create(iterations, DiscreteDomain.integers())) {
            for (Actor a : ImmutableSortedSet.copyOf(actors())) {
                MethodHandle base = withRWHandlesBound.get(a);
                int subiterations = schedule.get(a);
                for (int i = iteration*subiterations; i < (iteration+1)*subiterations; ++i) {
                    MethodHandle next = base;
                    if (a.inputs().size() == 1)
                        next = MethodHandles.insertArguments(next, 0, i * a.pop(0));
                    else {
                        int[] readIndices = new int[a.inputs().size()];
                        for (int m = 0; m < a.inputs().size(); ++m)
                            readIndices[m] = i * a.pop(m);
                        next = MethodHandles.insertArguments(next, 0, readIndices);
                    }
                    if (a.outputs().size() == 1)
                        next = MethodHandles.insertArguments(next, 0, i * a.push(0));
                    else {
                        int[] writeIndices = new int[a.outputs().size()];
                        for (int m = 0; m < a.outputs().size(); ++m)
                            writeIndices[m] = i * a.push(m);
                        next = MethodHandles.insertArguments(next, 0, writeIndices);
                    }
                    handles.add(next);
                }
            }
        }
        // Sequence all per-subiteration handles into one void->void handle.
        return Combinators.semicolon(handles);
    }
    @Override
    public int compareTo(ActorGroup o) {
        return Integer.compare(id(), o.id());
    }
    @Override
    public boolean equals(Object obj) {
        // Equality is id-based, consistent with compareTo and hashCode.
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        final ActorGroup other = (ActorGroup)obj;
        if (id() != other.id())
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        return id();
    }
}
package edu.nyu.cs.cs2580;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Scanner;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.FileUtils;
import org.jsoup.Jsoup;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import edu.nyu.cs.cs2580.SearchEngine.Options;
/**
* @CS2580: Implement this class for HW2.
*/
public class IndexerInvertedOccurrence extends Indexer {
private DB documentDB = null;
private ArrayList<DB> ivtIndexDBList = new ArrayList<DB>();
private ArrayList<Map<String, List<Integer>>> ivtIndexMapList = new ArrayList<Map<String, List<Integer>>>();
private Map<Integer, DocumentIndexed> docMap = null;
private Map<String, Integer> docUrlMap = null;
// Table name for index of documents.
private static final String DOC_IDX_TBL = "docDB";
private static final String DOC_IVT_TBL = "docIvtDB";
private static final String DOC_URL_TBL = "docUrlDB";
public IndexerInvertedOccurrence(Options options) {
super(options);
System.out.println("Using Indexer: " + this.getClass().getSimpleName());
}
private List<File> getAllFiles(final File folder) {
List<File> fileList = new LinkedList<File>();
for (final File fileEntry : folder.listFiles()) {
if (fileEntry.isDirectory()) {
fileList.addAll(getAllFiles(fileEntry));
} else {
fileList.add(fileEntry);
}
}
return fileList;
}
private class InvertIndexBuildingTask implements Runnable {
private List<File> files;
private int startFileIdx;
private int endFileIdx;
private Map<String, List<Integer>> ivtMap;
public InvertIndexBuildingTask(List<File> files, int startFileIdx,
int endFileIdx, Map<String, List<Integer>> ivtMap) {
this.files = files;
this.startFileIdx = startFileIdx;
this.endFileIdx = endFileIdx;
this.ivtMap = ivtMap;
}
@Override
public void run() {
System.out.println("Thread " + Thread.currentThread().getName()
+ " processes files from " + startFileIdx + " to "
+ endFileIdx);
for (int docId = startFileIdx; docId < endFileIdx; docId++) {
File file = files.get(docId);
Map<String, ArrayList<Integer>> ivtMapItem = new HashMap<String, ArrayList<Integer>>();
String htmlStr = null;
try {
htmlStr = FileUtils.readFileToString(file);
} catch (IOException e) {
continue;
}
org.jsoup.nodes.Document doc = Jsoup.parse(htmlStr);
String title = doc.title();
String text = doc.text();
Stemmer s = new Stemmer();
Scanner scanner = new Scanner(text);
int passageLength = 0;
while (scanner.hasNext()) {
String token = scanner.next().toLowerCase();
s.add(token.toCharArray(), token.length());
s.stem();
// Build inverted map.
token = s.toString();
if (!ivtMapItem.containsKey(token)) {
ArrayList<Integer> occList = new ArrayList<Integer>();
ivtMapItem.put(token, occList);
}
ArrayList<Integer> occList = ivtMapItem.get(token);
occList.add(passageLength);
ivtMapItem.put(token, occList);
passageLength++;
}
String url = null;
try {
url = file.getCanonicalPath();
} catch (IOException e) {
continue;
}
DocumentIndexed di = new DocumentIndexed(docId);
di.setTitle(title);
di.setUrl(url);
di.setLength(passageLength);
for (String token : ivtMapItem.keySet()) {
if (!ivtMap.containsKey(token)) {
ivtMap.put(token, new ArrayList<Integer>());
}
List<Integer> recordList = ivtMap.get(token);
ArrayList<Integer> occList = ivtMapItem.get(token);
// sequentially add <docid, occurrence> to the posting list.
for (int e : occList) {
recordList.add(docId);
recordList.add(e);
}
}
buildDocumentIndex(di);
}
}
}
@Override
public void constructIndex() throws IOException {
String corpusFolder = _options._corpusPrefix;
System.out.println("Construct index from: " + corpusFolder);
long start_t = System.currentTimeMillis();
cleanUpDirectory();
// Get all corpus files.
List<File> files = getAllFiles(new File(corpusFolder));
int filesPerBatch = 1000;
initialStore(false, files.size() / filesPerBatch);
int threadCount = 1;
System.out.println("Start building index with " + threadCount
+ " threads. Elapsed: "
+ (System.currentTimeMillis() - start_t) / 1000.0 + "s");
for (int batchNum = 0; batchNum < files.size() / filesPerBatch; batchNum++) {
int fileIdStart = batchNum * filesPerBatch;
int fileIdEnd = (batchNum + 1) * filesPerBatch;
if (batchNum == (files.size() / filesPerBatch) - 1) {
fileIdEnd = files.size();
}
System.out.println("Processing files from " + fileIdStart + " to "
+ fileIdEnd);
ExecutorService threadPool = Executors
.newFixedThreadPool(threadCount);
Map<String, List<Integer>> ivtMap = new HashMap<String, List<Integer>>();
int totalFileCount = fileIdEnd - fileIdStart;
int filesPerThread = totalFileCount / threadCount;
for (int threadId = 0; threadId < threadCount; threadId++) {
int startFileIdx = threadId * filesPerThread + fileIdStart;
int endFileIdx = (threadId + 1) * filesPerThread + fileIdStart;
if (threadId == threadCount - 1) {
endFileIdx = fileIdEnd;
}
InvertIndexBuildingTask iibt = new InvertIndexBuildingTask(
files, startFileIdx, endFileIdx, ivtMap);
threadPool.submit(iibt);
}
threadPool.shutdown();
try {
threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println(fileIdEnd
+ " pages have been processed. Elapsed: "
+ (System.currentTimeMillis() - start_t) / 1000.0 + "s");
// Write ivtMap into storage.
long recordsCommit = 0;
System.out.println("Writing Inverted Map to disk. " + fileIdEnd
+ " pages have been processed. Elapsed: "
+ (System.currentTimeMillis() - start_t) / 1000.0 + "s");
DB db = ivtIndexDBList.get(batchNum);
Map<String, List<Integer>> ivtMapPerDB = ivtIndexMapList
.get(batchNum);
ivtMapPerDB.putAll(ivtMap);
// for (String token : ivtMap.keySet()) {
// if (recordsCommit % 80000 == 0 && recordsCommit != 0) {
// db.commit();
// System.out.println("Records commit size: " + recordsCommit);
// //List<Integer> ivtRecordList = new
// Vector<Integer>(ivtMap.get(token));
// recordsCommit++;
db.commit();
// db.compact();
// db.close();
System.out.println("Batch commit done. Elapsed: "
+ (System.currentTimeMillis() - start_t) / 1000.0 + "s");
}
documentDB.commit();
documentDB.compact();
documentDB.close();
long end_t = System.currentTimeMillis();
System.out.println("Construct done. Duration: " + (end_t - start_t)
/ 1000.0 + "s");
}
private void initialStore(boolean readOnly) {
initialStore(readOnly, -1);
}
private void initialStore(boolean readOnly, int ivtDBCount) {
// Initialize Database
File f = new File(_options._indexPrefix, "docIdx");
if (readOnly) {
documentDB = DBMaker.newFileDB(f).mmapFileEnable().readOnly()
.transactionDisable().make();
} else {
documentDB = DBMaker.newFileDB(f).mmapFileEnable()
.transactionDisable().asyncWriteEnable().make();
}
if (ivtDBCount > 0) {
for (int i = 0; i < ivtDBCount; i++) {
File ivtDbFile = new File(_options._indexPrefix, "ivtDb" + i);
DB db = null;
if (readOnly) {
db = DBMaker.newFileDB(ivtDbFile).mmapFileEnable()
.readOnly().commitFileSyncDisable()
.transactionDisable().make();
} else {
db = DBMaker.newFileDB(ivtDbFile).mmapFileEnable()
.transactionDisable().compressionEnable()
.commitFileSyncDisable().asyncWriteEnable().make();
}
ivtIndexDBList.add(db);
}
} else {
for (int i = 0; i < 100; i++) {
File ivtDbFile = new File(_options._indexPrefix, "ivtDb" + i);
if (!ivtDbFile.exists()) {
break;
}
DB db = null;
if (readOnly) {
db = DBMaker.newFileDB(ivtDbFile).mmapFileEnable()
.readOnly().commitFileSyncDisable()
.transactionDisable().cacheSize(524288).make();
} else {
db = DBMaker.newFileDB(ivtDbFile).mmapFileEnable()
.commitFileSyncDisable().transactionDisable()
.asyncWriteEnable().cacheSize(524288).make();
}
ivtIndexDBList.add(db);
}
}
for (DB d : ivtIndexDBList) {
Map<String, List<Integer>> hm = d.createHashMap(DOC_IVT_TBL)
.makeOrGet();
ivtIndexMapList.add(hm);
}
docMap = documentDB.createHashMap(DOC_IDX_TBL).makeOrGet();
docUrlMap = documentDB.createHashMap(DOC_URL_TBL).makeOrGet();
}
private void cleanUpDirectory() {
File dir = new File(_options._indexPrefix);
dir.mkdirs();
for (File file : dir.listFiles()) {
file.delete();
}
}
synchronized private void buildDocumentIndex(DocumentIndexed di) {
docMap.put(di._docid, di);
docUrlMap.put(di.getUrl(), di._docid);
}
@Override
public void loadIndex() throws IOException, ClassNotFoundException {
initialStore(true);
}
@Override
public Document getDoc(int docid) {
return docMap.get(docid);
}
/**
* In HW2, you should be using {@link DocumentIndexed}.
*/
@Override
public Document nextDoc(Query query, int docid) {
return null;
}
@Override
public int corpusDocFrequencyByTerm(String term) {
// Number of documents in which {@code term} appeared, over the full corpus.
// Stem given term.
Stemmer s = new Stemmer();
s.add(term.toLowerCase().toCharArray(), term.length());
if (!ivtContainsKey(s.toString())) {
return 0;
}
// Get posting list from index.
List<Integer> l = ivtGet(s.toString());
int count = 0;
int last_id = -1;
for (int i = 0; i < l.size() - 1; i += 2){
if ( l.get(i) != last_id){
last_id = l.get(i);
++count;
}
}
return count;
}
@Override
public int corpusTermFrequency(String term) {
// Number of times {@code term} appeared in corpus.
// Stem given term.
Stemmer s = new Stemmer();
s.add(term.toLowerCase().toCharArray(), term.length());
if (!ivtContainsKey(s.toString())) {
return 0;
}
// Get posting list from index.
List<Integer> l = ivtGet(s.toString());
return l.size() / 2;
}
private int bsInner(final int start, final int end, final int docid,
final List<Integer> list) {
if (end - start <= 1) {
return -1;
}
int chk = start / 2 + end / 2;
if (chk % 2 == 1) {
return -1;
}
if (list.get(chk) > docid) {
return bsInner(start, chk, docid, list);
} else if (list.get(chk) < docid) {
return bsInner(chk, end, docid, list);
}
while (chk -2 >= start && list.get(chk-2) == docid){
chk -= 2;
}
return chk;
}
private int binarySearchPostList(final int docId, final List<Integer> list) {
return bsInner(0, list.size() - 1, docId, list);
}
@Override
public int documentTermFrequency(String term, String url) {
// Number of times {@code term} appeared in the document {@code url}
// Get docid for specific url.
int docid = docUrlMap.get(url);
// Stem given term.
Stemmer s = new Stemmer();
s.add(term.toLowerCase().toCharArray(), term.length());
if (!ivtContainsKey(s.toString())) {
return 0;
}
// Get posting list from index.
List<Integer> l = ivtGet(s.toString());
// Use binary search looking for docid within given posting list.
int pos = binarySearchPostList(docid, l);
if (pos != -1) {
// Return term frequency for given doc and term
int count=0;
while (pos < l.size()-1 && l.get(pos)==docid){
++count;
pos += 2;
}
return count;
} else {
return 0;
}
}
private boolean ivtContainsKey(String key) {
for (Map<String, List<Integer>> m : ivtIndexMapList) {
if (m.containsKey(key)) {
return true;
}
}
return false;
}
private List<Integer> ivtGet(String key) {
List<Integer> l = new ArrayList<Integer>();
for (Map<String, List<Integer>> m : ivtIndexMapList) {
if (m.containsKey(key)) {
l.addAll(m.get(key));
}
}
return l;
}
} |
/* Open Source Software - may be modified and shared by FRC teams. The code   */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project.                                                               */
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class BaseProject extends IterativeRobot {
/*
* Declare constants of the PWM ports for specified motors
*/
public static final int DRIVE_LEFT_FRONT = 1;
public static final int DRIVE_LEFT_REAR = 2;
public static final int DRIVE_RIGHT_FRONT = 3;
public static final int DRIVE_RIGHT_REAR = 4;
/*
* Declare constants of the USB ports for specified joysticks
*/
public static final int JOYSTICK = 1;
/*
* Declare keys for SmartDashboard Data
*/
public static final String SD_LOG = "Log";
public static final String SD_JOYSTICK_X = "JoyX";
public static final String SD_JOYSTICK_Y = "JoyY";
/*
* Declare channels for Driver Station Data
*
* For use with "getDigitalIn(int channel)", "getAnalogIn(int channel)"
* and "setDigitalOut(int channel)"
*/
public static final int DS_DIGITAL_1 = 1;
public static final int DS_DIGITAL_2 = 2;
public static final int DS_DIGITAL_3 = 3;
public static final int DS_DIGITAL_4 = 4;
public static final int DS_DIGITAL_5 = 5;
public static final int DS_DIGITAL_6 = 6;
public static final int DS_DIGITAL_7 = 7;
public static final int DS_DIGITAL_8 = 8;
public static final int DS_ANALOG_1 = 1;
public static final int DS_ANALOG_2 = 2;
public static final int DS_ANALOG_3 = 3;
public static final int DS_ANALOG_4 = 4;
public static final int DS_ANALOG_5 = 5;
public static final int DS_ANALOG_6 = 6;
public static final int DS_ANALOG_7 = 7;
public static final int DS_ANALOG_8 = 8;
/**
* RobotDrive object handles the four drive motors in the manner required
* for an arcade, tank, or omni-directional drive
*/
private RobotDrive mRobotDrive;
/**
* Joystick object handles the fetching of all user input from a single
* physical device.
*/
private Joystick mJoystick;
/**
* Driver Station object to transfer data to and from the DS
*/
private DriverStation mDriverStation;
private LogHelper mLogHelper = new LogHelper();
/**
* This function is run when the robot is first started up and should be
* used for any initialization code.
*/
public void robotInit() {
SmartDashboard.putString(SD_LOG, "Entered 'robotInit()'");
mLogHelper.log("Entered 'robotInit()'");
mRobotDrive = new RobotDrive(DRIVE_LEFT_FRONT, DRIVE_LEFT_REAR,
DRIVE_RIGHT_FRONT, DRIVE_RIGHT_REAR);
SmartDashboard.putString(SD_LOG, "Initialized RobotDrive");
mJoystick = new Joystick(JOYSTICK);
SmartDashboard.putString(SD_LOG, "Initialized Joystick");
mDriverStation = DriverStation.getInstance();
SmartDashboard.putString(SD_LOG, "Initialized Driver Station");
SmartDashboard.putString(SD_LOG, "End 'robotInit()'");
}
/**
* This function is called periodically during autonomous
*/
public void autonomousPeriodic() {
SmartDashboard.putString(SD_LOG, "Entered 'autonomousPeriodic()'");
}
/**
* This function is called periodically during operator control
*/
public void teleopPeriodic() {
SmartDashboard.putString(SD_LOG, "Entered 'teleopPeriodic()'");
while(isEnabled() && isOperatorControl()) {
mRobotDrive.arcadeDrive(mJoystick.getY(), mJoystick.getX());
}
}
/**
* This function is called periodically during test mode
*/
public void testPeriodic() {
}
} |
package org.opendaylight.netvirt.aclservice.api.utils;
import java.math.BigInteger;
import java.util.List;
import java.util.Objects;

import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev130715.Uuid;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.aclservice.rev160608.IpPrefixOrAddress;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.aclservice.rev160608.interfaces._interface.AllowedAddressPairs;
/**
* The Class AclInterface.
*/
public class AclInterface {
/** The port security enabled. */
Boolean portSecurityEnabled = false;
/** The interface id. */
String interfaceId;
/** The l port tag. */
Integer lportTag;
/** The dp id. */
BigInteger dpId;
/** Elan tag of the interface. */
Long elanId;
/** VPN Id of the interface. */
Long vpnId;
/** The security groups. */
List<Uuid> securityGroups;
/** The allowed address pairs. */
List<AllowedAddressPairs> allowedAddressPairs;
/** The IP broadcast CIDRs. */
List<IpPrefixOrAddress> subnetIpPrefixes;
/** The port is marked for delete. */
Boolean isMarkedForDelete = false;
/**
* Checks if is port security enabled.
*
* @return the boolean
*/
public Boolean isPortSecurityEnabled() {
return portSecurityEnabled;
}
/**
* Gets the port security enabled.
*
* @return the port security enabled
*/
public Boolean getPortSecurityEnabled() {
return portSecurityEnabled;
}
/**
* Sets the port security enabled.
*
* @param portSecurityEnabled the new port security enabled
*/
public void setPortSecurityEnabled(Boolean portSecurityEnabled) {
this.portSecurityEnabled = portSecurityEnabled;
}
/**
* Gets the interface id.
*
* @return the interface id
*/
public String getInterfaceId() {
return interfaceId;
}
/**
* Sets the interface id.
*
* @param interfaceId the new interface id
*/
public void setInterfaceId(String interfaceId) {
this.interfaceId = interfaceId;
}
/**
* Gets the l port tag.
*
* @return the l port tag
*/
public Integer getLPortTag() {
return lportTag;
}
/**
* Sets the l port tag.
*
* @param lportTag the new l port tag
*/
public void setLPortTag(Integer lportTag) {
this.lportTag = lportTag;
}
/**
* Gets the dp id.
*
* @return the dp id
*/
public BigInteger getDpId() {
return dpId;
}
/**
* Sets the dp id.
*
* @param dpId the new dp id
*/
public void setDpId(BigInteger dpId) {
this.dpId = dpId;
}
/**
* Gets elan id.
*
* @return elan id of the interface
*/
public Long getElanId() {
return elanId;
}
/**
* Sets elan id of the interface.
*
* @param elanId elan id of the interface
*/
public void setElanId(Long elanId) {
this.elanId = elanId;
}
/**
* Gets vpn id.
*
* @return VPN Id of the interface
*/
public Long getVpnId() {
return vpnId;
}
/**
* Sets VPN Id of the interface.
*
* @param vpnId VPN Id of the interface
*/
public void setVpnId(Long vpnId) {
this.vpnId = vpnId;
}
/**
* Gets the security groups.
*
* @return the security groups
*/
public List<Uuid> getSecurityGroups() {
return securityGroups;
}
/**
* Sets the security groups.
*
* @param securityGroups the new security groups
*/
public void setSecurityGroups(List<Uuid> securityGroups) {
this.securityGroups = securityGroups;
}
/**
* Gets the allowed address pairs.
*
* @return the allowed address pairs
*/
public List<AllowedAddressPairs> getAllowedAddressPairs() {
return allowedAddressPairs;
}
/**
* Sets the allowed address pairs.
*
* @param allowedAddressPairs the new allowed address pairs
*/
public void setAllowedAddressPairs(List<AllowedAddressPairs> allowedAddressPairs) {
this.allowedAddressPairs = allowedAddressPairs;
}
/**
* Gets the Subnet IP Prefix.
*
* @return the Subnet IP Prefix
*/
public List<IpPrefixOrAddress> getSubnetIpPrefixes() {
return subnetIpPrefixes;
}
/**
* Sets the Subnet IP Prefix.
*
* @param subnetIpPrefixes the Subnet IP Prefix
*/
public void setSubnetIpPrefixes(List<IpPrefixOrAddress> subnetIpPrefixes) {
this.subnetIpPrefixes = subnetIpPrefixes;
}
/**
* Retrieve isMarkedForDelete.
* @return the whether it is marked for delete
*/
public Boolean isMarkedForDelete() {
return isMarkedForDelete;
}
/**
* Sets isMarkedForDelete.
* @param isMarkedForDelete boolean value
*/
public void setIsMarkedForDelete(Boolean isMarkedForDelete) {
this.isMarkedForDelete = isMarkedForDelete;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((portSecurityEnabled == null) ? 0 : portSecurityEnabled.hashCode());
result = prime * result + ((dpId == null) ? 0 : dpId.hashCode());
result = prime * result + ((interfaceId == null) ? 0 : interfaceId.hashCode());
result = prime * result + ((lportTag == null) ? 0 : lportTag.hashCode());
result = prime * result + ((securityGroups == null) ? 0 : securityGroups.hashCode());
result = prime * result + ((allowedAddressPairs == null) ? 0 : allowedAddressPairs.hashCode());
result = prime * result + ((isMarkedForDelete == null) ? 0 : isMarkedForDelete.hashCode());
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
AclInterface other = (AclInterface) obj;
if (portSecurityEnabled == null) {
if (other.portSecurityEnabled != null) {
return false;
}
} else if (!portSecurityEnabled.equals(other.portSecurityEnabled)) {
return false;
}
if (dpId == null) {
if (other.dpId != null) {
return false;
}
} else if (!dpId.equals(other.dpId)) {
return false;
}
if (interfaceId == null) {
if (other.interfaceId != null) {
return false;
}
} else if (!interfaceId.equals(other.interfaceId)) {
return false;
}
if (lportTag == null) {
if (other.lportTag != null) {
return false;
}
} else if (!lportTag.equals(other.lportTag)) {
return false;
}
if (securityGroups == null) {
if (other.securityGroups != null) {
return false;
}
} else if (!securityGroups.equals(other.securityGroups)) {
return false;
}
if (allowedAddressPairs == null) {
if (other.allowedAddressPairs != null) {
return false;
}
} else if (!allowedAddressPairs.equals(other.allowedAddressPairs)) {
return false;
}
if (isMarkedForDelete == null) {
if (other.isMarkedForDelete != null) {
return false;
}
} else if (!isMarkedForDelete.equals(other.isMarkedForDelete)) {
return false;
}
return true;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "AclInterface [portSecurityEnabled=" + portSecurityEnabled + ", interfaceId=" + interfaceId
+ ", lportTag=" + lportTag + ", dpId=" + dpId + ", securityGroups=" + securityGroups
+ ", allowedAddressPairs=" + allowedAddressPairs + ", isMarkedForDelete=" + isMarkedForDelete + "]";
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.