answer
stringlengths
17
10.2M
package seedu.address.ui; import java.time.LocalDateTime; import javafx.fxml.FXML; import javafx.scene.control.Label; import javafx.scene.layout.Pane; import seedu.address.commons.core.IndexPrefix; import seedu.address.model.task.EventTask; public class EventTaskListCard extends UiPart<Pane> { private static final String FXML = "/view/EventTaskListCard.fxml"; @FXML private Label indexLabel; @FXML private Label nameLabel; @FXML private Label startLabel; @FXML private Label endLabel; /** * @param eventTask The event task to display. Can be null to not display anything. */ public EventTaskListCard(EventTask eventTask, int index) { super(FXML); if (eventTask != null) { indexLabel.setText(IndexPrefix.EVENT.getPrefixString() + index + ". "); nameLabel.setText(eventTask.getName().toString()); startLabel.setText(eventTask.localDateTimeToPrettyString(eventTask.getStart())); endLabel.setText(eventTask.localDateTimeToPrettyString(eventTask.getEnd())); if (eventTask.getEnd().isBefore(LocalDateTime.now())) { getRoot().getStyleClass().add("finished"); } else if (eventTask.getStart().isBefore(LocalDateTime.now())) { getRoot().getStyleClass().add("inProgress"); } } else { getRoot().setVisible(false); } } }
package seedu.emeraldo.model.task; import seedu.emeraldo.commons.exceptions.IllegalValueException; /** * Represents a Task's date and time in Emeraldo. * Guarantees: immutable; is valid as declared in {@link #isValidDateTime(String)} */ public class DateTime { public static final String MESSAGE_DATETIME_CONSTRAINTS = "Task date and time must follow this format DD/MM/YYYY HH:MM in 24 hours format"; public static final String DATETIME_VALIDATION_REGEX = "((0?[1-9]|[12][0-9]|3[01])/(0?[1-9]|[1][0-2])/(([0-9][0-9])?[0-9][0-9])){1}" + "( ([01][0-9]|[2][0-3]):([0-5][0-9]))?"; public final String value; public DateTime(String dateTime) throws IllegalValueException { assert dateTime != null; if (!isValidDateTime(dateTime)) { throw new IllegalValueException(MESSAGE_DATETIME_CONSTRAINTS); } this.value = dateTime; } private static boolean isValidDateTime(String test) { if(test.equals("")) return true; else return test.matches(DATETIME_VALIDATION_REGEX); } @Override public String toString() { return value; } @Override public boolean equals(Object other) { return other == this // short circuit if same object || (other instanceof DateTime // instanceof handles nulls && this.value.equals(((DateTime) other).value)); // state check } @Override public int hashCode() { return value.hashCode(); } }
package org.ctrip.ops.sysdev.filters;

import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;

import org.apache.log4j.Logger;

/**
 * Base class for pipeline filter workers: consumes events from an input
 * queue, applies {@link #filter(Object)} to each, and forwards them to a
 * bounded output queue that the next pipeline stage reads from.
 */
public class BaseFilter implements Runnable {
    private static final Logger logger = Logger.getLogger("BaseFilter");

    /** Output queue capacity used when the config has no "queueSize" entry. */
    private static final int DEFAULT_QUEUE_SIZE = 1000;

    protected Map config;
    protected ArrayBlockingQueue inputQueue;
    protected ArrayBlockingQueue outputQueue;

    /**
     * @param config filter configuration; may contain an integer "queueSize"
     *               overriding the output queue capacity.
     * @param inputQueue the queue this filter consumes events from.
     */
    public BaseFilter(Map config, ArrayBlockingQueue inputQueue) {
        this.config = config;
        this.inputQueue = inputQueue;
        int queueSize = DEFAULT_QUEUE_SIZE;
        if (this.config.containsKey("queueSize")) {
            queueSize = (int) this.config.get("queueSize");
        }
        this.outputQueue = new ArrayBlockingQueue(queueSize, false);
        this.prepare();
    }

    /** Subclass hook: one-time setup, invoked from the constructor. */
    protected void prepare() {
    }

    /** Subclass hook: transform or inspect a single event in place. */
    protected void filter(Object event) {
    }

    /**
     * Worker loop: blocks for the next event, filters it, and hands it to
     * the output queue. Exits cleanly when the thread is interrupted.
     */
    public void run() {
        while (true) {
            try {
                // take() blocks until an event arrives; the original poll()
                // loop busy-spun at full CPU whenever the input queue was empty.
                Object event = this.inputQueue.take();
                this.filter(event);
                this.outputQueue.put(event);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop the worker; the original
                // swallowed the interrupt and kept spinning.
                Thread.currentThread().interrupt();
                logger.warn("filter thread interrupted, exiting", e);
                return;
            }
        }
    }

    public ArrayBlockingQueue getOutputMQ() {
        return this.outputQueue;
    }
}
package com.github.digin.android;

import android.app.Fragment;

import com.github.digin.android.fragments.BoundedMapFragment;
import com.github.digin.android.fragments.FavoritesFragment;
import com.github.digin.android.fragments.LineupListFragment;

import java.util.Arrays;
import java.util.List;

/**
 * An entry in the navigation drawer: a display name paired with the Fragment
 * class to show, instantiated lazily on first use.
 */
public class NavDrawerItem {

    static NavDrawerItem[] items = new NavDrawerItem[] {
            new NavDrawerItem(BoundedMapFragment.class, "Map"),
            new NavDrawerItem(LineupListFragment.class, "Chefs"),
            new NavDrawerItem(FavoritesFragment.class, "Favorites")
    };

    /** Returns the fixed list of drawer entries, in display order. */
    public static List<NavDrawerItem> getItems() {
        return Arrays.asList(items);
    }

    String itemName;
    Class<? extends Fragment> fragmentClass;
    Fragment fragment;

    public NavDrawerItem(Class<? extends Fragment> fragmentClass, String itemName) {
        this.fragmentClass = fragmentClass;
        this.itemName = itemName;
    }

    /**
     * Lazily instantiates and caches the fragment for this entry.
     *
     * @throws RuntimeException if the fragment class cannot be instantiated
     *         (e.g. abstract, or no public default constructor).
     */
    public Fragment getFragment() {
        if (fragment == null) {
            try {
                fragment = fragmentClass.newInstance();
            } catch (InstantiationException e) {
                // Fail fast with the cause. The original only printed the
                // stack trace here and returned a null fragment, deferring
                // the crash to the caller.
                throw new RuntimeException("Could not instantiate fragment "
                        + fragmentClass.getSimpleName(), e);
            } catch (IllegalAccessException e) {
                // Preserve the cause; the original dropped it.
                throw new RuntimeException("The fragment " + fragmentClass.getSimpleName()
                        + " has no public default constructor.", e);
            }
        }
        return fragment;
    }

    @Override
    public String toString() {
        return itemName;
    }
}
package beast.evolution.tree.coalescent; import beast.core.Description; import beast.core.Input; import beast.core.parameter.IntegerParameter; import beast.core.parameter.RealParameter; import beast.evolution.tree.Node; import beast.evolution.tree.Tree; import beast.math.Binomial; import beast.util.Randomizer; import java.util.ArrayList; import java.util.List; /** * @author Alexei Drummond */ @Description("A tree generated randomly from the structured coalescent process, with the given population sizes, migration rates and per-deme sample sizes.") public class StructuredCoalescentTree extends Tree { public Input<RealParameter> popSizesMigrationRates = new Input<RealParameter>("popSizesMigrationRates", "A matrix of migration rates and population sizes. Population sizes occupy the diagonal and migration rates occupy the off-diagonals"); public Input<IntegerParameter> sampleSizes = new Input<IntegerParameter>("sampleSizes", "The sample sizes for each population"); enum EventType {coalescent, migration} public void initAndValidate() { int count = 0; List<List<Node>> nodes = new ArrayList<List<Node>>(); for (int i = 0; i < sampleSizes.get().getDimension(); i++) { nodes.add(new ArrayList<Node>()); for (int j = 0; j < sampleSizes.get().getValue(i); j++) { Node node = new Node(); node.setNr(count); node.setID(count + ""); node.setMetaData("deme", i); node.setHeight(0); nodes.get(i).add(node); count += 1; } } setRoot(simulateStructuredCoalescentForest(nodes, popSizesMigrationRates.get(), Double.POSITIVE_INFINITY).get(0)); initArrays(); } private List<Node> simulateStructuredCoalescentForest(List<List<Node>> nodes, RealParameter popSizesMigrationRates, double stopTime) { //diagonals are coalescent rates, off-diagonals are migration rates double[][] rates = new double[nodes.size()][nodes.size()]; double totalRate = populateRateMatrix(nodes, popSizesMigrationRates, rates); double time = 0.0; int nodeNumber = getTotalNodeCount(nodes); while (time < stopTime && 
getTotalNodeCount(nodes) > 1) { SCEvent event = selectRandomEvent(rates, totalRate, time); if (event.type == EventType.coalescent) { // coalescent Node node1 = selectRandomNode(nodes.get(event.pop)); Node node2 = selectRandomNode(nodes.get(event.pop)); if (node1.getMetaData("deme") != node2.getMetaData("deme")) { throw new RuntimeException("demes must match for coalescing nodes!"); } Node parent = new Node(); parent.setNr(nodeNumber); parent.setHeight(event.time); parent.setMetaData("deme", node1.getMetaData("deme")); parent.addChild(node1); parent.addChild(node2); time = event.time; nodes.get(event.pop).remove(node1); nodes.get(event.pop).remove(node1); nodes.get(event.pop).add(parent); } else { // migration Node migrant = selectRandomNode(nodes.get(event.pop)); Node migrantsParent = new Node(); migrantsParent.setNr(nodeNumber); migrantsParent.setHeight(event.time); migrantsParent.setMetaData("deme", event.toPop); migrantsParent.addChild(migrant); time = event.time; nodes.get(event.pop).remove(migrant); nodes.get(event.toPop).add(migrantsParent); } totalRate = populateRateMatrix(nodes, popSizesMigrationRates, rates); nodeNumber += 1; } List<Node> rootNodes = new ArrayList<Node>(); for (List<Node> nodeList : nodes) { rootNodes.addAll(nodeList); } //System.out.println(rootNodes.size() + " root nodes remain"); //System.out.println(" rootNodes.get(0).getNodeCount() == " + rootNodes.get(0).getNodeCount()); return rootNodes; } private int getTotalNodeCount(List<List<Node>> nodes) { int count = 0; for (List<Node> nodeList : nodes) { count += nodeList.size(); } return count; } private Node selectRandomNode(List<Node> nodes) { int index = Randomizer.nextInt(nodes.size()); Node node = nodes.remove(index); return node; } private SCEvent selectRandomEvent(double[][] rates, double totalRate, double time) { double U = Randomizer.nextDouble() * totalRate; double cumulativeRate = 0.0; for (int i = 0; i < rates.length; i++) { for (int j = 0; j < rates.length; j++) { if (U > 
rates[i][j]) { U -= rates[i][j]; } else { SCEvent event = new SCEvent(); event.pop = i; event.toPop = j; if (i == j) { event.type = EventType.coalescent; } else { event.type = EventType.migration; } double V = U / rates[i][j]; event.time = time + (-Math.log(V) / totalRate); return event; } } } throw new RuntimeException(); } private double populateRateMatrix(List<List<Node>> nodes, RealParameter popSizesMigrationRates, double[][] rates) { double totalRate = 0; // coalescent rates for (int i = 0; i < rates.length; i++) { for (int j = 0; j < rates.length; j++) { double popSizej = popSizesMigrationRates.getMatrixValue(j, j); if (i == j) { rates[i][i] = Binomial.choose2(nodes.get(i).size()) * popSizej; } else { rates[i][j] = popSizesMigrationRates.getMatrixValue(i, j) * popSizej * nodes.get(i).size(); } totalRate += rates[i][j]; } } return totalRate; } private class SCEvent { int pop; // if the event is a migration this is the population the parent node is in // (i.e. the deme that the lineage migrates to when going backwards in time) int toPop; EventType type; double time; } }
package ch.unizh.ini.jaer.chip.multicamera; import ch.unizh.ini.jaer.chip.retina.DVS128; import ch.unizh.ini.jaer.chip.stereopsis.*; import java.util.TreeMap; import net.sf.jaer.event.MultiCameraEvent; import net.sf.jaer.stereopsis.*; import net.sf.jaer.aemonitor.*; import net.sf.jaer.aemonitor.AEMonitorInterface; import net.sf.jaer.aemonitor.AEPacketRaw; import net.sf.jaer.chip.AEChip; import java.util.ArrayList; import net.sf.jaer.biasgen.BiasgenHardwareInterface; import net.sf.jaer.event.*; import net.sf.jaer.graphics.*; import net.sf.jaer.graphics.BinocularRenderer; import net.sf.jaer.hardwareinterface.HardwareInterface; import net.sf.jaer.hardwareinterface.HardwareInterfaceFactory; import net.sf.jaer.hardwareinterface.usb.USBInterface; /** * Multiple DVS128 retinas each with its own separate but time-synchronized hardware interface. * Differs from the usual AEChip object in that it also overrides #getHardwareInterface and #setHardwareInterface to supply MultiCameraInterface which are multiple DVS128 hardware interfaces. 
* @author tobi * @see net.sf.jaer.stereopsis.MultiCameraInterface * @see net.sf.jaer.stereopsis.MultiCameraHardwareInterface */ public class MultiDVS128CameraChip extends DVS128 implements MultiCameraInterface { public static String getDescription() { return "A multi DVS128 retina (DVS128) each on it's own USB interface with merged and presumably aligned fields of view"; } private AEChip[] cameras = new AEChip[MultiCameraEvent.NUM_CAMERAS]; /** Creates a new instance of MultiDVS128CameraChip */ public MultiDVS128CameraChip() { super(); for (AEChip c : cameras) { c = new DVS128(); } setEventClass(BinocularEvent.class); setRenderer(new BinocularRenderer(this)); // setCanvas(new RetinaCanvas(this)); // we make this canvas so that the sizes of the chip are correctly set setEventExtractor(new Extractor(this)); setBiasgen(new Biasgen(this)); } @Override public void setAeViewer(AEViewer aeViewer) { super.setAeViewer(aeViewer); aeViewer.setLogFilteredEventsEnabled(false); // not supported for binocular reconstruction yet TODO } public AEChip getCamera(int i) { return cameras[i]; } @Override public int getNumCellTypes() { return MultiCameraEvent.NUM_CAMERAS * 2; } @Override public int getNumCameras() { return MultiCameraEvent.NUM_CAMERAS; } @Override public void setCamera(int i, AEChip chip) { cameras[i] = chip; } /** Changes order of cameras according to list in permutation (which is not checked for uniqueness or bounds). * * @param permutation list of destination indices for elements of cameras. */ @Override public void permuteCameras(int[] permutation) { AEChip[] tmp = new AEChip[permutation.length]; System.arraycopy(cameras, 0, tmp, 0, permutation.length); for (int i = 0; i < permutation.length; i++) { cameras[i] = tmp[permutation[i]]; } } /** the event extractor for the multi chip. 
* It extracts from each event the x,y,type of the event and in addition, * it adds getNumCellTypes to each type to signal * a right event (as opposed to a left event) */ public class Extractor extends DVS128.Extractor { public Extractor(MultiDVS128CameraChip chip) { super(new DVS128()); // they are the same type } /** extracts the meaning of the raw events and returns EventPacket containing BinocularEvent. *@param in the raw events, can be null *@return out the processed events. these are partially processed in-place. empty packet is returned if null is supplied as in. */ @Override synchronized public EventPacket extractPacket(AEPacketRaw in) { if (out == null) { out = new EventPacket(MultiCameraEvent.class); } if (in == null) { return out; } int n = in.getNumEvents(); //addresses.length; int skipBy = 1; if (isSubSamplingEnabled()) { while (n / skipBy > getSubsampleThresholdEventCount()) { skipBy++; } } int[] a = in.getAddresses(); int[] timestamps = in.getTimestamps(); OutputEventIterator outItr = out.outputIterator(); for (int i = 0; i < n; i += skipBy) { // bug here MultiCameraEvent e = (MultiCameraEvent) outItr.nextOutput(); // we need to be careful to fill in all the fields here or understand how the super of MultiCameraEvent fills its fields e.address = a[i]; e.timestamp = timestamps[i]; e.camera = MultiCameraEvent.getCameraFromRawAddress(a[i]); e.x = getXFromAddress(a[i]); e.y = getYFromAddress(a[i]); // assumes that the raw address format has polarity in msb and that 0==OFF type int pol = a[i] & 1; e.polarity = pol == 0 ? 
PolarityEvent.Polarity.Off : PolarityEvent.Polarity.On; // combines polarity with camera to assign 2*NUM_CAMERA types e.type = (byte) (2 * e.camera + pol); // assign e.type here so that superclasses don't get fooled by using default type of event for polarity event } return out; } /** Reconstructs the raw packet after event filtering to include the binocular information @param packet the filtered packet @return the reconstructed packet */ @Override public AEPacketRaw reconstructRawPacket(EventPacket packet) { AEPacketRaw p = super.reconstructRawPacket(packet); // we also need to add camera info to raw events for (int i = 0; i < packet.getSize(); i++) { MultiCameraEvent mce = (MultiCameraEvent) packet.getEvent(i); EventRaw event = p.getEvent(i); event.address=MultiCameraEvent.setCameraNumberToRawAddress(mce.camera, event.address); } return p; } }// extractor for multidvs128camerachip @Override public void setHardwareInterface(HardwareInterface hw) { if (hw != null) { log.warning("trying to set hardware interface to " + hw + " but hardware interface should have been constructed as a MultiCameraHardwareInterface by this MultiDVS128CameraChip"); } if (hw != null && hw.isOpen()) { log.info("closing hw interface"); hw.close(); } super.setHardwareInterface(hw); } boolean deviceMissingWarningLogged = false; /**Builds and returns a hardware interface for this multi camera device. * Unlike other chip objects, this one actually invokes the HardwareInterfaceFactory to * construct the interfaces and opens them, because this device depends on a particular pair of interfaces. * <p> * The hardware serial number IDs are used to assign left and right retinas. 
* @return the hardware interface for this device */ @Override public HardwareInterface getHardwareInterface() { if (hardwareInterface != null) { return hardwareInterface; } int n = HardwareInterfaceFactory.instance().getNumInterfacesAvailable(); if (n < MultiCameraEvent.NUM_CAMERAS) { if (deviceMissingWarningLogged = false) { log.warning("couldn't build MultiCameraHardwareInterface hardware interface because only " + n + " are available and " + MultiCameraEvent.NUM_CAMERAS + " are needed"); deviceMissingWarningLogged = true; } return null; } if (n > MultiCameraEvent.NUM_CAMERAS) { log.info(n + " interfaces, searching them to find DVS128 interfaces"); } ArrayList<HardwareInterface> hws = new ArrayList(); for (int i = 0; i < n; i++) { HardwareInterface hw = HardwareInterfaceFactory.instance().getInterface(i); if (hw instanceof AEMonitorInterface && hw instanceof BiasgenHardwareInterface) { log.info("found AEMonitorInterface && BiasgenHardwareInterface " + hw); hws.add(hw); } } if (hws.size() < MultiCameraEvent.NUM_CAMERAS) { log.warning("could not find " + MultiCameraEvent.NUM_CAMERAS + " interfaces which are suitable candidates for a multiple camera arrangement " + hws.size()); return null; } // TODO fix assignment of cameras according to serial number order // make treemap (sorted map) of string serial numbers of cameras mapped to interfaces TreeMap<String, AEMonitorInterface> map = new TreeMap(); for (HardwareInterface hw : hws) { try { hw.open(); USBInterface usb0 = (USBInterface) hw; String[] sa = usb0.getStringDescriptors(); if (sa.length < 3) { log.warning("interface " + hw.toString() + " has no serial number, cannot guarentee assignment of cameras"); } else { map.put(sa[2], (AEMonitorInterface) hw); } } catch (Exception ex) { log.warning("enumerating multiple cameras: " + ex.toString()); } } try { Object[] oa=map.values().toArray(); AEMonitorInterface[] aemons=new AEMonitorInterface[oa.length]; int ind=0; for(Object o:oa){ aemons[ind++]=(AEMonitorInterface)o; 
} hardwareInterface = new MultiCameraBiasgenHardwareInterface(aemons); ((MultiCameraBiasgenHardwareInterface) hardwareInterface).setChip(this); hardwareInterface.close(); // will be opened later on by user } catch (Exception e) { log.warning("couldn't build correct multi camera hardware interface: " + e.getMessage()); return null; } deviceMissingWarningLogged = false; return hardwareInterface; } /** * A biasgen for this multicamera combination of DVS128. The biases are simultaneously controlled. * @author tobi */ public class Biasgen extends DVS128.Biasgen { /** Creates a new instance of Biasgen for DVS128 with a given hardware interface *@param chip the hardware interface on this chip is used */ public Biasgen(net.sf.jaer.chip.Chip chip) { super(chip); setName("MultiDVS128CameraChip"); } } }
package com.btmura.android.reddit; import android.app.ActionBar; import android.app.ActionBar.OnNavigationListener; import android.app.Activity; import android.app.Fragment; import android.app.FragmentManager; import android.app.FragmentManager.OnBackStackChangedListener; import android.app.FragmentTransaction; import android.content.ClipboardManager; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnFocusChangeListener; import android.widget.SearchView; import android.widget.SearchView.OnQueryTextListener; import android.widget.ShareActionProvider; import android.widget.Toast; import com.btmura.android.reddit.SubredditListFragment.OnSubredditSelectedListener; import com.btmura.android.reddit.ThingListFragment.OnThingSelectedListener; import com.btmura.android.reddit.common.Formatter; import com.btmura.android.reddit.subredditsearch.SubredditSearchActivity; public class MainActivity extends Activity implements OnBackStackChangedListener, OnNavigationListener, OnQueryTextListener, OnFocusChangeListener, OnSubredditSelectedListener, OnThingSelectedListener { private static final String FRAG_CONTROL = "control"; private static final String FRAG_SUBREDDIT_LIST = "subredditList"; private static final String FRAG_THING_LIST = "thingList"; private static final String FRAG_LINK = "link"; private static final String FRAG_COMMENT = "comment"; private static final int REQUEST_ADD_SUBREDDITS = 0; private static final String STATE_LAST_SELECTED_FILTER = "lastSelectedFilter"; private FragmentManager manager; private ActionBar bar; private SearchView searchView; private FilterAdapter filterSpinner; private int lastSelectedFilter; private View singleContainer; private View thingContainer; private View navContainer; private ShareActionProvider shareProvider; @Override public void onCreate(Bundle savedInstanceState) { 
super.onCreate(savedInstanceState); setContentView(R.layout.main); manager = getFragmentManager(); manager.addOnBackStackChangedListener(this); bar = getActionBar(); bar.setDisplayShowHomeEnabled(true); bar.setCustomView(R.layout.subreddits_search); searchView = (SearchView) bar.getCustomView(); searchView.setOnQueryTextListener(this); searchView.setOnQueryTextFocusChangeListener(this); filterSpinner = new FilterAdapter(this); bar.setListNavigationCallbacks(filterSpinner, this); singleContainer = findViewById(R.id.single_container); thingContainer = findViewById(R.id.thing_container); navContainer = findViewById(R.id.nav_container); if (savedInstanceState == null) { setupFragments(); } } private void setupFragments() { if (singleContainer != null) { bar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); ControlFragment controlFrag = ControlFragment.newInstance(null, null, -1, 0); SubredditListFragment srFrag = SubredditListFragment.newInstance(false); FragmentTransaction ft = manager.beginTransaction(); ft.add(controlFrag, FRAG_CONTROL); ft.replace(R.id.single_container, srFrag, FRAG_SUBREDDIT_LIST); ft.commit(); } if (thingContainer != null) { bar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); ControlFragment controlFrag = ControlFragment.newInstance(null, null, -1, 0); SubredditListFragment srFrag = SubredditListFragment.newInstance(true); FragmentTransaction ft = manager.beginTransaction(); ft.add(controlFrag, FRAG_CONTROL); ft.replace(R.id.subreddit_list_container, srFrag, FRAG_SUBREDDIT_LIST); ft.commit(); } } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putInt(STATE_LAST_SELECTED_FILTER, lastSelectedFilter); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); if (savedInstanceState != null) { lastSelectedFilter = savedInstanceState.getInt(STATE_LAST_SELECTED_FILTER); onBackStackChanged(); } } public boolean 
onNavigationItemSelected(int itemPosition, long itemId) { lastSelectedFilter = itemPosition; if (itemId != getFilter()) { selectSubreddit(getSubreddit(), itemPosition); } return true; } public void onSubredditSelected(Subreddit sr, int event) { switch (event) { case OnSubredditSelectedListener.FLAG_ITEM_CLICKED: selectSubreddit(sr, lastSelectedFilter); break; case OnSubredditSelectedListener.FLAG_LOAD_FINISHED: if (thingContainer != null && !isVisible(FRAG_THING_LIST)) { setThingListNavigationMode(sr); getSubredditListFragment().setSelectedSubreddit(sr); selectSubreddit(sr, lastSelectedFilter); } break; } } private void selectSubreddit(Subreddit sr, int filter) { ControlFragment controlFrag = ControlFragment.newInstance(sr, null, -1, filter); if (singleContainer != null) { manager.removeOnBackStackChangedListener(this); manager.popBackStackImmediate(FRAG_THING_LIST, FragmentManager.POP_BACK_STACK_INCLUSIVE); manager.addOnBackStackChangedListener(this); ThingListFragment thingListFrag = ThingListFragment.newInstance(sr, filter, false); FragmentTransaction ft = manager.beginTransaction(); ft.add(controlFrag, FRAG_CONTROL); ft.replace(R.id.single_container, thingListFrag, FRAG_THING_LIST); ft.addToBackStack(FRAG_THING_LIST); ft.commit(); } if (thingContainer != null) { manager.removeOnBackStackChangedListener(this); manager.popBackStackImmediate(); manager.addOnBackStackChangedListener(this); setThingListNavigationMode(sr); refreshContainers(null); ThingListFragment thingListFrag = ThingListFragment.newInstance(sr, filter, true); FragmentTransaction ft = manager.beginTransaction(); ft.add(controlFrag, FRAG_CONTROL); ft.replace(R.id.thing_list_container, thingListFrag, FRAG_THING_LIST); ft.commit(); } } public void onThingSelected(Thing thing, int position) { selectThing(thing, thing.isSelf ? 
FRAG_COMMENT : FRAG_LINK, position); } private void selectThing(Thing thing, String tag, int position) { ControlFragment controlFrag = ControlFragment.newInstance(getSubreddit(), thing, position, getFilter()); Fragment thingFrag; String popTag; if (FRAG_LINK.equalsIgnoreCase(tag)) { thingFrag = LinkFragment.newInstance(thing); popTag = FRAG_COMMENT; } else if (FRAG_COMMENT.equalsIgnoreCase(tag)) { thingFrag = CommentListFragment.newInstance(thing.getId()); popTag = FRAG_LINK; } else { throw new IllegalArgumentException(tag); } if (singleContainer != null) { manager.removeOnBackStackChangedListener(this); manager.popBackStackImmediate(popTag, FragmentManager.POP_BACK_STACK_INCLUSIVE); manager.addOnBackStackChangedListener(this); FragmentTransaction ft = manager.beginTransaction(); ft.add(controlFrag, FRAG_CONTROL); ft.replace(R.id.single_container, thingFrag, tag); ft.addToBackStack(tag); ft.commit(); } if (thingContainer != null) { manager.removeOnBackStackChangedListener(this); manager.popBackStackImmediate(); manager.addOnBackStackChangedListener(this); FragmentTransaction ft = manager.beginTransaction(); ft.add(controlFrag, FRAG_CONTROL); ft.replace(R.id.thing_container, thingFrag, tag); ft.addToBackStack(tag); ft.commit(); } } private Subreddit getSubreddit() { return getControlFragment().getTopic(); } private Thing getThing() { return getControlFragment().getThing(); } private int getThingPosition() { return getControlFragment().getThingPosition(); } private int getFilter() { return getControlFragment().getFilter(); } private ControlFragment getControlFragment() { return (ControlFragment) manager.findFragmentByTag(FRAG_CONTROL); } private SubredditListFragment getSubredditListFragment() { return (SubredditListFragment) manager.findFragmentByTag(FRAG_SUBREDDIT_LIST); } private ThingListFragment getThingListFragment() { return (ThingListFragment) manager.findFragmentByTag(FRAG_THING_LIST); } public void onBackStackChanged() { refreshActionBar(); 
refreshCheckedItems(); refreshContainers(getThing()); invalidateOptionsMenu(); } private void refreshActionBar() { Subreddit sr = getSubreddit(); Thing t = getThing(); if (t != null && !isVisible(FRAG_SUBREDDIT_LIST)) { setThingNavigationMode(t); } else if (sr != null) { setThingListNavigationMode(sr); } else { setSubredditListNavigationMode(); } bar.setDisplayHomeAsUpEnabled(singleContainer != null && sr != null || t != null); if (bar.getNavigationMode() == ActionBar.NAVIGATION_MODE_LIST) { bar.setSelectedNavigationItem(getFilter()); } } private void setThingNavigationMode(Thing t) { bar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); bar.setDisplayShowTitleEnabled(true); bar.setDisplayShowCustomEnabled(false); bar.setTitle(t.title); } private void setThingListNavigationMode(Subreddit sr) { bar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST); bar.setDisplayShowTitleEnabled(false); bar.setDisplayShowCustomEnabled(false); filterSpinner.setSubreddit(sr.getTitle(this)); } private void setSubredditListNavigationMode() { bar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); bar.setDisplayShowTitleEnabled(true); bar.setDisplayShowCustomEnabled(false); bar.setTitle(R.string.app_name); } private void refreshCheckedItems() { if (isVisible(FRAG_SUBREDDIT_LIST)) { getSubredditListFragment().setSelectedSubreddit(getSubreddit()); } if (isVisible(FRAG_THING_LIST)) { getThingListFragment().setChosenPosition(getThingPosition()); } } private void refreshContainers(Thing t) { if (thingContainer != null) { thingContainer.setVisibility(t != null ? View.VISIBLE : View.GONE); if (navContainer != null) { navContainer.setVisibility(t != null ? 
View.GONE : View.VISIBLE); } } } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); getMenuInflater().inflate(R.menu.main, menu); shareProvider = (ShareActionProvider) menu.findItem(R.id.menu_share).getActionProvider(); return true; } @Override public boolean onPrepareOptionsMenu(Menu menu) { super.onPrepareOptionsMenu(menu); Thing thing = getThing(); boolean hasThing = thing != null; boolean isSelf = thing != null && thing.isSelf; menu.findItem(R.id.menu_refresh).setVisible(singleContainer != null && isVisible(FRAG_THING_LIST)); menu.findItem(R.id.menu_link).setVisible(hasThing && !isSelf && isVisible(FRAG_COMMENT)); menu.findItem(R.id.menu_comments).setVisible(hasThing && !isSelf && isVisible(FRAG_LINK)); menu.findItem(R.id.menu_share).setVisible(hasThing); menu.findItem(R.id.menu_copy_url).setVisible(hasThing); menu.findItem(R.id.menu_view).setVisible(hasThing); menu.findItem(R.id.menu_search_for_subreddits).setVisible(singleContainer == null || isVisible(FRAG_SUBREDDIT_LIST)); updateShareActionIntent(thing); return true; } private boolean isVisible(String tag) { Fragment f = manager.findFragmentByTag(tag); return f != null && f.isAdded(); } @Override public boolean onOptionsItemSelected(MenuItem item) { super.onOptionsItemSelected(item); switch (item.getItemId()) { case android.R.id.home: handleHome(); return true; case R.id.menu_search_for_subreddits: handleSearchForSubreddits(); return true; case R.id.menu_link: handleLink(); return true; case R.id.menu_comments: handleComments(); return true; case R.id.menu_copy_url: handleCopyUrl(); return true; case R.id.menu_view: handleView(); return true; } return false; } private void handleSearchForSubreddits() { searchView.setQuery("", false); bar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); bar.setDisplayShowCustomEnabled(true); bar.getCustomView().requestFocus(); } public boolean onQueryTextChange(String newText) { return false; } public boolean 
onQueryTextSubmit(String query) { Intent intent = new Intent(this, SubredditSearchActivity.class); intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET | Intent.FLAG_ACTIVITY_NO_ANIMATION); intent.putExtra(SubredditSearchActivity.EXTRA_QUERY, query); startActivityForResult(intent, REQUEST_ADD_SUBREDDITS); return true; } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { switch (requestCode) { case REQUEST_ADD_SUBREDDITS: refreshActionBar(); break; default: throw new IllegalStateException("Unexpected request code: " + requestCode); } } public void onFocusChange(View v, boolean hasFocus) { if (v == searchView && !hasFocus) { refreshActionBar(); } } private void handleHome() { int count = manager.getBackStackEntryCount(); if (count > 0) { String tag = manager.getBackStackEntryAt(count - 1).getName(); if (FRAG_COMMENT.equals(tag) || FRAG_LINK.equals(tag)) { for (int i = manager.getBackStackEntryCount() - 1; i >= 0; i String name = manager.getBackStackEntryAt(i).getName(); if (!FRAG_THING_LIST.equals(name)) { manager.popBackStack(); } else { break; } } } else if (singleContainer != null && FRAG_THING_LIST.equals(tag)) { manager.popBackStack(FRAG_THING_LIST, FragmentManager.POP_BACK_STACK_INCLUSIVE); } } } private void handleLink() { selectThing(getThing(), FRAG_LINK, getThingPosition()); } private void handleComments() { selectThing(getThing(), FRAG_COMMENT, getThingPosition()); } private void handleCopyUrl() { ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); String text = getLink(getThing()); clipboard.setText(text); Toast.makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show(); } private void handleView() { Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse(getLink(getThing()))); startActivity(Intent.createChooser(intent, getString(R.string.menu_view))); } private String getLink(Thing thing) { return isVisible(FRAG_LINK) ? 
thing.url : "http: } private void updateShareActionIntent(Thing thing) { if (thing != null) { Intent intent = new Intent(Intent.ACTION_SEND); intent.setType("text/plain"); intent.putExtra(Intent.EXTRA_SUBJECT, Formatter.format(thing.title)); intent.putExtra(Intent.EXTRA_TEXT, getLink(thing)); shareProvider.setShareIntent(intent); } } }
package com.edinarobotics.zed.commands; import com.edinarobotics.utils.gamepad.FilterSet; import com.edinarobotics.utils.gamepad.Gamepad; import com.edinarobotics.utils.gamepad.GamepadResult; import com.edinarobotics.utils.gamepad.filters.DeadzoneFilter; import com.edinarobotics.utils.gamepad.filters.ScalingFilter; import com.edinarobotics.zed.subsystems.Drivetrain; import com.edinarobotics.zed.subsystems.DrivetrainStrafe; import edu.wpi.first.wpilibj.command.Command; /** * This {@link Command} allows a gamepad to control the {@link Drivetrain}. * It gets the current values from the gamepad and sends them to the drivetrain. */ public class GamepadDriveStrafeCommand extends Command{ private static final String COMMAND_NAME = "GamepadDriveStrafe"; Gamepad gamepad; FilterSet filters; DrivetrainStrafe drivetrainStrafe; /** * Construct a new GamepadDriveCommand using the given gamepad, filters * and drivetrain. * @param gamepad The Gamepad object to read for control values. * @param filters The set of filters to use when filtering gamepad output. * @param drivetrain The drivetrain object to control. */ public GamepadDriveStrafeCommand(Gamepad gamepad, FilterSet filters, DrivetrainStrafe drivetrainStrafe){ super(COMMAND_NAME); this.gamepad = gamepad; this.filters = filters; this.drivetrainStrafe = drivetrainStrafe; requires(drivetrainStrafe); } /** * Constructs a new GamepadDriveCommand using the given gamepad and * drivetrain. This command will use a default set of filters. * @param gamepad The Gamepad object to read for control values. * @param drivetrain The drivetrain object to control. 
*/ public GamepadDriveStrafeCommand(Gamepad gamepad, DrivetrainStrafe drivetrainStrafe){ super(COMMAND_NAME); filters = new FilterSet(); filters.addFilter(new DeadzoneFilter(0.15)); filters.addFilter(new ScalingFilter()); this.gamepad = gamepad; this.drivetrainStrafe = drivetrainStrafe; requires(drivetrainStrafe); } protected void initialize() { } /** * Submits values from the given {@code gamepad} to the given * {@code drivetrain}. */ protected void execute() { GamepadResult gamepadState = filters.filter(gamepad.getJoysticks()); drivetrainStrafe.mecanumPolarStrafe(gamepadState.getLeftMagnitude(), -gamepadState.getLeftDirection()); } protected boolean isFinished() { return false; } protected void end() { } protected void interrupted() { } }
package com.danga.squeezer.model; import java.util.Map; import android.os.Parcel; import com.danga.squeezer.Util; import com.danga.squeezer.framework.SqueezerArtworkItem; public class SqueezerAlbum extends SqueezerArtworkItem { private String name; @Override public String getName() { return name; } public SqueezerAlbum setName(String name) { this.name = name; return this; } private String artist; public String getArtist() { return artist; } public void setArtist(String model) { this.artist = model; } private int year; public int getYear() { return year; } public void setYear(int year) { this.year = year; } public SqueezerAlbum(String albumId, String album) { setId(albumId); setName(album); } public SqueezerAlbum(Map<String, String> record) { setId(record.containsKey("album_id") ? record.get("album_id") : record.get("id")); setName(record.get("album")); setArtist(record.get("artist")); setYear(Util.parseDecimalIntOrZero(record.get("year"))); setArtwork_track_id(record.get("artwork_track_id")); } public static final Creator<SqueezerAlbum> CREATOR = new Creator<SqueezerAlbum>() { public SqueezerAlbum[] newArray(int size) { return new SqueezerAlbum[size]; } public SqueezerAlbum createFromParcel(Parcel source) { return new SqueezerAlbum(source); } }; private SqueezerAlbum(Parcel source) { setId(source.readString()); name = source.readString(); artist = source.readString(); year = source.readInt(); setArtwork_track_id(source.readString()); } public void writeToParcel(Parcel dest, int flags) { dest.writeString(getId()); dest.writeString(name); dest.writeString(artist); dest.writeInt(year); dest.writeString(getArtwork_track_id()); } @Override public String toString() { return "id=" + getId() + ", name=" + name + ", artist=" + artist + ", year=" + year; } }
package com.esotericsoftware.kryo.serializers;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Calendar;
import java.util.Collections;
import java.util.Currency;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TimeZone;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.KryoException;
import com.esotericsoftware.kryo.KryoSerializable;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;

import static com.esotericsoftware.kryo.Kryo.*;

/** Contains many serializer classes that are provided by {@link Kryo#addDefaultSerializer(Class, Class) default}.
 * @author Nathan Sweet <misc@n4te.com> */
public class DefaultSerializers {
	static public class BooleanSerializer extends Serializer<Boolean> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Boolean object) {
			output.writeBoolean(object);
		}

		public Boolean read (Kryo kryo, Input input, Class<Boolean> type) {
			return input.readBoolean();
		}
	}

	static public class ByteSerializer extends Serializer<Byte> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Byte object) {
			output.writeByte(object);
		}

		public Byte read (Kryo kryo, Input input, Class<Byte> type) {
			return input.readByte();
		}
	}

	static public class CharSerializer extends Serializer<Character> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Character object) {
			output.writeChar(object);
		}

		public Character read (Kryo kryo, Input input, Class<Character> type) {
			return input.readChar();
		}
	}

	static public class ShortSerializer extends Serializer<Short> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Short object) {
			output.writeShort(object);
		}

		public Short read (Kryo kryo, Input input, Class<Short> type) {
			return input.readShort();
		}
	}

	static public class IntSerializer extends Serializer<Integer> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Integer object) {
			// optimizePositive=false: variable-length encoding efficient for negatives too.
			output.writeInt(object, false);
		}

		public Integer read (Kryo kryo, Input input, Class<Integer> type) {
			return input.readInt(false);
		}
	}

	static public class LongSerializer extends Serializer<Long> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Long object) {
			output.writeLong(object, false);
		}

		public Long read (Kryo kryo, Input input, Class<Long> type) {
			return input.readLong(false);
		}
	}

	static public class FloatSerializer extends Serializer<Float> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Float object) {
			output.writeFloat(object);
		}

		public Float read (Kryo kryo, Input input, Class<Float> type) {
			return input.readFloat();
		}
	}

	static public class DoubleSerializer extends Serializer<Double> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Double object) {
			output.writeDouble(object);
		}

		public Double read (Kryo kryo, Input input, Class<Double> type) {
			return input.readDouble();
		}
	}

	/** @see Output#writeString(String) */
	static public class StringSerializer extends Serializer<String> {
		{
			setImmutable(true);
			setAcceptsNull(true);
		}

		public void write (Kryo kryo, Output output, String object) {
			output.writeString(object);
		}

		public String read (Kryo kryo, Input input, Class<String> type) {
			return input.readString();
		}
	}

	static public class BigIntegerSerializer extends Serializer<BigInteger> {
		{
			setImmutable(true);
			setAcceptsNull(true);
		}

		public void write (Kryo kryo, Output output, BigInteger object) {
			if (object == null) {
				output.writeByte(NULL);
				return;
			}
			BigInteger value = (BigInteger)object;
			byte[] bytes = value.toByteArray();
			// Length is written +1 so the value NULL (0) is reserved for the null marker.
			output.writeInt(bytes.length + 1, true);
			output.writeBytes(bytes);
		}

		public BigInteger read (Kryo kryo, Input input, Class<BigInteger> type) {
			int length = input.readInt(true);
			if (length == NULL) return null;
			byte[] bytes = input.readBytes(length - 1);
			return new BigInteger(bytes);
		}
	}

	static public class BigDecimalSerializer extends Serializer<BigDecimal> {
		// Unscaled value is delegated to the BigInteger serializer (which also handles null).
		private BigIntegerSerializer bigIntegerSerializer = new BigIntegerSerializer();

		{
			setAcceptsNull(true);
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, BigDecimal object) {
			if (object == null) {
				output.writeByte(NULL);
				return;
			}
			BigDecimal value = (BigDecimal)object;
			bigIntegerSerializer.write(kryo, output, value.unscaledValue());
			output.writeInt(value.scale(), false);
		}

		public BigDecimal read (Kryo kryo, Input input, Class<BigDecimal> type) {
			BigInteger unscaledValue = bigIntegerSerializer.read(kryo, input, null);
			if (unscaledValue == null) return null;
			int scale = input.readInt(false);
			return new BigDecimal(unscaledValue, scale);
		}
	}

	static public class ClassSerializer extends Serializer<Class> {
		{
			setImmutable(true);
			setAcceptsNull(true);
		}

		public void write (Kryo kryo, Output output, Class object) {
			kryo.writeClass(output, object);
		}

		public Class read (Kryo kryo, Input input, Class<Class> type) {
			return kryo.readClass(input).getType();
		}
	}

	static public class DateSerializer extends Serializer<Date> {
		public void write (Kryo kryo, Output output, Date object) {
			output.writeLong(object.getTime(), true);
		}

		public Date read (Kryo kryo, Input input, Class<Date> type) {
			return new Date(input.readLong(true));
		}

		public Date copy (Kryo kryo, Date original) {
			return new Date(original.getTime());
		}
	}

	static public class EnumSerializer extends Serializer<Enum> {
		{
			setImmutable(true);
			setAcceptsNull(true);
		}

		private Object[] enumConstants;

		public EnumSerializer (Kryo kryo, Class<? extends Enum> type) {
			enumConstants = type.getEnumConstants();
			if (enumConstants == null) throw new IllegalArgumentException("The type must be an enum: " + type);
		}

		public void write (Kryo kryo, Output output, Enum object) {
			if (object == null) {
				output.writeByte(NULL);
				return;
			}
			// Ordinal is written +1 so the value NULL (0) is reserved for the null marker.
			output.writeInt(object.ordinal() + 1, true);
		}

		public Enum read (Kryo kryo, Input input, Class<Enum> type) {
			int ordinal = input.readInt(true);
			if (ordinal == NULL) return null;
			// FIX: undo the +1 applied in write() before indexing (was a bare "ordinal",
			// which did not compile and would have read the wrong constant).
			ordinal--;
			if (ordinal < 0 || ordinal > enumConstants.length - 1)
				throw new KryoException("Invalid ordinal for enum \"" + type.getName() + "\": " + ordinal);
			Object constant = enumConstants[ordinal];
			return (Enum)constant;
		}
	}

	/** @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class CurrencySerializer extends Serializer<Currency> {
		{
			setImmutable(true);
			setAcceptsNull(true);
		}

		public void write (Kryo kryo, Output output, Currency object) {
			output.writeString(object == null ? null : object.getCurrencyCode());
		}

		public Currency read (Kryo kryo, Input input, Class<Currency> type) {
			String currencyCode = input.readString();
			if (currencyCode == null) return null;
			return Currency.getInstance(currencyCode);
		}
	}

	/** @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class StringBufferSerializer extends Serializer<StringBuffer> {
		{
			setAcceptsNull(true);
		}

		public void write (Kryo kryo, Output output, StringBuffer object) {
			output.writeString(object);
		}

		public StringBuffer read (Kryo kryo, Input input, Class<StringBuffer> type) {
			String value = input.readString();
			if (value == null) return null;
			return new StringBuffer(value);
		}

		public StringBuffer copy (Kryo kryo, StringBuffer original) {
			return new StringBuffer(original);
		}
	}

	/** @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class StringBuilderSerializer extends Serializer<StringBuilder> {
		{
			setAcceptsNull(true);
		}

		public void write (Kryo kryo, Output output, StringBuilder object) {
			output.writeString(object);
		}

		public StringBuilder read (Kryo kryo, Input input, Class<StringBuilder> type) {
			return input.readStringBuilder();
		}

		public StringBuilder copy (Kryo kryo, StringBuilder original) {
			return new StringBuilder(original);
		}
	}

	static public class KryoSerializableSerializer extends Serializer<KryoSerializable> {
		public void write (Kryo kryo, Output output, KryoSerializable object) {
			object.write(kryo, output);
		}

		public KryoSerializable read (Kryo kryo, Input input, Class<KryoSerializable> type) {
			KryoSerializable object = kryo.newInstance(type);
			// Register the reference before reading so cyclic graphs resolve correctly.
			kryo.reference(object);
			object.read(kryo, input);
			return object;
		}
	}

	/** Serializer for lists created via {@link Collections#emptyList()} or that were just assigned the
	 * {@link Collections#EMPTY_LIST}.
	 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class CollectionsEmptyListSerializer extends Serializer {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Object object) {
		}

		public Object read (Kryo kryo, Input input, Class type) {
			return Collections.EMPTY_LIST;
		}
	}

	/** Serializer for maps created via {@link Collections#emptyMap()} or that were just assigned the {@link Collections#EMPTY_MAP}.
	 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class CollectionsEmptyMapSerializer extends Serializer {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Object object) {
		}

		public Object read (Kryo kryo, Input input, Class type) {
			return Collections.EMPTY_MAP;
		}
	}

	/** Serializer for sets created via {@link Collections#emptySet()} or that were just assigned the {@link Collections#EMPTY_SET}.
	 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class CollectionsEmptySetSerializer extends Serializer {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Object object) {
		}

		public Object read (Kryo kryo, Input input, Class type) {
			return Collections.EMPTY_SET;
		}
	}

	/** Serializer for lists created via {@link Collections#singletonList(Object)}.
	 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class CollectionsSingletonListSerializer extends Serializer<List> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, List object) {
			kryo.writeClassAndObject(output, object.get(0));
		}

		public List read (Kryo kryo, Input input, Class type) {
			return Collections.singletonList(kryo.readClassAndObject(input));
		}
	}

	/** Serializer for maps created via {@link Collections#singletonMap(Object, Object)}.
	 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class CollectionsSingletonMapSerializer extends Serializer<Map> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Map object) {
			Entry entry = (Entry)object.entrySet().iterator().next();
			kryo.writeClassAndObject(output, entry.getKey());
			kryo.writeClassAndObject(output, entry.getValue());
		}

		public Map read (Kryo kryo, Input input, Class type) {
			Object key = kryo.readClassAndObject(input);
			Object value = kryo.readClassAndObject(input);
			return Collections.singletonMap(key, value);
		}
	}

	/** Serializer for sets created via {@link Collections#singleton(Object)}.
	 * @author <a href="mailto:martin.grotzke@javakaffee.de">Martin Grotzke</a> */
	static public class CollectionsSingletonSetSerializer extends Serializer<Set> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, Set object) {
			kryo.writeClassAndObject(output, object.iterator().next());
		}

		public Set read (Kryo kryo, Input input, Class type) {
			return Collections.singleton(kryo.readClassAndObject(input));
		}
	}

	/** Serializer for {@link TimeZone}. Assumes the timezones are immutable.
	 * @author serverperformance */
	static public class TimeZoneSerializer extends Serializer<TimeZone> {
		{
			setImmutable(true);
		}

		public void write (Kryo kryo, Output output, TimeZone object) {
			output.writeString(object.getID());
		}

		public TimeZone read (Kryo kryo, Input input, Class<TimeZone> type) {
			return TimeZone.getTimeZone(input.readString());
		}
	}

	/** Serializer for {@link GregorianCalendar}, java.util.JapaneseImperialCalendar, and sun.util.BuddhistCalendar.
	 * @author serverperformance */
	static public class CalendarSerializer extends Serializer<Calendar> {
		// The default value of gregorianCutover.
		static private final long DEFAULT_GREGORIAN_CUTOVER = -12219292800000L;

		TimeZoneSerializer timeZoneSerializer = new TimeZoneSerializer();

		public void write (Kryo kryo, Output output, Calendar object) {
			timeZoneSerializer.write(kryo, output, object.getTimeZone()); // can't be null
			output.writeLong(object.getTimeInMillis(), true);
			output.writeBoolean(object.isLenient());
			output.writeInt(object.getFirstDayOfWeek(), true);
			output.writeInt(object.getMinimalDaysInFirstWeek(), true);
			// Write the Julian->Gregorian cutover only meaningfully for GregorianCalendar;
			// the sentinel default is written otherwise so read() can skip it.
			if (object instanceof GregorianCalendar)
				output.writeLong(((GregorianCalendar)object).getGregorianChange().getTime(), false);
			else
				output.writeLong(DEFAULT_GREGORIAN_CUTOVER, false);
		}

		public Calendar read (Kryo kryo, Input input, Class<Calendar> type) {
			Calendar result = Calendar.getInstance(timeZoneSerializer.read(kryo, input, TimeZone.class));
			result.setTimeInMillis(input.readLong(true));
			result.setLenient(input.readBoolean());
			result.setFirstDayOfWeek(input.readInt(true));
			result.setMinimalDaysInFirstWeek(input.readInt(true));
			long gregorianChange = input.readLong(false);
			if (gregorianChange != DEFAULT_GREGORIAN_CUTOVER)
				if (result instanceof GregorianCalendar) ((GregorianCalendar)result).setGregorianChange(new Date(gregorianChange));
			return result;
		}

		public Calendar copy (Kryo kryo, Calendar original) {
			return (Calendar)original.clone();
		}
	}
}
package com.innobraves.kairosjava.models.results.subsets; import javax.json.JsonObject; /** * @author Hex-3-En * @version 0.0.1 */ public class Subject { private String faceId; private long enrollmentTimestamp; private Subject(){} public String getFaceId() { return faceId; } public long getEnrollmentTimestamp() { return enrollmentTimestamp; } public static Subject create(JsonObject raw){ Subject sub = new Subject(); sub.faceId = raw.getJsonString("face_id").getString(); sub.enrollmentTimestamp = raw.getJsonNumber("enrollment_timestamp").longValue(); return sub; } @Override public String toString() { return "Subject{\n" + "faceId:\t'" + faceId + '\'' + "\n" + "enrollmentTimestamp:\t" + enrollmentTimestamp + "\n" + '}'; } }
package com.jakewharton.breakoutwallpaper; import java.io.FileNotFoundException; import java.util.List; import java.util.Random; import com.jakewharton.utilities.WidgetLocationsPreference; import android.content.SharedPreferences; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.PointF; import android.graphics.Rect; import android.graphics.RectF; import android.net.Uri; import android.util.Log; public class Game implements SharedPreferences.OnSharedPreferenceChangeListener { /** * Single random number generate for this wallpaper. */ /*package*/static final Random RANDOM = new Random(); /** * Tag used for logging. */ private static final String TAG = "BreakoutWallpaper.Game"; /** * Cell value for a blank space. */ private static final int CELL_BLANK = 0; /** * Cell value for an invalid space. */ private static final int CELL_INVALID = 1; /** * Block cells between icon rows. */ private static final int CELLS_BETWEEN_ROW = 2; /** * Block cells between icon columns. */ private static final int CELLS_BETWEEN_COLUMN = 1; /** * Paint solid shapes. */ private static final int PAINT_STYLE_FILL = 0; /** * Paint shape outlines. */ private static final int PAINT_STYLE_STROKE = 1; /** * Endless mode. */ /*package*/static final int MODE_ENDLESS = 0; /** * Level mode. */ private static final int MODE_LEVELS = 1; /** * Number of cells on the board horizontally. */ private int mCellsWide; /** * Number of cells on the board vertically. */ private int mCellsTall; /** * Number of cells horizontally between the columns. */ private int mCellColumnSpacing; /** * Number of cells vertically between the rows. */ private int mCellRowSpacing; /** * Width (in pixels) of a single cell. */ private float mCellWidth; /** * Height (in pixels) of a single cell. */ private float mCellHeight; /** * Height (in pixels) of the screen. 
*/ private int mScreenHeight; /** * Width (in pixels) of the screen. */ private int mScreenWidth; private int mGameWidth; private int mGameHeight; /** * Whether or not the screen is currently in landscape mode. */ private boolean mIsLandscape; /** * Number of icon rows on the launcher. */ private int mIconRows; /** * Number of icon columns on the launcher. */ private int mIconCols; /** * 2-dimensional array of the board's cells. * * zero == blank * non-zero == block and represents its color */ private int[][] mBoard; /** * Color of the background. */ private int mGameBackground; /** * Top padding (in pixels) of the grid from the screen top. */ private int mDotGridPaddingTop; /** * Left padding (in pixels) of the grid from the screen left. */ private int mDotGridPaddingLeft; /** * Bottom padding (in pixels) of the grid from the screen bottom. */ private int mDotGridPaddingBottom; /** * Right padding (in pixels) of the grid from the screen right. */ private int mDotGridPaddingRight; /** * Path to the user background image (if any). */ private String mBackgroundPath; /** * The user background image (if any). */ private Bitmap mBackground; /** * The size (in pixels) of a single cell. */ private final RectF mCellSize; /** * The locations of widgets on the launcher. */ private List<Rect> mWidgetLocations; /** * Paint to draw the background color. */ private final Paint mBackgroundPaint; /** * Paint to draw the blocks. */ private final Paint mBlockForeground; /** * Paint to draw the balls. */ private final Paint mBallForeground; /** * Balls. Enough said. */ private Ball[] mBalls; /** * Colors for blocks. */ private final int[] mBlockColors; /** * Number of blocks remaining in the game. */ private int mBlocksRemaining; /** * Total blocks in a level. */ private int mBlocksTotal; /** * Gameplay mode. */ private int mMode; /** * Percentage at which to regenerate blocks. */ private float mRegenPercent; /** * Create a new game. 
*/ public Game() { if (Wallpaper.LOG_VERBOSE) { Log.v(Game.TAG, "> Game()"); } //Create Paints this.mBackgroundPaint = new Paint(); this.mBlockForeground = new Paint(Paint.ANTI_ALIAS_FLAG); this.mBallForeground = new Paint(Paint.ANTI_ALIAS_FLAG); this.mCellSize = new RectF(0, 0, 0, 0); this.mBlockColors = new int[3]; //Load all preferences or their defaults Wallpaper.PREFERENCES.registerOnSharedPreferenceChangeListener(this); this.onSharedPreferenceChanged(Wallpaper.PREFERENCES, null); if (Wallpaper.LOG_VERBOSE) { Log.v(Game.TAG, "< Game()"); } } /** * Handle the changing of a preference. */ public void onSharedPreferenceChanged(final SharedPreferences preferences, final String key) { if (Wallpaper.LOG_VERBOSE) { Log.v(Game.TAG, "> onSharedPreferenceChanged()"); } final boolean all = (key == null); final Resources resources = Wallpaper.CONTEXT.getResources(); boolean hasLayoutChanged = false; boolean hasGraphicsChanged = false; boolean hasBallsChanged = false; // GENERAL // int balls = 0; final String ghostCount = resources.getString(R.string.settings_game_ballcount_key); if (all || key.equals(ghostCount)) { balls = preferences.getInt(ghostCount, resources.getInteger(R.integer.game_ballcount_default)); hasBallsChanged = true; if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Ball Count: " + balls); } this.mBalls = new Ball[balls]; for (int i = 0; i < balls; i++) { this.mBalls[i] = new Ball(); } } final String gameMode = resources.getString(R.string.settings_game_mode_key); if (all || key.equals(gameMode)) { this.mMode = preferences.getInt(gameMode, resources.getInteger(R.integer.game_mode_default)); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Game Mode: " + this.mMode); } } final String endlessRegen = resources.getString(R.string.settings_game_endlessregen_key); if (all || key.equals(endlessRegen)) { final int regen = preferences.getInt(endlessRegen, resources.getInteger(R.integer.game_endlessregen_default)); this.mRegenPercent = regen / 100.0f; if (Wallpaper.LOG_DEBUG) 
{ Log.d(Game.TAG, "Endless Regen: " + regen + "%"); } } // COLORS // final String gameBackground = resources.getString(R.string.settings_color_background_key); if (all || key.equals(gameBackground)) { this.mGameBackground = preferences.getInt(gameBackground, resources.getInteger(R.integer.color_background_default)); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Background: #" + Integer.toHexString(this.mGameBackground)); } } final String backgroundImage = resources.getString(R.string.settings_color_bgimage_key); if (all || key.equals(backgroundImage)) { this.mBackgroundPath = preferences.getString(backgroundImage, null); if (this.mBackgroundPath != null) { if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Background Image: " + this.mBackgroundPath); } //Trigger performResize hasGraphicsChanged = true; } else { this.mBackground = null; } } final String backgroundOpacity = resources.getString(R.string.settings_color_bgopacity_key); if (all || key.equals(backgroundOpacity)) { this.mBackgroundPaint.setAlpha(preferences.getInt(backgroundOpacity, resources.getInteger(R.integer.color_bgopacity_default))); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Background Image Opacity: " + this.mBackgroundPaint.getAlpha()); } } final String ballColor = resources.getString(R.string.settings_color_ball_key); if (all || key.equals(ballColor)) { this.mBallForeground.setColor(preferences.getInt(ballColor, resources.getInteger(R.integer.color_ball_default))); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Ball Color: #" + Integer.toHexString(this.mBallForeground.getColor())); } } final String block1Color = resources.getString(R.string.settings_color_block1_key); if (all || key.equals(block1Color)) { this.mBlockColors[0] = preferences.getInt(block1Color, resources.getInteger(R.integer.color_block1_default)); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Block 1 Color: #" + Integer.toHexString(this.mBlockColors[0])); } } final String block2Color = resources.getString(R.string.settings_color_block2_key); 
if (all || key.equals(block2Color)) { this.mBlockColors[1] = preferences.getInt(block2Color, resources.getInteger(R.integer.color_block2_default)); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Block 2 Color: #" + Integer.toHexString(this.mBlockColors[1])); } } final String block3Color = resources.getString(R.string.settings_color_block3_key); if (all || key.equals(block3Color)) { this.mBlockColors[2] = preferences.getInt(block3Color, resources.getInteger(R.integer.color_block3_default)); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Block 3 Color: #" + Integer.toHexString(this.mBlockColors[2])); } } final String blockStyle = resources.getString(R.string.settings_color_blockstyle_key); if (all || key.equals(blockStyle)) { final int blockStyleValue = preferences.getInt(blockStyle, resources.getInteger(R.integer.color_blockstyle_default)); switch (blockStyleValue) { case Game.PAINT_STYLE_FILL: this.mBlockForeground.setStyle(Paint.Style.FILL); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Block Style: FILL"); } break; case Game.PAINT_STYLE_STROKE: this.mBlockForeground.setStyle(Paint.Style.STROKE); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Block Style: STROKE"); } break; default: Log.e(Game.TAG, "Invalid block style value " + blockStyleValue); } } final String ballStyle = resources.getString(R.string.settings_color_ballstyle_key); if (all || key.equals(ballStyle)) { final int ballStyleValue = preferences.getInt(ballStyle, resources.getInteger(R.integer.color_ballstyle_default)); switch (ballStyleValue) { case Game.PAINT_STYLE_FILL: this.mBallForeground.setStyle(Paint.Style.FILL); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Ball Style: FILL"); } break; case Game.PAINT_STYLE_STROKE: this.mBallForeground.setStyle(Paint.Style.STROKE); if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Ball Style: STROKE"); } break; default: Log.e(Game.TAG, "Invalid ball style value " + ballStyleValue); } } // GRID // final String dotGridPaddingLeft = 
resources.getString(R.string.settings_display_padding_left_key); if (all || key.equals(dotGridPaddingLeft)) { this.mDotGridPaddingLeft = preferences.getInt(dotGridPaddingLeft, resources.getInteger(R.integer.display_padding_left_default)); hasGraphicsChanged = true; if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Dot Grid Padding Left: " + this.mDotGridPaddingLeft); } } final String dotGridPaddingRight = resources.getString(R.string.settings_display_padding_right_key); if (all || key.equals(dotGridPaddingRight)) { this.mDotGridPaddingRight = preferences.getInt(dotGridPaddingRight, resources.getInteger(R.integer.display_padding_right_default)); hasGraphicsChanged = true; if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Dot Grid Padding Right: " + this.mDotGridPaddingRight); } } final String dotGridPaddingTop = resources.getString(R.string.settings_display_padding_top_key); if (all || key.equals(dotGridPaddingTop)) { this.mDotGridPaddingTop = preferences.getInt(dotGridPaddingTop, resources.getInteger(R.integer.display_padding_top_default)); hasGraphicsChanged = true; if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Dot Grid Padding Top: " + this.mDotGridPaddingTop); } } final String dotGridPaddingBottom = resources.getString(R.string.settings_display_padding_bottom_key); if (all || key.equals(dotGridPaddingBottom)) { this.mDotGridPaddingBottom = preferences.getInt(dotGridPaddingBottom, resources.getInteger(R.integer.display_padding_bottom_default)); hasGraphicsChanged = true; if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Dot Grid Padding Bottom: " + this.mDotGridPaddingBottom); } } final String widgetLocations = resources.getString(R.string.settings_display_widgetlocations_key); if (all || key.equals(widgetLocations)) { this.mWidgetLocations = WidgetLocationsPreference.convertStringToWidgetList(preferences.getString(widgetLocations, resources.getString(R.string.display_widgetlocations_default))); hasLayoutChanged = true; if (Wallpaper.LOG_DEBUG) { Log.d(Game.TAG, "Widget Locations: " 
+ (this.mWidgetLocations.size() / 4)); // continuation: logging widget-location count parsed by the preceding handler
    }
}

// CELLS //

// Number of launcher icon rows; the board grid is derived from this.
final String iconRows = resources.getString(R.string.settings_display_iconrows_key);
if (all || key.equals(iconRows)) {
    this.mIconRows = preferences.getInt(iconRows, resources.getInteger(R.integer.display_iconrows_default));
    hasLayoutChanged = true;
    if (Wallpaper.LOG_DEBUG) {
        Log.d(Game.TAG, "Icon Rows: " + this.mIconRows);
    }
}

// Number of launcher icon columns.
final String iconCols = resources.getString(R.string.settings_display_iconcols_key);
if (all || key.equals(iconCols)) {
    this.mIconCols = preferences.getInt(iconCols, resources.getInteger(R.integer.display_iconcols_default));
    hasLayoutChanged = true;
    if (Wallpaper.LOG_DEBUG) {
        Log.d(Game.TAG, "Icon Cols: " + this.mIconCols);
    }
}

// Cells of spacing between icon rows.
final String cellSpacingRow = resources.getString(R.string.settings_display_rowspacing_key);
if (all || key.equals(cellSpacingRow)) {
    this.mCellRowSpacing = preferences.getInt(cellSpacingRow, resources.getInteger(R.integer.display_rowspacing_default));
    hasLayoutChanged = true;
    if (Wallpaper.LOG_DEBUG) {
        Log.d(Game.TAG, "Cell Row Spacing: " + this.mCellRowSpacing);
    }
}

// Cells of spacing between icon columns.
final String cellSpacingCol = resources.getString(R.string.settings_display_colspacing_key);
if (all || key.equals(cellSpacingCol)) {
    this.mCellColumnSpacing = preferences.getInt(cellSpacingCol, resources.getInteger(R.integer.display_colspacing_default));
    hasLayoutChanged = true;
    if (Wallpaper.LOG_DEBUG) {
        Log.d(Game.TAG, "Cell Column Spacing: " + this.mCellColumnSpacing);
    }
}

if (hasLayoutChanged) {
    // Grid size: each icon occupies (spacing + border) cells, plus one trailing border.
    this.mCellsWide = (this.mIconCols * (this.mCellColumnSpacing + Game.CELLS_BETWEEN_COLUMN)) + Game.CELLS_BETWEEN_COLUMN;
    this.mCellsTall = (this.mIconRows * (this.mCellRowSpacing + Game.CELLS_BETWEEN_ROW)) + Game.CELLS_BETWEEN_ROW;
    if (Wallpaper.LOG_DEBUG) {
        Log.d(Game.TAG, "Cells Wide: " + this.mCellsWide);
        Log.d(Game.TAG, "Cells Tall: " + this.mCellsTall);
    }

    //Create playing board (indexed [row][column], i.e. [y][x])
    this.mBoard = new int[this.mCellsTall][this.mCellsWide];
}

if (hasLayoutChanged || hasGraphicsChanged || hasBallsChanged) {
    if ((this.mScreenWidth > 0) && (this.mScreenHeight > 0)) {
        //Resize everything to fit
        this.performResize(this.mScreenWidth, this.mScreenHeight);
    }
    this.newLevel();
}

if (Wallpaper.LOG_VERBOSE) {
    Log.v(Game.TAG, "< onSharedPreferenceChanged()");
}
}

/**
 * Get the width of a cell.
 *
 * @return Cell width.
 */
public float getCellWidth() {
    return this.mCellWidth;
}

/**
 * Get the height of a cell.
 *
 * @return Cell height.
 */
public float getCellHeight() {
    return this.mCellHeight;
}

/**
 * Determine whether or not a position is a valid cell, i.e. inside the board
 * bounds and not marked {@code CELL_INVALID} (blocked by an icon/widget).
 *
 * @param x X coordinate.
 * @param y Y coordinate.
 * @return Boolean.
 */
private boolean isCell(final int x, final int y) {
    return (x >= 0) && (x < this.mCellsWide)
        && (y >= 0) && (y < this.mCellsTall)
        && (this.mBoard[y][x] != Game.CELL_INVALID);
}

/**
 * Determine whether or not a position contains a block (a valid cell that is
 * not blank).
 *
 * @param x X coordinate.
 * @param y Y coordinate.
 * @return Boolean.
 */
private boolean isBlock(final int x, final int y) {
    return this.isCell(x, y) && (this.mBoard[y][x] != Game.CELL_BLANK);
}

/**
 * Manipulate a ball direction based on a user touch: the ball nearest to the
 * touch point is re-aimed toward it.
 *
 * NOTE(review): if mBalls is empty, closestBall stays null and the final call
 * throws NullPointerException — confirm mBalls is always non-empty here.
 *
 * @param x X coordinate of touch.
 * @param y Y coordinate of touch.
 */
public void setTouch(final float x, final float y) {
    double closestDistance = Float.MAX_VALUE;
    Ball closestBall = null;
    for (final Ball ball : this.mBalls) {
        // Euclidean distance from touch point to ball center.
        final double ballDistance = Math.sqrt(Math.pow(x - ball.getLocationX(), 2) + Math.pow(y - ball.getLocationY(), 2));
        if (ballDistance < closestDistance) {
            closestBall = ball;
            closestDistance = ballDistance;
        }
    }
    closestBall.setVector(x - closestBall.getLocationX(), y - closestBall.getLocationY());
}

/**
 * Reset the game state to that of first initialization.
 */
public void newLevel() {
    if (Wallpaper.LOG_VERBOSE) {
        Log.v(Game.TAG, "> newGame()");
    }

    //Initialize board: cells under icon borders become colored blocks, cells
    //inside an icon's footprint are invalid (never drawn, never collidable).
    final int iconCellsWidth = this.mCellColumnSpacing + Game.CELLS_BETWEEN_COLUMN;
    final int iconCellsHeight = this.mCellRowSpacing + Game.CELLS_BETWEEN_ROW;
    final int colors = this.mBlockColors.length;
    for (int y = 0; y < this.mCellsTall; y++) {
        for (int x = 0; x < this.mCellsWide; x++) {
            // Position within the repeating icon tile.
            final int dx = x % iconCellsWidth;
            final int dy = y % iconCellsHeight;
            if ((dx < Game.CELLS_BETWEEN_COLUMN) || (dy < Game.CELLS_BETWEEN_ROW)) {
                // Border cell: assign a color cycling by (x + y).
                this.mBoard[y][x] = this.mBlockColors[(x + y) % colors];
            } else {
                this.mBoard[y][x] = Game.CELL_INVALID;
            }
        }
    }

    //Remove board under widgets (widget rects are in icon coordinates)
    for (final Rect widget : this.mWidgetLocations) {
        if (Wallpaper.LOG_DEBUG) {
            Log.d(Game.TAG, "Widget: L=" + widget.left + ", T=" + widget.top + ", R=" + widget.right + ", B=" + widget.bottom);
        }
        // Convert icon-space rect to cell-space, inclusive bounds.
        final int left = (widget.left * iconCellsWidth) + Game.CELLS_BETWEEN_COLUMN;
        final int top = (widget.top * iconCellsHeight) + Game.CELLS_BETWEEN_ROW;
        final int bottom = (widget.bottom * iconCellsHeight) + Game.CELLS_BETWEEN_ROW + this.mCellRowSpacing - 1;
        final int right = (widget.right * iconCellsWidth) + Game.CELLS_BETWEEN_COLUMN + this.mCellColumnSpacing - 1;
        for (int y = top; y <= bottom; y++) {
            for (int x = left; x <= right; x++) {
                this.mBoard[y][x] = Game.CELL_INVALID;
            }
        }
    }

    //Count blocks remaining on the fresh board
    this.mBlocksRemaining = 0;
    for (int y = 0; y < this.mCellsTall; y++) {
        for (int x = 0; x < this.mCellsWide; x++) {
            if ((this.mBoard[y][x] != Game.CELL_BLANK) && (this.mBoard[y][x] != Game.CELL_INVALID)) {
                this.mBlocksRemaining += 1;
            }
        }
    }
    this.mBlocksTotal = this.mBlocksRemaining;

    if (Wallpaper.LOG_VERBOSE) {
        Log.v(Game.TAG, "< newGame()");
    }
}

/**
 * Convert an icon position to on-screen coordinates
 *
 * @param x Icon column
 * @param y Icon row
 * @return Screen coordinates.
 */
private PointF getBallLocationAtIcon(final int x, final int y) {
    // Center of the icon's cell footprint, scaled to pixels.
    return new PointF(
        ((this.mCellColumnSpacing * x) + (Game.CELLS_BETWEEN_COLUMN * (x + 1)) + (this.mCellColumnSpacing / 2.0f)) * this.mCellWidth,
        ((this.mCellRowSpacing * y) + (Game.CELLS_BETWEEN_ROW * (y + 1)) + (this.mCellRowSpacing / 2.0f)) * this.mCellHeight
    );
}

/**
 * Iterate all entities one step.
 */
public void tick() {
    for (final Ball ball : this.mBalls) {
        ball.tick(this);

        //Test screen edges: reflect the relevant vector component and add a
        //little random jitter to the other so balls don't settle into loops.
        if (ball.getLocationX() <= 0) {
            ball.setVector(Math.abs(ball.getVectorX()), ball.getVectorY() + Game.RANDOM.nextFloat());
        } else if (ball.getLocationX() >= this.mGameWidth) {
            ball.setVector(-Math.abs(ball.getVectorX()), ball.getVectorY() + Game.RANDOM.nextFloat());
        }
        if (ball.getLocationY() <= 0) {
            ball.setVector(ball.getVectorX() + Game.RANDOM.nextFloat(), Math.abs(ball.getVectorY()));
        } else if (ball.getLocationY() >= this.mGameHeight) {
            ball.setVector(ball.getVectorX() + Game.RANDOM.nextFloat(), -Math.abs(ball.getVectorY()));
        }

        //Test blocks at three sample points around the ball's leading edges
        final int ballCheck1X = (int)((ball.getLocationX() - Ball.RADIUS) / this.mCellWidth);
        final int ballCheck1Y = (int)((ball.getLocationY() + (Math.signum(ball.getVectorY()) * Ball.RADIUS)) / this.mCellHeight);
        final int ballCheck2X = (int)((ball.getLocationX() + Ball.RADIUS) / this.mCellWidth);
        final int ballCheck2Y = ballCheck1Y;
        final int ballCheck3X = (int)((ball.getLocationX() + (Math.signum(ball.getVectorX()) * Ball.RADIUS)) / this.mCellWidth);
        final int ballCheck3Y = (int)((ball.getLocationY() + (-Math.signum(ball.getVectorY()) * Ball.RADIUS)) / this.mCellHeight);
        this.checkCollision(ball, ballCheck1X, ballCheck1Y);
        this.checkCollision(ball, ballCheck2X, ballCheck2Y);
        this.checkCollision(ball, ballCheck3X, ballCheck3Y);

        //Check game mode
        switch (this.mMode) {
            case Game.MODE_ENDLESS:
                // NOTE(review): this debug log is not guarded by Wallpaper.LOG_DEBUG
                // unlike the others — confirm whether it should be.
                Log.d(Game.TAG, "Remaining: " + this.mBlocksRemaining
                        + ", Total: " + this.mBlocksTotal
                        + ", Factor: " + this.mRegenPercent
                        + ", Regen: " + (this.mBlocksTotal *
this.mRegenPercent));
                //Regenerate one block when the board has dropped below the
                //configured fraction of its original block count.
                if (this.mBlocksRemaining < (this.mBlocksTotal * this.mRegenPercent)) {
                    Log.d(Game.TAG, "Regen");
                    // Rejection-sample a random blank, valid cell.
                    while (true) {
                        final int x = Game.RANDOM.nextInt(this.mCellsWide);
                        final int y = Game.RANDOM.nextInt(this.mCellsTall);
                        if (this.isCell(x, y) && (this.mBoard[y][x] == Game.CELL_BLANK)) {
                            this.mBoard[y][x] = this.mBlockColors[(x + y) % this.mBlockColors.length];
                            break;
                        }
                    }
                    this.mBlocksRemaining += 1;
                }
                break;

            case Game.MODE_LEVELS:
                if (this.mBlocksRemaining == 0) {
                    this.newLevel();
                }
                break;

            default:
                Log.e(Game.TAG, "Invalid game mode value " + this.mMode);
                break;
        }
    }

    // NOTE(review): this duplicates the MODE_LEVELS restart above (and runs
    // regardless of mode) — confirm it is intentional as a safety net.
    if (this.mBlocksRemaining <= 0) {
        this.newLevel();
    }
}

/**
 * Determine if a ball has collided with a block in the specified coordinates.
 * On collision the ball's velocity is reflected about the collision normal,
 * the block is cleared, and the remaining-block count is decremented.
 *
 * @param ball Ball instance.
 * @param blockX X coordinate of potential block.
 * @param blockY Y coordinate of potential block.
 * @return Boolean indicating collision.
 */
private boolean checkCollision(final Ball ball, final int blockX, final int blockY) {
    if (Wallpaper.LOG_VERBOSE) {
        Log.d(Game.TAG, "Checking block (" + blockX + "," + blockY + ") against ball at (" + ball.getLocationX() + "," + ball.getLocationY() + ")");
    }

    if (!this.isBlock(blockX, blockY)) {
        return false;
    }

    if (Wallpaper.LOG_VERBOSE) {
        Log.d(Game.TAG, "-- Is Collision");
        Log.d(Game.TAG, "-- Current Vector: (" + ball.getVectorX() + "," + ball.getVectorY() + ")");
    }

    final float cellWidthOverTwo = this.mCellWidth / 2;
    final float cellHeightOverTwo = this.mCellHeight / 2;
    final float blockCenterX = (blockX * this.mCellWidth) + cellWidthOverTwo;
    final float blockCenterY = (blockY * this.mCellHeight) + cellHeightOverTwo;

    //Calculate collision unit vector (ball center -> block center)
    float collisionUnitVectorX = blockCenterX - ball.getLocationX();
    float collisionUnitVectorY = blockCenterY - ball.getLocationY();
    final float collisionVectorLength = (float)Math.sqrt(Math.pow(collisionUnitVectorX, 2) + Math.pow(collisionUnitVectorY, 2));
    collisionUnitVectorX /= collisionVectorLength;
    collisionUnitVectorY /= collisionVectorLength;

    //Calculate ball velocity unit vector
    final float ballVectorLength = (float)Math.sqrt(Math.pow(ball.getVectorX(), 2) + Math.pow(ball.getVectorY(), 2));
    final float ballUnitVectorX = ball.getVectorX() / ballVectorLength;
    final float ballUnitVectorY = ball.getVectorY() / ballVectorLength;

    // Reflect velocity about the collision normal: v' = v - 2(v.n)n (scaled
    // back by the original speed before normalizing below).
    final float dotProduct = (collisionUnitVectorX * ballUnitVectorX) + (collisionUnitVectorY * ballUnitVectorY);
    final float vectorDeltaX = -2 * collisionUnitVectorX * dotProduct * ballVectorLength;
    final float vectorDeltaY = -2 * collisionUnitVectorY * dotProduct * ballVectorLength;
    float newVectorX = ball.getVectorX() + vectorDeltaX;
    float newVectorY = ball.getVectorY() + vectorDeltaY;
    // NOTE(review): the new vector is normalized to unit length, discarding the
    // ball's previous speed — confirm Ball.setVector/tick expect a unit vector.
    final float newVectorLength = (float)Math.sqrt(Math.pow(newVectorX, 2) + Math.pow(newVectorY, 2));
    newVectorX /= newVectorLength;
    newVectorY /= newVectorLength;
    ball.setVector(newVectorX, newVectorY);

    if (Wallpaper.LOG_VERBOSE) {
        Log.d(Game.TAG, "-- New Vector: (" + ball.getVectorX() + "," + ball.getVectorY() + ")");
    }

    //Consume the block
    this.mBoard[blockY][blockX] = Game.CELL_BLANK;
    this.mBlocksRemaining -= 1;
    return true;
}

/**
 * Resize the game board and all entities according to a new width and height.
 *
 * @param screenWidth New width.
 * @param screenHeight New height.
 */
public void performResize(int screenWidth, int screenHeight) {
    if (Wallpaper.LOG_VERBOSE) {
        Log.v(Game.TAG, "> performResize(width = " + screenWidth + ", height = " + screenHeight + ")");
    }

    //Background image: decode, scale preserving aspect ratio, and center-crop
    //to exactly the screen size.
    if (this.mBackgroundPath != null) {
        try {
            final Bitmap temp = BitmapFactory.decodeStream(Wallpaper.CONTEXT.getContentResolver().openInputStream(Uri.parse(this.mBackgroundPath)));
            final float pictureAR = temp.getWidth() / (temp.getHeight() * 1.0f);
            final float screenAR = screenWidth / (screenHeight * 1.0f);
            int newWidth;
            int newHeight;
            int x;
            int y;
            if (pictureAR > screenAR) {
                //wider than tall related to the screen AR
                newHeight = screenHeight;
                newWidth = (int)(temp.getWidth() * (screenHeight / (temp.getHeight() * 1.0f)));
                x = (newWidth - screenWidth) / 2;
                y = 0;
            } else {
                //taller than wide related to the screen AR
                newWidth = screenWidth;
                newHeight = (int)(temp.getHeight() * (screenWidth / (temp.getWidth() * 1.0f)));
                x = 0;
                y = (newHeight - screenHeight) / 2;
            }
            final Bitmap scaled = Bitmap.createScaledBitmap(temp, newWidth, newHeight, false);
            this.mBackground = Bitmap.createBitmap(scaled, x, y, screenWidth, screenHeight);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            Log.w(Game.TAG, "Unable to load background bitmap.");
            this.mBackground = null;
        }
    }

    //Normalize to portrait dimensions; drawing rotates for landscape instead.
    if (screenWidth > screenHeight) {
        this.mIsLandscape = true;
        final int temp = screenHeight;
        screenHeight = screenWidth;
        screenWidth = temp;
    } else {
        this.mIsLandscape = false;
    }

    this.mScreenWidth = screenWidth;
    this.mScreenHeight = screenHeight;

    //Game area excludes the launcher chrome padding; padding roles swap with
    //orientation because the canvas is rotated in draw().
    if (this.mIsLandscape) {
        this.mGameWidth = (screenWidth - this.mDotGridPaddingTop);
        this.mCellWidth = this.mGameWidth / (this.mCellsWide * 1.0f);
        this.mGameHeight = (screenHeight - (this.mDotGridPaddingBottom + this.mDotGridPaddingLeft + this.mDotGridPaddingRight));
        this.mCellHeight = this.mGameHeight / (this.mCellsTall * 1.0f);
    } else {
        this.mGameWidth = (screenWidth - (this.mDotGridPaddingLeft + this.mDotGridPaddingRight));
        this.mCellWidth = this.mGameWidth / (this.mCellsWide * 1.0f);
        this.mGameHeight = (screenHeight - (this.mDotGridPaddingTop + this.mDotGridPaddingBottom));
        this.mCellHeight = this.mGameHeight / (this.mCellsTall * 1.0f);
    }

    //Update cell size
    this.mCellSize.right = this.mCellWidth;
    this.mCellSize.bottom = this.mCellHeight;

    //Set ball radius as a fraction of the smaller cell dimension
    Ball.RADIUS = ((this.mCellWidth < this.mCellHeight) ? this.mCellWidth : this.mCellHeight) * Ball.SIZE_PERCENTAGE / 2;

    //Position balls in the four corner icons, aimed in distinct directions
    //TODO: this should be in newGame();
    final PointF ball0Location = this.getBallLocationAtIcon(0, 0);
    this.mBalls[0].setLocation(ball0Location.x, ball0Location.y);
    this.mBalls[0].setVector(0, -1);
    if (this.mBalls.length > 1) {
        final PointF ball1Location = this.getBallLocationAtIcon(this.mIconCols - 1, this.mIconRows - 1);
        this.mBalls[1].setLocation(ball1Location.x, ball1Location.y);
        this.mBalls[1].setVector(0, 1);
    }
    if (this.mBalls.length > 2) {
        final PointF ball2Location = this.getBallLocationAtIcon(this.mIconCols - 1, 0);
        this.mBalls[2].setLocation(ball2Location.x, ball2Location.y);
        this.mBalls[2].setVector(1, 0);
    }
    if (this.mBalls.length > 3) {
        final PointF ball3Location = this.getBallLocationAtIcon(0, this.mIconRows - 1);
        this.mBalls[3].setLocation(ball3Location.x, ball3Location.y);
        this.mBalls[3].setVector(-1, 0);
    }

    if (Wallpaper.LOG_DEBUG) {
        Log.d(Game.TAG, "Is Landscape: " + this.mIsLandscape);
        Log.d(Game.TAG, "Screen Width: " + screenWidth);
        Log.d(Game.TAG, "Screen Height: " + screenHeight);
        Log.d(Game.TAG, "Cell Width: " + this.mCellWidth);
        Log.d(Game.TAG, "Cell Height: " + this.mCellHeight);
        Log.d(Game.TAG, "Ball Radius: " + Ball.RADIUS);
    }

    if (Wallpaper.LOG_VERBOSE) {
        Log.v(Game.TAG, "< performResize()");
    }
}

/**
 * Render the board and all entities on a Canvas.
 *
 * @param c Canvas to draw on.
 */
public void draw(final Canvas c) {
    c.save();

    //Clear the screen in case of transparency in the image
    c.drawColor(this.mGameBackground);
    if (this.mBackground != null) {
        //Bitmap should already be sized to the screen so draw it at the origin
        c.drawBitmap(this.mBackground, 0, 0, this.mBackgroundPaint);
    }

    if (this.mIsLandscape) {
        //Perform counter-clockwise rotation
        // NOTE(review): pivot uses mScreenWidth for both coordinates — this is
        // a common trick when width/height were swapped for landscape, but
        // confirm it centers correctly on all screens.
        c.rotate(-90, this.mScreenWidth / 2.0f, this.mScreenWidth / 2.0f);
        c.translate(0, this.mDotGridPaddingLeft);
    } else {
        c.translate(this.mDotGridPaddingLeft, this.mDotGridPaddingTop);
    }

    //Draw dots and walls
    this.drawGameBoard(c);

    if (this.mIsLandscape) {
        //Perform clockwise rotation back to normal
        c.rotate(90, this.mScreenWidth / 2.0f, this.mScreenWidth / 2.0f);
    }

    c.restore();
}

/**
 * Render the dots and walls.
 *
 * @param c Canvas to draw on.
 */
private void drawGameBoard(final Canvas c) {
    //draw blocks (cell value doubles as its ARGB color)
    for (int y = 0; y < this.mCellsTall; y++) {
        for (int x = 0; x < this.mCellsWide; x++) {
            final int cell = this.mBoard[y][x];
            if ((cell != Game.CELL_BLANK) && (cell != Game.CELL_INVALID)) {
                this.mBlockForeground.setColor(cell);
                final float left = x * this.mCellWidth;
                final float top = y * this.mCellHeight;
                final float right = left + this.mCellWidth;
                final float bottom = top + this.mCellHeight;
                c.drawRect(left, top, right, bottom, this.mBlockForeground);
            }
        }
    }

    //draw balls (as squares of side 2 * RADIUS)
    for (final Ball ball : this.mBalls) {
        c.drawRect(ball.getLocationX() - Ball.RADIUS, ball.getLocationY() - Ball.RADIUS, ball.getLocationX() + Ball.RADIUS, ball.getLocationY() + Ball.RADIUS, this.mBallForeground);
    }
}
}
package com.mac.tarchan.desktop;

import java.awt.EventQueue;
import java.awt.Window;

import javax.swing.UIManager;

/**
 * Static helpers for desktop (Swing/AWT) applications: installing the system
 * look and feel, enabling OS proxy settings, and Event Dispatch Thread (EDT)
 * utilities.
 *
 * @author tarchan
 */
public class DesktopSupport
{
	/**
	 * Installs the platform's native Look&amp;Feel.
	 *
	 * @throws RuntimeException if the system look and feel cannot be installed
	 *                          (wraps the original checked exception as its cause)
	 * @see UIManager#setLookAndFeel(String)
	 */
	public static void useSystemLookAndFeel()
	{
		try
		{
			UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
		}
		catch (Exception x)
		{
			// Preserve the original exception as the cause for diagnosis.
			throw new RuntimeException(x);
		}
	}

	/**
	 * Enables use of the operating system's proxy settings for network
	 * connections by setting the {@code java.net.useSystemProxies} system
	 * property. Must be called before the first network connection is made
	 * for the setting to take effect.
	 */
	public static void useSystemProxies()
	{
		System.setProperty("java.net.useSystemProxies", "true");
	}

	/**
	 * Prints whether the current thread is the AWT Event Dispatch Thread:
	 * to stdout when on the EDT, to stderr otherwise.
	 */
	public static void printEventDispatchThread()
	{
		boolean isEventDispatchThread = EventQueue.isDispatchThread();
		if (isEventDispatchThread)
		{
			System.out.println("isEventDispatchThread: " + isEventDispatchThread + ", " + Thread.currentThread());
		}
		else
		{
			System.err.println("isEventDispatchThread: " + isEventDispatchThread + ", " + Thread.currentThread());
		}
	}

	/**
	 * Runs the given task asynchronously on the Event Dispatch Thread.
	 *
	 * @param runnable task to execute on the EDT
	 * @see EventQueue#invokeLater(Runnable)
	 */
	public static void invokeLater(Runnable runnable)
	{
		EventQueue.invokeLater(runnable);
	}

	/**
	 * Makes the given window visible on the Event Dispatch Thread.
	 *
	 * @param window window to show
	 * @see EventQueue#invokeLater(Runnable)
	 */
	public static void show(final Window window)
	{
		invokeLater(new Runnable()
		{
			public void run()
			{
				window.setVisible(true);
			}
		});
	}

	/**
	 * Makes the given window visible on the Event Dispatch Thread.
	 *
	 * @param window window to show
	 * @see EventQueue#invokeLater(Runnable)
	 * @deprecated use {@link #show(Window)} instead
	 */
	@Deprecated
	public static void windowVisible(final Window window)
	{
		show(window);
	}
}
package net.sf.jaer.eventprocessing.filter; import java.awt.Font; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.ArrayList; import java.util.Iterator; import java.util.Observable; import java.util.Observer; import com.jogamp.opengl.GL; import com.jogamp.opengl.GL2; import com.jogamp.opengl.GLAutoDrawable; import net.sf.jaer.Description; import net.sf.jaer.chip.AEChip; import net.sf.jaer.event.BasicEvent; import net.sf.jaer.event.EventPacket; import net.sf.jaer.event.PolarityEvent; import net.sf.jaer.eventprocessing.EventFilter2D; import net.sf.jaer.eventprocessing.FilterChain; import net.sf.jaer.graphics.AEViewer; import net.sf.jaer.graphics.FrameAnnotater; import net.sf.jaer.hardwareinterface.HardwareInterfaceException; import net.sf.jaer.util.filter.HighpassFilter; import ch.unizh.ini.jaer.hardware.pantilt.PanTilt; import com.jogamp.opengl.util.awt.TextRenderer; import eu.seebetter.ini.chips.DavisChip; import eu.seebetter.ini.chips.davis.imu.IMUSample; import java.util.concurrent.ArrayBlockingQueue; import net.sf.jaer.DevelopmentStatus; import net.sf.jaer.event.ApsDvsEvent; import net.sf.jaer.event.ApsDvsEventPacket; import net.sf.jaer.event.OutputEventIterator; import net.sf.jaer.eventio.AEFileInputStream; import net.sf.jaer.eventio.AEInputStream; import static net.sf.jaer.eventprocessing.EventFilter.log; import net.sf.jaer.graphics.AbstractAEPlayer; import net.sf.jaer.graphics.ChipRendererDisplayMethodRGBA; /** * This "vestibular-ocular Steadicam" tries to compensate global image motion by * using vestibular and global motion metrics to redirect output events and * (optionally) also a mechanical pan-tilt unit, shifting them according to * motion of input. 
Three methods can be used 1) the global translational flow
 * computed from AbstractDirectionSelectiveFilter, or 2) the optical gyro
 * outputs from OpticalGyro, or 3) the integrated IMU on the camera if
 * available.
 *
 * @author tobi
 */
@Description("Compenstates global scene translation and rotation to stabilize scene like a SteadiCam, using built-in IMU or other methods of estimation of camera rotation.")
@DevelopmentStatus(DevelopmentStatus.Status.Experimental)
public class Steadicam extends EventFilter2D implements FrameAnnotater, Observer, PropertyChangeListener {

    /**
     * Classes that compute camera rotationRad estimate based on scene shift and
     * maybe rotationRad around the center of the scene.
     */
    public enum CameraRotationEstimator {
        VORSensor
    };
    private CameraRotationEstimator cameraRotationEstimator = null; //PositionComputer.valueOf(get("positionComputer", "OpticalGyro"));
//    private float gainTranslation = getFloat("gainTranslation", 1f);
//    private float gainVelocity = getFloat("gainVelocity", 1);
    private float gainPanTiltServos = getFloat("gainPanTiltServos", 1);
    private boolean feedforwardEnabled = getBoolean("feedforwardEnabled", false);
    private boolean panTiltEnabled = getBoolean("panTiltEnabled", false);
    private boolean electronicStabilizationEnabled = getBoolean("electronicStabilizationEnabled", true);
//    private boolean vestibularStabilizationEnabled = getBoolean("vestibularStabilizationEnabled", false);
    // Current stabilizing translation in pixels.
    private Point2D.Float translation = new Point2D.Float();
    private HighpassFilter filterX = new HighpassFilter(), filterY = new HighpassFilter(), filterRotation = new HighpassFilter();
    private boolean flipContrast = getBoolean("flipContrast", false);
    boolean evenMotion = true;
    private FilterChain filterChain;
    private boolean annotateEnclosedEnabled = getBoolean("annotateEnclosedEnabled", true);
    private PanTilt panTilt = null;
    ArrayList<TransformAtTime> transformList = new ArrayList(); // holds list of transforms over update times computed by enclosed filter update callbacks
    private TransformAtTime lastTransform = null, imageTransform = null;
//    private double[] angular, acceleration;
    private float panRate = 0, tiltRate = 0, rollRate = 0; // in deg/sec
    private float panOffset = getFloat("panOffset", 0), tiltOffset = getFloat("tiltOffset", 0), rollOffset = getFloat("rollOffset", 0);
//    private float upAccel = 0, rightAccel = 0, zAccel = 0; // in g in m/s^2
    private float panTranslationDeg = 0;
    private float tiltTranslationDeg = 0;
    private float rollDeg = 0;
    // DC (calibration) offsets subtracted from the gyro rates.
    private float panDC = 0, tiltDC = 0, rollDC = 0;
    private float lensFocalLengthMm = getFloat("lensFocalLengthMm", 8.5f);
    // Highpass filters relax the transform back to zero over the configured tau.
    HighpassFilter panTranslationFilter = new HighpassFilter();
    HighpassFilter tiltTranslationFilter = new HighpassFilter();
    HighpassFilter rollFilter = new HighpassFilter();
    private float highpassTauMsTranslation = getFloat("highpassTauMsTranslation", 1000);
    private float highpassTauMsRotation = getFloat("highpassTauMsRotation", 1000);
    float radPerPixel;
    private volatile boolean resetCalled = false;
    private int lastImuTimestamp = 0;
    private boolean initialized = false;
    private boolean addTimeStampsResetPropertyChangeListener = false;
    private int transformResetLimitDegrees = getInt("transformResetLimitDegrees", 45);
    // deal with leftover IMU data after timestamps reset
    private static final int FLUSH_COUNT = 10;
    private int flushCounter = 0;
    // calibration
    private boolean calibrating = false; // used to flag calibration state
    private int calibrationSampleCount = 0;
    private int NUM_CALIBRATION_SAMPLES_DEFAULT = 800; // 400 samples /sec
    protected int numCalibrationSamples=getInt("numCalibrationSamples",NUM_CALIBRATION_SAMPLES_DEFAULT);
    private CalibrationFilter panCalibrator, tiltCalibrator, rollCalibrator;
    TextRenderer imuTextRenderer = null;
    private boolean showTransformRectangle = getBoolean("showTransformRectangle", true);
    private boolean showGrid = getBoolean("showGrid", true);
    // transform control
    public boolean disableTranslation = getBoolean("disableTranslation", false);
    public boolean disableRotation = getBoolean("disableRotation", false);
    // array size vars, updated in update()
    private int sxm1;
    private int sym1;
    private int sx2, sy2;
    private boolean transformImageEnabled = getBoolean("transformImageEnabled", true);
    private int lastFrameNumber = 0;
    protected float imuLagMs = getFloat("imuLagMs", 1.8f);
    private boolean addedViewerPropertyChangeListener = false;
    ApsDvsEventPacket outputPacket=null;

    /**
     * Creates a new instance of SceneStabilizer
     */
    public Steadicam(AEChip chip) {
        super(chip);
        filterChain = new FilterChain(chip);
        chip.addObserver(this); // to get pixel array size updates
        addObserver(this); // we add ourselves as observer so that our update() can be called during packet iteration periodically according to global FilterFrame update interval settting
        // Restore the rotation-estimator choice from preferences, falling back
        // to the single available estimator on an unknown stored value.
        try {
            cameraRotationEstimator = CameraRotationEstimator.valueOf(getString("positionComputer", "OpticalGyro"));
        } catch (IllegalArgumentException e) {
            log.warning("bad preference " + getString("positionComputer", "OpticalGyro") + " for preferred PositionComputer, choosing default OpticalGyro");
            cameraRotationEstimator = CameraRotationEstimator.VORSensor;
            putString("positionComputer", "OpticalGyro");
        }
        setCameraRotationEstimator(cameraRotationEstimator); // init filter enabled states
        initFilter(); // init filters for motion compensation

        // GUI tooltips, grouped by settings category.
        String transform = "Transform", pantilt = "Pan-Tilt", display = "Display", imu = "IMU";
        setPropertyTooltip("cameraRotationEstimator", "specifies which method is used to measure camera rotation");
//        setPropertyTooltip(pantilt, "gainTranslation", "gain applied to measured scene translation to affect electronic or mechanical output");
//        setPropertyTooltip(pantilt, "gainVelocity", "gain applied to measured scene velocity times the weighted-average cluster aqe to affect electronic or mechanical output");
//        setPropertyTooltip(pantilt, "gainPanTiltServos", "gain applied to translation for pan/tilt servo values");
        setPropertyTooltip("feedforwardEnabled", "enables optical flow motion computation on stabilized output of filter rather than input (only during use of DirectionSelectiveFilter)");
        setPropertyTooltip(pantilt, "panTiltEnabled", "enables use of pan/tilt servos for camera");
        setPropertyTooltip("electronicStabilizationEnabled", "stabilize by shifting events according to the PositionComputer");
        setPropertyTooltip(display, "flipContrast", "flips contrast of output events depending on x*y sign of motion - should maintain colors of edges");
//        setPropertyTooltip("cornerFreqHz", "sets highpass corner frequency in Hz for stabilization - frequencies smaller than this will not be stabilized and transform will return to zero on this time scale");
        setPropertyTooltip(display, "annotateEnclosedEnabled", "showing tracking or motion filter output annotation of output, for setting up parameters of enclosed filters");
        setPropertyTooltip(transform, "opticalGyroTauLowpassMs", "lowpass filter time constant in ms for optical gyro camera rotation measure");
        setPropertyTooltip(transform, "opticalGyroRotationEnabled", "enables rotation in transform");
        setPropertyTooltip(transform, "vestibularStabilizationEnabled", "use the gyro/accelometer to provide transform");
        setPropertyTooltip(imu, "zeroGyro", "zeros the gyro output. Sensor should be stationary for period of 1-2 seconds during zeroing");
        setPropertyTooltip(imu, "eraseGyroZero", "Erases the gyro zero values");
        setPropertyTooltip(imu, "numCalibrationSamples", "Number of calibration samples from IMU to average for offset correction");
        setPropertyTooltip(transform, "transformImageEnabled", "Transforms rendering of the APS image (note that the APS image data is unaffected; this is only for demo purposes)");
//        setPropertyTooltip("sampleIntervalMs", "sensor sample interval in ms, min 4ms, powers of two, e.g. 4,8,16,32...");
        setPropertyTooltip(transform, "highpassTauMsTranslation", "highpass filter time constant in ms to relax transform back to zero for translation (pan, tilt) components");
        setPropertyTooltip(transform, "highpassTauMsRotation", "highpass filter time constant in ms to relax transform back to zero for rotation (roll) component");
        setPropertyTooltip(transform, "lensFocalLengthMm", "sets lens focal length in mm to adjust the scaling from camera rotation to pixel space");
        setPropertyTooltip(imu, "zeroGyro", "zeros the gyro output. Sensor should be stationary for period of 1-2 seconds during zeroing");
        setPropertyTooltip(imu, "eraseGyroZero", "Erases the gyro zero values");
        setPropertyTooltip(transform, "transformResetLimitDegrees", "If transform translations exceed this limit in degrees the transform is automatically reset to 0");
        setPropertyTooltip(display, "showTransformRectangle", "Disable to not show the red transform square and red cross hairs");
        setPropertyTooltip(display, "showGrid", "Enabled to show a grid to allow judging the degree of stabilization");
        setPropertyTooltip(transform, "disableRotation", "Disables rotational part of transform");
        setPropertyTooltip(transform, "disableTranslation", "Disables translations part of transform");
        setPropertyTooltip(imu, "imuLagMs", "absolute delay/lag of IMU in ms");

        // Apply the preferred time constants to the highpass filters.
        rollFilter.setTauMs(highpassTauMsRotation);
        panTranslationFilter.setTauMs(highpassTauMsTranslation);
        tiltTranslationFilter.setTauMs(highpassTauMsTranslation);
        panCalibrator = new CalibrationFilter();
        tiltCalibrator = new CalibrationFilter();
        rollCalibrator = new CalibrationFilter();
        setEnclosedFilterChain(filterChain);
    }

    /**
     * Filters the input packet, counter-transforming events by the estimated
     * camera rotation (electronic stabilization) and optionally driving a
     * mechanical pan-tilt unit.
     *
     * @param in input event packet; expected to be an ApsDvsEventPacket.
     * @return the stabilized output packet (or {@code in} on error paths).
     */
    @Override
    synchronized public EventPacket filterPacket(EventPacket in) { // TODO completely rework this code because IMUSamples are part of the packet now!
if (!addedViewerPropertyChangeListener) { if (chip.getAeViewer() != null) { chip.getAeViewer().addPropertyChangeListener(this); // chip.getAeViewer().getAePlayer().getSupport().addPropertyChangeListener(this); // TODO might be duplicated callback addedViewerPropertyChangeListener = true; } } if (!addTimeStampsResetPropertyChangeListener) { chip.getAeViewer().addPropertyChangeListener(AEViewer.EVENT_TIMESTAMPS_RESET, this); addTimeStampsResetPropertyChangeListener = true; } if(outputPacket==null){ outputPacket=new ApsDvsEventPacket(in.getEventClass()); } transformList.clear(); // empty list of transforms to be applied // The call to enclosed filters issues callbacks to us periodically via updates that fills transform list, in case of enclosed filters. // this is not the case when using integrated IMU which generates IMUSamples in the event stream. getEnclosedFilterChain().filterPacket(in); // System.outputPacket.println("new steadicam input packet "+in); if (electronicStabilizationEnabled) { // here we stabilize by using the measured camera rotationRad to counter-transform the events // transform events in place, no need to copy to output packet // checkOutputPacketEventType(in); // OutputEventIterator outItr = getOutputPacket().outputIterator();// the transformed events output packet // TODO compute evenMotion boolean from opticalGyro Iterator<TransformAtTime> transformItr = transformList.iterator(); // this list is filled by the enclosed filters // int i=-1; sx2 = chip.getSizeX() / 2; sy2 = chip.getSizeY() / 2; sxm1 = chip.getSizeX() - 1; sym1 = chip.getSizeY() - 1; OutputEventIterator outItr = outputPacket.outputIterator(); if(!(in instanceof ApsDvsEventPacket)){ log.warning("input packet is not an ApsDvsEventPacket, disabling filter"); setFilterEnabled(false); return in; } ApsDvsEventPacket in2=(ApsDvsEventPacket)in; Iterator itr=in2.fullIterator(); while(itr.hasNext()) { Object o=itr.next(); if (o == null) { log.warning("null event passed in, returning input 
packet"); return in; } ApsDvsEvent ev = (ApsDvsEvent) o; switch (cameraRotationEstimator) { case VORSensor: if (ev instanceof IMUSample) { // TODO hack, we mark IMUSamples in EventExtractor that are actually ApsDvsEvent as non-special so we can detect them here // System.outputPacket.println("at position "+i+" got "+ev); IMUSample s = (IMUSample) ev; // because of imuLagMs this IMU sample should actually be applied to samples from the past // to achieve this backwards application of the IMU samples we hold the older events in a FIFO and pop events from the FIFO until // the event timestamp catches up to the current IMUSample timestamp - imuLagMs. if (s.imuSampleEvent) { lastTransform = updateTransform(s); if (transformImageEnabled && lastTransform != null && chip instanceof DavisChip && chip.getAeViewer() != null && chip.getCanvas() != null && chip.getCanvas().getDisplayMethod() instanceof ChipRendererDisplayMethodRGBA) { DavisChip apsDvsChip = (DavisChip) chip; int frameStartTimestamp = apsDvsChip.getFrameExposureStartTimestampUs(); int frameEndTimestamp = apsDvsChip.getFrameExposureEndTimestampUs(); int frameCounter = apsDvsChip.getFrameCount(); if (frameEndTimestamp >= frameStartTimestamp && lastTransform.timestamp >= frameEndTimestamp && frameCounter > lastFrameNumber) { // if a frame has been read outputPacket, then save the last transform to apply to rendering this frame imageTransform = lastTransform; lastFrameNumber = frameCounter; // only set transfrom once per frame, as soon as we have a tranform for it. 
ChipRendererDisplayMethodRGBA displayMethod = (ChipRendererDisplayMethodRGBA) chip.getCanvas().getDisplayMethod(); // TODO not ideal (tobi) displayMethod.setImageTransform(lastTransform.translationPixels, lastTransform.rotationRad); // immediately set this to be the transform, assuming that next rendering cycle will draw this new frame } } continue; // next event } } break; default: lastTransform = transformItr.next(); } pushEvent(ev); // System.outputPacket.print(">"); ApsDvsEvent be = null; while ((be = peekEvent()) != null && (be.timestamp <= ev.timestamp - imuLagMs * 1000 || be.timestamp > ev.timestamp)) { be = popEvent(); // System.outputPacket.print("<"); if (!(be instanceof IMUSample)) { if (lastTransform != null) { // apply transform Re+T. First center events from middle of array at 0,0, then transform, then move them back to their origin int nx = be.x - sx2, ny = be.y - sy2; be.x = (short) ((((lastTransform.cosAngle * nx) - (lastTransform.sinAngle * ny)) + lastTransform.translationPixels.x) + sx2); be.y = (short) (((lastTransform.sinAngle * nx) + (lastTransform.cosAngle * ny) + lastTransform.translationPixels.y) + sy2); be.address = chip.getEventExtractor().getAddressFromCell(be.x, be.y, be.getType()); // so event is logged properly to disk } if ((be.x > sxm1) || (be.x < 0) || (be.y > sym1) || (be.y < 0)) { be.setFilteredOut(true); // TODO this gradually fills the packet with filteredOut events, which are never seen afterwards because the iterator filters them outputPacket in the reused packet. 
continue; // discard events outside chip limits for now, because we can't render them presently, although they are valid events } else { be.setFilteredOut(false); } // deal with flipping contrast of output event depending on direction of motion, to make things appear the same regardless of camera rotationRad if (flipContrast) { if (evenMotion) { be.type = (byte) (1 - be.type); // don't let contrast flip when direction changes, try to stabilze contrast by flipping it as well be.polarity = be.polarity == PolarityEvent.Polarity.On ? PolarityEvent.Polarity.Off : PolarityEvent.Polarity.On; } } } outItr.nextOutput().copyFrom(be); } } // event iterator // if(transformImageEnabled && lastTransform!=null && chip.getAeViewer()!=null && chip.getCanvas()!=null && chip.getCanvas().getDisplayMethod() instanceof ChipRendererDisplayMethodRGBA){ // ChipRendererDisplayMethodRGBA displayMethod=(ChipRendererDisplayMethodRGBA)chip.getCanvas().getDisplayMethod(); // TODO not ideal (tobi) // displayMethod.setImageTransform(lastTransform.translationPixels,lastTransform.rotationRad); // }// if(transformImageEnabled && lastTransform!=null && chip.getAeViewer()!=null && chip.getCanvas()!=null && chip.getCanvas().getDisplayMethod() instanceof ChipRendererDisplayMethodRGBA){ // ChipRendererDisplayMethodRGBA displayMethod=(ChipRendererDisplayMethodRGBA)chip.getCanvas().getDisplayMethod(); // TODO not ideal (tobi) // displayMethod.setImageTransform(lastTransform.translationPixels,lastTransform.rotationRad); } // electronicStabilizationEnabled if (isPanTiltEnabled()) { // mechanical pantilt try { // mechanical pantilt // assume that pan of 1 takes us 180 degrees and that the sensor has 45 deg FOV, // then 1 pixel will require only 45/180/size pan final float factor = (float) (chip.getPixelWidthUm() / 1000 / lensFocalLengthMm / Math.PI); panTilt.setPanTiltValues(.5f - (translation.x * getGainPanTiltServos() * factor), .5f + (translation.y * getGainPanTiltServos() * factor)); } catch 
            (HardwareInterfaceException ex) {
                log.warning("setting pantilt: " + ex);
                panTilt.close();
            }
        }
        return outputPacket;
    } // end of filterPacket (method header precedes this chunk)

    // Initial capacity of the FIFO used to delay events so that IMU samples
    // (which are applied with a lag of imuLagMs) can be matched to older events.
    final int INIITAL_QUEUE_SIZE = 10000; // NOTE(review): identifier misspelled ("INIITAL"); left as-is, may be referenced elsewhere

    // Delay FIFO holding copies of incoming events until the transform valid for
    // their timestamps is known; grown on demand in pushEvent.
    ArrayBlockingQueue<ApsDvsEvent> eventQueue = new ArrayBlockingQueue<ApsDvsEvent>(INIITAL_QUEUE_SIZE);

    /**
     * Copies the event and appends it to the delay FIFO, doubling the queue
     * capacity when the queue is full.
     *
     * @param ev the event to enqueue; a defensive copy is stored because the
     *           source packet is reused by the caller
     */
    private void pushEvent(ApsDvsEvent ev) {
        ApsDvsEvent ne = new ApsDvsEvent();
        ne.copyFrom(ev);
        if (!eventQueue.offer(ne)) { // increase queue size
            // offer() failed, so the queue is full and size() equals the current
            // capacity; allocate a queue with double that capacity
            ArrayBlockingQueue<ApsDvsEvent> newQueue = new ArrayBlockingQueue<ApsDvsEvent>(eventQueue.size() * 2);
            log.info("increased event queue to " + newQueue.remainingCapacity() + " events");
            newQueue.addAll(eventQueue);
            eventQueue = newQueue;
            eventQueue.offer(ne);
        };
    }

    /** Removes and returns the oldest queued event, or null if the FIFO is empty. */
    private ApsDvsEvent popEvent() {
        return eventQueue.poll();
    }

    /** Returns without removing the oldest queued event, or null if the FIFO is empty. */
    private ApsDvsEvent peekEvent() {
        return eventQueue.peek();
    }

    /**
     * Called back here during packet iteration to update transform
     *
     * @param o   the observable that fired this update (the enclosed filter)
     * @param arg expected to be an UpdateMessage carrying intermediate data
     */
    @Override
    public void update(Observable o, Object arg) { // called by enclosed filter to update event stream on the fly, using intermediate data
        if (arg instanceof UpdateMessage) {
            computeTransform((UpdateMessage) arg); // gets the lastTransform from the enclosed filter
        }
    }

    /**
     * Computes transform using current gyro outputs based on timestamp supplied
     * and returns a TransformAtTime object. Should be called by update in
     * enclosing processor.
     *
     * @param timestamp the timestamp in us.
* @return the transform object representing the camera rotationRad */ synchronized public TransformAtTime updateTransform(IMUSample imuSample) { if (resetCalled) { log.info("reset called, panDC" + panDC + " panTranslationFilter=" + panTranslationFilter); resetCalled = false; } if (imuSample == null) { return null; } if (flushCounter return null; // flush some samples if the timestamps have been reset and we need to discard some samples here }// System.outputPacket.println(imuSample.toString()); int timestamp = imuSample.getTimestampUs(); float dtS = (timestamp - lastImuTimestamp) * 1e-6f; lastImuTimestamp = timestamp; if (!initialized) { initialized = true; return null; } panRate = imuSample.getGyroYawY(); tiltRate = imuSample.getGyroTiltX(); rollRate = imuSample.getGyroRollZ(); if (calibrating) { calibrationSampleCount++; if (calibrationSampleCount > numCalibrationSamples) { calibrating = false; panOffset = panCalibrator.computeAverage(); tiltOffset = tiltCalibrator.computeAverage(); rollOffset = rollCalibrator.computeAverage(); putFloat("panOffset", panOffset); putFloat("tiltOffset", tiltOffset); putFloat("rollOffset", rollOffset); log.info(String.format("calibration finished. 
%d samples averaged to (pan,tilt,roll)=(%.3f,%.3f,%.3f)", numCalibrationSamples, panOffset, tiltOffset, rollOffset)); } else { panCalibrator.addSample(panRate); tiltCalibrator.addSample(tiltRate); rollCalibrator.addSample(rollRate); } return null; } // zAccel = imuSample.getAccelZ(); // upAccel = imuSample.getAccelY(); // rightAccel = imuSample.getAccelX(); panDC += getPanRate() * dtS; tiltDC += getTiltRate() * dtS; rollDC += getRollRate() * dtS; panTranslationDeg = panTranslationFilter.filter(panDC, timestamp); tiltTranslationDeg = tiltTranslationFilter.filter(tiltDC, timestamp); rollDeg = rollFilter.filter(rollDC, timestamp); // check limits, make limit for rotationRad a lot higher to avoid reset on big rolls, which are different than pans and tilts if ((Math.abs(panTranslationDeg) > transformResetLimitDegrees) || (Math.abs(tiltTranslationDeg) > transformResetLimitDegrees) || (Math.abs(rollDeg) > (transformResetLimitDegrees * 3))) { panDC = 0; tiltDC = 0; rollDC = 0; panTranslationDeg = 0; tiltTranslationDeg = 0; rollDeg = 0; panTranslationFilter.reset(); tiltTranslationFilter.reset(); rollFilter.reset(); log.info("transform reset limit reached, transform reset to zero"); } if (flipContrast) { if (Math.abs(panRate) > Math.abs(tiltRate)) { evenMotion = panRate > 0; // used to flip contrast } else { evenMotion = tiltRate > 0; } } if (disableRotation) { rollDeg = 0; } if (disableTranslation) { panTranslationDeg = 0; tiltTranslationDeg = 0; } // computute transform in TransformAtTime units here. // Use the lens focal length and camera resolution. 
TransformAtTime tr = new TransformAtTime(timestamp, new Point2D.Float( (float) ((Math.PI / 180) * panTranslationDeg) / radPerPixel, (float) ((Math.PI / 180) * tiltTranslationDeg) / radPerPixel), (-rollDeg * (float) Math.PI) / 180); return tr; } private final void transformEvent(BasicEvent e, TransformAtTime transform) { e.x -= sx2; e.y -= sy2; short newx = (short) Math.round((((transform.cosAngle * e.x) - (transform.sinAngle * e.y)) + transform.translationPixels.x)); short newy = (short) Math.round(((transform.sinAngle * e.x) + (transform.cosAngle * e.y) + transform.translationPixels.y)); e.x = (short) (newx + sx2); e.y = (short) (newy + sy2); e.address = chip.getEventExtractor().getAddressFromCell(e.x, e.y, e.getType()); // so event is logged properly to disk } synchronized public void doEraseGyroZero() { panOffset = 0; tiltOffset = 0; rollOffset = 0; putFloat("panOffset", 0); putFloat("tiltOffset", 0); putFloat("rollOffset", 0); log.info("calibration erased"); } synchronized public void doZeroGyro() { calibrating = true; calibrationSampleCount = 0; panCalibrator.reset(); tiltCalibrator.reset(); rollCalibrator.reset(); log.info("calibration started"); // panOffset = panRate; // TODO offsets should really be some average over some samples // tiltOffset = tiltRate; // rollOffset = rollRate; } /** * Called by update on enclosed filter updates. * <p> * Using AbstractDirectionSelectiveFilter, the lastTransform is computed by * pure integration of the motion signal followed by a high-pass filter to * remove long term DC offsets. * <p> * Using OpticalGyro, the lastTransform is computed by the optical gyro * which tracks clusters and measures scene translationPixels (and possibly * rotationRad) from a consensus of the tracked clusters. * <p> * Using PhidgetsVORSensor, lastTransform is computed by PhidgetsVORSensor * using rate gyro sensors. * * * @param in the input event packet. 
     */
    private void computeTransform(UpdateMessage msg) { // only used in AbstractDirectionSelectiveFilter and OpticalGyro. IMU transform is applied inline in filterPacket
        // NOTE(review): all locals below are unused and the method has no effect;
        // looks like dead/stub code for the non-IMU estimators -- confirm before removing.
        float shiftx = 0, shifty = 0;
        float rot = 0;
        Point2D.Float trans = new Point2D.Float();
    }

    /**
     * @return the panRate (offset-corrected)
     */
    public float getPanRate() {
        return panRate - panOffset;
    }

    /**
     * @return the tiltRate (offset-corrected)
     */
    public float getTiltRate() {
        return tiltRate - tiltOffset;
    }

    /**
     * @return the rollRate (offset-corrected)
     */
    public float getRollRate() {
        return rollRate - rollOffset;
    }

    /**
     * Draws the calibration prompt, the transform crosshair/rectangle, and the
     * optional alignment grid on the chip display.
     */
    @Override
    public void annotate(GLAutoDrawable drawable) {
        if (calibrating) {
            if (imuTextRenderer == null) {
                // lazily created; construction is relatively expensive
                imuTextRenderer = new TextRenderer(new Font("SansSerif", Font.PLAIN, 36));
            }
            imuTextRenderer.begin3DRendering();
            imuTextRenderer.setColor(1, 1, 1, 1);
            final String saz = String.format("Don't move sensor (Calibrating %d/%d)", calibrationSampleCount, numCalibrationSamples);
            Rectangle2D rect = imuTextRenderer.getBounds(saz);
            final float scale = .25f;
            // center the text horizontally on the chip display
            imuTextRenderer.draw3D(saz, (chip.getSizeX() / 2) - (((float) rect.getWidth() * scale) / 2), chip.getSizeY() / 2, 0, scale);
            imuTextRenderer.end3DRendering();
        }
        GL2 gl = null;
        if (showGrid || showTransformRectangle) {
            gl = drawable.getGL().getGL2();
        }
        if (gl == null) {
            return;
        }
        if (showTransformRectangle && (lastTransform != null) && isElectronicStabilizationEnabled()) { // draw transform
            gl.glPushMatrix();
            gl.glLineWidth(1f);
            gl.glColor3f(1, 0, 0);
            // translate and rotate
            gl.glTranslatef(lastTransform.translationPixels.x + sx2, lastTransform.translationPixels.y + sy2, 0);
            gl.glRotatef((float) ((lastTransform.rotationRad * 180) / Math.PI), 0, 0, 1);
            // draw xhairs on frame to help show locations of objects and if they have moved.
            gl.glBegin(GL.GL_LINES); // sequence of individual segments, in pairs of vertices
            gl.glVertex2f(0, 0); // start at origin
            gl.glVertex2f(sx2, 0); // outputPacket to right
            gl.glVertex2f(0, 0); // origin
            gl.glVertex2f(-sx2, 0); // outputPacket to left
            gl.glVertex2f(0, 0); // origin
            gl.glVertex2f(0, sy2);
            gl.glVertex2f(0, 0); // origin
            gl.glVertex2f(0, -sy2); // down
            gl.glEnd();
            // rectangle around transform
            gl.glTranslatef(-sx2, -sy2, 0); // lower left corner
            gl.glBegin(GL.GL_LINE_LOOP); // loop of vertices
            gl.glVertex2f(0, 0); // lower left corner
            gl.glVertex2f(sx2 * 2, 0); // lower right
            gl.glVertex2f(2 * sx2, 2 * sy2); // upper right
            gl.glVertex2f(0, 2 * sy2); // upper left
            gl.glVertex2f(0, 0); // back of lower left
            gl.glEnd();
            gl.glPopMatrix();
        }
        if (showGrid) {
            gl.glLineWidth(1f);
            gl.glColor3f(0, 0, 1);
            // 8x8 grid of lines over the full pixel array
            final int s = chip.getMaxSize() / 8;
            final int n = chip.getMaxSize() / s;
            gl.glBegin(GL.GL_LINES);
            for (int i = 0; i < n; i++) {
                final int x = i * s;
                gl.glVertex2i(x, 0);
                gl.glVertex2i(x, sy2 * 2);
            }
            for (int i = 0; i < n; i++) {
                final int y = i * s;
                gl.glVertex2i(0, y);
                gl.glVertex2i(sx2 * 2, y);
            }
            gl.glEnd();
        }
    }

    // public float getGainTranslation() {
    // return gainTranslation;
    // public void setGainTranslation(float gain) {
    // if (gain < 0) {
    // gain = 0;
    // } else if (gain > 100) {
    // gain = 100;
    // this.gainTranslation = gain;
    // putFloat("gainTranslation", gain);
    // /**
    // * @return the gainVelocity
    // */
    // public float getGainVelocity() {
    // return gainVelocity;
    // /**
    // * @param gainVelocity the gainVelocity to set
    // */
    // public void setGainVelocity(float gainVelocity) {
    // this.gainVelocity = gainVelocity;
    // putFloat("gainVelocity", gainVelocity);
    // public void setCornerFreqHz(float freq) {
    // cornerFreqHz = freq;
    // filterX.set3dBFreqHz(freq);
    // filterY.set3dBFreqHz(freq);
    // filterRotation.set3dBFreqHz(freq);
    // putFloat("cornerFreqHz", freq);
    // public float getCornerFreqHz() {
    // return cornerFreqHz;

    /**
     * Zeroes all integrated gyro state, filters and the cached transform, recomputes
     * radPerPixel, recenters the mechanical pan-tilt if enabled, and clears the event FIFO.
     */
    @Override
    synchronized public void resetFilter() {
        resetCalled = true;
        panRate = 0;
        tiltRate = 0;
        rollRate = 0;
        panDC = 0;
        tiltDC = 0;
        rollDC = 0;
        rollDeg = 0;
        panTranslationFilter.reset();
        tiltTranslationFilter.reset();
        rollFilter.reset();
        radPerPixel = (float) Math.atan((getChip().getPixelWidthUm() * 1e-3f) / lensFocalLengthMm);
        filterX.setInternalValue(0);
        filterY.setInternalValue(0);
        filterRotation.setInternalValue(0);
        translation.x = 0;
        translation.y = 0;
        lastTransform = null;
        if (isPanTiltEnabled()) {
            try {
                panTilt.setPanTiltValues(.5f, .5f); // recenter servos
            } catch (HardwareInterfaceException ex) {
                log.warning(ex.toString());
                panTilt.close();
            }
        }
        eventQueue.clear();
        initialized = false; // next IMU sample only re-establishes lastImuTimestamp
    }

    @Override
    public void initFilter() {
        // panTilt = PanTilt.getLastInstance();
        resetFilter();
    }

    public boolean isFlipContrast() {
        return flipContrast;
    }

    public void setFlipContrast(boolean flipContrast) {
        this.flipContrast = flipContrast;
        putBoolean("flipContrast", flipContrast);
    }

    /**
     * Besides enabling/disabling, clears the rendered image transform when disabled
     * and resets state when enabled to prevent large timestep anomalies.
     */
    @Override
    synchronized public void setFilterEnabled(boolean yes) {
        super.setFilterEnabled(yes);
        setCameraRotationEstimator(cameraRotationEstimator); // reflag enabled/disabled state of motion computation
        getEnclosedFilterChain().reset();
        if (!yes) {
            setPanTiltEnabled(false); // turn off servos, close interface
            if (chip.getAeViewer() != null && chip.getCanvas() != null && chip.getCanvas().getDisplayMethod() instanceof ChipRendererDisplayMethodRGBA) {
                ChipRendererDisplayMethodRGBA displayMethod = (ChipRendererDisplayMethodRGBA) chip.getCanvas().getDisplayMethod(); // TODO not ideal (tobi)
                displayMethod.setImageTransform(new Point2D.Float(0, 0), 0);
            }
        } else {
            resetFilter(); // reset on enabled to prevent large timestep anomalies
        }
    }

    public boolean isFeedforwardEnabled() {
        return feedforwardEnabled;
    }

    /**
     * true to apply current shift values to input packet events.
This does a
     * kind of feedback compensation
     */
    public void setFeedforwardEnabled(boolean feedforwardEnabled) {
        this.feedforwardEnabled = feedforwardEnabled;
        putBoolean("feedforwardEnabled", feedforwardEnabled);
    }

    // public boolean isRotationEnabled(){
    // return rotationEnabled;
    // public void setRotationEnabled(boolean rotationEnabled){
    // this.rotationEnabled=rotationEnabled;
    // putBoolean("rotationEnabled",rotationEnabled);

    /**
     * Method used to compute shift.
     *
     * @return the positionComputer
     */
    public CameraRotationEstimator getCameraRotationEstimator() {
        return cameraRotationEstimator;
    }

    /**
     * Chooses how the current position of the scene is computed.
     *
     * @param positionComputer the positionComputer to set
     */
    synchronized public void setCameraRotationEstimator(CameraRotationEstimator positionComputer) {
        this.cameraRotationEstimator = positionComputer;
        putString("positionComputer", positionComputer.toString());
        // NOTE(review): this switch has an empty VORSensor case and no default -- it
        // currently does nothing; presumably per-estimator setup was intended here.
        switch (positionComputer) {
            case VORSensor:
        }
    }

    /**
     * The global translational shift applied to output, computed by enclosed
     * FilterChain.
     *
     * @return the x,y shift
     */
    public Point2D.Float getShift() {
        return translation;
    }

    /**
     * @param shift the shift to set
     */
    public void setShift(Point2D.Float shift) {
        this.translation = shift;
    }

    /**
     * @return the annotateEnclosedEnabled
     */
    public boolean isAnnotateEnclosedEnabled() {
        return annotateEnclosedEnabled;
    }

    /**
     * @param annotateEnclosedEnabled the annotateEnclosedEnabled to set
     */
    public void setAnnotateEnclosedEnabled(boolean annotateEnclosedEnabled) {
        this.annotateEnclosedEnabled = annotateEnclosedEnabled;
        putBoolean("annotateEnclosedEnabled", annotateEnclosedEnabled);
    }

    /**
     * @return the panTiltEnabled
     */
    public boolean isPanTiltEnabled() {
        return panTiltEnabled;
    }

    /**
     * Enables use of pan/tilt servo controller for camera for mechanical
     * stabilization.
     *
     * @param panTiltEnabled the panTiltEnabled to set
     */
    public void setPanTiltEnabled(boolean panTiltEnabled) {
        this.panTiltEnabled = panTiltEnabled;
        putBoolean("panTiltEnabled", panTiltEnabled);
        if (!panTiltEnabled) {
            // shut down and release the servo hardware when disabling
            try {
                if ((panTilt != null) && (panTilt.getServoInterface() != null) && panTilt.getServoInterface().isOpen()) {
                    panTilt.getServoInterface().disableAllServos();
                    panTilt.close();
                }
            } catch (HardwareInterfaceException ex) {
                log.warning(ex.toString());
                panTilt.close();
            }
        }
    }

    /**
     * @return the electronicStabilizationEnabled
     */
    public boolean isElectronicStabilizationEnabled() {
        return electronicStabilizationEnabled;
    }

    /**
     * @param electronicStabilizationEnabled the electronicStabilizationEnabled
     * to set
     */
    public void setElectronicStabilizationEnabled(boolean electronicStabilizationEnabled) {
        this.electronicStabilizationEnabled = electronicStabilizationEnabled;
        putBoolean("electronicStabilizationEnabled", electronicStabilizationEnabled);
    }

    /**
     * @return the gainPanTiltServos
     */
    public float getGainPanTiltServos() {
        return gainPanTiltServos;
    }

    /**
     * @param gainPanTiltServos the gainPanTiltServos to set
     */
    public void setGainPanTiltServos(float gainPanTiltServos) {
        this.gainPanTiltServos = gainPanTiltServos;
        putFloat("gainPanTiltServos", gainPanTiltServos);
    }

    /**
     * @return the highpassTauMs for translation
     */
    public float getHighpassTauMsTranslation() {
        return highpassTauMsTranslation;
    }

    /**
     * @param highpassTauMs the highpassTauMs to set; applied to both pan and tilt filters
     */
    public void setHighpassTauMsTranslation(float highpassTauMs) {
        this.highpassTauMsTranslation = highpassTauMs;
        putFloat("highpassTauMsTranslation", highpassTauMs);
        panTranslationFilter.setTauMs(highpassTauMs);
        tiltTranslationFilter.setTauMs(highpassTauMs);
    }

    /**
     * @return the highpassTauMs for rotation
     */
    public float getHighpassTauMsRotation() {
        return highpassTauMsRotation;
    }

    /**
     * @param highpassTauMs the highpassTauMs to set; applied to the roll filter
     */
    public void setHighpassTauMsRotation(float highpassTauMs) {
        this.highpassTauMsRotation = highpassTauMs;
putFloat("highpassTauMsRotation", highpassTauMs); rollFilter.setTauMs(highpassTauMs); } private float clip(float f, float lim) { if (f > lim) { f = lim; } else if (f < -lim) { f = -lim; } return f; } /** * @return the lensFocalLengthMm */ public float getLensFocalLengthMm() { return lensFocalLengthMm; } /** * @param lensFocalLengthMm the lensFocalLengthMm to set */ public void setLensFocalLengthMm(float lensFocalLengthMm) { this.lensFocalLengthMm = lensFocalLengthMm; putFloat("lensFocalLengthMm", lensFocalLengthMm); radPerPixel = (float) Math.asin((getChip().getPixelWidthUm() * 1e-3f) / lensFocalLengthMm); } @Override public void propertyChange(PropertyChangeEvent evt) { if (evt.getPropertyName() == AEViewer.EVENT_TIMESTAMPS_RESET) { resetFilter(); flushCounter = FLUSH_COUNT; } else if (evt.getPropertyName().equals(AEInputStream.EVENT_REWIND)) { resetFilter(); flushCounter = FLUSH_COUNT; } else if (evt.getPropertyName().equals(AEViewer.EVENT_FILEOPEN)) { log.info("File Open"); AbstractAEPlayer player = chip.getAeViewer().getAePlayer(); AEFileInputStream in = (player.getAEInputStream()); in.getSupport().addPropertyChangeListener(this); // Treat FileOpen same as a rewind resetFilter(); flushCounter = FLUSH_COUNT; } // END IF } /** * @return the transformResetLimitDegrees */ public int getTransformResetLimitDegrees() { return transformResetLimitDegrees; } /** * @param transformResetLimitDegrees the transformResetLimitDegrees to set */ public void setTransformResetLimitDegrees(int transformResetLimitDegrees) { this.transformResetLimitDegrees = transformResetLimitDegrees; putInt("transformResetLimitDegrees", transformResetLimitDegrees); } /** * @return the showTransformRectangle */ public boolean isShowTransformRectangle() { return showTransformRectangle; } /** * @param showTransformRectangle the showTransformRectangle to set */ public void setShowTransformRectangle(boolean showTransformRectangle) { this.showTransformRectangle = showTransformRectangle; 
putBoolean("showTransformRectangle", showTransformRectangle); } /** * @return the disableTranslation */ public boolean isDisableTranslation() { return disableTranslation; } /** * @param disableTranslation the disableTranslation to set */ public void setDisableTranslation(boolean disableTranslation) { this.disableTranslation = disableTranslation; putBoolean("disableTranslation", disableTranslation); } /** * @return the disableRotation */ public boolean isDisableRotation() { return disableRotation; } /** * @param disableRotation the disableRotation to set */ public void setDisableRotation(boolean disableRotation) { this.disableRotation = disableRotation; putBoolean("disableRotation", disableRotation); } private class CalibrationFilter { int count = 0; float sum = 0; void reset() { count = 0; sum = 0; } void addSample(float sample) { sum += sample; count++; } float computeAverage() { return sum / count; } } /** * Returns the last event transform that was computed. * * @return the lastTransform that was computed */ public TransformAtTime getLastTransform() { return lastTransform; } /** * Returns the transform applicable to last image acquired. 
     *
     * @return the image transform
     */
    public TransformAtTime getImageTransform() {
        return imageTransform;
    }

    /**
     * @return the transformImageEnabled
     */
    public boolean isTransformImageEnabled() {
        return transformImageEnabled;
    }

    /**
     * @param transformImageEnabled the transformImageEnabled to set
     */
    public void setTransformImageEnabled(boolean transformImageEnabled) {
        this.transformImageEnabled = transformImageEnabled;
        putBoolean("transformImageEnabled", transformImageEnabled);
    }

    /**
     * @return the showGrid
     */
    public boolean isShowGrid() {
        return showGrid;
    }

    /**
     * @param showGrid the showGrid to set
     */
    public void setShowGrid(boolean showGrid) {
        this.showGrid = showGrid;
        putBoolean("showGrid", showGrid);
    }

    /**
     * @return the imuLagMs
     */
    public float getImuLagMs() {
        return imuLagMs;
    }

    /**
     * @param imuLagMs the imuLagMs to set
     */
    public void setImuLagMs(float imuLagMs) {
        this.imuLagMs = imuLagMs;
        putFloat("imuLagMs", imuLagMs);
    }

    /**
     * @return the numCalibrationSamples
     */
    public int getNumCalibrationSamples() {
        return numCalibrationSamples;
    }

    /**
     * @param numCalibrationSamples the numCalibrationSamples to set
     */
    public void setNumCalibrationSamples(int numCalibrationSamples) {
        this.numCalibrationSamples = numCalibrationSamples;
        putInt("numCalibrationSamples", numCalibrationSamples);
    }
} // end of class
package com.maddyhome.idea.vim.ui;

import com.intellij.ide.ui.LafManager;
import com.intellij.ide.ui.LafManagerListener;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.ui.DocumentAdapter;
import com.intellij.util.IJSwingUtilities;
import com.maddyhome.idea.vim.VimPlugin;
import com.maddyhome.idea.vim.ex.CommandParser;
import com.maddyhome.idea.vim.ex.ExCommand;
import com.maddyhome.idea.vim.ex.LineRange;
import com.maddyhome.idea.vim.ex.Ranges;
import com.maddyhome.idea.vim.group.MotionGroup;
import com.maddyhome.idea.vim.helper.EditorHelper;
import com.maddyhome.idea.vim.helper.UiHelper;
import com.maddyhome.idea.vim.option.Options;
import com.maddyhome.idea.vim.regexp.CharPointer;
import com.maddyhome.idea.vim.regexp.RegExp;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import java.awt.*;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;

/**
 * This is used to enter ex commands such as searches and "colon" commands
 */
public class ExEntryPanel extends JPanel implements LafManagerListener {
  // Lazily created singleton. No synchronization: only used from the Swing EDT.
  public static ExEntryPanel getInstance() {
    if (instance == null) {
      instance = new ExEntryPanel();
    }
    return instance;
  }

  private ExEntryPanel() {
    label = new JLabel(" ");
    entry = new ExTextField();

    // label in column 0, text field filling the rest of the row
    GridBagLayout layout = new GridBagLayout();
    GridBagConstraints gbc = new GridBagConstraints();

    setLayout(layout);
    gbc.gridx = 0;
    layout.setConstraints(this.label, gbc);
    add(this.label);
    gbc.gridx = 1;
    gbc.weightx = 1.0;
    gbc.fill = GridBagConstraints.HORIZONTAL;
    layout.setConstraints(entry, gbc);
    add(entry);

    new ExShortcutKeyAction(this).registerCustomShortcutSet();

    LafManager.getInstance().addLafManagerListener(this);

    updateUI();
  }

  /**
   * Turns on the ex entry field for the given editor
   *
   * @param editor   The editor to use for display
   * @param context  The data context
   * @param label    The label for the ex entry (i.e. :, /, or ?)
   * @param initText The initial text for the entry
   * @param count    A holder for the ex entry count
   */
  public void activate(@NotNull Editor editor, DataContext context, @NotNull String label, String initText, int count) {
    this.label.setText(label);
    this.count = count;
    setFontForElements();
    entry.reset();
    entry.setEditor(editor, context);
    entry.setText(initText);
    entry.setType(label);
    parent = editor.getContentComponent();

    if (isIncSearchEnabled()) {
      entry.getDocument().addDocumentListener(incSearchDocumentListener);
      // remember caret and scroll state so they can be restored when incsearch ends
      caretOffset = editor.getCaretModel().getOffset();
      verticalOffset = editor.getScrollingModel().getVerticalScrollOffset();
      horizontalOffset = editor.getScrollingModel().getHorizontalScrollOffset();
    }

    if (!ApplicationManager.getApplication().isUnitTestMode()) {
      // Install this panel on the root pane's glass pane so it overlays the editor;
      // the previous glass-pane state is saved and restored in deactivate().
      JRootPane root = SwingUtilities.getRootPane(parent);
      oldGlass = (JComponent)root.getGlassPane();
      oldLayout = oldGlass.getLayout();
      wasOpaque = oldGlass.isOpaque();
      oldGlass.setLayout(null);
      oldGlass.setOpaque(false);
      oldGlass.add(this);
      oldGlass.addComponentListener(resizePanelListener);
      positionPanel();
      oldGlass.setVisible(true);
      entry.requestFocusInWindow();
    }
    active = true;
  }

  /**
   * Turns off the ex entry field and optionally puts the focus back to the original component
   */
  public void deactivate(boolean refocusOwningEditor) {
    logger.info("deactivate");
    if (!active) return;
    active = false;

    // incsearch won't change in the lifetime of this activation
    if (isIncSearchEnabled()) {
      entry.getDocument().removeDocumentListener(incSearchDocumentListener);
      final Editor editor = entry.getEditor();
      if (!editor.isDisposed()) {
        // restore the caret and scroll position captured in activate()
        MotionGroup.moveCaret(editor, editor.getCaretModel().getPrimaryCaret(), caretOffset);
        editor.getScrollingModel().scrollVertically(verticalOffset);
        editor.getScrollingModel().scrollHorizontally(horizontalOffset);
      }

      // This is somewhat inefficient. We've done the search, highlighted everything and now (if we hit <Enter>), we're
      // removing all the highlights to invoke the search action, to search and highlight everything again. On the plus
      // side, it clears up the current item highlight
      VimPlugin.getSearch().resetIncsearchHighlights();
    }

    entry.deactivate();

    if (!ApplicationManager.getApplication().isUnitTestMode()) {
      if (refocusOwningEditor && parent != null) {
        UiHelper.requestFocus(parent);
      }

      // put the glass pane back exactly as activate() found it
      oldGlass.removeComponentListener(resizePanelListener);
      oldGlass.setVisible(false);
      oldGlass.remove(this);
      oldGlass.setOpaque(wasOpaque);
      oldGlass.setLayout(oldLayout);
    }

    parent = null;
  }

  /**
   * Gets the label for the ex entry. This should be one of ":", "/", or "?"
   *
   * @return The ex entry label
   */
  public String getLabel() {
    return label.getText();
  }

  /**
   * Gets the count given during activation
   *
   * @return The count
   */
  public int getCount() {
    return count;
  }

  /**
   * Checks if the ex entry panel is currently active
   *
   * @return true if active, false if not
   */
  public boolean isActive() {
    return active;
  }

  /**
   * Gets the text entered by the user. This includes any initial text but does not include the label
   *
   * @return The user entered text
   */
  public String getText() {
    return entry.getActualText();
  }

  @NotNull
  public ExTextField getEntry() {
    return entry;
  }

  /**
   * Pass the keystroke on to the text edit for handling
   *
   * @param stroke The keystroke
   */
  public void handleKey(@NotNull KeyStroke stroke) {
    entry.handleKey(stroke);
  }

  @Override
  public void lookAndFeelChanged(@NotNull LafManager source) {
    // Calls updateUI on this and child components
    IJSwingUtilities.updateComponentTreeUI(this);
  }

  // Called automatically when the LAF is changed and the component is visible, and manually by the LAF listener handler
  @Override
  public void updateUI() {
    super.updateUI();

    setBorder(new ExPanelBorder());

    // Can be null when called from base constructor
    //noinspection ConstantConditions
    if (entry != null && label != null) {
      setFontForElements();

      // Label background is automatically picked up
      label.setForeground(entry.getForeground());
    }
  }

  // Entry can be null if getForeground is called during base class initialisation
  @SuppressWarnings("ConstantConditions")
  @Override
  public Color getForeground() {
    return entry != null ? entry.getForeground() : super.getForeground();
  }

  @SuppressWarnings("ConstantConditions")
  @Override
  public Color getBackground() {
    return entry != null ? entry.getBackground() : super.getBackground();
  }

  private void setFontForElements() {
    final Font font = UiHelper.getEditorFont();
    label.setFont(font);
    entry.setFont(font);
  }

  // Pins this panel to the bottom edge of the editor's scroll pane, in glass-pane coordinates.
  private void positionPanel() {
    if (parent == null) return;

    Container scroll = SwingUtilities.getAncestorOfClass(JScrollPane.class, parent);
    int height = (int)getPreferredSize().getHeight();
    if (scroll != null) {
      Rectangle bounds = scroll.getBounds();
      bounds.translate(0, scroll.getHeight() - height);
      bounds.height = height;
      Point pos = SwingUtilities.convertPoint(scroll.getParent(), bounds.getLocation(), oldGlass);
      bounds.setLocation(pos);
      setBounds(bounds);
      repaint();
    }
  }

  private boolean isIncSearchEnabled() {
    return Options.getInstance().isSet(Options.INCREMENTAL_SEARCH);
  }

  private boolean active;
  private int count;

  // UI stuff
  @Nullable private JComponent parent;
  @NotNull private final JLabel label;
  @NotNull private final ExTextField entry;
  private JComponent oldGlass;       // glass pane we installed ourselves on; state restored in deactivate()
  private LayoutManager oldLayout;
  private boolean wasOpaque;

  // incsearch stuff: editor state captured in activate() and restored in deactivate()
  private int verticalOffset;
  private int horizontalOffset;
  private int caretOffset;

  @NotNull private final ComponentListener resizePanelListener = new ComponentAdapter() {
    @Override
    public void componentResized(ComponentEvent e) {
      positionPanel();
    }
  };

  // Re-runs the search highlight on every keystroke while the panel is active.
  @NotNull private final DocumentListener incSearchDocumentListener = new DocumentAdapter() {
    @Override
    protected void textChanged(@NotNull DocumentEvent e) {
      final Editor editor = entry.getEditor();

      LineRange searchRange = null;
      char separator = label.getText().charAt(0);
      String searchText = entry.getActualText();
      if (label.getText().equals(":")) {
        // ":" entry: only :s/:g/:v style commands get incremental highlighting
        final ExCommand command = getIncsearchCommand(searchText);
        if (command == null) {
          return;
        }
        searchText = "";
        final String argument = command.getArgument();
        if (argument.length() > 1) {  // E.g. skip '/' in `:%s/`. `%` is range, `s` is command, `/` is argument
          separator = argument.charAt(0);
          searchText = argument.substring(1);
        }
        final Ranges ranges = command.getRanges();
        ranges.setDefaultLine(EditorHelper.offsetToCharacterPosition(editor, caretOffset).line);
        searchRange = command.getLineRange(editor, entry.getContext());
      }

      final boolean forwards = !label.getText().equals("?");  // :s, :g, :v are treated as forwards
      // strip any trailing separator/flags from the typed text to get the bare pattern
      final CharPointer p = new CharPointer(searchText);
      final CharPointer end = RegExp.skip_regexp(new CharPointer(searchText), separator, true);
      final String pattern = p.substring(end.pointer() - p.pointer());

      VimPlugin.getSearch().updateIncsearchHighlights(editor, pattern, forwards, caretOffset, searchRange);
    }

    @Nullable
    private ExCommand getIncsearchCommand(String commandText) {
      try {
        final ExCommand exCommand = CommandParser.getInstance().parse(commandText);
        final String command = exCommand.getCommand();
        // TODO: Add global, vglobal, smagic and snomagic here if/when the commands are supported
        if (command.equals("s") || command.equals("substitute")
          || command.equals("g") || command.equals("global")
          || command.equals("v") || command.equals("vglobal")) {
          return exCommand;
        }
      }
      catch(Exception e) {
        logger.warn("Cannot parse command for incsearch", e);
      }

      return null;
    }
  };

  private static ExEntryPanel instance;

  private static final Logger logger = Logger.getInstance(ExEntryPanel.class.getName());
}
package nallar.patched.world.tracking;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.ConcurrentLinkedQueue;
import nallar.tickthreading.Log;
import nallar.tickthreading.minecraft.TickThreading;
import nallar.tickthreading.patcher.Declare;
import nallar.tickthreading.util.ChunkLoadRunnable;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.network.packet.Packet;
import net.minecraft.network.packet.Packet51MapChunk;
import net.minecraft.network.packet.Packet52MultiBlockChange;
import net.minecraft.network.packet.Packet53BlockChange;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.management.PlayerInstance;
import net.minecraft.server.management.PlayerManager;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.ChunkCoordIntPair;
import net.minecraft.world.WorldServer;
import net.minecraft.world.chunk.Chunk;
import net.minecraftforge.common.ForgeDummyContainer;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.world.ChunkWatchEvent;

/**
 * Patched replacement for the vanilla {@link PlayerInstance}: tracks the set of players
 * watching one chunk and batches block/tile-entity change packets out to them.
 * Differences from vanilla visible in this code: chunk loading is done asynchronously via
 * {@link ChunkLoadRunnable} callbacks, tile-entity updates are queued on a concurrent
 * queue, and chunk-update sends can be rate limited (see {@link #shouldPostPone}).
 */
public abstract class PatchPlayerInstance extends PlayerInstance {
	// Tile entities queued by updateTile(); drained and broadcast by sendTiles().
	private ConcurrentLinkedQueue<TileEntity> tilesToUpdate;
	// Global toggle for the shouldPostPone() rate limiting, read once at class load.
	private static final boolean rateLimitChunkUpdates = TickThreading.instance.rateLimitChunkUpdates;
	// Pre-built chunk-data payload sent when a player stops watching this chunk.
	// 0x78 0x9C is a zlib stream header, so this is presumably a pre-compressed
	// "empty chunk" blob the client interprets as an unload — TODO confirm.
	private static final byte[] unloadSequence = {0x78, (byte) 0x9C, 0x63, 0x64, 0x1C, (byte) 0xD9, 0x00, 0x00, (byte) 0x81, (byte) 0x80, 0x01, 0x01};
	// NOTE(review): @Declare fields carry a trailing underscore here but are referenced
	// below as `loaded`/`chunk` — presumably the patcher strips the suffix when injecting.
	@Declare
	public boolean loaded_;
	// True while this instance sits in the manager's "watchers with players" list;
	// cleared by clearTileCount()/sendChunkUpdate(), set by markRequiresUpdate().
	private boolean watched;
	@Declare
	public net.minecraft.world.chunk.Chunk chunk_;
	// Tick at which rate limiting starts counting (creation tick + 30 grace ticks).
	private int startTime;
	// Number of chunk updates sent since startTime; used by shouldPostPone().
	private int sentUpdates;

	public PatchPlayerInstance(PlayerManager par1PlayerManager, int par2, int par3) {
		super(par1PlayerManager, par2, par3);
	}

	/**
	 * Post-construction init — presumably invoked by the patcher in place of
	 * constructor-tail code. Kicks off an async load of this instance's chunk;
	 * LoadRunnable sets `loaded`/`chunk` when the chunk arrives.
	 */
	public void construct() {
		tilesToUpdate = new ConcurrentLinkedQueue<TileEntity>();
		myManager.getWorldServer().theChunkProviderServer.getChunkAt(chunkLocation.chunkXPos, chunkLocation.chunkZPos, new LoadRunnable(this));
		startTime = MinecraftServer.currentTick + 30;
	}

	/**
	 * Registers a player as watching this chunk. If the chunk is already loaded the
	 * chunk coord is added to the player's loadedChunks immediately; otherwise that
	 * registration is deferred until the async chunk load completes.
	 *
	 * @throws IllegalStateException if the player is already watching this chunk.
	 */
	@Override
	public void addPlayerToChunkWatchingList(final EntityPlayerMP entityPlayerMP) {
		if (this.playersInChunk.contains(entityPlayerMP)) {
			throw new IllegalStateException("Failed to add player. " + entityPlayerMP + " already is in chunk " + this.chunkLocation.chunkXPos + ", " + this.chunkLocation.chunkZPos);
		} else {
			this.playersInChunk.add(entityPlayerMP);
			if (loaded) {
				Collection<ChunkCoordIntPair> loadedChunks = entityPlayerMP.loadedChunks;
				// entityPlayerMP.loadedChunks is shared across threads; guarded by its own monitor.
				synchronized (loadedChunks) {
					loadedChunks.add(chunkLocation);
				}
			} else {
				// Chunk not loaded yet: queue the loadedChunks registration to run on load.
				myManager.getWorldServer().theChunkProviderServer.getChunkAt(chunkLocation.chunkXPos, chunkLocation.chunkZPos, new AddToPlayerRunnable(entityPlayerMP, chunkLocation));
			}
		}
	}

	/** Discards all pending block-change state and the watched flag. */
	@Override
	@Declare
	public synchronized void clearTileCount() {
		this.numberOfTilesToUpdate = 0;
		// field_73260_f is the bitmask of 16-block vertical sections with pending changes.
		this.field_73260_f = 0;
		this.watched = false;
	}

	/**
	 * Despite the vanilla name, this REMOVES a player from this chunk: sends the
	 * pre-compressed unload payload, detaches bookkeeping, fires the Forge UnWatch
	 * event, and tears the instance down when the last watcher leaves.
	 */
	@Override
	public void sendThisChunkToPlayer(EntityPlayerMP entityPlayerMP) {
		if (this.playersInChunk.remove(entityPlayerMP)) {
			Packet51MapChunk packet51MapChunk = new Packet51MapChunk();
			packet51MapChunk.includeInitialize = true;
			packet51MapChunk.xCh = chunkLocation.chunkXPos;
			packet51MapChunk.zCh = chunkLocation.chunkZPos;
			// Empty section masks + the canned payload below → client drops the chunk.
			packet51MapChunk.yChMax = 0;
			packet51MapChunk.yChMin = 0;
			packet51MapChunk.setData(unloadSequence);
			entityPlayerMP.playerNetServerHandler.sendPacketToPlayer(packet51MapChunk);
			Collection<ChunkCoordIntPair> loadedChunks = entityPlayerMP.loadedChunks;
			synchronized (loadedChunks) {
				loadedChunks.remove(chunkLocation);
			}
			MinecraftForge.EVENT_BUS.post(new ChunkWatchEvent.UnWatch(chunkLocation, entityPlayerMP));
			if (this.playersInChunk.isEmpty()) {
				// Vanilla's packed long key for the chunk-watcher map (x/z offset into positive range).
				long var2 = (long) this.chunkLocation.chunkXPos + 2147483647L | (long) this.chunkLocation.chunkZPos + 2147483647L << 32;
				this.myManager.getChunkWatchers().remove(var2);
				if (watched) {
					this.myManager.getChunkWatcherWithPlayers().remove(this);
				}
				this.myManager.getWorldServer().theChunkProviderServer.unloadChunksIfNotNearSpawn(this.chunkLocation.chunkXPos, this.chunkLocation.chunkZPos);
			}
		}
	}

	@Override
	public String toString() {
		return chunkLocation + " watched by " + Arrays.toString(playersInChunk.toArray());
	}

	/** Forces a full chunk resend (all sections) to every watcher. */
	@Override
	@Declare
	public void forceUpdate() {
		this.sendToAllPlayersWatchingChunk(new Packet51MapChunk(myManager.getWorldServer().getChunkFromChunkCoords(this.chunkLocation.chunkXPos, this.chunkLocation.chunkZPos), true, Integer.MAX_VALUE));
	}

	/**
	 * Drains the queued tile entities (deduplicating via a HashSet, since the same
	 * tile may be queued repeatedly) and broadcasts each description packet once.
	 */
	public void sendTiles() {
		HashSet<TileEntity> tileEntities = new HashSet<TileEntity>();
		for (TileEntity tileEntity = tilesToUpdate.poll(); tileEntity != null; tileEntity = tilesToUpdate.poll()) {
			tileEntities.add(tileEntity);
		}
		for (TileEntity tileEntity : tileEntities) {
			this.sendTileToAllPlayersWatchingChunk(tileEntity);
		}
		tileEntities.clear();
	}

	/**
	 * Records a block change at chunk-local coords (par1, par2=y, par3) for the next
	 * batched send. Deduplicates against already-recorded positions and grows the
	 * change buffer (doubling) when full.
	 */
	@Override
	public void flagChunkForUpdate(int par1, int par2, int par3) {
		if (noUpdateRequired()) {
			return;
		}
		markRequiresUpdate();
		synchronized (this) {
			// Mark the 16-block-tall section containing y as dirty.
			this.field_73260_f |= 1 << (par2 >> 4);
			// Pack x(4 bits) | z(4 bits) | y(8 bits) into one short, matching vanilla's format.
			short mask = (short) (par1 << 12 | par3 << 8 | par2);
			short[] locationOfBlockChange = this.locationOfBlockChange;
			int tiles = numberOfTilesToUpdate;
			for (int var5 = 0; var5 < tiles; ++var5) {
				if (locationOfBlockChange[var5] == mask) {
					return;
				}
			}
			if (tiles == locationOfBlockChange.length) {
				this.locationOfBlockChange = locationOfBlockChange = Arrays.copyOf(locationOfBlockChange, locationOfBlockChange.length << 1);
			}
			locationOfBlockChange[tiles++] = mask;
			numberOfTilesToUpdate = tiles;
		}
	}

	/**
	 * Ensures this instance is registered in the manager's with-players watcher list
	 * exactly once. The synchronized check makes the add happen only on the first
	 * flag after the last send.
	 */
	private void markRequiresUpdate() {
		boolean requiresWatch = false;
		synchronized (this) {
			if (!watched) {
				watched = requiresWatch = true;
			}
		}
		if (requiresWatch) {
			// Added outside the lock to avoid holding this monitor while touching manager state.
			this.myManager.getChunkWatcherWithPlayers().add(this);
		}
	}

	/** Queues a tile entity whose description packet must be resent to watchers. */
	@Override
	@Declare
	public void updateTile(TileEntity tileEntity) {
		if (noUpdateRequired()) {
			return;
		}
		markRequiresUpdate();
		tilesToUpdate.add(tileEntity);
	}

	/**
	 * Broadcasts a tile entity's description packet to all watchers. A tile whose
	 * getDescriptionPacket() throws is logged and skipped rather than crashing the
	 * update cycle.
	 */
	@Override
	protected void sendTileToAllPlayersWatchingChunk(TileEntity tileEntity) {
		if (tileEntity != null) {
			Packet descriptionPacket;
			try {
				descriptionPacket = tileEntity.getDescriptionPacket();
			} catch (Throwable t) {
				Log.severe("Failed to send TileEntity description for " + Log.toString(tileEntity) + " at chunk coords " + chunkLocation, t);
				return;
			}
			if (descriptionPacket != null) {
				this.sendToAllPlayersWatchingChunk(descriptionPacket);
			}
		}
	}

	/**
	 * @return true when updates can be skipped: chunk missing/unpopulated, no
	 * watchers, or the chunk was unloaded out from under us (in which case a reload
	 * is re-queued and the stale reference dropped).
	 */
	private boolean noUpdateRequired() {
		Chunk chunk = this.chunk;
		if (chunk == null || !chunk.isTerrainPopulated || playersInChunk.isEmpty()) {
			return true;
		}
		if (chunk.partiallyUnloaded) {
			Log.severe("Chunk for " + this + " has been unloaded without removing the PlayerInstance");
			this.chunk = null;
			myManager.getWorldServer().theChunkProviderServer.getChunkAt(chunkLocation.chunkXPos, chunkLocation.chunkZPos, new LoadRunnable(this));
			return true;
		}
		return false;
	}

	/**
	 * Rate limiter: postpone while more than ~0.1 updates/tick have been sent since
	 * startTime. When squash is set, both the elapsed window and the sent counter
	 * are halved so old history decays.
	 * NOTE(review): if currentTick == startTime the float division is by zero,
	 * which yields Infinity/NaN rather than throwing — presumably benign; confirm.
	 */
	@Override
	@Declare
	public boolean shouldPostPone(boolean squash, int currentTick) {
		if (!rateLimitChunkUpdates) {
			return false;
		}
		int runningTicks = currentTick - startTime;
		if (squash) {
			startTime = currentTick - (runningTicks /= 2);
			sentUpdates /= 2;
		}
		return (sentUpdates / (float) runningTicks) > 0.1f;
	}

	/**
	 * Flushes all pending changes to watchers: queued tile entities first, then the
	 * recorded block changes — a single Packet53 for one change, a whole-chunk
	 * Packet51 when at/above the Forge clumping threshold, else a Packet52
	 * multi-block change; affected tile entities are re-described afterwards.
	 */
	@Override
	public void sendChunkUpdate() {
		watched = false;
		if (noUpdateRequired()) {
			return;
		}
		sentUpdates++;
		sendTiles();
		synchronized (this) {
			int numberOfTilesToUpdate = this.numberOfTilesToUpdate;
			if (numberOfTilesToUpdate != 0) {
				short[] locationOfBlockChange = this.locationOfBlockChange;
				if (numberOfTilesToUpdate > locationOfBlockChange.length) {
					// Defensive clamp: the counter should never exceed the buffer, but log if it does.
					Log.warning("numberOfTilesToUpdate set too high. Got " + numberOfTilesToUpdate + " should be <= " + locationOfBlockChange.length);
					numberOfTilesToUpdate = locationOfBlockChange.length;
				}
				WorldServer worldServer = myManager.getWorldServer();
				Chunk chunk = this.chunk;
				if (numberOfTilesToUpdate == 1) {
					// Unpack the x|z|y short recorded by flagChunkForUpdate().
					int x = chunkLocation.chunkXPos * 16 + (locationOfBlockChange[0] >> 12 & 15);
					int y = locationOfBlockChange[0] & 255;
					int z = chunkLocation.chunkZPos * 16 + (locationOfBlockChange[0] >> 8 & 15);
					sendToAllPlayersWatchingChunk(new Packet53BlockChange(x, y, z, worldServer));
					sendTileToAllPlayersWatchingChunk(chunk.getChunkBlockTileEntity(locationOfBlockChange[0] >> 12 & 15, locationOfBlockChange[0] & 255, locationOfBlockChange[0] >> 8 & 15));
				} else {
					if (numberOfTilesToUpdate >= ForgeDummyContainer.clumpingThreshold) {
						// Many changes: cheaper to resend the dirty sections of the whole chunk.
						sendToAllPlayersWatchingChunk(new Packet51MapChunk(chunk, false, field_73260_f));
					} else {
						sendToAllPlayersWatchingChunk(new Packet52MultiBlockChange(chunkLocation.chunkXPos, chunkLocation.chunkZPos, locationOfBlockChange, numberOfTilesToUpdate, worldServer));
					}
					for (int i = 0; i < numberOfTilesToUpdate; ++i) {
						sendTileToAllPlayersWatchingChunk(chunk.getChunkBlockTileEntity(locationOfBlockChange[i] >> 12 & 15, locationOfBlockChange[i] & 255, locationOfBlockChange[i] >> 8 & 15));
					}
				}
				this.numberOfTilesToUpdate = 0;
				this.field_73260_f = 0;
			}
		}
	}

	/**
	 * Deferred registration used by addPlayerToChunkWatchingList() when the chunk is
	 * not yet loaded: adds the chunk coord to the player's loadedChunks once run.
	 */
	public static class AddToPlayerRunnable implements Runnable {
		private final EntityPlayerMP entityPlayerMP;
		private final ChunkCoordIntPair chunkLocation;

		public AddToPlayerRunnable(EntityPlayerMP entityPlayerMP, ChunkCoordIntPair chunkLocation) {
			this.entityPlayerMP = entityPlayerMP;
			this.chunkLocation = chunkLocation;
		}

		@Override
		public void run() {
			Collection<ChunkCoordIntPair> loadedChunks = entityPlayerMP.loadedChunks;
			synchronized (loadedChunks) {
				loadedChunks.add(chunkLocation);
			}
		}
	}

	/** Async chunk-load callback: wires the loaded chunk into the owning instance. */
	public static class LoadRunnable extends ChunkLoadRunnable {
		final PlayerInstance playerInstance;

		public LoadRunnable(PlayerInstance playerInstance) {
			this.playerInstance = playerInstance;
		}

		@Override
		public void onLoad(Chunk chunk) {
			playerInstance.loaded = true;
			playerInstance.chunk = chunk;
		}
	}
}
package nl.sense_os.commonsense.client.main; import nl.sense_os.commonsense.client.CommonSense; import nl.sense_os.commonsense.client.login.LoginEvents; import nl.sense_os.commonsense.client.main.components.HelpScreen; import nl.sense_os.commonsense.client.main.components.HomeScreen; import nl.sense_os.commonsense.client.main.components.NavPanel; import nl.sense_os.commonsense.client.utility.Log; import nl.sense_os.commonsense.client.visualization.VizEvents; import nl.sense_os.commonsense.shared.UserModel; import com.extjs.gxt.ui.client.Style.LayoutRegion; import com.extjs.gxt.ui.client.event.EventType; import com.extjs.gxt.ui.client.mvc.AppEvent; import com.extjs.gxt.ui.client.mvc.Controller; import com.extjs.gxt.ui.client.mvc.Dispatcher; import com.extjs.gxt.ui.client.mvc.View; import com.extjs.gxt.ui.client.util.Margins; import com.extjs.gxt.ui.client.widget.Component; import com.extjs.gxt.ui.client.widget.LayoutContainer; import com.extjs.gxt.ui.client.widget.Text; import com.extjs.gxt.ui.client.widget.Viewport; import com.extjs.gxt.ui.client.widget.layout.BorderLayout; import com.extjs.gxt.ui.client.widget.layout.BorderLayoutData; import com.extjs.gxt.ui.client.widget.layout.CenterLayout; import com.extjs.gxt.ui.client.widget.layout.FitLayout; import com.google.gwt.user.client.ui.RootPanel; public class MainView extends View { private static final String TAG = "MainView"; private Viewport viewport; private LayoutContainer center; private NavPanel navPanel; private Component homeComponent; private Component helpComponent; public MainView(Controller controller) { super(controller); } private void createCenter() { this.center = new LayoutContainer(new FitLayout()); BorderLayoutData centerData = new BorderLayoutData(LayoutRegion.CENTER); this.viewport.add(this.center, centerData); } private void createFooter() { LayoutContainer footer = new LayoutContainer(new CenterLayout()); Text footerText = new Text("& + CommonSense.LAST_DEPLOYED); 
footerText.setStyleAttribute("font-size", "10pt"); footer.add(footerText); footer.setId("footer-bar"); BorderLayoutData southData = new BorderLayoutData(LayoutRegion.SOUTH, 30); southData.setMargins(new Margins(0)); southData.setSplit(false); this.viewport.add(footer, southData); } private void createNavigation() { this.navPanel = new NavPanel(); this.navPanel.setId("navigation-bar"); BorderLayoutData northData = new BorderLayoutData(LayoutRegion.NORTH, 23); northData.setMargins(new Margins(0)); northData.setSplit(false); this.viewport.add(this.navPanel, northData); } @Override protected void handleEvent(AppEvent event) { EventType type = event.getType(); if (type.equals(MainEvents.Error)) { Log.e(TAG, "Error"); onError(event); } else if (type.equals(MainEvents.Init)) { Log.d(TAG, "Init"); // do nothing: actual initialization is done in initialize() } else if (type.equals(MainEvents.UiReady)) { Log.d(TAG, "UiReady"); onUiReady(event); } else if (type.equals(MainEvents.Navigate)) { // Log.d(TAG, "Navigate: \'" + event.<String> getData() + "\'"); onNavigate(event); } else if (type.equals(LoginEvents.LoggedIn)) { // Log.d(TAG, "LoggedIn"); onLoggedIn(event); } else if (type.equals(LoginEvents.LoggedOut)) { // Log.d(TAG, "LoggedOut"); onLoggedOut(event); } else { Log.e(TAG, "Unexpected event type: " + type); } } @Override protected void initialize() { super.initialize(); // ViewPort fills browser screen and automatically resizes content this.viewport = new Viewport(); this.viewport.setId("viewport"); this.viewport.setLayout(new BorderLayout()); this.viewport.setStyleAttribute("background", "url('img/bg/right_top_pre-light.png') no-repeat top right;"); createNavigation(); createCenter(); createFooter(); } private void onError(AppEvent event) { Log.e(TAG, "Error: " + event.<String> getData()); } private void onLoggedIn(AppEvent event) { final UserModel user = event.<UserModel> getData(); this.navPanel.setUser(user); this.navPanel.setLoggedIn(true); } private void 
onLoggedOut(AppEvent event) { this.navPanel.setLoggedIn(false); } private void onNavigate(AppEvent event) { String location = event.<String> getData("new"); // select the new center content Component newContent = null; if (null != location) { if (location.equals(NavPanel.SIGN_IN)) { newContent = new LayoutContainer(); Dispatcher.forwardEvent(LoginEvents.Show); } else if (location.equals(NavPanel.SIGN_OUT)) { newContent = new LayoutContainer(); Dispatcher.forwardEvent(LoginEvents.RequestLogout); } else if (location.equals(NavPanel.HOME)) { if (null == this.homeComponent) { this.homeComponent = new HomeScreen(); } newContent = this.homeComponent; } else if (location.equals(NavPanel.HELP)) { if (null == this.helpComponent) { this.helpComponent = new HelpScreen(); } newContent = this.helpComponent; } else if (location.equals(NavPanel.VISUALIZATION)) { Dispatcher.forwardEvent(VizEvents.Show, this.center); } else { LayoutContainer lc = new LayoutContainer(new CenterLayout()); lc.add(new Text("Under construction...")); newContent = lc; } } // remove old center content if (null != newContent) { newContent.setId("center_content"); this.center.removeAll(); this.center.add(newContent); this.center.layout(); } // hide login window String oldLocation = event.<String> getData("old"); if (NavPanel.SIGN_IN.equalsIgnoreCase(oldLocation) && !NavPanel.SIGN_IN.equalsIgnoreCase(location)) { Dispatcher.forwardEvent(LoginEvents.Hide); } // update navigation panel this.navPanel.setHighlight(location); } private void onUiReady(AppEvent event) { RootPanel.get().add(this.viewport); } }
package com.opencms.core;

import com.opencms.util.*;

import java.util.*;
import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;

/**
 * CmsRequest implementation wrapping an HttpServletRequest. For
 * multipart/form-data POSTs the body is parsed here (parameters and uploaded
 * file contents are collected into hashtables); for all other requests the
 * wrapped request is consulted directly.
 */
public class CmsRequestHttpServlet implements I_CmsConstants, I_CmsLogChannels, I_CmsRequest {

    /**
     * Define the maximum size for an uploaded file (8 MB)
     */
    private static final int DEFAULT_MAX_POST_SIZE = 8192 * 1024; // 8 Meg

    /**
     * Definition of the error message for an empty request.
     */
    static final String C_REQUEST_NOTNULL = "The Request cannot be null.";

    /**
     * Definition of the error message for being not a multipart request.
     */
    static final String C_REQUEST_NOMULTIPART = "Posted content type isn't multipart/form-data";

    /**
     * Definition of the error message for a negative maximum post size.
     */
    static final String C_REQUEST_SIZENOTNEGATIVE = "The maxPostSize must be positive.";

    /**
     * Definition of the error message for a premature end.
     */
    static final String C_REQUEST_PROMATUREEND = "Corrupt form data: premature ending";

    /**
     * Definition of the error message for missing boundary.
     */
    static final String C_REQUEST_NOBOUNDARY = "Separation boundary was not specified";

    /**
     * The maximum size of the uploaded data.
     */
    private int m_maxSize = DEFAULT_MAX_POST_SIZE;

    /**
     * The original request.
     */
    private HttpServletRequest m_req;

    /**
     * The path to the resource.
     */
    private String m_path = null;

    /**
     * The type of this CmsRequest.
     */
    private int m_type = C_REQUEST_HTTP;

    /**
     * Storage for all uploaded files (file name -> byte[] content).
     */
    private Hashtable m_files = new Hashtable();

    /**
     * Storage for all uploaded name/value parameters.
     */
    private Hashtable m_parameters = new Hashtable();

    /**
     * Constructor, creates a new CmsRequestHttpServlet object.
     *
     * @param req The original HttpServletRequest used to create this CmsRequest.
     * @exception IOException if the multipart body is malformed, too large, or
     * the servlet environment mangled URL parameters into the path info.
     */
    CmsRequestHttpServlet(HttpServletRequest req) throws IOException {
        m_req = req;

        // Test if this is a multipart-request.
        // If it is, extract all files from it.
        String type = req.getHeader("content-type");
        if ((type != null) && type.startsWith("multipart/form-data")) {
            readRequest();
        }
        // Warn about raw "/" characters in the query string: some servlet
        // environments fold them into the path info (detected below).
        if (A_OpenCms.isLogging() && m_req.getQueryString() != null && m_req.getQueryString().indexOf("/") != -1) {
            A_OpenCms.log(C_OPENCMS_INFO, "WARNING: unescaped \"/\" found in URL parameter! This may cause problems with some servlet environments.");
            A_OpenCms.log(C_OPENCMS_INFO, javax.servlet.http.HttpUtils.getRequestURL(m_req).toString());
        }
        // A "?" surviving in the path info means the container failed to split
        // off the query string — refuse to continue with a corrupt resource path.
        if (m_req.getPathInfo().indexOf("?") != -1) {
            A_OpenCms.log(C_OPENCMS_CRITICAL, "WARNING: URL parameters were not extracted properly.");
            A_OpenCms.log(C_OPENCMS_CRITICAL, "This may be caused by a bug in your servlet environment with handling \"/\" characters. ");
            A_OpenCms.log(C_OPENCMS_CRITICAL, "Please make sure you are escaping all special chars (including \"/\") in your HTML forms.");
            A_OpenCms.log(C_OPENCMS_CRITICAL, m_req.getPathInfo());
            throw new IOException("URL parameters not extracted properly by servlet environment. " + m_req.getPathInfo());
        }
    }

    /**
     * Returns the path of the requested resource (the servlet path info).
     */
    public String getRequestedResource() {
        return m_req.getPathInfo();
    }

    /**
     * Returns the value of a named parameter as a String.
     * Returns null if the parameter does not exist or an empty string if the parameter
     * exists but without a value. For multipart requests the value comes from the
     * locally parsed parameter table, otherwise from the wrapped request.
     *
     * @param name The name of the parameter.
     * @return The value of the parameter.
     */
    public String getParameter(String name) {
        String parameter = null;

        // Multipart bodies were parsed by readRequest(); the container cannot
        // see those parameters, so serve them from our own table.
        String type = m_req.getHeader("content-type");
        if ((type != null) && type.startsWith("multipart/form-data")) {
            parameter = (String) m_parameters.get(name);
        } else {
            parameter = m_req.getParameter(name);
        }
        /* if(parameter != null && !"".equals(parameter) && (parameter.indexOf("%") != -1)) {
            if(A_OpenCms.isLogging()) {
                A_OpenCms.log(C_OPENCMS_DEBUG, "[CmsRequestHttpServlet] encoding required for parameter " + name + "=" + parameter);
            }
            parameter = Encoder.unescape(parameter);
        }*/
        return parameter;
    }

    /**
     * Returns all parameter names as an Enumeration of String objects.
     * Returns an empty Enumeration if no parameters were included in the request.
     *
     * @return Enumeration of parameter names.
     */
    public Enumeration getParameterNames() {
        String type = m_req.getHeader("content-type");
        if ((type != null) && type.startsWith("multipart/form-data")) {
            // all parameters extracted in the multipart handling
            return m_parameters.keys();
        } else {
            // all parameters from the original request
            return m_req.getParameterNames();
        }
    }

    /**
     * Returns all parameter values of a parameter key.
     * NOTE(review): always delegates to the wrapped request, so multipart
     * parameters parsed locally are not visible here — confirm intended.
     *
     * @return Array of String containing the parameter values.
     */
    public String[] getParameterValues(String key) {
        return m_req.getParameterValues(key);
    }

    /**
     * Returns the content of an uploaded file.
     * Returns null if no file with this name has been uploaded with this request.
     * Returns an empty byte[] if a file without content has been uploaded.
     *
     * @param name The name of the uploaded file.
     * @return The selected uploaded file content.
     */
    public byte[] getFile(String name) {
        byte[] content = null;
        content = (byte[]) m_files.get(name);
        return content;
    }

    /**
     * Returns the names of all uploaded files in this request.
     * Returns an empty enumeration if no files were included in the request.
     *
     * @return An Enumeration of file names.
     */
    public Enumeration getFileNames() {
        Enumeration names = m_files.keys();
        return names;
    }

    /**
     * Returns the type of the request that was used to create the CmsRequest.
     * The returned int must be one of the constants defined above in this interface.
     *
     * @return The type of the CmsRequest.
     */
    public int getOriginalRequestType() {
        return m_type;
    }

    /**
     * Returns the original request that was used to create the CmsRequest.
     *
     * @return The original request of the CmsRequest.
     */
    public Object getOriginalRequest() {
        return m_req;
    }

    /**
     * This method actually parses the multipart request body, then manually
     * extracts any URL query-string parameters (some servlet environments cannot
     * handle multipart requests AND URL parameters at the same time).
     *
     * @exception IOException If the uploaded content is larger than
     * <tt>maxSize</tt> or there's a problem parsing the request.
     */
    private void readRequest() throws IOException {

        // Check the content type to make sure it's "multipart/form-data"
        String type = m_req.getContentType();
        if (type == null || !type.toLowerCase().startsWith("multipart/form-data")) {
            throw new IOException(C_REQUEST_NOMULTIPART);
        }

        // Check the content length to prevent denial of service attacks
        int length = m_req.getContentLength();
        if (length > m_maxSize) {
            throw new IOException("Posted content length of " + length + " exceeds limit of " + m_maxSize);
        }

        // Get the boundary string; it's included in the content type.
        String boundary = extractBoundary(type);
        if (boundary == null) {
            throw new IOException(C_REQUEST_NOBOUNDARY);
        }

        // Construct the special input stream we'll read from
        CmsMultipartInputStreamHandler in = new CmsMultipartInputStreamHandler(m_req.getInputStream(), boundary, length);

        // Read the first line, should be the first boundary
        String line = in.readLine();
        if (line == null) {
            throw new IOException(C_REQUEST_PROMATUREEND);
        }

        // Verify that the line is the boundary
        if (!line.startsWith(boundary)) {
            throw new IOException(C_REQUEST_NOBOUNDARY);
        }

        // Now that we're just beyond the first boundary, loop over each part
        boolean done = false;
        while (!done) {
            done = readNextPart(in, boundary);
        }

        // Unfortunately some servlet environments cannot handle multipart
        // requests AND URL parameters at the same time, we have to manage
        // the URL params ourselves here. So try to read the URL parameters:
        String queryString = m_req.getQueryString();
        if (queryString != null) {
            // FIX: reuse the queryString already fetched above instead of
            // calling m_req.getQueryString() a second time.
            StringTokenizer st = new StringTokenizer(queryString, "&");
            while (st.hasMoreTokens()) {
                // Loop through all parameters
                String currToken = st.nextToken();
                if (currToken != null && !"".equals(currToken)) {
                    // look for the "=" character to divide parameter name and value
                    int idx = currToken.indexOf("=");
                    if (idx > -1) {
                        String key = currToken.substring(0, idx);
                        String value = (idx < (currToken.length() - 1)) ? currToken.substring(idx + 1) : "";
                        m_parameters.put(key, value);
                    }
                }
            }
        }
    }

    /**
     * A utility method that reads an individual part. Dispatches to
     * readParameter() and readAndSaveFile() to do the actual work.
     * <p>
     * The single files are stored in a hashtable (separated in filename and contents)
     * for later addition to a CmsFile object.
     *
     * @param in The stream from which to read the part
     * @param boundary The boundary separating parts
     * @return A flag indicating whether this is the last part
     * @exception IOException If there's a problem reading or parsing the
     * request
     *
     * @see #readParameter
     * @see #readAndSaveFile
     */
    private boolean readNextPart(CmsMultipartInputStreamHandler in, String boundary) throws IOException {

        // Read the first line, should look like this:
        // content-disposition: form-data; name="field1"; filename="file1.txt"
        String line = in.readLine();
        if (line == null || line.equals("")) {
            // No parts left, we're done
            return true;
        }

        // Parse the content-disposition line
        String[] dispInfo = extractDispositionInfo(line);

        // String disposition = dispInfo[0];
        String name = dispInfo[1];
        String filename = dispInfo[2];

        // Now onto the next line. This will either be empty
        // or contain a Content-Type and then an empty line.
        line = in.readLine();
        if (line == null) {
            // No parts left, we're done
            return true;
        }

        // Get the content type, or null if none specified
        String contentType = extractContentType(line);
        if (contentType != null) {
            // Eat the empty line
            line = in.readLine();
            if (line == null || line.length() > 0) { // line should be empty
                throw new IOException("Malformed line after content type: " + line);
            }
        } else {
            // Assume a default content type
            contentType = "application/octet-stream";
        }

        // Now, finally, we read the content (end after reading the boundary)
        if (filename == null) {
            // This is a parameter
            String value = readParameter(in, boundary);
            m_parameters.put(name, value);
        } else {
            // This is a file.
            // NOTE(review): files are keyed by their (client supplied) filename,
            // so two uploads with the same name overwrite each other, and
            // filecounter is incremented but never read — confirm intended.
            byte[] value = readAndSaveFile(in, boundary);
            filecounter++;
            m_files.put(filename, value);
        }

        // there's more to read
        return false;
    }

    // Number of file parts seen so far in this request.
    int filecounter = 0;

    /**
     * A utility method that reads a single part of the multipart request
     * that represents a parameter.
     *
     * @param in The stream from which to read the parameter information
     * @param boundary The boundary signifying the end of this part
     * @return The parameter value, or null if the part was empty
     * @exception IOException If there's a problem reading or parsing the
     * request
     */
    private String readParameter(CmsMultipartInputStreamHandler in, String boundary) throws IOException {
        StringBuffer sbuf = new StringBuffer();
        String line;
        while ((line = in.readLine()) != null) {
            if (line.startsWith(boundary)) {
                break;
            }
            // add the \r\n in case there are many lines
            sbuf.append(line + "\r\n");
        }
        if (sbuf.length() == 0) {
            // nothing read
            return null;
        }

        // cut off the last line's \r\n
        sbuf.setLength(sbuf.length() - 2);

        // no URL decoding needed
        return sbuf.toString();
    }

    /**
     * A utility method that reads a single part of the multipart request
     * that represents a file. Unlike the method name it does NOT save the file
     * to disk; the name comes from the original O'Reilly implementation.
     * <p>
     * File parts must not be read line by line: some servlet environments touch
     * and change the new-line character(s) in <code>readLine</code>, which would
     * corrupt binary uploads. Therefore this reads byte by byte, matching the
     * boundary incrementally and buffering up to two bytes of a potential
     * trailing line break that precedes the boundary.
     *
     * @param in The stream from which to read the file.
     * @param boundary The boundary signifying the end of this part.
     * @return The file content as a byte array.
     * @exception IOException If there's a problem reading or parsing the request.
     */
    private byte[] readAndSaveFile(CmsMultipartInputStreamHandler in, String boundary) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream(8 * 1024);
        byte[] boundaryBytes = boundary.getBytes();

        // Bytes tentatively matched against the boundary (not yet written out).
        int[] lookaheadBuf = new int[boundary.length() + 3];
        // Two-byte buffer for the line break that precedes the boundary.
        int[] newLineBuf = {-1, -1};
        int matches = 0;
        // FIX: plain byte-to-int widening replaces the deprecated
        // new Byte(b).intValue() boxing; the resulting int value is identical.
        int matchingByte = boundaryBytes[matches];

        int read = in.read();
        while (read > -1) {
            if (read == matchingByte) {
                // read byte is matching the next byte of the boundary
                // we should not write to the output stream here.
                lookaheadBuf[matches] = read;
                matches++;
                if (matches == boundary.length()) {
                    // The end of the boundary has been reached.
                    // Now snip the preceding line feed.
                    read = in.read();
                    if (newLineBuf[1] == read) {
                        // New line contains ONE single character.
                        // Write the last byte of the buffer to the output stream.
                        out.write(newLineBuf[0]);
                    } else {
                        // new line contains TWO characters, possibly "\r\n"
                        // The bytes in the buffer are not part of the file.
                        // We even have to read one more byte.
                        in.read();
                    }
                    break;
                }
                matchingByte = boundaryBytes[matches];
            } else {
                // read byte does not match the next byte of the boundary
                // write the first buffer byte to the output stream
                if (newLineBuf[0] != -1) {
                    out.write(newLineBuf[0]);
                }
                if (matches == 0) {
                    // this may be the most probable case.
                    newLineBuf[0] = newLineBuf[1];
                } else {
                    // we have started to read the boundary.
                    // Unfortunately, this was NOT the real boundary.
                    // Fall back to normal read mode.
                    // write the complete buffer to the output stream
                    if (newLineBuf[1] != -1) {
                        out.write(newLineBuf[1]);
                    }
                    for (int i = 0; i < matches; i++) {
                        out.write(lookaheadBuf[i]);
                    }
                    // reset boundary matching counter
                    matches = 0;
                    matchingByte = boundaryBytes[matches];
                    // clear buffer
                    newLineBuf[0] = -1;
                }
                // put the last byte read into the buffer.
                // it may be part of a line feed.
                newLineBuf[1] = read;
            }
            read = in.read();
        }
        out.flush();
        return out.toByteArray();
    }

    /**
     * Extracts and returns the boundary token from a content-type line.
     *
     * @param line Line with boundary from input stream.
     * @return The boundary token, or null if none was present.
     */
    private String extractBoundary(String line) {
        int index = line.indexOf("boundary=");
        if (index == -1) {
            return null;
        }

        // 9 for "boundary="
        String boundary = line.substring(index + 9);

        // The real boundary is always preceded by an extra "--"
        boundary = "--" + boundary;
        return boundary;
    }

    /**
     * Extracts and returns disposition info from a line, as a String array
     * with elements: disposition, name, filename. Throws an IOException
     * if the line is malformatted.
     *
     * @param line Line from input stream.
     * @return Array of string containing disposition information.
     * @exception IOException Throws an IOException if the line is malformatted.
     */
    private String[] extractDispositionInfo(String line) throws IOException {

        // Return the line's data as an array: disposition, name, filename
        String[] retval = new String[3];

        // Convert the line to a lowercase string without the ending \r\n
        // Keep the original line for error messages and for variable names.
        String origline = line;
        line = origline.toLowerCase();

        // Get the content disposition, should be "form-data"
        int start = line.indexOf("content-disposition: ");
        int end = line.indexOf(";");
        if (start == -1 || end == -1) {
            throw new IOException("Content disposition corrupt: " + origline);
        }
        String disposition = line.substring(start + 21, end);
        if (!disposition.equals("form-data")) {
            throw new IOException("Invalid content disposition: " + disposition);
        }

        // Get the field name
        // start at last semicolon
        start = line.indexOf("name=\"", end);

        // skip name=\"
        end = line.indexOf("\"", start + 7);
        if (start == -1 || end == -1) {
            throw new IOException("Content disposition corrupt: " + origline);
        }
        String name = origline.substring(start + 6, end);

        // Get the filename, if given
        String filename = null;

        // start after name
        start = line.indexOf("filename=\"", end + 2);

        // skip filename=\"
        end = line.indexOf("\"", start + 10);

        // note the !=
        if (start != -1 && end != -1) {
            filename = origline.substring(start + 10, end);

            // The filename may contain a full path. Cut to just the filename.
            int slash = Math.max(filename.lastIndexOf('/'), filename.lastIndexOf('\\'));
            if (slash > -1) {
                // past last slash
                filename = filename.substring(slash + 1);
            }

            // sanity check
            if (filename.equals("")) filename = "unknown";
        }

        // Return a String array: disposition, name, filename
        retval[0] = disposition;
        retval[1] = name;
        retval[2] = filename;
        return retval;
    }

    /**
     * Extracts and returns the content type from a line, or null if the
     * line was empty.
     *
     * @param line Line from input stream.
     * @return Content type of the line.
     * @exception IOException Throws an IOException if the line is malformatted.
     */
    private String extractContentType(String line) throws IOException {
        String contentType = null;

        // Convert the line to a lowercase string
        String origline = line;
        line = origline.toLowerCase();

        // Get the content type, if any
        if (line.startsWith("content-type")) {
            int start = line.indexOf(" ");
            if (start == -1) {
                throw new IOException("Content type corrupt: " + origline);
            }
            contentType = line.substring(start + 1);
        } else if (line.length() != 0) {
            // no content type, so should be empty
            throw new IOException("Malformed line after disposition: " + origline);
        }
        return contentType;
    }
}
package opendap.wcs.v2_0;

import opendap.bes.BESError;
import opendap.bes.BESManager;
import opendap.bes.BadConfigurationException;
import opendap.bes.dap2Responders.BesApi;
import opendap.coreServlet.Scrub;
import opendap.http.Util;
import opendap.ppt.PPTException;
import opendap.wcs.v2_0.formats.WcsResponseFormat;
import opendap.wcs.v2_0.http.Attachment;
import opendap.wcs.v2_0.http.MultipartResponse;
import opendap.wcs.v2_0.http.SoapHandler;
import org.apache.http.client.CredentialsProvider;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.*;

/**
 * Process GetCoverage requests. Static methods are used to construct a wcs:Coverages
 * response.
 */
public class GetCoverageRequestProcessor {

    private static Logger _log = LoggerFactory.getLogger(GetCoverageRequestProcessor.class);

    public static String coveragesContentID = "urn:ogc:wcs:1.1:coverages";

    /**
     * Dispatches a GetCoverage request to either the multipart (GML + data) or
     * the plain format response path.
     *
     * @param req             The GetCoverageRequest object built from the client request.
     * @param response        HttpServletResponse object that will receive the response content.
     * @param useSoapEnvelope Instructs the server to make the response a SOAP document.
     * @throws WcsException         When a wcs:Coverage response document cannot be
     *                              constructed for the passed request.
     * @throws InterruptedException When the server may need to stop a (possibly lengthy) request.
     * @throws IOException          When the disk or the network is broken.
     */
    public static void sendCoverageResponse(
            GetCoverageRequest req,
            HttpServletResponse response,
            boolean useSoapEnvelope)
            throws WcsException, InterruptedException, IOException, PPTException,
            BadConfigurationException, BESError {

        String id = req.getCoverageID();
        WcsCatalog wcsCatalog = WcsServiceManager.getCatalog(id);
        boolean b = wcsCatalog.hasCoverage(id);
        if (!b)
            throw new WcsException("No such wcs:Coverage: " + Scrub.fileName(id),
                    WcsException.INVALID_PARAMETER_VALUE, "wcs:CoverageId");

        // If the mediaType is specified then we know it must be multipart/related because
        // the spec says that's the only acceptable value and the GetCoverageRequest class
        // enforces that rule. And since it only has a single value we know it means we have
        // to send the multipart response with the gml:Coverage in the first part and then
        // the binary stuff, as specified in the format parameter, in the next part.
        if (req.getMediaType() != null) {
            sendMultipartGmlResponse(req, response, useSoapEnvelope);
        } else {
            sendFormatResponse(req, response);
        }
    }

    /**
     * Streams the coverage data in the requested format, either from the BES
     * or by forwarding the content of a remote DAP2 data URL.
     *
     * @param req      The GetCoverageRequest object built from the client request.
     * @param response HttpServletResponse object that will receive the response content.
     */
    public static void sendFormatResponse(
            GetCoverageRequest req,
            HttpServletResponse response)
            throws WcsException, InterruptedException, IOException, PPTException,
            BadConfigurationException, BESError {

        _log.debug("Sending binary data response...");

        response.setHeader("Content-Disposition", getContentDisposition(req));

        String coverageId = req.getCoverageID();
        WcsCatalog wcsCatalog = WcsServiceManager.getCatalog(coverageId);
        String dapDatasetUrl = wcsCatalog.getDapDatsetUrl(coverageId);

        if (dapDatasetUrl.toLowerCase().startsWith(Util.BES_PROTOCOL)) {
            // Local dataset: run the request through the BES.
            CoverageDescription coverageDescription = wcsCatalog.getCoverageDescription(coverageId);
            String besDatasetId = dapDatasetUrl.substring(Util.BES_PROTOCOL.length());
            Document besCmd = getBesCmd(req, coverageDescription, wcsCatalog);
            if (!BESManager.isInitialized())
                throw new WcsException("The BESManager has not been configured. Unable to access BES!",
                        WcsException.NO_APPLICABLE_CODE);
            BesApi besApi = new BesApi();
            besApi.besTransaction(besDatasetId, besCmd, response.getOutputStream());
        } else if (dapDatasetUrl.toLowerCase().startsWith(Util.HTTP_PROTOCOL)
                || dapDatasetUrl.toLowerCase().startsWith(Util.HTTPS_PROTOCOL)) {
            // Remote dataset: proxy the DAP2 data response.
            CredentialsProvider authCreds = WcsServiceManager.getCredentialsProvider();
            Util.forwardUrlContent(getDap2DataAccessUrl(req), authCreds, response, true);
        }
    }

    /**
     * Builds and transmits the multipart/related response: a gml:Coverage
     * document in the first part and the binary coverage data in the second.
     *
     * @param req             The GetCoverageRequest object built from the client request.
     * @param response        HttpServletResponse object that will receive the response content.
     * @param useSoapEnvelope Instructs the server to wrap the GML part in a SOAP envelope.
     */
    public static void sendMultipartGmlResponse(
            GetCoverageRequest req,
            HttpServletResponse response,
            boolean useSoapEnvelope)
            throws WcsException, InterruptedException {

        _log.debug("Building multi-part Response...");

        String coverageId = req.getCoverageID();
        WcsCatalog wcsCatalog = WcsServiceManager.getCatalog(coverageId);
        String rangePartId = "cid:" + coverageId;

        CoverageDescription coverageDescription = wcsCatalog.getCoverageDescription(req.getCoverageID());

        // If this is an EO coverage then update its bounding box to reflect the subset.
        if (coverageDescription instanceof EOCoverageDescription) {
            try {
                // Make a copy so we don't alter the original.
                EOCoverageDescription eocd =
                        new EOCoverageDescription((EOCoverageDescription) coverageDescription);
                // Tweak the footprint.
                eocd.adjustEOMetadataCoverageFootprint(req);
                // Make it the thing to use...
                coverageDescription = eocd;
            } catch (IOException e) {
                throw new WcsException("sendMultipartGmlResponse() - OUCH!! Failed to create new (malleable) " +
                        "EOCoverageDescription using the copy constructor.",
                        WcsException.NO_APPLICABLE_CODE);
            }
        }

        Coverage coverage = coverageDescription.getCoverage(req.getRequestUrl());
        Element coverageElement = coverage.getCoverageElement(rangePartId, getReturnMimeType(req));

        XMLOutputter xmlo = new XMLOutputter(Format.getPrettyFormat());
        _log.debug(xmlo.outputString(coverageElement));

        Document doc = new Document(coverageElement);
        if (useSoapEnvelope)
            doc = SoapHandler.wrapDocumentInSoapEnvelope(doc);

        MultipartResponse mpr = new MultipartResponse();
        Attachment gmlPart = new Attachment("application/gml+xml; charset=UTF-8", "gml-part", doc);
        mpr.addAttachment(gmlPart);

        Attachment rangePart;
        String dapDataAccessUrl = wcsCatalog.getDapDatsetUrl(coverageId);
        if (dapDataAccessUrl.toLowerCase().startsWith(Util.BES_PROTOCOL)) {
            // Local dataset: the range part is produced by a BES command.
            String besDatasetId = dapDataAccessUrl.substring(Util.BES_PROTOCOL.length());
            Document besCmd = getBesCmd(req, coverageDescription, wcsCatalog);
            rangePart = new Attachment(getReturnMimeType(req), rangePartId, besDatasetId, besCmd);
        } else {
            // Remote dataset: the range part is fetched from the DAP2 data URL.
            rangePart = new Attachment(getReturnMimeType(req), rangePartId,
                    getDap2DataAccessUrl(req), WcsServiceManager.getCredentialsProvider());
        }
        rangePart.setHeader("Content-Disposition", getContentDisposition(req));
        mpr.addAttachment(rangePart);

        try {
            mpr.send(response);
        } catch (Exception e) {
            StringBuilder msg = new StringBuilder("sendMultipartGmlResponse() - ");
            msg.append("Failed to transmit WCS coverage response.");
            msg.append(" Message: ").append(e.getMessage());
            throw new WcsException(msg.toString(), WcsException.NO_APPLICABLE_CODE);
        }
    }

    /**
     * Returns the requested response format, falling back to the coverage's
     * native format when the request carries none.
     */
    public static String getReturnFormat(GetCoverageRequest req)
            throws WcsException, InterruptedException {
        String format = req.getFormat();
        String id = req.getCoverageID();
        if (format == null) {
            // No format requested: use the coverage's native format.
            CoverageDescription coverageDescription =
                    WcsServiceManager.getCatalog(id).getCoverageDescription(id);
            format = coverageDescription.getNativeFormat();
        }
        return format;
    }

    /**
     * Builds the DAP2 data access URL for the request: the dataset URL plus
     * the format's data response suffix and the URL-encoded constraint expression.
     *
     * @param req The GetCoverageRequest object built from the client request.
     * @return The complete DAP2 data request URL.
     * @throws WcsException When the requested format is not recognized.
     */
    public static String getDap2DataAccessUrl(GetCoverageRequest req)
            throws WcsException, InterruptedException {

        String format = getReturnFormat(req);
        WcsResponseFormat rFormat = ServerCapabilities.getFormat(format);
        if (rFormat == null) {
            throw new WcsException("Unrecognized response format: " + Scrub.fileName(format),
                    WcsException.INVALID_PARAMETER_VALUE, "format");
        }
        WcsCatalog wcsCatalog = WcsServiceManager.getCatalog(req.getCoverageID());
        String requestURL = wcsCatalog.getDapDatsetUrl(req.getCoverageID());
        StringBuilder dap2DataAccessURL = new StringBuilder(requestURL);
        dap2DataAccessURL.append(".").append(rFormat.dapDataResponseSuffix())
                .append("?").append(getDap2CE(req));
        return dap2DataAccessURL.toString();
    }

    /**
     * Builds the Content-Disposition header value for the response.
     *
     * @param req The GetCoverageRequest object built from the client request.
     * @return A Content-Disposition value with a quoted filename.
     * @throws WcsException When the requested format is not recognized.
     */
    public static String getContentDisposition(GetCoverageRequest req)
            throws WcsException, InterruptedException {

        String format = getReturnFormat(req);
        WcsResponseFormat rFormat = ServerCapabilities.getFormat(format);
        if (rFormat == null) {
            throw new WcsException("Unrecognized response format: " + Scrub.fileName(format),
                    WcsException.INVALID_PARAMETER_VALUE, "format");
        }
        StringBuilder contentDisposition = new StringBuilder();
        // BUGFIX: the filename quote was opened but never closed, producing a
        // malformed header; and the "." between the coverage id and the suffix
        // was missing (getDap2DataAccessUrl() shows the suffix carries no dot).
        contentDisposition
                .append(" attachment; filename=\"")
                .append(req.getCoverageID())
                .append(".")
                .append(rFormat.dapDataResponseSuffix())
                .append("\"");
        return contentDisposition.toString();
    }

    /**
     * Returns the MIME type associated with the requested response format.
     *
     * @throws WcsException When the requested format is not recognized.
     */
    public static String getReturnMimeType(GetCoverageRequest req)
            throws WcsException, InterruptedException {
        String format = getReturnFormat(req);
        WcsResponseFormat rFormat = ServerCapabilities.getFormat(format);
        if (rFormat == null)
            throw new WcsException("Unrecognized response format: " + Scrub.fileName(format),
                    WcsException.INVALID_PARAMETER_VALUE, "format");
        return rFormat.mimeType();
    }

    /**
     * Builds the DAP2 constraint expression (CE) for the request and returns
     * it URL encoded. Value based subsets are expressed through the grid()
     * server side function; array index subsets are expressed as [] clauses
     * on the variable name.
     *
     * @param req The GetCoverageRequest object built from the client request.
     * @return The URL-encoded DAP2 constraint expression.
     * @throws WcsException When a requested dimension is unknown or a subset
     *                      cannot be expressed.
     */
    private static String getDap2CE(GetCoverageRequest req)
            throws InterruptedException, WcsException {

        String coverageID = req.getCoverageID();
        WcsCatalog wcsCatalog = WcsServiceManager.getCatalog(coverageID);
        CoverageDescription coverageDescription = wcsCatalog.getCoverageDescription(coverageID);

        HashMap<String, DimensionSubset> dimensionSubsets = req.getDimensionSubsets();
        HashMap<DomainCoordinate, DimensionSubset> domCordToDimSubsetMap = new HashMap<>();

        // The user may have provided domain subsets.
        // First QC the request: every requested dimension subset must name a coordinate
        // dimension that this coverage has. While checking, associate every matching
        // DomainCoordinate with the DimensionSubset that it matched.
        LinkedHashMap<String, DomainCoordinate> domainCoordinates =
                coverageDescription.getDomainCoordinates();
        for (DimensionSubset ds : dimensionSubsets.values()) {
            DomainCoordinate dc = domainCoordinates.get(ds.getDimensionId());
            if (dc == null) {
                // It's likely to happen frequently that the user submits a bad dimension
                // name, so take the time to give an informative error message.
                StringBuilder msg = new StringBuilder();
                msg.append("Bad subsetting request.\n");
                msg.append("A subset was requested for dimension '").append(ds.getDimensionId()).append("'");
                msg.append(" and there is no coordinate dimension of that name in the Coverage ");
                msg.append("'").append(coverageDescription.getCoverageId()).append("'\n");
                msg.append("Valid coordinate dimension names for '").append(coverageDescription.getCoverageId()).append("' ");
                msg.append("are: ");
                for (String dcName : domainCoordinates.keySet()) {
                    msg.append("\n ").append(dcName);
                }
                msg.append("\n");
                _log.debug(msg.toString());
                throw new WcsException(msg.toString(),
                        WcsException.INVALID_PARAMETER_VALUE, "wcs:dimension");
            }
            ds.setDomainCoordinate(dc);
            domCordToDimSubsetMap.put(dc, ds);
        }

        // Determine which fields (variables) will be sent back with the response.
        // If none are specified, all are sent.
        Vector<String> requestedFields;
        RangeSubset rangeSubset = req.getRangeSubset();
        if (rangeSubset != null) {
            requestedFields = rangeSubset.getRequestedFields();
        } else {
            requestedFields = new Vector<>();
        }
        if (requestedFields.isEmpty()) {
            // No subset of the set of fields was requested, so take them all.
            String fieldNames[] = coverageDescription.getFieldNames();
            requestedFields.addAll(Arrays.asList(fieldNames));
        }

        // Is there a Scale request?
        // TODO - Handle scale requests (scaleRequest is currently unused).
        ScaleRequest scaleRequest = req.getScaleRequest();

        StringBuilder dap2CE = new StringBuilder();

        // Here we begin building the DAP2 CE. For every field (variable) to be
        // transmitted we may need server side function expressions, array subset
        // expressions, etc.
        Vector<String> subsetClauses = new Vector<>();
        for (String fieldId : requestedFields) {
            String dapGridArrayName = coverageDescription.getDapGridArrayId(fieldId);
            if (dimensionSubsets.isEmpty()) {
                // No dimension subsets means take the whole variable.
                subsetClauses.add(dapGridArrayName);
            } else {
                StringBuilder valueSubsetClause = new StringBuilder();
                boolean arraySubset = false;
                // Value based subsets go through a call to the grid() server side
                // function. The array index subsets, if any exist, are applied to the
                // variable passed into grid(), and they must be emitted in the order
                // of the DomainCoordinate variables.
                for (DomainCoordinate domainCoordinate : domainCoordinates.values()) {
                    DimensionSubset dimSub = domCordToDimSubsetMap.get(domainCoordinate);
                    if (dimSub == null) {
                        // No subset on this coordinate? That's OK - make one that
                        // spans the entire dimension.
                        dimSub = new DimensionSubset(domainCoordinate);
                    }
                    if (dimSub.isValueSubset()) {
                        // A value subset means the user supplied domain coordinate
                        // values that bound the subset they want.
                        if (valueSubsetClause.length() > 0) {
                            valueSubsetClause.append(",");
                        }
                        // Tack on the value constraint expression: "low<=dimName<=high"
                        valueSubsetClause.append(dimSub.getDap2GridValueConstraint());
                    } else if (dimSub.isArraySubset()) {
                        // An array subset means the user indicated (through integer
                        // values in their subset request) that they want to subset by
                        // array index. Because the order of the [] array notation in
                        // DAP URLs is important, stash each constraint on its
                        // DomainCoordinate and emit them in coordinate order below.
                        DomainCoordinate domCoord = domainCoordinates.get(dimSub.getDimensionId());
                        domCoord.setArraySubset(dimSub.getDapArrayIndexConstraint());
                        arraySubset = true;
                    } else {
                        throw new WcsException("Unrecognized dimension subset.",
                                WcsException.NO_APPLICABLE_CODE);
                    }
                }

                // All user-requested dimension subsets for this field are processed;
                // now assemble the field's subset clause.
                StringBuilder fieldSubsetClause = new StringBuilder();

                // If there's value based subsetting to be done we need the grid() ssf.
                if (valueSubsetClause.length() > 0) {
                    fieldSubsetClause.append("grid(");
                }
                // Then the name of the variable.
                fieldSubsetClause.append(dapGridArrayName);

                // Add any array index subsets to the variable name.
                if (arraySubset) {
                    // Build the subsetting string using the domain coordinates in the
                    // order they appear in the DAP dataset, which is how they MUST
                    // occur in the configuration or this all gets broken.
                    for (DomainCoordinate dc : domainCoordinates.values()) {
                        String clause = dc.getArraySubset();
                        clause = clause == null ? "[*]" : clause;
                        fieldSubsetClause.append(clause);
                    }
                }

                // Add the value subsets (closing the grid() syntax).
                if (valueSubsetClause.length() > 0) {
                    fieldSubsetClause.append(",");
                    fieldSubsetClause.append(valueSubsetClause);
                    fieldSubsetClause.append(")");
                }

                subsetClauses.add(fieldSubsetClause.toString());
            } // dimension subsets
        } // fields

        for (String subsetClause : subsetClauses) {
            String comma_as_needed = dap2CE.length() > 0 ? "," : "";
            dap2CE.append(comma_as_needed).append(subsetClause);
        }

        _log.debug("getDap2CE() - DAP2 CE: {}", dap2CE);
        try {
            return URLEncoder.encode(dap2CE.toString(), "UTF-8");
        } catch (UnsupportedEncodingException e) {
            _log.error("getDap2CE() - Unable to URLEncoder.encode() DAP CE: '{}'", dap2CE);
            // BUGFIX: the message previously omitted the opening quote around the CE.
            throw new WcsException("Failed URL encode DAP2 CE: '" + dap2CE + "'",
                    WcsException.NO_APPLICABLE_CODE);
        }
    }

    /**
     * Builds the BES command document that produces the coverage data in the
     * requested format (DAP2, NetCDF-4, GeoTIFF, or GML-JPEG2000).
     *
     * @param req        The GetCoverageRequest object built from the client request.
     * @param cd         The coverage's description.
     * @param wcsCatalog The catalog that holds the coverage.
     * @return The BES command document.
     * @throws WcsException When the format is unsupported or the BES is not configured.
     */
    public static Document getBesCmd(GetCoverageRequest req, CoverageDescription cd, WcsCatalog wcsCatalog)
            throws WcsException, InterruptedException {

        Document besCmd = null;
        String dap2ce = GetCoverageRequestProcessor.getDap2CE(req);
        String format = GetCoverageRequestProcessor.getReturnFormat(req);
        WcsResponseFormat rFormat = ServerCapabilities.getFormat(format);
        if (rFormat == null)
            throw new WcsException("The requested return format '" + format +
                    "' is not recognized by this service.",
                    WcsException.INVALID_PARAMETER_VALUE, "format");

        String besUrl = wcsCatalog.getDapDatsetUrl(cd.getCoverageId());
        String besDatatsetId = besUrl.substring(Util.BES_PROTOCOL.length());

        if (!BESManager.isInitialized())
            throw new WcsException("The BESManager has not been configured. Unable to access BES!",
                    WcsException.NO_APPLICABLE_CODE);

        BesApi besApi = new BesApi();
        try {
            switch (rFormat.type()) {
                case dap2:
                    besCmd = besApi.getDap2RequestDocument(
                            BesApi.DAP2_DATA, besDatatsetId, dap2ce,
                            null, null, "3.2", 0, null, null, null,
                            BesApi.XML_ERRORS);
                    break;

                case netcdf:
                    besCmd = besApi.getDap2DataAsNetcdf4Request(
                            besDatatsetId, dap2ce, req.getCfHistoryAttribute(), "3.2", 0);
                    break;

                case geotiff:
                    besCmd = besApi.getDap2DataAsGeoTiffRequest(
                            besDatatsetId, dap2ce, "3.2", 0);
                    break;

                case jpeg2000:
                    besCmd = besApi.getDap2DataAsGmlJpeg2000Request(
                            besDatatsetId, dap2ce, "3.2", 0);
                    break;

                case dap4:
                default:
                    throw new WcsException(("Unsupported format: '" + rFormat.name()) + "' :(",
                            WcsException.INVALID_PARAMETER_VALUE, "format");
            }
        } catch (BadConfigurationException bce) {
            // BUGFIX: the message previously left the quote around the request unclosed.
            throw new WcsException("Failed to generate a BES command for the GetCoverage request: '"
                    + req.toString() + "'",
                    WcsException.NO_APPLICABLE_CODE);
        }
        return besCmd;
    }
}
package org.bouncycastle.x509;

import org.bouncycastle.jce.X509LDAPCertStoreParameters;
import org.bouncycastle.util.Selector;
import org.bouncycastle.util.Store;

import java.security.InvalidAlgorithmParameterException;
import java.security.cert.CertSelector;
import java.security.cert.CertStore;
import java.security.cert.CollectionCertStoreParameters;
import java.security.cert.LDAPCertStoreParameters;
import java.security.cert.PKIXParameters;
import java.security.cert.TrustAnchor;
import java.security.cert.X509CertSelector;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * This class extends the PKIXParameters with a validity model parameter.
 */
public class ExtendedPKIXParameters extends PKIXParameters
{

    // Bouncy Castle Store instances used to find certificates and CRLs.
    private List stores;

    // Selector for the target; read (and cloned) by setParams() when copying
    // from another ExtendedPKIXParameters. NOTE(review): no setter for this
    // field is visible in this fragment.
    private Selector selector;

    // True if additional (remote) locations, e.g. LDAP URLs found in
    // certificates or CRLs, may be used during path processing.
    private boolean additionalLocationsEnabled;

    // Additional stores for locations discovered during X.509 object processing.
    private List additionalStores;

    // Trusted attribute certificate issuers.
    private Set trustedACIssuers;

    // Attributes an attribute certificate must contain.
    private Set necessaryACAttributes;

    // Attributes an attribute certificate must not contain.
    private Set prohibitedACAttributes;

    // Checkers applied to attribute certificates.
    private Set attrCertCheckers;

    /**
     * Creates an instance of <code>PKIXParameters</code> with the specified
     * <code>Set</code> of most-trusted CAs. Each element of the set is a
     * {@link TrustAnchor TrustAnchor}.
     * <p/>
     * Note that the <code>Set</code> is copied to protect against subsequent
     * modifications.
     *
     * @param trustAnchors a <code>Set</code> of <code>TrustAnchor</code>s
     * @throws InvalidAlgorithmParameterException if the specified
     *             <code>Set</code> is empty.
     * @throws NullPointerException if the specified <code>Set</code> is
     *             <code>null</code>
     * @throws ClassCastException if any of the elements in the <code>Set</code>
     *             is not of type <code>java.security.cert.TrustAnchor</code>
     */
    public ExtendedPKIXParameters(Set trustAnchors)
        throws InvalidAlgorithmParameterException
    {
        super(trustAnchors);
        // Start with empty, mutable collections; they are filled through the
        // add/set methods or by setParams().
        stores = new ArrayList();
        additionalStores = new ArrayList();
        trustedACIssuers = new HashSet();
        necessaryACAttributes = new HashSet();
        prohibitedACAttributes = new HashSet();
        attrCertCheckers = new HashSet();
    }

    /**
     * Returns an instance with the parameters of a given
     * <code>PKIXParameters</code> object.
     *
     * @param pkixParams The given <code>PKIXParameters</code>
     * @return an extended PKIX params object
     */
    public static ExtendedPKIXParameters getInstance(PKIXParameters pkixParams)
    {
        ExtendedPKIXParameters params;
        try
        {
            params = new ExtendedPKIXParameters(pkixParams.getTrustAnchors());
        }
        catch (Exception e)
        {
            // cannot happen: pkixParams already holds a valid trust anchor set
            throw new RuntimeException(e.getMessage());
        }
        params.setParams(pkixParams);
        return params;
    }

    /**
     * Method to support <code>clone()</code> under J2ME.
     * <code>super.clone()</code> does not exist and fields are not copied.
     *
     * @param params Parameters to set. If these are
     *            <code>ExtendedPKIXParameters</code> they are copied too.
*/
    protected void setParams(PKIXParameters params)
    {
        // Copy the standard PKIXParameters state through the superclass setters.
        setDate(params.getDate());
        setCertPathCheckers(params.getCertPathCheckers());
        setCertStores(params.getCertStores());
        setAnyPolicyInhibited(params.isAnyPolicyInhibited());
        setExplicitPolicyRequired(params.isExplicitPolicyRequired());
        setPolicyMappingInhibited(params.isPolicyMappingInhibited());
        setRevocationEnabled(params.isRevocationEnabled());
        setInitialPolicies(params.getInitialPolicies());
        setPolicyQualifiersRejected(params.getPolicyQualifiersRejected());
        setSigProvider(params.getSigProvider());
        setTargetCertConstraints(params.getTargetCertConstraints());
        try
        {
            setTrustAnchors(params.getTrustAnchors());
        }
        catch (Exception e)
        {
            // cannot happen: the anchors come from an already-validated instance
            throw new RuntimeException(e.getMessage());
        }
        // If the source carries the extended state, copy that too. Mutable
        // collections are defensively copied; the selector is cloned.
        if (params instanceof ExtendedPKIXParameters)
        {
            ExtendedPKIXParameters _params = (ExtendedPKIXParameters) params;
            validityModel = _params.validityModel;
            useDeltas = _params.useDeltas;
            additionalLocationsEnabled = _params.additionalLocationsEnabled;
            selector = _params.selector == null ? null
                : (Selector) _params.selector.clone();
            stores = new ArrayList(_params.stores);
            additionalStores = new ArrayList(_params.additionalStores);
            trustedACIssuers = new HashSet(_params.trustedACIssuers);
            prohibitedACAttributes = new HashSet(_params.prohibitedACAttributes);
            necessaryACAttributes = new HashSet(_params.necessaryACAttributes);
            attrCertCheckers = new HashSet(_params.attrCertCheckers);
        }
    }

    /**
     * This is the default PKIX validity model. Actually there are two variants
     * of this: The PKIX model and the modified PKIX model. The PKIX model
     * verifies that all involved certificates must have been valid at the
     * current time. The modified PKIX model verifies that all involved
     * certificates were valid at the signing time. Both are indirectly chosen
     * with the {@link PKIXParameters#setDate(java.util.Date)} method, so this
     * method sets the Date when <em>all</em> certificates must have been
     * valid.
*/
    public static final int PKIX_VALIDITY_MODEL = 0;

    /**
     * This model uses the following validity model. Each certificate must have
     * been valid at the moment where it was used. That means the end
     * certificate must have been valid at the time the signature was done. The
     * CA certificate which signed the end certificate must have been valid,
     * when the end certificate was signed. The CA (or Root CA) certificate must
     * have been valid, when the CA certificate was signed and so on. So the
     * {@link PKIXParameters#setDate(java.util.Date)} method sets the time, when
     * the <em>end certificate</em> must have been valid. <p/> It is used e.g.
     * in the German signature law.
     */
    public static final int CHAIN_VALIDITY_MODEL = 1;

    // Validity model in use; defaults to the PKIX model.
    private int validityModel = PKIX_VALIDITY_MODEL;

    // Whether delta CRLs are used for revocation checking.
    // NOTE(review): the Javadoc of isUseDeltasEnabled() below says this
    // defaults to false, but the initializer here is true - confirm which
    // default is intended.
    private boolean useDeltas = true;

    /**
     * Defaults to <code>false</code>.
     * (NOTE(review): the field initializer above is <code>true</code>, which
     * contradicts this statement - confirm the intended default.)
     *
     * @return Returns if delta CRLs should be used.
     */
    public boolean isUseDeltasEnabled()
    {
        return useDeltas;
    }

    /**
     * Sets if delta CRLs should be used for checking the revocation status.
     *
     * @param useDeltas <code>true</code> if delta CRLs should be used.
     */
    public void setUseDeltasEnabled(boolean useDeltas)
    {
        this.useDeltas = useDeltas;
    }

    /**
     * @return Returns the validity model.
     * @see #CHAIN_VALIDITY_MODEL
     * @see #PKIX_VALIDITY_MODEL
     */
    public int getValidityModel()
    {
        return validityModel;
    }

    /**
     * Adds a Java CertStore to this extended PKIX parameters. If the store uses
     * initialization parameters of type
     * <code>CollectionCertStoreParameters</code> or
     * <code>LDAPCertStoreParameters</code> the corresponding Bouncy Castle
     * {@link Store} type is created additionally to
     * it.
*/ public void addCertStore(CertStore store) { super.addCertStore(store); if (store.getCertStoreParameters() instanceof CollectionCertStoreParameters) { Collection coll = ((CollectionCertStoreParameters) store .getCertStoreParameters()).getCollection(); X509CollectionStoreParameters params = new X509CollectionStoreParameters( coll); try { stores.add(X509Store.getInstance("CERTIFICATE/COLLECTION", params, "BC")); stores.add(X509Store .getInstance("CRL/COLLECTION", params, "BC")); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } } if (store.getCertStoreParameters() instanceof LDAPCertStoreParameters || store.getCertStoreParameters() instanceof X509LDAPCertStoreParameters) { X509LDAPCertStoreParameters params; if (store.getCertStoreParameters() instanceof X509LDAPCertStoreParameters) { params = (X509LDAPCertStoreParameters) store .getCertStoreParameters(); } else { int port = ((LDAPCertStoreParameters) store .getCertStoreParameters()).getPort(); String server = ((LDAPCertStoreParameters) store .getCertStoreParameters()).getServerName(); params = new X509LDAPCertStoreParameters.Builder("ldap: + server + ":" + port, null).build(); } try { stores.add(X509Store.getInstance("CERTIFICATE/LDAP", params, "BC")); stores.add(X509Store.getInstance("CRL/LDAP", params, "BC")); } catch (Exception e) { // cannot happen throw new RuntimeException(e.getMessage()); } } } /** * Sets the Java CertStore to this extended PKIX parameters. If the stores * use initialisation parameters of type * <code>CollectionCertStoreParameters</code> or <code></code> the * corresponding Bouncy Castle {@link Store} types are created additionally * to it. * * @throws ClassCastException if an element of <code>stores</code> is not * a <code>CertStore</code>. 
*/ public void setCertStores(List stores) { if (stores != null) { Iterator it = stores.iterator(); while (it.hasNext()) { addCertStore((CertStore) it.next()); } } } /** * Sets the Bouncy Castle Stores for finding CRLs, certificates, attribute * certificates or cross certificates. * <p> * The <code>List</code> is cloned. * * @param stores A list of stores to use. * @see #getStores * @throws ClassCastException if an element of <code>stores</code> is not * a {@link Store}. */ public void setStores(List stores) { if (stores == null) { this.stores = new ArrayList(); } else { for (Iterator i = stores.iterator(); i.hasNext();) { if (!(i.next() instanceof Store)) { throw new ClassCastException( "All elements of list must be " + "of type org.bouncycastle.util.Store."); } } this.stores = new ArrayList(stores); } } /** * Adds a Bouncy Castle {@link Store} to find CRLs, certificates, attribute * certificates or cross certificates. * <p> * This method should be used to add local stores, like collection based * X.509 stores, if available. Local stores should be considered first, * before trying to use additional (remote) locations, because they do not * need possible additional network traffic. * <p> * If <code>store</code> is <code>null</code> it is ignored. * * @param store The store to add. * @see #getStores */ public void addStore(Store store) { if (stores != null) { stores.add(store); } } /** * Adds a additional Bouncy Castle {@link Store} to find CRLs, certificates, * attribute certificates or cross certificates. * <p> * You should not use this method. This method is used for adding additional * X.509 stores, which are used to add (remote) locations, e.g. LDAP, found * during X.509 object processing, e.g. in certificates or CRLs. This method * is used in PKIX certification path processing. * <p> * If <code>store</code> is <code>null</code> it is ignored. * * @param store The store to add. 
 * @see #getStores()
 */
public void addAddionalStore(Store store) // sic: "Addional" kept for API compatibility
{
    if (store != null)
    {
        additionalStores.add(store);
    }
}

/**
 * Returns an immutable <code>List</code> of additional Bouncy Castle
 * <code>Store</code>s used for finding CRLs, certificates, attribute
 * certificates or cross certificates.
 *
 * @return an immutable <code>List</code> of additional Bouncy Castle
 *         <code>Store</code>s. Never <code>null</code>.
 *
 * @see #addAddionalStore(Store)
 */
public List getAddionalStores()
{
    return Collections.unmodifiableList(additionalStores);
}

/**
 * Returns an immutable <code>List</code> of Bouncy Castle
 * <code>Store</code>s used for finding CRLs, certificates, attribute
 * certificates or cross certificates.
 *
 * @return an immutable <code>List</code> of Bouncy Castle
 *         <code>Store</code>s. Never <code>null</code>.
 *
 * @see #setStores(List)
 */
public List getStores()
{
    // Copy first so the unmodifiable view is also detached from later
    // mutations of the underlying field.
    return Collections.unmodifiableList(new ArrayList(stores));
}

/**
 * @param validityModel The validity model to set.
 * @see #CHAIN_VALIDITY_MODEL
 * @see #PKIX_VALIDITY_MODEL
 */
public void setValidityModel(int validityModel)
{
    this.validityModel = validityModel;
}

public Object clone()
{
    ExtendedPKIXParameters params;
    try
    {
        params = new ExtendedPKIXParameters(getTrustAnchors());
    }
    catch (Exception e)
    {
        // cannot happen: our own trust anchors were already validated
        throw new RuntimeException(e.getMessage());
    }
    // setParams copies the remaining state onto the fresh instance.
    params.setParams(this);
    return params;
}

/**
 * Returns if additional {@link X509Store}s for locations like LDAP found
 * in certificates or CRLs should be used.
 *
 * @return Returns <code>true</code> if additional stores are used.
 */
public boolean isAdditionalLocationsEnabled()
{
    return additionalLocationsEnabled;
}

/**
 * Sets if additional {@link X509Store}s for locations like LDAP found in
 * certificates or CRLs should be used.
 *
 * @param enabled <code>true</code> if additional stores are used.
 */
public void setAdditionalLocationsEnabled(boolean enabled)
{
    additionalLocationsEnabled = enabled;
}

/**
 * Returns the required constraints on the target certificate or attribute
 * certificate. The constraints are returned as an instance of
 * <code>Selector</code>. If <code>null</code>, no constraints are
 * defined.
 *
 * <p>
 * The target certificate in a PKIX path may be a certificate or an
 * attribute certificate.
 * <p>
 * Note that the <code>Selector</code> returned is cloned to protect
 * against subsequent modifications.
 *
 * @return a <code>Selector</code> specifying the constraints on the
 *         target certificate or attribute certificate (or <code>null</code>)
 * @see #setTargetConstraints
 * @see X509CertStoreSelector
 * @see X509AttributeCertStoreSelector
 */
public Selector getTargetConstraints()
{
    if (selector != null)
    {
        // defensive copy out
        return (Selector) selector.clone();
    }
    else
    {
        return null;
    }
}

/**
 * Sets the required constraints on the target certificate or attribute
 * certificate. The constraints are specified as an instance of
 * <code>Selector</code>. If <code>null</code>, no constraints are
 * defined.
 * <p>
 * The target certificate in a PKIX path may be a certificate or an
 * attribute certificate.
 * <p>
 * Note that the <code>Selector</code> specified is cloned to protect
 * against subsequent modifications.
 *
 * @param selector a <code>Selector</code> specifying the constraints on
 *            the target certificate or attribute certificate (or
 *            <code>null</code>)
 * @see #getTargetConstraints
 * @see X509CertStoreSelector
 * @see X509AttributeCertStoreSelector
 */
public void setTargetConstraints(Selector selector)
{
    if (selector != null)
    {
        // defensive copy in
        this.selector = (Selector) selector.clone();
    }
    else
    {
        this.selector = null;
    }
}

/**
 * Sets the required constraints on the target certificate. The constraints
 * are specified as an instance of <code>X509CertSelector</code>. If
 * <code>null</code>, no constraints are defined.
 *
 * <p>
 * This method wraps the given <code>X509CertSelector</code> into a
 * <code>X509CertStoreSelector</code>.
 * <p>
 * Note that the <code>X509CertSelector</code> specified is cloned to
 * protect against subsequent modifications.
 *
 * @param selector a <code>X509CertSelector</code> specifying the
 *            constraints on the target certificate (or <code>null</code>)
 * @see #getTargetCertConstraints
 * @see X509CertStoreSelector
 */
public void setTargetCertConstraints(CertSelector selector)
{
    super.setTargetCertConstraints(selector);
    if (selector != null)
    {
        // NOTE(review): unchecked cast — a CertSelector that is not an
        // X509CertSelector will raise a ClassCastException here; confirm
        // callers only ever pass X509CertSelector instances.
        this.selector = X509CertStoreSelector
            .getInstance((X509CertSelector) selector);
    }
    else
    {
        this.selector = null;
    }
}

/**
 * Returns the trusted attribute certificate issuers. If attribute
 * certificates is verified the trusted AC issuers must be set.
 * <p>
 * The returned <code>Set</code> consists of <code>TrustAnchor</code>s.
 * <p>
 * The returned <code>Set</code> is immutable. Never <code>null</code>
 *
 * @return Returns an immutable set of the trusted AC issuers.
 */
public Set getTrustedACIssuers()
{
    return Collections.unmodifiableSet(trustedACIssuers);
}

/**
 * Sets the trusted attribute certificate issuers. If attribute certificates
 * is verified the trusted AC issuers must be set.
 * <p>
 * The <code>trustedACIssuers</code> must be a <code>Set</code> of
 * <code>TrustAnchor</code>
 * <p>
 * The given set is cloned.
 *
 * @param trustedACIssuers The trusted AC issuers to set. Is never
 *            <code>null</code>.
 * @throws ClassCastException if an element of <code>stores</code> is not
 *             a <code>TrustAnchor</code>.
*/ public void setTrustedACIssuers(Set trustedACIssuers) { if (trustedACIssuers == null) { trustedACIssuers.clear(); return; } for (Iterator it = trustedACIssuers.iterator(); it.hasNext();) { if (!(it.next() instanceof TrustAnchor)) { throw new ClassCastException("All elements of set must be " + "of type " + TrustAnchor.class.getName() + "."); } } this.trustedACIssuers.clear(); this.trustedACIssuers.addAll(trustedACIssuers); } /** * Returns the neccessary attributes which must be contained in an attribute * certificate. * <p> * The returned <code>Set</code> is immutable and contains * <code>String</code>s with the OIDs. * * @return Returns the necessary AC attributes. */ public Set getNecessaryACAttributes() { return Collections.unmodifiableSet(necessaryACAttributes); } /** * Sets the neccessary which must be contained in an attribute certificate. * <p> * The <code>Set</code> must contain <code>String</code>s with the * OIDs. * <p> * The set is cloned. * * @param necessaryACAttributes The necessary AC attributes to set. * @throws ClassCastException if an element of * <code>necessaryACAttributes</code> is not a * <code>String</code>. */ public void setNecessaryACAttributes(Set necessaryACAttributes) { if (necessaryACAttributes == null) { this.necessaryACAttributes.clear(); return; } for (Iterator it = necessaryACAttributes.iterator(); it.hasNext();) { if (!(it.next() instanceof String)) { throw new ClassCastException("All elements of set must be " + "of type String."); } } this.necessaryACAttributes.clear(); this.necessaryACAttributes.addAll(necessaryACAttributes); } /** * Returns the attribute certificates which are not allowed. * <p> * The returned <code>Set</code> is immutable and contains * <code>String</code>s with the OIDs. * * @return Returns the prohibited AC attributes. Is never <code>null</code>. */ public Set getProhibitedACAttributes() { return prohibitedACAttributes; } /** * Sets the attribute certificates which are not allowed. 
 * <p>
 * The <code>Set</code> must contain <code>String</code>s with the
 * OIDs.
 * <p>
 * The set is cloned.
 *
 * @param prohibitedACAttributes The prohibited AC attributes to set.
 * @throws ClassCastException if an element of
 *             <code>prohibitedACAttributes</code> is not a
 *             <code>String</code>.
 */
public void setProhibitedACAttributes(Set prohibitedACAttributes)
{
    // null resets to "no prohibited attributes"
    if (prohibitedACAttributes == null)
    {
        this.prohibitedACAttributes.clear();
        return;
    }
    // validate before mutating internal state
    for (Iterator it = prohibitedACAttributes.iterator(); it.hasNext();)
    {
        if (!(it.next() instanceof String))
        {
            throw new ClassCastException("All elements of set must be "
                + "of type String.");
        }
    }
    this.prohibitedACAttributes.clear();
    this.prohibitedACAttributes.addAll(prohibitedACAttributes);
}

/**
 * Returns the attribute certificate checker. The returned set contains
 * {@link PKIXAttrCertChecker}s and is immutable.
 *
 * @return Returns the attribute certificate checker. Is never
 *         <code>null</code>.
 */
public Set getAttrCertCheckers()
{
    return Collections.unmodifiableSet(attrCertCheckers);
}

/**
 * Sets the attribute certificate checkers.
 * <p>
 * All elements in the <code>Set</code> must a {@link PKIXAttrCertChecker}.
 * <p>
 * The given set is cloned.
 *
 * @param attrCertCheckers The attribute certificate checkers to set. Is
 *            never <code>null</code>.
 * @throws ClassCastException if an element of <code>attrCertCheckers</code>
 *             is not a <code>PKIXAttrCertChecker</code>.
 */
public void setAttrCertCheckers(Set attrCertCheckers)
{
    if (attrCertCheckers == null)
    {
        this.attrCertCheckers.clear();
        return;
    }
    for (Iterator it = attrCertCheckers.iterator(); it.hasNext();)
    {
        if (!(it.next() instanceof PKIXAttrCertChecker))
        {
            throw new ClassCastException("All elements of set must be "
                + "of type " + PKIXAttrCertChecker.class.getName() + ".");
        }
    }
    this.attrCertCheckers.clear();
    this.attrCertCheckers.addAll(attrCertCheckers);
}

}
package com.swabunga.spell.engine;

import java.io.BufferedReader;
import java.io.InputStreamReader;

/**
 * This class is based on Levenshtein Distance algorithms, and it calculates how similar two words are.
 * If the words are identical, then the distance is 0. The more that the words have in common, the lower the distance value.
 * The distance value is based on how many operations it takes to get from one word to the other. Possible operations are
 * swapping characters, adding a character, deleting a character, and substituting a character.
 * The resulting distance is the sum of these operations weighted by their cost, which can be set in the Configuration object.
 * When there are multiple ways to convert one word into the other, the lowest cost distance is returned.
 * <br/>
 * Another way to think about this: what are the cheapest operations that would have to be done on the "original" word to end up
 * with the "similar" word? Each operation has a cost, and these are added up to get the distance.
 * <br/>
 *
 * @see com.swabunga.spell.engine.Configuration#COST_REMOVE_CHAR
 * @see com.swabunga.spell.engine.Configuration#COST_INSERT_CHAR
 * @see com.swabunga.spell.engine.Configuration#COST_SUBST_CHARS
 * @see com.swabunga.spell.engine.Configuration#COST_SWAP_CHARS
 *
 */
public class EditDistance {

  /**
   * Fetches the spell engine configuration properties.
   */
  // NOTE(review): public mutable static field; reassigning it after class
  // load has no effect on the cost constants below, which are captured once.
  public static Configuration config = Configuration.getConfiguration();

  /**
   * get the weights for each possible operation
   */
  // Captured once at class-initialization time from the configuration above.
  static final int costOfDeletingSourceCharacter = config.getInteger(Configuration.COST_REMOVE_CHAR);
  static final int costOfInsertingSourceCharacter = config.getInteger(Configuration.COST_INSERT_CHAR);
  static final int costOfSubstitutingLetters = config.getInteger(Configuration.COST_SUBST_CHARS);
  static final int costOfSwappingLetters = config.getInteger(Configuration.COST_SWAP_CHARS);
  static final int costOfChangingCase = config.getInteger(Configuration.COST_CHANGE_CASE);

  /**
   * Evaluates the distance between two words.
   *
   * @param word One word to evaluate
   * @param similar The other word to evaluate
   * @return a number representing how easy or complex it is to transform one
   * word into a similar one.
   */
  public static final int getDistance(String word, String similar) {
    return getDistance(word,similar,null);
  }

  /**
   * Evaluates the distance between two words.
   *
   * @param word One word to evaluate
   * @param similar The other word to evaluate
   * @param matrix optional scratch matrix reused across calls; a larger one is
   * allocated automatically if this is null or too small.
   * @return a number representing how easy or complex it is to transform one
   * word into a similar one.
   */
  public static final int getDistance(String word, String similar, int[][] matrix) {
    /* JMH Again, there is no need to have a global class matrix variable
     * in this class. I have removed it and made the getDistance static final
     * DMV: I refactored this method to make it more efficient, more readable, and simpler.
     * I also fixed a bug with how the distance was being calculated. You could get wrong
     * distances if you compared ("abc" to "ab") depending on what you had setup your
     * COST_REMOVE_CHAR and EDIT_INSERTION_COST values to - that is now fixed.
     * WRS: I added a distance for case comparison, so a misspelling of "i" would be closer to "I" than
     * to "a".
     */

    //Allocate memory outside of the loops.
int i; int j; int costOfSubst; int costOfSwap; int costOfDelete; int costOfInsertion; int costOfCaseChange; boolean isSwap; char sourceChar = 0; char otherChar = 0; int a_size = word.length() + 1; int b_size = similar.length() + 1; //Only allocate new memory if we need a bigger matrix. if (matrix == null || matrix.length < a_size || matrix[0].length < b_size) matrix = new int[a_size][b_size]; matrix[0][0] = 0; for (i = 1; i != a_size; ++i) matrix[i][0] = matrix[i - 1][0] + costOfInsertingSourceCharacter; //initialize the first column for (j = 1; j != b_size; ++j) matrix[0][j] = matrix[0][j - 1] + costOfDeletingSourceCharacter; //initalize the first row for (i = 1; i != a_size; ++i) { sourceChar = word.charAt(i-1); for (j = 1; j != b_size; ++j) { otherChar = similar.charAt(j-1); if (sourceChar == otherChar) { matrix[i][j] = matrix[i - 1][j - 1]; //no change required, so just carry the current cost up continue; } costOfSubst = costOfSubstitutingLetters + matrix[i - 1][j - 1]; //if needed, add up the cost of doing a swap costOfSwap = Integer.MAX_VALUE; isSwap = (i != 1) && (j != 1) && sourceChar == similar.charAt(j - 2) && word.charAt(i - 2) == otherChar; if (isSwap) costOfSwap = costOfSwappingLetters + matrix[i - 2][j - 2]; costOfDelete = costOfDeletingSourceCharacter + matrix[i][j - 1]; costOfInsertion = costOfInsertingSourceCharacter + matrix[i - 1][j]; costOfCaseChange = Integer.MAX_VALUE; if (equalIgnoreCase(sourceChar, otherChar)) costOfCaseChange = costOfChangingCase + matrix[i - 1][j - 1]; matrix[i][j] = minimum(costOfSubst, costOfSwap, costOfDelete, costOfInsertion, costOfCaseChange); } } if (false) System.out.println(dumpMatrix(word, similar, matrix)); return matrix[a_size - 1][b_size - 1]; } /** * checks to see if the two charactors are equal ignoring case. 
* @param ch1 * @param ch2 * @return boolean */ private static boolean equalIgnoreCase(char ch1, char ch2) { if (ch1 == ch2) { return true; } else { return (Character.toLowerCase(ch1) == Character.toLowerCase(ch2)); } } /** * For debugging, this creates a string that represents the matrix. To read the matrix, look at any square. That is the cost to get from * the partial letters along the top to the partial letters along the side. * @param src - the source string that the matrix columns are based on * @param dest - the dest string that the matrix rows are based on * @param matrix - a two dimensional array of costs (distances) * @return String */ static private String dumpMatrix(String src, String dest, int matrix[][]) { StringBuffer s = new StringBuffer(""); int cols = matrix.length -1; int rows = matrix[0].length -1; for (int i = 0; i < cols + 1; i++) { for (int j = 0; j < rows + 1; j++) { if (i == 0 && j == 0) { s.append("\n "); continue; } if (i == 0) { s.append("| "); s.append(dest.charAt(j - 1)); continue; } if (j == 0) { s.append(src.charAt(i - 1)); continue; } String num = Integer.toString(matrix[i - 1][j - 1]); int padding = 4 - num.length(); s.append("|"); for (int k = 0; k < padding; k++) s.append(' '); s.append(num); } s.append('\n'); } return s.toString(); } static private int minimum(int a, int b, int c, int d, int e) { int mi = a; if (b < mi) mi = b; if (c < mi) mi = c; if (d < mi) mi = d; if (e < mi) mi = e; return mi; } /** * For testing edit distances * @param args an array of two strings we want to evaluate their distances. * @throws java.lang.Exception when problems occurs during reading args. 
   */
  public static void main(String[] args) throws Exception {
    // Interactive driver: reads word pairs from stdin until a blank line or
    // EOF, printing the edit distance for each pair.
    BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
    // Shared scratch matrix, grown on demand by getDistance.
    int[][] matrix = new int[0][0];
    while (true) {
      String input1 = stdin.readLine();
      if (input1 == null || input1.length() == 0)
        break;
      String input2 = stdin.readLine();
      if (input2 == null || input2.length() == 0)
        break;
      System.out.println(EditDistance.getDistance(input1, input2,matrix));
    }
    System.out.println("done");
  }
}
package org.hfoss.posit.android;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;

import org.hfoss.posit.android.utilities.Utils;
import org.hfoss.posit.android.web.Communicator;

import android.app.AlertDialog;
import android.app.Dialog;
import android.app.ListActivity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.RadioGroup;

/**
 * This activity shows a list of all the projects on the server that the phone is registered with,
 * and allows the user to pick one from the list. When the user picks one, the phone automatically
 * syncs with the server to get all the finds from that project
 *
 *
 */
public class ShowProjectsActivity extends ListActivity implements OnClickListener{

	private static final String TAG = "ShowProjectsActivity";
	// NOTE(review): dialog id is declared but no onCreateDialog handler is
	// visible in this file — confirm whether it is still used.
	private static final int CONFIRM_PROJECT_CHANGE = 0;
	static final int NEW_PROJECT = 1;

	// Index of the list row the user last tapped; read by the dialog callbacks.
	private int mClickedPosition = 0;

	// Project descriptions fetched from the server; each map holds at least
	// "id" and "name" keys (see showProjects / onListItemClick).
	private ArrayList<HashMap<String, Object>> projectList;
	// NOTE(review): never assigned or read in this file — candidate for removal.
	private RadioGroup mRadio;

	/**
	 * Called when the activity is first started. Shows a list of
	 * radio buttons, each representing
	 * a different project on the server.
*/ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.list_proj); Button addProjectButton = (Button)findViewById(R.id.idAddProjButton); addProjectButton.setOnClickListener(this); showProjects(); } /* (non-Javadoc) * @see android.app.Activity#onResume() */ @Override protected void onResume() { super.onResume(); // tryToRegister(); } private void showProjects() { if (!Utils.isConnected(this)) { reportNetworkError("No Network connection ... exiting"); return; } Communicator comm = new Communicator(this); try{ projectList = comm.getProjects(); } catch(Exception e){ Log.i(TAG, "Communicator error " + e.getMessage()); e.printStackTrace(); this.reportNetworkError(e.getMessage()); finish(); } if (projectList != null) { Iterator<HashMap<String, Object>> it = projectList.iterator(); ArrayList<String> projList = new ArrayList<String>(); for(int i = 0; it.hasNext(); i++) { HashMap<String,Object> next = it.next(); projList.add((String)(next.get("name"))); } setListAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, projList)); } else { this.reportNetworkError("Null project list returned.\nMake sure your server is reachable."); } } /** * Reports as much information as it can about the error. 
	 * @param str
	 */
	private void reportNetworkError(String str) {
		// NOTE(review): the message says "Registration Failed" but this method
		// also reports project-list/network failures — consider rewording.
		Log.i(TAG, "Registration Failed: " + str);
		Utils.showToast(this, "Registration Failed: " + str);
		finish();
	}

	// Refresh the list after the user creates a project via NewProjectActivity.
	protected void onActivityResult(int requestCode, int resultCode, Intent data) {
		super.onActivityResult(requestCode, resultCode, data);
		if (requestCode == NEW_PROJECT)
			showProjects();
	}

	public void onListItemClick(ListView lv, View v, int position, long idFull) {
		mClickedPosition = position;

		/* Confirms with the user that they have changed their project and
		 * automatically syncs with the server
		 * to get all the project finds */
		AlertDialog.Builder alt_bld = new AlertDialog.Builder(this);
		alt_bld.setIcon(R.drawable.icon);
		alt_bld.setTitle("Project Selection");
		alt_bld.setMessage("Would you like to select "
				+ (String) projectList.get(mClickedPosition).get("name")
				+ " as your current project?");

		// If user confirms selection, start sync of finds
		alt_bld.setPositiveButton(R.string.alert_dialog_ok,
				new DialogInterface.OnClickListener() {
			public void onClick(DialogInterface dialog, int whichButton) {
				Log.i(TAG, "User confirmed project selection to: "
						+ (String) projectList.get(mClickedPosition)
						.get("name"));
				Context appCont = getApplicationContext();

				// Project ids arrive as strings from the server payload.
				String projectId = (String) projectList.get(mClickedPosition).get("id");
				int id = Integer.parseInt(projectId);
				String projectName = (String) projectList.get(mClickedPosition).get("name");

				SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(appCont);
				int currentProjectId = sp.getInt("PROJECT_ID",0);

				// Re-selecting the current project is a no-op.
				if (id == currentProjectId){
					Utils.showToast(appCont, "'" + projectName
							+ "' is already the current project.");
					finish();
					return;
				}

				// Persist the new selection.
				Editor editor = sp.edit();
				editor.putInt("PROJECT_ID", id);
				editor.putString("PROJECT_NAME", projectName);
				editor.commit();

				sp = PreferenceManager.getDefaultSharedPreferences(
						ShowProjectsActivity.this);
				boolean syncIsOn = sp.getBoolean("SYNC_ON_OFF", true);
				if (syncIsOn) {
					Intent intent = new
Intent(ShowProjectsActivity.this, SyncActivity.class); intent.setAction(Intent.ACTION_SYNC); startActivity(intent); } // New project should be set in preferences Log.i(TAG, "Preferences= " + sp.getAll().toString()); finish(); } }); // If user cancels the selection, close dialog alt_bld.setNegativeButton(R.string.alert_dialog_cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { Log.i(TAG, "User cancelled project selection"); // No project should be set in preferences Context appCont = getApplicationContext(); SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(appCont); Log.i(TAG, "Preferences= " + sp.getAll().toString()); dialog.cancel(); } }); alt_bld.show(); } /** * Called when the user clicks on a project in the list. Sets the project id in the shared * preferences so it can be remembered when the application is closed */ public void onClick(View v) { Intent i = new Intent(this, NewProjectActivity.class);; switch (v.getId()) { case R.id.idAddProjButton: startActivityForResult(i,NEW_PROJECT); break; } } /** * This method is used to block the user from selecting to go back or to * press on the menu button without selecting a project. The user will be * prompted to select a project in these cases. If the user previously * selected a project, the action will be allowed. * @param keyCode is an integer representing which key is pressed * @param event is a KeyEvent that is not used here * @return a boolean telling whether or not the operation was successful */ @Override public boolean onKeyDown(int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_BACK || keyCode == KeyEvent.KEYCODE_MENU) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this); // If user previously selected a project than allow action if (sp.getInt("PROJECT_ID",0) == 0) { Utils.showToast(this, "Please select a project"); return true; } } return super.onKeyDown(keyCode, event); } }
package com.unibeta.cloudtest;

import java.io.File;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;

import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebParam.Mode;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.transaction.UserTransaction;

import org.apache.log4j.Logger;
import org.springframework.beans.factory.BeanFactory;

import com.unibeta.cloudtest.CloudTestInput.CloudTestParameter;
import com.unibeta.cloudtest.assertion.AssertResult;
import com.unibeta.cloudtest.assertion.AssertService;
import com.unibeta.cloudtest.assertion.CloudTestAssert;
import com.unibeta.cloudtest.config.CacheManager;
import com.unibeta.cloudtest.config.CacheManagerFactory;
import com.unibeta.cloudtest.config.CloudTestCase;
import com.unibeta.cloudtest.config.CloudTestCase.Case;
import com.unibeta.cloudtest.config.ConfigurationProxy;
import com.unibeta.cloudtest.config.plugin.CloudTestPluginFactory;
import com.unibeta.cloudtest.config.plugin.PluginConfigProxy;
import com.unibeta.cloudtest.config.plugin.elements.UserTransactionPlugin;
import com.unibeta.cloudtest.constant.CloudTestConstants;
import com.unibeta.cloudtest.parallel.thread.CloudTestSingleServiceExecutor;
import com.unibeta.cloudtest.parallel.util.LocalParallelJobUtil;
import com.unibeta.cloudtest.tool.XmlDataDigester;
import com.unibeta.cloudtest.util.CloudTestUtils;
import com.unibeta.cloudtest.util.ObjectDigester;
import com.unibeta.vrules.base.vRules4j;
import com.unibeta.vrules.base.vRules4j.Context;
import com.unibeta.vrules.base.vRules4j.Object.Rule.ErrorMessage;
import com.unibeta.vrules.tools.Java2vRules;
import com.unibeta.vrules.utils.CommonUtils;
import com.unibeta.vrules.utils.XmlUtils;

import bsh.EvalError;

/**
 * <code>CloudTestService</code> is a core cloud testing service provider.
 *
 * @author jordan.xue
 */
@WebService(name = TestService.WEB_SERVICE_NAME_CLOUD_TEST_SERVICE, targetNamespace = TestService.NAME_SPACE_CLOUDTEST_UNIBETA_COM)
public class CloudTestService implements TestService {

    // Marker substring used to distinguish performance-assertion failures
    // from hard failures when deciding whether a dependent case aborts a run.
    private static final String MSG_PERFORMANCE_ASSERTION_FAILURE = "[PerformanceFailure]";
    private static final String[] ASSERTS = new String[] {}; // new
    // String[]{"org.springframework.util.Assert",/*"junit.framework.Assert",*/"org.junit.Assert"};
    // Heuristic: does a string literal look like an XML document/fragment?
    private static final String REGEX_XML_EXPRESSION = "<\\D.*>.*</\\D.*>|<\\D.*/>";
    private static final String POSTFIX_ASSERT_XML = ".assert.xml";
    // Parameter-source discriminators (see the test-case schema).
    private static final String PARAMETER_TYPE_BY_CONTEXT = "2";
    private static final String PARAMETER_TYPE_BY_XML_FILE = "1";
    private static final String PARAMETER_TYPE_BY_VALUE = "0";

    private static Logger logger = Logger.getLogger(CloudTestService.class);
    // Compiled once; Pattern is thread-safe.
    private static Pattern xmlValuePattern = Pattern.compile(REGEX_XML_EXPRESSION, Pattern.DOTALL);

    // private XStream x = new XStream(new DomDriver());

    /**
     * Test service by parameter<br>
     * cloudTestServiceByParameter
     *
     * @param input
     *        - test input object, including class name, method name and
     *        parameters.
* @return test result output */ @WebMethod(operationName = TestService.OPERATION_NAME_CLOUD_TEST_SERVICE_BY_PARAMETER) @WebResult(name = TestService.WEB_RESULT_CLOUD_TEST_RESULT) public CloudTestOutput doTest( @WebParam(name = TestService.WEB_PARAM_TEST_CASE, mode = Mode.IN) CloudTestInput input) { CloudTestOutput checkParallelJobLockRisk = checkParallelJobLock(); CloudTestOutput output = new CloudTestOutput(); try { output = invoke(input); } catch (Exception e) { logger.error(e.getMessage() + CloudTestUtils.printExceptionStackTrace(e)); printExceptionStack(output, e); } finally { CacheManagerFactory.getInstance().clear(); } output.setReturnValue(null); resolveParallelJobLockRisk(checkParallelJobLockRisk, output); return output; } /** * Test service by <code>CloudTestCase</code>. * * @param input * - test input object, including class name, method name and * parameters. * @return test result output */ @WebMethod(exclude = true) public CloudTestOutput doTest(CloudTestCase input) { CloudTestOutput checkParallelJobLockRisk = checkParallelJobLock(); CloudTestOutput output = new CloudTestOutput(); List<CloudTestOutput> list = new ArrayList<CloudTestOutput>(); List<Case> cases = input.testCase; long start = System.currentTimeMillis(); for (Case c : cases) { CloudTestOutput doTestOutput = null; String caseId = c.id; String eachId = c.eachId; caseId = evaluateDataByCondition(caseId, eachId); try { doTestOutput = invoke(c); } catch (Exception e) { logger.error(e.getMessage() + CloudTestUtils.printExceptionStackTrace(e)); printExceptionStack(doTestOutput, e); } doTestOutput.setCaseId(caseId); doTestOutput.setReturnValue(null); list.add(doTestOutput); } long end = System.currentTimeMillis(); CloudTestUtils.processResultStatistics(output, false); output.setRunTime((end - start) / 1000.00); output.setTestCaseResults(list); output.setStatus(true); CacheManagerFactory.getInstance().clear(); resolveParallelJobLockRisk(checkParallelJobLockRisk, output); return output; } private 
    String evaluateDataByCondition(String caseId, String eachId) {

        // When an eachId expression is present, evaluate it and append the
        // result to the case id so iterated cases get distinct ids.
        if (!CommonUtils.isNullOrEmpty(eachId)) {
            try {
                Object eachIdVaule = ObjectDigester.fromJava(eachId);
                if (!CommonUtils.isNullOrEmpty(eachIdVaule)) {
                    caseId = caseId + eachIdVaule.toString();
                }
            } catch (Exception e1) {
                // Evaluation failure is non-fatal: fall back to the raw id.
                logger.warn("evaluate '" + eachId + "' failed caused by: "
                        + e1.getMessage());
            }
        }
        return caseId;
    }

    // Recursively runs the comma-separated list of prerequisite cases named in
    // c.depends before c itself runs. A failed dependency (other than a pure
    // performance-assertion failure) aborts with an exception.
    private void executeDependsCases(Case c) throws Exception {

        if (!CommonUtils.isNullOrEmpty(c.depends)) {
            String[] ids = c.depends.split(",");

            for (int i = 0; i < ids.length; i++) {
                Object object = getDependCaseFromNameSpaceLibs(c.nsLibs, ids[i]);

                if (!CommonUtils.isNullOrEmpty(ids[i]) && null != object) {
                    Case testCase = (Case) object;
                    // Guard against a case depending on itself.
                    if (!testCase.id.equals(c.id)) {
                        // below code would lead java heap space OOM issue. remove it on 2018/06/22
                        // if (!testCase.nsLibs.contains(c.nsLibs)) {
                        // testCase.nsLibs = c.nsLibs + "," + testCase.nsLibs;

                        CloudTestOutput doTestOutput = null;
                        Boolean ignore = isIgnoreCase(testCase.ignore);
                        boolean exeDepends = false;

                        // A null ignore verdict means the flag is itself an
                        // expression that may depend on prerequisite cases:
                        // run the dependencies first, then re-evaluate.
                        if (null == ignore) {
                            this.executeDependsCases(testCase);
                            exeDepends = true;

                            ignore = isIgnoreCase(testCase.ignore);
                        }

                        if ("true".equalsIgnoreCase(testCase.ignore.trim())
                                || (!Boolean.TRUE.equals(ignore))) {
                            if (!exeDepends) {
                                this.executeDependsCases(testCase);
                            }
                            doTestOutput = invoke(testCase);
                        } else {
                            continue;
                        }

                        if (null != doTestOutput) {
                            String assertErr = getFailedAssertMsg(doTestOutput.getFailedAssertResults());

                            // Performance-assertion failures alone do not
                            // abort the dependent chain.
                            if (null != doTestOutput
                                    && !doTestOutput.getStatus()
                                    && !assertErr.contains(MSG_PERFORMANCE_ASSERTION_FAILURE)) {
                                throw new Exception(
                                        CloudTestConstants.FAILED_DEPENDENT_TESTCASE_DESC_PROFIX
                                                + testCase.id
                                                + CloudTestConstants.FAILED_DEPENDENT_TESTCASE_DESC_POSTFIX
                                                + "\nCaused by "
                                                + (doTestOutput.getErrorMessage() == null ? ""
                                                        : doTestOutput.getErrorMessage() + ";")
                                                + (assertErr.length() > 0 ?
("assertion failure:" + assertErr) : "")); } } } } else { logger.warn("Dependent TestCase[" + ids[i] + "] was not found, will be ignored."); } } } } private String getFailedAssertMsg(List<AssertResult> failedAssertResults) { if (null == failedAssertResults || failedAssertResults.size() == 0) { return ""; } StringBuffer sb = new StringBuffer(); for (AssertResult ar : failedAssertResults) { sb.append(ar.getErrorMessage() + ";"); } return sb.toString(); } private CloudTestOutput invoke(Case c) throws Exception { logger.debug("cloudtest is executing case[id = '" + c.id + "']"); CloudTestInput test = ConfigurationProxy.converCaseToCloudTestInput(c); CloudTestOutput doTestOutput; String group = this.evaluateDataByCondition("", c.group); if (CommonUtils.isNullOrEmpty(group)) { group = c.group; } doTestOutput = invoke(test); if ("true".equalsIgnoreCase(c.returnFlag) && !CommonUtils.isNullOrEmpty(c.returnTo) && doTestOutput.getStatus()) { CacheManagerFactory.getInstance().put(CacheManagerFactory.getInstance().CACHE_TYPE_RUNTIME_DATA, c.returnTo, doTestOutput.getReturnValue()); } doTestOutput.setGroup(group); return doTestOutput; } /** * Do loading test. Starts 'concurrentNumber' thread in 'inFixedSeconds'. It is * thread-safety for multiple concurrence in short time. * * @param caseFilePath * - test case file path, can be the directory or file. * @param concurrentNumber * - concurrent thread number * @param concurrentSeconds * - the give fixed time (second) * @return - <code>CloudTestOutput</code> */ @WebMethod(exclude = true) public CloudTestOutput doLoadTest(String caseFilePath, Long concurrentNumber, Long concurrentSeconds) { CloudTestOutput output = invokeLoadTest(caseFilePath, null, concurrentNumber, concurrentSeconds, concurrentNumber); return output; } /** * Do loading test by given <code>CloudLoadTestInput</code>. * cloudLoadTestServiceByCase. * * @param loadTestInput * contains fileName caseId concurrentNumber and concurrentSeconds. 
* @return <code>CloudTestOutput</code> */ @WebMethod(operationName = TestService.OPERATION_NAME_CLOUD_LOAD_TEST_SERVICE_BY_CASE) @WebResult(name = TestService.WEB_RESULT_CLOUD_TEST_RESULT) public CloudTestOutput doLoadTest( @WebParam(name = TestService.WEB_PARAM_CLOUD_LOAD_INPUT, mode = Mode.IN) CloudLoadInput loadTestInput) { CloudTestOutput output; if (null == loadTestInput) { CloudTestOutput output1 = new CloudTestOutput(); output1.setStatus(false); output1.setErrorMessage("The cloud load test input is null."); output = output1; } CloudTestOutput checkParallelJobLockRisk = checkParallelJobToken(loadTestInput); output = invokeLoadTest(loadTestInput.getFileName(), loadTestInput.getCaseId(), loadTestInput.getConcurrentNumber(), loadTestInput.getConcurrentSeconds(), loadTestInput.getMaxThreadPoolSize()); resolveParallelJobLockRisk(checkParallelJobLockRisk, output); return output; } private CloudTestOutput checkParallelJobLock() { if (LocalParallelJobUtil.isInParallelJobService()) { CloudTestOutput output = new CloudTestOutput(); output.setErrorMessage(LocalParallelJobUtil.getWarningMessage()); return output; } else { return null; } } @SuppressWarnings("unused") private CloudTestOutput invokeLoadTest(String caseFilePath, String[] caseId, Long concurrentNumber, Long concurrentSeconds) { CloudTestOutput output = new CloudTestOutput(); double waitTime = 0; if (null == concurrentNumber) { concurrentNumber = 0L; } if (null != concurrentSeconds && concurrentSeconds > 0) { waitTime = concurrentSeconds / (concurrentNumber * 1.0); } List<CloudTestSingleServiceExecutor> serviceList = new ArrayList<CloudTestSingleServiceExecutor>(); List<Thread> threadList = new ArrayList<Thread>(); long start = System.currentTimeMillis(); long end = -1; ThreadGroup group = new ThreadGroup("CloudTestService-" + System.currentTimeMillis()); try { for (int i = 0; i < concurrentNumber; i++) { CloudTestSingleServiceExecutor cloudTestServiceThread = new CloudTestSingleServiceExecutor(this, 
                    caseFilePath, caseId);
            serviceList.add(cloudTestServiceThread);
            Thread thr = new Thread(group, cloudTestServiceThread);
            threadList.add(thr);
        }
        int i = 0;
        start = System.currentTimeMillis();
        for (Thread t : threadList) {
            t.start();
            i++;
            if (i < threadList.size()) {
                // Pause between start-ups so threads ramp up over the fixed window.
                Thread.sleep((long) (waitTime * 1000));
            }
        }
        // Waiting for all thread ending, let them join here
        while (group.activeCount() > 0) {
            Thread.sleep((1 * 10));
        }
        end = System.currentTimeMillis();
        // Merge each executor's results and error messages into the aggregate output.
        for (CloudTestSingleServiceExecutor t : serviceList) {
            if (null != t.getCloudTestOutput()) {
                if (output.getTestCaseResults() == null) {
                    output.setTestCaseResults(t.getCloudTestOutput().getTestCaseResults());
                } else {
                    output.getTestCaseResults().addAll(t.getCloudTestOutput().getTestCaseResults());
                }
                if (!CommonUtils.isNullOrEmpty(t.getCloudTestOutput().getErrorMessage())) {
                    StringBuffer errorMsg = new StringBuffer();
                    errorMsg.append(t.getCloudTestOutput().getErrorMessage());
                    if (!CommonUtils.isNullOrEmpty(output.getErrorMessage())
                            && output.getErrorMessage().indexOf(errorMsg.toString()) < 0) {
                        errorMsg.append("\nFound Error:" + output.getErrorMessage());
                    }
                    output.setErrorMessage(errorMsg.toString());
                }
            }
        }
        // NOTE(review): ThreadGroup.destroy() is deprecated; harmless here since all
        // member threads have terminated, but consider removing it.
        group.destroy();
        if (CommonUtils.isNullOrEmpty(output.getErrorMessage())) {
            output.setStatus(true);
        } else {
            output.setStatus(false);
        }
    } catch (Exception e) {
        String printExceptionStackTrace = CloudTestUtils.printExceptionStackTrace(e);
        logger.error(printExceptionStackTrace);
        output.setErrorMessage(printExceptionStackTrace);
        output.setStatus(false);
    } finally {
        if (end <= 0) {
            end = System.currentTimeMillis();
        }
        output.setRunTime((end - start) / 1000.00);
        CloudTestUtils.processResultStatistics(output, false);
    }
    return output;
}

// Thread-pool based load-test runner; bounds concurrency by maxThreadPoolSize and
// skips detailed per-case results for very large runs.
private CloudTestOutput invokeLoadTest(String caseFilePath, String[] caseId, Long concurrentNumber,
        Long concurrentSeconds, Long maxThreadPoolSize) {
    CloudTestOutput checkParallelJobLockRisk = checkParallelJobLock();
    CloudTestOutput output = new CloudTestOutput();
    if (null == concurrentNumber
            || concurrentNumber < 0) {
        concurrentNumber = 0L;
    }
    if (null == maxThreadPoolSize || maxThreadPoolSize <= 0) {
        // Default the pool size to the concurrency level (at least one thread).
        if (concurrentNumber > 0) {
            maxThreadPoolSize = concurrentNumber;
        } else {
            maxThreadPoolSize = 1L;
        }
    }
    ThreadPoolExecutor executor = new ThreadPoolExecutor(maxThreadPoolSize.intValue(),
            maxThreadPoolSize.intValue(), 60, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
    double waitTime = 0;
    if (null != concurrentSeconds && concurrentSeconds > 0) {
        // Spread task submissions evenly across the requested window.
        waitTime = concurrentSeconds / (concurrentNumber * 1.0);
    }
    List<CloudTestSingleServiceExecutor> serviceList = new ArrayList<CloudTestSingleServiceExecutor>();
    long start = System.currentTimeMillis();
    long end = -1;
    int maxDetailedLoadTestResponseAmount = 100000;
    try {
        maxDetailedLoadTestResponseAmount = CloudTestPluginFactory.getParamConfigServicePlugin()
                .getMaxDetailedLoadTestResponseAmount();
        start = System.currentTimeMillis();
        for (int i = 0; i < concurrentNumber; i++) {
            CloudTestSingleServiceExecutor cloudTestServiceThread = new CloudTestSingleServiceExecutor(this,
                    caseFilePath, caseId);
            if (concurrentNumber <= maxDetailedLoadTestResponseAmount) {
                // Only track executors individually when detailed results are wanted.
                serviceList.add(cloudTestServiceThread);
            }
            executor.execute(cloudTestServiceThread);
            if (i < concurrentNumber - 1) {
                Thread.sleep((long) (waitTime * 1000));
            }
        }
        executor.shutdown();
        // Waiting for all thread ending, let them join here
        boolean done = false;
        while (!done) {
            done = executor.awaitTermination(10, TimeUnit.MILLISECONDS);
        }
        end = System.currentTimeMillis();
        // Merge each executor's results and error messages into the aggregate output.
        for (CloudTestSingleServiceExecutor t : serviceList) {
            if (null != t.getCloudTestOutput()) {
                if (output.getTestCaseResults() == null) {
                    output.setTestCaseResults(t.getCloudTestOutput().getTestCaseResults());
                } else if (null != t.getCloudTestOutput().getTestCaseResults()) {
                    output.getTestCaseResults().addAll(t.getCloudTestOutput().getTestCaseResults());
                }
                if (!CommonUtils.isNullOrEmpty(t.getCloudTestOutput().getErrorMessage())) {
                    StringBuffer errorMsg = new StringBuffer();
                    errorMsg.append(t.getCloudTestOutput().getErrorMessage());
                    if (!CommonUtils.isNullOrEmpty(output.getErrorMessage())
                            && output.getErrorMessage().indexOf(errorMsg.toString()) < 0) {
                        errorMsg.append("\nFound Error:" + output.getErrorMessage());
                    }
                    output.setErrorMessage(errorMsg.toString());
                }
            }
        }
        if (CommonUtils.isNullOrEmpty(output.getErrorMessage())) {
            output.setStatus(true);
        } else {
            output.setStatus(false);
        }
    } catch (Exception e) {
        String printExceptionStackTrace = CloudTestUtils.printExceptionStackTrace(e);
        logger.error(printExceptionStackTrace);
        output.setErrorMessage(printExceptionStackTrace);
        output.setStatus(false);
    } finally {
        if (end <= 0) {
            end = System.currentTimeMillis();
        }
        output.setRunTime((end - start) / 1000.00);
        if (concurrentNumber >= maxDetailedLoadTestResponseAmount) {
            output.setReturns("The concurrentNumber is larger than " + maxDetailedLoadTestResponseAmount
                    + ", detailed TestCaseOutputs information was ignored for load test service.");
        }
        CloudTestUtils.processResultStatistics(output, false);
        CacheManagerFactory.getInstance().clear();
    }
    resolveParallelJobLockRisk(checkParallelJobLockRisk, output);
    return output;
}

// Appends the exception stack trace of 'e' to the output's error message and marks
// the output as failed.
private void printExceptionStack(CloudTestOutput output, Exception e) {
    output.setStatus(false);
    StringBuilder sBuilder = new StringBuilder();
    if (!this.isEmpty(output.getErrorMessage())) {
        sBuilder.append(output.getErrorMessage() + "\n");
    }
    sBuilder.append(CloudTestUtils.printExceptionStackTrace(e).trim());
    // if (null != output.getErrorMsg()
    // && output.getErrorMsg().trim().length() > 0) {
    // sBuilder.append("\n... much more \n" + output.getErrorMsg());
    output.setErrorMessage(sBuilder.toString());
}

/**
 * Test service by TestCase. If caseId is null, it will execute all cased under
 * specified case file. <br>
 * cloudTestServiceByTestCase
 *
 * @param input
 *            - input cae object, including case file full path and caseId.
 * @return test result output
 */
@WebMethod(operationName = TestService.OPERATION_NAME_CLOUD_TEST_SERVICE_BY_TEST_CASE)
@WebResult(name = TestService.WEB_RESULT_CLOUD_TEST_RESULT)
public CloudTestOutput doTest(
        @WebParam(name = TestService.WEB_PARAM_CLOUD_CASE_INPUT, mode = Mode.IN) CloudCaseInput input) {
    CloudTestOutput checkParallelJobLockRisk;
    CloudTestOutput output = new CloudTestOutput();
    if (null == input || input.getFileName() == null) {
        output.setErrorMessage("CloudCaseInput or case fileName is null ");
        output.setStatus(false);
        return output;
    } else {
        checkParallelJobLockRisk = checkParallelJobToken(input);
    }
    /*String[] fileNames = input.getFileName().split(",");
    String[] caseIds = input.getCaseId();*/
    // Expand the (possibly comma-separated) URI list into concrete case inputs.
    List<CloudCaseInput> inputs = CloudTestUtils.resolveCloudCaseInputByURIs(input.getFileName());
    List<String> filePathList = new ArrayList<String>();
    List<CloudTestOutput> outputList = new ArrayList<CloudTestOutput>();
    Boolean isDirectory = false;
    Map<String,String[]> caseIdMap = new HashMap<String,String[]>();
    for (CloudCaseInput caseInput : inputs) {
        String fileName = caseInput.getFileName();
        caseIdMap.put(CloudTestUtils.getContextedURI(fileName), caseInput.getCaseId());
        if (CommonUtils.isNullOrEmpty(fileName )) {
            continue;
        } else {
            fileName = fileName.trim();
        }
        String caseFilePath = ConfigurationProxy.getCloudTestRootPath() + fileName;
        File file = new File(caseFilePath);
        if (!file.exists()) {
            output.setErrorMessage("File Path is null or file does not exist,invalid file name is " + caseFilePath);
            output.setStatus(false);
        }
        if (file.isDirectory()) {
            // Directories are expanded recursively into individual case files.
            isDirectory = true;
            filePathList.addAll(CloudTestUtils.getAllFilePathListInFolder(caseFilePath, true));
        } else {
            filePathList.add(caseFilePath);
        }
    }
    if (filePathList.size() == 0) {
        output.setErrorMessage((output.getErrorMessage() == null ? "" : output.getErrorMessage() + ",")
                + "No runnable TestCase was found from case URIs:" + input.getFileName());
        output.setStatus(false);
        return output;
    }
    Collections.sort(filePathList, new CaseContextPathComparator());
    long start = System.currentTimeMillis();
    try {
        // Preload dependency/task bookkeeping for all files, then start the optional
        // assertion pre-compile thread.
        loadCasesDependsToContext(filePathList, null);
        loadCasesTasksToContext(filePathList, input.getCaseId());
        startPreCompileThread(filePathList);
        for (int i = 0; i < filePathList.size(); i++) {
            String filePath = filePathList.get(i);
            String casePath = filePath.substring(ConfigurationProxy.getCloudTestRootPath().length());
            CloudTestCase cloudTestCase = null;
            try {
                String[] caseIds = caseIdMap.get(CloudTestUtils.getContextedURI(filePath));
                // NOTE(review): the value looked up above is overwritten on both
                // branches below, so the caseIdMap lookup is effectively dead code.
                if (isDirectory) {
                    caseIds = null;
                }else {
                    caseIds = input.getCaseId();
                }
                // get input list by fileName and caseIds
                cloudTestCase = ConfigurationProxy.loadCloudTestCase(filePath, caseIds);
                if (null == cloudTestCase) {
                    continue;
                }
                Boolean ignore = isIgnoreCase(cloudTestCase.ignore);
                boolean exeDepends = false;
                if (null == ignore) {
                    // Ignore expression not evaluable yet: run root depends first,
                    // then re-evaluate.
                    executeRootDependsCases(cloudTestCase);
                    exeDepends = true;
                    ignore = isIgnoreCase(cloudTestCase.ignore);
                }
                if (!Boolean.TRUE.equals(ignore)) {
                    if (!exeDepends) {
                        executeRootDependsCases(cloudTestCase);
                    }
                    List<Case> cases = cloudTestCase.testCase;
                    String assertFileName = checkAssertFile(filePath, cloudTestCase.assertRuleFile);
                    String group = this.evaluateDataByCondition("", cloudTestCase.group);
                    if (CommonUtils.isNullOrEmpty(group)) {
                        group = cloudTestCase.group;
                    }
                    for (Case c : cases) {
                        if (c != null) {
                            executeTestCase(group, c, casePath, assertFileName, outputList);
                        }
                        // NOTE(review): c.id is dereferenced outside the null check
                        // above - a null list element would raise an NPE here.
                        CacheManagerFactory.getInstance().remove(
                                CacheManagerFactory.getInstance().CACHE_TYPE_TASKS_QUEUE,
                                this.buildDependsNsURI(casePath, c.id));
                    }
                } else {
                    clearAllCaseTasksFromContext(casePath, cloudTestCase);
                }
            } catch (Exception e) {
                if (null != cloudTestCase) {
                    clearAllCaseTasksFromContext(casePath, cloudTestCase);
                }
                logger.error(e.getMessage() + CloudTestUtils.printExceptionStackTrace(e));
                if (!this.isEmpty(output.getErrorMessage())) {
                    output.setErrorMessage(output.getErrorMessage() + "\n......\n"
                            + "Found error in Test Case file, the path is: " + filePath);
                } else {
                    output.setErrorMessage("Found error in Test Case file, the path is: " + filePath);
                }
                printExceptionStack(output, e);
            }
        }
    } finally {
        Collections.sort(outputList, new CloudTestUtils.TestCaseOutputComparator());
        output.setTestCaseResults(outputList);
        if (CommonUtils.isNullOrEmpty(output.getErrorMessage())
                && (null == output.getTestCaseResults() || output.getTestCaseResults().size() == 0)) {
            output.setErrorMessage(CLOUDTEST_ERROR_MESSAGE_NO_TEST_CASE_WAS_FOUND);
            output.setStatus(false);
        }
        CacheManagerFactory.getInstance().clear();
        long end = System.currentTimeMillis();
        output.setRunTime((end - start) / 1000.00);
    }
    CloudTestUtils.processResultStatistics(output, isDirectory);
    resolveParallelJobLockRisk(checkParallelJobLockRisk, output);
    return output;
}

// Prefixes the parallel-job warning (when present) onto the output's error message.
private void resolveParallelJobLockRisk(CloudTestOutput checkParallelJobLockRisk, CloudTestOutput output) {
    if (checkParallelJobLockRisk != null) {
        output.setErrorMessage(checkParallelJobLockRisk.getErrorMessage() + (output.getErrorMessage() != null ?
                output.getErrorMessage() : ""));
    }
}

// Strips an optional "user-token:" prefix from the input file name; when the token
// belongs to the current user, the parallel-job warning is suppressed.
private CloudTestOutput checkParallelJobToken(CloudCaseInput input) {
    CloudTestOutput checkParallelJobLockRisk = this.checkParallelJobLock();
    String[] tokens = LocalParallelJobUtil.parseTokenAndValue(input.getFileName());
    if (tokens != null && tokens.length == 2) {
        input.setFileName(tokens[1]);
        if (tokens[0] != null && tokens[0].equals(LocalParallelJobUtil.getCurrentUser())) {
            checkParallelJobLockRisk = null;
        }
    }
    return checkParallelJobLockRisk;
}

// Starts a background thread that pre-compiles assertion rules when enabled by the
// 'cloudtest.assert.pre_compile.enable' plugin parameter.
private void startPreCompileThread(List<String> filePathList) {
    try {
        String enable = PluginConfigProxy
                .getParamValueByName(CloudTestConstants.CLOUDTEST_ASSERT_PRE_COMPILE_ENABLE);
        if ("true".equalsIgnoreCase(enable)) {
            new Thread(new CaseAssertPreCompileThread(filePathList)).start();
        }
    } catch (Exception e1) {
        logger.info(
                "assertion pre-compile thread was disabled, which can be enabled by 'cloudtest.assert.pre_compile.enable' parameter in plugin config. ");
    }
}

// Executes the file-level 'depends' declarations of a CloudTestCase: either an
// entire imported case file or individual cases resolved from the namespace libs.
private void executeRootDependsCases(CloudTestCase cloudTestCase) throws Exception {
    if (!CommonUtils.isNullOrEmpty(cloudTestCase.depends)) {
        String[] depends = cloudTestCase.depends.split(",");
        // NOTE(review): cloudTestCase.imports is split without a null check; a case
        // file with 'depends' but no 'imports' would raise an NPE here - verify.
        String[] imports = cloudTestCase.imports.split(",");
        // for (String impt : imports) {
        // CloudTestCase dependsCloudTestCase = ConfigurationProxy
        // .loadCloudTestCase(CloudTestUtils
        // .resolveTestCaseImportsPath(cloudTestCase.ns,
        // impt)[0], null);
        // this.executeRootDependsCases(dependsCloudTestCase);
        for (String s : depends) {
            if (isDependsEntireCaseFile(s, imports)) {
                CloudTestCase dependsCloudTestCase = ConfigurationProxy
                        .loadCloudTestCase(CloudTestUtils.resolveTestCaseImportsPath(cloudTestCase.ns, s)[0], null);
                if (!dependsCloudTestCase.ns.equals(cloudTestCase.ns)) {
                    // Avoid self-recursion on the same namespace.
                    this.executeRootDependsCases(dependsCloudTestCase);
                }
                for (Case c : dependsCloudTestCase.testCase) {
                    if (!isIgnoreCase(c.ignore)) {
                        this.executeDependsCases(c);
                        invoke(c);
                    }
                }
            } else {
                Object object = getDependCaseFromNameSpaceLibs(cloudTestCase.nsLibs, s);
                if
                        (null != object && object instanceof Case) {
                    Case c = (Case) object;
                    CloudTestCase dependsCloudTestCase = ConfigurationProxy.loadCloudTestCase(c.ns, null);
                    if (!dependsCloudTestCase.ns.equals(cloudTestCase.ns)) {
                        this.executeRootDependsCases(dependsCloudTestCase);
                    }
                    this.executeDependsCases(c);
                    invoke(c);
                } else {
                    logger.warn("Dependent TestCase[" + s + "] was not found, will be ignored.");
                }
            }
        }
    }
}

// True when the depends entry refers to an entire imported case file rather than a
// single case id.
private boolean isDependsEntireCaseFile(String s, String[] imports) {
    if (CommonUtils.isNullOrEmpty(s)) {
        return false;
    }
    for (String impt : imports) {
        if (s.equals(impt)) {
            return true;
        }
    }
    return false;
}

// Removes every case of the given file from the pending-tasks cache.
private void clearAllCaseTasksFromContext(String casePath, CloudTestCase cloudTestCase) {
    for (Case c : cloudTestCase.testCase) {
        CacheManagerFactory.getInstance().remove(CacheManagerFactory.getInstance().CACHE_TYPE_TASKS_QUEUE,
                this.buildDependsNsURI(casePath, c.id));
    }
}

// Registers every non-ignored case of every file as a pending task in the cache.
private void loadCasesTasksToContext(List<String> filePathList, String[] caseIds) {
    for (int i = 0; i < filePathList.size(); i++) {
        String filePath = filePathList.get(i);
        try {
            // get input list by fileName and caseIds
            CloudTestCase cloudTestCase = ConfigurationProxy.loadCloudTestCase(filePath, caseIds);
            for (Case c : cloudTestCase.testCase) {
                if (!"true".equalsIgnoreCase(c.ignore)) {
                    String uri = this.buildDependsNsURI(
                            filePath.substring(ConfigurationProxy.getCloudTestRootPath().length()), c.id);
                    CacheManagerFactory.getInstance().put(CacheManagerFactory.getInstance().CACHE_TYPE_TASKS_QUEUE,
                            uri, uri);
                }
            }
        } catch (Exception e) {
            logger.warn("Cases loading in " + filePath + " failed, caused by " + e.getMessage() + ".");
        }
    }
}

// Looks a depends id up in each namespace of the comma-separated nsLibs list and
// returns the first hit from the test-case cache, or null.
private Object getDependCaseFromNameSpaceLibs(String nsLibs, String depend) {
    String[] libs = nsLibs.split(",");
    Object object = null;
    for (String ns : libs) {
        String buildDependsNsURI = buildDependsNsURI(ns, depend);
        object = CacheManagerFactory.getInstance().get(CacheManagerFactory.getInstance().CACHE_TYPE_TESTCASE,
                buildDependsNsURI);
        if (null != object) {
            return object;
        }
    }
    return object;
}

// Builds the cache key '<ns>:<depend>' with normalized path separators.
private String buildDependsNsURI(String ns, String depend) {
    return (ns + ":" + depend).replace("\\", "/");
}

// Evaluates an 'ignore' expression. Returns TRUE/FALSE on success and null when the
// expression cannot be evaluated (callers treat null as "retry after depends ran").
private Boolean isIgnoreCase(String ignoreExpress) {
    Boolean ignore = false;
    Object ignoreObj = null;
    if (CommonUtils.isNullOrEmpty(ignoreExpress)) {
        return false;
    }
    try {
        ignoreObj = ObjectDigester.fromJava(ignoreExpress);
    } catch (Exception e) {
        // e.printStackTrace();
        logger.warn("eval ingore expression '" + ignoreExpress + "'.caused by:" + e.getMessage());
        return null;
    }
    if (null != ignoreObj && ignoreObj.getClass().isAssignableFrom(Boolean.class)) {
        ignore = (Boolean) ignoreObj;
    }
    return ignore;
}

// Resolves the assert rule file for a case file, generating it on first use.
private String checkAssertFile(String filePath, String assertRuleFile) {
    String assertFileName = null;
    String[] assertFileNames = CommonUtils.fetchIncludesFileNames(assertRuleFile, filePath);
    if (assertFileNames != null && assertFileNames.length > 0) {
        assertFileName = assertFileNames[0];
    }
    if (!CommonUtils.isNullOrEmpty(assertFileName)) {
        // NOTE(review): synchronizing on a String value is fragile - distinct
        // String instances with equal content do not share a lock; consider a
        // dedicated lock object keyed by file name.
        synchronized (assertFileName) {
            if (!new File(assertFileName).exists()) {
                try {
                    buildAssertRuleFile(ConfigurationProxy.loadCloudTestCase(filePath, null), assertFileName);
                } catch (Exception e) {
                    assertFileName = null;
                    logger.warn("checking assert rule file failure, the assert will be ignored. caused by " + e.getMessage(), e);
                }
            }
        }
    }
    return assertFileName;
}

// Recursively caches every case (keyed by '<ns>:<id>') of the given files and of
// the files they import, so later depends lookups can resolve them.
private void loadCasesDependsToContext(List<String> filePathList, List<String> importsLogList) {
    if (null == importsLogList) {
        importsLogList = Collections.synchronizedList(new ArrayList<String>());
    }
    for (int i = 0; i < filePathList.size(); i++) {
        String filePath = filePathList.get(i);
        try {
            // get input list by fileName and caseIds
            CloudTestCase cloudTestCase = ConfigurationProxy.loadCloudTestCase(filePath, null);
            String imports = cloudTestCase.imports;
            if (!CommonUtils.isNullOrEmpty(imports)) {
                File currentFile = new File(filePath);
                List<String> importsList = new ArrayList<String>();
                String[] strs = null;
                String[] splits = imports.split(",");
                for (String impt : splits) {
                    strs = CloudTestUtils.resolveTestCaseImportsPath(filePath, impt);
                    for (String p : strs) {
                        File f = new File(p);
                        // Skip self-imports and files already visited to avoid cycles.
                        if (!f.getAbsolutePath().equals(currentFile.getAbsolutePath()) && f.exists()
                                && !importsLogList.contains(f.getPath())) {
                            importsList.add(f.getPath());
                        }
                    }
                    importsLogList.addAll(importsList);
                }
                this.loadCasesDependsToContext(importsList, importsLogList);
            }
            List<Case> cases = cloudTestCase.testCase;
            for (Case c : cases) {
                String buildDependsNsURI = buildDependsNsURI(cloudTestCase.ns, c.id);
                c.ns = cloudTestCase.ns;
                CacheManagerFactory.getInstance().put(CacheManagerFactory.getInstance().CACHE_TYPE_TESTCASE,
                        buildDependsNsURI, c);
            }
        } catch (Exception e) {
            logger.warn("Cases loading in " + filePath + " failed, caused by " + e.getMessage() + ". depends cases will not be executed.");
        }
    }
}

// True when the string is null or contains only whitespace.
private boolean isEmpty(String errorMsg) {
    return (null == errorMsg) || errorMsg.trim().length() == 0;
}

// Executes a single Case (including foreach expansion, depends, posts and
// assertions), appends its CloudTestOutput to outputList, and returns the raw
// return value of the invocation (null when the case was skipped or failed).
private Object executeTestCase(String group, Case c, String casePath, String assertFileName,
        List<CloudTestOutput> outputList) {
    CloudTestOutput testCaseOutput = new CloudTestOutput();
    Object returnObj = null;
    CloudTestInput input = null;
    Boolean ignore = isIgnoreCase(c.ignore);
    try {
        input = ConfigurationProxy.converCaseToCloudTestInput(c);
        if (!Boolean.TRUE.equals(ignore)) {
            if (!CommonUtils.isNullOrEmpty(c.foreach)) {
                // foreach cases append one output per iteration themselves.
                executeForeachCases(casePath, c, group, assertFileName, input, outputList);
                testCaseOutput = null;
            } else {
                boolean exeDepends = false;
                if (null == ignore) {
                    this.executeDependsCases(c);
                    exeDepends = true;
                    ignore = isIgnoreCase(c.ignore);
                }
                if (!Boolean.TRUE.equals(ignore)) {
                    if (!exeDepends) {
                        this.executeDependsCases(c);
                    }
                    testCaseOutput = invoke(c);
                    exeAndAssertPostsResult(c, testCaseOutput);
                    if (testCaseOutput != null) {
                        returnObj = testCaseOutput.getReturnValue();
                        assertExecutionResult(c, assertFileName, input, testCaseOutput);
                    }
                } else {
                    testCaseOutput = null;
                }
            }
        } else {
            testCaseOutput = null;
        }
    } catch (Exception e) {
        logger.error(e.getMessage() + CloudTestUtils.printExceptionStackTrace(e), e);
        if (ignore != null && testCaseOutput != null) {
            testCaseOutput.setStatus(false);
            testCaseOutput.setTestCase(input);
            printExceptionStack(testCaseOutput, e);
        } else {
            testCaseOutput = null;
        }
    } finally {
        // A null testCaseOutput means the case was skipped or reported elsewhere.
        if (null != testCaseOutput) {
            if (CommonUtils.isNullOrEmpty(testCaseOutput.getGroup())) {
                testCaseOutput.setGroup(group);
            }
            testCaseOutput.setCasePath(casePath);
            testCaseOutput.setCaseId(this.evaluateDataByCondition(c.id, c.eachId));
            testCaseOutput.setReturnValue(null);
            outputList.add(testCaseOutput);
            logger.debug(c.id + "@" + casePath + " was done in " + testCaseOutput.getRunTime() + "s");
        }
    }
    return returnObj;
}

// Runs the case's 'posts' cases and folds any of their errors into the main
// case's error message.
private void exeAndAssertPostsResult(Case c, CloudTestOutput testCaseOutput) throws
        Exception {
    List<CloudTestOutput> postsResult = executePostsCases(c);
    if (postsResult != null) {
        for (CloudTestOutput output : postsResult) {
            if (!CommonUtils.isNullOrEmpty(output.getErrorMessage())) {
                testCaseOutput.setErrorMessage(
                        (testCaseOutput.getErrorMessage() == null ? "" : testCaseOutput.getErrorMessage())
                                + "post case[id='" + output.getCaseId()
                                + "'] was executed failed, caused by:\n" + output.getErrorMessage());
            }
        }
    }
}

// Runs the assertion rules for a finished case and updates its status accordingly.
private void assertExecutionResult(Case c, String assertFileName, CloudTestInput input,
        CloudTestOutput testCaseOutput) throws Exception {
    Object assertObject = testCaseOutput.getReturnValue();
    if (needAssert(c, assertFileName, testCaseOutput)) {
        // if (assertObject != null) {
        List<AssertResult> list = new AssertService().doAssert(assertFileName, c.assertId, assertObject);
        if (list != null) {
            testCaseOutput.setFailedAssertResults(list);
            testCaseOutput.setStatus(false);
        } else {
            testCaseOutput.setStatus(true);
        }
        // } else {
        // assertNotNull(c, testCaseOutput);
    }
    if ("false".equalsIgnoreCase(c.returnFlag)) {
        testCaseOutput.setReturns(null);
    }
    if ((null != testCaseOutput.getErrorMessage() && testCaseOutput.getErrorMessage().length() > 0)
            || (null != testCaseOutput.getFailedAssertResults()
                    && testCaseOutput.getFailedAssertResults().size() > 0)) {
        testCaseOutput.setStatus(false);
        testCaseOutput.setTestCase(input);
    } else {
        testCaseOutput.setStatus(true);
    }
    assertPerformanceTime(c, input, testCaseOutput);
}

// Expands a case's 'foreach' expression: evaluates it to an Iterable, binds each
// element to c.eachvar in the runtime cache and executes the case per element.
private void executeForeachCases(String casePath, Case c, String group, String assertFileName,
        CloudTestInput input, List<CloudTestOutput> oututputList) throws Exception {
    Object foreach = ObjectDigester.fromJava(c.foreach);
    if (foreach != null && foreach instanceof Iterable) {
        Iterable interable = (Iterable) foreach;
        int i = 0;
        for (Object eachvar : interable) {
            CacheManagerFactory.getInstance().put(CacheManager.CACHE_TYPE_RUNTIME_DATA, c.eachvar, eachvar);
            CloudTestOutput testCaseOutput = new CloudTestOutput();
            try {
                this.executeDependsCases(c);
                Boolean ignoreCase = this.isIgnoreCase(c.ignore);
                if (null == ignoreCase) {
                    logger.warn("eval ingore expression '" + c.ignore
                            + "' failed for below 'eachvar' element:\n" + ObjectDigester.toXML(eachvar));
                } else if (ignoreCase) {
                    // Skip this element when the ignore expression evaluates to true.
                    continue;
                }
                testCaseOutput = invoke(c);
                if (testCaseOutput.getStatus()) {
                    exeAndAssertPostsResult(c, testCaseOutput);
                }
                assertExecutionResult(c, assertFileName, input, testCaseOutput);
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                testCaseOutput.setStatus(false);
                testCaseOutput.setErrorMessage(CloudTestUtils.printExceptionStackTrace(e));
                testCaseOutput.setTestCase(ConfigurationProxy.converCaseToCloudTestInput(c));
            }
            String caseId = c.id;
            if (CommonUtils.isNullOrEmpty(c.eachId)) {
                // Default per-iteration id: '<caseId>@<iteration>'.
                caseId = c.id + "@" + (++i);
            }
            String eachId = c.eachId;
            caseId = evaluateDataByCondition(caseId, eachId);
            if (CommonUtils.isNullOrEmpty(testCaseOutput.getGroup())) {
                testCaseOutput.setGroup(group);
            }
            testCaseOutput.setCasePath(casePath);
            testCaseOutput.setCaseId(caseId);
            testCaseOutput.setReturnValue(null);
            oututputList.add(testCaseOutput);
        }
    } else {
        // Evaluation did not yield an Iterable: report a single failed output.
        String msg = "Case[id= '" + c.id + "'] "
                + "foreach command will be ignored due to evaluated result is invalid. foreach element is below:\n"
                + ObjectDigester.toXML(foreach);
        logger.warn(msg);
        CloudTestOutput testCaseOutput = new CloudTestOutput();
        testCaseOutput.setGroup(group);
        testCaseOutput.setCasePath(casePath);
        testCaseOutput.setCaseId(c.id);
        testCaseOutput.setErrorMessage(msg);
        testCaseOutput.setStatus(false);
        testCaseOutput.setTestCase(ConfigurationProxy.converCaseToCloudTestInput(c));
        oututputList.add(testCaseOutput);
    }
}

// Executes the case's comma-separated 'posts' cases and returns their outputs;
// null when the case declares no posts.
private List<CloudTestOutput> executePostsCases(Case c) throws Exception {
    if (CommonUtils.isNullOrEmpty(c.posts)) {
        return null;
    }
    List<CloudTestOutput> list = new ArrayList<CloudTestOutput>();
    String[] posts = c.posts.split(",");
    for (String post : posts) {
        Object object = getDependCaseFromNameSpaceLibs(c.nsLibs, post);
        if (!CommonUtils.isNullOrEmpty(post) && null != object) {
            Case testCase = (Case) object;
            this.executeDependsCases(testCase);
            CloudTestOutput invokeResult = invoke(testCase);
            invokeResult.setCaseId(c.id);
            list.add(invokeResult);
        } else {
            logger.warn("post case[id = '" + post + "'] is not found, will be ignored.");
        }
    }
    return list;
}

// Fails the case when its actual run time exceeds the declared timeout (seconds).
private void assertPerformanceTime(Case c, CloudTestInput input, CloudTestOutput testCaseOutput) {
    Double expectedTime = c.assertion.timeout;
    Double runTime = testCaseOutput.getRunTime();
    if (runTime == null) {
        runTime = 0D;
    }
    if (expectedTime != null && expectedTime >= 0 && runTime > expectedTime) {
        AssertResult ar = new AssertResult();
        ar.setErrorMessage(MSG_PERFORMANCE_ASSERTION_FAILURE + "Expected time is " + expectedTime
                + "s, actual execution time is " + runTime + "s");
        List<AssertResult> assertList = testCaseOutput.getFailedAssertResults();
        if (null == assertList) {
            assertList = new ArrayList<AssertResult>();
        }
        assertList.add(ar);
        testCaseOutput.setFailedAssertResults(assertList);
        testCaseOutput.setTestCase(input);
        testCaseOutput.setStatus(false);
    }
}

// Fails the case when a null return value is not explicitly allowed by the
// assert expression. Currently unused - kept for reference.
@SuppressWarnings("unused")
private void assertNotNull(Case c, CloudTestOutput testCaseOutput) throws Exception {
    Boolean needNull = false;
    Method m
            = getMethodByCase(c);
    // if none method found, return back
    if (null == m) {
        return;
    }
    String name = CloudTestUtils.formatSimpleName(m.getReturnType()) + CloudTestConstants.ASSERT_RESULT_SUFFIX;
    Pattern p = Pattern.compile(name + ".*==.*null", Pattern.DOTALL);
    // Null results are acceptable when there is no assert expression, it is the
    // constant "true", or it explicitly tests the result against null.
    if (CommonUtils.isNullOrEmpty(c.assertion.assert_) || "true".equals(c.assertion.assert_.trim())
            || p.matcher(c.assertion.assert_).find()) {
        needNull = true;
    }
    if (!needNull) {
        testCaseOutput.setStatus(false);
        List<AssertResult> al = testCaseOutput.getFailedAssertResults();
        if (al == null) {
            al = new ArrayList<AssertResult>();
        }
        AssertResult ar = new AssertResult();
        ar.setErrorMessage(c.id + " asserted failure, returned result is null.");
        al.add(ar);
        testCaseOutput.setFailedAssertResults(al);
    }
    // try {
    // Class.forName("bsh.Interpreter");
    // Interpreter bsh = new Interpreter();
    // bsh.set(name, null);
    // Object o = bsh.eval(c.assertion.assert_);
    // if (o != null
    // && (Boolean.class.isAssignableFrom(o.getClass()) || boolean.class
    // .isAssignableFrom(o.getClass()))) {
    // needNull = (Boolean) o;
    // } catch (Exception e) {
    // Pattern p = Pattern.compile(name + ".*==.*null",
    // Pattern.DOTALL);
    // if (p.matcher(c.assertion.assert_).find()) {
    // needNull = true;
}

// True when the case result should be run through the vRules assertion file.
private boolean needAssert(Case c, String assertFileName, CloudTestOutput testCaseOutput) throws Exception {
    boolean isMethodReturnValue = true;
    return isMethodReturnValue && !CommonUtils.isNullOrEmpty(assertFileName)
            && assertFileName.toLowerCase().endsWith(POSTFIX_ASSERT_XML)
            && !CommonUtils.isNullOrEmpty(c.assertId) && testCaseOutput.getStatus() == true
            && CommonUtils.isNullOrEmpty(testCaseOutput.getErrorMessage());
}

// Generates the vRules4j assertion rule file for all cases that declare an
// assertId, deriving the assert context from each case's method return type.
private void buildAssertRuleFile(CloudTestCase cloudTestCase, String assertFileName) throws Exception {
    vRules4j rules4j = new vRules4j();
    String[] asserts = ASSERTS;
    List<Context> contextList = new ArrayList<vRules4j.Context>();
    List<com.unibeta.vrules.base.vRules4j.Object> objList = new ArrayList<vRules4j.Object>();
    rules4j.imports =
            rules4j.imports + "static com.unibeta.cloudtest.util.ObjectDigester.*;";
    // Statically import each known assert helper class that is on the classpath.
    for (String ast : asserts) {
        boolean has = true;
        try {
            Class.forName(ast);
        } catch (ClassNotFoundException e) {
            has = false;
        }
        if (has) {
            rules4j.imports = rules4j.imports + "static " + ast + ".*;";
        }
    }
    // Expose the cache manager to the generated rules as '$cache$'.
    rules4j.java = "\n" + "com.unibeta.cloudtest.config.CacheManager $cache$ = "
            + "com.unibeta.cloudtest.config.CacheManagerFactory.getInstance();" + "\n";
    for (Case c : cloudTestCase.testCase) {
        if (CommonUtils.isNullOrEmpty(c.assertId)) {
            continue;
        }
        Method m = this.getMethodByCase(c);
        if (null != m && m.getReturnType() != null && !void.class.equals(m.getReturnType())) {
            try {
                Context ctx = new Context();
                String canonicalName = m.getReturnType().getCanonicalName();
                if (m.getReturnType().isMemberClass()) {
                    canonicalName = CloudTestUtils.formatMemberClassCanonicalName(m.getReturnType());
                }
                ctx.className = CloudTestUtils.evalDataType(canonicalName).getCanonicalName();
                ctx.name = CloudTestUtils.formatSimpleName(m.getReturnType())
                        + CloudTestConstants.ASSERT_RESULT_SUFFIX;
                if (!contextList.contains(ctx)) {
                    contextList.add(ctx);
                }
            } catch (Exception e) {
                logger.warn("Assert rule evals failure, which should be due to invalid return type of "
                        + m.getReturnType().getCanonicalName() + ". caused by " + e.getMessage(), e);
            }
        }
        com.unibeta.vrules.base.vRules4j.Object obj = new com.unibeta.vrules.base.vRules4j.Object();
        obj.id = c.assertId;
        obj.className = CloudTestAssert.class.getName();
        obj.name = c.id;
        obj.nillable = "false";
        com.unibeta.vrules.base.vRules4j.Object.Rule r = obj.rules[0];
        r.id = c.id + "Rule";
        r.name = r.id;
        r.assert_ = c.assertion.assert_;
        if (CommonUtils.isNullOrEmpty(r.assert_)) {
            // An empty assert expression always passes.
            r.assert_ = "true";
        }
        ErrorMessage em = new ErrorMessage();
        if (!CommonUtils.isNullOrEmpty(c.assertion.message)) {
            em.message = c.assertion.message;
        } else {
            em.message = c.id + " returned value is invalid.";
        }
        em.id = c.id;
        r.errorMessage = em;
        r.isComplexType = "false";
        r.isMapOrList = "false";
        obj.rules[0] = r;
        objList.add(obj);
    }
    rules4j.contexts = contextList.toArray(new Context[] {});
    rules4j.objects = objList.toArray(new com.unibeta.vrules.base.vRules4j.Object[] {});
    Java2vRules.toXml(rules4j, assertFileName);
}

// Builds the Class[] of parameter types declared by the case's input definition.
private Class[] buildMethodParamClassArray(Case c) throws Exception {
    CloudTestInput in = ConfigurationProxy.converCaseToCloudTestInput(c);
    Class[] methodParamClassArray = null;
    if (in.getParameter() != null && in.getParameter().size() > 0) {
        methodParamClassArray = new Class[in.getParameter().size()];
        List<Class> cl = new ArrayList<Class>();
        for (CloudTestParameter p : in.getParameter()) {
            cl.add(getDataType(p.getDataType().trim()));
        }
        methodParamClassArray = cl.toArray(methodParamClassArray);
    } else {
        methodParamClassArray = new Class[] {};
    }
    return methodParamClassArray;
}

// Core reflective invocation: resolves the target class/method from the input,
// materializes parameters (inline values or XML data files) and executes the call.
private CloudTestOutput invoke(CloudTestInput input) throws Exception {
    CloudTestOutput output = new CloudTestOutput();
    output.setTimestamp(new Date());
    output.setStatus(true);
    if (!validateInput(input, output)) {
        return output;
    }
    Class c = null;
    // get className and methodName
    String className = input.getClassName().trim();
    String methodName = input.getMethodName().trim();
    // get method input parameters list
    List cloudTestParameterList = input.getParameter();
    int
            paramLength = 0;
    if (cloudTestParameterList != null) {
        paramLength = cloudTestParameterList.size();
    }
    // parameters type array
    Class[] methodParamClassArray = new Class[paramLength];
    // parameters value array
    Object[] methodParamValueArray = new Object[paramLength];
    CloudTestParameter cloudTestParameter;
    UserTransaction trans = null;
    try {
        for (int i = 0; i < paramLength; i++) {
            cloudTestParameter = (CloudTestParameter) cloudTestParameterList.get(i);
            if (null == cloudTestParameter.getDataType() || cloudTestParameter.getDataType().length() == 0) {
                output.setStatus(false);
                output.setErrorMessage("parameter data type is null!");
                return output;
            }
            methodParamClassArray[i] = getDataType(cloudTestParameter.getDataType().trim());
            // 1:the field value is file path in the CloudTestParameter
            if (PARAMETER_TYPE_BY_XML_FILE.equals(cloudTestParameter.getParameterType())) {
                String xmlDataFile = ConfigurationProxy.getCloudTestRootPath() + cloudTestParameter.getValue();
                File caseDataFile = new File(xmlDataFile);
                if (caseDataFile.exists()) {
                    // Deserialize the parameter value from the XML data file.
                    methodParamValueArray[i] = ObjectDigester.fromXML(
                            XmlUtils.paserDocumentToString(XmlUtils.getDocumentByFileName(caseDataFile.getPath())));
                } else {
                    // Missing data file: scaffold a template file and report an error.
                    String xmlData = new XmlDataDigester().toXml(cloudTestParameter.getDataType().trim(),
                            caseDataFile.getPath());
                    output.setStatus(false);
                    output.setErrorMessage("\n test data file was not found, XmlDataDigester created xml file successfully. 
xml data file is located in " + caseDataFile.getPath()); methodParamValueArray[i] = ObjectDigester.fromXML(xmlData); } } // 2:test case is context case else if (PARAMETER_TYPE_BY_CONTEXT.equals(cloudTestParameter.getParameterType())) { methodParamValueArray[i] = evalueDataFromContext(cloudTestParameter); } else if (PARAMETER_TYPE_BY_VALUE.equals(cloudTestParameter.getParameterType())) { // the value of the field value is parameter value if (null == cloudTestParameter.getValue()) { cloudTestParameter.setValue(""); } boolean isXmlData = xmlValuePattern.matcher(cloudTestParameter.getValue()).find(); if (isXmlData) { methodParamValueArray[i] = ObjectDigester.fromXML(cloudTestParameter.getValue()); } else { methodParamValueArray[i] = evalueDataFromContext(cloudTestParameter); } } else { output.setStatus(false); output.setErrorMessage("parameter type is invalid, only 0,1 are acceptable in current version. " + "\n 0 stands for the java plain value(e.g \"string value...\", 12,12.5D ) or xml formed data(e.g <string>string value</string>, <int>12</int>,<double>12.5</double>), also java code such as 'new String(\"string value\")'; " + "\n 1 stands for loading data from xml data file, such as 'd:\\data.xml'."); return output; } } Method m; Object returnValue = null; long start = System.currentTimeMillis(); long end = -1; try { Object beanObject = null; try { // get object by bean factory Class.forName("org.springframework.beans.factory.BeanFactory"); BeanFactory beanFactory = CloudTestPluginFactory.getSpringBeanFactoryPlugin(); beanObject = beanFactory.getBean(className); c = beanObject.getClass(); } catch (Exception e) { // get class by className try { c = Class.forName(className); } catch (ClassNotFoundException e1) { try { boolean isXmlData = xmlValuePattern.matcher(className).find(); if (isXmlData) { beanObject = ObjectDigester.fromXML(className); } else { beanObject = ObjectDigester.fromJava(className); } if (null != beanObject) { c = beanObject.getClass(); } else { 
String message = "class '" + className + "' evaluated result is null.\n"; logger.error(message); throw new ClassNotFoundException(message, e1); } } catch (Exception e2) { String message = "'" + className + "' class was not found, evaluated also failure.\nCaused by " + e2.getMessage(); e2.printStackTrace(); logger.error(message, e2); throw new ClassNotFoundException(message, e1); } } } m = getMethod(c, methodName, methodParamClassArray); m.setAccessible(true); UserTransactionPlugin userTransactionPlugin = CloudTestPluginFactory.getUserTransactionPlugin(); try { if (null != userTransactionPlugin) { trans = userTransactionPlugin.getUserTransaction(); } } catch (Exception e) { logger.debug( "Gets UserTransaction plugin instance failed, UserTransaction control will be disabled."); } if (null != userTransactionPlugin) { userTransactionPlugin.before(); } if (null != trans) { trans.begin(); } start = System.currentTimeMillis(); if (Modifier.isStatic(m.getModifiers())) { returnValue = m.invoke(c, methodParamValueArray); } else { if (null == beanObject) { Class implClass = resolveInterfaceImpl(output, c); beanObject = implClass.newInstance(); } output = CloudTestPluginFactory.getCaseRunnerPlugin().run(beanObject, m, methodParamValueArray); returnValue = output.getReturnValue(); } if (null != trans) { trans.commit(); } if (null != userTransactionPlugin) { userTransactionPlugin.after(); } } catch (Exception e) { printExceptionStack(output, e); e.printStackTrace(); logger.error(e.getMessage() + CloudTestUtils.printExceptionStackTrace(e)); try { if (null != trans) { trans.rollback(); } } catch (Exception e1) { // let it be, no need handle it } } finally { if (end <= 0) { end = System.currentTimeMillis(); } if (output.getReturnValue() == null) { output.setReturnValue(returnValue); } if (output.getReturns() == null) { try { output.setReturns(ObjectDigester.toXML(returnValue)); } catch (Exception e) { logger.warn("Convert result to xml failure caused by " + e.getMessage(), e); if 
(null != returnValue) { output.setReturns(returnValue.toString()); } } } if (output.getRunTime() == null || output.getRunTime() == 0.0) { output.setRunTime((end - start) / 1000.00); } } } catch (Exception e) { printExceptionStack(output, e); logger.error(e.getMessage() + CloudTestUtils.printExceptionStackTrace(e)); } finally { // current is empty } if (output.getRunTime() == null) { output.setRunTime(0D); } Class targetClass = CloudTestUtils.getProxiedTargetClass(className); if (targetClass == null) { if (c != null) { output.setClassName(c.getCanonicalName()); } else { output.setClassName(className); } } else { output.setClassName(targetClass.getCanonicalName()); } return output; } private Class resolveInterfaceImpl(CloudTestOutput output, Class c) { Class implClass = c; if (c.isInterface()) { List<Class> l = null; try { l = CloudTestUtils.findImplementations(c, new String[] { CloudTestUtils.getIndexedSearchPackageName(c) }, false); } catch (Exception e) { // empty } if (null != l && l.size() > 0) { implClass = l.get(0); String instantiationWarn = "[InstantiationWarn]Interface '" + c.getCanonicalName() + "' can't be instantiated, cloud test engine find an implementation '" + implClass.getCanonicalName() + "' for this test case execution. 
For any concern, please check test case definition xml file."; logger.warn(instantiationWarn); output.setErrorMessage(instantiationWarn); } } return implClass; } private Method getMethod(Class c, String methodName, Class[] methodParamClassArray) throws NoSuchMethodException { Method m = null; try { m = c.getDeclaredMethod(methodName, methodParamClassArray); } catch (Exception e1) { m = c.getMethod(methodName, methodParamClassArray); } return m; } private Method getMethodByCase(Case cs) { Class clazz = null; Object beanObject = null; Class[] methodParamClassArray; String methodName; Method method = null; try { methodParamClassArray = buildMethodParamClassArray(cs); String className = cs.className.trim(); methodName = cs.methodName.trim(); try { // get object by bean factory Class springClass = Class.forName("org.springframework.beans.factory.BeanFactory"); BeanFactory beanFactory = CloudTestPluginFactory.getSpringBeanFactoryPlugin(); beanObject = beanFactory.getBean(className); clazz = beanObject.getClass(); } catch (Exception e) { // get class by className try { clazz = Class.forName(className); } catch (Exception e1) { Object fromJava = ObjectDigester.fromJava(className); if (null != fromJava) { clazz = fromJava.getClass(); } else { throw e1; } } } method = this.getMethod(clazz, methodName, methodParamClassArray); } catch (Exception e) { // TODO Auto-generated catch block logger.error(e.getMessage(), e); } return method; } private Class getDataType(String name) throws Exception { Class clazz = null; if (CloudTestConstants.PRIMITIVE_TYPE_MAP.keySet().contains(name)) { clazz = CloudTestConstants.PRIMITIVE_TYPE_MAP.get(name); } else { Object o = null; try { clazz = Class.forName(name); } catch (Exception e) { clazz = CloudTestUtils.evalDataType(name); } } return clazz; } private Object evalueDataFromContext(CloudTestParameter cloudTestParameter) throws Exception { Object methodParamValue = null; try { methodParamValue = 
ObjectDigester.fromJava(cloudTestParameter.getValue()); } catch (EvalError e) { String errMsg = "Parameter value evaluated error: " + e.getMessage(); e.printStackTrace(); logger.error(errMsg, e); throw new Exception(errMsg, e); } return methodParamValue; } private boolean validateInput(CloudTestInput input, CloudTestOutput output) { if (input == null) { output.setStatus(false); output.setErrorMessage("input object is null"); return false; } if (input.getClassName() == null) { output.setStatus(false); output.setErrorMessage("className is null"); return false; } if (input.getMethodName() == null) { output.setStatus(false); output.setErrorMessage("MethodName is null"); return false; } return true; } static class CaseContextPathComparator implements Comparator<String> { public int compare(String o1, String o2) { if (CommonUtils.isNullOrEmpty(o1) || CommonUtils.isNullOrEmpty(o2)) { return 0; } int i1 = o1.replace("\\", "/").split("/").length; int i2 = o2.replace("\\", "/").split("/").length; return i1 - i2; } } class CaseAssertPreCompileThread implements Runnable { private CaseAssertPreCompileThread() { } List<String> filePathList = null; public CaseAssertPreCompileThread(List<String> filePathList) { this.filePathList = filePathList; } public void run() { if (null == filePathList) { return; } for (String filePath : this.filePathList) { CloudTestCase cloudTestCase = null; try { cloudTestCase = ConfigurationProxy.loadCloudTestCase(filePath, null); } catch (Exception e) { // TODO Auto-generated catch block logger.error("CaseAssertPreCompileThread encounter error for " + filePath + ", caused by ", e); } if (null == cloudTestCase) { continue; } long start = System.currentTimeMillis(); synchronized (cloudTestCase) { String assertFileName = checkAssertFile(filePath, cloudTestCase.assertRuleFile); if (!CommonUtils.isNullOrEmpty(assertFileName)) { new AssertService().doAssert(assertFileName, "CaseAssertPreCompileThread", new Object()); } } long end = System.currentTimeMillis(); 
logger.debug(filePath + " assertion pre-compile was done in " + (end - start) / 1000.00 + "s"); } } } }
package com.valkryst.VTerminal.misc;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.valkryst.VTerminal.GraphicTile;
import com.valkryst.VTerminal.Tile;
import com.valkryst.VTerminal.font.Font;
import com.valkryst.VTerminal.shader.Shader;
import com.valkryst.VTerminal.shader.character.CharShader;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;

import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.VolatileImage;
import java.awt.image.WritableRaster;
import java.util.concurrent.TimeUnit;

/**
 * Caches fully-rendered (color-swapped, shaded) tile images as VolatileImages,
 * keyed by {@code Tile.getCacheHash()}, so repeated draws of identical tiles
 * avoid re-rendering.
 */
@ToString
public final class ImageCache {
    /** The cache. */
    private final Cache<Integer, VolatileImage> cachedImages;
    /** The font of the character images. */
    @Getter private final Font font;

    /**
     * Constructs a new ImageCache.
     *
     * @param font
     *          The font.
     *
     * @throws NullPointerException
     *           If the font is null.
     */
    public ImageCache(final @NonNull Font font) {
        this(font, 3);
    }

    /**
     * Constructs a new ImageCache.
     *
     * @param font
     *          The font.
     *
     * @param duration
     *          Number of minutes an entry may go unaccessed before it expires.
     *          Must be at least 1.
     *
     * @throws NullPointerException
     *           If the font is null.
     *
     * @throws IllegalArgumentException
     *           If the duration is below 1.
     */
    public ImageCache(final @NonNull Font font, final int duration) {
        if (duration < 1) {
            throw new IllegalArgumentException("The duration cannot be below 1.");
        }

        this.font = font;
        cachedImages = Caffeine.newBuilder()
                               .initialCapacity(5_000)
                               .expireAfterAccess(duration, TimeUnit.MINUTES)
                               .build();
    }

    /**
     * Retrieves a tile image from the cache.
     *
     * If no image could be found, then one is created, inserted into the cache,
     * and then returned.
     *
     * @param tile
     *          The tile.
     *
     * @return
     *          The character image.
     *
     * @throws NullPointerException
     *           If the tile is null.
     */
    public VolatileImage retrieve(final @NonNull Tile tile) {
        final int hash = tile.getCacheHash();

        VolatileImage image = cachedImages.getIfPresent(hash);

        // Re-render on a miss, or when the VolatileImage's backing surface
        // was lost (VolatileImage contents can be lost at any time).
        if (image == null || image.contentsLost()) {
            image = loadIntoCache(tile);
        }

        return image;
    }

    /**
     * Loads a tile into the cache.
     *
     * Renders the tile (color swap, then each of the tile's shaders in order)
     * and stores the result under the tile's cache hash, replacing any previous
     * entry.
     *
     * @param tile
     *          The tile.
     *
     * @return
     *          The resulting tile image.
     *
     * @throws NullPointerException
     *           If the tile is null.
     */
    public VolatileImage loadIntoCache(final @NonNull Tile tile) {
        BufferedImage bufferedImage;
        bufferedImage = applyColorSwap(tile, font);

        // CharShaders receive the tile for context; all other shaders only
        // operate on the image.
        for (final Shader shader : tile.getShaders()) {
            if (shader instanceof CharShader) {
                bufferedImage = ((CharShader) shader).run(bufferedImage, tile);
            } else {
                bufferedImage = shader.run(bufferedImage);
            }
        }

        final VolatileImage result = convertToVolatileImage(bufferedImage);
        cachedImages.put(tile.getCacheHash(), result);

        return result;
    }

    /**
     * Gets a character image for a character and applies the back/foreground
     * colors to it.
     *
     * If the font has no image for the character, a blank image is returned and
     * the tile's colors are set to opaque magenta (0xFFFF00FF) as a visible
     * "missing glyph" marker.
     *
     * @param character
     *          The character.
     *
     * @param font
     *          The font to retrieve the base character image from.
     *
     * @return
     *          The character image.
     *
     * @throws NullPointerException
     *           If the character or font are null.
     */
    private static BufferedImage applyColorSwap(final @NonNull Tile character, final @NonNull Font font) {
        BufferedImage image;

        try {
            image = cloneImage(font.getCharacterImage(character.getCharacter()));
        } catch (final NullPointerException e) {
            // Missing glyph: hand back a blank tile-sized image flagged magenta.
            image = new BufferedImage(font.getWidth(), font.getHeight(), BufferedImage.TYPE_INT_ARGB);
            character.setBackgroundColor(new Color(0xFFFF00FF));
            character.setForegroundColor(new Color(0xFFFF00FF));
            return image;
        }

        final int backgroundRGB = character.getBackgroundColor().getRGB();
        // final int backgroundA = (backgroundRGB >> 24) & 0xFF;
        // final int backgroundR = (backgroundRGB >> 16) & 0xFF;
        // final int backgroundG = (backgroundRGB >> 8) & 0xFF;
        // final int backgroundB = backgroundRGB & 0xFF;

        final int foregroundRGB = character.getForegroundColor().getRGB();
        // final int foregroundA = (foregroundRGB >> 24) & 0xFF;
        final int foregroundR = (foregroundRGB >> 16) & 0xFF;
        final int foregroundG = (foregroundRGB >> 8) & 0xFF;
        final int foregroundB = foregroundRGB & 0xFF;

        // GraphicTiles keep their own colors; only plain tiles get the
        // foreground color painted over glyph pixels.
        final boolean isTile = !(character instanceof GraphicTile);

        for (int y = 0; y < image.getHeight(); y++) {
            for (int x = 0; x < image.getWidth(); x++) {
                final int pixel = image.getRGB(x, y);
                final int alpha = (pixel >> 24) & 0xFF;
                final int red = (pixel >> 16) & 0xFF;
                final int green = (pixel >> 8) & 0xFF;
                final int blue = (pixel) & 0xFF;

                // "Transparent" here means fully clear AND pure black —
                // i.e. an untouched pixel of the glyph image.
                boolean isTransparent = alpha == 0;
                isTransparent &= red == 0;
                isTransparent &= green == 0;
                isTransparent &= blue == 0;

                if (isTransparent) {
                    image.setRGB(x, y, backgroundRGB);
                    continue;
                }

                if (isTile) {
                    if (alpha == 255) {
                        // Fully opaque glyph pixel: replace with the foreground color.
                        image.setRGB(x, y, foregroundRGB);
                    } else {
                        // Partially transparent glyph pixel: keep the glyph's
                        // alpha, take the foreground's RGB components.
                        final int blendedRGBA = (alpha << 24) + (foregroundR << 16) + (foregroundG << 8) + foregroundB;
                        image.setRGB(x, y, blendedRGBA);
                    }
                }
            }
        }

        return image;
    }

    /**
     * Makes a clone of an image.
     *
     * @param image
     *          The image.
     *
     * @return
     *          The clone image.
     *
     * @throws NullPointerException
     *           If the image is null.
     */
    public static BufferedImage cloneImage(final @NonNull BufferedImage image) {
        final ColorModel colorModel = image.getColorModel();
        final boolean isAlphaPremultiplied = colorModel.isAlphaPremultiplied();
        final WritableRaster writableRaster = image.copyData(image.getRaster().createCompatibleWritableRaster());
        return new BufferedImage(colorModel, writableRaster, isAlphaPremultiplied, null);
    }

    /**
     * Converts a BufferedImage into a VolatileImage.
     *
     * @param source
     *          The BufferedImage.
     *
     * @return
     *          The VolatileImage.
     */
    private static VolatileImage convertToVolatileImage(final BufferedImage source) {
        final GraphicsEnvironment graphicsEnvironment = GraphicsEnvironment.getLocalGraphicsEnvironment();
        final GraphicsDevice graphicsDevice = graphicsEnvironment.getDefaultScreenDevice();
        final GraphicsConfiguration graphicsConfiguration = graphicsDevice.getDefaultConfiguration();

        final VolatileImage destination = graphicsConfiguration.createCompatibleVolatileImage(source.getWidth(),
                source.getHeight(), source.getTransparency());

        // AlphaComposite.Src copies pixels verbatim instead of blending them
        // over the (uninitialized) destination surface.
        final Graphics2D g2d = destination.createGraphics();
        g2d.setComposite(AlphaComposite.Src);
        g2d.drawImage(source, 0, 0, null);
        g2d.dispose();

        return destination;
    }
}
package org.objectweb.proactive.core.rmi; import org.apache.log4j.Logger; import org.objectweb.proactive.core.util.UrlBuilder; import java.net.UnknownHostException; public class ClassServer implements Runnable { protected static Logger logger = Logger.getLogger(ClassServer.class.getName()); public static final int DEFAULT_SERVER_BASE_PORT = 2222; protected static int DEFAULT_SERVER_PORT_INCREMENT = 20; protected static int MAX_RETRY = 50; private static java.util.Random random = new java.util.Random(); protected static int port; static { String newport; if (System.getProperty("proactive.communication.protocol").equals("http")) { if(System.getProperty("proactive.http.port") != null){ newport = System.getProperty("proactive.http.port"); } else { newport = new Integer(DEFAULT_SERVER_BASE_PORT).toString(); System.setProperty("proactive.http.port", newport); } } } protected String hostname; private java.net.ServerSocket server = null; protected String paths; /** * Constructs a ClassServer that listens on a random port. The port number * used is the first one found free starting from a default base port. * obtains a class's bytecodes using the method <b>getBytes</b>. * @exception java.io.IOException if the ClassServer could not listen on any port. */ protected ClassServer() throws java.io.IOException { this(0, null); } protected ClassServer(int port_) throws java.io.IOException { if (port == 0) { port = boundServerSocket(Integer.parseInt(System.getProperty("proactive.http.port")), MAX_RETRY); } else { port = port_; server = new java.net.ServerSocket(port); } hostname = java.net.InetAddress.getLocalHost().getHostAddress(); newListener(); // if (logger.isInfoEnabled()) { // logger.info("communication protocol = " +System.getProperty("proactive.communication.protocol")+", http server port = " + port); } /** * Constructs a ClassServer that listens on <b>port</b> and * obtains a class's bytecodes using the method <b>getBytes</b>. 
* @param port the port number * @exception java.io.IOException if the ClassServer could not listen * on <b>port</b>. */ protected ClassServer(int port_, String paths) throws java.io.IOException { this(port_); this.paths = paths; printMessage(); } /** * Constructs a ClassFileServer. * @param classpath the classpath where the server locates classes */ public ClassServer(String paths) throws java.io.IOException { this(0, paths); } public static boolean isPortAlreadyBound(int port) { java.net.Socket socket = null; try { socket = new java.net.Socket(java.net.InetAddress.getLocalHost(), port); // if we can connect to the port it means the server already exists return true; } catch (java.io.IOException e) { return false; } finally { try { if (socket != null) { socket.close(); } } catch (java.io.IOException e) { } } } private void printMessage() { if (logger.isDebugEnabled()) { logger.debug( "To use this ClassFileServer set the property java.rmi.server.codebase to http: hostname + ":" + port + "/"); } if (this.paths == null) { logger.info( " --> This ClassFileServer is reading resources from classpath"); } else { logger.info( " --> This ClassFileServer is reading resources from the following paths"); //for (int i = 0; i < codebases.length; i++) { logger.info(paths); //codebases[i].getAbsolutePath()); } } public static int getServerSocketPort() { return port; } public String getHostname() { return hostname; } public static String getUrl() { try { return UrlBuilder.buildUrl(java.net.InetAddress.getLocalHost().getHostName(), "", "http:", port); } catch (UnknownHostException e) { // TODO Auto-generated catch block e.printStackTrace(); } return UrlBuilder.buildUrl("localhost", "", "http:", port); } /** * The "listen" thread that accepts a connection to the * server, parses the header to obtain the class file name * and sends back the bytecodes for the class (or error * if the class is not found or the response was malformed). 
*/ public void run() { java.net.Socket socket = null; // accept a connection while (true) { try { socket = server.accept(); HTTPRequestHandler service = (new HTTPRequestHandler(socket, paths)); service.start(); } catch (java.io.IOException e) { System.out.println("Class Server died: " + e.getMessage()); e.printStackTrace(); return; } } } private void newListener() { (new Thread(this, "ClassServer-" + hostname + ":" + port)).start(); } private int boundServerSocket(int basePortNumber, int numberOfTry) throws java.io.IOException { for (int i = 0; i < numberOfTry; i++) { try { server = new java.net.ServerSocket(basePortNumber); return basePortNumber; } catch (java.io.IOException e) { basePortNumber += random.nextInt(DEFAULT_SERVER_PORT_INCREMENT); System.setProperty("proactive.http.port", basePortNumber + ""); } } throw new java.io.IOException( "ClassServer cannot create a ServerSocket after " + numberOfTry + " attempts !!!"); } }
package org.pentaho.di.core.gui; import java.util.List; import org.pentaho.di.core.AddUndoPositionInterface; import org.pentaho.di.spoon.UndoInterface; public class SnapAllignDistribute { private List<? extends GUIPositionInterface> elements; private AddUndoPositionInterface addUndoPositionInterface; private int[] indices; private Redrawable redrawable; private UndoInterface undoInterface; public SnapAllignDistribute(UndoInterface undoInterface, List<? extends GUIPositionInterface> elements, int[] indices, AddUndoPositionInterface addUndoPositionInterface, Redrawable redrawable) { this.undoInterface = undoInterface; this.elements = elements; this.indices = indices; this.addUndoPositionInterface = addUndoPositionInterface; this.redrawable = redrawable; } public void snaptogrid(int size) { if (elements.size() == 0) return; // First look for the minimum x coordinate... GUIPositionInterface elemArray[] = new GUIPositionInterface[elements.size()]; Point before[] = new Point[elements.size()]; Point after[] = new Point[elements.size()]; for (int i = 0; i < elements.size(); i++) { GUIPositionInterface positionInterface = elements.get(i); elemArray[i] = positionInterface; Point p = positionInterface.getLocation(); before[i] = new Point(p.x, p.y); // What's the modulus ? int dx = p.x % size; int dy = p.y % size; // Correct the location to the nearest grid line! 
// This means for size = 10 // x = 3: dx=3, dx<=5 --> x=3-3 = 0; // x = 7: dx=7, dx> 5 --> x=3+10-3 = 10; // x = 10: dx=0, dx<=5 --> x=10-0 = 10; if (dx > size / 2) p.x += size - dx; else p.x -= dx; if (dy > size / 2) p.y += size - dy; else p.y -= dy; after[i] = new Point(p.x, p.y); } if (addUndoPositionInterface!=null) addUndoPositionInterface.addUndoPosition(undoInterface, elemArray, indices, before, after); redrawable.redraw(); } public void allignleft() { if (elements.size() == 0) return; GUIPositionInterface elemArray[] = elements.toArray(new GUIPositionInterface[elements.size()]); Point before[] = new Point[elements.size()]; Point after[] = new Point[elements.size()]; int min = 99999; // First look for the minimum x coordinate... for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); if (p.x < min) min = p.x; } // Then apply the coordinate... for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); before[i] = new Point(p.x, p.y); element.setLocation(min, p.y); after[i] = new Point(min, p.y); } if (addUndoPositionInterface!=null) addUndoPositionInterface.addUndoPosition(undoInterface, elemArray, indices, before, after); redrawable.redraw(); } public void allignright() { if (elements.size() == 0) return; GUIPositionInterface elemArray[] = elements.toArray(new GUIPositionInterface[elements.size()]); Point before[] = new Point[elements.size()]; Point after[] = new Point[elements.size()]; int max = -99999; // First look for the maximum x coordinate... for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); if (p.x > max) max = p.x; } // Then apply the coordinate... 
for (int i = 0; i < elements.size(); i++) { GUIPositionInterface stepMeta = elements.get(i); Point p = stepMeta.getLocation(); before[i] = new Point(p.x, p.y); stepMeta.setLocation(max, p.y); after[i] = new Point(max, p.y); } if (addUndoPositionInterface!=null) addUndoPositionInterface.addUndoPosition(undoInterface, elemArray, indices, before, after); redrawable.redraw(); } public void alligntop() { if (elements.size() == 0) return; GUIPositionInterface elemArray[] = elements.toArray(new GUIPositionInterface[elements.size()]); Point before[] = new Point[elements.size()]; Point after[] = new Point[elements.size()]; int min = 99999; // First look for the minimum y coordinate... for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); if (p.y < min) min = p.y; } // Then apply the coordinate... for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); before[i] = new Point(p.x, p.y); element.setLocation(p.x, min); after[i] = new Point(p.x, min); } if (addUndoPositionInterface!=null) addUndoPositionInterface.addUndoPosition(undoInterface, elemArray, indices, before, after); redrawable.redraw(); } public void allignbottom() { if (elements.size() == 0) return; GUIPositionInterface elemArray[] = elements.toArray(new GUIPositionInterface[elements.size()]); Point before[] = new Point[elements.size()]; Point after[] = new Point[elements.size()]; int max = -99999; // First look for the maximum y coordinate... for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); if (p.y > max) max = p.y; } // Then apply the coordinate... 
for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); before[i] = new Point(p.x, p.y); element.setLocation(p.x, max); after[i] = new Point(p.x, max); } if (addUndoPositionInterface!=null) addUndoPositionInterface.addUndoPosition(undoInterface, elemArray, indices, before, after); redrawable.redraw(); } public void distributehorizontal() { if (elements.size() <= 1) return; GUIPositionInterface elemArray[] = elements.toArray(new GUIPositionInterface[elements.size()]); Point before[] = new Point[elements.size()]; Point after[] = new Point[elements.size()]; int min = 99999; int max = -99999; int order[] = new int[elements.size()]; // First look for the minimum & maximum x coordinate... for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); if (p.x < min) min = p.x; if (p.x > max) max = p.x; order[i] = i; } // Difficult to keep the steps in the correct order. // If you just set the x-coordinates, you get special effects. // Best is to keep the current order of things. // First build an arraylist and store the order there. // Then sort order[], based upon the coordinate of the step. for (int i = 0; i < elements.size(); i++) { for (int j = 0; j < elements.size() - 1; j++) { Point p1 = (elements.get(order[j])).getLocation(); Point p2 = (elements.get(order[j+1])).getLocation(); if (p1.x > p2.x) // swap { int dummy = order[j]; order[j] = order[j + 1]; order[j + 1] = dummy; } } } // The distance between two steps becomes. int distance = (max - min) / (elements.size() - 1); for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(order[i]); Point p = element.getLocation(); before[i] = new Point(p.x, p.y); p.x = min + (i * distance); after[i] = new Point(p.x, p.y); } // Undo! 
if (addUndoPositionInterface!=null) addUndoPositionInterface.addUndoPosition(undoInterface, elemArray, indices, before, after); redrawable.redraw(); } public void distributevertical() { if (elements.size() <= 1) return; GUIPositionInterface elemArray[] = elements.toArray(new GUIPositionInterface[elements.size()]); Point before[] = new Point[elements.size()]; Point after[] = new Point[elements.size()]; int min = 99999; int max = -99999; int order[] = new int[elements.size()]; // First look for the minimum & maximum y coordinate... int selnr = 0; for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(i); Point p = element.getLocation(); if (p.y < min) min = p.y; if (p.y > max) max = p.y; order[i] = i; selnr++; } // Difficult to keep the steps in the correct order. // If you just set the x-coordinates, you get special effects. // Best is to keep the current order of things. // First build an arraylist and store the order there. // Then sort order[], based upon the coordinate of the step. for (int i = 0; i < elements.size(); i++) { for (int j = 0; j < elements.size() - 1; j++) { Point p1 = (elements.get(order[j])).getLocation(); Point p2 = (elements.get(order[j+1])).getLocation(); if (p1.y > p2.y) // swap { int dummy = order[j]; order[j] = order[j + 1]; order[j + 1] = dummy; } } } // The distance between two steps becomes. int distance = (max - min) / (elements.size() - 1); for (int i = 0; i < elements.size(); i++) { GUIPositionInterface element = elements.get(order[i]); Point p = element.getLocation(); before[i] = new Point(p.x, p.y); p.y = min + (i * distance); after[i] = new Point(p.x, p.y); } // Undo! if (addUndoPositionInterface!=null) addUndoPositionInterface.addUndoPosition(undoInterface, elemArray, indices, before, after); redrawable.redraw(); } }
package org.pentaho.di.trans.steps.csvinput;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.provider.local.LocalFile;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.exception.KettleConversionException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.textfileinput.EncodingType;

/**
 * Read a simple CSV file
 * Just output Strings found in the file...
 *
 * @author Matt
 * @since 2007-07-05
 */
public class CsvInput extends BaseStep implements StepInterface {
    private static Class<?> PKG = CsvInput.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

    private CsvInputMeta meta;
    private CsvInputData data;

    public CsvInput(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans) {
        super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
    }

    /**
     * Produces one output row per call. On the first call it initializes the
     * output/conversion row metadata, resolves the filename list (from the meta
     * or from previous steps), sets up parallel-read bookkeeping and opens the
     * first file.
     *
     * @return true while more rows may follow, false when this step copy is done.
     */
    public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
        meta = (CsvInputMeta) smi;
        data = (CsvInputData) sdi;

        if (first) {
            first = false;

            data.outputRowMeta = new RowMeta();
            meta.getFields(data.outputRowMeta, getStepname(), null, null, this);

            if (data.filenames == null) {
                // We're expecting the list of filenames from the previous step(s)...
                getFilenamesFromPreviousSteps();
            }

            // We only run in parallel if we have at least one file to process
            // AND if we have more than one step copy running...
            data.parallel = meta.isRunningInParallel() && data.totalNumberOfSteps > 1;

            // The conversion logic for when the lazy conversion is turned of is simple:
            // Pretend it's a lazy conversion object anyway and get the native type during conversion.
            data.convertRowMeta = data.outputRowMeta.clone();
            for (ValueMetaInterface valueMeta : data.convertRowMeta.getValueMetaList()) {
                valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
            }

            // Calculate the indexes for the filename and row number fields
            data.filenameFieldIndex = -1;
            if (!Const.isEmpty(meta.getFilenameField()) && meta.isIncludingFilename()) {
                data.filenameFieldIndex = meta.getInputFields().length;
            }

            data.rownumFieldIndex = -1;
            if (!Const.isEmpty(meta.getRowNumField())) {
                data.rownumFieldIndex = meta.getInputFields().length;
                if (data.filenameFieldIndex >= 0) {
                    data.rownumFieldIndex++;
                }
            }

            // Now handle the parallel reading aspect: determine total of all the file sizes
            // Then skip to the appropriate file and location in the file to start reading...
            // Also skip to right after the first newline
            if (data.parallel) {
                prepareToRunInParallel();
            }

            // Open the next file...
            if (!openNextFile()) {
                setOutputDone();
                return false; // nothing to see here, move along...
            }
        }

        // If we are running in parallel, make sure we don't read too much in this step copy...
        if (data.parallel) {
            if (data.totalBytesRead > data.blockToRead) {
                setOutputDone(); // stop reading
                return false;
            }
        }

        try {
            Object[] outputRowData = readOneRow(true); // get row, set busy!
            if (outputRowData == null) // no more input to be expected...
            {
                if (openNextFile()) {
                    return true; // try again on the next loop...
                } else {
                    setOutputDone(); // last file, end here
                    return false;
                }
            } else {
                putRow(data.outputRowMeta, outputRowData); // copy row to possible alternate rowset(s).
                if (checkFeedback(getLinesInput())) {
                    if (log.isBasic())
                        logBasic(BaseMessages.getString(PKG, "CsvInput.Log.LineNumber", Long.toString(getLinesInput()))); //$NON-NLS-1$
                }
            }
        } catch (KettleConversionException e) {
            if (getStepMeta().isDoingErrorHandling()) {
                // Aggregate all conversion errors of this row into one error row.
                StringBuffer errorDescriptions = new StringBuffer(100);
                StringBuffer errorFields = new StringBuffer(50);
                for (int i = 0; i < e.getCauses().size(); i++) {
                    if (i > 0) {
                        errorDescriptions.append(", "); //$NON-NLS-1$
                        errorFields.append(", "); //$NON-NLS-1$
                    }
                    errorDescriptions.append(e.getCauses().get(i).getMessage());
                    errorFields.append(e.getFields().get(i).toStringMeta());
                }

                putError(data.outputRowMeta, e.getRowData(), e.getCauses().size(), errorDescriptions.toString(), errorFields.toString(), "CSVINPUT001"); //$NON-NLS-1$
            } else {
                // Only forward the first cause.
                throw new KettleException(e.getMessage(), e.getCauses().get(0));
            }
        }

        return true;
    }

    /**
     * Computes the byte range this step copy must read when running in parallel:
     * sums all file sizes, divides the total by the number of step copies, and
     * derives the starting file and the in-file byte offset to skip to.
     */
    private void prepareToRunInParallel() throws KettleException {
        try {
            // At this point it doesn't matter if we have 1 or more files.
            // We'll use the same algorithm...
            for (String filename : data.filenames) {
                long size = KettleVFS.getFileObject(filename, getTransMeta()).getContent().getSize();
                data.fileSizes.add(size);
                data.totalFileSize += size;
            }

            // Now we can determine the range to read.
            // For example, the total file size is 50000, spread over 5 files of 10000
            // Suppose we have 2 step copies running (clustered or not)
            // That means step 0 has to read 0-24999 and step 1 has to read 25000-49999
            //
            // The size of the block to read (25000 in the example) :
            data.blockToRead = Math.round((double) data.totalFileSize / (double) data.totalNumberOfSteps);

            // Now we calculate the position to read (0 and 25000 in our sample) :
            data.startPosition = data.blockToRead * data.stepNumber;
            data.endPosition = data.startPosition + data.blockToRead;

            // Determine the start file number (0 or 2 in our sample) :
            // >0<,1000,>2000<,3000,4000
            long totalFileSize = 0L;
            for (int i = 0; i < data.fileSizes.size(); i++) {
                long size = data.fileSizes.get(i);

                // Start of file range: totalFileSize
                // End of file range: totalFileSize+size
                if (data.startPosition >= totalFileSize && data.startPosition < totalFileSize + size) {
                    // This is the file number to start reading from...
                    data.filenr = i;

                    // remember where we started to read to allow us to know that we have to skip the header row in the next files (if any)
                    data.startFilenr = i;

                    // How many bytes do we skip in that first file?
                    if (data.startPosition == 0) {
                        data.bytesToSkipInFirstFile = 0L;
                    } else {
                        data.bytesToSkipInFirstFile = data.startPosition - totalFileSize;
                    }

                    break;
                }
                totalFileSize += size;
            }

            if (data.filenames.length > 0)
                logBasic(BaseMessages.getString(PKG, "CsvInput.Log.ParallelFileNrAndPositionFeedback", data.filenames[data.filenr], Long.toString(data.fileSizes.get(data.filenr)), Long.toString(data.bytesToSkipInFirstFile), Long.toString(data.blockToRead))); //$NON-NLS-1$
        } catch (Exception e) {
            throw new KettleException(BaseMessages.getString(PKG, "CsvInput.Exception.ErrorPreparingParallelRun"), e); //$NON-NLS-1$
        }
    }

    /**
     * Drains all rows from the previous step(s) and collects the values of the
     * configured filename field into data.filenames.
     *
     * @throws KettleException when the filename field cannot be found in the input rows.
     */
    private void getFilenamesFromPreviousSteps() throws KettleException {
        List<String> filenames = new ArrayList<String>();
        boolean firstRow = true;
        int index = -1;
        Object[] row = getRow();
        while (row != null) {

            if (firstRow) {
                firstRow = false;

                // Get the filename field index...
                String filenameField = environmentSubstitute(meta.getFilenameField());
                index = getInputRowMeta().indexOfValue(filenameField);
                if (index < 0) {
                    throw new KettleException(BaseMessages.getString(PKG, "CsvInput.Exception.FilenameFieldNotFound", filenameField)); //$NON-NLS-1$
                }
            }

            String filename = getInputRowMeta().getString(row, index);
            filenames.add(filename); // add it to the list...

            row = getRow(); // Grab another row...
        }

        data.filenames = filenames.toArray(new String[filenames.size()]);

        logBasic(BaseMessages.getString(PKG, "CsvInput.Log.ReadingFromNrFiles", Integer.toString(data.filenames.length))); //$NON-NLS-1$
    }

    /**
     * Releases the file channel and input stream (best-effort: close failures
     * are logged, not rethrown, so the second resource still gets closed).
     */
    @Override
    public void dispose(StepMetaInterface smi, StepDataInterface sdi) {
        try {
            // Close the previous file...
            if (data.fc != null) {
                data.fc.close();
            }
        } catch (Exception e) {
            logError("Error closing file channel", e);
        }

        try {
            if (data.fis != null) {
                data.fis.close();
            }
        } catch (Exception e) {
            logError("Error closing file input stream", e);
        }

        super.dispose(smi, sdi);
    }

    /**
     * Closes the current file (if any) and opens the next one in data.filenames,
     * positioning past the parallel-skip offset and the header row as configured.
     *
     * @return true when a file was opened, false when there are no more files.
     */
    private boolean openNextFile() throws KettleException {
        try {
            // Close the previous file...
            if (data.fc != null) {
                data.fc.close();
            }

            if (data.fis != null) {
                data.fis.close();
            }

            if (data.filenr >= data.filenames.length) {
                return false;
            }

            // Open the next one...
            FileObject fileObject = KettleVFS.getFileObject(data.filenames[data.filenr], getTransMeta());
            if (!(fileObject instanceof LocalFile)) {
                // We can only use NIO on local files at the moment, so that's what we limit ourselves to.
                throw new KettleException(BaseMessages.getString(PKG, "CsvInput.Log.OnlyLocalFilesAreSupported")); //$NON-NLS-1$
            }

            if (meta.isLazyConversionActive()) {
                data.binaryFilename = data.filenames[data.filenr].getBytes();
            }

            data.fis = new FileInputStream(KettleVFS.getFilename(fileObject));
            data.fc = data.fis.getChannel();
            data.bb = ByteBuffer.allocateDirect(data.preferredBufferSize);

            // If we are running in parallel and we need to skip bytes in the first file, let's do so here.
            if (data.parallel) {
                if (data.bytesToSkipInFirstFile > 0) {
                    data.fc.position(data.bytesToSkipInFirstFile);

                    // Now, we need to skip the first row, until the first CR that is.
                    readOneRow(false);
                }
            }

            // Add filename to result filenames ?
            if (meta.isAddResultFile()) {
                ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject, getTransMeta().getName(), toString());
                resultFile.setComment("File was read by a Csv input step");
                addResultFile(resultFile);
            }

            // Move to the next filename
            data.filenr++;

            // See if we need to skip a row...
            // - If you have a header row checked and if you're not running in parallel
            // - If you're running in parallel, if a header row is checked, if you're at the beginning of a file
            if (meta.isHeaderPresent()) {
                if ((!data.parallel) || // Standard flat file : skip header
                        (data.parallel && data.bytesToSkipInFirstFile <= 0)) {
                    readOneRow(false); // skip this row.
                    logBasic(BaseMessages.getString(PKG, "CsvInput.Log.HeaderRowSkipped", data.filenames[data.filenr - 1])); //$NON-NLS-1$
                }
            }

            // Reset the row number pointer...
            data.rowNumber = 1L;

            // Don't skip again in the next file...
            data.bytesToSkipInFirstFile = -1L;

            return true;
        } catch (KettleException e) {
            throw e;
        } catch (Exception e) {
            throw new KettleException(e);
        }
    }

    /**
     * Check to see if the buffer size is large enough given the data.endBuffer pointer.<br>
     * Resize the buffer if there is not enough room.
     *
     * @return false if everything is OK, true if there is a problem and we should stop.
     * @throws IOException in case there is a I/O problem (read error)
     */
    private boolean checkBufferSize() throws IOException {
        if (data.endBuffer >= data.bufferSize) {
            // Oops, we need to read more data...
            // Better resize this before we read other things in it...
            data.resizeByteBufferArray();

            // Also read another chunk of data, now that we have the space for it...
            int n = data.readBufferFromFile();

            // If we didn't manage to read something, we return true to indicate we're done
            return n < 0;
        }
        return false;
    }

    /**
     * Read a single row of data from the file...
     *
     * @param doConversions if you want to do conversions, set to false for the header row.
     * @return a row of data...
     * @throws KettleException
     */
    private Object[] readOneRow(boolean doConversions) throws KettleException {

        try {

            Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
            int outputIndex = 0;
            boolean newLineFound = false;
            boolean endOfBuffer = false;
            int newLines = 0;
            List<Exception> conversionExceptions = null;
            List<ValueMetaInterface> exceptionFields = null;

            // The strategy is as follows...
            // We read a block of byte[] from the file.
            // We scan for the separators in the file (NOT for line feeds etc)
            // Then we scan that block of data.
            // We keep a byte[] that we extend if needed..
            // At the end of the block we read another, etc.
            //
            // Let's start by looking where we left off reading.
            while (!newLineFound && outputIndex < meta.getInputFields().length) {

                if (checkBufferSize()) {
                    // Last row was being discarded if the last item is null and
                    // there is no end of line delimiter
                    if (outputRowData != null) {
                        // Make certain that at least one record exists before
                        // filling the rest of them with null
                        if (outputIndex > 0) {
                            return (outputRowData);
                        }
                    }

                    return null; // nothing more to read, call it a day.
                }

                // OK, at this point we should have data in the byteBuffer and we should be able to scan for the next
                // delimiter (;)
                // So let's look for a delimiter.
                // Also skip over the enclosures ("), it is NOT taking into account escaped enclosures.
                // Later we can add an option for having escaped or double enclosures in the file. <sigh>
                boolean delimiterFound = false;
                boolean enclosureFound = false;
                int escapedEnclosureFound = 0;
                while (!delimiterFound) {
                    // If we find the first char, we might find others as well ;-)
                    // Single byte delimiters only for now.
                    if (data.delimiterMatcher.matchesPattern(data.byteBuffer, data.endBuffer, data.delimiter)) {
                        delimiterFound = true;
                    }
                    // Perhaps we found a (pre-mature) new line?
                    else if (
                            // In case we are not using an enclosure and in case fields contain new lines
                            // we need to make sure that we check the newlines possible flag.
                            // If the flag is enable we skip newline checking except for the last field in the row.
                            // In that one we can't support newlines without enclosure (handled below).
                            (!meta.isNewlinePossibleInFields() || outputIndex == meta.getInputFields().length - 1) &&
                            (data.crLfMatcher.isReturn(data.byteBuffer, data.endBuffer) ||
                                    data.crLfMatcher.isLineFeed(data.byteBuffer, data.endBuffer))) {

                        if (data.encodingType.equals(EncodingType.DOUBLE_LITTLE_ENDIAN) || data.encodingType.equals(EncodingType.DOUBLE_BIG_ENDIAN)) {
                            data.endBuffer += 2;
                        } else {
                            data.endBuffer++;
                        }

                        data.totalBytesRead++;
                        newLines = 1;

                        if (data.endBuffer >= data.bufferSize) {
                            // Oops, we need to read more data...
                            // Better resize this before we read other things in it...
                            data.resizeByteBufferArray();

                            // Also read another chunk of data, now that we have the space for it...
                            // Ignore EOF, there might be other stuff in the buffer.
                            data.readBufferFromFile();
                        }

                        // re-check for double delimiters...
                        if (data.crLfMatcher.isReturn(data.byteBuffer, data.endBuffer) ||
                                data.crLfMatcher.isLineFeed(data.byteBuffer, data.endBuffer)) {
                            data.endBuffer++;
                            data.totalBytesRead++;
                            newLines = 2;
                            if (data.endBuffer >= data.bufferSize) {
                                // Oops, we need to read more data...
                                // Better resize this before we read other things in it...
                                data.resizeByteBufferArray();

                                // Also read another chunk of data, now that we have the space for it...
                                // Ignore EOF, there might be other stuff in the buffer.
                                data.readBufferFromFile();
                            }
                        }

                        newLineFound = true;
                        delimiterFound = true;
                    }
                    // Perhaps we need to skip over an enclosed part?
                    // We always expect exactly one enclosure character
                    // If we find the enclosure doubled, we consider it escaped.
                    // --> "" is converted to " later on.
                    else if (data.enclosure != null && data.enclosureMatcher.matchesPattern(data.byteBuffer, data.endBuffer, data.enclosure)) {
                        enclosureFound = true;
                        boolean keepGoing;
                        do {
                            if (data.increaseEndBuffer()) {
                                enclosureFound = false;
                                break;
                            }
                            keepGoing = !data.enclosureMatcher.matchesPattern(data.byteBuffer, data.endBuffer, data.enclosure);
                            if (!keepGoing) {
                                // We found an enclosure character.
                                // Read another byte...
                                if (data.increaseEndBuffer()) {
                                    enclosureFound = false;
                                    break;
                                }

                                // If this character is also an enclosure, we can consider the enclosure "escaped".
                                // As such, if this is an enclosure, we keep going...
                                keepGoing = data.enclosureMatcher.matchesPattern(data.byteBuffer, data.endBuffer, data.enclosure);
                                if (keepGoing)
                                    escapedEnclosureFound++;
                            }
                        } while (keepGoing);

                        // Did we reach the end of the buffer?
                        if (data.endBuffer >= data.bufferSize) {
                            newLineFound = true; // consider it a newline to break out of the upper while loop
                            newLines += 2; // to remove the enclosures in case of missing newline on last line.
                            endOfBuffer = true;
                            break;
                        }
                    } else {
                        data.endBuffer++;
                        data.totalBytesRead++;

                        if (checkBufferSize()) {
                            if (data.endBuffer >= data.bufferSize) {
                                newLineFound = true;
                                break;
                            }
                        }
                    }
                }

                // If we're still here, we found a delimiter..
                // Since the starting point never changed really, we just can grab range:
                //
                // [startBuffer-endBuffer[
                //
                // This is the part we want.
                int length = calculateFieldLength(newLineFound, newLines, enclosureFound, endOfBuffer);

                byte[] field = new byte[length];
                System.arraycopy(data.byteBuffer, data.startBuffer, field, 0, length);

                // Did we have any escaped characters in there?
                if (escapedEnclosureFound > 0) {
                    if (log.isRowLevel())
                        logRowlevel("Escaped enclosures found in " + new String(field));
                    field = data.removeEscapedEnclosures(field, escapedEnclosureFound);
                }

                if (doConversions) {
                    if (meta.isLazyConversionActive()) {
                        outputRowData[outputIndex++] = field;
                    } else {
                        // We're not lazy so we convert the data right here and now.
                        // The convert object uses binary storage as such we just have to ask the native type from it.
                        // That will do the actual conversion.
                        ValueMetaInterface sourceValueMeta = data.convertRowMeta.getValueMeta(outputIndex);
                        try {
                            outputRowData[outputIndex++] = sourceValueMeta.convertBinaryStringToNativeType(field);
                        } catch (KettleValueException e) {
                            // There was a conversion error,
                            outputRowData[outputIndex++] = null;

                            if (conversionExceptions == null) {
                                conversionExceptions = new ArrayList<Exception>();
                                exceptionFields = new ArrayList<ValueMetaInterface>();
                            }

                            conversionExceptions.add(e);
                            exceptionFields.add(sourceValueMeta);
                        }
                    }
                } else {
                    outputRowData[outputIndex++] = null; // nothing for the header, no conversions here.
                }

                // OK, move on to the next field...
                if (!newLineFound) {
                    data.endBuffer++;
                    data.totalBytesRead++;
                }
                data.startBuffer = data.endBuffer;
            }

            // See if we reached the end of the line.
            // If not, we need to skip the remaining items on the line until the next newline...
            if (!newLineFound && !checkBufferSize()) {
                do {
                    data.endBuffer++;
                    data.totalBytesRead++;

                    if (checkBufferSize()) {
                        break; // nothing more to read.
                    }

                    // TODO: if we're using quoting we might be dealing with a very dirty file with quoted newlines in trailing fields. (imagine that)
                    // In that particular case we want to use the same logic we use above (refactored a bit) to skip these fields.

                } while (!data.crLfMatcher.isReturn(data.byteBuffer, data.endBuffer) &&
                        !data.crLfMatcher.isLineFeed(data.byteBuffer, data.endBuffer));

                if (!checkBufferSize()) {
                    while (data.crLfMatcher.isReturn(data.byteBuffer, data.endBuffer) ||
                            data.crLfMatcher.isLineFeed(data.byteBuffer, data.endBuffer)) {
                        data.endBuffer++;
                        data.totalBytesRead++;
                        if (checkBufferSize()) {
                            break; // nothing more to read.
                        }
                    }
                }

                // Make sure we start at the right position the next time around.
                data.startBuffer = data.endBuffer;
            }

            // Optionally add the current filename to the mix as well...
            if (meta.isIncludingFilename() && !Const.isEmpty(meta.getFilenameField())) {
                if (meta.isLazyConversionActive()) {
                    outputRowData[data.filenameFieldIndex] = data.binaryFilename;
                } else {
                    outputRowData[data.filenameFieldIndex] = data.filenames[data.filenr - 1];
                }
            }

            if (data.isAddingRowNumber) {
                outputRowData[data.rownumFieldIndex] = Long.valueOf(data.rowNumber++);
            }

            incrementLinesInput();

            if (conversionExceptions != null && conversionExceptions.size() > 0) {
                // Forward the first exception
                throw new KettleConversionException("There were " + conversionExceptions.size() + " conversion errors on line " + getLinesInput(), conversionExceptions, exceptionFields, outputRowData);
            }

            return outputRowData;
        } catch (KettleConversionException e) {
            throw e;
        } catch (Exception e) {
            throw new KettleFileException("Exception reading line using NIO", e);
        }
    }

    /**
     * Derives the number of bytes to copy for the current field from the
     * [startBuffer, endBuffer[ window, compensating for trailing newline bytes
     * and surrounding enclosure characters.
     */
    private int calculateFieldLength(boolean newLineFound, int newLines, boolean enclosureFound, boolean endOfBuffer) {

        int length = data.endBuffer - data.startBuffer;
        if (newLineFound) {
            length -= newLines;
            if (length <= 0)
                length = 0;
            if (endOfBuffer)
                data.startBuffer++; // offset for the enclosure in last field before EOF
        }
        if (enclosureFound) {
            data.startBuffer++;
            length -= 2;
            if (length <= 0)
                length = 0;
        }
        if (length <= 0)
            length = 0;
        if (data.encodingType != EncodingType.SINGLE) {
            // NOTE(review): the statement here was truncated to a bare `length` in the
            // source (a compile error); reconstructed as the trailing-byte adjustment
            // for double-byte encodings — confirm against upstream history.
            length--;
        }
        return length;
    }

    public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
        meta = (CsvInputMeta) smi;
        data = (CsvInputData) sdi;

        if (super.init(smi, sdi)) {
            data.preferredBufferSize = Integer.parseInt(environmentSubstitute(meta.getBufferSize()));

            // If the step doesn't have any previous steps, we just get the filename.
            // Otherwise, we'll grab the list of filenames later...
            if (getTransMeta().findNrPrevSteps(getStepMeta()) == 0) {
                String filename = environmentSubstitute(meta.getFilename());

                if (Const.isEmpty(filename)) {
                    logError(BaseMessages.getString(PKG, "CsvInput.MissingFilename.Message")); //$NON-NLS-1$
                    return false;
                }

                data.filenames = new String[] { filename, };
            } else {
                data.filenames = null;
                data.filenr = 0;
            }

            data.totalBytesRead = 0L;

            data.encodingType = EncodingType.guessEncodingType(meta.getEncoding());

            // PDI-2489 - set the delimiter byte value to the code point of the
            // character as represented in the input file's encoding
            try {
                data.delimiter = data.encodingType.getBytes(environmentSubstitute(meta.getDelimiter()), meta.getEncoding());

                if (Const.isEmpty(meta.getEnclosure())) {
                    data.enclosure = null;
                } else {
                    data.enclosure = data.encodingType.getBytes(environmentSubstitute(meta.getEnclosure()), meta.getEncoding());
                }

            } catch (UnsupportedEncodingException e) {
                logError(BaseMessages.getString(PKG, "CsvInput.BadEncoding.Message"), e); //$NON-NLS-1$
                return false;
            }

            data.isAddingRowNumber = !Const.isEmpty(meta.getRowNumField());

            // Handle parallel reading capabilities...
            data.stopReading = false;

            if (meta.isRunningInParallel()) {
                data.stepNumber = getUniqueStepNrAcrossSlaves();
                data.totalNumberOfSteps = getUniqueStepCountAcrossSlaves();

                // We are not handling a single file, but possibly a list of files...
                // As such, the fair thing to do is calculate the total size of the files
                // Then read the required block.
                data.fileSizes = new ArrayList<Long>();
                data.totalFileSize = 0L;
            }

            // Set the most efficient pattern matcher to match the delimiter.
            if (data.delimiter.length == 1) {
                data.delimiterMatcher = new SingleBytePatternMatcher();
            } else {
                data.delimiterMatcher = new MultiBytePatternMatcher();
            }

            // Set the most efficient pattern matcher to match the enclosure.
            if (data.enclosure == null) {
                data.enclosureMatcher = new EmptyPatternMatcher();
            } else {
                if (data.enclosure.length == 1) {
                    data.enclosureMatcher = new SingleBytePatternMatcher();
                } else {
                    data.enclosureMatcher = new MultiBytePatternMatcher();
                }
            }

            // Pick the CR/LF matcher that understands the detected encoding width.
            switch (data.encodingType) {
            case DOUBLE_BIG_ENDIAN:
                data.crLfMatcher = new MultiByteBigCrLfMatcher();
                break;
            case DOUBLE_LITTLE_ENDIAN:
                data.crLfMatcher = new MultiByteLittleCrLfMatcher();
                break;
            default:
                data.crLfMatcher = new SingleByteCrLfMatcher();
                break;
            }

            return true;
        }
        return false;
    }

    /**
     * Closes the file channel and input stream, wrapping any I/O failure in a
     * KettleException.
     */
    public void closeFile() throws KettleException {
        try {
            if (data.fc != null) {
                data.fc.close();
            }
            if (data.fis != null) {
                data.fis.close();
            }
        } catch (IOException e) {
            throw new KettleException("Unable to close file channel for file '" + data.filenames[data.filenr - 1], e);
        }
    }

    /**
     * This method is borrowed from TextFileInput
     *
     * @param log
     * @param line
     * @param delimiter
     * @param enclosure
     * @param escapeCharacter
     * @return the fields guessed from the line, or null when the line is null
     * @throws KettleException
     */
    public static final String[] guessStringsFromLine(LogChannelInterface log, String line, String delimiter, String enclosure, String escapeCharacter) throws KettleException {
        List<String> strings = new ArrayList<String>();

        String pol; // piece of line

        try {
            if (line == null)
                return null;

            // Split string in pieces, only for CSV!
            int pos = 0;
            int length = line.length();
            boolean dencl = false;

            int len_encl = (enclosure == null ? 0 : enclosure.length());
            int len_esc = (escapeCharacter == null ? 0 : escapeCharacter.length());

            while (pos < length) {
                int from = pos;
                int next;

                boolean encl_found;
                boolean contains_escaped_enclosures = false;
                boolean contains_escaped_separators = false;

                // Is the field beginning with an enclosure?
                // "aa;aa";123;"aaa-aaa";000;...
                if (len_encl > 0 && line.substring(from, from + len_encl).equalsIgnoreCase(enclosure)) {
                    if (log.isRowLevel())
                        log.logRowlevel(BaseMessages.getString(PKG, "CsvInput.Log.ConvertLineToRowTitle"), BaseMessages.getString(PKG, "CsvInput.Log.ConvertLineToRow", line.substring(from, from + len_encl))); //$NON-NLS-1$ //$NON-NLS-2$
                    encl_found = true;
                    int p = from + len_encl;

                    boolean is_enclosure = len_encl > 0 && p + len_encl < length && line.substring(p, p + len_encl).equalsIgnoreCase(enclosure);
                    boolean is_escape = len_esc > 0 && p + len_esc < length && line.substring(p, p + len_esc).equalsIgnoreCase(escapeCharacter);

                    boolean enclosure_after = false;

                    // Is it really an enclosure? See if it's not repeated twice or escaped!
                    if ((is_enclosure || is_escape) && p < length - 1) {
                        String strnext = line.substring(p + len_encl, p + 2 * len_encl);
                        if (strnext.equalsIgnoreCase(enclosure)) {
                            p++;
                            enclosure_after = true;
                            dencl = true;

                            // Remember to replace them later on!
                            if (is_escape)
                                contains_escaped_enclosures = true;
                        }
                    }

                    // Look for a closing enclosure!
                    while ((!is_enclosure || enclosure_after) && p < line.length()) {
                        p++;
                        enclosure_after = false;
                        is_enclosure = len_encl > 0 && p + len_encl < length && line.substring(p, p + len_encl).equals(enclosure);
                        is_escape = len_esc > 0 && p + len_esc < length && line.substring(p, p + len_esc).equals(escapeCharacter);

                        // Is it really an enclosure? See if it's not repeated twice or escaped!
                        if ((is_enclosure || is_escape) && p < length - 1) {
                            String strnext = line.substring(p + len_encl, p + 2 * len_encl);
                            if (strnext.equals(enclosure)) {
                                p++;
                                enclosure_after = true;
                                dencl = true;

                                // Remember to replace them later on!
                                if (is_escape)
                                    contains_escaped_enclosures = true; // remember
                            }
                        }
                    }

                    if (p >= length)
                        next = p;
                    else
                        next = p + len_encl;

                    if (log.isRowLevel())
                        log.logRowlevel(BaseMessages.getString(PKG, "CsvInput.Log.ConvertLineToRowTitle"), BaseMessages.getString(PKG, "CsvInput.Log.EndOfEnclosure", "" + p)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                } else {
                    encl_found = false;
                    boolean found = false;
                    int startpoint = from;
                    do {
                        next = line.indexOf(delimiter, startpoint);

                        // See if this position is preceded by an escape character.
                        if (len_esc > 0 && next - len_esc > 0) {
                            String before = line.substring(next - len_esc, next);

                            if (escapeCharacter != null && escapeCharacter.equals(before)) {
                                // take the next separator, this one is escaped...
                                startpoint = next + 1;
                                contains_escaped_separators = true;
                            } else {
                                found = true;
                            }
                        } else {
                            found = true;
                        }
                    } while (!found && next >= 0);
                }
                if (next == -1)
                    next = length;

                if (encl_found) {
                    pol = line.substring(from + len_encl, next - len_encl);
                    if (log.isRowLevel())
                        log.logRowlevel(BaseMessages.getString(PKG, "CsvInput.Log.ConvertLineToRowTitle"), BaseMessages.getString(PKG, "CsvInput.Log.EnclosureFieldFound", "" + pol)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                } else {
                    pol = line.substring(from, next);
                    if (log.isRowLevel())
                        log.logRowlevel(BaseMessages.getString(PKG, "CsvInput.Log.ConvertLineToRowTitle"), BaseMessages.getString(PKG, "CsvInput.Log.NormalFieldFound", "" + pol)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                }

                if (dencl) {
                    // Collapse doubled enclosures ("" -> ") inside the field.
                    StringBuilder sbpol = new StringBuilder(pol);
                    int idx = sbpol.indexOf(enclosure + enclosure);
                    while (idx >= 0) {
                        sbpol.delete(idx, idx + (enclosure == null ? 0 : enclosure.length()));
                        idx = sbpol.indexOf(enclosure + enclosure);
                    }
                    pol = sbpol.toString();
                }

                // replace the escaped enclosures with enclosures...
                if (contains_escaped_enclosures) {
                    String replace = escapeCharacter + enclosure;
                    String replaceWith = enclosure;

                    pol = Const.replace(pol, replace, replaceWith);
                }

                // replace the escaped separators with separators...
                if (contains_escaped_separators) {
                    String replace = escapeCharacter + delimiter;
                    String replaceWith = delimiter;

                    pol = Const.replace(pol, replace, replaceWith);
                }

                // Now add pol to the strings found!
                strings.add(pol);

                pos = next + delimiter.length();
            }
            if (pos == length) {
                if (log.isRowLevel())
                    log.logRowlevel(BaseMessages.getString(PKG, "CsvInput.Log.ConvertLineToRowTitle"), BaseMessages.getString(PKG, "CsvInput.Log.EndOfEmptyLineFound")); //$NON-NLS-1$ //$NON-NLS-2$
                strings.add(""); //$NON-NLS-1$
            }
        } catch (Exception e) {
            throw new KettleException(BaseMessages.getString(PKG, "CsvInput.Log.Error.ErrorConvertingLine", e.toString()), e); //$NON-NLS-1$
        }

        return strings.toArray(new String[strings.size()]);
    }

    public boolean isWaitingForData() {
        return true;
    }
}
package com.kii.thingif; import android.content.Context; import android.content.SharedPreferences; import android.os.Parcel; import android.os.Parcelable; import android.text.TextUtils; import android.util.Pair; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.WorkerThread; import com.google.gson.JsonParseException; import com.kii.thingif.command.Action; import com.kii.thingif.command.Command; import com.kii.thingif.command.CommandForm; import com.kii.thingif.exception.StoredInstanceNotFoundException; import com.kii.thingif.exception.ThingIFException; import com.kii.thingif.exception.ThingIFRestException; import com.kii.thingif.exception.UnloadableInstanceVersionException; import com.kii.thingif.exception.UnsupportedActionException; import com.kii.thingif.exception.UnsupportedSchemaException; import com.kii.thingif.gateway.EndNode; import com.kii.thingif.gateway.Gateway; import com.kii.thingif.gateway.PendingEndNode; import com.kii.thingif.internal.GsonRepository; import com.kii.thingif.internal.http.IoTRestClient; import com.kii.thingif.internal.http.IoTRestRequest; import com.kii.thingif.schema.Schema; import com.kii.thingif.trigger.ServerCode; import com.kii.thingif.trigger.Predicate; import com.kii.thingif.trigger.Trigger; import com.kii.thingif.internal.utils.JsonUtils; import com.kii.thingif.internal.utils.Path; import com.kii.thingif.trigger.TriggerOptions; import com.kii.thingif.trigger.TriggeredCommandForm; import com.kii.thingif.trigger.TriggeredServerCodeResult; import com.kii.thingif.trigger.TriggersWhat; import com.squareup.okhttp.MediaType; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * This class operates an IoT device that is specified by {@link #onboard(String, String, String, JSONObject)} method. 
 */
public class ThingIFAPI implements Parcelable {

    // SharedPreferences keys under which serialized instances are persisted.
    // An optional tag is appended ("_<tag>") to distinguish multiple instances.
    private static final String SHARED_PREFERENCES_KEY_INSTANCE = "ThingIFAPI_INSTANCE";
    private static final String SHARED_PREFERENCES_SDK_VERSION_KEY = "ThingIFAPI_VERSION";
    // Oldest SDK version whose serialized instances this version can still load.
    private static final String MINIMUM_LOADABLE_SDK_VERSION = "0.13.0";

    // Application context shared by all instances (set on construction/load).
    private static Context context;
    // Optional key distinguishing stored instances; null means the default slot.
    private final String tag;
    private final KiiApp app;
    private final Owner owner;
    // Null until onboarding completes; set by the onboard() family of methods.
    private Target target;
    // Registered schemas keyed by (schemaName, schemaVersion).
    private final Map<Pair<String, Integer>, Schema> schemas = new HashMap<Pair<String, Integer>, Schema>();
    private final IoTRestClient restClient;
    // Push-installation ID; null until installPush() succeeds.
    private String installationID;

    /**
     * Try to load the instance of ThingIFAPI using stored serialized instance.
     * <BR>
     * Instance is automatically saved when following methods are called.
     * <BR>
     * {@link #onboard(String, String, String, JSONObject)}, {@link #onboard(String, String)},
     * {@link #copyWithTarget(Target, String)}
     * and {@link #installPush} has been successfully completed.
     * <BR>
     * (When {@link #copyWithTarget(Target, String)} is called, only the copied instance is saved.)
     * <BR>
     * <BR>
     *
     * If the ThingIFAPI instance is built without the tag, all instances are saved in the same place
     * and overwritten when the instance is saved.
     * <BR>
     * <BR>
     *
     * If the ThingIFAPI instance is built with the tag (optional), the tag is used as a key to distinguish
     * the storage area to save the instance. This is useful for saving multiple instances.
     * You need to specify the tag to load the instance by the
     * {@link #loadFromStoredInstance(Context, String) api}.
     *
     * When you catch exceptions, please call {@link #onboard(String, String, String, JSONObject)}
     * for saving or updating serialized instance.
     *
     * @param context context
     * @return ThingIFAPI instance.
     * @throws StoredInstanceNotFoundException when the instance has not stored yet.
     * @throws UnloadableInstanceVersionException when the instance couldn't be loaded.
*/ @NonNull public static ThingIFAPI loadFromStoredInstance(@NonNull Context context) throws StoredInstanceNotFoundException, UnloadableInstanceVersionException { return loadFromStoredInstance(context, null); } /** * Try to load the instance of ThingIFAPI using stored serialized instance. * <BR> * For details please refer to the {@link #loadFromStoredInstance(Context)} document. * * @param context context * @param tag specified when the ThingIFAPI has been built. * @return ThingIFAPI instance. * @throws StoredInstanceNotFoundException when the instance has not stored yet. * @throws UnloadableInstanceVersionException when the instance couldn't be loaded. */ @NonNull public static ThingIFAPI loadFromStoredInstance(@NonNull Context context, String tag) throws StoredInstanceNotFoundException, UnloadableInstanceVersionException { ThingIFAPI.context = context.getApplicationContext(); SharedPreferences preferences = getSharedPreferences(); String serializedJson = preferences.getString(getStoredInstanceKey(tag), null); if (serializedJson == null) { throw new StoredInstanceNotFoundException(tag); } String storedSDKVersion = preferences.getString(getStoredSDKVersionKey(tag), null); if (!isLoadableSDKVersion(storedSDKVersion)) { throw new UnloadableInstanceVersionException(tag, storedSDKVersion, MINIMUM_LOADABLE_SDK_VERSION); } return GsonRepository.gson().fromJson(serializedJson, ThingIFAPI.class); } /** * Clear all saved instances in the SharedPreferences. */ public static void removeAllStoredInstances() { SharedPreferences preferences = getSharedPreferences(); SharedPreferences.Editor editor = preferences.edit(); editor.clear(); editor.apply(); } /** * Remove saved specified instance in the SharedPreferences. * * @param tag tag to specify stored instance. 
*/ public static void removeStoredInstance(@Nullable String tag) { SharedPreferences preferences = getSharedPreferences(); SharedPreferences.Editor editor = preferences.edit(); editor.remove(getStoredSDKVersionKey(tag)); editor.remove(getStoredInstanceKey(tag)); editor.apply(); } private static void saveInstance(ThingIFAPI instance) { SharedPreferences preferences = getSharedPreferences(); if (preferences != null) { SharedPreferences.Editor editor = preferences.edit(); editor.putString(getStoredSDKVersionKey(instance.tag), SDKVersion.versionString); editor.putString(getStoredInstanceKey(instance.tag), GsonRepository.gson().toJson(instance)); editor.apply(); } } private static String getStoredInstanceKey(String tag) { return SHARED_PREFERENCES_KEY_INSTANCE + (tag == null ? "" : "_" +tag); } private static String getStoredSDKVersionKey(String tag) { return SHARED_PREFERENCES_SDK_VERSION_KEY + (tag == null ? "" : "_" +tag); } private static boolean isLoadableSDKVersion(String storedSDKVersion) { if (storedSDKVersion == null) { return false; } String[] actualVersions = storedSDKVersion.split("\\."); if (actualVersions.length != 3) { return false; } String[] minimumLoadableVersions = ThingIFAPI.MINIMUM_LOADABLE_SDK_VERSION.split("\\."); for (int i = 0; i < 3; ++i) { int actual = Integer.parseInt(actualVersions[i]); int expect = Integer.parseInt(minimumLoadableVersions[i]); if (actual < expect) { return false; } else if (actual > expect) { break; } } return true; } ThingIFAPI( @Nullable Context context, @Nullable String tag, @NonNull KiiApp app, @NonNull Owner owner, @Nullable Target target, @NonNull List<Schema> schemas, String installationID) { // Parameters are checked by ThingIFAPIBuilder if (context != null) { ThingIFAPI.context = context.getApplicationContext(); } this.tag = tag; this.app = app; this.owner = owner; this.target = target; for (Schema schema : schemas) { this.schemas.put(new Pair<String, Integer>(schema.getSchemaName(), schema.getSchemaVersion()), 
schema); } this.installationID = installationID; this.restClient = new IoTRestClient(); } /** * Create the clone instance that has specified target and tag. * * @param target coping target. * @param tag A key to store instnace. * @return ThingIFAPI instance */ public ThingIFAPI copyWithTarget(@NonNull Target target, @Nullable String tag) { if (target == null) { throw new IllegalArgumentException("target is null"); } ThingIFAPI api = new ThingIFAPI(context, tag, this.app, this.owner, target, new ArrayList<Schema>(this.schemas.values()), this.installationID); saveInstance(api); return api; } @NonNull @WorkerThread public Target onboard( @NonNull String vendorThingID, @NonNull String thingPassword, @Nullable String thingType, @Nullable JSONObject thingProperties) throws ThingIFException { OnboardWithVendorThingIDOptions.Builder builder = new OnboardWithVendorThingIDOptions.Builder(); builder.setThingType(thingType).setThingProperties(thingProperties); return onboardWithVendorThingID(vendorThingID, thingPassword, builder.build()); } @NonNull @WorkerThread public Target onboard( @NonNull String vendorThingID, @NonNull String thingPassword, @Nullable OnboardWithVendorThingIDOptions options) throws ThingIFException { return onboardWithVendorThingID(vendorThingID, thingPassword, options); } private Target onboardWithVendorThingID( String vendorThingID, String thingPassword, OnboardWithVendorThingIDOptions options) throws ThingIFException { if (this.onboarded()) { throw new IllegalStateException("This instance is already onboarded."); } if (TextUtils.isEmpty(vendorThingID)) { throw new IllegalArgumentException("vendorThingID is null or empty"); } if (TextUtils.isEmpty(thingPassword)) { throw new IllegalArgumentException("thingPassword is null or empty"); } JSONObject requestBody = new JSONObject(); LayoutPosition layoutPosition = null; try { requestBody.put("vendorThingID", vendorThingID); requestBody.put("thingPassword", thingPassword); if (options != null) { String 
thingType = options.getThingType(); String firmwareVersion = options.getFirmwareVersion(); JSONObject thingProperties = options.getThingProperties(); layoutPosition = options.getLayoutPosition(); DataGroupingInterval dataGroupingInterval = options.getDataGroupingInterval(); if (thingType != null) { requestBody.put("thingType", thingType); } if (firmwareVersion != null) { requestBody.put("firmwareVersion", firmwareVersion); } if (thingProperties != null && thingProperties.length() > 0) { requestBody.put("thingProperties", thingProperties); } if (layoutPosition != null) { requestBody.put("layoutPosition", layoutPosition.name()); } if (dataGroupingInterval != null) { requestBody.put("dataGroupingInterval", dataGroupingInterval.getInterval()); } } requestBody.put("owner", this.owner.getTypedID().toString()); } catch (JSONException e) { } return this.onboard(MediaTypes.MEDIA_TYPE_ONBOARDING_WITH_VENDOR_THING_ID_BY_OWNER_REQUEST, requestBody, vendorThingID, layoutPosition); } @NonNull @WorkerThread public Target onboard( @NonNull String thingID, @NonNull String thingPassword) throws ThingIFException { return onboardWithThingID(thingID, thingPassword, null); } @NonNull @WorkerThread public Target onboard( @NonNull String thingID, @NonNull String thingPassword, @Nullable OnboardWithThingIDOptions options) throws ThingIFException { return onboardWithThingID(thingID, thingPassword, options); } private Target onboardWithThingID( String thingID, String thingPassword, OnboardWithThingIDOptions options) throws ThingIFException { if (this.onboarded()) { throw new IllegalStateException("This instance is already onboarded."); } if (TextUtils.isEmpty(thingID)) { throw new IllegalArgumentException("thingID is null or empty"); } if (TextUtils.isEmpty(thingPassword)) { throw new IllegalArgumentException("thingPassword is null or empty"); } JSONObject requestBody = new JSONObject(); LayoutPosition layoutPosition = null; try { requestBody.put("thingID", thingID); 
requestBody.put("thingPassword", thingPassword); requestBody.put("owner", this.owner.getTypedID().toString()); if (options != null) { layoutPosition = options.getLayoutPosition(); DataGroupingInterval dataGroupingInterval = options.getDataGroupingInterval(); if (layoutPosition != null) { requestBody.put("layoutPosition", layoutPosition.name()); } if (dataGroupingInterval != null) { requestBody.put("dataGroupingInterval", dataGroupingInterval.getInterval()); } } } catch (JSONException e) { } // FIXME: Currently, Server does not return the VendorThingID when onboarding is successful. return this.onboard(MediaTypes.MEDIA_TYPE_ONBOARDING_WITH_THING_ID_BY_OWNER_REQUEST, requestBody, null, layoutPosition); } private Target onboard(MediaType contentType, JSONObject requestBody, String vendorThingID, LayoutPosition layoutPosition) throws ThingIFException { String path = MessageFormat.format("/thing-if/apps/{0}/onboardings", this.app.getAppID()); String url = Path.combine(this.app.getBaseUrl(), path); Map<String, String> headers = this.newHeader(); IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.POST, headers, contentType, requestBody); JSONObject responseBody = this.restClient.sendRequest(request); String thingID = responseBody.optString("thingID", null); String accessToken = responseBody.optString("accessToken", null); if (layoutPosition == LayoutPosition.GATEWAY) { this.target = new Gateway(thingID, vendorThingID); } else if (layoutPosition == LayoutPosition.ENDNODE) { this.target = new EndNode(thingID, vendorThingID, accessToken); } else { this.target = new StandaloneThing(thingID, vendorThingID, accessToken); } saveInstance(this); return this.target; } public EndNode onboardEndnodeWithGateway( @NonNull PendingEndNode pendingEndNode, @NonNull String endnodePassword) throws ThingIFException { return onboardEndNodeWithGateway(pendingEndNode, endnodePassword, null); } public EndNode onboardEndnodeWithGateway( @NonNull PendingEndNode pendingEndNode, 
@NonNull String endnodePassword, @Nullable OnboardEndnodeWithGatewayOptions options) throws ThingIFException { return onboardEndNodeWithGateway(pendingEndNode, endnodePassword, options); } private EndNode onboardEndNodeWithGateway( PendingEndNode pendingEndNode, String endnodePassword, @Nullable OnboardEndnodeWithGatewayOptions options) throws ThingIFException { if (this.target == null) { throw new IllegalStateException("Can not perform this action before onboarding the gateway"); } if (this.target instanceof EndNode) { throw new IllegalStateException("Target must be Gateway"); } if (pendingEndNode == null) { throw new IllegalArgumentException("pendingEndNode is null or empty"); } if (TextUtils.isEmpty(pendingEndNode.getVendorThingID())) { throw new IllegalArgumentException("vendorThingID is null or empty"); } if (TextUtils.isEmpty(endnodePassword)) { throw new IllegalArgumentException("endnodePassword is null or empty"); } JSONObject requestBody = new JSONObject(); try { requestBody.put("gatewayThingID", this.target.getTypedID().getID()); requestBody.put("endNodeVendorThingID", pendingEndNode.getVendorThingID()); requestBody.put("endNodePassword", endnodePassword); if (!TextUtils.isEmpty(pendingEndNode.getThingType())) { requestBody.put("endNodeThingType", pendingEndNode.getThingType()); } if (pendingEndNode.getThingProperties() != null && pendingEndNode.getThingProperties().length() > 0) { requestBody.put("endNodeThingProperties", pendingEndNode.getThingProperties()); } if (options != null) { DataGroupingInterval dataGroupingInterval = options.getDataGroupingInterval(); if (dataGroupingInterval != null) { requestBody.put("dataGroupingInterval", dataGroupingInterval.getInterval()); } } requestBody.put("owner", this.owner.getTypedID().toString()); } catch (JSONException e) { } String path = MessageFormat.format("/thing-if/apps/{0}/onboardings", this.app.getAppID()); String url = Path.combine(this.app.getBaseUrl(), path); Map<String, String> headers = 
this.newHeader(); IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.POST, headers, MediaTypes.MEDIA_TYPE_ONBOARDING_ENDNODE_WITH_GATEWAY_THING_ID_REQUEST, requestBody); JSONObject responseBody = this.restClient.sendRequest(request); String thingID = responseBody.optString("endNodeThingID", null); String accessToken = responseBody.optString("accessToken", null); return new EndNode(thingID, pendingEndNode.getVendorThingID(), accessToken); } /** * Checks whether on boarding is done. * @return true if done, otherwise false. */ public boolean onboarded() { return this.target != null; } /** * Install push notification to receive notification from IoT Cloud. This will install on production environment. * IoT Cloud will send notification when the Target replies to the Command. * Application can receive the notification and check the result of Command * fired by Application or registered Trigger. * After installation is done Installation ID is managed in this class. * @param deviceToken for GCM, specify token obtained by * InstanceID.getToken(). * for JPUSH, specify id obtained by * JPushInterface.getUdid(). * @param pushBackend Specify backend to use. * @return Installation ID used in IoT Cloud. * @throws ThingIFException Thrown when failed to connect IoT Cloud Server. * @throws ThingIFRestException Thrown when server returns error response. * @see #installPush(String, PushBackend, boolean) for development/production environment installation. */ @NonNull @WorkerThread public String installPush( @Nullable String deviceToken, @NonNull PushBackend pushBackend ) throws ThingIFException { return this.installPush(deviceToken, pushBackend, false); } /** * Install push notification to receive notification from IoT Cloud. * IoT Cloud will send notification when the Target replies to the Command. * Application can receive the notification and check the result of Command * fired by Application or registered Trigger. 
* After installation is done Installation ID is managed in this class. * @param deviceToken for GCM, specify token obtained by * InstanceID.getToken(). * for JPUSH, specify id obtained by * JPushInterface.getUdid(). * @param pushBackend Specify backend to use. * @param development Specify development flag to use. Indicates if the installation is for development or production environment. * @return Installation ID used in IoT Cloud. * @throws ThingIFException Thrown when failed to connect IoT Cloud Server. * @throws ThingIFRestException Thrown when server returns error response. */ @NonNull @WorkerThread public String installPush( @Nullable String deviceToken, @NonNull PushBackend pushBackend, boolean development ) throws ThingIFException{ if (pushBackend == null) { throw new IllegalArgumentException("pushBackend is null"); } String path = MessageFormat.format("/api/apps/{0}/installations", this.app.getAppID()); String url = Path.combine(this.app.getBaseUrl(), path); Map<String, String> headers = this.newHeader(); JSONObject requestBody = new JSONObject(); try { if (!TextUtils.isEmpty(deviceToken)) { requestBody.put("installationRegistrationID", deviceToken); } if (development){ requestBody.put("development", true); } requestBody.put("deviceType", pushBackend.getDeviceType()); } catch (JSONException e) { } IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.POST, headers, MediaTypes.MEDIA_TYPE_INSTALLATION_CREATION_REQUEST, requestBody); JSONObject responseBody = this.restClient.sendRequest(request); this.installationID = responseBody.optString("installationID", null); saveInstance(this); return this.installationID; } /** * Get installationID if the push is already installed. * null will be returned if the push installation has not been done. * @return Installation ID used in IoT Cloud. */ @Nullable public String getInstallationID() { return this.installationID; } /** * Uninstall push notification. 
* After done, notification from IoT Cloud won't be notified. * @param installationID installation ID returned from * {@link #installPush(String, PushBackend)} * if null is specified, value obtained by * {@link #getInstallationID()} is used. * @throws ThingIFException Thrown when failed to connect IoT Cloud Server. * @throws ThingIFRestException Thrown when server returns error response. */ @NonNull @WorkerThread public void uninstallPush(@NonNull String installationID) throws ThingIFException { if (installationID == null) { throw new IllegalArgumentException("installationID is null"); } String path = MessageFormat.format("/api/apps/{0}/installations/{1}", this.app.getAppID(), installationID); String url = Path.combine(this.app.getBaseUrl(), path); Map<String, String> headers = this.newHeader(); IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.DELETE, headers); this.restClient.sendRequest(request); } /** * Post new command to IoT Cloud. * Command will be delivered to specified target and result will be notified * through push notification. * @param form form of command. It contains name of schema, version of * schema, list of actions etc. * @return Created Command instance. At this time, Command is delivered to * the target Asynchronously and may not finished. Actual Result will be * delivered through push notification or you can check the latest status * of the command by calling {@link #getCommand}. * @throws ThingIFException Thrown when failed to connect IoT Cloud Server. * @throws ThingIFRestException Thrown when server returns error response. 
*/ @NonNull @WorkerThread public Command postNewCommand( @NonNull CommandForm form) throws ThingIFException { if (this.target == null) { throw new IllegalStateException("Can not perform this action before onboarding"); } final String schemaName = form.getSchemaName(); final int schemaVersion = form.getSchemaVersion(); Schema schema = this.getSchema(schemaName, schemaVersion); if (schema == null) { throw new UnsupportedSchemaException(schemaName, schemaVersion); } String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/commands", this.app.getAppID(), this.target.getTypedID().toString()); String url = Path.combine(this.app.getBaseUrl(), path); Map<String, String> headers = this.newHeader(); JSONObject requestBody = createPostNewCommandRequestBody(form); IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.POST, headers, MediaTypes.MEDIA_TYPE_JSON, requestBody); JSONObject responseBody = this.restClient.sendRequest(request); String commandID = responseBody.optString("commandID", null); return this.getCommand(commandID); } /** * Get specified command. * @param commandID ID of the command to obtain. ID is present in the * instance returned by {@link #postNewCommand} * and can be obtained by {@link Command#getCommandID} * * @return Command instance. * @throws ThingIFException Thrown when failed to connect IoT Cloud Server. * @throws ThingIFRestException Thrown when server returns error response. * @throws UnsupportedSchemaException Thrown when the returned response has a schema that cannot handle this instance. * @throws UnsupportedActionException Thrown when the returned response has a action that cannot handle this instance. 
*/ @NonNull @WorkerThread public Command getCommand( @NonNull String commandID) throws ThingIFException { if (this.target == null) { throw new IllegalStateException("Can not perform this action before onboarding"); } if (TextUtils.isEmpty(commandID)) { throw new IllegalArgumentException("commandID is null or empty"); } String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/commands/{2}", this.app.getAppID(), this.target.getTypedID().toString(), commandID); String url = Path.combine(this.app.getBaseUrl(), path); Map<String, String> headers = this.newHeader(); IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.GET, headers); JSONObject responseBody = this.restClient.sendRequest(request); String schemaName = responseBody.optString("schema", null); int schemaVersion = responseBody.optInt("schemaVersion"); Schema schema = this.getSchema(schemaName, schemaVersion); if (schema == null) { throw new UnsupportedSchemaException(schemaName, schemaVersion); } return this.deserialize(schema, responseBody, Command.class); } /** * List Commands in the specified Target.<br> * If the Schema of the Command included in the response does not matches with the Schema * registered this ThingIfAPI instance, It won't be included in returned value. * @param bestEffortLimit Maximum number of the Commands in the response. * if the value is {@literal <}= 0, default limit internally * defined is applied. * Meaning of 'bestEffort' is if the specified limit * is greater than default limit, default limit is * applied. * @param paginationKey Used to get the next page of previously obtained. * If there is further page to obtain, this method * returns paginationKey as the 2nd element of pair. * Applying this key to the argument results continue * to get the result from the next page. * @return 1st Element is Commands belongs to the Target. 2nd element is * paginationKey if there is next page to be obtained. 
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 * @throws ThingIFRestException Thrown when server returns error response.
 * @throws UnsupportedActionException Thrown when the returned response has a action that cannot handle this instance.
 */
@NonNull
public Pair<List<Command>, String> listCommands (
        int bestEffortLimit,
        @Nullable String paginationKey) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/commands",
            this.app.getAppID(), this.target.getTypedID().toString());
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.GET, headers);
    // bestEffortLimit <= 0 means "use the server-side default limit".
    if (bestEffortLimit > 0) {
        request.addQueryParameter("bestEffortLimit", bestEffortLimit);
    }
    if (!TextUtils.isEmpty(paginationKey)) {
        request.addQueryParameter("paginationKey", paginationKey);
    }
    JSONObject responseBody = this.restClient.sendRequest(request);
    String nextPaginationKey = responseBody.optString("nextPaginationKey", null);
    JSONArray commandArray = responseBody.optJSONArray("commands");
    List<Command> commands = new ArrayList<Command>();
    if (commandArray != null) {
        for (int i = 0; i < commandArray.length(); i++) {
            JSONObject commandJson = commandArray.optJSONObject(i);
            String schemaName = commandJson.optString("schema", null);
            int schemaVersion = commandJson.optInt("schemaVersion");
            Schema schema = this.getSchema(schemaName, schemaVersion);
            // Commands whose schema is unknown to this instance are silently
            // skipped — documented behavior of this method.
            if (schema == null) {
                continue;
            }
            commands.add(this.deserialize(schema, commandJson, Command.class));
        }
    }
    return new Pair<List<Command>, String>(commands, nextPaginationKey);
}

/**
 * Post a new command-type Trigger built from the given form.
 *
 * @param form form of the triggered command.
 * @param predicate specifies when the Trigger fires the command.
 * @param options optional fields (title, description, metadata) of this trigger.
 * @return Instance of the Trigger registered in IoT Cloud.
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 */
@NonNull
@WorkerThread
public Trigger postNewTrigger(
        @NonNull TriggeredCommandForm form,
        @NonNull Predicate predicate,
        @Nullable TriggerOptions options)
        throws ThingIFException {
    return postNewTriggerWithForm(form, predicate, options);
}

// Builds the trigger request body (triggersWhat=COMMAND) from form/predicate/
// options and posts it.
private Trigger postNewTriggerWithForm(
        @NonNull TriggeredCommandForm form,
        @NonNull Predicate predicate,
        @Nullable TriggerOptions options) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    if (form == null) {
        throw new IllegalArgumentException("form is null.");
    }
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null.");
    }
    // Options (if any) seed the request body so title/description/metadata are kept.
    JSONObject requestBody = options != null ?
            JsonUtils.newJson(GsonRepository.gson().toJson(options)) : new JSONObject();
    try {
        requestBody.put("triggersWhat", TriggersWhat.COMMAND.name());
        requestBody.put("predicate", JsonUtils.newJson(
                GsonRepository.gson().toJson(predicate)));
        // Serialize the command with the schema-aware Gson instance so actions
        // are encoded per the registered schema.
        JSONObject command = JsonUtils.newJson(
                GsonRepository.gson(
                    this.getSchema(
                        form.getSchemaName(),
                        form.getSchemaVersion())).toJson(form));
        command.put("issuer", this.owner.getTypedID());
        // When the form does not name a target, the trigger acts on this API's target.
        if (form.getTargetID() == null) {
            command.put("target", this.target.getTypedID().toString());
        }
        requestBody.put("command", command);
    } catch (JSONException e) {
        // Won't happen.
        // TODO: remove this after test finished.
        throw new RuntimeException(e);
    }
    return postNewTrigger(requestBody);
}

/**
 * Post new Trigger with server code to IoT Cloud.
 *
 * @param serverCode Specify server code you want to execute.
 * @param predicate Specify when the Trigger fires command.
 * @param options option fields of this trigger.
 * @return Instance of the Trigger registered in IoT Cloud.
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 * @throws ThingIFRestException Thrown when server returns error response.
 */
@NonNull
@WorkerThread
public Trigger postNewTrigger(
        @NonNull ServerCode serverCode,
        @NonNull Predicate predicate,
        @Nullable TriggerOptions options)
        throws ThingIFException {
    return postServerCodeNewTrigger(serverCode, predicate, options);
}

/**
 * Post new Trigger with server code to IoT Cloud.
 *
 * <p>
 * Limited version of {@link #postNewTrigger(ServerCode, Predicate,
 * TriggerOptions)}. This method can not set title, description and
 * metadata of {@link Trigger}.
 * </p>
 *
 * @param serverCode Specify server code you want to execute.
 * @param predicate Specify when the Trigger fires command.
 * @return Instance of the Trigger registered in IoT Cloud.
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 * @throws ThingIFRestException Thrown when server returns error response.
 */
@NonNull
@WorkerThread
public Trigger postNewTrigger(
        @NonNull ServerCode serverCode,
        @NonNull Predicate predicate)
        throws ThingIFException {
    return postServerCodeNewTrigger(serverCode, predicate, null);
}

// Builds the trigger request body (triggersWhat=SERVER_CODE) and posts it.
@NonNull
@WorkerThread
private Trigger postServerCodeNewTrigger(
        @NonNull ServerCode serverCode,
        @NonNull Predicate predicate,
        @Nullable TriggerOptions options) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException(
                "Can not perform this action before onboarding");
    }
    if (serverCode == null) {
        throw new IllegalArgumentException("serverCode is null");
    }
    if (predicate == null) {
        throw new IllegalArgumentException("predicate is null");
    }
    // Options (if any) seed the request body so title/description/metadata are kept.
    JSONObject requestBody = options != null ?
            JsonUtils.newJson(GsonRepository.gson().toJson(options)) : new JSONObject();
    try {
        requestBody.put("predicate",
                JsonUtils.newJson(GsonRepository.gson().toJson(predicate)));
        requestBody.put("triggersWhat", TriggersWhat.SERVER_CODE.name());
        requestBody.put("serverCode",
                JsonUtils.newJson(GsonRepository.gson().toJson(serverCode)));
    } catch (JSONException e) {
        // Won't happen
    }
    return this.postNewTrigger(requestBody);
}

// Shared POST helper for both command- and server-code-type triggers; the
// server returns only the new trigger's ID, so the full Trigger is re-fetched.
private Trigger postNewTrigger(@NonNull JSONObject requestBody) throws ThingIFException {
    String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/triggers",
            this.app.getAppID(), this.target.getTypedID().toString());
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.POST, headers,
            MediaTypes.MEDIA_TYPE_JSON, requestBody);
    JSONObject responseBody = this.restClient.sendRequest(request);
    String triggerID = responseBody.optString("triggerID", null);
    return this.getTrigger(triggerID);
}

/**
 * Get specified Trigger.
 * @param triggerID ID of the Trigger to get.
 * @return Trigger instance.
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 * @throws ThingIFRestException Thrown when server returns error response.
 * @throws UnsupportedSchemaException Thrown when the returned response has a schema that cannot handle this instance.
 * @throws UnsupportedActionException Thrown when the returned response has a action that cannot handle this instance.
*/ @NonNull @WorkerThread public Trigger getTrigger( @NonNull String triggerID) throws ThingIFException { if (this.target == null) { throw new IllegalStateException("Can not perform this action before onboarding"); } if (TextUtils.isEmpty(triggerID)) { throw new IllegalArgumentException("triggerID is null or empty"); } String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/triggers/{2}", this.app.getAppID(), this.target.getTypedID().toString(), triggerID); String url = Path.combine(this.app.getBaseUrl(), path); Map<String, String> headers = this.newHeader(); IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.GET, headers); JSONObject responseBody = this.restClient.sendRequest(request); Schema schema = null; JSONObject commandObject = responseBody.optJSONObject("command"); if (commandObject != null) { String schemaName = commandObject.optString("schema", null); int schemaVersion = commandObject.optInt("schemaVersion"); schema = this.getSchema(schemaName, schemaVersion); if (schema == null) { throw new UnsupportedSchemaException(schemaName, schemaVersion); } } return this.deserialize(schema, responseBody, this.target.getTypedID()); } @NonNull @WorkerThread public Trigger patchTrigger( @NonNull String triggerID, @Nullable TriggeredCommandForm form, @Nullable Predicate predicate, @Nullable TriggerOptions options) throws ThingIFException { return patchTriggerWithForm(triggerID, form, predicate, options); } @NonNull @WorkerThread public Trigger patchTrigger( @NonNull String triggerID, @Nullable String schemaName, int schemaVersion, @Nullable List<Action> actions, @Nullable Predicate predicate) throws ThingIFException { if ((actions == null || actions.size() == 0) && predicate == null) { throw new IllegalArgumentException( "actions is null or empty and predicate is null."); } TriggeredCommandForm form = null; if (actions != null && actions.size() > 0) { form = TriggeredCommandForm.Builder.newBuilder( schemaName, schemaVersion, 
actions).build();
}
return patchTriggerWithForm(triggerID, form, predicate, null);
}

/**
 * Shared implementation for patching a command Trigger. Builds the PATCH
 * body from whichever of form/predicate/options were supplied (at least one
 * must be non-null) and delegates to the private patchTrigger.
 */
@NonNull
@WorkerThread
private Trigger patchTriggerWithForm(
        @NonNull String triggerID,
        @Nullable TriggeredCommandForm form,
        @Nullable Predicate predicate,
        @Nullable TriggerOptions options) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException(
                "Can not perform this action before onboarding");
    }
    if (TextUtils.isEmpty(triggerID)) {
        throw new IllegalArgumentException("triggerID is null or empty");
    }
    if (form == null && predicate == null && options == null) {
        throw new IllegalArgumentException(
                "All of form, predicate and options are null.");
    }
    JSONObject requestBody = null;
    try {
        // Options (if any) seed the body; the other pieces are merged in.
        if (options != null) {
            requestBody = JsonUtils.newJson(GsonRepository.gson().toJson(options));
        } else {
            requestBody = new JSONObject();
        }
        requestBody.put("triggersWhat", TriggersWhat.COMMAND.name());
        if (predicate != null) {
            requestBody.put("predicate", JsonUtils.newJson(
                    GsonRepository.gson().toJson(predicate)));
        }
        if (form != null) {
            // Serialize the command with the schema-aware Gson instance so
            // schema-specific actions are encoded correctly.
            JSONObject command = JsonUtils.newJson(
                    GsonRepository.gson(
                            this.getSchema(form.getSchemaName(),
                                    form.getSchemaVersion())).toJson(form));
            command.put("issuer", this.owner.getTypedID());
            // Default the command target to this API's target when the form
            // does not specify one.
            if (form.getTargetID() == null) {
                command.put("target", this.target.getTypedID().toString());
            }
            requestBody.put("command", command);
        }
    } catch (JSONException e) {
        // Won't happen
    }
    return this.patchTrigger(triggerID, requestBody);
}

/**
 * Patch a registered server-code Trigger.
 * Delegates to {@link #patchServerCodeTrigger}.
 */
@NonNull
@WorkerThread
public Trigger patchTrigger(
        @NonNull String triggerID,
        @Nullable ServerCode serverCode,
        @Nullable Predicate predicate,
        @Nullable TriggerOptions options) throws ThingIFException {
    return patchServerCodeTrigger(triggerID, serverCode, predicate, options);
}

/**
 * Patch a registered server-code Trigger (no options variant). At least one
 * of serverCode or predicate must be non-null.
 */
@NonNull
@WorkerThread
public Trigger patchTrigger(
        @NonNull String triggerID,
        @Nullable ServerCode serverCode,
        @Nullable Predicate predicate) throws ThingIFException {
    if (serverCode == null && predicate == null) {
        throw new IllegalArgumentException(
                "serverCode and predicate are null.");
    }
    return patchServerCodeTrigger(triggerID, serverCode, predicate, null);
}

/**
 * Shared implementation for patching a server-code Trigger. Mirrors
 * patchTriggerWithForm but fills in "serverCode" instead of "command".
 */
@NonNull
@WorkerThread
private Trigger patchServerCodeTrigger(
        @NonNull String triggerID,
        @Nullable ServerCode serverCode,
        @Nullable Predicate predicate,
        @Nullable TriggerOptions options) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    if (TextUtils.isEmpty(triggerID)) {
        throw new IllegalArgumentException("triggerID is null or empty");
    }
    if (serverCode == null && predicate == null && options == null) {
        throw new IllegalArgumentException(
                "serverCode, predicate and options are null.");
    }
    JSONObject requestBody = null;
    try {
        if (options != null) {
            requestBody = JsonUtils.newJson(
                    GsonRepository.gson().toJson(options));
        } else {
            requestBody = new JSONObject();
        }
        if (predicate != null) {
            requestBody.put("predicate", JsonUtils.newJson(
                    GsonRepository.gson().toJson(predicate)));
        }
        if (serverCode != null) {
            requestBody.put("serverCode", JsonUtils.newJson(
                    GsonRepository.gson().toJson(serverCode)));
        }
        requestBody.put("triggersWhat", TriggersWhat.SERVER_CODE.name());
    } catch (JSONException e) {
        // Won't happen
    }
    return this.patchTrigger(triggerID, requestBody);
}

/**
 * Sends the PATCH request for a Trigger, then re-fetches it so the returned
 * instance reflects the server-side state.
 */
private Trigger patchTrigger(@NonNull String triggerID, @NonNull JSONObject requestBody) throws ThingIFException {
    String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/triggers/{2}", this.app.getAppID(), this.target.getTypedID().toString(), triggerID);
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.PATCH, headers, MediaTypes.MEDIA_TYPE_JSON, requestBody);
    this.restClient.sendRequest(request);
    return this.getTrigger(triggerID);
}

/**
 * Enable/Disable registered Trigger
 * If its already enabled(/disabled),
 * this method won't throw Exception and behave as succeeded.
* @param triggerID ID of the Trigger to be enabled(/disabled).
* @param enable specify whether enable of disable the Trigger.
* @return Updated Trigger Instance.
* @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
* @throws ThingIFRestException Thrown when server returns error response.
*/
@NonNull
@WorkerThread
public Trigger enableTrigger(
        @NonNull String triggerID,
        boolean enable) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    if (TextUtils.isEmpty(triggerID)) {
        throw new IllegalArgumentException("triggerID is null or empty");
    }
    // The server exposes enable and disable as two distinct PUT endpoints.
    String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/triggers/{2}/{3}", this.app.getAppID(), this.target.getTypedID().toString(), triggerID, (enable ? "enable" : "disable"));
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.PUT, headers);
    this.restClient.sendRequest(request);
    // Re-fetch so the returned Trigger reflects the new enabled state.
    return this.getTrigger(triggerID);
}

/**
 * Delete the specified Trigger.
 * @param triggerID ID of the Trigger to be deleted.
 * @return Deleted Trigger Id.
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 * @throws ThingIFRestException Thrown when server returns error response.
 */
@NonNull
@WorkerThread
public String deleteTrigger(
        @NonNull String triggerID) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    if (TextUtils.isEmpty(triggerID)) {
        throw new IllegalArgumentException("triggerID is null or empty");
    }
    String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/triggers/{2}", this.app.getAppID(), target.getTypedID().toString(), triggerID);
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.DELETE, headers);
    this.restClient.sendRequest(request);
    return triggerID;
}

/**
 * Retrieves list of server code results that was executed by the specified trigger. Results will be listing with order by modified date descending (latest first)
 * @param triggerID trigger ID to retrieve server code results.
 * @param bestEffortLimit limit the maximum number of the results in the
 * Response. It ensures numbers in
 * response is equals to or less than specified number.
 * But doesn't ensures number of the results
 * in the response is equal to specified value.<br>
 * If the specified value {@literal <}= 0, Default size of the limit
 * is applied by IoT Cloud.
 * @param paginationKey If specified obtain rest of the items.
 * @return first is list of the results and second is paginationKey returned
 * by IoT Cloud. paginationKey is null when there is next page to be obtained.
 * Obtained paginationKey can be used to get the rest of the items stored
 * in the target.
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 * @throws ThingIFRestException Thrown when server returns error response.
*/
@NonNull
@WorkerThread
public Pair<List<TriggeredServerCodeResult>, String> listTriggeredServerCodeResults (
        @NonNull String triggerID,
        int bestEffortLimit,
        @Nullable String paginationKey
) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    if (TextUtils.isEmpty(triggerID)) {
        throw new IllegalArgumentException("triggerID is null or empty");
    }
    String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/triggers/{2}/results/server-code", this.app.getAppID(), this.target.getTypedID().toString(), triggerID);
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.GET, headers);
    // Both query parameters are optional; <= 0 / empty means "server default".
    if (bestEffortLimit > 0) {
        request.addQueryParameter("bestEffortLimit", bestEffortLimit);
    }
    if (!TextUtils.isEmpty(paginationKey)) {
        request.addQueryParameter("paginationKey", paginationKey);
    }
    JSONObject responseBody = this.restClient.sendRequest(request);
    String nextPaginationKey = responseBody.optString("nextPaginationKey", null);
    JSONArray resultArray = responseBody.optJSONArray("triggerServerCodeResults");
    List<TriggeredServerCodeResult> results = new ArrayList<TriggeredServerCodeResult>();
    if (resultArray != null) {
        for (int i = 0; i < resultArray.length(); i++) {
            JSONObject resultJson = resultArray.optJSONObject(i);
            results.add(this.deserialize(resultJson, TriggeredServerCodeResult.class));
        }
    }
    return new Pair<List<TriggeredServerCodeResult>, String>(results, nextPaginationKey);
}

/**
 * List Triggers belongs to the specified Target.<br>
 * If the Schema of the Trigger included in the response does not matches with the Schema
 * registered this ThingIfAPI instance, It won't be included in returned value.
 * @param bestEffortLimit limit the maximum number of the Triggers in the
 * Response. It ensures numbers in
 * response is equals to or less than specified number.
 * But doesn't ensures number of the Triggers
 * in the response is equal to specified value.<br>
 * If the specified value {@literal <}= 0, Default size of the limit
 * is applied by IoT Cloud.
 * @param paginationKey If specified obtain rest of the items.
 * @return first is list of the Triggers and second is paginationKey returned
 * by IoT Cloud. paginationKey is null when there is next page to be obtained.
 * Obtained paginationKey can be used to get the rest of the items stored
 * in the target.
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 * @throws ThingIFRestException Thrown when server returns error response.
 * @throws UnsupportedActionException Thrown when the returned response has a action that cannot handle this instance.
 */
@NonNull
@WorkerThread
public Pair<List<Trigger>, String> listTriggers(
        int bestEffortLimit,
        @Nullable String paginationKey) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/triggers", this.app.getAppID(), this.target.getTypedID().toString());
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.GET, headers);
    if (bestEffortLimit > 0) {
        request.addQueryParameter("bestEffortLimit", bestEffortLimit);
    }
    if (!TextUtils.isEmpty(paginationKey)) {
        request.addQueryParameter("paginationKey", paginationKey);
    }
    JSONObject responseBody = this.restClient.sendRequest(request);
    String nextPaginationKey = responseBody.optString("nextPaginationKey", null);
    JSONArray triggerArray = responseBody.optJSONArray("triggers");
    List<Trigger> triggers = new ArrayList<Trigger>();
    if (triggerArray != null) {
        for (int i = 0; i < triggerArray.length(); i++) {
            JSONObject triggerJson = triggerArray.optJSONObject(i);
            JSONObject commandJson =
                    triggerJson.optJSONObject("command");
            Schema schema = null;
            if (commandJson != null) {
                String schemaName = commandJson.optString("schema", null);
                int schemaVersion = commandJson.optInt("schemaVersion");
                schema = this.getSchema(schemaName, schemaVersion);
                if (schema == null) {
                    // Unknown schema: silently skip this trigger, per javadoc.
                    continue;
                }
            }
            triggers.add(this.deserialize(schema, triggerJson, this.target.getTypedID()));
        }
    }
    return new Pair<List<Trigger>, String>(triggers, nextPaginationKey);
}

/**
 * Get the State of specified Target.
 * State will be serialized with Gson library.
 * @param classOfS Specify class of the State.
 * @param <S> State class.
 * @return Instance of Target State.
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 * @throws ThingIFRestException Thrown when server returns error response.
 */
@NonNull
@WorkerThread
public <S extends TargetState> S getTargetState(
        @NonNull Class<S> classOfS) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    if (classOfS == null) {
        throw new IllegalArgumentException("classOfS is null");
    }
    String path = MessageFormat.format("/thing-if/apps/{0}/targets/{1}/states", this.app.getAppID(), this.target.getTypedID().toString());
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.GET, headers);
    JSONObject responseBody = this.restClient.sendRequest(request);
    S ret = GsonRepository.gson().fromJson(responseBody.toString(), classOfS);
    return ret;
}

/**
 * Get the Vendor Thing ID of specified Target.
 *
 * @return Vendor Thing ID
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
*/
@NonNull
@WorkerThread
public String getVendorThingID() throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    String path = MessageFormat.format("/api/apps/{0}/things/{1}/vendor-thing-id", this.app.getAppID(), this.target.getTypedID().getID());
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.GET, headers);
    JSONObject responseBody = this.restClient.sendRequest(request);
    return responseBody.optString("_vendorThingID", null);
}

/**
 * Update the Vendor Thing ID of specified Target.
 *
 * @param newVendorThingID New vendor thing id
 * @param newPassword New password
 * @throws ThingIFException Thrown when failed to connect IoT Cloud Server.
 */
@WorkerThread
public void updateVendorThingID(@NonNull String newVendorThingID, @NonNull String newPassword) throws ThingIFException {
    if (this.target == null) {
        throw new IllegalStateException("Can not perform this action before onboarding");
    }
    if (TextUtils.isEmpty(newPassword)) {
        throw new IllegalArgumentException("newPassword is null or empty");
    }
    if (TextUtils.isEmpty(newVendorThingID)) {
        throw new IllegalArgumentException("newVendorThingID is null or empty");
    }
    JSONObject requestBody = new JSONObject();
    try {
        requestBody.put("_vendorThingID", newVendorThingID);
        requestBody.put("_password", newPassword);
    } catch (JSONException e) {
        // Won't happen: keys are non-null and values were checked above.
    }
    String path = MessageFormat.format("/api/apps/{0}/things/{1}/vendor-thing-id", this.app.getAppID(), this.target.getTypedID().getID());
    String url = Path.combine(this.app.getBaseUrl(), path);
    Map<String, String> headers = this.newHeader();
    IoTRestRequest request = new IoTRestRequest(url, IoTRestRequest.Method.PUT, headers, MediaTypes.MEDIA_TYPE_VENDOR_THING_ID_UPDATE_REQUEST, requestBody);
    this.restClient.sendRequest(request);
}

/** Get Kii App
 * @return Kii Cloud Application.
 */
@NonNull
public KiiApp getApp() {
    return this.app;
}

/**
 * Get AppID
 * @return app ID
 */
@NonNull
public String getAppID() {
    return this.app.getAppID();
}

/**
 * Get AppKey
 * @return app key
 */
@NonNull
public String getAppKey() {
    return this.app.getAppKey();
}

/**
 * Get base URL
 * @return base URL
 */
@NonNull
public String getBaseUrl() {
    return this.app.getBaseUrl();
}

/**
 * Get list of schema.
 * @return list of schema.
 */
@NonNull
public List<Schema> getSchemas() {
    // Defensive copy: callers cannot mutate the internal schema registry.
    return new ArrayList<Schema>(this.schemas.values());
}

/**
 * Get owner who uses the ThingIFAPI.
 * @return owner
 */
@NonNull
public Owner getOwner() {
    return this.owner;
}

/**
 * Get target thing that is operated by the ThingIFAPI.
 * @return target of this ThingIFAPI.
 */
@Nullable
public Target getTarget() {
    return this.target;
}

/**
 * Get a tag.
 * @return tag.
 */
@Nullable
public String getTag() {
    return this.tag;
}

/** Looks up a registered Schema by (name, version); null when unregistered. */
@Nullable
private Schema getSchema(String schemaName, int schemaVersion) {
    return this.schemas.get(new Pair<String, Integer>(schemaName, schemaVersion));
}

/** Builds the common Kii auth headers sent with every REST request. */
private Map<String, String> newHeader() {
    Map<String, String> headers = new HashMap<String, String>();
    if (!TextUtils.isEmpty(this.getAppID())) {
        headers.put("X-Kii-AppID", this.getAppID());
    }
    if (!TextUtils.isEmpty(this.getAppKey())) {
        headers.put("X-Kii-AppKey", this.getAppKey());
    }
    if (this.owner != null && !TextUtils.isEmpty(this.owner.getAccessToken())) {
        headers.put("Authorization", "Bearer " + this.owner.getAccessToken());
    }
    return headers;
}

/** Serializes a CommandForm and stamps the owner as the command "issuer". */
private JSONObject createPostNewCommandRequestBody(CommandForm src) throws ThingIFException {
    JSONObject ret = JsonUtils.newJson(GsonRepository.gson().toJson(src));
    try {
        ret.put("issuer", this.owner.getTypedID().toString());
    } catch (JSONException e) {
        throw new AssertionError(e);
    }
    return ret;
}

private <T> T deserialize(JSONObject json, Class<T> clazz) throws ThingIFException {
    return this.deserialize(null, json, clazz);
}

private <T> T deserialize(Schema schema, JSONObject json, Class<T>
        clazz) throws ThingIFException {
    return this.deserialize(schema, json.toString(), clazz);
}

/** Deserializes a Trigger, injecting the target's ID into a copy of the JSON. */
private Trigger deserialize(Schema schema, JSONObject json, TypedID targetID) throws ThingIFException {
    JSONObject copied = null;
    try {
        // Copy so the caller's JSONObject is not mutated.
        copied = new JSONObject(json.toString());
        copied.put("targetID", targetID.toString());
    } catch (JSONException e) {
        throw new ThingIFException("unexpected error.", e);
    }
    return this.deserialize(schema, copied.toString(), Trigger.class);
}

private <T> T deserialize(Schema schema, String json, Class<T> clazz) throws ThingIFException {
    try {
        return GsonRepository.gson(schema).fromJson(json, clazz);
    } catch (JsonParseException e) {
        // Unwrap ThingIFException raised inside custom Gson adapters.
        if (e.getCause() instanceof ThingIFException) {
            throw (ThingIFException) e.getCause();
        }
        throw e;
    }
}

private static SharedPreferences getSharedPreferences() {
    if (context != null) {
        return context.getSharedPreferences("com.kii.thingif.preferences", Context.MODE_PRIVATE);
    }
    return null;
}

// Implementation of Parcelable
protected ThingIFAPI(Parcel in) {
    this.tag = in.readString();
    this.app = in.readParcelable(KiiApp.class.getClassLoader());
    this.owner = in.readParcelable(Owner.class.getClassLoader());
    this.target = in.readParcelable(Target.class.getClassLoader());
    ArrayList<Schema> schemas = in.createTypedArrayList(Schema.CREATOR);
    for (Schema schema : schemas) {
        this.schemas.put(new Pair<String, Integer>(schema.getSchemaName(), schema.getSchemaVersion()), schema);
    }
    // restClient is not parceled; recreate it on deserialization.
    this.restClient = new IoTRestClient();
    this.installationID = in.readString();
}

public static final Creator<ThingIFAPI> CREATOR = new Creator<ThingIFAPI>() {
    @Override
    public ThingIFAPI createFromParcel(Parcel in) {
        return new ThingIFAPI(in);
    }

    @Override
    public ThingIFAPI[] newArray(int size) {
        return new ThingIFAPI[size];
    }
};

@Override
public int describeContents() {
    return 0;
}

@Override
public void writeToParcel(Parcel dest, int flags) {
    // Field order must match the read order in ThingIFAPI(Parcel).
    dest.writeString(this.tag);
    dest.writeParcelable(this.app, flags);
    dest.writeParcelable(this.owner, flags);
    dest.writeParcelable(this.target, flags);
    dest.writeTypedList(new ArrayList<Schema>(this.schemas.values()));
    dest.writeString(this.installationID);
}

/**
 * Get version of the SDK.
 * @return Version string.
 */
@NonNull
public static String getSDKVersion() {
    return SDKVersion.versionString;
}
}
package radlab.rain.workload.rubis; import java.io.IOException; import java.util.ArrayList; import java.util.List; import radlab.rain.IScoreboard; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URIBuilder; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.message.BasicNameValuePair; import org.apache.http.NameValuePair; import radlab.rain.workload.rubis.model.RubisItem; import radlab.rain.workload.rubis.model.RubisUser; /** * Store-Bid operation. * * Emulates the following requests: * 1. Click on the 'Bid Now' image for a certain item * 2. Provide authentication data (login name and password) * 3. Fill-in the form anc click on the 'Bid now!' button * * @author Marco Guazzone (marco.guazzone@gmail.com) */ public class StoreBidOperation extends RubisOperation { public StoreBidOperation(boolean interactive, IScoreboard scoreboard) { super(interactive, scoreboard); this._operationName = "Store-Bid"; this._operationIndex = RubisGenerator.STORE_BID_OP; } @Override public void execute() throws Throwable { //this.getLogger().finest("Begin Bid execution"); StringBuilder response = null; // Get an item (from last response or from session) int itemId = this.getUtility().findItemIdInHtml(this.getSessionState().getLastResponse()); RubisItem item = this.getUtility().getItem(itemId, this.getSessionState().getLoggedUserId()); if (!this.getUtility().isValidItem(item)) { // Try to see if there an item in session item = this.getUtility().getItem(this.getSessionState().getItemId(), this.getSessionState().getLoggedUserId()); if (!this.getUtility().isValidItem(item)) { this.getLogger().warning("No valid item has been found. Operation interrupted."); this.setFailed(true); return; } } // Need a logged user RubisUser loggedUser = this.getUtility().getUser(this.getSessionState().getLoggedUserId()); if (!this.getUtility().isValidUser(loggedUser)) { this.getLogger().warning("No valid user has been found to log-in. 
Operation interrupted."); this.setFailed(true); return; } HttpPost reqPost = null; List<NameValuePair> form = null; UrlEncodedFormEntity entity = null; // Fill-in the form anc click on the 'Bid now!' button // This will really store the bid on the DB. String str = null; int maxQty = 0; str = this.getUtility().findFormParamInHtml(this.getSessionState().getLastResponse(), "maxQty"); if (str != null && !str.isEmpty()) { maxQty = Math.max(Integer.parseInt(str), maxQty); } int qty = (maxQty > 0) ? (this.getRandomGenerator().nextInt(maxQty)+1) : 0; float minBid = 0; str = this.getUtility().findFormParamInHtml(this.getSessionState().getLastResponse(), "minBid"); if (str != null && !str.isEmpty()) { minBid = Float.parseFloat(str); } int addBid = this.getRandomGenerator().nextInt(Math.round(this.getConfiguration().getMaxItemBaseBidPrice()))+1; float bid = minBid+addBid; float maxBid = minBid+addBid*2; reqPost = new HttpPost(this.getGenerator().getStoreBidURL()); form = new ArrayList<NameValuePair>(); form.add(new BasicNameValuePair("itemId", Integer.toString(item.id))); form.add(new BasicNameValuePair("userId", Integer.toString(loggedUser.id))); form.add(new BasicNameValuePair("minBid", Float.toString(minBid))); form.add(new BasicNameValuePair("bid", Float.toString(bid))); form.add(new BasicNameValuePair("maxBid", Float.toString(maxBid))); form.add(new BasicNameValuePair("maxQty", Integer.toString(maxQty))); form.add(new BasicNameValuePair("qty", Integer.toString(qty))); entity = new UrlEncodedFormEntity(form, "UTF-8"); reqPost.setEntity(entity); this.getLogger().finest("Send POST " + reqPost.getURI().toString()); response = this.getHttpTransport().fetch(reqPost); this.trace(reqPost.getURI().toString()); if (!this.getGenerator().checkHttpResponse(response.toString())) { this.getLogger().severe("Problems in performing request to URL: " + reqPost.getURI() + " (HTTP status code: " + this.getHttpTransport().getStatusCode() + "). 
Server response: " + response); throw new IOException("Problems in performing request to URL: " + reqPost.getURI() + " (HTTP status code: " + this.getHttpTransport().getStatusCode() + ")"); } // Save session data this.getSessionState().setLastResponse(response.toString()); this.getSessionState().setItemId(item.id); this.setFailed(false); //this.getLogger().finest("End Bid execution"); } }
package dr.app.beauti;

import dr.app.beauti.options.BeautiOptions;
import dr.app.beauti.options.NucModelType;
import dr.app.beauti.options.PartitionModel;
import dr.evolution.datatype.Nucleotides;
import dr.evolution.io.NexusImporter;

import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.List;

/**
 * Class for importing PAUP, MrBayes and Rhino NEXUS file format
 *
 * @author Andrew Rambaut
 * @author Alexei Drummond
 * @version $Id: NexusApplicationImporter.java,v 1.4 2005/07/11 14:07:25 rambaut Exp $
 */
public class NexusApplicationImporter extends NexusImporter {

    // Sentinel NexusBlock instances for the application-specific blocks this
    // importer recognizes beyond the standard NEXUS blocks.
    public static final NexusBlock ASSUMPTIONS_BLOCK = new NexusBlock("ASSUMPTIONS");
    public static final NexusBlock PAUP_BLOCK = new NexusBlock("PAUP");
    public static final NexusBlock MRBAYES_BLOCK = new NexusBlock("MRBAYES");

    /**
     * @param reader a reader to read the Nexus format from
     */
    public NexusApplicationImporter(Reader reader) {
        super(reader);
        setCommentDelimiters('[', ']', '\0');
    }

    // Variant that echoes comments to the supplied writer.
    public NexusApplicationImporter(Reader reader, Writer commentWriter) {
        super(reader, commentWriter);
        setCommentDelimiters('[', ']', '\0');
    }

    /**
     * This function returns an enum class to specify what the
     * block given by blockName is.
     */
    public NexusBlock findBlockName(String blockName) {
        if (blockName.equalsIgnoreCase(ASSUMPTIONS_BLOCK.toString())) {
            return ASSUMPTIONS_BLOCK;
        } else if (blockName.equalsIgnoreCase(PAUP_BLOCK.toString())) {
            return PAUP_BLOCK;
        } else if (blockName.equalsIgnoreCase(MRBAYES_BLOCK.toString())) {
            return MRBAYES_BLOCK;
        } else {
            // Fall back to the standard NEXUS block names.
            return super.findBlockName(blockName);
        }
    }

    /**
     * Parses an 'Assumptions' block.
     *
     * @param charSets a list of char sets to *add* to if any are defined in PAUP block
     * @throws dr.evolution.io.Importer.ImportException
     *          if Assumptions block is poorly formed
     * @throws java.io.IOException if I/O fails
     */
    public void parseAssumptionsBlock(List<CharSet> charSets) throws ImportException, IOException {
        boolean done = false;
        while (!done) {
            String command = readToken(";");
            if (command.equalsIgnoreCase("ENDBLOCK") || command.equalsIgnoreCase("END")) {
                done = true;
            } else if (match("CHARSET", command, 5)) {
                // Only read arguments if the command was not terminated by ';'.
                if (getLastDelimiter() != ';') {
                    charSets.add(readCharSetCommand());
                }
            } else {
                System.err.println("The command, '" + command + "', is not used by BEAST and has been ignored");
            }
        }
    }

    /**
     * Parses a 'PAUP' block.
     *
     * @param options the BEAUti options
     * @param charSets a list of char sets to *add* to if any are defined in PAUP block
     * @return a partition model representing the model defined in the PAUP block
     * @throws dr.evolution.io.Importer.ImportException
     *          if PAUP block is poorly formed
     * @throws java.io.IOException if I/O fails
     */
    public PartitionModel parsePAUPBlock(BeautiOptions options, List<CharSet> charSets) throws ImportException, IOException {
        PartitionModel model = new PartitionModel(options, "nucs", Nucleotides.INSTANCE);
        readTopLevelBlock(options, model, charSets);
        return model;
    }

    /**
     * Parses a 'MRBAYES' block.
     *
     * @param options the BEAUti options
     * @param charSets a list of char sets to *add* to if any are defined in PAUP block
     * @return a partition model representing the model defined in the MRBAYES block
     * @throws dr.evolution.io.Importer.ImportException
     *          if MRBAYES block is poorly formed
     * @throws java.io.IOException if I/O fails
     */
    public PartitionModel parseMrBayesBlock(BeautiOptions options, List<CharSet> charSets) throws ImportException, IOException {
        PartitionModel model = new PartitionModel(options, "nucs", Nucleotides.INSTANCE);
        readTopLevelBlock(options, model, charSets);
        return model;
    }

    // Reads one "CHARSET name = from-to[\every];" command.
    // Accepts either "from-to" (optionally with "\every" after "to")
    // or a single site "from".
    private CharSet readCharSetCommand() throws ImportException, IOException {
        String name = readToken("=;");
        String[] parts = readToken(";").split("-");

        int from;
        int to;
        int every = 1;
        try {
            if (parts.length == 2) {
                from = Integer.parseInt(parts[0]);
                // The step, if present, follows a backslash: "to\every".
                String[] toParts = parts[1].split("\\\\");
                to = Integer.parseInt(toParts[0]);
                every = 1;
                if (toParts.length > 1) every = Integer.parseInt(toParts[1]);
            } else if (parts.length == 1) {
                from = Integer.parseInt(parts[0]);
                to = from;
            } else {
                throw new ImportException("CharSet, " + name + ", unable to be parsed");
            }
        } catch (NumberFormatException nfe) {
            throw new ImportException("CharSet, " + name + ", unable to be parsed");
        }
        return new CharSet(name, from, to, every);
    }

    /**
     * This method reads a PAUP or MrBayes block
     *
     * @param options the beauti options
     * @param model the partition model
     * @param charSets a list of char sets to *add* to if any are defined in PAUP block
     * @throws dr.evolution.io.Importer.ImportException
     *          if top-level block is poorly formed
     * @throws java.io.IOException if I/O fails
     */
    private void readTopLevelBlock(BeautiOptions options, PartitionModel model, List<CharSet> charSets)
            throws ImportException, IOException {
        boolean done = false;
        while (!done) {
            String command = readToken(";");
            if (command.equalsIgnoreCase("ENDBLOCK") || command.equalsIgnoreCase("END")) {
                done = true;
            } else if (match("HSEARCH", command, 2)) {
                // Once we reach a search in PAUP then stop
                done = true;
            } else if (match("MCMC", command, 4)) {
                if (getLastDelimiter() != ';') {
                    readMCMCCommand(options);
                }
                // MCMC (unlike MCMCP) starts the run, so stop reading here.
                done = true;
            } else if (match("MCMCP", command, 5)) {
                if (getLastDelimiter() != ';') {
                    readMCMCCommand(options);
                }
            } else if (match("LSET", command, 2)) {
                if (getLastDelimiter() != ';') {
                    readLSETCommand(model);
                }
            } else if (match("CHARSET", command, 5)) {
                if (getLastDelimiter() != ';') {
                    charSets.add(readCharSetCommand());
                }
            } else {
                System.err.println("The command, '" + command + "', is not used by BEAST and has been ignored");
            }
        }
    }

    // Maps PAUP/MrBayes LSET subcommands onto the partition model:
    // NST -> substitution model, RATES -> gamma/invariant heterogeneity,
    // NGAMMACAT -> number of gamma categories.
    private void readLSETCommand(PartitionModel model) throws ImportException, IOException {
        boolean done = false;
        while (!done) {
            String subcommand = readToken("=;");
            if (match("NST", subcommand, 2)) {
                int nst = readInteger(";");
                if (nst == 1) {
                    model.setNucSubstitutionModel(NucModelType.JC);
                } else if (nst == 2) {
                    model.setNucSubstitutionModel(NucModelType.HKY);
                } else if (nst == 6) {
                    model.setNucSubstitutionModel(NucModelType.GTR);
                } else {
                    throw new BadFormatException("Bad value for NST subcommand of LSET command");
                }
            } else if (match("RATES", subcommand, 2)) {
                String token = readToken(";");
                if (match("EQUAL", token, 1)) {
                    model.setGammaHetero(false);
                    model.setInvarHetero(false);
                } else if (match("GAMMA", token, 1)) {
                    model.setGammaHetero(true);
                    model.setInvarHetero(false);
                } else if (match("PROPINV", token, 1)) {
                    model.setGammaHetero(false);
                    model.setInvarHetero(true);
                } else if (match("INVGAMMA", token, 1)) {
                    model.setGammaHetero(true);
                    model.setInvarHetero(true);
                } else if (match("ADGAMMA", token, 1)) {
                    System.err.println("The option, 'RATES=ADGAMMA', in the LSET command is not used by BEAST and has been ignored");
                } else if (match("SITESPEC", token, 1)) {
                    System.err.println("The option, 'RATES=SITESPEC', in the LSET command is not used by BEAST and has been ignored");
                } else {
                    throw new BadFormatException("Unknown value, '" + token + "'");
                }
            } else if (match("NGAMMACAT", subcommand, 2)) {
                model.setGammaCategories(readInteger(";"));
            } else {
                System.err.println("The option, '" + subcommand + "', in the LSET command is not used by BEAST and has been ignored");
            }

            if (getLastDelimiter() == ';') {
                done = true;
            }
        }
    }

    // Maps MrBayes MCMC/MCMCP subcommands onto the BEAUti run options.
    private void readMCMCCommand(BeautiOptions options) throws ImportException, IOException {
        boolean done = false;
        while (!done) {
            String subcommand = readToken("=;");
            if (match("NGEN", subcommand, 2)) {
                options.chainLength = readInteger(";");
            } else if (match("SAMPLEFREQ", subcommand, 2)) {
                options.logEvery = readInteger(";");
            } else if (match("PRINTFREQ", subcommand, 1)) {
                options.echoEvery = readInteger(";");
            } else if (match("FILENAME", subcommand, 1)) {
                options.fileName = readToken(";");
            } else if (match("BURNIN", subcommand, 1)) {
                options.burnIn = readInteger(";");
            } else if (match("STARTINGTREE", subcommand, 2)) {
                String token = readToken(";");
                if (match("USER", token, 1)) {
                    options.userTree = true;
                } else if (match("RANDOM", token, 1)) {
                    options.userTree = false;
                } else {
                    throw new BadFormatException("Unknown value, '" + token + "'");
                }
            } else {
                System.err.println("The option, '" + subcommand + "', in the MCMC command is not used by BEAST and has been ignored");
            }

            if (getLastDelimiter() == ';') {
                done = true;
            }
        }
    }

    // Case-insensitive abbreviation match: true when 'reference' starts with
    // the upper-cased 'target'.
    // NOTE(review): the 'min' length check below is dead code — the throw is
    // commented out, so abbreviations shorter than 'min' are silently accepted.
    // Presumably intentional (lenient parsing), but worth confirming; the
    // declared ImportException is currently never thrown.
    private boolean match(String reference, String target, int min) throws ImportException {

        if (target.length() < min) {
            //throw new BadFormatException("Ambiguous command or subcommand, '" + target + "'");
        }
        return reference.startsWith(target.toUpperCase());
    }

    // Value object describing a NEXUS character set: sites [fromSite, toSite]
    // taken every 'every' positions (default 1).
    public class CharSet {

        public CharSet(String name, int fromSite, int toSite) {
            this.name = name;
            this.fromSite = fromSite;
            this.toSite = toSite;
        }

        public CharSet(String name, int fromSite, int toSite, int every) {
            this.name = name;
            this.fromSite = fromSite;
            this.toSite = toSite;
            this.every = every;
        }

        public String getName() {
            return name;
        }

        public int getFromSite() {
            return fromSite;
        }

        public int getToSite() {
            return toSite;
        }

        public int getEvery() {
            return every;
        }

        private final String name;
        private final int fromSite;
        private final int toSite;
        private int every = 1;
    }
}
package dr.app.beauti.generator; import dr.app.beast.BeastVersion; import dr.app.beauti.components.ComponentFactory; import dr.app.beauti.enumTypes.ClockType; import dr.app.beauti.enumTypes.FixRateType; import dr.app.beauti.enumTypes.PriorType; import dr.app.beauti.enumTypes.TreePriorType; import dr.app.beauti.options.*; import dr.app.beauti.options.Parameter; import dr.app.beauti.util.XMLWriter; import dr.evolution.alignment.Alignment; import dr.evolution.alignment.SitePatterns; import dr.evolution.datatype.DataType; import dr.evolution.datatype.Nucleotides; import dr.evolution.util.Taxa; import dr.evolution.util.Taxon; import dr.evolution.util.TaxonList; import dr.evolution.util.Units; import dr.evomodel.branchratemodel.BranchRateModel; import dr.evomodel.branchratemodel.StrictClockBranchRates; import dr.evomodel.clock.ACLikelihood; import dr.evomodel.coalescent.CoalescentLikelihood; import dr.evomodel.coalescent.GMRFFixedGridImportanceSampler; import dr.evomodel.speciation.SpeciationLikelihood; import dr.evomodel.speciation.SpeciesTreeModel; import dr.evomodel.speciation.TreePartitionCoalescent; import dr.evomodel.tree.MonophylyStatistic; import dr.evomodel.tree.TMRCAStatistic; import dr.evomodel.tree.TreeModel; import dr.evomodelxml.*; import dr.evoxml.*; import dr.inference.distribution.MixedDistributionLikelihood; import dr.inference.loggers.Columns; import dr.inference.model.*; import dr.inference.operators.SimpleOperatorSchedule; import dr.inference.xml.LoggerParser; import dr.inferencexml.PriorParsers; import dr.util.Attribute; import dr.util.Version; import dr.xml.XMLParser; import java.io.Writer; import java.util.*; /** * This class holds all the data for the current BEAUti Document * * @author Andrew Rambaut * @author Alexei Drummond * @author Walter Xie * @version $Id: BeastGenerator.java,v 1.4 2006/09/05 13:29:34 rambaut Exp $ */ public class BeastGenerator extends Generator { private final static Version version = new BeastVersion(); private final 
static String TREE_FILE_LOG = "treeFileLog";
    private final static String SUB_TREE_FILE_LOG = "substTreeFileLog";

    // Sub-generators, one per section of the generated BEAST XML document.
    private final TreePriorGenerator treePriorGenerator;
    private final TreeLikelihoodGenerator treeLikelihoodGenerator;
    private final SubstitutionModelGenerator substitutionModelGenerator;
    private final InitialTreeGenerator initialTreeGenerator;
    private final TreeModelGenerator treeModelGenerator;
    private final BranchRatesModelGenerator branchRatesModelGenerator;
    private final OperatorsGenerator operatorsGenerator;
    // NOTE(review): oddly-named ("starEASTGeneratorGenerator") but used consistently below.
    private final STARBEASTGenerator starEASTGeneratorGenerator;

    /**
     * Creates a generator together with all of its per-section sub-generators.
     *
     * @param options    the BEAUti document options to generate XML from
     * @param components plug-in component factories that may insert extra XML
     */
    public BeastGenerator(BeautiOptions options, ComponentFactory[] components) {
        super(options, components);

        substitutionModelGenerator = new SubstitutionModelGenerator(options, components);
        treePriorGenerator = new TreePriorGenerator(options, components);
        treeLikelihoodGenerator = new TreeLikelihoodGenerator(options, components);
        initialTreeGenerator = new InitialTreeGenerator(options, components);
        treeModelGenerator = new TreeModelGenerator(options, components);
        branchRatesModelGenerator = new BranchRatesModelGenerator(options, components);
        operatorsGenerator = new OperatorsGenerator(options, components);
        starEASTGeneratorGenerator = new STARBEASTGenerator(options, components);
    }

    /**
     * Validates the current options before XML generation, throwing an
     * {@link IllegalArgumentException} describing the first problem found
     * (too few taxa, duplicate element ids, unsupported model combinations).
     */
    public void checkOptions() throws IllegalArgumentException {
        //++++++++++++++++ Taxon List ++++++++++++++++++
        TaxonList taxonList = options.taxonList;
        // ids already reserved for the top-level <taxa> and <alignment> elements
        Set<String> ids = new HashSet<String>();
        ids.add(TaxaParser.TAXA);
        ids.add(AlignmentParser.ALIGNMENT);

        if (taxonList != null) {
            if (taxonList.getTaxonCount() < 2) {
                throw new IllegalArgumentException("BEAST requires at least two taxa to run.");
            }

            for (int i = 0; i < taxonList.getTaxonCount(); i++) {
                Taxon taxon = taxonList.getTaxon(i);
                if (ids.contains(taxon.getId())) {
                    throw new IllegalArgumentException("A taxon has the same id," + taxon.getId() +
                            "\nas another element (taxon, sequence, taxon set etc.):\nAll ids should be unique.");
                }
                ids.add(taxon.getId());
            }
        }

        //++++++++++++++++ Taxon Sets ++++++++++++++++++
        for (Taxa taxa : options.taxonSets) {
            if (taxa.getTaxonCount() < 2) {
                throw new IllegalArgumentException("Taxon set, " + taxa.getId() + ", should contain\n" +
                        "at least two taxa.");
            }
            if (ids.contains(taxa.getId())) {
                throw new IllegalArgumentException("A taxon sets has the same id," + taxa.getId() +
                        "\nas another element (taxon, sequence, taxon set etc.):\nAll ids should be unique.");
            }
            ids.add(taxa.getId());
        }

        //++++++++++++++++ Tree Prior ++++++++++++++++++
        if (options.isShareSameTreePrior()) {
            for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
                if (prior.getNodeHeightPrior() == TreePriorType.GMRF_SKYRIDE) {
                    throw new IllegalArgumentException("For GMRF, tree model/tree prior combination not implemented by BEAST yet!"
                            + "\nPlease uncheck the shareSameTreePrior if using GMRF.");
                }
            }
        }

        //++++++++++++++++ clock model/tree model combination ++++++++++++++++++
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            // clock model/tree model combination not implemented by BEAST yet
            validateClockTreeModelCombination(model);
        }

        //++++++++++++++++ Species tree ++++++++++++++++++
        if (options.starBEASTOptions.isSpeciesAnalysis()) {
//            if (!(options.nodeHeightPrior == TreePriorType.SPECIES_BIRTH_DEATH || options.nodeHeightPrior == TreePriorType.SPECIES_YULE)) {
//                //TODO: more species tree model
        }

        // add other tests and warnings here
        // Speciation model with dated tips
        // Sampling rates without dated tips or priors on rate or nodes
    }

    /**
     * Generate a beast xml file from these beast options.
     * Sections are written in the fixed order BEAST expects; component
     * insertion points are invoked between sections so plug-ins can add XML.
     *
     * @param w the writer
     */
    public void generateXML(Writer w) {

        XMLWriter writer = new XMLWriter(w);

        writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
        writer.writeComment("Generated by BEAUTi " + version.getVersionString());
        writer.writeComment(" by Alexei J. Drummond and Andrew Rambaut");
        writer.writeComment(" Department of Computer Science, University of Auckland and");
        writer.writeComment(" Institute of Evolutionary Biology, University of Edinburgh");
        writer.writeComment(" http://beast.bio.ed.ac.uk/");
        writer.writeOpenTag("beast");
        writer.writeText("");

        // this gives any added implementations of the 'Component' interface a
        // chance to generate XML at this point in the BEAST file.
        generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);

        //++++++++++++++++ Taxon List ++++++++++++++++++
        writeTaxa(writer, options.taxonList);

        List<Taxa> taxonSets = options.taxonSets;
        if (taxonSets != null && taxonSets.size() > 0) {
            writeTaxonSets(writer, taxonSets); // TODO
        }

        if (options.allowDifferentTaxa) { // allow diff taxa for multi-gene
            writer.writeText("");
            writer.writeComment("List all taxons regarding each gene (file) for Multispecies Coalescent function");
            // write all taxa in each gene tree regarding each data partition,
            for (PartitionData partition : options.dataPartitions) {
                // do I need if (!alignments.contains(alignment)) {alignments.add(alignment);} ?
                writeDifferentTaxaForMultiGene(partition, writer);
            }
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);

        //++++++++++++++++ Alignments ++++++++++++++++++
        // collect the distinct alignments referenced by the data partitions
        List<Alignment> alignments = new ArrayList<Alignment>();
        for (PartitionData partition : options.dataPartitions) {
            Alignment alignment = partition.getAlignment();
            if (!alignments.contains(alignment)) {
                alignments.add(alignment);
            }
        }

        if (!options.samplePriorOnly) {
            int index = 1;
            for (Alignment alignment : alignments) {
                if (alignments.size() > 1) {
                    //if (!options.allowDifferentTaxa) {
                    alignment.setId(AlignmentParser.ALIGNMENT + index);
                    //} else { // e.g. alignment_gene1
                    //    alignment.setId("alignment_" + mulitTaxaTagName + index);
                } else {
                    alignment.setId(AlignmentParser.ALIGNMENT);
                }
                writeAlignment(alignment, writer);
                index += 1;
                writer.writeText("");
            }

            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);

            //++++++++++++++++ Pattern Lists ++++++++++++++++++
            // for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            // writePatternList(model, writer);
            for (PartitionData partition : options.dataPartitions) { // Each PD has one TreeLikelihood
                writePatternList(partition, writer);
                writer.writeText("");
            }

            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
        } else {
            // sampling the prior only: emit a single null-sequence alignment
            Alignment alignment = alignments.get(0);
            alignment.setId(AlignmentParser.ALIGNMENT);
            writeAlignment(alignment, writer);
            writer.writeText("");

            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
        }

        //++++++++++++++++ Tree Prior Model ++++++++++++++++++
        // if ( options.shareSameTreePrior ) { // Share Same Tree Prior
        // treePriorGenerator.setModelPrefix("");
        // treePriorGenerator.writeTreePriorModel(options.activedSameTreePrior, writer);
        // } else { // Different Tree Priors
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            // treePriorGenerator.setModelPrefix(prior.getPrefix()); // prior.constant
            treePriorGenerator.writeTreePriorModel(prior, writer);
            writer.writeText("");
        }

        //++++++++++++++++ Starting Tree ++++++++++++++++++
        // if ( options.getPartitionTreeModels().size() == 1 ) { // 1 Partition Tree Model
        // initialTreeGenerator.setModelPrefix("");
        // initialTreeGenerator.writeStartingTree(options.getPartitionTreeModels().get(0), writer);
        // } else { // Different Tree Models
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            // initialTreeGenerator.setModelPrefix(model.getPrefix()); // model.startingTree
initialTreeGenerator.writeStartingTree(model, writer);
            writer.writeText("");
        }

        //++++++++++++++++ Tree Model +++++++++++++++++++
        // if ( options.getPartitionTreeModels().size() == 1 ) { // 1 Partition Tree Model
        // treeModelGenerator.setModelPrefix("");
        // treeModelGenerator.writeTreeModel(writer);
        // } else { // Different Tree Models
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            // treeModelGenerator.setModelPrefix(model.getPrefix()); // treemodel.treeModel
            treeModelGenerator.writeTreeModel(model, writer);
            writer.writeText("");
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);

        //++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
        // if ( options.shareSameTreePrior ) { // Share Same Tree Prior
        // treePriorGenerator.setModelPrefix("");
        // treePriorGenerator.writeTreePrior(options.activedSameTreePrior, writer);
        // } else { // no species
        // for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
        // treePriorGenerator.setModelPrefix(prior.getPrefix()); // prior.treeModel
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            PartitionTreePrior prior = model.getPartitionTreePrior();
            treePriorGenerator.writePriorLikelihood(prior, model, writer);
            writer.writeText("");
        }

        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeEBSPVariableDemographic(prior, writer);
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);

        //++++++++++++++++ Branch Rates Model ++++++++++++++++++
        // if ( options.getPartitionClockModels().size() == 1 ) { // 1 Partition Clock Model
        // branchRatesModelGenerator.setModelPrefix("");
        // branchRatesModelGenerator.writeBranchRatesModel(writer);
        // } else { // Different Tree Models
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            // branchRatesModelGenerator.setModelPrefix(model.getPrefix()); // model.startingTree
            // for (PartitionTreeModel tree : options.getPartitionTreeModels(model.getAllPartitionData())) {
            branchRatesModelGenerator.writeBranchRatesModel(model, writer);
            writer.writeText("");
        }

        // write allClockRate for fix mean option in clock model panel
        if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN) {
            writer.writeOpenTag(CompoundParameter.COMPOUND_PARAMETER,
                    new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "allClockRates")});
            for (PartitionClockModel model : options.getPartitionClockModels()) {
                branchRatesModelGenerator.writeAllClockRateRefs(model, writer);
            }
            writer.writeCloseTag(CompoundParameter.COMPOUND_PARAMETER);
            writer.writeText("");
        }

        //++++++++++++++++ Substitution Model ++++++++++++++++++
        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeSubstitutionModel(model, writer);
            writer.writeText("");
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);

        //++++++++++++++++ Site Model ++++++++++++++++++
        // mu parameters are only needed when codon-partitioned models are in use
        boolean writeMuParameters = options.substitutionModelOptions.hasCodon(); //options.getTotalActivePartitionSubstitutionModelCount() > 1;

        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeSiteModel(model, writeMuParameters, writer);
            writer.writeText("");
        }

        if (writeMuParameters) {
            // write allMus for codon model
            // allMus is global
            writer.writeOpenTag(CompoundParameter.COMPOUND_PARAMETER,
                    new Attribute[]{new Attribute.Default<String>(XMLParser.ID, "allMus")});
            for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
                substitutionModelGenerator.writeMuParameterRefs(model, writer);
            }
            writer.writeCloseTag(CompoundParameter.COMPOUND_PARAMETER);
            writer.writeText("");
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);

        //++++++++++++++++ Tree Likelihood ++++++++++++++++++
        // for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
        // if ( options.isSpeciesAnalysis() ) { // species
        // treeLikelihoodGenerator.setModelPrefix(model.getName() + ".");
        // } else {
        // treeLikelihoodGenerator.setModelPrefix("");
        // //TODO: need merge genePrifx and prefix
        //// for (PartitionData partition : options.dataPartitions) { // Each PD has one TreeLikelihood
        // treeLikelihoodGenerator.writeTreeLikelihood(model, writer);
        // writer.writeText("");
        for (PartitionData partition : options.dataPartitions) { // Each PD has one TreeLikelihood
            treeLikelihoodGenerator.writeTreeLikelihood(partition, writer);
            // for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            // treeLikelihoodGenerator.writeTreeLikelihood(model, writer);
            writer.writeText("");
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);

        //++++++++++++++++ Traits ++++++++++++++++++
        // traits tag
        // NOTE(review): "selecetedTraits"/"traiType" are misspelt names defined elsewhere — verify before renaming.
        if (options.selecetedTraits.size() > 0) {
            for (String trait : options.selecetedTraits) {
                TraitGuesser.TraitType traiType = options.traitTypes.get(trait);
                writeTraits(writer, trait, traiType.toString(), options.taxonList);
            }
            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);
        }

        if (taxonSets != null && taxonSets.size() > 0) {
            //TODO: need to suit for multi-gene-tree
            writeTMRCAStatistics(writer);
        }

        //++++++++++++++++ Operators ++++++++++++++++++
        List<Operator> operators = options.selectOperators();
        operatorsGenerator.writeOperatorSchedule(operators, writer);
        writer.writeText("");

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);

        //++++++++++++++++ MCMC ++++++++++++++++++
        // XMLWriter writer, List<PartitionSubstitutionModel> models,
        writeMCMC(writer);
        writer.writeText("");

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);

        writeTimerReport(writer);
        writer.writeText("");

        if (options.performTraceAnalysis) {
            writeTraceAnalysis(writer);
        }

        if (options.generateCSV) {
            for
(PartitionTreePrior prior : options.getPartitionTreePriors()) {
                treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
            }
        }

        writer.writeCloseTag("beast");
        writer.flush();
    }

    /**
     * Generate a taxa block from these beast options
     *
     * @param writer    the writer
     * @param taxonList the taxon list to write
     */
    private void writeTaxa(XMLWriter writer, TaxonList taxonList) {
        // -1 (single taxa), 0 (1st gene of multi-taxa)
        writer.writeComment("The list of taxa analyse (can also include dates/ages).");
        writer.writeComment("ntax=" + taxonList.getTaxonCount());
        writer.writeOpenTag(TaxaParser.TAXA, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, TaxaParser.TAXA)});

        boolean firstDate = true;
        for (int i = 0; i < taxonList.getTaxonCount(); i++) {
            Taxon taxon = taxonList.getTaxon(i);

            boolean hasDate = false;
            if (options.clockModelOptions.isTipCalibrated()) {
                hasDate = TaxonList.Utils.hasAttribute(taxonList, i, dr.evolution.util.Date.DATE);
            }
            // self-closing <taxon/> when there is no nested <date> element
            writer.writeTag(TaxonParser.TAXON, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, taxon.getId())}, !hasDate);

            if (hasDate) {
                dr.evolution.util.Date date = (dr.evolution.util.Date) taxon.getAttribute(dr.evolution.util.Date.DATE);

                // the first dated taxon fixes the units; later mismatches are only warned about
                if (firstDate) {
                    options.units = date.getUnits();
                    firstDate = false;
                } else {
                    if (options.units != date.getUnits()) {
                        System.err.println("Error: Units in dates do not match.");
                    }
                }

                Attribute[] attributes = {
                        new Attribute.Default<Double>(ParameterParser.VALUE, date.getTimeValue()),
                        new Attribute.Default<String>("direction", date.isBackwards() ? "backwards" : "forwards"),
                        new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units))
                        /*, new Attribute.Default("origin", date.getOrigin()+"")*/
                };

                writer.writeTag(dr.evolution.util.Date.DATE, attributes, true);
                writer.writeCloseTag(TaxonParser.TAXON);
            }

            generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TAXON, taxon, writer);
        }

        writer.writeCloseTag(TaxaParser.TAXA);
    }

    /**
     * Generate additional taxon sets
     *
     * @param writer    the writer
     * @param taxonSets a list of taxa to write
     */
    private void writeTaxonSets(XMLWriter writer, List<Taxa> taxonSets) {
        writer.writeText("");
        for (Taxa taxa : taxonSets) {
            writer.writeOpenTag(
                    TaxaParser.TAXA,
                    new Attribute[]{
                            new Attribute.Default<String>(XMLParser.ID, taxa.getId())
                    }
            );
            for (int j = 0; j < taxa.getTaxonCount(); j++) {
                writer.writeIDref(TaxonParser.TAXON, taxa.getTaxon(j).getId());
            }
            writer.writeCloseTag(TaxaParser.TAXA);
        }
    }

    /**
     * Determine and return the datatype description for these beast options
     * note that the datatype in XML may differ from the actual datatype
     *
     * @param alignment the alignment to get data type description of
     * @return description
     */
    private String getAlignmentDataTypeDescription(Alignment alignment) {
        String description;

        switch (alignment.getDataType().getType()) {
            case DataType.TWO_STATES:
            case DataType.COVARION:
                // TODO make this work
                // throw new RuntimeException("TO DO!");
                //switch (partition.getPartitionSubstitutionModel().binarySubstitutionModel) {
                // case ModelOptions.BIN_COVARION:
                // description = TwoStateCovarion.DESCRIPTION;
                // break;
                // default:
                description = alignment.getDataType().getDescription();
                break;
            default:
                description = alignment.getDataType().getDescription();
        }

        return description;
    }

    // Writes a per-gene <taxa> list (id "<gene>.taxa") referencing the taxa
    // present in the given data partition, for multispecies coalescent runs.
    public void writeDifferentTaxaForMultiGene(PartitionData dataPartition, XMLWriter writer) {
        String data = dataPartition.getName();
        Alignment alignment = dataPartition.getAlignment();

        writer.writeComment("gene name = " + data + ", ntax= " + alignment.getTaxonCount());
        writer.writeOpenTag(TaxaParser.TAXA, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, data + "." + TaxaParser.TAXA)});

        for (int i = 0; i < alignment.getTaxonCount(); i++) {
            final Taxon taxon = alignment.getTaxon(i);
            writer.writeIDref(TaxonParser.TAXON, taxon.getId());
        }

        writer.writeCloseTag(TaxaParser.TAXA);
    }

    /**
     * Generate an alignment block from these beast options
     *
     * @param alignment the alignment to write
     * @param writer    the writer
     */
    public void writeAlignment(Alignment alignment, XMLWriter writer) {

        writer.writeText("");
        writer.writeComment("The sequence alignment (each sequence refers to a taxon above).");
        writer.writeComment("ntax=" + alignment.getTaxonCount() + " nchar=" + alignment.getSiteCount());
        if (options.samplePriorOnly) {
            writer.writeComment("Null sequences generated in order to sample from the prior only.");
        }

        writer.writeOpenTag(
                AlignmentParser.ALIGNMENT,
                new Attribute[]{
                        new Attribute.Default<String>(XMLParser.ID, alignment.getId()),
                        new Attribute.Default<String>("dataType", getAlignmentDataTypeDescription(alignment))
                }
        );

        for (int i = 0; i < alignment.getTaxonCount(); i++) {
            Taxon taxon = alignment.getTaxon(i);

            writer.writeOpenTag("sequence");
            writer.writeIDref(TaxonParser.TAXON, taxon.getId());
            if (!options.samplePriorOnly) {
                writer.writeText(alignment.getAlignedSequenceString(i));
            } else {
                // write an 'N' placeholder so the prior can be sampled without data
                writer.writeText("N");
            }
            writer.writeCloseTag("sequence");
        }
        writer.writeCloseTag(AlignmentParser.ALIGNMENT);
    }

    /**
     * Generate traits block regarding specific trait name (currently only &lt;species&gt;) from options
     *
     * @param writer    the writer
     * @param trait     the trait name (used as the element tag and id)
     * @param traitType the trait type (currently unused in the emitted XML)
     * @param taxonList the taxa the trait applies to
     */
    private void writeTraits(XMLWriter writer, String trait, String traitType, TaxonList taxonList) {

        writer.writeText("");
        if (options.starBEASTOptions.isSpeciesAnalysis()) { // species
            writer.writeComment("Species definition: binds taxa, species and gene trees");
        }
        writer.writeComment("trait = " + trait + " trait_type = " + traitType);

        writer.writeOpenTag(trait, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, trait)});
        //new Attribute.Default<String>("traitType", traitType)});

        // write sub-tags for species
        if (options.starBEASTOptions.isSpeciesAnalysis()) { // species
            starEASTGeneratorGenerator.writeMultiSpecies(taxonList, writer);
        } // end write sub-tags for species

        writer.writeCloseTag(trait);

        if (options.starBEASTOptions.isSpeciesAnalysis()) { // species
            starEASTGeneratorGenerator.writeSTARBEAST(writer);
        }
    }

    /**
     * Writes the pattern lists
     *
     * @param partition the partition data to write the pattern lists for
     * @param writer    the writer
     */
    public void writePatternList(PartitionData partition, XMLWriter writer) {
        writer.writeText("");

        PartitionSubstitutionModel model = partition.getPartitionSubstitutionModel();

        String codonHeteroPattern = model.getCodonHeteroPattern();
        int partitionCount = model.getCodonPartitionCount();

        // codon-partitioned nucleotide data gets one merged pattern list per codon class
        if (model.getDataType() == Nucleotides.INSTANCE && codonHeteroPattern != null && partitionCount > 1) {

            if (codonHeteroPattern.equals("112")) {
                writer.writeComment("The unique patterns for codon positions 1 & 2");
                writer.writeOpenTag(MergePatternsParser.MERGE_PATTERNS,
                        new Attribute[]{
                                new Attribute.Default<String>(XMLParser.ID, model.getPrefix(1) + partition.getName() + "." + SitePatternsParser.PATTERNS),
                        }
                );
                // for (PartitionData partition : options.dataPartitions) {
                // if (partition.getPartitionSubstitutionModel() == model) {
                writePatternList(partition, 0, 3, writer);
                writePatternList(partition, 1, 3, writer);

                writer.writeCloseTag(MergePatternsParser.MERGE_PATTERNS);

                writer.writeComment("The unique patterns for codon positions 3");
                writer.writeOpenTag(MergePatternsParser.MERGE_PATTERNS,
                        new Attribute[]{
                                new Attribute.Default<String>(XMLParser.ID, model.getPrefix(2) + partition.getName() + "."
+ SitePatternsParser.PATTERNS),
                        }
                );
                // for (PartitionData partition : options.dataPartitions) {
                // if (partition.getPartitionSubstitutionModel() == model) {
                writePatternList(partition, 2, 3, writer);

                writer.writeCloseTag(MergePatternsParser.MERGE_PATTERNS);

            } else { // pattern is 123
                // write pattern lists for all three codon positions
                for (int i = 1; i <= 3; i++) {
                    writer.writeComment("The unique patterns for codon positions " + i);
                    writer.writeOpenTag(MergePatternsParser.MERGE_PATTERNS,
                            new Attribute[]{
                                    new Attribute.Default<String>(XMLParser.ID, model.getPrefix(i) + partition.getName() + "." + SitePatternsParser.PATTERNS),
                            }
                    );
                    // for (PartitionData partition : options.dataPartitions) {
                    // if (partition.getPartitionSubstitutionModel() == model) {
                    writePatternList(partition, i - 1, 3, writer);

                    writer.writeCloseTag(MergePatternsParser.MERGE_PATTERNS);
                }
            }
        } else {
            // no codon partitioning: a single site-pattern list for the whole partition
            //partitionCount = 1;
            // writer.writeComment("The unique patterns site patterns");
            // Alignment alignment = partition.getAlignment();
            // writer.writeOpenTag(SitePatternsParser.PATTERNS,
            // new Attribute[]{
            // new Attribute.Default<String>(XMLParser.ID, partition.getName() + "." + SitePatternsParser.PATTERNS),
            writePatternList(partition, 0, 1, writer);
            // writer.writeIDref(AlignmentParser.ALIGNMENT, alignment.getId());
            // writer.writeCloseTag(SitePatternsParser.PATTERNS);
            // for (PartitionData partition : options.dataPartitions) {
            // if (partition.getPartitionSubstitutionModel() == model) {
            // writePatternList(partition, 0, 1, writer);
        }
    }

    /**
     * Write a single pattern list
     *
     * @param partition the partition to write a pattern list for
     * @param offset    offset by
     * @param every     skip every
     * @param writer    the writer
     */
    private void writePatternList(PartitionData partition, int offset, int every, XMLWriter writer) {

        Alignment alignment = partition.getAlignment();
        int from = partition.getFromSite();
        int to = partition.getToSite();
        int partEvery = partition.getEvery();

        // a partition-level interval and a codon interval cannot be combined
        if (partEvery > 1 && every > 1) throw new IllegalArgumentException();

        if (from < 1) from = 1;
        every = Math.max(partEvery, every);

        from += offset;

        writer.writeComment("The unique patterns from " + from + " to " + (to > 0 ? to : "end") + ((every > 1) ? " every " + every : ""));

        // this object is created solely to calculate the number of patterns in the alignment
        SitePatterns patterns = new SitePatterns(alignment, from - 1, to - 1, every);
        writer.writeComment("npatterns=" + patterns.getPatternCount());

        List<Attribute> attributes = new ArrayList<Attribute>();

        // no codon, unique patterns site patterns
        if (offset == 0 && every == 1)
            attributes.add(new Attribute.Default<String>(XMLParser.ID, partition.getName() + "."
+ SitePatternsParser.PATTERNS));

        attributes.add(new Attribute.Default<String>("from", "" + from));
        if (to >= 0) attributes.add(new Attribute.Default<String>("to", "" + to));

        if (every > 1) {
            attributes.add(new Attribute.Default<String>("every", "" + every));
        }

        // generate <patterns>
        writer.writeOpenTag(SitePatternsParser.PATTERNS, attributes);
        writer.writeIDref(AlignmentParser.ALIGNMENT, alignment.getId());
        writer.writeCloseTag(SitePatternsParser.PATTERNS);
    }

    /**
     * Generate tmrca statistics
     * (one per taxon set, plus a monophyly statistic for constrained sets)
     *
     * @param writer the writer
     */
    public void writeTMRCAStatistics(XMLWriter writer) {

        writer.writeText("");
        for (Taxa taxa : options.taxonSets) {
            writer.writeOpenTag(
                    TMRCAStatistic.TMRCA_STATISTIC,
                    new Attribute[]{
                            new Attribute.Default<String>(XMLParser.ID, "tmrca(" + taxa.getId() + ")"),
                    }
            );
            writer.writeOpenTag(TMRCAStatistic.MRCA);
            writer.writeIDref(TaxaParser.TAXA, taxa.getId());
            writer.writeCloseTag(TMRCAStatistic.MRCA);
            writer.writeIDref(TreeModel.TREE_MODEL, TreeModel.TREE_MODEL);
            writer.writeCloseTag(TMRCAStatistic.TMRCA_STATISTIC);

            if (options.taxonSetsMono.get(taxa)) {
                writer.writeOpenTag(
                        MonophylyStatistic.MONOPHYLY_STATISTIC,
                        new Attribute[]{
                                new Attribute.Default<String>(XMLParser.ID, "monophyly(" + taxa.getId() + ")"),
                        });
                writer.writeOpenTag(MonophylyStatistic.MRCA);
                writer.writeIDref(TaxaParser.TAXA, taxa.getId());
                writer.writeCloseTag(MonophylyStatistic.MRCA);
                writer.writeIDref(TreeModel.TREE_MODEL, TreeModel.TREE_MODEL);
                writer.writeCloseTag(MonophylyStatistic.MONOPHYLY_STATISTIC);
            }
        }
    }

    /**
     * Write the timer report block.
     *
     * @param writer the writer
     */
    public void writeTimerReport(XMLWriter writer) {
        writer.writeOpenTag("report");
        writer.writeOpenTag("property", new Attribute.Default<String>("name", "timer"));
        writer.writeIDref("mcmc", "mcmc");
        writer.writeCloseTag("property");
        writer.writeCloseTag("report");
    }

    /**
     * Write the trace analysis block.
     *
     * @param writer the writer
     */
    public void writeTraceAnalysis(XMLWriter writer) {
        writer.writeTag(
                "traceAnalysis",
                new Attribute[]{
                        new Attribute.Default<String>("fileName", options.logFileName)
                },
                true
        );
    }

    /**
     * Write the MCMC block: the posterior (prior and likelihood), the
     * operator schedule reference, and the screen/file/tree loggers.
     *
     * @param writer the writer
     */
    public void writeMCMC(XMLWriter writer) {
        writer.writeComment("Define MCMC");
        writer.writeOpenTag(
                "mcmc",
                new Attribute[]{
                        new Attribute.Default<String>(XMLParser.ID, "mcmc"),
                        new Attribute.Default<Integer>("chainLength", options.chainLength),
                        new Attribute.Default<String>("autoOptimize", options.autoOptimize ? "true" : "false")
                });

        if (options.hasData()) {
            writer.writeOpenTag(CompoundLikelihood.POSTERIOR, new Attribute.Default<String>(XMLParser.ID, "posterior"));
        }

        // write prior block
        writer.writeOpenTag(CompoundLikelihood.PRIOR, new Attribute.Default<String>(XMLParser.ID, "prior"));

        if (options.starBEASTOptions.isSpeciesAnalysis()) { // species
            // coalescent prior
            writer.writeIDref(TreePartitionCoalescent.SPECIES_COALESCENT, TraitGuesser.Traits.TRAIT_SPECIES + "." + COALESCENT);
            // prior on population sizes
            // if (options.speciesTreePrior == TreePriorType.SPECIES_YULE) {
            writer.writeIDref(MixedDistributionLikelihood.DISTRIBUTION_LIKELIHOOD, SPOPS);
            // } else {
            // writer.writeIDref(SpeciesTreeBMPrior.STPRIOR, STP);
            // prior on species tree
            writer.writeIDref(SpeciationLikelihood.SPECIATION_LIKELIHOOD, SPECIATION_LIKE);
        }

        writeParameterPriors(writer);

        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            PartitionTreePrior prior = model.getPartitionTreePrior();
            treePriorGenerator.writePriorLikelihoodReference(prior, model, writer);
            writer.writeText("");
        }

        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeEBSPVariableDemographicReference(prior, writer);
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_MCMC_PRIOR, writer);

        writer.writeCloseTag(CompoundLikelihood.PRIOR);

        if (options.hasData()) {
            // write likelihood block
            writer.writeOpenTag(CompoundLikelihood.LIKELIHOOD, new Attribute.Default<String>(XMLParser.ID, "likelihood"));

            treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);

            generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_MCMC_LIKELIHOOD, writer);

            writer.writeCloseTag(CompoundLikelihood.LIKELIHOOD);

            writer.writeCloseTag(CompoundLikelihood.POSTERIOR);
        }

        writer.writeIDref(SimpleOperatorSchedule.OPERATOR_SCHEDULE, "operators");

        // write log to screen
        writeLogToScreen(writer);

        // write log to file
        writeLogToFile(writer);

        // write tree log to file
        // NOTE(review): writeTreeLogToFile is defined later in this file (outside this chunk).
        writeTreeLogToFile(writer);

        writer.writeCloseTag("mcmc");
    }

    /**
     * write log to screen
     * (screenLog: posterior/prior/likelihood columns plus model summaries)
     *
     * @param writer the writer
     */
    private void writeLogToScreen(XMLWriter writer) {
        writer.writeComment("write log to screen");

        writer.writeOpenTag(LoggerParser.LOG,
                new Attribute[]{
                        new Attribute.Default<String>(XMLParser.ID, "screenLog"),
                        new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.echoEvery + "")
                });

        if (options.hasData()) {
            writer.writeOpenTag(Columns.COLUMN,
                    new Attribute[]{
                            new Attribute.Default<String>(Columns.LABEL, "Posterior"),
                            new Attribute.Default<String>(Columns.DECIMAL_PLACES, "4"),
                            new Attribute.Default<String>(Columns.WIDTH, "12")
                    }
            );
            writer.writeIDref(CompoundLikelihood.POSTERIOR, "posterior");
            writer.writeCloseTag(Columns.COLUMN);
        }

        writer.writeOpenTag(Columns.COLUMN,
                new Attribute[]{
                        new Attribute.Default<String>(Columns.LABEL, "Prior"),
                        new Attribute.Default<String>(Columns.DECIMAL_PLACES, "4"),
                        new Attribute.Default<String>(Columns.WIDTH, "12")
                }
        );
        writer.writeIDref(CompoundLikelihood.PRIOR, "prior");
        writer.writeCloseTag(Columns.COLUMN);

        if (options.hasData()) {
            writer.writeOpenTag(Columns.COLUMN,
                    new Attribute[]{
                            new Attribute.Default<String>(Columns.LABEL, "Likelihood"),
                            new Attribute.Default<String>(Columns.DECIMAL_PLACES, "4"),
                            new Attribute.Default<String>(Columns.WIDTH, "12")
                    }
            );
            writer.writeIDref(CompoundLikelihood.LIKELIHOOD, "likelihood");
            writer.writeCloseTag(Columns.COLUMN);
        }

        if (options.starBEASTOptions.isSpeciesAnalysis()) { // species
            writer.writeOpenTag(Columns.COLUMN,
                    new Attribute[]{
                            new Attribute.Default<String>(Columns.LABEL, "PopMean"),
                            new Attribute.Default<String>(Columns.DECIMAL_PLACES, "4"),
                            new Attribute.Default<String>(Columns.WIDTH, "12")
                    }
            );
            writer.writeIDref(ParameterParser.PARAMETER, TraitGuesser.Traits.TRAIT_SPECIES + "." + options.starBEASTOptions.POP_MEAN);
            writer.writeCloseTag(Columns.COLUMN);
        }

        writer.writeOpenTag(Columns.COLUMN,
                new Attribute[]{
                        new Attribute.Default<String>(Columns.LABEL, "Root Height"),
                        new Attribute.Default<String>(Columns.SIGNIFICANT_FIGURES, "6"),
                        new Attribute.Default<String>(Columns.WIDTH, "12")
                }
        );
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + TreeModel.TREE_MODEL + "." + TreeModelParser.ROOT_HEIGHT);
        }
        writer.writeCloseTag(Columns.COLUMN);

        writer.writeOpenTag(Columns.COLUMN,
                new Attribute[]{
                        new Attribute.Default<String>(Columns.LABEL, "Rate"),
                        new Attribute.Default<String>(Columns.SIGNIFICANT_FIGURES, "6"),
                        new Attribute.Default<String>(Columns.WIDTH, "12")
                }
        );
        if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN) {
            writer.writeIDref(ParameterParser.PARAMETER, "allClockRates");
            for (PartitionClockModel model : options.getPartitionClockModels()) {
                if (model.getClockType() == ClockType.UNCORRELATED_LOGNORMAL)
                    writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
            }
        } else {
            for (PartitionClockModel model : options.getPartitionClockModels()) {
                branchRatesModelGenerator.writeLog(model, writer);
            }
        }
        writer.writeCloseTag(Columns.COLUMN);

        for (PartitionClockModel model : options.getPartitionClockModels()) {
            branchRatesModelGenerator.writeLogStatistic(model, writer);
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_SCREEN_LOG, writer);

        writer.writeCloseTag(LoggerParser.LOG);

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SCREEN_LOG, writer);
    }

    /**
     * write log to file
     * (fileLog: falls back to "&lt;fileNameStem&gt;.log" if no name is set)
     *
     * @param writer the writer
     */
    private void writeLogToFile(XMLWriter writer) {
        writer.writeComment("write log to file");

        if (options.logFileName == null) {
            options.logFileName = options.fileNameStem + ".log";
        }
        writer.writeOpenTag(LoggerParser.LOG,
                new Attribute[]{
                        new Attribute.Default<String>(XMLParser.ID, "fileLog"),
                        new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.logEvery + ""),
                        new Attribute.Default<String>(LoggerParser.FILE_NAME, options.logFileName)
                });

        if (options.hasData()) {
            writer.writeIDref(CompoundLikelihood.POSTERIOR, "posterior");
        }
        writer.writeIDref(CompoundLikelihood.PRIOR, "prior");
        if (options.hasData()) {
            writer.writeIDref(CompoundLikelihood.LIKELIHOOD, "likelihood");
        }

        if (options.starBEASTOptions.isSpeciesAnalysis()) { // species
            // coalescent prior
writer.writeIDref(TreePartitionCoalescent.SPECIES_COALESCENT, TraitGuesser.Traits.TRAIT_SPECIES + "." + COALESCENT); // prior on population sizes // if (options.speciesTreePrior == TreePriorType.SPECIES_YULE) { writer.writeIDref(MixedDistributionLikelihood.DISTRIBUTION_LIKELIHOOD, SPOPS); // } else { // writer.writeIDref(SpeciesTreeBMPrior.STPRIOR, STP); // prior on species tree writer.writeIDref(SpeciationLikelihood.SPECIATION_LIKELIHOOD, SPECIATION_LIKE); writer.writeIDref(ParameterParser.PARAMETER, TraitGuesser.Traits.TRAIT_SPECIES + "." + options.starBEASTOptions.POP_MEAN); writer.writeIDref(ParameterParser.PARAMETER, SpeciesTreeModel.SPECIES_TREE + "." + SPLIT_POPS); if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_BIRTH_DEATH) { writer.writeIDref(ParameterParser.PARAMETER, TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.BIRTHDIFF_RATE_PARAM_NAME); writer.writeIDref(ParameterParser.PARAMETER, TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME); } else if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_YULE) { writer.writeIDref(ParameterParser.PARAMETER, TraitGuesser.Traits.TRAIT_SPECIES + "." + YuleModelParser.YULE + "." + YuleModelParser.BIRTH_RATE); } else { throw new IllegalArgumentException("Get wrong species tree prior using *BEAST : " + options.getPartitionTreePriors().get(0).getNodeHeightPrior().toString()); } //Species Tree: tmrcaStatistic writer.writeIDref(TMRCAStatistic.TMRCA_STATISTIC, SpeciesTreeModel.SPECIES_TREE + "." + TreeModelParser.ROOT_HEIGHT); } for (PartitionTreeModel model : options.getPartitionTreeModels()) { writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + TreeModel.TREE_MODEL + "." 
+ TreeModelParser.ROOT_HEIGHT); } for (Taxa taxa : options.taxonSets) { writer.writeIDref("tmrcaStatistic", "tmrca(" + taxa.getId() + ")"); } // if ( options.shareSameTreePrior ) { // Share Same Tree Prior // treePriorGenerator.setModelPrefix(""); // treePriorGenerator.writeParameterLog(options.activedSameTreePrior, writer); // } else { // no species for (PartitionTreePrior prior : options.getPartitionTreePriors()) { // treePriorGenerator.setModelPrefix(prior.getPrefix()); // priorName.treeModel treePriorGenerator.writeParameterLog(prior, writer); } for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) { substitutionModelGenerator.writeLog(writer, model); } if (options.substitutionModelOptions.hasCodon()) { writer.writeIDref(ParameterParser.PARAMETER, "allMus"); } if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN) { writer.writeIDref(ParameterParser.PARAMETER, "allClockRates"); for (PartitionClockModel model : options.getPartitionClockModels()) { if (model.getClockType() == ClockType.UNCORRELATED_LOGNORMAL) writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV); } } else { for (PartitionClockModel model : options.getPartitionClockModels()) { branchRatesModelGenerator.writeLog(model, writer); } } for (PartitionClockModel model : options.getPartitionClockModels()) { branchRatesModelGenerator.writeLogStatistic(model, writer); } generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_PARAMETERS, writer); if (options.hasData()) { treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer); } generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_LIKELIHOODS, writer); // coalescentLikelihood for (PartitionTreeModel model : options.getPartitionTreeModels()) { PartitionTreePrior prior = model.getPartitionTreePrior(); treePriorGenerator.writePriorLikelihoodReferenceLog(prior, model, writer); writer.writeText(""); } for (PartitionTreePrior prior : 
options.getPartitionTreePriors()) { if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) writer.writeIDref(CoalescentLikelihood.COALESCENT_LIKELIHOOD, prior.getPrefix() + COALESCENT); // only 1 coalescent } writer.writeCloseTag(LoggerParser.LOG); generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_FILE_LOG, writer); } /** * write tree log to file * * @param writer */ private void writeTreeLogToFile(XMLWriter writer) { writer.writeComment("write tree log to file"); if (options.starBEASTOptions.isSpeciesAnalysis()) { // species // species tree log writer.writeOpenTag(TreeLoggerParser.LOG_TREE, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, TraitGuesser.Traits.TRAIT_SPECIES + "." + TREE_FILE_LOG), // speciesTreeFileLog new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + ""), new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"), new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, options.fileNameStem + "." + options.starBEASTOptions.SPECIES_TREE_FILE_NAME), new Attribute.Default<String>(TreeLoggerParser.SORT_TRANSLATION_TABLE, "true") }); writer.writeIDref(SpeciesTreeModel.SPECIES_TREE, SP_TREE); if (options.hasData()) { // we have data... writer.writeIDref("posterior", "posterior"); } writer.writeCloseTag(TreeLoggerParser.LOG_TREE); } // gene tree log //TODO make code consistent to MCMCPanel for (PartitionTreeModel tree : options.getPartitionTreeModels()) { String treeFileName; if (options.substTreeLog) { treeFileName = options.fileNameStem + "." + tree.getPrefix() + "(time)." + GMRFFixedGridImportanceSampler.TREE_FILE_NAME; } else { treeFileName = options.fileNameStem + "." 
+ tree.getPrefix() + GMRFFixedGridImportanceSampler.TREE_FILE_NAME; // stem.partitionName.tree } List<Attribute> attributes = new ArrayList<Attribute>(); attributes.add(new Attribute.Default<String>(XMLParser.ID, tree.getPrefix() + TREE_FILE_LOG)); // partionName.treeFileLog attributes.add(new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + "")); attributes.add(new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true")); attributes.add(new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, treeFileName)); attributes.add(new Attribute.Default<String>(TreeLoggerParser.SORT_TRANSLATION_TABLE, "true")); if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.RElATIVE_TO) { double aveFixedRate = options.clockModelOptions.getSelectedRate(options.getPartitionClockModels()); attributes.add(new Attribute.Default<String>(TreeLoggerParser.NORMALISE_MEAN_RATE_TO, Double.toString(aveFixedRate))); } // generate <logTree> writer.writeOpenTag(TreeLoggerParser.LOG_TREE, attributes); // writer.writeOpenTag(TreeLoggerParser.LOG_TREE, // new Attribute[]{ // new Attribute.Default<String>(XMLParser.ID, tree.getPrefix() + TREE_FILE_LOG), // partionName.treeFileLog // new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + ""), // new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"), // new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, treeFileName), // new Attribute.Default<String>(TreeLoggerParser.SORT_TRANSLATION_TABLE, "true") writer.writeIDref(TreeModel.TREE_MODEL, tree.getPrefix() + TreeModel.TREE_MODEL); for (PartitionClockModel model : options.getPartitionClockModels(tree.getAllPartitionData())) { switch (model.getClockType()) { case STRICT_CLOCK: writer.writeIDref(StrictClockBranchRates.STRICT_CLOCK_BRANCH_RATES, model.getPrefix() + tree.getPrefix() + BranchRateModel.BRANCH_RATES); break; case UNCORRELATED_EXPONENTIAL: case UNCORRELATED_LOGNORMAL: case RANDOM_LOCAL_CLOCK: 
writer.writeIDref(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, model.getPrefix() + tree.getPrefix() + BranchRateModel.BRANCH_RATES); break; case AUTOCORRELATED_LOGNORMAL: writer.writeIDref(ACLikelihood.AC_LIKELIHOOD, model.getPrefix() + tree.getPrefix() + BranchRateModel.BRANCH_RATES); break; default: throw new IllegalArgumentException("Unknown clock model"); } } if (options.hasData()) { // we have data... writer.writeIDref("posterior", "posterior"); } writer.writeCloseTag(TreeLoggerParser.LOG_TREE); } // end For loop generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TREES_LOG, writer); if (options.substTreeLog) { if (options.starBEASTOptions.isSpeciesAnalysis()) { // species //TODO: species sub tree } // gene tree for (PartitionTreeModel tree : options.getPartitionTreeModels()) { // write tree log to file writer.writeOpenTag(TreeLoggerParser.LOG_TREE, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, tree.getPrefix() + SUB_TREE_FILE_LOG), new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + ""), new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"), new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, options.fileNameStem + "." + tree.getPrefix() + "(subst)." 
+ GMRFFixedGridImportanceSampler.TREE_FILE_NAME), new Attribute.Default<String>(TreeLoggerParser.BRANCH_LENGTHS, TreeLoggerParser.SUBSTITUTIONS) }); writer.writeIDref(TreeModel.TREE_MODEL, tree.getPrefix() + TreeModel.TREE_MODEL); for (PartitionClockModel model : options.getPartitionClockModels(tree.getAllPartitionData())) { switch (model.getClockType()) { case STRICT_CLOCK: writer.writeIDref(StrictClockBranchRates.STRICT_CLOCK_BRANCH_RATES, model.getPrefix() + tree.getPrefix() + BranchRateModel.BRANCH_RATES); break; case UNCORRELATED_EXPONENTIAL: case UNCORRELATED_LOGNORMAL: case RANDOM_LOCAL_CLOCK: writer.writeIDref(DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES, model.getPrefix() + tree.getPrefix() + BranchRateModel.BRANCH_RATES); break; case AUTOCORRELATED_LOGNORMAL: writer.writeIDref(ACLikelihood.AC_LIKELIHOOD, model.getPrefix() + tree.getPrefix() + BranchRateModel.BRANCH_RATES); break; default: throw new IllegalArgumentException("Unknown clock model"); } } writer.writeCloseTag(TreeLoggerParser.LOG_TREE); } } generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREES_LOG, writer); } /** * Write the priors for each parameter * * @param writer the writer */ private void writeParameterPriors(XMLWriter writer) { boolean first = true; for (Map.Entry<Taxa, Boolean> taxaBooleanEntry : options.taxonSetsMono.entrySet()) { if (taxaBooleanEntry.getValue()) { if (first) { writer.writeOpenTag(BooleanLikelihood.BOOLEAN_LIKELIHOOD); first = false; } final String taxaRef = "monophyly(" + taxaBooleanEntry.getKey().getId() + ")"; writer.writeIDref(MonophylyStatistic.MONOPHYLY_STATISTIC, taxaRef); } } if (!first) { writer.writeCloseTag(BooleanLikelihood.BOOLEAN_LIKELIHOOD); } ArrayList<Parameter> parameters = options.selectParameters(); for (Parameter parameter : parameters) { if (parameter.priorType != PriorType.NONE) { if (parameter.priorType != PriorType.UNIFORM_PRIOR || parameter.isNodeHeight) { writeParameterPrior(parameter, writer); } } } } /** * Write the 
priors for each parameter * * @param parameter the parameter * @param writer the writer */ private void writeParameterPrior(dr.app.beauti.options.Parameter parameter, XMLWriter writer) { switch (parameter.priorType) { case UNIFORM_PRIOR: writer.writeOpenTag(PriorParsers.UNIFORM_PRIOR, new Attribute[]{ new Attribute.Default<String>(PriorParsers.LOWER, "" + parameter.uniformLower), new Attribute.Default<String>(PriorParsers.UPPER, "" + parameter.uniformUpper) }); writeParameterIdref(writer, parameter); writer.writeCloseTag(PriorParsers.UNIFORM_PRIOR); break; case EXPONENTIAL_PRIOR: writer.writeOpenTag(PriorParsers.EXPONENTIAL_PRIOR, new Attribute[]{ new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.exponentialMean), new Attribute.Default<String>(PriorParsers.OFFSET, "" + parameter.exponentialOffset) }); writeParameterIdref(writer, parameter); writer.writeCloseTag(PriorParsers.EXPONENTIAL_PRIOR); break; case NORMAL_PRIOR: writer.writeOpenTag(PriorParsers.NORMAL_PRIOR, new Attribute[]{ new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.normalMean), new Attribute.Default<String>(PriorParsers.STDEV, "" + parameter.normalStdev) }); writeParameterIdref(writer, parameter); writer.writeCloseTag(PriorParsers.NORMAL_PRIOR); break; case LOGNORMAL_PRIOR: writer.writeOpenTag(PriorParsers.LOG_NORMAL_PRIOR, new Attribute[]{ new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.logNormalMean), new Attribute.Default<String>(PriorParsers.STDEV, "" + parameter.logNormalStdev), new Attribute.Default<String>(PriorParsers.OFFSET, "" + parameter.logNormalOffset), // this is to be implemented... 
new Attribute.Default<String>(PriorParsers.MEAN_IN_REAL_SPACE, "false") }); writeParameterIdref(writer, parameter); writer.writeCloseTag(PriorParsers.LOG_NORMAL_PRIOR); break; case GAMMA_PRIOR: writer.writeOpenTag(PriorParsers.GAMMA_PRIOR, new Attribute[]{ new Attribute.Default<String>(PriorParsers.SHAPE, "" + parameter.gammaAlpha), new Attribute.Default<String>(PriorParsers.SCALE, "" + parameter.gammaBeta), new Attribute.Default<String>(PriorParsers.OFFSET, "" + parameter.gammaOffset) }); writeParameterIdref(writer, parameter); writer.writeCloseTag(PriorParsers.GAMMA_PRIOR); break; case JEFFREYS_PRIOR: writer.writeOpenTag(OneOnXPrior.ONE_ONE_X_PRIOR); writeParameterIdref(writer, parameter); writer.writeCloseTag(OneOnXPrior.ONE_ONE_X_PRIOR); break; case POISSON_PRIOR: writer.writeOpenTag(PriorParsers.POISSON_PRIOR, new Attribute[]{ new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.poissonMean), new Attribute.Default<String>(PriorParsers.OFFSET, "" + parameter.poissonOffset) }); writeParameterIdref(writer, parameter); writer.writeCloseTag(PriorParsers.POISSON_PRIOR); break; case TRUNC_NORMAL_PRIOR: writer.writeOpenTag(PriorParsers.UNIFORM_PRIOR, new Attribute[]{ new Attribute.Default<String>(PriorParsers.LOWER, "" + parameter.uniformLower), new Attribute.Default<String>(PriorParsers.UPPER, "" + parameter.uniformUpper) }); writeParameterIdref(writer, parameter); writer.writeCloseTag(PriorParsers.UNIFORM_PRIOR); writer.writeOpenTag(PriorParsers.NORMAL_PRIOR, new Attribute[]{ new Attribute.Default<String>(PriorParsers.MEAN, "" + parameter.normalMean), new Attribute.Default<String>(PriorParsers.STDEV, "" + parameter.normalStdev) }); writeParameterIdref(writer, parameter); writer.writeCloseTag(PriorParsers.NORMAL_PRIOR); break; default: throw new IllegalArgumentException("Unknown priorType"); } } private void writeParameterIdref(XMLWriter writer, dr.app.beauti.options.Parameter parameter) { if (parameter.isStatistic) { writer.writeIDref("statistic", 
parameter.getName()); } else { writer.writeIDref(ParameterParser.PARAMETER, parameter.getName()); } } }
package dr.evomodel.speciation;

import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evolution.util.Taxon;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.*;
import dr.util.HeapSort;
import dr.xml.*;
import jebl.util.FixedBitSet;

import java.util.*;

/**
 * Binds a set of species definitions to a set of gene trees for multispecies
 * coalescent (*BEAST-style) analyses: maps each taxon to its species, tracks
 * per-species and per-species-pair coalescent times across all gene trees, and
 * caches them behind dirty flags that model-change events are expected to set.
 */
public class SpeciesBindings extends AbstractModel {
    public static final String SPECIES = "species";
    public static final String SP = "sp";
    public static final String GENE_TREES = "geneTrees";
    public static final String GTREE = "gtree";

    // all gene trees
    private final GeneTreeInfo[] geneTrees;

    // convenience
    private final Map<Taxon, Integer> taxon2Species = new HashMap<Taxon, Integer>();

    // Species definition
    final SPinfo[] species;

    // cached coalescent times per species pair; dirty_pp marks the cache stale
    private final double[][] popTimesPair;
    private boolean dirty_pp;

    // cached coalescent times per single species; dirty_sg marks the cache stale
    private final double[][] popTimesSingle;
    private boolean dirty_sg;

    private final boolean verbose = false;

    /**
     * Binds species definitions to gene trees.
     *
     * @param species    species definitions; each taxon may belong to exactly one species
     * @param geneTrees  one tree model per gene; each is registered as a sub-model
     * @param popFactors per-gene-tree population factors, parallel to geneTrees
     * @throws Error if a taxon is assigned to more than one species, or some
     *               gene tree has no tip for some species
     */
    private SpeciesBindings(SPinfo[] species, TreeModel[] geneTrees, double[] popFactors) {
        super(null);
        this.species = species;
        final int nsp = species.length;

        // build the taxon -> species-index map, rejecting duplicate assignments
        for (int ns = 0; ns < nsp; ++ns) {
            for (Taxon t : species[ns].taxa) {
                if (taxon2Species.containsKey(t)) {
                    // NOTE(review): message lacks a space before the taxon name
                    throw new Error("Multiple assignments for taxon" + t);
                }
                taxon2Species.put(t, ns);
            }
        }

        this.geneTrees = new GeneTreeInfo[geneTrees.length];

        for (int i = 0; i < geneTrees.length; i++) {
            final TreeModel t = geneTrees[i];
            // register each gene tree so this model hears its change events
            addModel(t);
            this.geneTrees[i] = new GeneTreeInfo(t, popFactors[i]);
        }

        // every species must be represented in every gene tree
        for (GeneTreeInfo gt : this.geneTrees) {
            for (int ns = 0; ns < nsp; ++ns) {
                if (gt.nLineages(ns) == 0) {
                    throw new Error("Every gene tree must contain at least one tip from each species");
                }
            }
        }

        // pre-size the single-species cache: one slot per coalescent event
        popTimesSingle = new double[nsp][];
        for (int ns = 0; ns < popTimesSingle.length; ++ns) {
            popTimesSingle[ns] = new double[allCoalPointsCount(ns)];
        }
        dirty_sg = true;

        // pre-size the pairwise cache: one row per unordered species pair,
        // one slot per gene tree (see allPairCoalPointsCount)
        popTimesPair = new double[(nsp * (nsp - 1)) / 2][];
        {
            final int nps = allPairCoalPointsCount();
            for (int ns = 0; ns < popTimesPair.length; ++ns) {
                popTimesPair[ns] = new double[nps];
            }
        }
        dirty_pp = true;

        addStatistic(new SpeciesLimits());
    }

    /** @return the number of species bound by this object */
    public int nSpecies() {
        return species.length;
    }

    /**
     * Per species coalecent times.
     * <p/>
     * Indexed by sp index, a list of coalescent times of taxa of this sp from all gene trees.
     *
     * @return Per species coalecent times
     */
    public double[][] getPopTimesSingle() {
        if (dirty_sg) {
            for (int ns = 0; ns < popTimesSingle.length; ++ns) {
                getAllCoalPoints(ns, popTimesSingle[ns]);
            }
            dirty_sg = false;
        }
        return popTimesSingle;
    }

    /**
     * Per species-pair coalescent times, indexed by a triangular pair index.
     *
     * NOTE(review): unlike getPopTimesSingle, this never clears dirty_pp after
     * recomputing, so the cache is rebuilt on every call — confirm whether that
     * is intentional before changing it.
     */
    public double[][] getPopTimesPair() {
        if (dirty_pp) {
            final int nsp = nSpecies();
            for (int ns1 = 0; ns1 < nsp - 1; ++ns1) {
                // z maps (ns1, ns2) to a row in the flattened upper triangle
                final int z = (ns1 * (2 * nsp - ns1 - 3)) / 2 - 1;
                for (int ns2 = ns1 + 1; ns2 < nsp; ++ns2) {
                    getAllPairCoalPoints(ns1, ns2, popTimesPair[z + ns2]);
                }
            }
        }
        return popTimesPair;
    }

    /**
     * Fills popTimes with, per gene tree, the first coalescence joining species
     * ns1 and ns2 (one entry per gene tree), then sorts ascending.
     */
    private void getAllPairCoalPoints(int ns1, int ns2, double[] popTimes) {
        for (int i = 0; i < geneTrees.length; i++) {
            for (CoalInfo ci : geneTrees[i].getCoalInfo()) {
                // a coalescence "joins" the pair when one side contains ns1
                // and the other side contains ns2
                if ((ci.sinfo[0].contains(ns1) && ci.sinfo[1].contains(ns2))
                        || (ci.sinfo[1].contains(ns1) && ci.sinfo[0].contains(ns2))) {
                    popTimes[i] = ci.ctime;
                    break;
                }
            }
        }
        HeapSort.sort(popTimes);
    }

    /**
     * Total number of within-species coalescent events for species spIndex,
     * summed over all gene trees (nLineages - 1 per tree that has any).
     */
    private int allCoalPointsCount(int spIndex) {
        int tot = 0;
        for (GeneTreeInfo t : geneTrees) {
            if (t.nLineages(spIndex) > 0) {
                tot += t.nLineages(spIndex) - 1;
            }
        }
        return tot;
    }

    // length of points must be right
    /**
     * Collects, across all gene trees, the times of coalescences entirely
     * within species spIndex into points (which must be pre-sized to
     * allCoalPointsCount(spIndex)), then sorts ascending.
     */
    void getAllCoalPoints(int spIndex, double[] points) {
        int k = 0;
        for (GeneTreeInfo t : geneTrees) {
            final int totCoalEvents = t.nLineages(spIndex) - 1;
            int savek = k;
            for (CoalInfo ci : t.getCoalInfo()) {
//                if( ci == null ) {
//                assert ci != null;
                if (ci.allHas(spIndex)) {
                    points[k] = ci.ctime;
                    ++k;
                }
            }
            // sanity check: the number collected from this tree must match
            // its expected coalescent-event count
            if (!(totCoalEvents >= 0 && savek + totCoalEvents == k) || (totCoalEvents < 0 && savek == k)) {
                System.err.println(totCoalEvents);
            }
            assert (totCoalEvents >= 0 && savek + totCoalEvents == k) || (totCoalEvents < 0 && savek == k);
        }
        assert k == points.length;
        HeapSort.sort(points);
    }

    /** One pairwise coalescent point is recorded per gene tree. */
    private int allPairCoalPointsCount() {
        return geneTrees.length;
    }
public double speciationUpperBound(FixedBitSet sub1, FixedBitSet sub2) { //Determined by the last time any pair of sp's in sub1 x sub2 have been seen
package edu.cmu.minorthird.text;

import org.apache.log4j.Logger;

import java.io.*;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Loads a TextBase from files or directories, either one document per file or
 * one document per line, optionally parsing embedded XML-style labels into a
 * MutableTextLabels object. Document/group/category IDs can be taken from the
 * directory name, the file name, or (doc-per-line only) the line itself.
 */
public class TextBaseLoader {
    //style/location for IDs, groupID, Category of doc
    public static final int NONE = 0; //could be given as a param at some point
    public static final int DIRECTORY_NAME = 1;
    public static final int FILE_NAME = 2;
    public static final int IN_FILE = 3;

    //document style
    public static final int DOC_PER_LINE = 0;
    public static final int DOC_PER_FILE = 1;
//    public static final int DOC_PER_DIR = 2;  Not implemented

    /* Parameters for loading follow */

    /** One document per line in a file or One document per file */
    private int documentStyle = DOC_PER_FILE;

    /**
     * where is the string id for each document found?
     * if loading one doc per line then could be NONE or IN_FILE, others invalid
     *   NONE will generate a docId based on the line number
     *   IN_FILE will generate an id from the first word on the line
     * if loading one doc per file then must be FILE_NAME (assumed)
     */
    private int docIdSourceType = FILE_NAME;

    /**
     * where do we find a groupID
     * if loading one doc per line then could any setting
     * if loading one doc per file then can be any except IN_FILE
     */
    private int groupIdSourceType = NONE;

    /**
     * Category to label documents
     * NOT SUPPORTED if loading one doc per line
     * if loading one doc per file then can be any except IN_FILE
     */
    private int categoryIdSourceType = NONE;

    //tagging -- are labels tagged with xml style?
    private boolean labelsInFile = false;

    //recursion -- if loading from a directory should subdirectories be loaded too?
    private boolean recurseDirectories = false;

    //legacy only
    private boolean firstWordIsDocumentId = false; // docID = IN_FILE
    private boolean secondWordIsGroupId = false;   // groupID = IN_FILE

    //internal structure
    private static Logger log = Logger.getLogger(TextBaseLoader.class);

    private int closurePolicy = TextLabelsLoader.CLOSE_ALL_TYPES;

    // saves labels associated with last set of files loaded
    private MutableTextLabels labels;
    private TextBase textBase;

    // IDs for the document currently being loaded
    private String curDocID;
    private String curGrpID;
    private String curCatID;

    // matches open or close XML-ish tags: </?name [attrs]?>
    private Pattern markupPattern = Pattern.compile("</?([^ ><]+)( [^<>]+)?>");

    private ArrayList stack; //xml tag stack
    private List spanList;   // labeled spans collected from embedded tags

    /**
     * One document per line, documentID as first word, optional groupID second
     * ex:
     *   msg1 group1 blah blahblah
     *   msg2 group1 blah blahblah
     *   msg3 group2 blah blahblah
     */
    public static TextBase loadDocPerLine(File file, boolean hasGroupID) throws ParseException, IOException {
        TextBaseLoader loader = new TextBaseLoader(DOC_PER_LINE, IN_FILE);
        if (hasGroupID)
            loader.setGroupIdSourceType(IN_FILE);
        return loader.load(file);
    }

    /**
     * One document per file in a directory, labels are embedded in the data as xml tags
     * NB: Don't use this if the data isn't labeled - it will remove things that look like <just a note>
     * which could cause problems.
     *
     * Returns the TextLabels object, the textbase is embedded
     */
    public static MutableTextLabels loadDirOfTaggedFiles(File dir) throws ParseException, IOException {
        TextBaseLoader loader = new TextBaseLoader(DOC_PER_FILE, FILE_NAME, true);
        loader.load(dir);
        return loader.getLabels();
    }

    public int getDocIdSourceType() {
        return docIdSourceType;
    }

    /** Sets where doc IDs come from; also maintains the legacy firstWordIsDocumentId flag. */
    public void setDocIdSourceType(int docIdSourceType) {
        this.docIdSourceType = docIdSourceType;
        if (docIdSourceType == IN_FILE)
            this.firstWordIsDocumentId = true;
        else
            this.firstWordIsDocumentId = false;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    public int getGroupIdSourceType() {
        return groupIdSourceType;
    }

    /** Sets where group IDs come from; also maintains the legacy secondWordIsGroupId flag. */
    public void setGroupIdSourceType(int groupIdSourceType) {
        this.groupIdSourceType = groupIdSourceType;
        if (groupIdSourceType == IN_FILE)
            this.secondWordIsGroupId = true;
        else
            this.secondWordIsGroupId = false;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    public Pattern getMarkupPattern() {
        return markupPattern;
    }

    /** Overrides the pattern used to recognise embedded label tags. */
    public void setMarkupPattern(Pattern markupPattern) {
        this.markupPattern = markupPattern;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    public boolean isRecurseDirectories() {
        return recurseDirectories;
    }

    public void setRecurseDirectories(boolean recurseDirectories) {
        this.recurseDirectories = recurseDirectories;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    public int getDocumentStyle() {
        return documentStyle;
    }

    /** DOC_PER_FILE means that docID will be FILE_NAMEs */
    public void setDocumentStyle(int documentStyle) {
        this.documentStyle = documentStyle;
        if (documentStyle == DOC_PER_FILE)
            docIdSourceType = FILE_NAME;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    /** loading labels from data file?
     */
    public boolean isLabelsInFile() {
        return labelsInFile;
    }

    /** set whether to load from tags in the file */
    public void setLabelsInFile(boolean labelsInFile) {
        this.labelsInFile = labelsInFile;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    /** Set the closure policy.
     * @param policy one of TextLabelsLoader.CLOSE_ALL_TYPES,
     * TextLabelsLoader.CLOSE_TYPES_IN_LABELED_DOCS, TextLabelsLoader.DONT_CLOSE_TYPES
     */
    public void setClosurePolicy(int policy) {
        this.closurePolicy = policy;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    /** get labeling generated by tags in data file */
    public MutableTextLabels getLabels() {
        return labels;
    }

    /** set the TextLabels object to add tags into */
    public void setLabels(MutableTextLabels labels) {
        this.labels = labels;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    /** get the current text base of documents */
    public TextBase getTextBase() {
        return textBase;
    }

    /**
     * set the TextBase to load new data into. You can add to an existing set of documents
     * or use setTextBase(null) to generate a new one with the next call to .load
     * @param textBase TextBase to add documents to, null to generate a new one
     */
    public void setTextBase(TextBase textBase) {
        this.textBase = textBase;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    /** For one doc per line, indicates if first word is the Id. */
    public boolean getFirstWordIsDocumentId() {
        return firstWordIsDocumentId;
    }

    /** For one doc per line, indicates if first word is the Id. */
    public void setFirstWordIsDocumentId(boolean flag) {
        firstWordIsDocumentId = flag;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    /** For one doc per line, indicates if second word is the group Id.
     */
    public boolean getSecondWordIsGroupId() {
        return secondWordIsGroupId;
    }

    /** For one doc per line, indicates if second word is the group Id and also
     * sets first word to be document id */
    public void setSecondWordIsGroupId(boolean flag) {
        firstWordIsDocumentId = true;
        secondWordIsGroupId = flag;
        if (!checkParameters())
            throw new IllegalStateException("Illegal parameter combination for TextBaseLoader");
    }

    public TextBaseLoader() {}

    public TextBaseLoader(int documentStyle) {
        this.documentStyle = documentStyle;
    }

    public TextBaseLoader(int documentStyle, int docID) {
        this.documentStyle = documentStyle;
        this.docIdSourceType = docID;
    }

    public TextBaseLoader(int documentStyle, int docID, boolean labelsInFile) {
        this.documentStyle = documentStyle;
        this.docIdSourceType = docID;
        this.labelsInFile = labelsInFile;
    }

    public TextBaseLoader(int documentStyle, int docID, int groupID, int categoryID) {
        this.categoryIdSourceType = categoryID;
        this.docIdSourceType = docID;
        this.documentStyle = documentStyle;
        this.groupIdSourceType = groupID;
    }

    public TextBaseLoader(int documentStyle, int docID, int groupID, int categoryID, boolean labelsInFile, boolean recurseDirectories) {
        this.documentStyle = documentStyle;
        this.docIdSourceType = docID;
        this.groupIdSourceType = groupID;
        this.categoryIdSourceType = categoryID;
        this.labelsInFile = labelsInFile;
        this.recurseDirectories = recurseDirectories;
    }

    /**
     * Load data from the given location according to configuration and whether location
     * is a directory or not
     *
     * Calling load a second time will load into the same text base (thus the second call returns
     * documents from both the first and second locations). Use setTextBase(null) to reset the text base.
     *
     * @param dataLocation File representation of location (single file or directory)
     * @return the loaded TextBase
     * @throws IOException - problem reading the file
     * @throws ParseException - problem with xml of internal tagging
     */
    public TextBase load(File dataLocation) throws IOException, ParseException {
        // lazily create the text base and labels on first use
        if (textBase == null)
            textBase = new BasicTextBase();
        if (labels == null)
            labels = new BasicTextLabels(textBase);
        clear();

        //check whether it's a dir or single dataLocation
        if (dataLocation.isDirectory())
            loadDirectory(dataLocation);
        else
            loadFile(dataLocation);

        return textBase;
    }

    /**
     * Checks the consistency of parameters
     * TODO: need a table of allowed values
     * @return true if the parameter combination is supported, else false
     */
    public boolean checkParameters() {
        switch (this.documentStyle) {
            case DOC_PER_LINE:
                if ( (docIdSourceType == NONE || docIdSourceType == IN_FILE) && (categoryIdSourceType == NONE) )
                    return true;
                break;
            case DOC_PER_FILE:
                if ( (docIdSourceType == NONE || docIdSourceType == FILE_NAME ) && (groupIdSourceType != IN_FILE) && (categoryIdSourceType != IN_FILE))
                    return true;
                break;
        }
        return false;
    }

    /**
     * Write the textTokenbase to a file.
     *
     * NB: ksteppe bug #
     */
    public void writeSerialized(TextBase base,File file) throws IOException {
        ObjectOutputStream out = new ObjectOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
        out.writeObject(base);
        out.flush();
        out.close();
    }

    /* not yet implementing
    public JComponent guiConfigure()
    {
        JPanel panel = new JPanel(new GridLayout(0, 2));
        String[] styles = {"Document per File", "Document per Line in file"};
        String[] sources = {"No Source", "Directory Name", "File Name", "In File"};

        //style first
        panel.add(new JLabel(""));
        JComboBox docStyleBox = new JComboBox(styles);
        docStyleBox.addActionListener();

        panel.add(new JLabel(""));
        JComboBox docSourceBox = new JComboBox(sources);
        docSourceBox.addActionListener();

        panel.add(new JLabel(""));
        JComboBox grpSourceBox = new JComboBox(sources);
        grpSourceBox.addActionListener();

        panel.add(new JLabel(""));
        JCheckBox labelsCheck = new JCheckBox();
        labelsCheck.addActionListener();

        panel.add(new JLabel(""));
        JCheckBox recurseCheck = new JCheckBox();
        recurseCheck.addActionListener();

        return panel;
    }
    */

    /**
     * Loads every regular file in the directory (skipping CVS bookkeeping
     * directories), recursing into subdirectories when configured to.
     */
    private void loadDirectory(File directory) throws IOException, ParseException {
        {
            //loop on files in directory or loop on directories?
            File[] files = directory.listFiles();
            if (files==null)
                throw new IllegalArgumentException("can't list directory "+directory.getName());

            // directory-name-based IDs apply to every file within
            if (categoryIdSourceType == DIRECTORY_NAME)
                curCatID = directory.getName();
            if (groupIdSourceType == DIRECTORY_NAME)
                curGrpID = directory.getName();

            for (int i=0; i<files.length; i++) {
                // skip CVS directories
                if ("CVS".equals(files[i].getName()))
                    continue;
                if (files[i].isDirectory() && isRecurseDirectories())
                    loadDirectory(files[i]);
                if (files[i].isFile())
                    loadFile(files[i]);
            }
        }
    }

    /**
     * Load the given single file according the current settings
     * @param file the file to read documents from
     * @throws IOException if the file cannot be read
     * @throws ParseException if embedded label markup is malformed
     */
    private void loadFile(File file) throws IOException, ParseException {
        log.debug("loadFile: " + file.getName());

        //build the correct reader
        BufferedReader in;
        // LineNumberReader is needed so NONE-style doc IDs can use line numbers
        if (documentStyle == DOC_PER_LINE)
            in = new LineNumberReader(new FileReader(file));
        else
            in = new BufferedReader(new FileReader(file));

        if (docIdSourceType == FILE_NAME) curDocID = file.getName(); //set the docid
        if (categoryIdSourceType == FILE_NAME) curCatID = file.getName(); //select the categoryID properly
        if (groupIdSourceType == FILE_NAME) curGrpID = file.getName(); //set the groupID

        spanList = new ArrayList(); //list of labeled spans if internally tagged

        //buffer of lines in file
        StringBuffer buf = new StringBuffer();

        //loop through the file
        while (in.ready()) //in.ready may cause problems on Macintosh
        {
            String line = in.readLine();
            if (docIdSourceType == IN_FILE)
                line = getIDsFromLine(line);
            if (this.isLabelsInFile())
                line = labelLine(line, buf, spanList); // appends to the buffer internally

            if (this.documentStyle == DOC_PER_LINE) {
                //get ids
                //make doc
                if (docIdSourceType == NONE)
                    curDocID = file.getName() + "@line:" + ((LineNumberReader)in).getLineNumber();
                addDocument(line);
                //we don't really care about the buffer, it's fluf
                buf = new StringBuffer();
            } else if (!this.isLabelsInFile()) //better append to the buffer if it wasn't done before
            {
                buf.append(line);
                buf.append("\n");
            }
        }

        if (this.documentStyle == DOC_PER_FILE)
            addDocument(buf.toString()); //still need to set ids and such

        in.close();
    }

    /**
     * Add this text to the textBase as a new document, including group id and categorization
     * @param docText String version of text
     */
    private void addDocument(String docText) {
        // docText = docText.trim(); //might or might not want this
        //Blank documents are dropped
        if (docText.length() == 0) {
            log.warn("Text for document " + curDocID + " is length zero or all white space, it will not be added to the text base.");
            return;
        }

        if (log.isDebugEnabled())
            log.debug("add document " + curDocID);
        textBase.loadDocument(curDocID, docText);

        if (curGrpID != null)
            textBase.setDocumentGroupId(curDocID, curGrpID);

        //label document as this category
        if (curCatID != null)
            labels.addToType(textBase.documentSpan(curDocID), curCatID);

        // convert the char-offset spans gathered by labelLine into token spans
        for (Iterator j=spanList.iterator(); j.hasNext(); ) {
            CharSpan charSpan = (CharSpan)j.next();
            // types.add( charSpan.type ); unused
            Span approxSpan = textBase.documentSpan(curDocID).charIndexSubSpan(charSpan.lo, charSpan.hi);
            if (log.isDebugEnabled()) {
                int hi = charSpan.hi;
                if (hi > docText.length()) hi = docText.length();
                log.debug("approximating "+charSpan.type+" span '" + docText.substring(charSpan.lo,hi)
                         +"' with token span '"+approxSpan);
            }
            labels.addToType( approxSpan, charSpan.type );
        }
        new TextLabelsLoader().closeLabels( labels, closurePolicy );
        spanList = new ArrayList();
    }

    /**
     * Takes a single line of text.
     * Uses the markupPattern field to remove labelings (must be xml styled).
* These labelling are added to the span list * * @param line - String of a single line to have it's labels parsed * @param spanList - List of span labelings * @return a String with the labelings removed * @throws ParseException improper xml format will cause a parse exception */ private String labelLine(String line, StringBuffer docBuffer, List spanList) throws ParseException { // stack of open tags if (stack == null) stack = new ArrayList(); int currentChar = 0; Matcher matcher = markupPattern.matcher(line); while (matcher.find()) { String tag = matcher.group(1); boolean isOpenTag = !matcher.group().startsWith("</"); if (log.isDebugEnabled()) { log.debug("matcher.group='"+matcher.group()+"'"); log.debug("found '"+tag+"' tag ,open="+isOpenTag+", at "+matcher.start()+" in:\n"+line); } //copy stuff up to tag into buffer docBuffer.append( line.substring(currentChar, matcher.start()) ); currentChar = matcher.end(); if (isOpenTag) { stack.add( new StackEntry(docBuffer.length(), tag) ); } else { // pop the corresponding open off the stack StackEntry entry = null; for (int j=stack.size()-1; j>=0; j entry = (StackEntry)stack.get(j); if (tag.equals(entry.markupTag)) { stack.remove(j); break; } } if (entry==null) throw new ParseException("close '"+tag+"' tag with no open", entry.index); if (!tag.equals(entry.markupTag)) throw new ParseException("close '"+tag+"' tag paired with open '" +entry.markupTag+"'", entry.index); if (log.isDebugEnabled()) { log.debug("adding a "+tag+" span from "+entry.index+" to "+docBuffer.length() +": '"+docBuffer.substring(entry.index)+"'"); } //spanList.add( new CharSpan(entry.index, docBuffer.length()-1, tag) ); spanList.add( new CharSpan(entry.index, docBuffer.length(), tag) ); } } // append stuff from end of last tag to end of line into the buffer docBuffer.append( line.substring(currentChar, line.length()) ); docBuffer.append( "\n" ); return docBuffer.toString(); } /** * parse id values out of the given line. 
* Return the rest of the line * @param line * @return */ private String getIDsFromLine(String line) { int spaceIndex = line.indexOf(' '); if (spaceIndex < 0) { curDocID = line; line = ""; } else { curDocID = line.substring(0, spaceIndex); int i = spaceIndex + 1; for (; i < line.length() && line.charAt(i) == ' '; i++) {} line = line.substring(i, line.length()); if (secondWordIsGroupId) { int spaceIndex2 = line.indexOf(' '); if (spaceIndex < 0) { curGrpID = line.substring(spaceIndex + 1, line.length()); line = ""; } else { curGrpID = line.substring(0, spaceIndex2); i = spaceIndex2 + 1; for (; i < line.length() && line.charAt(i) == ' '; i++) {} line = line.substring(i, line.length()); } } } return line; } /** * Clears the state of current ids. * Good to do before each document */ private void clear() { curCatID = null; curDocID = null; curGrpID = null; } private class StackEntry { public int index; public String markupTag; public StackEntry(int index,String markupTag) { this.index=index; this.markupTag=markupTag; } } private class CharSpan { public int lo,hi; String type; public CharSpan(int lo,int hi,String type) { this.lo=lo; this.hi=hi; this.type = type; } } /** * Load from either a file (one document per line) or a directory (one document per file) * Directory is assumed to be tagged files * Single file assumed not to be tagged * * @deprecated; to be removed at end of February */ public void loadFile(TextBase base,File file) throws IOException,FileNotFoundException { if (file.isDirectory()) loadTaggedFiles(base,file); else { doLoadLines(base,file); labels = new BasicTextLabels(base); } } /** Load files from a directory, stripping out any XML/SGML tags. 
* * @deprecated; to be removed at end of February * */ public void loadTaggedFiles(TextBase base,File dir) throws IOException,FileNotFoundException { labels = new BasicTextLabels(base); File[] files = dir.listFiles(); if (files==null) throw new IllegalArgumentException("can't list directory "+dir.getName()); for (int i=0; i<files.length; i++) { // skip CVS directories if ("CVS".equals(files[i].getName())) continue; loadTaggedFile(files[i], markupPattern, base); } } /** * @deprecated; to be removed at end of February */ public void loadTaggedFile(File file, Pattern markupPattern, TextBase base) throws IOException { if (labels == null) labels = new BasicTextLabels(base); if (markupPattern == null) markupPattern = Pattern.compile("</?([^ ><]+)( [^<>]+)?>"); // list of constructed spans List spanList = new ArrayList(); // file name used as ID String id = file.getName(); // holds a string representation of the file with xml tags removed StringBuffer buf = new StringBuffer(""); LineNumberReader in = new LineNumberReader(new FileReader(file)); String line; while ((line = in.readLine())!=null) { try { labelLine(line, buf, spanList); } catch (ParseException e) { IllegalStateException ex = new IllegalStateException("in " + id + " @" + in.getLineNumber() + ":" + e.getMessage()); ex.setStackTrace(e.getStackTrace()); throw ex; } } in.close(); // add the document to the textbase base.loadDocument(id, buf.toString() ); // add the markup to the labels Set types = new TreeSet(); for (Iterator j=spanList.iterator(); j.hasNext(); ) { CharSpan charSpan = (CharSpan)j.next(); types.add( charSpan.type ); Span approxSpan = base.documentSpan(id).charIndexSubSpan(charSpan.lo, charSpan.hi); log.debug("approximating "+charSpan.type+" span '" +buf.toString().substring(charSpan.lo,charSpan.hi) +"' with token span '"+approxSpan); labels.addToType( approxSpan, charSpan.type ); } new TextLabelsLoader().closeLabels( labels, closurePolicy ); } // loadLines code //* @deprecated; to be removed at end 
of February */ public void loadLines(TextBase base, File file) throws IOException, FileNotFoundException { doLoadLines(base,file); } private void doLoadLines(TextBase base, File file) throws IOException, FileNotFoundException { LineNumberReader in = new LineNumberReader(new FileReader(file)); String line; this.textBase = base; while ((line = in.readLine()) != null) { clear(); if (!firstWordIsDocumentId) { curDocID = file.getName() + "@line:" + in.getLineNumber(); // default } else line = getIDsFromLine(line); textBase.loadDocument(curDocID, line); if (curGrpID != null) { textBase.setDocumentGroupId(curDocID, curGrpID); } } in.close(); } /** * Read a serialized BasicTextBase from a file. * * will soon be deprecated; to be removed at end of February */ public TextBase readSerialized(File file) throws IOException { try { ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(new FileInputStream(file))); TextBase b = (TextBase)in.readObject(); in.close(); return b; } catch (ClassNotFoundException e) { throw new IllegalArgumentException("can't read BasicTextBase from "+file+": "+e); } } /** * Takes a base directory. Each file is a different doc to load. * @param base TextBase to load into * @param directory File representation of directory * * @deprecated; to be removed at end of February * */ public void loadDir(TextBase base, File directory) { if (directory.isDirectory()) { // String categoryLabel = directory.getName(); // log.debug("found directory for type: " + categoryLabel); //load everything in the directory try { File[] files = directory.listFiles(); for (int j = 0; j < files.length; j++) { // skip CVS directories if ("CVS".equals(files[j].getName())) continue; File file = files[j]; this.loadFileWithID(base, file, file.getName()); } } catch (IOException ioe) { log.error(ioe, ioe); } } else log.error("loadDir found a file instead of directory label: " + directory.getPath() + File.pathSeparator + directory.getName()); } /** * Takes a base directory. 
Each subdirectory is a label for the category * of the files in that directory. Each file is a different doc * @param base TextBase to load into * @param dir File representation of dir to use as the base * * @deprecated; to be removed at end of February * */ public void loadLabeledDir(TextBase base, File dir) { labels = new BasicTextLabels(base); //cycle through the directories //these should all be directories File[] dirs = dir.listFiles(); for (int i = 0; i < dirs.length; i++) { File directory = dirs[i]; if (directory.isDirectory()) { String categoryLabel = directory.getName(); log.debug("found directory for type: " + categoryLabel); //load everything in the directory try { File[] files = directory.listFiles(); for (int j = 0; j < files.length; j++) { File file = files[j]; this.loadFileWithID(base, file, file.getName()); //label the new span labels.addToType(base.documentSpan(file.getName()), categoryLabel); } } catch (IOException ioe) { log.error(ioe, ioe); } } else log.error("loadLabeledDir found a file instead of directory label: " + directory.getPath() + File.pathSeparator + directory.getName()); } } /** * the given file is treated as a single document * @param base TextBase to load into * @param file File to load from * @param id ID to be given to the document * * @deprecated; to be removed at end of February * */ public void loadFileWithID(TextBase base, File file, String id) throws IOException { log.debug("loadFileWithID: " + file); if (!file.isFile()) throw new IllegalArgumentException("loadFileWithID must be given a file, not a directory"); BufferedReader in = new BufferedReader(new FileReader(file)); String allLines = new String(); while (in.ready()) { allLines += in.readLine() + "\n"; } base.loadDocument(id, allLines); in.close(); } // test routine static public void main(String[] args) { if (args.length<2) throw new IllegalArgumentException("usage: TextBaseLoader [file|dir] output.seqbase"); try { TextBase b; // = new BasicTextBase(); TextBaseLoader 
loader = new TextBaseLoader(); File f = new File(args[0]); if (f.isDirectory()) { b = TextBaseLoader.loadDirOfTaggedFiles(f).getTextBase(); } else { b = TextBaseLoader.loadDocPerLine(f, false); } loader.writeSerialized(b, new File(args[1])); } catch (Exception e) { e.printStackTrace(); } } }
package edu.wustl.common.util.global;

import java.util.HashMap;

/**
 * This classes is specific to common files. And contains all variables used by classes from
 * common package.
 * @author gautam_shetty
 */
public class Constants
{
    // constants for passwordManager
    public static final String MINIMUM_PASSWORD_LENGTH = "minimumPasswordLength";

    // generic drop-down placeholder and its sentinel value
    public static final String SELECT_OPTION = "-- Select --";
    public static final int SELECT_OPTION_VALUE = -1;

    public static final String CDE_CONF_FILE = "CDEConfig.xml";
    public static final String ANY = "Any";
    // NOTE(review): "DELIMETER" is a misspelling of "delimiter", but the names and
    // values below are part of this class's public API -- do not rename/respell.
    public static final String DELIMETER = ",";
    public static final String TRUE = "true";
    public static final String FALSE = "false";
    public static final String TAB_DELIMETER = "\t";

    // Misc
    public static final String SEPARATOR = " : ";
    public static final String TIMESTAMP_PATTERN = "yyyy-MM-dd-HH24.mm.ss.SSS";
    // NOTE(review): raw, mutable HashMap exposed as a public "constant" -- callers
    // presumably populate it at runtime; confirm before making it immutable/typed.
    public static final HashMap STATIC_PROTECTION_GROUPS_FOR_OBJECT_TYPES = new HashMap();

    // Mandar: Used for Date Validations in Validator Class
    public static final String DATE_SEPARATOR = "-";
    public static final String DATE_SEPARATOR_SLASH = "/";
    public static final String MIN_YEAR = "1900";
    public static final String MAX_YEAR = "9999";

    //Activity Status values
    public static final String ACTIVITY_STATUS_ACTIVE = "Active";
    public static final String ADD = "add";

    // Builders for CSM protection-group / group names keyed by protocol id.
    // A null id yields the bare prefix (used as a name prefix elsewhere).
    public static final String getCollectionProtocolPGName(Long identifier)
    {
        if (identifier == null)
        {
            return "COLLECTION_PROTOCOL_";
        }
        return "COLLECTION_PROTOCOL_" + identifier;
    }

    public static final String getCollectionProtocolPIGroupName(Long identifier)
    {
        if (identifier == null)
        {
            return "PI_COLLECTION_PROTOCOL_";
        }
        return "PI_COLLECTION_PROTOCOL_" + identifier;
    }

    public static final String getCollectionProtocolCoordinatorGroupName(Long identifier)
    {
        if (identifier == null)
        {
            return "COORDINATORS_COLLECTION_PROTOCOL_";
        }
        return "COORDINATORS_COLLECTION_PROTOCOL_" + identifier;
    }

    public static final String getDistributionProtocolPGName(Long identifier)
    {
        if (identifier == null)
        {
            return "DISTRIBUTION_PROTOCOL_";
        }
        return "DISTRIBUTION_PROTOCOL_" + identifier;
    }

    public static final String getDistributionProtocolPIGroupName(Long identifier)
    {
        if (identifier == null)
        {
            return "PI_DISTRIBUTION_PROTOCOL_";
        }
        return "PI_DISTRIBUTION_PROTOCOL_" + identifier;
    }

    public static final String getStorageContainerPGName()
    {
        return "USER_";
    }

    // Fully-qualified domain-class names kept as literals to avoid a compile-time
    // dependency on the catissuecore module.
    public static final String COLLECTION_PROTOCOL_CLASS_NAME = "edu.wustl.catissuecore.domain.CollectionProtocol"; //CollectionProtocol.class.getName();
    public static final String DISTRIBUTION_PROTOCOL_CLASS_NAME = "edu.wustl.catissuecore.domain.DistributionProtocol"; //DistributionProtocol.class.getName();

    // Aarti: Constants for security parameter required
    // while retrieving data from DAOs
    public static final int INSECURE_RETRIEVE = 0;
    public static final int CLASS_LEVEL_SECURE_RETRIEVE = 1;
    public static final int OBJECT_LEVEL_SECURE_RETRIEVE = 2;

    public static final String CATISSUE_SPECIMEN = "CATISSUE_SPECIMEN";

    // Constants used for authentication module.
    public static final String LOGIN = "login";
    public static final String LOGOUT = "logout";
    public static final String OPERATION = "operation";

    // Constants for HTTP-API
    public static final String HTTP_API = "HTTPAPI";
    public static final String SUCCESS = "success";
    public static final String FAILURE = "failure";
    public static final String SYSTEM_IDENTIFIER = "id";

    // User Roles
    public static final String ADMINISTRATOR = "Administrator";

    //Assign Privilege Constants.
    public static final boolean PRIVILEGE_ASSIGN = true;

    //DAO Constants.
    public static final int HIBERNATE_DAO = 1;
    public static final int JDBC_DAO = 2;
    public static final String ORACLE_DATABASE = "ORACLE";
    public static final String MYSQL_DATABASE = "MYSQL";

    // The unique key voilation message is "Duplicate entry %s for key %d"
    // This string is used for searching " for key " string in the above error message
    public static final String MYSQL_DUPL_KEY_MSG = " for key ";

    // NOTE(review): the user-facing messages below contain typos ("occured",
    // "adminstrator", "does not exists") -- they are runtime strings that may be
    // matched elsewhere, so they are left untouched here.
    public static final String GENERIC_DATABASE_ERROR = "An error occured during a database operation. Please report this problem to the adminstrator";
    // MessageFormat patterns: {0}=object type, {1}=attribute, {2}=value
    public static final String CONSTRAINT_VOILATION_ERROR = "Submission failed since a {0} with the same {1} already exists";
    public static final String OBJECT_NOT_FOUND_ERROR = "Submission failed since a {0} with given {1}: \"{2}\" does not exists";

    public static final String ACTIVITY_STATUS_DISABLED = "Disabled";
    public static final String ACTIVITY_STATUS_CLOSED = "Closed";

    // join operators for simple-query condition building
    public static final String AND_JOIN_CONDITION = "AND";
    public static final String OR_JOIN_CONDITION = "OR";
    public static final String ACTIVITY_STATUS = "activityStatus";

    public static final boolean switchSecurity = true;
    public static final String EDIT = "edit";
    public static final String DATE_PATTERN_MM_DD_YYYY = "MM-dd-yyyy";

    //Constants for audit of disabled objects.
    public static final String UPDATE_OPERATION = "UPDATE";
    public static final String ACTIVITY_STATUS_COLUMN = "ACTIVITY_STATUS";

    //Tree View constants.
    public static final String TISSUE_SITE = "Tissue Site";
    public static final String CLINICAL_DIAGNOSIS = "Clinical Diagnosis";
    public static final int TISSUE_SITE_TREE_ID = 1;
    public static final int STORAGE_CONTAINER_TREE_ID = 2;
    public static final int QUERY_RESULTS_TREE_ID = 3;
    public static final String ROOT = "Root";
    public static final String CATISSUE_CORE = "caTissue Core";

    //Mandar : CDE xml package path.
    public static final String CDE_XML_PACKAGE_PATH = "edu.wustl.common.cde.xml";

    public static final String BOOLEAN_YES = "Yes";
    public static final String BOOLEAN_NO = "No";

    // session attribute keys
    public static final String SESSION_DATA = "sessionData";
    public static final String TEMP_SESSION_DATA = "tempSessionData";
    public static final String ACCESS = "access";
    public static final String PASSWORD_CHANGE_IN_SESSION = "changepassword";
    public static final String USER_CLASS_NAME = "edu.wustl.common.domain.User";
    public static final String IDENTIFIER = "IDENTIFIER";

    // simple-query-interface column type names
    public static final String FIELD_TYPE_BIGINT = "bigint";
    public static final String FIELD_TYPE_VARCHAR = "varchar";
    public static final String FIELD_TYPE_TEXT = "text";
    public static final String FIELD_TYPE_TINY_INT = "tinyint";
    public static final String FIELD_TYPE_DATE = "date";
    public static final String FIELD_TYPE_TIMESTAMP_DATE = "timestampdate";

    // query metadata table/column names
    public static final String TABLE_ALIAS_NAME_COLUMN = "ALIAS_NAME";
    public static final String TABLE_DATA_TABLE_NAME = "CATISSUE_QUERY_TABLE_DATA";
    public static final String TABLE_DISPLAY_NAME_COLUMN = "DISPLAY_NAME";
    public static final String TABLE_FOR_SQI_COLUMN = "FOR_SQI";
    public static final String TABLE_ID_COLUMN = "TABLE_ID";
    public static final String NULL = "NULL";
    public static final String CONDITION_VALUE_YES = "yes";
    public static final String TINY_INT_VALUE_ONE = "1";
    public static final String TINY_INT_VALUE_ZERO = "0";
    public static final String FIELD_TYPE_TIMESTAMP_TIME = "timestamptime";
    public static final String CDE_NAME_TISSUE_SITE = "Tissue Site";
    public static final String UPPER = "UPPER";
    public static final String PARENT_SPECIMEN_ID_COLUMN = "PARENT_SPECIMEN_ID";

    // Query results view temporary table name.
    public static final String QUERY_RESULTS_TABLE = "CATISSUE_QUERY_RESULTS";

    public static final String TIME_PATTERN_HH_MM_SS = "HH:mm:ss";
    public static final int SIMPLE_QUERY_INTERFACE_ID = 40;

    // -- menu selection related
    public static final String MENU_SELECTED = "menuSelected";

    // request/session attribute keys for the simple query interface
    public static final String SIMPLE_QUERY_MAP = "simpleQueryMap";
    public static final String IDENTIFIER_FIELD_INDEX = "identifierFieldIndex";
    public static final String PAGEOF_SIMPLE_QUERY_INTERFACE = "pageOfSimpleQueryInterface";
    public static final String SIMPLE_QUERY_ALIAS_NAME = "simpleQueryAliasName";
    public static final String SIMPLE_QUERY_INTERFACE_ACTION = "/SimpleQueryInterface.do";
    public static final String PAGEOF = "pageOf";
    public static final String TABLE_ALIAS_NAME = "aliasName";
    public static final String SIMPLE_QUERY_NO_RESULTS = "noResults";
    public static final String SEARCH_OBJECT_ACTION = "/SearchObject.do";
    public static final String SEARCH = "search";

    // SimpleSearchAction
    public static final String SIMPLE_QUERY_SINGLE_RESULT = "singleResult";
    public static final String SPREADSHEET_DATA_LIST = "spreadsheetDataList";
    public static final String SPREADSHEET_COLUMN_LIST = "spreadsheetColumnList";
    public static final String ACCESS_DENIED = "access_denied";
    public static final String ADVANCED_CONDITION_NODES_MAP = "advancedConditionNodesMap";
    // NOTE(review): value "advancedCondtionsRoot" drops an 'i' -- left as-is since
    // JSP/action code looks the attribute up by this exact string.
    public static final String ADVANCED_CONDITIONS_ROOT = "advancedCondtionsRoot";
    public static final String TREE_VECTOR = "treeVector";
    public static final String SELECT_COLUMN_LIST = "selectColumnList";
    public static final String SELECTED_NODE = "selectedNode";

    //Individual view Constants in DataViewAction.
    public static final String CONFIGURED_COLUMN_DISPLAY_NAMES = "configuredColumnDisplayNames";
    public static final String CONFIGURED_COLUMN_NAMES = "configuredColumnNames";
    public static final String CONFIGURED_SELECT_COLUMN_LIST = "configuredSelectColumnList";
    public static final String COLUMN_DISPLAY_NAMES = "columnDisplayNames";
    public static final String COLUMN_ID_MAP = "columnIdsMap";
    public static final String PAGEOF_ADVANCE_QUERY_INTERFACE = "pageOfAdvanceQueryInterface";
    public static final String PAGEOF_QUERY_RESULTS = "pageOfQueryResults";
    public static final String COLUMN = "Column";
    public static final String ATTRIBUTE_NAME_LIST = "attributeNameList";
    public static final String ATTRIBUTE_CONDITION_LIST = "attributeConditionList";
    public static final String[] ATTRIBUTE_NAME_ARRAY = {SELECT_OPTION};
    public static final String[] ATTRIBUTE_CONDITION_ARRAY = {"=", "<", ">"};

    //For Simple Query Interface
    public static final int SIMPLE_QUERY_TABLES = 1;
    public static final String OBJECT_NAME_LIST = "objectNameList";
    public static final String ACCESS_DENIED_ADMIN = "access_denied_admin";
    public static final String ACCESS_DENIED_BIOSPECIMEN = "access_denied_biospecimen";

    // Constants for type of query results view.
    public static final String SPREADSHEET_VIEW = "Spreadsheet View";
    public static final String OBJECT_VIEW = "Edit View";
    public static final String COLLECTION_PROTOCOL = "CollectionProtocol";

    // Frame names in Query Results page.
    public static final String DATA_VIEW_FRAME = "myframe1";
    public static final String APPLET_VIEW_FRAME = "appletViewFrame";

    // NodeSelectionlistener - Query Results Tree node selection (For spreadsheet or individual view).
    public static final String DATA_VIEW_ACTION = "DataView.do?nodeName=";
    public static final String VIEW_TYPE = "viewType";

    // TissueSite Tree View Constants.
    public static final String PROPERTY_NAME = "propertyName";

    // For Tree Applet
    public static final String PAGEOF_STORAGE_LOCATION = "pageOfStorageLocation";
    public static final String PAGEOF_SPECIMEN = "pageOfSpecimen";
    public static final String PAGEOF_TISSUE_SITE = "pageOfTissueSite";
    public static final String PAGEOF_MULTIPLE_SPECIMEN = "pageOfMultipleSpecimen";

    //Added By Ramya
    //Constants to display Specimen Tree in RequestDetails.jsp
    public static final String PAGEOF_REQUEST_DETAILS = "pageOfRequestDetails";
    public static final int REQUEST_DETAILS_ID = 4;
    public static final String REQUEST_DETAILS = "Specimen Tree";
    public static final String SPECIMEN_TYPE = "type";
    public static final String SPECIMEN_TREE_ROOT_NAME = "Specimens";

    // Constants for Storage Container.
    public static final String STORAGE_CONTAINER_TYPE = "storageType";
    public static final String STORAGE_CONTAINER_TO_BE_SELECTED = "storageToBeSelected";
    public static final String STORAGE_CONTAINER_POSITION = "position";
    public static final String CDE_NAME = "cdeName";

    // Tree Data Action
    public static final String TREE_DATA_ACTION = "Data.do";
    public static final String SHOW_STORAGE_CONTAINER_GRID_VIEW_ACTION = "ShowStorageGridView.do";
    public static final String TREE_APPLET_NAME = "treeApplet";

    // for Add New
    public static final String ADD_NEW_STORAGE_TYPE_ID = "addNewStorageTypeId";
    public static final String ADD_NEW_COLLECTION_PROTOCOL_ID = "addNewCollectionProtocolId";
    public static final String ADD_NEW_SITE_ID = "addNewSiteId";
    public static final String ADD_NEW_USER_ID = "addNewUserId";
    public static final String ADD_NEW_USER_TO = "addNewUserTo";
    public static final String SUBMITTED_FOR = "submittedFor";
    public static final String SUBMITTED_FOR_ADD_NEW = "AddNew";
    public static final String SUBMITTED_FOR_FORWARD_TO = "ForwardTo";
    public static final String SUBMITTED_FOR_DEFAULT = "Default";
    public static final String FORM_BEAN_STACK = "formBeanStack";
    public static final String ADD_NEW_FORWARD_TO = "addNewForwardTo";
    public static final String FORWARD_TO = "forwardTo";
    public static final String ADD_NEW_FOR = "addNewFor";
    public static final String ERROR_DETAIL = "Error Detail";

    //Identifiers for various Form beans
    public static final int QUERY_INTERFACE_ID = 43;

    //Status message key Constants
    public static final String STATUS_MESSAGE_KEY = "statusMessageKey";

    //Constant for redefine operation for Advance and Simple Query
    public static final String REDEFINE = "redefine";
    public static final String ORIGINAL_SIMPLE_QUERY_OBJECT = "originalSimpleQueryObject";
    // NOTE(review): both counter constants share the value "counter" -- they are
    // distinct names for the same session attribute; confirm before consolidating.
    public static final String ORIGINAL_SIMPLE_QUERY_COUNTER = "counter";
    public static final String SIMPLE_QUERY_COUNTER = "counter";

    /*** Added New Constansts ***/
    // Activity Status values
    public static final String ACTIVITY_STATUS_APPROVE = "Approve";
    public static final String ACTIVITY_STATUS_REJECT = "Reject";
    public static final String ACTIVITY_STATUS_NEW = "New";
    public static final String ACTIVITY_STATUS_PENDING = "Pending";

    //Approve User status values.
    public static final String APPROVE_USER_APPROVE_STATUS = "Approve";
    public static final String APPROVE_USER_REJECT_STATUS = "Reject";
    public static final String APPROVE_USER_PENDING_STATUS = "Pending";

    // Identifiers for various Form beans
    public static final int USER_FORM_ID = 1;
    public static final int ACCESSION_FORM_ID = 3;
    public static final int REPORTED_PROBLEM_FORM_ID = 4;
    public static final int INSTITUTION_FORM_ID = 5;
    public static final int APPROVE_USER_FORM_ID = 6;
    public static final int ACTIVITY_STATUS_FORM_ID = 7;
    public static final int DEPARTMENT_FORM_ID = 8;
    public static final int CANCER_RESEARCH_GROUP_FORM_ID = 14;
    public static final int FORGOT_PASSWORD_FORM_ID = 35;
    public static final int SIGNUP_FORM_ID = 36;
    public static final int DISTRIBUTION_FORM_ID = 37;

    // Query Interface Results View Constants
    public static final String QUERY = "query";
    public static final String PAGEOF_APPROVE_USER = "pageOfApproveUser";
    public static final String PAGEOF_SIGNUP = "pageOfSignUp";
    public static final String PAGEOF_USERADD = "pageOfUserAdd";
    public static final String PAGEOF_USER_ADMIN = "pageOfUserAdmin";
    public static final String PAGEOF_USER_PROFILE = "pageOfUserProfile";
    public static final String PAGEOF_CHANGE_PASSWORD = "pageOfChangePassword";

    // Approve User Constants (pagination of the approve-user listing)
    public static final int ZERO = 0;
    public static final int START_PAGE = 1;
    public static final int NUMBER_RESULTS_PER_PAGE = 5;
    public static final String PAGE_NUMBER = "pageNum";
    public static final String RESULTS_PER_PAGE = "numResultsPerPage";
    public static final String TOTAL_RESULTS = "totalResults";
    public static final String PREVIOUS_PAGE = "prevpage";
    public static final String NEXT_PAGE = "nextPage";
    public static final String ORIGINAL_DOMAIN_OBJECT_LIST = "originalDomainObjectList";
    public static final String SHOW_DOMAIN_OBJECT_LIST = "showDomainObjectList";
    public static final String USER_DETAILS = "details";
    public static final String CURRENT_RECORD = "currentRecord";
    public static final String APPROVE_USER_EMAIL_SUBJECT = "Your membership status in caTISSUE Core.";

    // Constants required in UserBizLogic
    public static final int DEFAULT_BIZ_LOGIC = 0;
    public static final String[] USER_ACTIVITY_STATUS_VALUES = {SELECT_OPTION, "Active", "Closed"};
    public static final String CDE_NAME_COUNTRY_LIST = "Countries";
    public static final String CDE_NAME_STATE_LIST = "States";

    /**
     * Builds the CSM protection-group name for a user; a null id yields the bare prefix.
     * @param id
     * @return
     */
    public static String getUserPGName(Long identifier)
    {
        if (identifier == null)
        {
            return "USER_";
        }
        return "USER_" + identifier;
    }

    /**
     * Builds the CSM group name for a user; a null id yields the bare prefix.
     * (Same value as getUserPGName -- presumably intentional; confirm.)
     * @param id
     * @return
     */
    public static String getUserGroupName(Long identifier)
    {
        if (identifier == null)
        {
            return "USER_";
        }
        return "USER_" + identifier;
    }

    // Constants required for Forgot Password
    public static final String FORGOT_PASSWORD = "forgotpassword";
    public static final String LOGINNAME = "loginName";
    public static final String LASTNAME = "lastName";
    public static final String FIRSTNAME = "firstName";
    public static final String INSTITUTION = "institution";
    public static final String EMAIL = "email";
    public static final String DEPARTMENT = "department";
    public static final String ADDRESS = "address";
    public static final String CITY = "city";
    public static final String STATE = "state";
    public static final String COUNTRY = "country";
    public static final String NEXT_CONTAINER_NO = "startNumber";
    public static final String CSM_USER_ID = "csmUserId";
    public static final String REPORTED_PROBLEM_CLASS_NAME = "edu.wustl.catissuecore.domain.ReportedProblem";
    public static final String PARTICIPANT = "Participant";
    public static final String SPECIMEN = "Specimen";
    public static final String SPECIMEN_COLLECTION_GROUP = "SpecimenCollectionGroup";
    public static final String ALL = "All";
}
package edu.wustl.xipHost.avt2ext;

import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
import org.dcm4che2.data.DicomObject;
import org.dcm4che2.data.Tag;
import org.dcm4che2.io.DicomOutputStream;
import org.jdom.Document;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.XMLOutputter;
import org.nema.dicom.wg23.ObjectDescriptor;
import org.nema.dicom.wg23.ObjectLocator;
import org.nema.dicom.wg23.Uuid;
import com.siemens.scr.avt.ad.annotation.ImageAnnotation;
import com.siemens.scr.avt.ad.api.ADFacade;
import edu.wustl.xipHost.dataAccess.DataAccessListener;
import edu.wustl.xipHost.dataAccess.Retrieve;
import edu.wustl.xipHost.dataAccess.RetrieveEvent;
import edu.wustl.xipHost.dataModel.Item;
import edu.wustl.xipHost.dataModel.Patient;
import edu.wustl.xipHost.dataModel.SearchResult;
import edu.wustl.xipHost.dataModel.Series;
import edu.wustl.xipHost.dataModel.Study;
import edu.wustl.xipHost.iterator.Criteria;
import edu.wustl.xipHost.iterator.RetrieveTarget;
import edu.wustl.xipHost.iterator.TargetElement;

/**
 * {@link Retrieve} implementation that pulls DICOM objects, AIM annotations and
 * DICOM SEG objects for a {@link TargetElement} from the AVT Annotation Database
 * (via {@link ADFacade}) and serializes them as files under {@code importDir}.
 * As each object is written, an {@link ObjectLocator} mapping the object's WG23
 * UUID to the on-disk path is recorded in {@link #getObjectLocators()}.
 *
 * NOTE(review): this class is not thread-safe — retrieval state ({@code document},
 * {@code objectLocators}) lives in instance fields.
 */
public class AVTRetrieve2 implements Retrieve {
	final static Logger logger = Logger.getLogger(AVTRetrieve2.class);
	ADFacade adService = AVTFactory.getADServiceInstance();
	TargetElement targetElement;
	RetrieveTarget retrieveTarget;
	File importDir;

	/**
	 * @param importDir directory into which retrieved DICOM/AIM files are written
	 */
	public AVTRetrieve2(File importDir){
		this.importDir = importDir;
	}

	/**
	 * @param targetElement element whose sub-search result is to be retrieved
	 * @param retrieveTarget what to fetch (e.g. DICOM_AND_AIM)
	 * @param importDir directory into which retrieved files are written
	 * @throws IOException declared but not thrown here; kept for interface compatibility
	 */
	public AVTRetrieve2(TargetElement targetElement, RetrieveTarget retrieveTarget, File importDir) throws IOException{
		this.targetElement = targetElement;
		this.retrieveTarget = retrieveTarget;
		this.importDir = importDir;
	}

	/** Re-targets this instance before the next {@link #run()}. */
	@Override
	public void setRetrieve(TargetElement targetElement, RetrieveTarget retrieveTarget) {
		this.targetElement = targetElement;
		this.retrieveTarget = retrieveTarget;
	}

	/**
	 * Performs the retrieve and then notifies the registered listener.
	 * IOExceptions are logged and the listener is NOT notified in that case.
	 */
	public void run() {
		try {
			logger.info("Executing AVT retrieve.");
			retrieve(targetElement, retrieveTarget);
			fireResultsAvailable(targetElement.getId());
		} catch (IOException e) {
			logger.error(e, e);
			return;
		}
	}

	SAXBuilder builder = new SAXBuilder();
	Document document;              // last AIM XML parsed; reused across loop iterations
	Map<String, ObjectLocator> objectLocators;
	XMLOutputter outToXMLFile = new XMLOutputter();

	/**
	 * Walks patient/study/series of the element's sub-search result, narrowing the
	 * DICOM criteria at each level, and for each series retrieves DICOM objects,
	 * AIM annotations and associated DICOM SEG objects into {@code importDir}.
	 *
	 * @throws IOException on failure writing retrieved files
	 */
	void retrieve(TargetElement targetElement, RetrieveTarget retrieveTarget) throws IOException {
		objectLocators = new HashMap<String, ObjectLocator>();
		SearchResult subSearchResult = targetElement.getSubSearchResult();
		Criteria originalCriteria = subSearchResult.getOriginalCriteria();
		Map<Integer, Object> dicomCriteria = originalCriteria.getDICOMCriteria();
		Map<String, Object> aimCriteria = originalCriteria.getAIMCriteria();
		List<Patient> patients = subSearchResult.getPatients();
		for(Patient patient : patients){
			dicomCriteria.put(Tag.PatientName, patient.getPatientName());
			dicomCriteria.put(Tag.PatientID, patient.getPatientID());
			List<Study> studies = patient.getStudies();
			for(Study study : studies){
				dicomCriteria.put(Tag.StudyInstanceUID, study.getStudyInstanceUID());
				List<Series> series = study.getSeries();
				for(Series oneSeries : series){
					//List<File> retrievedFiles = new ArrayList<File>();
					dicomCriteria.put(Tag.SeriesInstanceUID, oneSeries.getSeriesInstanceUID());
					// Debug-log the AIM criteria ("null" when absent).
					if(aimCriteria == null){
						logger.debug("AD AIM criteria: " + aimCriteria);
					}else{
						logger.debug("AD AIM retrieve criteria:");
						Set<String> keys = aimCriteria.keySet();
						Iterator<String> iter = keys.iterator();
						while(iter.hasNext()){
							String key = iter.next();
							String value = (String) aimCriteria.get(key);
							if(!value.isEmpty()){
								logger.debug("Key: " + key + " Value: " + value);
							}
						}
					}
					File dirPath = importDir.getAbsoluteFile();
					// NOTE(review): objectDescriptors is populated but never read afterwards.
					List<ObjectDescriptor> objectDescriptors = new ArrayList<ObjectDescriptor>();
					List<Item> items = oneSeries.getItems();
					for(Item item : items){
						objectDescriptors.add(item.getObjectDescriptor());
					}
					if(retrieveTarget == RetrieveTarget.DICOM_AND_AIM){
						//Retrieve DICOM: write each object to importDir named by its SOP Instance UID
						//and record an ObjectLocator keyed by the corresponding item's UUID.
						List<DicomObject> retrievedDICOM = adService.retrieveDicomObjs(dicomCriteria, aimCriteria);
						int i = 0;
						for(i = 0; i < retrievedDICOM.size(); i++){
							DicomObject dicom = retrievedDICOM.get(i);
							String filePrefix = dicom.getString(Tag.SOPInstanceUID);
							try {
								File file = new File(importDir.getAbsolutePath() + File.separatorChar + filePrefix);
								if(!file.exists()){
									file.createNewFile();
								}
								FileOutputStream fos = new FileOutputStream(file);
								BufferedOutputStream bos = new BufferedOutputStream(fos);
								DicomOutputStream dout = new DicomOutputStream(bos);
								dout.writeDicomFile(dicom);
								dout.close();
								// Assumes retrievedDICOM and items are index-aligned — TODO confirm.
								ObjectLocator objLoc = new ObjectLocator();
								Item item = items.get(i);
								Uuid itemUUID = item.getObjectDescriptor().getUuid();
								objLoc.setUuid(itemUUID);
								objLoc.setUri(file.getAbsolutePath());
								item.setObjectLocator(objLoc);
								objectLocators.put(itemUUID.getUuid(), objLoc);
							} catch (IOException e) {
								logger.error(e, e);
							}
						}
						//Retrieve AIM: serialize each unique annotation's XML to importDir.
						List<String> annotationUIDs = adService.findAnnotations(dicomCriteria, aimCriteria);
						Set<String> uniqueAnnotUIDs = new HashSet<String>(annotationUIDs);
						Iterator<String> iter = uniqueAnnotUIDs.iterator();
						while(iter.hasNext()){
							String uid = iter.next();
							ImageAnnotation loadedAnnot = adService.getAnnotation(uid);
							String strXML = loadedAnnot.getAIM();
							byte[] source = strXML.getBytes();
							InputStream is = new ByteArrayInputStream(source);
							try {
								document = builder.build(is);
							} catch (JDOMException e) {
								// NOTE(review): on parse failure the stale 'document' from a previous
								// iteration is written below — confirm this is acceptable.
								logger.error(e, e);
							}
							//Ensure dirPath is correctly assign. There are references below of this variable
							File outFile = new File(dirPath + File.separator + uid);
							FileOutputStream outStream = new FileOutputStream(outFile);
							outToXMLFile.output(document, outStream);
							outStream.flush();
							outStream.close();
							//retrievedFiles.add(outFile);
							// NOTE(review): 'i' here is the leftover value from the DICOM loop above
							// (== retrievedDICOM.size() after that loop completes), so items.get(i)
							// looks like it can throw IndexOutOfBoundsException or pick the wrong
							// item — verify the intended item mapping for AIM objects.
							ObjectLocator objLoc = new ObjectLocator();
							Item item = items.get(i);
							Uuid itemUUID = item.getObjectDescriptor().getUuid();
							objLoc.setUuid(itemUUID);
							objLoc.setUri(outFile.getAbsolutePath());
							item.setObjectLocator(objLoc);
							objectLocators.put(itemUUID.getUuid(), objLoc);
							//Retrieve DICOM SEG
							//temporarily voided. AVTQuery needs to be modified to query for DICOM SEG objects
							Set<String> dicomSegSOPInstanceUIDs = new HashSet<String>();
							List<DicomObject> segObjects = adService.retrieveSegmentationObjects(uid);
							for(int j = 0; j < segObjects.size(); j++){
								DicomObject dicom = segObjects.get(j);
								String sopInstanceUID = dicom.getString(Tag.SOPInstanceUID);
								//Check if DICOM SEG was not serialized in reference to another AIM
								if(!dicomSegSOPInstanceUIDs.contains(sopInstanceUID)){
									dicomSegSOPInstanceUIDs.add(sopInstanceUID);
									DicomObject dicomSeg = adService.getDicomObject(sopInstanceUID);
									String message = "DICOM SEG " + sopInstanceUID + " cannot be loaded from file system!";
									if(dicomSeg == null){
										throw new FileNotFoundException(message);
									} else {
										//TODO DICOM SEG tmp file not found e.g. DICOM SEG belongs to not specified Study for which TargetIteratorRunner was not requested
										File outDicomSegFile = new File(dirPath + File.separator + sopInstanceUID);
										FileOutputStream fos = new FileOutputStream(outDicomSegFile);
										BufferedOutputStream bos = new BufferedOutputStream(fos);
										DicomOutputStream dout = new DicomOutputStream(bos);
										dout.writeDicomFile(dicomSeg);
										dout.close();
										//retrievedFiles.add(outDicomSegFile);
										// NOTE(review): same leftover-'i' concern as above; also note the
										// locator is set on 'item', not 'itemDicomSeg' — confirm intended.
										ObjectLocator dicomSegObjLoc = new ObjectLocator();
										Item itemDicomSeg = items.get(i);
										Uuid dicomSegItemUUID = itemDicomSeg.getObjectDescriptor().getUuid();
										dicomSegObjLoc.setUuid(dicomSegItemUUID);
										dicomSegObjLoc.setUri(outDicomSegFile.getAbsolutePath());
										item.setObjectLocator(dicomSegObjLoc);
										objectLocators.put(dicomSegItemUUID.getUuid(), dicomSegObjLoc);
									}
								}
							}
						}
					}
				}
			}
		}
	}

	/** Notifies the single registered listener that retrieval for the element finished. */
	void fireResultsAvailable(String targetElementID){
		RetrieveEvent event = new RetrieveEvent(targetElementID);
		listener.retrieveResultsAvailable(event);
	}

	DataAccessListener listener;   // single listener; replaced (not appended) by addDataAccessListener

	@Override
	public void addDataAccessListener(DataAccessListener l) {
		listener = l;
	}

	/** @return map from WG23 object UUID to the locator of the retrieved on-disk file */
	public Map<String, ObjectLocator> getObjectLocators(){
		return objectLocators;
	}
}
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// all copies or substantial portions of the Software.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.

// File created: 2011-06-23 13:22:53

package fi.tkk.ics.hadoop.bam.cli.plugins;

import java.io.File;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import net.sf.samtools.util.BlockCompressedStreamConstants;

import fi.tkk.ics.hadoop.bam.custom.hadoop.InputSampler;
import fi.tkk.ics.hadoop.bam.custom.hadoop.TotalOrderPartitioner;
import fi.tkk.ics.hadoop.bam.custom.jargs.gnu.CmdLineParser;
import fi.tkk.ics.hadoop.bam.custom.samtools.BAMFileWriter;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMFileHeader;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMFileReader;

import static fi.tkk.ics.hadoop.bam.custom.jargs.gnu.CmdLineParser.Option.*;

import fi.tkk.ics.hadoop.bam.BAMInputFormat;
import fi.tkk.ics.hadoop.bam.KeyIgnoringBAMOutputFormat;
import fi.tkk.ics.hadoop.bam.SAMRecordWritable;
import fi.tkk.ics.hadoop.bam.cli.CLIPlugin;
import fi.tkk.ics.hadoop.bam.cli.Utils;
import fi.tkk.ics.hadoop.bam.util.Pair;
import fi.tkk.ics.hadoop.bam.util.Timer;

/**
 * CLI plugin that sorts a BAM file with a distributed Hadoop MapReduce job.
 * Records are partitioned by a sampled {@link TotalOrderPartitioner}, reduced
 * into sorted parts under WORKDIR, and — when {@code -o} is given — the parts
 * are concatenated behind a freshly written BAM header into one output file.
 */
public final class Sort extends CLIPlugin {
	private static final List<Pair<CmdLineParser.Option, String>> optionDescs =
		new ArrayList<Pair<CmdLineParser.Option, String>>();

	private static final CmdLineParser.Option
		verboseOpt    = new BooleanOption('v', "verbose"),
		outputFileOpt = new StringOption ('o', "output-file=PATH");

	public Sort() {
		super("sort", "BAM sorting", "1.0", "WORKDIR INPATH", optionDescs,
			"Sorts the BAM file in INPATH in a distributed fashion using "+
			"Hadoop. Output parts are placed in WORKDIR.");
	}
	static {
		optionDescs.add(new Pair<CmdLineParser.Option, String>(
			verboseOpt, "tell the Hadoop job to be more verbose"));
		optionDescs.add(new Pair<CmdLineParser.Option, String>(
			outputFileOpt, "output a complete BAM file to the file PATH, "+
			               "removing the parts from WORKDIR"));
	}

	/**
	 * Runs the sort job.
	 *
	 * @param parser parsed command line; remaining args are WORKDIR and INPATH
	 * @return 0 on success; 3 on bad usage; 4 on job/Hadoop failure; 5 on merge failure
	 */
	@Override protected int run(CmdLineParser parser) {
		final List<String> args = parser.getRemainingArgs();
		if (args.isEmpty()) {
			// NOTE(review): message says OUTDIR but the usage string calls this WORKDIR.
			System.err.println("sort :: OUTDIR not given.");
			return 3;
		}
		if (args.size() == 1) {
			System.err.println("sort :: INPATH not given.");
			return 3;
		}

		final String wrkDir = args.get(0),
		             in     = args.get(1),
		             out    = (String)parser.getOptionValue(outputFileOpt);

		final boolean verbose = parser.getBoolean(verboseOpt);

		final Path   inPath     = new Path(in),
		             wrkDirPath = new Path(wrkDir);
		final String inFile     = inPath.getName();

		final Configuration conf = getConf();

		// Used by SortOutputFormat to fetch the SAM header to output and to name
		// the output files, respectively.
		conf.set(SortOutputFormat.INPUT_PATH_PROP,  in);
		conf.set(SortOutputFormat.OUTPUT_NAME_PROP, inFile);

		final Timer t = new Timer();
		try {
			Utils.setSamplingConf(inPath, conf);

			// As far as I can tell there's no non-deprecated way of getting this
			// info. We can silence this warning but not the import.
			@SuppressWarnings("deprecation")
			final int maxReduceTasks =
				new JobClient(new JobConf(conf)).getClusterStatus()
				.getMaxReduceTasks();

			// Leave ~10% headroom in the cluster's reduce slots.
			conf.setInt("mapred.reduce.tasks", Math.max(1, maxReduceTasks*9/10));

			final Job job = new Job(conf);

			job.setJarByClass  (Sort.class);
			job.setMapperClass (Mapper.class);     // identity map; sorting happens in shuffle
			job.setReducerClass(SortReducer.class);

			job.setMapOutputKeyClass(LongWritable.class);
			job.setOutputKeyClass   (NullWritable.class);
			job.setOutputValueClass (SAMRecordWritable.class);

			job.setInputFormatClass (BAMInputFormat.class);
			job.setOutputFormatClass(SortOutputFormat.class);

			FileInputFormat .setInputPaths(job, inPath);
			FileOutputFormat.setOutputPath(job, wrkDirPath);

			job.setPartitionerClass(TotalOrderPartitioner.class);

			System.out.println("sort :: Sampling...");
			t.start();

			InputSampler.<LongWritable,SAMRecordWritable>writePartitionFile(
				job,
				new InputSampler.IntervalSampler<LongWritable,SAMRecordWritable>(
					0.01, 100));

			System.out.printf("sort :: Sampling complete in %d.%03d s.\n",
			                  t.stopS(), t.fms());

			job.submit();

			System.out.println("sort :: Waiting for job completion...");
			t.start();

			if (!job.waitForCompletion(verbose)) {
				System.err.println("sort :: Job failed.");
				return 4;
			}

			System.out.printf("sort :: Job complete in %d.%03d s.\n",
			                  t.stopS(), t.fms());

		} catch (IOException e) {
			System.err.printf("sort :: Hadoop error: %s\n", e);
			return 4;
		} catch (ClassNotFoundException e) { throw new RuntimeException(e); }
		  catch (InterruptedException   e) { throw new RuntimeException(e); }

		if (out != null) try {
			System.out.println("sort :: Merging output...");
			t.start();

			final Path outPath = new Path(out);

			final FileSystem srcFS = wrkDirPath.getFileSystem(conf);
			final FileSystem dstFS =    outPath.getFileSystem(conf);

			// First, place the BAM header.
			final BAMFileWriter w =
				new BAMFileWriter(dstFS.create(outPath), new File(""));
			w.setSortOrder(SAMFileHeader.SortOrder.coordinate, true);

			final SAMFileReader r =
				new SAMFileReader(inPath.getFileSystem(conf).open(inPath));
			w.setHeader(r.getFileHeader());
			r.close();
			w.close();

			// Then, the BAM contents.
			// NOTE(review): append() requires an FS with append support — confirm target FS.
			final OutputStream outs = dstFS.append(outPath);

			// Part files are named <input name>-NNNNNN by SortOutputFormat.
			final FileStatus[] parts = srcFS.globStatus(new Path(
				wrkDir, conf.get(SortOutputFormat.OUTPUT_NAME_PROP) +
				        "-[0-9][0-9][0-9][0-9][0-9][0-9]*"));

			{int i = 0;
			final Timer t2 = new Timer();
			for (final FileStatus part : parts) {
				t2.start();
				final InputStream ins = srcFS.open(part.getPath());
				IOUtils.copyBytes(ins, outs, conf, false);
				ins.close();
				System.out.printf("sort :: Merged part %d in %d.%03d s.\n",
				                  ++i, t2.stopS(), t2.fms());
			}}
			for (final FileStatus part : parts)
				srcFS.delete(part.getPath(), false);

			// Finally, the BGZF terminator.
			outs.write(BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK);
			outs.close();

			System.out.printf("sort :: Merging complete in %d.%03d s.\n",
			                  t.stopS(), t.fms());

		} catch (IOException e) {
			System.err.printf("sort :: Output merging failed: %s\n", e);
			return 5;
		}
		return 0;
	}
}

/** Emits the already-shuffled records in key order, discarding the sort key. */
final class SortReducer extends Reducer<LongWritable,SAMRecordWritable,
                                        NullWritable,SAMRecordWritable> {
	@Override protected void reduce(
			LongWritable ignored, Iterable<SAMRecordWritable> records,
			Reducer<LongWritable,SAMRecordWritable,
			        NullWritable,SAMRecordWritable>.Context ctx)
		throws IOException, InterruptedException
	{
		for (SAMRecordWritable rec : records)
			ctx.write(NullWritable.get(), rec);
	}
}

/**
 * BAM output format that lazily reads the SAM header from the original input
 * (INPUT_PATH_PROP) and names part files "<input name>-NNNNNN" after the
 * task id (OUTPUT_NAME_PROP).
 */
final class SortOutputFormat extends KeyIgnoringBAMOutputFormat<NullWritable> {
	public static final String INPUT_PATH_PROP  = "hadoopbam.sort.input.path",
	                           OUTPUT_NAME_PROP = "hadoopbam.sort.output.name";

	@Override public RecordWriter<NullWritable,SAMRecordWritable>
		getRecordWriter(TaskAttemptContext context) throws IOException
	{
		if (super.header == null) {
			Configuration c = context.getConfiguration();
			readSAMHeaderFrom(
				new Path(c.get(INPUT_PATH_PROP)), FileSystem.get(c));
		}
		return super.getRecordWriter(context);
	}

	@Override public Path getDefaultWorkFile(
			TaskAttemptContext context, String ext)
		throws IOException
	{
		String filename  = context.getConfiguration().get(OUTPUT_NAME_PROP);
		String extension = ext.isEmpty() ? ext : "." + ext;
		int    part      = context.getTaskAttemptID().getTaskID().getId();
		return new Path(super.getDefaultWorkFile(context, ext).getParent(),
			filename + "-" + String.format("%06d", part) + extension);
	}

	// Allow the output directory to exist.
	@Override public void checkOutputSpecs(JobContext job)
		throws FileAlreadyExistsException, IOException {}
}
package io.sniffy.util;

import sun.misc.Unsafe;
import sun.reflect.ReflectionFactory;

import java.lang.annotation.Annotation;
import java.lang.reflect.*;
import java.security.AccessController;
import java.util.concurrent.locks.Lock;

/**
 * Reflection helpers for reading/writing (including {@code final}) fields and
 * invoking methods, working around JDK 16+ strong encapsulation via
 * {@link sun.misc.Unsafe}.
 *
 * NOTE(review): relies on JDK-internal APIs and field layout assumptions
 * (see {@link FakeAccessibleObject}); behavior is JDK-version sensitive.
 */
public class ReflectionUtil {

    // Cached Unsafe singleton; null if it could not be obtained.
    public final static Unsafe UNSAFE;

    static {
        Unsafe unsafe = null;
        try {
            Field f = Unsafe.class.getDeclaredField("theUnsafe");
            f.setAccessible(true);
            unsafe = (Unsafe) f.get(null);
        } catch (Exception e) {
            // TODO: what do we do with drunken sailor?
            // NOTE(review): failure is silently swallowed; UNSAFE stays null and
            // later calls will NPE — consider logging here.
        }
        UNSAFE = unsafe;
    }

    /**
     * FakeAccessibleObject class has similar layout as {@link AccessibleObject} and can be used for calculating offsets
     */
    private static class FakeAccessibleObject implements AnnotatedElement {

        static final private java.security.Permission ACCESS_PERMISSION =
                new ReflectPermission("suppressAccessChecks");

        // Indicates whether language-level access checks are overridden
        // by this object. Initializes to "false". This field is used by
        // Field, Method, and Constructor.
        // NOTE: for security purposes, this field must not be visible
        // outside this package.
        boolean override;

        @Override
        public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
            return null;
        }

        @Override
        public Annotation[] getAnnotations() {
            return new Annotation[0];
        }

        @Override
        public Annotation[] getDeclaredAnnotations() {
            return new Annotation[0];
        }

        // Reflection factory used by subclasses for creating field,
        // method, and constructor accessors. Note that this is called
        // very early in the bootstrapping process.
        static final Object reflectionFactory = new Object();

        volatile Object securityCheckCache;

    }

    /**
     * Makes the given reflective object accessible.
     * On JDK 16+ this pokes the {@code override} flag directly through Unsafe
     * (using {@link FakeAccessibleObject} to compute the field offset), since
     * {@code setAccessible(true)} may be denied; on older JDKs it just calls
     * {@code setAccessible(true)}.
     *
     * @param ao the field/method/constructor to open up
     * @return true if the object is (believed to be) accessible afterwards
     */
    public static boolean setAccessible(AccessibleObject ao) {
        if (JVMUtil.getVersion() >= 16) {
            try {
                long overrideOffset = UNSAFE.objectFieldOffset(FakeAccessibleObject.class.getDeclaredField("override"));
                UNSAFE.putBoolean(ao, overrideOffset, true);
            } catch (NoSuchFieldException e) {
                // NOTE(review): swallowed with printStackTrace; caller only sees the boolean.
                e.printStackTrace();
            }
            // NOTE(review): equivalent to "return ao.isAccessible();".
            if (ao.isAccessible()) {
                return true;
            }
            return false;
        }
        ao.setAccessible(true);
        return true;
    }

    /** Convenience overload of {@link #setField(Class, Object, String, Object, String)} resolving the class by name. */
    public static <T, V> boolean setField(String className, T instance, String fieldName, V value) {
        return setField(className, instance, fieldName, value, null);
    }

    public static <T, V> boolean setField(String className, T instance, String fieldName, V value, String lockFieldName) {
        try {
            //noinspection unchecked
            return setField((Class<T>) Class.forName(className), instance, fieldName, value, lockFieldName);
        } catch (ClassNotFoundException e) {
            return false;
        }
    }

    public static <T, V> boolean setField(Class<T> clazz, T instance, String fieldName, V value) {
        return setField(clazz, instance, fieldName, value, null);
    }

    /**
     * Sets a (possibly final) declared field, optionally synchronizing on the
     * object held by {@code lockFieldName} (a {@link Lock} is lock()ed, anything
     * else is used as a monitor).
     *
     * @param clazz declaring class
     * @param instance target instance, or null for a static field
     * @param fieldName name of the declared field to set
     * @param value new value
     * @param lockFieldName name of a field holding a lock object, or null for no locking
     * @return true on success; false if the field is missing or inaccessible
     */
    public static <T, V> boolean setField(Class<T> clazz, T instance, String fieldName, V value, String lockFieldName) {
        //noinspection TryWithIdenticalCatches
        try {
            Field instanceField = clazz.getDeclaredField(fieldName);
            /*if (JVMUtil.getVersion() >= 16) {
                long fieldOffset = null == instance ?
                        UNSAFE.staticFieldOffset(instanceField) :
                        UNSAFE.objectFieldOffset(instanceField);
                // TODO: acquire lock
                // TODO: use putvolatile if required
                if (instanceField.getType() == Boolean.TYPE && value instanceof Boolean) {
                    UNSAFE.putBoolean(instance, fieldOffset, (Boolean) value);
                } else if (instanceField.getType() == Integer.TYPE && value instanceof Number) {
                    UNSAFE.putInt(instance, fieldOffset, ((Number) value).intValue());
                } else if (instanceField.getType() == Long.TYPE && value instanceof Number) {
                    UNSAFE.putLong(instance, fieldOffset, ((Number) value).longValue());
                } else if (instanceField.getType() == Short.TYPE && value instanceof Number) {
                    UNSAFE.putShort(instance, fieldOffset, ((Number) value).shortValue());
                } else if (instanceField.getType() == Byte.TYPE && value instanceof Number) {
                    UNSAFE.putByte(instance, fieldOffset, ((Number) value).byteValue());
                } else if (instanceField.getType() == Double.TYPE && value instanceof Number) {
                    UNSAFE.putDouble(instance, fieldOffset, ((Number) value).doubleValue());
                } else if (instanceField.getType() == Float.TYPE && value instanceof Number) {
                    UNSAFE.putFloat(instance, fieldOffset, ((Number) value).floatValue());
                } else if (instanceField.getType() == Character.TYPE && value instanceof Character) {
                    UNSAFE.putChar(instance, fieldOffset, (Character) value);
                }
                UNSAFE.putObject(instance == null ? UNSAFE.staticFieldBase(instanceField) : instance, fieldOffset, value);
                return true;
            }*/
            if (!instanceField.isAccessible()) {
                //instanceField.setAccessible(true);
                setAccessible(instanceField);
            }

            // Strip the FINAL modifier so final fields can be written reflectively.
            Field modifiersField = getModifiersField();
            //modifiersField.setAccessible(true);
            setAccessible(modifiersField);
            modifiersField.setInt(instanceField, instanceField.getModifiers() & ~Modifier.FINAL);

            if (null != lockFieldName) {
                Field lockField = clazz.getDeclaredField(lockFieldName);
                if (!lockField.isAccessible()) {
                    //lockField.setAccessible(true);
                    setAccessible(lockField);
                }
                Object lockObject = lockField.get(instance);
                if (lockObject instanceof Lock) {
                    Lock lock = (Lock) lockObject;
                    try {
                        lock.lock();
                        instanceField.set(instance, value);
                        return true;
                    } finally {
                        lock.unlock();
                    }
                } else {
                    //noinspection SynchronizationOnLocalVariableOrMethodParameter
                    synchronized (lockObject) {
                        instanceField.set(instance, value);
                        return true;
                    }
                }
            } else {
                instanceField.set(instance, value);
                return true;
            }
        } catch (NoSuchFieldException e) {
            return false;
        } catch (IllegalAccessException e) {
            return false;
        }
    }

    public static <T, V> V getField(String className, T instance, String fieldName) throws IllegalAccessException, NoSuchFieldException, ClassNotFoundException {
        return getField(className, instance, fieldName, null);
    }

    public static <T, V> V getField(String className, T instance, String fieldName, String lockFieldName) throws ClassNotFoundException, NoSuchFieldException, IllegalAccessException {
        //noinspection unchecked
        return getField((Class<T>) Class.forName(className), instance, fieldName, lockFieldName);
    }

    public static <T, V> V getField(Class<T> clazz, T instance, String fieldName) throws NoSuchFieldException, IllegalAccessException {
        Object field = getField(clazz, instance, fieldName, null);
        return (V) field;
    }

    /**
     * Reads a declared field, with the same accessibility/locking handling as
     * {@link #setField(Class, Object, String, Object, String)}.
     * NOTE(review): also strips the FINAL modifier, which is unnecessary for a
     * read — presumably copied from setField; confirm before simplifying.
     *
     * @param instance target instance, or null for a static field
     * @param lockFieldName name of a field holding a lock object, or null
     */
    public static <T, V> V getField(Class<T> clazz, T instance, String fieldName, String lockFieldName) throws NoSuchFieldException, IllegalAccessException {
        Field instanceField = clazz.getDeclaredField(fieldName);
        /*if (JVMUtil.getVersion() >= 16) {
            long fieldOffset = null == instance ?
                    UNSAFE.staticFieldOffset(instanceField) :
                    UNSAFE.objectFieldOffset(instanceField);
            // TODO: acquire lock
            // TODO: use getvolatile if required
            if (instanceField.getType() == Boolean.TYPE) {
                UNSAFE.getBoolean(instance, fieldOffset);
            } else if (instanceField.getType() == Integer.TYPE) {
                UNSAFE.getInt(instance, fieldOffset);
            } else if (instanceField.getType() == Long.TYPE) {
                UNSAFE.getLong(instance, fieldOffset);
            } else if (instanceField.getType() == Short.TYPE) {
                UNSAFE.getShort(instance, fieldOffset);
            } else if (instanceField.getType() == Byte.TYPE) {
                UNSAFE.getByte(instance, fieldOffset);
            } else if (instanceField.getType() == Double.TYPE) {
                UNSAFE.getDouble(instance, fieldOffset);
            } else if (instanceField.getType() == Float.TYPE) {
                UNSAFE.getFloat(instance, fieldOffset);
            } else if (instanceField.getType() == Character.TYPE) {
                UNSAFE.getChar(instance, fieldOffset);
            }
            //noinspection unchecked
            return (V) UNSAFE.getObject(null == instance ? UNSAFE.staticFieldOffset(instanceField) : instance, fieldOffset);
        }*/
        if (!instanceField.isAccessible()) {
            //instanceField.setAccessible(true);
            setAccessible(instanceField);
        }

        Field modifiersField = getModifiersField();
        //modifiersField.setAccessible(true);
        setAccessible(modifiersField);
        modifiersField.setInt(instanceField, instanceField.getModifiers() & ~Modifier.FINAL);

        if (null != lockFieldName) {
            Field lockField = clazz.getDeclaredField(lockFieldName);
            if (!lockField.isAccessible()) {
                lockField.setAccessible(true);
            }
            Object lockObject = lockField.get(instance);
            if (lockObject instanceof Lock) {
                Lock lock = (Lock) lockObject;
                try {
                    lock.lock();
                    //noinspection unchecked
                    return (V) instanceField.get(instance);
                } finally {
                    lock.unlock();
                }
            } else {
                //noinspection SynchronizationOnLocalVariableOrMethodParameter
                synchronized (lockObject) {
                    //noinspection unchecked
                    return (V) instanceField.get(instance);
                }
            }
        } else {
            //noinspection unchecked
            return (V) instanceField.get(instance);
        }
    }

    /**
     * Returns {@code Field.modifiers}; on JDKs where it is filtered from
     * getDeclaredField (12+), falls back to the internal getDeclaredFields0.
     *
     * @throws NoSuchFieldException if the field cannot be located either way
     */
    private static Field getModifiersField() throws NoSuchFieldException {
        try {
            return Field.class.getDeclaredField("modifiers");
        } catch (NoSuchFieldException e) {
            try {
                Method getDeclaredFields0 = Class.class.getDeclaredMethod("getDeclaredFields0", boolean.class);
                //getDeclaredFields0.setAccessible(true);
                setAccessible(getDeclaredFields0);
                Field[] fields = (Field[]) getDeclaredFields0.invoke(Field.class, false);
                for (Field field : fields) {
                    if ("modifiers".equals(field.getName())) {
                        return field;
                    }
                }
            } catch (Exception ex) {
                ExceptionUtil.addSuppressed(e, ex);
            }
            throw e;
        }
    }

    /** Invokes a declared no-arg method after forcing accessibility. */
    @SuppressWarnings("unchecked")
    public static <R, T> R invokeMethod(
            Class<T> clazz, T instance, String methodName,
            @SuppressWarnings("unused") Class<R> returnClass
    ) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
        Method method = method(clazz, methodName);
        return (R) method.invoke(instance);
    }

    /** Invokes a declared one-arg method after forcing accessibility. */
    @SuppressWarnings("unchecked")
    public static <R, T, P1> R invokeMethod(
            Class<T> clazz, T instance, String methodName,
            Class<P1> argument1Type, P1 argument1,
            @SuppressWarnings("unused") Class<R> returnClass
    ) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
        Method method = method(clazz, methodName, argument1Type);
        return (R) method.invoke(instance, argument1);
    }

    /** Invokes a declared two-arg method after forcing accessibility. */
    @SuppressWarnings("unchecked")
    public static <R, T, P1, P2> R invokeMethod(
            Class<T> clazz, T instance, String methodName,
            Class<P1> argument1Type, P1 argument1,
            Class<P2> argument2Type, P2 argument2,
            @SuppressWarnings("unused") Class<R> returnClass
    ) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
        Method method = method(clazz, methodName, argument1Type, argument2Type);
        return (R) method.invoke(instance, argument1, argument2);
    }

    /** Invokes a declared three-arg method after forcing accessibility. */
    @SuppressWarnings("unchecked")
    public static <R, T, P1, P2, P3> R invokeMethod(
            Class<T> clazz, T instance, String methodName,
            Class<P1> argument1Type, P1 argument1,
            Class<P2> argument2Type, P2 argument2,
            Class<P3> argument3Type, P3 argument3,
            @SuppressWarnings("unused") Class<R> returnClass
    ) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
        Method method = method(clazz, methodName, argument1Type, argument2Type, argument3Type);
        return (R) method.invoke(instance, argument1, argument2, argument3);
    }

    /**
     * Looks up a declared method and forces it accessible.
     *
     * @throws NoSuchMethodException if no such declared method exists
     */
    public static Method method(Class<?> clazz, String methodName, Class<?>... argumentTypes) throws NoSuchMethodException {
        Method method = clazz.getDeclaredMethod(methodName, argumentTypes);
        //method.setAccessible(true);
        setAccessible(method);
        return method;
    }

}
package gov.nih.nci.cananolab.util;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.hssf.util.CellReference;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.DateUtil;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;

/**
 * Parses the first sheet of an HSSF (.xls) Excel file into a 2-D matrix of
 * numeric cells, keyed by the first row (column headers) and first column
 * (row headers).
 */
public class ExcelParser {
	private String fileName;

	/**
	 * @param fileName path of the .xls file to parse
	 */
	public ExcelParser(String fileName) {
		this.fileName = fileName;
	}

	/**
	 * Parse the Excel file into a 2-D matrix represented as a map of map. Key
	 * is column header, value is a map, whose key is row header and value is
	 * the cell.
	 *
	 * @return column-major matrix of numeric cell values
	 * @throws IOException if the file cannot be opened or read
	 */
	public SortedMap<String, SortedMap<String, Double>> verticalParse()
			throws IOException {
		return parse(true);
	}

	/**
	 * Parse the Excel file into a 2-D matrix represented as a map of map. Key
	 * is row header, value is a map, whose key is column header and value is
	 * the cell.
	 *
	 * @return row-major matrix of numeric cell values
	 * @throws IOException if the file cannot be opened or read
	 */
	public SortedMap<String, SortedMap<String, Double>> horizontalParse()
			throws IOException {
		return parse(false);
	}

	/**
	 * Shared implementation for both orientations (the original two methods
	 * were duplicates differing only in which header formed the outer key).
	 * The input stream is now closed in all cases (was leaked before).
	 *
	 * @param columnMajor true: outer key = column header; false: outer key = row header
	 */
	private SortedMap<String, SortedMap<String, Double>> parse(boolean columnMajor)
			throws IOException {
		InputStream inputStream = new BufferedInputStream(new FileInputStream(
				fileName));
		try {
			POIFSFileSystem fs = new POIFSFileSystem(inputStream);
			Workbook wb = new HSSFWorkbook(fs);
			Sheet sheet1 = wb.getSheetAt(0);
			SortedMap<String, SortedMap<String, Double>> dataMatrix =
					new TreeMap<String, SortedMap<String, Double>>();
			Row firstRow = sheet1.getRow(0);
			int i = 0;
			for (Row row : sheet1) {
				// NOTE(review): assumes every row has a string cell at index 0 — TODO confirm.
				String rowHeader = row.getCell(0).getStringCellValue();
				int j = 0;
				for (Cell cell : row) {
					// Skip the header row and header column themselves.
					if (i > 0 && j > 0) {
						String columnHeader = firstRow.getCell(j)
								.getStringCellValue();
						String outerKey = columnMajor ? columnHeader : rowHeader;
						String innerKey = columnMajor ? rowHeader : columnHeader;
						SortedMap<String, Double> data = dataMatrix.get(outerKey);
						if (data == null) {
							data = new TreeMap<String, Double>();
						}
						if (cell != null) {
							data.put(innerKey, cell.getNumericCellValue());
							dataMatrix.put(outerKey, data);
						}
					}
					j++;
				}
				i++;
			}
			return dataMatrix;
		} finally {
			// Fixes the resource leak: the stream was never closed before.
			inputStream.close();
		}
	}

	/**
	 * Dumps every cell of the sheet to stdout with its A1-style reference,
	 * formatting by cell type (debug aid).
	 */
	public void printSheet(Sheet sheet) {
		for (Row row : sheet) {
			for (Cell cell : row) {
				CellReference cellRef = new CellReference(cell.getRowIndex(),
						cell.getColumnIndex());
				System.out.print(cellRef.formatAsString());
				System.out.print(" - ");
				switch (cell.getCellType()) {
				case Cell.CELL_TYPE_STRING:
					System.out.println(cell.getRichStringCellValue()
							.getString());
					break;
				case Cell.CELL_TYPE_NUMERIC:
					if (DateUtil.isCellDateFormatted(cell)) {
						System.out.println(cell.getDateCellValue());
					} else {
						System.out.println(cell.getNumericCellValue());
					}
					break;
				case Cell.CELL_TYPE_BOOLEAN:
					System.out.println(cell.getBooleanCellValue());
					break;
				case Cell.CELL_TYPE_FORMULA:
					System.out.println(cell.getCellFormula());
					break;
				default:
					System.out.println();
				}
			}
		}
	}

	/**
	 * Prints a parsed matrix to stdout, one outer key per section (debug aid).
	 */
	public void printMatrix(
			SortedMap<String, SortedMap<String, Double>> dataMatrix) {
		for (String key : dataMatrix.keySet()) {
			System.out.println("key:" + key);
			Map<String, Double> data = dataMatrix.get(key);
			for (Map.Entry<String, Double> entry : data.entrySet()) {
				System.out.println("key-" + entry.getKey() + ": "
						+ entry.getValue());
			}
		}
	}

	/**
	 * Command-line entry point: parses the given file horizontally and prints it.
	 * Exit codes fixed: 0 on success, 1 on any failure (the original exited 0
	 * after an IOException and 1 after success, inverting the convention).
	 */
	public static void main(String[] args) {
		if (args != null && args.length == 1) {
			String inputFileName = args[0];
			try {
				ExcelParser parser = new ExcelParser(inputFileName);
				// SortedMap<String, SortedMap<String, Double>> matrix1 = parser
				// .verticalParse();
				// parser.printMatrix(matrix1);
				SortedMap<String, SortedMap<String, Double>> matrix2 = parser
						.horizontalParse();
				parser.printMatrix(matrix2);
			} catch (IOException e) {
				System.out.println("Input file not found.");
				e.printStackTrace();
				System.exit(1);
			}
			System.exit(0);
		} else {
			System.out.println("Invalid argument!");
			System.out.println("java ExcelParser <inputFileName>");
			System.exit(1);
		}
	}
}
import java.util.ArrayList; import processing.core.*; public class Test extends PApplet { Astro astro; Startmenu start; Endmenu end; ArrayList<Stars> star =new ArrayList<Stars>(); ArrayList<Walls> topwalls = new ArrayList<Walls>(); ArrayList<Bottomwall> botwalls = new ArrayList<Bottomwall>(); ArrayList<speedPowerup> speeds = new ArrayList<speedPowerup>(); ArrayList<Fuelpowerup> fuels = new ArrayList<Fuelpowerup>(); float k; //int for game state 1=startmenu 2=game 3=restart menu int state; int count; int wallspawn; float wallspeed; boolean change; public void setup() { size(800,800); background(0); state=1; change=false; count=0; wallspawn=100; wallspeed=(float) 1.5; for(int i=0; i<200;i++) { Stars stars = null; stars = new Stars(this); star.add(stars); } astro = new Astro(width/2, height/2, 100, this); start = new Startmenu(this); end = new Endmenu(this); } boolean[] keys = new boolean[512]; public void keyPressed() { keys[keyCode] = true; } public void keyReleased() { keys[keyCode] = false; } public void draw() { background(0); for(int i= star.size()-1; i>=0;i { Stars go = star.get(i); go.update(); go.render(); } if(state==1) { startmenu(); if(keyPressed) { state=2; } } if(state==2) { game(); if(astro.fuel<0) { state=3; } } if(state==3) { endmenu(); } } public void topwalldetect() { for(int i= topwalls.size()-1; i>=0;i { Walls go = topwalls.get(i); if(astro.pos.y<go.h) { if((astro.pos.x+astro.w/2-10)>go.wallpos.x && (astro.pos.x+astro.w/2-10)<(go.wallpos.x+go.w) || (astro.pos.x-astro.w/2+5)>go.wallpos.x && (astro.pos.x-astro.w/2+5)<(go.wallpos.x+go.w)) { astro.fuel=0; } } } } public void bottomwalldetect() { for(int i= botwalls.size()-1; i>=0;i { Bottomwall go = botwalls.get(i); if(astro.pos.y+astro.w>height-go.h) { if(astro.pos.x-astro.w/2>go.wallpos.x && astro.pos.x-astro.w/2<(go.wallpos.x)+go.w || astro.pos.x+astro.w/2-15>go.wallpos.x && astro.pos.x+astro.w/2-15<(go.wallpos.x)+go.w) { astro.fuel=0; } } } } public void speeddetect() { for(int i= speeds.size()-1; 
i>=0;i { speedPowerup go = speeds.get(i); if(astro.pos.y<go.pos.y+(go.h/2) && astro.pos.y>go.pos.y-(go.h/2)) { if((astro.pos.x+astro.w/2-10)>go.pos.x && (astro.pos.x+astro.w/2-10)<(go.pos.x+go.w) || (astro.pos.x-astro.w/2+5)>go.pos.x && (astro.pos.x-astro.w/2+5)<(go.pos.x+go.w)) { //change wall speeds change=true; speeds.remove(go); wallspeed=3; wallspawn=50; for(int j= topwalls.size()-1; j>=0;j { Walls go1 = topwalls.get(j); go1.speed=3; } for(int j= botwalls.size()-1; j>=0;j { Bottomwall go2 = botwalls.get(j); go2.speed=3; } } } } if (change==true) { count++; if(count>300) { for(int j= topwalls.size()-1; j>=0;j { Walls go1 = topwalls.get(j); go1.speed=(float) 1.5; } for(int j= botwalls.size()-1; j>=0;j { Bottomwall go2 = botwalls.get(j); go2.speed=(float) 1.5; } wallspeed=(float) 1.5; wallspawn=100; change=false; count=0; } } } public void startmenu() { start.render(); start.update(); } public void endmenu() { end.render(); end.update(); } public void game() { if (frameCount % wallspawn == 0) { //initialise top wall Walls topwall = null; topwall = new Walls(this,wallspeed); topwalls.add(topwall); //initialise bottom wall Bottomwall bottomwall = null; bottomwall = new Bottomwall(this,wallspeed); botwalls.add(bottomwall); } if (frameCount % (wallspawn*4) == 0) { for(int i= topwalls.size()-1; i>=0;i { Walls go = topwalls.get(i); for(int j= botwalls.size()-1; j>=0;j { Bottomwall go1 = botwalls.get(j); k=random(go.h,(height-go1.h)); } } Fuelpowerup fuel = null; fuel = new Fuelpowerup(this,k,wallspeed); fuels.add(fuel); } if (frameCount % 600 == 0) { //initialise speedPowerup speedPowerup speed = null; speed = new speedPowerup(this); speeds.add(speed); } if(frameCount % 25 == 0) { astro.fuel-=3; } for(int i= speeds.size()-1; i>=0;i { speedPowerup go = speeds.get(i); go.update(); go.render(); } for(int i= fuels.size()-1; i>=0;i { Fuelpowerup go = fuels.get(i); go.update(); go.render(); } astro.render(); astro.update(); astro.fuel(); for(int i= topwalls.size()-1; i>=0;i 
{ Walls go = topwalls.get(i); go.update(); go.render(); } for(int i= botwalls.size()-1; i>=0;i { Bottomwall go = botwalls.get(i); go.update(); go.render(); } topwalldetect(); bottomwalldetect(); speeddetect(); } }
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Scanner;
import java.util.concurrent.TimeUnit;

/**
 * Utilities for reading a file of base-10 integers, converting them to
 * base 6, sorting them, and searching for the longest run of consecutive
 * values that differ by exactly one character in one position.
 *
 * Comparison results are modelled as "traffic-light" strings:
 * "vermelho" (red: different lengths / no match), "amarelo" (yellow:
 * same length, more than one differing character) and "verde" (green:
 * match). The original compared these with ==; this version uses
 * equals(), which is the correct string comparison (the literals happen
 * to be interned, so behavior is unchanged).
 */
public class Util {

	// Converted values bucketed by base-6 string length (2..6 digits).
	final ArrayList<String> size02 = new ArrayList<>();
	final ArrayList<String> size03 = new ArrayList<>();
	final ArrayList<String> size04 = new ArrayList<>();
	final ArrayList<String> size05 = new ArrayList<>();
	final ArrayList<String> size06 = new ArrayList<>();

	// Application infrastructure.
	private final ArrayList<Integer> intArrayList = new ArrayList<Integer>();
	private final ArrayList<String> stringArrayList = new ArrayList<String>();

	// State for the sequence search.
	private final ArrayList<String> sequenciaAtual = new ArrayList<String>();
	public static ArrayList<String> maiorSequencia = new ArrayList<String>();

	// Result of the last match() call ("vermelho"/"amarelo"/"verde").
	String sinal = null;
	int F = 0; // sentinel index for the First element
	int N = 1; // sentinel index for the Next element
	String first;
	String next;
	int firstSize;
	int nextSize;

	public Util() {
	}

	public ArrayList<Integer> getIntArrayList() {
		return intArrayList;
	}

	public ArrayList<String> getStringArrayList() {
		return stringArrayList;
	}

	/**
	 * Time counter for the program.
	 *
	 * @param startTime the start time, in milliseconds since the epoch
	 * @return total time spent, formatted as "M min, S sec"
	 */
	public String timeCounter(final long startTime) {
		final long duration = System.currentTimeMillis() - startTime;
		return String.format(
				"%d min, %d sec",
				TimeUnit.MILLISECONDS.toMinutes(duration),
				TimeUnit.MILLISECONDS.toSeconds(duration)
						- TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(duration)));
	}

	/**
	 * Converts a String list to Integers, appending to the internal
	 * integer list.
	 */
	public void convertToInt(final ArrayList<String> strList) {
		for (final String s : strList) {
			intArrayList.add(Integer.valueOf(s));
		}
	}

	/**
	 * Converts an Integer list to Strings, appending to the internal
	 * string list.
	 */
	public void convertToString(final ArrayList<Integer> intList) {
		for (final Integer i : intList) {
			stringArrayList.add(String.valueOf(i));
		}
	}

	/**
	 * Base converter: renders each base-10 integer as a base-6 string.
	 *
	 * @param intList    source values (base 10)
	 * @param stringList destination for the base-6 representations
	 */
	private void baseConverter(final ArrayList<Integer> intList,
			final ArrayList<String> stringList) {
		for (int i = 0; i < intList.size(); i++) {
			stringList.add(Integer.toString(intList.get(i), 6));
		}
	}

	/**
	 * Reads one integer per line from the file, converts everything to
	 * base 6, sorts numerically, and buckets the results by length.
	 *
	 * @param fileElements      scratch list for the base-10 values
	 * @param convertedElements receives the base-6 string values
	 * @param TxtFileName       file containing base-10 data, one per line
	 */
	public void readAndConvert(
			final ArrayList<Integer> fileElements,
			final ArrayList<String> convertedElements,
			final String TxtFileName) {
		// try-with-resources: the original never closed the reader or
		// the per-line scanners.
		try (BufferedReader br = new BufferedReader(new FileReader(TxtFileName))) {
			String linha;
			while ((linha = br.readLine()) != null) {
				try (Scanner sc = new Scanner(linha).useDelimiter("\n")) {
					while (sc.hasNext()) {
						fileElements.add(Integer.parseInt(sc.next()));
					}
				}
			}
			baseConverter(fileElements, convertedElements);
		} catch (final IOException e) {
			// The original passed 'e' as an unused format argument,
			// silently dropping the cause.
			System.err.format("Error reading the file: %s.%n", e);
		}
		// Sort numerically by round-tripping through Integer.
		convertToInt(convertedElements);
		Collections.sort(intArrayList);
		convertToString(intArrayList);
		separarTamanhoStrings(stringArrayList);
	}

	/**
	 * Runs the character comparator over the sorted list. Compares the
	 * current "first" element against successive "next" elements; on a
	 * match the window advances, building the current sequence. An outer
	 * sentinel guarantees every element eventually serves as "first",
	 * because the advance on a match can skip over elements.
	 *
	 * @param convertedElementsList sorted base-6 strings
	 * @return the longest matching sequence found so far
	 */
	public ArrayList<String> runTel_Dor(final ArrayList<String> convertedElementsList) {
		// TODO: still need a way to compare only same-length elements.
		// TODO: handle the case where there is no match at all and the
		// result should be empty.
		int sentinella = F + 1;
		while ((N < convertedElementsList.size())
				&& (sentinella < convertedElementsList.size())) {
			first = convertedElementsList.get(F);
			next = convertedElementsList.get(N);
			sinal = match();
			// Red: advance both F and N. Yellow: advance N only.
			// Green: F takes N's place, N moves one ahead.
			if ("verde".equals(sinal)) {
				atualizaSequencia();
			}
			atualizaVariaveis(sinal);
			// Ensure every element is eventually checked: the jumps in
			// atualizaVariaveis() can make F skip many values.
			if (N == convertedElementsList.size()) {
				F = sentinella;
				N = F + 1;
				sentinella++;
				atualizaMaiorSequenca();
			}
		}
		return maiorSequencia;
	}

	/** Appends the matched pair (or just the new element) to the run. */
	private void atualizaSequencia() {
		if (sequenciaAtual.isEmpty()) {
			sequenciaAtual.add(first);
			sequenciaAtual.add(next);
		} else {
			sequenciaAtual.add(next);
		}
	}

	/**
	 * Promotes the current sequence to the best-so-far if it is longer,
	 * then resets the current sequence.
	 */
	public void atualizaMaiorSequenca() {
		if (sequenciaAtual.size() > maiorSequencia.size()) {
			maiorSequencia = new ArrayList<String>(sequenciaAtual);
		}
		sequenciaAtual.clear();
	}

	/**
	 * Advances the F/N window according to the comparison result:
	 * "vermelho" moves both one ahead, "amarelo" moves only N, and
	 * "verde" makes F jump to N and N move one past it.
	 */
	public void atualizaVariaveis(final String result) {
		if ("amarelo".equals(result)) {
			N++;
		}
		if ("vermelho".equals(result)) {
			F++;
			N++;
		}
		if ("verde".equals(result)) {
			F = N;
			N = N + 1;
		}
	}

	/** Buckets each string into the sizeNN list matching its length. */
	private void separarTamanhoStrings(final ArrayList<String> list) {
		for (int i = 0; i < list.size(); i++) {
			final String valor = list.get(i);
			switch (valor.length()) {
			case 2:
				size02.add(valor);
				break;
			case 3:
				size03.add(valor);
				break;
			case 4:
				size04.add(valor);
				break;
			case 5:
				size05.add(valor);
				break;
			case 6:
				size06.add(valor);
				break;
			default:
				// Lengths outside 2..6 are ignored, as in the original.
			}
		}
	}

	/**
	 * Compares the 'first' and 'next' fields character by character.
	 *
	 * @return "vermelho" when the lengths differ (or, as in the
	 *         original, when the only difference is in position 0 of a
	 *         1-character string); "amarelo" when more than one
	 *         character differs; "verde" when at most one character
	 *         differs in the same position.
	 */
	public String match() {
		sinal = "vermelho";
		int distictChar = 0;
		firstSize = first.length();
		nextSize = next.length();
		// Different lengths: the loop never runs and red is returned.
		for (int i = 0; (i < firstSize) && (firstSize == nextSize); i++) {
			if (first.charAt(i) != next.charAt(i)) {
				distictChar++;
				// More than one differing character: yellow.
				if (distictChar > 1) {
					sinal = "amarelo";
					break;
				}
				continue;
			}
			// Characters match so far: green.
			sinal = "verde";
		}
		return sinal;
	}
}
package io.cloudchaser.murmur; import io.cloudchaser.murmur.parser.MurmurParser; import io.cloudchaser.murmur.parser.MurmurParserBaseVisitor; import io.cloudchaser.murmur.symbol.LetSymbol; import io.cloudchaser.murmur.symbol.Symbol; import io.cloudchaser.murmur.symbol.SymbolContext; import io.cloudchaser.murmur.types.MurmurBoolean; import io.cloudchaser.murmur.types.MurmurInteger; import io.cloudchaser.murmur.types.MurmurObject; import java.util.HashMap; import java.util.Map; /** * * @author Mihail K * @since 0.1 */ public class MurmurASTVisitor extends MurmurParserBaseVisitor<MurmurObject> { private static class MurmurBaseContext implements SymbolContext { private final Map<String, Symbol> symbols; public MurmurBaseContext() { symbols = new HashMap<>(); } @Override public SymbolContext getParent() { return null; } @Override public void addSymbol(Symbol symbol) { symbols.put(symbol.getName(), symbol); } @Override public Symbol getSymbol(String name) { return symbols.get(name); } } private final SymbolContext context; public MurmurASTVisitor() { context = new MurmurBaseContext(); } @Override public MurmurObject visitCompilationUnit(MurmurParser.CompilationUnitContext ctx) { // Visit children. ctx.statement().stream().forEach(this::visitStatement); return null; } @Override public MurmurObject visitStatement(MurmurParser.StatementContext ctx) { if(ctx.keywordStatement() != null) { visitKeywordStatement(ctx.keywordStatement()); } else { // Print results for debug. 
System.out.println(visitExpression(ctx.expression())); } return null; } /* - Statements - */ public MurmurObject visitLeftArrowStatement(MurmurParser.KeywordStatementContext ctx) { // TODO return null; } public MurmurObject visitRightArrowStatement(MurmurParser.KeywordStatementContext ctx) { // TODO return null; } public MurmurObject visitBreakStatement(MurmurParser.KeywordStatementContext ctx) { // TODO return null; } public MurmurObject visitContinueStatement(MurmurParser.KeywordStatementContext ctx) { // TODO return null; } public MurmurObject visitLetInitializerList(MurmurParser.InitializerListContext ctx) { ctx.initializerElement().stream().forEach((element) -> { String name = element.Identifier().getText(); MurmurObject value = visitExpression(element.expression()); System.out.println(name + " : " + value); // Create a symbol entry. context.addSymbol(new LetSymbol(name, value)); }); return null; } public MurmurObject visitLetStatement(MurmurParser.KeywordStatementContext ctx) { // Let with an initializer list. if(ctx.initializerList() != null) { return visitLetInitializerList(ctx.initializerList()); } // Unsupported. throw new UnsupportedOperationException(); } public MurmurObject visitReturnStatement(MurmurParser.KeywordStatementContext ctx) { // TODO return null; } public MurmurObject visitThrowStatement(MurmurParser.KeywordStatementContext ctx) { // TODO return null; } @Override public MurmurObject visitKeywordStatement(MurmurParser.KeywordStatementContext ctx) { if(ctx.operator != null) { // Keyword/operator. switch(ctx.operator.getText()) { case "<-": return visitLeftArrowStatement(ctx); case "->": return visitRightArrowStatement(ctx); case "break": return visitBreakStatement(ctx); case "continue": return visitContinueStatement(ctx); case "let": return visitLetStatement(ctx); case "return": return visitReturnStatement(ctx); case "throw": return visitThrowStatement(ctx); default: // Unknown operation. 
throw new RuntimeException(); } } // Something went wrong. throw new RuntimeException(); } /* - Interfaces - */ public MurmurObject visitITypeFunction(MurmurParser.ITypeElementContext ctx) { // TODO return null; } @Override public MurmurObject visitITypeElement(MurmurParser.ITypeElementContext ctx) { // TODO return null; } /* - Classes - */ public MurmurObject visitTypeField(MurmurParser.TypeElementContext ctx) { // TODO return null; } public MurmurObject visitTypeFunction(MurmurParser.TypeElementContext ctx) { // TODO return null; } @Override public MurmurObject visitTypeElement(MurmurParser.TypeElementContext ctx) { // TODO return null; } /* - Expressions - */ public MurmurObject visitPositiveExpression(MurmurParser.ExpressionContext ctx) { MurmurObject right = visitExpression(ctx.right); return right.opPositive(); } public MurmurObject visitPreIncrementExpression(MurmurParser.ExpressionContext ctx) { MurmurObject right = visitExpression(ctx.right); // Must be a symbol to increment. if(!(right instanceof Symbol)) { throw new UnsupportedOperationException(); } // Increment and return value. Symbol symbol = (Symbol)right; symbol.setValue(right.opIncrement()); return symbol.getValue(); } public MurmurObject visitPostIncrementExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); // Must be a symbol to increment. if(!(left instanceof Symbol)) { throw new UnsupportedOperationException(); } // Increment and return old value. Symbol symbol = (Symbol)left; MurmurObject old = symbol.getValue(); symbol.setValue(left.opIncrement()); return old; } public MurmurObject visitAdditionExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opPlus(right instanceof Symbol ? 
((Symbol)right).getValue() : right); } public MurmurObject visitNegativeExpression(MurmurParser.ExpressionContext ctx) { MurmurObject right = visitExpression(ctx.right); return right.opNegative(); } public MurmurObject visitPreDecrementExpression(MurmurParser.ExpressionContext ctx) { MurmurObject right = visitExpression(ctx.right); // Must be a symbol to decrement. if(!(right instanceof Symbol)) { throw new UnsupportedOperationException(); } // Decrement and return value. Symbol symbol = (Symbol)right; symbol.setValue(right.opDecrement()); return symbol.getValue(); } public MurmurObject visitPostDecrementExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); // Must be a symbol to decrement. if(!(left instanceof Symbol)) { throw new UnsupportedOperationException(); } // Decrement and return old value. Symbol symbol = (Symbol)left; MurmurObject old = symbol.getValue(); symbol.setValue(left.opDecrement()); return old; } public MurmurObject visitSubtractionExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opMinus(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitMultiplicationExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opMultiply(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitDivisionExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opDivide(right instanceof Symbol ? 
((Symbol)right).getValue() : right); } public MurmurObject visitModuloExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opModulo(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitEqualExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opEquals(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitNotEqualExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opNotEquals(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitLogicalNotExpression(MurmurParser.ExpressionContext ctx) { MurmurObject right = visitExpression(ctx.right); return right.opLogicalNot(); } public MurmurObject visitLogicalAndExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opLogicalAnd(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitLogicalOrExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opLogicalOr(right instanceof Symbol ? 
((Symbol)right).getValue() : right); } public MurmurObject visitBinaryNotExpression(MurmurParser.ExpressionContext ctx) { MurmurObject right = visitExpression(ctx.right); return right.opBitNot(); } public MurmurObject visitBinaryAndExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opBitAnd(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitBinaryXorExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opBitXor(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitBinaryOrExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opBitOr(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitLessThanExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opLessThan(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitGreaterThanExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opGreaterThan(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitLessOrEqualExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opLessOrEqual(right instanceof Symbol ? 
((Symbol)right).getValue() : right); } public MurmurObject visitGreaterOrEqualExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opGreaterOrEqual(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitShiftLeftExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opShiftLeft(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitShiftRightExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Dereference symbols. return left.opShiftRight(right instanceof Symbol ? ((Symbol)right).getValue() : right); } public MurmurObject visitTernaryExpression(MurmurParser.ExpressionContext ctx) { MurmurObject clause = visitExpression(ctx.clause); clause = clause instanceof Symbol ? ((Symbol)clause).getValue() : clause; // Check that the clause is boolean. if(!(clause instanceof MurmurBoolean)) { throw new UnsupportedOperationException(); } // Check the clause. MurmurBoolean bool = (MurmurBoolean)clause; if(bool.getValue()) { // True; evaluate left. return visitExpression(ctx.expression(1)); } else { // False; evaluate right. return visitExpression(ctx.expression(2)); } } public MurmurObject visitArrayIndexExpression(MurmurParser.ExpressionContext ctx) { // TODO return null; } public MurmurObject visitFunctionArguments(MurmurParser.ExpressionListContext ctx) { // Visit the argument list. 
ctx.expression().stream().forEach(this::visitExpression); // TODO return null; } public MurmurObject visitFunctionCallExpression(MurmurParser.ExpressionContext ctx) { if(ctx.expressionList() != null) { visitFunctionArguments(ctx.expressionList()); } // TODO return null; } public MurmurObject visitAssignmentExpression(MurmurParser.ExpressionContext ctx) { MurmurObject left = visitExpression(ctx.left); MurmurObject right = visitExpression(ctx.right); // Check that this is an lvalue. if(!(left instanceof Symbol)) { throw new UnsupportedOperationException(); } System.out.println(left); // Assign the value to the symbol. ((Symbol)left).setValue(right); return right; } public MurmurObject visitMemberExpression(MurmurParser.ExpressionContext ctx) { // TODO return null; } public MurmurObject visitSetNotationExpression(MurmurParser.ExpressionContext ctx) { // TODO return null; } public MurmurObject visitIdentifierExpression(MurmurParser.ExpressionContext ctx) { Symbol symbol = context.getSymbol(ctx.getText()); // Check that the symbol exists. if(symbol == null) { // TODO throw new NullPointerException(); } // Return the symbol. return symbol; } @Override public MurmurObject visitExpression(MurmurParser.ExpressionContext ctx) { // Skip null elements. if(ctx == null) return null; // Literals. if(ctx.literal() != null) { return visitLiteral(ctx.literal()); } if(ctx.operator != null) { // Operator types. 
switch(ctx.operator.getText()) { case ".": // Expression: a.b return visitMemberExpression(ctx); case ",": // Expression: [a, b] return visitSetNotationExpression(ctx); case "+": if(ctx.left != null) // Expression: a + b return visitAdditionExpression(ctx); // Expression: +a return visitPositiveExpression(ctx); case "-": if(ctx.left != null) // Expression: a - b return visitSubtractionExpression(ctx); // Expression: -a return visitNegativeExpression(ctx); case "*": // Expression: a * b return visitMultiplicationExpression(ctx); case "/": // Expression: a / b return visitDivisionExpression(ctx); case "%": // Expression: a % b return visitModuloExpression(ctx); case "!": // Expression: !a return visitLogicalNotExpression(ctx); case "~": // Expression: ~a return visitBinaryNotExpression(ctx); case "&": // Expression: a & b return visitBinaryAndExpression(ctx); case "^": // Expression: a ^ b return visitBinaryXorExpression(ctx); case "|": // Expression: a | b return visitBinaryOrExpression(ctx); case "<": // Expression: a < b return visitLessThanExpression(ctx); case ">": // Expression: a > b return visitGreaterThanExpression(ctx); case "=": // Expression: a = b return visitAssignmentExpression(ctx); case "?": // Expression: a ? b : c return visitTernaryExpression(ctx); case "(": // Expression: a(b, c, ...) 
return visitFunctionCallExpression(ctx); case "[": // Expression: a[b] return visitArrayIndexExpression(ctx); case "++": if(ctx.left != null) // Expression: a++ return visitPostIncrementExpression(ctx); // Expression: ++a return visitPreIncrementExpression(ctx); case " if(ctx.left != null) // Expression: a-- return visitPostDecrementExpression(ctx); // Expression: --a return visitPreDecrementExpression(ctx); case "&&": // Expression: a && b return visitLogicalAndExpression(ctx); case "||": // Expression: a || b return visitLogicalOrExpression(ctx); case "==": // Expression: a == b return visitEqualExpression(ctx); case "!=": // Expression: a != b return visitNotEqualExpression(ctx); case "<=": // Expression: a <= b return visitLessOrEqualExpression(ctx); case ">=": // Expression: a >= b return visitGreaterOrEqualExpression(ctx); case "<<": // Expression: a << b return visitShiftLeftExpression(ctx); case ">>": // Expression: a >> b return visitShiftRightExpression(ctx); default: // Unknown operator. throw new RuntimeException(); } } // Identifier. if(ctx.Identifier() != null) { return visitIdentifierExpression(ctx); } // Lambda. if(ctx.lambda() != null) { return visitLambda(ctx.lambda()); } // Parenthesized. if(ctx.inner != null) { return visitExpression(ctx.inner); } throw new RuntimeException(); } /* - Literal Types - */ public MurmurInteger visitIntegerLiteral(MurmurParser.LiteralContext ctx) { long value; String text = ctx.getText().toLowerCase(); // Check for base. 
if(text.startsWith("0x")) { text = text.replaceAll("(0x|_|l)", ""); value = Long.parseLong(text, 16); } else { text = text.replaceAll("(_|l)", ""); value = Long.parseLong(text); } return MurmurInteger.create(value); } public MurmurObject visitDecimalLiteral(MurmurParser.LiteralContext ctx) { // TODO return null; } public MurmurObject visitBooleanLiteral(MurmurParser.LiteralContext ctx) { return MurmurBoolean.create(Boolean.parseBoolean(ctx.getText())); } public MurmurObject visitCharacterLiteral(MurmurParser.LiteralContext ctx) { // TODO return null; } public MurmurObject visitStringLiteral(MurmurParser.LiteralContext ctx) { // TODO return null; } public MurmurObject visitNullLiteral(MurmurParser.LiteralContext ctx) { // TODO return null; } @Override public MurmurObject visitLiteral(MurmurParser.LiteralContext ctx) { // Integer literals. if(ctx.IntegerLiteral() != null) { return visitIntegerLiteral(ctx); } // Decimal literals. if(ctx.DecimalLiteral() != null) { return visitDecimalLiteral(ctx); } // Boolean literals. if(ctx.BooleanLiteral() != null) { return visitBooleanLiteral(ctx); } // Character literals. if(ctx.CharacterLiteral() != null) { return visitCharacterLiteral(ctx); } // String literals. if(ctx.StringLiteral() != null) { return visitStringLiteral(ctx); } // Null literals. if(ctx.NullLiteral() != null) { return visitNullLiteral(ctx); } // Unknown literal type. throw new RuntimeException(); } }
import java.util.ArrayList; import java.util.List; /** * Firm Class * * Work on: John */ public class Firm { private List<Meeting> meetings; private SoftwareProjectManager suPremeManager; //private List<Developer> developers; private List<TeamLead> leads; private List<Thread> threadDevs; private List<Thread> threadLeads; private long startTime; private long firstStart; private long lastStart; private List<Thread> devsInBigMeeting; /** * @param suPremeManager * SoftwareProjectManager that's head of the office */ public Firm(SoftwareProjectManager suPremeManager) { meetings = new ArrayList<Meeting>(); devsInBigMeeting = new ArrayList<Thread>(); threadDevs = new ArrayList<Thread>(); threadLeads = new ArrayList<Thread>(); firstStart = -15; lastStart = -1; this.suPremeManager = suPremeManager; threadDevs.add(suPremeManager); this.leads = suPremeManager.getTeamLeaders(); //developers = new ArrayList<Developer>(); ArrayList<Thread> staff1 = new ArrayList<Thread>(); ArrayList<Thread> staff2 = new ArrayList<Thread>(); ArrayList<Thread> staff3 = new ArrayList<Thread>(); int count = 1; for (TeamLead lead : this.leads) { if ( count == 1 ) { staff1.add(lead); staff1.addAll(lead.getDevelopers()); } if ( count == 2 ) { staff2.add(lead); staff2.addAll(lead.getDevelopers()); } if ( count == 3 ) { staff3.add(lead); staff3.addAll(lead.getDevelopers()); } count ++; //developers.addAll(lead.getDevelopers()); //developers.add((Developer) lead); threadDevs.add(lead); threadDevs.addAll(lead.getDevelopers()); threadLeads.add(lead); } // Team1 Meeting meeting1 = new Meeting(0, 300, 150, staff1); meetings.add(meeting1); // Team2 Meeting meeting2 = new Meeting(1200, 0, 600, staff2); meetings.add(meeting2); // Team3 Meeting meeting3 = new Meeting(3600, 0, 600, staff3); meetings.add(meeting3); // End of day meeting, 4PM // ArrayList<Thread> staff4 = new ArrayList<Thread>(threadDevs); // staff4.add(suPremeManager); // Meeting meeting4 = new Meeting(0, 300, 150, staff4); // 
meetings.add(meeting4); } private void addAllFirms() { for ( Thread dev : this.threadDevs ) { try { ((Developer)dev).setFirm(this); }catch(Exception e){} try { ((TeamLead)dev).setFirm(this); }catch(Exception e){} try { ((SoftwareProjectManager)dev).setFirm(this); }catch(Exception e){} } } /** * Starts the simulation */ public void startDay() { addAllFirms(); startTime = System.currentTimeMillis(); addAllFirms(); for ( Thread thread : threadDevs ) { thread.start(); } } /** * Attempts to join a meeting in progress. Returns false if there is no current meeting the * thread is in. True otherwise. * @return False if there is no current meeting the thread is in. True otherwise. */ public void attemptJoin() { if ( getTime() < FirmTime.HOUR.ms() * 8 - 10) { synchronized(this){ while ( getTime() - firstStart < FirmTime.MINUTE.ms() * 15) { try { Thread.currentThread().wait(FirmTime.MINUTE.ms() * 15 - (getTime() - firstStart)); } catch (InterruptedException e) { e.printStackTrace(); } } firstStart = getTime(); } } else { synchronized(this) { this.devsInBigMeeting.add(Thread.currentThread()); if ( devsInBigMeeting.size() == threadDevs.size() ) { lastStart = getTime(); try { wait(FirmTime.MINUTE.ms() * 15); } catch (InterruptedException e) { e.printStackTrace(); } notifyAll(); } } } } /** * Returns time since start in milliseconds. * @return Time since start in milliseconds. */ public synchronized long getTime() { long currentTime = System.currentTimeMillis() - startTime; return currentTime; } public synchronized boolean isLastMeetingDone() { if ( lastStart == -1 || getTime() - lastStart < FirmTime.MINUTE.ms() * 15 ) { return false; } return true; } // /** // * Returns time until the soonest the next meeting starts for the current thread in milliseconds. // * @return Time until the soonest the next meeting starts for the current thread in milliseconds. 
// */ // public synchronized long getTimeUntilNextMeetingSuggestedStart() { // long timeUntil = 5400; // for (Meeting meeting : meetings) { // if (timeUntil > meeting.timeUntilScheduledStart()) { // timeUntil = meeting.timeUntilScheduledStart(); // return timeUntil; // /** // * Returns time until the latest the next meeting starts for the current thread in milliseconds. // * @return Time until the latest the next meeting starts for the current thread in milliseconds. // */ // public synchronized long getTimeUntilNextMeetingMustStart() { // long timeUntil = 5400; // for (Meeting meeting : meetings) { // if (timeUntil > meeting.timeUntilMustStart()) { // timeUntil = meeting.timeUntilMustStart(); // return timeUntil; // /** // * Returns true if the current thread currently is (or should be) in a meeting. // * @return True if the current thread currently is (or should be) in a meeting. // */ // public synchronized boolean inMeeting() { // if (currentMeeting() == null) { // return false; // return true; // /** // * Returns time left in the current meeting that the thread is in. 0 if there is no meeting. // * Returns the meeting's duration, if the meeting has not actually started yet. // * @return Time left in the current meeting that the thread is in, 0 if there is no meeting, // * and the meeting's duration, if the meeting has not actually started yet. // */ // public synchronized long timeLeft() { // Meeting meeting = currentMeeting(); // if (meeting == null) { // return 0; // return meeting.estimatedTimeUntilEnd(); // /** // * Returns the meeting that the current thread should be in. Null if no meeting fulfills that criterion. // * @return The meeting that the current thread should be in. Null if no meeting fulfills that criterion. 
// */ // private synchronized Meeting currentMeeting() { // Thread dev = Thread.currentThread(); // for (Meeting meeting : meetings) { // if (meeting.currentlyRunning() // && meeting.devsRequired.contains(dev)) { // return meeting; // return null; // /** // * Representation of a meeting. // */ private class Meeting { /** When the meeting is scheduled to start in milliseconds from start */ public long scheduledStart; /** * How late people are allowed to be, e.g., meeting is at 4:00, but people can arrive * as late as 4:15, so the leeway would be 150 (15 minutes in milliseconds) */ public long leeway; /** When everyone arrived and the meeting started. -1 if that has not happened yet. */ public long actualStart; /** How long the meeting is in milliseconds */ public long duration; /** List of developers, team leads, and SPMs that need to be there. */ public List<Thread> devsRequired; /** List of developers, team leads, and SPMs that have arrived. */ public List<Thread> devsPresent; /** * Constructor for Meeting * @param time * Time the meeting starts * @param leeway * How late people can be * @param duration * How long the meeting is * @param devsRequired * List of developers, team leads, and SPMs required for the meeting to start */ public Meeting(long time, long leeway, long duration, ArrayList<Thread> devsRequired) { this.scheduledStart = time; this.leeway = leeway; this.duration = duration; this.devsRequired = devsRequired; this.devsPresent = new ArrayList<Thread>(); this.actualStart = -1; } // /** // * Returns time until the meeting should start. // * @return Time until the meeting should start. // */ // public synchronized long timeUntilScheduledStart() { // long timeUntil = scheduledStart - getTime(); // return timeUntil; // /** // * Returns time until the meeting MUST start. // * @return Time until the meeting MUST start. 
// */ // public synchronized long timeUntilMustStart() { // long timeUntil = scheduledStart + leeway - getTime(); // return timeUntil; // /** // * Current thread attempts to join this meeting. // * @return True if current thread is a member of this meeting and meeting is currently running. // */ // public synchronized boolean attemptJoin() { // Thread dev = Thread.currentThread(); // if (devsRequired.contains(dev)) { // if (!devsPresent.contains(dev)) { // devsPresent.add(dev); // if (devsPresent.size() == devsRequired.size()) { // this.actualStart = getTime(); // notifyAll(); // return true; // return false; // /** // * Returns true if the meeting is currently running. // * @return True if the meeting is currently running. // */ // public synchronized boolean currentlyRunning() { // if (getTime() > scheduledStart) { // scheduled start is before the // // current time // if (actualStart == -1) { // meeting should be starting, but not // // everyone's here // return true; // if (actualStart + duration < getTime()) { // meeting started and // // has not yet // // reached duration // return true; // return false; // /** // * How long until the meeting ends. Returns duration, if it has not started yet. // * @return How long until the meeting ends. Returns duration, if it has not started yet. // */ // public synchronized long estimatedTimeUntilEnd() { // if (actualStart != -1) { // long ete = (actualStart + duration) - getTime(); // return ete; // } else { // return duration; } public void doneWithRoom() { // TODO Auto-generated method stub } }
package com.google.tagpost;

import com.google.tagpost.spanner.DataService;
import io.grpc.Status;
import io.grpc.stub.StreamObserver;
import io.grpc.StatusRuntimeException;
import java.util.List;
import com.google.inject.Inject;
import com.google.common.flogger.FluentLogger;

/** Encapsulate all RPC methods of {@link TagpostServer} */
public final class TagpostService extends TagpostServiceGrpc.TagpostServiceImplBase {

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  /** Data-access layer used to look up threads and comments. */
  private final DataService dataService;

  @Inject
  public TagpostService(DataService dataService) {
    this.dataService = dataService;
  }

  /** Health-check style RPC: always responds with a static acknowledgement. */
  @Override
  public void fetchMessage(
      FetchMessageRequest req, StreamObserver<FetchMessageResponse> responseObserver) {
    FetchMessageResponse response = fetchMessageImpl(req);
    responseObserver.onNext(response);
    responseObserver.onCompleted();
  }

  /**
   * Returns every thread whose primary tag matches the request's tag.
   * Failures are logged and surfaced to the client as INTERNAL.
   */
  @Override
  public void fetchThreadsByTag(
      FetchThreadsByTagRequest req, StreamObserver<FetchThreadsByTagResponse> responseObserver) {
    try {
      FetchThreadsByTagResponse response = fetchThreadsByTagImpl(req);
      responseObserver.onNext(response);
      responseObserver.onCompleted();
    } catch (Exception e) {
      Status status = Status.INTERNAL.withDescription(e.getMessage());
      logger.atWarning().withCause(e).log("Fetch Threads By Tag Failed");
      responseObserver.onError(status.asRuntimeException());
    }
  }

  /**
   * Returns every comment belonging to the requested thread id.
   * Failures are logged and surfaced to the client as INTERNAL.
   */
  @Override
  public void fetchCommentsUnderThread(
      FetchCommentsUnderThreadRequest req,
      StreamObserver<FetchCommentsUnderThreadResponse> responseObserver) {
    try {
      FetchCommentsUnderThreadResponse response = fetchCommentsUnderThreadImpl(req);
      responseObserver.onNext(response);
      responseObserver.onCompleted();
    } catch (Exception e) {
      Status status = Status.INTERNAL.withDescription(e.getMessage());
      logger.atWarning().withCause(e).log("Fetch Comments UnderThread Failed");
      responseObserver.onError(status.asRuntimeException());
    }
  }

  private FetchThreadsByTagResponse fetchThreadsByTagImpl(FetchThreadsByTagRequest req) {
    String tag = req.getTag();
    // Parameterized flogger call: the message is only formatted if this
    // level is actually logged (the original concatenated eagerly).
    logger.atInfo().log("Fetching all Threads with primaryTag = %s", tag);
    List<Thread> threadList = dataService.getAllThreadsByTag(tag);
    return FetchThreadsByTagResponse.newBuilder().addAllThreads(threadList).build();
  }

  private FetchCommentsUnderThreadResponse fetchCommentsUnderThreadImpl(
      FetchCommentsUnderThreadRequest req) {
    long threadId = req.getThreadId();
    logger.atInfo().log("Fetching all comments under threadID = %d", threadId);
    List<Comment> commentList = dataService.getAllCommentsByThreadId(threadId);
    return FetchCommentsUnderThreadResponse.newBuilder().addAllComment(commentList).build();
  }

  private FetchMessageResponse fetchMessageImpl(FetchMessageRequest req) {
    return FetchMessageResponse.newBuilder().setMessage("Request received.").build();
  }
}
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;

import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;

/**
 * Main game panel: owns an off-screen buffer that a background thread
 * redraws roughly every 30 ms, and blits that buffer on repaint.
 */
public class Game extends JPanel {
    public static final String NAME = "Titanium Gastropod v0.0";
    public static final int WINDOW_W = 800, WINDOW_H = 600;

    /** Off-screen buffer the update thread draws into. */
    private final BufferedImage mScreenBuffer;

    public Game() {
        mScreenBuffer = new BufferedImage(WINDOW_W, WINDOW_H, BufferedImage.TYPE_INT_ARGB);

        setFocusable(true);
        setPreferredSize(new Dimension(WINDOW_W, WINDOW_H));

        Thread updateThread = new Thread("Game update thread") {
            @Override
            public void run() {
                while (true) {
                    Graphics2D g = (Graphics2D) mScreenBuffer.getGraphics();
                    g.setColor(Color.BLACK);
                    g.fillRect(0, 0, WINDOW_W, WINDOW_H);
                    // do game drawing here
                    g.dispose(); // release the native graphics context each frame
                    repaint();
                    try {
                        Thread.sleep(30);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        };
        // FIX: daemon thread, so the endless update loop cannot keep the JVM
        // alive after the window is closed.
        updateThread.setDaemon(true);
        updateThread.start();
    }

    /**
     * FIX: override paintComponent instead of paint. Overriding paint
     * bypasses Swing's painting pipeline (borders, children, double
     * buffering); paintComponent with a super call is the supported hook.
     */
    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        g.drawImage(mScreenBuffer, 0, 0, this);
    }

    public static void main(String args[]) {
        // FIX: build and show the UI on the Event Dispatch Thread, as Swing requires.
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                final JFrame frame = new JFrame(NAME);
                frame.getContentPane().add(new Game());
                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.pack();
                frame.setVisible(true);
            }
        });
    }
}
package ie.omk.smpp.util;

import java.io.ByteArrayOutputStream;

/**
 * Encoding class representing the HP-Roman8 character set.
 *
 * @version $Id$
 */
public final class HPRoman8Encoding extends AlphabetEncoding {
    /**
     * Data coding value. There isn't an 'official' value for HP-Roman8. Usually
     * it is the default encoding of the SMSC.
     */
    private static final int DCS = 0;

    /**
     * Forward table: byte value (index) to Java character. Entries of 0 mark
     * positions with no mapping (0x7f and 0x80-0x9f).
     *
     * FIX: the printable-ASCII run of this table had been corrupted — the
     * five characters '#', '$', '%', '&amp;' and '\'' between '"' and '(' were
     * missing, which shifted every subsequent mapping by five positions.
     * Restored per the HP Roman-8 code table, where 0x20-0x7e coincide with
     * US-ASCII.
     */
    private static final char[] CHAR_TABLE = {
        '\u0000', '\u0001', '\u0002', '\u0003', '\u0004', '\u0005', '\u0006', '\u0007',
        '\b', '\t', '\n', '\u000b', '\f', '\r', '\u000e', '\u000f',
        '\u0010', '\u0011', '\u0012', '\u0013', '\u0014', '\u0015', '\u0016', '\u0017',
        '\u0018', '\u0019', '\u001a', '\u001b', '\u001c', '\u001d', '\u001e', '\u001f',
        ' ', '!', '"', '#', '$', '%', '&', '\'',
        '(', ')', '*', '+', ',', '-', '.', '/',
        '0', '1', '2', '3', '4', '5', '6', '7',
        '8', '9', ':', ';', '<', '=', '>', '?',
        '@', 'A', 'B', 'C', 'D', 'E', 'F', 'G',
        'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O',
        'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W',
        'X', 'Y', 'Z', '[', '\\', ']', '^', '_',
        '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g',
        'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o',
        'p', 'q', 'r', 's', 't', 'u', 'v', 'w',
        'x', 'y', 'z', '{', '|', '}', '~', 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0,
        '\u00a0', '\u00c0', '\u00c2', '\u00c8', '\u00ca', '\u00cb', '\u00ce', '\u00cf',
        '\u00b4', '\u02cb', '\u02c6', '\u00a8', '\u02dc', '\u00d9', '\u00db', '\u20a4',
        '\u00af', '\u00dd', '\u00fd', '\u00b0', '\u00c7', '\u00e7', '\u00d1', '\u00f1',
        '\u00a1', '\u00bf', '\u00a4', '\u00a3', '\u00a5', '\u00a7', '\u0192', '\u00a2',
        '\u00e2', '\u00ea', '\u00f4', '\u00fb', '\u00e1', '\u00e9', '\u00f3', '\u00fa',
        '\u00e0', '\u00e8', '\u00f2', '\u00f9', '\u00e4', '\u00eb', '\u00f6', '\u00fc',
        '\u00c5', '\u00ee', '\u00d8', '\u00c6', '\u00e5', '\u00ed', '\u00f8', '\u00e6',
        '\u00c4', '\u00ec', '\u00d6', '\u00dc', '\u00c9', '\u00ef', '\u00df', '\u00d4',
        '\u00c1', '\u00c3', '\u00e3', '\u00d0', '\u00f0', '\u00cd', '\u00cc', '\u00d3',
        '\u00d2', '\u00d5', '\u00f5', '\u0160', '\u0161', '\u00da', '\u0178', '\u00ff',
        '\u00de', '\u00fe', '\u00b7', '\u00b5', '\u00b6', '\u00be', '\u2014', '\u00bc',
        '\u00bd', '\u00aa', '\u00ba', '\u00ab', '\u25a0', '\u00bb', '\u00b1',
    };

    /** Byte substituted when encoding a character with no Roman8 mapping ('?' by default). */
    private int unknownCharReplacement = 0x3f;

    public HPRoman8Encoding() {
        super(DCS);
    }

    /** Get the byte used in place of unmappable characters. */
    public int getUnknownCharReplacement() {
        return unknownCharReplacement;
    }

    /**
     * Set the byte used in place of unmappable characters.
     * @throws IllegalArgumentException if the value is outside 0..255.
     */
    public void setUnknownCharReplacement(int unknownCharReplacement) {
        if (unknownCharReplacement < 0 || unknownCharReplacement > 255) {
            throw new IllegalArgumentException(
                    "Invalid replacement character.");
        }
        this.unknownCharReplacement = unknownCharReplacement;
    }

    /**
     * Decode HP-Roman8 bytes into a Java String. Bytes beyond the table's
     * range decode to '?'; a null input yields the empty string.
     */
    public String decodeString(byte[] b) {
        if (b == null) {
            return "";
        }
        // StringBuilder: single-threaded use, no need for StringBuffer's locking.
        StringBuilder buf = new StringBuilder(b.length);
        for (int i = 0; i < b.length; i++) {
            int code = (int) b[i] & 0x000000ff;
            buf.append((code >= CHAR_TABLE.length) ? '?' : CHAR_TABLE[code]);
        }
        return buf.toString();
    }

    /**
     * Encode a Java String as HP-Roman8 bytes. Characters with no mapping
     * are replaced by {@link #getUnknownCharReplacement()}; a null input
     * yields an empty array.
     */
    public byte[] encodeString(String s) {
        if (s == null) {
            return new byte[0];
        }
        char[] c = s.toCharArray();
        ByteArrayOutputStream enc = new ByteArrayOutputStream(256);
        for (int loop = 0; loop < c.length; loop++) {
            // Linear scan of the table; fine for short SMS-sized payloads.
            int search = 0;
            for (; search < CHAR_TABLE.length; search++) {
                if (c[loop] == CHAR_TABLE[search]) {
                    enc.write((byte) search);
                    break;
                }
            }
            if (search == CHAR_TABLE.length) {
                enc.write(unknownCharReplacement);
            }
        }
        return enc.toByteArray();
    }
}
/**
 * A short program to run a game of bowling
 *
 * Scoring follows the standard ten-frame rules: a strike scores 10 plus the
 * next two rolls, a spare scores 10 plus the next roll, and an open frame
 * scores the sum of its two rolls.
 *
 * @author Mike Zrimsek
 *
 */
public class Game {
    // 21 is the maximum number of rolls in a game (9 two-roll frames + 3 in the tenth).
    private final int[] rollValues = new int[21];
    private int rollCounter = 0;

    /**
     * Execute one roll
     *
     * @param numPins
     *            Numbers of pins hit
     */
    public void roll(int numPins) {
        rollValues[rollCounter++] = numPins;
    }

    /**
     * Calculate total score of game based on values for each roll
     *
     * @return Total score for game
     */
    public int score() {
        int score = 0; // tracks score
        int rollIndex = 0; // track which roll currently on

        for (int frame = 0; frame < 10; frame++) {
            if (isStrike(rollIndex)) {
                // 10 + value of next 2 balls rolled; strike frames consume one roll
                score += 10 + rollValues[rollIndex + 1] + rollValues[rollIndex + 2];
                rollIndex++;
            } else if (isSpare(rollIndex)) {
                // 10 + value of next ball rolled
                score += 10 + rollValues[rollIndex + 2];
                rollIndex += 2;
            } else {
                // open frame: just the pins knocked down
                score += rollValues[rollIndex] + rollValues[rollIndex + 1];
                rollIndex += 2;
            }
        }
        return score;
    }

    /** True when the roll at the given index knocked down all ten pins. */
    private boolean isStrike(int rollIndex) {
        return rollValues[rollIndex] == 10;
    }

    /** True when the two rolls starting at the given index total ten pins. */
    private boolean isSpare(int rollIndex) {
        return rollValues[rollIndex] + rollValues[rollIndex + 1] == 10;
    }
}
package net.sf.picard.metrics;

import net.sf.picard.PicardException;
import net.sf.picard.util.FormatUtil;
import net.sf.picard.util.Histogram;
import net.sf.samtools.util.StringUtil;

import java.io.*;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.TreeSet;

/**
 * Contains a set of metrics that can be written to a file and parsed back
 * again. The set of metrics is composed of zero or more instances of a class,
 * BEAN, that extends {@link MetricBase} (all instances must be of the same type)
 * and may optionally include one or more histograms that share the same key set.
 *
 * @author Tim Fennell
 */
public class MetricsFile<BEAN extends MetricBase, HKEY extends Comparable> {
    // FIX: these two constants were garbled (the first literal had swallowed
    // the second declaration). Restored to the metrics-file markers that the
    // parser in read() relies on: "## " introduces a header-class line and
    // "# " introduces the header-value line that follows it; note that
    // METRIC_HEADER and HISTO_HEADER below both begin with MAJOR_HEADER_PREFIX.
    public static final String MAJOR_HEADER_PREFIX = "## ";
    public static final String MINOR_HEADER_PREFIX = "# ";
    public static final String SEPARATOR = "\t";
    public static final String HISTO_HEADER = "## HISTOGRAM\t";
    public static final String METRIC_HEADER = "## METRICS CLASS\t";

    private final List<Header> headers = new ArrayList<Header>();
    private final List<BEAN> metrics = new ArrayList<BEAN>();
    private final List<Histogram<HKEY>> histograms = new ArrayList<Histogram<HKEY>>();

    /** Adds a header to the collection of metrics. */
    public void addHeader(Header h) { this.headers.add(h); }

    /** Returns the list of headers. */
    public List<Header> getHeaders() { return Collections.unmodifiableList(this.headers); }

    /** Adds a bean to the collection of metrics. */
    public void addMetric(BEAN bean) { this.metrics.add(bean); }

    /** Returns the list of metric beans. */
    public List<BEAN> getMetrics() { return Collections.unmodifiableList(this.metrics); }

    /** Returns the first histogram in the metrics file, or null if there are none. */
    public Histogram<HKEY> getHistogram() {
        if (histograms.size() > 0) return this.histograms.get(0);
        else return null;
    }

    /** Sets (or replaces) the first histogram contained in the metrics file. */
    public void setHistogram(Histogram<HKEY> histogram) {
        if (this.histograms.isEmpty()) {
            if (histogram != null) this.histograms.add(histogram);
        }
        else {
            this.histograms.set(0, histogram);
        }
    }

    /** Adds a histogram to the list of histograms in the metrics file. */
    public void addHistogram(Histogram<HKEY> histogram) {
        this.histograms.add(histogram);
    }

    /** Returns the number of histograms added to the metrics file. */
    public int getNumHistograms() {
        return this.histograms.size();
    }

    /** Returns the list of headers with the specified type. */
    public List<Header> getHeaders(Class<? extends Header> type) {
        List<Header> tmp = new ArrayList<Header>();
        for (Header h : this.headers) {
            if (h.getClass().equals(type)) {
                tmp.add(h);
            }
        }
        return tmp;
    }

    /**
     * Writes out the metrics file to the supplied file. The file is written out
     * headers first, metrics second and histogram third.
     *
     * @param f a File into which to write the metrics
     */
    public void write(File f) {
        FileWriter w = null;
        try {
            w = new FileWriter(f);
            write(w);
        }
        catch (IOException ioe) {
            throw new PicardException("Could not write metrics to file: " + f.getAbsolutePath(), ioe);
        }
        finally {
            if (w != null) {
                try {
                    w.close();
                } catch (IOException e) {
                    // Best-effort close: the write either succeeded (and was
                    // flushed) or already threw above; nothing useful to add.
                }
            }
        }
    }

    /**
     * Writes out the metrics file to the supplied writer. The file is written out
     * headers first, metrics second and histogram third.
     *
     * @param w a Writer into which to write the metrics
     */
    public void write(Writer w) {
        try {
            FormatUtil formatter = new FormatUtil();
            BufferedWriter out = new BufferedWriter(w);
            printHeaders(out);
            out.newLine();

            printBeanMetrics(out, formatter);
            out.newLine();

            printHistogram(out, formatter);
            out.newLine();
            out.flush();
        }
        catch (IOException ioe) {
            throw new PicardException("Could not write metrics file.", ioe);
        }
    }

    /** Prints the headers into the provided writer, one class line plus one value line each. */
    private void printHeaders(BufferedWriter out) throws IOException {
        for (Header h : this.headers) {
            out.append(MAJOR_HEADER_PREFIX);
            out.append(h.getClass().getName());
            out.newLine();
            out.append(MINOR_HEADER_PREFIX);
            out.append(h.toString());
            out.newLine();
        }
    }

    /** Prints each of the metrics entries into the provided writer as tab-separated rows. */
    private void printBeanMetrics(BufferedWriter out, FormatUtil formatter) throws IOException {
        if (this.metrics.isEmpty()) {
            return;
        }

        // Write out a header row with the type of the metric class
        out.append(METRIC_HEADER + getBeanType().getName());
        out.newLine();

        // Write out the column headers
        Field[] fields = getBeanType().getFields();
        final int fieldCount = fields.length;
        for (int i=0; i<fieldCount; ++i) {
            out.append(fields[i].getName());
            if (i < fieldCount - 1) {
                out.append(MetricsFile.SEPARATOR);
            }
            else {
                out.newLine();
            }
        }

        // Write out each of the data rows
        for (BEAN bean : this.metrics) {
            for (int i=0; i<fieldCount; ++i) {
                try {
                    Object value = fields[i].get(bean);
                    // Values may not contain the separator or newline, or the file would not re-parse.
                    out.append(StringUtil.assertCharactersNotInString(formatter.format(value), '\t', '\n'));

                    if (i < fieldCount - 1) {
                        out.append(MetricsFile.SEPARATOR);
                    }
                    else {
                        out.newLine();
                    }
                }
                catch (IllegalAccessException iae) {
                    throw new PicardException("Could not read property " + fields[i].getName()
                            + " from class of type " + bean.getClass());
                }
            }
        }

        out.flush();
    }

    /** Prints the histograms, if any are present, as one key column plus one value column each. */
    private void printHistogram(BufferedWriter out, FormatUtil formatter) throws IOException {
        if (this.histograms.isEmpty()) {
            return;
        }

        // Build a combined key set across all histograms so rows line up.
        java.util.Set<HKEY> keys = new TreeSet<HKEY>();
        for (Histogram<HKEY> histo : histograms) {
            if (histo != null) keys.addAll(histo.keySet());
        }

        // Add a header for the histogram key type
        out.append(HISTO_HEADER + this.histograms.get(0).keySet().iterator().next().getClass().getName());
        out.newLine();

        // Output a header row
        out.append(StringUtil.assertCharactersNotInString(this.histograms.get(0).getBinLabel(), '\t', '\n'));
        for (Histogram<HKEY> histo : this.histograms) {
            out.append(SEPARATOR);
            out.append(StringUtil.assertCharactersNotInString(histo.getValueLabel(), '\t', '\n'));
        }
        out.newLine();

        for (HKEY key : keys) {
            out.append(key.toString());

            for (Histogram<HKEY> histo : this.histograms) {
                Histogram<HKEY>.Bin bin = histo.get(key);
                // Histograms without this key contribute a zero cell.
                final double value = (bin == null ? 0 : bin.getValue());

                out.append(SEPARATOR);
                out.append(formatter.format(value));
            }

            out.newLine();
        }
    }

    /** Gets the type of the metrics bean being used, or null if no metrics were added. */
    private Class<?> getBeanType() {
        if (this.metrics == null || this.metrics.isEmpty()) {
            return null;
        } else {
            return this.metrics.get(0).getClass();
        }
    }

    /**
     * Reads the Metrics in from the given reader: headers first, then the
     * optional metrics section, then any histograms.
     */
    public void read(Reader r) {
        BufferedReader in = new BufferedReader(r);
        FormatUtil formatter = new FormatUtil();
        String line = null;

        try {
            // First read the headers
            Header header = null;
            boolean inHeader = true;
            while ((line = in.readLine()) != null && inHeader) {
                line = line.trim();
                // A blank line signals the end of the headers, otherwise parse out
                // the header types and values and build the headers.
                if ("".equals(line)) {
                    inHeader = false;
                }
                else if (line.startsWith(MAJOR_HEADER_PREFIX)) {
                    if (header != null) {
                        throw new IllegalStateException("Consecutive header class lines encountered.");
                    }

                    String className = line.substring(MAJOR_HEADER_PREFIX.length()).trim();
                    try {
                        header = (Header) loadClass(className).newInstance();
                    }
                    catch (Exception e) {
                        throw new PicardException("Error load and/or instantiating an instance of " + className, e);
                    }
                }
                else if (line.startsWith(MINOR_HEADER_PREFIX)) {
                    if (header == null) {
                        throw new IllegalStateException("Header class must precede header value:" + line);
                    }
                    header.parse(line.substring(MINOR_HEADER_PREFIX.length()));
                    this.headers.add(header);
                    header = null;
                }
                else {
                    throw new PicardException("Illegal state. Found following string in metrics file header: " + line);
                }
            }

            if (line == null) {
                throw new PicardException("No lines in metrics file after header.");
            }

            // Then read the metrics if there are any.
            // FIX: guard against EOF here — the original called
            // in.readLine().trim() and would NPE on a file with no metrics section.
            while (line != null && !line.startsWith(MAJOR_HEADER_PREFIX)) {
                line = in.readLine();
                if (line != null) line = line.trim();
            }
            if (line != null && line.startsWith(METRIC_HEADER)) {
                // Get the metric class from the header
                String className = line.split(SEPARATOR)[1];
                Class<?> type = null;
                try {
                    type = loadClass(className);
                }
                catch (ClassNotFoundException cnfe) {
                    throw new PicardException("Could not locate class with name " + className, cnfe);
                }

                // Read the next line with the column headers
                String[] fieldNames = in.readLine().split(SEPARATOR);
                Field[] fields = new Field[fieldNames.length];
                for (int i=0; i<fieldNames.length; ++i) {
                    try {
                        fields[i] = type.getField(fieldNames[i]);
                    }
                    catch (Exception e) {
                        throw new PicardException("Could not get field with name " + fieldNames[i]
                                + " from class " + type.getName());
                    }
                }

                // Now read the values
                while ((line = in.readLine()) != null) {
                    if ("".equals(line.trim())) {
                        break;
                    }
                    else {
                        // Split with limit -1 so trailing empty columns are preserved.
                        String[] values = line.split(SEPARATOR, -1);
                        BEAN bean = null;

                        try {
                            bean = (BEAN) type.newInstance();
                        }
                        catch (Exception e) {
                            throw new PicardException("Error instantiating a " + type.getName(), e);
                        }

                        for (int i=0; i<fields.length; ++i) {
                            Object value = null;
                            if (values[i] != null && values[i].length() > 0) {
                                value = formatter.parseObject(values[i], fields[i].getType());
                            }

                            try {
                                fields[i].set(bean, value);
                            }
                            catch (Exception e) {
                                throw new PicardException("Error setting field " + fields[i].getName()
                                        + " on class of type " + type.getName(), e);
                            }
                        }

                        this.metrics.add(bean);
                    }
                }
            }

            // Then read the histograms if any are present
            while (line != null && !line.startsWith(MAJOR_HEADER_PREFIX)) {
                line = in.readLine();
            }

            if (line != null && line.startsWith(HISTO_HEADER)) {
                // Get the key type of the histogram
                String keyClassName = line.split(SEPARATOR)[1].trim();
                Class<?> keyClass = null;

                try {
                    keyClass = loadClass(keyClassName);
                }
                catch (ClassNotFoundException cnfe) {
                    throw new PicardException("Could not load class with name " + keyClassName);
                }

                // Read the next line with the bin and value labels
                String[] labels = in.readLine().split(SEPARATOR);
                for (int i=1; i<labels.length; ++i) {
                    this.histograms.add(new Histogram<HKEY>(labels[0], labels[i]));
                }

                // Read the entries in the histograms
                while ((line = in.readLine()) != null && !"".equals(line)) {
                    String[] fields = line.trim().split(SEPARATOR);
                    HKEY key = (HKEY) formatter.parseObject(fields[0], keyClass);

                    for (int i=1; i<fields.length; ++i) {
                        double value = formatter.parseDouble(fields[i]);
                        this.histograms.get(i-1).increment(key, value);
                    }
                }
            }
        }
        catch (IOException ioe) {
            throw new PicardException("Could not read metrics from reader.", ioe);
        }
    }

    /** Attempts to load a class, taking into account that some classes have "migrated" from the broad to sf. */
    private Class<?> loadClass(String className) throws ClassNotFoundException {
        try {
            return Class.forName(className);
        }
        catch (ClassNotFoundException cnfe) {
            if (className.startsWith("edu.mit.broad.picard")) {
                return loadClass(className.replace("edu.mit.broad.picard", "net.sf.picard"));
            }
            else {
                throw cnfe;
            }
        }
    }

    /** Checks that the headers, metrics and histogram are all equal. */
    @Override
    public boolean equals(Object o) {
        if (o == null) {
            return false;
        }
        if (getClass() != o.getClass()) {
            return false;
        }

        MetricsFile that = (MetricsFile) o;

        if (!this.headers.equals(that.headers)) {
            return false;
        }
        if (!this.metrics.equals(that.metrics)) {
            return false;
        }
        if (!this.histograms.equals(that.histograms)) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        // NOTE: histograms are deliberately not folded in; equal objects
        // (equal headers and metrics) still hash equally, so the contract holds.
        int result = headers.hashCode();
        result = 31 * result + metrics.hashCode();
        return result;
    }
}
package net.sf.picard.sam;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.List;
import java.util.ArrayList;

import net.sf.picard.cmdline.Usage;
import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMValidationError;
import net.sf.picard.PicardException;
import net.sf.picard.cmdline.CommandLineProgram;
import net.sf.picard.cmdline.Option;
import net.sf.picard.cmdline.StandardOptionDefinitions;
import net.sf.picard.io.IoUtil;
import net.sf.picard.reference.ReferenceSequenceFile;
import net.sf.picard.reference.ReferenceSequenceFileFactory;

/**
 * Commandline program wrapping SamFileValidator.
 *
 * @author Doug Voet
 */
public class ValidateSamFile extends CommandLineProgram {
    @Usage
    public final String usage = getStandardUsagePreamble() +
            "Read a SAM or BAM file and report on its validity.";

    /** VERBOSE reports individual errors (up to MAX_OUTPUT); SUMMARY reports counts per type. */
    public enum Mode { VERBOSE, SUMMARY }

    @Option(shortName=StandardOptionDefinitions.INPUT_SHORT_NAME, doc="Input SAM/BAM file")
    public File INPUT;

    @Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME,
            doc="Output file or standard out if missing",
            optional=true)
    public File OUTPUT;

    @Option(shortName="M", doc="Mode of output")
    public Mode MODE = Mode.VERBOSE;

    @Option(doc="List of validation error types to ignore.")
    public List<SAMValidationError.Type> IGNORE = new ArrayList<SAMValidationError.Type>();

    @Option(shortName="MO", doc="The maximum number of lines output in verbose mode")
    public Integer MAX_OUTPUT = 100;

    @Option(shortName=StandardOptionDefinitions.REFERENCE_SHORT_NAME,
            doc="Reference sequence file, the NM tag check will be skipped if this is missing",
            optional=true)
    public File REFERENCE_SEQUENCE;

    @Option(doc="If true, only report errors, and ignore warnings.")
    public boolean IGNORE_WARNINGS = false;

    public static void main(final String[] args) {
        System.exit(new ValidateSamFile().instanceMain(args));
    }

    /**
     * Validates INPUT and writes the report to OUTPUT (or standard out).
     *
     * @return 0 always; validation findings go to the report, not the exit code.
     */
    @Override
    protected int doWork() {
        IoUtil.assertFileIsReadable(INPUT);

        // The reference is optional; without it the NM-tag check is skipped.
        ReferenceSequenceFile reference = null;
        if (REFERENCE_SEQUENCE != null) {
            IoUtil.assertFileIsReadable(REFERENCE_SEQUENCE);
            reference = ReferenceSequenceFileFactory.getReferenceSequenceFile(REFERENCE_SEQUENCE);
        }

        final PrintWriter out;
        if (OUTPUT != null) {
            IoUtil.assertFileIsWritable(OUTPUT);
            try {
                out = new PrintWriter(OUTPUT);
            } catch (FileNotFoundException e) {
                // we already asserted this so we should not get here
                throw new PicardException("Unexpected exception", e);
            }
        } else {
            out = new PrintWriter(System.out);
        }

        // Silence the reader's own validation; the validator reports instead.
        SAMFileReader.setDefaultValidationStringency(SAMFileReader.ValidationStringency.SILENT);
        final SAMFileReader samReader = new SAMFileReader(INPUT);
        final SamFileValidator validator = new SamFileValidator(out);
        validator.setErrorsToIgnore(IGNORE);
        if (IGNORE_WARNINGS) {
            validator.setIgnoreWarnings(IGNORE_WARNINGS);
        }
        if (MODE == ValidateSamFile.Mode.SUMMARY) {
            validator.setVerbose(false, 0);
        } else {
            validator.setVerbose(true, MAX_OUTPUT);
        }
        validator.validateBamFileTermination(INPUT);

        switch (MODE) {
            case SUMMARY:
                validator.validateSamFileSummary(samReader, reference);
                break;
            case VERBOSE:
                validator.validateSamFileVerbose(samReader, reference);
                break;
        }

        // FIX: PrintWriter buffers its output and was never flushed; without
        // this the report could be silently truncated (the System.out writer
        // is never closed, so flushing here is the only guarantee).
        out.flush();

        return 0;
    }
}
package edu.umd.cs.findbugs;

import java.util.*;

import org.apache.bcel.*;
import org.apache.bcel.classfile.*;
import org.apache.bcel.generic.*;

import edu.umd.cs.daveho.ba.*;

/**
 * The site of a method call: the enclosing method plus the location
 * (basic block and instruction) of the invoke instruction itself.
 */
public class CallSite {
    private final Method method;
    private final Location location;

    /**
     * Constructor.
     *
     * @param method     the method containing the call site
     * @param basicBlock the basic block where the call site is located
     * @param handle     the instruction which performs the call
     */
    public CallSite(Method method, BasicBlock basicBlock, InstructionHandle handle) {
        this.method = method;
        this.location = new Location(handle, basicBlock);
    }

    /** Get the method containing the call site. */
    public Method getMethod() {
        return method;
    }

    /** Get the Location (basic block and instruction) where the call site is located. */
    public Location getLocation() {
        return location;
    }

    /** Get the basic block where the call site is located. */
    public BasicBlock getBasicBlock() {
        return location.getBasicBlock();
    }

    /** Get the instruction which performs the call. */
    public InstructionHandle getHandle() {
        return location.getHandle();
    }

    @Override
    public int hashCode() {
        // Identity-based hashing to match the identity comparisons in equals().
        int hash = System.identityHashCode(method);
        hash ^= getBasicBlock().getId();
        hash ^= System.identityHashCode(location.getHandle());
        return hash;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof CallSite)) {
            return false;
        }
        CallSite that = (CallSite) o;
        // Reference equality is intentional: two CallSites are equal only when
        // they refer to the very same method, block, and instruction objects.
        return this.method == that.method
                && this.getBasicBlock() == that.getBasicBlock()
                && this.getHandle() == that.getHandle();
    }
}

// vim:ts=4
package edu.umd.cs.findbugs; import java.io.DataInputStream; import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Constructor; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import org.apache.bcel.Repository; import org.apache.bcel.classfile.ClassFormatException; import org.apache.bcel.classfile.ClassParser; import org.apache.bcel.classfile.JavaClass; import org.apache.bcel.util.ClassPath; import edu.umd.cs.findbugs.annotations.SuppressWarnings; import edu.umd.cs.findbugs.ba.AbstractClassMember; import edu.umd.cs.findbugs.ba.AnalysisContext; import edu.umd.cs.findbugs.ba.AnalysisException; import edu.umd.cs.findbugs.ba.AnalysisFeatures; import edu.umd.cs.findbugs.ba.ClassContext; import edu.umd.cs.findbugs.ba.URLClassPath; import edu.umd.cs.findbugs.ba.URLClassPathRepository; import edu.umd.cs.findbugs.classfile.ClassDescriptor; import edu.umd.cs.findbugs.classfile.IClassObserver; import edu.umd.cs.findbugs.config.AnalysisFeatureSetting; import edu.umd.cs.findbugs.config.CommandLine; import edu.umd.cs.findbugs.config.UserPreferences; import edu.umd.cs.findbugs.config.CommandLine.HelpRequestedException; import edu.umd.cs.findbugs.filter.Filter; import edu.umd.cs.findbugs.filter.FilterException; import edu.umd.cs.findbugs.plan.AnalysisPass; import edu.umd.cs.findbugs.plan.ExecutionPlan; import edu.umd.cs.findbugs.plan.OrderingConstraintException; import edu.umd.cs.findbugs.util.Archive; import edu.umd.cs.findbugs.util.ClassName; import edu.umd.cs.findbugs.visitclass.Constants2; /** * An instance of this class is used to apply 
the selected set of * analyses on some collection of Java classes. It also implements the * command line interface. * * <p>It is no longer possible to create instances of this class. * Use {@link FindBugs2} instead. Some useful static methods remain * in this class, but may eventually be moved elsewhere. * * @author Bill Pugh * @author David Hovemeyer * */ public class FindBugs implements Constants2, ExitCodes, IFindBugsEngine { /** * Delegating InputStream wrapper that never closes the * underlying input stream. */ private static class NoCloseInputStream extends DataInputStream { /** * Constructor. * @param in the real InputStream */ public NoCloseInputStream(InputStream in) { super(in); } @Override public void close() { } } /** * Work list item specifying a file/directory/URL containing * class files to analyze. */ private static class ArchiveWorkListItem { private String fileName; private boolean explicit; /** * Constructor. * * @param fileName file/directory/URL * @param explicit true if this source of classes appeared explicitly * in the project file, false if was found indirectly * (e.g., a nested jar file in a .war file) */ public ArchiveWorkListItem(String fileName, boolean explicit) { this.fileName = fileName; this.explicit = explicit; } /** * Get the file/directory/URL. */ public String getFileName() { return fileName; } /** * Return whether this class source appeared explicitly in * the project file. */ public boolean isExplicit() { return explicit; } } /** * Interface for an object representing a source of class files to analyze. */ private interface ClassProducer { /** * Get the next class to analyze. * * @return the class, or null of there are no more classes for this ClassProducer * @throws IOException if an IOException occurs * @throws InterruptedException if the thread is interrupted */ public JavaClass getNextClass() throws IOException, InterruptedException; /** * Did this class producer scan any Java source files? 
*/ public boolean containsSourceFiles(); /** * Return the latest creation/modification time of any of the class files scanned. * @return the last modification time */ public long getLastModificationTime(); /** * Close any internal files or streams. */ public void close(); } /** * ClassProducer for single class files. */ private class SingleClassProducer implements ClassProducer { private URL url; long time = 0; /** * Constructor. * * @param url the single class file to be analyzed */ public SingleClassProducer(URL url) { this.url = url; } public JavaClass getNextClass() throws IOException, InterruptedException { if (url == null) return null; if (Thread.interrupted()) throw new InterruptedException(); URL urlToParse = url; url = null; // don't return it next time // ClassScreener may veto this class. if (!classScreener.matches(urlToParse.toString())) return null; try { URLConnection u = urlToParse.openConnection(); time = u.getLastModified(); return parseFromStream(u.getInputStream(), urlToParse.toString()); } catch (ClassFormatException e) { throw new ClassFormatException("Invalid class file format for " + urlToParse.toString() + ": " + e.getMessage()); } } public boolean containsSourceFiles() { return false; } public void close() { // Nothing to do here } /* (non-Javadoc) * @see edu.umd.cs.findbugs.FindBugs.ClassProducer#getLatestTimeOfClass() */ public long getLastModificationTime() { return time; } } /** * ClassProducer for zip/jar archives. 
 */
    private class ZipClassProducer implements ClassProducer {
        private URL url;
        // Shared with the caller: nested archives discovered while scanning
        // are pushed onto this work list.
        private LinkedList<ArchiveWorkListItem> archiveWorkList;
        // Shared with the caller: archives containing screened-out classes
        // are recorded here so those classes remain resolvable.
        private List<String> additionalAuxClasspathEntryList;
        private ZipInputStream zipInputStream;
        private boolean containsSourceFiles;
        private long time = 0;     // latest .class entry timestamp seen so far
        private long zipTime = 0;  // modification time of the archive itself

        public ZipClassProducer(URL url, LinkedList<ArchiveWorkListItem> archiveWorkList,
                List<String> additionalAuxClasspathEntryList) throws IOException {
            this.url = url;
            this.archiveWorkList = archiveWorkList;
            this.additionalAuxClasspathEntryList = additionalAuxClasspathEntryList;
            if (DEBUG) System.out.println("Opening jar/zip input stream for " + url.toString());
            URLConnection u = url.openConnection();
            this.zipTime = u.getLastModified();
            this.zipInputStream = new ZipInputStream(u.getInputStream());
            this.containsSourceFiles = false;
        }

        /**
         * Advance through the archive until the next .class entry is found
         * and parsed, recording nested archives and .java entries on the way.
         */
        public JavaClass getNextClass() throws IOException, InterruptedException {
            for (;;) {
                if (Thread.interrupted())
                    throw new InterruptedException();
                ZipEntry zipEntry = zipInputStream.getNextEntry();
                if (zipEntry == null)
                    return null;

                try {
                    String entryName = zipEntry.getName();

                    // ClassScreener may veto this class.
                    if (!classScreener.matches(entryName)) {
                        // Add archive URL to aux classpath so the skipped class
                        // can still be referenced during analysis.
                        if (!additionalAuxClasspathEntryList.contains(url.toString())) {
                            additionalAuxClasspathEntryList.add(url.toString());
                        }
                        continue;
                    }

                    String fileExtension = URLClassPath.getFileExtension(entryName);
                    if (fileExtension != null) {
                        if (fileExtension.equals(".class")) {
                            long modTime = zipEntry.getTime();
                            if (modTime > time) time = modTime;
                            // NoCloseInputStream keeps the parser from closing
                            // the shared zip stream.
                            return parseClass(url.toString(), new NoCloseInputStream(zipInputStream), entryName);
                        } else if (Archive.ARCHIVE_EXTENSION_SET.contains(fileExtension)) {
                            // Add nested archive to archive work list
                            // (only one level of nesting: skip if this URL is
                            // itself already a jar: "!/" URL).
                            if (url.toString().indexOf("!/") < 0) {
                                ArchiveWorkListItem nestedItem =
                                        new ArchiveWorkListItem("jar:" + url.toString() + "!/" + entryName, false);
                                archiveWorkList.addFirst(nestedItem);
                            }
                        } else if (fileExtension.equals(".java")) {
                            containsSourceFiles = true;
                        }
                    }
                } finally {
                    zipInputStream.closeEntry();
                }
            }
        }

        public boolean containsSourceFiles() {
            return containsSourceFiles;
        }

        public void close() {
            if (zipInputStream != null) {
                try {
                    zipInputStream.close();
                } catch (IOException ignore) {
                    // Ignore
                }
            }
        }

        static final long millisecondsInAYear = 31556926000L;

        /**
         * Prefer the newest .class timestamp unless the archive itself is
         * more than a year newer (entry times inside old zips are unreliable).
         */
        public long getLastModificationTime() {
            if (time + millisecondsInAYear > zipTime) return time;
            return zipTime;
        }
    }

    /**
     * ClassProducer for directories.
     * The directory is scanned recursively for class files.
*/ private class DirectoryClassProducer implements ClassProducer { private String dirName; private List<String> additionalAuxClasspathEntryList; private Iterator<String> rfsIter; private boolean containsSourceFiles; private long time; public DirectoryClassProducer(String dirName, List<String> additionalAuxClasspathEntryList) throws InterruptedException { this.dirName = dirName; this.additionalAuxClasspathEntryList = additionalAuxClasspathEntryList; FileFilter filter = new FileFilter() { public boolean accept(File file) { String fileName = file.getName(); if (file.isDirectory() || fileName.endsWith(".class")) return true; if (fileName.endsWith(".java")) containsSourceFiles = true; return false; } }; // This will throw InterruptedException if the thread is // interrupted. RecursiveFileSearch rfs = new RecursiveFileSearch(dirName, filter).search(); this.rfsIter = rfs.fileNameIterator(); this.containsSourceFiles = false; } public JavaClass getNextClass() throws IOException, InterruptedException { String fileName; for (;;) { if (!rfsIter.hasNext()) return null; fileName = rfsIter.next(); if (classScreener.matches(fileName)) { break; } else { // Add directory URL to aux classpath String dirURL= "file:" + dirName; if (!additionalAuxClasspathEntryList.contains(dirURL)) { //System.out.println("Adding additional aux classpath entry: " + dirURL); additionalAuxClasspathEntryList.add(dirURL); } } } try { long modTime = new File(fileName).lastModified(); if (time < modTime) time = modTime; return parseClass(new URL("file:" + fileName)); } catch (ClassFormatException e) { throw new ClassFormatException("Invalid class file format for " + fileName + ": " + e.getMessage()); } } public boolean containsSourceFiles() { return containsSourceFiles; } public void close() { // Nothing to do here } /* (non-Javadoc) * @see edu.umd.cs.findbugs.FindBugs.ClassProducer#getLastModificationTime() */ public long getLastModificationTime() { return time; } } public static final 
AnalysisFeatureSetting[] MIN_EFFORT = new AnalysisFeatureSetting[]{
        new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, true),
        new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, false),
        new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, false),
        new AnalysisFeatureSetting(AnalysisFeatures.SKIP_HUGE_METHODS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.INTERATIVE_OPCODE_STACK_ANALYSIS, false),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_GUARANTEED_VALUE_DEREFS_IN_NULL_POINTER_ANALYSIS, false),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_VALUE_NUMBERS_IN_NULL_POINTER_ANALYSIS, false),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, false),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS_OF_REFERENCED_CLASSES, false),
    };

    /** Analysis feature settings for "less" effort. */
    public static final AnalysisFeatureSetting[] LESS_EFFORT = new AnalysisFeatureSetting[]{
        new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, false),
        new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, true),
        new AnalysisFeatureSetting(AnalysisFeatures.SKIP_HUGE_METHODS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.INTERATIVE_OPCODE_STACK_ANALYSIS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_GUARANTEED_VALUE_DEREFS_IN_NULL_POINTER_ANALYSIS, false),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_VALUE_NUMBERS_IN_NULL_POINTER_ANALYSIS, false),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, false),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS_OF_REFERENCED_CLASSES, false),
    };

    /** Default analysis feature settings. */
    public static final AnalysisFeatureSetting[] DEFAULT_EFFORT = new AnalysisFeatureSetting[]{
        new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, false),
        new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, true),
        new AnalysisFeatureSetting(AnalysisFeatures.SKIP_HUGE_METHODS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.INTERATIVE_OPCODE_STACK_ANALYSIS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_GUARANTEED_VALUE_DEREFS_IN_NULL_POINTER_ANALYSIS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_VALUE_NUMBERS_IN_NULL_POINTER_ANALYSIS, true),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, true),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS_OF_REFERENCED_CLASSES, false),
    };

    /** Analysis feature settings for "more" effort. */
    public static final AnalysisFeatureSetting[] MORE_EFFORT = new AnalysisFeatureSetting[]{
        new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, false),
        new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, true),
        new AnalysisFeatureSetting(AnalysisFeatures.SKIP_HUGE_METHODS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.INTERATIVE_OPCODE_STACK_ANALYSIS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_GUARANTEED_VALUE_DEREFS_IN_NULL_POINTER_ANALYSIS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_VALUE_NUMBERS_IN_NULL_POINTER_ANALYSIS, true),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, true),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS_OF_REFERENCED_CLASSES, false),
    };

    /** Maximum-effort analysis feature settings (also analyzes huge methods
     *  and referenced classes interprocedurally). */
    public static final AnalysisFeatureSetting[] MAX_EFFORT = new AnalysisFeatureSetting[]{
        new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, false),
        new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, true),
        new AnalysisFeatureSetting(AnalysisFeatures.SKIP_HUGE_METHODS, false),
        new AnalysisFeatureSetting(AnalysisFeatures.INTERATIVE_OPCODE_STACK_ANALYSIS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_GUARANTEED_VALUE_DEREFS_IN_NULL_POINTER_ANALYSIS, true),
        new AnalysisFeatureSetting(AnalysisFeatures.TRACK_VALUE_NUMBERS_IN_NULL_POINTER_ANALYSIS, true),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, true),
        new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS_OF_REFERENCED_CLASSES, true),
    };

    // Debugging switches, controlled by system properties.
    public static final boolean DEBUG = SystemProperties.getBoolean("findbugs.debug");
    public static final boolean TIMEDEBUG = SystemProperties.getBoolean("findbugs.time");
    public static final int TIMEQUANTUM = SystemProperties.getInteger("findbugs.time.quantum", 1000);

    /**
     * FindBugs home directory.
     */
    private static String home;

    /**
     * Known URL protocols.
     * Filename URLs that do not have an explicit protocol are
     * assumed to be files.
     */
    static public final Set<String> knownURLProtocolSet = new HashSet<String>();

    static {
        knownURLProtocolSet.add("file");
        knownURLProtocolSet.add("http");
        knownURLProtocolSet.add("https");
        knownURLProtocolSet.add("jar");
    }

    // Per-analysis state; most of it is configured through the
    // IFindBugsEngine setters before execute() is called.
    private ErrorCountingBugReporter bugReporter;
    private boolean relaxedReportingMode;
    private Project project;
    private DetectorFactoryCollection detectorFactoryCollection;
    private UserPreferences userPreferences;
    private List<IClassObserver> classObserverList;
    private ExecutionPlan executionPlan;
    private FindBugsProgress progressCallback;
    private IClassScreener classScreener;
    private AnalysisContext analysisContext;
    private String currentClass;
    private Map<String,Long> detectorTimings;  // only populated when DEBUG is set
    private boolean useTrainingInput;
    private boolean emitTrainingOutput;
    private String trainingInputDir;
    private String trainingOutputDir;
    private AnalysisFeatureSetting[] settingList = DEFAULT_EFFORT;
    private String releaseName;
    private String projectName;
    private int passCount;
    private String sourceInfoFile;

    /**
     * Constructor.
     * The setBugReporter() and setProject() methods must be called
     * before this object is used.
     *
     * @deprecated You should use FindBugs2 instead.
 */
    public @Deprecated FindBugs() {
        this.relaxedReportingMode = false;
        this.classObserverList = new LinkedList<IClassObserver>();
        // Create a no-op progress callback.
        this.progressCallback = new NoOpFindBugsProgress();
        // Class screener
        this.classScreener = new ClassScreener();
    }

    /**
     * Constructor.
     *
     * @param bugReporter the BugReporter object that will be used to report
     *                    BugInstance objects, analysis errors, class to source mapping, etc.
     * @param project the Project indicating which files to analyze and
     *                the auxiliary classpath to use; note that the FindBugs
     *                object will create a private copy of the Project object
     *
     * @deprecated You should use FindBugs2 instead.
     */
    public @Deprecated FindBugs(BugReporter bugReporter, Project project) {
        this();
        if (bugReporter == null)
            throw new IllegalArgumentException("null bugReporter");
        if (project == null)
            throw new IllegalArgumentException("null project");
        setBugReporter(bugReporter);
        setProject(project);
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#setDetectorFactoryCollection(edu.umd.cs.findbugs.DetectorFactoryCollection)
     */
    public void setDetectorFactoryCollection(DetectorFactoryCollection detectorFactoryCollection) {
        this.detectorFactoryCollection = detectorFactoryCollection;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#getBugReporter()
     */
    public BugReporter getBugReporter() {
        return bugReporter;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#setBugReporter(edu.umd.cs.findbugs.BugReporter)
     */
    public void setBugReporter(BugReporter bugReporter) {
        // Wrap the reporter so error/bug counts can be queried later, and
        // register it to be notified of each class visited.
        this.bugReporter = new ErrorCountingBugReporter(bugReporter);
        addClassObserver(bugReporter);
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#setProject(edu.umd.cs.findbugs.Project)
     */
    public void setProject(Project project) {
        // Keep a private copy so later mutation by the caller has no effect.
        this.project = project.duplicate();
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#getProject()
     */
    public Project getProject() {
        return project;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#setProgressCallback(edu.umd.cs.findbugs.FindBugsProgress)
     */
    public void setProgressCallback(FindBugsProgress progressCallback) {
        this.progressCallback = progressCallback;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#addFilter(java.lang.String, boolean)
     */
    public void addFilter(String filterFileName, boolean include) throws IOException, FilterException {
        configureFilter(bugReporter, filterFileName, include);
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#setUserPreferences(edu.umd.cs.findbugs.config.UserPreferences)
     */
    public void setUserPreferences(UserPreferences userPreferences) {
        this.userPreferences = userPreferences;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#addClassObserver(edu.umd.cs.findbugs.classfile.IClassObserver)
     */
    public void addClassObserver(IClassObserver classObserver) {
        classObserverList.add(classObserver);
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#setClassScreener(edu.umd.cs.findbugs.ClassScreener)
     */
    public void setClassScreener(IClassScreener classScreener) {
        this.classScreener = classScreener;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#setRelaxedReportingMode(boolean)
     */
    public void setRelaxedReportingMode(boolean relaxedReportingMode) {
        this.relaxedReportingMode = relaxedReportingMode;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#enableTrainingOutput(java.lang.String)
     */
    public void enableTrainingOutput(String trainingOutputDir) {
        this.emitTrainingOutput = true;
        this.trainingOutputDir = trainingOutputDir;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#enableTrainingInput(java.lang.String)
     */
    public void enableTrainingInput(String trainingInputDir) {
        this.useTrainingInput = true;
        this.trainingInputDir = trainingInputDir;
    }

    /* (non-Javadoc)
     * @see edu.umd.cs.findbugs.IFindBugsEngine#setAnalysisFeatureSettings(edu.umd.cs.findbugs.config.AnalysisFeatureSetting[])
     */
    public void
setAnalysisFeatureSettings(AnalysisFeatureSetting[] settingList) { if (settingList != null) this.settingList = settingList; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getReleaseName() */ public String getReleaseName() { return releaseName; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#setReleaseName(java.lang.String) */ public void setReleaseName(String releaseName) { this.releaseName = releaseName; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#setSourceInfoFile(java.lang.String) */ public void setSourceInfoFile(String sourceInfoFile) { this.sourceInfoFile = sourceInfoFile; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#execute() */ public void execute() throws java.io.IOException, InterruptedException { // Configure the analysis context analysisContext = AnalysisContext.create(bugReporter); // We still need to call analysisContext.initDatabases(), but not until after we have set up the repository. analysisContext.setSourcePath(project.getSourceDirList()); if (sourceInfoFile != null) { analysisContext.getSourceInfoMap().read(new FileInputStream(sourceInfoFile)); } // Enable/disable relaxed reporting mode FindBugsAnalysisFeatures.setRelaxedMode(relaxedReportingMode); // Enable input/output of interprocedural property databases configureTrainingDatabases(this); // Configure analysis features configureAnalysisFeatures(); // Set the release name and timestamp(s) in the BugCollection (if we are generating one). configureBugCollection(this); // Create execution plan try { createExecutionPlan(); } catch (OrderingConstraintException e) { IOException ioe = new IOException("Invalid detector ordering constraints"); ioe.initCause(e); throw ioe; } // Clear the repository of classes analysisContext.clearRepository(); // Get list of files to analyze. 
LinkedList<ArchiveWorkListItem> archiveWorkList = new LinkedList<ArchiveWorkListItem>(); for (String fileName : project.getFileList()) { archiveWorkList.add(new ArchiveWorkListItem(fileName, true)); } // Report how many archives/directories/files will be analyzed, // for progress dialog in GUI progressCallback.reportNumberOfArchives(archiveWorkList.size()); // Keep track of the names of all classes to be analyzed List<String> repositoryClassList = new LinkedList<String>(); // set the initial repository classpath. setRepositoryClassPath(); // Record additional entries that should be added to // the aux classpath. These occur when one or more classes // in a directory or archive are skipped, to ensure that // the skipped classes can still be referenced. List<String> additionalAuxClasspathEntryList = new LinkedList<String>(); // Add all classes in analyzed archives/directories/files while (!archiveWorkList.isEmpty()) { ArchiveWorkListItem item = archiveWorkList.removeFirst(); scanArchiveOrDirectory(item, archiveWorkList, repositoryClassList, additionalAuxClasspathEntryList); } // Add "extra" aux classpath entries needed to ensure that // skipped classes can be referenced. addCollectionToClasspath(additionalAuxClasspathEntryList); // finish up initializing analysisContext analysisContext.initDatabases(); // Examine all classes for bugs. // Don't examine the same class more than once. // (The user might specify two jar files that contain // the same class.) 
if (DEBUG) detectorTimings = new HashMap<String,Long>(); Iterator<AnalysisPass> i = executionPlan.passIterator(); if (i.hasNext()) { AnalysisPass firstPass = i.next(); // Do this to force referenced classes to be loaded Set<JavaClass> allReferencedClasses = analysisContext.getSubtypes().getAllClasses(); ArrayList<String> listOfReferencedClasses = new ArrayList<String>(allReferencedClasses.size()); for(JavaClass c : allReferencedClasses) listOfReferencedClasses.add(c.getClassName()); executeAnalysisPass(firstPass, listOfReferencedClasses); analysisContext.clearClassContextCache(); } else if (DEBUG) System.err.println("execution plan has no passes"); // Execute each subsequent analysis pass in the execution plan while (i.hasNext()) { AnalysisPass analysisPass = i.next(); executeAnalysisPass(analysisPass, repositoryClassList); if (false) System.out.println("Class content stats: " + analysisContext.getClassContextStats()); // Clear the ClassContext cache. // It may contain data that should be recomputed on the next pass. 
analysisContext.clearClassContextCache(); } // Flush any queued bug reports bugReporter.finish(); // Flush any queued error reports bugReporter.reportQueuedErrors(); // Free up memory for reports analysisContext.clearRepository(); if (false) System.out.println(analysisContext.getClassContextStats()); } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getCurrentClass() */ public String getCurrentClass() { return currentClass; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getBugCount() */ public int getBugCount() { return bugReporter.getBugCount(); } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getErrorCount() */ public int getErrorCount() { return bugReporter.getErrorCount(); } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getMissingClassCount() */ public int getMissingClassCount() { return bugReporter.getMissingClassCount(); } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#emitTrainingOutput() */ public boolean emitTrainingOutput() { return emitTrainingOutput; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getUserPreferences() */ public UserPreferences getUserPreferences() { if (userPreferences == null) userPreferences = UserPreferences.createDefaultUserPreferences(); return userPreferences; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getTrainingInputDir() */ public String getTrainingInputDir() { return trainingInputDir; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getTrainingOutputDir() */ public String getTrainingOutputDir() { return trainingOutputDir; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#useTrainingInput() */ public boolean useTrainingInput() { return useTrainingInput; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#setScanNestedArchives(boolean) */ public void setScanNestedArchives(boolean scanNestedArchives) { // Ignore this - we're not really going to try to do this } /** * Set the FindBugs home 
directory.
     */
    public static void setHome(String home) {
        FindBugs.home = home;
    }

    /**
     * Get the FindBugs home directory.
     */
    public static String getHome() {
        // Lazily read from the findbugs.home system property.
        if (home == null) {
            home = SystemProperties.getProperty("findbugs.home");
            if (home == null) {
                System.err.println("Error: The findbugs.home property is not set!");
            }
        }
        return home;
    }

    /**
     * Configure analysis features.
     */
    private void configureAnalysisFeatures() {
        for (AnalysisFeatureSetting setting : settingList) {
            setting.configure(analysisContext);
        }
    }

    /**
     * Configure training databases.
     *
     * @param findBugs the engine whose training input/output settings to apply
     * @throws IOException if a configured training directory does not exist
     */
    public static void configureTrainingDatabases(IFindBugsEngine findBugs) throws IOException {
        if (findBugs.emitTrainingOutput()) {
            String trainingOutputDir = findBugs.getTrainingOutputDir();

            if (!new File(trainingOutputDir).isDirectory())
                throw new IOException("Training output directory " + trainingOutputDir + " does not exist");
            AnalysisContext.currentAnalysisContext().setDatabaseOutputDir(trainingOutputDir);
            // XXX: hack
            System.setProperty("findbugs.checkreturn.savetraining", new File(trainingOutputDir, "checkReturn.db").getPath());
        }
        if (findBugs.useTrainingInput()) {
            String trainingInputDir = findBugs.getTrainingInputDir();

            if (!new File(trainingInputDir).isDirectory())
                throw new IOException("Training input directory " + trainingInputDir + " does not exist");
            AnalysisContext.currentAnalysisContext().setDatabaseInputDir(trainingInputDir);
            AnalysisContext.currentAnalysisContext().loadInterproceduralDatabases();
            // XXX: hack
            System.setProperty("findbugs.checkreturn.loadtraining", new File(trainingInputDir, "checkReturn.db").getPath());
        } else {
            AnalysisContext.currentAnalysisContext().loadDefaultInterproceduralDatabases();
        }
    }

    /**
     * Create the ExecutionPlan.
 *
     * @throws OrderingConstraintException
     */
    private void createExecutionPlan() throws OrderingConstraintException {
        executionPlan = new ExecutionPlan();

        // Only enabled detectors should be part of the execution plan
        executionPlan.setDetectorFactoryChooser(new DetectorFactoryChooser() {
            // Detectors force-enabled via enable(), regardless of preferences.
            HashSet<DetectorFactory> forcedEnabled = new HashSet<DetectorFactory>();

            public boolean choose(DetectorFactory factory) {
                return FindBugs.isDetectorEnabled(FindBugs.this, factory) || forcedEnabled.contains(factory);
            }

            public void enable(DetectorFactory factory) {
                forcedEnabled.add(factory);
                factory.setEnabledButNonReporting(true);
            }
        });

        // Add plugins
        for (Iterator<Plugin> i = detectorFactoryCollection.pluginIterator(); i.hasNext();) {
            Plugin plugin = i.next();
            executionPlan.addPlugin(plugin);
        }

        // Build the plan
        executionPlan.build();
    }

    /**
     * Determine whether or not given DetectorFactory should be enabled.
     *
     * @param findBugs the IFindBugsEngine
     * @param factory the DetectorFactory
     * @return true if the DetectorFactory should be enabled, false otherwise
     */
    public static boolean isDetectorEnabled(IFindBugsEngine findBugs, DetectorFactory factory) {
        if (!factory.getPlugin().isEnabled())
            return false;

        if (!findBugs.getUserPreferences().isDetectorEnabled(factory))
            return false;

        if (!factory.isEnabledForCurrentJRE())
            return false;

        // Slow first pass detectors are usually disabled, but may be explicitly enabled
        if (!AnalysisContext.currentAnalysisContext().getBoolProperty(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS)
                && factory.isDetectorClassSubtypeOf(InterproceduralFirstPassDetector.class))
            return false;

        // Training detectors are enabled if, and only if, we are emitting training output
        boolean isTrainingDetector = factory.isDetectorClassSubtypeOf(TrainingDetector.class);
        boolean isNonReportingDetector = factory.isDetectorClassSubtypeOf(NonReportingDetector.class);
        if (findBugs.emitTrainingOutput()) {
            return isTrainingDetector || isNonReportingDetector;
        }
        if (isTrainingDetector)
            return false;

        return true;
    }

    /**
     * Based on Project settings, set the classpath to be used
     * by the Repository when looking up classes.
     */
    private void setRepositoryClassPath() {
        // Set aux classpath entries
        addCollectionToClasspath(project.getAuxClasspathEntryList());

        // Set implicit classpath entries
        addCollectionToClasspath(project.getImplicitClasspathEntryList());

        // Add system classpath entries
        String systemClassPath = ClassPath.getClassPath();
        StringTokenizer tok = new StringTokenizer(systemClassPath, File.pathSeparator);
        while (tok.hasMoreTokens()) {
            String entry = tok.nextToken();
            try {
                analysisContext.addClasspathEntry(entry);
            } catch (IOException e) {
                bugReporter.logError("Warning: could not add URL " + entry + " to classpath", e);
            }
        }
    }

    /**
     * Add all classpath entries in given Collection to the given
     * URLClassPathRepository.  Missing entries are not fatal:
     * we'll log them as analysis errors, but the analysis can
     * continue.
     *
     * @param collection classpath entries to add
     */
    private void addCollectionToClasspath(Collection<String> collection) {
        for (String entry : collection) {
            try {
                //repository.addURL(entry);
                analysisContext.addClasspathEntry(entry);
            } catch (IOException e) {
                bugReporter.logError("Warning: could not add URL " + entry + " to classpath", e);
            }
        }
    }

    /**
     * Add all classes contained in given file or directory to the BCEL Repository.
 *
     * @param item work list item representing the file, which may be a jar/zip
     *             archive, a single class file, or a directory to be recursively
     *             searched for class files
     * @param archiveWorkList work list of archives to analyze: this method
     *                        may add to the work list if it finds nested archives
     * @param repositoryClassList a List to which all classes found in
     *                            the archive or directory are added, so we later know
     *                            which files to analyze
     */
    private void scanArchiveOrDirectory(ArchiveWorkListItem item,
            LinkedList<ArchiveWorkListItem> archiveWorkList, List<String> repositoryClassList,
            List<String> additionalAuxClasspathEntryList)
            throws IOException, InterruptedException {

        String fileName = item.getFileName();
        ClassProducer classProducer = null;

        try {
            // Create a URL for the filename.
            // The protocol defaults to "file" if not explicitly
            // specified in the filename.
            String protocol = URLClassPath.getURLProtocol(fileName);
            if (protocol == null) {
                protocol = "file";
                fileName = "file:" + fileName;
            }
            URL url = new URL(fileName);

            // Figure out the file extension
            String fileExtension = null;
            int lastDot = fileName.lastIndexOf('.');
            if (lastDot >= 0) {
                fileExtension = fileName.substring(lastDot);
            }

            // Create the ClassProducer appropriate for this kind of source:
            // archive, single class file, or directory.
            if (fileExtension != null && URLClassPath.isArchiveExtension(fileExtension))
                classProducer = new ZipClassProducer(url, archiveWorkList, additionalAuxClasspathEntryList);
            else if (fileExtension != null && fileExtension.equals(".class"))
                classProducer = new SingleClassProducer(url);
            else if (protocol.equals("file")) {
                // Assume it's a directory
                fileName = fileName.substring("file:".length());
                File dir = new File(fileName);
                if (!dir.isDirectory())
                    throw new IOException("Path " + fileName + " is not an archive, class file, or directory");
                classProducer = new DirectoryClassProducer(fileName, additionalAuxClasspathEntryList);
            } else
                throw new IOException("URL " + fileName + " is not an archive, class file, or directory");

            if (DEBUG || URLClassPathRepository.DEBUG) {
                System.out.println("Scanning " + url + " for classes");
            }

            // Load all referenced classes into the Repository.
            // Malformed class files are logged but do not abort the scan.
            for (; ;) {
                if (Thread.interrupted())
                    throw new InterruptedException();
                try {
                    JavaClass jclass = classProducer.getNextClass();
                    if (jclass == null)
                        break;
                    if (DEBUG) System.out.println("Scanned " + jclass.getClassName());
                    analysisContext.addApplicationClassToRepository(jclass);
                    repositoryClassList.add(jclass.getClassName());
                } catch (ClassFormatException e) {
                    if (DEBUG) e.printStackTrace();
                    bugReporter.logError("Invalid classfile format", e);
                }
            }

            if (item.isExplicit())
                progressCallback.finishArchive();

            // If the archive or directory scanned contained source files,
            // add it to the end of the source path.
            if (classProducer.containsSourceFiles())
                project.addSourceDir(fileName);
            project.addTimestamp(classProducer.getLastModificationTime());

        } catch (IOException e) {
            // You'd think that the message for a FileNotFoundException would include
            // the filename, but you'd be wrong.  So, we'll add it explicitly.
            IOException ioe = new IOException("Could not analyze " + fileName);
            ioe.initCause(e);
            throw ioe;
        } finally {
            if (classProducer != null) {
                classProducer.close();
            }
        }
    }

    /**
     * Execute a single AnalysisPass.
     * @param analysisPass the AnalysisPass
     * @param repositoryClassList list of application classes in the repository
     * @throws InterruptedException if the analysis thread is interrupted
     */
    private void executeAnalysisPass(AnalysisPass analysisPass, List<String> repositoryClassList)
            throws InterruptedException {
        // Callback for progress dialog: analysis is starting
        progressCallback.startAnalysis(repositoryClassList.size());

        int thisPass = passCount++;
        if (ExecutionPlan.DEBUG) {
            System.out.println("************* Analysis pass " + thisPass + " *************");
            for (Iterator<DetectorFactory> i = analysisPass.iterator(); i.hasNext();) {
                DetectorFactory factory = i.next();
                System.out.println("\t" + factory.getFullName());
            }
        }

        // Create detectors
        // XXX: we can only support BCEL-based detectors.
        Detector[] detectors = analysisPass.instantiateDetectorsInPass(bugReporter);

        // Examine each class in the application; the Set guards against
        // visiting a class name that occurs twice in repositoryClassList.
        Set<String> examinedClassSet = new HashSet<String>();
        for (String className : repositoryClassList) {
            if (examinedClassSet.add(className))
                examineClass(detectors, className);
        }

        if (DEBUG) {
            // Dump accumulated per-detector timings, then reset them for the next pass.
            long total = 0;
            for (Long aLong : detectorTimings.values()) {
                total += aLong.longValue();
            }
            System.out.println();
            System.out.println("Detector Timings");
            for (Map.Entry<String, Long> entry : detectorTimings.entrySet()) {
                String detectorName = entry.getKey();
                long detectorTime = entry.getValue().longValue();
                System.out.println(detectorName + ": " + detectorTime + " ms -> (" +
                        (detectorTime * 100.0f / (float) total) + ") %");
            }
            System.out.println();
            detectorTimings = new HashMap<String,Long>();
        }

        // Callback for progress dialog: analysis finished
        progressCallback.finishPerClassAnalysis();

        // Force any detectors which defer work until all classes have
        // been seen to do that work.
        this.reportFinal(detectors);

        AnalysisContext.currentAnalysisContext().updateDatabases(thisPass);
    }

    /**
     * Examine a single class by invoking all of the Detectors on it.
     *
     * @param detectors the Detectors to execute on the class
     * @param className the fully qualified name of the class to examine
     */
    private void examineClass(Detector[] detectors, String className) throws InterruptedException {
        if (DEBUG) System.out.println("Examining class " + className);
        long entireClassAnalysisStart = 0;
        if (TIMEDEBUG || DEBUG) {
            entireClassAnalysisStart = System.currentTimeMillis();
        }
        this.currentClass = className;
        try {
            JavaClass javaClass = Repository.lookupClass(className);

            // Notify ClassObservers
            for (IClassObserver aClassObserver : classObserverList) {
                ClassDescriptor classDescriptor =
                    new ClassDescriptor(ClassName.toSlashedClassName(javaClass.getClassName()));
                aClassObserver.observeClass(classDescriptor);
            }

            // Create a ClassContext for the class
            ClassContext classContext = analysisContext.getClassContext(javaClass);

            // Run the Detectors
            for (Detector detector1 : detectors) {
                // Honor cancellation between detectors.
                if (Thread.interrupted())
                    throw new InterruptedException();
                Detector detector = detector1;
                // MUSTFIX: Evaluate whether this makes a difference
                // NOTE(review): this branch is disabled by the "false &&" guard.
                if (false && detector instanceof StatelessDetector) {
                    try {
                        detector = (Detector) ((StatelessDetector) detector).clone();
                    } catch (CloneNotSupportedException e) {
                        throw new AssertionError(e);
                    }
                }
                try {
                    long start = 0, end;
                    if (TIMEDEBUG || DEBUG) {
                        start = System.currentTimeMillis();
                        if (DEBUG) {
                            System.out.println(" running " + detector.getClass().getName());
                        }
                    }
                    detector.visitClassContext(classContext);
                    if (TIMEDEBUG || DEBUG) {
                        end = System.currentTimeMillis();
                        long delta = end - start;
                        // Push the recorded "analysis start" forward by the detector's run
                        // time, so the setup-time measurement at the end of this method
                        // excludes time spent inside detectors.
                        entireClassAnalysisStart += delta;
                        if (delta > TIMEQUANTUM)
                            System.out.println("TIME: " + detector.getClass().getName()
                                    + " " + className + " " + delta);
                        if (DEBUG) {
                            String detectorName = detector.getClass().getName();
                            Long total = detectorTimings.get(detectorName);
                            if (total == null)
                                total = (Long)(delta);
                            else
                                total = (Long)(total.longValue() + delta);
                            detectorTimings.put(detectorName, total);
                        }
                    }
                } catch (AnalysisException e) {
                    reportRecoverableDetectorException(className, detector, e);
                } catch (ArrayIndexOutOfBoundsException e) {
                    reportRecoverableDetectorException(className, detector, e);
                } catch (ClassCastException e) {
                    reportRecoverableDetectorException(className, detector, e);
                }
            }
        } catch (ClassNotFoundException e) {
            // This should never happen unless there are bugs in BCEL.
            bugReporter.reportMissingClass(e);
            reportRecoverableException(className, e);
        } catch (ClassFormatException e) {
            reportRecoverableException(className, e);
        } catch (RuntimeException re) {
            // Re-throw the runtime exception with the class name prepended to its
            // message (via the exception type's (String) constructor, if any),
            // preserving the original stack trace.  If the type has no such
            // constructor, the original exception is re-thrown unchanged.
            RuntimeException annotatedEx;
            try {
                String sep = SystemProperties.getProperty("line.separator");
                Constructor<? extends RuntimeException> c =
                    re.getClass().getConstructor(new Class[] { String.class });
                String msg = re.getMessage();
                msg = sep + "While finding bugs in class: " + className
                        + ((msg == null) ? "" : (sep + msg));
                annotatedEx = c.newInstance(new Object[] {msg});
                annotatedEx.setStackTrace(re.getStackTrace());
            } catch (RuntimeException e) {
                throw re;
            } catch (Exception e) {
                throw re;
            }
            throw annotatedEx;
        }
        if (TIMEDEBUG || DEBUG) {
            long classSetupTime = System.currentTimeMillis() - entireClassAnalysisStart;
            if (classSetupTime > TIMEQUANTUM)
                System.out.println("TIME: setup " + className + " " + classSetupTime);
        }
        progressCallback.finishClass();
    }

    /** Log an analysis error for the given class; stack trace only in DEBUG mode. */
    private void reportRecoverableException(String className, Exception e) {
        if (DEBUG) {
            e.printStackTrace();
        }
        bugReporter.logError("Exception analyzing " + className, e);
    }

    /** Log an analysis error attributed to a specific detector. */
    private void reportRecoverableDetectorException(String className, Detector detector, Exception e) {
        if (DEBUG) {
            e.printStackTrace();
        }
        bugReporter.logError("Exception analyzing " + className +
                " using detector " + detector.getClass().getName(), e);
    }

    /**
     * Call report() on all detectors, to give them a chance to
     * report any accumulated bug reports.
     */
    private void reportFinal(Detector[] detectors) throws InterruptedException {
        for (Detector detector : detectors) {
            if (Thread.interrupted())
                throw new InterruptedException();
            detector.report();
        }
    }

    /**
     * Parse the data for a class to create a JavaClass object.
     */
    private static JavaClass parseClass(String archiveName, InputStream in, String fileName)
            throws IOException {
        if (DEBUG) System.out.println("About to parse " + fileName + " in " + archiveName);
        return parseFromStream(in, fileName);
    }

    /**
     * Parse the data for a class to create a JavaClass object.
     */
    private static JavaClass parseClass(URL url) throws IOException {
        if (DEBUG) System.out.println("About to parse " + url.toString());
        InputStream in = null;
        try {
            in = url.openStream();
            return parseFromStream(in, url.toString());
        } finally {
            if (in != null)
                in.close();
        }
    }

    /**
     * Parse an input stream to produce a JavaClass object.
     * Makes sure that the input stream is closed no
     * matter what.
     */
    private static JavaClass parseFromStream(InputStream in, String fileName) throws IOException {
        try {
            return new ClassParser(in, fileName).parse();
        } finally {
            try {
                in.close();
            } catch (IOException ignore) {
                // Ignore
            }
        }
    }

    /**
     * Process -bugCategories option.
     *
     * @param userPreferences
     *            UserPreferences representing which Detectors are enabled
     * @param categories
     *            comma-separated list of bug categories
     * @return Set of categories to be used
     */
    static Set<String> handleBugCategories(UserPreferences userPreferences, String categories) {
        // Parse list of bug categories
        Set<String> categorySet = new HashSet<String>();
        StringTokenizer tok = new StringTokenizer(categories, ",");
        while (tok.hasMoreTokens()) {
            categorySet.add(tok.nextToken());
        }

        // NOTE(review): the detector-enabling logic below has been disabled
        // (commented out); only the parsed category set is returned.
//		// Enable only those detectors that can emit those categories
//		// (and the ones that produce unknown bug patterns, just to be safe).
//		// Skip disabled detectors, though.
//		for (Iterator<DetectorFactory> i = DetectorFactoryCollection.instance().factoryIterator(); i.hasNext();) {
//			DetectorFactory factory = i.next();
//			if (!factory.isEnabledForCurrentJRE())
//				continue;
//			Collection<BugPattern> reported = factory.getReportedBugPatterns();
//			boolean enable = false;
//			if (reported.isEmpty()) {
//				// Don't know what bug patterns are produced by this detector
//				if (DEBUG) System.out.println("Unknown bug patterns for " + factory.getShortName());
//				enable = true;
//			} else {
//				for (Iterator<BugPattern> j = reported.iterator(); j.hasNext();) {
//					BugPattern bugPattern = j.next();
//					if (categorySet.contains(bugPattern.getCategory())) {
//						if (DEBUG)
//							System.out.println("MATCH ==> " + categorySet +
//									" -- " + bugPattern.getCategory());
//						enable = true;
//						break;
//			if (DEBUG && enable) {
//				System.out.println("Enabling " + factory.getShortName());
//			userPreferences.enableDetector(factory, enable);

        return categorySet;
    }

    /**
     * Process the command line.
     *
     * @param commandLine the TextUICommandLine object which will parse the command line
     * @param argv the command line arguments
     * @param findBugs the IFindBugsEngine to configure
     * @throws IOException
     * @throws FilterException
     */
    public static void processCommandLine(TextUICommandLine commandLine, String[] argv,
            IFindBugsEngine findBugs) throws IOException, FilterException {
        // Expand option files in command line.
        // An argument beginning with "@" is treated as specifying
        // the name of an option file.
        // Each line of option files are treated as a single argument.
        // Blank lines and comment lines (beginning with "#") are ignored.
        argv = CommandLine.expandOptionFiles(argv, true, true);

        int argCount = 0;
        try {
            argCount = commandLine.parse(argv);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
            showHelp(commandLine);
        } catch (HelpRequestedException e) {
            showHelp(commandLine);
        }

        Project project = commandLine.getProject();
        for (int i = argCount; i < argv.length; ++i)
            project.addFile(argv[i]);
        commandLine.handleXArgs();

        if (project.getFileCount() == 0) {
            showHelp(commandLine);
        }

        commandLine.configureEngine(findBugs);
    }

    // Prints help and exits the JVM with status 1.
    @SuppressWarnings("DM_EXIT")
    public static void showHelp(TextUICommandLine commandLine) {
        showSynopsis();
        ShowHelp.showGeneralOptions();
        FindBugs.showCommandLineOptions(commandLine);
        System.exit(1);
    }

    // Runs the analysis and, if requested, exits with a bit-flag exit code.
    @SuppressWarnings("DM_EXIT")
    public static void runMain(IFindBugsEngine findBugs, TextUICommandLine commandLine)
            throws java.io.IOException, RuntimeException {
        try {
            findBugs.execute();
        } catch (InterruptedException e) {
            // Not possible when running from the command line
        }

        int bugCount = findBugs.getBugCount();
        int missingClassCount = findBugs.getMissingClassCount();
        int errorCount = findBugs.getErrorCount();

        if (!commandLine.quiet() || commandLine.setExitCode()) {
            if (bugCount > 0)
                System.err.println("Warnings generated: " + bugCount);
            if (missingClassCount > 0)
                System.err.println("Missing classes: " + missingClassCount);
            if (errorCount > 0)
                System.err.println("Analysis errors: " + errorCount);
        }

        if (commandLine.setExitCode()) {
            // Encode errors / missing classes / bugs as bit flags in the exit code.
            int exitCode = 0;
            if (errorCount > 0)
                exitCode |= ERROR_FLAG;
            if (missingClassCount > 0)
                exitCode |= MISSING_CLASS_FLAG;
            if (bugCount > 0)
                exitCode |= BUGS_FOUND_FLAG;
            System.exit(exitCode);
        }
    }

    /**
     * Print command line options synopses to stdout.
*/ public static void showCommandLineOptions() { showCommandLineOptions(new TextUICommandLine()); } public static void showCommandLineOptions(TextUICommandLine commandLine) { System.out.println("Command line options:"); commandLine.printUsage(System.out); } public static void showSynopsis() { System.out.println("Usage: findbugs [general options] -textui [command line options...] [jar/zip/class files, directories...]"); } public static void configureFilter(DelegatingBugReporter bugReporter, String filterFileName, boolean include) throws IOException, FilterException { Filter filter = new Filter(filterFileName); BugReporter origBugReporter = bugReporter.getDelegate(); BugReporter filterBugReporter = new FilterBugReporter(origBugReporter, filter, include); bugReporter.setDelegate(filterBugReporter); } /** * Configure the BugCollection (if the BugReporter being used * is constructing one). * * @param findBugs the IFindBugsEngine */ public static void configureBugCollection(IFindBugsEngine findBugs) { BugReporter realBugReporter = findBugs.getBugReporter().getRealBugReporter(); if (realBugReporter instanceof BugCollectionBugReporter) { BugCollectionBugReporter bugCollectionBugReporter = (BugCollectionBugReporter) realBugReporter; bugCollectionBugReporter = (BugCollectionBugReporter) realBugReporter; bugCollectionBugReporter.getBugCollection().setReleaseName(findBugs.getReleaseName()); Project project = findBugs.getProject(); if (project.getProjectName() == null) project.setProjectName(findBugs.getProjectName()); if (project.getTimestamp() != 0) { bugCollectionBugReporter.getBugCollection().setTimestamp(project.getTimestamp()); bugCollectionBugReporter.getBugCollection().getProjectStats().setTimestamp(project.getTimestamp()); } } } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#getProjectName() */ public String getProjectName() { return projectName; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#setProjectName(java.lang.String) */ public void 
setProjectName(String projectName) { this.projectName = projectName; } /* (non-Javadoc) * @see edu.umd.cs.findbugs.IFindBugsEngine#setAbridgedMessages(boolean) */ public void setAbridgedMessages(boolean xmlWithAbridgedMessages) { // TODO Auto-generated method stub } } // vim:ts=4
package picard.util; import htsjdk.samtools.util.Interval; import htsjdk.samtools.util.IntervalList; import picard.PicardException; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; /** * @author mccowan */ public class IntervalListScatterer { public enum Mode { /** * A simple scatter approach in which all output intervals have size equal to the total base count of the source list divide by the * scatter count (except for possible variance in the final interval list). */ INTERVAL_SUBDIVISION, /** * A scatter approach that differs from {@link Mode#INTERVAL_SUBDIVISION} in a few ways. * <ol> * <li>No interval will be subdivided, and consequently, the requested scatter count is an upper bound of scatter count, not a * guarantee as to how many {@link IntervalList}s will be produced (e.g., if scatterCount = 10 but there is only one input interval, * only 1 interval list will be emitted).</li> * <li>When an interval would otherwise be split, it is instead deferred to the next scatter list.</li> * <li>The "target width" of each scatter list may be wider than what is computed for {@link Mode#INTERVAL_SUBDIVISION}. * Specifically, if the widest interval in the source interval list is larger than what would otherwise be the target width, that * interval's width is used.<br/><br/>The reasoning for this is that this approach produces more consistently-sized interval lists, * which is one of the objectives of scattering.</li> * </ol> */ BALANCING_WITHOUT_INTERVAL_SUBDIVISION, /** * A scatter approach that differs from {@link Mode#BALANCING_WITHOUT_INTERVAL_SUBDIVISION}. * <ol> * <li>We try to balance the number of unique bases in each interval list by estimating the remaining interval lists sizes. This is * computed from the total number of unique bases and the bases we have consumed. 
This means that the interval list with the most * number of unique bases is at most the ideal split length larger than the smallest interval list (unique # of bases).</li> * </ol> */ BALANCING_WITHOUT_INTERVAL_SUBDIVISION_WITH_OVERFLOW } private final Mode mode; public IntervalListScatterer(final Mode mode) {this.mode = mode;} private int deduceIdealSplitLength(final IntervalList uniquedList, final int scatterCount) { final int splitWidth = Math.max((int) Math.floor(uniquedList.getBaseCount() / (1.0 * scatterCount)), 1); switch (mode) { case INTERVAL_SUBDIVISION: return splitWidth; case BALANCING_WITHOUT_INTERVAL_SUBDIVISION: case BALANCING_WITHOUT_INTERVAL_SUBDIVISION_WITH_OVERFLOW: final int widestIntervalLength = Collections.max(uniquedList.getIntervals(), new Comparator<Interval>() { @Override public int compare(final Interval o1, final Interval o2) { return Integer.valueOf(o1.length()).compareTo(o2.length()); } }).length(); // There is no purpose to splitting more granularly than the widest interval, so do not. return Math.max(widestIntervalLength, splitWidth); default: throw new IllegalStateException(); } } public List<IntervalList> scatter(final IntervalList uniquedIntervalList, final int scatterCount) { return scatter(uniquedIntervalList, scatterCount, false); } /** Helper for the scatter method */ private boolean shouldAddToRunningIntervalList(final long idealSplitLength, final long projectedSize, final double projectedSizeOfRemainingDivisions) { switch (mode) { case BALANCING_WITHOUT_INTERVAL_SUBDIVISION_WITH_OVERFLOW: return (projectedSize <= idealSplitLength || idealSplitLength < projectedSizeOfRemainingDivisions); default: return (projectedSize <= idealSplitLength); } } public List<IntervalList> scatter(final IntervalList sourceIntervalList, final int scatterCount, final boolean isUniqued) { if (scatterCount < 1) throw new IllegalArgumentException("scatterCount < 1"); final IntervalList uniquedList = isUniqued ? 
sourceIntervalList : sourceIntervalList.uniqued(); final long idealSplitLength = deduceIdealSplitLength(uniquedList, scatterCount); System.err.println("idealSplitLength=" + idealSplitLength); final List<IntervalList> accumulatedIntervalLists = new ArrayList<IntervalList>(); IntervalList runningIntervalList = new IntervalList(uniquedList.getHeader()); final ArrayDeque<Interval> intervalQueue = new ArrayDeque<Interval>(uniquedList.getIntervals()); long numBasesLeft = uniquedList.getBaseCount(); while (!intervalQueue.isEmpty() && accumulatedIntervalLists.size() < scatterCount - 1) { final Interval interval = intervalQueue.pollFirst(); final long projectedSize = runningIntervalList.getBaseCount() + interval.length(); // The mean expected size of the remaining divisions // NOTE: that this looks like double counting but isn't, we subtract here the bases that are in the _current_ running intervalList, // and when we create a new intervalList (below) we modify numBasesLeft. // Another Note: the -1 in the denominator is for "runningIntervalList" that isn't yet counted in accumulatedIntervalLists.size() final double projectedSizeOfRemainingDivisions = (numBasesLeft - runningIntervalList.getBaseCount()) / ((double)(scatterCount - accumulatedIntervalLists.size() - 1)); // should we add this interval to the list of running intervals? 
if (shouldAddToRunningIntervalList(idealSplitLength, projectedSize, projectedSizeOfRemainingDivisions)) { runningIntervalList.add(interval); } else { switch (mode) { case INTERVAL_SUBDIVISION: final int amountToConsume = (int) (idealSplitLength - runningIntervalList.getBaseCount()); final Interval left = new Interval( interval.getContig(), interval.getStart(), interval.getStart() + amountToConsume - 1, interval.isNegativeStrand(), interval.getName() ); final Interval right = new Interval( interval.getContig(), interval.getStart() + amountToConsume, interval.getEnd(), interval.isNegativeStrand(), interval.getName() ); runningIntervalList.add(left); // Push back the excess back onto our queue for reconsideration. intervalQueue.addFirst(right); break; case BALANCING_WITHOUT_INTERVAL_SUBDIVISION: case BALANCING_WITHOUT_INTERVAL_SUBDIVISION_WITH_OVERFLOW: if (runningIntervalList.getIntervals().isEmpty()) { runningIntervalList.add(interval); } else { // Push this interval into the next scatter; re-inject it into the queue, then advance the scatter. intervalQueue.addFirst(interval); numBasesLeft -= runningIntervalList.getBaseCount(); accumulatedIntervalLists.add(runningIntervalList.uniqued()); runningIntervalList = new IntervalList(uniquedList.getHeader()); } break; } } if (runningIntervalList.getBaseCount() >= idealSplitLength) { numBasesLeft -= runningIntervalList.getBaseCount(); // keep track of the number of *unique* bases left accumulatedIntervalLists.add(runningIntervalList.uniqued()); runningIntervalList = new IntervalList(uniquedList.getHeader()); } } // Flush the remaining intervals into the last split. 
while (!intervalQueue.isEmpty()) { runningIntervalList.add(intervalQueue.pollFirst()); } if (!runningIntervalList.getIntervals().isEmpty()) { accumulatedIntervalLists.add(runningIntervalList.uniqued()); } long maximumIntervalSize = -1, minimumIntervalSize = Integer.MAX_VALUE; for (final IntervalList intervalList : accumulatedIntervalLists) { final long baseCount = intervalList.getBaseCount(); if (baseCount < minimumIntervalSize) minimumIntervalSize = baseCount; if (maximumIntervalSize < baseCount) maximumIntervalSize = baseCount; } return accumulatedIntervalLists; } }
package dk.netarkivet.harvester.harvesting;

import javax.management.openmbean.CompositeData;
import javax.management.openmbean.TabularData;
import javax.management.remote.JMXConnector;
import java.io.File;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.archive.crawler.Heritrix;
import org.archive.util.JmxUtils;

import dk.netarkivet.common.CommonSettings;
import dk.netarkivet.common.exceptions.ArgumentNotValid;
import dk.netarkivet.common.exceptions.IOFailure;
import dk.netarkivet.common.exceptions.IllegalState;
import dk.netarkivet.common.utils.FileUtils;
import dk.netarkivet.common.utils.JMXUtils;
import dk.netarkivet.common.utils.NotificationsFactory;
import dk.netarkivet.common.utils.ProcessUtils;
import dk.netarkivet.common.utils.Settings;
import dk.netarkivet.common.utils.StringUtils;
import dk.netarkivet.common.utils.SystemUtils;
import dk.netarkivet.common.utils.TimeUtils;
import dk.netarkivet.harvester.HarvesterSettings;

/**
 * This implementation of the HeritrixController interface starts Heritrix
 * as a separate process and uses JMX to communicate with it.  Each instance
 * executes exactly one process that runs exactly one crawl job.
 */
public class JMXHeritrixController implements HeritrixController {

    /** The logger for this class. */
    private static final Log log = LogFactory.getLog(
            JMXHeritrixController.class);

    /** The command to submit a new crawljob to the Crawlcontroller. */
    private static final String ADD_JOB_COMMAND = "addJob";

    /** The command to retrieve progress statistics for the currently
     *  running job. */
    private static final String PROGRESS_STATISTICS_COMMAND
            = "progressStatistics";

    /** The command to retrieve a progress statistics legend for the currently
     *  running job. */
    private static final String PROGRESS_STATISTICS_LEGEND_COMMAND
            = "progressStatisticsLegend";

    /** The attribute for the current download rate in kbytes for the
     *  currently running job. */
    private static final String CURRENT_KB_RATE_ATTRIBUTE = "CurrentKbRate";

    /** The attribute for the number of currently running process-threads. */
    private static final String THREAD_COUNT_ATTRIBUTE = "ThreadCount";

    /** The attribute for the number of discovered URIs for the
     *  currently running job. */
    private static final String DISCOVERED_COUNT_ATTRIBUTE = "DiscoveredCount";

    /** The attribute for the number of downloaded URIs for the
     *  currently running job. */
    private static final String DOWNLOADED_COUNT_ATTRIBUTE = "DownloadedCount";

    /** The attribute for the status for the currently running job. */
    private static final String STATUS_ATTRIBUTE = "Status";

    /* Note: The Heritrix JMX interface has two apparent ways to stop crawling:
     * stopCrawling and terminateCurrentJob.  stopCrawling merely makes Heritrix
     * not start any more jobs, but the old jobs continue.  Note that if we
     * start using more than one job at a time, terminateCurrentJob will only
     * stop one job.
     */
    /** Command to start crawling. */
    private static final String START_CRAWLING_COMMAND = "startCrawling";

    /** Make the currently active (selected?) job stop. */
    private static final String TERMINATE_CURRENT_JOB_COMMAND
            = "terminateCurrentJob";

    /** Command for returning list of pending jobs. */
    private static final String PENDING_JOBS_COMMAND = "pendingJobs";

    /** Command for returning list of completed jobs. */
    private static final String COMPLETED_JOBS_COMMAND = "completedJobs";

    /** Command for shutting down Heritrix. */
    private static final String SHUTDOWN_COMMAND = "shutdown";

    /** How long we're willing to wait for Heritrix to shutdown in a
     *  shutdown hook. */
    private static final long SHUTDOWN_HOOK_MAX_WAIT = 1000L;

    private static final String UID_PROPERTY = "uid";

    /** File path Separator. Used to separate the jar-files in the classpath.
     *  NOTE(review): hardcoded to the Unix separator ':' — presumably
     *  intentional, but verify on non-Unix deployments. */
    private static final String FILE_PATH_SEPARATOR = ":";

    /** The one-shot Heritrix process created in the constructor.  It will
     *  only perform a single crawl before being shut down. */
    private final Process heritrixProcess;

    /** The shutdownHook that takes care of killing our process.  This is
     *  removed in cleanup() when the process is shut down. */
    private Thread processKillerHook;

    /** The threads used to collect process output.
     *  Only one thread used presently. */
    private Set<Thread> collectionThreads = new HashSet<Thread>(1);

    /** The name that Heritrix gives to the job we ask it to create.  This
     *  is part of the name of the MBean for that job, but we can only retrieve
     *  the name after the MBean has been created. */
    private String jobName;

    /** The various files used by Heritrix. */
    private final HeritrixFiles files;

    /** The header line (legend) for the statistics report. */
    private String progressStatisticsLegend;

    /** The 'NASCENT' status. */
    private static final String NASCENT_STATUS = "NASCENT";
    /** The 'RUNNING' status. */
    private static final String RUNNING_STATUS = "RUNNING";
    /** The 'PAUSED' status. */
    private static final String PAUSED_STATUS = "PAUSED";
    /** The 'PAUSING' status. */
    private static final String PAUSING_STATUS = "PAUSING";
    /** The 'CHECKPOINTING' status. */
    private static final String CHECKPOINTING_STATUS = "CHECKPOINTING";
    /** The 'STOPPING' status. */
    private static final String STOPPING_STATUS = "STOPPING";
    /** The 'FINISHED' status. */
    private static final String FINISHED_STATUS = "FINISHED";
    /** The 'STARTED status. */
    private static final String STARTED_STATUS = "STARTED";
    /** The 'PREPARING' status. */
    private static final String PREPARING_STATUS = "PREPARING";

    private static final String ILLEGAL_STATUS = "Illegal State";

    /** Create a JMXHeritrixController object.
     *
     * @param files Files that are used to set up Heritrix.
     */
    public JMXHeritrixController(HeritrixFiles files) {
        ArgumentNotValid.checkNotNull(files, "HeritrixFile files");
        this.files = files;
        SystemUtils.checkPortNotUsed(getGUIPort());
        SystemUtils.checkPortNotUsed(getJMXPort());
        try {
            log.info("Starting Heritrix for " + this);
            /* To start Heritrix, we need to do the following (taken from
               the Heritrix startup shell script):
            - set heritrix.home to base dir of Heritrix stuff
            - set com.sun.management.jmxremote.port to JMX port
            - set com.sun.management.jmxremote.ssl to false
            - set com.sun.management.jmxremote.password.file to JMX password
              file
            - set heritrix.out to heritrix_out.log
            - set java.protocol.handler.pkgs=org.archive.net
            - send processOutput & stderr into heritrix.out
            - let the Heritrix GUI-webserver listen on all available
              network interfaces:
              This is done with argument "--bind /" (default is 127.0.0.1)
            - listen on a specific port using the port argument:
              --port <GUI port>

            We also need to output something like the following to
            heritrix.out:
            `date Starting heritrix
            uname -a
            java -version
            JAVA_OPTS
            ulimit -a
             */
            File heritrixOutputFile = files.getHeritrixOutput();
            StringBuilder settingProperty = new StringBuilder();
            for (File file : Settings.getSettingsFiles()) {
                // Each entry is prefixed with a separator ...
                settingProperty.append(File.pathSeparator);
                settingProperty.append(file.getAbsolutePath());
            }
            if (settingProperty.length() > 0) {
                // ... so delete the leading path-separator again.
                // (The original comment said "last", but deleteCharAt(0)
                // removes the first character, which is correct here.)
                settingProperty.deleteCharAt(0);
            }

            List<String> allOpts = new LinkedList<String>();
            // Launch with the same JVM that runs this controller.
            allOpts.add(new File(new File(System.getProperty("java.home"),
                    "bin"), "java").getAbsolutePath());
            allOpts.add("-Xmx" + Settings.get(HarvesterSettings.HERITRIX_HEAP_SIZE));
            allOpts.add("-Dheritrix.home="
                    + files.getCrawlDir().getAbsolutePath());

            String jvmOptsStr = Settings.get(HarvesterSettings.HERITRIX_JVM_OPTS);
            if ((jvmOptsStr != null) && (!jvmOptsStr.isEmpty())) {
                String[] add = jvmOptsStr.split(" ");
                for (String additionalOpt : add) {
                    allOpts.add(additionalOpt);
                }
            }

            allOpts.add("-Dcom.sun.management.jmxremote.port=" + getJMXPort());
            allOpts.add("-Dcom.sun.management.jmxremote.ssl=false");
            allOpts.add("-Dcom.sun.management.jmxremote.password.file="
                    + new File(Settings.get(CommonSettings.JMX_PASSWORD_FILE))
                            .getAbsolutePath());
            allOpts.add("-Dcom.sun.management.jmxremote.access.file="
                    + new File(Settings.get(CommonSettings.JMX_ACCESS_FILE))
                            .getAbsolutePath());
            allOpts.add("-Dheritrix.out="
                    + heritrixOutputFile.getAbsolutePath());
            allOpts.add("-Djava.protocol.handler.pkgs=org.archive.net");
            allOpts.add("-Ddk.netarkivet.settings.file=" + settingProperty);
            allOpts.add(Heritrix.class.getName());
            allOpts.add("--bind");
            allOpts.add("/");
            allOpts.add("--port=" + getGUIPort());
            // NOTE(review): admin name/password come from helper methods
            // defined elsewhere in this class.
            allOpts.add("--admin=" + getHeritrixAdminName()
                    + ":" + getHeritrixAdminPassword());

            String[] args = (String[]) allOpts.toArray(new String[allOpts.size()]);
            log.info("Starting Heritrix process with args"
                    + Arrays.toString(args));
            ProcessBuilder builder = new ProcessBuilder(args);

            updateEnvironment(builder.environment());
            FileUtils.copyDirectory(
                    new File("lib/heritrix"), files.getCrawlDir());
            builder.directory(files.getCrawlDir());
            // Merge stderr into stdout so one collector thread suffices.
            builder.redirectErrorStream(true);
            writeSystemInfo(heritrixOutputFile, builder);
            FileUtils.appendToFile(heritrixOutputFile, "Working directory: "
                    + files.getCrawlDir());
            addProcessKillerHook();
            heritrixProcess = builder.start();
            ProcessUtils.writeProcessOutput(heritrixProcess.getInputStream(),
                    heritrixOutputFile, collectionThreads);
        } catch (IOException e) {
            throw new IOFailure("Error starting Heritrix process", e);
        }
    }

    /**
     * @throws IOFailure If Heritrix dies before initialization,
     * or we encounter any problems during the initialization.
     * @see HeritrixController#initialize()
     */
    public void initialize() {
        if (processHasExited()) {
            String errMsg = "Heritrix process of " + this
                    + " died before initialization";
            log.warn(errMsg);
            throw new IOFailure(errMsg);
        }
        // We want to be sure there are no jobs when starting, in case we got
        // an old Heritrix or somebody added jobs behind our back.
        TabularData doneJobs =
                (TabularData) executeHeritrixCommand(COMPLETED_JOBS_COMMAND);
        TabularData pendingJobs =
                (TabularData) executeHeritrixCommand(PENDING_JOBS_COMMAND);
        if (doneJobs != null && doneJobs.size() > 0
                || pendingJobs != null && pendingJobs.size() > 0) {
            throw new IllegalState(
                    "This Heritrix instance is in a illegalState! "
                    + "This instance has either old done jobs ("
                    + doneJobs + "), or old pending jobs (" + pendingJobs
                    + ").");
        }
        // From here on, we can assume there's only the one job we make.
        // We'll use the arc file prefix to name the job, since the prefix
        // already contains the harvest id and job id.
        executeHeritrixCommand(ADD_JOB_COMMAND,
                files.getOrderXmlFile().getAbsolutePath(),
                files.getArcFilePrefix(), getJobDescription(),
                files.getSeedsTxtFile().getAbsolutePath());
        jobName = getJobName();
        initializeProgressStatisticsLegend();
    }

    /**
     * @throws IOFailure if unable to communicate with Heritrix
     * @see HeritrixController#requestCrawlStart()
     */
    public void requestCrawlStart() {
        executeHeritrixCommand(START_CRAWLING_COMMAND);
    }

    /** @see HeritrixController#atFinish() */
    public boolean atFinish() {
        return crawlIsEnded();
    }

    /**
     * @throws IOFailure if unable to communicate with Heritrix
     * @see HeritrixController#beginCrawlStop()
     */
    public void beginCrawlStop() {
        executeHeritrixCommand(TERMINATE_CURRENT_JOB_COMMAND);
    }

    /** @see HeritrixController#getActiveToeCount() */
    public int getActiveToeCount() {
        Integer activeToeCount =
                (Integer) getCrawlJobAttribute(THREAD_COUNT_ATTRIBUTE);
        // Null when the attribute is unavailable; report 0 threads then.
        if (activeToeCount == null) {
            return 0;
        }
        return activeToeCount;
    }

    /** @see HeritrixController#requestCrawlStop(String)
     *  NOTE(review): the reason parameter is ignored by this implementation. */
    public void requestCrawlStop(String reason) {
        if (!atFinish()) {
            beginCrawlStop();
        }
    }

    /**
     * @see HeritrixController#getQueuedUriCount()
     *
     */
    public long getQueuedUriCount() {
        /* Implementation note:  This count is not as precise as what
         * StatisticsTracker could provide, but it's presently only used in
         * a warning in the HeritrixLauncher.doCrawlLoop() method.
         */
        Long discoveredUris =
                (Long) getCrawlJobAttribute(DISCOVERED_COUNT_ATTRIBUTE);
        Long downloadedUris =
                (Long) getCrawlJobAttribute(DOWNLOADED_COUNT_ATTRIBUTE);
        if (discoveredUris == null) {
            return 0;
        }
        if (downloadedUris == null) {
            return discoveredUris;
        }
        // Approximate queue size: discovered minus already-downloaded URIs.
        return discoveredUris - downloadedUris;
    }

    /** @see HeritrixController#getCurrentProcessedKBPerSec() */
    public int getCurrentProcessedKBPerSec() {
        Long currentDownloadRate =
                (Long) getCrawlJobAttribute(CURRENT_KB_RATE_ATTRIBUTE);
        if (currentDownloadRate == null) {
            return 0;
        }
        return currentDownloadRate.intValue();
    }

    /** @see HeritrixController#getProgressStats() */
    public String getProgressStats() {
        String status = (String) getCrawlJobAttribute(STATUS_ATTRIBUTE);
        if (status == null) {
            status = "NO STATUS";
        }
        String progressStatistics =
                (String) executeCrawlJobCommand(PROGRESS_STATISTICS_COMMAND);
        if (progressStatistics == null) {
            progressStatistics = "No progress statistics available";
        } else {
            // Since progressStatisticsLegend acts as a latch, we can check
            // for non-null even though it gets assigned asynchronously.
            if (progressStatisticsLegend != null) {
                progressStatistics = progressStatisticsLegend + '\n'
                        + progressStatistics;
            }
        }
        return status + " " + progressStatistics;
    }

    /** Store the statistics legend line (asynchronously). */
    private void initializeProgressStatisticsLegend() {
        // Fetched on a background thread because the JMX call may block;
        // getProgressStats() tolerates the legend still being null.
        new Thread() {
            public void run() {
                progressStatisticsLegend = (String) executeCrawlJobCommand(
                        PROGRESS_STATISTICS_LEGEND_COMMAND);
            }
        }.start();
    }

    /** @see HeritrixController#isPaused()  */
    public boolean isPaused() {
        String status = (String) getCrawlJobAttribute(STATUS_ATTRIBUTE);
        log.debug("Heritrix state: '" + status + "'");
        // Either Pausing or Paused in case of not null
        if (status == null) {
            return false;
        } else {
            return status.equals(PAUSED_STATUS)
                    || status.equals(PAUSING_STATUS);
        }
    }

    /** Check if the crawl has ended, either because Heritrix finished
     * of its own, or because we terminated it.
 *
     * @return True if the crawl has ended, either because Heritrix finished
     * or because we terminated it. Otherwise we return false.
     * @see HeritrixController#crawlIsEnded()
     */
    public synchronized boolean crawlIsEnded() {
        // End of crawl can be seen in one of three ways:
        // 1) The Heritrix process has exited.
        // 2) The job has been moved to the completed jobs list in Heritrix.
        // 3) The job is in one of the FINISHED states.
        if (processHasExited()) {
            return true;
        }
        TabularData jobs = (TabularData) executeHeritrixCommand(
                COMPLETED_JOBS_COMMAND);
        if (jobs != null && jobs.size() > 0) {
            // NOTE(review): unchecked cast — TabularData.values() is declared
            // as Collection<?>; this relies on Heritrix always returning
            // CompositeData rows. Confirm against the JMX open-types spec.
            for (CompositeData value
                    : (Collection<CompositeData>) jobs.values()) {
                // Job IDs are reconstructed as "<name>-<uid>" to match the
                // jobName captured in getJobName() at job-creation time.
                String thisJobID = value.get(JmxUtils.NAME)
                        + "-" + value.get(UID_PROPERTY);
                if (thisJobID.equals(jobName)) {
                    return true;
                }
            }
        }
        // A null status is treated as "ended" — presumably the job bean has
        // already been deregistered at that point (TODO confirm).
        String status = (String) getCrawlJobAttribute(STATUS_ATTRIBUTE);
        return status == null
                || status.equals(FINISHED_STATUS)
                || status.equals(ILLEGAL_STATUS);
    }

    /** Return true if the Heritrix process has exited, logging the exit
     * value if so.
     *
     * @return True if the process has exited.
     */
    private boolean processHasExited() {
        // First check if the process has exited already.
        // Process.exitValue() is the only non-blocking liveness probe: it
        // throws IllegalThreadStateException while the process is running.
        try {
            int exitValue = heritrixProcess.exitValue();
            log.info("Process of " + this + " returned exit code "
                    + exitValue);
            return true;
        } catch (IllegalThreadStateException e) {
            // Not exited yet, that's fine
        }
        return false;
    }

    /**
     * Cleanup after an Heritrix process.
     * This entails sending the shutdown command to the Heritrix process,
     * and killing it forcefully, if it is still alive after waiting
     * the period of time specified by the CommonSettings.PROCESS_TIMEOUT
     * setting.
* * @see HeritrixController#cleanup() */ public void cleanup() { try { executeHeritrixCommand(SHUTDOWN_COMMAND); } catch (IOFailure e) { log.error("JMX error while cleaning up Heritrix controller", e); } final long maxWait = Settings.getLong(CommonSettings.PROCESS_TIMEOUT); Integer exitValue = ProcessUtils.waitFor(heritrixProcess, maxWait); if (exitValue != null) { log.info("Heritrix process of " + this + " exited with exit code " + exitValue); } else { log.warn("Heritrix process of " + this + " not dead after " + maxWait + " millis, killing it"); heritrixProcess.destroy(); exitValue = ProcessUtils.waitFor(heritrixProcess, maxWait); if (exitValue != null) { log.info("Heritrix process of " + this + " exited with exit code " + exitValue); } else { // If it's not dead now, there's little we can do. log.fatal("Heritrix process of " + this + " not dead after destroy. " + "Exiting harvest controller. " + "Make sure you kill the runaway Heritrix " + "before you restart."); NotificationsFactory.getInstance().errorEvent( "Heritrix process of " + this + " not dead after destroy. " + "Exiting harvest controller. " + "Make sure you kill the runaway Heritrix " + "before you restart."); System.exit(1); } } Runtime.getRuntime().removeShutdownHook(processKillerHook); // Wait until all collection threads are dead or until we have // tried JMXUtils.MAX_TRIES times. int attempt = 0; do { boolean anyAlive = false; for (Thread t : collectionThreads) { if (t.isAlive()) { anyAlive = true; } } if (!anyAlive) { break; } TimeUtils.exponentialBackoffSleep(attempt); } while (attempt++ < JMXUtils.MAX_TRIES); } /** * Return the URL for monitoring this instance. * @return the URL for monitoring this instance. */ public String getHarvestInformation() { return "http://" + getHostName() + ":" + getGUIPort(); } /** Change an environment to be suitable for running Heritrix. * * At the moment, this involves the following: * * Prepend the Jar files from the lib/heritrix/lib dir to the classpath. 
* Make sure the Heritrix jar file is at the front. * * @param environment The environment from a process builder * @throws IOFailure If a Heritrix jarfile is not found. */ private static void updateEnvironment(Map<String, String> environment) { List<String> classPathParts = SystemUtils.getCurrentClasspath(); File heritrixLibDir = new File("lib/heritrix/lib"); File[] jars = heritrixLibDir.listFiles(new FilenameFilter() { public boolean accept(File file, String string) { return string.endsWith(".jar"); } }); // Reverse sort the file list in order to add in alphabetical order // before the basic jars. Arrays.sort(jars, new Comparator<File>() { public int compare(File file, File file1) { return file1.compareTo(file); } }); String heritixJar = null; for (File lib : jars) { final String jarPath = new File(heritrixLibDir, lib.getName()) .getAbsolutePath(); if (lib.getName().startsWith("heritrix-")) { // Heritrix should be at the very head, as it redefines some // of the functions in its dependencies (!). Thus, we have to // save it for later insertion at the head. heritixJar = jarPath; } else { classPathParts.add(0, jarPath); } } if (heritixJar != null) { classPathParts.add(0, heritixJar); } else { throw new IOFailure("Heritrix jar file not found"); } environment.put("CLASSPATH", StringUtils.conjoin(FILE_PATH_SEPARATOR, classPathParts)); } /** Write various info on the system we're using into the given file. * This info will later get put into metadata for the crawl. * * @param outputFile A file to write to. 
* @param builder The ProcessBuilder being used to start the Heritrix * process */ private void writeSystemInfo(File outputFile, ProcessBuilder builder) { PrintWriter writer = null; try { writer = new PrintWriter(new FileWriter(outputFile)); writer.println("The Heritrix process is started in the following" + " environment\n (note that some entries will be" + " changed by the starting JVM):"); Map<String, String> env = builder.environment(); List<String> keyList = new ArrayList<String>(env.keySet()); Collections.sort(keyList); for (String key : keyList) { writer.println(key + "=" + env.get(key)); } writer.println("Process properties:"); Properties properties = System.getProperties(); keyList = new ArrayList<String>((Set) properties.keySet()); Collections.sort(keyList); for (String key : keyList) { writer.println(key + "=" + properties.get(key)); } } catch (IOException e) { log.warn("Error writing basic properties to output file.", e); } finally { if (writer != null) { writer.close(); } } } /** Get a string that describes the current controller in terms of * job ID, harvest ID, and crawldir. * * @return A human-readable string describing this controller. */ public String toString() { if (heritrixProcess != null) { return "job " + files.getJobID() + " of harvest " + files.getHarvestID() + " in " + files.getCrawlDir() + " running process " + heritrixProcess; } else { return "job " + files.getJobID() + " of harvest " + files.getHarvestID() + " in " + files.getCrawlDir(); } } /** Add a shutdown hook that kills the process we've created. Since this * hook will be run only in case of JVM shutdown, it cannot expect that * the standard logging framework is still usable, and therefore writes * to stdout instead. 
*/ private void addProcessKillerHook() { // Make sure that the process gets killed at the very end, at least processKillerHook = new Thread() { public void run() { try { // Only non-blocking way to check for process liveness int exitValue = heritrixProcess.exitValue(); System.out.println("Heritrix process of " + this + " exited with exit code " + exitValue); } catch (IllegalThreadStateException e) { // Process is still alive, kill it. System.out.println("Killing process of " + this); heritrixProcess.destroy(); final Integer exitValue = ProcessUtils .waitFor(heritrixProcess, SHUTDOWN_HOOK_MAX_WAIT); if (exitValue != null) { System.out.println("Process of " + this + " returned exit code " + exitValue); } else { System.out.println("Process of " + this + " never exited!"); } } } }; Runtime.getRuntime().addShutdownHook(processKillerHook); } /** Return a human-readable description of the job. This will only be * visible in the Heritrix GUI. * * @return String containing various information grabbed from HeritrixFiles. */ private String getJobDescription() { return "Job " + files.getJobID() + " for harvest " + files.getHarvestID() + " performed in " + files.getCrawlDir() + " with index in " + files.getIndexDir() + " and " + FileUtils.countLines(files.getSeedsTxtFile()) + " seeds"; } private String getJobName() { /* This is called just after we've told Heritrix to create a job. * It may take a while before the job is actually created, so we have * to wait around a bit. */ TabularData pendingJobs = null; TabularData doneJobs; int retries = 0; while (retries++ < JMXUtils.MAX_TRIES) { // If the job turns up in Heritrix' pending jobs list, it's ready pendingJobs = (TabularData) executeHeritrixCommand( PENDING_JOBS_COMMAND); if (pendingJobs != null && pendingJobs.size() > 0) { break; // It's ready, we can move on. } // If there's an error in the job configuration, the job will be put // in Heritrix' completed jobs list. 
doneJobs = (TabularData) executeHeritrixCommand( COMPLETED_JOBS_COMMAND); if (doneJobs != null && doneJobs.size() >= 1) { // Since we haven't allowed Heritrix to start any crawls yet, // the only way the job could have ended and then put into // the list of completed jobs is by error. if (doneJobs.size() > 1) { throw new IllegalState("More than one job in done list: " + doneJobs); } else { CompositeData job = JMXUtils.getOneCompositeData(doneJobs); throw new IOFailure("Job " + job + " failed: " + job.get(STATUS_ATTRIBUTE)); } } if (retries < JMXUtils.MAX_TRIES) { TimeUtils.exponentialBackoffSleep(retries); } } // If all went well, we now have exactly one job in the pending // jobs list. if (pendingJobs == null || pendingJobs.size() == 0) { throw new IOFailure("Heritrix has not created a job after " + (Math.pow(2, JMXUtils.MAX_TRIES) / 1000) + " seconds, giving up."); } else if (pendingJobs.size() > 1) { throw new IllegalState("More than one pending job: " + pendingJobs); } else { // Note that we may actually get through to here even if the job // is malformed. The job will then die as soon as we tell it to // start crawling. CompositeData job = JMXUtils.getOneCompositeData(pendingJobs); String name = job.get(JmxUtils.NAME) + "-" + job.get(UID_PROPERTY); log.info("Heritrix created a job with name " + name); return name; } } /** Return the local host name in the way that Heritrix understands it. * * @return The host name for this machine that matches what Heritrix * uses in its MBean names. */ private String getHostName() { try { return InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { throw new IOFailure("Failed to find name of localhost", e); } } /** Get the login name for accessing the Heritrix GUI. This name can be * set in the settings.xml file. 
* * @return Name to use for accessing Heritrix web GUI */ private String getHeritrixAdminName() { return Settings.get(HarvesterSettings.HERITRIX_ADMIN_NAME); } /** Get the login password for accessing the Heritrix GUI. This password * can be set in the settings.xml file. * * @return Password to use for accessing the Heritrix GUI */ private String getHeritrixAdminPassword() { return Settings.get(HarvesterSettings.HERITRIX_ADMIN_PASSWORD); } /** Get the name to use for logging on to Heritrix' JMX with full control. * The name cannot be set by the user. * * @return Name to use when connecting to Heritrix JMX */ private String getJMXAdminName() { String jmxUsername = Settings.get( HarvesterSettings.HERITRIX_JMX_USERNAME); log.debug("The JMX username used for connecting to " + "the Heritrix GUI is: " + "'" + jmxUsername + "'."); return jmxUsername; } /** Get the password to use to access the Heritrix JMX as the user returned * by getJMXAdminName(). This password can be set in a file pointed to * in settings.xml. * @return Password for accessing Heritrix JMX */ private String getJMXAdminPassword() { return Settings.get(HarvesterSettings.HERITRIX_JMX_PASSWORD); } /** Get the port to use for Heritrix JMX, as set in settings.xml. * * @return Port that Heritrix will expose its JMX interface on. */ private int getJMXPort() { return Settings.getInt(HarvesterSettings.HERITRIX_JMX_PORT); } /** Get the port to use for Heritrix GUI, as set in settings.xml. * * @return Port that Heritrix will expose its web interface on. */ private int getGUIPort() { return Settings.getInt(HarvesterSettings.HERITRIX_GUI_PORT); } /** Execute a command for the Heritrix process we're running. * * @param command The command to execute. * @param arguments Any arguments to the command. These arguments can * only be of String type. * @return Whatever object was returned by the JMX invocation. */ private Object executeHeritrixCommand(String command, String... 
arguments) { return JMXUtils.executeCommand(getHeritrixJMXConnector(), getHeritrixBeanName(), command, arguments); } /** Execute a command for the Heritrix job. This must only be called after * initialize() has been run. * * @param command The command to execute. * @param arguments Any arguments to the command. These arguments can * only be of String type. * @return Whatever object was returned by the JMX invocation. */ private Object executeCrawlJobCommand(String command, String... arguments) { return JMXUtils.executeCommand(getHeritrixJMXConnector(), getCrawlJobBeanName(), command, arguments); } /** Get an attribute of the Heritrix process we're running. * * @param attribute The attribute to get. * @return The value of the attribute. */ private Object getHeritrixAttribute(String attribute) { return JMXUtils.getAttribute(getHeritrixJMXConnector(), getHeritrixBeanName(), attribute); } /** Get an attribute of the Heritrix job. This must only be called after * initialize() has been run. * * @param attribute The attribute to get. * @return The value of the attribute. */ private Object getCrawlJobAttribute(String attribute) { return JMXUtils.getAttribute(getHeritrixJMXConnector(), getCrawlJobBeanName(), attribute); } /** Get the name for the main bean of the Heritrix instance. * * @return Bean name, to be passed into JMXUtils#getBeanName(String) */ private String getHeritrixBeanName() { final String beanName = "org.archive.crawler:" + JmxUtils.NAME + "=Heritrix," + JmxUtils.TYPE + "=CrawlService," + JmxUtils.JMX_PORT + "=" + getJMXPort() + "," + JmxUtils.GUI_PORT + "=" + getGUIPort() + "," + JmxUtils.HOST + "=" + getHostName(); return beanName; } /** Get the name for the bean of a single job. This bean does not exist * until after a job has been created using initialize(). 
* * @return Bean name, to be passed into JMXUtils#getBeanName(String) */ private String getCrawlJobBeanName() { final String beanName = "org.archive.crawler:" + JmxUtils.NAME + "=" + jobName + "," + JmxUtils.TYPE + "=CrawlService.Job," + JmxUtils.JMX_PORT + "=" + getJMXPort() + "," + JmxUtils.MOTHER + "=Heritrix," + JmxUtils.HOST + "=" + getHostName(); return beanName; } /** Get the JMX connector to Heritrix. * * @return A connector that connects to a local Heritrix instance. */ private JMXConnector getHeritrixJMXConnector() { JMXConnector connector = JMXUtils.getJMXConnector(SystemUtils.LOCALHOST, getJMXPort(), getJMXAdminName(), getJMXAdminPassword()); return connector; } }
package com.dmdirc; import com.dmdirc.actions.ActionManager; import com.dmdirc.actions.CoreActionType; import com.dmdirc.actions.wrappers.AliasWrapper; import com.dmdirc.commandparser.CommandManager; import com.dmdirc.commandparser.CommandType; import com.dmdirc.config.ConfigManager; import com.dmdirc.config.Identity; import com.dmdirc.config.IdentityManager; import com.dmdirc.interfaces.AwayStateListener; import com.dmdirc.interfaces.InviteListener; import com.dmdirc.logger.ErrorLevel; import com.dmdirc.logger.Logger; import com.dmdirc.parser.ChannelInfo; import com.dmdirc.parser.ClientInfo; import com.dmdirc.parser.IRCParser; import com.dmdirc.parser.MyInfo; import com.dmdirc.parser.ParserError; import com.dmdirc.parser.ServerInfo; import com.dmdirc.ui.WindowManager; import com.dmdirc.ui.input.TabCompleter; import com.dmdirc.ui.interfaces.InputWindow; import com.dmdirc.ui.interfaces.ServerWindow; import com.dmdirc.ui.interfaces.Window; import com.dmdirc.ui.messages.Formatter; import java.io.Serializable; import java.util.ArrayList; import java.util.Hashtable; import java.util.List; import java.util.Map; import java.util.Timer; import java.util.TimerTask; /** * The Server class represents the client's view of a server. It maintains * a list of all channels, queries, etc, and handles parser callbacks pertaining * to the server. * * @author chris */ public final class Server extends WritableFrameContainer implements Serializable { /** * A version number for this class. It should be changed whenever the class * structure is changed (or anything else that would prevent serialized * objects being unserialized with the new class). */ private static final long serialVersionUID = 1; /** The name of the general domain. */ private static final String DOMAIN_GENERAL = "general".intern(); /** The name of the profile domain. */ private static final String DOMAIN_PROFILE = "profile".intern(); /** The name of the server domain. 
*/ private static final String DOMAIN_SERVER = "server".intern(); /** Open channels that currently exist on the server. */ private final Map<String, Channel> channels = new Hashtable<String, Channel>(); /** Open query windows on the server. */ private final List<Query> queries = new ArrayList<Query>(); /** The IRC Parser instance handling this server. */ private transient IRCParser parser; /** The raw frame used for this server instance. */ private Raw raw; /** The ServerWindow corresponding to this server. */ private ServerWindow window; /** The details of the server we're connecting to. */ private ServerInfo serverInfo; /** The profile we're using. */ private transient Identity profile; /** The current state of this server. */ private ServerState myState = ServerState.DISCONNECTED; /** The timer we're using to delay reconnects. */ private Timer reconnectTimer; /** Channels we're meant to auto-join. */ private final List<String> autochannels; /** The tabcompleter used for this server. */ private final TabCompleter tabCompleter = new TabCompleter(); /** The last activated internal frame for this server. */ private FrameContainer activeFrame = this; /** The config manager for this server. */ private ConfigManager configManager; /** Our reason for being away, if any. */ private String awayMessage = null; /** Our event handler. */ private final ServerEventHandler eventHandler = new ServerEventHandler(this); /** A list of outstanding invites. */ private final List<Invite> invites = new ArrayList<Invite>(); /** Our ignore list. */ private IgnoreList ignoreList = new IgnoreList(); /** * Creates a new instance of Server. 
* * @param server The hostname/ip of the server to connect to * @param port The port to connect to * @param password The server password * @param ssl Whether to use SSL or not * @param profile The profile to use */ public Server(final String server, final int port, final String password, final boolean ssl, final Identity profile) { this(server, port, password, ssl, profile, new ArrayList<String>()); } /** * Creates a new instance of Server. * * @param server The hostname/ip of the server to connect to * @param port The port to connect to * @param password The server password * @param ssl Whether to use SSL or not * @param profile The profile to use * @param autochannels A list of channels to auto-join when we connect */ public Server(final String server, final int port, final String password, final boolean ssl, final Identity profile, final List<String> autochannels) { super(); serverInfo = new ServerInfo(server, port, password); serverInfo.setSSL(ssl); ServerManager.getServerManager().registerServer(this); configManager = new ConfigManager("", "", server); window = Main.getUI().getServer(this); WindowManager.addWindow(window); window.setTitle(server + ":" + port); tabCompleter.addEntries(AliasWrapper.getAliasWrapper().getAliases()); window.getInputHandler().setTabCompleter(tabCompleter); updateIcon(); window.open(); tabCompleter.addEntries(CommandManager.getCommandNames(CommandType.TYPE_SERVER)); tabCompleter.addEntries(CommandManager.getCommandNames(CommandType.TYPE_GLOBAL)); this.autochannels = autochannels; new Timer("Server Who Timer").scheduleAtFixedRate(new TimerTask() { @Override public void run() { for (Channel channel : channels.values()) { channel.checkWho(); } } }, 0, configManager.getOptionInt(DOMAIN_GENERAL, "whotime", 60000)); if (configManager.getOptionBool(DOMAIN_GENERAL, "showrawwindow", false)) { addRaw(); } connect(server, port, password, ssl, profile); } /** * Connects to a new server with the specified details. 
* * @param server The hostname/ip of the server to connect to * @param port The port to connect to * @param password The server password * @param ssl Whether to use SSL or not * @param profile The profile to use */ @Precondition({ "The IRC Parser is null or not connected", "The specified profile is not null" }) public void connect(final String server, final int port, final String password, final boolean ssl, final Identity profile) { assert(profile != null); synchronized(myState) { switch (myState) { case RECONNECT_WAIT: reconnectTimer.cancel(); break; case CLOSING: // Ignore the connection attempt return; case CONNECTED: case CONNECTING: disconnect(configManager.getOption(DOMAIN_GENERAL, "quitmessage")); break; default: // Do nothing break; } myState = ServerState.CONNECTING; } ActionManager.processEvent(CoreActionType.SERVER_CONNECTING, null, this); assert(parser == null || parser.getSocketState() != IRCParser.STATE_OPEN); serverInfo = new ServerInfo(server, port, password); serverInfo.setSSL(ssl); this.profile = profile; configManager = new ConfigManager("", "", server); updateIcon(); addLine("serverConnecting", server, port); final MyInfo myInfo = getMyInfo(); parser = new IRCParser(myInfo, serverInfo); parser.setRemoveAfterCallback(true); parser.setCreateFake(true); parser.setAddLastLine(true); parser.setIgnoreList(ignoreList); if (configManager.hasOption(DOMAIN_GENERAL, "bindip")) { parser.setBindIP(configManager.getOption(DOMAIN_GENERAL, "bindip")); } doCallbacks(); awayMessage = null; invites.clear(); window.setAwayIndicator(false); try { new Thread(parser, "IRC Parser thread").start(); } catch (IllegalThreadStateException ex) { Logger.appError(ErrorLevel.FATAL, "Unable to start IRC Parser", ex); } } /** * Reconnects to the IRC server with a specified reason. 
 *
     * @param reason The quit reason to send
     */
    public void reconnect(final String reason) {
        // NOTE(review): synchronizing on myState is fragile — myState is a
        // mutable field that other methods reassign while holding its lock,
        // so two threads can end up locking *different* ServerState objects.
        // Consider a dedicated final lock object. TODO confirm and fix.
        synchronized(myState) {
            if (myState == ServerState.CLOSING) {
                return;
            }
        }

        disconnect(reason);

        connect(serverInfo.getHost(), serverInfo.getPort(),
                serverInfo.getPassword(), serverInfo.getSSL(), profile);
    }

    /**
     * Reconnects to the IRC server using the configured reconnect message.
     */
    public void reconnect() {
        reconnect(configManager.getOption(DOMAIN_GENERAL, "reconnectmessage"));
    }

    /**
     * Disconnects from the server with the default quit message.
     */
    public void disconnect() {
        disconnect(configManager.getOption(DOMAIN_GENERAL, "quitmessage"));
    }

    /**
     * Disconnects from the server.
     *
     * @param reason disconnect reason
     */
    public void disconnect(final String reason) {
        // See NOTE(review) in reconnect(String) about locking on myState.
        synchronized(myState) {
            switch (myState) {
            // Already (being) disconnected: nothing to do.
            case CLOSING:
            case DISCONNECTED:
            case TRANSIENTLY_DISCONNECTED:
                return;
            // A reconnect is pending; cancel it before disconnecting.
            case RECONNECT_WAIT:
                reconnectTimer.cancel();
                break;
            default:
                break;
            }

            myState = ServerState.DISCONNECTED;
        }

        removeInvites();
        updateIcon();

        // Only talk to the parser if the socket is actually open; the
        // channel/query windows are closed or cleared per user preference.
        if (parser != null && parser.getSocketState() == IRCParser.STATE_OPEN) {
            parser.disconnect(reason);

            if (configManager.getOptionBool(DOMAIN_GENERAL,
                    "closechannelsonquit", false)) {
                closeChannels();
            } else {
                clearChannels();
            }

            if (configManager.getOptionBool(DOMAIN_GENERAL,
                    "closequeriesonquit", false)) {
                closeQueries();
            }
        }
    }

    /**
     * Schedules a reconnect attempt to be performed after a user-defined
     * delay (the "reconnectdelay" setting, in seconds, minimum 1).
     */
    private void doDelayedReconnect() {
        final int delay = Math.max(1, configManager.getOptionInt(
                DOMAIN_GENERAL, "reconnectdelay", 5));

        handleNotification("connectRetry", getName(), delay);

        reconnectTimer = new Timer("Server Reconnect Timer");
        reconnectTimer.schedule(new TimerTask() {
            @Override
            public void run() {
                // Only fire if nothing else changed our state in the
                // meantime (e.g. a manual disconnect or close).
                synchronized(myState) {
                    if (myState == ServerState.RECONNECT_WAIT) {
                        myState = ServerState.TRANSIENTLY_DISCONNECTED;
                        reconnect();
                    }
                }
            }
        }, delay * 1000);

        // NOTE(review): this state change happens outside any lock, after
        // the timer is already scheduled — a tiny window where the task
        // could run before RECONNECT_WAIT is set. TODO confirm intent.
        myState = ServerState.RECONNECT_WAIT;

        updateIcon();
    }

    /**
     * Determines whether the server knows of the specified channel.
* * @param channel The channel to be checked * @return True iff the channel is known, false otherwise */ public boolean hasChannel(final String channel) { return parser != null && channels.containsKey(parser.toLowerCase(channel)); } /** * Retrieves the specified channel belonging to this server. * * @param channel The channel to be retrieved * @return The appropriate channel object */ public Channel getChannel(final String channel) { return channels.get(parser.toLowerCase(channel)); } /** * Retrieves a list of channel names belonging to this server. * * @return list of channel names belonging to this server */ public List<String> getChannels() { final ArrayList<String> res = new ArrayList<String>(); for (String channel : channels.keySet()) { res.add(channel); } return res; } /** * Determines whether the server knows of the specified query. * * @param host The host of the query to look for * @return True iff the query is known, false otherwise */ public boolean hasQuery(final String host) { final String nick = ClientInfo.parseHost(host); for (Query query : queries) { if (parser.equalsIgnoreCase(ClientInfo.parseHost(query.getHost()), nick)) { return true; } } return false; } /** * Retrieves the specified query belonging to this server. * * @param host The host of the query to look for * @return The appropriate query object */ public Query getQuery(final String host) { final String nick = ClientInfo.parseHost(host); for (Query query : queries) { if (parser.equalsIgnoreCase(ClientInfo.parseHost(query.getHost()), nick)) { return query; } } throw new IllegalArgumentException("No such query: " + host); } /** * Retrieves a list of queries belonging to this server. * * @return list of queries belonging to this server */ public List<Query> getQueries() { return new ArrayList<Query>(queries); } /** * Adds a raw window to this server. 
*/ public void addRaw() { if (raw == null) { raw = new Raw(this); if (parser != null) { raw.registerCallbacks(); } } else { raw.activateFrame(); } } /** * Retrieves the raw window associated with this server. * * @return The raw window associated with this server. */ public Raw getRaw() { return raw; } /** * Removes our reference to the raw object (presumably after it has been * closed). */ public void delRaw() { raw = null; //NOPMD } /** * Removes a specific channel and window from this server. * * @param chan channel to remove */ public void delChannel(final String chan) { tabCompleter.removeEntry(chan); channels.remove(parser.toLowerCase(chan)); } /** * Adds a specific channel and window to this server. * * @param chan channel to add */ public void addChannel(final ChannelInfo chan) { if (hasChannel(chan.getName())) { getChannel(chan.getName()).setChannelInfo(chan); getChannel(chan.getName()).selfJoin(); } else { final Channel newChan = new Channel(this, chan); tabCompleter.addEntry(chan.getName()); channels.put(parser.toLowerCase(chan.getName()), newChan); newChan.show(); } } /** * Adds a query to this server. * * @param host host of the remote client being queried */ public void addQuery(final String host) { if (!hasQuery(host)) { final Query newQuery = new Query(this, host); tabCompleter.addEntry(ClientInfo.parseHost(host)); queries.add(newQuery); } } /** * Deletes a query from this server. * * @param query The query that should be removed. 
*/ public void delQuery(final Query query) { tabCompleter.removeEntry(query.getNickname()); queries.remove(query); } /** {@inheritDoc} */ @Override public boolean ownsFrame(final Window target) { // Check if it's our server frame if (window != null && window.equals(target)) { return true; } // Check if it's the raw frame if (raw != null && raw.ownsFrame(target)) { return true; } // Check if it's a channel frame for (Channel channel : channels.values()) { if (channel.ownsFrame(target)) { return true; } } // Check if it's a query frame for (Query query : queries) { if (query.ownsFrame(target)) { return true; } } return false; } /** * Sets the specified frame as the most-recently activated. * * @param source The frame that was activated */ public void setActiveFrame(final FrameContainer source) { activeFrame = source; } /** * Retrieves a list of all children of this server instance. * * @return A list of this server's children */ public List<WritableFrameContainer> getChildren() { final List<WritableFrameContainer> res = new ArrayList<WritableFrameContainer>(); if (raw != null) { res.add(raw); } res.addAll(channels.values()); res.addAll(queries); return res; } /** * Updates this server's icon. */ private void updateIcon() { icon = IconManager.getIconManager().getIcon( myState == ServerState.CONNECTED ? serverInfo.getSSL() ? "secure-server" : "server" : "server-disconnected"); if (window != null) { window.setFrameIcon(icon); iconUpdated(icon); } } /** * Retrieves the MyInfo object used for the IRC Parser. 
 *
 * @return The MyInfo object for our profile
 */
@Precondition("The current profile is not null")
private MyInfo getMyInfo() {
    assert(profile != null);

    final MyInfo myInfo = new MyInfo();

    myInfo.setNickname(profile.getOption(DOMAIN_PROFILE, "nickname"));
    myInfo.setRealname(profile.getOption(DOMAIN_PROFILE, "realname"));

    // Ident is optional; the parser presumably falls back to a default
    // username when it isn't set -- TODO confirm.
    if (profile.hasOption(DOMAIN_PROFILE, "ident")) {
        myInfo.setUsername(profile.getOption(DOMAIN_PROFILE, "ident"));
    }

    return myInfo;
}

/**
 * Registers callbacks for the raw window (if any), our event handler, and
 * every open query.
 */
private void doCallbacks() {
    if (raw != null) {
        raw.registerCallbacks();
    }

    eventHandler.registerCallbacks();

    for (Query query : queries) {
        query.reregister();
    }
}

/**
 * Joins the specified channel, or adds it to the auto-join list if the
 * server is not connected.
 *
 * @param channel The channel to be joined
 */
public void join(final String channel) {
    if (myState == ServerState.CONNECTED) {
        removeInvites(channel);

        if (hasChannel(channel)) {
            getChannel(channel).join();
            getChannel(channel).activateFrame();
        } else {
            parser.joinChannel(channel);
        }
    } else {
        // Not connected yet: remember the channel and join it after connect.
        autochannels.add(channel);
    }
}

/** {@inheritDoc} */
@Override
public void sendLine(final String line) {
    // NOTE(review): myState is reassigned elsewhere, so synchronized(myState)
    // locks whichever shared enum constant is current rather than a stable
    // per-server monitor -- confirm whether a dedicated final lock object is
    // needed here and at the other synchronized(myState) sites.
    synchronized(myState) {
        if (parser != null && myState == ServerState.CONNECTED) {
            parser.sendLine(window.getTranscoder().encode(line));
        }
    }
}

/** {@inheritDoc} */
@Override
public int getMaxLineLength() {
    return IRCParser.MAX_LINELENGTH;
}

/**
 * Retrieves the parser used for this connection.
 *
 * @return IRCParser this connection's parser
 */
public IRCParser getParser() {
    return parser;
}

/**
 * Retrieves the profile that's in use for this server.
 *
 * @return The profile in use by this server
 */
public Identity getProfile() {
    return profile;
}

/**
 * Retrieves the name of this server.
 *
 * @return The name of this server
 */
public String getName() {
    return serverInfo.getHost();
}

/**
 * Retrieves the name of this server's network. The network name is
 * determined using the following rules:
 *
 * 1.
If the server includes its network name in the 005 information, we * use that * 2. If the server's name ends in biz, com, info, net or org, we use the * second level domain (e.g., foo.com) * 3. If the server's name contains more than two dots, we drop everything * up to and including the first part, and use the remainder * 4. In all other cases, we use the full server name * * @return The name of this server's network */ public String getNetwork() { if (parser == null) { return ""; } else if (parser.getNetworkName().isEmpty()) { return getNetworkFromServerName(parser.getServerName()); } else { return parser.getNetworkName(); } } /** * Caclaultes a network name from the specified server name. This method * implements parts 2-4 of the procedure documented at getNetwork(). * * @param serverName The server name to parse * @return A network name for the specified server */ protected static String getNetworkFromServerName(final String serverName) { final String[] parts = serverName.split("\\."); final String[] tlds = {"biz", "com", "info", "net", "org"}; boolean isTLD = false; for (String tld : tlds) { if (serverName.endsWith("." + tld)) { isTLD = true; } } if (isTLD && parts.length > 2) { return parts[parts.length - 2] + "." + parts[parts.length - 1]; } else if (parts.length > 2) { final StringBuilder network = new StringBuilder(); for (int i = 1; i < parts.length; i++) { if (network.length() > 0) { network.append('.'); } network.append(parts[i]); } return network.toString(); } else { return serverName; } } /** * Retrieves the name of this server's IRCd. * * @return The name of this server's IRCd */ public String getIrcd() { return parser.getIRCD(true); } /** * Returns the current away status. * * @return True if the client is marked as away, false otherwise */ public boolean isAway() { return awayMessage != null; } /** * Gets the current away message. 
 *
 * @return Null if the client isn't away, or a textual away message if it is
 */
public String getAwayMessage() {
    return awayMessage;
}

/**
 * Returns the tab completer for this connection.
 *
 * @return The tab completer for this server
 */
public TabCompleter getTabCompleter() {
    return tabCompleter;
}

/** {@inheritDoc} */
@Override
public InputWindow getFrame() {
    return window;
}

/** {@inheritDoc} */
@Override
public ConfigManager getConfigManager() {
    return configManager;
}

/**
 * Retrieves the current state for this server.
 *
 * @return This server's state
 */
public ServerState getState() {
    return myState;
}

/** {@inheritDoc} */
@Override
public void windowClosing() {
    // 1: Make the window non-visible
    window.setVisible(false);

    // 2: Remove any callbacks or listeners
    if (parser != null) {
        parser.getCallbackManager().delAllCallback(eventHandler);
    }

    // 3: Trigger any actions necessary
    if (parser != null && parser.isReady()) {
        disconnect();
    }

    myState = ServerState.CLOSING;

    closeChannels();
    closeQueries();
    removeInvites();

    if (raw != null) {
        raw.close();
    }

    // 4: Trigger action for the window closing
    // 5: Inform any parents that the window is closing
    ServerManager.getServerManager().unregisterServer(this);

    // 6: Remove the window from the window manager
    WindowManager.removeWindow(window);

    // 7: Remove any references to the window and parents
    window = null; //NOPMD
    parser = null; //NOPMD
}

/**
 * Closes all open channel windows associated with this server.
 */
private void closeChannels() {
    // Iterate a snapshot: closing a channel removes it from the live map.
    for (Channel channel : new ArrayList<Channel>(channels.values())) {
        channel.close();
    }
}

/**
 * Clears the nicklist of all open channels.
 */
private void clearChannels() {
    for (Channel channel : channels.values()) {
        channel.resetWindow();
    }
}

/**
 * Closes all open query windows associated with this server.
*/ private void closeQueries() { for (Query query : new ArrayList<Query>(queries)) { query.close(); } } /** * Passes the arguments to the most recently activated frame for this * server. If the frame isn't know, or isn't visible, use this frame * instead. * * @param messageType The type of message to send * @param args The arguments for the message */ public void addLineToActive(final String messageType, final Object... args) { if (activeFrame == null || !activeFrame.getFrame().isVisible()) { activeFrame = this; } activeFrame.getFrame().addLine(messageType, args); } /** * Passes the arguments to all frames for this server. * * @param messageType The type of message to send * @param args The arguments of the message */ public void addLineToAll(final String messageType, final Object... args) { for (Channel channel : channels.values()) { channel.getFrame().addLine(messageType, args); } for (Query query : queries) { query.getFrame().addLine(messageType, args); } addLine(messageType, args); } /** * Replies to an incoming CTCP message. * * @param source The source of the message * @param type The CTCP type * @param args The CTCP arguments */ public void sendCTCPReply(final String source, final String type, final String args) { if (type.equalsIgnoreCase("VERSION")) { parser.sendCTCPReply(source, "VERSION", "DMDirc " + Main.VERSION + " - http: } else if (type.equalsIgnoreCase("PING")) { parser.sendCTCPReply(source, "PING", args); } else if (type.equalsIgnoreCase("CLIENTINFO")) { parser.sendCTCPReply(source, "CLIENTINFO", "VERSION PING CLIENTINFO"); } } /** * Determines if the specified channel name is valid. A channel name is * valid if we already have an existing Channel with the same name, or * we have a valid parser instance and the parser says it's valid. 
* * @param channelName The name of the channel to test * @return True if the channel name is valid, false otherwise */ public boolean isValidChannelName(String channelName) { return hasChannel(channelName) || (parser != null && parser.isValidChannelName(channelName)); } /** * Returns this server's name. * * @return A string representation of this server (i.e., its name) */ @Override public String toString() { return getName(); } /** * Returns the server instance associated with this frame. * * @return the associated server connection */ @Override public Server getServer() { return this; } /** {@inheritDoc} */ @Override protected boolean processNotificationArg(final Object arg, final List<Object> args) { if (arg instanceof ClientInfo) { final ClientInfo clientInfo = (ClientInfo) arg; args.add(clientInfo.getNickname()); args.add(clientInfo.getIdent()); args.add(clientInfo.getHost()); return true; } else { return super.processNotificationArg(arg, args); } } /** * Retusnt the list of invites for this server. * * @return Invite list */ public List<Invite> getInvites() { return invites; } /** * Called when the server says that the nickname we're trying to use is * already in use. 
 *
 * @param nickname The nickname that we were trying to use
 */
public void onNickInUse(final String nickname) {
    final String lastNick = parser.getMyNickname();

    // If our last nick is still valid, ignore the in use message
    if (!parser.equalsIgnoreCase(lastNick, nickname)) {
        return;
    }

    // Fallback: append a random digit (0-9) to the rejected nickname.
    String newNick = lastNick + (int) (Math.random() * 10);

    if (profile.hasOption(DOMAIN_PROFILE, "altnicks")) {
        final String[] alts = profile.getOption(DOMAIN_PROFILE, "altnicks").split("\n");
        int offset = 0;

        // If we're already on an alternative nick, advance to the entry
        // after it; if we're still on the primary nick, start at the first
        // alternative (offset stays 0).
        if (!parser.equalsIgnoreCase(lastNick,
                profile.getOption(DOMAIN_PROFILE, "nickname"))) {
            for (String alt : alts) {
                offset++;
                if (parser.equalsIgnoreCase(alt, lastNick)) {
                    break;
                }
            }
        }

        // NOTE(review): if lastNick isn't found in alts, offset ends up at
        // alts.length and the random-digit fallback is used -- confirm this
        // is intended.
        if (offset < alts.length && !alts[offset].isEmpty()) {
            newNick = alts[offset];
        }
    }

    parser.setNickname(newNick);
}

/**
 * Called when the server sends a numeric event.
 *
 * @param numeric The numeric code for the event
 * @param tokens The (tokenised) arguments of the event
 */
public void onNumeric(final int numeric, final String[] tokens) {
    // Zero-pad the numeric to three digits (e.g. 1 -> "001").
    String snumeric = String.valueOf(numeric);

    if (numeric < 10) {
        snumeric = "00" + snumeric;
    } else if (numeric < 100) {
        snumeric = "0" + snumeric;
    }

    // Prefer an ircd-specific format, then a generic numeric format, then
    // the catch-all "numeric_unknown" format.
    final String withIrcd = "numeric_" + parser.getIRCD(true) + "_" + snumeric;
    final String sansIrcd = "numeric_" + snumeric;
    String target = null;

    if (Formatter.hasFormat(withIrcd)) {
        target = withIrcd;
    } else if (Formatter.hasFormat(sansIrcd)) {
        target = sansIrcd;
    } else if (Formatter.hasFormat("numeric_unknown")) {
        target = "numeric_unknown";
    }

    if (target != null) {
        handleNotification(target, (Object[]) tokens);
    }

    ActionManager.processEvent(CoreActionType.SERVER_NUMERIC, null, this,
            Integer.valueOf(numeric), tokens);
}

/**
 * Called when the socket has been closed.
 */
public void onSocketClosed() {
    handleNotification("socketClosed", getName());

    ActionManager.processEvent(CoreActionType.SERVER_DISCONNECTED, null, this);

    // NOTE(review): myState is reassigned over time, so synchronized(myState)
    // locks whichever shared enum constant is current -- confirm whether a
    // dedicated final lock object should be used instead.
    synchronized(myState) {
        if (myState == ServerState.CLOSING
                || myState == ServerState.DISCONNECTED) {
            // This has been triggered via .disconnect()
            return;
        }

        myState = ServerState.TRANSIENTLY_DISCONNECTED;
    }

    updateIcon();

    if (configManager.getOptionBool(DOMAIN_GENERAL,
            "closechannelsondisconnect", false)) {
        closeChannels();
    } else {
        // Keep the channel windows open but empty their nicklists.
        clearChannels();
    }

    if (configManager.getOptionBool(DOMAIN_GENERAL,
            "closequeriesondisconnect", false)) {
        closeQueries();
    }

    removeInvites();

    if (configManager.getOptionBool(DOMAIN_GENERAL,
            "reconnectondisconnect", false)) {
        doDelayedReconnect();
    }
}

/**
 * Called when an error was encountered while connecting.
 *
 * @param errorInfo The parser's error information
 */
@Precondition("The current server state is CONNECTING")
public void onConnectError(final ParserError errorInfo) {
    synchronized(myState) {
        assert(myState == ServerState.CONNECTING);

        myState = ServerState.TRANSIENTLY_DISCONNECTED;
    }

    updateIcon();

    // Map the underlying exception (if any) to a human-readable description.
    String description;

    if (errorInfo.getException() == null) {
        description = errorInfo.getData();
    } else {
        final Exception exception = errorInfo.getException();

        if (exception instanceof java.net.UnknownHostException) {
            description = "Unknown host (unable to resolve)";
        } else if (exception instanceof java.net.NoRouteToHostException) {
            description = "No route to host";
        } else if (exception instanceof java.net.SocketException) {
            description = exception.getMessage();
        } else {
            // Unexpected exception type: record it for the developers.
            Logger.appError(ErrorLevel.LOW, "Unknown socket error", exception);
            description = "Unknown error: " + exception.getMessage();
        }
    }

    ActionManager.processEvent(CoreActionType.SERVER_CONNECTERROR, null,
            this, description);

    handleNotification("connectError", getName(), description);

    if (configManager.getOptionBool(DOMAIN_GENERAL,
            "reconnectonconnectfailure", false)) {
        doDelayedReconnect();
    }
}

/**
 * Called when we fail to
 receive a ping reply within a set period of time.
 */
public void onPingFailed() {
    Main.getUI().getStatusBar().setMessage("No ping reply from "
            + getName() + " for over "
            + ((int) (Math.floor(parser.getPingTime(false) / 1000.0)))
            + " seconds.", null, 10);

    ActionManager.processEvent(CoreActionType.SERVER_NOPING, null, this,
            Long.valueOf(parser.getPingTime(false)));

    // If the server has been silent beyond the configured timeout, assume
    // it's stoned and force a reconnect.
    if (parser.getPingTime(false)
            >= configManager.getOptionInt(DOMAIN_SERVER, "pingtimeout", 60000)) {
        handleNotification("stonedServer", getName());
        reconnect();
    }
}

/**
 * Called after the parser receives the 005 headers from the server.
 */
public void onPost005() {
    synchronized(myState) {
        myState = ServerState.CONNECTED;
    }
    updateIcon();

    // Rebuild the config now we know the real ircd/network/server names.
    configManager = new ConfigManager(parser.getIRCD(true), getNetwork(), getName());

    updateIgnoreList();

    ActionManager.processEvent(CoreActionType.SERVER_CONNECTED, null, this);

    if (configManager.hasOption(DOMAIN_GENERAL, "rejoinchannels")) {
        for (Channel chan : channels.values()) {
            chan.join();
        }
    }

    // Join anything queued up while we were disconnected.
    for (String channel : autochannels) {
        parser.joinChannel(channel);
    }

    checkModeAliases();
}

/**
 * Checks that we have the necessary mode aliases for this server.
*/ private void checkModeAliases() { // Check we have mode aliases final String modes = parser.getBoolChanModes() + parser.getListChanModes() + parser.getSetOnlyChanModes() + parser.getSetUnsetChanModes(); final String umodes = parser.getUserModeString(); final StringBuffer missingModes = new StringBuffer(); final StringBuffer missingUmodes = new StringBuffer(); for (char mode : modes.toCharArray()) { if (!configManager.hasOption(DOMAIN_SERVER, "mode" + mode)) { missingModes.append(mode); } } for (char mode : umodes.toCharArray()) { if (!configManager.hasOption(DOMAIN_SERVER, "umode" + mode)) { missingUmodes.append(mode); } } if (missingModes.length() + missingUmodes.length() > 0) { final StringBuffer missing = new StringBuffer("Missing mode aliases: "); if (missingModes.length() > 0) { missing.append("channel: +"); missing.append(missingModes); } if (missingUmodes.length() > 0) { if (missingModes.length() > 0) { missing.append(' '); } missing.append("user: +"); missing.append(missingUmodes); } Logger.appError(ErrorLevel.LOW, missing.toString() + " [" + getNetwork() + "]", new Exception(missing.toString() + "\n" // NOPMD + "Network: " + getNetwork() + "\n" + "IRCd: " + parser.getIRCD(false) + " (" + parser.getIRCD(true) + ")\n\n")); } } /** * Retrieves this server's ignore list. * * @return This server's ignore list */ public IgnoreList getIgnoreList() { return ignoreList; } /** * Updates this server's ignore list to use the entries stored in the * config manager. */ public void updateIgnoreList() { ignoreList.clear(); ignoreList.addAll(configManager.getOptionList("network", "ignorelist")); } /** * Saves the contents of our ignore list to the network identity. */ public void saveIgnoreList() { getNetworkIdentity().setOption("network", "ignorelist", ignoreList.getRegexList()); } /** * Retrieves the identity for this server. 
* * @return This server's identity */ public Identity getServerIdentity() { return IdentityManager.getServerConfig(getName()); } /** * Retrieves the identity for this server's network. * * @return This server's network identity */ public Identity getNetworkIdentity() { return IdentityManager.getNetworkConfig(getNetwork()); } /** * Adds an invite listener to this server. * * @param listener The listener to be added */ public void addInviteListener(final InviteListener listener) { listeners.add(InviteListener.class, listener); } /** * Removes an invite listener from this server. * * @param listener The listener to be removed */ public void removeInviteListener(final InviteListener listener) { listeners.remove(InviteListener.class, listener); } /** * Adds an invite to this server, and fires the appropriate listeners. * * @param invite The invite to be added */ public void addInvite(final Invite invite) { for (Invite oldInvite : new ArrayList<Invite>(invites)) { if (oldInvite.getChannel().equals(invite.getChannel())) { removeInvite(oldInvite); } } invites.add(invite); for (InviteListener listener : listeners.get(InviteListener.class)) { listener.inviteReceived(this, invite); } } /** * Removes all invites for the specified channel. * * @param channel The channel to remove invites for */ public void removeInvites(final String channel) { for (Invite invite : new ArrayList<Invite>(invites)) { if (invite.getChannel().equals(channel)) { removeInvite(invite); } } } /** * Removes all invites for all channels. */ private void removeInvites() { for (Invite invite : new ArrayList<Invite>(invites)) { removeInvite(invite); } } /** * Removes an invite from this server, and fires the appropriate listeners. * * @param invite The invite to be removed */ public void removeInvite(final Invite invite) { invites.remove(invite); for (InviteListener listener : listeners.get(InviteListener.class)) { listener.inviteExpired(this, invite); } } /** * Adds an away state lisener to this server. 
* * @param listener The listener to be added */ public void addAwayStateListener(final AwayStateListener listener) { listeners.add(AwayStateListener.class, listener); } /** * Removes an away state lisener from this server. * * @param listener The listener to be removed */ public void removeAwayStateListener(final AwayStateListener listener) { listeners.remove(AwayStateListener.class, listener); } /** * Updates our away state and fires the relevant listeners. * * @param message The away message to use, or null if we're not away. */ public void updateAwayState(final String message) { awayMessage = message; if (message == null) { for (AwayStateListener listener : listeners.get(AwayStateListener.class)) { listener.onBack(); } } else { for (AwayStateListener listener : listeners.get(AwayStateListener.class)) { listener.onAway(message); } } } }
package com.jacob.com;

import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Tracks COM wrapper objects so their native resources can be released when
 * the Java side no longer references them ("running object table").
 */
public abstract class ROT {

    /** Whether the current thread has been added to the MTA apartment. */
    private static ThreadLocal<Boolean> initMTA = new ThreadLocal<Boolean>() {
        @Override
        public Boolean initialValue() {
            // Boolean.FALSE instead of the deprecated new Boolean(false):
            // the boxing constructor creates needless instances.
            return Boolean.FALSE;
        }
    };

    /** All live wrappers; the Boolean value is just a placeholder. */
    public static Map<PointerWeakReference, Boolean> objects =
            new ConcurrentHashMap<PointerWeakReference, Boolean>();

    /**
     * Number of addObject() calls so far. AtomicInteger because addObject may
     * run on multiple threads (the map it guards is a ConcurrentHashMap); the
     * original bare int count++ raced.
     */
    private static final AtomicInteger count = new AtomicInteger();

    /** Cull the dead pool every CULL_COUNT additions. */
    private static final int CULL_COUNT;

    /** Request System.gc() every GC_COUNT additions; -1 disables this. */
    private static final int GC_COUNT;

    static {
        // Defaults match the previous behavior; a malformed property value
        // still fails fast with NumberFormatException.
        CULL_COUNT = Integer.parseInt(
                System.getProperty("com.jacob.cull_count", "2000"));
        GC_COUNT = Integer.parseInt(
                System.getProperty("com.jacob.gc_count", "-1"));
    }

    /** Per-thread queue collecting weak references whose referents died. */
    private static ThreadLocal<ReferenceQueue<IUnknown>> deadPool =
            new ThreadLocal<ReferenceQueue<IUnknown>>() {
                @Override
                protected ReferenceQueue<IUnknown> initialValue() {
                    return new ReferenceQueue<IUnknown>();
                }
            };

    /**
     * Returns the (shared) object table.
     *
     * @param ignored unused; retained for backward compatibility
     * @return the shared map of live references
     */
    public static Map<PointerWeakReference, Boolean> getThreadObjects(
            boolean ignored) {
        return objects;
    }

    /**
     * safeRelease all remaining alive objects.
     */
    protected static void clearObjects() {
        if (IUnknown.isDebugEnabled()) {
            IUnknown.debug("ROT: " + objects.size()
                    + " objects to clear in this thread's ROT ");
        }
        // walk the keys, releasing any referent that is still alive
        for (PointerWeakReference reference : objects.keySet()) {
            IUnknown value = (IUnknown) reference.get();
            if (value != null) {
                value.safeRelease();
            }
        }
        objects.clear();
    }

    /**
     * Stores object so it can be released in COM when it is no longer
     * referenced.
     *
     * @param o the COM wrapper to track
     */
    protected static void addObject(IUnknown o) {
        // If a new thread joins we need to add it to the apartment
        if (!initMTA.get()) {
            ComThread.InitMTA(false);
            initMTA.set(Boolean.TRUE);
        }

        ReferenceQueue<IUnknown> deadObjects = deadPool.get();
        objects.put(new PointerWeakReference(o, deadObjects), Boolean.FALSE);

        // Both checks observe the same pre-increment tick, exactly as the
        // original (count, then count++) did.
        final int tick = count.getAndIncrement();

        if (GC_COUNT != -1 && (tick % GC_COUNT) == 0) {
            System.gc();
        }

        if ((tick % CULL_COUNT) == 0) {
            int numberCulled = cullDeadPool(deadObjects, objects);
            if (IUnknown.isDebugEnabled() && numberCulled > 0) {
                IUnknown.debug("ROT: added instance of "
                        + o.getClass().getSimpleName() + "->[+1, -"
                        + numberCulled + "] with " + objects.size()
                        + " remaining live objects");
            }
        }
    }

    /**
     * Releases the native side of every garbage-collected wrapper queued in
     * deadObjects and removes it from the live list.
     *
     * @param deadObjects queue of references whose referents were collected
     * @param liveList map the dead references are removed from
     * @return the number of objects released
     */
    protected static int cullDeadPool(ReferenceQueue<IUnknown> deadObjects,
            Map<PointerWeakReference, Boolean> liveList) {
        int numberReleased = 0;
        Reference<? extends IUnknown> deadReference;
        while ((deadReference = deadObjects.poll()) != null) {
            final PointerWeakReference dead =
                    (PointerWeakReference) deadReference;
            dead.safeRelease();
            liveList.remove(dead);
            numberReleased++;
        }
        return numberReleased;
    }

    /*
     * ROT can't be a subclass of IUnknown because of the way ROT pools are
     * managed so we force a DLL load here by referencing IUnknown
     */
    static {
        LibraryLoader.loadJacobLibrary();
    }
}
package com.cloud.vm; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import javax.ejb.Local; import javax.naming.ConfigurationException; import org.apache.log4j.Logger; import com.cloud.agent.AgentManager; import com.cloud.agent.AgentManager.OnError; import com.cloud.agent.Listener; import com.cloud.agent.api.AgentControlAnswer; import com.cloud.agent.api.AgentControlCommand; import com.cloud.agent.api.Answer; import com.cloud.agent.api.CheckVirtualMachineAnswer; import com.cloud.agent.api.CheckVirtualMachineCommand; import com.cloud.agent.api.Command; import com.cloud.agent.api.MigrateAnswer; import com.cloud.agent.api.MigrateCommand; import com.cloud.agent.api.PingRoutingCommand; import com.cloud.agent.api.PrepareForMigrationAnswer; import com.cloud.agent.api.PrepareForMigrationCommand; import com.cloud.agent.api.RebootAnswer; import com.cloud.agent.api.RebootCommand; import com.cloud.agent.api.StartAnswer; import com.cloud.agent.api.StartCommand; import com.cloud.agent.api.StartupCommand; import com.cloud.agent.api.StartupRoutingCommand; import com.cloud.agent.api.StopAnswer; import com.cloud.agent.api.StopCommand; import com.cloud.agent.api.to.VirtualMachineTO; import com.cloud.agent.manager.Commands; import com.cloud.agent.manager.allocator.HostAllocator; import com.cloud.alert.AlertManager; import com.cloud.capacity.CapacityManager; import com.cloud.cluster.ClusterManager; import com.cloud.cluster.StackMaid; import com.cloud.configuration.Config; import com.cloud.configuration.ConfigurationManager; import com.cloud.configuration.ResourceCount.ResourceType; import com.cloud.configuration.dao.ConfigurationDao; import com.cloud.consoleproxy.ConsoleProxyManager; import 
com.cloud.dc.DataCenter; import com.cloud.dc.DataCenterVO; import com.cloud.dc.HostPodVO; import com.cloud.dc.dao.DataCenterDao; import com.cloud.dc.dao.HostPodDao; import com.cloud.deploy.DataCenterDeployment; import com.cloud.deploy.DeployDestination; import com.cloud.deploy.DeploymentPlan; import com.cloud.deploy.DeploymentPlanner; import com.cloud.deploy.DeploymentPlanner.ExcludeList; import com.cloud.domain.dao.DomainDao; import com.cloud.event.dao.UsageEventDao; import com.cloud.exception.AgentUnavailableException; import com.cloud.exception.ConcurrentOperationException; import com.cloud.exception.ConnectionException; import com.cloud.exception.InsufficientCapacityException; import com.cloud.exception.InsufficientServerCapacityException; import com.cloud.exception.ManagementServerException; import com.cloud.exception.OperationTimedoutException; import com.cloud.exception.ResourceUnavailableException; import com.cloud.exception.VirtualMachineMigrationException; import com.cloud.ha.HighAvailabilityManager; import com.cloud.ha.HighAvailabilityManager.WorkType; import com.cloud.host.Host; import com.cloud.host.HostVO; import com.cloud.host.Status; import com.cloud.host.dao.HostDao; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.HypervisorGuru; import com.cloud.hypervisor.HypervisorGuruManager; import com.cloud.network.Network; import com.cloud.network.NetworkManager; import com.cloud.network.NetworkVO; import com.cloud.offering.ServiceOffering; import com.cloud.org.Cluster; import com.cloud.service.ServiceOfferingVO; import com.cloud.service.dao.ServiceOfferingDao; import com.cloud.storage.DiskOfferingVO; import com.cloud.storage.Storage.ImageFormat; import com.cloud.storage.StorageManager; import com.cloud.storage.StoragePoolVO; import com.cloud.storage.VMTemplateVO; import com.cloud.storage.Volume; import com.cloud.storage.Volume.Type; import com.cloud.storage.VolumeVO; import com.cloud.storage.dao.GuestOSCategoryDao; import 
com.cloud.storage.dao.GuestOSDao; import com.cloud.storage.dao.StoragePoolDao; import com.cloud.storage.dao.VMTemplateDao; import com.cloud.storage.dao.VolumeDao; import com.cloud.user.Account; import com.cloud.user.AccountManager; import com.cloud.user.User; import com.cloud.user.dao.AccountDao; import com.cloud.user.dao.UserDao; import com.cloud.uservm.UserVm; import com.cloud.utils.Journal; import com.cloud.utils.NumbersUtil; import com.cloud.utils.Pair; import com.cloud.utils.Ternary; import com.cloud.utils.component.Adapters; import com.cloud.utils.component.ComponentLocator; import com.cloud.utils.component.Inject; import com.cloud.utils.concurrency.NamedThreadFactory; import com.cloud.utils.db.DB; import com.cloud.utils.db.GlobalLock; import com.cloud.utils.db.Transaction; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.exception.ExecutionException; import com.cloud.utils.fsm.NoTransitionException; import com.cloud.utils.fsm.StateMachine2; import com.cloud.vm.ItWorkVO.Step; import com.cloud.vm.VirtualMachine.Event; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.dao.ConsoleProxyDao; import com.cloud.vm.dao.DomainRouterDao; import com.cloud.vm.dao.NicDao; import com.cloud.vm.dao.SecondaryStorageVmDao; import com.cloud.vm.dao.UserVmDao; import com.cloud.vm.dao.VMInstanceDao; @Local(value = VirtualMachineManager.class) public class VirtualMachineManagerImpl implements VirtualMachineManager, Listener { private static final Logger s_logger = Logger.getLogger(VirtualMachineManagerImpl.class); String _name; @Inject protected StorageManager _storageMgr; @Inject protected NetworkManager _networkMgr; @Inject protected AgentManager _agentMgr; @Inject protected VMInstanceDao _vmDao; @Inject protected ServiceOfferingDao _offeringDao; @Inject protected VMTemplateDao _templateDao; @Inject protected UserDao _userDao; @Inject protected AccountDao _accountDao; @Inject protected DomainDao _domainDao; @Inject protected ClusterManager 
_clusterMgr; @Inject protected ItWorkDao _workDao; @Inject protected UserVmDao _userVmDao; @Inject protected DomainRouterDao _routerDao; @Inject protected ConsoleProxyDao _consoleDao; @Inject protected SecondaryStorageVmDao _secondaryDao; @Inject protected UsageEventDao _usageEventDao; @Inject protected NicDao _nicsDao; @Inject protected AccountManager _accountMgr; @Inject protected HostDao _hostDao; @Inject protected AlertManager _alertMgr; @Inject protected GuestOSCategoryDao _guestOsCategoryDao; @Inject protected GuestOSDao _guestOsDao; @Inject protected VolumeDao _volsDao; @Inject protected ConsoleProxyManager _consoleProxyMgr; @Inject protected ConfigurationManager _configMgr; @Inject protected CapacityManager _capacityMgr; @Inject protected HighAvailabilityManager _haMgr; @Inject protected HostPodDao _podDao; @Inject protected DataCenterDao _dcDao; @Inject protected StoragePoolDao _storagePoolDao; @Inject protected HypervisorGuruManager _hvGuruMgr; @Inject(adapter = DeploymentPlanner.class) protected Adapters<DeploymentPlanner> _planners; @Inject(adapter = HostAllocator.class) protected Adapters<HostAllocator> _hostAllocators; Map<VirtualMachine.Type, VirtualMachineGuru<? extends VMInstanceVO>> _vmGurus = new HashMap<VirtualMachine.Type, VirtualMachineGuru<? extends VMInstanceVO>>(); protected StateMachine2<State, VirtualMachine.Event, VirtualMachine> _stateMachine; ScheduledExecutorService _executor = null; protected int _operationTimeout; protected int _retry; protected long _nodeId; protected long _cleanupWait; protected long _cleanupInterval; protected long _cancelWait; protected long _opWaitInterval; protected int _lockStateRetry; @Override public <T extends VMInstanceVO> void registerGuru(VirtualMachine.Type type, VirtualMachineGuru<T> guru) { synchronized (_vmGurus) { _vmGurus.put(type, guru); } } @Override @DB public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering, Pair<? 
extends DiskOfferingVO, Long> rootDiskOffering, List<Pair<DiskOfferingVO, Long>> dataDiskOfferings, List<Pair<NetworkVO, NicProfile>> networks, Map<VirtualMachineProfile.Param, Object> params, DeploymentPlan plan, HypervisorType hyperType, Account owner) throws InsufficientCapacityException { if (s_logger.isDebugEnabled()) { s_logger.debug("Allocating entries for VM: " + vm); } VirtualMachineProfileImpl<T> vmProfile = new VirtualMachineProfileImpl<T>(vm, template, serviceOffering, owner, params); vm.setDataCenterId(plan.getDataCenterId()); if (plan.getPodId() != null) { vm.setPodId(plan.getPodId()); } assert (plan.getClusterId() == null && plan.getPoolId() == null) : "We currently don't support cluster and pool preset yet"; @SuppressWarnings("unchecked") VirtualMachineGuru<T> guru = (VirtualMachineGuru<T>) _vmGurus.get(vm.getType()); Transaction txn = Transaction.currentTxn(); txn.start(); vm = guru.persist(vm); if (s_logger.isDebugEnabled()) { s_logger.debug("Allocating nics for " + vm); } try { _networkMgr.allocate(vmProfile, networks); } catch (ConcurrentOperationException e) { throw new CloudRuntimeException("Concurrent operation while trying to allocate resources for the VM", e); } if (dataDiskOfferings == null) { dataDiskOfferings = new ArrayList<Pair<DiskOfferingVO, Long>>(0); } if (s_logger.isDebugEnabled()) { s_logger.debug("Allocaing disks for " + vm); } if (template.getFormat() == ImageFormat.ISO) { _storageMgr.allocateRawVolume(Type.ROOT, "ROOT-" + vm.getId(), rootDiskOffering.first(), rootDiskOffering.second(), vm, owner); } else if (template.getFormat() == ImageFormat.BAREMETAL) { // Do nothing } else { _storageMgr.allocateTemplatedVolume(Type.ROOT, "ROOT-" + vm.getId(), rootDiskOffering.first(), template, vm, owner); } for (Pair<DiskOfferingVO, Long> offering : dataDiskOfferings) { _storageMgr.allocateRawVolume(Type.DATADISK, "DATA-" + vm.getId(), offering.first(), offering.second(), vm, owner); } txn.commit(); if (s_logger.isDebugEnabled()) { 
// (tail of the main allocate(): log success and return the persisted VM)
s_logger.debug("Allocation completed for VM: " + vm);
        }
        return vm;
    }

    /**
     * Convenience overload: allocate with a root disk sized by {@code rootSize} (from the
     * service offering) and at most one optional data-disk offering.
     */
    @Override
    public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering, Long rootSize,
            Pair<DiskOfferingVO, Long> dataDiskOffering, List<Pair<NetworkVO, NicProfile>> networks, DeploymentPlan plan,
            HypervisorType hyperType, Account owner) throws InsufficientCapacityException {
        List<Pair<DiskOfferingVO, Long>> diskOfferings = new ArrayList<Pair<DiskOfferingVO, Long>>(1);
        if (dataDiskOffering != null) {
            diskOfferings.add(dataDiskOffering);
        }
        return allocate(vm, template, serviceOffering, new Pair<DiskOfferingVO, Long>(serviceOffering, rootSize), diskOfferings, networks, null, plan, hyperType, owner);
    }

    /**
     * Convenience overload: allocate with no data disks and a root disk whose size is
     * taken from the template (null size).
     */
    @Override
    public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering,
            List<Pair<NetworkVO, NicProfile>> networks, DeploymentPlan plan, HypervisorType hyperType, Account owner) throws InsufficientCapacityException {
        return allocate(vm, template, serviceOffering, new Pair<DiskOfferingVO, Long>(serviceOffering, null), null, networks, null, plan, hyperType, owner);
    }

    // Looks up the guru registered for this VM's type.
    @SuppressWarnings("unchecked")
    private <T extends VMInstanceVO> VirtualMachineGuru<T> getVmGuru(T vm) {
        return (VirtualMachineGuru<T>) _vmGurus.get(vm.getType());
    }

    // Always returns the UserBareMetal guru; the vm argument only supplies the type parameter.
    @SuppressWarnings("unchecked")
    private <T extends VMInstanceVO> VirtualMachineGuru<T> getBareMetalVmGuru(T vm) {
        return (VirtualMachineGuru<T>) _vmGurus.get(VirtualMachine.Type.UserBareMetal);
    }

    /**
     * Expunges the VM (stop + resource cleanup) and, on success, removes its DB record.
     * Checked timeout/concurrency failures are wrapped in CloudRuntimeException.
     */
    @Override
    public <T extends VMInstanceVO> boolean expunge(T vm, User caller, Account account) throws ResourceUnavailableException {
        try {
            if (advanceExpunge(vm, caller, account)) {
                // Mark vms as removed
                remove(vm, caller, account);
                return true;
            } else {
                s_logger.info("Did not expunge " + vm);
                return false;
            }
        } catch (OperationTimedoutException e) {
            throw new CloudRuntimeException("Operation timed out", e);
        } catch (ConcurrentOperationException e) {
            throw new CloudRuntimeException("Concurrent operation ", e);
        }
    }

    /**
     * Stops the VM, transitions it to Expunging, and cleans up its nics and volumes.
     * Returns true when the VM is gone (or was already removed); false when the state
     * transition is not permitted.
     */
    @Override
    public <T extends VMInstanceVO> boolean advanceExpunge(T vm, User caller, Account account) throws ResourceUnavailableException, OperationTimedoutException,
            ConcurrentOperationException {
        if (vm == null || vm.getRemoved() != null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Unable to find vm or vm is destroyed: " + vm);
            }
            return true;
        }
        // NOTE(review): a failed stop is only logged; the expunge still proceeds to the
        // state transition below — confirm this best-effort behavior is intended.
        if (!this.advanceStop(vm, false, caller, account)) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Unable to stop the VM so we can't expunge it.");
            }
        }
        try {
            if (!stateTransitTo(vm, VirtualMachine.Event.ExpungeOperation, vm.getHostId())) {
                s_logger.debug("Unable to destroy the vm because it is not in the correct state: " + vm);
                return false;
            }
        } catch (NoTransitionException e) {
            s_logger.debug("Unable to destroy the vm because it is not in the correct state: " + vm);
            return false;
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Destroying vm " + vm);
        }
        VirtualMachineProfile<T> profile = new VirtualMachineProfileImpl<T>(vm);
        // Release the VM's network and storage artifacts before the guru's final hook.
        _networkMgr.cleanupNics(profile);
        // Clean up volumes based on the vm's instance id
        _storageMgr.cleanupVolumes(vm.getId());
        VirtualMachineGuru<T> guru = getVmGuru(vm);
        guru.finalizeExpunge(vm);
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Expunged " + vm);
        }
        return true;
    }

    // Manager lifecycle start: schedule the periodic work-item cleanup and cancel
    // work items left over from this node's previous run.
    @Override
    public boolean start() {
        _executor.scheduleAtFixedRate(new CleanupTask(), _cleanupInterval, _cleanupInterval, TimeUnit.SECONDS);
        cancelWorkItems(_nodeId);
        return true;
    }

    // Manager lifecycle stop: nothing to tear down here.
    @Override
    public boolean stop() {
        return true;
    }

    /**
     * Manager configuration: reads retry/wait/timeout settings from the configuration
     * table and wires static component references. (Method continues in the file with
     * the remaining timing parameters and executor setup.)
     */
    @Override
    public boolean configure(String name, Map<String, Object> xmlParams) throws ConfigurationException {
        _name = name;
        ComponentLocator locator = ComponentLocator.getCurrentLocator();
        ConfigurationDao configDao = locator.getDao(ConfigurationDao.class);
        Map<String, String> params = configDao.getConfiguration(xmlParams);
        _retry = NumbersUtil.parseInt(params.get(Config.StartRetry.key()), 10);
        ReservationContextImpl.setComponents(_userDao, _domainDao, _accountDao);
VirtualMachineProfileImpl.setComponents(_offeringDao, _templateDao, _accountDao); _cancelWait = NumbersUtil.parseLong(params.get(Config.VmOpCancelInterval.key()), 3600); _cleanupWait = NumbersUtil.parseLong(params.get(Config.VmOpCleanupWait.key()), 3600); _cleanupInterval = NumbersUtil.parseLong(params.get(Config.VmOpCleanupInterval.key()), 86400) * 1000; _opWaitInterval = NumbersUtil.parseLong(params.get(Config.VmOpWaitInterval.key()), 120) * 1000; _lockStateRetry = NumbersUtil.parseInt(params.get(Config.VmOpLockStateRetry.key()), 5); _operationTimeout = NumbersUtil.parseInt(params.get(Config.Wait.key()), 1800) * 2; _executor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("Vm-Operations-Cleanup")); _nodeId = _clusterMgr.getManagementNodeId(); _agentMgr.registerForHostEvents(this, true, true, true); return true; } @Override public String getName() { return _name; } protected VirtualMachineManagerImpl() { setStateMachine(); } @Override public <T extends VMInstanceVO> T start(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account) throws InsufficientCapacityException, ResourceUnavailableException { return start(vm, params, caller, account, null); } @Override public <T extends VMInstanceVO> T start(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account, DeploymentPlan planToDeploy) throws InsufficientCapacityException, ResourceUnavailableException { try { return advanceStart(vm, params, caller, account, planToDeploy); } catch (ConcurrentOperationException e) { throw new CloudRuntimeException("Unable to start a VM due to concurrent operation", e); } } protected boolean checkWorkItems(VMInstanceVO vm, State state) throws ConcurrentOperationException { while (true) { ItWorkVO vo = _workDao.findByOutstandingWork(vm.getId(), state); if (vo == null) { if (s_logger.isDebugEnabled()) { s_logger.debug("Unable to find work for VM: " + vm + " and state: " + state); } return true; } if (vo.getStep() == 
Step.Done) { if (s_logger.isDebugEnabled()) { s_logger.debug("Work for " + vm + " is " + vo.getStep()); } return true; } if (vo.getSecondsTaskIsInactive() > _cancelWait) { s_logger.warn("The task item for vm " + vm + " has been inactive for " + vo.getSecondsTaskIsInactive()); return false; } try { Thread.sleep(_opWaitInterval); } catch (InterruptedException e) { s_logger.info("Waiting for " + vm + " but is interrupted"); throw new ConcurrentOperationException("Waiting for " + vm + " but is interrupted"); } s_logger.debug("Waiting some more to make sure there's no activity on " + vm); } } @DB protected <T extends VMInstanceVO> Ternary<T, ReservationContext, ItWorkVO> changeToStartState(VirtualMachineGuru<T> vmGuru, T vm, User caller, Account account) throws ConcurrentOperationException { long vmId = vm.getId(); ItWorkVO work = new ItWorkVO(UUID.randomUUID().toString(), _nodeId, State.Starting, vm.getType(), vm.getId()); int retry = _lockStateRetry; while (retry Transaction txn = Transaction.currentTxn(); Ternary<T, ReservationContext, ItWorkVO> result = null; txn.start(); try { Journal journal = new Journal.LogJournal("Creating " + vm, s_logger); work = _workDao.persist(work); ReservationContextImpl context = new ReservationContextImpl(work.getId(), journal, caller, account); if (stateTransitTo(vm, Event.StartRequested, null, work.getId())) { if (s_logger.isDebugEnabled()) { s_logger.debug("Successfully transitioned to start state for " + vm + " reservation id = " + work.getId()); } result = new Ternary<T, ReservationContext, ItWorkVO>(vmGuru.findById(vmId), context, work); txn.commit(); return result; } } catch (NoTransitionException e) { if (s_logger.isDebugEnabled()) { s_logger.debug("Unable to transition into Starting state due to " + e.getMessage()); } } finally { if (result == null) { txn.rollback(); } } VMInstanceVO instance = _vmDao.findById(vmId); if (instance == null) { throw new ConcurrentOperationException("Unable to acquire lock on " + vm); } if 
(s_logger.isDebugEnabled()) { s_logger.debug("Determining why we're unable to update the state to Starting for " + instance + ". Retry=" + retry); } State state = instance.getState(); if (state == State.Running) { if (s_logger.isDebugEnabled()) { s_logger.debug("VM is already started: " + vm); } return null; } if (state.isTransitional()) { if (!checkWorkItems(vm, state)) { throw new ConcurrentOperationException("There are concurrent operations on " + vm); } else { continue; } } if (state != State.Stopped) { s_logger.debug("VM " + vm + " is not in a state to be started: " + state); return null; } } throw new ConcurrentOperationException("Unable to change the state of " + vm); } @DB protected <T extends VMInstanceVO> boolean changeState(T vm, Event event, Long hostId, ItWorkVO work, Step step) throws NoTransitionException { Transaction txn = Transaction.currentTxn(); txn.start(); if (!stateTransitTo(vm, event, hostId)) { return false; } _workDao.updateStep(work, step); txn.commit(); return true; } @Override public <T extends VMInstanceVO> T advanceStart(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException { return advanceStart(vm, params, caller, account, null); } @Override public <T extends VMInstanceVO> T advanceStart(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account, DeploymentPlan planToDeploy) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException { long vmId = vm.getId(); VirtualMachineGuru<T> vmGuru; if (vm.getHypervisorType() == HypervisorType.BareMetal) { vmGuru = getBareMetalVmGuru(vm); } else { vmGuru = getVmGuru(vm); } vm = vmGuru.findById(vm.getId()); Ternary<T, ReservationContext, ItWorkVO> start = changeToStartState(vmGuru, vm, caller, account); if (start == null) { return vmGuru.findById(vmId); } vm = start.first(); ReservationContext ctx = 
// (continuation of advanceStart(): planning phase)
start.second();
        ItWorkVO work = start.third();
        T startedVm = null;
        ServiceOfferingVO offering = _offeringDao.findById(vm.getServiceOfferingId());
        VMTemplateVO template = _templateDao.findById(vm.getTemplateId());
        // Default plan: just the VM's zone/pod; a caller-supplied plan overrides it.
        DataCenterDeployment plan = new DataCenterDeployment(vm.getDataCenterIdToDeployIn(), vm.getPodIdToDeployIn(), null, null, null);
        if (planToDeploy != null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("advanceStart: DeploymentPlan is provided, using that plan to deploy");
            }
            plan = (DataCenterDeployment) planToDeploy;
        }
        HypervisorGuru hvGuru = _hvGuruMgr.getGuru(vm.getHypervisorType());
        boolean canRetry = true;
        try {
            Journal journal = start.second().getJournal();
            ExcludeList avoids = new ExcludeList();
            // Domain routers: avoid hosts that already run one of the account's routers.
            if (vm.getType().equals(VirtualMachine.Type.DomainRouter)) {
                List<DomainRouterVO> routers = _routerDao.findBy(vm.getAccountId(), vm.getDataCenterIdToDeployIn());
                for (DomainRouterVO router : routers) {
                    if (router.hostId != null) {
                        avoids.addHost(router.hostId);
                        s_logger.info("Router: try to avoid host " + router.hostId);
                    }
                }
            }
            int retry = _retry;
            while (retry-- != 0) { // It's != so that it can match -1.
                // edit plan if this vm's ROOT volume is in READY state already
                List<VolumeVO> vols = _volsDao.findReadyRootVolumesByInstance(vm.getId());
                for (VolumeVO vol : vols) {
                    // make sure if the templateId is unchanged. If it is changed, let planner
                    // reassign pool for the volume even if it ready.
                    Long volTemplateId = vol.getTemplateId();
                    if (volTemplateId != null && volTemplateId.longValue() != template.getId()) {
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug(vol + " of " + vm + " is READY, but template ids don't match, let the planner reassign a new pool");
                        }
                        continue;
                    }
                    StoragePoolVO pool = _storagePoolDao.findById(vol.getPoolId());
                    if (!pool.isInMaintenance()) {
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug("Root volume is ready, need to place VM in volume's cluster");
                        }
                        long rootVolDcId = pool.getDataCenterId();
                        Long rootVolPodId = pool.getPodId();
                        Long rootVolClusterId = pool.getClusterId();
                        if (planToDeploy != null) {
                            // The caller's plan must agree with the cluster that already
                            // holds the ready ROOT volume; otherwise the plan is unsatisfiable.
                            Long clusterIdSpecified = planToDeploy.getClusterId();
                            if (clusterIdSpecified != null && rootVolClusterId != null) {
                                if (rootVolClusterId.longValue() != clusterIdSpecified.longValue()) {
                                    //cannot satisfy the plan passed in to the planner
                                    if (s_logger.isDebugEnabled()) {
                                        s_logger.debug("Cannot satisfy the deployment plan passed in since the ready Root volume is in different cluster. volume's cluster: " + rootVolClusterId + ", cluster specified: " + clusterIdSpecified);
                                    }
                                    throw new ResourceUnavailableException("Root volume is ready in different cluster, Deployment plan provided cannot be satisfied, unable to create a deployment for " + vm, Cluster.class, clusterIdSpecified);
                                }
                            }
                            plan = new DataCenterDeployment(planToDeploy.getDataCenterId(), planToDeploy.getPodId(), planToDeploy.getClusterId(), planToDeploy.getHostId(), vol.getPoolId());
                        } else {
                            // Pin the plan to the pool that already holds the ready ROOT volume.
                            plan = new DataCenterDeployment(rootVolDcId, rootVolPodId, rootVolClusterId, null, vol.getPoolId());
                            if (s_logger.isDebugEnabled()) {
                                s_logger.debug(vol + " is READY, changing deployment plan to use this pool's dcId: " + rootVolDcId + " , podId: " + rootVolPodId + " , and clusterId: " + rootVolClusterId);
                            }
                        }
                    }
                }
                VirtualMachineProfileImpl<T> vmProfile = new VirtualMachineProfileImpl<T>(vm, template, offering, account, params);
                DeployDestination dest = null;
                // First planner that can handle the profile and finds a destination wins.
                for (DeploymentPlanner planner : _planners) {
                    if (planner.canHandle(vmProfile, plan, avoids)) {
                        dest = planner.plan(vmProfile, plan, avoids);
                    } else {
                        continue;
                    }
                    if (dest != null) {
                        avoids.addHost(dest.getHost().getId());
                        journal.record("Deployment found ", vmProfile, dest);
                        break;
                    }
                }
                if (dest == null) {
                    //see if we can allocate the router without limitation
                    if (vm.getType().equals(VirtualMachine.Type.DomainRouter)) {
                        // Retry for routers with the avoid-set cleared.
                        avoids = new ExcludeList();
                        s_logger.info("Router: cancel avoids ");
                        for (DeploymentPlanner planner : _planners) {
                            if (planner.canHandle(vmProfile, plan, avoids)) {
                                dest = planner.plan(vmProfile, plan, avoids);
                            } else {
                                continue;
                            }
                            if (dest != null) {
                                avoids.addHost(dest.getHost().getId());
                                journal.record("Deployment found ", vmProfile, dest);
                                break;
                            }
                        }
                    }
                    if (dest == null) {
                        throw new InsufficientServerCapacityException("Unable to create a deployment for " + vmProfile + " due to lack of VLAN available.", DataCenter.class, plan.getDataCenterId());
                    }
                }
                long destHostId = dest.getHost().getId();
                try {
                    if (!changeState(vm,
// (continuation of advanceStart(): execution phase for the chosen destination)
Event.OperationRetry, destHostId, work, Step.Prepare)) {
                        throw new ConcurrentOperationException("Unable to update the state of the Virtual Machine");
                    }
                } catch (NoTransitionException e1) {
                    throw new ConcurrentOperationException(e1.getMessage());
                }
                try {
                    // Prepare nics and (except bare metal) storage before building the StartCommand.
                    _networkMgr.prepare(vmProfile, dest, ctx);
                    if (vm.getHypervisorType() != HypervisorType.BareMetal) {
                        _storageMgr.prepare(vmProfile, dest);
                    }
                    vmGuru.finalizeVirtualMachineProfile(vmProfile, dest, ctx);
                    VirtualMachineTO vmTO = hvGuru.implement(vmProfile);
                    Commands cmds = new Commands(OnError.Stop);
                    cmds.addCommand(new StartCommand(vmTO));
                    vmGuru.finalizeDeployment(cmds, vmProfile, dest, ctx);
                    vm.setPodId(dest.getPod().getId());
                    // Re-read the work item to detect concurrent modification before sending.
                    work = _workDao.findById(work.getId());
                    if (work == null || work.getStep() != Step.Prepare) {
                        throw new ConcurrentOperationException("Work steps have been changed: " + work);
                    }
                    _workDao.updateStep(work, Step.Starting);
                    _agentMgr.send(destHostId, cmds);
                    _workDao.updateStep(work, Step.Started);
                    Answer startAnswer = cmds.getAnswer(StartAnswer.class);
                    if (startAnswer != null && startAnswer.getResult()) {
                        if (vmGuru.finalizeStart(vmProfile, destHostId, cmds, ctx)) {
                            if (!changeState(vm, Event.OperationSucceeded, destHostId, work, Step.Done)) {
                                throw new ConcurrentOperationException("Unable to transition to a new state.");
                            }
                            startedVm = vm;
                            if (s_logger.isDebugEnabled()) {
                                s_logger.debug("Start completed for VM " + vm);
                            }
                            return startedVm;
                        } else {
                            // Guru rejected the started VM: stop it again on the destination host.
                            if (s_logger.isDebugEnabled()) {
                                s_logger.info("The guru did not like the answers so stopping " + vm);
                            }
                            StopCommand cmd = new StopCommand(vm.getInstanceName());
                            StopAnswer answer = (StopAnswer) _agentMgr.easySend(destHostId, cmd);
                            if (answer == null || !answer.getResult()) {
                                s_logger.warn("Unable to stop " + vm + " due to " + (answer != null ? answer.getDetails() : "no answers"));
                                canRetry = false;
                                // Hand the stop off to HA since we could not do it ourselves.
                                _haMgr.scheduleStop(vm, destHostId, WorkType.ForceStop);
                                throw new ExecutionException("Unable to stop " + vm + " so we are unable to retry the start operation");
                            }
                        }
                    }
                    s_logger.info("Unable to start VM on " + dest.getHost() + " due to " + (startAnswer == null ? " no start answer" : startAnswer.getDetails()));
                } catch (OperationTimedoutException e) {
                    s_logger.debug("Unable to send the start command to host " + dest.getHost());
                    if (e.isActive()) {
                        // Command may still be executing on the host: let HA verify the stop.
                        _haMgr.scheduleStop(vm, destHostId, WorkType.CheckStop);
                    }
                    canRetry = false;
                    throw new AgentUnavailableException("Unable to start " + vm.getHostName(), destHostId, e);
                } catch (ResourceUnavailableException e) {
                    s_logger.info("Unable to contact resource.", e);
                    // avoids.add(e) returns false when the failure cannot be retried elsewhere.
                    if (!avoids.add(e)) {
                        if (e.getScope() == Volume.class || e.getScope() == Nic.class) {
                            throw e;
                        } else {
                            s_logger.warn("unexpected ResourceUnavailableException : " + e.getScope().getName(), e);
                            throw e;
                        }
                    }
                } catch (InsufficientCapacityException e) {
                    s_logger.info("Insufficient capacity ", e);
                    if (!avoids.add(e)) {
                        if (e.getScope() == Volume.class || e.getScope() == Nic.class) {
                            throw e;
                        } else {
                            s_logger.warn("unexpected InsufficientCapacityException : " + e.getScope().getName(), e);
                        }
                    }
                } catch (Exception e) {
                    s_logger.error("Failed to start instance " + vm, e);
                    throw new AgentUnavailableException("Unable to start instance", destHostId, e);
                } finally {
                    // Release the reservation for this attempt if we are going to retry.
                    if (startedVm == null && canRetry) {
                        _workDao.updateStep(work, Step.Release);
                        cleanup(vmGuru, vmProfile, work, Event.OperationFailed, false, caller, account);
                    }
                }
            }
        } finally {
            if (startedVm == null) {
                // decrement only for user VM's and newly created VM
                if (vm.getType().equals(VirtualMachine.Type.User) && (vm.getLastHostId() == null)) {
                    _accountMgr.decrementResourceCount(vm.getAccountId(), ResourceType.user_vm);
                }
                if (canRetry) {
                    try {
                        changeState(vm, Event.OperationFailed, null, work, Step.Done);
                    } catch (NoTransitionException e) {
                        throw new ConcurrentOperationException(e.getMessage());
                    }
                }
            }
        }
        return startedVm;
    }

    /**
     * Non-forced stop; wraps timeout in AgentUnavailableException and concurrency
     * failures in CloudRuntimeException.
     */
    @Override
    public <T extends VMInstanceVO> boolean stop(T vm, User user, Account account) throws ResourceUnavailableException {
        try {
            return advanceStop(vm, false, user, account);
        } catch (OperationTimedoutException e) {
            throw new AgentUnavailableException("Unable to stop vm because the operation to stop timed out", vm.getHostId(), e);
        } catch (ConcurrentOperationException e) {
            throw new CloudRuntimeException("Unable to stop vm because of a concurrent operation", e);
        }
    }

    /**
     * Sends a StopCommand to the VM's host and lets the guru finalize. With force=true,
     * agent-unavailable/timeout failures are treated as success (best-effort stop).
     */
    protected <T extends VMInstanceVO> boolean sendStop(VirtualMachineGuru<T> guru, VirtualMachineProfile<T> profile, boolean force) {
        VMInstanceVO vm = profile.getVirtualMachine();
        StopCommand stop = new StopCommand(vm, vm.getInstanceName(), null);
        try {
            Answer answer = _agentMgr.send(vm.getHostId(), stop);
            if (!answer.getResult()) {
                s_logger.debug("Unable to stop VM due to " + answer.getDetails());
                return false;
            }
            guru.finalizeStop(profile, (StopAnswer) answer);
        } catch (AgentUnavailableException e) {
            if (!force) {
                return false;
            }
        } catch (OperationTimedoutException e) {
            if (!force) {
                return false;
            }
        }
        return true;
    }

    /**
     * Cleans up a VM stuck in a transitional state (Starting/Stopping/Migrating) or
     * Running: stops it where needed based on the recorded work step, then releases its
     * network and storage resources. (Method continues in the file.)
     */
    protected <T extends VMInstanceVO> boolean cleanup(VirtualMachineGuru<T> guru, VirtualMachineProfile<T> profile, ItWorkVO work, Event event, boolean force, User user, Account account) {
        T vm = profile.getVirtualMachine();
        State state = vm.getState();
        s_logger.debug("Cleaning up resources for the vm " + vm + " in " + state + " state");
        if (state == State.Starting) {
            Step step = work.getStep();
            // A non-forced cleanup must not interrupt a start that is mid-flight.
            if (step == Step.Starting && !force) {
                s_logger.warn("Unable to cleanup vm " + vm + "; work state is incorrect: " + step);
                return false;
            }
            if (step == Step.Started || step == Step.Starting) {
                if (vm.getHostId() != null) {
                    if (!sendStop(guru, profile, force)) {
                        s_logger.warn("Failed to stop vm " + vm + " in " + State.Starting + " state as a part of cleanup process");
                        return false;
                    }
                }
            }
            if (step != Step.Release && step !=
Step.Prepare && step != Step.Started && step != Step.Starting) { s_logger.debug("Cleanup is not needed for vm " + vm + "; work state is incorrect: " + step); return true; } } else if (state == State.Stopping) { if (vm.getHostId() != null) { if (!sendStop(guru, profile, force)) { s_logger.warn("Failed to stop vm " + vm + " in " + State.Stopping + " state as a part of cleanup process"); return false; } } } else if (state == State.Migrating) { if (vm.getHostId() != null) { if (!sendStop(guru, profile, force)) { s_logger.warn("Failed to stop vm " + vm + " in " + State.Migrating + " state as a part of cleanup process"); return false; } } if (vm.getLastHostId() != null) { if (!sendStop(guru, profile, force)) { s_logger.warn("Failed to stop vm " + vm + " in " + State.Migrating + " state as a part of cleanup process"); return false; } } } else if (state == State.Running) { if (!sendStop(guru, profile, force)) { s_logger.warn("Failed to stop vm " + vm + " in " + State.Running + " state as a part of cleanup process"); return false; } } _networkMgr.release(profile, force); _storageMgr.release(profile); s_logger.debug("Successfully cleanued up resources for the vm " + vm + " in " + state + " state"); return true; } @Override public <T extends VMInstanceVO> boolean advanceStop(T vm, boolean forced, User user, Account account) throws AgentUnavailableException, OperationTimedoutException, ConcurrentOperationException { State state = vm.getState(); if (state == State.Stopped) { if (s_logger.isDebugEnabled()) { s_logger.debug("VM is already stopped: " + vm); } return true; } if (state == State.Destroyed || state == State.Expunging || state == State.Error) { if (s_logger.isDebugEnabled()) { s_logger.debug("Stopped called on " + vm + " but the state is " + state); } return true; } Long hostId = vm.getHostId(); if (hostId == null) { try { stateTransitTo(vm, Event.AgentReportStopped, null, null); } catch (NoTransitionException e) { s_logger.warn(e.getMessage()); } return true; } 
// (continuation of advanceStop(): transition state, send StopCommand, release resources)
VirtualMachineGuru<T> vmGuru = getVmGuru(vm);
        try {
            // Forced stops bypass the normal StopRequested transition.
            if (!stateTransitTo(vm, forced ? Event.AgentReportStopped : Event.StopRequested, vm.getHostId(), null)) {
                throw new ConcurrentOperationException("VM is being operated on.");
            }
        } catch (NoTransitionException e1) {
            throw new CloudRuntimeException("We cannot stop " + vm + " when it is in state " + vm.getState());
        }
        VirtualMachineProfile<T> profile = new VirtualMachineProfileImpl<T>(vm);
        if ((vm.getState() == State.Starting || vm.getState() == State.Stopping || vm.getState() == State.Migrating) && forced) {
            // Forced stop of a transitional VM: clean up via its outstanding work item.
            ItWorkVO work = _workDao.findByOutstandingWork(vm.getId(), vm.getState());
            if (work != null) {
                if (cleanup(vmGuru, new VirtualMachineProfileImpl<T>(vm), work, Event.StopRequested, forced, user, account)) {
                    try {
                        return stateTransitTo(vm, Event.AgentReportStopped, null);
                    } catch (NoTransitionException e) {
                        s_logger.warn("Unable to cleanup " + vm);
                        return false;
                    }
                }
            }
        }
        if (vm.getHostId() != null) {
            String routerPrivateIp = null;
            if (vm.getType() == VirtualMachine.Type.DomainRouter) {
                routerPrivateIp = vm.getPrivateIpAddress();
            }
            StopCommand stop = new StopCommand(vm, vm.getInstanceName(), null, routerPrivateIp);
            boolean stopped = false;
            StopAnswer answer = null;
            try {
                answer = (StopAnswer) _agentMgr.send(vm.getHostId(), stop);
                stopped = answer.getResult();
                if (!stopped) {
                    throw new CloudRuntimeException("Unable to stop the virtual machine due to " + answer.getDetails());
                }
                vmGuru.finalizeStop(profile, answer);
            } catch (AgentUnavailableException e) {
                // Intentionally swallowed: the !stopped path in finally decides the outcome.
            } catch (OperationTimedoutException e) {
                // Intentionally swallowed: the !stopped path in finally decides the outcome.
            } finally {
                if (!stopped) {
                    if (!forced) {
                        s_logger.warn("Unable to stop vm " + vm);
                        try {
                            stateTransitTo(vm, Event.OperationFailed, vm.getHostId());
                        } catch (NoTransitionException e) {
                            s_logger.warn("Unable to transition the state " + vm);
                        }
                        return false;
                    } else {
                        s_logger.warn("Unable to actually stop " + vm + " but continue with release because it's a force stop");
                        vmGuru.finalizeStop(profile, answer);
                    }
                }
            }
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug(vm + " is stopped on the host.  Proceeding to release resource held.");
        }
        try {
            _networkMgr.release(profile, forced);
            s_logger.debug("Successfully released network resources for the vm " + vm);
        } catch (Exception e) {
            s_logger.warn("Unable to release some network resources.", e);
        }
        try {
            if (vm.getHypervisorType() != HypervisorType.BareMetal) {
                _storageMgr.release(profile);
                s_logger.debug("Successfully released storage resources for the vm " + vm);
            }
        } catch (Exception e) {
            s_logger.warn("Unable to release storage resources.", e);
        }
        vm.setReservationId(null);
        try {
            return stateTransitTo(vm, Event.OperationSucceeded, null);
        } catch (NoTransitionException e) {
            s_logger.warn(e.getMessage());
            return false;
        }
    }

    private void setStateMachine() {
        _stateMachine = VirtualMachine.State.getStateMachine();
    }

    /** State transition that also records the reservation id on the VM row. */
    protected boolean stateTransitTo(VMInstanceVO vm, VirtualMachine.Event e, Long hostId, String reservationId) throws NoTransitionException {
        vm.setReservationId(reservationId);
        return _stateMachine.transitTo(vm, e, hostId, _vmDao);
    }

    /**
     * State transition that maintains lastHostId: a successful start records the new
     * host; a successful stop records the host the VM just left.
     */
    @Override
    public boolean stateTransitTo(VMInstanceVO vm, VirtualMachine.Event e, Long hostId) throws NoTransitionException {
        State oldState = vm.getState();
        if (oldState == State.Starting) {
            if (e == Event.OperationSucceeded) {
                vm.setLastHostId(hostId);
            }
        } else if (oldState == State.Stopping) {
            if (e == Event.OperationSucceeded) {
                vm.setLastHostId(vm.getHostId());
            }
        }
        return _stateMachine.transitTo(vm, e, hostId, _vmDao);
    }

    /** Expunges the VM's nics, then removes its DB row. */
    @Override
    public <T extends VMInstanceVO> boolean remove(T vm, User user, Account caller) {
        // expunge the corresponding nics
        VirtualMachineProfile<T> profile = new VirtualMachineProfileImpl<T>(vm);
        _networkMgr.expungeNics(profile);
        s_logger.trace("Nics of the vm " + vm + " are expunged successfully");
        return _vmDao.remove(vm.getId());
    }

    /**
     * Stops the VM (non-forced) and transitions it to Destroyed. (Method continues
     * in the file.)
     */
    @Override
    public <T extends VMInstanceVO> boolean destroy(T vm, User user, Account caller) throws AgentUnavailableException,
// (continuation of destroy())
OperationTimedoutException, ConcurrentOperationException {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Destroying vm " + vm);
        }
        if (vm == null || vm.getState() == State.Destroyed || vm.getState() == State.Expunging || vm.getRemoved() != null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Unable to find vm or vm is destroyed: " + vm);
            }
            return true;
        }
        if (!advanceStop(vm, false, user, caller)) {
            s_logger.debug("Unable to stop " + vm);
            return false;
        }
        try {
            if (!stateTransitTo(vm, VirtualMachine.Event.DestroyRequested, vm.getHostId())) {
                s_logger.debug("Unable to destroy the vm because it is not in the correct state: " + vm);
                return false;
            }
        } catch (NoTransitionException e) {
            s_logger.debug(e.getMessage());
            return false;
        }
        return true;
    }

    /** Asks the host whether the VM is actually running there. */
    protected boolean checkVmOnHost(VirtualMachine vm, long hostId) throws AgentUnavailableException, OperationTimedoutException {
        CheckVirtualMachineAnswer answer = (CheckVirtualMachineAnswer) _agentMgr.send(hostId, new CheckVirtualMachineCommand(vm.getInstanceName()));
        if (!answer.getResult() || answer.getState() == State.Stopped) {
            return false;
        }
        return true;
    }

    /**
     * Live-migrates a Running VM to a destination in the same cluster: prepares the
     * destination (nics/storage/PrepareForMigrationCommand), then issues the migrate.
     * Returns null on a failed migrate command. (Method continues in the file.)
     */
    @Override
    public <T extends VMInstanceVO> T migrate(T vm, long srcHostId, DeployDestination dest) throws ResourceUnavailableException, ConcurrentOperationException,
            ManagementServerException, VirtualMachineMigrationException {
        s_logger.info("Migrating " + vm + " to " + dest);
        long dstHostId = dest.getHost().getId();
        Host fromHost = _hostDao.findById(srcHostId);
        if (fromHost == null) {
            s_logger.info("Unable to find the host to migrate from: " + srcHostId);
            throw new CloudRuntimeException("Unable to find the host to migrate from: " + srcHostId);
        }
        // Only intra-cluster migration is supported here.
        if (fromHost.getClusterId().longValue() != dest.getCluster().getId()) {
            s_logger.info("Source and destination host are not in same cluster, unable to migrate to host: " + dest.getHost().getId());
            throw new CloudRuntimeException("Source and destination host are not in same cluster, unable to migrate to host: " + dest.getHost().getId());
        }
        VirtualMachineGuru<T> vmGuru = getVmGuru(vm);
        long vmId = vm.getId();
        vm = vmGuru.findById(vmId);
        if (vm == null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Unable to find the vm " + vm);
            }
            throw new ManagementServerException("Unable to find a virtual machine with id " + vmId);
        }
        if (vm.getState() != State.Running) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("VM is not Running, unable to migrate the vm " + vm);
            }
            throw new VirtualMachineMigrationException("VM is not Running, unable to migrate the vm currently " + vm);
        }
        // Pick the alert category matching the VM type (used if migration fails).
        short alertType = AlertManager.ALERT_TYPE_USERVM_MIGRATE;
        if (VirtualMachine.Type.DomainRouter.equals(vm.getType())) {
            alertType = AlertManager.ALERT_TYPE_DOMAIN_ROUTER_MIGRATE;
        } else if (VirtualMachine.Type.ConsoleProxy.equals(vm.getType())) {
            alertType = AlertManager.ALERT_TYPE_CONSOLE_PROXY_MIGRATE;
        }
        VirtualMachineProfile<VMInstanceVO> profile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);
        _networkMgr.prepareNicForMigration(profile, dest);
        _storageMgr.prepareForMigration(profile, dest);
        HypervisorGuru hvGuru = _hvGuruMgr.getGuru(vm.getHypervisorType());
        VirtualMachineTO to = hvGuru.implement(profile);
        PrepareForMigrationCommand pfmc = new PrepareForMigrationCommand(to);
        // Record the in-flight migration as a work item against the destination host.
        ItWorkVO work = new ItWorkVO(UUID.randomUUID().toString(), _nodeId, State.Migrating, vm.getType(), vm.getId());
        work.setStep(Step.Prepare);
        work.setResourceType(ItWorkVO.ResourceType.Host);
        work.setResourceId(dstHostId);
        work = _workDao.persist(work);
        PrepareForMigrationAnswer pfma = null;
        try {
            pfma = (PrepareForMigrationAnswer) _agentMgr.send(dstHostId, pfmc);
            if (!pfma.getResult()) {
                String msg = "Unable to prepare for migration due to " + pfma.getDetails();
                pfma = null;
                throw new AgentUnavailableException(msg, dstHostId);
            }
        } catch (OperationTimedoutException e1) {
            throw new AgentUnavailableException("Operation timed out", dstHostId);
        } finally {
            // pfma == null means prepare failed: close out the work item.
            if (pfma == null) {
                work.setStep(Step.Done);
                _workDao.update(work.getId(), work);
            }
        }
// (continuation of migrate(): issue the MigrateCommand and verify the result)
vm.setLastHostId(srcHostId);
        try {
            if (vm == null || vm.getHostId() == null || vm.getHostId() != srcHostId || !changeState(vm, Event.MigrationRequested, dstHostId, work, Step.Migrating)) {
                s_logger.info("Migration cancelled because state has changed: " + vm);
                throw new ConcurrentOperationException("Migration cancelled because state has changed: " + vm);
            }
        } catch (NoTransitionException e1) {
            s_logger.info("Migration cancelled because " + e1.getMessage());
            throw new ConcurrentOperationException("Migration cancelled because " + e1.getMessage());
        }
        boolean migrated = false;
        try {
            // Windows guests need special handling in the migrate command.
            boolean isWindows = _guestOsCategoryDao.findById(_guestOsDao.findById(vm.getGuestOSId()).getCategoryId()).getName().equalsIgnoreCase("Windows");
            MigrateCommand mc = new MigrateCommand(vm.getInstanceName(), dest.getHost().getPrivateIpAddress(), isWindows);
            try {
                MigrateAnswer ma = (MigrateAnswer) _agentMgr.send(vm.getLastHostId(), mc);
                if (!ma.getResult()) {
                    s_logger.error("Unable to migrate due to " + ma.getDetails());
                    return null;
                }
            } catch (OperationTimedoutException e) {
                if (e.isActive()) {
                    s_logger.warn("Active migration command so scheduling a restart for " + vm);
                    _haMgr.scheduleRestart(vm, true);
                }
                throw new AgentUnavailableException("Operation timed out on migrating " + vm, dstHostId);
            }
            try {
                if (!changeState(vm, VirtualMachine.Event.OperationSucceeded, dstHostId, work, Step.Started)) {
                    throw new ConcurrentOperationException("Unable to change the state for " + vm);
                }
            } catch (NoTransitionException e1) {
                throw new ConcurrentOperationException("Unable to change state due to " + e1.getMessage());
            }
            try {
                // Confirm the VM actually landed on the destination host.
                if (!checkVmOnHost(vm, dstHostId)) {
                    s_logger.error("Unable to complete migration for " + vm);
                    try {
                        _agentMgr.send(srcHostId, new Commands(cleanup(vm.getInstanceName())), null);
                    } catch (AgentUnavailableException e) {
                        s_logger.error("AgentUnavailableException while cleanup on source host: " + srcHostId);
                    }
                    cleanup(vmGuru, new VirtualMachineProfileImpl<T>(vm), work, Event.AgentReportStopped, true, _accountMgr.getSystemUser(), _accountMgr.getSystemAccount());
                    return null;
                }
            } catch (OperationTimedoutException e) {
                // NOTE(review): check-on-host timeout is ignored and migration is treated
                // as successful — confirm this optimistic behavior is intended.
            }
            migrated = true;
            return vm;
        } finally {
            if (!migrated) {
                s_logger.info("Migration was unsuccessful.  Cleaning up: " + vm);
                _alertMgr.sendAlert(alertType, fromHost.getDataCenterId(), fromHost.getPodId(), "Unable to migrate vm " + vm.getInstanceName() + " from host " + fromHost.getName() + " in zone " + dest.getDataCenter().getName() + " and pod " + dest.getPod().getName(), "Migrate Command failed.  Please check logs.");
                try {
                    _agentMgr.send(dstHostId, new Commands(cleanup(vm.getInstanceName())), null);
                } catch (AgentUnavailableException ae) {
                    s_logger.info("Looks like the destination Host is unavailable for cleanup");
                }
                try {
                    stateTransitTo(vm, Event.OperationFailed, srcHostId);
                } catch (NoTransitionException e) {
                    s_logger.warn(e.getMessage());
                }
            }
            work.setStep(Step.Done);
            _workDao.update(work.getId(), work);
        }
    }

    /**
     * Hands unfinished work items of a (restarted/dead) node to the HA manager:
     * Starting -> restart, Stopping -> check-stop, Migrating -> migration.
     */
    protected void cancelWorkItems(long nodeId) {
        GlobalLock scanLock = GlobalLock.getInternLock("vmmgr.cancel.workitem");
        try {
            if (scanLock.lock(3)) {
                try {
                    List<ItWorkVO> works = _workDao.listWorkInProgressFor(nodeId);
                    for (ItWorkVO work : works) {
                        s_logger.info("Handling unfinished work item: " + work);
                        try {
                            VMInstanceVO vm = _vmDao.findById(work.getInstanceId());
                            if (vm != null) {
                                if (work.getType() == State.Starting) {
                                    _haMgr.scheduleRestart(vm, true);
                                } else if (work.getType() == State.Stopping) {
                                    _haMgr.scheduleStop(vm, vm.getHostId(), WorkType.CheckStop);
                                } else if (work.getType() == State.Migrating) {
                                    _haMgr.scheduleMigration(vm);
                                }
                            }
                            work.setStep(Step.Done);
                            _workDao.update(work.getId(), work);
                        } catch (Exception e) {
                            // Keep processing the remaining work items even if one fails.
                            s_logger.error("Error while handling " + work, e);
                        }
                    }
                } finally {
                    scanLock.unlock();
                }
            }
        } finally {
            scanLock.releaseRef();
        }
    }

    /**
     * Migrates the VM off its current host, trying planner destinations until one
     * succeeds; falls back to a forced stop if no migration works. (Method continues
     * in the file.)
     */
    @Override
    public boolean migrateAway(VirtualMachine.Type vmType, long vmId, long srcHostId) throws InsufficientServerCapacityException,
VirtualMachineMigrationException {
    // migrateAway: evacuate the VM from srcHostId by repeatedly asking the planners for an
    // alternative destination; if every attempt fails, the VM is force-stopped instead.
    VirtualMachineGuru<? extends VMInstanceVO> vmGuru = _vmGurus.get(vmType);
    VMInstanceVO vm = vmGuru.findById(vmId);
    if (vm == null) {
        s_logger.debug("Unable to find a VM for " + vmId);
        return true;
    }
    VirtualMachineProfile<VMInstanceVO> profile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);
    Long hostId = vm.getHostId();
    if (hostId == null) {
        s_logger.debug("Unable to migrate because the VM doesn't have a host id: " + vm);
        return true;
    }
    Host host = _hostDao.findById(hostId);
    // Stay within the same zone/pod/cluster, but never consider the source host again.
    DataCenterDeployment plan = new DataCenterDeployment(host.getDataCenterId(), host.getPodId(), host.getClusterId(), null, null);
    ExcludeList excludes = new ExcludeList();
    excludes.addHost(hostId);
    DeployDestination dest = null;
    while (true) {
        for (DeploymentPlanner planner : _planners) {
            dest = planner.plan(profile, plan, excludes);
            if (dest != null) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Planner " + planner + " found " + dest + " for migrating to.");
                }
                break;
            }
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Planner " + planner + " was unable to find anything.");
            }
        }
        if (dest == null) {
            throw new InsufficientServerCapacityException("Unable to find a server to migrate to.", host.getClusterId());
        }
        // Each candidate is excluded after use so the next iteration tries a different host.
        excludes.addHost(dest.getHost().getId());
        VMInstanceVO vmInstance = null;
        try {
            vmInstance = migrate(vm, srcHostId, dest);
        } catch (ResourceUnavailableException e) {
            s_logger.debug("Unable to migrate to unavailable " + dest);
        } catch (ConcurrentOperationException e) {
            s_logger.debug("Unable to migrate VM due to: " + e.getMessage());
        } catch (ManagementServerException e) {
            s_logger.debug("Unable to migrate VM: " + e.getMessage());
        } catch (VirtualMachineMigrationException e) {
            s_logger.debug("Got VirtualMachineMigrationException, Unable to migrate: " + e.getMessage());
            if (vm.getState() == State.Starting) {
                s_logger.debug("VM seems to be still Starting, we should retry migration later");
                throw e;
            } else {
                // NOTE(review): this log line was split mid-string by the extraction; rejoined
                // with a single space.
                s_logger.debug("Unable to migrate VM, VM is not in Running or even Starting state, current state: " + vm.getState().toString());
            }
        }
        if (vmInstance != null) {
            return true;
        }
        try {
            // Could not place the VM anywhere: force-stop it so the source host can be freed.
            boolean result = advanceStop(vm, true, _accountMgr.getSystemUser(), _accountMgr.getSystemAccount());
            return result;
        } catch (ResourceUnavailableException e) {
            s_logger.debug("Unable to stop VM due to " + e.getMessage());
        } catch (ConcurrentOperationException e) {
            s_logger.debug("Unable to stop VM due to " + e.getMessage());
        } catch (OperationTimedoutException e) {
            s_logger.debug("Unable to stop VM due to " + e.getMessage());
        }
        return false;
    }
}

/** Periodic task that purges work items older than _cleanupWait via the work DAO. */
protected class CleanupTask implements Runnable {
    @Override
    public void run() {
        s_logger.trace("VM Operation Thread Running");
        try {
            _workDao.cleanup(_cleanupWait);
        } catch (Exception e) {
            s_logger.error("VM Operations failed due to ", e);
        }
    }
}

@Override
public boolean isVirtualMachineUpgradable(UserVm vm, ServiceOffering offering) {
    // Upgradable only if every registered host allocator agrees.
    Enumeration<HostAllocator> en = _hostAllocators.enumeration();
    boolean isMachineUpgradable = true;
    while (isMachineUpgradable && en.hasMoreElements()) {
        final HostAllocator allocator = en.nextElement();
        isMachineUpgradable = allocator.isVirtualMachineUpgradable(vm, offering);
    }
    return isMachineUpgradable;
}

/** Reboot entry point; wraps the checked ConcurrentOperationException of advanceReboot. */
@Override
public <T extends VMInstanceVO> T reboot(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account) throws InsufficientCapacityException, ResourceUnavailableException {
    try {
        return advanceReboot(vm, params, caller, account);
    } catch (ConcurrentOperationException e) {
        throw new CloudRuntimeException("Unable to reboot a VM due to concurrent operation", e);
    }
}

@Override
public <T extends VMInstanceVO> T advanceReboot(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException {
    T rebootedVm = null;
    DataCenter dc = _configMgr.getZone(vm.getDataCenterIdToDeployIn());
    Host host =
_hostDao.findById(vm.getHostId());
    Cluster cluster = null;
    if (host != null) {
        cluster = _configMgr.getCluster(host.getClusterId());
    }
    // NOTE(review): host.getPodId() is dereferenced here without the null check applied above.
    HostPodVO pod = _configMgr.getPod(host.getPodId());
    DeployDestination dest = new DeployDestination(dc, pod, cluster, host);
    try {
        // A plain RebootCommand to the VM's current host; success means the same VM instance
        // is returned, failure (or no answer) returns null.
        Commands cmds = new Commands(OnError.Stop);
        cmds.addCommand(new RebootCommand(vm.getInstanceName()));
        _agentMgr.send(host.getId(), cmds);
        Answer rebootAnswer = cmds.getAnswer(RebootAnswer.class);
        if (rebootAnswer != null && rebootAnswer.getResult()) {
            rebootedVm = vm;
            return rebootedVm;
        }
        s_logger.info("Unable to reboot VM " + vm + " on " + dest.getHost() + " due to " + (rebootAnswer == null ? " no reboot answer" : rebootAnswer.getDetails()));
    } catch (OperationTimedoutException e) {
        s_logger.warn("Unable to send the reboot command to host " + dest.getHost() + " for the vm " + vm + " due to operation timeout", e);
        throw new CloudRuntimeException("Failed to reboot the vm on host " + dest.getHost());
    }
    return rebootedVm;
}

/** Looks up a VM through the guru registered for its type. */
@Override
public VMInstanceVO findById(VirtualMachine.Type type, long vmId) {
    VirtualMachineGuru<? extends VMInstanceVO> guru = _vmGurus.get(type);
    return guru.findById(vmId);
}

/** Command used to clean a VM off a host: a plain StopCommand addressed by name. */
public Command cleanup(String vmName) {
    return new StopCommand(vmName);
}

/**
 * Computes fix-up commands for the VM states a host reported in its ping (delta sync).
 * VMs the server knows are reconciled via compareState(); unknown names get a cleanup
 * StopCommand.
 */
public Commands deltaSync(long hostId, Map<String, State> newStates) {
    Map<Long, AgentVmInfo> states = convertToInfos(newStates);
    Commands commands = new Commands(OnError.Continue);
    for (Map.Entry<Long, AgentVmInfo> entry : states.entrySet()) {
        AgentVmInfo info = entry.getValue();
        VMInstanceVO vm = info.vm;
        Command command = null;
        if (vm != null) {
            HypervisorGuru hvGuru = _hvGuruMgr.getGuru(vm.getHypervisorType());
            command = compareState(hostId, vm, info, false, hvGuru.trackVmHostChange());
        } else {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Cleaning up a VM that is no longer found: " + info.name);
            }
            command = cleanup(info.name);
        }
        if (command != null) {
            commands.addCommand(command);
        }
    }
    return commands;
}

/** Maps agent-reported instance names to AgentVmInfo entries keyed by VM id. */
protected Map<Long, AgentVmInfo> convertToInfos(final Map<String, State> states) {
    final HashMap<Long, AgentVmInfo> map = new HashMap<Long, AgentVmInfo>();
    if (states == null) {
        return map;
    }
    Collection<VirtualMachineGuru<? extends VMInstanceVO>> vmGurus = _vmGurus.values();
    for (Map.Entry<String, State> entry : states.entrySet()) {
        for (VirtualMachineGuru<? extends VMInstanceVO> vmGuru : vmGurus) {
            String name = entry.getKey();
            VMInstanceVO vm = vmGuru.findByName(name);
            if (vm != null) {
                map.put(vm.getId(), new AgentVmInfo(entry.getKey(), vmGuru, vm, entry.getValue()));
                break;
            }
            // Fall back to parsing the id out of the name for VMs this guru can decode but
            // no longer has a record of (vm stays null in the info).
            Long id = vmGuru.convertToId(name);
            if (id != null) {
                map.put(id, new AgentVmInfo(entry.getKey(), vmGuru, null, entry.getValue()));
                break;
            }
        }
    }
    return map;
}
/** * compareState does as its name suggests and compares the states between management server and agent. 
It returns whether * something should be cleaned up * */
// Reconciles the management-server state with the agent-reported state for one VM and
// returns a cleanup Command when the host-side instance should be stopped (null otherwise).
protected Command compareState(long hostId, VMInstanceVO vm, final AgentVmInfo info, final boolean fullSync, boolean nativeHA) {
    State agentState = info.state;
    final String agentName = info.name;
    final State serverState = vm.getState();
    final String serverName = vm.getInstanceName();
    VirtualMachineGuru<VMInstanceVO> vmGuru = getVmGuru(vm);
    Command command = null;
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("VM " + serverName + ": server state = " + serverState + " and agent state = " + agentState);
    }
    if (agentState == State.Error) {
        // An agent-side Error is treated as Stopped, plus an admin alert (the message text
        // attributes it to a storage failure).
        agentState = State.Stopped;
        short alertType = AlertManager.ALERT_TYPE_USERVM;
        if (VirtualMachine.Type.DomainRouter.equals(vm.getType())) {
            alertType = AlertManager.ALERT_TYPE_DOMAIN_ROUTER;
        } else if (VirtualMachine.Type.ConsoleProxy.equals(vm.getType())) {
            alertType = AlertManager.ALERT_TYPE_CONSOLE_PROXY;
        }
        HostPodVO podVO = _podDao.findById(vm.getPodIdToDeployIn());
        DataCenterVO dcVO = _dcDao.findById(vm.getDataCenterIdToDeployIn());
        HostVO hostVO = _hostDao.findById(vm.getHostId());
        String hostDesc = "name: " + hostVO.getName() + " (id:" + hostVO.getId() + "), availability zone: " + dcVO.getName() + ", pod: " + podVO.getName();
        _alertMgr.sendAlert(alertType, vm.getDataCenterIdToDeployIn(), vm.getPodIdToDeployIn(), "VM (name: " + vm.getInstanceName() + ", id: " + vm.getId() + ") stopped on host " + hostDesc + " due to storage failure", "Virtual Machine " + vm.getInstanceName() + " (id: " + vm.getId() + ") running on host [" + vm.getHostId() + "] stopped due to storage failure.");
    }
    // if (serverState == State.Migrating) {
    // s_logger.debug("Skipping vm in migrating state: " + vm);
    // return null;
    if (agentState == serverState) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Both states are " + agentState + " for " + vm);
        }
        assert (agentState == State.Stopped || agentState == State.Running) : "If the states we send up is changed, this must be changed.";
        if (agentState == State.Running) {
            try {
                // With native HA the reporting host is authoritative for placement.
                if(nativeHA) {
                    stateTransitTo(vm, VirtualMachine.Event.AgentReportRunning, hostId);
                } else {
                    stateTransitTo(vm, VirtualMachine.Event.AgentReportRunning, vm.getHostId());
                }
            } catch (NoTransitionException e) {
                s_logger.warn(e.getMessage());
            }
            // FIXME: What if someone comes in and sets it to stopping? Then what?
            return null;
        }
        s_logger.debug("State matches but the agent said stopped so let's send a cleanup command anyways.");
        return cleanup(agentName);
    }
    if (agentState == State.Shutdowned) {
        // Guest shut itself down: force-stop on the server side to reconcile.
        if (serverState == State.Running || serverState == State.Starting || serverState == State.Stopping) {
            try {
                advanceStop(vm, true, _accountMgr.getSystemUser(), _accountMgr.getSystemAccount());
            } catch (AgentUnavailableException e) {
                assert (false) : "How do we hit this with forced on?";
                return null;
            } catch (OperationTimedoutException e) {
                assert (false) : "How do we hit this with forced on?";
                return null;
            } catch (ConcurrentOperationException e) {
                assert (false) : "How do we hit this with forced on?";
                return null;
            }
        } else {
            s_logger.debug("Sending cleanup to a shutdowned vm: " + agentName);
            command = cleanup(agentName);
        }
    } else if (agentState == State.Stopped) {
        // This state means the VM on the agent was detected previously
        // and now is gone. This is slightly different than if the VM
        // was never completed but we still send down a Stop Command
        // to ensure there's cleanup.
        if (serverState == State.Running) {
            // Our records showed that it should be running so let's restart it.
            _haMgr.scheduleRestart(vm, false);
        } else if (serverState == State.Stopping) {
            _haMgr.scheduleStop(vm, vm.getHostId(), WorkType.ForceStop);
            s_logger.debug("Scheduling a check stop for VM in stopping mode: " + vm);
        } else if (serverState == State.Starting) {
            s_logger.debug("Ignoring VM in starting mode: " + vm.getInstanceName());
            _haMgr.scheduleRestart(vm, false);
        }
        command = cleanup(agentName);
    } else if (agentState == State.Running) {
        if (serverState == State.Starting) {
            if (fullSync) {
                // A start that was in flight when the host reconnected: finish the start
                // bookkeeping (nics, finalize commands) and mark the VM Running.
                s_logger.debug("VM state is starting on full sync so updating it to running");
                vm = findById(vm.getType(), vm.getId());
                try {
                    stateTransitTo(vm, Event.AgentReportRunning, vm.getHostId());
                } catch (NoTransitionException e1) {
                    s_logger.warn(e1.getMessage());
                }
                s_logger.debug("VM's " + vm + " state is starting on full sync so updating it to Running");
                vm = vmGuru.findById(vm.getId());
                VirtualMachineProfile<VMInstanceVO> profile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);
                List<NicVO> nics = _nicsDao.listByVmId(profile.getId());
                for (NicVO nic : nics) {
                    Network network = _networkMgr.getNetwork(nic.getNetworkId());
                    NicProfile nicProfile = new NicProfile(nic, network, nic.getBroadcastUri(), nic.getIsolationUri(), null);
                    profile.addNic(nicProfile);
                }
                Commands cmds = new Commands(OnError.Stop);
                s_logger.debug("Finalizing commands that need to be send to complete Start process for the vm " + vm);
                if (vmGuru.finalizeCommandsOnStart(cmds, profile)) {
                    if (cmds.size() != 0) {
                        try {
                            _agentMgr.send(vm.getHostId(), cmds);
                        } catch (OperationTimedoutException e) {
                            s_logger.error("Exception during update for running vm: " + vm, e);
                            return null;
                        } catch (ResourceUnavailableException e) {
                            s_logger.error("Exception during update for running vm: " + vm, e);
                            return null;
                        }
                    }
                    if (vmGuru.finalizeStart(profile, vm.getHostId(), cmds, null)) {
                        try {
                            stateTransitTo(vm, Event.AgentReportRunning, vm.getHostId());
                        } catch (NoTransitionException e) {
                            s_logger.warn(e.getMessage());
                        }
                    } else {
                        s_logger.error("Exception during update for running vm: " + vm);
                        return null;
                    }
                } else {
                    s_logger.error("Unable to finalize commands on start for vm: " + vm);
                    return null;
                }
            }
        } else if (serverState == State.Stopping) {
            s_logger.debug("Scheduling a stop command for " + vm);
            _haMgr.scheduleStop(vm, vm.getHostId(), WorkType.Stop);
        } else {
            s_logger.debug("VM state is in stopped so stopping it on the agent");
            command = cleanup(agentName);
        }
    }
    return command;
}

/**
 * Full reconciliation at host-connect time: walks every VM the database places on the host
 * plus every VM the host itself reported, reconciling each through compareState() and
 * collecting the resulting fix-up commands.
 */
public Commands fullSync(final long hostId, final Map<String, State> newStates) {
    Commands commands = new Commands(OnError.Continue);
    final List<? extends VMInstanceVO> vms = _vmDao.listByHostId(hostId);
    s_logger.debug("Found " + vms.size() + " VMs for host " + hostId);
    Map<Long, AgentVmInfo> infos = convertToInfos(newStates);
    for (VMInstanceVO vm : vms) {
        AgentVmInfo info = infos.remove(vm.getId());
        VMInstanceVO castedVm = null;
        if (info == null) {
            // The host did not report this VM at all: treat it as Stopped on the agent.
            info = new AgentVmInfo(vm.getInstanceName(), getVmGuru(vm), vm, State.Stopped);
            castedVm = info.guru.findById(vm.getId());
        } else {
            castedVm = info.vm;
        }
        HypervisorGuru hvGuru = _hvGuruMgr.getGuru(castedVm.getHypervisorType());
        Command command = compareState(hostId, castedVm, info, true, hvGuru.trackVmHostChange());
        if (command != null) {
            commands.addCommand(command);
        }
    }
    // Whatever is left was reported by the host but not placed there by the database.
    for (final AgentVmInfo left : infos.values()) {
        for (VirtualMachineGuru<?
extends VMInstanceVO> vmGuru : _vmGurus.values()) {
            VMInstanceVO vm = vmGuru.findByName(left.name);
            if (vm == null) {
                s_logger.warn("Stopping a VM that we have no record of: " + left.name);
                commands.addCommand(cleanup(left.name));
            } else {
                HypervisorGuru hvGuru = _hvGuruMgr.getGuru(vm.getHypervisorType());
                if(hvGuru.trackVmHostChange()) {
                    // Hypervisor tracks host changes natively: reconcile instead of stopping.
                    Command command = compareState(hostId, vm, left, true, true);
                    if (command != null) {
                        commands.addCommand(command);
                    }
                } else {
                    s_logger.warn("Stopping a VM that we have no record of: " + left.name);
                    commands.addCommand(cleanup(left.name));
                }
            }
        }
    }
    return commands;
}

@Override
public boolean isRecurring() {
    // The sync commands this listener sends are one-shot, not periodic.
    return false;
}

@Override
public boolean processAnswers(long agentId, long seq, Answer[] answers) {
    // Answers to the cleanup commands sent by deltaSync: log outcome only.
    for (final Answer answer : answers) {
        if (!answer.getResult()) {
            s_logger.warn("Cleanup failed due to " + answer.getDetails());
        } else {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Cleanup succeeded. Details " + answer.getDetails());
            }
        }
    }
    return true;
}

@Override
public boolean processTimeout(long agentId, long seq) {
    return true;
}

@Override
public int getTimeout() {
    return -1;
}

@Override
public boolean processCommands(long agentId, long seq, Command[] cmds) {
    // Piggybacks on routing pings: any VM state changes the agent reports trigger a delta
    // sync whose corrective commands are sent straight back to the agent.
    boolean processed = false;
    for (Command cmd : cmds) {
        if (cmd instanceof PingRoutingCommand) {
            PingRoutingCommand ping = (PingRoutingCommand) cmd;
            if (ping.getNewStates().size() > 0) {
                Commands commands = deltaSync(agentId, ping.getNewStates());
                if (commands.size() > 0) {
                    try {
                        _agentMgr.send(agentId, commands, this);
                    } catch (final AgentUnavailableException e) {
                        s_logger.warn("Agent is now unavailable", e);
                    }
                }
            }
            processed = true;
        }
    }
    return processed;
}

@Override
public AgentControlAnswer processControlCommand(long agentId, AgentControlCommand cmd) {
    return null;
}

@Override
public boolean processDisconnect(long agentId, Status state) {
    return true;
}

@Override
public void processConnect(HostVO agent, StartupCommand cmd) throws ConnectionException {
    // Full sync when a routing host (re)connects; a failed cleanup forces a reconnect by
    // throwing ConnectionException(true, ...).
    if (!(cmd instanceof StartupRoutingCommand)) {
        return;
    }
    long agentId = agent.getId();
    StartupRoutingCommand startup = (StartupRoutingCommand) cmd;
    Commands commands = fullSync(agentId, startup.getVmStates());
    if (commands.size() > 0) {
        s_logger.debug("Sending clean commands to the agent");
        try {
            boolean error = false;
            Answer[] answers = _agentMgr.send(agentId, commands);
            for (Answer answer : answers) {
                if (!answer.getResult()) {
                    s_logger.warn("Unable to stop a VM due to " + answer.getDetails());
                    error = true;
                }
            }
            if (error) {
                throw new ConnectionException(true, "Unable to stop VMs");
            }
        } catch (final AgentUnavailableException e) {
            s_logger.warn("Agent is unavailable now", e);
            throw new ConnectionException(true, "Unable to sync", e);
        } catch (final OperationTimedoutException e) {
            s_logger.warn("Agent is unavailable now", e);
            throw new ConnectionException(true, "Unable to sync", e);
        }
    }
}

/**
 * Watchdog task: finds VMs stuck in Starting/Stopping past the operation timeout and hands
 * them to HA for a restart or a check-stop. Serialized across nodes by a global lock.
 */
protected class TransitionTask implements Runnable {
    @Override
    public void run() {
        GlobalLock lock = GlobalLock.getInternLock("TransitionChecking");
        if (lock == null) {
            s_logger.debug("Couldn't get the global lock");
            return;
        }
        if (!lock.lock(30)) {
            s_logger.debug("Couldn't lock the db");
            return;
        }
        try {
            lock.addRef();
            List<VMInstanceVO> instances = _vmDao.findVMInTransition(new Date(new Date().getTime() - (_operationTimeout * 1000)), State.Starting, State.Stopping);
            for (VMInstanceVO instance : instances) {
                State state = instance.getState();
                if (state == State.Stopping) {
                    _haMgr.scheduleStop(instance, instance.getHostId(), WorkType.CheckStop);
                } else if (state == State.Starting) {
                    _haMgr.scheduleRestart(instance, true);
                }
            }
        } catch (Exception e) {
            s_logger.warn("Caught the following exception on transition checking", e);
        } finally {
            StackMaid.current().exitCleanup();
            lock.unlock();
        }
    }
}

/**
 * Tuple pairing an agent-reported VM name/state with the matching server record ({@code vm}
 * may be null when the server has no record) and the guru for its type.
 */
protected class AgentVmInfo {
    public String name;
    public State state;
    public VMInstanceVO vm;
    public VirtualMachineGuru<VMInstanceVO> guru;
    @SuppressWarnings("unchecked")
    public AgentVmInfo(String name, VirtualMachineGuru<? extends VMInstanceVO> guru, VMInstanceVO vm, State state) {
        this.name = name;
        this.state = state;
        this.vm = vm;
        this.guru = (VirtualMachineGuru<VMInstanceVO>) guru;
    }
}
}
package com.cloud.vm; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import javax.ejb.Local; import javax.naming.ConfigurationException; import org.apache.log4j.Logger; import com.cloud.agent.AgentManager; import com.cloud.agent.AgentManager.OnError; import com.cloud.agent.Listener; import com.cloud.agent.api.AgentControlAnswer; import com.cloud.agent.api.AgentControlCommand; import com.cloud.agent.api.Answer; import com.cloud.agent.api.CheckVirtualMachineAnswer; import com.cloud.agent.api.CheckVirtualMachineCommand; import com.cloud.agent.api.Command; import com.cloud.agent.api.MigrateAnswer; import com.cloud.agent.api.MigrateCommand; import com.cloud.agent.api.PingRoutingCommand; import com.cloud.agent.api.PrepareForMigrationAnswer; import com.cloud.agent.api.PrepareForMigrationCommand; import com.cloud.agent.api.RebootAnswer; import com.cloud.agent.api.RebootCommand; import com.cloud.agent.api.StartAnswer; import com.cloud.agent.api.StartCommand; import com.cloud.agent.api.StartupCommand; import com.cloud.agent.api.StartupRoutingCommand; import com.cloud.agent.api.StopAnswer; import com.cloud.agent.api.StopCommand; import com.cloud.agent.api.to.VirtualMachineTO; import com.cloud.agent.manager.Commands; import com.cloud.alert.AlertManager; import com.cloud.capacity.CapacityManager; import com.cloud.cluster.ClusterManager; import com.cloud.cluster.StackMaid; import com.cloud.configuration.Config; import com.cloud.configuration.ConfigurationManager; import com.cloud.configuration.ResourceCount.ResourceType; import com.cloud.configuration.dao.ConfigurationDao; import com.cloud.consoleproxy.ConsoleProxyManager; import com.cloud.dc.DataCenter; import com.cloud.dc.DataCenterVO; import com.cloud.dc.HostPodVO; import 
com.cloud.dc.dao.DataCenterDao; import com.cloud.dc.dao.HostPodDao; import com.cloud.deploy.DataCenterDeployment; import com.cloud.deploy.DeployDestination; import com.cloud.deploy.DeploymentPlan; import com.cloud.deploy.DeploymentPlanner; import com.cloud.deploy.DeploymentPlanner.ExcludeList; import com.cloud.domain.dao.DomainDao; import com.cloud.event.dao.UsageEventDao; import com.cloud.exception.AgentUnavailableException; import com.cloud.exception.ConcurrentOperationException; import com.cloud.exception.ConnectionException; import com.cloud.exception.InsufficientCapacityException; import com.cloud.exception.InsufficientServerCapacityException; import com.cloud.exception.ManagementServerException; import com.cloud.exception.OperationTimedoutException; import com.cloud.exception.ResourceUnavailableException; import com.cloud.exception.VirtualMachineMigrationException; import com.cloud.ha.HighAvailabilityManager; import com.cloud.ha.HighAvailabilityManager.WorkType; import com.cloud.host.Host; import com.cloud.host.HostVO; import com.cloud.host.Status; import com.cloud.host.dao.HostDao; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.HypervisorGuru; import com.cloud.hypervisor.HypervisorGuruManager; import com.cloud.network.NetworkManager; import com.cloud.network.NetworkVO; import com.cloud.org.Cluster; import com.cloud.service.ServiceOfferingVO; import com.cloud.service.dao.ServiceOfferingDao; import com.cloud.storage.DiskOfferingVO; import com.cloud.storage.Storage.ImageFormat; import com.cloud.storage.StorageManager; import com.cloud.storage.StoragePoolVO; import com.cloud.storage.VMTemplateVO; import com.cloud.storage.Volume; import com.cloud.storage.Volume.Type; import com.cloud.storage.VolumeVO; import com.cloud.storage.dao.GuestOSCategoryDao; import com.cloud.storage.dao.GuestOSDao; import com.cloud.storage.dao.StoragePoolDao; import com.cloud.storage.dao.VMTemplateDao; import com.cloud.storage.dao.VolumeDao; import 
com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.User;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.UserDao;
import com.cloud.utils.Journal;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.Ternary;
import com.cloud.utils.component.Adapters;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.component.Inject;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.fsm.StateMachine2;
import com.cloud.vm.ItWorkVO.Step;
import com.cloud.vm.VirtualMachine.Event;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.dao.ConsoleProxyDao;
import com.cloud.vm.dao.DomainRouterDao;
import com.cloud.vm.dao.NicDao;
import com.cloud.vm.dao.SecondaryStorageVmDao;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.VMInstanceDao;

/**
 * Orchestrates the VM lifecycle (allocate/start/stop/reboot/migrate/expunge) across the
 * per-type VirtualMachineGurus, and — as an agent Listener — reconciles server-side VM
 * state with what hosts report.
 */
@Local(value=VirtualMachineManager.class)
public class VirtualMachineManagerImpl implements VirtualMachineManager, Listener {
    private static final Logger s_logger = Logger.getLogger(VirtualMachineManagerImpl.class);

    String _name;
    // Injected managers and DAOs.
    @Inject protected StorageManager _storageMgr;
    @Inject protected NetworkManager _networkMgr;
    @Inject protected AgentManager _agentMgr;
    @Inject protected VMInstanceDao _vmDao;
    @Inject protected ServiceOfferingDao _offeringDao;
    @Inject protected VMTemplateDao _templateDao;
    @Inject protected UserDao _userDao;
    @Inject protected AccountDao _accountDao;
    @Inject protected DomainDao _domainDao;
    @Inject protected ClusterManager _clusterMgr;
    @Inject protected ItWorkDao _workDao;
    @Inject protected UserVmDao _userVmDao;
    @Inject protected DomainRouterDao _routerDao;
    @Inject protected ConsoleProxyDao _consoleDao;
    @Inject protected SecondaryStorageVmDao _secondaryDao;
    @Inject protected UsageEventDao _usageEventDao;
    @Inject protected NicDao _nicsDao;
    @Inject protected AccountManager _accountMgr;
    @Inject protected HostDao _hostDao;
    @Inject protected AlertManager _alertMgr;
    @Inject protected GuestOSCategoryDao _guestOsCategoryDao;
    @Inject protected GuestOSDao _guestOsDao;
    @Inject protected VolumeDao _volsDao;
    @Inject protected ConsoleProxyManager _consoleProxyMgr;
    @Inject protected ConfigurationManager _configMgr;
    @Inject protected CapacityManager _capacityMgr;
    @Inject protected HighAvailabilityManager _haMgr;
    @Inject protected HostPodDao _podDao;
    @Inject protected DataCenterDao _dcDao;
    @Inject protected StoragePoolDao _storagePoolDao;
    @Inject protected HypervisorGuruManager _hvGuruMgr;
    @Inject(adapter=DeploymentPlanner.class) protected Adapters<DeploymentPlanner> _planners;

    // Per-VM-type gurus, registered by the individual managers at startup.
    Map<VirtualMachine.Type, VirtualMachineGuru<? extends VMInstanceVO>> _vmGurus = new HashMap<VirtualMachine.Type, VirtualMachineGuru<? extends VMInstanceVO>>();
    protected StateMachine2<State, VirtualMachine.Event, VirtualMachine> _stateMachine;
    ScheduledExecutorService _executor = null;
    // Tunables populated from configuration in configure().
    protected int _operationTimeout;
    protected int _retry;
    protected long _nodeId;
    protected long _cleanupWait;
    protected long _cleanupInterval;
    protected long _cancelWait;
    protected long _opWaitInterval;
    protected int _lockStateRetry;

    @Override
    public <T extends VMInstanceVO> void registerGuru(VirtualMachine.Type type, VirtualMachineGuru<T> guru) {
        synchronized(_vmGurus) {
            _vmGurus.put(type, guru);
        }
    }

    @Override
    @DB
    public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering, Pair<?
extends DiskOfferingVO, Long> rootDiskOffering, List<Pair<DiskOfferingVO, Long>> dataDiskOfferings, List<Pair<NetworkVO, NicProfile>> networks, Map<VirtualMachineProfile.Param, Object> params, DeploymentPlan plan, HypervisorType hyperType, Account owner) throws InsufficientCapacityException {
    // Persists the VM record and, inside one transaction, allocates its nics and volumes
    // (ROOT from the template, or a raw ROOT for ISO; nothing for baremetal; plus data disks).
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Allocating entries for VM: " + vm);
    }
    VirtualMachineProfileImpl<T> vmProfile = new VirtualMachineProfileImpl<T>(vm, template, serviceOffering, owner, params);
    vm.setDataCenterId(plan.getDataCenterId());
    if (plan.getPodId() != null) {
        vm.setPodId(plan.getPodId());
    }
    if (plan.getHostId() != null) {
        vm.setHostId(plan.getHostId());
    }
    assert (plan.getPoolId() == null) : "We currently don't support pool preset yet";
    @SuppressWarnings("unchecked")
    VirtualMachineGuru<T> guru = (VirtualMachineGuru<T>)_vmGurus.get(vm.getType());
    Transaction txn = Transaction.currentTxn();
    txn.start();
    vm = guru.persist(vm);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Allocating nics for " + vm);
    }
    try {
        _networkMgr.allocate(vmProfile, networks);
    } catch (ConcurrentOperationException e) {
        throw new CloudRuntimeException("Concurrent operation while trying to allocate resources for the VM", e);
    }
    if (dataDiskOfferings == null) {
        dataDiskOfferings = new ArrayList<Pair<DiskOfferingVO, Long>>(0);
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Allocaing disks for " + vm);
    }
    if (template.getFormat() == ImageFormat.ISO) {
        _storageMgr.allocateRawVolume(Type.ROOT, "ROOT-" + vm.getId(), rootDiskOffering.first(), rootDiskOffering.second(), vm, owner);
    } else if (template.getFormat() == ImageFormat.BAREMETAL) {
        // Do nothing
    } else {
        _storageMgr.allocateTemplatedVolume(Type.ROOT, "ROOT-" + vm.getId(), rootDiskOffering.first(), template, vm, owner);
    }
    for (Pair<DiskOfferingVO, Long> offering : dataDiskOfferings) {
        _storageMgr.allocateRawVolume(Type.DATADISK, "DATA-" + vm.getId(), offering.first(), offering.second(), vm, owner);
    }
    txn.commit();
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Allocation completed for VM: " + vm);
    }
    return vm;
}

/** Convenience overload: single optional data disk, no extra params. */
@Override
public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering, Long rootSize, Pair<DiskOfferingVO, Long> dataDiskOffering, List<Pair<NetworkVO, NicProfile>> networks, DeploymentPlan plan, HypervisorType hyperType, Account owner) throws InsufficientCapacityException {
    List<Pair<DiskOfferingVO, Long>> diskOfferings = new ArrayList<Pair<DiskOfferingVO, Long>>(1);
    if (dataDiskOffering != null) {
        diskOfferings.add(dataDiskOffering);
    }
    return allocate(vm, template, serviceOffering, new Pair<DiskOfferingVO, Long>(serviceOffering, rootSize), diskOfferings, networks, null, plan, hyperType, owner);
}

/** Convenience overload: ROOT disk from the service offering, no data disks. */
@Override
public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering, List<Pair<NetworkVO, NicProfile>> networks, DeploymentPlan plan, HypervisorType hyperType, Account owner) throws InsufficientCapacityException {
    return allocate(vm, template, serviceOffering, new Pair<DiskOfferingVO, Long>(serviceOffering, null), null, networks, null, plan, hyperType, owner);
}

@SuppressWarnings("unchecked")
private <T extends VMInstanceVO> VirtualMachineGuru<T> getVmGuru(T vm) {
    return (VirtualMachineGuru<T>)_vmGurus.get(vm.getType());
}

@SuppressWarnings("unchecked")
private <T extends VMInstanceVO> VirtualMachineGuru<T> getBareMetalVmGuru(T vm) {
    return (VirtualMachineGuru<T>)_vmGurus.get(VirtualMachine.Type.UserBareMetal);
}

@Override
public <T extends VMInstanceVO> boolean expunge(T vm, User caller, Account account) throws ResourceUnavailableException {
    try {
        if (advanceExpunge(vm, caller, account)) {
            //Mark vms as removed
            remove(vm, caller, account);
            return true;
        } else {
            s_logger.info("Did not expunge " + vm);
            return false;
        }
    } catch (OperationTimedoutException e) {
        throw new CloudRuntimeException("Operation timed out", e);
    } catch (ConcurrentOperationException e) {
        throw new CloudRuntimeException("Concurrent operation ", e);
    }
}

@Override
public <T extends VMInstanceVO> boolean advanceExpunge(T vm, User caller, Account account) throws ResourceUnavailableException, OperationTimedoutException, ConcurrentOperationException {
    if (vm == null || vm.getRemoved() != null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to find vm or vm is destroyed: " + vm);
        }
        return true;
    }
    // Best-effort stop first; expunge only proceeds if the state machine accepts it.
    if (!this.advanceStop(vm, false, caller, account)) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to stop the VM so we can't expunge it.");
        }
    }
    if (!stateTransitTo(vm, VirtualMachine.Event.ExpungeOperation, vm.getHostId())) {
        s_logger.debug("Unable to destroy the vm because it is not in the correct state: " + vm.toString());
        return false;
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Destroying vm " + vm);
    }
    VirtualMachineProfile<T> profile = new VirtualMachineProfileImpl<T>(vm);
    _networkMgr.cleanupNics(profile);
    //Clean up volumes based on the vm's instance id
    _storageMgr.cleanupVolumes(vm.getId());
    VirtualMachineGuru<T> guru = getVmGuru(vm);
    guru.finalizeExpunge(vm);
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Expunged " + vm);
    }
    return true;
}

@Override
public boolean start() {
    _executor.scheduleAtFixedRate(new CleanupTask(), _cleanupInterval, _cleanupInterval, TimeUnit.SECONDS);
    // Adopt any work items orphaned by a previous run of this management-server node.
    cancelWorkItems(_nodeId);
    return true;
}

@Override
public boolean stop() {
    return true;
}

@Override
public boolean configure(String name, Map<String, Object> xmlParams) throws ConfigurationException {
    _name = name;
    ComponentLocator locator = ComponentLocator.getCurrentLocator();
    ConfigurationDao configDao = locator.getDao(ConfigurationDao.class);
    Map<String, String> params = configDao.getConfiguration(xmlParams);
    _retry = NumbersUtil.parseInt(params.get(Config.StartRetry.key()), 10);
    ReservationContextImpl.setComponents(_userDao, _domainDao, _accountDao);
    VirtualMachineProfileImpl.setComponents(_offeringDao, _templateDao, _accountDao);
    _cancelWait =
NumbersUtil.parseLong(params.get(Config.VmOpCancelInterval.key()), 3600); _cleanupWait = NumbersUtil.parseLong(params.get(Config.VmOpCleanupWait.key()), 3600); _cleanupInterval = NumbersUtil.parseLong(params.get(Config.VmOpCleanupInterval.key()), 86400) * 1000; _opWaitInterval = NumbersUtil.parseLong(params.get(Config.VmOpWaitInterval.key()), 120) * 1000; _lockStateRetry = NumbersUtil.parseInt(params.get(Config.VmOpLockStateRetry.key()), 5); _operationTimeout = NumbersUtil.parseInt(params.get(Config.Wait.key()), 1800) * 2; _executor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("Vm-Operations-Cleanup")); _nodeId = _clusterMgr.getManagementNodeId(); _agentMgr.registerForHostEvents(this, true, true, true); return true; } @Override public String getName() { return _name; } protected VirtualMachineManagerImpl() { setStateMachine(); } @Override public <T extends VMInstanceVO> T start(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account) throws InsufficientCapacityException, ResourceUnavailableException { try { return advanceStart(vm, params, caller, account); } catch (ConcurrentOperationException e) { throw new CloudRuntimeException("Unable to start a VM due to concurrent operation", e); } } protected boolean checkWorkItems(VMInstanceVO vm, State state) throws ConcurrentOperationException { while (true) { ItWorkVO vo = _workDao.findByOutstandingWork(vm.getId(), state); if (vo == null) { if (s_logger.isDebugEnabled()) { s_logger.debug("Unable to find work for VM: " + vm + " and state: " + state); } return true; } if (vo.getStep() == Step.Done) { if (s_logger.isDebugEnabled()) { s_logger.debug("Work for " + vm + " is " + vo.getStep()); } return true; } if (vo.getSecondsTaskIsInactive() > _cancelWait) { s_logger.warn("The task item for vm " + vm + " has been inactive for " + vo.getSecondsTaskIsInactive()); return false; } try { Thread.sleep(_opWaitInterval); } catch (InterruptedException e) { s_logger.info("Waiting for " + vm + 
" but is interrupted"); throw new ConcurrentOperationException("Waiting for " + vm + " but is interrupted"); } s_logger.debug("Waiting some more to make sure there's no activity on " + vm); } } @DB protected <T extends VMInstanceVO> Ternary<T, ReservationContext, ItWorkVO> changeToStartState(VirtualMachineGuru<T> vmGuru, T vm, User caller, Account account) throws ConcurrentOperationException { long vmId = vm.getId(); ItWorkVO work = new ItWorkVO(UUID.randomUUID().toString(), _nodeId, State.Starting, vm.getType(), vm.getId()); int retry = _lockStateRetry; while (retry Transaction txn = Transaction.currentTxn(); txn.start(); try { if (stateTransitTo(vm, Event.StartRequested, null, work.getId())) { Journal journal = new Journal.LogJournal("Creating " + vm, s_logger); work = _workDao.persist(work); ReservationContextImpl context = new ReservationContextImpl(work.getId(), journal, caller, account); if (s_logger.isDebugEnabled()) { s_logger.debug("Successfully transitioned to start state for " + vm + " reservation id = " + work.getId()); } return new Ternary<T, ReservationContext, ItWorkVO>(vmGuru.findById(vmId), context, work); } if (s_logger.isDebugEnabled()) { s_logger.debug("Determining why we're unable to update the state to Starting for " + vm); } VMInstanceVO instance = _vmDao.findById(vmId); if (instance == null) { throw new ConcurrentOperationException("Unable to acquire lock on " + vm); } State state = instance.getState(); if (state == State.Running) { if (s_logger.isDebugEnabled()) { s_logger.debug("VM is already started: " + vm); } return null; } if (state.isTransitional()) { if (!checkWorkItems(vm, state)) { throw new ConcurrentOperationException("There are concurrent operations on the VM " + vm); } else { continue; } } if (state != State.Stopped) { s_logger.debug("VM " + vm + " is not in a state to be started: " + state); return null; } } finally { txn.commit(); } } throw new ConcurrentOperationException("Unable to change the state of " + vm); } @DB 
protected <T extends VMInstanceVO> boolean changeState(T vm, Event event, Long hostId, ItWorkVO work, Step step) {
    // Transitions the VM state and advances the work item's step atomically (one txn).
    Transaction txn = Transaction.currentTxn();
    txn.start();
    if (!stateTransitTo(vm, event, hostId)) {
        return false;
    }
    _workDao.updateStep(work, step);
    txn.commit();
    return true;
}

/**
 * Full start orchestration: lock the VM into Starting, plan a deployment, prepare storage
 * and network, send the StartCommand to the chosen host, and finalize. Retries planning
 * (_retry times) with an exclude list on recoverable failures; cleans up on failure.
 */
@Override
public <T extends VMInstanceVO> T advanceStart(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException {
    long vmId = vm.getId();
    Long hostIdSpecified = vm.getHostId();
    VirtualMachineGuru<T> vmGuru;
    if (vm.getHypervisorType() == HypervisorType.BareMetal) {
        vmGuru = getBareMetalVmGuru(vm);
    } else {
        vmGuru = getVmGuru(vm);
    }

    vm = vmGuru.findById(vm.getId());
    Ternary<T, ReservationContext, ItWorkVO> start = changeToStartState(vmGuru, vm, caller, account);
    if (start == null) {
        // null means the VM is already Running (or not startable); return its current row.
        return vmGuru.findById(vmId);
    }

    vm = start.first();
    ReservationContext ctx = start.second();
    ItWorkVO work = start.third();

    T startedVm = null;
    ServiceOfferingVO offering = _offeringDao.findById(vm.getServiceOfferingId());
    VMTemplateVO template = _templateDao.findById(vm.getTemplateId());

    // If the caller pinned a host, constrain the plan to that host's cluster as well.
    Long clusterSpecified = null;
    if(hostIdSpecified != null){
        Host destinationHost = _hostDao.findById(hostIdSpecified);
        clusterSpecified = destinationHost.getClusterId();
    }
    DataCenterDeployment plan = new DataCenterDeployment(vm.getDataCenterId(), vm.getPodId(), clusterSpecified, hostIdSpecified, null);

    HypervisorGuru hvGuru = _hvGuruMgr.getGuru(vm.getHypervisorType());

    try {
        Journal journal = start.second().getJournal();
        ExcludeList avoids = new ExcludeList();
        int retry = _retry;
        while (retry-- != 0) { // It's != so that it can match -1.
            //edit plan if this vm's ROOT volume is in READY state already
            List<VolumeVO> vols = _volsDao.findReadyRootVolumesByInstance(vm.getId());
            for (VolumeVO vol : vols) {
                Volume.State state = vol.getState();
                if (state == Volume.State.Ready) {
                    StoragePoolVO pool = _storagePoolDao.findById(vol.getPoolId());
                    if (!pool.isInMaintenance()) {
                        // Pin the plan to the existing root volume's pool so we don't deploy
                        // the VM away from its storage.
                        long rootVolDcId = pool.getDataCenterId();
                        Long rootVolPodId = pool.getPodId();
                        Long rootVolClusterId = pool.getClusterId();
                        plan = new DataCenterDeployment(rootVolDcId, rootVolPodId, rootVolClusterId, null, vol.getPoolId());
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug("Root Volume " + vol + " is ready, changing deployment plan to use this pool's datacenterId: "+rootVolDcId +" , podId: "+rootVolPodId +" , and clusterId: "+rootVolClusterId);
                        }
                    }
                }
            }

            VirtualMachineProfileImpl<T> vmProfile = new VirtualMachineProfileImpl<T>(vm, template, offering, account, params);
            DeployDestination dest = null;
            // First planner that can handle the profile and returns a destination wins.
            for (DeploymentPlanner planner : _planners) {
                if (planner.canHandle(vmProfile, plan, avoids)) {
                    dest = planner.plan(vmProfile, plan, avoids);
                } else {
                    continue;
                }
                if (dest != null) {
                    avoids.addHost(dest.getHost().getId());
                    journal.record("Deployment found ", vmProfile, dest);
                    break;
                }
            }

            if (dest == null) {
                throw new InsufficientServerCapacityException("Unable to create a deployment for " + vmProfile, DataCenter.class, plan.getDataCenterId());
            }

            long destHostId = dest.getHost().getId();
            if (!changeState(vm, Event.OperationRetry, destHostId, work, Step.Prepare)) {
                throw new ConcurrentOperationException("Unable to update the state of the Virtual Machine");
            }

            try {
                if (vm.getHypervisorType() != HypervisorType.BareMetal) {
                    _storageMgr.prepare(vmProfile, dest);
                }
                _networkMgr.prepare(vmProfile, dest, ctx);

                vmGuru.finalizeVirtualMachineProfile(vmProfile, dest, ctx);

                VirtualMachineTO vmTO = hvGuru.implement(vmProfile);
                Commands cmds = new Commands(OnError.Revert);
                cmds.addCommand(new StartCommand(vmTO));

                vmGuru.finalizeDeployment(cmds, vmProfile, dest, ctx);
                vm.setPodId(dest.getPod().getId());

                // Re-read the work item to detect concurrent cleanup between Prepare and send.
                work = _workDao.findById(work.getId());
                if (work == null || work.getStep() != Step.Prepare) {
                    throw new ConcurrentOperationException("Work steps have been changed: " + work);
                }
                _workDao.updateStep(work, Step.Starting);

                _agentMgr.send(destHostId, cmds);
                _workDao.updateStep(work, Step.Started);

                Answer startAnswer = cmds.getAnswer(StartAnswer.class);
                if (startAnswer != null && startAnswer.getResult()) {
                    if (vmGuru.finalizeStart(vmProfile, destHostId, cmds, ctx)) {
                        if (!changeState(vm, Event.OperationSucceeded, destHostId, work, Step.Done)) {
                            throw new ConcurrentOperationException("Unable to transition to a new state.");
                        }
                        startedVm = vm;
                        if (s_logger.isDebugEnabled()) {
                            s_logger.debug("Creation complete for VM " + vm);
                        }
                        return startedVm;
                    }
                }
                s_logger.info("Unable to start VM on " + dest.getHost() + " due to " + (startAnswer == null ? " no start answer" : startAnswer.getDetails()));
            } catch (OperationTimedoutException e) {
                s_logger.debug("Unable to send the start command to host " + dest.getHost());
                if (e.isActive()) {
                    //TODO: This one is different as we're not sure if the VM is actually started.
                }
                avoids.addHost(destHostId);
                continue;
            } catch (ResourceUnavailableException e) {
                s_logger.info("Unable to contact resource.", e);
                if (!avoids.add(e)) {
                    // Volume/Nic scoped failures are not retryable via a different host.
                    if (e.getScope() == Volume.class || e.getScope() == Nic.class) {
                        throw e;
                    } else {
                        s_logger.warn("unexpected ResourceUnavailableException : " + e.getScope().getName(), e);
                        throw e;
                    }
                }
                continue;
            } catch (InsufficientCapacityException e) {
                s_logger.info("Insufficient capacity ", e);
                if (!avoids.add(e)) {
                    if (e.getScope() == Volume.class || e.getScope() == Nic.class) {
                        throw e;
                    } else {
                        s_logger.warn("unexpected InsufficientCapacityException : " + e.getScope().getName(), e);
                    }
                }
                continue;
            } catch (RuntimeException e) {
                s_logger.warn("Failed to start instance " + vm, e);
                throw e;
            } finally {
                if (startedVm == null) {
                    // This attempt failed: release what was prepared before retrying/exiting.
                    _workDao.updateStep(work, Step.Release);
                    cleanup(vmGuru, vmProfile, work, Event.OperationFailed, false, caller, account);
                }
            }
        }
    } finally {
        if (startedVm == null) {
            _accountMgr.decrementResourceCount(vm.getAccountId(), ResourceType.user_vm);
            changeState(vm, Event.OperationFailed, null, work, Step.Done);
        }
    }

    return startedVm;
}

/**
 * Stops a VM (non-forced), mapping timeout and concurrency failures to caller-friendly types.
 */
@Override
public <T extends VMInstanceVO> boolean stop(T vm, User user, Account account) throws ResourceUnavailableException {
    try {
        return advanceStop(vm, false, user, account);
    } catch (OperationTimedoutException e) {
        throw new AgentUnavailableException("Unable to stop vm because the operation to stop timed out", vm.getHostId(), e);
    } catch (ConcurrentOperationException e) {
        throw new CloudRuntimeException("Unable to stop vm because of a concurrent operation", e);
    }
}

/**
 * Sends a StopCommand for the profile's VM to its host. When force is true, agent
 * unavailability / timeouts are tolerated and treated as success.
 */
protected <T extends VMInstanceVO> boolean sendStop(VirtualMachineGuru<T> guru, VirtualMachineProfile<T> profile, boolean force) {
    VMInstanceVO vm = profile.getVirtualMachine();
    StopCommand stop = new StopCommand(vm, vm.getInstanceName(), null);
    try {
        StopAnswer answer = (StopAnswer)_agentMgr.send(vm.getHostId(), stop);
        if (!answer.getResult()) {
            s_logger.debug("Unable to stop VM due to " + answer.getDetails());
return false;
        }
        guru.finalizeStop(profile, answer);
    } catch (AgentUnavailableException e) {
        if (!force) {
            return false;
        }
    } catch (OperationTimedoutException e) {
        if (!force) {
            return false;
        }
    }
    return true;
}

/**
 * Releases resources held by a VM whose start/stop/migrate did not complete, stopping it on
 * whichever host(s) may still be running it. Returns false when cleanup cannot proceed
 * safely (e.g. another node is mid-Start and force is false).
 */
protected <T extends VMInstanceVO> boolean cleanup(VirtualMachineGuru<T> guru, VirtualMachineProfile<T> profile, ItWorkVO work, Event event, boolean force, User user, Account account) {
    T vm = profile.getVirtualMachine();
    State state = vm.getState();
    s_logger.debug("Cleaning up resources for the vm " + vm + " in " + state + " state");
    if (state == State.Starting) {
        Step step = work.getStep();
        if (step == Step.Starting && !force) {
            s_logger.warn("Unable to cleanup vm " + vm + "; work state is incorrect: " + step);
            return false;
        }
        if (step == Step.Started || step == Step.Starting) {
            if (vm.getHostId() != null) {
                if (!sendStop(guru, profile, force)) {
                    s_logger.warn("Failed to stop vm " + vm + " in " + State.Starting + " state as a part of cleanup process");
                    return false;
                }
            }
        }
        if (step != Step.Release && step != Step.Prepare && step != Step.Started && step != Step.Starting) {
            s_logger.debug("Cleanup is not needed for vm " + vm + "; work state is incorrect: " + step);
            return true;
        }
    } else if (state == State.Stopping) {
        if (vm.getHostId() != null) {
            if (!sendStop(guru, profile, force)) {
                s_logger.warn("Failed to stop vm " + vm + " in " + State.Stopping + " state as a part of cleanup process");
                return false;
            }
        }
    } else if (state == State.Migrating) {
        // Mid-migration the VM may exist on both the destination (hostId) and the source
        // (lastHostId); stop it on both.
        if (vm.getHostId() != null) {
            if (!sendStop(guru, profile, force)) {
                s_logger.warn("Failed to stop vm " + vm + " in " + State.Migrating + " state as a part of cleanup process");
                return false;
            }
        }
        if (vm.getLastHostId() != null) {
            if (!sendStop(guru, profile, force)) {
                s_logger.warn("Failed to stop vm " + vm + " in " + State.Migrating + " state as a part of cleanup process");
                return false;
            }
        }
    } else if (state == State.Running) {
        if (!sendStop(guru, profile, force)) {
            s_logger.warn("Failed to stop vm " + vm + " in " + State.Running + " state as a part of cleanup process");
            return false;
        }
    }
    _networkMgr.release(profile, force);
    _storageMgr.release(profile);
    // (sic: "cleanued" typo is in the original log message — left unchanged.)
    s_logger.debug("Successfully cleanued up resources for the vm " + vm + " in " + state + " state");
    return true;
}

/**
 * Full stop orchestration: transition the state machine, stop the VM on its host (forced
 * stops tolerate agent failures), then release network and storage resources.
 */
@Override
public <T extends VMInstanceVO> boolean advanceStop(T vm, boolean forced, User user, Account account) throws AgentUnavailableException, OperationTimedoutException, ConcurrentOperationException {
    long vmId = vm.getId();
    State state = vm.getState();
    if (state == State.Stopped) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("VM is already stopped: " + vm);
        }
        return true;
    }
    if (state == State.Destroyed || state == State.Expunging || state == State.Error) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Stopped called on " + vm + " but the state is " + state);
        }
        return true;
    }
    VirtualMachineGuru<T> vmGuru = getVmGuru(vm);
    if (!stateTransitTo(vm, Event.StopRequested, vm.getHostId())) {
        if (!forced) {
            throw new ConcurrentOperationException("VM is being operated on by someone else.");
        }
        vm = vmGuru.findById(vmId);
        if (vm == null) {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Unable to find VM " + vmId);
            }
            return true;
        }
    }
    // Forced stop of a VM with an in-flight operation: clean up that operation's resources
    // and report the VM stopped.
    if ((vm.getState() == State.Starting || vm.getState() == State.Stopping || vm.getState() == State.Migrating) && forced) {
        ItWorkVO work = _workDao.findByOutstandingWork(vm.getId(), vm.getState());
        if (work != null) {
            if (cleanup(vmGuru, new VirtualMachineProfileImpl<T>(vm), work, Event.StopRequested, forced, user, account)) {
                return stateTransitTo(vm, Event.AgentReportStopped, null);
            }
        }
    }
    VirtualMachineProfile<T> profile = new VirtualMachineProfileImpl<T>(vm);
    if (vm.getHostId() != null) {
        String routerPrivateIp = null;
        if(vm.getType() == VirtualMachine.Type.DomainRouter){
            routerPrivateIp = vm.getPrivateIpAddress();
        }
        StopCommand stop = new StopCommand(vm, vm.getInstanceName(), null, routerPrivateIp);
        boolean stopped = false;
        StopAnswer answer = null;
        try {
            answer = (StopAnswer)_agentMgr.send(vm.getHostId(), stop);
            stopped = answer.getResult();
            if (!stopped) {
                throw new CloudRuntimeException("Unable to stop the virtual machine due to " + answer.getDetails());
            }
            vmGuru.finalizeStop(profile, answer);
        } catch (AgentUnavailableException e) {
            // deliberately swallowed: the finally block decides based on 'stopped'/'forced'
        } catch (OperationTimedoutException e) {
            // deliberately swallowed: the finally block decides based on 'stopped'/'forced'
        } finally {
            if (!stopped) {
                if (!forced) {
                    s_logger.warn("Unable to stop vm " + vm);
                    stateTransitTo(vm, Event.OperationFailed, vm.getHostId());
                    return false;
                } else {
                    s_logger.warn("Unable to actually stop " + vm + " but continue with release because it's a force stop");
                }
            }
        }
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug(vm + " is stopped on the host. Proceeding to release resource held.");
    }
    try {
        _networkMgr.release(profile, forced);
        s_logger.debug("Successfully released network resources for the vm " + vm);
    } catch (Exception e) {
        s_logger.warn("Unable to release some network resources.", e);
    }
    try {
        if (vm.getHypervisorType() != HypervisorType.BareMetal) {
            _storageMgr.release(profile);
            s_logger.debug("Successfully released storage resources for the vm " + vm);
        }
    } catch (Exception e) {
        s_logger.warn("Unable to release storage resources.", e);
    }
    vm.setReservationId(null);
    return stateTransitTo(vm, Event.OperationSucceeded, null);
}

private void setStateMachine() {
    _stateMachine = VirtualMachine.State.getStateMachine();
}

// Variant that also stamps the reservation id on the VM before the transition.
protected boolean stateTransitTo(VMInstanceVO vm, VirtualMachine.Event e, Long hostId, String reservationId) {
    vm.setReservationId(reservationId);
    return _stateMachine.transitTo(vm, e, hostId, _vmDao);
}

@Override
public boolean stateTransitTo(VMInstanceVO vm, VirtualMachine.Event e, Long hostId) {
    // Record the "last host" so HA/migration can find where the VM previously ran.
    State oldState = vm.getState();
    if (oldState == State.Starting) {
        if (e == Event.OperationSucceeded) {
            vm.setLastHostId(hostId);
        }
    } else if (oldState == State.Stopping) {
        if (e == Event.OperationSucceeded) {
            vm.setLastHostId(vm.getHostId());
        }
    }
    return _stateMachine.transitTo(vm, e, hostId, _vmDao);
}

@Override
public <T extends
VMInstanceVO> boolean remove(T vm, User user, Account caller) {
    //expunge the corresponding nics
    VirtualMachineProfile<T> profile = new VirtualMachineProfileImpl<T>(vm);
    _networkMgr.expungeNics(profile);
    s_logger.trace("Nics of the vm " + vm + " are expunged successfully");
    return _vmDao.remove(vm.getId());
}

/**
 * Stops the VM (non-forced) and transitions it to the destroy-requested state.
 */
@Override
public <T extends VMInstanceVO> boolean destroy(T vm, User user, Account caller) throws AgentUnavailableException, OperationTimedoutException, ConcurrentOperationException {
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("Destroying vm " + vm.toString());
    }
    if (vm == null || vm.getState() == State.Destroyed || vm.getState() == State.Expunging || vm.getRemoved() != null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to find vm or vm is destroyed: " + vm);
        }
        return true;
    }

    if (!advanceStop(vm, false, user, caller)) {
        s_logger.debug("Unable to stop " + vm);
        return false;
    }

    if (!stateTransitTo(vm, VirtualMachine.Event.DestroyRequested, vm.getHostId())) {
        s_logger.debug("Unable to destroy the vm because it is not in the correct state: " + vm.toString());
        return false;
    }

    return true;
}

// Asks the host's agent whether the VM is actually present and not stopped there.
protected boolean checkVmOnHost(VirtualMachine vm, long hostId) throws AgentUnavailableException, OperationTimedoutException {
    CheckVirtualMachineAnswer answer = (CheckVirtualMachineAnswer)_agentMgr.send(hostId, new CheckVirtualMachineCommand(vm.getInstanceName()));
    if (!answer.getResult() || answer.getState() == State.Stopped) {
        return false;
    }
    return true;
}

/**
 * Live-migrates a Running VM to a destination host in the same cluster: prepare the
 * destination, issue the MigrateCommand from the source, verify the VM on the destination,
 * and roll back (cleanup + alert + failed state) when anything goes wrong.
 *
 * @return the migrated VM, or null when the migration could not be completed.
 */
@Override
public <T extends VMInstanceVO> T migrate(T vm, long srcHostId, DeployDestination dest) throws ResourceUnavailableException, ConcurrentOperationException, ManagementServerException, VirtualMachineMigrationException {
    s_logger.info("Migrating " + vm + " to " + dest);

    long dstHostId = dest.getHost().getId();
    Host fromHost = _hostDao.findById(srcHostId);
    if (fromHost == null) {
        s_logger.info("Unable to find the host to migrate from: " + srcHostId);
        throw new CloudRuntimeException("Unable to find the host to migrate from: " + srcHostId);
    }

    if(fromHost.getClusterId().longValue() != dest.getCluster().getId()){
        s_logger.info("Source and destination host are not in same cluster, unable to migrate to host: " + dest.getHost().getId());
        throw new CloudRuntimeException("Source and destination host are not in same cluster, unable to migrate to host: " + dest.getHost().getId());
    }

    VirtualMachineGuru<T> vmGuru = getVmGuru(vm);

    long vmId = vm.getId();
    vm = vmGuru.findById(vmId);
    if (vm == null) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Unable to find the vm " + vm);
        }
        throw new ManagementServerException("Unable to find a virtual machine with id " + vmId);
    }

    if(vm.getState() != State.Running){
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("VM is not Running, unable to migrate the vm " + vm);
        }
        throw new VirtualMachineMigrationException("VM is not Running, unable to migrate the vm currently " + vm);
    }

    short alertType = AlertManager.ALERT_TYPE_USERVM_MIGRATE;
    if (VirtualMachine.Type.DomainRouter.equals(vm.getType())) {
        alertType = AlertManager.ALERT_TYPE_DOMAIN_ROUTER_MIGRATE;
    } else if (VirtualMachine.Type.ConsoleProxy.equals(vm.getType())) {
        alertType = AlertManager.ALERT_TYPE_CONSOLE_PROXY_MIGRATE;
    }

    VirtualMachineProfile<VMInstanceVO> profile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);
    _networkMgr.prepareNicForMigration(profile, dest);
    _storageMgr.prepareForMigration(profile, dest);

    HypervisorGuru hvGuru = _hvGuruMgr.getGuru(vm.getHypervisorType());
    VirtualMachineTO to = hvGuru.implement(profile);
    PrepareForMigrationCommand pfmc = new PrepareForMigrationCommand(to);

    ItWorkVO work = new ItWorkVO(UUID.randomUUID().toString(), _nodeId, State.Migrating, vm.getType(), vm.getId());
    work.setStep(Step.Prepare);
    work.setResourceType(ItWorkVO.ResourceType.Host);
    work.setResourceId(dstHostId);
    work = _workDao.persist(work);

    PrepareForMigrationAnswer pfma = null;
    try {
        pfma = (PrepareForMigrationAnswer)_agentMgr.send(dstHostId, pfmc);
        if (!pfma.getResult()) {
            String msg = "Unable to prepare for migration due to " + pfma.getDetails();
            pfma = null; // signals failure to the finally block below
            throw new AgentUnavailableException(msg, dstHostId);
        }
    } catch (OperationTimedoutException e1) {
        throw new AgentUnavailableException("Operation timed out", dstHostId);
    } finally {
        if (pfma == null) {
            work.setStep(Step.Done);
            _workDao.update(work.getId(), work);
        }
    }

    vm.setLastHostId(srcHostId);
    if (vm == null || vm.getHostId() == null || vm.getHostId() != srcHostId || !changeState(vm, Event.MigrationRequested, dstHostId, work, Step.Migrating)) {
        s_logger.info("Migration cancelled because state has changed: " + vm);
        throw new ConcurrentOperationException("Migration cancelled because state has changed: " + vm);
    }

    boolean migrated = false;
    try {
        boolean isWindows = _guestOsCategoryDao.findById(_guestOsDao.findById(vm.getGuestOSId()).getCategoryId()).getName().equalsIgnoreCase("Windows");
        MigrateCommand mc = new MigrateCommand(vm.getInstanceName(), dest.getHost().getPrivateIpAddress(), isWindows);
        try {
            // The migrate command is issued to the source host (lastHostId == srcHostId).
            MigrateAnswer ma = (MigrateAnswer)_agentMgr.send(vm.getLastHostId(), mc);
            if (!ma.getResult()) {
                s_logger.error("Unable to migrate due to " + ma.getDetails());
                return null;
            }
        } catch (OperationTimedoutException e) {
            if (e.isActive()) {
                s_logger.warn("Active migration command so scheduling a restart for " + vm);
                _haMgr.scheduleRestart(vm, true);
            }
            throw new AgentUnavailableException("Operation timed out on migrating " + vm, dstHostId);
        }

        changeState(vm, VirtualMachine.Event.OperationSucceeded, dstHostId, work, Step.Started);
        try {
            if (!checkVmOnHost(vm, dstHostId)) {
                s_logger.error("Unable to complete migration for " + vm);
                try{
                    _agentMgr.send(srcHostId, new Commands(cleanup(vm.getInstanceName())), null);
                }catch (AgentUnavailableException e) {
                    s_logger.error("AgentUnavailableException while cleanup on source host: " + srcHostId);
                }
                cleanup(vmGuru, new VirtualMachineProfileImpl<T>(vm), work, Event.AgentReportStopped, true, _accountMgr.getSystemUser(), _accountMgr.getSystemAccount());
                return null;
            }
        } catch (OperationTimedoutException e) {
            // NOTE(review): verification timeout is ignored and migration treated as done —
            // confirm this is intentional.
        }

        migrated = true;
        return vm;
    } finally {
        if (!migrated) {
            s_logger.info("Migration was unsuccessful. Cleaning up: " + vm);
            _alertMgr.sendAlert(alertType, fromHost.getDataCenterId(), fromHost.getPodId(), "Unable to migrate vm " + vm.getName() + " from host " + fromHost.getName() + " in zone " + dest.getDataCenter().getName() + " and pod " + dest.getPod().getName(), "Migrate Command failed. Please check logs.");
            try{
                _agentMgr.send(dstHostId, new Commands(cleanup(vm.getInstanceName())), null);
            }catch(AgentUnavailableException ae){
                s_logger.info("Looks like the destination Host is unavailable for cleanup");
            }
            stateTransitTo(vm, Event.OperationFailed, srcHostId);
        }
        work.setStep(Step.Done);
        _workDao.update(work.getId(), work);
    }
}

/**
 * On node start/failover: hand each unfinished work item owned by the given node to HA
 * (restart / check-stop / migration) and mark the item Done.
 */
protected void cancelWorkItems(long nodeId) {
    GlobalLock scanLock = GlobalLock.getInternLock(this.getClass().getName());
    try {
        if (scanLock.lock(3)) {
            try {
                List<ItWorkVO> works = _workDao.listWorkInProgressFor(nodeId);
                for (ItWorkVO work : works) {
                    s_logger.info("Handling unfinished work item: " + work);
                    try {
                        VMInstanceVO vm = _vmDao.findById(work.getInstanceId());
                        if (vm != null) {
                            if (work.getType() == State.Starting) {
                                _haMgr.scheduleRestart(vm, true);
                            } else if (work.getType() == State.Stopping) {
                                _haMgr.scheduleStop(vm, vm.getHostId(), WorkType.CheckStop);
                            } else if (work.getType() == State.Migrating) {
                                _haMgr.scheduleMigration(vm);
                            }
                        }
                        work.setStep(Step.Done);
                        _workDao.update(work.getId(), work);
                    } catch (Exception e) {
                        s_logger.error("Error while handling " + work, e);
                    }
                }
            } finally {
                scanLock.unlock();
            }
        }
    } finally {
        scanLock.releaseRef();
    }
}

@Override
public boolean migrateAway(VirtualMachine.Type vmType, long vmId, long srcHostId) throws InsufficientServerCapacityException, VirtualMachineMigrationException {
    VirtualMachineGuru<?
extends VMInstanceVO> vmGuru = _vmGurus.get(vmType);
    VMInstanceVO vm = vmGuru.findById(vmId);
    if (vm == null) {
        s_logger.debug("Unable to find a VM for " + vmId);
        return true;
    }
    VirtualMachineProfile<VMInstanceVO> profile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);

    Long hostId = vm.getHostId();
    if (hostId == null) {
        s_logger.debug("Unable to migrate because the VM doesn't have a host id: " + vm);
        return true;
    }

    Host host = _hostDao.findById(hostId);
    DataCenterDeployment plan = new DataCenterDeployment(host.getDataCenterId(), host.getPodId(), host.getClusterId(), null, null);
    ExcludeList excludes = new ExcludeList();
    excludes.addHost(hostId);

    DeployDestination dest = null;
    while (true) {
        for (DeploymentPlanner planner : _planners) {
            dest = planner.plan(profile, plan, excludes);
            if (dest != null) {
                if (s_logger.isDebugEnabled()) {
                    s_logger.debug("Planner " + planner + " found " + dest + " for migrating to.");
                }
                break;
            }
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Planner " + planner + " was unable to find anything.");
            }
        }
        if (dest == null) {
            throw new InsufficientServerCapacityException("Unable to find a server to migrate to.", host.getClusterId());
        }
        excludes.addHost(dest.getHost().getId());
        VMInstanceVO vmInstance = null;
        try {
            vmInstance = migrate(vm, srcHostId, dest);
        } catch (ResourceUnavailableException e) {
            s_logger.debug("Unable to migrate to unavailable " + dest);
        } catch (ConcurrentOperationException e) {
            s_logger.debug("Unable to migrate VM due to: " + e.getMessage());
        } catch (ManagementServerException e) {
            s_logger.debug("Unable to migrate VM: " + e.getMessage());
        } catch (VirtualMachineMigrationException e) {
            s_logger.debug("Got VirtualMachineMigrationException, Unable to migrate: " + e.getMessage());
            if(vm.getState() == State.Starting){
                s_logger.debug("VM seems to be still Starting, we should retry migration later");
                throw e;
            }else{
                s_logger.debug("Unable to migrate VM, VM is not in Running or even Starting state, current state: "+vm.getState().toString());
            }
        }
        if (vmInstance != null) {
            return true;
        }
        // Migration failed: fall back to a forced stop of the VM.
        // NOTE(review): every path below returns, so this while(true) never iterates; the
        // excludes-based retry appears unreachable — confirm intent.
        try {
            boolean result = advanceStop(vm, true, _accountMgr.getSystemUser(), _accountMgr.getSystemAccount());
            return result;
        } catch (ResourceUnavailableException e) {
            s_logger.debug("Unable to stop VM due to " + e.getMessage());
        } catch (ConcurrentOperationException e) {
            s_logger.debug("Unable to stop VM due to " + e.getMessage());
        } catch (OperationTimedoutException e) {
            s_logger.debug("Unable to stop VM due to " + e.getMessage());
        }
        return false;
    }
}

// Periodic task that purges old/finished work items from the it_work table.
protected class CleanupTask implements Runnable {
    @Override
    public void run() {
        s_logger.trace("VM Operation Thread Running");
        try {
            _workDao.cleanup(_cleanupWait);
        } catch (Exception e) {
            s_logger.error("VM Operations failed due to ", e);
        }
    }
}

/**
 * Reboots a VM, translating a concurrent-operation failure into an unchecked exception.
 */
@Override
public <T extends VMInstanceVO> T reboot(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account) throws InsufficientCapacityException, ResourceUnavailableException {
    try {
        return advanceReboot(vm, params, caller, account);
    } catch (ConcurrentOperationException e) {
        throw new CloudRuntimeException("Unable to reboot a VM due to concurrent operation", e);
    }
}

/**
 * Sends a RebootCommand to the VM's current host.
 *
 * @return the VM on success, or null when the agent reported failure.
 */
@Override
public <T extends VMInstanceVO> T advanceReboot(T vm, Map<VirtualMachineProfile.Param, Object> params, User caller, Account account) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException {
    T rebootedVm = null;

    DataCenter dc = _configMgr.getZone(vm.getDataCenterId());
    HostPodVO pod = _configMgr.getPod(vm.getPodId());
    Host host = _hostDao.findById(vm.getHostId());
    Cluster cluster = null;
    if (host != null) {
        cluster = _configMgr.getCluster(host.getClusterId());
    }
    DeployDestination dest = new DeployDestination(dc, pod, cluster, host);

    try {
        Commands cmds = new Commands(OnError.Revert);
        cmds.addCommand(new RebootCommand(vm.getName()));
        _agentMgr.send(host.getId(), cmds);

        Answer rebootAnswer = cmds.getAnswer(RebootAnswer.class);
        if (rebootAnswer != null && rebootAnswer.getResult()) {
            rebootedVm = vm;
            return rebootedVm;
        }
        s_logger.info("Unable to reboot VM " + vm + " on " + dest.getHost() + " due to " + (rebootAnswer == null ? " no reboot answer" : rebootAnswer.getDetails()));
    } catch (OperationTimedoutException e) {
        s_logger.warn("Unable to send the reboot command to host " + dest.getHost() + " for the vm " + vm + " due to operation timeout", e);
        throw new CloudRuntimeException("Failed to reboot the vm on host " + dest.getHost());
    }

    return rebootedVm;
}

@Override
public VMInstanceVO findById(VirtualMachine.Type type, long vmId) {
    VirtualMachineGuru<? extends VMInstanceVO> guru = _vmGurus.get(type);
    return guru.findById(vmId);
}

// Builds the command used to clean up (stop) a VM known only by name on an agent.
public Command cleanup(String vmName) {
    return new StopCommand(vmName);
}

/**
 * Builds cleanup/sync commands from an incremental set of VM states reported by a host.
 */
public Commands deltaSync(long hostId, Map<String, State> newStates) {
    Map<Long, AgentVmInfo> states = convertToInfos(newStates);
    Commands commands = new Commands(OnError.Continue);
    boolean nativeHA = _agentMgr.isHostNativeHAEnabled(hostId);
    for (Map.Entry<Long, AgentVmInfo> entry : states.entrySet()) {
        AgentVmInfo info = entry.getValue();
        VMInstanceVO vm = info.vm;
        Command command = null;
        if (vm != null) {
            command = compareState(vm, info, false, nativeHA);
        } else {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Cleaning up a VM that is no longer found: " + info.name);
            }
            command = cleanup(info.name);
        }
        if (command != null) {
            commands.addCommand(command);
        }
    }
    return commands;
}

/**
 * Resolves agent-reported VM names to database ids via each registered guru; entries that
 * resolve to a known VM carry the row, otherwise only the converted id.
 */
protected Map<Long, AgentVmInfo> convertToInfos(final Map<String, State> states) {
    final HashMap<Long, AgentVmInfo> map = new HashMap<Long, AgentVmInfo>();
    if (states == null) {
        return map;
    }
    Collection<VirtualMachineGuru<? extends VMInstanceVO>> vmGurus = _vmGurus.values();
    for (Map.Entry<String, State> entry : states.entrySet()) {
        for (VirtualMachineGuru<?
extends VMInstanceVO> vmGuru : vmGurus) {
            String name = entry.getKey();
            VMInstanceVO vm = vmGuru.findByName(name);
            if (vm != null) {
                map.put(vm.getId(), new AgentVmInfo(entry.getKey(), vmGuru, vm, entry.getValue()));
                break;
            }
            Long id = vmGuru.convertToId(name);
            if (id != null) {
                map.put(id, new AgentVmInfo(entry.getKey(), vmGuru, null, entry.getValue()));
            }
        }
    }
    return map;
}

/**
 * compareState does as its name suggests and compares the states between
 * management server and agent. It returns the cleanup command to send to the
 * agent (or null when nothing should be cleaned up), and may schedule HA
 * restart/stop work or transition the server-side state as a side effect.
 */
protected Command compareState(VMInstanceVO vm, final AgentVmInfo info, final boolean fullSync, boolean nativeHA) {
    State agentState = info.state;
    final String agentName = info.name;
    final State serverState = vm.getState();
    final String serverName = vm.getName();

    VirtualMachineGuru<VMInstanceVO> vmGuru = getVmGuru(vm);

    Command command = null;
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("VM " + serverName + ": server state = " + serverState.toString() + " and agent state = " + agentState.toString());
    }

    if (agentState == State.Error) {
        // Treat agent-reported Error as Stopped, and raise a storage-failure alert.
        agentState = State.Stopped;

        short alertType = AlertManager.ALERT_TYPE_USERVM;
        if (VirtualMachine.Type.DomainRouter.equals(vm.getType())) {
            alertType = AlertManager.ALERT_TYPE_DOMAIN_ROUTER;
        } else if (VirtualMachine.Type.ConsoleProxy.equals(vm.getType())) {
            alertType = AlertManager.ALERT_TYPE_CONSOLE_PROXY;
        }

        HostPodVO podVO = _podDao.findById(vm.getPodId());
        DataCenterVO dcVO = _dcDao.findById(vm.getDataCenterId());
        HostVO hostVO = _hostDao.findById(vm.getHostId());

        String hostDesc = "name: " + hostVO.getName() + " (id:" + hostVO.getId() + "), availability zone: " + dcVO.getName() + ", pod: " + podVO.getName();
        _alertMgr.sendAlert(alertType, vm.getDataCenterId(), vm.getPodId(), "VM (name: " + vm.getName() + ", id: " + vm.getId() + ") stopped on host " + hostDesc + " due to storage failure", "Virtual Machine " + vm.getName() + " (id: " + vm.getId() + ") running on host [" + vm.getHostId() + "] stopped due to storage failure.");
    }

    // if (serverState == State.Migrating) {
    // s_logger.debug("Skipping vm in migrating state: " + vm.toString());
    // return null;

    if (agentState == serverState) {
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("Both states are " + agentState + " for " + vm);
        }
        assert (agentState == State.Stopped || agentState == State.Running) : "If the states we send up is changed, this must be changed.";
        if (agentState == State.Running) {
            stateTransitTo(vm, VirtualMachine.Event.AgentReportRunning, vm.getHostId());
            // FIXME: What if someone comes in and sets it to stopping? Then what?
            return null;
        }
        s_logger.debug("State matches but the agent said stopped so let's send a cleanup command anyways.");
        return cleanup(agentName);
    }

    if (agentState == State.Shutdowned) {
        if (serverState == State.Running || serverState == State.Starting || serverState == State.Stopping) {
            try {
                advanceStop(vm, true, _accountMgr.getSystemUser(), _accountMgr.getSystemAccount());
            } catch (AgentUnavailableException e) {
                assert(false) : "How do we hit this with forced on?";
                return null;
            } catch (OperationTimedoutException e) {
                assert(false) : "How do we hit this with forced on?";
                return null;
            } catch (ConcurrentOperationException e) {
                assert(false) : "How do we hit this with forced on?";
                return null;
            }
        } else {
            s_logger.debug("Sending cleanup to a shutdowned vm: " + agentName);
            command = cleanup(agentName);
        }
    } else if (agentState == State.Stopped) {
        // This state means the VM on the agent was detected previously
        // and now is gone. This is slightly different than if the VM
        // was never completed but we still send down a Stop Command
        // to ensure there's cleanup.
        if (serverState == State.Running) {
            // Our records showed that it should be running so let's restart it.
            _haMgr.scheduleRestart(vm, false);
        } else if (serverState == State.Stopping) {
            _haMgr.scheduleStop(vm, vm.getHostId(), WorkType.ForceStop);
            s_logger.debug("Scheduling a check stop for VM in stopping mode: " + vm);
        } else if (serverState == State.Starting) {
            s_logger.debug("Ignoring VM in starting mode: " + vm.getName());
            _haMgr.scheduleRestart(vm, false);
        }
        command = cleanup(agentName);
    } else if (agentState == State.Running) {
        if (serverState == State.Starting) {
            if (fullSync) {
                // Agent says Running while we were mid-Start: finish the start sequence
                // server-side and mark the VM Running.
                s_logger.debug("VM state is starting on full sync so updating it to running");
                vm = findById(vm.getType(), vm.getId());
                stateTransitTo(vm, Event.AgentReportRunning, vm.getHostId());
                s_logger.debug("VM's " + vm + " state is starting on full sync so updating it to Running");
                vm = vmGuru.findById(vm.getId());

                VirtualMachineProfile<VMInstanceVO> profile = new VirtualMachineProfileImpl<VMInstanceVO>(vm);
                Commands cmds = new Commands(OnError.Revert);

                s_logger.debug("Finalizing commands that need to be send to complete Start process for the vm " + vm);
                if (vmGuru.finalizeCommandsOnStart(cmds, profile)) {
                    if (cmds.size() != 0) {
                        try {
                            _agentMgr.send(vm.getHostId(), cmds);
                        } catch (OperationTimedoutException e){
                            s_logger.error("Exception during update for running vm: " + vm, e);
                            return null;
                        } catch (ResourceUnavailableException e) {
                            s_logger.error("Exception during update for running vm: " + vm, e);
                            return null;
                        }
                    }

                    if (vmGuru.finalizeStart(profile, vm.getHostId(), cmds, null)) {
                        stateTransitTo(vm, Event.AgentReportRunning, vm.getHostId());
                    } else {
                        s_logger.error("Exception during update for running vm: " + vm);
                        return null;
                    }
                } else {
                    s_logger.error("Unable to finalize commands on start for vm: " + vm);
                    return null;
                }
            }
        } else if (serverState == State.Stopping) {
            s_logger.debug("Scheduling a stop command for " + vm);
            _haMgr.scheduleStop(vm, vm.getHostId(), WorkType.Stop);
        } else {
            s_logger.debug("VM state is in stopped so stopping it on the agent");
            command = cleanup(agentName);
        }
    }
    return command;
}

/**
 * Reconciles ALL VMs the database places on this host against the agent's full state
 * report; leftovers in the report (VMs we place elsewhere or nowhere) get cleanup commands.
 */
public Commands fullSync(final long hostId, final Map<String, State> newStates) {
    Commands commands = new Commands(OnError.Continue);
    final List<? extends VMInstanceVO> vms = _vmDao.listByHostId(hostId);
    s_logger.debug("Found " + vms.size() + " VMs for host " + hostId);
    Map<Long, AgentVmInfo> infos = convertToInfos(newStates);
    boolean nativeHA = _agentMgr.isHostNativeHAEnabled(hostId);
    for (VMInstanceVO vm : vms) {
        AgentVmInfo info = infos.remove(vm.getId());
        VMInstanceVO castedVm = null;
        if (info == null) {
            // Host did not report this VM: treat it as Stopped on the agent side.
            info = new AgentVmInfo(vm.getInstanceName(), getVmGuru(vm), vm, State.Stopped);
            castedVm = info.guru.findById(vm.getId());
        } else {
            castedVm = info.vm;
        }
        Command command = compareState(castedVm, info, true, nativeHA);
        if (command != null) {
            commands.addCommand(command);
        }
    }

    for (final AgentVmInfo left : infos.values()) {
        if (nativeHA) {
            for (VirtualMachineGuru<? extends VMInstanceVO> vmGuru : _vmGurus.values()) {
                VMInstanceVO vm = vmGuru.findByName(left.name);
                if (vm == null) {
                    s_logger.warn("Stopping a VM that we have no record of: " + left.name);
                    commands.addCommand(cleanup(left.name));
                } else {
                    Command command = compareState(vm, left, true, nativeHA);
                    if (command != null) {
                        commands.addCommand(command);
                    }
                }
            }
        } else {
            s_logger.warn("Stopping a VM that we have no record of: " + left.name);
            commands.addCommand(cleanup(left.name));
        }
    }

    return commands;
}

@Override
public boolean isRecurring() {
    return false;
}

@Override
public boolean processAnswers(long agentId, long seq, Answer[] answers) {
    for (final Answer answer : answers) {
        if (!answer.getResult()) {
            s_logger.warn("Cleanup failed due to " + answer.getDetails());
        } else {
            if (s_logger.isDebugEnabled()) {
                s_logger.debug("Cleanup succeeded. 
Details " + answer.getDetails()); } } } return true; } @Override public boolean processTimeout(long agentId, long seq) { return true; } @Override public int getTimeout() { return -1; } @Override public boolean processCommands(long agentId, long seq, Command[] cmds) { boolean processed = false; for (Command cmd : cmds) { if (cmd instanceof PingRoutingCommand) { PingRoutingCommand ping = (PingRoutingCommand)cmd; if (ping.getNewStates().size() > 0) { Commands commands = deltaSync(agentId, ping.getNewStates()); if (commands.size() > 0) { try { _agentMgr.send(agentId, commands, this); } catch (final AgentUnavailableException e) { s_logger.warn("Agent is now unavailable", e); } } } processed = true; } } return processed; } @Override public AgentControlAnswer processControlCommand(long agentId, AgentControlCommand cmd) { return null; } @Override public boolean processDisconnect(long agentId, Status state) { return true; } @Override public void processConnect(HostVO agent, StartupCommand cmd) throws ConnectionException { if (!(cmd instanceof StartupRoutingCommand)) { return; } long agentId = agent.getId(); StartupRoutingCommand startup = (StartupRoutingCommand)cmd; Commands commands = fullSync(agentId, startup.getVmStates()); if (commands.size() > 0) { s_logger.debug("Sending clean commands to the agent"); try { boolean error = false; Answer[] answers = _agentMgr.send(agentId, commands); for (Answer answer : answers) { if (!answer.getResult()) { s_logger.warn("Unable to stop a VM due to " + answer.getDetails()); error = true; } } if (error) { throw new ConnectionException(true, "Unable to stop VMs"); } } catch (final AgentUnavailableException e) { s_logger.warn("Agent is unavailable now", e); throw new ConnectionException(true, "Unable to sync", e); } catch (final OperationTimedoutException e) { s_logger.warn("Agent is unavailable now", e); throw new ConnectionException(true, "Unable to sync", e); } } } protected class TransitionTask implements Runnable { @Override public 
void run() { GlobalLock lock = GlobalLock.getInternLock("TransitionChecking"); if (lock == null) { s_logger.debug("Couldn't get the global lock"); return; } if (!lock.lock(30)) { s_logger.debug("Couldn't lock the db"); return; } try { lock.addRef(); List<VMInstanceVO> instances = _vmDao.findVMInTransition(new Date(new Date().getTime() - (_operationTimeout * 1000)), State.Starting, State.Stopping); for (VMInstanceVO instance : instances) { State state = instance.getState(); if (state == State.Stopping) { _haMgr.scheduleStop(instance, instance.getHostId(), WorkType.CheckStop); } else if (state == State.Starting) { _haMgr.scheduleRestart(instance, true); } } } catch (Exception e) { s_logger.warn("Caught the following exception on transition checking", e); } finally { StackMaid.current().exitCleanup(); lock.unlock(); } } } protected class AgentVmInfo { public String name; public State state; public VMInstanceVO vm; public VirtualMachineGuru<VMInstanceVO> guru; @SuppressWarnings("unchecked") public AgentVmInfo(String name, VirtualMachineGuru<? extends VMInstanceVO> guru, VMInstanceVO vm, State state) { this.name = name; this.state = state; this.vm = vm; this.guru = (VirtualMachineGuru<VMInstanceVO>)guru; } } }
package com.cloud.vm; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import javax.ejb.Local; import javax.naming.ConfigurationException; import org.apache.log4j.Logger; import com.cloud.agent.AgentManager; import com.cloud.agent.AgentManager.OnError; import com.cloud.agent.api.Answer; import com.cloud.agent.api.StartAnswer; import com.cloud.agent.api.StartCommand; import com.cloud.agent.api.StopAnswer; import com.cloud.agent.api.StopCommand; import com.cloud.agent.api.to.VirtualMachineTO; import com.cloud.agent.manager.Commands; import com.cloud.cluster.ClusterManager; import com.cloud.configuration.Config; import com.cloud.configuration.dao.ConfigurationDao; import com.cloud.dc.DataCenter; import com.cloud.deploy.DataCenterDeployment; import com.cloud.deploy.DeployDestination; import com.cloud.deploy.DeploymentPlan; import com.cloud.deploy.DeploymentPlanner; import com.cloud.deploy.DeploymentPlanner.ExcludeList; import com.cloud.domain.dao.DomainDao; import com.cloud.event.EventTypes; import com.cloud.event.UsageEventVO; import com.cloud.event.dao.UsageEventDao; import com.cloud.exception.AgentUnavailableException; import com.cloud.exception.ConcurrentOperationException; import com.cloud.exception.InsufficientCapacityException; import com.cloud.exception.InsufficientServerCapacityException; import com.cloud.exception.OperationTimedoutException; import com.cloud.exception.ResourceUnavailableException; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.HypervisorGuru; import com.cloud.network.NetworkManager; import com.cloud.network.NetworkVO; import com.cloud.service.ServiceOfferingVO; import com.cloud.service.dao.ServiceOfferingDao; import com.cloud.storage.DiskOfferingVO; import com.cloud.storage.Storage.ImageFormat; import 
com.cloud.storage.StorageManager; import com.cloud.storage.VMTemplateVO; import com.cloud.storage.Volume; import com.cloud.storage.Volume.VolumeType; import com.cloud.storage.dao.VMTemplateDao; import com.cloud.user.Account; import com.cloud.user.AccountManager; import com.cloud.user.User; import com.cloud.user.dao.AccountDao; import com.cloud.user.dao.UserDao; import com.cloud.utils.Journal; import com.cloud.utils.NumbersUtil; import com.cloud.utils.Pair; import com.cloud.utils.Ternary; import com.cloud.utils.component.Adapters; import com.cloud.utils.component.ComponentLocator; import com.cloud.utils.component.Inject; import com.cloud.utils.concurrency.NamedThreadFactory; import com.cloud.utils.db.DB; import com.cloud.utils.db.Transaction; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.fsm.StateListener; import com.cloud.utils.fsm.StateMachine2; import com.cloud.vm.ItWorkVO.Step; import com.cloud.vm.VirtualMachine.Event; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.dao.ConsoleProxyDao; import com.cloud.vm.dao.DomainRouterDao; import com.cloud.vm.dao.NicDao; import com.cloud.vm.dao.SecondaryStorageVmDao; import com.cloud.vm.dao.UserVmDao; import com.cloud.vm.dao.VMInstanceDao; @Local(value=VirtualMachineManager.class) public class VirtualMachineManagerImpl implements VirtualMachineManager { private static final Logger s_logger = Logger.getLogger(VirtualMachineManagerImpl.class); String _name; @Inject protected StorageManager _storageMgr; @Inject protected NetworkManager _networkMgr; @Inject protected AgentManager _agentMgr; @Inject protected VMInstanceDao _vmDao; @Inject protected ServiceOfferingDao _offeringDao; @Inject protected VMTemplateDao _templateDao; @Inject protected UserDao _userDao; @Inject protected AccountDao _accountDao; @Inject protected DomainDao _domainDao; @Inject protected ClusterManager _clusterMgr; @Inject protected ItWorkDao _workDao; @Inject protected UserVmDao _userVmDao; @Inject protected 
DomainRouterDao _routerDao; @Inject protected ConsoleProxyDao _consoleDao; @Inject protected SecondaryStorageVmDao _secondaryDao; @Inject protected UsageEventDao _usageEventDao; @Inject protected NicDao _nicsDao; @Inject protected AccountManager _accountMgr; @Inject(adapter=DeploymentPlanner.class) protected Adapters<DeploymentPlanner> _planners; @Inject(adapter=StateListener.class) protected Adapters<StateListener<State, VirtualMachine.Event, VMInstanceVO>> _stateListner; Map<VirtualMachine.Type, VirtualMachineGuru<? extends VMInstanceVO>> _vmGurus = new HashMap<VirtualMachine.Type, VirtualMachineGuru<? extends VMInstanceVO>>(); Map<HypervisorType, HypervisorGuru> _hvGurus = new HashMap<HypervisorType, HypervisorGuru>(); protected StateMachine2<State, VirtualMachine.Event, VMInstanceVO> _stateMachine; ScheduledExecutorService _executor = null; protected int _retry; protected long _nodeId; protected long _cleanupWait; protected long _cleanupInterval; protected long _cancelWait; protected long _opWaitInterval; protected int _lockStateRetry; @Override public <T extends VMInstanceVO> void registerGuru(VirtualMachine.Type type, VirtualMachineGuru<T> guru) { synchronized(_vmGurus) { _vmGurus.put(type, guru); } } @Override @DB public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering, Pair<? 
extends DiskOfferingVO, Long> rootDiskOffering, List<Pair<DiskOfferingVO, Long>> dataDiskOfferings, List<Pair<NetworkVO, NicProfile>> networks, Map<String, Object> params, DeploymentPlan plan, HypervisorType hyperType, Account owner) throws InsufficientCapacityException { if (s_logger.isDebugEnabled()) { s_logger.debug("Allocating entries for VM: " + vm); } VirtualMachineProfileImpl<T> vmProfile = new VirtualMachineProfileImpl<T>(vm, template, serviceOffering, owner, params); vm.setDataCenterId(plan.getDataCenterId()); if (plan.getPodId() != null) { vm.setPodId(plan.getPodId()); } assert (plan.getClusterId() == null && plan.getPoolId() == null) : "We currently don't support cluster and pool preset yet"; @SuppressWarnings("unchecked") VirtualMachineGuru<T> guru = (VirtualMachineGuru<T>)_vmGurus.get(vm.getType()); Transaction txn = Transaction.currentTxn(); txn.start(); vm = guru.persist(vm); if (s_logger.isDebugEnabled()) { s_logger.debug("Allocating nics for " + vm); } try { _networkMgr.allocate(vmProfile, networks); } catch (ConcurrentOperationException e) { throw new CloudRuntimeException("Concurrent operation while trying to allocate resources for the VM", e); } if (dataDiskOfferings == null) { dataDiskOfferings = new ArrayList<Pair<DiskOfferingVO, Long>>(0); } if (s_logger.isDebugEnabled()) { s_logger.debug("Allocaing disks for " + vm); } if (template.getFormat() == ImageFormat.ISO) { _storageMgr.allocateRawVolume(VolumeType.ROOT, "ROOT-" + vm.getId(), rootDiskOffering.first(), rootDiskOffering.second(), vm, owner); } else { _storageMgr.allocateTemplatedVolume(VolumeType.ROOT, "ROOT-" + vm.getId(), rootDiskOffering.first(), template, vm, owner); } for (Pair<DiskOfferingVO, Long> offering : dataDiskOfferings) { _storageMgr.allocateRawVolume(VolumeType.DATADISK, "DATA-" + vm.getId(), offering.first(), offering.second(), vm, owner); } stateTransitTo(vm, Event.OperationSucceeded, null); txn.commit(); if (s_logger.isDebugEnabled()) { s_logger.debug("Allocation 
completed for VM: " + vm); } return vm; } protected void reserveNics(VirtualMachineProfile<? extends VMInstanceVO> vmProfile, DeployDestination dest, ReservationContext context) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException { // List<NicVO> nics = _nicsDao.listBy(vmProfile.getId()); // for (NicVO nic : nics) { // Pair<NetworkGuru, NetworkVO> implemented = _networkMgr.implementNetwork(nic.getNetworkId(), dest, context); // NetworkGuru concierge = implemented.first(); // NetworkVO network = implemented.second(); // NicProfile profile = null; // if (nic.getReservationStrategy() == ReservationStrategy.Start) { // nic.setState(Resource.State.Reserving); // nic.setReservationId(context.getReservationId()); // _nicsDao.update(nic.getId(), nic); // URI broadcastUri = nic.getBroadcastUri(); // if (broadcastUri == null) { // network.getBroadcastUri(); // URI isolationUri = nic.getIsolationUri(); // profile = new NicProfile(nic, network, broadcastUri, isolationUri); // concierge.reserve(profile, network, vmProfile, dest, context); // nic.setIp4Address(profile.getIp4Address()); // nic.setIp6Address(profile.getIp6Address()); // nic.setMacAddress(profile.getMacAddress()); // nic.setIsolationUri(profile.getIsolationUri()); // nic.setBroadcastUri(profile.getBroadCastUri()); // nic.setReserver(concierge.getName()); // nic.setState(Resource.State.Reserved); // nic.setNetmask(profile.getNetmask()); // nic.setGateway(profile.getGateway()); // nic.setAddressFormat(profile.getFormat()); // _nicsDao.update(nic.getId(), nic); // } else { // profile = new NicProfile(nic, network, nic.getBroadcastUri(), nic.getIsolationUri()); // for (NetworkElement element : _networkElements) { // if (s_logger.isDebugEnabled()) { // s_logger.debug("Asking " + element.getName() + " to prepare for " + nic); // element.prepare(network, profile, vmProfile, dest, context); // vmProfile.addNic(profile); // _networksDao.changeActiveNicsBy(network.getId(), 1); } 
protected void prepareNics(VirtualMachineProfile<? extends VMInstanceVO> vmProfile, DeployDestination dest, ReservationContext context) { } @Override public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering, Long rootSize, Pair<DiskOfferingVO, Long> dataDiskOffering, List<Pair<NetworkVO, NicProfile>> networks, DeploymentPlan plan, HypervisorType hyperType, Account owner) throws InsufficientCapacityException { List<Pair<DiskOfferingVO, Long>> diskOfferings = new ArrayList<Pair<DiskOfferingVO, Long>>(1); if (dataDiskOffering != null) { diskOfferings.add(dataDiskOffering); } return allocate(vm, template, serviceOffering, new Pair<DiskOfferingVO, Long>(serviceOffering, rootSize), diskOfferings, networks, null, plan, hyperType, owner); } @Override public <T extends VMInstanceVO> T allocate(T vm, VMTemplateVO template, ServiceOfferingVO serviceOffering, List<Pair<NetworkVO, NicProfile>> networks, DeploymentPlan plan, HypervisorType hyperType, Account owner) throws InsufficientCapacityException { return allocate(vm, template, serviceOffering, new Pair<DiskOfferingVO, Long>(serviceOffering, null), null, networks, null, plan, hyperType, owner); } @SuppressWarnings("unchecked") private <T extends VMInstanceVO> VirtualMachineGuru<T> getVmGuru(T vm) { return (VirtualMachineGuru<T>)_vmGurus.get(vm.getType()); } @Override public <T extends VMInstanceVO> boolean expunge(T vm, User caller, Account account) throws ResourceUnavailableException { try { if (advanceExpunge(vm, caller, account)) { //Mark vms as removed remove(vm, _accountMgr.getSystemUser(), account); return true; } else { s_logger.info("Did not expunge " + vm); return false; } } catch (OperationTimedoutException e) { throw new CloudRuntimeException("Operation timed out", e); } catch (ConcurrentOperationException e) { throw new CloudRuntimeException("Concurrent operation ", e); } } @Override public <T extends VMInstanceVO> boolean advanceExpunge(T vm, User caller, Account 
account) throws ResourceUnavailableException, OperationTimedoutException, ConcurrentOperationException { if (vm == null || vm.getRemoved() != null) { if (s_logger.isDebugEnabled()) { s_logger.debug("Unable to find vm or vm is destroyed: " + vm); } return true; } if (!this.advanceStop(vm, false, caller, account)) { if (s_logger.isDebugEnabled()) { s_logger.debug("Unable to stop the VM so we can't expunge it."); } } if (!stateTransitTo(vm, VirtualMachine.Event.ExpungeOperation, vm.getHostId())) { s_logger.debug("Unable to destroy the vm because it is not in the correct state: " + vm.toString()); return false; } if (s_logger.isDebugEnabled()) { s_logger.debug("Destroying vm " + vm); } VirtualMachineProfile<T> profile = new VirtualMachineProfileImpl<T>(vm); _networkMgr.cleanupNics(profile); //Clean up volumes based on the vm's instance id _storageMgr.cleanupVolumes(vm.getId()); if (s_logger.isDebugEnabled()) { s_logger.debug("Expunged " + vm); } return true; } @Override public boolean start() { _executor.scheduleAtFixedRate(new CleanupTask(), _cleanupInterval, _cleanupInterval, TimeUnit.SECONDS); return true; } @Override public boolean stop() { return true; } @Override public boolean configure(String name, Map<String, Object> xmlParams) throws ConfigurationException { _name = name; ComponentLocator locator = ComponentLocator.getCurrentLocator(); ConfigurationDao configDao = locator.getDao(ConfigurationDao.class); Map<String, String> params = configDao.getConfiguration(xmlParams); _retry = NumbersUtil.parseInt(params.get(Config.StartRetry.key()), 10); ReservationContextImpl.setComponents(_userDao, _domainDao, _accountDao); VirtualMachineProfileImpl.setComponents(_offeringDao, _templateDao, _accountDao); Adapters<HypervisorGuru> hvGurus = locator.getAdapters(HypervisorGuru.class); for (HypervisorGuru guru : hvGurus) { _hvGurus.put(guru.getHypervisorType(), guru); } _cancelWait = NumbersUtil.parseLong(params.get(Config.VmOpCancelInterval.key()), 3600); _cleanupWait = 
NumbersUtil.parseLong(params.get(Config.VmOpCleanupWait.key()), 3600); _cleanupInterval = NumbersUtil.parseLong(params.get(Config.VmOpCleanupInterval.key()), 86400) * 1000; _opWaitInterval = NumbersUtil.parseLong(params.get(Config.VmOpWaitInterval.key()), 120) * 1000; _lockStateRetry = NumbersUtil.parseInt(params.get(Config.VmOpLockStateRetry.key()), 5); _executor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("Vm-Operations-Cleanup")); _nodeId = _clusterMgr.getId(); setStateMachine(); return true; } @Override public String getName() { return _name; } protected VirtualMachineManagerImpl() { } @Override public <T extends VMInstanceVO> T start(T vm, Map<String, Object> params, User caller, Account account) throws InsufficientCapacityException, ResourceUnavailableException { try { return advanceStart(vm, params, caller, account); } catch (ConcurrentOperationException e) { throw new CloudRuntimeException("Unable to start a VM due to concurrent operation", e); } } private Answer getStartAnswer(Answer[] answers) { for (Answer ans : answers) { if (ans instanceof StartAnswer) { return ans; } } assert false : "Why there is no Start Answer???"; return null; } protected boolean checkWorkItems(VMInstanceVO vm, State state) throws ConcurrentOperationException { while (true) { ItWorkVO vo = _workDao.findByInstance(vm.getId(), state); if (vo == null) { if (s_logger.isDebugEnabled()) { s_logger.debug("Unable to find work for " + vm); } return true; } if (vo.getStep() == Step.Done || vo.getStep() == Step.Cancelled) { if (s_logger.isDebugEnabled()) { s_logger.debug("Work for " + vm + " is " + vo.getStep()); } return true; } if (vo.getSecondsTaskIsInactive() > _cancelWait) { s_logger.warn("The task item for vm " + vm + " has been inactive for " + vo.getSecondsTaskIsInactive()); return false; } try { Thread.sleep(_opWaitInterval); } catch (InterruptedException e) { s_logger.info("Waiting for " + vm + " but is interrupted"); throw new ConcurrentOperationException("Waiting 
for " + vm + " but is interrupted"); } s_logger.debug("Waiting some more to make sure there's no activity on " + vm); } } @DB protected <T extends VMInstanceVO> Ternary<T, ReservationContext, ItWorkVO> changeToStartState(VirtualMachineGuru<T> vmGuru, T vm, User caller, Account account) throws ConcurrentOperationException { long vmId = vm.getId(); ItWorkVO work = new ItWorkVO(UUID.randomUUID().toString(), _nodeId, State.Starting, vm.getId()); int retry = _lockStateRetry; while (retry Transaction txn = Transaction.currentTxn(); txn.start(); if (stateTransitTo(vm, Event.StartRequested, null, work.getId())) { Journal journal = new Journal.LogJournal("Creating " + vm, s_logger); work = _workDao.persist(work); ReservationContextImpl context = new ReservationContextImpl(work.getId(), journal, caller, account); if (s_logger.isDebugEnabled()) { s_logger.debug("Successfully transitioned to start state for " + vm + " reservation id = " + work.getId()); } txn.commit(); return new Ternary<T, ReservationContext, ItWorkVO>(vmGuru.findById(vmId), context, work); } if (s_logger.isDebugEnabled()) { s_logger.debug("Determining why we're unable to update the state to Starting for " + vm); } VMInstanceVO instance = _vmDao.lockRow(vmId, true); if (instance == null) { throw new ConcurrentOperationException("Unable to acquire lock on " + vm); } State state = instance.getState(); if (state == State.Running) { if (s_logger.isDebugEnabled()) { s_logger.debug("VM is already started: " + vm); } txn.commit(); return null; } if (state.isTransitional()) { if (!checkWorkItems(vm, state)) { throw new ConcurrentOperationException("There are concurrent operations on the VM " + vm); } else { continue; } } if (state != State.Stopped) { s_logger.debug("VM " + vm + " is not in a state to be started: " + state); txn.commit(); return null; } } throw new ConcurrentOperationException("Unable to change the state of " + vm); } @Override public <T extends VMInstanceVO> T advanceStart(T vm, Map<String, Object> 
params, User caller, Account account) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException { long vmId = vm.getId(); VirtualMachineGuru<T> vmGuru = getVmGuru(vm); Ternary<T, ReservationContext, ItWorkVO> start = changeToStartState(vmGuru, vm, caller, account); if (start == null) { return vmGuru.findById(vmId); } vm = start.first(); ReservationContext ctx = start.second(); ItWorkVO work = start.third(); T startedVm = null; try { ServiceOfferingVO offering = _offeringDao.findById(vm.getServiceOfferingId()); VMTemplateVO template = _templateDao.findById(vm.getTemplateId()); DataCenterDeployment plan = new DataCenterDeployment(vm.getDataCenterId(), vm.getPodId(), null, null); HypervisorGuru hvGuru = _hvGurus.get(vm.getHypervisorType()); VirtualMachineProfileImpl<T> vmProfile = new VirtualMachineProfileImpl<T>(vm, template, offering, null, params); Journal journal = start.second().getJournal(); ExcludeList avoids = new ExcludeList(); int retry = _retry; while (retry-- != 0) { // It's != so that it can match -1. 
DeployDestination dest = null; for (DeploymentPlanner planner : _planners) { dest = planner.plan(vmProfile, plan, avoids); if (dest != null) { avoids.addHost(dest.getHost().getId()); journal.record("Deployment found ", vmProfile, dest); break; } } if (dest == null) { throw new InsufficientServerCapacityException("Unable to create a deployment for " + vmProfile, DataCenter.class, plan.getDataCenterId()); } stateTransitTo(vm, Event.OperationRetry, dest.getHost().getId()); try { _storageMgr.prepare(vmProfile, dest); _networkMgr.prepare(vmProfile, dest, ctx); } catch (ResourceUnavailableException e) { if (!avoids.add(e)) { if (e.getScope() == Volume.class || e.getScope() == Nic.class) { throw e; } else { throw new CloudRuntimeException("Resource is not available to start the VM.", e); } } s_logger.info("Unable to contact resource.", e); continue; } catch (InsufficientCapacityException e) { if (!avoids.add(e)) { if (e.getScope() == Volume.class || e.getScope() == Nic.class) { throw e; } else { throw new CloudRuntimeException("Insufficient capacity to start the VM.", e); } } s_logger.info("Insufficient capacity ", e); continue; } catch (RuntimeException e) { s_logger.warn("Failed to start instance " + vm, e); throw new CloudRuntimeException("Failed to start " + vm, e); } vmGuru.finalizeVirtualMachineProfile(vmProfile, dest, ctx); VirtualMachineTO vmTO = hvGuru.implement(vmProfile); Commands cmds = new Commands(OnError.Revert); cmds.addCommand(new StartCommand(vmTO)); vmGuru.finalizeDeployment(cmds, vmProfile, dest, ctx); vm.setPodId(dest.getPod().getId()); try { Answer[] answers = _agentMgr.send(dest.getHost().getId(), cmds); if (getStartAnswer(answers).getResult() && vmGuru.finalizeStart(cmds, vmProfile, dest, ctx)) { if (!stateTransitTo(vm, Event.OperationSucceeded, dest.getHost().getId())) { throw new CloudRuntimeException("Unable to transition to a new state."); } startedVm = vm; break; } s_logger.info("Unable to start VM on " + dest.getHost() + " due to " + 
answers[0].getDetails()); } catch (AgentUnavailableException e) { s_logger.debug("Unable to send the start command to host " + dest.getHost()); continue; } catch (OperationTimedoutException e) { s_logger.debug("Unable to send the start command to host " + dest.getHost()); continue; } } if (s_logger.isDebugEnabled()) { s_logger.debug("Creation complete for VM " + vm); } } finally { if (startedVm == null) { stateTransitTo(vm, Event.OperationFailed, null); } work.setStep(Step.Done); _workDao.update(work.getId(), work); } return startedVm; } @Override public <T extends VMInstanceVO> boolean stop(T vm, User user, Account account) throws ResourceUnavailableException { try { return advanceStop(vm, false, user, account); } catch (OperationTimedoutException e) { throw new AgentUnavailableException("Unable to stop vm because the operation to stop timed out", vm.getHostId(), e); } catch (ConcurrentOperationException e) { throw new CloudRuntimeException("Unable to stop vm because of a concurrent operation", e); } } @Override public <T extends VMInstanceVO> boolean advanceStop(T vm, boolean forced, User user, Account account) throws AgentUnavailableException, OperationTimedoutException, ConcurrentOperationException { State state = vm.getState(); if (state == State.Stopped) { if (s_logger.isDebugEnabled()) { s_logger.debug("VM is already stopped: " + vm); } return true; } if (state == State.Creating || state == State.Destroyed || state == State.Expunging || state == State.Error) { s_logger.debug("Stopped called on " + vm + " but the state is " + state); return true; } if (!stateTransitTo(vm, Event.StopRequested, vm.getHostId())) { throw new ConcurrentOperationException("VM is being operated on by someone else."); } if (vm.getHostId() == null) { s_logger.debug("Host id is null so we can't stop it. 
How did we get into here?"); return false; } String reservationId = vm.getReservationId(); StopCommand stop = new StopCommand(vm, vm.getInstanceName(), null); boolean stopped = false; StopAnswer answer = null; try { answer = (StopAnswer)_agentMgr.send(vm.getHostId(), stop); stopped = answer.getResult(); if (!stopped) { throw new CloudRuntimeException("Unable to stop the virtual machine due to " + answer.getDetails()); } else { UsageEventVO usageEvent = new UsageEventVO(EventTypes.EVENT_VM_STOP, vm.getAccountId(), vm.getDataCenterId(), vm.getId(), vm.getName(), vm.getServiceOfferingId(), vm.getTemplateId(), null); _usageEventDao.persist(usageEvent); } } finally { if (!stopped) { if (!forced) { stateTransitTo(vm, Event.OperationFailed, vm.getHostId()); } else { s_logger.warn("Unable to actually stop " + vm + " but continue with release because it's a force stop"); } } } if (s_logger.isDebugEnabled()) { s_logger.debug(vm + " is stopped on the host. Proceeding to release resource held."); } boolean cleanup = false; VirtualMachineProfile<T> profile = new VirtualMachineProfileImpl<T>(vm); try { _networkMgr.release(profile, forced); s_logger.debug("Successfully released network resources for the vm " + vm); } catch (Exception e) { s_logger.warn("Unable to release some network resources.", e); cleanup = true; } try { _storageMgr.release(profile); s_logger.debug("Successfully released storage resources for the vm " + vm); } catch (Exception e) { s_logger.warn("Unable to release storage resources.", e); cleanup = true; } @SuppressWarnings("unchecked") VirtualMachineGuru<T> guru = (VirtualMachineGuru<T>)_vmGurus.get(vm.getType()); try { guru.finalizeStop(profile, vm.getHostId(), vm.getReservationId(), answer); } catch (Exception e) { s_logger.warn("Guru " + guru.getClass() + " has trouble processing stop "); cleanup = true; } vm.setReservationId(null); stateTransitTo(vm, Event.OperationSucceeded, null); if (cleanup) { ItWorkVO work = new ItWorkVO(reservationId, _nodeId, 
State.Stopping, vm.getId()); _workDao.persist(work); } return stopped; } private void setStateMachine() { _stateMachine = new StateMachine2<State, VirtualMachine.Event, VMInstanceVO>(); _stateMachine.addTransition(null, VirtualMachine.Event.CreateRequested, State.Creating); _stateMachine.addTransition(State.Creating, VirtualMachine.Event.OperationSucceeded, State.Stopped); _stateMachine.addTransition(State.Creating, VirtualMachine.Event.OperationFailed, State.Error); _stateMachine.addTransition(State.Stopped, VirtualMachine.Event.StartRequested, State.Starting); _stateMachine.addTransition(State.Error, VirtualMachine.Event.DestroyRequested, State.Expunging); _stateMachine.addTransition(State.Error, VirtualMachine.Event.ExpungeOperation, State.Expunging); _stateMachine.addTransition(State.Stopped, VirtualMachine.Event.DestroyRequested, State.Destroyed); _stateMachine.addTransition(State.Stopped, VirtualMachine.Event.StopRequested, State.Stopped); _stateMachine.addTransition(State.Stopped, VirtualMachine.Event.AgentReportStopped, State.Stopped); _stateMachine.addTransition(State.Stopped, VirtualMachine.Event.OperationFailed, State.Error); _stateMachine.addTransition(State.Stopped, VirtualMachine.Event.ExpungeOperation, State.Expunging); _stateMachine.addTransition(State.Stopped, VirtualMachine.Event.AgentReportShutdowned, State.Stopped); _stateMachine.addTransition(State.Starting, VirtualMachine.Event.OperationRetry, State.Starting); _stateMachine.addTransition(State.Starting, VirtualMachine.Event.OperationSucceeded, State.Running); _stateMachine.addTransition(State.Starting, VirtualMachine.Event.OperationFailed, State.Stopped); _stateMachine.addTransition(State.Starting, VirtualMachine.Event.AgentReportRunning, State.Running); _stateMachine.addTransition(State.Starting, VirtualMachine.Event.AgentReportStopped, State.Stopped); _stateMachine.addTransition(State.Starting, VirtualMachine.Event.AgentReportShutdowned, State.Stopped); 
_stateMachine.addTransition(State.Destroyed, VirtualMachine.Event.RecoveryRequested, State.Stopped); _stateMachine.addTransition(State.Destroyed, VirtualMachine.Event.ExpungeOperation, State.Expunging); _stateMachine.addTransition(State.Creating, VirtualMachine.Event.MigrationRequested, State.Destroyed); _stateMachine.addTransition(State.Running, VirtualMachine.Event.MigrationRequested, State.Migrating); _stateMachine.addTransition(State.Running, VirtualMachine.Event.AgentReportRunning, State.Running); _stateMachine.addTransition(State.Running, VirtualMachine.Event.AgentReportStopped, State.Stopped); _stateMachine.addTransition(State.Running, VirtualMachine.Event.StopRequested, State.Stopping); _stateMachine.addTransition(State.Running, VirtualMachine.Event.AgentReportShutdowned, State.Stopped); _stateMachine.addTransition(State.Migrating, VirtualMachine.Event.MigrationRequested, State.Migrating); _stateMachine.addTransition(State.Migrating, VirtualMachine.Event.OperationSucceeded, State.Running); _stateMachine.addTransition(State.Migrating, VirtualMachine.Event.OperationFailed, State.Running); _stateMachine.addTransition(State.Migrating, VirtualMachine.Event.MigrationFailedOnSource, State.Running); _stateMachine.addTransition(State.Migrating, VirtualMachine.Event.MigrationFailedOnDest, State.Running); _stateMachine.addTransition(State.Migrating, VirtualMachine.Event.AgentReportRunning, State.Running); _stateMachine.addTransition(State.Migrating, VirtualMachine.Event.AgentReportStopped, State.Stopped); _stateMachine.addTransition(State.Stopping, VirtualMachine.Event.OperationSucceeded, State.Stopped); _stateMachine.addTransition(State.Migrating, VirtualMachine.Event.AgentReportShutdowned, State.Stopped); _stateMachine.addTransition(State.Stopping, VirtualMachine.Event.OperationFailed, State.Running); _stateMachine.addTransition(State.Stopping, VirtualMachine.Event.AgentReportRunning, State.Running); _stateMachine.addTransition(State.Stopping, 
VirtualMachine.Event.AgentReportStopped, State.Stopped); _stateMachine.addTransition(State.Stopping, VirtualMachine.Event.StopRequested, State.Stopping); _stateMachine.addTransition(State.Stopping, VirtualMachine.Event.AgentReportShutdowned, State.Stopped); _stateMachine.addTransition(State.Expunging, VirtualMachine.Event.OperationFailed, State.Expunging); _stateMachine.addTransition(State.Expunging, VirtualMachine.Event.ExpungeOperation, State.Expunging); _stateMachine.registerListeners(_stateListner); } protected boolean stateTransitTo(VMInstanceVO vm, VirtualMachine.Event e, Long hostId, String reservationId) { vm.setReservationId(reservationId); if (vm instanceof UserVmVO) { return _stateMachine.transitTO(vm, e, hostId, _userVmDao); } else if (vm instanceof ConsoleProxyVO) { return _stateMachine.transitTO(vm, e, hostId, _consoleDao); } else if (vm instanceof SecondaryStorageVmVO) { return _stateMachine.transitTO(vm, e, hostId, _secondaryDao); } else if (vm instanceof DomainRouterVO) { return _stateMachine.transitTO(vm, e, hostId, _routerDao); } else { return _stateMachine.transitTO(vm, e, hostId, _vmDao); } } @Override public boolean stateTransitTo(VMInstanceVO vm, VirtualMachine.Event e, Long hostId) { if (vm instanceof UserVmVO) { return _stateMachine.transitTO(vm, e, hostId, _userVmDao); } else if (vm instanceof ConsoleProxyVO) { return _stateMachine.transitTO(vm, e, hostId, _consoleDao); } else if (vm instanceof SecondaryStorageVmVO) { return _stateMachine.transitTO(vm, e, hostId, _secondaryDao); } else if (vm instanceof DomainRouterVO) { return _stateMachine.transitTO(vm, e, hostId, _routerDao); } else { return _stateMachine.transitTO(vm, e, hostId, _vmDao); } } @Override public <T extends VMInstanceVO> boolean remove(T vm, User user, Account caller) { return _vmDao.remove(vm.getId()); } @Override public <T extends VMInstanceVO> boolean destroy(T vm, User user, Account caller) throws AgentUnavailableException, OperationTimedoutException, 
ConcurrentOperationException { if (s_logger.isDebugEnabled()) { s_logger.debug("Destroying vm " + vm.toString()); } if (vm == null || vm.getState() == State.Destroyed || vm.getState() == State.Expunging || vm.getRemoved() != null) { if (s_logger.isDebugEnabled()) { s_logger.debug("Unable to find vm or vm is destroyed: " + vm); } return true; } if (!advanceStop(vm, false, user, caller)) { s_logger.debug("Unable to stop " + vm); return false; } if (!stateTransitTo(vm, VirtualMachine.Event.DestroyRequested, vm.getHostId())) { s_logger.debug("Unable to destroy the vm because it is not in the correct state: " + vm.toString()); return false; } return true; } protected class CleanupTask implements Runnable { @Override public void run() { s_logger.trace("VM Operation Thread Running"); try { _workDao.cleanup(_cleanupWait); } catch (Exception e) { s_logger.error("VM Operations failed due to ", e); } } } }
package org.helioviewer.jhv;

import java.awt.Desktop;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;

import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;

import org.helioviewer.jhv.base.logging.Log;
import org.helioviewer.jhv.gui.dialogs.TextDialog;
import org.helioviewer.jhv.threads.JHVExecutor;

import com.jidesoft.comparator.AlphanumComparator;

/**
 * Application-wide constants and small utilities: version/revision discovery
 * from the jar manifest, shared executor, connection timeouts, URL opening
 * and desktop notifications.
 */
public class JHVGlobals {

    public static final String programName = "ESA JHelioviewer";
    public static final String downloadURL = "http://swhv.oma.be/download/";
    // Defaults used when the manifest cannot be read (e.g. running from classes)
    public static String version = "2.-1.-1";
    public static String revision = "-1";
    public static String userAgent = "JHV/SWHV-";

    public static final AlphanumComparator alphanumComparator = new AlphanumComparator(true);
    public static final int hiDpiCutoff = 1024;

    private static final ExecutorService executorService = JHVExecutor.getJHVWorkersExecutorService("MAIN", 10);

    public static ExecutorService getExecutorService() {
        return executorService;
    }

    // Lazily read from settings; -1 means "not read yet"
    private static int readTimeout = -1;
    private static int connectTimeout = -1;

    public static int getStdReadTimeout() {
        if (readTimeout == -1)
            readTimeout = Integer.parseInt(Settings.getSingletonInstance().getProperty("connection.read.timeout"));
        return readTimeout;
    }

    public static int getStdConnectTimeout() {
        if (connectTimeout == -1)
            connectTimeout = Integer.parseInt(Settings.getSingletonInstance().getProperty("connection.connect.timeout"));
        return connectTimeout;
    }

    /**
     * Reads version and revision from the jar manifest, then derives the
     * user-agent string and the jhv.version/jhv.revision system properties.
     * Leaves the defaults untouched if anything cannot be determined.
     */
    public static void determineVersionAndRevision() {
        File jarPath;
        try {
            jarPath = new File(JHVGlobals.class.getProtectionDomain().getCodeSource().getLocation().toURI());
        } catch (URISyntaxException e1) {
            Log.error("JHVGlobals.determineVersionAndRevision > Could not open code source location: " + JHVGlobals.class.getProtectionDomain().getCodeSource().getLocation());
            Log.warn("JHVGlobals.determineVersionAndRevision > Set version and revision to null.");
            return;
        }

        if (jarPath.isFile()) {
            try (JarFile jarFile = new JarFile(jarPath)) {
                Manifest manifest = jarFile.getManifest();
                if (manifest == null) {
                    Log.warn("JHVGlobals.determineVersionAndRevision > Manifest not found in jar file: " + jarPath + ". Set version and revision to null.");
                    return;
                }
                Attributes mainAttributes = manifest.getMainAttributes();
                String manifestVersion = mainAttributes.getValue("version");
                String manifestRevision = mainAttributes.getValue("revision");
                // Attributes.getValue returns null for a missing attribute;
                // the original code then crashed in System.setProperty (which
                // rejects null values). Keep the defaults instead.
                if (manifestVersion == null || manifestRevision == null) {
                    Log.warn("JHVGlobals.determineVersionAndRevision > Manifest in " + jarPath + " lacks version/revision attributes. Keeping defaults.");
                    return;
                }
                version = manifestVersion;
                revision = manifestRevision;

                userAgent += version + '.' + revision + " (" +
                        System.getProperty("os.arch") + ' ' + System.getProperty("os.name") + ' ' + System.getProperty("os.version") + ") " +
                        System.getProperty("java.vendor") + " JRE " + System.getProperty("java.version");

                System.setProperty("jhv.version", version);
                System.setProperty("jhv.revision", revision);
                Log.info("Running " + userAgent);
            } catch (IOException e) {
                Log.error("JHVGlobals.determineVersionAndRevision > Error while reading version and revision from manifest in jar file: " + jarPath, e);
            }
        } else {
            Log.warn("JHVGlobals.determineVersionAndRevision > Classes are not within a jar file. Set version and revision to null.");
        }
    }

    /**
     * Attempts to create the necessary directories if they do not exist. It
     * gets its list of directories to create from the JHVDirectory class.
     *
     * @throws SecurityException
     */
    public static void createDirs() {
        JHVDirectory[] dirs = JHVDirectory.values();
        for (JHVDirectory dir : dirs) {
            File f = dir.getFile();
            if (!f.exists()) {
                // mkdirs() result intentionally ignored: a later failure to
                // write will surface the problem with more context.
                f.mkdirs();
            }
        }
    }

    public static final HyperOpenURL hyperOpenURL = new HyperOpenURL();

    /** Opens activated hyperlinks in the system browser. */
    private static class HyperOpenURL implements HyperlinkListener {
        @Override
        public void hyperlinkUpdate(HyperlinkEvent e) {
            if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
                openURL(e.getURL().toString());
            }
        }
    }

    /**
     * Opens the given URL in the default browser. Best-effort: failures are
     * logged (via Log, consistent with the rest of this class) and swallowed.
     */
    public static void openURL(String url) {
        try {
            if (url != null)
                Desktop.getDesktop().browse(new URI(url));
        } catch (Exception e) {
            Log.error("JHVGlobals.openURL > Could not open URL: " + url, e);
        }
    }

    /**
     * Notifies the user that a movie export finished: native notification on
     * macOS (via bundled terminal-notifier), dialog fallback elsewhere.
     */
    public static void displayNotification(String moviePath) {
        String openURL = new File(moviePath).toURI().toString();

        if (System.getProperty("jhv.os").equals("mac")) {
            try {
                String msg = "Export of " + moviePath + " finished.";
                File jarParent = new File(JHVGlobals.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getCanonicalFile().getParentFile();
                if (jarParent != null) {
                    String[] cmd = { jarParent.getParent() + "/Helpers/terminal-notifier.app/Contents/MacOS/terminal-notifier",
                                     "-message", "\"" + msg + "\"",
                                     "-execute", "open " + "\"" + openURL + "\"",
                                     "-title", "JHelioviewer" };
                    Log.info("JHVGlobals.displayNotification " + Arrays.toString(cmd));
                    Runtime.getRuntime().exec(cmd);
                    return;
                }
            } catch (Exception e) {
                // Fall through to the dialog below on any failure
                StringWriter errors = new StringWriter();
                e.printStackTrace(new PrintWriter(errors));
                Log.error("JHVGlobals.displayNotification " + errors);
            }
        }
        // otherwise
        new TextDialog("Export Ready", "Export of <a href=\"" + openURL + "\">" + moviePath + "</a> finished.", false).showDialog();
    }

}
package lighthouse.utils;

import com.sun.prism.GraphicsPipeline;
import com.sun.prism.sw.SWPipeline;
import org.bitcoinj.core.Coin;
import com.google.common.base.Throwables;
import com.google.common.util.concurrent.Uninterruptibles;
import javafx.animation.*;
import javafx.application.Platform;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.binding.NumberBinding;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.value.WritableDoubleValue;
import javafx.fxml.FXMLLoader;
import javafx.scene.CacheHint;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.effect.BlurType;
import javafx.scene.effect.ColorAdjust;
import javafx.scene.effect.DropShadow;
import javafx.scene.effect.GaussianBlur;
import javafx.scene.image.ImageView;
import javafx.scene.layout.Pane;
import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle;
import javafx.stage.DirectoryChooser;
import javafx.stage.FileChooser;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.util.Duration;
import lighthouse.Main;
import lighthouse.protocol.LHUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;
import java.io.File;
import java.net.URL;
import java.nio.file.Path;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;

import static com.google.common.base.Preconditions.checkState;
import static lighthouse.protocol.LHUtils.unchecked;

/**
 * Grab-bag of JavaFX helpers: alert dialogs, crash handling, fade/zoom/blur
 * animations, effect helpers, and small utility methods used across the UI.
 */
public class GuiUtils {
    public static final Logger log = LoggerFactory.getLogger(GuiUtils.class);

    /**
     * Loads the alert FXML, hands the stage and controller to the given setup
     * callback, then shows the dialog modally.
     */
    public static void runAlert(BiConsumer<Stage, AlertWindowController> setup) {
        try {
            // JavaFX doesn't actually have a standard alert template. Instead the Scene Builder app will create FXML
            // files for an alert window for you, and then you customise it as you see fit.
            Stage dialogStage = new Stage();
            dialogStage.initModality(Modality.APPLICATION_MODAL);
            FXMLLoader loader = new FXMLLoader(GuiUtils.class.getResource("alert.fxml"));
            Pane pane = loader.load();
            AlertWindowController controller = loader.getController();
            setup.accept(dialogStage, controller);
            dialogStage.setScene(new Scene(pane));
            dialogStage.showAndWait();
        } catch (Throwable e) {
            // We crashed whilst trying to show the alert dialog. This can happen if we're being crashed by inbound
            // closures onto the event thread which will execute in the nested event loop. Just give up here: at the
            // moment we have no way to filter them out of the event queue.
            e.printStackTrace();
            Runtime.getRuntime().exit(1);
        }
    }

    /** Shows the crash dialog for the root cause of t, then exits the app. */
    public static void crashAlert(Throwable t) {
        Throwable rootCause = Throwables.getRootCause(t);
        log.error("CRASH!", rootCause);
        // Always use runLater to avoid "Nested event loops are allowed only while handling system events" error that
        // can occur if the crash occurs during e.g. an animation.
        Platform.runLater(() -> {
            runAlert((stage, controller) -> controller.crashAlert(stage, rootCause.toString()));
            Platform.exit();
        });
    }

    /** Show a GUI alert box for any unhandled exceptions that propagate out of this thread. */
    public static void handleCrashesOnThisThread() {
        Thread.currentThread().setUncaughtExceptionHandler((thread, exception) -> {
            Platform.runLater(() -> {
                Main.instance.mainStage.hide();
                GuiUtils.crashAlert(Throwables.getRootCause(exception));
            });
        });
    }

    /**
     * Shows an informational alert with a printf-style formatted detail text.
     * Safe to call from any thread.
     */
    public static void informationalAlert(String message, String details, Object... args) {
        String formattedDetails = String.format(details, args);
        Runnable r = () -> runAlert((stage, controller) -> controller.informational(stage, message, formattedDetails));
        if (Platform.isFxApplicationThread())
            r.run();
        else
            Platform.runLater(r);
    }

    public static final int UI_ANIMATION_TIME_MSEC = 300;
    public static final Duration UI_ANIMATION_TIME = Duration.millis(UI_ANIMATION_TIME_MSEC);

    public static Animation fadeIn(Node ui) {
        return fadeIn(ui, 0, 1.0);
    }

    /** Fades the node from its current opacity up to targetValue after delayMillis. */
    public static Animation fadeIn(Node ui, int delayMillis, double targetValue) {
        ui.setCache(true);
        ui.setCacheHint(CacheHint.SPEED);
        FadeTransition ft = new FadeTransition(Duration.millis(UI_ANIMATION_TIME_MSEC), ui);
        ft.setFromValue(ui.getOpacity());
        ft.setToValue(targetValue);
        ft.setOnFinished(ev -> ui.setCache(false));
        ft.setDelay(Duration.millis(delayMillis));
        ft.play();
        return ft;
    }

    /** Fades the node from its current opacity down to fully transparent. */
    public static Animation fadeOut(Node ui) {
        ui.setCache(true);
        ui.setCacheHint(CacheHint.SPEED);
        FadeTransition ft = new FadeTransition(Duration.millis(UI_ANIMATION_TIME_MSEC), ui);
        ft.setFromValue(ui.getOpacity());
        ft.setToValue(0.0);
        ft.setOnFinished(ev -> ui.setCache(false));
        ft.play();
        return ft;
    }

    /**
     * Fades out all given nodes, and removes them from parentPane once the
     * first node's fade completes.
     */
    public static Animation fadeOutAndRemove(Pane parentPane, Node... nodes) {
        Animation animation = fadeOut(nodes[0]);
        animation.setOnFinished(actionEvent -> parentPane.getChildren().removeAll(nodes));
        for (int i = 1; i < nodes.length; i++) {
            fadeOut(nodes[i]);
        }
        return animation;
    }

    /** Variant with an explicit duration; only the first node is actually faded. */
    public static Animation fadeOutAndRemove(Duration duration, Pane parentPane, Node... nodes) {
        nodes[0].setCache(true);
        FadeTransition ft = new FadeTransition(duration, nodes[0]);
        ft.setFromValue(nodes[0].getOpacity());
        ft.setToValue(0.0);
        ft.setOnFinished(ev -> parentPane.getChildren().removeAll(nodes));
        ft.play();
        return ft;
    }

    /** Animates a gaussian blur onto the node (0 -> 10 radius). */
    public static void blurOut(Node node) {
        GaussianBlur blur = new GaussianBlur(0.0);
        node.setEffect(blur);
        Timeline timeline = new Timeline();
        KeyValue kv = new KeyValue(blur.radiusProperty(), 10.0);
        KeyFrame kf = new KeyFrame(UI_ANIMATION_TIME, kv);
        timeline.getKeyFrames().add(kf);
        timeline.play();
    }

    /**
     * Animates the node's existing gaussian blur back to zero and removes the
     * effect. Pairs with {@link #blurOut(Node)}; logs and bails if there is no
     * blur to cancel.
     */
    public static void blurIn(Node node, Duration duration) {
        GaussianBlur blur = (GaussianBlur) node.getEffect();
        if (blur == null) {
            Main.log.error("BUG: Attempted to cancel non-existent blur.");
            return;
        }
        Timeline timeline = new Timeline();
        KeyValue kv = new KeyValue(blur.radiusProperty(), 0.0);
        KeyFrame kf = new KeyFrame(duration, kv);
        timeline.getKeyFrames().add(kf);
        timeline.setOnFinished(actionEvent -> node.setEffect(null));
        timeline.play();
    }

    public static ScaleTransition zoomIn(Node node) {
        return zoomIn(node, 0);
    }

    public static ScaleTransition zoomIn(Node node, int delayMillis) {
        return scaleFromTo(node, 0.95, 1.0, delayMillis);
    }

    public static ScaleTransition explodeOut(Node node) {
        return scaleFromTo(node, 1.0, 1.05, 0);
    }

    private static ScaleTransition scaleFromTo(Node node, double from, double to, int delayMillis) {
        ScaleTransition scale = new ScaleTransition(Duration.millis(UI_ANIMATION_TIME_MSEC), node);
        scale.setFromX(from);
        scale.setFromY(from);
        scale.setToX(to);
        scale.setToY(to);
        scale.setDelay(Duration.millis(delayMillis));
        scale.play();
        return scale;
    }

    /** Animates a drop shadow onto the node, reusing any existing one. */
    public static void dropShadowOn(Node node) {
        DropShadow dropShadow = node.getEffect() != null ? (DropShadow) node.getEffect() : new DropShadow(BlurType.THREE_PASS_BOX, Color.BLACK, 0.0, 0.0, 0, 0);
        node.setEffect(dropShadow);
        Timeline timeline = new Timeline();
        timeline.getKeyFrames().add(
                new KeyFrame(Duration.millis(UI_ANIMATION_TIME_MSEC / 3),
                        new KeyValue(dropShadow.radiusProperty(), 3.0))
        );
        timeline.play();
    }

    /**
     * Animates the node's drop shadow away and removes the effect.
     * NOTE(review): assumes {@link #dropShadowOn(Node)} ran first — NPEs if
     * the node has no effect. Confirm callers uphold that pairing.
     */
    public static void dropShadowOff(Node node) {
        DropShadow dropShadow = (DropShadow) node.getEffect();
        Timeline timeline = new Timeline();
        timeline.getKeyFrames().add(
                new KeyFrame(Duration.millis(UI_ANIMATION_TIME_MSEC / 3),
                        new KeyValue(dropShadow.radiusProperty(), 0.0))
        );
        timeline.setOnFinished((ev) -> node.setEffect(null));
        timeline.play();
    }

    /** Animates the node's brightness from neutral to the given adjustment. */
    public static void brightnessAdjust(Node node, double adjustment) {
        node.setCache(true);
        node.setCacheHint(CacheHint.SPEED);
        ColorAdjust adjust = new ColorAdjust();
        adjust.setBrightness(0.0);
        node.setEffect(adjust);
        Timeline timeline = new Timeline(new KeyFrame(Duration.millis(UI_ANIMATION_TIME_MSEC * 0.7),
                new KeyValue(adjust.brightnessProperty(), adjustment)));
        timeline.play();
    }

    /** Animates the node's brightness back to neutral; pairs with brightnessAdjust. */
    public static void brightnessUnadjust(Node node) {
        ColorAdjust effect = (ColorAdjust) node.getEffect();
        Timeline timeline = new Timeline(new KeyFrame(Duration.millis(UI_ANIMATION_TIME_MSEC * 0.7),
                new KeyValue(effect.brightnessProperty(), 0.0)));
        timeline.setOnFinished(ev -> node.setCache(false));
        timeline.play();
    }

    public static void checkGuiThread() {
        checkState(Platform.isFxApplicationThread());
    }

    /**
     * ANDs all the given boolean properties into one binding.
     * Returns an always-true binding for an empty list (the original crashed
     * with IndexOutOfBoundsException on list.get(0)).
     */
    public static BooleanBinding conjunction(List<BooleanProperty> list) {
        BooleanBinding accumulator = new SimpleBooleanProperty(true).and(new SimpleBooleanProperty(true));
        for (BooleanProperty property : list) {
            accumulator = accumulator.and(property);
        }
        return accumulator;
    }

    public static Path resourceOverrideDirectory;

    /**
     * Resolves a resource, preferring resourceOverrideDirectory when set.
     * Uses Path.toUri() rather than string-concatenating "file://" — the old
     * form produced invalid URLs on Windows and for paths with spaces.
     */
    public static URL getResource(String name) {
        if (resourceOverrideDirectory != null)
            return unchecked(() -> resourceOverrideDirectory.resolve(name).toAbsolutePath().toUri().toURL());
        else
            return Main.class.getResource(name);
    }

    /** Parses a user-entered bitcoin value; null instead of throwing on bad input. */
    @Nullable
    public static Coin valueOrNull(String str) {
        try {
            return valueOrThrow(str);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /**
     * Parses a user-entered bitcoin value.
     *
     * @throws NumberFormatException for unparseable or non-positive values
     */
    public static Coin valueOrThrow(String str) throws NumberFormatException {
        long value = BitcoinValue.userInputToSatoshis(str);
        if (value > 0)
            return Coin.valueOf(value);
        throw new NumberFormatException();
    }

    /** Runs the runnable on the FX thread after (at least) millis milliseconds. */
    public static void runOnGuiThreadAfter(long millis, Runnable runnable) {
        new Thread(() -> {
            Uninterruptibles.sleepUninterruptibly(millis, TimeUnit.MILLISECONDS);
            Platform.runLater(runnable);
        }).start();
    }

    /** Runs the runnable after a couple of rendered frames have gone by. */
    public static void runAfterFrame(Runnable runnable) {
        AnimationTimer frameWaiter = new AnimationTimer() {
            private int frames;

            @Override
            public void handle(long l) {
                frames++;
                if (frames > 2) {
                    stop();
                    runnable.run();
                }
            }
        };
        frameWaiter.start();
    }

    public static void platformFiddleChooser(FileChooser chooser) {
        // Work around FileChooser bugs.
        if (LHUtils.isUnix()) {
            chooser.setInitialDirectory(new File(System.getProperty("user.home")));
        }
    }

    public static void platformFiddleChooser(DirectoryChooser chooser) {
        // Work around DirectoryChooser bugs.
        if (LHUtils.isUnix()) {
            chooser.setInitialDirectory(new File(System.getProperty("user.home")));
        }
    }

    /** Clips the image view to a rounded rectangle of the given corner radius. */
    public static void roundCorners(ImageView view, double amount) {
        // This should be easier to do just with CSS.
        Rectangle clipRect = new Rectangle(view.getFitWidth(), view.getFitHeight());
        clipRect.setArcWidth(amount);
        clipRect.setArcHeight(amount);
        view.setClip(clipRect);
    }

    /** Holder for the state of an animatedBind; stored in the node's user data. */
    public static class AnimatedBindInfo {
        @Nullable public Timeline timeline;
        public NumberBinding bindFrom;
        public Runnable onAnimFinish;
    }

    public static AnimatedBindInfo animatedBind(Node node, WritableDoubleValue bindTo, NumberBinding bindFrom) {
        return animatedBind(node, bindTo, bindFrom, null);
    }

    /**
     * Like bindTo.bind(bindFrom) but animates changes rather than jumping.
     * The returned info object is also stashed in the node's userData.
     */
    public static AnimatedBindInfo animatedBind(Node node, WritableDoubleValue bindTo, NumberBinding bindFrom, @Nullable Interpolator interpolator) {
        bindTo.set(bindFrom.doubleValue());   // Initialise.
        bindFrom.addListener((o, prev, cur) -> {
            AnimatedBindInfo info = (AnimatedBindInfo) node.getUserData();
            if (info.timeline != null)
                info.timeline.stop();
            info.timeline = new Timeline(new KeyFrame(UI_ANIMATION_TIME,
                    interpolator != null ? new KeyValue(bindTo, cur, interpolator) : new KeyValue(bindTo, cur)));
            info.timeline.setOnFinished(ev -> {
                ((AnimatedBindInfo) node.getUserData()).timeline = null;
                if (info.onAnimFinish != null)
                    info.onAnimFinish.run();
            });
            info.timeline.play();
        });
        // We must pin bindFrom into the object graph, otherwise something like:
        //    animatedBind(node, node.opacityProperty(), when(a).then(1).otherwise(2))
        // will mysteriously stop working when the result of when() gets garbage collected and the listener with it.
        AnimatedBindInfo info = new AnimatedBindInfo();
        info.bindFrom = bindFrom;
        node.setUserData(info);
        return info;
    }

    /** True when JavaFX is rendering with the software (non-GPU) pipeline. */
    public static boolean isSoftwarePipeline() {
        return GraphicsPipeline.getPipeline() instanceof SWPipeline;
    }
}
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package leaptest.controller; import com.leapmotion.leap.Controller; import com.leapmotion.leap.Frame; import com.leapmotion.leap.Hand; import com.leapmotion.leap.HandList; /** * * @author Annet */ public class GestureGrabControl extends LeapControl { private final static int HAND_PALM_THRESHOLD = 1; private Frame frame; private Hand previousHand; private Controller leap; private boolean isRightHanded = true; public GestureGrabControl(Controller leap) { super(leap); this.leap = leap; } @Override public void update(float tpf) { HandList hands = frame.hands(); Hand hand = getGrabHand(hands); if(previousHand != null) { if(hand == null) return; if(Math.abs(hand.palmPosition().getX()-previousHand.palmPosition().getX())<HAND_PALM_THRESHOLD && Math.abs(hand.palmPosition().getY()-previousHand.palmPosition().getY())<HAND_PALM_THRESHOLD && Math.abs(hand.palmPosition().getZ()-previousHand.palmPosition().getZ())<HAND_PALM_THRESHOLD) { } } previousHand = hand; } @Override protected void onFrame(Controller leap) { frame = controller.frame(); } private Hand getGrabHand(HandList hands) { if (isRightHanded) return hands.rightmost(); return hands.leftmost(); } }
package battlecode.common;

/**
 * A RobotController allows contestants to make their robot sense and interact
 * with the game world. When a contestant's <code>RobotPlayer</code> is
 * constructed, it is passed an instance of <code>RobotController</code> that
 * controls the newly created robot.
 */
@SuppressWarnings("unused")
public interface RobotController {

    /**
     * Gets the number of rounds in the game. After this many rounds, if neither
     * team has destroyed the enemy HQ, then the tiebreakers will be used.
     *
     * @return the number of rounds in the game.
     *
     * @battlecode.doc.costlymethod
     */
    int getRoundLimit();

    /**
     * Gets the team's total parts.
     *
     * @return the team's total parts.
     *
     * @battlecode.doc.costlymethod
     */
    double getTeamParts();

    /**
     * Returns the zombie spawn schedule for a given round on the map. Only
     * works on zombie dens. NOT AVAILABLE TO COMPETITORS?
     *
     * @param round the round number
     * @return the zombie spawn schedule.
     *
     * @battlecode.doc.costlymethod
     */
    ZombieCount[] getZombieSpawnSchedule(int round);

    /**
     * Use this method to access your ID.
     *
     * @return the ID of the robot.
     *
     * @battlecode.doc.costlymethod
     */
    int getID();

    /**
     * Gets the Team of this robot.
     *
     * @return this robot's Team
     *
     * @battlecode.doc.costlymethod
     */
    Team getTeam();

    /**
     * Gets this robot's type (SOLDIER, HQ, etc.).
     *
     * @return this robot's type.
     *
     * @battlecode.doc.costlymethod
     */
    RobotType getType();

    /**
     * Gets the robot's current location.
     *
     * @return this robot's current location.
     *
     * @battlecode.doc.costlymethod
     */
    MapLocation getLocation();

    /**
     * Returns the amount of core delay a robot has accumulated. If the result
     * is strictly less than 1, then the robot can perform a core action.
     *
     * @return the amount of core delay a robot has accumulated.
     *
     * @battlecode.doc.costlymethod
     */
    double getCoreDelay();

    /**
     * Returns the amount of weapon delay a robot has accumulated. If the result
     * is strictly less than 1, then the robot can attack.
     *
     * @return the number of weapon delay a robot has accumulated.
     *
     * @battlecode.doc.costlymethod
     */
    double getWeaponDelay();

    /**
     * Gets the robot's current health.
     *
     * @return this robot's current health.
     *
     * @battlecode.doc.costlymethod
     */
    double getHealth();

    // TODO: seems weird that onTheMap throws GameActionExceptions for things
    // that can't be sensed while senseRubble and senseParts return -1 for
    // those

    /**
     * Senses whether a MapLocation is on the map. Will throw an exception if
     * the location is not currently and has never been within sensor range.
     *
     * @param loc the location to check.
     * @return true if the location is on the map, and false if it is not.
     * @throws GameActionException if the location has never been within
     * sensor range.
     *
     * @battlecode.doc.costlymethod
     */
    boolean onTheMap(MapLocation loc) throws GameActionException;

    /**
     * Senses the rubble at the given location. Returns -1 for a location
     * outside sensor range. Returns 0 for off map locations.
     *
     * @param loc the location to check.
     * @return the amount of rubble at the location
     *
     * @battlecode.doc.costlymethod
     */
    double senseRubble(MapLocation loc);

    /**
     * Senses the parts at the given location. Returns -1 for a location
     * outside sensor range. Returns 0 for off map locations.
     *
     * @param loc the location to check.
     * @return the amount of parts at the location
     *
     * @battlecode.doc.costlymethod
     */
    double senseParts(MapLocation loc);

    /**
     * Returns true if the given location is within the robot's sensor range, or
     * within the sensor range of some ally.
     *
     * @param loc the location to check.
     * @return whether the given location is within the robot's sensor range.
     *
     * @battlecode.doc.costlymethod
     */
    boolean canSenseLocation(MapLocation loc);

    /**
     * Returns whether there is a robot at the given location.
     *
     * @param loc the location to check.
     * @return whether there is a robot at the given location.
     * @throws GameActionException if <code>loc</code> is not within sensor
     * range (CANT_SENSE_THAT).
     *
     * @battlecode.doc.costlymethod
     */
    boolean isLocationOccupied(MapLocation loc) throws GameActionException;

    /**
     * Returns the robot at the given location, or <code>null</code> if there is
     * no object there.
     *
     * @param loc the location to check.
     * @return the robot at the given location.
     * @throws GameActionException if <code>loc</code> is not within sensor
     * range (CANT_SENSE_THAT).
     *
     * @battlecode.doc.costlymethod
     */
    RobotInfo senseRobotAtLocation(MapLocation loc) throws GameActionException;

    /**
     * Returns true if the given robot is within the robot's sensor range.
     *
     * @param id the ID of the robot to query.
     * @return whether the given robot is within the robot's sensor range.
     *
     * @battlecode.doc.costlymethod
     */
    boolean canSenseRobot(int id);

    /**
     * Senses information about a particular robot given its ID.
     *
     * @param id the ID of the robot to query.
     * @return a RobotInfo object for the sensed robot.
     * @throws GameActionException if the robot cannot be sensed (for example,
     * if it doesn't exist or is out of sight range).
     *
     * @battlecode.doc.costlymethod
     */
    RobotInfo senseRobot(int id) throws GameActionException;

    /**
     * Returns all robots that can be sensed on the map.
     *
     * @return array of class type of game objects.
     *
     * @battlecode.doc.costlymethod
     */
    RobotInfo[] senseNearbyRobots();

    /**
     * Returns all robots that can be sensed within a certain radius of the
     * robot.
     *
     * @param radiusSquared return objects this distance away from the center.
     * @return array of class type of game objects.
     *
     * @battlecode.doc.costlymethod
     */
    RobotInfo[] senseNearbyRobots(int radiusSquared);

    /**
     * Returns all robots of a given team that can be sensed within a certain
     * radius of the robot.
     *
     * @param radiusSquared return objects this distance away from the center.
     * @param team filter game objects by the given team. If null is passed,
     * objects from all teams are returned.
     * @return array of class type of game objects.
     *
     * @battlecode.doc.costlymethod
     */
    RobotInfo[] senseNearbyRobots(int radiusSquared, Team team);

    /**
     * Returns all robots of a given team that can be sensed within a certain
     * radius of a specified location.
     *
     * @param center center of the given search radius.
     * @param radiusSquared return objects this distance away from the center.
     * @param team filter game objects by the given team. If null is passed,
     * objects from all teams are returned.
     * @return array of class type of game objects.
     *
     * @battlecode.doc.costlymethod
     */
    RobotInfo[] senseNearbyRobots(MapLocation center, int radiusSquared, Team team);

    /**
     * Returns whether the core delay is strictly less than 1 (whether the robot
     * can perform a core action in the given turn).
     *
     * @return whether the robot can perform a core action in this turn.
     *
     * @battlecode.doc.costlymethod
     */
    boolean isCoreReady();

    /**
     * Returns whether the weapon delay is less than 1 (whether the robot can
     * attack in the given turn).
     *
     * @return whether the robot is able to attack in the current turn.
     *
     * @battlecode.doc.costlymethod
     */
    boolean isWeaponReady();

    /**
     * Queues a rubble clear in the given direction to be executed at the end
     * of the turn.
     *
     * @param dir the direction to clear rubble in.
     * @throws GameActionException if the robot cannot move in this direction.
     *
     * @battlecode.doc.costlymethod
     */
    void clearRubble(Direction dir) throws GameActionException;

    /**
     * Tells whether this robot can move in the given direction, without taking
     * any sort of delays into account. Takes into account only the map terrain,
     * positions of other robots, and the current robot's type. Does not take
     * into account whether this robot is currently active (no core delay), but
     * will only return true for units that are capable of movement.
     * Returns false for the OMNI and NONE directions.
     *
     * @param dir the direction to move in.
     * @return true if there are no robots or voids preventing this robot from
     * moving in the given direction; false otherwise.
     *
     * @battlecode.doc.costlymethod
     */
    boolean canMove(Direction dir);

    /**
     * Queues a move in the given direction to be performed at the end of this
     * turn.
     *
     * @param dir the direction to move in.
     * @throws GameActionException if the robot cannot move in this direction.
     *
     * @battlecode.doc.costlymethod
     */
    void move(Direction dir) throws GameActionException;

    /**
     * Returns whether the given location is within the robot's attack range.
     * Does not take into account whether the robot is currently attacking or
     * has the delay to do so.
     *
     * @param loc the location to attempt to attack.
     * @return true if the given location is within this robot's attack range.
     * Does not take into account whether the robot is currently attacking.
     *
     * @battlecode.doc.costlymethod
     */
    boolean canAttackLocation(MapLocation loc);

    /**
     * Queues an attack on the given location to be performed at the end of this
     * turn.
     *
     * @param loc the location to attack.
     * @throws GameActionException if the robot cannot attack the given square.
     *
     * @battlecode.doc.costlymethod
     */
    void attackLocation(MapLocation loc) throws GameActionException;

    /**
     * Broadcasts a message to the global message board.
     *
     * @param channel the channel to write to, from 0 to
     * <code>BROADCAST_MAX_CHANNELS</code>.
     * @param data one int's worth of data to write.
     * @throws GameActionException if the channel is invalid.
     *
     * @battlecode.doc.costlymethod
     */
    void broadcast(int channel, int data) throws GameActionException;

    /**
     * Retrieves the message stored at the given radio channel.
     *
     * @param channel radio channel to query, from 0 to
     * <code>BROADCAST_MAX_CHANNELS</code>.
     * @return data currently stored on the channel.
     * @throws GameActionException if the channel is invalid.
     *
     * @battlecode.doc.costlymethod
     */
    int readBroadcast(int channel) throws GameActionException;

    /**
     * Returns whether you have the parts and the dependencies to build the given
     * robot, and that the robot can build structures.
     *
     * @param type the type to build.
     * @return whether the requirements to build are met.
     *
     * @battlecode.doc.costlymethod
     */
    boolean hasBuildRequirements(RobotType type);

    /**
     * Returns whether the robot can build a structure of the given type in the
     * given direction, without taking delays into account. Checks dependencies,
     * parts costs, whether the robot can build, and that the given direction is
     * not blocked. Does not check if a robot has sufficiently low coreDelay or
     * not.
     *
     * @param dir the direction to build in.
     * @param type the robot type to spawn.
     * @return whether it is possible to build a building of the given type in
     * the given direction.
     *
     * @battlecode.doc.costlymethod
     */
    boolean canBuild(Direction dir, RobotType type);

    /**
     * Builds a structure in the given direction, queued for the end of the
     * turn. The structure will initially be inactive for a number of turns
     * (during which this robot cannot move or attack). After a number of turns,
     * the structure will become active.
     *
     * @param dir the direction to build in.
     * @param type the type to build.
     * @throws GameActionException if the build is bad.
     *
     * @battlecode.doc.costlymethod
     */
    void build(Direction dir, RobotType type) throws GameActionException;

    /**
     * Kills your robot and ends the current round. Never fails.
     *
     * @battlecode.doc.costlymethod
     */
    void disintegrate();

    /**
     * Causes your team to lose the game. It's like typing "gg."
     *
     * @battlecode.doc.costlymethod
     */
    void resign();

    /**
     * Turret only. Transforms the turret into a TTM after a short delay.
     *
     * @throws GameActionException if this robot cannot pack
     *
     * @battlecode.doc.costlymethod
     */
    void pack() throws GameActionException;

    /**
     * TTM only. Transforms the TTM into a turret after a short delay.
     *
     * @throws GameActionException if this robot cannot unpack
     *
     * @battlecode.doc.costlymethod
     */
    void unpack() throws GameActionException;

    /**
     * Sets the team's "memory", which is saved for the next game in the match.
     * The memory is an array of {@link GameConstants#TEAM_MEMORY_LENGTH} longs.
     * If this method is called more than once with the same index in the same
     * game, the last call is what is saved for the next game.
     *
     * @param index the index of the array to set.
     * @param value the data that the team should remember for the next game.
     * @throws java.lang.ArrayIndexOutOfBoundsException if {@code index} is
     * less than zero or greater than or equal to
     * {@link GameConstants#TEAM_MEMORY_LENGTH}.
     * @see #getTeamMemory
     * @see #setTeamMemory(int, long, long)
     *
     * @battlecode.doc.costlymethod
     */
    void setTeamMemory(int index, long value);

    /**
     * Sets this team's "memory". This function allows for finer control than
     * {@link #setTeamMemory(int, long)} provides. For example, if
     * {@code mask == 0xFF} then only the eight least significant bits of the
     * memory will be set.
     *
     * @param index the index of the array to set.
     * @param value the data that the team should remember for the next game.
     * @param mask indicates which bits should be set.
     * @throws java.lang.ArrayIndexOutOfBoundsException if {@code index} is
     * less than zero or greater than or equal to
     * {@link GameConstants#TEAM_MEMORY_LENGTH}.
     * @see #getTeamMemory
     * @see #setTeamMemory(int, long)
     *
     * @battlecode.doc.costlymethod
     */
    void setTeamMemory(int index, long value, long mask);

    /**
     * Returns the team memory from the last game of the match. The return value
     * is an array of length {@link GameConstants#TEAM_MEMORY_LENGTH}. If
     * setTeamMemory was not called in the last game, or there was no last game,
     * the corresponding long defaults to 0.
     *
     * @return the team memory from the last game of the match.
     * @see #setTeamMemory(int, long)
     * @see #setTeamMemory(int, long, long)
     *
     * @battlecode.doc.costlymethod
     */
    long[] getTeamMemory();

    /**
     * Sets one of this robot's 'indicator strings' for debugging purposes.
     * These strings are displayed in the client. This method has no effect on
     * gameplay (aside from the number of bytecodes executed to call this
     * method).
     *
     * @param stringIndex the index of the indicator string to set. Must be
     * between 0 and GameConstants.NUMBER_OF_INDICATOR_STRINGS.
     * @param newString the value to which the indicator string should be set.
     *
     * @battlecode.doc.costlymethod
     */
    void setIndicatorString(int stringIndex, String newString);

    /**
     * Draws a dot on the game map, for debugging purposes. Press V in the
     * client to toggle which team's indicator dots are displayed.
     *
     * @param loc the location to draw the dot.
     * @param red the red component of the dot's color.
     * @param green the green component of the dot's color.
     * @param blue the blue component of the dot's color.
     *
     * @battlecode.doc.costlymethod
     */
    void setIndicatorDot(MapLocation loc, int red, int green, int blue);

    /**
     * Draws a line on the game map, for debugging purposes. Press V in the
     * client to toggle which team's indicator lines are displayed.
     *
     * @param from the location to draw the line from.
     * @param to the location to draw the line to.
     * @param red the red component of the line's color.
     * @param green the green component of the line's color.
     * @param blue the blue component of the line's color.
     *
     * @battlecode.doc.costlymethod
     */
    void setIndicatorLine(MapLocation from, MapLocation to, int red, int green, int blue);

    /**
     * Gets this robot's 'control bits' for debugging purposes. These bits can
     * be set manually by the user, so a robot can respond to them.
* * @return this robot's control bits * * @battlecode.doc.costlymethod */ long getControlBits(); /** * Adds a custom observation to the match file, such that when it is * analyzed, this observation will appear. * * @param observation * the observation you want to inject into the match file. * * @battlecode.doc.costlymethod */ void addMatchObservation(String observation); /** * Returns the current round number, where round 0 is the first round of the match. * @return the current round number, where 0 is the first round of the match. * * @battlecode.doc.costlymethod */ int getRoundNum(); }
package dr.evomodel.coalescent;

//import com.lowagie.text.Paragraph;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evomodel.tree.TreeModel;
import dr.evomodelxml.coalescent.GaussianProcessSkytrackLikelihoodParser;
import dr.inference.model.Model;
import dr.inference.model.Parameter;
import dr.inference.model.Variable;
import dr.math.MathUtils;
import no.uib.cipr.matrix.*;

import java.util.ArrayList;
import java.util.List;
//import dr.evolution.tree.TreeTrait;

/**
 * Gaussian-process ("Skytrack") coalescent likelihood over a single genealogy.
 * <p>
 * Holds the sufficient statistics derived from the tree's coalescent intervals
 * (coalescent factors, change points, per-interval latent-point counts) and
 * evaluates an augmented log-likelihood consisting of three parts (see
 * {@link #getLogLikelihood()}): the sigmoidal-GP point-process likelihood, the
 * GP (intrinsic Gaussian Markov random field) prior on the log-population
 * trajectory, and a prior on the thinning upper bound lambda.
 *
 * @author Vladimir Minin
 * @author Marc Suchard
 * @author Julia Palacios
 */
//For implementation,
public class GaussianProcessSkytrackLikelihood extends OldAbstractCoalescentLikelihood {

    // -- Model parameters (registered as variables in the main constructor) --
//    protected Parameter groupSizeParameter;
    /** Precomputed log(2*pi), used by the GP prior density in calculateLogGP(). */
    public static final double LOG_TWO_TIMES_PI = 1.837877;
    protected Parameter precisionParameter;
    protected Parameter lambda_boundParameter;
//    protected Parameter numGridPoints;
    protected Parameter lambdaParameter;    //prior for lambda_bound, will be used in operators only
    protected Parameter betaParameter;
    protected Parameter alphaParameter;
    protected Parameter GPtype;             // event type per point: +1 marks a coalescent event
    protected Parameter GPcounts;           // latent-point count per coalescent interval
    protected Parameter coalfactor;         // n-choose-2 factor at each coalescent event
    protected Parameter popSizeParameter;   //before called GPvalues
    protected Parameter changePoints;
    protected Parameter Tmrca;
//    protected Parameter popValue;
    protected Parameter CoalCounts;
    protected Parameter numPoints;

    // -- Plain-array sufficient statistics, with stored copies for MCMC store/restore --
//    protected double [] GPchangePoints;
//    protected double [] storedGPchangePoints;
    protected double [] GPcoalfactor;
    protected double [] storedGPcoalfactor;
    protected double [] GPCoalInterval;
    protected double [] storedGPCoalInterval;
    protected double [] backupIntervals;
//    protected double [] storedcoalfactor;
//    protected int [] GPcounts;  //It changes values, no need to storage
//    protected int [] storedGPcounts;
    protected int [] CoalPosIndicator;
    protected int [] storedCoalPosIndicator;
    protected double [] CoalTime;
    protected double [] storedCoalTime;
    protected int numintervals;      // total number of tree intervals
    protected int numcoalpoints;     // number of coalescent events (= external nodes - 1)
    protected double constlik;       // sum over intervals of coalfactor * interval length
    protected double storedconstlik;
    protected double logGPLikelihood;
//    protected double storedLogGPLikelihood;
    protected SymmTridiagMatrix weightMatrix;  //this now changes in dimension, no need to storage
//    protected MatrixParameter dMatrix;
    protected boolean rescaleByRootHeight;

    /** Wraps a single tree in a one-element list for the list-based constructor. */
    private static List<Tree> wrapTree(Tree tree) {
        List<Tree> treeList = new ArrayList<Tree>();
        treeList.add(tree);
        return treeList;
    }

    /** Single-tree convenience constructor; delegates to the list-based constructor. */
    public GaussianProcessSkytrackLikelihood(Tree tree, Parameter precParameter,
                                             boolean rescaleByRootHeight, Parameter lambda_bound, Parameter lambda_parameter,
                                             Parameter popParameter, Parameter alpha_parameter, Parameter beta_parameter,
                                             Parameter change_points, Parameter GPtype, Parameter GPcounts,
                                             Parameter coalfactor, Parameter CoalCounts, Parameter numPoints, Parameter Tmrca) {
        this(wrapTree(tree), precParameter, rescaleByRootHeight, lambda_bound, lambda_parameter, popParameter,
                alpha_parameter, beta_parameter, change_points, GPtype, GPcounts, coalfactor, CoalCounts, numPoints, Tmrca);
    }

    /** Name-only constructor (used by subclasses / XML parsing); no state is initialized. */
    public GaussianProcessSkytrackLikelihood(String name) {
        super(name);
    }

    /**
     * Main constructor: stores parameters, registers them as variables, extracts the
     * coalescent intervals from the (single) tree, sizes all statistic arrays and
     * parameter dimensions, then initializes the sufficient statistics and GP values.
     */
    public GaussianProcessSkytrackLikelihood(List<Tree> treeList, Parameter precParameter,
                                             boolean rescaleByRootHeight, Parameter lambda_bound, Parameter lambda_parameter,
                                             Parameter popParameter, Parameter alpha_parameter, Parameter beta_parameter,
                                             Parameter change_points, Parameter GPtype, Parameter GPcounts,
                                             Parameter coalfactor, Parameter CoalCounts, Parameter numPoints, Parameter Tmrca) {
        super(GaussianProcessSkytrackLikelihoodParser.SKYTRACK_LIKELIHOOD);

        this.popSizeParameter = popParameter;
        this.Tmrca = Tmrca;
//        this.popValue=popValues;
        this.changePoints = change_points;
        this.numPoints = numPoints;
//        this.groupSizeParameter = groupParameter;
        this.precisionParameter = precParameter;
        this.lambdaParameter = lambda_parameter;
        this.betaParameter = beta_parameter;
        this.alphaParameter = alpha_parameter;
//        this.dMatrix = dMatrix;
        this.rescaleByRootHeight = rescaleByRootHeight;
        this.lambda_boundParameter = lambda_bound;
        this.GPcounts = GPcounts;
        this.GPtype = GPtype;
        this.coalfactor = coalfactor;
        this.CoalCounts = CoalCounts;

//        addVariable(GPvalues);
        addVariable(precisionParameter);
        addVariable(popSizeParameter);
        addVariable(changePoints);
        addVariable(numPoints);
//        addVariable(popValue);
        addVariable(GPcounts);
//        addVariable(GPcoalfactor);
        addVariable(GPtype);
        addVariable(coalfactor);
        addVariable(lambda_boundParameter);
        addVariable(CoalCounts);
//        addVariable(lambdaParameter);
//        addVariable(lambda_boundParameter);
//        if (betaParameter != null) {
//            addVariable(betaParameter);

        setTree(treeList);
        wrapSetupIntervals();
        // intervalCount = the size for constant vectors
//        int fieldLength = getCorrectFieldLength();
        numintervals = getIntervalCount();
        numcoalpoints = getCorrectFieldLength();

        // Allocate the per-interval and per-coalescent-event statistic arrays.
        GPcoalfactor = new double[numintervals];
        backupIntervals = new double[numintervals];
        GPCoalInterval = new double[numcoalpoints];
        storedGPCoalInterval = new double[numcoalpoints];
        CoalPosIndicator = new int[numcoalpoints];
        storedCoalPosIndicator = new int[numcoalpoints];
        CoalTime = new double[numcoalpoints];
        storedCoalTime = new double[numcoalpoints];
        storedGPcoalfactor = new double[numintervals];

        GPcounts.setDimension(numintervals);
        CoalCounts.setDimension(numcoalpoints);
//        storedGPcounts= new int[numintervals];
        GPtype.setDimension(numcoalpoints);
        numPoints.setParameterValue(0, numcoalpoints);
//        storedGPtype = new int[numcoalpoints];
        popSizeParameter.setDimension(numcoalpoints);
//        int gridpoint= (int) numGridPoints.getParameterValue(0);
//        popValue.setDimension(gridpoint);
        changePoints.setDimension(numcoalpoints);
        coalfactor.setDimension(numcoalpoints);
//        storedcoalfactor= new double[numcoalpoints];

        initializationReport();
        setupSufficientStatistics();
        setupGPvalues();
        System.err.println("initial GP likelihood +priors" + getLogLikelihood());
        System.err.println("like" + intervalsKnown);
//        System.err.println(getLogLikelihood());
    }

    // Methods that override existent methods
    // Lazily set by handleModelChangedEvent when the tree changes; read elsewhere.
    private boolean flagForJulia = false;

    /**
     * Demonstration of how to mark stuff as dirty
     * @param model  the model that changed
     * @param object event payload; expected to be a TreeModel.TreeChangedEvent
     * @param index  index passed through from the framework
     */
    protected void handleModelChangedEvent(Model model, Object object, int index) {
        super.handleModelChangedEvent(model, object, index); // Call super, since it may do something important
        if (model == tree) {
            // treeModel has changed; treeModel calls pushTreeChangedEvent that ultimately gets passed to here
            if (object instanceof TreeModel.TreeChangedEvent) {
                TreeModel.TreeChangedEvent tce = (TreeModel.TreeChangedEvent) object;
                // tce tells much about what type of event happened. In general, one does not care.
                flagForJulia = true; // flag set, so lazy work can occur elsewhere.
            } else {
                throw new IllegalArgumentException("Not sure what type of model changed event occurred: " + object.getClass().toString());
            }
        }
    }

    /**
     * Stores the single tree this likelihood operates on; rejects multi-tree input
     * and registers the tree as a sub-model when it is a TreeModel.
     */
    protected void setTree(List<Tree> treeList) {
        if (treeList.size() != 1) {
            throw new RuntimeException("GP-based method only implemented for one tree");
        }
        this.tree = treeList.get(0);
        this.treesSet = null;
        if (tree instanceof TreeModel) {
            addModel((TreeModel) tree);
        }
    }

    /** Snapshots the current interval lengths into backupIntervals. */
    protected void dobackupIntervals() {
        for (int j = 0; j < numintervals; j++) {
            backupIntervals[j] = getInterval(j);
        }
    }

//    public final void makeDirty() {
//        likelihoodKnown = false;
//        intervalsKnown = false;
//        System.err.println("setting intervalsknown false in makeDirty");

    /** Recomputes the coalescent intervals and marks them as current. */
    protected void wrapSetupIntervals() {
        setupIntervals();
        intervalsKnown = true;
    }

//    protected int getCorrectFieldLength() {
//        return tree.getExternalNodeCount() - 1;

    //This is actually the Augmented loglikelihood for fixed genealogy. For sequence data directly
    // this becomes the coalescent point process prior on an augmented "tree"
    /**
     * Augmented point-process log-likelihood for a fixed genealogy:
     * -lambda * constlik, plus count * log(lambda * coalfactor) over intervals with a
     * positive coalescent factor, plus the sigmoidal link term over all G-function values.
     *
     * @param Gfunction   GP function values at the event points
     * @param latentCounts per-interval latent point counts
     * @param eventType   +1/-1 event-type labels entering the sigmoid
     * @param upper_Bound one-dimensional parameter holding the thinning bound lambda
     * @param Gfactor     per-interval coalescent factors (n choose 2)
     * @return the augmented log-likelihood (also cached in logGPLikelihood)
     */
    public double calculateLogLikelihood(Parameter Gfunction, Parameter latentCounts,
                                         Parameter eventType, Parameter upper_Bound, double [] Gfactor) {
        if (!intervalsKnown) {
//            System.err.println(intervalsKnown+"It recomputes");
//            recomputeValues();
            intervalsKnown = true;
        }
        double upperBound = upper_Bound.getParameterValue(0);
//        System.err.println("Likelihood with "+getPopSizeParameter().getSize()+"and G-function"+eventType.getSize());
        System.err.println("GP calculations used");
        logGPLikelihood = -upperBound * getConstlik();
        for (int i = 0; i < latentCounts.getSize(); i++) {
            if (Gfactor[i] > 0) {
                logGPLikelihood += latentCounts.getParameterValue(i) * Math.log(upperBound * Gfactor[i]);
            }
        }
        double[] currentGfunction = Gfunction.getParameterValues();
        for (int i = 0; i < Gfunction.getSize(); i++) {
            // log-sigmoid: -log(1 + exp(-type * g))
            logGPLikelihood += -Math.log(1 + Math.exp(-eventType.getParameterValue(i) * currentGfunction[i]));
        }
        return logGPLikelihood;
    }

//    protected double calculateLogCoalescentLikelihood() {
//        if (!intervalsKnown) {
//            // intervalsKnown -> false when handleModelChanged event occurs in super.
//            wrapSetupIntervals();
//            setupGMRFWeights();
//            intervalsKnown = true;
//        // Matrix operations taken from block update sampler to calculate data likelihood and field prior
//        double currentLike = 0;
//        double[] currentGamma = popSizeParameter.getParameterValues();
//        for (int i = 0; i < fieldLength; i++) {
//            currentLike += -currentGamma[i] - sufficientStatistics[i] * Math.exp(-currentGamma[i]);
//        return currentLike;// + LogNormalDistribution.logPdf(Math.exp(popSizeParameter.getParameterValue(coalescentIntervals.length - 1)), mu, sigma);
//        return 0.0;

    /** @return the cached constant sum of coalfactor * interval length over all intervals. */
    public double getConstlik() {
        return constlik;
    }

    //For fixed genealogy this contains the Augmented likelihood, the GP prior and prior on a the upper bound
    /**
     * Cached total log-likelihood: augmented likelihood + GP prior + lambda-bound prior.
     * Recomputed only when likelihoodKnown has been invalidated.
     */
    public double getLogLikelihood() {
        System.err.println("get GPlikelihood used");
        if (!likelihoodKnown) {
            logLikelihood = calculateLogLikelihood(popSizeParameter, GPcounts, GPtype, lambda_boundParameter, GPcoalfactor)
                    + calculateLogGP()
                    + getLogPriorLambda(lambdaParameter.getParameterValue(0), 0.01, lambda_boundParameter.getParameterValue(0));
            likelihoodKnown = true;
        }
        return logLikelihood;
//        return 0.0;
    }

    //Calculates prior on g function
    /**
     * Intrinsic GMRF log-prior on the GP values: -0.5*logdet(Q) - 0.5*g'Qg
     * - 0.5*(n-1)*log(2*pi), with Q rebuilt from the current precision.
     */
    protected double calculateLogGP() {
//        if (!intervalsKnown) {
////            System.err.println("intervalsknown");
////            intervalsKnown -> false when handleModelChanged event occurs in super.
//            wrapSetupIntervals();
////            setupQmatrix(precisionParameter.getParameterValue(0));
//            intervalsKnown = true;
        setupQmatrix(precisionParameter.getParameterValue(0));
        double currentLike;
        DenseVector diagonal1 = new DenseVector(popSizeParameter.getSize());
        DenseVector currentGamma = new DenseVector(popSizeParameter.getParameterValues());
        SymmTridiagMatrix currentQ = weightMatrix;
//        System.err.println("Q.matrix sizes"+currentQ.numRows()+"and"+currentQ.numColumns());
        currentQ.mult(currentGamma, diagonal1);
        currentLike = -0.5 * logGeneralizedDeterminant(currentQ) - 0.5 * currentGamma.dot(diagonal1)
                - 0.5 * (popSizeParameter.getSize() - 1) * LOG_TWO_TIMES_PI;
        return currentLike;
    }

    // Calculates logprior on Upper Bound
    // NOTE(review): the two branches look inconsistent — the first branch returns
    // epsilon/lambdaMean (not its log), while the second applies Math.log only to
    // (1 - epsilon) and leaves the exponential-density factor un-logged. If this is
    // meant to be a log-density, both branches likely need a Math.log of the whole
    // expression — TODO confirm against the intended mixture prior.
    private double getLogPriorLambda(double lambdaMean, double epsilon, double lambdaValue) {
        double res;
        if (lambdaValue < lambdaMean) {
            res = epsilon * (1 / lambdaMean);
        } else {
            res = Math.log(1 - epsilon) * (1 / lambdaMean) * Math.exp(-(1 / lambdaMean) * (lambdaValue - lambdaMean));
        }
        return res;
    }

    //log pseudo-determinant
    /**
     * Log pseudo-determinant of a symmetric tridiagonal matrix: sum of the logs of
     * its eigenvalues above a small cutoff (1e-5), skipping the (near-)zero
     * eigenvalue of the intrinsic precision matrix.
     */
    public static double logGeneralizedDeterminant(SymmTridiagMatrix X) {
        //Set up the eigenvalue solver
        SymmTridiagEVD eigen = new SymmTridiagEVD(X.numRows(), false);
        //Solve for the eigenvalues
        try {
            eigen.factor(X);
        } catch (NotConvergedException e) {
            throw new RuntimeException("Not converged error in generalized determinate calculation.\n" + e.getMessage());
        }
        //Get the eigenvalues
        double[] x = eigen.getEigenvalues();
        double a = 0;
        for (double d : x) {
            if (d > 0.00001)
                a += Math.log(d);
        }
        return a;
    }

    /** Any parameter change invalidates the cached likelihood. */
    protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
        likelihoodKnown = false;
    }

    /** Restores the arrays and constlik saved by the matching storeState(). */
    protected void restoreState() {
        super.restoreState();
        System.arraycopy(storedGPcoalfactor, 0, GPcoalfactor, 0, storedGPcoalfactor.length);
        System.arraycopy(storedCoalTime, 0, CoalTime, 0, storedCoalTime.length);
        System.arraycopy(storedGPCoalInterval, 0, GPCoalInterval, 0, storedGPCoalInterval.length);
        System.arraycopy(storedCoalPosIndicator, 0, CoalPosIndicator, 0, storedCoalPosIndicator.length);
        constlik = storedconstlik;
//        weightMatrix = storedWeightMatrix;
//        logGPLikelihood = storedLogGPLikelihood;
    }

    /** Saves the mutable sufficient-statistic arrays and constlik for MCMC rollback. */
    protected void storeState() {
        super.storeState();
        System.arraycopy(GPcoalfactor, 0, storedGPcoalfactor, 0, GPcoalfactor.length);
        System.arraycopy(CoalTime, 0, storedCoalTime, 0, CoalTime.length);
        System.arraycopy(GPCoalInterval, 0, storedGPCoalInterval, 0, GPCoalInterval.length);
        System.arraycopy(CoalPosIndicator, 0, storedCoalPosIndicator, 0, CoalPosIndicator.length);
        storedconstlik = constlik;
//        storedWeightMatrix = weightMatrix.copy();
//        storedLogGPLikelihood = logGPLikelihood;
    }

    // I don't understand this
    /** Debug string: id plus the (possibly recomputed) log-likelihood. */
    public String toString() {
        return getId() + "(" + Double.toString(getLogLikelihood()) + ")";
    }

////    private final Parameter latentPoints;
//    private final Parameter lambda_bound;

    /** Prints a banner describing this model and its citation. */
    public void initializationReport() {
        System.out.println("Creating a GP based estimation of effective population trajectories:");
//        System.out.println("\tPopulation sizes: " + popSizeParameter.getDimension());
        System.out.println("\tIf you publish results using this model, please reference: Minin, Palacios, Suchard (XXXX), AAA");
    }

    /**
     * Jitters every non-root internal node height uniformly between its tallest
     * child and its parent, then fires a tree-changed event.
     */
    public static void checkTree(TreeModel treeModel) {
        // todo Should only be run if there exists a zero-length interval
//        TreeModel treeModel = (TreeModel) tree;
        for (int i = 0; i < treeModel.getInternalNodeCount(); i++) {
            NodeRef node = treeModel.getInternalNode(i);
            if (node != treeModel.getRoot()) {
                double parentHeight = treeModel.getNodeHeight(treeModel.getParent(node));
                double childHeight0 = treeModel.getNodeHeight(treeModel.getChild(node, 0));
                double childHeight1 = treeModel.getNodeHeight(treeModel.getChild(node, 1));
                double maxChild = childHeight0;
                if (childHeight1 > maxChild)
                    maxChild = childHeight1;
                double newHeight = maxChild + MathUtils.nextDouble() * (parentHeight - maxChild);
                treeModel.setNodeHeight(node, newHeight);
            }
        }
        treeModel.pushTreeChangedEvent();
    }

    //Sufficient Statistics for GP - coal+sampling
    // NOTE(review): this method calls System.exit(-1) before the recomputation loop,
    // killing the JVM whenever a tree change triggers it — apparently a deliberate
    // "not yet implemented" trap left during development. Verify before enabling.
    protected void recomputeValues() {
        System.err.println("There is a change in tree, then re-order and re-compute values");
        dobackupIntervals();
        wrapSetupIntervals();
        if (GPcounts.getSize() != getIntervalCount()) {
            System.err.println("Error when recomputing Values in GP Likelihood");
        }
        //There should be a better way but for now, I will go over each possible change...
        // Need to delete the node and add the new one
        System.exit(-1);
        double length = 0.0;
        double length2 = 0.0;
        double prevLength = 0.0;
        int countcoal = 0;
        constlik = 0;
        int j = 0;
        for (int i = 0; i < getIntervalCount(); i++) {
            int sum = 0;
            length += getInterval(i);
            // Count how many existing change points fall inside this interval.
            while (changePoints.getParameterValue(j) <= length) {
                sum++;
                j++;
            }
//            if (GPcounts.getSize()<=i){
//                GPcounts.addDimension(i,0.0);
//            } else {
            GPcounts.setParameterValue(i, sum);
            GPcoalfactor[i] = getLineageCount(i) * (getLineageCount(i) - 1.0) / 2.0;
            constlik += GPcoalfactor[i] * getInterval(i);
//            System.err.println("i: "+i+"val: "+length+" type: "+getIntervalType(i)+" lineages: "+getLineageCount(i));
            if (getIntervalType(i) == CoalescentEventType.COALESCENT) {
                GPcounts.setParameterValue(i, 1.0);
                GPtype.setParameterValue(countcoal, 1.0);
                CoalPosIndicator[countcoal] = i;
                changePoints.setParameterValue(countcoal, length);
                CoalCounts.setParameterValue(countcoal, 0.0);
                CoalTime[countcoal] = length;
                System.err.println(countcoal + "coal is:" + length + "with branches:" + getLineageCount(i));
                GPCoalInterval[countcoal] = length - prevLength;
                coalfactor.setParameterValue(countcoal, getLineageCount(i) * (getLineageCount(i) - 1) / 2.0);
                countcoal++;
                prevLength = length;
            }
        }
        Tmrca.setParameterValue(0, CoalTime[countcoal - 1]);
    }

    /**
     * Walks the tree intervals once, filling GPcounts, GPcoalfactor, constlik and,
     * at each coalescent event, the change point, coalescent time/interval/factor;
     * finally records the TMRCA (cumulative time of the last coalescent event).
     */
    protected void setupSufficientStatistics() {
        System.err.println("setting up sufficient statistics");
        double length = 0.0;
        double prevLength = 0.0;
        int countcoal = 0;
        constlik = 0;
        for (int i = 0; i < getIntervalCount(); i++) {
            length += getInterval(i);
//            if (GPcounts.getSize()<=i){
//                GPcounts.addDimension(i,0.0);
//            } else {
            GPcounts.setParameterValue(i, 0.0);
            GPcoalfactor[i] = getLineageCount(i) * (getLineageCount(i) - 1.0) / 2.0;
            constlik += GPcoalfactor[i] * getInterval(i);
//            System.err.println("i: "+i+"val: "+length+" type: "+getIntervalType(i)+" lineages: "+getLineageCount(i));
            if (getIntervalType(i) == CoalescentEventType.COALESCENT) {
                GPcounts.setParameterValue(i, 1.0);
                GPtype.setParameterValue(countcoal, 1.0);
                CoalPosIndicator[countcoal] = i;
                changePoints.setParameterValue(countcoal, length);
                CoalCounts.setParameterValue(countcoal, 0.0);
                CoalTime[countcoal] = length;
                System.err.println(countcoal + "coal is:" + length + "with branches:" + getLineageCount(i));
                GPCoalInterval[countcoal] = length - prevLength;
                coalfactor.setParameterValue(countcoal, getLineageCount(i) * (getLineageCount(i) - 1) / 2.0);
                countcoal++;
                prevLength = length;
            }
        }
        Tmrca.setParameterValue(0, CoalTime[countcoal - 1]);
    }

    /** @return the number of coalescent events: external node count minus one. */
    protected int getCorrectFieldLength() {
        return tree.getExternalNodeCount() - 1;
    }

    /**
     * Builds the intrinsic tridiagonal precision matrix Q from the change-point
     * spacings, scaled by the given precision; a small "trick" term keeps the
     * matrix numerically positive-definite.
     */
    protected void setupQmatrix(double precision) {
//        System.err.println("changepoints size"+changePoints.getSize());
        //Set up the weight Matrix
        double trick = 0.000001;
        double[] offdiag = new double[changePoints.getSize() - 1];
        double[] diag = new double[changePoints.getSize()];

        for (int i = 0; i < changePoints.getSize() - 1; i++) {
            offdiag[i] = precision * (-1.0 / (changePoints.getParameterValue(i + 1) - changePoints.getParameterValue(i)));
            if (i < getCorrectFieldLength() - 2) {
                diag[i + 1] = -offdiag[i] + precision * (1.0 / (changePoints.getParameterValue(i + 2) - changePoints.getParameterValue(i + 1)) + trick);
            }
        }
        // Diffuse prior correction - intrinsic
        //Take care of the endpoints
        diag[0] = -offdiag[0] + precision * trick;
        diag[getCorrectFieldLength() - 1] = -offdiag[getCorrectFieldLength() - 2] + precision * (trick);
        weightMatrix = new SymmTridiagMatrix(diag, offdiag);
    }

    /**
     * Draws an initial GP sample by solving U*g = z with U the Cholesky factor of Q
     * and z standard normal.
     * NOTE(review): the sampled draw (MultiNorm) is currently discarded — every
     * popSize value is set to the constant 1.0 instead; presumably a debugging
     * override left in place. Confirm which initialization is intended.
     */
    protected void setupGPvalues() {
        System.err.println("It is setting up the GPvalues");
        setupQmatrix(precisionParameter.getParameterValue(0));
        int length = getCorrectFieldLength();
        DenseVector StandNorm = new DenseVector(length);
        DenseVector MultiNorm = new DenseVector(length);
        for (int i = 0; i < length; i++) {
            StandNorm.set(i, MathUtils.nextGaussian());
//            StandNorm.set(i,0.1);
        }
        UpperSPDBandMatrix Qcurrent = new UpperSPDBandMatrix(weightMatrix, 1);
        BandCholesky U = new BandCholesky(length, 1, true);
        U.factor(Qcurrent);
        UpperTriangBandMatrix CholeskyUpper = U.getU();
        CholeskyUpper.solve(StandNorm, MultiNorm);
        for (int i = 0; i < length; i++) {
//            popSizeParameter.setParameterValue(i,MultiNorm.get(i));
            popSizeParameter.setParameterValue(i, 1.0);
//            popSizeParameter.setParameterValue(i,MathUtils.nextGaussian());
        }
    }

    // -- Simple accessors --

    public Parameter getPrecisionParameter() {
        return precisionParameter;
    }

    public Parameter getPopSizeParameter() {
        return popSizeParameter;
    }

    public Parameter getNumPoints() {
        return numPoints;
    }

    public Parameter getLambdaParameter() {
        return lambdaParameter;
    }

    public Parameter getLambdaBoundParameter() {
        return lambda_boundParameter;
    }

    public Parameter getChangePoints() {
        return changePoints;
    }

    public double getAlphaParameter() {
        return alphaParameter.getParameterValue(0);
    }

    public double getBetaParameter() {
        return betaParameter.getParameterValue(0);
    }

    public double [] getGPcoalfactor() {
        return GPcoalfactor;
    }

    public Parameter getcoalfactor() {
        return coalfactor;
    }

    public Parameter getCoalCounts() {
        return CoalCounts;
    }

    public Parameter getGPtype() {
        return GPtype;
    }

    public Parameter getGPcounts() {
        return GPcounts;
    }

    /** @return a defensive copy of the current precision matrix. */
    public SymmTridiagMatrix getWeightMatrix() {
        return weightMatrix.copy();
    }

    // Methods needed for GP-based
    public double [] getGPCoalInterval() {
        return GPCoalInterval;
    }

    public double [] getCoalTime() {
        return CoalTime;
    }

    public double getGPCoalInterval(int j) {
        return GPCoalInterval[j];
    }

    public int [] getCoalPosIndicator() {
        return CoalPosIndicator;
    }
}
package dr.evomodel.coalescent;

import dr.evolution.tree.Tree;
import dr.inference.model.MatrixParameter;
import dr.inference.model.Parameter;

import java.util.List;

/**
 * Skeleton Gaussian-process Skytrack likelihood built on top of the GMRF
 * Skyride machinery. The log-density is not implemented yet: the calculation
 * is a placeholder returning a constant until the real GP model is wired in.
 *
 * @author US..
 */
public class GaussianProcessSkytrackLikelihood extends GMRFSkyrideLikelihood {

    /** One-dimensional parameter holding the latent points of the GP process. */
    private final Parameter latentPoints;

    /**
     * Delegates all Skyride setup to the superclass, then captures the
     * latent-points parameter and forces it to dimension 1.
     */
    public GaussianProcessSkytrackLikelihood(List<Tree> treeList, Parameter popParameter, Parameter groupParameter,
                                             Parameter precParameter, Parameter lambda, Parameter beta,
                                             MatrixParameter dMatrix, boolean timeAwareSmoothing,
                                             boolean rescaleByRootHeight, Parameter latentPoints) {
        super(treeList, popParameter, groupParameter, precParameter, lambda, beta,
                dMatrix, timeAwareSmoothing, rescaleByRootHeight);

        this.latentPoints = latentPoints;
        this.latentPoints.setDimension(1);
    }

    /**
     * Placeholder evaluation of the log-density.
     *
     * @return the constant 1.0 until the correct density is implemented
     */
    public double calculateLogLikelihood() {
        return 1.0; // TODO Return the correct log-density
    }

    /**
     * Returns the cached log-likelihood, recomputing it first if it has been
     * invalidated since the last evaluation.
     */
    public double getLogLikelihood() {
        if (likelihoodKnown) {
            return logLikelihood;
        }
        logLikelihood = calculateLogLikelihood();
        likelihoodKnown = true;
        return logLikelihood;
    }
}
package edu.utah.sci.cyclist.view; import javafx.beans.property.ObjectProperty; import javafx.collections.ObservableList; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.geometry.Insets; import javafx.scene.control.ListView; import javafx.scene.control.ListViewBuilder; import javafx.scene.control.Menu; import javafx.scene.control.MenuBar; import javafx.scene.control.MenuItem; import javafx.scene.control.SeparatorMenuItem; import javafx.scene.image.ImageView; import javafx.scene.input.KeyCombination; import javafx.scene.layout.HBox; import javafx.scene.layout.HBoxBuilder; import javafx.scene.layout.Priority; import javafx.scene.layout.VBox; import javafx.scene.layout.VBoxBuilder; import javafx.scene.text.TextBuilder; import javafx.stage.Stage; import javafx.stage.Window; import edu.utah.sci.cyclist.Resources; import edu.utah.sci.cyclist.controller.CyclistController; import edu.utah.sci.cyclist.model.CyclistDatasource; import edu.utah.sci.cyclist.model.Table; import edu.utah.sci.cyclist.view.components.TablesPanel; import edu.utah.sci.cyclist.view.components.Spring; import edu.utah.sci.cyclist.view.components.Workspace; import edu.utah.sci.cyclist.view.wizard.DatasourceWizard; import edu.utah.sci.cyclist.view.wizard.DatatableWizard; import edu.utah.sci.cyclist.view.wizard.WorkspaceWizard; public class MainScreen extends VBox { public static final String ID = "main-screen"; private MenuBar _menubar; private VBox _toolsArea; private HBox _content; private TablesPanel _datasourcesPanel; private CyclistController _controller; /** * Constructor */ public MainScreen(Stage stage) { super(); setId(ID); init(stage); } public Window getWindow() { return getScene().getWindow(); } public void setControler(CyclistController controler) { this._controller = controler; } public ObjectProperty<String> selectWorkspace(ObservableList<String> list) { WorkspaceWizard wizard = new WorkspaceWizard(); wizard.setItems(list); return 
wizard.show(getScene().getWindow()); } public void setWorkspace(Workspace workspace) { HBox.setHgrow(workspace, Priority.ALWAYS); _content.getChildren().add(workspace); } public TablesPanel getDatasourcesPanel() { return _datasourcesPanel; } private void init(Stage stage){ // create the screen // -- menubar _menubar = createMenuBar(stage); // -- tables and schema _toolsArea = VBoxBuilder.create() .spacing(5) .prefWidth(150) .padding(new Insets(5)) .children( _datasourcesPanel = new TablesPanel(), // schema new Spring() ) .build(); VBox.setVgrow(_toolsArea, Priority.SOMETIMES); // -- workspace _content = HBoxBuilder.create() .children( _toolsArea // workspace ) .build(); // -- setup VBox.setVgrow(_content, Priority.ALWAYS); getChildren().addAll(_menubar, _content); } /* * Menus & Actions */ private MenuItem _datasourceMenuItem; private MenuItem _workspaceMenuItem; private MenuItem _quitMenuItem; private MenuItem _saveMenuItem; public ObjectProperty<EventHandler<ActionEvent>> onAddDatasource() { return _datasourceMenuItem.onActionProperty(); } public ObjectProperty<EventHandler<ActionEvent>> onSelectWorkspace() { return _workspaceMenuItem.onActionProperty(); } public ObjectProperty<EventHandler<ActionEvent>> onSave() { return _saveMenuItem.onActionProperty(); } public ObjectProperty<EventHandler<ActionEvent>> onQuit() { return _quitMenuItem.onActionProperty(); } private MenuBar createMenuBar(Stage stage) { MenuBar menubar = new MenuBar(); // -- File menu Menu fileMenu = createFileMenu(); menubar.getMenus().add(fileMenu); return menubar; } private Menu createFileMenu() { _datasourceMenuItem = new MenuItem("Add Datatable", new ImageView(Resources.getIcon("open.png"))); _workspaceMenuItem = new MenuItem("Workspace"); //new ImageView(Resources.getIcon("workspace.png"))); _saveMenuItem = new MenuItem("Save"); _saveMenuItem.setAccelerator(KeyCombination.keyCombination("Meta+S")); // -- Quit _quitMenuItem = new MenuItem("Quit"); 
_quitMenuItem.setAccelerator(KeyCombination.keyCombination("Meta+Q")); // -- setup the menu Menu fileMenu = new Menu("File"); fileMenu.getItems().addAll( _datasourceMenuItem, new SeparatorMenuItem(), _workspaceMenuItem, _saveMenuItem, new SeparatorMenuItem(), _quitMenuItem); return fileMenu; } }
import java.io.IOException; public class Main { public static void main(String[] args) throws IOException{ System.out.println("Welcome to Graph Isomorphism!"); //The following makes a "kite" graph G (with "a" as the main node). /* a-b |/| c-d */ Node<String> a= new Node("a"); Node<String> b= new Node("b"); Node<String> c= new Node("c"); Node<String> d= new Node("d"); a.addChild(b); a.addChild(c); b.addChild(a); b.addChild(c); b.addChild(d); c.addChild(a); c.addChild(b); c.addChild(d); d.addChild(c); d.addChild(b); Graph G1= new Graph(a); Graph G2= new Graph("graph"); Graph kite= new Graph("kite"); int n= 128; int count= 0; int opCount= 0; int loops= 10; for (int i= 0; i < loops; i++){ Graph randomGraph5= new Graph(n); Graph randomGraph5_Noniso= new Graph(n); //Reset the number of operations. Graph.numOp= 0; int[][] adj5= randomGraph5.adjacency; int[][] perm= Checker.makePermutation(n); int[][] permTranspose= Checker.makeTranspose(perm); int[][] adj5New= Checker.matrixMultiply(perm,adj5); adj5New= Checker.matrixMultiply(adj5New,permTranspose); //Note: adj5 and adj5New must represent isomorphic graphs. int[][] adj5_Noniso= randomGraph5_Noniso.adjacency; Graph A1= new Graph(); A1.traverseMatrix(adj5); Graph A2= new Graph(); A2.traverseMatrix(adj5_Noniso); Graph N1= new Graph("star_in_pentagon"); Graph N2= new Graph("star_in_pentagon_isomess"); boolean isomorphic= Graph.areIsomorphic(A1, A2); if (isomorphic == true) count++; System.out.println(" " + (A1.V.size() == A2.V.size()) + " " + isomorphic + " " + Graph.numOp); opCount+= Graph.numOp; } // Graph hyper= new Graph("hypersquare"); // Graph hypo= new Graph("hypotesseract"); // boolean isomorphic= Graph.areIsomorphic(hyper, hypo); System.out.println("Our success ratio is " + count); System.out.println("Our total number of operations is " + opCount); // System.out.println(isomorphic); } }
package jp.nyatla.mimic.mbedjs.javaapi.driver; import jp.nyatla.mimic.mbedjs.MbedJsException; import jp.nyatla.mimic.mbedjs.javaapi.I2C; import jp.nyatla.mimic.mbedjs.javaapi.Mcu; import jp.nyatla.mimic.mbedjs.javaapi.PinName; public class LPS331{ // Pressure configuration values. public final static int PRESSURE_AVG_1 =0x00; public final static int PRESSURE_AVG_2 =0x01; public final static int PRESSURE_AVG_4 =0x02; public final static int PRESSURE_AVG_8 =0x03; public final static int PRESSURE_AVG_16 =0x04; public final static int PRESSURE_AVG_32 =0x05; public final static int PRESSURE_AVG_64 =0x06; public final static int PRESSURE_AVG_128=0x07; public final static int PRESSURE_AVG_256=0x08; public final static int PRESSURE_AVG_384=0x09; public final static int PRESSURE_AVG_512=0x0a; // Temperature configuration values. public final static int TEMP_AVG_1 =0x00; public final static int TEMP_AVG_2 =0x01; public final static int TEMP_AVG_4 =0x02; public final static int TEMP_AVG_8 =0x03; public final static int TEMP_AVG_16 =0x04; public final static int TEMP_AVG_32 =0x05; public final static int TEMP_AVG_64 =0x06; public final static int TEMP_AVG_128 =0x07; // Data Rate Pressure / Temperature public final static int DATARATE_ONESHOT =0x00; // OneShot OneShot public final static int DATARATE_1HZ =0x01; // 1Hz 1Hz public final static int DATARATE_7HZ =0x02; // 7Hz 1Hz public final static int DATARATE_12_5HZ =0x03; // 12.5Hz 1Hz public final static int DATARATE_25HZ =0x04; // 25Hz 1Hz public final static int DATARATE_7HZ_T =0x05; // 7Hz 7Hz public final static int DATARATE_12_5HZ_T=0x06; // 12.5Hz 12.5Hz public final static int DATARATE_25HZ_T =0x07; // 25Hz 25Hz (*) // (*) Not allowed with PRESSURE_AVG_512 & TEMP_AVG_128. // More information , see datasheet. // I2C Address. 
public final static int I2C_ADDRESS_SA0_H=0xba; public final static int I2C_ADDRESS_SA0_L=0xb8; private final I2C _i2c; private final int _addr; /** I2C*/ private final boolean _is_attached; private int _ctrlreg1; /** * I2C * @param i_i2c * @param i_address * @throws MbedJsException */ public LPS331(I2C i_i2c,int i_address) throws MbedJsException { this._is_attached=false; this._i2c=i_i2c; this._addr=i_address; this._initDevice(); } /** * Mcu * @param i_mcu * @param sda * @param scl * @param i_address * @throws MbedJsException */ public LPS331(Mcu i_mcu, int sda, int scl, int i_address) throws MbedJsException { this._is_attached=true; this._i2c=new I2C(i_mcu, sda, scl); this._addr=i_address; this._i2c.frequency(10000); this._initDevice(); } private void _initDevice() throws MbedJsException { this._ctrlreg1 = 0x20; } public void dispose() throws MbedJsException{ if(this._is_attached){ this._i2c.dispose(); } } public int whoami() throws MbedJsException { return (this._read((byte)0x0f) & 0x0ff); } public boolean isAvailable() throws MbedJsException { if(this.whoami() == 0xbb) { return true; } return false; } public void setResolution(int i_pressure_avg,int i_temp_avg) throws MbedJsException { this._write((byte)0x10, (byte)(((i_temp_avg & 0x07) << 4) | (i_pressure_avg & 0x0f))); } public void setActive(boolean i_is_active) throws MbedJsException { if(i_is_active) { this._ctrlreg1 |= 0x80; } else { this._ctrlreg1 &= ~0x80; } this._write((byte)0x20,(byte)this._ctrlreg1); } public void setDataRate(int i_datarate) throws MbedJsException { int d= i_datarate & 0x07; this._ctrlreg1 &= ~(0x07 << 4); this._ctrlreg1 |= d << 4; this._write((byte)0x20,(byte)this._ctrlreg1); } public float getPressure() throws MbedJsException { byte[] data=this._read_multibyte((byte)0x28,3); float pressure = (data[0]&0x0ff)|((data[1]&0x0ff) << 8)|((data[2]&0x0ff) << 16); return pressure/4096.0f; } public float getTemperature() throws MbedJsException { byte[] b=this._read_multibyte((byte)0x2b, 2); 
int temp=((0x0ff & b[1])<<8)|(b[0] & 0x0ff); // temp = data[0]; // temp |= data[1] << 8; if((temp&0x00008000)!=0){ temp|=0xffff8000; } return (float)(42.5 + temp / 480.0); } private void _write(byte i_subaddress, byte i_data) throws MbedJsException { // _i2c.start(); // _i2c.write(_address); // _i2c.write(subaddress); // _i2c.write(data); // _i2c.stop(); this._i2c.write(this._addr,new byte[]{i_subaddress,i_data},false); } private byte _read(byte i_subaddress) throws MbedJsException { // int result = 0; // _i2c.start(); // _i2c.write(_address); // _i2c.write(subaddress); // _i2c.start(); // _i2c.write(_address | 1); // result = _i2c.read(0); // _i2c.stop(); this._i2c.write(this._addr,new byte[]{i_subaddress},false); I2C.ReadResult rs=this._i2c.read(this._addr,1,false); return rs.data[0]; } public byte[] _read_multibyte(byte i_startsubaddress,int i_count) throws MbedJsException { // _i2c.start(); // _i2c.write(_address); // _i2c.write(startsubaddress | 0x80); // _i2c.start(); // _i2c.write(_address | 1); // for(int i = 0; i < count; i++) { // data[i] = _i2c.read((i == count - 1) ? 0 : 1); // _i2c.stop(); this._i2c.write(this._addr,new byte[]{(byte)(i_startsubaddress|0x80)},false); I2C.ReadResult rs=this._i2c.read(this._addr,i_count,false); return rs.data; } /** * * @param args */ public static void main(String args[]){ try { Mcu mcu=new Mcu("192.168.128.39"); LPS331 a=new LPS331(mcu,PinName.p28,PinName.p27,0x90); System.out.println("Temperture:"+a.getTemperature()); System.out.println("Pressure:"+a.getPressure()); mcu.close(); System.out.println("done"); } catch (Exception e) { e.printStackTrace(); } } }
package consulo.cold;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLEncoder;
import java.util.Set;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.io.ZipUtil;

import consulo.cold.util.JavaCommandBuilder;

/**
 * Bootstrapper: downloads a Consulo distribution and its required plugins into
 * a temporary ".cold" directory, then launches the cold runner inside it and
 * exits with the runner's exit code.
 *
 * @author VISTALL
 * @since 12.04.2016
 */
public class Main
{
	private static final String ourDefaultPluginHost = "http://must-be.org/consulo/plugins/%s";

	private static final String[] requiredPluginList = new String[]{
			"org.consulo.devkit",
			"org.intellij.groovy",
			"org.intellij.intelliLang",
			"org.consulo.java",
			"JFlex Support",
			"com.intellij.junit",
			"com.intellij.properties",
			"com.intellij.regexp",
			"com.intellij.spellchecker",
			"com.intellij.uiDesigner",
			"com.intellij.xml",
			// internal plugin
			"consulo.internal.jenkins.helper"
	};

	public static void main(String[] args) throws Exception
	{
		Logger.setFactory(ColdLoggerFactory.class);

		File tempDirectory = new File(".", ".cold");
		// FIX: File.delete() is a silent no-op on a non-empty directory; delete recursively.
		FileUtil.delete(tempDirectory);
		FileUtil.createDirectory(tempDirectory);

		File consuloBuildFile = FileUtilRt.createTempFile("consulo", "zip", true);

		System.out.println("Downloading consulo build");
		URL url = new URL("http://must-be.org/vulcan/site/_consulo-distribution/out/consulo-win-no-jre.zip");
		download(url, consuloBuildFile);

		System.out.println("Extracting consulo build");
		ZipUtil.extract(consuloBuildFile, tempDirectory, null);

		String javaHome = System.getProperty("java.home");
		if(javaHome == null)
		{
			System.out.println("No java home");
			System.exit(-1);
			return;
		}

		// java.home usually points at the jre inside a JDK install - step up to the JDK root.
		if(javaHome.endsWith("jre"))
		{
			javaHome = new File(javaHome).getParent();
		}

		if(javaHome == null)
		{
			System.out.println("No jdk home");
			System.exit(-1);
			return;
		}

		File consuloPath = new File(tempDirectory, "Consulo");

		downloadColdRunner(consuloPath);

		for(String pluginId : requiredPluginList)
		{
			downloadRequiredPlugin(consuloPath, pluginId);
		}

		int exitValue = start(javaHome, consuloPath.getPath(), tempDirectory.getParentFile().getAbsolutePath());

		// FIX: recursive delete here as well (see above).
		FileUtil.delete(tempDirectory);

		System.exit(exitValue);
	}

	/**
	 * Streams the contents of {@code url} into {@code target}.
	 * FIX: streams were previously leaked when the copy failed; try-with-resources
	 * closes both ends on every path.
	 */
	private static void download(URL url, File target) throws IOException
	{
		try (InputStream inputStream = url.openStream();
			FileOutputStream fileOutputStream = new FileOutputStream(target))
		{
			FileUtilRt.copy(inputStream, fileOutputStream);
		}
	}

	/** Downloads the cold-runner jar into the distribution's lib directory. */
	private static void downloadColdRunner(File consuloPath) throws Exception
	{
		URL coldJar = new URL("https://github.com/consulo/cold/raw/master/build/cold-runner.jar");
		File coldJarFile = new File(consuloPath, "lib/cold-runner.jar");

		System.out.println("Downloading cold-runner.jar");
		download(coldJar, coldJarFile);
		System.out.println("Downloaded cold-runner.jar");
	}

	/**
	 * Downloads one required plugin zip (from the plugin host, or a patched build
	 * for the UI designer) and extracts it into the distribution's plugins directory.
	 */
	private static void downloadRequiredPlugin(File consuloPath, String pluginId) throws Exception
	{
		URL url;
		if(pluginId.equals("com.intellij.uiDesigner"))
		{
			// The stock UI designer plugin needs a patched ("hacked") build.
			url = new URL("https://raw.githubusercontent.com/consulo/cold/master/build/ui-designer_hacked.zip");
		}
		else
		{
			String urlString = String.format(ourDefaultPluginHost, "download?id=") + URLEncoder.encode(pluginId, "UTF8") + "&build=SNAPSHOT&uuid=" + URLEncoder.encode("cold", "UTF8");
			url = new URL(urlString);
		}

		File tempFile = FileUtilRt.createTempFile(pluginId, "zip", true);

		System.out.println("Downloading required plugin: " + pluginId + ", url: " + url);
		download(url, tempFile);

		System.out.println("Extracting required plugin: " + pluginId);
		ZipUtil.extract(tempFile, new File(consuloPath, "plugins"), null);
	}

	/**
	 * Builds the cold-runner java command line (classpath = tools.jar + everything
	 * in lib/, plus forwarded "cold."-prefixed system properties) and runs it.
	 *
	 * @return the runner process exit code
	 */
	private static int start(String javaHome, String consuloPath, String workingDirectory) throws Exception
	{
		JavaCommandBuilder javaCommandBuilder = new JavaCommandBuilder();
		javaCommandBuilder.setMainClassName("consulo.cold.runner.Main");
		javaCommandBuilder.setJavaHome(javaHome);
		javaCommandBuilder.addClassPathEntry(javaHome + "/lib/tools.jar");

		File libDir = new File(consuloPath, "lib");
		File[] libFiles = libDir.listFiles();
		// FIX: listFiles() returns null when the directory is missing/unreadable.
		if(libFiles != null)
		{
			for(File file : libFiles)
			{
				javaCommandBuilder.addClassPathEntry(file.getAbsolutePath());
			}
		}

		// copy all cold properties
		Set<String> properties = System.getProperties().stringPropertyNames();
		for(String property : properties)
		{
			if(property.startsWith("cold."))
			{
				javaCommandBuilder.addSystemProperty(property, System.getProperty(property));
			}
		}

		javaCommandBuilder.addSystemProperty("jdk6.home", javaHome);
		javaCommandBuilder.addSystemProperty("consulo.home", consuloPath);

		return execute(javaCommandBuilder.construct(), workingDirectory);
	}

	/**
	 * Runs the command in the given working directory, echoing its merged
	 * stdout/stderr to this process's stdout.
	 *
	 * @return the process exit code, or -1 if the wait was interrupted
	 */
	private static int execute(String[] args, String workDir) throws Exception
	{
		final Process process;

		System.out.println("Executing command: " + StringUtil.join(args, " "));

		process = new ProcessBuilder(args).directory(new File(workDir)).redirectErrorStream(true).start();

		try
		{
			// Daemon thread: pump the child's output so it can't block on a full pipe.
			Thread thread = new Thread()
			{
				@Override
				public void run()
				{
					InputStream inputStream = process.getInputStream();
					try
					{
						int b;
						while((b = inputStream.read()) != -1)
						{
							String s = String.valueOf((char) b);
							System.out.print(s);
						}
					}
					catch(IOException e)
					{
						e.printStackTrace();
					}
				}
			};
			thread.setDaemon(true);
			thread.start();

			return process.waitFor();
		}
		catch(InterruptedException e)
		{
			// FIX: restore the interrupt status instead of swallowing it.
			Thread.currentThread().interrupt();
			process.destroy();
		}
		return -1;
	}
}
package alignment.alignment_v2;

import gov.ornl.stucco.DBClient.Constraint;
import gov.ornl.stucco.DBClient.DBConnection;
import gov.ornl.stucco.DBClient.Constraint.Condition;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.json.*;

import com.tinkerpop.rexster.client.RexProException;
import com.tinkerpop.rexster.client.RexsterClient;

/**
 * Connects to Graph DB, aligns and adds new incoming graph data, provides some misc. utility functions.
 */
public class Align {

	private static final boolean SEARCH_FOR_DUPLICATES = false;
	private static final boolean ALIGN_VERT_PROPS = false;
	private static final int VERTEX_RETRIES = 2;
	private static final int EDGE_RETRIES = 2;

	private RexsterClient client = null;
	private Logger logger = null;
	private ConfigFileLoader config = null;
	private DBConnection connection = null;

	public Align() throws IOException{
		this(new DBConnection());
	}

	public Align(DBConnection c) throws IOException {
		this.connection = c;
		client = connection.getClient(); //TODO shouldn't need this...
		logger = LoggerFactory.getLogger(Align.class);
		//loading configuration file into HashMap
		config = new ConfigFileLoader();
		connection.createIndices();
	}

	@Override
	protected void finalize() throws Throwable {
		DBConnection.closeClient(this.client);
		super.finalize();
	}

	/**
	 * Parses a GraphSON document, inserts any new vertices (with retries), then
	 * inserts any new edges (with retries). Edges implied by new vertices
	 * (e.g. IP-in-addressRange) are discovered and queued as well.
	 *
	 * @param newGraphSection GraphSON text with optional "vertices"/"edges" arrays
	 * @return true if everything loaded; false on a parse failure or when any
	 *         vertex/edge ran out of retry attempts
	 */
	public boolean load(String newGraphSection){
		boolean ret = true;
		//do all the json obj parsing up front, in case you need to panic & leave early.
		List<JSONObject> verts = new ArrayList<JSONObject>();
		List<JSONObject> edges = new ArrayList<JSONObject>();
		try{
			JSONObject graphson = new JSONObject(newGraphSection);
			JSONArray json_verts = graphson.optJSONArray("vertices");
			if(json_verts != null){
				//if there are vertices...
				int vertCount = json_verts.length();
				for(int i=0; i<vertCount; i++){
					//add each one to verts list
					JSONObject vert = (JSONObject)json_verts.get(i);
					if(!vert.has("name"))
						vert.put("name", vert.get("_id")); //add "name" field equals its ID if needed
					verts.add(vert); //place vertex json object
				}
			}
			//...and likewise for edges
			JSONArray json_edges = graphson.optJSONArray("edges");
			if(json_edges != null){
				int edgeCount = json_edges.length();
				for(int i=0; i<edgeCount; i++){
					edges.add((JSONObject)json_edges.get(i));
				}
			}
		}catch(Exception e){ //we want *any* graphson problems to end up here
			//being noisy when these fail is probably ok, we shouldn't really ever fail here except when testing, etc.
			logger.error("Error parsing GraphSON in load()!");
			logger.error("The graphson was: " + newGraphSection);
			logger.error("Exception!",e);
			return false;
		}

		//try loading vertices.
		List<JSONObject> vertsToRetry;
		for(int currTry = 0; currTry <= VERTEX_RETRIES; currTry++){
			vertsToRetry = new ArrayList<JSONObject>();
			for(JSONObject vert : verts){
				try{
					String vert_name = vert.getString("name");
					String vert_id = vert.optString("_id");
					//FIX: was `vert_name == ""` - an identity comparison, unreliable for Strings.
					if(vert_name == null || vert_name.isEmpty()){
						vert_name = vert_id;
						vert.put("name", vert_name);
					}
					String otherVertID = connection.findVertId(vert_name);
					boolean new_vert = (otherVertID == null);
					if(new_vert && SEARCH_FOR_DUPLICATES){
						otherVertID = findDuplicateVertex(jsonVertToMap(vert));
						new_vert = (otherVertID == null);
					}
					if(new_vert){ //only add new...
						loadNewJSONVert(vert);
						//a new vertex may imply new edges; queue them with the rest.
						edges.addAll(findNewEdges(vert));
					}else{
						if(ALIGN_VERT_PROPS){
							alignVertProps(otherVertID, jsonVertToMap(vert));
						}else{
							logger.debug("Attempted to add vertex when duplicate exists. ALIGN_VERT_PROPS is false, so ignoring new vert. vert was: " + vert);
						}
					}
				}catch(Exception e){
					if(currTry >= VERTEX_RETRIES){ //out of tries
						logger.error("Could not add vertex! Vertex is out of retry attempts!");
						logger.error("vertex was: " + vert);
						logger.error("exception was: " + e.getLocalizedMessage() + "\n" + getStackTrace(e));
						ret = false;
					}else{
						logger.info("Could not add vertex! Adding vertex to retry queue.");
						logger.info("vertex was: " + vert);
						logger.info("exception was: " + e.getLocalizedMessage() + "\n" + getStackTrace(e));
						vertsToRetry.add(vert);
					}
				}
			}
			verts = vertsToRetry;
		}

		//try loading edges.
		List<JSONObject> edgesToRetry;
		for(int currTry = 0; currTry <= EDGE_RETRIES; currTry++){
			boolean lastTry = currTry >= EDGE_RETRIES;
			edgesToRetry = new ArrayList<JSONObject>();
			for(JSONObject edge : edges){
				try{
					boolean edgeResult = loadJSONEdge(edge, lastTry);
					if(!edgeResult)
						edgesToRetry.add(edge); //this can happen if the edge is missing one of its verts, which could be in the retry queue as well.
				}catch(Exception e){
					if(lastTry){ //out of tries
						logger.error("Could not add edge! Edge is out of retry attempts!");
						logger.error("edge was: " + edge);
						logger.error("exception was: " + e.getLocalizedMessage() + "\n" + getStackTrace(e));
						ret = false;
					}else{
						logger.info("Could not add edge! Adding edge to retry queue.");
						logger.info("edge was: " + edge);
						logger.info("exception was: " + e.getLocalizedMessage() + "\n" + getStackTrace(e));
						edgesToRetry.add(edge);
					}
				}
			}
			edges = edgesToRetry;
		}
		return ret;//TODO currently this is not idempotent, but after docIDs are added to the metadata (and used) they will be.
	}

	/**
	 * Inserts a brand-new vertex. IP vertices that are missing an "ipInt"
	 * property get one derived from their name.
	 */
	private void loadNewJSONVert(JSONObject vert) throws RexProException, IOException{
		String type = (String)vert.opt("vertexType");
		//FIX: was type.equals("IP"), which NPEs when vertexType is absent.
		if("IP".equals(type)){
			//if its an ip vert, and doesn't have an ip int, just add that here.
			//TODO: the extractors should really be doing this, but some aren't
			long ipInt = vert.optLong("ipInt");
			if(ipInt == 0){
				ipInt = getIpInt(vert.getString("name"));
				vert.put("ipInt", ipInt);
			}
		}
		connection.addVertexFromMap(jsonVertToMap(vert));
	}

	private boolean loadJSONEdge(JSONObject edge) throws JSONException, IOException, RexProException{
		return loadJSONEdge(edge, true);
	}

	/**
	 * Adds one edge, provided both endpoints already exist and no duplicate
	 * edge is present.
	 *
	 * @param lastTry when true, missing endpoints are logged as warnings
	 * @return true if the edge was added (or is a duplicate was skipped? no -
	 *         duplicates return false); false if an endpoint is missing or the
	 *         edge already exists
	 */
	private boolean loadJSONEdge(JSONObject edge, boolean lastTry) throws JSONException, IOException, RexProException{
		String outv_id = connection.findVertId(edge.getString("_outV"));
		String inv_id = connection.findVertId(edge.getString("_inV"));
		String edgeName = edge.getString("_id");
		if(outv_id == null){
			if(lastTry)
				logger.warn("Could not find out_v for edge: " + edge);
			return false;
		}
		if(inv_id == null){
			if(lastTry)
				logger.warn("Could not find in_v for edge: " + edge);
			return false;
		}
		String label = edge.optString("_label");
		if(connection.getEdgeCount(inv_id, outv_id, label) >= 1){
			//TODO need to merge edge props for this case, like verts above...
			logger.debug("Attempted to add duplicate edge. ignoring it. edge was: " + edge);
			return false;
		}
		connection.addEdgeFromJSON(edge); //TODO unused return code from this - is it even useful?
		return true;
	}

	/** Builds one "inAddressRange" edge from an IP vertex (out) to an addressRange vertex (in). */
	private static JSONObject makeAddressRangeEdge(String outv, String inv){
		JSONObject edge = new JSONObject();
		edge.put("_type", "edge");
		edge.put("_id", outv + "_inAddressRange_" + inv);
		edge.put("_label", "inAddressRange");
		edge.put("_inV", inv);
		edge.put("_outV", outv);
		edge.put("inVType", "addressRange");
		edge.put("outVType", "IP");
		return edge;
	}

	/**
	 * Finds edges implied by a newly added vertex: for an addressRange vertex,
	 * edges from every IP inside the range; for an IP vertex, edges to every
	 * addressRange containing it.
	 */
	private List<JSONObject> findNewEdges(JSONObject vert) throws IOException, RexProException {
		List<JSONObject> edges = new ArrayList<JSONObject>();
		String vert_name = vert.getString("name");
		String type = (String)vert.opt("vertexType");
		if(type == null){
			logger.warn("no vertex type specified for vertex:" + vert.toString());
		}else if(type.equals("addressRange")){
			long endIpInt = vert.optLong("endIPInt");
			long startIpInt = vert.optLong("startIPInt");
			if(endIpInt != 0 && startIpInt != 0){
				//find all IPs whose ipInt falls inside [startIPInt, endIPInt]
				List<Constraint> constraints = new ArrayList<Constraint>();
				constraints.add(new Constraint("vertexType", Condition.eq, "IP"));
				constraints.add(new Constraint("ipInt", Condition.lte, endIpInt));
				constraints.add(new Constraint("ipInt", Condition.gte, startIpInt));
				List<Map<String,Object>> matches = connection.findAllVertsWithProps(constraints);
				if(matches != null){
					for(Map<String,Object> match : matches){
						Map<String,Object> currMatchProps = (Map<String,Object>)match.get("_properties");
						edges.add(makeAddressRangeEdge((String)currMatchProps.get("name"), vert_name));
					}
				}
			}else{
				logger.warn("address range vert did not have int addresses: " + vert.toString());
			}
		}else if(type.equals("IP")){
			//find all addressRanges that contain this IP's ipInt
			List<Constraint> constraints = new ArrayList<Constraint>();
			constraints.add(new Constraint("vertexType", Condition.eq, "addressRange"));
			constraints.add(new Constraint("endIPInt", Condition.gte, vert.getLong("ipInt")));
			constraints.add(new Constraint("startIPInt", Condition.lte, vert.getLong("ipInt")));
			List<Map<String,Object>> matches = connection.findAllVertsWithProps(constraints);
			if(matches != null){
				for(Map<String,Object> match : matches){
					Map<String,Object> currMatchProps = (Map<String,Object>)match.get("_properties");
					edges.add(makeAddressRangeEdge(vert_name, (String)currMatchProps.get("name")));
				}
			}
		}
		return edges;
	}

	/** Convenience overload: looks up the merge configuration by vertexType. */
	public void alignVertProps(String vertID, Map<String, Object> newProps) throws RexProException, IOException{
		String type = (String)newProps.get("vertexType");
		alignVertProps(vertID, newProps, getVertexConfig(type));
	}

	//mergeMethods are derived from ontology definition
	/**
	 * Merges newProps into the existing vertex vertID, property by property,
	 * using the per-property "resolutionFunction" from vertConfig to resolve
	 * conflicts (keepNew / appendList / keepUpdates / keepConfidence).
	 *
	 * FIX: all String comparisons here used ==/!= (identity), which only worked
	 * for interned literals; now uses equals().
	 */
	public void alignVertProps(String vertID, Map<String, Object> newProps, Map<String, Map<String, Object>> vertConfig) throws RexProException, IOException{
		Map<String, Object> oldProps = connection.getVertByID(vertID);
		for(String key : newProps.keySet()){
			if(!oldProps.containsKey(key)){
				//oldProps did not contain this, so just add it.
				oldProps.put(key, newProps.get(key));
				continue;
			}
			//both old & new have this, so check how to merge.
			if("timeStamp".equals(key) || "score".equals(key)){
				//yeah... don't try to merge those here, it breaks things.
				//TODO these will need special handling .... and it will need to be someplace else, after we finish w/ the rest of the vert's props.
				continue;
			}
			if(oldProps.get(key).equals(newProps.get(key))){
				//if they're already the same value, don't waste any more time with it.
				continue;
			}
			String mergeMethod = null;
			try{
				mergeMethod = (String) vertConfig.get(key).get("resolutionFunction");
			}catch(NullPointerException e){
				//this will happen if 'key' isn't in the vertConfig map.
				mergeMethod = null;
				logger.warn("no config info found for property: " + key);
			}
			if(mergeMethod == null || "keepNew".equals(mergeMethod)){
				oldProps.put(key, newProps.get(key));
			}else if("appendList".equals(mergeMethod)){
				Object oldVal = oldProps.get(key);
				List<Object> oldList;
				if(oldVal instanceof List){
					oldList = (List<Object>)oldVal;
				}else{
					oldList = new ArrayList<Object>();
					oldList.add(oldVal);
				}
				Object n = newProps.get(key);
				if(n instanceof List){
					oldList.addAll((List<Object>)n);
				}else{
					oldList.add(n);
				}
				oldProps.put(key, oldList);
			}else if("keepUpdates".equals(mergeMethod)){
				long oldTime = asLong(oldProps.get("timeStamp"), -1);
				long newTime = asLong(newProps.get("timeStamp"), -1);
				if(newTime >= oldTime){
					oldProps.put(key, newProps.get(key));
				}
			}else if("keepConfidence".equals(mergeMethod)){
				double oldScore = asDouble(oldProps.get("score"), 0.0);
				//FIX: the new score was cast with (Long), which throws
				//ClassCastException for String/Double values; coerce like the old one.
				double newScore = asDouble(newProps.get("score"), 0.0);
				if(newScore >= oldScore){
					oldProps.put(key, newProps.get(key));
				}
			}
		}
		connection.updateVert(vertID, oldProps);
	}

	/** Best-effort coercion of a timestamp stored as String or Number. */
	private static long asLong(Object val, long fallback){
		if(val instanceof String)
			return Long.parseLong((String)val);
		if(val instanceof Number)
			return ((Number)val).longValue();
		return fallback; //TODO warn?
	}

	/** Best-effort coercion of a score stored as String or Number. */
	private static double asDouble(Object val, double fallback){
		if(val instanceof String)
			return Double.parseDouble((String)val);
		if(val instanceof Number)
			return ((Number)val).doubleValue();
		return fallback; //TODO warn?
	}

	/** Converts a JSONArray into an equivalent List. */
	public List<Object> jsonArrayToList(JSONArray a){
		List<Object> l = new ArrayList<Object>();
		for(int i=0; i<a.length(); i++){
			l.add(a.get(i));
		}
		return l;
	}

	/**
	 * Converts a vertex JSONObject into a Map, turning JSONArray values into
	 * Lists. Nested JSONObjects are unexpected and only logged.
	 */
	public Map<String, Object> jsonVertToMap(JSONObject v){
		Map<String, Object> vert = new HashMap<String, Object>();
		for(Object k : v.keySet()){
			String key = (String) k;
			Object value = v.get(key);
			if(value instanceof JSONArray){
				value = jsonArrayToList((JSONArray)value);
			}
			else if(value instanceof JSONObject){
				logger.warn("jsonVertToMap: unexpected property type: JSONObject for property " + key + "\n" + v);
			}
			vert.put(key, value);
		}
		return vert;
	}

	//only public for tests
	/**
	 * Searches existing vertices of the same type for the best-scoring match
	 * above a similarity threshold.
	 *
	 * @return the id of the best candidate scoring >= threshold, or null
	 */
	public String findDuplicateVertex(Map<String, Object> vertex) {
		//TODO populate threshold
		double threshold = 0.75;
		Map<String, Object> vertexProps = (Map<String, Object>)(vertex.get("_properties"));
		String vertexType = (String)(vertexProps.get("vertexType"));
		//TODO check vertexType: for some types, we never want to search in this way (eg. flows.)
		List<Map<String,Object>> candidateVerts = findCandidateMatches(vertex);
		double bestScore = 0.0;
		String bestID = null;
		Map<String, Map<String, Object>> configProperties = getVertexConfig(vertexType);
		for(Map<String, Object> candidateVertex : candidateVerts){
			String id = (String)candidateVertex.get("_id");
			double score = Compare.compareVertices(vertex, candidateVertex, configProperties);
			logger.info("Found score of " + score + " for id " + id);
			if(score >= threshold && score > bestScore){
				bestID = id;
				bestScore = score;
			}
		}
		return bestID;
	}

	/** Fetches all vertices with the same vertexType as the given vertex. */
	public List<Map<String,Object>> findCandidateMatches(Map<String, Object> vertex) {
		List<Map<String,Object>> results = new ArrayList<Map<String,Object>>();
		Map<String, Object> vertexProps = (Map<String, Object>)(vertex.get("_properties"));
		String vertType = (String)(vertexProps.get("vertexType"));
		try {
			results = connection.findAllVertsByType(vertType);
		} catch (IOException | RexProException e) {
			//FIX: was printStackTrace(); log and fall through to the empty list.
			logger.error("could not find candidate matches for type: " + vertType, e);
		}
		return results;
	}

	//Unused
	public Map<String, Map<String, Object>> getVertexConfig (JSONObject vertex) {
		return getVertexConfig(vertex.getString("vertexType"));
	}

	/** @return the per-property merge configuration for the given vertexType. */
	public Map<String, Map<String, Object>> getVertexConfig (String vertexType) {
		return config.getVertexConfig(vertexType);
	}

	//TODO only public for tests
	/**
	 * Converts a dotted IP string to its integer value (big-endian byte fold).
	 * Returns 0 if the name cannot be resolved to an address.
	 */
	public long getIpInt(String ipString) {
		long retAddr = 0;
		try {
			InetAddress addr = InetAddress.getByName(ipString);
			for (byte b: addr.getAddress()){
				retAddr = (retAddr << 8) | (b & 0xFF);
			}
		} catch (UnknownHostException e) {
			//FIX: was printStackTrace(); best-effort, keep returning 0.
			logger.warn("could not resolve ip string: " + ipString, e);
		}
		return retAddr;
	}

	/** Renders an exception's stack trace to a String for logging. */
	private static String getStackTrace(Exception e){
		StringWriter sw = new StringWriter();
		PrintWriter pw = new PrintWriter(sw);
		e.printStackTrace(pw);
		return sw.toString();
	}
}
package bdv.img.hdf5; import static bdv.img.hdf5.Util.getResolutionsPath; import static bdv.img.hdf5.Util.getSubdivisionsPath; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription; import mpicbg.spim.data.generic.sequence.BasicViewSetup; import mpicbg.spim.data.generic.sequence.ImgLoaderHint; import mpicbg.spim.data.generic.sequence.ImgLoaderHints; import mpicbg.spim.data.sequence.Angle; import mpicbg.spim.data.sequence.Channel; import mpicbg.spim.data.sequence.MultiResolutionImgLoader; import mpicbg.spim.data.sequence.MultiResolutionSetupImgLoader; import mpicbg.spim.data.sequence.TimePoint; import mpicbg.spim.data.sequence.ViewId; import mpicbg.spim.data.sequence.VoxelDimensions; import net.imglib2.Cursor; import net.imglib2.Dimensions; import net.imglib2.FinalDimensions; import net.imglib2.FinalInterval; import net.imglib2.IterableInterval; import net.imglib2.RandomAccess; import net.imglib2.RandomAccessibleInterval; import net.imglib2.img.Img; import net.imglib2.img.ImgFactory; import net.imglib2.img.NativeImg; import net.imglib2.img.array.ArrayImgFactory; import net.imglib2.img.array.ArrayImgs; import net.imglib2.img.basictypeaccess.array.ShortArray; import net.imglib2.img.basictypeaccess.volatiles.array.VolatileShortArray; import net.imglib2.img.cell.CellImg; import net.imglib2.img.cell.CellImgFactory; import net.imglib2.img.cell.DefaultCell; import net.imglib2.realtransform.AffineTransform3D; import net.imglib2.type.NativeType; import net.imglib2.type.numeric.integer.UnsignedShortType; import net.imglib2.type.numeric.real.FloatType; import net.imglib2.type.volatiles.VolatileUnsignedShortType; import net.imglib2.util.Fraction; import net.imglib2.util.Intervals; import net.imglib2.view.Views; import 
bdv.AbstractViewerSetupImgLoader; import bdv.ViewerImgLoader; import bdv.img.cache.CacheHints; import bdv.img.cache.CachedCellImg; import bdv.img.cache.LoadingStrategy; import bdv.img.cache.VolatileGlobalCellCache; import bdv.img.cache.VolatileImgCells; import bdv.img.cache.VolatileImgCells.CellCache; import bdv.util.ConstantRandomAccessible; import bdv.util.MipmapTransforms; import ch.systemsx.cisd.hdf5.HDF5Factory; import ch.systemsx.cisd.hdf5.IHDF5Reader; public class Hdf5ImageLoader implements ViewerImgLoader, MultiResolutionImgLoader { protected File hdf5File; /** * The {@link Hdf5ImageLoader} can be constructed with an existing * {@link IHDF5Reader} which if non-null will be used instead of creating a * new one on {@link #hdf5File}. * * <p> * <em>Note that {@link #close()} will not close the existingHdf5Reader!</em> */ protected IHDF5Reader existingHdf5Reader; protected IHDF5Access hdf5Access; protected VolatileGlobalCellCache cache; protected Hdf5VolatileShortArrayLoader shortLoader; /** * Maps setup id to {@link SetupImgLoader}. */ protected final HashMap< Integer, SetupImgLoader > setupImgLoaders; /** * List of partitions if the dataset is split across several files */ protected final ArrayList< Partition > partitions; protected int maxNumLevels; /** * Maps {@link ViewLevelId} (timepoint, setup, level) to * {@link DimsAndExistence}. Every entry is either null or the existence and * dimensions of one image. This is filled in when an image is loaded for * the first time. */ protected final HashMap< ViewLevelId, DimsAndExistence > cachedDimsAndExistence; protected final AbstractSequenceDescription< ?, ?, ? > sequenceDescription; /** * * @param hdf5File * @param hdf5Partitions * @param sequenceDescription * the {@link AbstractSequenceDescription}. When loading images, * this may be used to retrieve additional information for a * {@link ViewId}, such as setup name, {@link Angle}, * {@link Channel}, etc. 
*/ public Hdf5ImageLoader( final File hdf5File, final ArrayList< Partition > hdf5Partitions, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription ) { this( hdf5File, hdf5Partitions, sequenceDescription, true ); } public Hdf5ImageLoader( final File hdf5File, final ArrayList< Partition > hdf5Partitions, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription, final boolean doOpen ) { this( hdf5File, null, hdf5Partitions, sequenceDescription, doOpen ); } protected Hdf5ImageLoader( final File hdf5File, final IHDF5Reader existingHdf5Reader, final ArrayList< Partition > hdf5Partitions, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription, final boolean doOpen ) { this.existingHdf5Reader = existingHdf5Reader; this.hdf5File = hdf5File; setupImgLoaders = new HashMap< Integer, SetupImgLoader >(); cachedDimsAndExistence = new HashMap< ViewLevelId, DimsAndExistence >(); this.sequenceDescription = sequenceDescription; partitions = new ArrayList< Partition >(); if ( hdf5Partitions != null ) partitions.addAll( hdf5Partitions ); if ( doOpen ) open(); } private boolean isOpen = false; private void open() { if ( ! isOpen ) { synchronized ( this ) { if ( isOpen ) return; isOpen = true; final IHDF5Reader hdf5Reader = ( existingHdf5Reader != null ) ? existingHdf5Reader : HDF5Factory.openForReading( hdf5File ); maxNumLevels = 0; final List< ? 
extends BasicViewSetup > setups = sequenceDescription.getViewSetupsOrdered();
			// For every setup, read the per-level resolutions and chunk subdivisions
			// from the hdf5 file and build a SetupImgLoader around them, tracking
			// the maximum number of mipmap levels over all setups.
			for ( final BasicViewSetup setup : setups )
			{
				final int setupId = setup.getId();
				final double[][] resolutions = hdf5Reader.readDoubleMatrix( getResolutionsPath( setupId ) );
				final AffineTransform3D[] transforms = new AffineTransform3D[ resolutions.length ];
				for ( int level = 0; level < resolutions.length; level++ )
					transforms[ level ] = MipmapTransforms.getMipmapTransformDefault( resolutions[ level ] );
				final int[][] subdivisions = hdf5Reader.readIntMatrix( getSubdivisionsPath( setupId ) );

				if ( resolutions.length > maxNumLevels )
					maxNumLevels = resolutions.length;

				setupImgLoaders.put( setupId, new SetupImgLoader( setupId, new MipmapInfo( resolutions, transforms, subdivisions ) ) );
			}

			// Drop dimensions/existence info that may be cached from a previously opened file.
			cachedDimsAndExistence.clear();

			// Cache sizing uses (largest id + 1) as the count, i.e. it assumes ids start near 0.
			final List< TimePoint > timepoints = sequenceDescription.getTimePoints().getTimePointsOrdered();
			final int maxNumTimepoints = timepoints.get( timepoints.size() - 1 ).getId() + 1;
			final int maxNumSetups = setups.get( setups.size() - 1 ).getId() + 1;

			// Prefer the HDF5AccessHack; fall back to plain HDF5Access if it cannot be constructed.
			try
			{
				hdf5Access = new HDF5AccessHack( hdf5Reader );
			}
			catch ( final Exception e )
			{
				e.printStackTrace();
				hdf5Access = new HDF5Access( hdf5Reader );
			}
			shortLoader = new Hdf5VolatileShortArrayLoader( hdf5Access );

			cache = new VolatileGlobalCellCache( maxNumTimepoints, maxNumSetups, maxNumLevels, 1 );
			}
		}
	}

	/**
	 * Clear the cache and close the hdf5 file. Images that were obtained from
	 * this loader before {@link #close()} will stop working. Requesting images
	 * after {@link #close()} will cause the hdf5 file to be reopened (with a
	 * new cache).
	 */
	public void close()
	{
		if ( isOpen )
		{
			synchronized ( this )
			{
				// double-checked inside the lock: another thread may have closed already
				if ( !isOpen )
					return;
				isOpen = false;

				cache.clearCache();
				hdf5Access.closeAllDataSets();

				// only close reader if we constructed it ourselves
				if ( existingHdf5Reader == null )
					hdf5Access.close();
			}
		}
	}

	/**
	 * Eagerly populate the dimensions/existence cache for every
	 * (timepoint, setup, level) combination in the sequence.
	 *
	 * @param background
	 *            if true, wait up to 100 ms after each timepoint so that this
	 *            pre-fetching does not starve interactive loading.
	 */
	public void initCachedDimensionsFromHdf5( final boolean background )
	{
		open();
		final long t0 = System.currentTimeMillis();
		final List< TimePoint > timepoints = sequenceDescription.getTimePoints().getTimePointsOrdered();
		final List< ? extends BasicViewSetup > setups = sequenceDescription.getViewSetupsOrdered();
		for ( final TimePoint timepoint : timepoints )
		{
			final int t = timepoint.getId();
			for ( final BasicViewSetup setup : setups )
			{
				final int s = setup.getId();
				final int numLevels = getSetupImgLoader( s ).numMipmapLevels();
				for ( int l = 0; l < numLevels; ++l )
					getDimsAndExistence( new ViewLevelId( t, s, l ) );
			}
			if ( background )
				synchronized ( this )
				{
					try
					{
						wait( 100 );
					}
					catch ( final InterruptedException e )
					{}
				}
		}
		final long t1 = System.currentTimeMillis() - t0;
		System.out.println( "initCachedDimensionsFromHdf5 : " + t1 + " ms" );
	}

	public File getHdf5File()
	{
		return hdf5File;
	}

	public ArrayList< Partition > getPartitions()
	{
		return partitions;
	}

	@Override
	public VolatileGlobalCellCache getCache()
	{
		open();
		return cache;
	}

	public Hdf5VolatileShortArrayLoader getShortArrayLoader()
	{
		open();
		return shortLoader;
	}

	/**
	 * Checks whether the given image data is present in the hdf5. Missing data
	 * may be caused by missing partition files
	 *
	 * @return true, if the given image data is present.
	 */
	public boolean existsImageData( final ViewLevelId id )
	{
		return getDimsAndExistence( id ).exists();
	}

	/**
	 * Get dimensions and existence flag for the given view level, consulting
	 * the in-memory cache first and querying the hdf5 file on a miss.
	 */
	public DimsAndExistence getDimsAndExistence( final ViewLevelId id )
	{
		open();
		DimsAndExistence dims = cachedDimsAndExistence.get( id );
		if ( dims == null )
		{
			// pause Fetcher threads for 5 ms. There will be more calls to
			// getImageDimension() because this happens when a timepoint is
			// loaded, and all setups for the timepoint are loaded then. We
			// don't want to interleave this with block loading operations.
			cache.pauseFetcherThreadsFor( 5 );
			dims = hdf5Access.getDimsAndExistence( id );
			cachedDimsAndExistence.put( id, dims );
		}
		return dims;
	}

	/**
	 * Debugging aid: dump resolutions, subdivisions, and level sizes of every
	 * setup to stdout. Level sizes are probed at the first timepoint only.
	 */
	public void printMipmapInfo()
	{
		open();
		for ( final BasicViewSetup setup : sequenceDescription.getViewSetupsOrdered() )
		{
			final int setupId = setup.getId();
			System.out.println( "setup " + setupId );
			final MipmapInfo mipmapInfo = getSetupImgLoader( setupId ).getMipmapInfo();
			final double[][] reslevels = mipmapInfo.getResolutions();
			final int[][] subdiv = mipmapInfo.getSubdivisions();
			final int numLevels = mipmapInfo.getNumLevels();
			System.out.println( " resolutions:");
			for ( int level = 0; level < numLevels; ++level )
			{
				final double[] res = reslevels[ level ];
				System.out.println( " " + level + ": " + net.imglib2.util.Util.printCoordinates( res ) );
			}
			System.out.println( " subdivisions:");
			for ( int level = 0; level < numLevels; ++level )
			{
				final int[] res = subdiv[ level ];
				System.out.println( " " + level + ": " + net.imglib2.util.Util.printCoordinates( res ) );
			}
			System.out.println( " level sizes:" );
			final int timepointId = sequenceDescription.getTimePoints().getTimePointsOrdered().get( 0 ).getId();
			for ( int level = 0; level < numLevels; ++level )
			{
				final DimsAndExistence dims = getDimsAndExistence( new ViewLevelId( timepointId, setupId, level ) );
				final long[] dimensions = dims.getDimensions();
				System.out.println( " " + level + ": " + net.imglib2.util.Util.printCoordinates( dimensions ) );
			}
		}
	}

	/**
	 * normalize img to 0...1
	 */
	protected static void normalize( final IterableInterval< FloatType > img )
	{
		// first pass: find min and max
		float currentMax = img.firstElement().get();
		float currentMin = currentMax;
		for ( final FloatType t : img )
		{
			final float f = t.get();
			if ( f > currentMax )
				currentMax = f;
			else if ( f < currentMin )
				currentMin = f;
		}

		// second pass: affine rescale to [0, 1]
		// NOTE(review): scale is infinite for a constant image (max == min) — confirm callers never pass one.
		final float scale = ( float ) ( 1.0 / ( currentMax - currentMin ) );
		for ( final FloatType t : img )
			t.set( ( t.get() - currentMin ) * scale );
	}

	@Override
	public SetupImgLoader getSetupImgLoader( final int setupId )
	{
		open();
		return setupImgLoaders.get( setupId );
	}

	/**
	 * Per-setup loader for {@link UnsignedShortType} images (and their
	 * volatile counterparts) at multiple mipmap levels, backed by the shared
	 * cell cache of the enclosing loader.
	 */
	public class SetupImgLoader extends AbstractViewerSetupImgLoader< UnsignedShortType, VolatileUnsignedShortType >
			implements MultiResolutionSetupImgLoader< UnsignedShortType >
	{
		private final int setupId;

		/**
		 * Description of available mipmap levels for the setup. Contains for
		 * each mipmap level, the subsampling factors and subdivision block
		 * sizes.
		 */
		private final MipmapInfo mipmapInfo;

		protected SetupImgLoader( final int setupId, final MipmapInfo mipmapInfo )
		{
			super( new UnsignedShortType(), new VolatileUnsignedShortType() );
			this.setupId = setupId;
			this.mipmapInfo = mipmapInfo;
		}

		/**
		 * Load the full image for (timepointId, level) into memory, bypassing
		 * the cell cache: an ArrayImg if the pixel count fits into a single
		 * java array, otherwise a CellImg filled block by block.
		 */
		private RandomAccessibleInterval< UnsignedShortType > loadImageCompletely( final int timepointId, final int level )
		{
			open();

			Img< UnsignedShortType > img = null;

			final DimsAndExistence dimsAndExistence = getDimsAndExistence( new ViewLevelId( timepointId, setupId, level ) );
			// NOTE(review): dimsLong is null when the data does not exist, so the
			// next line would throw NullPointerException — confirm every caller
			// checks existsImageData() first (getImage() does).
			final long[] dimsLong = dimsAndExistence.exists() ? dimsAndExistence.getDimensions() : null;
			final int n = dimsLong.length;
			final int[] dimsInt = new int[ n ];
			final long[] min = new long[ n ];
			if ( Intervals.numElements( new FinalDimensions( dimsLong ) ) <= Integer.MAX_VALUE )
			{
				// use ArrayImg
				for ( int d = 0; d < dimsInt.length; ++d )
					dimsInt[ d ] = ( int ) dimsLong[ d ];
				short[] data = null;
				try
				{
					data = hdf5Access.readShortMDArrayBlockWithOffset( timepointId, setupId, level, dimsInt, min );
				}
				catch ( final InterruptedException e )
				{}
				img = ArrayImgs.unsignedShorts( data, dimsLong );
			}
			else
			{
				// too big for one array: build a CellImg whose cell size is a
				// multiple of the hdf5 chunk size, and fill each cell separately
				final int[] cellDimensions = computeCellDimensions( dimsLong, mipmapInfo.getSubdivisions()[ level ] );
				final CellImgFactory< UnsignedShortType > factory = new CellImgFactory< UnsignedShortType >( cellDimensions );
				@SuppressWarnings( "unchecked" )
				final CellImg< UnsignedShortType, ShortArray, DefaultCell< ShortArray > > cellImg =
						( CellImg< UnsignedShortType, ShortArray, DefaultCell< ShortArray > > ) factory.create( dimsLong, new UnsignedShortType() );
				final Cursor< DefaultCell< ShortArray > > cursor = cellImg.getCells().cursor();
				while ( cursor.hasNext() )
				{
					final DefaultCell< ShortArray > cell = cursor.next();
					final short[] dataBlock = cell.getData().getCurrentStorageArray();
					cell.dimensions( dimsInt );
					cell.min( min );
					try
					{
						hdf5Access.readShortMDArrayBlockWithOffset( timepointId, setupId, level, dimsInt, min, dataBlock );
					}
					catch ( final InterruptedException e )
					{}
				}

				img = cellImg;
			}

			return img;
		}

		/**
		 * Choose cell dimensions that are per-axis multiples of the hdf5 chunk
		 * size while keeping at most {@code Integer.MAX_VALUE} elements per cell.
		 */
		private int[] computeCellDimensions( final long[] dimsLong, final int[] chunkSize )
		{
			final int n = dimsLong.length;

			final long[] dimsInChunks = new long[ n ];
			int elementsPerChunk = 1;

			for ( int d = 0; d < n; ++d )
			{
				dimsInChunks[ d ] = ( dimsLong[ d ] + chunkSize[ d ] - 1 ) / chunkSize[ d ];
				// NOTE(review): int multiplication — could overflow for very large chunks; confirm chunk sizes stay small.
				elementsPerChunk *= chunkSize[ d ];
			}

			final int[] cellDimensions = new int[ n ];
			// s = how many whole chunks still fit in the remaining element budget
			long s = Integer.MAX_VALUE / elementsPerChunk;
			for ( int d = 0; d < n; ++d )
			{
				final long ns = s / dimsInChunks[ d ];
				if ( ns > 0 )
					cellDimensions[ d ] = chunkSize[ d ] * ( int ) ( dimsInChunks[ d ] );
				else
				{
					// budget exhausted on this axis: take the remainder here and
					// plain chunk size on all further axes
					cellDimensions[ d ] = chunkSize[ d ] * ( int ) ( s % dimsInChunks[ d ] );
					for ( ++d; d < n; ++d )
						cellDimensions[ d ] = chunkSize[ d ];
					break;
				}
				s = ns;
			}
			return cellDimensions;
		}

		@Override
		public RandomAccessibleInterval< UnsignedShortType > getImage( final int timepointId, final int level, final ImgLoaderHint... hints )
		{
			final ViewLevelId id = new ViewLevelId( timepointId, setupId, level );

			if ( ! existsImageData( id ) )
			{
				System.err.println( String.format(
						"image data for timepoint %d setup %d level %d could not be found. Partition file missing?",
						id.getTimePointId(), id.getViewSetupId(), id.getLevel() ) );
				return getMissingDataImage( id, new UnsignedShortType() );
			}

			if ( Arrays.asList( hints ).contains( ImgLoaderHints.LOAD_COMPLETELY ) )
				return loadImageCompletely( timepointId, level );

			final CachedCellImg< UnsignedShortType, VolatileShortArray > img = prepareCachedImage( id, LoadingStrategy.BLOCKING );
			final UnsignedShortType linkedType = new UnsignedShortType( img );
			img.setLinkedType( linkedType );
			return img;
		}

		@Override
		public RandomAccessibleInterval< VolatileUnsignedShortType > getVolatileImage( final int timepointId, final int level, final ImgLoaderHint... hints )
		{
			final ViewLevelId id = new ViewLevelId( timepointId, setupId, level );

			if ( ! existsImageData( id ) )
			{
				System.err.println( String.format(
						"image data for timepoint %d setup %d level %d could not be found. Partition file missing?",
						id.getTimePointId(), id.getViewSetupId(), id.getLevel() ) );
				return getMissingDataImage( id, new VolatileUnsignedShortType() );
			}

			final CachedCellImg< VolatileUnsignedShortType, VolatileShortArray > img = prepareCachedImage( id, LoadingStrategy.BUDGETED );
			final VolatileUnsignedShortType linkedType = new VolatileUnsignedShortType( img );
			img.setLinkedType( linkedType );
			return img;
		}

		/**
		 * (Almost) create a {@link CellImg} backed by the cache.
		 * The created image needs a {@link NativeImg#setLinkedType(net.imglib2.type.Type) linked type} before it can be used.
		 * The type should be either {@link UnsignedShortType} and {@link VolatileUnsignedShortType}.
		 */
		protected < T extends NativeType< T > > CachedCellImg< T, VolatileShortArray > prepareCachedImage( final ViewLevelId id, final LoadingStrategy loadingStrategy )
		{
			open();
			final int timepointId = id.getTimePointId();
			final int level = id.getLevel();
			final long[] dimensions = getDimsAndExistence( id ).getDimensions();
			final int[] cellDimensions = mipmapInfo.getSubdivisions()[ level ];

			// coarser levels get higher priority (loaded first)
			final int priority = mipmapInfo.getMaxLevel() - level;
			final CacheHints cacheHints = new CacheHints( loadingStrategy, priority, false );
			final CellCache< VolatileShortArray > c = cache.new VolatileCellCache< VolatileShortArray >( timepointId, setupId, level, cacheHints, shortLoader );
			final VolatileImgCells< VolatileShortArray > cells = new VolatileImgCells< VolatileShortArray >( c, new Fraction(), dimensions, cellDimensions );
			final CachedCellImg< T, VolatileShortArray > img = new CachedCellImg< T, VolatileShortArray >( cells );
			return img;
		}

		/**
		 * For images that are missing in the hdf5, a constant image is created. If
		 * the dimension of the missing image is known (see
		 * {@link #getDimsAndExistence(ViewLevelId)}) then use that. Otherwise
		 * create a 1x1x1 image.
		 */
		protected < T > RandomAccessibleInterval< T > getMissingDataImage( final ViewLevelId id, final T constant )
		{
			final long[] d = getDimsAndExistence( id ).getDimensions();
			return Views.interval( new ConstantRandomAccessible< T >( constant, 3 ), new FinalInterval( d ) );
		}

		@Override
		public RandomAccessibleInterval< FloatType > getFloatImage( final int timepointId, final boolean normalize, final ImgLoaderHint... hints )
		{
			// delegate to full-resolution level 0
			return getFloatImage( timepointId, 0, normalize, hints );
		}

		/**
		 * Load the unsigned-short image and copy it (in parallel) into a float
		 * image, optionally normalizing the result to [0, 1].
		 */
		@Override
		public RandomAccessibleInterval< FloatType > getFloatImage( final int timepointId, final int level, final boolean normalize, final ImgLoaderHint... hints )
		{
			final RandomAccessibleInterval< UnsignedShortType > ushortImg = getImage( timepointId, level, hints );

			// copy unsigned short img to float img

			// create float img
			final FloatType f = new FloatType();
			final ImgFactory< FloatType > imgFactory;
			if ( Intervals.numElements( ushortImg ) <= Integer.MAX_VALUE )
			{
				imgFactory = new ArrayImgFactory< FloatType >();
			}
			else
			{
				final long[] dimsLong = new long[ ushortImg.numDimensions() ];
				ushortImg.dimensions( dimsLong );
				final int[] cellDimensions = computeCellDimensions( dimsLong, mipmapInfo.getSubdivisions()[ level ] );
				imgFactory = new CellImgFactory< FloatType >( cellDimensions );
			}
			final Img< FloatType > floatImg = imgFactory.create( ushortImg, f );

			// set up executor service
			final int numProcessors = Runtime.getRuntime().availableProcessors();
			final ExecutorService taskExecutor = Executors.newFixedThreadPool( numProcessors );
			final ArrayList< Callable< Void > > tasks = new ArrayList< Callable< Void > >();

			// set up all tasks
			final int numPortions = numProcessors * 2;
			final long threadChunkSize = floatImg.size() / numPortions;
			final long threadChunkMod = floatImg.size() % numPortions;

			for ( int portionID = 0; portionID < numPortions; ++portionID )
			{
				// move to the starting position of the current thread
				final long startPosition = portionID * threadChunkSize;

				// the last thread may have to run longer if the number of pixels cannot be divided by the number of threads
				final long loopSize = ( portionID == numPortions - 1 ) ? threadChunkSize + threadChunkMod : threadChunkSize;

				if ( Views.iterable( ushortImg ).iterationOrder().equals( floatImg.iterationOrder() ) )
				{
					// same iteration order: copy with two flat cursors
					tasks.add( new Callable< Void >()
					{
						@Override
						public Void call() throws Exception
						{
							final Cursor< UnsignedShortType > in = Views.iterable( ushortImg ).cursor();
							final Cursor< FloatType > out = floatImg.cursor();

							in.jumpFwd( startPosition );
							out.jumpFwd( startPosition );

							for ( long j = 0; j < loopSize; ++j )
								out.next().set( in.next().getRealFloat() );

							return null;
						}
					} );
				}
				else
				{
					// different iteration order: localize input and write via random access
					tasks.add( new Callable< Void >()
					{
						@Override
						public Void call() throws Exception
						{
							final Cursor< UnsignedShortType > in = Views.iterable( ushortImg ).localizingCursor();
							final RandomAccess< FloatType > out = floatImg.randomAccess();

							in.jumpFwd( startPosition );

							for ( long j = 0; j < loopSize; ++j )
							{
								final UnsignedShortType vin = in.next();
								out.setPosition( in );
								out.get().set( vin.getRealFloat() );
							}

							return null;
						}
					} );
				}
			}

			try
			{
				// invokeAll() returns when all tasks are complete
				taskExecutor.invokeAll( tasks );
				taskExecutor.shutdown();
			}
			catch ( final InterruptedException e )
			{
				// NOTE(review): returns null on interruption and leaks the executor — confirm callers handle a null image.
				return null;
			}

			if ( normalize )
				// normalize the image to 0...1
				normalize( floatImg );

			return floatImg;
		}

		public MipmapInfo getMipmapInfo()
		{
			return mipmapInfo;
		}

		@Override
		public double[][] getMipmapResolutions()
		{
			return mipmapInfo.getResolutions();
		}

		@Override
		public AffineTransform3D[] getMipmapTransforms()
		{
			return mipmapInfo.getTransforms();
		}

		@Override
		public int numMipmapLevels()
		{
			return mipmapInfo.getNumLevels();
		}

		@Override
		public Dimensions getImageSize( final int timepointId )
		{
			return getImageSize( timepointId, 0 );
		}

		@Override
		public Dimensions getImageSize( final int timepointId, final int level )
		{
			final ViewLevelId id = new ViewLevelId( timepointId, setupId, level );
			final DimsAndExistence dims = getDimsAndExistence( id );
			if ( dims.exists() )
				return new FinalDimensions( dims.getDimensions() );
			else
				return null;
		}

		@Override
		public VoxelDimensions getVoxelSize( final int timepointId )
		{
			// the voxel size is not stored in the hdf5
			return null;
		}
	}
}
package be.isach.samaritan;

import POGOProtos.Networking.Envelopes.RequestEnvelopeOuterClass;
import be.isach.samaritan.birthday.BirthdayTask;
import be.isach.samaritan.brainfuck.BrainfuckInterpreter;
import be.isach.samaritan.chat.PrivateMessageChatThread;
import be.isach.samaritan.command.console.ConsoleListenerThread;
import be.isach.samaritan.history.MessageHistoryPrinter;
import be.isach.samaritan.level.AccessLevelManager;
import be.isach.samaritan.listener.CleverBotListener;
import be.isach.samaritan.listener.CommandListener;
import be.isach.samaritan.listener.PrivateMessageListener;
import be.isach.samaritan.listener.QuoteHandler;
import be.isach.samaritan.log.SmartLogger;
import be.isach.samaritan.music.SongPlayer;
import be.isach.samaritan.pokemongo.LoginData;
import be.isach.samaritan.runtime.ShutdownThread;
import be.isach.samaritan.util.GifFactory;
import be.isach.samaritan.util.SamaritanStatus;
import be.isach.samaritan.websocket.SamaritanWebsocketServer;
import com.pokegoapi.api.PokemonGo;
import com.pokegoapi.auth.GoogleLogin;
import com.pokegoapi.exceptions.LoginFailedException;
import com.pokegoapi.exceptions.RemoteServerException;
import net.dv8tion.jda.JDA;
import net.dv8tion.jda.JDABuilder;
import net.dv8tion.jda.entities.Guild;
import net.dv8tion.jda.entities.PrivateChannel;
import okhttp3.OkHttpClient;
import org.joda.time.Instant;

import javax.security.auth.login.LoginException;
import java.io.File;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.*;

/**
 * Central application object of the Samaritan Discord bot: wires up JDA,
 * listeners, scheduled tasks, the optional web UI, and the Pokémon Go client.
 */
public class Samaritan {

    /**
     * Song Players map with their corresponding guilds.
     */
    private Map<Guild, SongPlayer> songPlayers;

    /**
     * Samaritan Status.
     */
    private SamaritanStatus status;

    /**
     * The Jda of this Samaritan Instance.
     */
    private JDA jda;

    /**
     * Message History Printer Util.
     */
    private MessageHistoryPrinter messageHistoryPrinter;

    /**
     * The Smart Logger of this Samaritan Instance.
     */
    private SmartLogger logger;

    /**
     * Working directory. Absolute File!
     */
    private File workingDirectory;

    /**
     * Brainfuck Code Interpreter
     */
    private BrainfuckInterpreter brainfuckInterpreter;

    /**
     * UI WebSocket Server.
     */
    private SamaritanWebsocketServer samaritanWebsocketServer;

    /**
     * Private Message Listener.
     */
    private PrivateMessageListener pmListener;

    /**
     * CleverBot Listener.
     * NOTE(review): created in setUpListeners() but never registered with JDA — confirm this is intentional.
     */
    private CleverBotListener cleverBotListener;

    /**
     * Bot Token.
     */
    private String botToken;

    /**
     * Main Admin. Owner. As Discord User ID.
     */
    private String ownerId;

    /**
     * WEB Interface using WebSockets?
     */
    private boolean webUi;

    /**
     * Web Interface WebSocket Server Port.
     * NOTE(review): never assigned — the constructor parameter is ignored and
     * startWebSocketServer() uses a hard-coded port; verify which is intended.
     */
    private int uiWebSocketPort;

    /**
     * Gif Factory.
     */
    private GifFactory gifFactory;

    /**
     * Birthday task
     */
    private BirthdayTask birthdayTask;

    /**
     * Timer.
     */
    private Timer timer;

    /**
     * Users Access Level Manager.
     */
    private AccessLevelManager accessLevelManager;

    /**
     * Manages quotes.
     */
    private QuoteHandler quoteHandler;

    // Pokémon Go credentials and (possibly null) logged-in client.
    private LoginData pokemonGoLoginData;
    private PokemonGo pokemonGo;

    /**
     * Samaritan Constructor.
     *
     * @param args            Program Arguments.
     *                        Given when program is started (currently unused here).
     * @param botToken        Bot Token.
     *                        From samaritan.properties
     * @param webUi           Use Web UI or not.
     *                        From samaritan.properties.
     * @param uiWebSocketPort Web UI Port.
     *                        From samaritan.properties.
     * @param ownerId         Discord user id of the bot owner.
     * @param workingDirectory directory Samaritan runs in.
     * @param pokeGoLoginData Pokémon Go credentials.
     */
    public Samaritan(String[] args, String botToken, boolean webUi, int uiWebSocketPort, long ownerId, File workingDirectory, LoginData pokeGoLoginData) {
        this.botToken = botToken;
        this.logger = new SmartLogger();
        this.status = new SamaritanStatus();
        this.songPlayers = new HashMap<>();
        this.gifFactory = new GifFactory();
        this.ownerId = String.valueOf(ownerId);
        this.workingDirectory = workingDirectory;
        this.brainfuckInterpreter = new BrainfuckInterpreter();
        this.messageHistoryPrinter = new MessageHistoryPrinter();
        this.accessLevelManager = new AccessLevelManager(this);
        this.webUi = webUi;
        this.pokemonGoLoginData = pokeGoLoginData;
        status.setBootInstant(new Instant());

        // NOTE(review): the next line is garbled in this source (unterminated
        // string literal); the original banner text must be restored from
        // version control — left untouched here.
        logger.write(" logger.write(); logger.write("Hello.");
        logger.write();
        logger.write("I am Samaritan.");
        logger.write();
        logger.write("Starting...");
        logger.write();
        logger.write("Boot Instant: " + new Instant().toString());
        logger.write();

        // Abort startup entirely if the Discord login fails.
        if (!initJda()) {
            logger.write("Invalid token! Please change it in samaritan.properties");
            System.exit(1);
            return;
        }

        this.quoteHandler = new QuoteHandler(jda);
        this.birthdayTask = new BirthdayTask(this);
        this.timer = new Timer();
        quoteHandler.start();
        // check birthdays once a minute
        timer.schedule(birthdayTask, 0L, 1000L * 60L);
        this.accessLevelManager.loadUsers();
        Runtime.getRuntime().addShutdownHook(new ShutdownThread(this));
        startSongPlayers();
        setUpListeners();
        if (webUi)
            startWebSocketServer();
        new ConsoleListenerThread(this).start();

        // Pokémon Go login is best-effort: on failure the client stays null.
        try {
            logger.write("Pokémon Go -> Trying to connect.");
            OkHttpClient httpClient = new OkHttpClient();
            RequestEnvelopeOuterClass.RequestEnvelope.AuthInfo auth =
                    new GoogleLogin(httpClient).login(pokeGoLoginData.getUsername(), pokeGoLoginData.getPassword());
            this.pokemonGo = new PokemonGo(auth, httpClient);
            System.out.println("Pokémon Go -> Successfully logged in.");
        } catch (LoginFailedException | RemoteServerException e) {
            this.pokemonGo = null;
            System.out.println("Pokémon Go -> Failed to log in.");
            e.printStackTrace();
        }
    }

    /**
     * Starts JDA.
     *
     * @return {@code true} if everything went well, {@code false} otherwise.
     */
    private boolean initJda() {
        try {
            jda = new JDABuilder().setBotToken(botToken).buildBlocking();
            jda.getAccountManager().setGame("Beta 2.0.1");
            jda.getAccountManager().update();
        } catch (LoginException | InterruptedException e) {
            logger.write("Couldn't connect!");
            return false;
        }
        return true;
    }

    /**
     * Shuts Samaritan down.
     *
     * @param exitSystem If true, will execute a 'System.exit(0);'
     */
    public final void shutdown(boolean exitSystem) {
        // Say goodbye in every open private chat before disconnecting.
        for (PrivateMessageChatThread chatThread : getPrivateMessageListener().getChatThreads().values()) {
            PrivateChannel privateChannel = (PrivateChannel) chatThread.getMessageChannel();
            privateChannel.sendMessage("I must go, a reboot is in the queue!\nYou can try speaking to me again in a few moments.\nGood bye, my dear "
                    + privateChannel.getUser().getUsername() + ".");
        }

        try {
            jda.getAccountManager().setUsername("Samaritan");
            jda.getAccountManager().update();
        } catch (Exception exc) {
            // best-effort rename; ignored on failure
            // NOTE(review): exception silently swallowed — consider at least logging it.
        }

        jda.shutdown();
        if (exitSystem)
            System.exit(0);
    }

    /**
     * Starts WebSocket Server (for UI).
     */
    private void startWebSocketServer() {
        try {
            // NOTE(review): port 11350 is hard-coded; the uiWebSocketPort field/parameter is unused — confirm intended port source.
            samaritanWebsocketServer = new SamaritanWebsocketServer(new InetSocketAddress(11350));
            samaritanWebsocketServer.start();
            System.out.println("WS Server started.");
        } catch (UnknownHostException e) {
            System.out.println("WS Server couldn't start.");
            e.printStackTrace();
        }
    }

    /**
     * Set up Listeners.
     */
    private void setUpListeners() {
        this.pmListener = new PrivateMessageListener();
        this.cleverBotListener = new CleverBotListener(getJda());
        jda.addEventListener(new CommandListener(this));
        jda.addEventListener(pmListener);
    }

    /**
     * Start Song Players.
     */
    private void startSongPlayers() {
        // one SongPlayer per guild the bot is connected to
        for (Guild guild : jda.getGuilds()) {
            SongPlayer songPlayer = new SongPlayer(guild, this);
            songPlayers.put(guild, songPlayer);
        }
    }

    /**
     * @param guild The Guild.
     * @return A song Player by Guild.
     */
    public SongPlayer getSongPlayer(Guild guild) {
        return songPlayers.get(guild);
    }

    /**
     * @return The SongPlayers map.
     */
    public Map<Guild, SongPlayer> getSongPlayers() {
        return songPlayers;
    }

    /**
     * @return The UI WebSocket Server.
     */
    public SamaritanWebsocketServer getWebSocketServer() {
        return samaritanWebsocketServer;
    }

    /**
     * @return The JDA of this Samaritan Instance.
     */
    public JDA getJda() {
        return jda;
    }

    /**
     * @return The Smart Logger.
     */
    public SmartLogger getLogger() {
        return logger;
    }

    /**
     * @return The Private Message Listener
     */
    public PrivateMessageListener getPrivateMessageListener() {
        return pmListener;
    }

    /**
     * @return Samaritan's status.
     */
    public SamaritanStatus getStatus() {
        return status;
    }

    /**
     * @return The Gif Factory.
     */
    public GifFactory getGifFactory() {
        return gifFactory;
    }

    /**
     * @return webUi value.
     */
    public boolean useWebUi() {
        return webUi;
    }

    /**
     * @return Access Level Manager.
     */
    public AccessLevelManager getAccessLevelManager() {
        return accessLevelManager;
    }

    /**
     * @return Brainfuck code Interpreter.
     */
    public BrainfuckInterpreter getBrainfuckInterpreter() {
        return brainfuckInterpreter;
    }

    /**
     * @return Message History Printer Util.
     */
    public MessageHistoryPrinter getMessageHistoryPrinter() {
        return messageHistoryPrinter;
    }

    /**
     * @return Directory where Samaritan is currently running.
     */
    public File getWorkingDirectory() {
        return workingDirectory;
    }

    /**
     * @return Main Admin ID.
     */
    public String getOwnerId() {
        return ownerId;
    }

    /**
     * @return Quote Handler.
     */
    public QuoteHandler getQuoteHandler() {
        return quoteHandler;
    }

    /**
     * @return the Pokémon Go credentials supplied at construction.
     */
    public LoginData getPokemonGoLoginData() {
        return pokemonGoLoginData;
    }

    /**
     * @return the Pokémon Go client, or null if login failed.
     */
    public PokemonGo getPokemonGo() {
        return pokemonGo;
    }
}
package browserview;

import com.sun.jna.platform.win32.User32;
import com.sun.jna.platform.win32.WinDef.HWND;
import com.sun.jna.platform.win32.WinUser;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.openqa.selenium.*;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.Point;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.os.Kernel32;
import ui.ScreenManager;
import ui.UI;
import util.GitHubURL;
import util.GithubPageElements;
import util.PlatformSpecific;
import util.events.testevents.JumpToCommentEvent;
import util.events.testevents.SendKeysToBrowserEvent;

import java.awt.Rectangle;
import java.io.*;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

/**
 * An abstraction for the functions of the Selenium web driver.
 * It depends minimally on UI for width adjustments.
 */
public class BrowserComponent {

    private static final Logger logger = LogManager.getLogger(BrowserComponent.class.getName());

    private static final String CHROMEDRIVER_VERSION = "2-18";
    private static final boolean USE_MOBILE_USER_AGENT = false;
    private final boolean isTestChromeDriver;

    // Chrome, Android 4.2.2, Samsung Galaxy S4
    private static final String MOBILE_USER_AGENT =
            "Mozilla/5.0 (Linux; Android 4.2.2; GT-I9505 Build/JDQ39)" +
            "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Mobile Safari/537.36";

    private static final String CHROME_DRIVER_LOCATION = "browserview/";
    private static final String CHROME_DRIVER_BINARY_NAME = determineChromeDriverBinaryName();

    // Win32 SetWindowPos flags used by focus()
    private static final int SWP_NOSIZE = 0x0001;
    private static final int SWP_NOMOVE = 0x0002;
    private static final int SWP_NOACTIVATE = 0x0010;

    /**
     * Specifies how many tries to wait for a web page to load
     */
    private static final int WAIT_PAGE_LOAD_MAX_RETRY = 20;

    /**
     * Specifies the duration between tries to wait for a web page to load
     */
    private static final int WAIT_PAGE_LOAD_SLEEP_TIME = 10;

    private static HWND browserWindowHandle;
    private static User32 user32;

    private final UI ui;
    private final ScreenManager screenManager;
    private ChromeDriverEx driver = null;

    // Page source captured right after the last navigation; used by hasBviewChanged().
    private String pageContentOnLoad = "";

    // We want browser commands to be run on a separate thread, but not to
    // interfere with each other. This executor is limited to a single instance,
    // so it ensures that browser commands are queued and executed in sequence.

    // The alternatives would be to:
    // - allow race conditions
    // - interrupt the blocking WebDriver::get method

    // The first is not desirable and the second does not seem to be possible
    // at the moment.
    private final Executor executor;

    public BrowserComponent(UI ui, ScreenManager screenManager, boolean isTestChromeDriver) {
        this.ui = ui;
        this.screenManager = screenManager;
        executor = Executors.newSingleThreadExecutor();
        this.isTestChromeDriver = isTestChromeDriver;
        setupJNA();
        setupChromeDriverExecutable();
    }

    /**
     * Called on application startup. Blocks until the driver is created.
     * Guaranteed to only happen once.
     */
    public void initialise() {
        assert driver == null;
        executor.execute(() -> {
            driver = createChromeDriver();
            logger.info("Successfully initialised browser component and ChromeDriver");
        });
        login();
    }

    /**
     * Called when application quits. Guaranteed to only happen once.
     */
    public void onAppQuit() {
        quit();
        removeChromeDriverIfNecessary();
    }

    /**
     * Quits the browser component.
     */
    private void quit() {
        logger.info("Quitting browser component");

        // The application may quit before the browser is initialised.
        // In that case, do nothing.
        if (driver != null) {
            try {
                driver.quit();
            } catch (WebDriverException e) {
                // Chrome was closed; do nothing
                logger.info("Chrome already closed");
            }
        }
    }

    /**
     * Creates, initialises, and returns a ChromeDriver.
     * @return a freshly created ChromeDriverEx
     */
    private ChromeDriverEx createChromeDriver() {
        ChromeOptions options = new ChromeOptions();
        if (USE_MOBILE_USER_AGENT) {
            options.addArguments(String.format("user-agent=\"%s\"", MOBILE_USER_AGENT));
        }
        ChromeDriverEx driver = new ChromeDriverEx(options, isTestChromeDriver);

        WebDriver.Options driverOptions = driver.manage();

        if (!isTestChromeDriver) {
            setWindowBounds(driverOptions);
            initialiseJNA();
        }
        return driver;
    }

    /**
     * Positions the browser component according to the main window's position and size.
     *
     * @param driverOptions The options object belonging to the WebDriver managing the current Chrome window.
     */
    public void setWindowBounds(WebDriver.Options driverOptions) {
        Rectangle windowBounds = screenManager.getBrowserComponentBounds();
        driverOptions.window().setPosition(new Point((int) windowBounds.getX(),
                                                     (int) windowBounds.getY()));
        driverOptions.window().setSize(new Dimension((int) windowBounds.getWidth(),
                                                     (int) windowBounds.getHeight()));
    }

    // Deletes the extracted chromedriver binary after an update, so the new
    // version's binary gets re-extracted on next launch.
    private void removeChromeDriverIfNecessary() {
        if (ui.getCommandLineArgs().containsKey(UI.ARG_UPDATED_TO)) {
            boolean success = new File(CHROME_DRIVER_BINARY_NAME).delete();
            if (!success) {
                logger.warn("Failed to delete chromedriver");
            }
        }
    }

    /**
     * Executes Javascript in the currently-active driver window.
     * Run on the UI thread (will block until execution is complete,
     * i.e. change implementation if long-running scripts must be run).
     * @param script the JavaScript source to execute
     */
    private void executeJavaScript(String script) {
        driver.executeScript(script);
        logger.info("Executed JavaScript " + script.substring(0, Math.min(script.length(), 10)));
    }

    /**
     * Navigates to the New Label page on GitHub.
     * Run on a separate thread.
     */
    public void newLabel() {
        logger.info("Navigating to New Label page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForNewLabel(ui.logic.getDefaultRepo()), false));
        bringToTop();
    }

    /**
     * Navigates to the New Milestone page on GitHub.
     * Run on a separate thread.
     */
    public void newMilestone() {
        logger.info("Navigating to New Milestone page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForNewMilestone(ui.logic.getDefaultRepo()), false));
        bringToTop();
    }

    /**
     * Navigates to the New Issue page on GitHub.
     * Run on a separate thread.
     */
    public void newIssue() {
        logger.info("Navigating to New Issue page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForNewIssue(ui.logic.getDefaultRepo()), false));
        bringToTop();
    }

    /**
     * Navigates to the HubTurbo documentation page.
     * Run on a separate thread.
     */
    public void showDocs() {
        logger.info("Showing documentation page");
        runBrowserOperation(() -> driver.get(GitHubURL.DOCS_PAGE, false));
    }

    /**
     * Navigates to HubTurbo filters doc page, run on separate thread.
     */
    public void showFilterDocs() {
        logger.info("Showing filters documentation page");
        runBrowserOperation(() -> driver.get(GitHubURL.FILTERS_PAGE, false));
    }

    /**
     * Navigates to the GitHub changelog page.
     * Run on a separate thread.
     */
//    public void showChangelog(String version) {
//        logger.info("Showing changelog for version " + version);
//        runBrowserOperation(() -> driver.get(GitHubURL.getChangelogForVersion(version)));

    /**
     * Navigates to the GitHub page for the given issue in the currently-active
     * driver window.
     * Run on a separate thread.
     */
    // NOTE(review): the two logger.info(...) calls below are garbled in this
    // source (their string literals are unterminated); the original log
    // messages must be restored from version control — left untouched here.
    public void showIssue(String repoId, int id, boolean isPullRequest, boolean isForceRefresh) {
        if (isPullRequest) {
            logger.info("Showing pull request runBrowserOperation(() -> driver.get(GitHubURL.getPathForPullRequest(repoId, id), isForceRefresh));
        } else {
            logger.info("Showing issue runBrowserOperation(() -> driver.get(GitHubURL.getPathForIssue(repoId, id), isForceRefresh));
        }
        runBrowserOperation(() -> scrollToBottom());
    }

    // Clicks the "new comment" box on the current GitHub page.
    // NOTE(review): when isTestChromeDriver is true the test event is fired but
    // execution still falls through to the real-driver path — confirm whether
    // an early return is missing.
    public void jumpToComment(){
        if (isTestChromeDriver) {
            UI.events.triggerEvent(new JumpToCommentEvent());
        }
        try {
            WebElement comment = driver.findElementById(GithubPageElements.NEW_COMMENT);
            comment.click();
            bringToTop();
        } catch (Exception e) {
            logger.warn("Unable to reach jump to comments. ");
        }
    }

    // Returns true if the driver exists and the original HubTurbo tab can
    // still be switched to.
    private boolean isBrowserActive(){
        if (driver == null)
            return false;
        try {
            // Throws an exception if unable to switch to original HT tab
            // which then triggers a browser reset when called from runBrowserOperation
            WebDriver.TargetLocator switchTo = driver.switchTo();
            String windowHandle = driver.getWindowHandle();
            if (!isTestChromeDriver)
                switchTo.window(windowHandle);
            // When the HT tab is closed (but the window is still alive),
            // a lot of the operations on the driver (such as getCurrentURL)
            // will hang (without throwing an exception, the thread will just freeze the UI forever),
            // so we cannot use getCurrentURL/getTitle to check if the original HT tab
            // is still open. The above line does not hang the driver but still throws
            // an exception, thus letting us detect that the HT tab is not active any more.
            return true;
        } catch (WebDriverException e) {
            logger.warn("Unable to reach bview. ");
            return false;
        }
    }

    // A helper function for reseting browser.
    private void resetBrowser(){
        logger.info("Relaunching chrome.");
        quit(); // if the driver hangs
        driver = createChromeDriver();
        login();
    }

    /**
     * A helper function for running browser operations.
     * Takes care of running it on a separate thread, and normalises error-handling across
     * all types of code.
     */
    private void runBrowserOperation (Runnable operation) {
        executor.execute(() -> {
            if (isBrowserActive()) {
                try {
                    operation.run();
                    pageContentOnLoad = getCurrentPageSource();
                } catch (WebDriverException e) {
                    switch (BrowserComponentError.fromErrorMessage(e.getMessage())) {
                    case NoSuchWindow:
                        resetBrowser();
                        runBrowserOperation(operation); // Recurse and repeat
                        break;
                    case NoSuchElement:
                        logger.info("Warning: no such element! " + e.getMessage());
                        break;
                    default:
                        break;
                    }
                }
            } else {
                logger.info("Chrome window not responding.");
                resetBrowser();
                runBrowserOperation(operation);
            }
        });
    }

    /**
     * Logs in the currently-active driver window using the credentials
     * supplied by the user on login to the app.
     * Run on a separate thread.
     */
    public void login() {
        logger.info("Logging in on GitHub...");
        focus(ui.getMainWindowHandle());
        runBrowserOperation(() -> {
            driver.get(GitHubURL.LOGIN_PAGE, false);
            try {
                WebElement searchBox = driver.findElement(By.name(GithubPageElements.LOGIN_FIELD));
                searchBox.sendKeys(ui.logic.loginController.credentials.username);
                searchBox = driver.findElement(By.name(GithubPageElements.PASSWORD_FIELD));
                searchBox.sendKeys(ui.logic.loginController.credentials.password);
                searchBox.submit();
            } catch (Exception e) {
                // Already logged in; do nothing
                logger.info("Unable to login, may already be logged in. ");
            }
        });
    }

    /**
     * One-time JNA setup.
     */
    private static void setupJNA() {
        if (PlatformSpecific.isOnWindows())
            user32 = User32.INSTANCE;
    }

    /**
     * JNA initialisation. Should happen whenever the Chrome window is recreated.
     */
    private void initialiseJNA() {
        if (PlatformSpecific.isOnWindows()) {
            browserWindowHandle = user32.FindWindow(null, "data:, - Google Chrome");
        }
    }

    // Picks the platform-specific chromedriver binary name.
    public static String determineChromeDriverBinaryName() {
        if (PlatformSpecific.isOnMac()) {
            logger.info("Using chrome driver binary: chromedriver_" + CHROMEDRIVER_VERSION);
            return "chromedriver_" + CHROMEDRIVER_VERSION;
        } else if (PlatformSpecific.isOnWindows()) {
            logger.info("Using chrome driver binary: chromedriver_" + CHROMEDRIVER_VERSION + ".exe");
            return "chromedriver_" + CHROMEDRIVER_VERSION + ".exe";
        } else if (PlatformSpecific.isOn32BitsLinux()) {
            logger.info("Using chrome driver binary: chromedriver_linux_" + CHROMEDRIVER_VERSION);
            return "chromedriver_linux_" + CHROMEDRIVER_VERSION;
        } else if (PlatformSpecific.isOn64BitsLinux()) {
            logger.info("Using chrome driver binary: chromedriver_linux_x86_64_" + CHROMEDRIVER_VERSION);
            return "chromedriver_linux_x86_64_" + CHROMEDRIVER_VERSION;
        } else {
            logger.error("Unable to determine platform for chrome driver");
            logger.info("Using chrome driver binary: chromedriver_linux_" + CHROMEDRIVER_VERSION);
            return "chromedriver_linux_" + CHROMEDRIVER_VERSION;
        }
    }

    /**
     * Ensures that the chromedriver executable is in the project root before
     * initialisation. Since executables are packaged for all platforms, this also
     * picks the right version to use.
     */
    private static void setupChromeDriverExecutable() {
        File f = new File(CHROME_DRIVER_BINARY_NAME);
        if (!f.exists()) {
            InputStream in = BrowserComponent.class.getClassLoader()
                    .getResourceAsStream(CHROME_DRIVER_LOCATION + CHROME_DRIVER_BINARY_NAME);
            assert in != null : "Could not find " + CHROME_DRIVER_BINARY_NAME + " at "
                    + CHROME_DRIVER_LOCATION
                    + "; this path must be updated if the executables are moved";
            OutputStream out;
            try {
                // NOTE(review): 'in' is never closed and 'out.close()' is not in a
                // finally block — both streams leak on an IOException; consider
                // try-with-resources.
                out = new FileOutputStream(CHROME_DRIVER_BINARY_NAME);
                IOUtils.copy(in, out);
                out.close();
                f.setExecutable(true);
            } catch (IOException e) {
                logger.error("Could not load Chrome driver binary! " + e.getLocalizedMessage(), e);
            }
            logger.info("Could not find " + CHROME_DRIVER_BINARY_NAME + "; extracted it from jar");
        } else {
            logger.info("Located " + CHROME_DRIVER_BINARY_NAME);
        }
        System.setProperty("webdriver.chrome.driver", CHROME_DRIVER_BINARY_NAME);
    }

    // Restores and focuses the Chrome window (Windows only; no-op elsewhere).
    private void bringToTop(){
        if (PlatformSpecific.isOnWindows()) {
            user32.ShowWindow(browserWindowHandle, WinUser.SW_RESTORE);
            user32.SetForegroundWindow(browserWindowHandle);
        }
    }

    // Places the Chrome window just behind the given main window without
    // stealing focus (Windows only; no-op elsewhere).
    public void focus(HWND mainWindowHandle){
        if (PlatformSpecific.isOnWindows()) {
            // Restores browser window if it is minimized / maximized
            user32.ShowWindow(browserWindowHandle, WinUser.SW_SHOWNOACTIVATE);
            // SWP_NOMOVE and SWP_NOSIZE prevents the 0,0,0,0 parameters from taking effect.
            logger.info("Bringing bView to front");
            boolean success = user32.SetWindowPos(browserWindowHandle, mainWindowHandle, 0, 0, 0, 0,
                    SWP_NOMOVE | SWP_NOSIZE | SWP_NOACTIVATE);
            if (!success) {
                logger.info("Failed to bring bView to front.");
                logger.info(Kernel32.INSTANCE.GetLastError());
            }
            user32.SetForegroundWindow(mainWindowHandle);
        }
    }

    // HTML-escaped snapshot of the current page's DOM.
    private String getCurrentPageSource() {
        return StringEscapeUtils.escapeHtml4(
                (String) driver.executeScript("return document.documentElement.outerHTML"));
    }

    // Returns true when the page content differs from the snapshot taken at
    // the last navigation (and refreshes the snapshot).
    public boolean hasBviewChanged() {
        if (isTestChromeDriver)
            return true;
        if (isBrowserActive()) {
            if (getCurrentPageSource().equals(pageContentOnLoad))
                return false;
            pageContentOnLoad = getCurrentPageSource();
            return true;
        }
        return false;
    }

    public void scrollToTop() {
        String script = GithubPageElements.SCROLL_TO_TOP;
        executeJavaScript(script);
    }

    public void scrollToBottom() {
        String script = GithubPageElements.SCROLL_TO_BOTTOM;
        executeJavaScript(script);
    }

    public void scrollPage(boolean isDownScroll) {
        String script;
        if (isDownScroll) {
            script = GithubPageElements.SCROLL_DOWN;
        } else {
            script = GithubPageElements.SCROLL_UP;
        }
        executeJavaScript(script);
    }

    // Sends a keystroke to the page body.
    // NOTE(review): same fall-through as jumpToComment() — the test event fires
    // and the real-driver path still runs; confirm whether a return is missing.
    private void sendKeysToBrowser(String keyCode) {
        if (isTestChromeDriver) {
            UI.events.triggerEvent(new SendKeysToBrowserEvent(keyCode));
        }
        WebElement body;
        try {
            body = driver.findElementByTagName(GithubPageElements.BODY);
            body.sendKeys(keyCode);
        } catch (Exception e) {
            logger.error("No such element");
        }
    }

    public void manageAssignees(String keyCode) {
        sendKeysToBrowser(keyCode.toLowerCase());
        bringToTop();
    }

    public void manageMilestones(String keyCode) {
        sendKeysToBrowser(keyCode.toLowerCase());
        bringToTop();
    }

    public void showIssues() {
        logger.info("Navigating to Issues page");
        runBrowserOperation(() -> driver.get(GitHubURL.getPathForAllIssues(ui.logic.getDefaultRepo()), false));
    }

    public void showPullRequests() {
        logger.info("Navigating to Pull requests page");
        runBrowserOperation(() ->
driver.get(GitHubURL.getPathForPullRequests(ui.logic.getDefaultRepo()), false)); } public void showKeyboardShortcuts() { logger.info("Navigating to Keyboard Shortcuts"); runBrowserOperation(() -> driver.get(GitHubURL.KEYBOARD_SHORTCUTS_PAGE, false)); } public void showMilestones() { logger.info("Navigating to Milestones page"); runBrowserOperation(() -> driver.get(GitHubURL.getPathForMilestones(ui.logic.getDefaultRepo()), false)); } public void showContributors() { logger.info("Navigating to Contributors page"); runBrowserOperation(() -> driver.get(GitHubURL.getPathForContributors(ui.logic.getDefaultRepo()), false)); } public boolean isCurrentUrlIssue() { return driver != null && GitHubURL.isUrlIssue(driver.getCurrentUrl()); } /** * Checks if current URL is GitHub issue or PR discussion page */ public boolean isCurrentUrlDiscussion() { return driver != null && GitHubURL.isUrlIssueOrPrDiscussionPage(driver.getCurrentUrl()); } public String getCurrentUrl() { return driver.getCurrentUrl(); } /** * Switches to the specified tab in GitHub PR page * @param tabName Either GithubPageElements.DISCUSSION_TAB, GithubPageElements.COMMITS_TAB * or GithubPageElements.FILES_TAB */ public void switchToTab(String tabName) { if (GitHubURL.isPullRequestLoaded(getCurrentUrl())) { int tabIndex = 0; switch(tabName) { case GithubPageElements.DISCUSSION_TAB: tabIndex = 1; break; case GithubPageElements.COMMITS_TAB: tabIndex = 2; break; case GithubPageElements.FILES_TAB: tabIndex = 3; break; default: assert false; return; } /** * Wait for discussion page (for issue or PR) to be loaded. */ public void waitUntilDiscussionPageLoaded() { for (int i = 0; i < WAIT_PAGE_LOAD_MAX_RETRY && !isCurrentUrlDiscussion(); i++) { try { Thread.sleep(WAIT_PAGE_LOAD_SLEEP_TIME); } catch (IllegalArgumentException | InterruptedException e) { logger.warn(e); } } } public void minimizeWindow() { if (PlatformSpecific.isOnWindows()) { user32.ShowWindow(browserWindowHandle, WinUser.SW_MINIMIZE); } } }
package sobolan.projecteuler.problems;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.math.BigInteger;

/**
 * @author Radu Murzea
 *
 * @problemstatement
 * Work out the first ten digits of the sum of one hundred 50-digit numbers.
 * (The full list of numbers is not repeated here; it is stored in the data
 * file referenced by {@code FILE_PATH} and read at runtime.)
 */
public class ProjectEuler13 extends AbstractExecutableProblem {

    // Data file containing one 50-digit number per line.
    private static final String FILE_PATH = "src/sobolan/projecteuler/problems/13.txt";

    // How many leading digits of the sum to report.
    private static final int NR_DIGITS = 10;

    /**
     * Sums every number in the data file and returns the first
     * {@code NR_DIGITS} digits of the total.
     *
     * @return the leading digits of the sum as a string
     * @throws IllegalStateException if the data file cannot be read
     */
    @Override
    public String getResult() {
        BigInteger sum = BigInteger.ZERO;
        try (BufferedReader buff = new BufferedReader(new FileReader(FILE_PATH))) {
            String line;
            while ((line = buff.readLine()) != null) {
                line = line.trim();
                if (!line.isEmpty()) { // tolerate blank lines / trailing newline
                    sum = sum.add(new BigInteger(line));
                }
            }
        } catch (IOException e) {
            // FIX: propagate instead of System.exit(-10) — killing the JVM from
            // library code prevents callers and test harnesses from handling the error.
            throw new IllegalStateException("There was a problem while reading and parsing the file", e);
        }
        String digits = sum.toString();
        // Guard against a sum with fewer than NR_DIGITS digits.
        return digits.substring(0, Math.min(NR_DIGITS, digits.length()));
    }
}
package sqlancer.postgres.gen; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import sqlancer.Randomly; import sqlancer.common.DBMSCommon; import sqlancer.common.query.ExpectedErrors; import sqlancer.common.query.SQLQueryAdapter; import sqlancer.postgres.PostgresGlobalState; import sqlancer.postgres.PostgresSchema; import sqlancer.postgres.PostgresSchema.PostgresColumn; import sqlancer.postgres.PostgresSchema.PostgresDataType; import sqlancer.postgres.PostgresSchema.PostgresTable; import sqlancer.postgres.PostgresVisitor; import sqlancer.postgres.ast.PostgresExpression; public class PostgresTableGenerator { private final String tableName; private boolean columnCanHavePrimaryKey; private boolean columnHasPrimaryKey; private final StringBuilder sb = new StringBuilder(); private boolean isTemporaryTable; private final PostgresSchema newSchema; private final List<PostgresColumn> columnsToBeAdded = new ArrayList<>(); protected final ExpectedErrors errors = new ExpectedErrors(); private final PostgresTable table; private final boolean generateOnlyKnown; private final PostgresGlobalState globalState; public PostgresTableGenerator(String tableName, PostgresSchema newSchema, boolean generateOnlyKnown, PostgresGlobalState globalState) { this.tableName = tableName; this.newSchema = newSchema; this.generateOnlyKnown = generateOnlyKnown; this.globalState = globalState; table = new PostgresTable(tableName, columnsToBeAdded, null, null, null, false, false); errors.add("invalid input syntax for"); errors.add("is not unique"); errors.add("integer out of range"); errors.add("division by zero"); errors.add("cannot create partitioned table as inheritance child"); errors.add("cannot cast"); errors.add("ERROR: functions in index expression must be marked IMMUTABLE"); errors.add("functions in partition key expression must be marked IMMUTABLE"); errors.add("functions in index predicate must be marked IMMUTABLE"); errors.add("has no default operator 
class for access method"); errors.add("does not exist for access method"); errors.add("does not accept data type"); errors.add("but default expression is of type text"); errors.add("has pseudo-type unknown"); errors.add("no collation was derived for partition key column"); errors.add("inherits from generated column but specifies identity"); errors.add("inherits from generated column but specifies default"); PostgresCommon.addCommonExpressionErrors(errors); PostgresCommon.addCommonTableErrors(errors); } public static SQLQueryAdapter generate(String tableName, PostgresSchema newSchema, boolean generateOnlyKnown, PostgresGlobalState globalState) { return new PostgresTableGenerator(tableName, newSchema, generateOnlyKnown, globalState).generate(); } private SQLQueryAdapter generate() { columnCanHavePrimaryKey = true; sb.append("CREATE"); if (Randomly.getBoolean()) { sb.append(" "); isTemporaryTable = true; sb.append(Randomly.fromOptions("TEMPORARY", "TEMP")); } else if (Randomly.getBoolean()) { sb.append(" UNLOGGED"); } sb.append(" TABLE"); if (Randomly.getBoolean()) { sb.append(" IF NOT EXISTS"); } sb.append(" "); sb.append(tableName); if (Randomly.getBoolean() && !newSchema.getDatabaseTables().isEmpty()) { createLike(); } else { createStandard(); } return new SQLQueryAdapter(sb.toString(), errors, true); } private void createStandard() throws AssertionError { sb.append("("); for (int i = 0; i < Randomly.smallNumber() + 1; i++) { if (i != 0) { sb.append(", "); } String name = DBMSCommon.createColumnName(i); createColumn(name); } if (Randomly.getBoolean()) { errors.add("constraints on temporary tables may reference only temporary tables"); errors.add("constraints on unlogged tables may reference only permanent or unlogged tables"); errors.add("constraints on permanent tables may reference only permanent tables"); errors.add("cannot be implemented"); errors.add("there is no unique constraint matching given keys for referenced table"); errors.add("cannot reference 
partitioned table"); errors.add("unsupported ON COMMIT and foreign key combination"); errors.add("ERROR: invalid ON DELETE action for foreign key constraint containing generated column"); errors.add("exclusion constraints are not supported on partitioned tables"); PostgresCommon.addTableConstraints(columnHasPrimaryKey, sb, table, globalState, errors); } sb.append(")"); generateInherits(); generatePartitionBy(); PostgresCommon.generateWith(sb, globalState, errors); if (Randomly.getBoolean() && isTemporaryTable) { sb.append(" ON COMMIT "); sb.append(Randomly.fromOptions("PRESERVE ROWS", "DELETE ROWS", "DROP")); sb.append(" "); } } private void createLike() { sb.append("("); sb.append("LIKE "); sb.append(newSchema.getRandomTable().getName()); if (Randomly.getBoolean()) { for (int i = 0; i < Randomly.smallNumber(); i++) { String option = Randomly.fromOptions("DEFAULTS", "CONSTRAINTS", "INDEXES", "STORAGE", "COMMENTS", "GENERATED", "IDENTITY", "STATISTICS", "STORAGE", "ALL"); sb.append(" "); sb.append(Randomly.fromOptions("INCLUDING", "EXCLUDING")); sb.append(" "); sb.append(option); } } sb.append(")"); } private void createColumn(String name) throws AssertionError { sb.append(name); sb.append(" "); PostgresDataType type = PostgresDataType.getRandomType(); boolean serial = PostgresCommon.appendDataType(type, sb, true, generateOnlyKnown, globalState.getCollates()); PostgresColumn c = new PostgresColumn(name, type); c.setTable(table); columnsToBeAdded.add(c); sb.append(" "); if (Randomly.getBoolean()) { createColumnConstraint(type, serial); } } private void generatePartitionBy() { if (Randomly.getBoolean()) { return; } sb.append(" PARTITION BY "); // TODO "RANGE", String partitionOption = Randomly.fromOptions("RANGE", "LIST", "HASH"); sb.append(partitionOption); sb.append("("); errors.add("unrecognized parameter"); errors.add("cannot use constant expression"); errors.add("cannot add NO INHERIT constraint to partitioned table"); errors.add("unrecognized parameter"); 
errors.add("unsupported PRIMARY KEY constraint with partition key definition"); errors.add("which is part of the partition key."); errors.add("unsupported UNIQUE constraint with partition key definition"); errors.add("does not accept data type"); int n = partitionOption.contentEquals("LIST") ? 1 : Randomly.smallNumber() + 1; PostgresCommon.addCommonExpressionErrors(errors); for (int i = 0; i < n; i++) { if (i != 0) { sb.append(", "); } sb.append("("); PostgresExpression expr = PostgresExpressionGenerator.generateExpression(globalState, columnsToBeAdded); sb.append(PostgresVisitor.asString(expr)); sb.append(")"); if (Randomly.getBoolean()) { sb.append(globalState.getRandomOpclass()); errors.add("does not exist for access method"); } } sb.append(")"); } private void generateInherits() { if (Randomly.getBoolean() && !newSchema.getDatabaseTables().isEmpty()) { sb.append(" INHERITS("); sb.append(newSchema.getDatabaseTablesRandomSubsetNotEmpty().stream().map(t -> t.getName()) .collect(Collectors.joining(", "))); sb.append(")"); errors.add("has a type conflict"); errors.add("has a generation conflict"); errors.add("cannot create partitioned table as inheritance child"); errors.add("cannot inherit from temporary relation"); errors.add("cannot inherit from partitioned table"); errors.add("has a collation conflict"); errors.add("inherits conflicting default values"); } } private enum ColumnConstraint { NULL_OR_NOT_NULL, UNIQUE, PRIMARY_KEY, DEFAULT, CHECK, GENERATED }; private void createColumnConstraint(PostgresDataType type, boolean serial) { List<ColumnConstraint> constraintSubset = Randomly.nonEmptySubset(ColumnConstraint.values()); if (Randomly.getBoolean()) { // make checks constraints less likely constraintSubset.remove(ColumnConstraint.CHECK); } if (!columnCanHavePrimaryKey || columnHasPrimaryKey) { constraintSubset.remove(ColumnConstraint.PRIMARY_KEY); } if (constraintSubset.contains(ColumnConstraint.GENERATED) && constraintSubset.contains(ColumnConstraint.DEFAULT)) 
{ // otherwise: ERROR: both default and identity specified for column constraintSubset.remove(Randomly.fromOptions(ColumnConstraint.GENERATED, ColumnConstraint.DEFAULT)); } if (constraintSubset.contains(ColumnConstraint.GENERATED) && type != PostgresDataType.INT) { // otherwise: ERROR: identity column type must be smallint, integer, or bigint constraintSubset.remove(ColumnConstraint.GENERATED); } if (serial) { constraintSubset.remove(ColumnConstraint.GENERATED); constraintSubset.remove(ColumnConstraint.DEFAULT); constraintSubset.remove(ColumnConstraint.NULL_OR_NOT_NULL); } for (ColumnConstraint c : constraintSubset) { sb.append(" "); switch (c) { case NULL_OR_NOT_NULL: sb.append(Randomly.fromOptions("NOT NULL", "NULL")); break; case UNIQUE: sb.append("UNIQUE"); break; case PRIMARY_KEY: sb.append("PRIMARY KEY"); columnHasPrimaryKey = true; break; case DEFAULT: sb.append("DEFAULT"); sb.append(" ("); sb.append(PostgresVisitor.asString(PostgresExpressionGenerator.generateExpression(globalState, type))); sb.append(")"); // CREATE TEMPORARY TABLE t1(c0 smallint DEFAULT ('566963878')); errors.add("out of range"); errors.add("is a generated column"); break; case CHECK: sb.append("CHECK ("); sb.append(PostgresVisitor.asString(PostgresExpressionGenerator.generateExpression(globalState, columnsToBeAdded, PostgresDataType.BOOLEAN))); sb.append(")"); if (Randomly.getBoolean()) { sb.append(" NO INHERIT"); } errors.add("out of range"); break; case GENERATED: sb.append("GENERATED "); if (Randomly.getBoolean()) { sb.append(" ALWAYS AS ("); sb.append(PostgresVisitor.asString( PostgresExpressionGenerator.generateExpression(globalState, columnsToBeAdded, type))); sb.append(") STORED"); errors.add("A generated column cannot reference another generated column."); errors.add("cannot use generated column in partition key"); errors.add("generation expression is not immutable"); errors.add("cannot use column reference in DEFAULT expression"); } else { 
sb.append(Randomly.fromOptions("ALWAYS", "BY DEFAULT")); sb.append(" AS IDENTITY"); } break; default: throw new AssertionError(sb); } } } }
package com.accenture.multibank; import com.accenture.multibank.accounts.AccountType; import com.accenture.multibank.bank.Bank; import com.accenture.multibank.bank.RaiffeisenBank; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; /** * @author manuel * @version 11/29/16 */ public class Main { public static void main(String[] args) { ApplicationContext context = new ClassPathXmlApplicationContext("beans.xml", "daos.xml"); Bank bank = (Bank) context.getBean(RaiffeisenBank.class); Integer from = bank.createAccount(AccountType.SAVING, 1000), to = bank.createAccount(AccountType.SAVING, 2000); bank.transfer(from, to, 1001); } }
package team.unstudio.udpc.core.nms.common; import java.lang.reflect.Constructor; import java.lang.reflect.Method; import team.unstudio.udpc.api.nms.ReflectionUtils; public class NMSPacket implements team.unstudio.udpc.api.nms.NMSPacket{ @Override public Object createPacketPlayOutChat(String message, byte type) throws Exception { Constructor<?> c = ReflectionUtils.getNMSClass("PacketPlayOutChat").getDeclaredConstructor(ReflectionUtils.getNMSClass("IChatBaseComponent"),byte.class); c.setAccessible(true); Method a = ReflectionUtils.getNMSClass("IChatBaseComponent$ChatSerializer").getDeclaredMethod("a", String.class); a.setAccessible(true); return c.newInstance(a.invoke(null, message),type); } @Override public Object createPacketPlayOutChat(String message) throws Exception { return createPacketPlayOutChat(message, (byte) 1); } }
package ch.bind.philib.cache;

import ch.bind.philib.TestUtil;
import ch.bind.philib.lang.Cloner;
import ch.bind.philib.lang.NamedSeqThreadFactory;
import ch.bind.philib.lang.ThreadUtil;
import org.testng.annotations.Test;

import java.util.concurrent.CountDownLatch;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNotSame;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

/**
 * tests which must pass on all cache implementations.
 *
 * @author philipp meinen
 */
@Test
public abstract class CacheTestBase {

    // Number of entries written per up()/down() test relative to the cache capacity.
    private static final int UP_DOWN_CAP_COEFF = 5;

    private static final Cloner<Integer> INTEGER_CLONER = new Cloner<Integer>() {

        @Override
        public Integer clone(Integer value) {
            assertNotNull(value);
            // creating an entirely new object is the entirely intended to test the value cloning of caches.
            //noinspection UnnecessaryBoxing,BoxingBoxedValue
            return new Integer(value);
        }
    };

    public static String itos(int i) {
        return Integer.toString(i);
    }

    abstract <K, V> Cache<K, V> create();

    abstract <K, V> Cache<K, V> create(int capacity);

    abstract <K, V> Cache<K, V> create(Cloner<V> valueCloner);

    abstract int getMinCapacity();

    abstract int getBucketSize();

    abstract int getDefaultCapacity();

    @Test
    public void defaultCapacityOrMore() {
        Cache<Integer, Integer> cache;
        final int def = getDefaultCapacity();
        cache = this.create();
        assertEquals(cache.capacity(), def);
        cache = this.create(def * 4);
        assertEquals(cache.capacity(), def * 4);
    }

    @Test
    public void minCapacity() {
        this.<Integer, Integer>create(getMinCapacity());
    }

    @Test(expectedExceptions = {IllegalArgumentException.class})
    public void notLessThanMinCapacity() {
        this.<Integer, Integer>create(getMinCapacity() - getBucketSize());
    }

    @Test(expectedExceptions = {IllegalArgumentException.class})
    public void notZeroCapacity() {
        this.<Integer, Integer>create(0);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void getNullKey() {
        Cache<String, String> cache = this.create();
        cache.get(null);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void setNullKey() {
        Cache<String, String> cache = this.create();
        cache.set(null, "abc");
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void setNullValue() {
        Cache<String, String> cache = this.create();
        cache.set("abc", null);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void removeNullKey() {
        Cache<String, String> cache = this.create();
        cache.remove(null);
    }

    @Test
    public void get() {
        Cache<String, String> cache = this.create();
        assertNull(cache.get("1"));
        cache.set("1", "one");
        assertEquals(cache.get("1"), "one");
        // removing an absent key must not disturb other entries
        cache.remove("2");
        assertEquals(cache.get("1"), "one");
        cache.remove("1");
        assertNull(cache.get("1"));
    }

    @Test
    public void overwrite() {
        Cache<String, String> cache = this.create();
        cache.set("1", "version 1");
        cache.set("1", "version 2");
        assertEquals(cache.get("1"), "version 2");
        cache.remove("1");
        assertNull(cache.get("1"));
    }

    @Test
    public void cloner() {
        Cache<Integer, Integer> cache = this.create(INTEGER_CLONER);
        Integer one = 1;
        cache.set(one, one);
        Integer copy = cache.get(one);
        assertNotNull(copy);
        assertEquals(one.intValue(), copy.intValue());
        // different reference
        assertNotSame(one, copy);
    }

    @Test
    public void up() {
        Cache<Integer, Integer> cache = this.create();
        int hit = 0, miss = 0;
        final int N = cache.capacity() * UP_DOWN_CAP_COEFF;
        Integer[] is = new Integer[N];
        for (int i = 0; i < N; i++) {
            is[i] = i;
        }
        // after each insert, probe all previously inserted keys upwards
        for (int i = 0; i < N; i++) {
            cache.set(is[i], is[i]);
            for (int j = 0; j <= i; j++) {
                Integer v;
                if ((v = cache.get(is[j])) != null) {
                    assertEquals(v, is[j]);
                    hit++;
                } else {
                    miss++;
                }
            }
        }
        // System.out.println("N: " + N + " hit: " + hit + " miss: " + miss);
    }

    @Test
    public void down() {
        Cache<Integer, Integer> cache = this.create();
        int hit = 0, miss = 0;
        final int N = cache.capacity() * UP_DOWN_CAP_COEFF;
        Integer[] is = new Integer[N];
        for (int i = 0; i < N; i++) {
            is[i] = i;
        }
        // after each insert, probe all previously inserted keys downwards
        for (int i = 0; i < N; i++) {
            cache.set(is[i], is[i]);
            // FIX: the inner loop header was garbled ("j Integer v;") and did not
            // compile; restored the decrement and body brace, mirroring up().
            for (int j = i; j >= 0; j--) {
                Integer v;
                if ((v = cache.get(is[j])) != null) {
                    assertEquals(v, is[j]);
                    hit++;
                } else {
                    miss++;
                }
            }
        }
        // System.out.println("N: " + N + " hit: " + hit + " miss: " + miss);
    }

    @Test
    public void clear() {
        Cache<Integer, Integer> cache = this.create();
        for (int i = 0; i < 256; i++) {
            cache.set(i, i);
        }
        int retained = 0;
        for (int i = 0; i < 256; i++) {
            if (cache.get(i) != null) {
                retained++;
            }
        }
        // at least half the capacity should still be populated before clearing
        assertTrue(retained > cache.capacity() / 2);
        cache.clear();
        for (int i = 0; i < 256; i++) {
            assertNull(cache.get(i));
        }
    }

    @Test
    public void stressTest() throws InterruptedException {
        if (!TestUtil.RUN_STRESS_TESTS) {
            return;
        }
        int concurrency = Runtime.getRuntime().availableProcessors() * 64;
        int values = concurrency * 2;
        int minutes = 2;
        System.out.printf("stress testing %s for %d minutes with concurrency %d over %d values\n",
                getClass().getSimpleName(), minutes, concurrency, values);
        StressTester[] sts = new StressTester[concurrency];
        Cache<Integer, Integer> cache = create(concurrency);
        for (int i = 0; i < concurrency; i++) {
            sts[i] = new StressTester(cache, values);
        }
        Thread[] ts = ThreadUtil.createThreads(sts, new NamedSeqThreadFactory("Cache Stress-Test"));
        ThreadUtil.startThreads(ts);
        Thread.sleep(minutes * 60 * 1000);
        ThreadUtil.interruptAndJoinThreads(ts);
        long iterations = 0;
        for (StressTester st : sts) {
            iterations += st.iterations;
            assertTrue(st.ok);
        }
        System.out.printf("finished with %d iterations\n", iterations);
    }

    // Worker that hammers a shared cache until interrupted, recording whether it
    // finished without throwing and how many full passes it completed.
    private class StressTester implements Runnable {

        private final Cache<Integer, Integer> cache;
        private final Integer[] keys;
        final CountDownLatch finished = new CountDownLatch(1);
        boolean ok;
        long iterations;

        public StressTester(Cache<Integer, Integer> cache, int n) {
            this.cache = cache;
            this.keys = new Integer[n];
            for (int i = 0; i < n; i++) {
                keys[i] = i;
            }
        }

        @Override
        public void run() {
            Thread t = Thread.currentThread();
            // local counter; published to the field in the finally block
            long iterations = 0;
            try {
                while (!t.isInterrupted()) {
                    for (Integer key : keys) {
                        if (cache.get(key) == null) {
                            cache.set(key, key);
                        }
                    }
                    iterations++;
                }
                ok = true;
            } catch (Exception e) {
                System.err.println(e.getMessage());
                e.printStackTrace();
            } finally {
                this.iterations = iterations;
                finished.countDown();
            }
        }
    }
}
package com.gh.mygreen.xlsmapper; import java.awt.Point; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import org.apache.poi.ss.util.CellReference; import com.gh.mygreen.xlsmapper.validation.CellFieldError; import com.gh.mygreen.xlsmapper.validation.SheetBindingErrors; /** * * <p>static * @version 1.0 * @since 0.5 * @author T.TSUCHIE * */ public class TestUtils { /** * * @param errors * @param address * @return null */ public static CellFieldError cellFieldError(final SheetBindingErrors errors, final String address) { for(CellFieldError error : errors.getCellFieldErrors()) { if(error.getFormattedCellAddress().equalsIgnoreCase(address)) { return error; } } return null; } /** * * @param cellAddress * @return */ public static String cellAddress(final Point cellAddress) { return POIUtils.formatCellAddress(cellAddress); } /** * * @param pattern * @param date * @return */ public static String format(final String pattern, final Date date) { SimpleDateFormat formatter = new SimpleDateFormat(pattern); return formatter.format(date); } /** * (yyyy-MM-dd HH:mm:ss.SSS) * @param value * @return */ public static Timestamp toTimestamp(String value) { return Timestamp.valueOf(value); } /** * Timestamp{@link java.util.Date} * @param timestamp * @return */ public static Date toUtilDate(Timestamp timestamp) { return new Date(timestamp.getTime()); } /** * Timestamp{@link Calendar} * @since 1.0 * @param timestamp * @return */ public static Calendar toCalendar(Timestamp timestamp) { Calendar cal = Calendar.getInstance(); cal.setTime(timestamp); return cal; } /** * Timestamp{@link java.sql.Date} * @param timestamp * @return */ public static java.sql.Date toSqlDate(Timestamp timestamp) { return new java.sql.Date(timestamp.getTime()); } /** * Timestamp{@link java.sql.Time} * @param timestamp * @return */ public static 
java.sql.Time toSqlTime(Timestamp timestamp) { return new java.sql.Time(timestamp.getTime()); } /** * * null * @param str1 * @param str2 * @return */ public static boolean equalsStr(final String str1, final String str2) { if(str1 == null && str2 == null) { return true; } else if(str1 == null || str2 == null) { return false; } else { return str1.equals(str2); } } /** * * @param original * @return */ public static String toUnicode(char c) { return toUnicode(String.valueOf(c)); } /** * * @param original * @return */ public static String toUnicode(String original) { if(original == null) { return ""; } StringBuilder sb = new StringBuilder(); for (int i = 0; i < original.length(); i++) { sb.append(String.format("\\u%04X", Character.codePointAt(original, i))); } String unicode = sb.toString(); return unicode; } /** * * @param items * @return */ public static <T> List<T> toList(T... items) { return Arrays.asList(items); } /** * * @param items * @return */ public static <T> T[] toArray(T... items) { return items; } /** * * @param items * @return */ public static <T> Set<T> toSet(T... items) { return new LinkedHashSet<T>(toList(items)); } /** * * null * @param value */ public static String trim(String value) { if(value == null) { return ""; } else { return value.trim(); } } /** * Point * @param address * @return */ public static Point toPointAddress(final String address) { ArgUtils.notEmpty(address, "address"); CellReference ref = new CellReference(address); return new Point(ref.getCol(), ref.getRow()); } /** * days * @param base * @param days */ public static Date getDateByDay(Date base, int days) { Calendar cal = Calendar.getInstance(); cal.setTime(base); cal.add(Calendar.DAY_OF_MONTH, days); return cal.getTime(); } }
package com.alibaba.ttl;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicReference;

import static com.alibaba.ttl.TransmittableThreadLocal.Transmitter.*;

/**
 * A {@link TimerTask} wrapper that captures all transmittable thread-local
 * values at construction time and replays them around the delegate's
 * {@link TimerTask#run()}, restoring the timer thread's own values afterwards.
 * Marked {@code @Deprecated} upstream; retained for compatibility.
 */
@Deprecated
public final class TtlTimerTask extends TimerTask {
    // Holds the captured TTL snapshot; set to null once consumed when
    // releaseTtlValueReferenceAfterRun is enabled (to avoid retaining values).
    private final AtomicReference<Object> capturedRef;
    // The wrapped task that actually does the work.
    private final TimerTask timerTask;
    // When true, the captured snapshot is released (CAS to null) on first run,
    // making any second run fail fast with IllegalStateException.
    private final boolean releaseTtlValueReferenceAfterRun;

    private TtlTimerTask(@Nonnull TimerTask timerTask, boolean releaseTtlValueReferenceAfterRun) {
        // capture() snapshots the current thread's transmittable values now,
        // on the thread that constructs the wrapper.
        this.capturedRef = new AtomicReference<Object>(capture());
        this.timerTask = timerTask;
        this.releaseTtlValueReferenceAfterRun = releaseTtlValueReferenceAfterRun;
    }

    /**
     * wrap method {@link TimerTask#run()}.
     *
     * Replays the captured snapshot, runs the delegate, then restores the
     * timer thread's previous values in a finally block.
     *
     * @throws IllegalStateException if the snapshot was already released
     *         (i.e. the task ran before and release-after-run is enabled)
     */
    @Override
    public void run() {
        Object captured = capturedRef.get();
        // The CAS both detects a concurrent/second run and releases the
        // snapshot exactly once when release-after-run is requested.
        if (captured == null || releaseTtlValueReferenceAfterRun && !capturedRef.compareAndSet(captured, null)) {
            throw new IllegalStateException("TTL value reference is released after run!");
        }

        Object backup = replay(captured);
        try {
            timerTask.run();
        } finally {
            // Always restore, even if the delegate throws.
            restore(backup);
        }
    }

    /** Cancels both the wrapped task and this wrapper. */
    @Override
    public boolean cancel() {
        timerTask.cancel();
        return super.cancel();
    }

    /** Returns the wrapped delegate task. */
    @Nonnull
    public TimerTask getTimerTask() {
        return timerTask;
    }

    /** Equality is delegated to the wrapped task (null-safe). */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        TtlTimerTask that = (TtlTimerTask) o;

        return timerTask != null ? timerTask.equals(that.timerTask) : that.timerTask == null;
    }

    @Override
    public int hashCode() {
        return timerTask != null ? timerTask.hashCode() : 0;
    }

    /**
     * Factory method, wraps the input {@link TimerTask} into a {@link TtlTimerTask}.
     * Does not release the TTL snapshot after run, and is not idempotent
     * (wrapping an already-wrapped task throws).
     *
     * @param timerTask input {@link TimerTask}; {@code null} yields {@code null}
     * @return Wrapped {@link TimerTask}
     */
    @Nullable
    public static TtlTimerTask get(@Nullable TimerTask timerTask) {
        return get(timerTask, false, false);
    }

    /**
     * Factory method, wraps the input {@link TimerTask} into a {@link TtlTimerTask}.
     *
     * @param timerTask input {@link TimerTask}; {@code null} yields {@code null}
     * @param releaseTtlValueReferenceAfterRun release TTL value reference after run, avoid memory leak even if {@link TtlRunnable} is referred.
     * @return Wrapped {@link TimerTask}
     */
    @Nullable
    public static TtlTimerTask get(@Nullable TimerTask timerTask, boolean releaseTtlValueReferenceAfterRun) {
        return get(timerTask, releaseTtlValueReferenceAfterRun, false);
    }

    /**
     * Factory method, wraps the input {@link TimerTask} into a {@link TtlTimerTask}.
     *
     * @param timerTask input {@link TimerTask}; {@code null} yields {@code null}
     * @param releaseTtlValueReferenceAfterRun release TTL value reference after run, avoid memory leak even if {@link TtlRunnable} is referred.
     * @param idempotent when {@code true}, an already-wrapped task is returned
     *        as-is instead of throwing. {@code true} will cover up bugs!
     *        <b>DO NOT</b> set, only when you know why.
     * @return Wrapped {@link TimerTask}
     * @throws IllegalStateException when the task is already wrapped and
     *         {@code idempotent} is {@code false}
     */
    @Nullable
    public static TtlTimerTask get(@Nullable TimerTask timerTask, boolean releaseTtlValueReferenceAfterRun, boolean idempotent) {
        if (null == timerTask) {
            return null;
        }

        if (timerTask instanceof TtlTimerTask) {
            if (idempotent) {
                // avoid redundant decoration, and ensure idempotency
                return (TtlTimerTask) timerTask;
            } else {
                throw new IllegalStateException("Already TtlTimerTask!");
            }
        }
        return new TtlTimerTask(timerTask, releaseTtlValueReferenceAfterRun);
    }
}
package com.uwetrottmann.trakt.v2; import org.apache.oltu.oauth2.client.request.OAuthClientRequest; import org.apache.oltu.oauth2.client.response.OAuthAccessTokenResponse; import org.apache.oltu.oauth2.common.exception.OAuthProblemException; import org.apache.oltu.oauth2.common.exception.OAuthSystemException; import org.junit.Test; import java.net.URISyntaxException; import static org.fest.assertions.api.Assertions.assertThat; public class AuthTest extends BaseTestCase { public static final String TEST_CLIENT_ID = "e683ed71dd4a4afe73ba73151a4645f511b8703464a7807045088c733ef8d634"; public static final String TEST_CLIENT_SECRET = "21da158feb52479c53936a48b13e4abe94b907908387d47b70710deb2f4a51fa"; private static final String AUTH_CODE = ""; public static final String TEST_REDIRECT_URI = "urn:ietf:wg:oauth:2.0:oob"; @Test public void test_getAccessTokenRequest() throws OAuthSystemException { OAuthClientRequest request = TraktV2.getAccessTokenRequest(TEST_CLIENT_ID, TEST_CLIENT_SECRET, TEST_REDIRECT_URI, AUTH_CODE); assertThat(request).isNotNull(); assertThat(request.getLocationUri()).startsWith(TraktV2.OAUTH2_TOKEN_URL); System.out.println("Generated Token Request URI: " + request.getLocationUri()); } @Test public void test_getAccessToken() throws OAuthProblemException, OAuthSystemException { if (AUTH_CODE.length() == 0) { throw new IllegalArgumentException( "Make sure you set a temporary auth code to exchange for an access token"); } OAuthAccessTokenResponse response = TraktV2.getAccessToken( TEST_CLIENT_ID, TEST_CLIENT_SECRET, TEST_REDIRECT_URI, AUTH_CODE); System.out.println("Retrieved access token: " + response.getAccessToken()); } @Test public void test_getAuthorizationRequest() throws OAuthSystemException, URISyntaxException { OAuthClientRequest request = TraktV2.getAuthorizationRequest(TEST_CLIENT_ID, TEST_REDIRECT_URI); assertThat(request).isNotNull(); assertThat(request.getLocationUri()).startsWith(TraktV2.OAUTH2_AUTHORIZATION_URL); // trakt does not support 
scopes, so don't send one (server sets default scope) assertThat(request.getLocationUri()).doesNotContain("scope"); System.out.println("Generated Auth Request URI: " + request.getLocationUri()); } }
package com.blamejared.clumps; import com.blamejared.clumps.entities.EntityXPOrbBig; import net.minecraft.entity.*; import net.minecraft.entity.item.ExperienceOrbEntity; import net.minecraft.world.World; import net.minecraft.world.server.ServerWorld; import net.minecraftforge.api.distmarker.Dist; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.event.RegistryEvent.Register; import net.minecraftforge.event.TickEvent; import net.minecraftforge.event.entity.EntityJoinWorldEvent; import net.minecraftforge.fml.DistExecutor; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext; import java.util.*; @Mod(Clumps.MODID) public class Clumps { public static final String MODID = "clumps"; public static final EntityType<EntityXPOrbBig> BIG_ORB_ENTITY_TYPE = EntityType.Builder.<EntityXPOrbBig> create(EntityClassification.MISC).size(0.5f, 0.5f).setCustomClientFactory((pkt, world) -> new EntityXPOrbBig(world)).build(Clumps.MODID + ":xp_orb_big"); public Clumps() { DistExecutor.runWhenOn(Dist.CLIENT, () -> () -> FMLJavaModLoadingContext.get().getModEventBus().addListener(ClumpsClient::setupClient)); FMLJavaModLoadingContext.get().getModEventBus().addGenericListener(EntityType.class, this::registerEntity); MinecraftForge.EVENT_BUS.addListener(this::joinWorld); MinecraftForge.EVENT_BUS.addListener(this::update); } private void registerEntity(Register<EntityType<?>> register) { register.getRegistry().register(BIG_ORB_ENTITY_TYPE.setRegistryName(Clumps.MODID, "xp_orb_big")); } private static final List<ExperienceOrbEntity> orbs = new ArrayList<>(); private void update(TickEvent.WorldTickEvent e) { if(e.world.isRemote || e.phase == TickEvent.Phase.START) { return; } if(e.world instanceof ServerWorld) { ServerWorld world = (ServerWorld) e.world; ArrayList<ExperienceOrbEntity> list = new ArrayList<>(orbs); for(ExperienceOrbEntity entity : list) { EntityXPOrbBig bigOrb = new EntityXPOrbBig(world, 
entity.getPosX(), entity.getPosY(), entity.getPosZ(), entity.xpValue); bigOrb.setMotion(entity.getMotion()); world.addEntity(bigOrb); entity.remove(); } orbs.removeAll(list); } } private void joinWorld(EntityJoinWorldEvent e) { if(e.getEntity() instanceof ExperienceOrbEntity && !(e.getEntity() instanceof EntityXPOrbBig)) { World world = e.getEntity().world; if(!world.isRemote) { orbs.add((ExperienceOrbEntity) e.getEntity()); } } } }
package com.xiaogua.better.str; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.text.StringCharacterIterator; import java.util.Arrays; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import org.apache.commons.lang3.StringEscapeUtils; import org.junit.Assert; import org.junit.Test; public class TestStrCode { @Test public void testForEachMap() { Map<String, String[]> map = new HashMap<String, String[]>(); map.put("key_1", new String[] { "value_1" }); for (String str : map.keySet()) { String[] values = map.get(str); for (int i = 0, len = values.length; i < len; i++) { values[i] = values[i] + "_update"; } } Assert.assertEquals("value_1_update", map.get("key_1")[0]); map.put("key_1", new String[] { "value_1" }); for (Entry<String, String[]> entry : map.entrySet()) { String[] values = entry.getValue(); for (String str : values) { str = str + "_update"; } } Assert.assertEquals("value_1", map.get("key_1")[0]); } @Test public void testStrErrorCode() { String str = "1"; if (str.equals(1)) { System.out.println("never print out"); } } @Test public void testConvertNumToExcelStr() { String rtnStr = StrCode.convertNumToExcelStr(731); Assert.assertEquals("ABC", rtnStr); } @Test public void testConvertExcelStrToNumber() throws Exception { int intRtn = StrCode.convertExcelStrToNumber("ABC"); Assert.assertEquals(731, intRtn); } @Test public void testIsHasChinese() { String str = ""; Assert.assertTrue(StrCode.isHasChinese(str)); } @Test public void testIsJavaIdentifier() { String str = "java"; Assert.assertTrue(StrCode.isJavaIdentifier(str)); } @Test public void testNullStr() { Object a = null; String b = (String) a; String c = b + ""; Assert.assertEquals("null", c); } @Test public void testCleanWhitespace() { String str = " \u00a0\u00a0\u3000\u3000 "; Assert.assertEquals(0, StringCommonUtils.trimWhitespace(str).length()); String htmlSpaceStr = 
StringEscapeUtils.unescapeHtml4("&nbsp;"); Assert.assertFalse(htmlSpaceStr.equals(" ")); Assert.assertEquals(160, (int) StringEscapeUtils.unescapeHtml4("&nbsp;").charAt(0)); Assert.assertEquals(0, StrCode.trimBlankSpace(htmlSpaceStr).length()); Assert.assertEquals(1, htmlSpaceStr.trim().length()); } @Test public void testContainsIgnoreCase() { String str = ""; String[] strArr = new String[] { "Tes", "TEST" }; Assert.assertTrue(StringCommonUtils.containsIgnoreCase(strArr, str)); } @Test public void testStrIndex() { String str = "1,3,5,4,6"; int index = str.indexOf(3); Assert.assertEquals(index, -1); index = str.indexOf('3'); Assert.assertEquals(index, 2); str = ""; index = str.indexOf(""); Assert.assertEquals(index, 0); index = str.lastIndexOf(""); Assert.assertEquals(index, 5); } @Test public void testStrObject() { Object he = new Object(); String str = "hello"; str += he; Assert.assertTrue(str.indexOf("hello") >= 0 && str.toString().indexOf("Object") > 0); he = new Object(); str = "hello"; he = he + str; Assert.assertTrue(he.toString().indexOf("Object") > 0); } @Test public void testStrCodePointCount() { String str3 = "123\uD800\uDC00456"; System.out.println(str3); // Unicode int codePointCount = str3.codePointCount(0, str3.length()); System.out.println(str3.length() + ":codePointCount : " + codePointCount); } @Test public void testStrContain() { String str = "123"; boolean rtn = str.contains(""); Assert.assertTrue(rtn); rtn = str.contains(" "); Assert.assertFalse(rtn); } @Test public void testStrSplit() { /** * split 0 split * */ String str1 = "a,b,,,,"; Assert.assertEquals(2, str1.split(",").length); str1 = "a,b,,,,c"; Assert.assertEquals(6, str1.split(",").length); } @Test public void testStrUnicode() { System.out.println(" : \u2665 "); System.out.println(" :\u2666 "); System.out.println(" :\u2663 "); System.out.println(" : \u2660 "); String original1 = "a\u00ea\u00f1\u00fcc"; System.out.println(original1); } @Test public void testStrReplace() { String str = 
"iiiiiaaaammmxiaogua"; String regex = "(.)\\1+"; str = str.replaceAll(regex, "$1"); Assert.assertEquals("iamxiaogua", str); str = "a b c d e "; System.out.println(str.replace(" ", "") + "---=" + str.replace(" ", "").length()); System.out.println(str.replaceAll(" ", "") + "----=" + str.replaceAll(" ", "").length()); str = "**a**b***c***d*****e*"; String str_1 = str.replaceAll("(^\\*)|(\\*$)|\\*", "$1$2"); System.out.println(str_1); String str_2 = str.replaceAll("(?<!^)\\*+(?!$)", ""); System.out.println(str_2); Assert.assertEquals(str_1, str_2); } @Test public void testIsRotation() { String str1 = "abc"; String str2 = "cab"; boolean rtn = StrCode.isRotation(str1, str2); Assert.assertTrue(rtn); } @Test public void testHtmlSpacType() { System.out.println(",1=" + StringEscapeUtils.unescapeHtml4("&nbsp;&160 System.out.println(",1=" + StringEscapeUtils.unescapeHtml4("&ensp;&8194 System.out.println(",2=" + StringEscapeUtils.unescapeHtml4("&emsp;&8195 System.out.println(",1=" + StringEscapeUtils.unescapeHtml4("&thinsp;&8201 } @Test public void testEscapeHtml() { String html = "<html><head><title></title></head></html>"; System.out.println(StringEscapeUtils.escapeHtml3(html)); System.out.println(StringEscapeUtils.escapeHtml4(html)); System.out.println(org.springframework.web.util.HtmlUtils.htmlEscape(html)); } @Test public void testCharacter() { char c = 'c'; // convert char to String type String str = Character.toString(c); Assert.assertEquals("c", str); // convert char primitive to Character type Character c2 = Character.valueOf(c); Assert.assertNotNull(c2); } @Test public void testStrByteBufferConvert() throws Exception { String str = "hello"; ByteBuffer buffer = StrCode.convertStrToByteBuffer(str); String rtnStr = new String(buffer.array(), buffer.position(), buffer.limit()); System.out.println(rtnStr); System.out.println(StrCode.convertByteBufferToStr(buffer)); System.out.println(String.format("position=%s,limit=%s,capacity=%s", buffer.position(), buffer.limit(), 
buffer.capacity())); System.out.println(StrCode.convertByteBufferToStr(buffer)); System.out.println(String.format("position=%s,limit=%s,capacity=%s", buffer.position(), buffer.limit(), buffer.capacity())); Charset charset = Charset.forName("UTF-8"); System.out.println(StrCode.convertByteBufferToStr(buffer, charset)); System.out.println(String.format("position=%s,limit=%s,capacity=%s", buffer.position(), buffer.limit(), buffer.capacity())); System.out.println(StrCode.convertByteBufferToStr(buffer, charset)); System.out.println(String.format("position=%s,limit=%s,capacity=%s", buffer.position(), buffer.limit(), buffer.capacity())); } @Test public void testByteArrByteBufferConvert() throws Exception { String str = "hello"; ByteBuffer buffer = StrCode.convertStrToByteBuffer(str); System.out.println(String.format("position=%s,limit=%s,capacity=%s", buffer.position(), buffer.limit(), buffer.capacity())); byte[] byteArr = StrCode.convertByteBufferToByteArr(buffer); System.out.println(byteArr.length); ByteBuffer buffer2 = StrCode.convertByteArrToByteBuffer(byteArr); System.out.println(String.format("position=%s,limit=%s,capacity=%s", buffer2.position(), buffer2.limit(), buffer2.capacity())); } @Test public void testStringCharacterIterator() { String str = "\"\""; StringBuffer sb = new StringBuffer(); StringCharacterIterator iterator = new StringCharacterIterator(str); char myChar = iterator.current(); while (myChar != StringCharacterIterator.DONE) { if (myChar == '\"') { sb.append("\\\""); } else if (myChar == '\n') { sb.append("\\n"); } else if (myChar == '\r') { sb.append("\\r"); } else if (myChar == '\\') { sb.append("\\\\"); } else { sb.append(myChar); } myChar = iterator.next(); } System.out.println(str + "," + sb.toString()); } @Test public void testStrHashCode() { String str = "192.168.1.0:1111"; // hashcode System.out.println(str.hashCode()); String str_1 = "vFrKiaNHfF7t[9::E[XsX?L7xPp3DZSteIZvdRT8CX:w6d;v<_KZnhsM_^dqoppe"; String str_2 = 
"hI4pFxGOfS@suhVUd:mTo_begImJPB@Fl[6WJ?ai=RXfIx^=Aix@9M;;?Vdj_Zsi"; // hashcode Assert.assertEquals(str_1.hashCode(), str_2.hashCode()); } @Test public void testStrToUpperCase() { String str = "i"; String a = str.toUpperCase(); String b = str.toUpperCase(new Locale("tr")); String c = str.toUpperCase(Locale.US); System.out.println(a); System.out.println(b); System.out.println(c); Assert.assertFalse(a.equals(b)); final String word1 = "Straße"; final String word2 = word1.toUpperCase(); System.out.println(String.format("\"%s\" - length %d", word1, word1.length())); System.out.println(String.format("\"%s\" - length %d", word2, word2.length())); } @Test public void testStrAdd() { System.out.println("String + String: " + ("A" + "K")); System.out.println("String + Char: " + ("A" + 'K')); System.out.println("Char + Char: " + ('A' + 'K'));// 140 A = 65 and B = } @Test public void testStringBuffer() { StringBuffer sb = new StringBuffer(10); sb.setLength(10); sb.setCharAt(9, 'a'); System.out.println(sb.toString()); } @Test public void testStringBufferEqual() { String s1 = "abc"; // String equalsinstance of StringBuffer s2 = new StringBuffer(s1); Assert.assertFalse(s1.equals(s2)); } @Test public void testStringIntern() { String s1 = "abc"; String s2 = new String("abc"); s2 = s2.intern(); Assert.assertTrue(s1 == s2); } @Test(expected = NullPointerException.class) public void testStringValueNull() { // String.valueOf(char[]) String nullStr = String.valueOf(null); System.out.println("never print:" + nullStr); } @Test public void testStringValueNull_2() { String nullStr = null; // String valueOf(Object obj) String nullStr_2 = String.valueOf(nullStr); Assert.assertNotNull(nullStr_2); Assert.assertEquals("null", nullStr_2); } @Test public void testStringValueNull_3() { // String valueOf(Object obj) String nullStr = String.valueOf((Object) null); Assert.assertNotNull(nullStr); Assert.assertEquals("null", nullStr); } @Test public void testStrArr() { printStr("a", "b"); } @Test public 
void testStrGetBytes() { final Charset UTF8 = StandardCharsets.UTF_8; final Charset UTF16 = StandardCharsets.UTF_16BE; final String string1 = "Str१२३"; System.out.println("Original string: \"" + string1 + "\""); final byte[] utf8bytes = string1.getBytes(UTF8); final byte[] utf16bytes = string1.getBytes(UTF16); System.out.println("String encoded as UTF-8: " + Arrays.toString(utf8bytes)); System.out.println("String encoded as UTF-16: " + Arrays.toString(utf16bytes)); } @Test public void testStringBufferAppendCodePoint() { final int cp = 0x5B57; // unicode System.out.println(String.format("character: %s - code point: %d - %s in %s; character count: %d", new StringBuffer().appendCodePoint(cp), cp, Character.getName(cp), Character.UnicodeBlock.of(cp), Character.charCount(cp))); } @Test public void testStrCodePoint() { String str = "1234ABcd"; int codePoint = str.codePointAt(0); System.out.println(codePoint); codePoint = str.codePointBefore(3); System.out.println(codePoint); codePoint = str.codePointCount(0, 2); System.out.println(codePoint); byte[] b1 = { (byte) 0xE5, (byte) 0x8D, (byte) 0x97 }; byte[] b2 = { (byte) 0xF0, (byte) 0xA0, (byte) 0x80, (byte) 0x95 }; try { String s1 = new String(b1, "UTF-8"); String s2 = new String(b2, "UTF-8"); System.out.println("" + s1 + ":" + s2); System.out.println("UTF8" + s1.getBytes("UTF-8").length + ":" + s2.getBytes("UTF-8").length); System.out.println("*.length()" + s1.length() + ":" + s2.length()); System.out.println( "CodePointCount" + s1.codePointCount(0, s1.length()) + ":" + s2.codePointCount(0, s2.length())); } catch (Exception e) { e.printStackTrace(); } } public void printStr(String... strs) { for (int i = 0, len = strs.length; i < len; i++) { System.out.println(strs[i]); } } }
package dk.bitcraft.lc; import org.junit.Rule; import org.junit.Test; import java.util.List; import java.util.logging.Logger; import static org.assertj.core.api.Assertions.assertThat; public class JavaUtilLoggingTest { @Rule public LogCollector collector = new LogCollector(Logger.getLogger("test.logger")); @Test public void test() { { Logger log = Logger.getLogger("test.logger"); log.warning("This is an warning!"); log.warning("This is another warning!"); } assertThat(collector.getLogs()).hasSize(2); List<String> logs = collector.getLogs(); assertThat(logs.get(0)).contains("This is an warning!"); assertThat(logs.get(1)).contains("This is another warning!"); } }
package com.commerzinfo.data; import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.commons.lang.builder.ToStringStyle; import java.util.Date; public class DataRow { private Date bookingDate; private String bookingText; private Date valueDate; private Double value; public Date getBookingDate() { return bookingDate; } public void setBookingDate(Date bookingDate) { this.bookingDate = bookingDate; } public String getBookingText() { return bookingText; } public void setBookingText(String bookingText) { this.bookingText = bookingText; } public Date getValueDate() { return valueDate; } public void setValueDate(Date valueDate) { this.valueDate = valueDate; } public Double getValue() { return value; } public void setValue(Double value) { this.value = value; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DataRow dataRow = (DataRow) o; if (bookingDate != null ? !bookingDate.equals(dataRow.bookingDate) : dataRow.bookingDate != null) return false; if (bookingText != null ? !bookingText.equals(dataRow.bookingText) : dataRow.bookingText != null) return false; if (value != null ? !value.equals(dataRow.value) : dataRow.value != null) return false; if (valueDate != null ? !valueDate.equals(dataRow.valueDate) : dataRow.valueDate != null) return false; return true; } @Override public int hashCode() { int result = bookingDate != null ? bookingDate.hashCode() : 0; result = 31 * result + (bookingText != null ? bookingText.hashCode() : 0); result = 31 * result + (valueDate != null ? valueDate.hashCode() : 0); result = 31 * result + (value != null ? value.hashCode() : 0); return result; } }
package com.filestack.model;

import com.filestack.model.transform.base.ImageTransform;
import com.filestack.util.FilestackException;
import com.filestack.util.FilestackService;
import com.filestack.util.Networking;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;

import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.Okio;
import org.apache.tika.Tika;
import retrofit2.Response;

/**
 * References a file in Filestack.
 *
 * Wraps an API key and a file handle, and performs content download,
 * overwrite and delete calls through the shared CDN/API services.
 */
public class FileLink {
    private String apiKey;
    private String handle;
    private Security security;

    // Shared service clients obtained from the Networking singleton.
    private FilestackService.Cdn cdnService;
    private FilestackService.Api apiService;

    /**
     * Constructs an instance without security.
     *
     * @param apiKey account key from the dev portal
     * @param handle id for a file, first path segment in dev portal urls
     */
    public FileLink(String apiKey, String handle) {
        this(apiKey, handle, null);
    }

    /**
     * Constructs an instance with optional security.
     *
     * @param apiKey account key from the dev portal
     * @param handle id for a file, first path segment in dev portal urls
     * @param security policy/signature pair, or {@code null} for none
     */
    public FileLink(String apiKey, String handle, Security security) {
        this.apiKey = apiKey;
        this.handle = handle;
        this.security = security;

        this.cdnService = Networking.getCdnService();
        this.apiService = Networking.getApiService();
    }

    /**
     * Directly returns the content of a file.
     *
     * NOTE(review): the caller owns the returned body and must close it to
     * release the underlying connection.
     *
     * @return raw {@link ResponseBody ResponseBody } containing the file content
     * @throws IOException for network failures, invalid handles, or invalid security
     */
    public ResponseBody getContent() throws IOException {
        String policy = security != null ? security.getPolicy() : null;
        String signature = security != null ? security.getSignature() : null;
        return cdnService.get(this.handle, policy, signature).execute().body();
    }

    /**
     * Saves the file to the specified directory using the name it was uploaded with.
     *
     * @param directory location to save the file in
     * @return {@link File File} object pointing to new file
     * @throws IOException for network failures, invalid handles, or invalid security
     */
    public File download(String directory) throws IOException {
        return download(directory, null);
    }

    /**
     * Saves the file to the specified directory overriding the name it was uploaded with.
     *
     * @param directory location to save the file in
     * @param filename local name for the file; when {@code null} the server-provided
     *        {@code x-file-name} header is used
     * @return {@link File File} object pointing to new file
     * @throws IOException for network failures, invalid handles, or invalid security
     */
    public File download(String directory, String filename) throws IOException {
        String policy = security != null ? security.getPolicy() : null;
        String signature = security != null ? security.getSignature() : null;

        Response<ResponseBody> response = cdnService.get(this.handle, policy, signature).execute();

        if (filename == null) {
            filename = response.headers().get("x-file-name");
        }

        // NOTE(review): createNewFile()'s boolean result is ignored, and the
        // path is built by string concatenation; the response source is closed
        // only indirectly when the sink is closed — confirm no leak on error.
        File file = new File(directory + "/" + filename);
        file.createNewFile();

        BufferedSource source = response.body().source();
        BufferedSink sink = Okio.buffer(Okio.sink(file));

        sink.writeAll(source);
        sink.close();

        return file;
    }

    /**
     * Replace the content of an existing file handle.
     * Does not update the filename or MIME type. Requires security to be set.
     *
     * @param pathname path to the file, can be local or absolute
     * @throws IOException for network failures, invalid handles, or invalid security
     * @throws FileNotFoundException if the given pathname isn't a file or doesn't exist
     * @throws FilestackException if no security is configured
     */
    public void overwrite(String pathname) throws IOException {
        if (security == null) {
            throw new FilestackException("Overwrite requires security to be set");
        }

        File file = new File(pathname);
        if (!file.isFile()) {
            throw new FileNotFoundException(pathname);
        }

        // Detect the MIME type from the file content to build the request body.
        Tika tika = new Tika();
        String mimeType = tika.detect(file);
        RequestBody body = RequestBody.create(MediaType.parse(mimeType), file);

        apiService.overwrite(handle, security.getPolicy(), security.getSignature(), body).execute();
    }

    /**
     * Deletes a file handle.
     * Requires security to be set.
     *
     * @throws IOException for network failures, invalid handles, or invalid security
     * @throws FilestackException if no security is configured
     */
    public void delete() throws IOException {
        if (security == null) {
            throw new FilestackException("Delete requires security to be set");
        }

        apiService.delete(handle, apiKey, security.getPolicy(), security.getSignature()).execute();
    }

    /**
     * Creates an image transformation object for this file.
     * A transformation call isn't made directly by this method.
     *
     * @return {@link ImageTransform ImageTransform} instance configured for this file
     */
    public ImageTransform imageTransform() {
        return new ImageTransform(this);
    }

    /** @return the file handle this link points at */
    public String getHandle() {
        return handle;
    }

    /** @return the configured security, or {@code null} */
    public Security getSecurity() {
        return security;
    }
}
package jdk.java.util.collections; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.Predicate; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Joiner; /** * * * @since 2017-07-27 * @author fixalot */ public class ListTest { @SuppressWarnings("unused") private static final Logger logger = LoggerFactory.getLogger(ListTest.class); @SuppressWarnings("unused") private class ListTestModel { private String name; public String getName() { return name; } public void setName(String name) { this.name = name; } public ListTestModel(String name) { this.name = name; } } @Test public void testAdd() { List<Object> list = new ArrayList<>(); list.add(null); } @Test public void wrongWayClone() { ArrayList<ListTestModel> origins = new ArrayList<>(); origins.add(new ListTestModel("123")); origins.add(new ListTestModel("456")); origins.add(new ListTestModel("789")); ArrayList<ListTestModel> newbies = new ArrayList<>(origins); Assert.assertEquals(origins.get(0), newbies.get(0)); Assert.assertTrue(origins.get(0) == newbies.get(0)); } @Test public void cloneManual() { ArrayList<ListTestModel> origins = new ArrayList<>(); origins.add(new ListTestModel("123")); origins.add(new ListTestModel("456")); origins.add(new ListTestModel("789")); ArrayList<ListTestModel> newbies = new ArrayList<>(); for (ListTestModel ele : origins) { newbies.add(new ListTestModel(ele.getName())); } Assert.assertNotEquals(origins.get(0), newbies.get(0)); Assert.assertTrue(origins.get(0) != newbies.get(0)); } @Test public void getSize() { ArrayList<String> stringList = new ArrayList<>(); Assert.assertEquals(0, stringList.size()); stringList = new ArrayList<>(10); // capacity . size . 
Assert.assertEquals(0, stringList.size()); // capacity size . } /** * #1 * * @author fixalot */ @Test public void removeElement1() { ArrayList<String> list = new ArrayList<String>(Arrays.asList("a", "b", "c", "d")); Iterator<String> iter = list.iterator(); while (iter.hasNext()) { String s = iter.next(); // remove() . if (s.equals("a")) { iter.remove(); } } } /** * #2 * * @author fixalot */ @Test public void removeElement2() { ArrayList<String> list = new ArrayList<String>(Arrays.asList("a", "b", "c", "d")); for (int i = 0; i < list.size(); i++) { final String str = list.get(i); if (str.equals("a")) { list.remove(i); i } } Assert.assertEquals(Arrays.asList("b", "c", "d"), list); Assert.assertEquals(3, list.size()); } /** * #3 * * @author fixalot */ @Test public void removeElement3() { String[] strs = { "a", "b", "c", "d", "e" }; List<String> list = Arrays.stream(strs).collect(Collectors.toList()); // for (int cnt = 0, i = 0; i < list.size(); i++) { // if (cnt < 3) { // list.remove(0); // cnt++; // } // for { int cnt = 0; while (true) { if (cnt == 3) { break; } list.remove(0); cnt++; } } Assert.assertEquals("[d, e]", list.toString()); list = Arrays.stream(strs).collect(Collectors.toList()); for (int cnt = 0, i = list.size(); i >= 0; i if (cnt < 3) { list.remove(list.size() - 1); i++; cnt++; } } Assert.assertEquals("[a, b]", list.toString()); } @Test public void removeElementWithIterator() { ArrayList<String> list = new ArrayList<String>(Arrays.asList("a", "b", "c", "d")); Iterator<String> iter = list.iterator(); while (iter.hasNext()) { String s = iter.next(); if (s.equals("a")) { iter.remove(); } } Assert.assertEquals(Arrays.asList("b", "c", "d"), list); Assert.assertEquals(3, list.size()); } // @Test // public void removeElementWithStream() { // ArrayList<String> list = new ArrayList<String>(Arrays.asList("a", "b", "c", "d")); // list.stream().set /** * 1: for */ @Test public void search() { Integer[] values = { 1, 3, 7 }; List<Integer> list = new 
ArrayList<Integer>(Arrays.asList(values)); final int targetValue = 3; int targetIndex = 99; for (int i = 0; i < list.size(); i++) { if (list.get(i) == targetValue) { targetIndex = i; break; } } Assert.assertEquals(1, targetIndex); } private ArrayList<HashMap<String, Object>> getSomeList() { ArrayList<HashMap<String, Object>> list = new ArrayList<>(); HashMap<String, Object> map = new HashMap<>(); map.put("key", "a"); map.put("value", "123"); list.add(map); map = new HashMap<>(); map.put("key", "b"); map.put("value", "456"); list.add(map); map = new HashMap<>(); map.put("key", "c"); map.put("value", "789"); list.add(map); map = new HashMap<>(); map.put("key", "d"); map.put("value", "012"); list.add(map); return list; } /** * 2: apache commons {@link CollectionUtils} */ @Test public void searchWithApacheCommons() { ArrayList<HashMap<String, Object>> list = getSomeList(); // filter predicate Predicate condition = new Predicate() { @Override public boolean evaluate(Object arg) { @SuppressWarnings("unchecked") HashMap<String, Object> map = (HashMap<String, Object>) arg; String key = (String) map.get("key"); return "a".equals(key) || "c".equals(key); } }; @SuppressWarnings("unchecked") ArrayList<HashMap<String, Object>> searchResult = (ArrayList<HashMap<String, Object>>) CollectionUtils.select(list, condition); Assert.assertEquals(4, list.size()); Assert.assertEquals(2, searchResult.size()); // filter Assert.assertEquals("123", searchResult.get(0).get("value")); Assert.assertEquals("789", searchResult.get(1).get("value")); } /** * 3: java8 StreamAPI */ @Test public void searchWithStream() { List<HashMap<String, Object>> list = getSomeList(); List<HashMap<String, Object>> searchResult = list.stream().filter(ele -> "b".equals(ele.get("key")) || "d".equals(ele.get("key"))).collect(Collectors.toList()); Assert.assertEquals(4, list.size()); Assert.assertEquals(2, searchResult.size()); // filter Assert.assertEquals("456", searchResult.get(0).get("value")); 
Assert.assertEquals("012", searchResult.get(1).get("value")); } @Test public void testToArray() { List<Integer> list = new ArrayList<Integer>(); list.add(123); list.add(234); list.add(345); Assert.assertArrayEquals(new Integer[] { 123, 234, 345 }, list.toArray(new Integer[list.size()])); } @Test public void fromArray() { Integer[] values = { 1, 3, 7 }; List<Integer> list = new ArrayList<Integer>(Arrays.asList(values)); Assert.assertEquals("[1, 3, 7]", list.toString()); } @Test public void fromArrayByStream() { String[] strs = { "a", "b", "c", "d", "e" }; List<String> stringList = Arrays.stream(strs).collect(Collectors.toList()); Assert.assertEquals("[a, b, c, d, e]", stringList.toString()); int[] spam = new int[] { 1, 2, 3 }; List<Integer> integerList = Arrays.stream(spam).boxed().collect(Collectors.toList()); Assert.assertEquals("[1, 2, 3]", integerList.toString()); } @Test public void testClear() { ArrayList<Integer> list = new ArrayList<>(); Assert.assertNotNull(list); list.clear(); Assert.assertNotNull(list); } @Test public void arrayList() { ArrayList<Integer> list = new ArrayList<>(); list.add(9); list.add(8); list.add(7); list.add(6); list.add(5); list.add(4); list.add(3); list.add(2); list.add(1); list.remove(1); Assert.assertEquals("[9, 7, 6, 5, 4, 3, 2, 1]", list.toString()); } /** * sublist . substring . 
* * @author fixalot */ @Test public void testSublist() { List<Integer> numbers = Arrays.asList(5, 3, 1, 2, 9, 5, 0, 7); List<Integer> head = numbers.subList(0, 4); // 5, 3, 1, 2 Assert.assertEquals(4, head.size()); Assert.assertEquals(Arrays.asList(5, 3, 1, 2), head); List<Integer> tail = numbers.subList(4, numbers.size()); // 9, 5, 0, 7 Assert.assertEquals(4, tail.size()); Assert.assertEquals(Arrays.asList(9, 5, 0, 7), tail); } @Test public void getString() { List<String> texts = Arrays.asList("a", "b", "c"); Assert.assertEquals("a, b, c", String.join(", ", texts)); List<Integer> numbers = Arrays.asList(1, 2, 3); Joiner joiner = Joiner.on(", "); Assert.assertEquals("1, 2, 3", joiner.join(numbers)); } }
package org.takes.facets.auth;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import nl.jqno.equalsverifier.EqualsVerifier;
import nl.jqno.equalsverifier.Warning;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.takes.Response;
import org.takes.rq.RqFake;
import org.takes.rs.RsWithBody;
import org.takes.rs.RsWithStatus;
import org.takes.rs.RsWithType;

/**
 * Test case for {@link PsByFlag}.
 *
 * @author Yegor Bugayenko (yegor@teamed.io)
 * @version $Id$
 * @since 0.10
 */
public final class PsByFlagTest {

    /**
     * The pass under test, wired with a single flag/pass pair.
     */
    private transient Pass flagPass;

    /**
     * Builds the {@link PsByFlag} instance shared by the flag tests.
     *
     * @throws Exception If some problem inside
     */
    @Before
    public void setUp() throws Exception {
        this.flagPass = new PsByFlag(
            new PsByFlag.Pair("some-key", new PsFake(true))
        );
    }

    /**
     * PsByFlag can skip if nothing found.
     *
     * @throws IOException If some problem inside
     */
    @Test
    public void skipsIfNothingFound() throws IOException {
        final boolean found = this.flagPass
            .enter(new RqFake("GET", "/?PsByFlag=x"))
            .hasNext();
        MatcherAssert.assertThat(found, Matchers.is(false));
    }

    /**
     * PsByFlag finds flag and authenticates user.
     *
     * @throws IOException If some problem inside
     */
    @Test
    public void flagIsFoundUserAuthenticated() throws IOException {
        final String urn = this.flagPass
            .enter(new RqFake("POST", "/?PsByFlag=some-key"))
            .next()
            .urn();
        MatcherAssert.assertThat(urn, Matchers.is("urn:test:1"));
    }

    /**
     * PsByFlag wraps response with authenticated user.
     *
     * @throws IOException If some problem inside
     */
    @Test
    public void exitTest() throws IOException {
        final Response response = new RsWithStatus(
            new RsWithType(
                new RsWithBody("<html>This is test response</html>"),
                "text/html"
            ),
            200
        );
        final Pass pass = new PsByFlag(
            ImmutableMap.of("key", (Pass) new PsFake(true))
        );
        final Response exited = pass.exit(
            response, Mockito.mock(Identity.class)
        );
        MatcherAssert.assertThat(exited, Matchers.is(response));
    }

    /**
     * Checks PsByFlag equals method.
     *
     * @throws Exception If some problem inside
     */
    @Test
    public void equalsAndHashCodeEqualTest() throws Exception {
        EqualsVerifier.forClass(PsByFlag.class)
            .suppress(Warning.TRANSIENT_FIELDS)
            .verify();
    }
}
package com.hankcs.hanlp.seg;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.collection.AhoCorasick.AhoCorasickDoubleArrayTrie;
import com.hankcs.hanlp.collection.trie.DoubleArrayTrie;
import com.hankcs.hanlp.collection.trie.bintrie.BaseNode;
import com.hankcs.hanlp.corpus.tag.Nature;
import com.hankcs.hanlp.dictionary.CoreDictionary;
import com.hankcs.hanlp.dictionary.CustomDictionary;
import com.hankcs.hanlp.dictionary.other.CharTable;
import com.hankcs.hanlp.dictionary.other.CharType;
import com.hankcs.hanlp.seg.NShort.Path.AtomNode;
import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.seg.common.Vertex;
import com.hankcs.hanlp.seg.common.WordNet;
import com.hankcs.hanlp.utility.Predefine;
import com.hankcs.hanlp.utility.SentencesUtil;
import com.hankcs.hanlp.utility.TextUtility;

import java.util.*;

import static com.hankcs.hanlp.utility.Predefine.logger;

/**
 * Abstract base class for all segmenters. Provides the shared machinery:
 * atom (character-run) segmentation, custom-dictionary merging,
 * vertex-to-term conversion, number/quantifier merging, sentence splitting
 * and optional multithreaded segmentation. Subclasses implement
 * {@link #segSentence(char[])}.
 *
 * @author hankcs
 */
public abstract class Segment
{
    /** Segmentation configuration (index mode, NER switches, thread count, ...). */
    protected Config config;

    public Segment()
    {
        config = new Config();
    }

    /**
     * Splits a character range into "atom" nodes: runs of characters of the
     * same coarse type (numbers, single letters, CJK, delimiters, ...).
     *
     * @param charArray source characters
     * @param start     inclusive start index
     * @param end       exclusive end index
     * @return list of atom nodes covering [start, end)
     */
    protected static List<AtomNode> atomSegment(char[] charArray, int start, int end)
    {
        List<AtomNode> atomSegment = new ArrayList<AtomNode>();
        int pCur = start, nCurType, nNextType;
        StringBuilder sb = new StringBuilder();
        char c;
        // Pre-compute a char type per position, with special handling for '.'
        // inside numbers and for letters (treated as CT_SINGLE runs).
        int[] charTypeArray = new int[end - start];
        for (int i = 0; i < charTypeArray.length; ++i)
        {
            c = charArray[i + start];
            charTypeArray[i] = CharType.get(c);
            if (c == '.' && i + start < (charArray.length - 1)
                    && CharType.get(charArray[i + start + 1]) == CharType.CT_NUM)
                charTypeArray[i] = CharType.CT_NUM;    // decimal point glued to the following number
            else if (c == '.' && i + start < (charArray.length - 1)
                    && charArray[i + start + 1] >= '0' && charArray[i + start + 1] <= '9')
                // NOTE(review): this branch looks unreachable if ASCII digits map to
                // CT_NUM in CharType (the previous branch would already match) — confirm.
                charTypeArray[i] = CharType.CT_SINGLE;
            else if (charTypeArray[i] == CharType.CT_LETTER)
                charTypeArray[i] = CharType.CT_SINGLE;
        }
        while (pCur < end)
        {
            nCurType = charTypeArray[pCur - start];
            if (nCurType == CharType.CT_CHINESE || nCurType == CharType.CT_INDEX
                    || nCurType == CharType.CT_DELIMITER || nCurType == CharType.CT_OTHER)
            {
                // These types become one-character atoms.
                String single = String.valueOf(charArray[pCur]);
                if (single.length() != 0)
                    atomSegment.add(new AtomNode(single, nCurType));
                pCur++;
            }
            else if (pCur < end - 1 && ((nCurType == CharType.CT_SINGLE) || nCurType == CharType.CT_NUM))
            {
                // Greedily extend a run of the same type (number or letter run).
                sb.delete(0, sb.length());
                sb.append(charArray[pCur]);
                boolean reachEnd = true;
                while (pCur < end - 1)
                {
                    nNextType = charTypeArray[++pCur - start];
                    if (nNextType == nCurType)
                        sb.append(charArray[pCur]);
                    else
                    {
                        reachEnd = false;
                        break;
                    }
                }
                atomSegment.add(new AtomNode(sb.toString(), nCurType));
                if (reachEnd)
                    pCur++;
            }
            else
            {
                atomSegment.add(new AtomNode(charArray[pCur], nCurType));
                pCur++;
            }
        }
        return atomSegment;
    }

    /**
     * Trivial atom segmentation: the whole range becomes a single letter-typed atom.
     *
     * @param charArray source characters
     * @param start     inclusive start index
     * @param end       exclusive end index
     * @return a single-element list covering [start, end)
     */
    protected static List<AtomNode> simpleAtomSegment(char[] charArray, int start, int end)
    {
        List<AtomNode> atomNodeList = new LinkedList<AtomNode>();
        atomNodeList.add(new AtomNode(new String(charArray, start, end - start), CharType.CT_LETTER));
        return atomNodeList;
    }

    /**
     * Fast atom segmentation: cuts wherever the char type changes, except that
     * a ',' or '.' sandwiched between digits is kept inside the number run
     * (e.g. "1,234" and "3.14" stay whole).
     *
     * @param charArray source characters
     * @param start     inclusive start index
     * @param end       exclusive end index
     * @return list of atom nodes covering [start, end)
     */
    protected static List<AtomNode> quickAtomSegment(char[] charArray, int start, int end)
    {
        List<AtomNode> atomNodeList = new LinkedList<AtomNode>();
        int offsetAtom = start;
        int preType = CharType.get(charArray[offsetAtom]);
        int curType;
        while (++offsetAtom < end)
        {
            curType = CharType.get(charArray[offsetAtom]);
            if (curType != preType)
            {
                // Keep "digit , digit" and "digit . digit" inside one number atom.
                if (preType == CharType.CT_NUM && ",.".indexOf(charArray[offsetAtom]) != -1)
                {
                    if (offsetAtom + 1 < end)
                    {
                        int nextType = CharType.get(charArray[offsetAtom + 1]);
                        if (nextType == CharType.CT_NUM)
                        {
                            continue;
                        }
                    }
                }
                atomNodeList.add(new AtomNode(new String(charArray, start, offsetAtom - start), preType));
                start = offsetAtom;
            }
            preType = curType;
        }
        if (offsetAtom == end)
            atomNodeList.add(new AtomNode(new String(charArray, start, offsetAtom - start), preType));

        return atomNodeList;
    }

    /**
     * Merges adjacent vertices that together match an entry in the custom
     * dictionary (first via the double-array trie, then via the binary trie
     * for dynamically added words). Mutates and returns {@code vertexList}.
     *
     * @param vertexList coarse segmentation result (must include begin/end vertices)
     * @return the same list, with matched runs combined into single vertices
     */
    protected static List<Vertex> combineByCustomDictionary(List<Vertex> vertexList)
    {
        // FIX: the assert message literal was garbled/unterminated in the original.
        assert vertexList.size() >= 2 : "vertexList must contain at least the begin and end vertices";
        Vertex[] wordNet = new Vertex[vertexList.size()];
        vertexList.toArray(wordNet);
        // Pass 1: match against the static custom dictionary (DAT).
        DoubleArrayTrie<CoreDictionary.Attribute> dat = CustomDictionary.dat;
        int length = wordNet.length - 1;
        for (int i = 1; i < length; ++i)
        {
            int state = 1;
            state = dat.transition(wordNet[i].realWord, state);
            if (state > 0)
            {
                int to = i + 1;
                int end = to;
                CoreDictionary.Attribute value = dat.output(state);
                // Extend the match greedily; remember the longest position with output.
                for (; to < length; ++to)
                {
                    state = dat.transition(wordNet[to].realWord, state);
                    if (state < 0) break;
                    CoreDictionary.Attribute output = dat.output(state);
                    if (output != null)
                    {
                        value = output;
                        end = to + 1;
                    }
                }
                if (value != null)
                {
                    combineWords(wordNet, i, end, value);
                    i = end - 1;
                }
            }
        }
        // Pass 2: match against the dynamic custom dictionary (BinTrie), if any.
        if (CustomDictionary.trie != null)
        {
            for (int i = 1; i < length; ++i)
            {
                if (wordNet[i] == null) continue;
                BaseNode<CoreDictionary.Attribute> state =
                        CustomDictionary.trie.transition(wordNet[i].realWord.toCharArray(), 0);
                if (state != null)
                {
                    int to = i + 1;
                    int end = to;
                    CoreDictionary.Attribute value = state.getValue();
                    for (; to < length; ++to)
                    {
                        if (wordNet[to] == null) continue;
                        state = state.transition(wordNet[to].realWord.toCharArray(), 0);
                        if (state == null) break;
                        if (state.getValue() != null)
                        {
                            value = state.getValue();
                            end = to + 1;
                        }
                    }
                    if (value != null)
                    {
                        combineWords(wordNet, i, end, value);
                        i = end - 1;
                    }
                }
            }
        }
        // Compact: drop the nulls left behind by combineWords.
        vertexList.clear();
        for (Vertex vertex : wordNet)
        {
            if (vertex != null) vertexList.add(vertex);
        }
        return vertexList;
    }

    /**
     * Like {@link #combineByCustomDictionary(List)}, but additionally writes
     * sub-words of long combined words back into the full word net (used by
     * index mode so shorter dictionary hits remain retrievable).
     *
     * @param vertexList coarse segmentation result
     * @param wordNetAll full word net to receive sub-word vertices
     * @return the combined vertex list
     */
    protected static List<Vertex> combineByCustomDictionary(List<Vertex> vertexList, final WordNet wordNetAll)
    {
        List<Vertex> outputList = combineByCustomDictionary(vertexList);
        int line = 0;
        for (final Vertex vertex : outputList)
        {
            final int parentLength = vertex.realWord.length();
            final int currentLine = line;
            if (parentLength >= 3)
            {
                // Re-scan the combined word for shorter dictionary entries.
                CustomDictionary.parseText(vertex.realWord, new AhoCorasickDoubleArrayTrie.IHit<CoreDictionary.Attribute>()
                {
                    @Override
                    public void hit(int begin, int end, CoreDictionary.Attribute value)
                    {
                        if (end - begin == parentLength) return; // skip the full word itself
                        wordNetAll.add(currentLine + begin, new Vertex(vertex.realWord.substring(begin, end), value));
                    }
                });
            }
            line += parentLength;
        }
        return outputList;
    }

    /**
     * Collapses wordNet[start, end) into a single vertex stored at
     * {@code start}; the absorbed slots are set to null.
     *
     * @param wordNet vertex array being edited in place
     * @param start   inclusive start index
     * @param end     exclusive end index
     * @param value   dictionary attribute for the combined word
     */
    private static void combineWords(Vertex[] wordNet, int start, int end, CoreDictionary.Attribute value)
    {
        if (start + 1 == end)
        {
            // Single word: just retag it.
            wordNet[start].attribute = value;
        }
        else
        {
            StringBuilder sbTerm = new StringBuilder();
            for (int j = start; j < end; ++j)
            {
                if (wordNet[j] == null) continue;
                String realWord = wordNet[j].realWord;
                sbTerm.append(realWord);
                wordNet[j] = null;
            }
            wordNet[start] = new Vertex(sbTerm.toString(), value);
        }
    }

    /**
     * Converts the vertex path (including begin/end sentinels) into terms.
     *
     * @param vertexList    path of vertices; first and last are sentinels
     * @param offsetEnabled when true, fills each term's character offset
     * @return list of terms in order
     */
    protected static List<Term> convert(List<Vertex> vertexList, boolean offsetEnabled)
    {
        assert vertexList != null;
        // FIX: the assert message literal was garbled in the original.
        assert vertexList.size() >= 2 : "vertexList must contain at least 2 vertices: " + vertexList.toString();
        int length = vertexList.size() - 2; // exclude the two sentinels
        List<Term> resultList = new ArrayList<Term>(length);
        Iterator<Vertex> iterator = vertexList.iterator();
        iterator.next(); // skip begin sentinel
        if (offsetEnabled)
        {
            int offset = 0;
            for (int i = 0; i < length; ++i)
            {
                Vertex vertex = iterator.next();
                Term term = convert(vertex);
                term.offset = offset;
                offset += term.length();
                resultList.add(term);
            }
        }
        else
        {
            for (int i = 0; i < length; ++i)
            {
                Vertex vertex = iterator.next();
                Term term = convert(vertex);
                resultList.add(term);
            }
        }
        return resultList;
    }

    /**
     * Converts one vertex to a term using its guessed nature.
     *
     * @param vertex source vertex
     * @return the equivalent term
     */
    static Term convert(Vertex vertex)
    {
        return new Term(vertex.realWord, vertex.guessNature());
    }

    /**
     * Merges consecutive number vertices and a trailing quantifier into a
     * single number-quantifier (mq) vertex, keeping the word net consistent.
     *
     * @param termList   current vertex path (mutated in place)
     * @param wordNetAll full word net (mutated to stay consistent)
     * @param config     configuration (index mode controls sub-word retention)
     */
    protected void mergeNumberQuantifier(List<Vertex> termList, WordNet wordNetAll, Config config)
    {
        if (termList.size() < 4) return;
        StringBuilder sbQuantifier = new StringBuilder();
        ListIterator<Vertex> iterator = termList.listIterator();
        iterator.next(); // skip begin sentinel
        int line = 1;
        while (iterator.hasNext())
        {
            Vertex pre = iterator.next();
            if (pre.hasNature(Nature.m))
            {
                sbQuantifier.append(pre.realWord);
                Vertex cur = null;
                // Absorb the following run of number vertices into pre.
                while (iterator.hasNext() && (cur = iterator.next()).hasNature(Nature.m))
                {
                    sbQuantifier.append(cur.realWord);
                    iterator.remove();
                    removeFromWordNet(cur, wordNetAll, line, sbQuantifier.length());
                }
                if (cur != null)
                {
                    if ((cur.hasNature(Nature.q) || cur.hasNature(Nature.qv) || cur.hasNature(Nature.qt)))
                    {
                        // Number followed by a quantifier: merge it in too.
                        if (config.indexMode > 0)
                        {
                            // Index mode keeps the bare number as a retrievable sub-word.
                            wordNetAll.add(line, new Vertex(sbQuantifier.toString(), new CoreDictionary.Attribute(Nature.m)));
                        }
                        sbQuantifier.append(cur.realWord);
                        iterator.remove();
                        removeFromWordNet(cur, wordNetAll, line, sbQuantifier.length());
                    }
                    else
                    {
                        // cur ended the number run without being a quantifier;
                        // advance past it so line stays in sync.
                        line += cur.realWord.length();
                    }
                }
                if (sbQuantifier.length() != pre.realWord.length())
                {
                    // pre actually absorbed something: retag it as mq and detach
                    // stale back-links in the word net.
                    for (Vertex vertex : wordNetAll.get(line + pre.realWord.length()))
                    {
                        vertex.from = null;
                    }
                    pre.realWord = sbQuantifier.toString();
                    pre.word = Predefine.TAG_NUMBER;
                    pre.attribute = new CoreDictionary.Attribute(Nature.mq);
                    pre.wordID = CoreDictionary.M_WORD_ID;
                    sbQuantifier.setLength(0);
                }
            }
            sbQuantifier.setLength(0);
            line += pre.realWord.length();
        }
    }

    /**
     * Removes {@code cur} from the word net and clears any vertex whose
     * {@code from} link still points at it.
     *
     * @param cur        vertex being removed
     * @param wordNetAll word net to edit
     * @param line       current line (character offset) in the word net
     * @param length     accumulated length of the merged word so far
     */
    private static void removeFromWordNet(Vertex cur, WordNet wordNetAll, int line, int length)
    {
        LinkedList<Vertex>[] vertexes = wordNetAll.getVertexes();
        // Detach successors that still link back to cur.
        for (Vertex vertex : vertexes[line + length])
        {
            if (vertex.from == cur)
                vertex.from = null;
        }
        ListIterator<Vertex> iterator = vertexes[line + length - cur.realWord.length()].listIterator();
        while (iterator.hasNext())
        {
            Vertex vertex = iterator.next();
            if (vertex == cur) iterator.remove();
        }
    }

    /**
     * Segments a text. For long texts (&gt; 10000 chars) with multithreading
     * enabled, the text is split into sentences and distributed over worker
     * threads; otherwise it is segmented in one pass.
     *
     * @param text input text
     * @return list of terms (empty if interrupted during multithreaded work)
     */
    public List<Term> seg(String text)
    {
        char[] charArray = text.toCharArray();
        if (HanLP.Config.Normalization)
        {
            CharTable.normalization(charArray);
        }
        if (config.threadNumber > 1 && charArray.length > 10000) // parallelism only pays off for long texts
        {
            List<String> sentenceList = SentencesUtil.toSentenceList(charArray);
            String[] sentenceArray = new String[sentenceList.size()];
            sentenceList.toArray(sentenceArray);
            //noinspection unchecked
            List<Term>[] termListArray = new List[sentenceArray.length];
            final int per = sentenceArray.length / config.threadNumber;
            WorkThread[] threadArray = new WorkThread[config.threadNumber];
            for (int i = 0; i < config.threadNumber - 1; ++i)
            {
                int from = i * per;
                threadArray[i] = new WorkThread(sentenceArray, termListArray, from, from + per);
                threadArray[i].start();
            }
            // The last worker takes the remainder.
            threadArray[config.threadNumber - 1] = new WorkThread(sentenceArray, termListArray,
                    (config.threadNumber - 1) * per, sentenceArray.length);
            threadArray[config.threadNumber - 1].start();
            try
            {
                for (WorkThread thread : threadArray)
                {
                    thread.join();
                }
            }
            catch (InterruptedException e)
            {
                // FIX: restore the interrupt status instead of swallowing it,
                // and log a meaningful message (original message was garbled).
                Thread.currentThread().interrupt();
                logger.severe("Multithreaded segmentation was interrupted: " + TextUtility.exceptionToString(e));
                return Collections.emptyList();
            }
            List<Term> termList = new LinkedList<Term>();
            if (config.offset || config.indexMode > 0) // rebase per-sentence offsets to the whole text
            {
                int sentenceOffset = 0;
                for (int i = 0; i < sentenceArray.length; ++i)
                {
                    for (Term term : termListArray[i])
                    {
                        term.offset += sentenceOffset;
                        termList.add(term);
                    }
                    sentenceOffset += sentenceArray[i].length();
                }
            }
            else
            {
                for (List<Term> list : termListArray)
                {
                    termList.addAll(list);
                }
            }
            return termList;
        }
        return segSentence(charArray);
    }

    /**
     * Segments a character array in a single pass (normalizing first if enabled).
     *
     * @param text input characters; must not be null
     * @return list of terms
     */
    public List<Term> seg(char[] text)
    {
        assert text != null;
        if (HanLP.Config.Normalization)
        {
            CharTable.normalization(text);
        }
        return segSentence(text);
    }

    /**
     * Splits the text into sentences and segments each one separately.
     *
     * @param text input text
     * @return one term list per sentence, in order
     */
    public List<List<Term>> seg2sentence(String text)
    {
        List<List<Term>> resultList = new LinkedList<List<Term>>();
        {
            for (String sentence : SentencesUtil.toSentenceList(text))
            {
                resultList.add(segSentence(sentence.toCharArray()));
            }
        }
        return resultList;
    }

    /**
     * Segments a single sentence. Implemented by concrete segmenters.
     *
     * @param sentence sentence characters
     * @return list of terms
     */
    protected abstract List<Term> segSentence(char[] sentence);

    /**
     * Enables or disables index mode (sub-word retention for search indexing).
     *
     * @param enable true to enable with the default minimal length of 2
     * @return this, for chaining
     */
    public Segment enableIndexMode(boolean enable)
    {
        config.indexMode = enable ? 2 : 0;
        return this;
    }

    /**
     * Enables index mode with an explicit minimal sub-word length.
     *
     * @param minimalLength minimal length of retained sub-words; must be &gt;= 1
     * @return this, for chaining
     * @throws IllegalArgumentException if minimalLength &lt; 1
     */
    public Segment enableIndexMode(int minimalLength)
    {
        // FIX: meaningful exception message (original message was garbled).
        if (minimalLength < 1) throw new IllegalArgumentException("minimalLength must be at least 1");
        config.indexMode = minimalLength;

        return this;
    }

    /**
     * Enables or disables part-of-speech tagging.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enablePartOfSpeechTagging(boolean enable)
    {
        config.speechTagging = enable;
        return this;
    }

    /**
     * Enables or disables Chinese person-name recognition.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableNameRecognize(boolean enable)
    {
        config.nameRecognize = enable;
        config.updateNerConfig();
        return this;
    }

    /**
     * Enables or disables place-name recognition.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enablePlaceRecognize(boolean enable)
    {
        config.placeRecognize = enable;
        config.updateNerConfig();
        return this;
    }

    /**
     * Enables or disables organization-name recognition.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableOrganizationRecognize(boolean enable)
    {
        config.organizationRecognize = enable;
        config.updateNerConfig();
        return this;
    }

    /**
     * Enables or disables the custom dictionary.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableCustomDictionary(boolean enable)
    {
        config.useCustomDictionary = enable;
        return this;
    }

    /**
     * Enables or disables forced custom-dictionary matching; enabling it
     * implies enabling the custom dictionary itself.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableCustomDictionaryForcing(boolean enable)
    {
        if (enable)
        {
            enableCustomDictionary(true);
        }
        config.forceCustomDictionary = enable;
        return this;
    }

    /**
     * Enables or disables transliterated (foreign) name recognition.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableTranslatedNameRecognize(boolean enable)
    {
        config.translatedNameRecognize = enable;
        config.updateNerConfig();
        return this;
    }

    /**
     * Enables or disables Japanese name recognition.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableJapaneseNameRecognize(boolean enable)
    {
        config.japaneseNameRecognize = enable;
        config.updateNerConfig();
        return this;
    }

    /**
     * Enables or disables filling of {@code Term.offset}.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableOffset(boolean enable)
    {
        config.offset = enable;
        return this;
    }

    /**
     * Enables or disables number-quantifier merging
     * (e.g. merging a number run and its measure word into one term).
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableNumberQuantifierRecognize(boolean enable)
    {
        config.numberQuantifierRecognize = enable;
        return this;
    }

    /**
     * Switches every named-entity recognizer at once.
     *
     * @param enable switch
     * @return this, for chaining
     */
    public Segment enableAllNamedEntityRecognize(boolean enable)
    {
        config.nameRecognize = enable;
        config.japaneseNameRecognize = enable;
        config.translatedNameRecognize = enable;
        config.placeRecognize = enable;
        config.organizationRecognize = enable;
        config.updateNerConfig();
        return this;
    }

    /**
     * Worker thread that segments a slice of the sentence array and stores
     * each result at the sentence's own index (no locking needed).
     */
    class WorkThread extends Thread
    {
        String[] sentenceArray;
        List<Term>[] termListArray;
        int from;
        int to;

        public WorkThread(String[] sentenceArray, List<Term>[] termListArray, int from, int to)
        {
            this.sentenceArray = sentenceArray;
            this.termListArray = termListArray;
            this.from = from;
            this.to = to;
        }

        @Override
        public void run()
        {
            for (int i = from; i < to; ++i)
            {
                termListArray[i] = segSentence(sentenceArray[i].toCharArray());
            }
        }
    }

    /**
     * Enables or disables multithreaded segmentation.
     *
     * @param enable true to use one thread per available CPU, false for single-threaded
     * @return this, for chaining
     */
    public Segment enableMultithreading(boolean enable)
    {
        if (enable) config.threadNumber = Runtime.getRuntime().availableProcessors();
        else config.threadNumber = 1;
        return this;
    }

    /**
     * Sets an explicit segmentation thread count.
     *
     * @param threadNumber number of worker threads
     * @return this, for chaining
     */
    public Segment enableMultithreading(int threadNumber)
    {
        config.threadNumber = threadNumber;
        return this;
    }
}
package org.voovan.test.db;

import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.voovan.db.JdbcOperate;
import org.voovan.tools.TEnv;
import org.voovan.tools.TObject;
import org.voovan.tools.TProperties;
import org.voovan.tools.log.Logger;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidDataSourceFactory;

/**
 * Demo of the different JdbcOperate query styles (map list, bean list,
 * single bean, transaction with rollback, stored-procedure call) against
 * a Druid-pooled data source configured from Config/datasource.properties.
 */
public class JdbcOperateDemo {

    public static void main(String[] args) throws Exception {
        DruidDataSource dataSource = null;
        try {
            String druidPath = TEnv.getSystemPath("Config" + File.separator + "datasource.properties");
            Properties druidProperites = TProperties.getProperties(new File(druidPath));
            dataSource = TObject.cast(DruidDataSourceFactory.createDataSource(druidProperites));
            dataSource.init();
            Logger.info("Database connection pool init finished");
        } catch (Exception e) {
            Logger.error(e);
            // BUG FIX: the original logged the failure and then continued with a
            // null data source, guaranteeing a failure on first use. Abort instead.
            return;
        }

        try {
            JdbcOperate jOperate = new JdbcOperate(dataSource);

            // Plain query -> List<Map>
            List<Map<String, Object>> smm = jOperate.queryMapList("select * from sc_script");
            Logger.info(smm);

            // Named parameters from a Map -> List<Map>
            HashMap<String, Object> xMap = new HashMap<String, Object>();
            xMap.put("packagePath", "org.hocate.test");
            List<Map<String, Object>> mm = jOperate.queryMapList("select * from sc_script where PackagePath=::packagePath", xMap);
            Logger.info(mm);

            // Named parameters from a bean -> List<Object>
            ScriptEntity sEntity = new ScriptEntity();
            sEntity.setPackagePath("org.hocate.test");
            List<ScriptEntity> lmm = jOperate.queryObjectList("select * from sc_script where PackagePath=::packagePath", ScriptEntity.class, sEntity);
            Logger.info(lmm);

            // Positional parameters -> single Object
            ScriptEntity llmm = jOperate.queryObject("select * from sc_script where PackagePath=::1 and version=::2", ScriptEntity.class, "org.hocate.test", 2.0);
            Logger.info(llmm);

            // Transactional update followed by an explicit rollback
            jOperate = new JdbcOperate(dataSource, true);
            Logger.info(jOperate.update("update sc_script set version=0"));
            Logger.info(jOperate.queryMapList("select * from sc_script"));
            jOperate.rollback();

            // Stored-procedure call (MySQL)
            String llmm1 = jOperate.queryObject("call test", String.class);
            Logger.info("xxxxx" + llmm1);
        } finally {
            // BUG FIX: release the pool so the demo JVM can exit cleanly.
            dataSource.close();
        }
    }
}
package rsc.parallel;

import java.util.concurrent.TimeUnit;

import org.junit.Test;

import rsc.publisher.Px;
import rsc.scheduler.*;
import rsc.test.TestSubscriber;

/**
 * Tests for forking a publisher into parallel rails and joining it back,
 * both without a scheduler (sequential) and on a ParallelScheduler.
 */
public class ParallelPublisherTest {

    @Test
    public void sequentialMode() {
        Px<Integer> upstream = Px.range(1, 1_000_000);

        // Sweep the parallelism degree; the joined output must always be complete.
        for (int parallelism = 1; parallelism < 33; parallelism++) {
            TestSubscriber<Integer> subscriber = new TestSubscriber<>();

            ParallelPublisher.fork(upstream, false, parallelism)
                             .map(v -> v + 1)
                             .join()
                             .subscribe(subscriber);

            subscriber.assertSubscribed()
                      .assertValueCount(1_000_000)
                      .assertComplete()
                      .assertNoError();
        }
    }

    @Test
    public void parallelMode() {
        Px<Integer> upstream = Px.range(1, 1_000_000);

        int cpuCount = Math.max(4, Runtime.getRuntime().availableProcessors());

        for (int parallelism = 1; parallelism < cpuCount + 1; parallelism++) {
            Scheduler scheduler = new ParallelScheduler(parallelism);
            try {
                TestSubscriber<Integer> subscriber = new TestSubscriber<>();

                ParallelPublisher.fork(upstream, false, parallelism)
                                 .runOn(scheduler)
                                 .map(v -> v + 1)
                                 .join()
                                 .subscribe(subscriber);

                // Asynchronous rails: wait for termination before asserting.
                subscriber.assertTerminated(10, TimeUnit.SECONDS);

                subscriber.assertSubscribed()
                          .assertValueCount(1_000_000)
                          .assertComplete()
                          .assertNoError();
            } finally {
                scheduler.shutdown();
            }
        }
    }
}
package com.imap4j.hbase;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

/**
 * Describes one persisted field of an entity class: its reflective Field,
 * the HBase family/column it maps to, an optional lookup method that
 * produces the serialized bytes, and helpers to serialize scalar or array
 * values via ObjectOutputStream.
 */
public class FieldAttrib {

    /**
     * Component type of the field (primitive kinds plus a catch-all ObjectType),
     * used to pick the right ObjectOutputStream write method.
     */
    private enum Type {

        BooleanType(Boolean.TYPE),
        ByteType(Byte.TYPE),
        CharType(Character.TYPE),
        ShortType(Short.TYPE),
        IntegerType(Integer.TYPE),
        LongType(Long.TYPE),
        FloatType(Float.TYPE),
        DoubleType(Double.TYPE),
        ObjectType(Object.class);

        private final Class<?> clazz;

        private Type(final Class<?> clazz) {
            this.clazz = clazz;
        }

        private Class<?> getClazz() {
            return clazz;
        }

        /**
         * Maps a reflective field (or its array component) to a Type constant.
         *
         * @throws PersistException if the primitive kind is not handled
         */
        private static Type getType(final Field field) throws PersistException {
            final Class<?> fieldClass = field.getType();
            // Arrays are classified by their component type.
            final Class<?> clazz = fieldClass.isArray() ? fieldClass.getComponentType() : fieldClass;
            if (!clazz.isPrimitive()) {
                return ObjectType;
            }
            for (final Type type : values())
                if (clazz == type.getClazz())
                    return type;
            throw new PersistException("Not dealing with type: " + clazz);
        }
    }

    private final Field field;
    private final Type type;
    private final String family;
    private final String column;
    private final String lookup;
    private final boolean mapKeysAsColumns;

    // Resolved lazily in the constructor when a lookup name is configured.
    private Method lookupMethod = null;

    /**
     * @param enclosingClass class declaring the field (used to resolve the lookup method)
     * @param field          the reflective field
     * @param column         the Column annotation carrying the mapping metadata
     * @throws PersistException if the lookup method is missing or has a wrong return type
     */
    public FieldAttrib(final Class<?> enclosingClass, final Field field, final Column column) throws PersistException {

        this.field = field;
        this.type = Type.getType(this.field);

        this.family = column.family();
        // Default the column name to the field name when the annotation leaves it blank.
        this.column = column.column().length() > 0 ? column.column() : this.getField().getName();
        this.lookup = column.lookup();
        this.mapKeysAsColumns = column.mapKeysAsColumns();

        try {
            if (this.isLookupAttrib()) {
                this.lookupMethod = enclosingClass.getDeclaredMethod(this.lookup);

                // The lookup method must produce the serialized bytes directly.
                final Class<?> retClazz = this.getLookupMethod().getReturnType();
                if (!(retClazz.isArray() && retClazz.getComponentType() == Byte.TYPE))
                    throw new PersistException(enclosingClass.getName() + "." + this.lookup + "()"
                                               + " does not have a return type of byte[]");
            }
        }
        catch (NoSuchMethodException e) {
            throw new PersistException("Missing method " + enclosingClass.getName() + "." + this.lookup + "()");
        }
    }

    @Override
    public String toString() {
        return this.getField().getDeclaringClass() + "." + this.getField().getName();
    }

    public Type getComponentType() {
        return type;
    }

    private Method getLookupMethod() {
        return lookupMethod;
    }

    public boolean isLookupAttrib() {
        return this.lookup.length() > 0;
    }

    public String getFamily() {
        return this.family;
    }

    public String getColumn() {
        return column;
    }

    public Field getField() {
        return field;
    }

    public boolean isMapKeysAsColumns() {
        return this.mapKeysAsColumns;
    }

    /**
     * Serializes the value: via the configured lookup method if any,
     * otherwise by reflective serialization of the object itself.
     */
    public byte[] getValue(Object obj) throws IOException, PersistException {
        if (this.isLookupAttrib())
            return invokeLookupMethod(obj);
        else
            return this.asBytes(obj);
    }

    private byte[] invokeLookupMethod(Object obj) throws PersistException {
        try {
            return (byte[]) this.getLookupMethod().invoke(obj);
        }
        catch (IllegalAccessException e) {
            // NOTE(review): the original cause is dropped here; consider a
            // PersistException constructor that accepts a cause.
            throw new PersistException("Error getting value of " + this.getField().getName());
        }
        catch (InvocationTargetException e) {
            throw new PersistException("Error getting value of " + this.getField().getName());
        }
    }

    /**
     * Serializes a scalar or array value with ObjectOutputStream semantics.
     */
    public byte[] asBytes(final Object obj) throws IOException, PersistException {
        final Class<?> clazz = obj.getClass();
        if (clazz.isArray())
            return this.getArrayAsBytes(obj);
        else
            return this.getScalarAsBytes(obj);
    }

    private byte[] getScalarAsBytes(final Object obj) throws IOException, PersistException {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final ObjectOutputStream oos = new ObjectOutputStream(baos);

        switch (this.getComponentType()) {
            case BooleanType:
                oos.writeBoolean((Boolean) obj);
                break;
            case ByteType:
                oos.writeByte((Byte) obj);
                break;
            case CharType:
                // BUG FIX: the original cast the boxed Character to Byte
                // ((Byte)obj), which throws ClassCastException at runtime.
                oos.writeChar((Character) obj);
                break;
            case ShortType:
                oos.writeShort((Short) obj);
                break;
            case IntegerType:
                oos.writeInt((Integer) obj);
                break;
            case LongType:
                oos.writeLong((Long) obj);
                break;
            case FloatType:
                oos.writeFloat((Float) obj);
                break;
            case DoubleType:
                oos.writeDouble((Double) obj);
                break;
            case ObjectType:
                oos.writeObject(obj);
                break;
        }

        oos.flush();
        return baos.toByteArray();
    }

    private byte[] getArrayAsBytes(final Object obj) throws IOException, PersistException {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final ObjectOutputStream oos = new ObjectOutputStream(baos);

        switch (this.getComponentType()) {
            case BooleanType: {
                final boolean[] val = (boolean[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.writeBoolean(val[i]);
                break;
            }
            case ByteType: {
                final byte[] val = (byte[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.write(val[i]);
                break;
            }
            case CharType: {
                final char[] val = (char[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.write(val[i]);
                break;
            }
            case ShortType: {
                final short[] val = (short[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.writeShort(val[i]);
                break;
            }
            case IntegerType: {
                final int[] val = (int[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.writeInt(val[i]);
                break;
            }
            case LongType: {
                final long[] val = (long[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.writeLong(val[i]);
                break;
            }
            case FloatType: {
                final float[] val = (float[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.writeFloat(val[i]);
                break;
            }
            case DoubleType: {
                final double[] val = (double[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.writeDouble(val[i]);
                break;
            }
            case ObjectType: {
                final Object[] val = (Object[]) obj;
                for (int i = 0; i < val.length; i++)
                    oos.writeObject(val[i]);
                break;
            }
        }
        oos.flush();
        return baos.toByteArray();
    }
}
package org.apache.commons.lang;

import java.math.BigDecimal;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Unit tests for {@link NumberRange}: bounds, equality, containment,
 * overlap and string rendering.
 */
public final class NumberRangeTest extends TestCase {

    private NumberRange tenToTwenty;
    private NumberRange fifteenToTwentyFive;
    private NumberRange fiveToNine;
    private Number five;
    private Number nine;
    private Number ten;
    private Number fifteen;
    private Number twenty;
    private Number twentyFive;

    public NumberRangeTest(String name) {
        super(name);
    }

    public void setUp() {
        five = new Integer(5);
        nine = new Double(9.0);
        ten = new Integer(10);
        fifteen = new Integer(15);
        twenty = new Integer(20);
        twentyFive = new Integer(25);

        tenToTwenty = new NumberRange(ten, twenty);
        fifteenToTwentyFive = new NumberRange(fifteen, twentyFive);
        fiveToNine = new NumberRange(five, nine);
    }

    public static Test suite() {
        TestSuite suite = new TestSuite(NumberRangeTest.class);
        suite.setName("NumberRange Tests");
        return suite;
    }

    public void testMaxMin() {
        assertTrue(tenToTwenty.getMaximum().equals(twenty));
        assertTrue(tenToTwenty.getMinimum().equals(ten));
    }

    public void testEquals() {
        assertFalse(tenToTwenty.equals(new NumberRange(five, ten)));
        assertTrue(tenToTwenty.equals(new NumberRange(ten, twenty)));
        assertFalse(tenToTwenty.equals(new NumberRange(ten, fifteen)));
        assertFalse(tenToTwenty.equals(new NumberRange(fifteen, twenty)));
    }

    public void testEqualsWithOtherObject() {
        assertEquals(
            "A NumberRange should not equals a String object",
            false,
            fiveToNine.equals("TEST"));
    }

    public void testEqualsWithSameReference() {
        assertEquals(
            "A NumberRange should equal itself",
            true,
            fiveToNine.equals(fiveToNine));
    }

    public void testEqualsNull() {
        assertEquals(
            "A NumberRange should not equal null",
            false,
            fiveToNine.equals(null));
    }

    public void testHashCode() {
        NumberRange same = new NumberRange(new Integer(5), new Double(9.0));
        assertEquals(
            "The hashCode of 5-9 should equals the hashcode of another NumberRange of the same min/max",
            fiveToNine.hashCode(),
            same.hashCode());
        assertTrue(
            "The hashCode of 10-20 should not equal the hashCode of 5-9",
            fiveToNine.hashCode() != tenToTwenty.hashCode());
    }

    public void testIncludesNumber() {
        assertFalse(tenToTwenty.includesNumber(five));
        assertTrue(tenToTwenty.includesNumber(ten));
        assertTrue(tenToTwenty.includesNumber(fifteen));
        assertTrue(tenToTwenty.includesNumber(twenty));
        assertFalse(tenToTwenty.includesNumber(twentyFive));
    }

    public void testIncludesNumberNull() {
        assertEquals(
            "Includes number should return false for null values",
            false,
            tenToTwenty.includesNumber(null));
    }

    public void testIncludesRange() {
        assertFalse(tenToTwenty.includesRange(new NumberRange(five, ten)));
        assertFalse(tenToTwenty.includesRange(new NumberRange(five, fifteen)));
        assertTrue(tenToTwenty.includesRange(new NumberRange(ten, fifteen)));
        assertTrue(tenToTwenty.includesRange(new NumberRange(ten, twenty)));
        assertTrue(tenToTwenty.includesRange(new NumberRange(fifteen, twenty)));
        assertFalse(tenToTwenty.includesRange(new NumberRange(fifteen, twentyFive)));
        assertFalse(tenToTwenty.includesRange(new NumberRange(twenty, twentyFive)));
    }

    public void testIncludesRangeNull() {
        assertEquals(
            "Includes range should return false for null values",
            false,
            tenToTwenty.includesRange(null));
    }

    public void testConstructor() {
        NumberRange point = new NumberRange(new Double(2.0));
        assertEquals("Unexpected min on NumberRange", 2.0, point.getMinimum().doubleValue(), Double.MIN_VALUE);
        assertEquals("Unexpected max on NumberRange", 2.0, point.getMaximum().doubleValue(), Double.MIN_VALUE);
    }

    public void testConstructorNullParameters() {
        try {
            new NumberRange(null);
            fail("NumberRange(null) did not throw an exception.");
        } catch (Exception e) {
            assertTrue("NumberRange(null)", e instanceof NullPointerException);
        }

        try {
            new NumberRange(five, null);
            fail("NumberRange(five, null) did not throw an exception.");
        } catch (Exception e) {
            assertTrue("NumberRange(five, null)", e instanceof NullPointerException);
        }

        try {
            new NumberRange(null, five);
            fail("NumberRange(null, five) did not throw an exception.");
        } catch (Exception e) {
            assertTrue("NumberRange(null, five)", e instanceof NullPointerException);
        }
    }

    public void testConstructorWithMaxLessThanMin() {
        // When max < min the range collapses to the min value.
        NumberRange collapsed = new NumberRange(new Double(2.0), new Double(1.0));
        assertEquals("Unexpected min on NumberRange", 2.0, collapsed.getMinimum().doubleValue(), Double.MIN_VALUE);
        assertEquals("Unexpected max on NumberRange", 2.0, collapsed.getMaximum().doubleValue(), Double.MIN_VALUE);
    }

    public void testOverlap() {
        assertEquals(
            "5-9 should not overlap 10-20",
            false,
            fiveToNine.overlaps(tenToTwenty));
        assertEquals(
            "10-20 should overlap 15-25",
            true,
            tenToTwenty.overlaps(fifteenToTwentyFive));
    }

    public void testOverlapNull() {
        assertEquals(
            "5-9 should not overlap null",
            false,
            fiveToNine.overlaps(null));
    }

    public void testToString() {
        assertEquals("10-20", tenToTwenty.toString());
    }

    public void testToStringWithNegatives() {
        NumberRange bothNegative = new NumberRange(new Integer(-20), new Integer(-10));
        assertEquals("(-20)-(-10)", bothNegative.toString());

        NumberRange mixedSign = new NumberRange(new Integer(-20), new Integer(10));
        assertEquals("(-20)-10", mixedSign.toString());
    }
}
package com.jaamsim.input;

import java.awt.FileDialog;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import javax.swing.JOptionPane;

import com.jaamsim.ui.ExceptionBox;
import com.jaamsim.ui.FrameBox;
import com.sandwell.JavaSimulation.Entity;
import com.sandwell.JavaSimulation.ErrorException;
import com.sandwell.JavaSimulation.FileEntity;
import com.sandwell.JavaSimulation.FileInput;
import com.sandwell.JavaSimulation.Group;
import com.sandwell.JavaSimulation.Input;
import com.sandwell.JavaSimulation.Input.ParseContext;
import com.sandwell.JavaSimulation.InputErrorException;
import com.sandwell.JavaSimulation.ObjectType;
import com.sandwell.JavaSimulation.Palette;
import com.sandwell.JavaSimulation.Simulation;
import com.sandwell.JavaSimulation.StringVector;
import com.sandwell.JavaSimulation.Util;
import com.sandwell.JavaSimulation3D.GUIFrame;

/**
 * Static helper responsible for reading, parsing and writing JaamSim
 * configuration (.cfg) files, and for routing parsed keyword records to the
 * model {@link Entity} instances they configure.  Also owns the input trace
 * log and the input error/warning counters.
 *
 * All state is static; this class is not thread-safe and is expected to be
 * driven from the GUI/setup thread.
 */
public class InputAgent {
	/** Marker line written into a cfg file before user-added records. */
	private static final String addedRecordMarker = "\" *** Added Records ***";
	private static int numErrors = 0;
	private static int numWarnings = 0;
	private static FileEntity logFile;

	private static double lastTimeForTrace;

	private static String configFileName;
	private static boolean batchRun;
	private static boolean sessionEdited;
	private static boolean addedRecordFound;
	// true while edits should be recorded (i.e. after initial configuration)
	private static boolean recordEdits;

	// ConfigurationFile load and save variables
	final protected static int SAVE_ONLY = 2;

	private static final String INP_ERR_DEFINEUSED = "The name: %s has already been used and is a %s";

	private static String reportDirectory;

	static {
		addedRecordFound = false;
		sessionEdited = false;
		batchRun = false;
		configFileName = null;
		reportDirectory = "";
		lastTimeForTrace = -1.0d;
	}

	/** Resets all static state ahead of loading a new model. */
	public static void clear() {
		logFile = null;
		numErrors = 0;
		numWarnings = 0;
		addedRecordFound = false;
		sessionEdited = false;
		configFileName = null;
		reportDirectory = "";
		lastTimeForTrace = -1.0d;
	}

	public static String getReportDirectory() {
		return reportDirectory;
	}

	/**
	 * Sets (and creates, if needed) the directory reports are written to.
	 * The stored path always ends with a backslash separator.
	 */
	public static void setReportDirectory(String dir) {
		reportDirectory = Util.getAbsoluteFilePath(dir);
		if (!reportDirectory.substring(reportDirectory.length() - 1).equals("\\"))
			reportDirectory = reportDirectory + "\\";

		// Create the report directory if it does not already exist
		// This code should probably be added to FileEntity someday to
		// create necessary folders on demand.
		File f = new File(reportDirectory);
		f.mkdirs();
	}

	public static void setConfigFileName(String name) {
		configFileName = name;
	}

	public static String getConfigFileName() {
		return configFileName;
	}

	/**
	 * if ( fileFullName = "C:\Projects\A01.cfg" ), returns "A01.cfg"
	 * if ( fileFullName = "A01.cfg" ), returns "A01.cfg"
	 */
	private static String shortName(String fileFullName) {
		int idx = Math.max(fileFullName.lastIndexOf('\\'), fileFullName.lastIndexOf('/'));
		// if idx is -1, we return the entire string
		return fileFullName.substring(idx + 1);
	}

	/**
	 * Returns the configuration file's base name without directory or
	 * extension, or "" when no configuration file is set.
	 */
	public static String getRunName() {
		String runName;
		if( InputAgent.getConfigFileName() == null ) {
			runName = "";
		}
		else {
			String shortName = shortName(InputAgent.getConfigFileName());
			int index = shortName.indexOf( "." );
			if( index > -1 ) {
				runName = shortName.substring( 0, index );
			}
			else {
				runName = shortName;
			}
		}
		return runName;
	}

	public static boolean hasAddedRecords() {
		return addedRecordFound;
	}

	public static boolean recordEdits() {
		return recordEdits;
	}

	public static void setRecordEdits(boolean b) {
		recordEdits = b;
	}

	public static boolean isSessionEdited() {
		return sessionEdited;
	}

	public static void setBatch(boolean batch) {
		batchRun = batch;
	}

	public static boolean getBatch() {
		return batchRun;
	}

	/**
	 * returns true if the first and last tokens are matched braces
	 **/
	public static boolean enclosedByBraces(ArrayList<String> tokens) {
		if(tokens.size() < 2 || tokens.indexOf("{") < 0) // no braces
			return false;

		// Scan from the second token; level starts at 1 for the opening brace
		int level = 1;
		int i = 1;
		for(String each: tokens.subList(1, tokens.size())) {
			if(each.equals("{")) {
				level++;
			}
			if(each.equals("}")) {
				level--;	// restored decrement: matching close brace found

				if(level == 0)
					break;
			}
			i++;
		}

		// The level-0 close brace must be the final token
		if(level == 0 && i == tokens.size()-1) {
			return true;
		}

		return false;
	}

	/**
	 * Updates the running brace depth for the tokens appended since
	 * startingIndex, logging and discarding the record when the depth goes
	 * negative or exceeds the maximum of 2.
	 */
	private static int getBraceDepth(ArrayList<String> tokens, int startingBraceDepth, int startingIndex) {
		int braceDepth = startingBraceDepth;
		for (int i = startingIndex; i < tokens.size(); i++) {
			String token = tokens.get(i);

			if (token.equals("{"))
				braceDepth++;

			if (token.equals("}"))
				braceDepth--;	// restored decrement to balance the open count

			if (braceDepth < 0) {
				InputAgent.logBadInput(tokens, "Extra closing braces found");
				tokens.clear();
			}

			if (braceDepth > 2) {
				InputAgent.logBadInput(tokens, "Maximum brace depth (2) exceeded");
				tokens.clear();
			}
		}

		return braceDepth;
	}

	private static URI resRoot;
	private static URI resPath;
	private static final String res = "/resources/";

	static {
		try {
			// locate the resource folder, and create
			resRoot = InputAgent.class.getResource(res).toURI();
		}
		catch (URISyntaxException e) {}

		resPath = URI.create(resRoot.toString());
	}

	/**
	 * Re-throws a checked exception as an InputErrorException, preserving the
	 * original message and stack trace text.
	 */
	private static void rethrowWrapped(Exception ex) {
		StringBuilder causedStack = new StringBuilder();
		for (StackTraceElement elm : ex.getStackTrace())
			causedStack.append(elm.toString()).append("\n");
		throw new InputErrorException("Caught exception: %s", ex.getMessage() + "\n" + causedStack.toString());
	}

	/** Reads a configuration file bundled in the application's resource folder. */
	public static final void readResource(String res) {
		if (res == null)
			return;

		try {
			readStream(resRoot.toString(), resPath, res);
			GUIFrame.instance().setProgressText(null);
		}
		catch (URISyntaxException ex) {
			rethrowWrapped(ex);
		}
	}

	/**
	 * Reads and processes one input stream (file or resource), resolving it
	 * against the given context path and confining all Include references to
	 * the given jail root.
	 *
	 * @param root jail prefix that all resolved paths must stay within
	 * @param path context URI the file name is resolved against
	 * @param file file name, possibly relative to path
	 * @return true when the stream was opened and processed
	 */
	public static final boolean readStream(String root, URI path, String file) throws URISyntaxException {
		String shortName = file.substring(file.lastIndexOf('/') + 1, file.length());
		GUIFrame.instance().setProgressText(shortName);

		URI resolved = getFileURI(path, file, root);

		String resolvedPath = resolved.getSchemeSpecificPart();
		String currentDir = resolvedPath.substring(0, resolvedPath.lastIndexOf('/') + 1);

		String oldRoot = FileEntity.getRootDirectory();
		FileEntity.setRootDirectory(currentDir);

		URL url = null;
		try {
			url = resolved.normalize().toURL();
		}
		catch (MalformedURLException e) {
			rethrowWrapped(e);
		}

		if (url == null) {
			InputAgent.logWarning("Unable to resolve path %s%s - %s", root, path.toString(), file);
			return false;
		}

		BufferedReader buf = null;
		try {
			InputStream in = url.openStream();
			buf = new BufferedReader(new InputStreamReader(in));
		}
		catch (IOException e) {
			InputAgent.logWarning("Could not read from %s", url.toString());
			return false;
		}

		try {
			ArrayList<String> record = new ArrayList<String>();
			int braceDepth = 0;

			Input.ParseContext pc = new Input.ParseContext();
			pc.jail = root;
			pc.context = path;

			while (true) {
				String line = buf.readLine();
				// end of file, stop reading
				if (line == null)
					break;

				if ( line.trim().equalsIgnoreCase( addedRecordMarker ) ) {
					addedRecordFound = true;
				}

				// Records accumulate across lines until all braces are closed
				int previousRecordSize = record.size();
				Parser.tokenize(record, line, true);
				braceDepth = InputAgent.getBraceDepth(record, braceDepth, previousRecordSize);
				if( braceDepth != 0 )
					continue;

				if (record.size() == 0)
					continue;

				InputAgent.echoInputRecord(record);

				if ("DEFINE".equalsIgnoreCase(record.get(0))) {
					InputAgent.processDefineRecord(record);
					record.clear();
					continue;
				}

				if ("INCLUDE".equalsIgnoreCase(record.get(0))) {
					try {
						InputAgent.processIncludeRecord(pc.jail, resolved, record);
					}
					catch (URISyntaxException ex) {
						rethrowWrapped(ex);
					}
					record.clear();
					continue;
				}

				// Otherwise assume it is a Keyword record
				InputAgent.processKeywordRecord(record, pc);
				record.clear();
			}

			// Leftover Input at end of file
			if (record.size() > 0)
				InputAgent.logBadInput(record, "Leftover input at end of file");

			buf.close();
		}
		catch (IOException e) {
			// Make best effort to ensure it closes
			try { buf.close(); } catch (IOException e2) {}
		}

		FileEntity.setRootDirectory(oldRoot);

		return true;
	}

	/** Handles an "Include &lt;File&gt;" record by reading the referenced file. */
	private static void processIncludeRecord(String root, URI path, ArrayList<String> record) throws URISyntaxException {
		if (record.size() != 2) {
			InputAgent.logError("Bad Include record, should be: Include <File>");
			return;
		}
		InputAgent.readStream(root, path, record.get(1).replaceAll("\\\\", "/"));
	}

	/** Handles a "Define &lt;Type&gt; { &lt;names&gt;... }" record by creating entities. */
	private static void processDefineRecord(ArrayList<String> record) {
		if (record.size() < 5 ||
		    !record.get(2).equals("{") ||
		    !record.get(record.size() - 1).equals("}")) {
			InputAgent.logError("Bad Define record, should be: Define <Type> { <names>... }");
			return;
		}

		Class<? extends Entity> proto = null;
		try {
			if( record.get( 1 ).equalsIgnoreCase( "Palette" ) ) {
				proto = Palette.class;
			}
			else if( record.get( 1 ).equalsIgnoreCase( "ObjectType" ) ) {
				proto = ObjectType.class;
			}
			else {
				proto = Input.parseEntityType(record.get(1));
			}
		}
		catch (InputErrorException e) {
			InputAgent.logError("%s", e.getMessage());
			return;
		}

		// Loop over all the new Entity names
		for (int i = 3; i < record.size() - 1; i++) {
			InputAgent.defineEntity(proto, record.get(i), addedRecordFound);
		}
	}

	/**
	 * Like defineEntity(), but will generate a unique name if a name collision exists
	 * @param proto
	 * @param key
	 * @param addedEntity
	 * @return the newly created entity
	 */
	public static <T extends Entity> T defineEntityWithUniqueName(Class<T> proto, String key, boolean addedEntity) {
		// Has the provided name been used already?
		if (Entity.getNamedEntity(key) == null) {
			return defineEntity(proto, key, addedEntity);
		}

		// Try the provided name plus "-1", "-2", etc. until an unused name is found
		int entityNum = 1;
		while(true) {
			String name = String.format("%s-%d", key, entityNum);
			if (Entity.getNamedEntity(name) == null) {
				return defineEntity(proto, name, addedEntity);
			}

			entityNum++;
		}
	}

	/**
	 * if addedEntity is true then this is an entity defined
	 * by user interaction or after added record flag is found;
	 * otherwise, it is from an input file define statement
	 * before the model is configured
	 * @param proto
	 * @param key
	 * @param addedEntity
	 * @return the new entity, or null when the name is taken or creation failed
	 */
	public static <T extends Entity> T defineEntity(Class<T> proto, String key, boolean addedEntity) {
		Entity existingEnt = Input.tryParseEntity(key, Entity.class);
		if (existingEnt != null) {
			InputAgent.logError(INP_ERR_DEFINEUSED, key, existingEnt.getClass().getSimpleName());
			return null;
		}

		T ent = null;
		try {
			ent = proto.newInstance();
			if (addedEntity) {
				ent.setFlag(Entity.FLAG_ADDED);
				sessionEdited = true;
			}
		}
		catch (InstantiationException e) {}
		catch (IllegalAccessException e) {}
		finally {
			if (ent == null) {
				InputAgent.logError("Could not create new Entity: %s", key);
				return null;
			}
		}

		ent.setInputName(key);
		return ent;
	}

	/**
	 * Applies every keyword in a record of the form
	 * "&lt;Entity&gt; &lt;Keyword&gt; { &lt;args&gt; } ..." to the named entity.
	 */
	public static void processKeywordRecord(ArrayList<String> record, Input.ParseContext context) {
		Entity ent = Input.tryParseEntity(record.get(0), Entity.class);
		if (ent == null) {
			InputAgent.logError("Could not find Entity: %s", record.get(0));
			return;
		}

		// Validate the tokens have the Entity Keyword { Args... } Keyword { Args... }
		ArrayList<KeywordIndex> words = InputAgent.getKeywords(record, context);
		for (KeywordIndex keyword : words) {
			try {
				InputAgent.processKeyword(ent, keyword);
			}
			catch (Throwable e) {
				InputAgent.logInpError("Entity: %s, Keyword: %s - %s", ent.getInputName(), keyword.keyword, e.getMessage());
			}
		}
	}

	/**
	 * Index of one "Keyword { args }" span inside a tokenized record.
	 * start points at the keyword token; end points at the closing brace.
	 */
	public static class KeywordIndex {
		public final ArrayList<String> input;
		public final String keyword;
		public final int start;
		public final int end;
		public final ParseContext context;

		public KeywordIndex(ArrayList<String> inp, int s, int e, ParseContext ctxt) {
			input = inp;
			keyword = input.get(s);
			start = s;
			end = e;
			context = ctxt;
		}
	}

	/**
	 * Splits a record into its "Keyword { args }" spans, throwing
	 * InputErrorException when any span is not brace-delimited.
	 */
	private static ArrayList<KeywordIndex> getKeywords(ArrayList<String> input, ParseContext context) {
		ArrayList<KeywordIndex> ret = new ArrayList<KeywordIndex>();

		int braceDepth = 0;
		int index = 1;
		for (int i = 1; i < input.size(); i++) {
			String tok = input.get(i);
			if ("{".equals(tok)) {
				braceDepth++;
				continue;
			}

			if ("}".equals(tok)) {
				braceDepth--;	// restored decrement to close the span at depth 0
				if (braceDepth == 0) {
					ret.add(new KeywordIndex(input, index, i, context));
					index = i + 1;
					continue;
				}
			}
		}

		// Look for a leftover keyword at the end of line.  'index' is one past
		// the previous keyword's closing brace, so this also covers the case
		// where no braced keyword was found at all (the original indexed
		// ret.get(size-1) here and crashed on an empty list).
		if (index < input.size()) {
			ret.add(new KeywordIndex(input, index, input.size() - 1, context));
		}

		for (KeywordIndex kw : ret) {
			if (!"{".equals(input.get(kw.start + 1)) ||
			    !"}".equals(input.get(kw.end))) {
				throw new InputErrorException("Keyword %s not valid, should be <keyword> { <args> }", kw.keyword);
			}
		}

		return ret;
	}

	/** In batch mode, reports a fatal error and shuts the application down. */
	public static void doError(Throwable e) {
		if (!batchRun)
			return;

		System.out.println("An error occurred in the simulation environment.  Please check inputs for an error:");
		System.out.println(e);
		GUIFrame.shutdown(1);
	}

	// Load the run file
	public static void loadConfigurationFile( String fileName) throws URISyntaxException {

		String inputTraceFileName = InputAgent.getRunName() + ".log";
		// Initializing the tracing for the model
		try {
			System.out.println( "Creating trace file" );

			URI confURI = new File(fileName).toURI();
			URI logURI = confURI.resolve(new URI(null, inputTraceFileName, null)); // The new URI here effectively escapes the file name

			// Set and open the input trace file name
			logFile = new FileEntity( logURI.getPath(), FileEntity.FILE_WRITE, false );
		}
		catch( Exception e ) {
			InputAgent.logWarning("Could not create trace file");
		}

		InputAgent.loadConfigurationFile(fileName, true);

		// At this point configuration file is loaded

		// The session is not considered to be edited after loading a configuration file
		sessionEdited = false;

		// Save and close the input trace file
		if (logFile != null) {
			if (InputAgent.numWarnings == 0 && InputAgent.numErrors == 0) {
				logFile.close();
				logFile.delete();
				logFile = new FileEntity( inputTraceFileName, FileEntity.FILE_WRITE, false );
			}
		}

		//  Check for found errors
		if( InputAgent.numErrors > 0 )
			throw new InputErrorException("%d input errors and %d warnings found, check %s", InputAgent.numErrors, InputAgent.numWarnings, inputTraceFileName);

		if (Simulation.getPrintInputReport())
			InputAgent.printInputFileKeywords();
	}

	/**
	 *
	 * @param rawFileName
	 * @param firstTime ( true => this is the main config file (run file);  false => this is an included file within main config file or another included file )
	 */
	public static void loadConfigurationFile( String rawFileName, boolean firstTime ) throws URISyntaxException {
		URI fileURI = new File(rawFileName).toURI();

		String path = fileURI.getPath();
		String dir = path.substring(0, path.lastIndexOf('/')+1);
		URI dirURI = new URI("file", dir, null);
		String fileName = path.substring(path.lastIndexOf('/') + 1, path.length());

		readStream("", dirURI, fileName);
		FileEntity.setRootDirectory(dir);

		GUIFrame.instance().setProgressText(null);
		GUIFrame.instance().setProgress(0);
	}

	/** Applies one keyword to an entity, looking the Input up by keyword name. */
	public static final void apply(Entity ent, KeywordIndex kw) {
		Input<?> in = ent.getInput(kw.keyword);
		if (in == null) {
			InputAgent.logWarning("Keyword %s could not be found for Entity %s.", kw.keyword, ent.getInputName());
			return;
		}

		InputAgent.apply(ent, in, kw);
		FrameBox.valueUpdate();
	}

	/**
	 * Parses the keyword's argument tokens into the given Input, records the
	 * edited state, and stores a re-quoted value string for later output.
	 */
	public static final void apply(Entity ent, Input<?> in, KeywordIndex kw) {
		StringVector data = new StringVector(kw.end - kw.start);
		// Arguments are the tokens between the braces (start+2 .. end-1)
		for (int i = kw.start + 2; i < kw.end; i++) {
			data.add(kw.input.get(i));
		}

		in.parse(data, kw.context);

		// Only mark the keyword edited if we have finished initial configuration
		if (InputAgent.hasAddedRecords() || InputAgent.recordEdits())
			in.setEdited(true);

		ent.updateForInput(in);

		if(ent.testFlag(Entity.FLAG_GENERATED))
			return;

		// Rebuild a single value string, re-quoting tokens that need it
		StringBuilder out = new StringBuilder(data.size() * 6);
		for (int i = 0; i < data.size(); i++) {
			String dat = data.get(i);
			if (Parser.needsQuoting(dat) && !dat.equals("{") && !dat.equals("}"))
				out.append("'").append(dat).append("'");
			else
				out.append(dat);

			if( i < data.size() - 1 )
				out.append("  ");
		}

		if(in.isEdited()) {
			ent.setFlag(Entity.FLAG_EDITED);
			sessionEdited = true;
		}
		in.setValueString(out.toString());

	}

	/** Applies one keyword, expanding Group keywords to every group member. */
	private static void processKeyword(Entity entity, KeywordIndex key) {
		if (entity.testFlag(Entity.FLAG_LOCKED))
			throw new InputErrorException("Entity: %s is locked and cannot be modified", entity.getName());

		Input<?> input = entity.getInput( key.keyword );
		if (input != null) {
			InputAgent.apply(entity, input, key);
			FrameBox.valueUpdate();
			return;
		}

		if (!(entity instanceof Group))
			throw new InputErrorException("Not a valid keyword");

		Group grp = (Group)entity;
		grp.saveGroupKeyword(key);

		// Store the keyword data for use in the edit table
		for( int i = 0; i < grp.getList().size(); i++ ) {
			Entity ent = grp.getList().get( i );
			InputAgent.apply(ent, key);
		}
	}

	private static class ConfigFileFilter implements FilenameFilter {
		@Override
		public boolean accept(File inFile, String fileName) {
			// String.endsWith() is a literal comparison, so the original
			// pattern "[cC][fF][gG]" could never match a real file name;
			// accept the .cfg extension case-insensitively instead.
			return fileName.toLowerCase().endsWith(".cfg");
		}
	}

	/** Prompts for a configuration file and loads it on a worker thread. */
	public static void load(GUIFrame gui) {
		System.out.println("Loading...");

		FileDialog chooser = new FileDialog(gui, "Load Configuration File", FileDialog.LOAD);
		chooser.setFilenameFilter(new ConfigFileFilter());
		chooser.setFile("*.cfg");
		chooser.setVisible(true); // display the dialog, waits for selection

		String file = chooser.getFile();
		if (file == null)
			return;

		String absFile = chooser.getDirectory() + file;
		absFile = absFile.trim();
		setLoadFile(gui, absFile);
	}

	/** Saves to the current configuration file, or falls back to Save As. */
	public static void save(GUIFrame gui) {
		System.out.println("Saving...");
		if( InputAgent.getConfigFileName() != null ) {
			setSaveFile(gui, InputAgent.getConfigFileName(), SAVE_ONLY );
		}
		else {
			saveAs( gui );
		}
	}

	/** Prompts for a new file name and saves the configuration to it. */
	public static void saveAs(GUIFrame gui) {
		System.out.println("Save As...");

		FileDialog chooser = new FileDialog(gui, "Save Configuration File As", FileDialog.SAVE);
		chooser.setFilenameFilter(new ConfigFileFilter());
		chooser.setFile(InputAgent.getConfigFileName());
		chooser.setVisible(true); // display the dialog, waits for selection

		String file = chooser.getFile();
		if (file == null)
			return;

		String absFile = chooser.getDirectory() + file;
		absFile = absFile.trim();
		setSaveFile(gui, absFile, FileDialog.SAVE);
	}

	/** Clears the model, loads the named configuration file, and updates the GUI. */
	public static void configure(GUIFrame gui, String configFileName) {
		try {
			gui.clear();
			InputAgent.setConfigFileName(configFileName);
			gui.updateForSimulationState(GUIFrame.SIM_STATE_UNCONFIGURED);

			try {
				InputAgent.loadConfigurationFile(configFileName);
			}
			catch( InputErrorException iee ) {
				if (!batchRun)
					ExceptionBox.instance().setErrorBox(iee.getMessage());
				else
					System.out.println( iee.getMessage() );
			}

			System.out.println("Configuration File Loaded");

			// show the present state in the user interface
			gui.setTitle( Simulation.getModelName() + " - " + InputAgent.getRunName() );
			gui.updateForSimulationState(GUIFrame.SIM_STATE_CONFIGURED);
		}
		catch( Throwable t ) {
			ExceptionBox.instance().setError(t);
		}
	}

	/**
	 *  Loads configuration file , calls GraphicSimulation.configure() method
	 */
	private static void setLoadFile(final GUIFrame gui, String fileName) {
		final String chosenFileName = fileName;
		new Thread(new Runnable() {
			@Override
			public void run() {
				File temp = new File(chosenFileName);

				if( temp.isAbsolute() ) {
					// Edits made while the file loads are not user edits
					InputAgent.setRecordEdits(false);
					InputAgent.configure(gui, chosenFileName);
					InputAgent.setRecordEdits(true);
				}
				else {
					System.out.printf("Error: loading a relative file: %s\n", chosenFileName);
				}
				GUIFrame.displayWindows(true);
				FrameBox.valueUpdate();
			}
		}).start();
	}

	/**
	 *  saves the cfg/pos file.  checks for 'save' and 'save as', recursively goes to 'save as' if 'save' is not possible.
	 *  updates runname and filename of file.
	 *  if editbox is open and unaccepted, accepts changes.
	 */
	private static void setSaveFile(GUIFrame gui, String fileName, int saveOrLoadType) {
		String configFilePath = InputAgent.getConfigFileName();

		// check ending string of filename, force cfg onto end if needed
		if (!(fileName.endsWith(".cfg"))) {
			fileName = fileName.concat(".cfg");
		}

		File temp = new File(fileName);

		// If the original configuration file is the same as the file to save, and there were no added records,
		// then do not save the file because it would be recursive, i.e. contain "include <fileName>"
		if( configFilePath.equalsIgnoreCase( fileName ) ) {
			if( !InputAgent.hasAddedRecords() ) {
				if( saveOrLoadType == FileDialog.SAVE) {
					// recursive -- if can't overwrite base file, 'save as'

					// Ask if appending to base configuration is ok
					int appendOption = JOptionPane.showConfirmDialog( null,
							"Cannot overwrite base configuration file.  Do you wish to append changes?",
							"Confirm Append",
							JOptionPane.YES_OPTION,
							JOptionPane.WARNING_MESSAGE );

					// Perform append only if yes
					if (appendOption == JOptionPane.YES_OPTION) {
						FileEntity configFile = new FileEntity( fileName, FileEntity.FILE_WRITE, true );
						configFile.write( "\n" + addedRecordMarker );
						addedRecordFound = true;
					}
					else {
						InputAgent.saveAs(gui);
						return;
					}
				}
				else {
					InputAgent.saveAs(gui);
					return;
				}
			}
			else if ( saveOrLoadType == SAVE_ONLY) {
				System.out.println("Saving...");
			}
		}

		// set root directory
		FileEntity.setRootDirectory( temp.getParentFile() );

		InputAgent.printNewConfigurationFileWithName( fileName );
		sessionEdited = false;

		InputAgent.setConfigFileName(shortName(fileName));

		// Set the title bar to match the new run name
		gui.setTitle( Simulation.getModelName() + " - " + InputAgent.getRunName() );
	}

	/*
	 * write input file keywords and values
	 *
	 * input file format:
	 *  Define Group { <Group names> }
	 *  Define <Object> { <Object names> }
	 *
	 *  <Object name> <Keyword> { < values > }
	 *
	 */
	public static void printInputFileKeywords() {
		// Create report file for the inputs
		FileEntity inputReportFile;
		String inputReportFileName = InputAgent.getReportDirectory() + InputAgent.getRunName() + ".inp";

		if( FileEntity.fileExists( inputReportFileName ) ) {
			inputReportFile = new FileEntity( inputReportFileName, FileEntity.FILE_WRITE, false );
			inputReportFile.flush();
		}
		else {
			inputReportFile = new FileEntity( inputReportFileName, FileEntity.FILE_WRITE, false );
		}

		// Loop through the entity classes printing Define statements
		for (ObjectType type : ObjectType.getAll()) {
			Class<? extends Entity> each = type.getJavaClass();

			// Loop through the instances for this entity class
			int count = 0;
			for (Entity ent : Entity.getInstanceIterator(each)) {
				boolean hasinput = false;

				for (Input<?> in : ent.getEditableInputs()) {
					// If the keyword has been used, then add a record to the report
					if (in.getValueString().length() != 0) {
						hasinput = true;
						count++;
						break;
					}
				}

				if (hasinput) {
					String entityName = ent.getInputName();
					// Entities are grouped five per Define line
					if ((count - 1) % 5 == 0) {
						inputReportFile.putString("Define");
						inputReportFile.putTab();
						inputReportFile.putString(type.getInputName());
						inputReportFile.putTab();
						inputReportFile.putString("{ " + entityName);
						inputReportFile.putTab();
					}
					else if ((count - 1) % 5 == 4) {
						inputReportFile.putString(entityName + " }");
						inputReportFile.newLine();
					}
					else {
						inputReportFile.putString(entityName);
						inputReportFile.putTab();
					}
				}
			}

			// NOTE(review): this guard checks a *fresh* instance iterator; it
			// looks inverted (closes the brace only when the class has no
			// instances) but is preserved as-is — confirm against the report
			// output before changing.
			if (!Entity.getInstanceIterator(each).hasNext()) {
				if (count % 5 != 0) {
					inputReportFile.putString(" }");
					inputReportFile.newLine();
				}
				inputReportFile.newLine();
			}
		}

		for (ObjectType type : ObjectType.getAll()) {
			Class<? extends Entity> each = type.getJavaClass();

			// Get the list of instances for this entity class
			// sort the list alphabetically
			ArrayList<? extends Entity> cloneList = Entity.getInstancesOf(each);

			// Print the entity class name to the report (in the form of a comment)
			if (cloneList.size() > 0) {
				inputReportFile.putString("\" " + each.getSimpleName() + " \"");
				inputReportFile.newLine();
				inputReportFile.newLine(); // blank line below the class name heading
			}

			Collections.sort(cloneList, new Comparator<Entity>() {
				@Override
				public int compare(Entity a, Entity b) {
					return a.getInputName().compareTo(b.getInputName());
				}
			});

			// Loop through the instances for this entity class
			for (int j = 0; j < cloneList.size(); j++) {

				// Make sure the clone is an instance of the class (and not an instance of a subclass)
				if (cloneList.get(j).getClass() == each) {
					Entity ent = cloneList.get(j);
					String entityName = ent.getInputName();
					boolean hasinput = false;

					// Loop through the editable keywords for this instance
					for (Input<?> in : ent.getEditableInputs()) {
						// If the keyword has been used, then add a record to the report
						if (in.getValueString().length() != 0) {

							if (!in.getCategory().contains("Graphics")) {
								hasinput = true;
								inputReportFile.putTab();
								inputReportFile.putString(entityName);
								inputReportFile.putTab();
								inputReportFile.putString(in.getKeyword());
								inputReportFile.putTab();
								if (in.getValueString().lastIndexOf("{") > 10) {
									String[] item1Array;
									item1Array = in.getValueString().trim().split(" }");

									inputReportFile.putString("{ " + item1Array[0] + " }");
									for (int l = 1; l < (item1Array.length); l++) {
										inputReportFile.newLine();
										inputReportFile.putTabs(5);
										inputReportFile.putString(item1Array[l] + " } ");
									}
									inputReportFile.putString("	}");
								}
								else {
									inputReportFile.putString("{ " + in.getValueString() + " }");
								}
								inputReportFile.newLine();
							}
						}
					}

					// Put a blank line after each instance
					if (hasinput) {
						inputReportFile.newLine();
					}
				}
			}
		}

		// Close out the report
		inputReportFile.flush();
		inputReportFile.close();

	}

	/** Flushes and closes the input trace log, deleting it when it is clean. */
	public static void closeLogFile() {
		if (logFile == null)
			return;

		logFile.flush();
		logFile.close();

		if (numErrors ==0 && numWarnings == 0) {
			logFile.delete();
		}
		logFile = null;
	}

	private static final String errPrefix = "*** ERROR *** %s%n";
	private static final String inpErrPrefix = "*** INPUT ERROR *** %s%n";
	private static final String wrnPrefix = "***WARNING*** %s%n";

	public static int numErrors() {
		return numErrors;
	}

	public static int numWarnings() {
		return numWarnings;
	}

	/** Echoes a tokenized record into the trace log, breaking at comment tokens. */
	private static void echoInputRecord(ArrayList<String> tokens) {
		if (logFile == null)
			return;

		StringBuilder line = new StringBuilder();
		for (int i = 0; i < tokens.size(); i++) {
			line.append("  ").append(tokens.get(i));
			if (tokens.get(i).startsWith("\"")) {
				logFile.write(line.toString());
				logFile.newLine();
				line.setLength(0);
			}
		}

		// Leftover input
		if (line.length() > 0) {
			logFile.write(line.toString());
			logFile.newLine();
		}
		logFile.flush();
	}

	private static void logBadInput(ArrayList<String> tokens, String msg) {
		InputAgent.echoInputRecord(tokens);
		InputAgent.logError("%s", msg);
	}

	/** Formats a message, prints it to stdout, and appends it to the trace log. */
	public static void logMessage(String fmt, Object... args) {
		String msg = String.format(fmt, args);
		System.out.println(msg);
		if (logFile == null)
			return;

		logFile.write(msg);
		logFile.newLine();
		logFile.flush();
	}

	/** Writes an indented trace line for an entity, with a TIME header on time change. */
	public static void trace(int indent, Entity ent, String meth, String... text) {
		// Create an indent string to space the lines
		StringBuilder ind = new StringBuilder("");
		for (int i = 0; i < indent; i++)
			ind.append("   ");
		String spacer = ind.toString();

		// Print a TIME header every time time has advanced
		double traceTime = ent.getCurrentTime();
		if (lastTimeForTrace != traceTime) {
			System.out.format(" \nTIME = %.5f\n", traceTime);
			lastTimeForTrace = traceTime;
		}

		// Output the traces line(s)
		System.out.format("%s%s %s\n", spacer, ent.getName(), meth);
		for (String line : text) {
			System.out.format("%s%s\n", spacer, line);
		}

		System.out.flush();
	}

	public static void logWarning(String fmt, Object... args) {
		numWarnings++;
		String msg = String.format(fmt, args);
		InputAgent.logMessage(wrnPrefix, msg);
	}

	public static void logError(String fmt, Object... args) {
		numErrors++;
		String msg = String.format(fmt, args);
		InputAgent.logMessage(errPrefix, msg);
	}

	public static void logInpError(String fmt, Object... args) {
		numErrors++;
		String msg = String.format(fmt, args);
		InputAgent.logMessage(inpErrPrefix, msg);
	}

	/** Applies a value string to a known Input, wrapping it in braces for parsing. */
	public static void processEntity_Keyword_Value(Entity ent, Input<?> in, String value){
		ArrayList<String> tokens = new ArrayList<String>();
		tokens.add(in.getKeyword());
		tokens.add("{");
		Parser.tokenize(tokens, value, true);
		tokens.add("}");

		KeywordIndex kw = new KeywordIndex(tokens, 0, tokens.size() - 1, null);
		InputAgent.processKeyword(ent, kw);
	}

	/** Applies a value string to a keyword by name, wrapping it in braces for parsing. */
	public static void processEntity_Keyword_Value(Entity ent, String keyword, String value){
		ArrayList<String> tokens = new ArrayList<String>();
		tokens.add(keyword);
		tokens.add("{");
		Parser.tokenize(tokens, value, true);
		tokens.add("}");

		KeywordIndex kw = new KeywordIndex(tokens, 0, tokens.size() - 1, null);
		InputAgent.processKeyword(ent, kw);
	}

	/**
	 * Print out a configuration file with all the edited changes attached
	 */
	public static void printNewConfigurationFileWithName( String fileName ) {

		ArrayList<String> preAddedRecordLines = new ArrayList<String>();
		String configFilePath = FileEntity.getRootDirectory() + System.getProperty( "file.separator" ) + InputAgent.getConfigFileName();
		if( InputAgent.hasAddedRecords() && FileEntity.fileExists( configFilePath ) ) {
			// Store the original configuration file lines up to added records
			try {
				BufferedReader in = new BufferedReader( new FileReader( configFilePath ) );

				String line;
				while ( ( line = in.readLine() ) != null ) {
					if ( line.startsWith( addedRecordMarker ) ) {
						break;
					}
					else {
						preAddedRecordLines.add( line );
					}
				}
				in.close();
			}
			catch ( Exception e ) {
				throw new ErrorException( e );
			}
		}

		FileEntity file = new FileEntity( fileName, FileEntity.FILE_WRITE, false );

		// include the original configuration file
		if (!InputAgent.hasAddedRecords()) {
			file.format( "\" File: %s%n%n", file.getFileName() );
			file.format( "include %s%n%n", InputAgent.getConfigFileName() );
		}
		else {
			for( int i=0; i < preAddedRecordLines.size(); i++ ) {
				String line = preAddedRecordLines.get( i );
				if( line.startsWith( "\" File: " ) ) {
					file.format( "\" File: %s%n", file.getFileName() );
				}
				else {
					file.format("%s%n", line);
				}
			}
		}

		file.format("%s%n", addedRecordMarker);
		addedRecordFound = true;

		// Determine all the new classes that were created
		ArrayList<Class<? extends Entity>> newClasses = new ArrayList<Class<? extends Entity>>();
		for (int i = 0; i < Entity.getAll().size(); i++) {
			Entity ent = Entity.getAll().get(i);
			if (!ent.testFlag(Entity.FLAG_ADDED))
				continue;

			if (!newClasses.contains(ent.getClass()))
				newClasses.add(ent.getClass());
		}

		// Print the define statements for each new class
		for( Class<? extends Entity> newClass : newClasses ) {
			for (ObjectType o : ObjectType.getAll()) {
				if (o.getJavaClass() == newClass) {
					file.format("Define %s {", o.getInputName());
					break;
				}
			}

			for (int i = 0; i < Entity.getAll().size(); i++) {
				Entity ent = Entity.getAll().get(i);
				if (!ent.testFlag(Entity.FLAG_ADDED))
					continue;

				if (ent.getClass() == newClass)
					file.format(" %s ", ent.getInputName());
			}
			file.format("}%n");
		}

		// List all the changes that were saved for each edited entity
		for (int i = 0; i < Entity.getAll().size(); i++) {
			Entity ent = Entity.getAll().get(i);
			if (!ent.testFlag(Entity.FLAG_EDITED))
				continue;

			writeInputsOnFile_ForEntity( file, ent );
		}

		file.flush();
		file.close();

	}

	/** Writes all edited (non-file) keywords of one entity to the config file. */
	static void writeInputsOnFile_ForEntity( FileEntity file, Entity ent ) {
		// Write new configuration file for non-appendable keywords
		file.format("\n");
		for( int j=0; j < ent.getEditableInputs().size(); j++ ) {
			Input<?> in = ent.getEditableInputs().get( j );

			if (!in.isEdited())
				continue;

			// File inputs need their URI relativized against the output file
			if (in instanceof FileInput) {
				writeFileInput((FileInput)in, file, ent);
				continue;
			}

			String value = in.getValueString();
			ArrayList<String> tokens = new ArrayList<String>();
			Parser.tokenize(tokens, value);
			if (!InputAgent.enclosedByBraces(tokens))
				file.format("%s %s { %s }%n", ent.getInputName(), in.getKeyword(), value);
			else
				file.format("%s %s %s%n", ent.getInputName(), in.getKeyword(), value);
		}
	}

	/** Writes a FileInput keyword, preferring a &lt;res&gt; or relative path form. */
	static private void writeFileInput(FileInput in, FileEntity file, Entity ent) {
		URI fileURI = file.getFileURI();
		URI inputURI = in.getValue();

		String resString = resRoot.toString();
		String inputString = inputURI.toString();

		// Check if this is a resource
		if (inputString.indexOf(resString) == 0) {
			file.format("%s %s { '<res>/%s' }%n", ent.getInputName(), in.getKeyword(), inputString.substring(resString.length()));
			return;
		}

		// Try to relativize this URL to the current file
		try {
			String filePath = fileURI.getPath();
			URI dirURI = new URI(fileURI.getScheme(), filePath.substring(0, filePath.lastIndexOf('/') + 1), null);
			inputURI = dirURI.relativize(inputURI);
		}
		catch (Exception ex) {
			// We failed, just spit out an absolute URI
		}

		file.format("%s %s { '%s' }%n", ent.getInputName(), in.getKeyword(), inputURI.getPath());
	}

	/** Loads the bundled default model configuration. */
	public static void loadDefault() {
		// Read the default configuration file
		InputAgent.readResource("inputs/default.cfg");
		sessionEdited = false;
	}

	/**
	 * Split an input (list of strings) down to a single level of nested braces, this may then be called again for
	 * further nesting.
	 * @param input
	 * @return
	 */
	public static ArrayList<ArrayList<String>> splitForNestedBraces(List<String> input) {
		ArrayList<ArrayList<String>> inputs = new ArrayList<ArrayList<String>>();

		int braceDepth = 0;
		ArrayList<String> currentLine = null;
		for (int i = 0; i < input.size(); i++) {
			if (currentLine == null)
				currentLine = new ArrayList<String>();

			currentLine.add(input.get(i));
			if (input.get(i).equals("{")) {
				braceDepth++;
				continue;
			}

			if (input.get(i).equals("}")) {
				braceDepth--;	// restored decrement so depth-0 closes the group
				if (braceDepth == 0) {
					inputs.add(currentLine);
					currentLine = null;
					continue;
				}
			}
		}

		return inputs;
	}

	/**
	 * This is the heart of path handling, find a file relative to a root 'context' and then check that
	 * the normalized URI matches the jail prefix, otherwise reject it
	 * @param context
	 * @param path
	 * @param jailPrefix
	 * @return the resolved URI, or null when it is unresolvable or escapes the jail
	 */
	public static URI getFileURI(URI context, String path, String jailPrefix) throws URISyntaxException {
		int openBrace = path.indexOf('<');
		int closeBrace = path.indexOf('>');
		int firstSlash = path.indexOf('/');
		URI ret = null;
		if (openBrace == 0 && closeBrace != -1 && firstSlash == closeBrace + 1) {
			// Special path format, expand the resource
			String specPath = path.substring(openBrace + 1, closeBrace);
			if (specPath.equals("res")) {
				ret = new URI(resRoot.getScheme(), resRoot.getSchemeSpecificPart() + path.substring(closeBrace+2), null).normalize();
			}
		}
		else {
			URI pathURI = new URI(null, path, null).normalize();

			if (context != null) {
				if (context.isOpaque()) {
					// Things are going to get messy in here
					URI schemeless = new URI(null, context.getSchemeSpecificPart(), null);
					URI resolved = schemeless.resolve(pathURI).normalize();

					// Note: we are using the one argument constructor here because the 'resolved' URI is already encoded
					// and we do not want to double-encode (and schemes should never need encoding, I hope)
					ret = new URI(context.getScheme() + ":" + resolved.toString());
				}
				else {
					ret = context.resolve(pathURI).normalize();
				}
			}
			else {
				// We have no context, so append a 'file' scheme if necessary
				if (pathURI.getScheme() == null) {
					ret = new URI("file", pathURI.getPath(), null);
				}
				else {
					ret = pathURI;
				}
			}
		}

		// An unrecognized <spec> prefix (anything other than <res>) leaves ret
		// null; the original dereferenced it below and threw an NPE.
		if (ret == null)
			return null;

		if (jailPrefix != null && ret.toString().indexOf(jailPrefix) != 0) {
			// pass 'context' directly to %s so a null context prints "null"
			// instead of throwing on context.toString()
			System.out.printf("Failed jail test: %s in jail: %s context: %s\n", ret.toString(), jailPrefix, context);
			return null; // This resolved URI is not in our jail
		}

		return ret;
	}
}
package com.jaamsim.input; import java.awt.FileDialog; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import javax.swing.JOptionPane; import com.jaamsim.ui.ExceptionBox; import com.jaamsim.ui.FrameBox; import com.sandwell.JavaSimulation.Entity; import com.sandwell.JavaSimulation.ErrorException; import com.sandwell.JavaSimulation.FileEntity; import com.sandwell.JavaSimulation.Group; import com.sandwell.JavaSimulation.Input; import com.sandwell.JavaSimulation.InputErrorException; import com.sandwell.JavaSimulation.ObjectType; import com.sandwell.JavaSimulation.Palette; import com.sandwell.JavaSimulation.Simulation; import com.sandwell.JavaSimulation.StringVector; import com.sandwell.JavaSimulation.Util; import com.sandwell.JavaSimulation.Vector; import com.sandwell.JavaSimulation3D.DisplayEntity; import com.sandwell.JavaSimulation3D.GUIFrame; public class InputAgent { private static final String addedRecordMarker = "\" *** Added Records ***"; private static int numErrors = 0; private static int numWarnings = 0; private static FileEntity logFile; private static double lastTimeForTrace; private static String configFileName; private static boolean batchRun; private static boolean sessionEdited; private static boolean addedRecordFound; private static boolean endOfFileReached; // notes end of cfg files // ConfigurationFile load and save variables final protected static int SAVE_ONLY = 2; private static final String INP_ERR_DEFINEUSED = "The name: %s has already been used and is a %s"; private static boolean printInputReport; private static String reportDirectory; static { addedRecordFound = false; 
sessionEdited = false; endOfFileReached = false; batchRun = false; configFileName = null; reportDirectory = ""; lastTimeForTrace = -1.0d; printInputReport = false; } public static void clear() { logFile = null; numErrors = 0; numWarnings = 0; addedRecordFound = false; sessionEdited = false; configFileName = null; reportDirectory = ""; lastTimeForTrace = -1.0d; printInputReport = false; } public static void setPrintInputs(boolean print) { printInputReport = print; } public static String getReportDirectory() { return reportDirectory; } public static void setReportDirectory(String dir) { reportDirectory = Util.getAbsoluteFilePath(dir); if (!reportDirectory.substring(reportDirectory.length() - 1).equals("\\")) reportDirectory = reportDirectory + "\\"; // Create the report directory if it does not already exist // This code should probably be added to FileEntity someday to // create necessary folders on demand. File f = new File(reportDirectory); f.mkdirs(); } public static void setConfigFileName(String name) { configFileName = name; } public static String getConfigFileName() { return configFileName; } public static String getRunName() { String runName; if( InputAgent.getConfigFileName() == null ) { runName = ""; } else { int index = Util.fileShortName( InputAgent.getConfigFileName() ).indexOf( "." 
); if( index > -1 ) { runName = Util.fileShortName( InputAgent.getConfigFileName() ).substring( 0, index ); } else { runName = Util.fileShortName( InputAgent.getConfigFileName() ); } } return runName; } public static boolean hasAddedRecords() { return addedRecordFound; } public static boolean isSessionEdited() { return sessionEdited; } public static void setBatch(boolean batch) { batchRun = batch; } public static boolean getBatch() { return batchRun; } /** * returns true if the first and last tokens are matched braces **/ public static boolean enclosedByBraces(ArrayList<String> tokens) { if(tokens.size() < 2 || tokens.indexOf("{") < 0) // no braces return false; int level =1; int i = 1; for(String each: tokens.subList(1, tokens.size())) { if(each.equals("{")) { level++; } if(each.equals("}")) { level // Matching close brace found if(level == 0) break; } i++; } if(level == 0 && i == tokens.size()-1) { return true; } return false; } private static int getBraceDepth(ArrayList<String> tokens, int startingBraceDepth, int startingIndex) { int braceDepth = startingBraceDepth; for (int i = startingIndex; i < tokens.size(); i++) { String token = tokens.get(i); if (token.equals("{")) braceDepth++; if (token.equals("}")) braceDepth if (braceDepth < 0) { InputAgent.logBadInput(tokens, "Extra closing braces found"); tokens.clear(); } if (braceDepth > 2) { InputAgent.logBadInput(tokens, "Maximum brace depth (2) exceeded"); tokens.clear(); } } return braceDepth; } private static URI pwdPath; private static URI pwdRoot; private static URI resRoot; private static URI resPath; private static final String res = "/resources/inputs/"; static { // Walk up the parent list until we find a parentless entry, call that // the 'root' File f = new File(System.getProperty("user.dir")); File par = f; while (true) { File t = par.getParentFile(); if (t == null) { pwdRoot = par.toURI(); break; } par = t; } pwdPath = pwdRoot.relativize(f.toURI()); try { // locate the resource folder, and create 
resRoot = InputAgent.class.getResource(res).toURI(); } catch (URISyntaxException e) {} resPath = URI.create(""); } public static final void readResource(String res) { if (res == null) return; readStream(resRoot, resPath, res); } public static final boolean readStream(URI root, URI path, String file) { URI resolved = path.resolve(file); resolved.normalize(); if (resolved.getRawPath().contains("../")) { InputAgent.logWarning("Unable to resolve path %s%s - %s", root.toString(), path.toString(), file); return false; } URL t = null; try { t = new URI(root.toString() + resolved.toString()).toURL(); } catch (MalformedURLException e) {} catch (URISyntaxException e) {} if (t == null) { InputAgent.logWarning("Unable to resolve path %s%s - %s", root.toString(), path.toString(), file); return false; } readURL(t); return true; } public static void readURL(URL url) { if (url == null) return; BufferedReader buf = null; try { InputStream in = url.openStream(); buf = new BufferedReader(new InputStreamReader(in)); } catch (IOException e) { InputAgent.logWarning("Could not read from %s", url.toString()); return; } try { ArrayList<String> record = new ArrayList<String>(); int braceDepth = 0; while (true) { String line = buf.readLine(); // end of file, stop reading if (line == null) break; if ( line.trim().equalsIgnoreCase( addedRecordMarker ) ) { addedRecordFound = true; } int previousRecordSize = record.size(); Parser.tokenize(record, line); braceDepth = InputAgent.getBraceDepth(record, braceDepth, previousRecordSize); if( braceDepth != 0 ) continue; Parser.removeComments(record); InputAgent.processRecord(url, record); record.clear(); } // Leftover Input at end of file if (record.size() > 0) InputAgent.logBadInput(record, "Leftover input at end of file"); buf.close(); } catch (IOException e) { // Make best effort to ensure it closes try { buf.close(); } catch (IOException e2) {} } } private static void processRecord(URL url, ArrayList<String> record) { 
//InputAgent.echoInputRecord(record);

	// Empty records are silently ignored
	if (record.size() == 0)
		return;

	// Dispatch on the record's leading token: INCLUDE, DEFINE, or a keyword record
	if (record.get(0).equalsIgnoreCase("INCLUDE")) {
		InputAgent.processIncludeRecord(url, record);
		return;
	}

	if (record.get(0).equalsIgnoreCase("DEFINE")) {
		InputAgent.processDefineRecord(record);
		return;
	}

	// Otherwise assume it is a Keyword record
	InputAgent.processKeywordRecord(record);
}

/**
 * Handles an "Include <File>" record by resolving the file name against the
 * URL of the file currently being read, then reading the included file.
 */
private static void processIncludeRecord(URL baseURL, ArrayList<String> record) {
	if (record.size() != 2) {
		InputAgent.logError("Bad Include record, should be: Include <File>");
		return;
	}

	// Ensure the include filename is well formed
	URL finalURL = null;
	try {
		// normalize Windows separators before building a URI
		URI incFile = new URI(record.get(1).replaceAll("\\\\", "/"));

		// Construct a base file in case this URI is relative and split at ! to
		// account for a jar:file:<jarfilename>!<internalfilename> URL
		int bangIndex = baseURL.toString().lastIndexOf("!") + 1;
		String prefix = baseURL.toString().substring(0, bangIndex);
		String folder = baseURL.toString().substring(bangIndex);

		URI folderURI = new URI(folder).resolve(incFile);
		// Remove all remaining relative path directives ../
		String noRelative = folderURI.toString().replaceAll("\\.\\./", "");
		finalURL = new URL(prefix + noRelative);
	}
	catch (NullPointerException e) {}
	catch (URISyntaxException e) {}
	catch (MalformedURLException e) {}
	finally {
		if (finalURL == null) {
			InputAgent.logError("Unable to parse filename: %s", record.get(1));
			return;
		}
	}

	InputAgent.readURL(finalURL);
}

/**
 * Handles a "Define <Type> { <names>... }" record: resolves the prototype
 * class for <Type> and creates one entity per listed name.
 */
private static void processDefineRecord(ArrayList<String> record) {
	if (record.size() < 5 ||
	    !record.get(2).equals("{") ||
	    !record.get(record.size() - 1).equals("}")) {
		InputAgent.logError("Bad Define record, should be: Define <Type> { <names>... }");
		return;
	}

	Class<? extends Entity> proto = null;
	try {
		// Palette and ObjectType are special-cased; all other type names are
		// resolved through Input.parseEntityType()
		if( record.get( 1 ).equalsIgnoreCase( "Palette" ) ) {
			proto = Palette.class;
		}
		else if( record.get( 1 ).equalsIgnoreCase( "ObjectType" ) ) {
			proto = ObjectType.class;
		}
		else {
			proto = Input.parseEntityType(record.get(1));
		}
	}
	catch (InputErrorException e) {
		InputAgent.logError("%s", e.getMessage());
		return;
	}

	// Loop over all the new Entity names
	for (int i = 3; i < record.size() - 1; i++) {
		InputAgent.defineEntity(proto, record.get(i), addedRecordFound);
	}
}

/**
 * Like defineEntity(), but will generate a unique name if a name collision exists
 * @param proto
 * @param key
 * @param addedEntity
 * @return
 */
public static <T extends Entity> T defineEntityWithUniqueName(Class<T> proto, String key, boolean addedEntity) {
	// try key-1, key-2, ... until an unused name is found
	int entityNum = 1;
	while(true) {
		String name = String.format("%s-%d", key, entityNum);
		if (Entity.getNamedEntity(name) == null) {
			return defineEntity(proto, name, addedEntity);
		}
		entityNum++;
	}
}

/**
 * if addedEntity is true then this is an entity defined
 * by user interaction or after added record flag is found;
 * otherwise, it is from an input file define statement
 * before the model is configured
 * @param proto
 * @param key
 * @param addedEntity
 */
public static <T extends Entity> T defineEntity(Class<T> proto, String key, boolean addedEntity) {
	// Reject duplicate names up front
	Entity existingEnt = Input.tryParseEntity(key, Entity.class);
	if (existingEnt != null) {
		InputAgent.logError(INP_ERR_DEFINEUSED, key, existingEnt.getClass().getSimpleName());
		return null;
	}

	T ent = null;
	try {
		ent = proto.newInstance();
		if (addedEntity) {
			// mark user-added entities so they are written back on save
			ent.setFlag(Entity.FLAG_ADDED);
			sessionEdited = true;
		}
	}
	catch (InstantiationException e) {}
	catch (IllegalAccessException e) {}
	finally {
		if (ent == null) {
			InputAgent.logError("Could not create new Entity: %s", key);
			return null;
		}
	}

	ent.setInputName(key);
	return ent;
}

/**
 * Processes a keyword record of the form:
 * <entity-name> <keyword> { <args> } <keyword> { <args> } ...
 */
private static void processKeywordRecord(ArrayList<String> record) {
	Entity ent = Input.tryParseEntity(record.get(0), Entity.class);
	if (ent == null) {
InputAgent.logError("Could not find Entity: %s", record.get(0)); return; } ArrayList<ArrayList<String>> keywords = InputAgent.splitKeywords(record); for (ArrayList<String> keyword : keywords) { if (keyword.size() < 3 || !keyword.get(1).equals("{") || !keyword.get(keyword.size() - 1).equals("}")) { InputAgent.logError("Keyword not valid, should be <keyword> { <args> }"); continue; } String key = keyword.get(0); StringVector args = new StringVector(keyword.size() - 3); for (int i = 2; i < keyword.size() - 1; i++) { args.add(keyword.get(i)); } try { InputAgent.processKeyword(ent, args, key); } catch (Throwable e) { InputAgent.logError("Exception thrown from Entity: %s for keyword:%s - %s", ent.getInputName(), key, e.getMessage()); } } } private static ArrayList<ArrayList<String>> splitKeywords(ArrayList<String> input) { ArrayList<ArrayList<String>> inputs = new ArrayList<ArrayList<String>>(); int braceDepth = 0; ArrayList<String> currentLine = null; for (int i = 1; i < input.size(); i++) { if (currentLine == null) currentLine = new ArrayList<String>( input.size() ); currentLine.add(input.get(i)); if (input.get(i).equals("{")) { braceDepth++; continue; } if (input.get(i).equals("}")) { braceDepth if (braceDepth == 0) { inputs.add(currentLine); currentLine = null; continue; } } } return inputs; } public static void doError(Throwable e) { if (!batchRun) return; System.out.println("An error occurred in the simulation environment. 
Please check inputs for an error:"); System.out.println(e); GUIFrame.shutdown(1); } // Load the run file public static void loadConfigurationFile( String fileName) { String inputTraceFileName = InputAgent.getRunName() + ".log"; // Initializing the tracing for the model try { System.out.println( "Creating trace file" ); // Set and open the input trace file name logFile = new FileEntity( inputTraceFileName, FileEntity.FILE_WRITE, false ); } catch( Exception e ) { throw new ErrorException( "Could not create trace file" ); } InputAgent.loadConfigurationFile(fileName, true); // At this point configuration file is loaded // The session is not considered to be edited after loading a configuration file sessionEdited = false; // Save and close the input trace file if (logFile != null) { if (InputAgent.numWarnings == 0 && InputAgent.numErrors == 0) { logFile.close(); logFile.delete(); logFile = new FileEntity( inputTraceFileName, FileEntity.FILE_WRITE, false ); } } // Check for found errors if( InputAgent.numErrors > 0 ) throw new InputErrorException("%d input errors found, check log file", InputAgent.numErrors); if (printInputReport) InputAgent.printInputFileKeywords(); } /** * * @param fileName * @param firstTime ( true => this is the main config file (run file); false => this is an included file within main config file or another included file ) */ public static void loadConfigurationFile( String fileName, boolean firstTime ) { //System.out.println( "load configuration file " + fileName ); FileEntity file; Vector record; // If the file does not exist, write an error and exit if( !FileEntity.fileExists( fileName ) ) { System.out.println( (("Error -- The input file " + fileName) + " was not found ") ); GUIFrame.shutdown(0); } // Open the file file = new FileEntity( fileName, FileEntity.FILE_READ, false ); String mainRootDirectory = null; String originalJarFileRootDirectory = null; if( firstTime ) { // Store the directory of the first input file file.setRootDirectory(); } 
else { // Save the directory of the first file mainRootDirectory = FileEntity.getRootDirectory(); // Switch to the directory of the current input file file.setRootDirectory(); // Save the directory of the first file within the jar file originalJarFileRootDirectory = FileEntity.getJarFileRootDirectory(); } // Initialize the input file file.toStart(); GUIFrame.instance().setProgressText(file.getFileName()); // For each line in the file while( true ) { // Read the next line to record record = getNextParsedRecord( file ); // System.out.println( record.toString() ); // Determine the amount of file read and update the progress gauge int per = (int)(((double)file.getNumRead()) / ((double)file.getLength()) * 100.0); GUIFrame.instance().setProgress( per ); // When end-of-file is reached, record.size() == 0 if( endOfFileReached ) { break; } // Process this line if it is not empty if ( record.size() > 0 ) { // If there is an included file, LoadConfigurationFile is run for that (This is recursive) InputAgent.readRecord(record, file); } } // Reset the progress bar to zero and remove its label GUIFrame.instance().setProgressText(null); GUIFrame.instance().setProgress(0); // Close the file file.close(); // Restore to the directory of the first input file if ( ! 
firstTime ) { FileEntity.setRootDirectory( mainRootDirectory ); FileEntity.setJarFileRootDirectory( originalJarFileRootDirectory ); } } /** * Reads record, either as a default, define statement, include, or keyword * @param record * @param file */ private static void readRecord(Vector record, FileEntity file) { if(record.size() < 2){ InputAgent.logError("Invalid input line - missing keyword or parameter"); return; } try { if( "DEFINE".equalsIgnoreCase( (String)record.get( 0 ) ) ) { ArrayList<String> tempCopy = new ArrayList<String>(record.size()); for (int i = 0; i < record.size(); i++) tempCopy.add((String)record.get(i)); InputAgent.processDefineRecord(tempCopy); } // Process other files else if( "INCLUDE".equalsIgnoreCase( (String)record.get( 0 ) ) ) { if( record.size() == 2 ) { if( FileEntity.fileExists( (String)record.get( 1 ) ) ) { // Load the included file and process its records first InputAgent.loadConfigurationFile( (String)record.get( 1 ), false ); GUIFrame.instance().setProgressText(file.getFileName()); } else { InputAgent.logError("File not found: %s", (String)record.get(1)); } } else { InputAgent.logError("There must be exactly two entries in an Include record"); } } // is a keyword else { InputAgent.processData(record); } } catch( InputErrorException iee ) { InputAgent.logError( iee.getMessage() ); } } // Read the next line of the file protected static Vector getNextParsedRecord(FileEntity file) { Vector record = new Vector(); int noOfUnclosedBraces = 0; do { Vector nextLine = file.readAndParseRecord(); InputAgent.echoInput(nextLine); if (nextLine.size() == 0) { endOfFileReached = true; } else { endOfFileReached = false; } // Set flag if input records added through the EditBox interface are found if ( !(endOfFileReached) && ( ((String) nextLine.get( 0 )).equalsIgnoreCase( addedRecordMarker ) ) ) { addedRecordFound = true; } Util.discardComment( nextLine ); // Count braces and allow input with a missing space following an opening brace and/or a missing 
space preceding a closing brace for (int i = 0; i < nextLine.size(); i++) { String checkRecord = (String)nextLine.get( i ); Vector parsedString = new Vector( nextLine.size() ); // Check for braces for (int j=0; j<checkRecord.length(); j++) { // '(' & ')' are not allowed in the input file if( checkRecord.charAt(j) == '(' || checkRecord.charAt(j) == ')' ) { throw new ErrorException( "\n\"" + checkRecord.charAt(j) + "\"" + " is not allowed in the input file: \n" + nextLine + "\n" + FileEntity.getRootDirectory() + file.getFileName() ); } if (checkRecord.charAt(j) == '{') { noOfUnclosedBraces++; parsedString.add("{"); } else if (checkRecord.charAt(j) == '}') { noOfUnclosedBraces parsedString.add("}"); } else { // no brace is found, assume it is a whole word until the next brace StringBuffer stringDump = new StringBuffer( checkRecord.length() ); // iterate through for ( int k = j; k<checkRecord.length(); k++ ) { // if a brace is found, end checking this word if ( checkRecord.charAt(k) == '{' || checkRecord.charAt(k) == '}' ) { k = checkRecord.length(); } // otherwise, make the word else { stringDump.append( checkRecord.charAt(k) ); } } j += stringDump.length() - 1; parsedString.add(stringDump.toString()); } } // Add brackets as separate entries if (parsedString.size() > 1 ) { nextLine.remove( i ); nextLine.addAll( i , parsedString ); i = i + parsedString.size() - 1; } } record.addAll(nextLine); } while ( ( noOfUnclosedBraces != 0 ) && ( !endOfFileReached ) ); if( noOfUnclosedBraces != 0 ) { InputAgent.logError("Missing closing brace"); } return record; } public static final void apply(Entity ent, Input<?> in, StringVector data) { in.parse(data); ent.updateForInput(in); } public static final void apply(Entity ent, StringVector data, String keyword) throws InputErrorException { Input<?> in = ent.getInput(keyword); if (in != null) { InputAgent.apply(ent, in, data); FrameBox.valueUpdate(); } else { ent.readData_ForKeyword(data, keyword); FrameBox.valueUpdate(); } } private 
static void processKeyword( Entity entity, StringVector recordCmd, String keyword) { if (keyword == null) throw new InputErrorException("The keyword is null."); if (entity.testFlag(Entity.FLAG_LOCKED)) throw new InputErrorException("Entity: %s is locked and cannot be modified", entity.getName()); try { Input<?> input = entity.getInput( keyword ); if( input != null && input.isAppendable() ) { ArrayList<StringVector> splitData = Util.splitStringVectorByBraces(recordCmd); for ( int i = 0; i < splitData.size(); i++ ) { InputAgent.apply(entity, input, splitData.get(i)); } } else { InputAgent.apply(entity, recordCmd, keyword); } // Create a list of entities to update in the edit table ArrayList<Entity> updateList = null; if (entity instanceof Group && input == null) { updateList = ((Group)entity).getList(); } else { updateList = new ArrayList<Entity>(1); updateList.add(entity); } // Store the keyword data for use in the edit table for( int i = 0; i < updateList.size(); i++ ) { Entity ent = updateList.get( i ); Input<?> in = ent.getInput(keyword); if (in != null) { InputAgent.updateInput(ent, in, recordCmd); } // The keyword is not on the editable keyword list else { InputAgent.logWarning("Keyword %s is obsolete. Please replace the Keyword. Refer to the manual for more detail.", keyword); } } } catch ( InputErrorException e ) { InputAgent.logError("Entity: %s Keyword: %s - %s", entity.getName(), keyword, e.getMessage()); throw e; } } public static void processData(Entity ent, Vector rec) { if( rec.get( 1 ).toString().trim().equals( "{" ) ) { InputAgent.logError("A keyword expected after: %s", ent.getName()); } ArrayList<StringVector> multiCmds = InputAgent.splitMultipleCommands(rec); // Process each command for( int i = 0; i < multiCmds.size(); i++ ) { StringVector cmd = multiCmds.get(i); String keyword = cmd.remove(0); // Process the record InputAgent.processKeyword(ent, cmd, keyword); } return; } /** * process's input data from record for use as a keyword. 
* format of record: <obj-name> <keyword> <data> <keyword> <data> * braces are included */ public static void processData( Vector record ) { String item1 = ((String)record.get( 0 )).trim(); // Checks on Entity: Entity obj = Input.tryParseEntity(item1, Entity.class); if (obj == null) { InputAgent.logError("Object not found: %s", item1); return; } // Entity exists with name <entityName> or name <region>/<entityName> InputAgent.processData(obj, record); } /** * returns a vector of vectors * each vector will be of form <obj-name> <kwd> <data> <data> * no braces are returned */ private static ArrayList<StringVector> splitMultipleCommands( Vector record ) { // SUPPORTED SYNTAX: // <obj-name> <kwd> { <par> } // <obj-name> <kwd> { <par> <par> ... } // <obj-name> <kwd> { <par> <par> ... } <kwd> { <par> <par> ... } ... // <obj-name> <kwd> <par> <kwd> { <par> <par> ... } ... ArrayList<StringVector> multiCmds = new ArrayList<StringVector>(); int noOfUnclosedBraces = 0; // Loop through the keywords and assemble new commands for( int i = 1; i < record.size(); ) { // Enter the class, object, and keyword in the new command StringVector cmd = new StringVector( record.size() ); // Keyword changes as loop proceeds cmd.add((String)record.get(i)); i++; // For a command of the new form "<obj-name> <file-name>", record // will be empty here. 
if( i < record.size() ) { // If there is an opening brace, then the keyword has a list of // parameters String openingBrace = (String)record.get( i ); if( openingBrace.equals("{") ) { noOfUnclosedBraces ++ ; i++; // move past the opening brace { // Iterate through record while( (i < record.size()) && ( noOfUnclosedBraces > 0 ) ) { if ( record.get(i).equals("{") ) noOfUnclosedBraces ++ ; else if (record.get(i).equals("}")) noOfUnclosedBraces cmd.add((String)record.get(i)); i++; } if( ( record.size() == i ) && ( noOfUnclosedBraces != 0) ) { // corresponding "}" is missing InputAgent.logError("Closing brace } is missing."); return multiCmds; } // Last item added was the corresponding closing brace else { cmd.remove(cmd.size()-1); // throw out the closing brace } multiCmds.add( cmd ); } } // If there is no brace, then the keyword must have a single // parameter. else { cmd.add((String)record.get(i)); i++; multiCmds.add( cmd ); } } // Record contains no other items else { multiCmds.add( cmd ); } } return multiCmds; } private static class ConfigFileFilter implements FilenameFilter { @Override public boolean accept(File inFile, String fileName) { return fileName.endsWith("[cC][fF][gG]"); } } public static void load(GUIFrame gui) { System.out.println("Loading..."); FileDialog chooser = new FileDialog(gui, "Load Configuration File", FileDialog.LOAD); chooser.setFilenameFilter(new ConfigFileFilter()); String chosenFileName = chooseFile(chooser, FileDialog.LOAD); if (chosenFileName != null) { //dispose(); setLoadFile(gui, chosenFileName); } else { //dispose(); } } public static void save(GUIFrame gui) { System.out.println("Saving..."); if( InputAgent.getConfigFileName() != null ) { setSaveFile(gui, FileEntity.getRootDirectory() + System.getProperty( "file.separator" ) + InputAgent.getConfigFileName(), SAVE_ONLY ); } else { saveAs( gui ); } } public static void saveAs(GUIFrame gui) { System.out.println("Save As..."); FileDialog chooser = new FileDialog(gui, "Save Configuration 
File As", FileDialog.SAVE); chooser.setFilenameFilter(new ConfigFileFilter()); String chosenFileName = chooseFile(chooser, FileDialog.SAVE); if ( chosenFileName != null ) { //dispose(); setSaveFile(gui, chosenFileName, FileDialog.SAVE ); } else { //dispose(); } } /** * Opens browser to choose file. returns a boolean if a file was picked, false if canceled or closed. */ private static String chooseFile(FileDialog chooser, int saveOrLoadType) { // filter if (saveOrLoadType == FileDialog.SAVE) { chooser.setFile( InputAgent.getConfigFileName() ); } else { chooser.setFile( "*.cfg" ); } // display browser //this.show(); chooser.setVisible( true ); // if a file was picked, set entryarea to be this file if( chooser.getFile() != null ) { //chooser should not set root directory //FileEntity.setRootDirectory( chooser.getDirectory() ); String chosenFileName = chooser.getDirectory() + chooser.getFile(); return chosenFileName.trim(); } else { return null; } } public static void configure(GUIFrame gui, String configFileName) { try { gui.clear(); Simulation.setSimState(Simulation.SIM_STATE_UNCONFIGURED); InputAgent.setConfigFileName(configFileName); gui.updateForSimulationState(); try { InputAgent.loadConfigurationFile(configFileName); } catch( InputErrorException iee ) { if (!batchRun) ExceptionBox.instance().setError(iee); else System.out.println( iee.getMessage() ); } // store the present state Simulation.setSimState(Simulation.SIM_STATE_CONFIGURED); System.out.println("Configuration File Loaded"); // show the present state in the user interface gui.setTitle( Simulation.getModelName() + " - " + InputAgent.getRunName() ); gui.updateForSimulationState(); } catch( Throwable t ) { ExceptionBox.instance().setError(t); } } /** * Loads configuration file , calls GraphicSimulation.configure() method */ private static void setLoadFile(final GUIFrame gui, String fileName) { final String chosenFileName = fileName; new Thread(new Runnable() { @Override public void run() { File temp = new 
File(chosenFileName); if( temp.isAbsolute() ) { FileEntity.setRootDirectory( temp.getParentFile() ); InputAgent.configure(gui, temp.getName()); } else { InputAgent.configure(gui, chosenFileName); } GUIFrame.displayWindows(true); FrameBox.valueUpdate(); } }).start(); } /** * saves the cfg/pos file. checks for 'save' and 'save as', recursively goes to 'save as' if 'save' is not possible. * updates runname and filename of file. * if editbox is open and unaccepted, accepts changes. */ private static void setSaveFile(GUIFrame gui, String fileName, int saveOrLoadType) { String configFilePath = FileEntity.getRootDirectory() + System.getProperty( "file.separator" ) + InputAgent.getConfigFileName(); // check ending string of filename, force cfg onto end if needed if (!(fileName.endsWith(".cfg"))) { fileName = fileName.concat(".cfg"); } File temp = new File(fileName); //System.out.println("fileName is " + fileName); // If the original configuration file is the same as the file to save, and there were no added records, // then do not save the file because it would be recursive, i.e. contain "include <fileName>" if( configFilePath.equalsIgnoreCase( fileName ) ) { if( !InputAgent.hasAddedRecords() ) { if( saveOrLoadType == FileDialog.SAVE) { // recursive -- if can't overwrite base file, 'save as' // Ask if appending to base configuration is ok int appendOption = JOptionPane.showConfirmDialog( null, "Cannot overwrite base configuration file. 
Do you wish to append changes?", "Confirm Append", JOptionPane.YES_OPTION, JOptionPane.WARNING_MESSAGE ); // Perform append only if yes if (appendOption == JOptionPane.YES_OPTION) { FileEntity configFile = new FileEntity( fileName, FileEntity.FILE_WRITE, true ); configFile.write( "\n" + addedRecordMarker ); addedRecordFound = true; } else { InputAgent.saveAs(gui); return; } } else { InputAgent.saveAs(gui); return; } } else if ( saveOrLoadType == SAVE_ONLY) { System.out.println("Saving..."); } } // set root directory FileEntity.setRootDirectory( temp.getParentFile() ); //saveFile = new FileEntity( fileName, FileEntity.FILE_WRITE, false ); //simulation.printNewConfigurationFileOn( saveFile ); InputAgent.printNewConfigurationFileWithName( fileName ); sessionEdited = false; //TODOalan set directory of model.. ? InputAgent.setConfigFileName(Util.fileShortName(fileName)); // Set the title bar to match the new run name gui.setTitle( Simulation.getModelName() + " - " + InputAgent.getRunName() ); // close the window //dispose(); } /* * write input file keywords and values * * input file format: * Define Group { <Group names> } * Define <Object> { <Object names> } * * <Object name> <Keyword> { < values > } * */ public static void printInputFileKeywords() { Entity ent; // Create report file for the inputs FileEntity inputReportFile; String inputReportFileName = InputAgent.getReportDirectory() + InputAgent.getRunName() + ".inp"; if( FileEntity.fileExists( inputReportFileName ) ) { inputReportFile = new FileEntity( inputReportFileName, FileEntity.FILE_WRITE, false ); inputReportFile.flush(); } else { inputReportFile = new FileEntity( inputReportFileName, FileEntity.FILE_WRITE, false ); } // Loop through the entity classes boolean hasinput = false; // for formating output int count = 0; // for formating output String entityName = null; // to take out Region name // print Define statements for( ObjectType type : ObjectType.getAll() ) { Class<? 
extends Entity> each = type.getJavaClass(); // Loop through the instances for this entity class ArrayList<? extends Entity> cloneList = Entity.getInstancesOf(each); count = 0; for( int j=0; j < cloneList.size(); j++ ) { hasinput = false; ent = cloneList.get(j); for( Input<?> in : ent.getEditableInputs() ){ // If the keyword has been used, then add a record to the report if ( in.getValueString().length() != 0 ){ hasinput = true; count++; break; } } if ( each.getSimpleName().equalsIgnoreCase("Region") && ! hasinput ) { count++; hasinput = true; } if( hasinput ){ entityName = cloneList.get(j).getInputName(); if ( (count-1)%5 == 0) { inputReportFile.putString( "Define" ); inputReportFile.putTab(); inputReportFile.putString(type.getInputName()); inputReportFile.putTab(); inputReportFile.putString( "{ " + entityName ); inputReportFile.putTab(); } else if ( (count-1)%5 == 4 ){ inputReportFile.putString( entityName + " }" ); inputReportFile.newLine(); } else { inputReportFile.putString( entityName ); inputReportFile.putTab(); } } } if ( cloneList.size() > 0 ){ if ( count%5 != 0 ){ inputReportFile.putString( " }" ); inputReportFile.newLine(); } inputReportFile.newLine(); } } for( ObjectType type : ObjectType.getAll() ) { Class<? extends Entity> each = type.getJavaClass(); // Get the list of instances for this entity class // sort the list alphabetically ArrayList<? 
extends Entity> cloneList = Entity.getInstancesOf(each); // Print the entity class name to the report (in the form of a comment) if( cloneList.size() > 0 ) { inputReportFile.putString( "\" " + each.getSimpleName() + " \""); inputReportFile.newLine(); inputReportFile.newLine(); // blank line below the class name heading } Collections.sort(cloneList, new Comparator<Entity>() { @Override public int compare(Entity a, Entity b) { return a.getInputName().compareTo(b.getInputName()); } }); // Loop through the instances for this entity class for( int j=0; j < cloneList.size(); j++ ) { // Make sure the clone is an instance of the class (and not an instance of a subclass) if (cloneList.get(j).getClass() == each) { ent = cloneList.get(j); entityName = cloneList.get(j).getInputName(); hasinput = false; // Loop through the editable keywords for this instance for( Input<?> in : ent.getEditableInputs() ) { // If the keyword has been used, then add a record to the report if ( in.getValueString().length() != 0 ) { if ( ! 
in.getCategory().contains("Graphics") ) { hasinput = true; inputReportFile.putTab(); inputReportFile.putString( entityName ); inputReportFile.putTab(); inputReportFile.putString( in.getKeyword() ); inputReportFile.putTab(); if( in.getValueString().lastIndexOf( "{" ) > 10 ) { String[] item1Array; item1Array = in.getValueString().trim().split( " }" ); inputReportFile.putString( "{ " + item1Array[0] + " }" ); for (int l = 1; l < (item1Array.length); l++ ){ inputReportFile.newLine(); inputReportFile.putTabs( 5 ); inputReportFile.putString( item1Array[l] + " } " ); } inputReportFile.putString( " }" ); } else { inputReportFile.putString( "{ " + in.getValueString() + " }" ); } inputReportFile.newLine(); } } } // Put a blank line after each instance if ( hasinput ) { inputReportFile.newLine(); } } } } // Close out the report inputReportFile.flush(); inputReportFile.close(); } public static void closeLogFile() { if (logFile == null) return; logFile.flush(); logFile.close(); if (numErrors ==0 && numWarnings == 0) { logFile.delete(); } logFile = null; } private static final String errPrefix = "*** ERROR *** %s%n"; private static final String wrnPrefix = "***WARNING*** %s%n"; public static int numErrors() { return numErrors; } public static int numWarnings() { return numWarnings; } private static void echoInputRecord(ArrayList<String> tokens) { StringBuilder line = new StringBuilder(); for (int i = 0; i < tokens.size(); i++) { line.append(" ").append(tokens.get(i)); if (tokens.get(i).startsWith("\"")) { InputAgent.logMessage("%s", line.toString()); line.setLength(0); } } // Leftover input if (line.length() > 0) InputAgent.logMessage("%s", line.toString()); } private static void logBadInput(ArrayList<String> tokens, String msg) { InputAgent.echoInputRecord(tokens); InputAgent.logError("%s", msg); } public static void logMessage(String fmt, Object... 
args) { String msg = String.format(fmt, args); System.out.println(msg); if (logFile == null) return; logFile.write(msg); logFile.newLine(); logFile.flush(); } public static void trace(int indent, Entity ent, String meth, String... text) { // Create an indent string to space the lines StringBuilder ind = new StringBuilder(""); for (int i = 0; i < indent; i++) ind.append(" "); String spacer = ind.toString(); // Print a TIME header every time time has advanced double traceTime = ent.getCurrentTime(); if (lastTimeForTrace != traceTime) { System.out.format(" \nTIME = %.5f\n", traceTime); lastTimeForTrace = traceTime; } // Output the traces line(s) System.out.format("%s%s %s\n", spacer, ent.getName(), meth); for (String line : text) { System.out.format("%s%s\n", spacer, line); } System.out.flush(); } /* * Log the input to a file, but don't echo it out as well. */ public static void echoInput(Vector line) { // if there is no log file currently, output nothing if (logFile == null) return; StringBuilder msg = new StringBuilder(); for (Object each : line) { msg.append(" "); msg.append(each); } logFile.write(msg.toString()); logFile.newLine(); logFile.flush(); } public static void logWarning(String fmt, Object... args) { numWarnings++; String msg = String.format(fmt, args); InputAgent.logMessage(wrnPrefix, msg); } public static void logError(String fmt, Object... args) { numErrors++; String msg = String.format(fmt, args); InputAgent.logMessage(errPrefix, msg); } public static void processEntity_Keyword_Value(Entity ent, Input<?> in, String value){ ArrayList<String> tokens = new ArrayList<String>(); Parser.tokenize(tokens, value); if(! 
InputAgent.enclosedByBraces(tokens) ) { tokens.add(0, "{"); tokens.add("}"); } Parser.removeComments(tokens); tokens.add(0, ent.getInputName()); tokens.add(1, in.getKeyword()); Vector data = new Vector(tokens.size()); data.addAll(tokens); InputAgent.processData(ent, data); } public static void processEntity_Keyword_Value(Entity ent, String keyword, String value){ Input<?> in = ent.getInput( keyword ); processEntity_Keyword_Value(ent, in, value); } public static void updateInput(Entity ent, Input<?> in, StringVector data) { if(ent.testFlag(Entity.FLAG_GENERATED)) return; String str = data.toString(); // reformat input string to be added to keyword // strip out "{}" from data to find value if( data.size() > 0 ) { if (!(data.get(0).equals("{"))) { str = str.replaceAll("[{}]", ""); } else { int strLength = str.length(); str = String.format("{%s}", str.substring(3,strLength-3)); } str = str.replaceAll( "[,]", " " ); str = str.trim(); } // Takes care of old format, displaying as new format -- appending onto end of record. if( in.isAppendable() && ! 
data.get(0).equals("{") ) { str = String.format("%s { %s }", in.getValueString(), str ); } if(in.isEdited()) { ent.setFlag(Entity.FLAG_EDITED); sessionEdited = true; } in.setValueString(str); } /** * Print out a configuration file with all the edited changes attached */ public static void printNewConfigurationFileWithName( String fileName ) { ArrayList<String> preAddedRecordLines = new ArrayList<String>(); String configFilePath = FileEntity.getRootDirectory() + System.getProperty( "file.separator" ) + InputAgent.getConfigFileName(); if( InputAgent.hasAddedRecords() && FileEntity.fileExists( configFilePath ) ) { // Store the original configuration file lines up to added records try { BufferedReader in = new BufferedReader( new FileReader( configFilePath ) ); String line; while ( ( line = in.readLine() ) != null ) { if ( line.startsWith( addedRecordMarker ) ) { break; } else { preAddedRecordLines.add( line ); } } in.close(); } catch ( Exception e ) { throw new ErrorException( e ); } } FileEntity file = new FileEntity( fileName, FileEntity.FILE_WRITE, false ); // include the original configuration file if (!InputAgent.hasAddedRecords()) { file.format( "\" File: %s\n\n", file.getFileName() ); file.format( "include %s\n\n", InputAgent.getConfigFileName() ); } else { for( int i=0; i < preAddedRecordLines.size(); i++ ) { String line = preAddedRecordLines.get( i ); if( line.startsWith( "\" File: " ) ) { file.format( "\" File: %s\n", file.getFileName() ); } else { file.format("%s\n", line); } } } file.format("%s\n", addedRecordMarker); addedRecordFound = true; // Print changes to simulation writeInputsOnFile_ForEntity( file, DisplayEntity.simulation ); file.format("\n"); // Determine all the new classes that were created ArrayList<Class<? extends Entity>> newClasses = new ArrayList<Class<? 
extends Entity>>(); for (int i = 0; i < Entity.getAll().size(); i++) { Entity ent = Entity.getAll().get(i); if (!ent.testFlag(Entity.FLAG_ADDED)) continue; if (!newClasses.contains(ent.getClass())) newClasses.add(ent.getClass()); } // Print the define statements for each new class for( Class<? extends Entity> newClass : newClasses ) { for (ObjectType o : ObjectType.getAll()) { if (o.getJavaClass() == newClass) { file.putString( "Define " + o.getInputName()+" {" ); break; } } for (int i = 0; i < Entity.getAll().size(); i++) { Entity ent = Entity.getAll().get(i); if (!ent.testFlag(Entity.FLAG_ADDED)) continue; if (ent.getClass() == newClass) file.format(" %s ", ent.getInputName()); } file.format("}\n"); } // List all the changes that were saved for each edited entity for (int i = 0; i < Entity.getAll().size(); i++) { Entity ent = Entity.getAll().get(i); if( ent == DisplayEntity.simulation ) continue; if (!ent.testFlag(Entity.FLAG_EDITED)) continue; writeInputsOnFile_ForEntity( file, ent ); } file.flush(); file.close(); } static void writeInputsOnFile_ForEntity( FileEntity file, Entity ent ) { // Write new configuration file for non-appendable keywords file.format("\n"); for( int j=0; j < ent.getEditableInputs().size(); j++ ) { Input<?> in = ent.getEditableInputs().get( j ); if( in.isEdited() ) { // Each line starts with the entity name followed by changed keyword file.format("%s %s ", ent.getInputName(), in.getKeyword()); String value = in.getValueString(); ArrayList<String> tokens = new ArrayList<String>(); Parser.tokenize(tokens, value); if(! InputAgent.enclosedByBraces(tokens) ) { value = String.format("{ %s }", value); } file.format("%s\n", value); } } } public static void loadDefault() { // Read the default configuration file InputAgent.readResource("default.cfg"); sessionEdited = false; } /** * Split an input (list of strings) down to a single level of nested braces, this may then be called again for * further nesting. 
* @param input * @return */ public static ArrayList<ArrayList<String>> splitForNestedBraces(List<String> input) { ArrayList<ArrayList<String>> inputs = new ArrayList<ArrayList<String>>(); int braceDepth = 0; ArrayList<String> currentLine = null; for (int i = 0; i < input.size(); i++) { if (currentLine == null) currentLine = new ArrayList<String>(); currentLine.add(input.get(i)); if (input.get(i).equals("{")) { braceDepth++; continue; } if (input.get(i).equals("}")) { braceDepth if (braceDepth == 0) { inputs.add(currentLine); currentLine = null; continue; } } } return inputs; } }
package com.oliverdunk.jb2.api;

import com.oliverdunk.jb2.exceptions.B2APIException;
import com.oliverdunk.jb2.models.*;
import org.json.JSONArray;
import org.json.JSONObject;

import javax.net.ssl.HttpsURLConnection;
import java.io.*;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;

/**
 * Class used for accessing the B2 API using an HTTP connection.
 */
public class B2API {

    //User-Agent field sent with all HTTP requests.
    private static final String USER_AGENT = "JB2/1.0";
    private static final String API_URL = "https://api.backblaze.com";

    /**
     * Builds a B2APIException from the JSON error document the API returns.
     *
     * @param error JSON body containing "message", "status" and "code"
     * @return an exception populated with the API's error details
     */
    private static B2APIException exceptionFrom(JSONObject error) {
        B2APIException exception = new B2APIException(error.getString("message"));
        exception.setStatusCode(error.getInt("status"));
        exception.setIdentifier(error.getString("code"));
        return exception;
    }

    /**
     * Performs a POST against a v1 API method and parses the JSON response.
     *
     * @param URL base URL of the API host
     * @param method API method name, appended to /b2api/v1/
     * @param authorization value sent in the Authorization header
     * @param body JSON request body
     * @return the parsed response body
     * @throws B2APIException if the API reports an error (HTTP status >= 400)
     */
    private static JSONObject call(String URL, String method, String authorization, JSONObject body) throws B2APIException {
        try {
            URL url = new URL(URL + "/b2api/v1/" + method);
            HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
            connection.setRequestMethod("POST");
            connection.setRequestProperty("User-Agent", USER_AGENT);
            connection.setRequestProperty("Authorization", authorization);
            connection.setDoOutput(true);

            DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
            outputStream.writeBytes(body.toString());
            outputStream.flush();
            outputStream.close();

            JSONObject requestResult;
            if (connection.getResponseCode() < 400) {
                requestResult = inputToJSON(connection.getInputStream());
            } else {
                throw exceptionFrom(inputToJSON(connection.getErrorStream()));
            }
            connection.disconnect();
            return requestResult;
        } catch (IOException ex) {
            // NOTE(review): transport failures are silently mapped to an empty
            // document and surface later as JSONExceptions in callers. Kept
            // as-is to preserve the public methods' existing behavior.
            return new JSONObject();
        }
    }

    /**
     * Uploads a file to the URL obtained from a prior b2_get_upload_url call.
     *
     * @param file file whose bytes will be uploaded
     * @param name name to store the file under
     * @param upload upload request carrying the target URL and auth token
     * @return the parsed response body
     * @throws B2APIException if the API reports an error (HTTP status >= 400)
     */
    private static JSONObject uploadFile(File file, String name, B2UploadRequest upload) throws B2APIException {
        try {
            URL url = new URL(upload.getUploadURL());
            HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
            connection.setRequestMethod("POST");
            connection.setRequestProperty("User-Agent", USER_AGENT);
            connection.setRequestProperty("Authorization", upload.getAuthorizationToken());
            connection.setRequestProperty("Content-Type", "b2/x-auto");
            connection.setRequestProperty("X-Bz-File-Name", name);
            // B2 verifies the payload against this SHA1 on the server side.
            connection.setRequestProperty("X-Bz-Content-Sha1", getFileHash(file));
            connection.setDoOutput(true);

            DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
            outputStream.write(Files.readAllBytes(Paths.get(file.getPath())));
            outputStream.flush();
            outputStream.close();

            JSONObject requestResult;
            if (connection.getResponseCode() < 400) {
                requestResult = inputToJSON(connection.getInputStream());
            } else {
                throw exceptionFrom(inputToJSON(connection.getErrorStream()));
            }
            connection.disconnect();
            return requestResult;
        } catch (IOException | NoSuchAlgorithmException ex) {
            // NOTE(review): see call() — errors collapse to an empty document.
            return new JSONObject();
        }
    }

    /**
     * Downloads a file by ID and streams it into the destination file.
     *
     * @param URL base download URL of the API host
     * @param authorization value sent in the Authorization header
     * @param file file to download (only its ID is used)
     * @param destination local file the contents are written to
     * @throws B2APIException if the API reports an error (HTTP status >= 400)
     */
    private static void downloadFile(String URL, String authorization, B2File file, File destination) throws B2APIException {
        try {
            URL url = new URL(URL + "/b2api/v1/b2_download_file_by_id");
            HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
            connection.setRequestMethod("POST");
            connection.setRequestProperty("User-Agent", USER_AGENT);
            connection.setRequestProperty("Authorization", authorization);
            connection.setDoOutput(true);

            DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
            outputStream.writeBytes(new JSONObject().put("fileId", file.getID()).toString());
            outputStream.flush();
            outputStream.close();

            if (connection.getResponseCode() < 400) {
                InputStream inputStream = connection.getInputStream();
                OutputStream fileOutputStream = new FileOutputStream(destination);
                int read;
                byte[] bytes = new byte[1024];
                while ((read = inputStream.read(bytes)) != -1) {
                    fileOutputStream.write(bytes, 0, read);
                }
                fileOutputStream.close();
                connection.disconnect();
            } else {
                throw exceptionFrom(inputToJSON(connection.getErrorStream()));
            }
        } catch (IOException ignored) {
            // NOTE(review): download transport failures are deliberately
            // swallowed; preserved as existing behavior.
        }
    }

    /**
     * Reads the data from an InputStream and returns the string parsed into a JSONObject.
     *
     * @param inputStream InputStream which will be read to retrieve the data
     * @return JSONObject representing the data inside the InputStream
     * @throws IOException Thrown if an error occurs while reading data from the InputStream
     */
    private static JSONObject inputToJSON(InputStream inputStream) throws IOException {
        StringBuilder json = new StringBuilder();
        // FIX: reader.ready() reports only already-buffered data and could stop
        // before the full body arrived; read until end-of-stream instead.
        // UTF-8 is specified explicitly rather than the platform default.
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
        String line;
        while ((line = reader.readLine()) != null)
            json.append(line);
        return new JSONObject(json.toString().trim());
    }

    /**
     * Authorizes against the API and starts a session.
     *
     * @param accountID B2 account ID
     * @param applicationKey B2 application key
     * @return a session carrying the auth token and the API/download URLs
     */
    public static B2Session authorizeAccount(String accountID, String applicationKey) {
        String encodedAuth = encodeAuthorization(accountID + ":" + applicationKey);
        JSONObject requestResult = call(API_URL, "b2_authorize_account", encodedAuth, new JSONObject());
        String authorizationToken = requestResult.getString("authorizationToken");
        String apiURL = requestResult.getString("apiUrl");
        String downloadURL = requestResult.getString("downloadUrl");
        return new B2Session(authorizationToken, accountID, apiURL, downloadURL);
    }

    /**
     * Encodes an authentication input into Base64, and formats for the Authorization field.
     * Used for the authorizeAccount, which does not have the usual authorization token.
     *
     * @param input Account ID and application key in the format accountID:applicationKey
     * @return Encoded Base64 String, with the Basic prefix
     */
    private static String encodeAuthorization(String input) {
        byte[] authorizationBytes = input.getBytes(StandardCharsets.UTF_8);
        return "Basic " + Base64.getEncoder().encodeToString(authorizationBytes);
    }

    /**
     * Gets the SHA1 hash of a file.
     *
     * @param file The file for which the hash should be generated
     * @return The SHA1 hash of the specified file as lowercase hex
     */
    private static String getFileHash(File file) throws NoSuchAlgorithmException, IOException {
        MessageDigest md = MessageDigest.getInstance("SHA1");
        FileInputStream fis = new FileInputStream(file);
        byte[] dataBytes = new byte[1024];
        int nread;
        while ((nread = fis.read(dataBytes)) != -1)
            md.update(dataBytes, 0, nread);
        byte[] mdBytes = md.digest();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < mdBytes.length; i++)
            // +0x100 / substring(1) zero-pads each byte to two hex digits.
            sb.append(Integer.toString((mdBytes[i] & 0xff) + 0x100, 16).substring(1));
        return sb.toString();
    }

    /**
     * Creates a new B2Bucket using the API.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @param bucketName A name for the bucket, which is at least six characters and does not start with "b2-"
     * @param bucketType The privacy level of the bucket which is being created
     * @return the created bucket
     */
    public static B2Bucket createBucket(B2Session session, String bucketName, BucketType bucketType) {
        JSONObject parameters = new JSONObject();
        parameters.put("accountId", session.getAccountID());
        parameters.put("bucketName", bucketName);
        parameters.put("bucketType", bucketType.getIdentifier());
        JSONObject requestResult = call(session.getAPIURL(), "b2_create_bucket", session.getAuthToken(), parameters);
        return new B2Bucket(bucketName, requestResult.getString("bucketId"), bucketType);
    }

    /**
     * Deletes a B2Bucket using the API, but only if the bucket contains no versions of any files.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @param bucket The B2Bucket instance which should be deleted
     */
    public static void deleteBucket(B2Session session, B2Bucket bucket) {
        JSONObject parameters = new JSONObject();
        parameters.put("accountId", session.getAccountID());
        parameters.put("bucketId", bucket.getID());
        call(session.getAPIURL(), "b2_delete_bucket", session.getAuthToken(), parameters);
    }

    /**
     * Lists all buckets using the API.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @return all buckets belonging to the session's account
     */
    public static List<B2Bucket> listBuckets(B2Session session) {
        JSONObject parameters = new JSONObject();
        parameters.put("accountId", session.getAccountID());
        JSONObject response = call(session.getAPIURL(), "b2_list_buckets", session.getAuthToken(), parameters);
        List<B2Bucket> buckets = new ArrayList<B2Bucket>();
        JSONArray bucketArray = response.getJSONArray("buckets");
        for (int i = 0; i < bucketArray.length(); i++) {
            JSONObject bucket = bucketArray.getJSONObject(i);
            buckets.add(new B2Bucket(
                    bucket.getString("bucketName"),
                    bucket.getString("bucketId"),
                    BucketType.getByIdentifier(bucket.getString("bucketType")))
            );
        }
        return buckets;
    }

    /**
     * Syncs a B2Bucket instance with the API.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @param bucket The B2Bucket instance which should be synced
     */
    public static void updateBucket(B2Session session, B2Bucket bucket) {
        JSONObject parameters = new JSONObject();
        parameters.put("accountId", session.getAccountID());
        parameters.put("bucketId", bucket.getID());
        parameters.put("bucketType", bucket.getType().getIdentifier());
        call(session.getAPIURL(), "b2_update_bucket", session.getAuthToken(), parameters);
    }

    /**
     * Prepares the API for a file upload within a given bucket.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @param bucket The B2Bucket where the upload will take place
     * @return A B2UploadRequest instance representing where a file should be uploaded
     */
    public static B2UploadRequest getUploadURL(B2Session session, B2Bucket bucket) {
        JSONObject parameters = new JSONObject();
        parameters.put("bucketId", bucket.getID());
        JSONObject result = call(session.getAPIURL(), "b2_get_upload_url", session.getAuthToken(), parameters);
        return new B2UploadRequest(bucket, result.getString("uploadUrl"), result.getString("authorizationToken"));
    }

    /**
     * Uploads a file to the API completing the upload request.
     *
     * @param upload An upload request created with the getUploadURL method
     * @param file The file which should be uploaded
     * @param name The name which should identify the file
     * @return A B2File instance
     */
    public static B2File uploadFile(B2UploadRequest upload, File file, String name) {
        JSONObject result = uploadFile(file, name, upload);
        return new B2File(name, result.getString("contentType"), result.getString("fileId"),
                file.length(), System.currentTimeMillis());
    }

    /**
     * Downloads a file from the API.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @param file The file which should be downloaded
     * @param destination Where the file should be downloaded to
     */
    public static void downloadFile(B2Session session, B2File file, File destination) {
        downloadFile(session.getDownloadURL(), session.getAuthToken(), file, destination);
    }

    /**
     * Deletes a B2File using the API, with the given ID.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @param file The B2File instance which should be deleted
     */
    public static void deleteFile(B2Session session, B2File file) {
        JSONObject parameters = new JSONObject();
        parameters.put("fileName", file.getName());
        parameters.put("fileId", file.getID());
        call(session.getAPIURL(), "b2_delete_file_version", session.getAuthToken(), parameters);
    }

    /**
     * Fetches a file and instantiates a new B2File.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @param fileID The ID of the file which should be fetched
     * @return A B2File (timestamp currently not supported)
     */
    public static B2File getFile(B2Session session, String fileID) {
        JSONObject parameters = new JSONObject();
        parameters.put("fileId", fileID);
        JSONObject result = call(session.getAPIURL(), "b2_get_file_info", session.getAuthToken(), parameters);
        //TODO: Get correct upload timestamp
        return new B2File(result.getString("fileName"), result.getString("contentType"),
                result.getString("fileId"), result.getLong("contentLength"), 0);
    }

    /**
     * Lists all files using the API, sending one separate request per 1000 files.
     *
     * @param session Session authenticated with the API, which will be used as Authorization
     * @param bucket Bucket which should be searched
     * @return every file in the bucket, across all pages
     */
    public static List<B2File> listFiles(B2Session session, B2Bucket bucket) {
        JSONObject parameters = new JSONObject();
        parameters.put("bucketId", bucket.getID());
        JSONObject response = call(session.getAPIURL(), "b2_list_file_names", session.getAuthToken(), parameters);

        //Add initial files
        List<B2File> files = new ArrayList<B2File>();
        addFiles(files, response.getJSONArray("files"));

        // FIX: the old condition compared toString() == null, which can never
        // be true, so pagination never ran and only the first page of files
        // was returned. The API signals another page with a non-null
        // "nextFileName"; the debug println was removed as well.
        while (response.has("nextFileName") && !response.isNull("nextFileName")) {
            parameters.put("startFileName", response.get("nextFileName"));
            response = call(session.getAPIURL(), "b2_list_file_names", session.getAuthToken(), parameters);
            addFiles(files, response.getJSONArray("files"));
        }
        return files;
    }

    /**
     * Converts each JSON file record into a B2File and appends it to the list.
     *
     * @param currentList list to append to (mutated in place)
     * @param files JSON array of file records from a list-files response
     */
    private static void addFiles(List<B2File> currentList, JSONArray files) {
        for (int i = 0; i < files.length(); i++) {
            JSONObject file = files.getJSONObject(i);
            currentList.add(new B2File(
                    file.getString("fileName"),
                    //TODO: Send correct file type
                    "Unknown",
                    file.getString("fileId"),
                    file.getLong("size"),
                    file.getLong("uploadTimestamp"))
            );
        }
    }
}
package com.rox.emu.mem; import com.rox.emu.env.RoxByte; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Simple array representing memory, implementing the memory interface * * @author Ross Drew */ public class SimpleMemory implements Memory{ private final Logger LOG = LoggerFactory.getLogger(this.getClass()); private final RoxByte[] memoryArray; public SimpleMemory(){ memoryArray = new RoxByte[0x10000]; reset(); } /** * {@inheritDoc} */ @Override public void setByteAt(int location, int byteValue) { LOG.trace("STORE mem[" + location + "] --> " + byteValue); memoryArray[location] = RoxByte.signedFrom(byteValue & 0xFF); } /** * {@inheritDoc} */ @Override public void setMemory(int startLocation, int[] byteValues) { LOG.trace("STORE mem[" + startLocation + "] --> " + byteValues.length + " bytes"); // System.arraycopy(byteValues, 0, memoryArray, startLocation, byteValues.length); for (int i=0; i<byteValues.length; i++){ memoryArray[startLocation + i] = RoxByte.signedFrom(byteValues[i]); } } /** * {@inheritDoc} */ @Override public int getByte(int location) { LOG.trace("FETCH mem[" + location +"] --> " + memoryArray[location]); return memoryArray[location].getAsInt(); } /** * {@inheritDoc} */ @Override public int getWord(int location) { int word = (memoryArray[location].getAsInt() << 8 | memoryArray[location+1].getAsInt()); LOG.trace("FETCH mem[" + location +"] --> " + word); return word; } /** * {@inheritDoc} */ @Override public int[] getBlock(int from, int to) { int[] extractedData = new int[to-from]; // System.arraycopy(memoryArray, from, extractedData, 0, extractedData.length); for (int i=0; i<extractedData.length; i++){ extractedData[i] = memoryArray[from + i].getAsInt(); } return extractedData; } /** * {@inheritDoc} */ @Override public void reset() { for (int i=0; i<memoryArray.length; i++) memoryArray[i] = RoxByte.ZERO; // for (int i : memoryArray) { // memoryArray[i] = 0; } }
package com.rultor.agents.ec2;

import com.amazonaws.services.ec2.model.Instance;
import com.jcabi.aspects.Immutable;
import java.io.IOException;

/**
 * Amazon EC2 abstraction.
 *
 * <p>Implementations are expected to be immutable (see {@code @Immutable}).
 *
 * @author Yuriy Alevohin (alevohin@mail.ru)
 * @version $Id$
 * @since 2.0
 * @todo #629 Add implementation for com.rultor.agents.ec2.Amazon.
 *  It must create com.amazonaws.services.ec2.AmazonEC2 client
 *  with config params "credentials", "zone", "type", "key". Use
 *  client.runInstances(com.amazonaws.services.ec2.model.RunInstancesRequest)
 *  to run on-demand instance. Method runOnDemand must finally
 *  wait for started instance and check if start was successful.
 */
@Immutable
public interface Amazon {

    /**
     * Run EC2 OnDemand instance.
     *
     * <p>Per the to-do above, implementations should block until the
     * instance has started and verify the start succeeded.
     *
     * @return EC2 Instance
     * @throws IOException if the instance cannot be started or the
     *  request fails (exact failure modes are implementation-defined)
     */
    Instance runOnDemand() throws IOException;
}
package com.shippo.model;

import java.util.Map;

import com.shippo.Shippo;
import com.shippo.exception.APIConnectionException;
import com.shippo.exception.APIException;
import com.shippo.exception.AuthenticationException;
import com.shippo.exception.InvalidRequestException;
import com.shippo.exception.RequestTimeoutException;
import com.shippo.net.APIResource;

/**
 * A shipping-label transaction resource in the Shippo API.
 */
public class Transaction extends APIResource {

    // How long to wait between polls in createSync; keeps the loop from
    // hammering the API with back-to-back retrieve calls.
    private static final long POLL_INTERVAL_MS = 500;

    String objectState;
    String objectStatus;
    String object_purpose;
    String objectId;
    String objectOwner;
    Object objectCreated;
    Object objectUpdated;
    public Object commercialInvoiceUrl;
    Object wasTest;
    Object rate;
    Object trackingNumber;
    Object trackingStatus;
    Object trackingUrlProvider;
    Object labelUrl;
    Object messages;
    Object customsNote;
    Object submissionNote;
    Object metadata;

    /**
     * Creates a transaction asynchronously using the default API key.
     *
     * @param params request parameters sent to the API
     * @return the created (possibly still QUEUED) transaction
     */
    public static Transaction create(Map<String, Object> params)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, APIException {
        return create(params, null);
    }

    public String getInstanceURL() {
        return "";
    }

    /**
     * Creates a transaction asynchronously with an explicit API key.
     *
     * @param params request parameters sent to the API
     * @param apiKey API key to authenticate with, or null for the default
     * @return the created (possibly still QUEUED) transaction
     */
    public static Transaction create(Map<String, Object> params, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, APIException {
        return request(RequestMethod.POST, classURL(Transaction.class), params,
                Transaction.class, apiKey);
    }

    public static Transaction createSync(Map<String, Object> params)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, APIException, RequestTimeoutException {
        return createSync(params, null);
    }

    /**
     * Creates a transaction and blocks, polling the API, until the label has
     * finished generating or {@link Shippo#TRANSACTION_REQ_TIMEOUT} elapses.
     *
     * @param params request parameters sent to the API
     * @param apiKey API key to authenticate with, or null for the default
     * @return the transaction once its status has left QUEUED/WAITING
     * @throws RequestTimeoutException if the label is not ready in time or
     *         the calling thread is interrupted while waiting
     */
    public static Transaction createSync(Map<String, Object> params, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, APIException, RequestTimeoutException {
        Transaction transaction = request(RequestMethod.POST,
                classURL(Transaction.class), params, Transaction.class, apiKey);
        String objectId = transaction.objectId;
        String objectStatus = transaction.objectStatus;
        long startTime = System.currentTimeMillis();
        // Constant-on-the-left equals also guards against a null status.
        while ("QUEUED".equals(objectStatus) || "WAITING".equals(objectStatus)) {
            if (System.currentTimeMillis() - startTime > Shippo.TRANSACTION_REQ_TIMEOUT) {
                throw new RequestTimeoutException(
                        "A timeout has occurred while waiting for your label to generate. Try retrieving the Transaction object again and check if objectStatus is updated. If this issue persists, please contact support@goshippo.com");
            }
            // FIX: the old loop polled with no delay; sleep between retries.
            try {
                Thread.sleep(POLL_INTERVAL_MS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
                throw new RequestTimeoutException(
                        "Interrupted while waiting for your label to generate.");
            }
            transaction = retrieve(objectId);
            objectStatus = transaction.objectStatus;
        }
        return transaction;
    }

    public static Transaction retrieve(String id)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, APIException {
        return retrieve(id, null);
    }

    /**
     * Retrieves a single transaction by ID.
     *
     * @param id transaction object ID
     * @param apiKey API key to authenticate with, or null for the default
     * @return the transaction
     */
    public static Transaction retrieve(String id, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, APIException {
        return request(RequestMethod.GET, instanceURL(Transaction.class, id),
                null, Transaction.class, apiKey);
    }

    public static TransactionCollection all(Map<String, Object> params)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, APIException {
        return all(params, null);
    }

    /**
     * Lists transactions matching the given parameters.
     *
     * @param params query parameters sent to the API
     * @param apiKey API key to authenticate with, or null for the default
     * @return the matching transactions
     */
    public static TransactionCollection all(Map<String, Object> params, String apiKey)
            throws AuthenticationException, InvalidRequestException,
            APIConnectionException, APIException {
        return request(RequestMethod.GET, classURL(Transaction.class), params,
                TransactionCollection.class, apiKey);
    }

    public String getObjectState() { return objectState; }

    public void setObjectState(String objectState) { this.objectState = objectState; }

    public String getObjectStatus() { return objectStatus; }

    public void setObjectStatus(String objectStatus) { this.objectStatus = objectStatus; }

    public String getObject_purpose() { return object_purpose; }

    public void setObject_purpose(String object_purpose) { this.object_purpose = object_purpose; }

    public String getObjectId() { return objectId; }

    public void setObjectId(String objectId) { this.objectId = objectId; }

    public String getObjectOwner() { return objectOwner; }

    public void setObjectOwner(String objectOwner) { this.objectOwner = objectOwner; }

    public Object getObjectCreated() { return objectCreated; }

    public void setObjectCreated(Object objectCreated) { this.objectCreated = objectCreated; }

    public Object getObjectUpdated() { return objectUpdated; }

    public void setObjectUpdated(Object objectUpdated) { this.objectUpdated = objectUpdated; }

    public Object getWasTest() { return wasTest; }

    public void setWasTest(Object wasTest) { this.wasTest = wasTest; }

    public Object getRate() { return rate; }

    public void setRate(Object rate) { this.rate = rate; }

    public Object getTrackingNumber() { return trackingNumber; }

    public void setTrackingNumber(Object trackingNumber) { this.trackingNumber = trackingNumber; }

    public Object getTrackingStatus() { return trackingStatus; }

    public void setTrackingStatus(Object trackingStatus) { this.trackingStatus = trackingStatus; }

    public Object getTrackingUrlProvider() { return trackingUrlProvider; }

    public void setTrackingUrlProvider(Object trackingUrlProvider) { this.trackingUrlProvider = trackingUrlProvider; }

    public Object getLabelUrl() { return labelUrl; }

    public void setLabelUrl(Object labelUrl) { this.labelUrl = labelUrl; }

    public Object getMessages() { return messages; }

    public void setMessages(Object messages) { this.messages = messages; }

    public Object getCustomsNote() { return customsNote; }

    public void setCustomsNote(Object customsNote) { this.customsNote = customsNote; }

    public Object getSubmissionNote() { return submissionNote; }

    public void setSubmissionNote(Object submissionNote) { this.submissionNote = submissionNote; }

    public Object getCommercialInvoiceUrl() { return commercialInvoiceUrl; }

    public void setCommercialInvoiceUrl(Object commercialInvoiceUrl) { this.commercialInvoiceUrl = commercialInvoiceUrl; }

    public Object getMetadata() { return metadata; }

    public void setMetadata(Object metadata) { this.metadata = metadata; }
}
package com.squareup.thumbor; import java.util.ArrayList; import java.util.List; import static com.squareup.thumbor.Utilities.aes128Encrypt; import static com.squareup.thumbor.Utilities.md5; import static com.squareup.thumbor.Utilities.normalizeString; import static com.squareup.thumbor.Utilities.stripProtocolAndParams; /** * Fluent interface to create a URL appropriate for passing to Thumbor. * * @see #image */ public final class Thumbor { private static final String PREFIX_UNSAFE = "unsafe/"; private static final String PREFIX_META = "meta/"; private static final String PART_SMART = "smart"; private static final String PART_FIT_IN = "fit-in"; private static final String PART_FILTERS = "filters"; private static final String FILTER_BRIGHTNESS = "brightness"; private static final String FILTER_CONTRAST = "contrast"; private static final String FILTER_NOISE = "noise"; private static final String FILTER_QUALITY = "quality"; private static final String FILTER_RGB = "rgb"; private static final String FILTER_ROUND_CORNER = "round_corner"; private static final String FILTER_WATERMARK = "watermark"; private static final String FILTER_SHARPEN = "sharpen"; private static final String FILTER_FILL = "fill"; /** * Horizontal alignment for crop positioning. */ public enum HorizontalAlign { LEFT("left"), CENTER("center"), RIGHT("right"); final String value; private HorizontalAlign(String value) { this.value = value; } } /** * Vertical alignment for crop positioning. */ public enum VerticalAlign { TOP("top"), MIDDLE("middle"), BOTTOM("bottom"); final String value; private VerticalAlign(String value) { this.value = value; } } /** * Exception denoting that a fatal error occurred while assembling the URL for the current configuration. 
* * @see #getCause() */ public static class UnableToBuildException extends RuntimeException { public UnableToBuildException(Throwable e) { super(e); } } final String target; String host = "/"; String key; boolean hasCrop = false; boolean hasResize = false; boolean isSmart = false; boolean flipHorizontally = false; boolean flipVertically = false; boolean fitIn = false; int resizeWidth; int resizeHeight; int cropTop; int cropLeft; int cropBottom; int cropRight; HorizontalAlign cropHorizontalAlign; VerticalAlign cropVerticalAlign; List<String> filters; /** * Start a new Thumbor URL configuration for the specified target image URL. * * @param target Target image URL. */ Thumbor(String target) { this.target = stripProtocolAndParams(target); } public static Thumbor image(String target) { if (target == null || target.length() == 0) { throw new IllegalArgumentException("Target image URL must not be blank."); } return new Thumbor(target); } public Thumbor key(String key) { if (key == null || key.length() == 0) { throw new IllegalArgumentException("Key must not be blank."); } this.key = key; return this; } public Thumbor host(String host) { if (host == null || host.length() == 0) { throw new IllegalArgumentException("Host must not be blank."); } if (!host.endsWith("/")) { host += "/"; } this.host = host; return this; } public Thumbor resize(int width, int height) { if (width < 1) { throw new IllegalArgumentException("Width must be greater than zero."); } if (height < 1) { throw new IllegalArgumentException("Height must be greater than zero."); } hasResize = true; resizeWidth = width; resizeHeight = height; return this; } public Thumbor flipHorizontally() { if (!hasResize) { throw new IllegalStateException("Image must be resized first in order to flip."); } flipHorizontally = true; return this; } public Thumbor flipVertically() { if (!hasResize) { throw new IllegalStateException("Image must be resized first in order to flip."); } flipVertically = true; return this; } public 
Thumbor fitIn() { if (!hasResize) { throw new IllegalStateException("Image must be resized first in order to apply 'fit-in'."); } fitIn = true; return this; } public Thumbor crop(int top, int left, int bottom, int right) { if (top < 0) { throw new IllegalArgumentException("Top must be greater or equal to zero."); } if (left < 0) { throw new IllegalArgumentException("Left must be greater or equal to zero."); } if (bottom < 1 || bottom <= top) { throw new IllegalArgumentException("Bottom must be greater than zero and top."); } if (right < 1 || right <= left) { throw new IllegalArgumentException("Right must be greater than zero and left."); } hasCrop = true; cropTop = top; cropLeft = left; cropBottom = bottom; cropRight = right; return this; } public Thumbor align(HorizontalAlign align) { if (!hasCrop) { throw new IllegalStateException("Image must be cropped first in order to align."); } cropHorizontalAlign = align; return this; } public Thumbor align(VerticalAlign align) { if (!hasCrop) { throw new IllegalStateException("Image must be cropped first in order to align."); } cropVerticalAlign = align; return this; } public Thumbor align(VerticalAlign valign, HorizontalAlign halign) { return align(valign).align(halign); } public Thumbor smart() { if (!hasCrop) { throw new IllegalStateException("Image must be cropped first in order to smart align."); } isSmart = true; return this; } public Thumbor filter(String... filters) { if (filters.length == 0) { throw new IllegalArgumentException("You must provide at least one filter."); } if (this.filters == null) { this.filters = new ArrayList<String>(1); } for (String filter : filters) { if (filter == null || filter.length() == 0) { throw new IllegalArgumentException("Filter must not be blank."); } this.filters.add(filter); } return this; } /** * Build an unsafe version of the URL. * * @return Unsafe URL for the current configuration. 
*/ public String buildUnsafe() { return new StringBuilder(host) .append(PREFIX_UNSAFE) .append(assembleConfig()) .append(target) .toString(); } public String buildSafe() { if (key == null) { throw new IllegalStateException("Cannot build safe URL without a key."); } // Assemble config and an MD5 of the target image. StringBuilder config = assembleConfig().append(md5(target)); final byte[] encrypted = aes128Encrypt(config, normalizeString(key, 16)); // URL-safe Base64 encode. final String encoded = Utilities.base64Encode(encrypted); return new StringBuilder(host) .append(encoded) .append("/") .append(target) .toString(); } /** * Build a URL for fetching Thumbor metadata. * * @return Meta URL for the current configuration. */ public String buildMeta() { return new StringBuilder(host) .append(PREFIX_META) .append(assembleConfig()) .append(target) .toString(); } @Override public String toString() { return (key == null) ? buildUnsafe() : buildSafe(); } /** * Assembly the configuration section of the URL. * * @return Configuration assembled in a {@link StringBuilder}. 
*/ StringBuilder assembleConfig() { StringBuilder builder = new StringBuilder(); if (hasCrop) { builder.append(cropLeft).append("x").append(cropTop) .append(":").append(cropRight).append("x").append(cropBottom); if (isSmart) { builder.append("/").append(PART_SMART); } else { if (cropHorizontalAlign != null) { builder.append("/").append(cropHorizontalAlign.value); } if (cropVerticalAlign != null) { builder.append("/").append(cropVerticalAlign.value); } } builder.append("/"); } if (hasResize) { if (flipHorizontally) { builder.append("-"); } builder.append(resizeWidth).append("x"); if (flipVertically) { builder.append("-"); } builder.append(resizeHeight); if (fitIn) { builder.append("/").append(PART_FIT_IN); } builder.append("/"); } if (filters != null) { builder.append(PART_FILTERS); for (String filter : filters) { builder.append(":").append(filter); } builder.append("/"); } return builder; } public static String brightness(int amount) { if (amount < -100 || amount > 100) { throw new IllegalArgumentException("Amount must be between -100 and 100, inclusive."); } return new StringBuilder(FILTER_BRIGHTNESS).append("(").append(amount).append(")").toString(); } public static String contrast(int amount) { if (amount < -100 || amount > 100) { throw new IllegalArgumentException("Amount must be between -100 and 100, inclusive."); } return new StringBuilder(FILTER_CONTRAST).append("(").append(amount).append(")").toString(); } public static String noise(int amount) { if (amount < 0 || amount > 100) { throw new IllegalArgumentException("Amount must be between 0 and 100, inclusive"); } return new StringBuilder(FILTER_NOISE).append("(").append(amount).append(")").toString(); } public static String quality(int amount) { if (amount < 0 || amount > 100) { throw new IllegalArgumentException("Amount must be between 0 and 100, inclusive."); } return new StringBuilder(FILTER_QUALITY).append("(").append(amount).append(")").toString(); } public static String rgb(int r, int g, int b) { if 
(r < -100 || r > 100) { throw new IllegalArgumentException("Redness value must be between -100 and 100, inclusive."); } if (g < -100 || g > 100) { throw new IllegalArgumentException("Greenness value must be between -100 and 100, inclusive."); } if (b < -100 || b > 100) { throw new IllegalArgumentException("Blueness value must be between -100 and 100, inclusive."); } return new StringBuilder(FILTER_RGB).append("(") .append(r).append(",") .append(g).append(",") .append(b).append(")") .toString(); } /** * This filter adds rounded corners to the image using the white as the background. * * @param radius amount of pixels to use as radius. * @return String representation of this filter. */ public static String roundCorner(int radius) { return roundCorner(radius, 0xFFFFFF); } /** * This filter adds rounded corners to the image using the specified color as the background. * * @param radius amount of pixels to use as radius. * @param color fill color for clipped region. * @return String representation of this filter. */ public static String roundCorner(int radius, int color) { return roundCorner(radius, 0, color); } /** * This filter adds rounded corners to the image using the specified color as the background. * * @param radiusInner amount of pixels to use as radius. * @param radiusOuter specifies the second value for the ellipse used for the radius. Use 0 for * no value. * @param color fill color for clipped region. * @return String representation of this filter. 
*/ public static String roundCorner(int radiusInner, int radiusOuter, int color) { if (radiusInner < 1) { throw new IllegalArgumentException("Radius must be greater than zero."); } if (radiusOuter < 0) { throw new IllegalArgumentException("Outer radius must be greater than or equal to zero."); } StringBuilder builder = new StringBuilder(FILTER_ROUND_CORNER).append("(").append(radiusInner); if (radiusOuter > 0) { builder.append("|").append(radiusOuter); } return builder.append(",") .append((color & 0xFF0000) >>> 16).append(",") .append((color & 0xFF00) >>> 8).append(",") .append(color & 0xFF).append(")") .toString(); } public static String watermark(String imageUrl) { return watermark(imageUrl, 0, 0); } public static String watermark(Thumbor image) { return watermark(image, 0, 0); } public static String watermark(String imageUrl, int x, int y) { return watermark(imageUrl, x, y, 0); } public static String watermark(Thumbor image, int x, int y) { if (image == null) { throw new IllegalArgumentException("Thumbor image must not be null."); } return watermark(image.toString(), x, y, 0); } public static String watermark(String imageUrl, int x, int y, int transparency) { if (imageUrl == null || imageUrl.length() == 0) { throw new IllegalArgumentException("Image URL must not be blank."); } if (transparency < 0 || transparency > 100) { throw new IllegalArgumentException("Transparency must be between 0 and 100, inclusive."); } return new StringBuilder(FILTER_WATERMARK).append("(") .append(stripProtocolAndParams(imageUrl)).append(",") .append(x).append(",") .append(y).append(",") .append(transparency).append(")") .toString(); } public static String watermark(Thumbor image, int x, int y, int transparency) { if (image == null) { throw new IllegalArgumentException("Thumbor image must not be null."); } return watermark(image.toString(), x, y, transparency); } public static String sharpen(float amount, float radius, boolean luminanceOnly) { return new 
StringBuilder(FILTER_SHARPEN).append("(") .append(amount).append(",") .append(radius).append(",") .append(luminanceOnly).append(")") .toString(); } /** * This filter permit to return an image sized exactly as requested wherever is its ratio by * filling with chosen color the missing parts. Usually used with "fit-in" or "adaptive-fit-in" * * @param color integer representation of color. * @return String representation of this filter. */ public static String fill(int color) { final String colorCode = Integer.toHexString(color & 0xFFFFFF); // Strip alpha return new StringBuilder(FILTER_FILL).append("(").append(colorCode).append(")").toString(); } }
package com.telesign.rest; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import okhttp3.*; import okhttp3.internal.Version; import okio.Buffer; import javax.crypto.Mac; import javax.crypto.spec.SecretKeySpec; import java.io.IOException; import java.net.Proxy; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.*; import java.util.concurrent.TimeUnit; public class RestClient { private static final String userAgent = String.format("TeleSignSDK/java-%s Java/%s %s", BuildConfig.VERSION, System.getProperty("java.version"), Version.userAgent()); private String customerId; private String secretKey; private String apiHost; private OkHttpClient client; public RestClient(String customerId, String secretKey) { this(customerId, secretKey, null, null, null, null, null, null, null); } public RestClient(String customerId, String secretKey, String apiHost) { this(customerId, secretKey, apiHost, null, null, null, null, null, null); } public RestClient(String customerId, String secretKey, String apiHost, Long connectTimeout, Long readTimeout, Long writeTimeout, Proxy proxy, String proxyUserName, String proxyPassword) { this.customerId = customerId; this.secretKey = secretKey; if (apiHost == null) { this.apiHost = "https://rest-api.telesign.com"; } else { this.apiHost = apiHost; } if (connectTimeout == null) { connectTimeout = 10L; } if (readTimeout == null) { readTimeout = 10L; } if (writeTimeout == null) { writeTimeout = 10L; } OkHttpClient.Builder okHttpClientBuilder = new OkHttpClient.Builder() .connectTimeout(connectTimeout, TimeUnit.SECONDS) .readTimeout(readTimeout, TimeUnit.SECONDS) .writeTimeout(writeTimeout, TimeUnit.SECONDS); if (proxy != null) { okHttpClientBuilder.proxy(proxy); if (proxyUserName != null && proxyPassword != null) { Authenticator proxyAuthenticator = new Authenticator() { public Request authenticate(Route route, Response response) throws IOException { String credential 
= Credentials.basic(proxyUserName, proxyPassword); return response.request().newBuilder() .header("Proxy-Authorization", credential) .build(); } }; okHttpClientBuilder.proxyAuthenticator(proxyAuthenticator); } } this.client = okHttpClientBuilder.build(); } public static class TelesignResponse { public int statusCode; public Map<String, List<String>> headers; public String body; public boolean ok; public JsonObject json; public TelesignResponse(Response okHttpResponse) { this.statusCode = okHttpResponse.code(); this.headers = okHttpResponse.headers().toMultimap(); this.ok = okHttpResponse.isSuccessful(); try { this.body = okHttpResponse.body().string(); try { this.json = new JsonParser().parse(body).getAsJsonObject(); } catch (IllegalStateException e) { this.json = new JsonObject(); } } catch (IOException e) { this.body = ""; this.json = new JsonObject(); } } } public static class TelesignException extends RuntimeException { public TelesignException(String message, Throwable cause) { super(message, cause); } } public static Map<String, String> generateTelesignHeaders(String customerId, String secretKey, String methodName, String resource, String urlEncodedFields, String dateRfc2616, String nonce, String userAgent) { try { if (dateRfc2616 == null) { dateRfc2616 = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now(ZoneId.of("GMT"))); } if (nonce == null) { nonce = UUID.randomUUID().toString(); } String contentType = ""; if (methodName.equals("POST") || methodName.equals("PUT")) { contentType = "application/x-www-form-urlencoded"; } String authMethod = "HMAC-SHA256"; StringBuilder string_to_sign_builder = new StringBuilder(); string_to_sign_builder.append(String.format("%s", methodName)); string_to_sign_builder.append(String.format("\n%s", contentType)); string_to_sign_builder.append(String.format("\n%s", dateRfc2616)); string_to_sign_builder.append(String.format("\nx-ts-auth-method:%s", authMethod)); 
string_to_sign_builder.append(String.format("\nx-ts-nonce:%s", nonce)); if (!contentType.isEmpty() && !urlEncodedFields.isEmpty()) { string_to_sign_builder.append(String.format("\n%s", urlEncodedFields)); } string_to_sign_builder.append(String.format("\n%s", resource)); String string_to_sign = string_to_sign_builder.toString(); String signature; Mac sha256_HMAC = Mac.getInstance("HmacSHA256"); SecretKeySpec secret_key = new SecretKeySpec(Base64.getDecoder().decode(secretKey), "HmacSHA256"); sha256_HMAC.init(secret_key); signature = Base64.getEncoder().encodeToString(sha256_HMAC.doFinal(string_to_sign.getBytes())); String authorization = String.format("TSA %s:%s", customerId, signature); Map<String, String> headers = new HashMap<>(); headers.put("Authorization", authorization); headers.put("Date", dateRfc2616); headers.put("Content-Type", contentType); headers.put("x-ts-auth-method", authMethod); headers.put("x-ts-nonce", nonce); if (userAgent != null) { headers.put("User-Agent", userAgent); } return headers; } catch (Exception e) { throw new RuntimeException(e); } } public TelesignResponse post(String resource, Map<String, String> params) { return this.execute("POST", resource, params); } public TelesignResponse get(String resource, Map<String, String> params) { return this.execute("GET", resource, params); } public TelesignResponse put(String resource, Map<String, String> params) { return this.execute("PUT", resource, params); } public TelesignResponse delete(String resource, Map<String, String> params) { return this.execute("DELETE", resource, params); } private TelesignResponse execute(String methodName, String resource, Map<String, String> params) { try { if (params == null) { params = new HashMap<>(); } String resourceUri = String.format("%s%s", this.apiHost, resource); FormBody formBody = null; String urlEncodedFields = ""; if (methodName.equals("POST") || methodName.equals("PUT")) { FormBody.Builder formBuilder = new FormBody.Builder(); for 
(Map.Entry<String, String> entry : params.entrySet()) { formBuilder.add(entry.getKey(), entry.getValue()); } formBody = formBuilder.build(); Buffer buffer = new Buffer(); formBody.writeTo(buffer); urlEncodedFields = buffer.readUtf8(); } Map<String, String> headers = RestClient.generateTelesignHeaders( this.customerId, this.secretKey, methodName, resource, urlEncodedFields, null, null, RestClient.userAgent); Request.Builder requestBuilder = new Request.Builder() .url(resourceUri) .method(methodName, formBody); for (Map.Entry<String, String> entry : headers.entrySet()) { requestBuilder.addHeader(entry.getKey(), entry.getValue()); } Request request = requestBuilder.build(); TelesignResponse telesignResponse; try (Response okhttpResponse = this.client.newCall(request).execute()) { telesignResponse = new TelesignResponse(okhttpResponse); } return telesignResponse; } catch (Exception e) { throw new RestClient.TelesignException("An error occurred executing the request.", e); } } }
package com.toomasr.sgf4j.gui; import java.io.File; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import com.toomasr.sgf4j.Sgf; import com.toomasr.sgf4j.SgfProperties; import com.toomasr.sgf4j.board.BoardStone; import com.toomasr.sgf4j.board.CoordinateSquare; import com.toomasr.sgf4j.board.GuiBoardListener; import com.toomasr.sgf4j.board.StoneState; import com.toomasr.sgf4j.board.VirtualBoard; import com.toomasr.sgf4j.filetree.FileTreeView; import com.toomasr.sgf4j.movetree.EmptyTriangle; import com.toomasr.sgf4j.movetree.GlueStone; import com.toomasr.sgf4j.movetree.GlueStoneType; import com.toomasr.sgf4j.movetree.TreeStone; import com.toomasr.sgf4j.parser.Game; import com.toomasr.sgf4j.parser.GameNode; import com.toomasr.sgf4j.parser.Util; import com.toomasr.sgf4j.properties.AppState; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.geometry.Insets; import javafx.geometry.Pos; import javafx.scene.control.Button; import javafx.scene.control.Label; import javafx.scene.control.ScrollPane; import javafx.scene.control.ScrollPane.ScrollBarPolicy; import javafx.scene.control.TextArea; import javafx.scene.control.TextField; import javafx.scene.control.TreeItem; import javafx.scene.control.TreeView; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyEvent; import javafx.scene.input.MouseEvent; import javafx.scene.layout.GridPane; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.layout.TilePane; import javafx.scene.layout.VBox; public class MainUI { private Button nextButton; private GameNode currentMove = null; private GameNode prevMove = null; private Game game; private VirtualBoard virtualBoard; private BoardStone[][] board; private GridPane movePane; private GridPane boardPane = new GridPane(); private Map<GameNode, TreeStone> nodeToTreeStone = new HashMap<>(); private TextArea 
commentArea; private Button previousButton; private ScrollPane treePaneScrollPane; private Label whitePlayerName; private Label blackPlayerName; private Label label; public MainUI() { board = new BoardStone[19][19]; virtualBoard = new VirtualBoard(); virtualBoard.addBoardListener(new GuiBoardListener(this)); } public Pane buildUI() throws Exception { Insets paneInsets = new Insets(5, 0, 0, 0); VBox leftVBox = new VBox(5); leftVBox.setPadding(paneInsets); VBox centerVBox = new VBox(5); centerVBox.setPadding(paneInsets); VBox rightVBox = new VBox(5); rightVBox.setPadding(paneInsets); // constructing the left box VBox fileTreePane = generateFileTreePane(); leftVBox.getChildren().addAll(fileTreePane); // constructing the center box centerVBox.setMaxWidth(640); centerVBox.setMinWidth(640); boardPane = generateBoardPane(boardPane); TilePane buttonPane = generateButtonPane(); ScrollPane treePane = generateMoveTreePane(); centerVBox.getChildren().addAll(boardPane, buttonPane, treePane); // constructing the right box VBox gameMetaInfo = generateGameMetaInfo(); TextArea commentArea = generateCommentPane(); rightVBox.getChildren().addAll(gameMetaInfo, commentArea); HBox rootHBox = new HBox(); enableKeyboardShortcuts(rootHBox); rootHBox.getChildren().addAll(leftVBox, centerVBox, rightVBox); VBox rootVbox = new VBox(); HBox statusBar = generateStatusBar(); rootVbox.getChildren().addAll(rootHBox, statusBar); return rootVbox; } private HBox generateStatusBar() { HBox rtrn = new HBox(); label = new Label("MainUI loaded"); rtrn.getChildren().add(label); return rtrn; } public void updateStatus(String update) { this.label.setText(update); } public void initGame() { String game = "src/main/resources/game.sgf"; Path path = Paths.get(game); // in development it is nice to have a game open on start if (path.toFile().exists()) { initializeGame(Paths.get(game)); } } private VBox generateGameMetaInfo() { VBox vbox = new VBox(); vbox.setMinWidth(250); GridPane pane = new GridPane(); Label 
blackPlayerLabel = new Label("Black:"); GridPane.setConstraints(blackPlayerLabel, 1, 0); blackPlayerName = new Label("Unknown"); GridPane.setConstraints(blackPlayerName, 2, 0); Label whitePlayerLabel = new Label("White:"); GridPane.setConstraints(whitePlayerLabel, 1, 1); whitePlayerName = new Label("Unknown"); GridPane.setConstraints(whitePlayerName, 2, 1); pane.getChildren().addAll(blackPlayerLabel, blackPlayerName, whitePlayerLabel, whitePlayerName); vbox.getChildren().add(pane); return vbox; } private TextArea generateCommentPane() { commentArea = new TextArea(); commentArea.setFocusTraversable(false); commentArea.setWrapText(true); commentArea.setPrefSize(300, 600); return commentArea; } private void initializeGame(Path pathToSgf) { this.game = Sgf.createFromPath(pathToSgf); currentMove = this.game.getRootNode(); prevMove = null; // reset our virtual board and actual board virtualBoard = new VirtualBoard(); virtualBoard.addBoardListener(new GuiBoardListener(this)); initEmptyBoard(); // construct the tree of the moves nodeToTreeStone = new HashMap<>(); movePane.getChildren().clear(); movePane.add(new EmptyTriangle(), 0, 0); GameNode rootNode = game.getRootNode(); populateMoveTreePane(rootNode, 0); showMarkersForMove(rootNode); showCommentForMove(rootNode); showMetaInfoForGame(this.game); } private void showMetaInfoForGame(Game game) { whitePlayerName.setText(game.getProperty(SgfProperties.WHITE_PLAYER_NAME)); blackPlayerName.setText(game.getProperty(SgfProperties.BLACK_PLAYER_NAME)); } public void initEmptyBoard() { generateBoardPane(boardPane); placePreGameStones(game); } private void placePreGameStones(Game game) { String blackStones = game.getProperty("AB", ""); String whiteStones = game.getProperty("AW", ""); placePreGameStones(blackStones, whiteStones); } private void placePreGameStones(GameNode node) { String blackStones = node.getProperty("AB", ""); String whiteStones = node.getProperty("AW", ""); placePreGameStones(blackStones, whiteStones); } private 
void placePreGameStones(String addBlack, String addWhite) { if (addBlack.length() > 0) { String[] blackStones = addBlack.split(","); for (int i = 0; i < blackStones.length; i++) { int[] moveCoords = Util.alphaToCoords(blackStones[i]); virtualBoard.placeStone(StoneState.BLACK, moveCoords[0], moveCoords[1]); } } if (addWhite.length() > 0) { String[] whiteStones = addWhite.split(","); for (int i = 0; i < whiteStones.length; i++) { int[] moveCoords = Util.alphaToCoords(whiteStones[i]); virtualBoard.placeStone(StoneState.WHITE, moveCoords[0], moveCoords[1]); } } } private void populateMoveTreePane(GameNode node, int depth) { // we draw out only actual moves if (node.isMove()) { TreeStone treeStone = TreeStone.create(node); movePane.add(treeStone, node.getMoveNo(), node.getVisualDepth()); nodeToTreeStone.put(node, treeStone); treeStone.addEventHandler(MouseEvent.MOUSE_CLICKED, new EventHandler<MouseEvent>() { @Override public void handle(MouseEvent event) { TreeStone stone = (TreeStone) event.getSource(); fastForwardTo(stone.getMove()); } }); } // and recursively draw the next node on this line of play if (node.getNextNode() != null) { populateMoveTreePane(node.getNextNode(), depth + node.getVisualDepth()); } // populate the children also if (node.hasChildren()) { Set<GameNode> children = node.getChildren(); // will determine whether the glue stone should be a single // diagonal or a multiple (diagonal and vertical) GlueStoneType gStoneType = children.size() > 1 ? 
GlueStoneType.MULTIPLE : GlueStoneType.DIAGONAL; for (Iterator<GameNode> ite = children.iterator(); ite.hasNext();) { GameNode childNode = ite.next(); // the last glue shouldn't be a MULTIPLE if (GlueStoneType.MULTIPLE.equals(gStoneType) && !ite.hasNext()) { gStoneType = GlueStoneType.DIAGONAL; } // the visual lines can also be under a the first triangle int nodeVisualDepth = node.getVisualDepth(); int moveNo = node.getMoveNo(); if (moveNo == -1) { moveNo = 0; nodeVisualDepth = 0; } // also draw all the "missing" glue stones for (int i = nodeVisualDepth + 1; i < childNode.getVisualDepth(); i++) { movePane.add(new GlueStone(GlueStoneType.VERTICAL), moveNo, i); } // glue stone for the node movePane.add(new GlueStone(gStoneType), moveNo, childNode.getVisualDepth()); // and recursively draw the actual node populateMoveTreePane(childNode, depth + childNode.getVisualDepth()); } } } /* * Generates the boilerplate for the move tree pane. The * pane is actually populated during game initialization. 
*/
    /**
     * Builds the scrollable pane that hosts the move-tree visualization.
     * The tree itself is laid out in {@code movePane} (a GridPane); the
     * surrounding ScrollPane always shows a horizontal scrollbar so long
     * games can be scrolled through.
     *
     * @return the ScrollPane wrapping the (initially empty) move tree grid
     */
    private ScrollPane generateMoveTreePane() {
        movePane = new GridPane();
        movePane.setPadding(new Insets(0, 0, 0, 0));
        movePane.setStyle("-fx-background-color: white");
        treePaneScrollPane = new ScrollPane(movePane);
        treePaneScrollPane.setPrefHeight(150);
        treePaneScrollPane.setHbarPolicy(ScrollBarPolicy.ALWAYS);
        treePaneScrollPane.setVbarPolicy(ScrollBarPolicy.AS_NEEDED);
        return treePaneScrollPane;
    }

    /**
     * Jumps the board state directly to the given move: clears every stone,
     * re-places any pre-game (setup) stones, resets highlight/markers for the
     * current move and then asks the virtual board to replay up to {@code move}.
     *
     * @param move the game node to fast-forward the position to
     */
    private void fastForwardTo(GameNode move) {
        // clear the board
        for (int i = 0; i < board.length; i++) {
            for (int j = 0; j < board[i].length; j++) {
                board[i][j].removeStone();
            }
        }
        placePreGameStones(game);
        deHighLightStoneInTree(currentMove);
        removeMarkersForNode(currentMove);
        virtualBoard.fastForwardTo(move);
        highLightStoneOnBoard(move);
    }

    /**
     * Builds the left-hand file chooser pane: a label plus a file tree.
     * Double-clicking a regular file loads it as an SGF game and records
     * the chosen path in the application state.
     *
     * @return the VBox containing the label and the file tree
     */
    private VBox generateFileTreePane() {
        VBox vbox = new VBox();
        vbox.setMinWidth(250);
        TreeView<File> treeView = new FileTreeView();
        treeView.setFocusTraversable(false);
        Label label = new Label("Choose SGF File");
        vbox.getChildren().addAll(label, treeView);
        treeView.setOnMouseClicked(new EventHandler<MouseEvent>() {
            @Override
            public void handle(MouseEvent event) {
                if (event.getClickCount() == 2) {
                    TreeItem<File> item = treeView.getSelectionModel().getSelectedItem();
                    // NOTE(review): item can be null when the double-click lands on
                    // empty tree area — would NPE here; confirm and guard upstream.
                    File file = item.getValue().toPath().toFile();
                    if (file.isFile()) {
                        initializeGame(item.getValue().toPath());
                    }
                    // remembered even for directories — presumably intentional so the
                    // last browsed location is restored; TODO confirm
                    AppState.getInstance().addProperty(AppState.CURRENT_FILE, file.getAbsolutePath());
                }
            }
        });
        return vbox;
    }

    /**
     * Builds the navigation button strip: Previous button, a read-only
     * move-number field, and a Next button, stacked via a single-column
     * TilePane.
     *
     * NOTE(review): moveNoField is initialized to "0" but is a local variable
     * and is never updated by the next/previous handlers — the displayed move
     * number stays at 0. Verify whether it should be promoted to a field and
     * refreshed on navigation.
     *
     * @return the TilePane holding the navigation controls
     */
    private TilePane generateButtonPane() {
        TilePane pane = new TilePane();
        pane.setAlignment(Pos.CENTER);
        pane.getStyleClass().add("bordered");
        TextField moveNoField = new TextField("0");
        moveNoField.setFocusTraversable(false);
        moveNoField.setMaxWidth(40);
        moveNoField.setEditable(false);
        nextButton = new Button("Next");
        nextButton.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent e) {
                handleNextPressed();
            }
        });
        previousButton = new Button("Previous");
        previousButton.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent e) {
                handlePreviousPressed();
            }
        });
        pane.setPrefColumns(1);
        pane.getChildren().add(previousButton);
        pane.getChildren().add(moveNoField);
        pane.getChildren().add(nextButton);
        return pane;
    }

    /**
     * Advances to the next move on the main line (if any), plays it on the
     * virtual board and scrolls the move tree so the new move is visible.
     */
    private void handleNextPressed() {
        if (currentMove.getNextNode() != null) {
            prevMove = currentMove;
            currentMove = currentMove.getNextNode();
            virtualBoard.makeMove(currentMove, prevMove);
            // scroll the scrollpane to make
            // the highlighted move visible
            ensureVisibleForActiveTreeNode(currentMove);
        }
    }

    /**
     * Descends into the first child variation of the current move (if the
     * node branches), plays it and scrolls the tree to keep it visible.
     * Bound to the DOWN arrow key in {@link #enableKeyboardShortcuts}.
     */
    private void handleNextBranch() {
        if (currentMove.hasChildren()) {
            prevMove = currentMove;
            currentMove = currentMove.getChildren().iterator().next();
            virtualBoard.makeMove(currentMove, prevMove);
            // scroll the scrollpane to make
            // the highlighted move visible
            ensureVisibleForActiveTreeNode(currentMove);
        }
    }

    /**
     * Steps back to the parent of the current move (if any) and undoes the
     * last move on the virtual board.
     */
    public void handlePreviousPressed() {
        if (currentMove.getParentNode() != null) {
            prevMove = currentMove;
            currentMove = currentMove.getParentNode();
            virtualBoard.undoMove(prevMove, currentMove);
        }
    }

    /**
     * Updates all UI state after a move is played: board highlight, tree
     * highlight, setup stones, comment area and board markers.
     *
     * NOTE(review): showCommentForMove/showMarkersForMove are called
     * unconditionally and dereference {@code move}; if {@code move} can be
     * null here (the null checks above suggest it can), this NPEs — verify.
     *
     * @param move     the move that was just played (becomes currentMove)
     * @param prevMove the move played before it, may be null at game start
     */
    public void playMove(GameNode move, GameNode prevMove) {
        this.currentMove = move;
        this.prevMove = prevMove;
        // we actually have a previous move!
        if (prevMove != null) {
            // de-highlight previously highlighted move
            if (prevMove.isMove() && !prevMove.isPass()) {
                deHighLightStoneOnBoard(prevMove);
            }
            // even non-moves can have markers
            removeMarkersForNode(prevMove);
        }
        if (move != null && !move.isPass() && !move.isPlacementMove()) {
            highLightStoneOnBoard(move);
        }
        // highlight stone in the tree pane
        deHighLightStoneInTree(prevMove);
        highLightStoneInTree(move);
        // AB/AW are SGF setup properties (add black/white stones)
        if (move != null && (move.getProperty("AB") != null || move.getProperty("AW") != null)) {
            placePreGameStones(move);
        }
        // show the associated comment
        showCommentForMove(move);
        // handle the prev and new markers
        showMarkersForMove(move);
        nextButton.requestFocus();
    }

    /**
     * Reverses the UI effects of {@code move}, restoring markers, comment and
     * highlights for {@code prevMove} (the node we are stepping back to).
     *
     * @param move     the move being undone
     * @param prevMove the node that becomes current after the undo
     */
    public void undoMove(GameNode move, GameNode prevMove) {
        this.currentMove = prevMove;
        this.prevMove = move;
        if (move != null) {
            removeMarkersForNode(move);
        }
        if (prevMove != null) {
            showMarkersForMove(prevMove);
            showCommentForMove(prevMove);
            if (prevMove.isMove() && !prevMove.isPass())
                highLightStoneOnBoard(prevMove);
        }
        deHighLightStoneInTree(move);
        highLightStoneInTree(prevMove);
        ensureVisibleForActiveTreeNode(prevMove);
        // rather have previous move button have focus
        previousButton.requestFocus();
    }

    /**
     * Scrolls the move-tree ScrollPane so the TreeStone for {@code move} is
     * visible, both horizontally and vertically.
     *
     * NOTE(review): the constants 30 (presumably the pixel size of one tree
     * cell), 11 and 21 (presumably visible-cell counts/margins) are magic
     * numbers — confirm against the tree pane geometry and name them.
     *
     * @param move the move whose tree node should be scrolled into view
     */
    private void ensureVisibleForActiveTreeNode(GameNode move) {
        if (move != null && move.isMove()) {
            TreeStone stone = nodeToTreeStone.get(move);
            // the move tree is not yet fully operational and some
            // points don't exist in the map yet
            if (stone == null)
                return;
            double width = treePaneScrollPane.getContent().getBoundsInLocal().getWidth();
            double x = stone.getBoundsInParent().getMaxX();
            double scrollTo = ((x) - 11 * 30) / (width - 21 * 30);
            treePaneScrollPane.setHvalue(scrollTo);
            // adjust the vertical scroll
            double height = treePaneScrollPane.getContent().getBoundsInLocal().getHeight();
            double y = stone.getBoundsInParent().getMaxY();
            double scrollToY = y / height;
            // nodes on the trunk (visual depth 0) pin the view to the top
            if (move.getVisualDepth() == 0) {
                scrollToY = 0d;
            }
            treePaneScrollPane.setVvalue(scrollToY);
        }
    }

    /**
     * Highlights the tree-pane stone corresponding to {@code move} and gives
     * it keyboard focus. Tolerates moves that have no tree node yet.
     *
     * NOTE(review): inconsistent with deHighLightStoneInTree, which throws
     * when the node is missing — consider aligning the two policies.
     */
    private void highLightStoneInTree(GameNode move) {
        TreeStone stone = nodeToTreeStone.get(move);
        // can remove the null check at one point when the
        // tree is fully implemented
        if (stone != null) {
            stone.highLight();
            stone.requestFocus();
        }
    }

    /**
     * Removes the highlight from the tree-pane stone of {@code node}.
     * Throws if a move node unexpectedly has no TreeStone mapping (see the
     * note on {@link #highLightStoneInTree} about the asymmetry).
     */
    private void deHighLightStoneInTree(GameNode node) {
        if (node != null && node.isMove()) {
            TreeStone stone = nodeToTreeStone.get(node);
            if (stone != null) {
                stone.deHighLight();
            } else {
                throw new RuntimeException("Unable to find node for move " + node);
            }
        }
    }

    /**
     * Displays the SGF comment (property "C") of {@code move} in the comment
     * area, undoing parser-side escaping placeholders and the SGF text-type
     * escape rules (see http://www.red-bean.com/sgf/sgf4.html#text).
     *
     * NOTE(review): the second replaceAll below is visibly corrupted source —
     * its arguments have fused with what was once a standalone comment line
     * (likely a lost "#####" placeholder replacement). Recover the original
     * statement from version control; preserved verbatim here.
     */
    private void showCommentForMove(GameNode move) {
        String comment = move.getProperty("C");
        if (comment == null) {
            comment = "";
        }
        // some helpers I used for parsing needs to be undone - see the Parser.java
        // in sgf4j project
        comment = comment.replaceAll("@@@@@", "\\\\\\[");
        comment = comment.replaceAll(" // lets do some replacing - see http://www.red-bean.com/sgf/sgf4.html#text comment = comment.replaceAll("\\\\\n", "");
        comment = comment.replaceAll("\\\\:", ":");
        comment = comment.replaceAll("\\\\\\]", "]");
        comment = comment.replaceAll("\\\\\\[", "[");
        commentArea.setText(comment);
    }

    /**
     * Draws the board markers attached to {@code move}: legacy "L" labels
     * (lettered automatically A, B, C, ...) and FF4 "LB" labels (explicit
     * point/text pairs).
     *
     * @param move the node whose marker properties are rendered
     */
    private void showMarkersForMove(GameNode move) {
        // the L property is actually not used in FF3 and FF4
        // but I own many SGFs that still have it
        String markerProp = move.getProperty("L");
        if (markerProp != null) {
            int alphaIdx = 0;
            // "][" separates the individual point values in the raw property
            String[] markers = markerProp.split("\\]\\[");
            for (int i = 0; i < markers.length; i++) {
                int[] coords = Util.alphaToCoords(markers[i]);
                board[coords[0]][coords[1]].addOverlayText(Util.alphabet[alphaIdx++]);
            }
        }
        // also handle the LB labels
        Map<String, String> labels = Util.extractLabels(move.getProperty("LB"));
        for (Iterator<Map.Entry<String, String>> ite = labels.entrySet().iterator(); ite.hasNext();) {
            Map.Entry<String, String> entry = ite.next();
            int[] coords = Util.alphaToCoords(entry.getKey());
            board[coords[0]][coords[1]].addOverlayText(entry.getValue());
        }
    }

    /**
     * Removes from the board all markers that {@code node} placed (both the
     * legacy "L" labels and the FF4 "LB" labels). Mirror of
     * {@link #showMarkersForMove}.
     *
     * @param node the node whose marker overlays are cleared
     */
    private void removeMarkersForNode(GameNode node) {
        // the L property is actually not used in FF3 and FF4
        // but I own many SGFs that still have it
        String markerProp = node.getProperty("L");
        if (markerProp != null) {
            String[] markers = markerProp.split("\\]\\[");
            for (int i = 0; i < markers.length; i++) {
                int[] coords = Util.alphaToCoords(markers[i]);
                board[coords[0]][coords[1]].removeOverlayText();
            }
        }
        // also handle the LB labels
        Map<String, String> labels = Util.extractLabels(node.getProperty("LB"));
        for (Iterator<Map.Entry<String, String>> ite = labels.entrySet().iterator(); ite.hasNext();) {
            Map.Entry<String, String> entry = ite.next();
            int[] coords = Util.alphaToCoords(entry.getKey());
            board[coords[0]][coords[1]].removeOverlayText();
        }
    }

    /**
     * Highlights the on-board stone at the coordinates encoded in
     * {@code move}'s SGF move string.
     */
    private void highLightStoneOnBoard(GameNode move) {
        String currentMove = move.getMoveString();
        int[] moveCoords = Util.alphaToCoords(currentMove);
        board[moveCoords[0]][moveCoords[1]].highLightStone();
    }

    /**
     * Removes the highlight from the on-board stone at the coordinates
     * encoded in {@code prevMove}'s SGF move string.
     */
    private void deHighLightStoneOnBoard(GameNode prevMove) {
        String prevMoveAsStr = prevMove.getMoveString();
        int[] moveCoords = Util.alphaToCoords(prevMoveAsStr);
        board[moveCoords[0]][moveCoords[1]].deHighLightStone();
    }

    /**
     * (Re)builds the 21x21 board grid: the outer ring (row/column 0 and 20)
     * is coordinate labels, the inner 19x19 cells are BoardStone squares that
     * are also stored in the {@code board} array.
     *
     * NOTE(review): the row-allocation guard {@code i > 1 && i < 20} skips
     * {@code board[0]} (i == 1) while the inner loop still writes
     * {@code board[0][j - 1]} — this only works if board[0] is allocated
     * elsewhere; verify, the guard was likely meant to be {@code i > 0}.
     *
     * @param boardPane the GridPane to populate (cleared first)
     * @return the same GridPane, populated
     */
    private GridPane generateBoardPane(GridPane boardPane) {
        boardPane.getChildren().clear();
        for (int i = 0; i < 21; i++) {
            if (i > 1 && i < 20) {
                board[i - 1] = new BoardStone[19];
            }
            for (int j = 0; j < 21; j++) {
                if (i == 0 || j == 0 || i == 20 || j == 20) {
                    CoordinateSquare btn = new CoordinateSquare(i, j);
                    boardPane.add(btn, i, j);
                } else {
                    BoardStone btn = new BoardStone(i, j);
                    boardPane.add(btn, i, j);
                    board[i - 1][j - 1] = btn;
                }
            }
        }
        return boardPane;
    }

    /**
     * Wires the arrow-key navigation shortcuts onto the given container:
     * LEFT = previous move, RIGHT = next move, DOWN = first child variation.
     *
     * @param topHBox the node that receives the key-pressed handler
     */
    private void enableKeyboardShortcuts(HBox topHBox) {
        topHBox.setOnKeyPressed(new EventHandler<KeyEvent>() {
            @Override
            public void handle(KeyEvent event) {
                if (event.getEventType().equals(KeyEvent.KEY_PRESSED)) {
                    if (event.getCode().equals(KeyCode.LEFT)) {
                        handlePreviousPressed();
                    } else if (event.getCode().equals(KeyCode.RIGHT)) {
                        handleNextPressed();
                    } else if (event.getCode().equals(KeyCode.DOWN)) {
                        handleNextBranch();
                    }
                }
            }
        });
    }

    /**
     * @return the live 19x19 board square array (not a copy — callers share
     *         the internal state)
     */
    public BoardStone[][] getBoard() {
        return this.board;
    }
}