package io.compgen.ngsutils.cli.bam;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMFileWriter;
import htsjdk.samtools.SAMFileWriterFactory;
import htsjdk.samtools.SAMProgramRecord;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamInputResource;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import io.compgen.cmdline.annotation.Command;
import io.compgen.cmdline.annotation.Exec;
import io.compgen.cmdline.annotation.Option;
import io.compgen.cmdline.annotation.UnnamedArg;
import io.compgen.cmdline.exceptions.CommandArgumentException;
import io.compgen.cmdline.impl.AbstractCommand;
import io.compgen.common.progress.FileChannelStats;
import io.compgen.common.progress.ProgressMessage;
import io.compgen.common.progress.ProgressUtils;
import io.compgen.ngsutils.NGSUtils;
import io.compgen.ngsutils.bam.Orientation;
import io.compgen.ngsutils.bam.filter.BamFilter;
import io.compgen.ngsutils.bam.filter.BedExclude;
import io.compgen.ngsutils.bam.filter.BedInclude;
import io.compgen.ngsutils.bam.filter.FilterFlags;
import io.compgen.ngsutils.bam.filter.JunctionWhitelist;
import io.compgen.ngsutils.bam.filter.NullFilter;
import io.compgen.ngsutils.bam.filter.PairedFilter;
import io.compgen.ngsutils.bam.filter.RequiredFlags;
import io.compgen.ngsutils.bam.filter.TagMax;
import io.compgen.ngsutils.bam.filter.TagMin;
import io.compgen.ngsutils.bam.filter.UniqueMapping;
import io.compgen.ngsutils.bam.filter.UniqueStart;
import io.compgen.ngsutils.bam.filter.Whitelist;
import io.compgen.ngsutils.bam.support.ReadUtils;
import io.compgen.ngsutils.support.CloseableFinalizer;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Command(name="bam-filter", desc="Filters out reads based upon various criteria", category="bam")
public class BamFilterCli extends AbstractCommand {
private List<String> filenames=null;
private String tmpDir = null;
private String bedExclude = null;
private boolean bedExcludeRequireOne = false;
private boolean bedExcludeRequireBoth = false;
private boolean bedExcludeOnlyWithin = false;
private boolean bedExcludeReadStartPos = false;
private String bedIncludeFile = null;
private boolean bedIncludeRequireOne = false;
private boolean bedIncludeRequireBoth = false;
private boolean bedIncludeOnlyWithin = false;
private boolean bedIncludeReadStartPos = false;
private String junctionWhitelist = null;
private String whitelist = null;
private String failedFilename = null;
private Map<String, Integer> minTagValues = null;
private Map<String, Integer> maxTagValues = null;
private boolean paired = false;
private boolean unique = false;
private boolean uniqueStart = false;
private boolean lenient = false;
private boolean silent = false;
private int filterFlags = 0;
private int requiredFlags = 0;
private Orientation orient = Orientation.UNSTRANDED;
@UnnamedArg(name = "INFILE OUTFILE")
public void setFilename(List<String> filenames) throws CommandArgumentException {
if (filenames.size() != 2) {
throw new CommandArgumentException("You must specify both an input and an output file.");
}
this.filenames = filenames;
}
@Option(desc="Write temporary files here", name="tmpdir", helpValue="dir")
public void setTmpDir(String tmpDir) {
this.tmpDir = tmpDir;
}
@Option(desc="Force sanity checking of read pairing (simple - same chromosome, reversed orientation)", name="paired")
public void setPaired(boolean val) {
this.paired = val;
}
@Option(desc="Require junction-spanning reads to span one of these junctions", name="junction-whitelist", helpValue="fname")
public void setJunctionWhitelist(String junctionWhitelist) {
this.junctionWhitelist = junctionWhitelist;
}
@Option(desc="Keep only read names from this whitelist", name="whitelist", helpValue="fname")
public void setWhitelist(String whitelist) {
this.whitelist = whitelist;
}
@Option(desc="Write failed reads to this file (BAM)", name="failed", helpValue="fname")
public void setFailedFilename(String failedFilename) {
this.failedFilename = failedFilename;
}
@Option(desc="Exclude reads within BED regions", name="bed-exclude", helpValue="fname")
public void setBedExcludeFile(String bedExclude) {
this.bedExclude = bedExclude;
}
@Option(desc="BED Exclude option: only-within", name="bed-excl-only-within")
public void setBEDExcludeWithin(boolean val) {
this.bedExcludeOnlyWithin = val;
}
@Option(desc="BED Exclude option: require-one", name="bed-excl-require-one")
public void setBEDExcludeRequireOne(boolean val) {
this.bedExcludeRequireOne = val;
}
@Option(desc="BED Exclude option: start-pos", name="bed-excl-start-pos")
public void setBEDExcludeReadStartPos(boolean val) {
this.bedExcludeReadStartPos = val;
}
@Option(desc="BED Exclude option: require-both", name="bed-excl-require-both")
public void setBEDExcludeRequireBoth(boolean val) {
this.bedExcludeRequireBoth = val;
}
@Option(desc="Include reads within BED regions", name="bed-include", helpValue="fname")
public void setBedIncludeFile(String bedIncludeFile) {
this.bedIncludeFile = bedIncludeFile;
}
@Option(desc="BED Include option: only-within", name="bed-incl-only-within")
public void setBEDIncludeWithin(boolean val) {
this.bedIncludeOnlyWithin = val;
}
@Option(desc="BED Include option: require-one", name="bed-incl-require-one")
public void setBEDIncludeKeep(boolean val) {
this.bedIncludeRequireOne = val;
}
@Option(desc="BED Include option: require-both", name="bed-incl-require-both")
public void setBEDIncludeRemove(boolean val) {
this.bedIncludeRequireBoth = val;
}
@Option(desc="BED Include option: start-pos", name="bed-incl-start-pos")
public void setBEDIncludeReadStartPos(boolean val) {
this.bedIncludeReadStartPos = val;
}
@Option(desc="Use lenient validation strategy", name="lenient")
public void setLenient(boolean lenient) {
this.lenient = lenient;
}
@Option(desc="Library is in FR orientation (only used for BED filters)", name="library-fr")
public void setLibraryFR(boolean val) {
if (val) {
orient = Orientation.FR;
}
}
@Option(desc="Library is in RF orientation (only used for BED filters)", name="library-rf")
public void setLibraryRF(boolean val) {
if (val) {
orient = Orientation.RF;
}
}
@Option(desc="Library is in unstranded orientation (only used for BED filters, default)", name="library-unstranded")
public void setLibraryUnstranded(boolean val) {
if (val) {
orient = Orientation.UNSTRANDED;
}
}
@Option(desc="Only keep properly paired reads", name="proper-pairs")
public void setProperPairs(boolean val) {
if (val) {
requiredFlags |= ReadUtils.PROPER_PAIR_FLAG;
}
}
@Option(desc="Only keep reads that have one unique mapping (NH,IH tags, for MAPQ filter use --tag-max)", name="unique-mapping")
public void setUniqueMapping(boolean val) {
unique=val;
setMapped(true);
}
@Option(desc="Keep at most one read per position (strand-specific, only for single-read fragments)", name="unique-start")
public void setUniqueStart(boolean val) {
uniqueStart=val;
setMapped(true);
}
@Option(desc="Only keep mapped reads (both reads if paired)", name="mapped")
public void setMapped(boolean val) {
if (val) {
filterFlags |= ReadUtils.READ_UNMAPPED_FLAG | ReadUtils.MATE_UNMAPPED_FLAG;
}
}
@Option(desc="Only keep unmapped reads", name="unmapped")
public void setUnmapped(boolean val) {
if (val) {
requiredFlags |= ReadUtils.READ_UNMAPPED_FLAG;
}
}
@Option(desc="No secondary mappings", name="nosecondary")
public void setNoSecondary(boolean val) {
if (val) {
filterFlags |= ReadUtils.SUPPLEMENTARY_ALIGNMENT_FLAG;
}
}
@Option(desc="No PCR duplicates", name="nopcrdup")
public void setNoPCRDuplicates(boolean val) {
if (val) {
filterFlags |= ReadUtils.DUPLICATE_READ_FLAG;
}
}
@Option(desc="No QC failures", name="noqcfail")
public void setNoQCFail(boolean val) {
if (val) {
filterFlags |= ReadUtils.READ_FAILS_VENDOR_QUALITY_CHECK_FLAG;
}
}
@Option(desc="Filtering flags", name="filter-flags", defaultValue="0")
public void setFilterFlags(int flag) {
filterFlags |= flag;
}
@Option(desc="Required flags", name="required-flags", defaultValue="0")
public void setRequiredFlags(int flag) {
requiredFlags |= flag;
}
@Option(desc="Minimum tag value (tag:val, ex: AS:100)", name="tag-min")
public void setMinTagValue(String val) {
if (minTagValues == null) {
minTagValues = new HashMap<String, Integer>();
}
String key = val.split(":")[0];
Integer value = Integer.parseInt(val.split(":")[1]);
minTagValues.put(key, value);
}
@Option(desc="Maximum tag value (tag:val, ex: NH:0 or MAPQ:0)", name="tag-max")
public void setMaxTagValue(String val) {
if (maxTagValues == null) {
maxTagValues = new HashMap<String, Integer>();
}
String key = val.split(":")[0];
Integer value = Integer.parseInt(val.split(":")[1]);
maxTagValues.put(key, value);
}
@Exec
public void exec() throws CommandArgumentException, IOException {
if (filenames == null || filenames.size()!=2) {
throw new CommandArgumentException("You must specify an input BAM filename and an output BAM filename!");
}
SamReaderFactory readerFactory = SamReaderFactory.makeDefault();
if (lenient) {
readerFactory.validationStringency(ValidationStringency.LENIENT);
} else if (silent) {
readerFactory.validationStringency(ValidationStringency.SILENT);
}
SamReader reader = null;
String name;
FileChannel channel = null;
if (filenames.get(0).equals("-")) {
reader = readerFactory.open(SamInputResource.of(System.in));
name = "<stdin>";
} else {
File f = new File(filenames.get(0));
FileInputStream fis = new FileInputStream(f);
channel = fis.getChannel();
reader = readerFactory.open(SamInputResource.of(fis));
name = f.getName();
}
SAMFileWriterFactory factory = new SAMFileWriterFactory();
String outFilename = filenames.get(1);
File outfile = null;
OutputStream outStream = null;
if (outFilename.equals("-")) {
outStream = new BufferedOutputStream(System.out);
} else {
outfile = new File(outFilename);
}
if (tmpDir != null) {
factory.setTempDirectory(new File(tmpDir));
} else if (outfile == null || outfile.getParent() == null) {
factory.setTempDirectory(new File(".").getCanonicalFile());
} else {
factory.setTempDirectory(outfile.getParentFile());
}
SAMFileHeader header = reader.getFileHeader().clone();
SAMProgramRecord pg = NGSUtils.buildSAMProgramRecord("bam-filter", header);
List<SAMProgramRecord> pgRecords = new ArrayList<SAMProgramRecord>(header.getProgramRecords());
pgRecords.add(0, pg);
header.setProgramRecords(pgRecords);
SAMFileWriter out;
if (outfile != null) {
out = factory.makeBAMWriter(header, true, outfile);
} else {
out = factory.makeSAMWriter(header, true, outStream);
}
SAMFileWriter failedWriter = null;
if (failedFilename != null) {
failedWriter = factory.makeBAMWriter(header, true, new File(failedFilename));
}
BamFilter parent = new NullFilter(ProgressUtils.getIterator(name, reader.iterator(), new FileChannelStats(channel), new ProgressMessage<SAMRecord>(){
@Override
public String msg(SAMRecord current) {
if (current != null) {
return current.getReadName();
}
return null;
}}, new CloseableFinalizer<SAMRecord>()), failedWriter);
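// The enabled filters below are assembled as a decorator chain: each filter wraps the
// previous one ("parent"), so iterating over the outermost filter pulls every read
// through the whole chain before it is written to the output.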
if (filterFlags > 0) {
parent = new FilterFlags(parent, false, filterFlags);
if (verbose) {
System.err.println("FilterFlags: "+filterFlags);
}
}
if (requiredFlags > 0) {
parent = new RequiredFlags(parent, false, requiredFlags);
if (verbose) {
System.err.println("RequiredFlags: "+requiredFlags);
}
}
if (unique) {
parent = new UniqueMapping(parent, false);
if (verbose) {
System.err.println("Unique-mapping");
}
}
if (uniqueStart) {
parent = new UniqueStart(parent, false);
if (verbose) {
System.err.println("Unique-start");
}
}
if (paired) {
parent = new PairedFilter(parent, false);
if (verbose) {
System.err.println("Paired");
}
}
if (bedIncludeFile!=null) {
parent = new BedInclude(parent, false, bedIncludeFile, orient);
((BedInclude)parent).setOnlyWithin(bedIncludeOnlyWithin);
((BedInclude)parent).setRequireOnePair(bedIncludeRequireOne);
((BedInclude)parent).setRequireBothPairs(bedIncludeRequireBoth);
((BedInclude)parent).setReadStartPos(bedIncludeReadStartPos);
if (verbose) {
System.err.println("BEDInclude: "+bedIncludeFile);
}
}
if (bedExclude!=null) {
parent = new BedExclude(parent, false, bedExclude, orient);
((BedExclude)parent).setOnlyWithin(bedExcludeOnlyWithin);
((BedExclude)parent).setRequireOnePair(bedExcludeRequireOne);
((BedExclude)parent).setRequireBothPairs(bedExcludeRequireBoth);
((BedExclude)parent).setReadStartPos(bedExcludeReadStartPos);
if (verbose) {
System.err.println("BEDExclude: "+bedExclude);
}
}
if (junctionWhitelist != null) {
parent = new JunctionWhitelist(parent, false, junctionWhitelist);
if (verbose) {
System.err.println("JuntionWhitelist: "+junctionWhitelist);
}
}
if (whitelist != null) {
parent = new Whitelist(parent, false, whitelist);
if (verbose) {
System.err.println("Whitelist: "+whitelist);
}
}
if (minTagValues != null) {
parent = new TagMin(parent, false, minTagValues);
if (verbose) {
String outval = "";
for (String k:minTagValues.keySet()) {
if (!outval.equals("")) {
outval+=",";
}
outval += k+":"+minTagValues.get(k);
}
System.err.println("Tag min: "+outval);
}
}
if (maxTagValues != null) {
parent = new TagMax(parent, false, maxTagValues);
if (verbose) {
String outval = "";
for (String k:maxTagValues.keySet()) {
if (!outval.equals("")) {
outval+=",";
}
outval += k+":"+maxTagValues.get(k);
}
System.err.println("Tag max: "+outval);
}
}
for (SAMRecord read: parent) {
if (read != null) {
out.addAlignment(read);
}
}
if (verbose) {
dumpStats(parent);
}
reader.close();
out.close();
if (failedWriter != null) {
failedWriter.close();
}
}
private void dumpStats(BamFilter filter) {
if (filter.getParent()!=null) {
dumpStats(filter.getParent());
}
System.err.println(filter.getClass().getSimpleName());
System.err.println(" total: "+filter.getTotal());
System.err.println(" removed: "+filter.getRemoved());
}
}
|
package com.jonaslasauskas.gradle.plugin.capsule;
import java.util.HashMap;
import org.gradle.api.tasks.Input;
public final class Manifest {
public final String premainClass = "Capsule";
public final String mainClass = "Capsule";
@Input private String applicationId;
@Input private String applicationClass;
public void setApplicationId(String id) {
applicationId = id;
}
public String getApplicationId() {
return applicationId;
}
public void setApplicationClass(String className) {
applicationClass = className;
}
void defaultApplicationClassTo(String className) {
if (applicationClass == null) {
applicationClass = className;
}
}
public String getApplicationClass() {
return applicationClass;
}
void defaultApplicationIdTo(String id) {
if (applicationId == null) {
applicationId = id;
}
}
public void writeTo(org.gradle.api.java.archives.Manifest jarManifest) {
HashMap<String, String> capsuleAttributes = new HashMap<>();
capsuleAttributes.put("Premain-Class", premainClass);
capsuleAttributes.put("Main-Class", mainClass);
capsuleAttributes.put("Application-ID", applicationId);
capsuleAttributes.put("Application-Class", applicationClass);
jarManifest.attributes(capsuleAttributes);
}
}
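// Illustrative usage sketch, not part of the original plugin source: shows that the
// package-private default*To(...) methods only fill in values that have not been set
// explicitly. The class and property names below are hypothetical.
class ManifestUsageSketch {
public static void main(String[] args) {
Manifest manifest = new Manifest();
manifest.setApplicationClass("com.example.app.Main");
manifest.defaultApplicationClassTo("com.example.app.Fallback"); // ignored: already set
manifest.defaultApplicationIdTo("com.example.app");             // applied: still null
System.out.println(manifest.getApplicationClass()); // com.example.app.Main
System.out.println(manifest.getApplicationId());    // com.example.app
}
}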
|
package com.thoughtworks.twist.calabash.android;
import org.jruby.RubyArray;
import java.io.File;
import java.util.List;
import java.util.Map;
import static java.lang.String.format;
public class AndroidApplication {
private String installedOn;
private CalabashWrapper calabashWrapper;
public AndroidApplication(CalabashWrapper calabashWrapper, String serial) {
this.calabashWrapper = calabashWrapper;
this.installedOn = serial;
}
public String getInstalledOnSerial() {
return installedOn;
}
public UIElements query(String query) throws CalabashException {
RubyArray array = calabashWrapper.query(query);
return new UIElements(array, query, calabashWrapper);
}
/**
* Fetches all elements in this application and executes callback for each
* of them
*
* @param callback Callback to be executed for each element
* @throws CalabashException
*/
public void inspect(InspectCallback callback) throws CalabashException {
List<TreeNode> tree = new TreeBuilder(calabashWrapper).createTreeFromRoot();
if (tree.isEmpty()) return;
for (TreeNode treeNode : tree) {
Utils.inspectElement(treeNode, 0, callback);
}
}
/**
* @param dir Existing directory where the screenshot is saved
* @param fileName the name of the screenshot
* @throws CalabashException
*/
public void takeScreenshot(File dir, String fileName) throws CalabashException {
if (dir == null)
throw new CalabashException("Empty directory name");
if (fileName == null)
throw new CalabashException("Empty file name");
if (!dir.isDirectory())
throw new CalabashException(dir.getAbsolutePath() + " is not a directory");
if (!dir.canWrite())
throw new CalabashException(dir.getAbsolutePath() + " is not writeable");
calabashWrapper.takeScreenShot(dir, fileName);
}
/**
* Read the preferences inside the shared preference denoted by <code>preferenceName</code>
*
* @param preferenceName name of the shared preference
* @return a map of preferences in the shared preference
* @throws CalabashException
*/
public Map<String, String> getSharedPreferences(String preferenceName) throws CalabashException {
if (preferenceName == null || preferenceName.isEmpty()) {
throw new CalabashException("Invalid preference name");
}
return calabashWrapper.getPreferences(preferenceName);
}
/**
* Waits for the specified condition for the given timeout (in seconds).
*
* @param condition Condition to wait for
* @param timeoutInSec timeout in seconds
* @throws CalabashException
* @throws OperationTimedoutException
*/
public void waitFor(ICondition condition, int timeoutInSec) throws CalabashException, OperationTimedoutException {
calabashWrapper.waitFor(condition, new WaitOptions(timeoutInSec));
}
/**
* Waits for the specified condition with the options specified
*
* @param condition Condition to wait for
* @param options Wait options
* @throws CalabashException When any calabash operations fails
* @throws OperationTimedoutException When the operation elapsed the timeout period
*/
public void waitFor(ICondition condition, WaitOptions options) throws CalabashException, OperationTimedoutException {
calabashWrapper.waitFor(condition, options);
}
/**
* Gets the name of the current activity on the application.
*
* @return the name of the activity on the screen
*/
public String getCurrentActivity() throws CalabashException {
return calabashWrapper.getCurrentActivity();
}
/**
* Simulates a press of the 'back' button.
*
* @throws CalabashException
*/
public void goBack() throws CalabashException {
calabashWrapper.performGoBack();
}
/**
* Presses the 'enter' key on the keypad.
*
* @throws CalabashException
*/
public void pressEnterKey() throws CalabashException {
calabashWrapper.pressEnterKey();
}
/**
* Wait for an activity to come on the screen
*
* @param activityName the activity name which you want to wait for
* @param timeout      timeout in seconds; an <code>OperationTimedoutException</code> is thrown if the activity does not appear in time
* @throws CalabashException
*/
public void waitForActivity(final String activityName, int timeout) throws CalabashException, OperationTimedoutException {
waitFor(new ICondition() {
@Override
public boolean test() throws CalabashException {
return getCurrentActivity().contains(activityName);
}
}, timeout);
}
/**
* Waits until an element with the given id appears.
* @param id id of the element
* @param timeoutInSec wait time in seconds
* @throws OperationTimedoutException
* @throws CalabashException
*/
public void waitForElementWithId(final String id, int timeoutInSec) throws OperationTimedoutException, CalabashException {
waitFor(new ICondition() {
@Override
public boolean test() throws CalabashException {
return calabashWrapper.query(format("* id:'%s'", id)).size() > 0;
}
}, timeoutInSec);
}
/**
* Scroll Down by one page
*/
public void scrollDown() throws CalabashException {
calabashWrapper.scrollDown();
}
/**
* Scroll Up by one page
*/
public void scrollUp() throws CalabashException {
calabashWrapper.scrollUp();
}
/**
* Selects a menu item from the menu
*
* @param menuItem The name of the menu item to be selected
* @throws CalabashException
*/
public void selectMenuItem(String menuItem) throws CalabashException {
calabashWrapper.selectMenuItem(menuItem);
}
/**
* Performs a swipe action on the screen
*
* @param direction the direction to swipe
* @throws CalabashException
*/
public void swipe(Direction direction) throws CalabashException {
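// The hard-coded values are screen-normalized coordinates handed straight to drag(...)
// (see its javadoc below): the swipe runs between x=1 and x=99 at mid-height (y=50),
// using 5 intermediate steps.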
switch (direction) {
case LEFT:
calabashWrapper.drag(1, 99, 50, 50, 5);
break;
case RIGHT:
calabashWrapper.drag(99, 1, 50, 50, 5);
break;
}
}
public void setGPSCoordinates(double latitude, double longitude) throws CalabashException {
calabashWrapper.setGPSCoordinates(latitude, longitude);
}
public void setGPSLocation(String location) throws CalabashException {
calabashWrapper.setGPSLocation(location);
}
/**
* Gets all the root elements available. This can be used to build a tree view
* of all the elements currently available in the view.
*
* @return list of root elements if available, null otherwise
* @throws CalabashException
*/
public List<TreeNode> getRootElements() throws CalabashException {
return new TreeBuilder(calabashWrapper).createTreeFromRoot();
}
/**
* Click and drag from (fromX, fromY) to (toX, toY), where the X and Y axes start at the top-left corner.
* @param fromX source x-coordinate, normalized to screen width
* @param toX destination x-coordinate, normalized to screen width
* @param fromY source y-coordinate, normalized to screen height
* @param toY destination y-coordinate, normalized to screen height
* @param steps number of steps to take between the two points
* @throws CalabashException
*/
public void drag(int fromX, int toX, int fromY, int toY, int steps) throws CalabashException {
calabashWrapper.drag(fromX, toX, fromY, toY, steps);
}
/**
* Calls calabash's performAction function with the given action and its corresponding args,
* e.g.:
* performCalabashAction("enter_text_into_numbered_field", "text to be entered", "1");
*
* @param action action to be performed
* @param args list of arguments for the action
* @throws CalabashException
*/
public void performCalabashAction(String action, String... args) throws CalabashException {
calabashWrapper.performAction(action, args);
}
}
|
package nl.b3p.viewer.stripes;
import java.io.StringReader;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.persistence.NoResultException;
import javax.servlet.http.HttpSession;
import net.sourceforge.stripes.action.ActionBean;
import net.sourceforge.stripes.action.ActionBeanContext;
import net.sourceforge.stripes.action.After;
import net.sourceforge.stripes.action.Resolution;
import net.sourceforge.stripes.action.StreamingResolution;
import net.sourceforge.stripes.action.StrictBinding;
import net.sourceforge.stripes.action.UrlBinding;
import net.sourceforge.stripes.controller.LifecycleStage;
import net.sourceforge.stripes.validation.Validate;
import nl.b3p.viewer.config.app.ApplicationLayer;
import nl.b3p.viewer.config.app.ConfiguredAttribute;
import nl.b3p.viewer.config.services.AttributeDescriptor;
import nl.b3p.viewer.config.services.Layer;
import nl.b3p.viewer.config.services.SimpleFeatureType;
import nl.b3p.viewer.config.services.WFSFeatureSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.data.FeatureSource;
import org.geotools.data.Query;
import org.geotools.data.wfs.WFSDataStoreFactory;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.factory.GeoTools;
import org.geotools.feature.FeatureCollection;
import org.geotools.feature.FeatureIterator;
import org.geotools.filter.text.cql2.CQL;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.filter.FilterFactory2;
import org.opengis.filter.sort.SortBy;
import org.opengis.filter.sort.SortOrder;
import org.stripesstuff.stripersist.Stripersist;
/**
*
* @author Matthijs Laan
*/
@UrlBinding("/action/appLayer")
@StrictBinding
public class AppLayerActionBean implements ActionBean {
private static final Log log = LogFactory.getLog(AppLayerActionBean.class);
private static final int MAX_FEATURES = 50;
private ActionBeanContext context;
@Validate
private ApplicationLayer appLayer;
private Layer layer = null;
@Validate
private int limit;
@Validate
private int page;
@Validate
private int start;
@Validate
private String dir;
@Validate
private String sort;
@Validate
private boolean arrays;
@Validate
private String filter;
@Validate
private boolean debug;
//<editor-fold defaultstate="collapsed" desc="getters en setters">
public ActionBeanContext getContext() {
return context;
}
public void setContext(ActionBeanContext context) {
this.context = context;
}
public ApplicationLayer getAppLayer() {
return appLayer;
}
public void setAppLayer(ApplicationLayer appLayer) {
this.appLayer = appLayer;
}
public int getLimit() {
return limit;
}
public void setLimit(int limit) {
this.limit = limit;
}
public int getPage() {
return page;
}
public void setPage(int page) {
this.page = page;
}
public int getStart() {
return start;
}
public void setStart(int start) {
this.start = start;
}
public boolean isDebug() {
return debug;
}
public void setDebug(boolean debug) {
this.debug = debug;
}
public String getDir() {
return dir;
}
public void setDir(String dir) {
this.dir = dir;
}
public String getSort() {
return sort;
}
public void setSort(String sort) {
this.sort = sort;
}
public boolean isArrays() {
return arrays;
}
public void setArrays(boolean arrays) {
this.arrays = arrays;
}
public String getFilter() {
return filter;
}
public void setFilter(String filter) {
this.filter = filter;
}
//</editor-fold>
@After(stages=LifecycleStage.BindingAndValidation)
public void loadLayer() {
// TODO check if user has rights to appLayer
try {
layer = (Layer)Stripersist.getEntityManager().createQuery("from Layer where service = :service and name = :n order by virtual desc")
.setParameter("service", appLayer.getService())
.setParameter("n", appLayer.getLayerName())
.setMaxResults(1)
.getSingleResult();
} catch(NoResultException nre) {
}
}
public Resolution attributes() throws JSONException {
JSONObject json = new JSONObject();
json.put("success", Boolean.FALSE);
String error = null;
if(appLayer == null) {
error = "Invalid parameters";
} else {
Map<String,AttributeDescriptor> featureTypeAttributes = new HashMap<String,AttributeDescriptor>();
SimpleFeatureType ft = null;
if(layer != null) {
ft = layer.getFeatureType();
if(ft != null) {
for(AttributeDescriptor ad: ft.getAttributes()) {
featureTypeAttributes.put(ad.getName(), ad);
}
}
}
Integer geometryAttributeIndex = null;
JSONArray attributes = new JSONArray();
for(ConfiguredAttribute ca: appLayer.getAttributes()) {
JSONObject j = ca.toJSONObject();
AttributeDescriptor ad = featureTypeAttributes.get(ca.getAttributeName());
if(ad != null) {
j.put("alias", ad.getAlias());
j.put("type", ad.getType());
if(ft != null && ca.getAttributeName().equals(ft.getGeometryAttribute())) {
geometryAttributeIndex = attributes.length();
}
}
attributes.put(j);
}
JSONObject details = new JSONObject();
for(Map.Entry<String,String> e: appLayer.getDetails().entrySet()) {
details.put(e.getKey(), e.getValue());
}
json.put("details", details);
if(ft != null) {
json.put("geometryAttribute", ft.getGeometryAttribute());
}
if(geometryAttributeIndex != null) {
json.put("geometryAttributeIndex", geometryAttributeIndex);
}
json.put("attributes", attributes);
json.put("success", Boolean.TRUE);
}
if(error != null) {
json.put("error", error);
}
return new StreamingResolution("application/json", new StringReader(json.toString()));
}
private static final String CACHE_APPLAYER = "total_count_cache_applayer";
private static final String CACHE_FILTER = "total_count_cache_filter";
private static final String CACHE_TIME = "total_count_cache_time";
private static final String CACHE_COUNT = "total_count_cache";
private static final int CACHE_MAX_AGE = 60 * 1000;
/**
* Call this to clear the "total feature count" cached value when a new feature
* is added to a feature source. Only clears the cache for the current session.
*/
public static void clearTotalCountCache(ActionBeanContext context) {
HttpSession sess = context.getRequest().getSession();
sess.removeAttribute(CACHE_APPLAYER);
sess.removeAttribute(CACHE_FILTER);
sess.removeAttribute(CACHE_TIME);
sess.removeAttribute(CACHE_COUNT);
}
private int lookupTotalCountCache(Callable<Integer> countProducer) throws Exception {
HttpSession session = context.getRequest().getSession();
Integer total = null;
Long age = null;
Long cacheAppLayerId = (Long)session.getAttribute(CACHE_APPLAYER);
if(appLayer.getId().equals(cacheAppLayerId)) {
if((filter == null && session.getAttribute(CACHE_FILTER) == null)
|| (filter != null && filter.equals(session.getAttribute(CACHE_FILTER)) )) {
Long time = (Long)session.getAttribute(CACHE_TIME);
if(time != null) {
age = System.currentTimeMillis() - time;
if(age <= CACHE_MAX_AGE) {
total = (Integer)session.getAttribute(CACHE_COUNT);
}
}
}
}
if(total != null) {
log.debug(String.format("Returning cached total count value %d which was cached %s ms ago for app layer id %d",
total,
age,
appLayer.getId()));
return total;
} else {
long startTime = System.currentTimeMillis();
total = countProducer.call();
log.debug(String.format("Caching total count value %d which took %d ms to get for app layer id %d",
total,
System.currentTimeMillis() - startTime,
appLayer.getId()));
// Maybe only cache if getting total took longer than threshold?
// Now a new feature is only counted for all users after CACHE_MAX_AGE
// If clearTotalCountCache() is called then the new feature will be
// counted for the current user/session.
session.setAttribute(CACHE_APPLAYER, appLayer.getId());
session.setAttribute(CACHE_FILTER, filter);
session.setAttribute(CACHE_TIME, System.currentTimeMillis());
session.setAttribute(CACHE_COUNT, total);
return total;
}
}
private List<String> setPropertyNames(Query q) {
List<String> propertyNames = new ArrayList<String>();
boolean haveInvisibleProperties = false;
for(ConfiguredAttribute ca: appLayer.getAttributes()) {
if(ca.isVisible()) {
propertyNames.add(ca.getAttributeName());
} else {
haveInvisibleProperties = true;
}
}
if(haveInvisibleProperties) {
// By default Query retrieves Query.ALL_NAMES
// Query.NO_NAMES is an empty String array
q.setPropertyNames(propertyNames);
}
return propertyNames;
}
private void setSortBy(Query q, List<String> propertyNames) {
FilterFactory2 ff2 = CommonFactoryFinder.getFilterFactory2(GeoTools.getDefaultHints());
if(sort != null) {
String sortAttribute = null;
if(arrays) {
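// In "arrays" mode the client sorts on the generated column keys ("c0", "c1", ...) used
// when serializing features in store(), so parse the index after the prefix and map it
// back onto the corresponding visible attribute name.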
int i = Integer.parseInt(sort.substring(1));
int j = 0;
for(String name: propertyNames) {
if(j == i) {
sortAttribute = name;
}
j++;
}
} else {
sortAttribute = sort;
}
if(sortAttribute != null) {
q.setSortBy(new SortBy[] {
ff2.sort(sortAttribute, "DESC".equals(dir) ? SortOrder.DESCENDING : SortOrder.ASCENDING)
});
}
}
}
private void setFilter(Query q) throws Exception {
if(filter != null) {
q.setFilter(CQL.toFilter(filter));
}
}
public Resolution store() throws JSONException, Exception {
JSONObject json = new JSONObject();
JSONArray features = new JSONArray();
json.put("features", features);
try {
int total = 0;
if(layer != null && layer.getFeatureType() != null) {
FeatureSource fs;
if(isDebug() && layer.getFeatureType().getFeatureSource() instanceof WFSFeatureSource) {
Map extraDataStoreParams = new HashMap();
extraDataStoreParams.put(WFSDataStoreFactory.TRY_GZIP.key, Boolean.FALSE);
fs = ((WFSFeatureSource)layer.getFeatureType().getFeatureSource()).openGeoToolsFeatureSource(layer.getFeatureType(), extraDataStoreParams);
} else {
fs = layer.getFeatureType().openGeoToolsFeatureSource();
}
boolean startIndexSupported = fs.getQueryCapabilities().isOffsetSupported();
final Query q = new Query(fs.getName().toString());
List<String> propertyNames = setPropertyNames(q);
setSortBy(q, propertyNames);
setFilter(q);
final FeatureSource fs2 = fs;
total = lookupTotalCountCache(new Callable<Integer>() {
public Integer call() throws Exception {
return fs2.getCount(q);
}
});
if(total == -1) {
total = MAX_FEATURES;
}
q.setStartIndex(start);
q.setMaxFeatures(Math.min(limit + (startIndexSupported ? 0 : start),MAX_FEATURES));
FeatureCollection fc = fs.getFeatures(q);
FeatureIterator<SimpleFeature> it = fc.features();
try {
while(it.hasNext()) {
SimpleFeature f = it.next();
if(!startIndexSupported && start > 0) {
start--;
continue;
}
if(arrays) {
JSONObject j = new JSONObject();
int idx = 0;
for(String name: propertyNames) {
Object value = f.getAttribute(name);
j.put("c" + idx++, formatValue(value));
}
features.put(j);
} else {
JSONObject j = new JSONObject();
for(String name: propertyNames) {
j.put(name, f.getAttribute(name));
}
features.put(j);
}
}
} finally {
it.close();
fs.getDataStore().dispose();
}
}
json.put("total", total);
} catch(Exception e) {
log.error("Error loading features", e);
json.put("success", false);
String message = "Fout bij ophalen features: " + e.toString();
Throwable cause = e.getCause();
while(cause != null) {
message += "; " + cause.toString();
cause = cause.getCause();
}
json.put("message", message);
}
return new StreamingResolution("application/json", new StringReader(json.toString(4)));
}
private DateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss");
private Object formatValue(Object value) {
if(value instanceof Date) {
// JSON has no date type so format the date as it is used for
// display, not calculation
return dateFormat.format((Date)value);
} else {
return value;
}
}
}
|
package edu.umd.cs.findbugs;
import java.util.Comparator;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.TreeMap;
import edu.umd.cs.findbugs.ba.ClassHash;
import edu.umd.cs.findbugs.ba.MethodHash;
/**
* A slightly more intelligent way of comparing BugInstances from two versions
* to see if they are the "same". Uses class and method hashes to try to
* handle renamings, at least for simple cases. (<em>Hashes disabled for the
* time being.</em>) Uses opcode context to try to identify code that is the
* same, even if it moves within the method. Also compares by bug abbreviation
* rather than bug type, since the "same" bug can change type if the context
* changes (e.g., "definitely null" to "null on simple path" for a null pointer
* dereference). Also, we often change bug types between different versions
* of FindBugs.
*
* @see edu.umd.cs.findbugs.BugInstance
* @see edu.umd.cs.findbugs.VersionInsensitiveBugComparator
* @author David Hovemeyer
*/
public class FuzzyBugComparator implements Comparator<BugInstance> {
private static final boolean DEBUG = false;
// Don't use hashes for now. Still ironing out issues there.
private static final boolean USE_HASHES = false;
/**
* Filter ignored BugAnnotations from given Iterator.
*/
private static class FilteringBugAnnotationIterator implements Iterator<BugAnnotation> {
Iterator<BugAnnotation> iter;
BugAnnotation next;
public FilteringBugAnnotationIterator(Iterator<BugAnnotation> iter) {
this.iter = iter;
}
private void findNext() {
if (next == null) {
while (iter.hasNext()) {
BugAnnotation candidate = iter.next();
if (!ignore(candidate)) {
next = candidate;
break;
}
}
}
}
/* (non-Javadoc)
* @see java.util.Iterator#hasNext()
*/
public boolean hasNext() {
findNext();
return next != null;
}
/* (non-Javadoc)
* @see java.util.Iterator#next()
*/
public BugAnnotation next() {
findNext();
if (next == null)
throw new NoSuchElementException();
BugAnnotation result = next;
next = null;
return result;
}
/* (non-Javadoc)
* @see java.util.Iterator#remove()
*/
public void remove() {
throw new UnsupportedOperationException();
}
}
/** Keep track of which BugCollections the various BugInstances have come from. */
private IdentityHashMap<BugInstance, BugCollection> bugCollectionMap;
/**
* Map of class hashes to canonical class names used for comparison purposes.
*/
private Map<ClassHash, String> classHashToCanonicalClassNameMap;
public FuzzyBugComparator() {
if (DEBUG) System.out.println("Created fuzzy comparator");
this.bugCollectionMap = new IdentityHashMap<BugInstance, BugCollection>();
this.classHashToCanonicalClassNameMap = new TreeMap<ClassHash, String>();
}
/**
* Register a BugCollection. This allows us to find the class and method
* hashes for BugInstances to be compared.
*
* @param bugCollection a BugCollection
*/
public void registerBugCollection(BugCollection bugCollection) {
if (USE_HASHES) {
for (Iterator<BugInstance> i = bugCollection.iterator(); i.hasNext(); ) {
bugCollectionMap.put(i.next(), bugCollection);
}
// For each distinct ClassHash, keep track of the lexicographically
// least class name. This serves as the "representative" for all (equivalent)
// classes sharing that hash value. This allows us to ensure that the
// class ordering induced by this comparator is transitive.
for (Iterator<ClassHash> i = bugCollection.classHashIterator(); i.hasNext();) {
ClassHash classHash = i.next();
String canonicalClassName = classHashToCanonicalClassNameMap.get(classHash);
if (canonicalClassName == null || classHash.getClassName().compareTo(canonicalClassName) < 0) {
classHashToCanonicalClassNameMap.put(classHash, classHash.getClassName());
}
}
}
}
public int compare(BugInstance lhs, BugInstance rhs) {
int cmp;
if (DEBUG) System.out.println("Fuzzy comparison");
// Bug abbreviations must match.
BugPattern lhsPattern = lhs.getBugPattern();
BugPattern rhsPattern = rhs.getBugPattern();
if (lhsPattern == null || rhsPattern == null) {
if (DEBUG) {
if (lhsPattern == null)
System.out.println("Missing pattern: " + lhs.getType());
if (rhsPattern == null)
System.out.println("Missing pattern: " + rhs.getType());
}
String lhsCode = getCode(lhs.getType());
String rhsCode = getCode(rhs.getType());
if ((cmp = lhsCode.compareTo(rhsCode)) != 0)
return cmp;
} else {
if ((cmp = lhsPattern.getAbbrev().compareTo(rhsPattern.getAbbrev())) != 0)
return cmp;
}
BugCollection lhsCollection = bugCollectionMap.get(lhs);
BugCollection rhsCollection = bugCollectionMap.get(rhs);
// Scan through bug annotations, comparing fuzzily if possible
Iterator<BugAnnotation> lhsIter = new FilteringBugAnnotationIterator(lhs.annotationIterator());
Iterator<BugAnnotation> rhsIter = new FilteringBugAnnotationIterator(rhs.annotationIterator());
while (lhsIter.hasNext() && rhsIter.hasNext()) {
BugAnnotation lhsAnnotation = lhsIter.next();
BugAnnotation rhsAnnotation = rhsIter.next();
if (DEBUG) System.out.println("Compare annotations: " + lhsAnnotation + "," + rhsAnnotation);
// Annotation classes must match exactly
cmp = lhsAnnotation.getClass().getName().compareTo(rhsAnnotation.getClass().getName());
if (cmp != 0) {
if (DEBUG) System.out.println("annotation class mismatch: " + lhsAnnotation.getClass().getName() +
"," + rhsAnnotation.getClass().getName());
return cmp;
}
if (lhsAnnotation.getClass() == ClassAnnotation.class)
cmp = compareClasses(lhsCollection, rhsCollection, (ClassAnnotation) lhsAnnotation, (ClassAnnotation) rhsAnnotation);
else if (lhsAnnotation.getClass() == MethodAnnotation.class)
cmp = compareMethods(lhsCollection, rhsCollection, (MethodAnnotation) lhsAnnotation, (MethodAnnotation) rhsAnnotation);
else if (lhsAnnotation.getClass() == SourceLineAnnotation.class)
cmp = compareSourceLines(lhsCollection, rhsCollection, (SourceLineAnnotation) lhsAnnotation, (SourceLineAnnotation) rhsAnnotation);
else
// everything else just compare directly
cmp = lhsAnnotation.compareTo(rhsAnnotation);
if (cmp != 0)
return cmp;
}
// Number of bug annotations must match
if (!lhsIter.hasNext() && !rhsIter.hasNext()) {
if (DEBUG) System.out.println("Match!");
return 0;
} else
return (lhsIter.hasNext() ? 1 : -1);
}
/**
* @param type the full bug type string
* @return the bug code, i.e. the portion of the type before the first underscore, or "" if there is no underscore
*/
private String getCode(String type) {
int bar = type.indexOf('_');
if (bar < 0)
return "";
else
return type.substring(0, bar);
}
private static int compareNullElements(Object a, Object b) {
if (a != null)
return 1;
else if (b != null)
return -1;
else
return 0;
}
private static ClassHash getClassHash(BugCollection bugCollection, String className) {
if (bugCollection == null)
return null;
else
return bugCollection.getClassHash(className);
}
public int compareClasses(BugCollection lhsCollection, BugCollection rhsCollection, ClassAnnotation lhsClass, ClassAnnotation rhsClass) {
if (lhsClass == null || rhsClass == null) {
return compareNullElements(lhsClass, rhsClass);
} else {
return compareClassesByName(lhsCollection, rhsCollection, lhsClass.getClassName(), rhsClass.getClassName());
}
}
// Compare classes: either exact fully qualified name must match, or class hash must match
public int compareClassesByName(BugCollection lhsCollection, BugCollection rhsCollection, String lhsClassName, String rhsClassName) {
int cmp;
if (USE_HASHES) {
// Get class hashes
ClassHash lhsHash = getClassHash(lhsCollection, lhsClassName);
ClassHash rhsHash = getClassHash(rhsCollection, rhsClassName);
// Convert to canonical class names based on the class hashes.
// This has the effect that classes with the same hash compare as equal,
// while ensuring that all class names have a consistent ordering.
if (lhsHash != null)
lhsClassName = classHashToCanonicalClassNameMap.get(lhsHash);
if (rhsHash != null)
rhsClassName = classHashToCanonicalClassNameMap.get(rhsHash);
}
return lhsClassName.compareTo(rhsClassName);
}
// Compare methods: either exact name and signature must match, or method hash must match
public int compareMethods(BugCollection lhsCollection, BugCollection rhsCollection, MethodAnnotation lhsMethod, MethodAnnotation rhsMethod) {
if (lhsMethod == null || rhsMethod == null) {
return compareNullElements(lhsMethod, rhsMethod);
}
// Compare for exact match
int cmp = lhsMethod.compareTo(rhsMethod);
if (USE_HASHES) {
if (cmp == 0)
return 0;
// Get class hashes for primary classes
ClassHash lhsClassHash = getClassHash(lhsCollection, lhsMethod.getClassName());
ClassHash rhsClassHash = getClassHash(rhsCollection, rhsMethod.getClassName());
if (lhsClassHash == null || rhsClassHash == null)
return cmp;
// Look up method hashes
MethodHash lhsHash = lhsClassHash.getMethodHash(lhsMethod.toXMethod());
MethodHash rhsHash = rhsClassHash.getMethodHash(rhsMethod.toXMethod());
if (lhsHash == null || rhsHash == null)
return cmp;
if (lhsHash.isSameHash(rhsHash))
return 0;
}
return cmp;
}
/**
* For now, just look at the 2 preceding and succeeding opcodes
* for fuzzy source line matching.
*/
private static final int NUM_CONTEXT_OPCODES = 2;
/**
* Compare source line annotations.
*
* @param lhsCollection lhs BugCollection
* @param rhsCollection rhs BugCollection
* @param lhs a SourceLineAnnotation
* @param rhs another SourceLineAnnotation
* @return comparison of lhs and rhs
*/
public int compareSourceLines(BugCollection lhsCollection, BugCollection rhsCollection, SourceLineAnnotation lhs, SourceLineAnnotation rhs) {
if (lhs == null || rhs == null) {
return compareNullElements(lhs, rhs);
}
// Classes must match fuzzily.
int cmp = compareClassesByName(lhsCollection, rhsCollection, lhs.getClassName(), rhs.getClassName());
if (cmp != 0)
return cmp;
// If both annotations refer to entire methods, as opposed to
// a specific instruction or range of instructions, consider them
// equal. This handles the case where a warning may refer to
// another method (i.e., it was called).
// Even if the referred-to method changes between versions, we want
// the warning to be considered equivalent in both versions.
if (!lhs.hasSpecificInstructions() && !rhs.hasSpecificInstructions())
return 0;
// Compare earlier opcodes
if ((cmp = lhs.getEarlierOpcodesAsString(NUM_CONTEXT_OPCODES).compareTo(
rhs.getEarlierOpcodesAsString(NUM_CONTEXT_OPCODES))) != 0) {
return cmp;
}
// Compare selected opcodes
if ((cmp = lhs.getSelectedOpcodesAsString().compareTo(rhs.getSelectedOpcodesAsString())) != 0) {
return cmp;
}
// Compare later opcodes
if ((cmp = lhs.getLaterOpcodesAsString(NUM_CONTEXT_OPCODES).compareTo(
rhs.getLaterOpcodesAsString(NUM_CONTEXT_OPCODES))) != 0) {
return cmp;
}
return 0;
}
// See "FindBugsAnnotationDescriptions.properties"
private static final HashSet<String> significantDescriptionSet = new HashSet<String>();
static {
// Classes, methods, and fields are significant.
significantDescriptionSet.add("CLASS_DEFAULT");
significantDescriptionSet.add("CLASS_EXCEPTION");
significantDescriptionSet.add("CLASS_REFTYPE");
significantDescriptionSet.add("INTERFACE_TYPE");
significantDescriptionSet.add("METHOD_DEFAULT");
significantDescriptionSet.add("METHOD_CALLED");
significantDescriptionSet.add("METHOD_DANGEROUS_TARGET"); // but do NOT use safe targets
significantDescriptionSet.add("METHOD_DECLARED_NONNULL");
significantDescriptionSet.add("FIELD_DEFAULT");
significantDescriptionSet.add("FIELD_ON");
significantDescriptionSet.add("FIELD_SUPER");
significantDescriptionSet.add("FIELD_MASKED");
significantDescriptionSet.add("FIELD_MASKING");
// Many int annotations are NOT significant: e.g., sync %, biased locked %, bytecode offset, etc.
// The null parameter annotations, however, are definitely significant.
significantDescriptionSet.add("INT_NULL_ARG");
significantDescriptionSet.add("INT_MAYBE_NULL_ARG");
significantDescriptionSet.add("INT_NONNULL_PARAM");
// Only DEFAULT source line annotations are significant.
significantDescriptionSet.add("SOURCE_LINE_DEFAULT");
}
public static boolean ignore(BugAnnotation annotation) {
return !significantDescriptionSet.contains(annotation.getDescription());
}
}
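// Illustrative usage sketch, not part of the original FindBugs source (loading the two
// collections is assumed here): register every BugCollection first so the comparator can
// consult class/method hashes, then treat a comparison result of 0 as "same warning".
//
//   FuzzyBugComparator comparator = new FuzzyBugComparator();
//   comparator.registerBugCollection(oldCollection);
//   comparator.registerBugCollection(newCollection);
//   boolean sameWarning = comparator.compare(oldInstance, newInstance) == 0;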
|
package edu.udayton.coopere5.spice;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Scanner;
import javax.swing.JFileChooser;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import Jama.Matrix;
/**
* @author Evan Cooper
*
*/
public class ResistancePanel extends JPanel {
private class MouseHandler extends MouseAdapter {
@Override
public void mouseClicked(MouseEvent event) {
int button = event.getButton();
xpos = event.getX();
ypos = event.getY();
int clicks = event.getClickCount();
if (!wireDraw) {
prevComponent = currentComponent;
if (prevComponent != null) {
prevComponent.setSelected(false);
}
currentComponent = ResistancePanel.this.find(event.getPoint());
if (currentComponent != null) {
currentComponent.setSelected(true);
if (currentComponent.getType() == CircuitComponent.WIRE) {
popupRotate.setEnabled(false);
} else {
popupRotate.setEnabled(true);
}
popupDelete.setEnabled(true);
} else {
popupRotate.setEnabled(false);
popupDelete.setEnabled(false);
}
ResistancePanel.this.repaint();
if (clicks > 1) {
ResistancePanel.this.makeDirty();
if (currentComponent == null) {
popup.show(ResistancePanel.this, event.getX(), event.getY());
} else {
int status = currentComponent.showDialog(ResistancePanel.this.getParent(), false);
if (status == JOptionPane.NO_OPTION) {
ResistancePanel.this.removeComponent();
}
}
} else if (button == 3) {
popup.show(ResistancePanel.this, event.getX(), event.getY());
}
} else if (clicks == 1 && button == 1) {
currentComponent.addPoint(event.getX(), event.getY());
ResistancePanel.this.repaint();
} else if (clicks > 1 || button == 3) {
wireDraw = false;
currentComponent.showDialog(ResistancePanel.this.getParent(), true);
}
ResistancePanel.this.repaint();
}
@Override
public void mouseDragged(MouseEvent event) {
if (!wireDraw) {
xpos = event.getX();
ypos = event.getY();
// current = find(event.getPoint());
if (currentComponent != null) {
currentComponent.setPosition(xpos, ypos);
ResistancePanel.this.makeDirty();
}
ResistancePanel.this.repaint();
}
}
@Override
public void mouseMoved(MouseEvent event) {
if (wireDraw) {
currentComponent.setLastPoint(event.getX(), event.getY());
ResistancePanel.this.repaint();
}
}
@Override
public void mousePressed(MouseEvent event) {
if (!wireDraw) {
prevComponent = currentComponent;
if (prevComponent != null) {
prevComponent.setSelected(false);
}
currentComponent = ResistancePanel.this.find(event.getPoint());
if (currentComponent != null) {
currentComponent.setSelected(true);
if (currentComponent.getType() == CircuitComponent.WIRE) {
popupRotate.setEnabled(false);
} else {
popupRotate.setEnabled(true);
}
popupDelete.setEnabled(true);
} else {
popupRotate.setEnabled(false);
popupDelete.setEnabled(false);
}
ResistancePanel.this.repaint();
}
}
@Override
public void mouseReleased(MouseEvent event) {
}
}
private static final long serialVersionUID = 1L;
private List<CircuitComponent> components;
private CircuitComponent currentComponent = null;
private CircuitComponent prevComponent = null;
private boolean dirty;
private int xpos, ypos;
private File currentFile;
private boolean wireDraw;
private JPopupMenu popup;
private JMenuItem popupRotate;
private JMenuItem popupDelete;
private JFileChooser fileChooser;
private ActionListener menuListener = new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
if (event.getActionCommand() == "Resistor") {
currentComponent = new Resistor();
currentComponent.setPosition(xpos, ypos);
int status = currentComponent.showDialog(ResistancePanel.this.getParent(), true);
if (status == JOptionPane.YES_OPTION) {
ResistancePanel.this.addComponent(currentComponent);
} else {
currentComponent = null;
}
} else if (event.getActionCommand() == "Wire") {
currentComponent = new Wire();
components.add(currentComponent);
wireDraw = true;
currentComponent.addPoint(xpos, ypos);
} else if (event.getActionCommand() == "Current") {
currentComponent = new Current();
currentComponent.setPosition(xpos, ypos);
int status = currentComponent.showDialog(ResistancePanel.this.getParent(), true);
if (status == JOptionPane.YES_OPTION) {
ResistancePanel.this.addComponent(currentComponent);
} else {
currentComponent = null;
}
} else if (event.getActionCommand() == "Rotate") {
ResistancePanel.this.rotate(1);
} else if (event.getActionCommand() == "Delete") {
ResistancePanel.this.removeComponent();
}
}
};
public ResistancePanel() {
super();
this.components = new ArrayList<CircuitComponent>();
MouseHandler mouse = new MouseHandler();
this.setPreferredSize(new Dimension(500, 500));
this.addMouseListener(mouse);
this.addMouseMotionListener(mouse);
this.popup = new JPopupMenu();
this.fileChooser = new JFileChooser(System.getProperty("user.dir"));
JMenuItem item;
popup.add(item = new JMenuItem("Resistor"));
item.addActionListener(menuListener);
popup.add(item = new JMenuItem("Wire"));
item.addActionListener(menuListener);
popup.add(item = new JMenuItem("Current"));
item.addActionListener(menuListener);
popup.add(popupRotate = new JMenuItem("Rotate"));
popupRotate.setEnabled(false);
popupRotate.addActionListener(menuListener);
popup.add(popupDelete = new JMenuItem("Delete"));
popupDelete.setEnabled(false);
popupDelete.addActionListener(menuListener);
this.dirty = false;
this.wireDraw = false;
}
public ResistancePanel(List<CircuitComponent> cList) {
this();
this.components = cList;
}
public void addComponent(CircuitComponent c) {
this.makeDirty();
components.add(c);
this.repaint();
}
public List<CircuitComponent> getCircuitComponents() {
return components;
}
public CircuitComponent getCurrentComponent() {
return currentComponent;
}
/**
* @return - name of {@link #currentFile}
*/
public String getFileName() {
if (currentFile != null) {
return this.currentFile.getName();
} else {
return "untitled";
}
}
/**
* @return {@link #saveIfDirty()}
*/
public int newFile() {
int rv = this.saveIfDirty();
if (rv != 0) {
this.clear();
}
return rv;
}
/**
* @return
* <ul>
* <li>0 upon success</li>
* <li>-1 upon save cancel or open cancel</li>
* </ul>
*/
public int open() {
int rv = this.saveIfDirty();
if (rv != -1) {
rv = fileChooser.showOpenDialog(this);
if (rv != JFileChooser.APPROVE_OPTION) {
return -1;
}
currentFile = fileChooser.getSelectedFile();
this.rebuildComponents();
this.makeClean();
return 0;
}
return -1;
}
@Override
public void paintComponent(Graphics g) {
super.paintComponent(g);
for (CircuitComponent c : this.components) {
c.draw(g);
}
}
public void removeComponent() {
if (currentComponent != null) {
this.makeDirty();
components.remove(currentComponent);
this.repaint();
}
}
public void rotate(int dir) {
if (currentComponent != null) {
this.makeDirty();
currentComponent.rotate(dir);
this.repaint();
}
}
/**
* @return
* <ul>
* <li>0 upon success</li>
* <li>-1 upon failure</li>
* </ul>
* @see #save(File)
*/
public int save() {
return this.save(currentFile);
}
/**
* @param f
* - The File to save to. Shows a file chooser if null.
* @return
* <ul>
* <li>0 upon success</li>
* <li>-1 upon failure</li>
* </ul>
*/
public int save(File f) {
if (f == null) {
int rv = fileChooser.showSaveDialog(this);
if (rv != JFileChooser.APPROVE_OPTION) {
return -1;
}
f = fileChooser.getSelectedFile();
}
try {
f.createNewFile();
PrintWriter writer = new PrintWriter(f);
for (CircuitComponent c : this.getCircuitComponents()) {
writer.println(c.getNetLine());
}
writer.close();
this.makeClean();
return 0;
} catch (IOException e) {
JOptionPane.showMessageDialog(this, "IOException");
return -1;
}
}
public void saveAndExit() {
if (this.saveIfDirty() != -1) {
System.exit(0);
}
}
public void solve() {
Matrix solutionMatrix = NodeAnalysis.solver(components);
StringWriter buffer = new StringWriter();
PrintWriter output = new PrintWriter(buffer);
solutionMatrix.print(output, NumberFormat.getInstance(Locale.US), 10);
String solutionString = buffer.toString();
JOptionPane.showMessageDialog(this, solutionString, "Solution", JOptionPane.INFORMATION_MESSAGE);
}
private void clear() {
this.currentComponent = null;
popupRotate.setEnabled(false);
popupDelete.setEnabled(false);
this.prevComponent = null;
this.currentFile = null;
this.getCircuitComponents().clear();
this.repaint();
}
private CircuitComponent find(Point p) {
for (CircuitComponent c : components) {
if (c.area.contains(p)) {
return c;
}
}
return null;
}
private boolean isDirty() {
return dirty;
}
private void makeClean() {
this.dirty = false;
}
private void makeDirty() {
this.dirty = true;
}
private void rebuildComponents() {
if (currentFile != null) {
components.clear();
int i = 1;
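// Each line of the saved file is a component's net line (see save(File)): a type token
// (R, WIRE, I or V) followed by whitespace-separated component fields.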
try {
Scanner sc = new Scanner(currentFile);
while (sc.hasNextLine()) {
String line = sc.nextLine();
String[] split = line.split(" +");
String type = split[0];
try {
if (type.equals("R")) {
this.addComponent(new Resistor(split));
} else if (type.equals("WIRE")) {
this.addComponent(new Wire(split));
} else if (type.equals("I")) {
this.addComponent(new Current(split));
} else if (type.equals("V")) {
this.addComponent(new Voltage(split));
} else {
System.err.println("Invalid component indicator '" + type + "' on line " + i
+ ". Excluding component " + split[1]);
}
} catch (NumberFormatException e) {
System.err.println(
"Invalid value '" + split[4] + "' on line " + i + ". Excluding component " + split[1]);
}
i++;
}
sc.close();
sc = null;
this.repaint();
} catch (FileNotFoundException e) {
JOptionPane.showMessageDialog(this, e);
}
}
}
/**
* @return
* <ul>
* <li>0 if clean or saved</li>
* <li>1 if not saved</li>
* <li>-1 if cancelled or closed</li>
* </ul>
*/
private int saveIfDirty() {
if (this.isDirty()) {
int saveDialog = JOptionPane.showConfirmDialog(this, "Save?", "Save?", JOptionPane.YES_NO_CANCEL_OPTION,
JOptionPane.QUESTION_MESSAGE);
if (saveDialog == JOptionPane.YES_OPTION) {
return this.save(currentFile);
} else if (saveDialog == JOptionPane.NO_OPTION) {
return 1;
} else {
return -1;
}
}
return 0;
}
}
|
package com.kryptnostic.rhizome.hazelcast.objects;
import java.util.Collection;
import java.util.HashSet;
import java.util.UUID;
import com.google.common.primitives.Ints;
public class UUIDSet extends HashSet<UUID> {
private static final long serialVersionUID = 6290405515755142889L;
public UUIDSet( Collection<UUID> c ) {
super( c );
}
public UUIDSet( int initialCapacity ) {
super( expectedSize( initialCapacity ) );
}
public UUIDSet() {
super();
}
public static int expectedSize( int expectedSize ) {
if ( expectedSize < 0 ) {
throw new IllegalArgumentException( "expectedSize cannot be negative but was: " + expectedSize );
}
if ( expectedSize < 3 ) {
return expectedSize + 1;
}
if ( expectedSize < Ints.MAX_POWER_OF_TWO ) {
return expectedSize + expectedSize / 3;
}
return Integer.MAX_VALUE;
}
public static UUIDSet of( UUID uuid ) {
UUIDSet us = new UUIDSet( 1 );
us.add( uuid );
return us;
}
}
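// Illustrative usage sketch (not part of the original source): demonstrates the
// factory method and the Guava-style capacity padding performed by expectedSize.
// The class name UUIDSetDemo is hypothetical; the printed values follow directly
// from the arithmetic in expectedSize above.
class UUIDSetDemo {
    public static void main( String[] args ) {
        UUIDSet single = UUIDSet.of( UUID.randomUUID() );
        System.out.println( single.size() );             // 1
        System.out.println( UUIDSet.expectedSize( 2 ) );  // small sizes get +1 -> 3
        System.out.println( UUIDSet.expectedSize( 9 ) );  // otherwise size + size/3 -> 12
    }
}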
|
package com.ui.ninePatch;
import java.awt.*;
import java.awt.FontMetrics;
import java.awt.geom.*;
import java.awt.image.*;
import java.io.*;
import java.nio.file.*;
import java.util.*;
import javax.imageio.*;
import javax.swing.*;
import com.ui.BackgroundPanel;
import com.ui.ninePatch.NinePatchPanel;
public class NinePatch {
private final BufferedImage middle;
private final BufferedImage topBorder;
private final BufferedImage leftBorder;
private final BufferedImage rightBorder;
private final BufferedImage bottomBorder;
private final BufferedImage topLeft;
private final BufferedImage topRight;
private final BufferedImage bottomLeft;
private final BufferedImage bottomRight;
static BufferedImage copyRotated(BufferedImage bi, double rotation) {
        // parameterize rotation
double rot = Math.toRadians(rotation);
int newWidth = (int) Math.abs(bi.getWidth()*Math.cos(rot) ) +
(int) Math.abs(bi.getHeight()*Math.sin(rot)) ;
int newHeight = (int) Math.abs(bi.getWidth()*Math.sin(rot) ) +
(int) Math.abs(bi.getHeight()*Math.cos(rot)) ;
//make compatible, rotated canvas
ColorModel cm = bi.getColorModel();
boolean isAlphaPremultiplied = cm.isAlphaPremultiplied();
WritableRaster raster = cm.createCompatibleWritableRaster(newWidth, newHeight);
BufferedImage ni = new BufferedImage(cm, raster, isAlphaPremultiplied, null);
//define transform to do rotation;
Graphics2D g = ni.createGraphics();
AffineTransform tx = new AffineTransform();
tx.translate(newWidth/2.0d, newHeight/2.0d); //move from 0,0 to new image center
tx.rotate(rot); //perform rotation
tx.translate(-bi.getWidth()/2.0d, -bi.getHeight()/2.0d); //move image to 0,0 for rotation
//draw the rotation into the new image
g.drawImage(bi, tx, null);
g.dispose();
return ni;
}
enum Flip { IDENTITY, HORIZONTAL, VERTICAL, BOTH };
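    // Mirrors the image across the vertical axis (HORIZONTAL), the horizontal axis
    // (VERTICAL), or both, by scaling by -1 and translating the result back into
    // positive coordinates before filtering with a nearest-neighbor transform op.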
static BufferedImage copyFlipped(BufferedImage bi, Flip dir) {
AffineTransform tx = new AffineTransform();
switch(dir) {
case IDENTITY:
tx = new AffineTransform();
break;
case HORIZONTAL:
tx = AffineTransform.getScaleInstance(-1, 1);
tx.translate(-bi.getWidth(), 0);
break;
case VERTICAL:
tx = AffineTransform.getScaleInstance(1, -1);
tx.translate(0, -bi.getHeight());
break;
case BOTH:
tx = AffineTransform.getScaleInstance(-1, -1);
tx.translate(-bi.getWidth(), -bi.getHeight());
break;
}
AffineTransformOp op = new AffineTransformOp(tx, AffineTransformOp.TYPE_NEAREST_NEIGHBOR);
return op.filter(bi, null);
}
private NinePatch() {
middle = null;
topBorder = null;
leftBorder = null;
rightBorder = null;
bottomBorder = null;
topLeft = null;
topRight = null;
bottomLeft = null;
bottomRight = null;
}
public NinePatch(BufferedImage center, BufferedImage wall, BufferedImage corner) {
middle = center;
topBorder = copyFlipped(wall, Flip.IDENTITY);
leftBorder = copyRotated(wall, -90);
rightBorder = copyRotated(wall, 90);
bottomBorder = copyFlipped(wall, Flip.VERTICAL);
topLeft = copyFlipped(corner, Flip.IDENTITY);
topRight = copyFlipped(corner, Flip.HORIZONTAL);
bottomLeft = copyFlipped(corner, Flip.VERTICAL);
bottomRight = copyFlipped(corner, Flip.BOTH);
}
public NinePatch(BufferedImage center,
BufferedImage topWall,
BufferedImage sideWall,
BufferedImage corner) {
middle = center;
leftBorder = copyFlipped(sideWall, Flip.IDENTITY);
rightBorder = copyFlipped(sideWall, Flip.BOTH);
topBorder = copyFlipped(topWall, Flip.IDENTITY);
bottomBorder = copyFlipped(topWall, Flip.BOTH);
topLeft = copyFlipped(corner, Flip.IDENTITY);
topRight = copyFlipped(corner, Flip.HORIZONTAL);
bottomLeft = copyFlipped(corner, Flip.VERTICAL);
bottomRight = copyFlipped(corner, Flip.BOTH);
}
public NinePatch(BufferedImage center, BufferedImage[] walls, BufferedImage[] corners) {
middle = center;
topBorder = walls[0];
leftBorder = walls[1];
rightBorder = walls[2];
bottomBorder = walls[3];
topLeft = corners[0];
topRight = corners[1];
bottomLeft = corners[2];
bottomRight = corners[3];
}
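    // Expected directory contents (inferred from the loaders below):
    //   full 9-image patch: Middle, TopBorder, LeftBorder, RightBorder, BottomBorder,
    //                       TopLeft, TopRight, BottomLeft, BottomRight (.png each)
    //   4-image patch:      Middle, TopBorder, LeftBorder, TopLeft
    //   3-image patch:      Middle, TopBorder, TopLeft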
public static NinePatch loadFrom(Path dir) throws IOException {
// Only Full 9-image patches define a right border
boolean fullPatch = dir.resolve("RightBorder.png").toFile().exists();
if(fullPatch){
BufferedImage center = ImageIO.read(dir.resolve("Middle.png").toFile());
BufferedImage[] walls = new BufferedImage[] {
ImageIO.read(dir.resolve("TopBorder.png").toFile()),
ImageIO.read(dir.resolve("LeftBorder.png").toFile()),
ImageIO.read(dir.resolve("RightBorder.png").toFile()),
ImageIO.read(dir.resolve("BottomBorder.png").toFile())
};
BufferedImage[] joints = new BufferedImage[] {
ImageIO.read(dir.resolve("TopLeft.png").toFile()),
ImageIO.read(dir.resolve("TopRight.png").toFile()),
ImageIO.read(dir.resolve("BottomLeft.png").toFile()),
ImageIO.read(dir.resolve("BottomRight.png").toFile())
};
return new NinePatch(center, walls, joints);
}
        // If not a full patch, then only 4-image patches define a left border
boolean fourPatch = dir.resolve("LeftBorder.png").toFile().exists();
if(fourPatch){
return new NinePatch(
ImageIO.read(dir.resolve("Middle.png").toFile()),
ImageIO.read(dir.resolve("TopBorder.png").toFile()),
ImageIO.read(dir.resolve("LeftBorder.png").toFile()),
ImageIO.read(dir.resolve("TopLeft.png").toFile())
);
}
// Must be a three image patch
return new NinePatch(
ImageIO.read(dir.resolve("Middle.png").toFile()),
ImageIO.read(dir.resolve("TopBorder.png").toFile()),
ImageIO.read(dir.resolve("TopLeft.png").toFile())
);
}
public Dimension minimumSize() {
//widest left corner + widest right corner
int width = Math.max(topLeft.getWidth(), bottomLeft.getWidth()) +
Math.max(topRight.getWidth(), bottomRight.getWidth());
//tallest top corner + tallest bottom corner
int height = Math.max(topLeft.getHeight(), topRight.getHeight()) +
Math.max(bottomLeft.getHeight(), bottomRight.getHeight());
return new Dimension(width, height);
}
public BufferedImage getImage(int width, int height) {
BufferedImage me = new BufferedImage(width, height, middle.getType());
Graphics2D g2d = me.createGraphics();
paintIn(g2d, width, height);
g2d.dispose();
return me;
}
public void paintIn(Graphics g, int width, int height) {
Graphics2D g2d = (Graphics2D) g;
//fill between walls
paintTexture(g2d,middle,
leftBorder.getWidth(), topBorder.getHeight(),
width-rightBorder.getWidth(), height-bottomBorder.getHeight() );
//draw four corners
g.drawImage(topLeft, 0, 0,null);
g.drawImage(topRight , width-topRight.getWidth(), 0,null);
g.drawImage(bottomLeft, 0, height-bottomLeft.getHeight(), null);
g.drawImage(bottomRight, width-bottomRight.getWidth(), height-bottomRight.getHeight(), null);
//draw walls
paintTexture(g2d, leftBorder,
0, topLeft.getHeight(),
leftBorder.getWidth(), height-bottomLeft.getHeight());
paintTexture(g2d, rightBorder,
width-rightBorder.getWidth(), topRight.getHeight(),
width, height-bottomRight.getHeight());
paintTexture(g2d, topBorder,
topLeft.getWidth(), 0,
width-topRight.getWidth(), topBorder.getHeight());
paintTexture(g2d, bottomBorder,
bottomLeft.getWidth(), height-bottomBorder.getHeight(),
width-bottomRight.getWidth(), height);
}
private void paintTexture(Graphics2D g, BufferedImage bi,
int x1, int y1, int x2, int y2 ) {
Graphics2D g2d = (Graphics2D) g.create();
g2d.translate(x1, y1);
g2d.setPaint(new TexturePaint(bi,
new Rectangle2D.Float(0f, 0f,
(float)bi.getWidth(), (float)bi.getHeight())));
g2d.fillRect(0,0,x2-x1,y2-y1);
g2d.dispose();
}
public static void main(String[] args) {
JFrame f = new JFrame("9-Patch Test");
NinePatch test3 = new NinePatch();
NinePatch test4 = new NinePatch();
NinePatch test9 = new NinePatch();
NinePatch testOdd = new NinePatch();
NinePatch testButton = new NinePatch();
try {
BufferedImage c1 = ImageIO.read(new File("./resources/images/nP/c1.png"));
BufferedImage c2 = ImageIO.read(new File("./resources/images/nP/c2.png"));
BufferedImage c3 = ImageIO.read(new File("./resources/images/nP/c3.png"));
BufferedImage c4 = ImageIO.read(new File("./resources/images/nP/c4.png"));
BufferedImage center = ImageIO.read(new File("./resources/images/nP/center.png"));
BufferedImage corner = ImageIO.read(new File("./resources/images/nP/corner.png"));
BufferedImage edge = ImageIO.read(new File("./resources/images/nP/edge.png"));
BufferedImage horzGreenGrad = ImageIO.read(new File("./resources/images/nP/horzGreenGrad.png"));
BufferedImage spiral = ImageIO.read(new File("./resources/images/nP/spiral.png"));
BufferedImage vertGreenGrad = ImageIO.read(new File("./resources/images/nP/vertGreenGrad.png"));
BufferedImage oddCorner = ImageIO.read(new File("./resources/images/nP/oddSize/corner.png"));
BufferedImage oddHorz = ImageIO.read(new File("./resources/images/nP/oddSize/horzWall.png"));
BufferedImage oddVert = ImageIO.read(new File("./resources/images/nP/oddSize/vertWall.png"));
BufferedImage oddCenter = ImageIO.read(new File("./resources/images/nP/oddSize/whiteCenter.png"));
BufferedImage[] walls = new BufferedImage[] {
vertGreenGrad, horzGreenGrad, horzGreenGrad, vertGreenGrad
};
BufferedImage[] corners = new BufferedImage[] {
c1, c2, c3, c4
};
test3 = new NinePatch(center, edge, corner);
test4 = new NinePatch(spiral, vertGreenGrad, horzGreenGrad, c1);
test9 = new NinePatch(spiral, walls, corners);
testOdd = new NinePatch(oddCenter, oddVert, oddHorz, oddCorner);
testButton = NinePatch.loadFrom(Paths.get("./resources/images/nP/screen"));
} catch (Exception e) {
e.printStackTrace();
}
JPanel layoutPanel = new JPanel();
JComponent[] panels = new JComponent[] {
new NinePatchPanel(test3)
,new NinePatchPanel(test4)
,new NinePatchPanel(test9)
,new BackgroundPanel(test9.getImage(200,200))
,new NinePatchPanel(testOdd)
,new NinePatchButton(testButton, "Hello")
};
for(JComponent panel: panels) {
panel.setPreferredSize(new Dimension(400, 400));
layoutPanel.add(panel);
}
//f.add(new NinePatchPanel(testButton));
f.add(layoutPanel);
f.pack();
f.setVisible(true);
while(f.isShowing()) {
try {
Thread.sleep(1000);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
|
package com.vaadin.terminal.gwt.client.ui;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.FocusWidget;
import com.google.gwt.user.client.ui.Focusable;
import com.google.gwt.user.client.ui.Widget;
import com.vaadin.terminal.gwt.client.ApplicationConnection;
import com.vaadin.terminal.gwt.client.TooltipInfo;
import com.vaadin.terminal.gwt.client.UIDL;
import com.vaadin.terminal.gwt.client.VPaintableMap;
import com.vaadin.terminal.gwt.client.VPaintableWidget;
import com.vaadin.terminal.gwt.client.VPaintableWidgetContainer;
public abstract class VAbstractPaintableWidget implements VPaintableWidget {
public static final String ATTRIBUTE_DESCRIPTION = "description";
public static final String ATTRIBUTE_ERROR = "error";
private Widget widget;
private ApplicationConnection connection;
private String id;
/* State variables */
private boolean enabled = true;
private boolean visible = true;
/**
* Default constructor
*/
public VAbstractPaintableWidget() {
}
/**
* Called after the application connection reference has been set up
*/
public void init() {
}
/**
* Creates and returns the widget for this VPaintableWidget. This method
* should only be called once when initializing the paintable.
*
     * @return the newly created widget
*/
protected abstract Widget createWidget();
/**
* Returns the widget associated with this paintable. The widget returned by
     * this method must not change during the lifetime of the paintable.
*
* @return The widget associated with this paintable
*/
public Widget getWidgetForPaintable() {
if (widget == null) {
widget = createWidget();
}
return widget;
}
/*
* (non-Javadoc)
*
* @see com.vaadin.terminal.gwt.client.VPaintable#getConnection()
*/
public final ApplicationConnection getConnection() {
return connection;
}
/*
* (non-Javadoc)
*
* @see
* com.vaadin.terminal.gwt.client.VPaintable#setConnection(com.vaadin.terminal
* .gwt.client.ApplicationConnection)
*/
public final void setConnection(ApplicationConnection connection) {
this.connection = connection;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public VPaintableWidgetContainer getParent() {
// FIXME: Hierarchy should be set by framework instead of looked up here
VPaintableMap paintableMap = VPaintableMap.get(getConnection());
Widget w = getWidgetForPaintable();
while (w != null) {
w = w.getParent();
if (paintableMap.isPaintable(w)) {
return (VPaintableWidgetContainer) paintableMap
.getPaintable(w);
}
}
return null;
}
protected static boolean isRealUpdate(UIDL uidl) {
return !isCachedUpdate(uidl) && !uidl.getBooleanAttribute("invisible");
}
protected static boolean isCachedUpdate(UIDL uidl) {
return uidl.getBooleanAttribute("cached");
}
public void updateFromUIDL(UIDL uidl, ApplicationConnection client) {
if (isCachedUpdate(uidl)) {
return;
}
VPaintableMap paintableMap = VPaintableMap.get(getConnection());
        // register the events listened to on the server side with the component's
        // event handler
paintableMap.registerEventListenersFromUIDL(getId(), uidl);
// Visibility
setVisible(!uidl.getBooleanAttribute("invisible"), uidl);
if (uidl.getId().startsWith("PID_S")) {
DOM.setElementProperty(getWidgetForPaintable().getElement(), "id",
uidl.getId().substring(5));
}
if (!isVisible()) {
// component is invisible, delete old size to notify parent, if
// later made visible
paintableMap.setOffsetSize(this, null);
return;
}
/*
* Disabled state may affect (override) tabindex so the order must be
* first setting tabindex, then enabled state.
*/
if (uidl.hasAttribute("tabindex")
&& getWidgetForPaintable() instanceof Focusable) {
((Focusable) getWidgetForPaintable()).setTabIndex(uidl
.getIntAttribute("tabindex"));
}
setEnabled(!uidl.getBooleanAttribute("disabled"));
// Style names
String styleName = getStyleNameFromUIDL(getWidgetForPaintable()
.getStylePrimaryName(), uidl,
getWidgetForPaintable() instanceof Field);
getWidgetForPaintable().setStyleName(styleName);
// Update tooltip
TooltipInfo tooltipInfo = paintableMap.getTooltipInfo(this, null);
if (uidl.hasAttribute(ATTRIBUTE_DESCRIPTION)) {
tooltipInfo
.setTitle(uidl.getStringAttribute(ATTRIBUTE_DESCRIPTION));
} else {
tooltipInfo.setTitle(null);
}
// add error info to tooltip if present
if (uidl.hasAttribute(ATTRIBUTE_ERROR)) {
tooltipInfo.setErrorUidl(uidl.getErrors());
} else {
tooltipInfo.setErrorUidl(null);
}
// Set captions
if (delegateCaptionHandling()) {
getParent().updateCaption(this, uidl);
}
/*
         * updateComponentSize needs to be called after the caption update so the
         * caption can be taken into account
*/
getConnection().updateComponentSize(this, uidl);
}
/**
* Sets the enabled state of this paintable
*
* @param enabled
* true if the paintable is enabled, false otherwise
*/
protected void setEnabled(boolean enabled) {
this.enabled = enabled;
if (getWidgetForPaintable() instanceof FocusWidget) {
FocusWidget fw = (FocusWidget) getWidgetForPaintable();
fw.setEnabled(enabled);
}
}
public boolean isEnabled() {
return enabled;
}
/**
* Return true if parent handles caption, false if the paintable handles the
* caption itself.
*
*
* @deprecated This should always return true and all components should let
* the parent handle the caption and use other attributes for
* internal texts in the component
     * @return true if the parent handles the caption, false otherwise
*/
@Deprecated
protected boolean delegateCaptionHandling() {
return true;
}
/**
* Sets the visible state for this paintable.
*
* @param visible
* true if the paintable should be made visible, false otherwise
* @param captionUidl
* The UIDL that is passed to the parent and onwards to VCaption
* if the caption needs to be updated as a result of the
* visibility change.
*/
protected void setVisible(boolean visible, UIDL captionUidl) {
boolean wasVisible = this.visible;
this.visible = visible;
getWidgetForPaintable().setVisible(visible);
if (wasVisible != visible) {
            // Changed invisible <-> visible
if (wasVisible && delegateCaptionHandling()) {
// Must hide caption when component is hidden
getParent().updateCaption(this, captionUidl);
}
}
}
protected boolean isVisible() {
return visible;
}
/**
* Generates the style name for the widget based on the given primary style
* name (typically returned by Widget.getPrimaryStyleName()) and the UIDL.
* An additional "modified" style name can be added if the field parameter
* is set to true.
*
     * @param primaryStyleName the widget's primary style name
     * @param uidl the UIDL to read the style-related attributes from
     * @param field true if the widget is a Field, which enables the "modified" style name
     * @return the complete, space-separated style name string
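     *         e.g. (illustrative) a primary style of "v-button" with a UIDL style
     *         attribute of "link icon" produces
     *         "v-button v-button-link link v-button-icon icon"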
*/
protected static String getStyleNameFromUIDL(String primaryStyleName,
UIDL uidl, boolean field) {
boolean enabled = !uidl.getBooleanAttribute("disabled");
StringBuffer styleBuf = new StringBuffer();
styleBuf.append(primaryStyleName);
// first disabling and read-only status
if (!enabled) {
styleBuf.append(" ");
styleBuf.append(ApplicationConnection.DISABLED_CLASSNAME);
}
if (uidl.getBooleanAttribute("readonly")) {
styleBuf.append(" ");
styleBuf.append("v-readonly");
}
// add additional styles as css classes, prefixed with component default
// stylename
if (uidl.hasAttribute("style")) {
final String[] styles = uidl.getStringAttribute("style").split(" ");
for (int i = 0; i < styles.length; i++) {
styleBuf.append(" ");
styleBuf.append(primaryStyleName);
styleBuf.append("-");
styleBuf.append(styles[i]);
styleBuf.append(" ");
styleBuf.append(styles[i]);
}
}
// add modified classname to Fields
if (field && uidl.hasAttribute("modified")) {
styleBuf.append(" ");
styleBuf.append(ApplicationConnection.MODIFIED_CLASSNAME);
}
// add error classname to components w/ error
if (uidl.hasAttribute(ATTRIBUTE_ERROR)) {
styleBuf.append(" ");
styleBuf.append(primaryStyleName);
styleBuf.append(ApplicationConnection.ERROR_CLASSNAME_EXT);
}
// add required style to required components
if (uidl.hasAttribute("required")) {
styleBuf.append(" ");
styleBuf.append(primaryStyleName);
styleBuf.append(ApplicationConnection.REQUIRED_CLASSNAME_EXT);
}
return styleBuf.toString();
}
}
|
package edu.umd.cs.findbugs;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import edu.umd.cs.findbugs.ba.ClassHash;
import edu.umd.cs.findbugs.ba.MethodHash;
/**
 * A slightly more intelligent way of comparing BugInstances from two versions
* to see if they are the "same". Uses class and method hashes to try to
* handle renamings.
*
* @see edu.umd.cs.findbugs.BugInstance
* @see edu.umd.cs.findbugs.VersionInsensitiveBugComparator
* @author David Hovemeyer
*/
public class FuzzyBugComparator implements Comparator<BugInstance> {
/**
* Filter ignored BugAnnotations from given Iterator.
*/
class FilteringBugAnnotationIterator implements Iterator<BugAnnotation> {
Iterator<BugAnnotation> iter;
BugAnnotation next;
public FilteringBugAnnotationIterator(Iterator<BugAnnotation> iter) {
this.iter = iter;
}
private void findNext() {
if (next == null) {
while (iter.hasNext()) {
BugAnnotation candidate = iter.next();
if (!ignore(candidate)) {
next = candidate;
break;
}
}
}
}
/* (non-Javadoc)
* @see java.util.Iterator#hasNext()
*/
public boolean hasNext() {
findNext();
return next != null;
}
/* (non-Javadoc)
* @see java.util.Iterator#next()
*/
public BugAnnotation next() {
findNext();
if (next == null)
throw new NoSuchElementException();
BugAnnotation result = next;
next = null;
return result;
}
/* (non-Javadoc)
* @see java.util.Iterator#remove()
*/
public void remove() {
throw new UnsupportedOperationException();
}
}
private SortedBugCollection bugCollection;
public FuzzyBugComparator(SortedBugCollection bugCollection) {
this.bugCollection = bugCollection;
}
public int compare(BugInstance a, BugInstance b) {
int cmp;
cmp = a.getType().compareTo(b.getType());
if (cmp != 0)
return cmp;
Iterator<BugAnnotation> lhsIter = new FilteringBugAnnotationIterator(a.annotationIterator());
Iterator<BugAnnotation> rhsIter = new FilteringBugAnnotationIterator(b.annotationIterator());
while (lhsIter.hasNext() && rhsIter.hasNext()) {
BugAnnotation lhs = lhsIter.next();
BugAnnotation rhs = rhsIter.next();
// Annotation classes must match
cmp = lhs.getClass().getName().compareTo(rhs.getClass().getName());
if (cmp != 0)
return cmp;
if (lhs.getClass() == ClassAnnotation.class)
cmp = compareClasses((ClassAnnotation) lhs, (ClassAnnotation) rhs);
else if (lhs.getClass() == MethodAnnotation.class)
cmp = compareMethods((MethodAnnotation) lhs, (MethodAnnotation) rhs);
else if (lhs.getClass() == SourceLineAnnotation.class)
cmp = compareSourceLines((SourceLineAnnotation) lhs, (SourceLineAnnotation) rhs);
}
// TODO
return 0;
}
private static <T> int compareNullElements(T a, T b) {
if (a != null)
return 1;
else if (b != null)
return -1;
else
return 0;
}
// Compare classes: either exact fully qualified name must match, or class hash must match
public int compareClasses(ClassAnnotation lhsClass, ClassAnnotation rhsClass) {
if (lhsClass == null || rhsClass == null) {
return compareNullElements(lhsClass, rhsClass);
}
int cmp;
// Compare by class name. If same, great.
cmp = lhsClass.compareTo(rhsClass);
if (cmp == 0)
return 0;
// Get class hashes
ClassHash lhsHash = bugCollection.getClassHash(lhsClass.getClassName());
ClassHash rhsHash = bugCollection.getClassHash(rhsClass.getClassName());
if (lhsHash == null || rhsHash == null)
return cmp;
return lhsHash.isSameHash(rhsHash) ? 0 : cmp;
}
// Compare methods: either exact name and signature must match, or method hash must match
public int compareMethods(MethodAnnotation lhsMethod, MethodAnnotation rhsMethod) {
if (lhsMethod == null || rhsMethod == null) {
return compareNullElements(lhsMethod, rhsMethod);
}
// Compare for exact match
int cmp = lhsMethod.compareTo(rhsMethod);
if (cmp == 0)
return 0;
// Get class hashes for primary classes
ClassHash lhsClassHash = bugCollection.getClassHash(lhsMethod.getClassName());
ClassHash rhsClassHash = bugCollection.getClassHash(rhsMethod.getClassName());
if (lhsClassHash == null || rhsClassHash == null)
return cmp;
// Look up method hashes
MethodHash lhsHash = lhsClassHash.getMethodHash(lhsMethod.toXMethod());
MethodHash rhsHash = rhsClassHash.getMethodHash(rhsMethod.toXMethod());
if (lhsHash == null || rhsHash == null)
return cmp;
return lhsHash.isSameHash(rhsHash) ? 0 : cmp;
}
/**
	 * @param lhs the first source line annotation
	 * @param rhs the second source line annotation
	 * @return currently always 0; source line comparison is not yet implemented
*/
public int compareSourceLines(SourceLineAnnotation lhs, SourceLineAnnotation rhs) {
// TODO Auto-generated method stub
return 0;
}
public boolean ignore(BugAnnotation annotation) {
return false;
}
}
|
package com.cloud.api.commands;
import java.util.List;
import org.apache.log4j.Logger;
import com.cloud.api.ApiDBUtils;
import com.cloud.api.BaseAsyncCmd;
import com.cloud.api.BaseCmd;
import com.cloud.api.BaseCmd.Manager;
import com.cloud.api.Implementation;
import com.cloud.api.Parameter;
import com.cloud.api.response.UserVmResponse;
import com.cloud.event.EventTypes;
import com.cloud.offering.ServiceOffering;
import com.cloud.storage.VMTemplateVO;
import com.cloud.user.Account;
import com.cloud.user.User;
import com.cloud.user.UserContext;
import com.cloud.uservm.UserVm;
import com.cloud.vm.InstanceGroupVO;
@Implementation(createMethod="createVirtualMachine", method="startVirtualMachine", manager=Manager.UserVmManager, description="Creates and automatically starts a virtual machine based on a service offering, disk offering, and template.")
public class DeployVmCmd extends BaseAsyncCmd {
    public static final Logger s_logger = Logger.getLogger(DeployVmCmd.class.getName());
private static final String s_name = "deployvirtualmachineresponse";
//////////////// API parameters /////////////////////
@Parameter(name="account", type=CommandType.STRING, description="an optional account for the virtual machine. Must be used with domainId.")
private String accountName;
@Parameter(name="diskofferingid", type=CommandType.LONG, description="the ID of the disk offering for the virtual machine. If the template is of ISO format, the diskOfferingId is for the root disk volume. Otherwise this parameter is used to dinidcate the offering for the data disk volume. If the templateId parameter passed is from a Template object, the diskOfferingId refers to a DATA Disk Volume created. If the templateId parameter passed is from an ISO object, the diskOfferingId refers to a ROOT Disk Volume created.")
private Long diskOfferingId;
@Parameter(name="displayname", type=CommandType.STRING, description="an optional user generated name for the virtual machine")
private String displayName;
@Parameter(name="domainid", type=CommandType.LONG, description="an optional domainId for the virtual machine. If the account parameter is used, domainId must also be used.")
private Long domainId;
@Parameter(name="group", type=CommandType.STRING, description="an optional group for the virtual machine")
private String group;
@Parameter(name="hypervisor", type=CommandType.STRING, description="the hypervisor on which to deploy the virtual machine")
private String hypervisor;
@Parameter(name="networkgrouplist", type=CommandType.LIST, collectionType=CommandType.STRING, description="comma separated list of network groups that going to be applied to the virtual machine. Should be passed only when vm is created from service offering with Direct Attach Network support")
private List<String> networkGroupList;
@Parameter(name="serviceofferingid", type=CommandType.LONG, required=true, description="the ID of the service offering for the virtual machine")
private Long serviceOfferingId;
@Parameter(name="size", type=CommandType.LONG, description="the arbitrary size for the DATADISK volume. Mutually exclusive with diskOfferingId")
private Long size;
@Parameter(name="templateid", type=CommandType.LONG, required=true, description="the ID of the template for the virtual machine")
private Long templateId;
@Parameter(name="userdata", type=CommandType.STRING, description="an optional binary data that can be sent to the virtual machine upon a successful deployment. This binary data must be base64 encoded before adding it to the request. Currently only HTTP GET is supported. Using HTTP GET (via querystring), you can send up to 2KB of data after base64 encoding.")
private String userData;
@Parameter(name="zoneid", type=CommandType.LONG, required=true, description="availability zone for the virtual machine")
private Long zoneId;
// unexposed parameter needed for serializing/deserializing the command
@Parameter(name="password", type=CommandType.STRING, expose=false)
private String password;
/////////////////// Accessors ///////////////////////
public String getAccountName() {
return accountName;
}
public Long getDiskOfferingId() {
return diskOfferingId;
}
public String getDisplayName() {
return displayName;
}
public Long getDomainId() {
return domainId;
}
public String getGroup() {
return group;
}
public String getHypervisor() {
return hypervisor;
}
public List<String> getNetworkGroupList() {
return networkGroupList;
}
public Long getServiceOfferingId() {
return serviceOfferingId;
}
public Long getSize() {
return size;
}
public Long getTemplateId() {
return templateId;
}
public String getUserData() {
return userData;
}
public Long getZoneId() {
return zoneId;
}
// not exposed parameter
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
/////////////// API Implementation///////////////////
@Override
public String getName() {
return s_name;
}
public static String getResultObjectName() {
return "virtualmachine";
}
@Override
public long getAccountId() {
Account account = UserContext.current().getAccount();
if ((account == null) || isAdmin(account.getType())) {
if ((domainId != null) && (accountName != null)) {
Account userAccount = ApiDBUtils.findAccountByNameDomain(accountName, domainId);
if (userAccount != null) {
return userAccount.getId();
}
}
}
if (account != null) {
return account.getId();
}
return Account.ACCOUNT_ID_SYSTEM; // no account info given, parent this command to SYSTEM so ERROR events are tracked
}
@Override
public String getEventType() {
return EventTypes.EVENT_VM_CREATE;
}
@Override
public String getEventDescription() {
return "deploying Vm";
}
@Override @SuppressWarnings("unchecked")
public UserVmResponse getResponse() {
UserVm userVm = (UserVm)getResponseObject();
UserVmResponse response = new UserVmResponse();
response.setId(userVm.getId());
response.setName(userVm.getName());
response.setCreated(userVm.getCreated());
response.setZoneId(userVm.getDataCenterId());
response.setZoneName(ApiDBUtils.findZoneById(userVm.getDataCenterId()).getName());
response.setIpAddress(userVm.getPrivateIpAddress());
response.setServiceOfferingId(userVm.getServiceOfferingId());
response.setHaEnable(userVm.isHaEnabled());
InstanceGroupVO group = ApiDBUtils.findInstanceGroupForVM(userVm.getId());
if (group != null) {
response.setGroup(group.getName());
response.setGroupId(group.getId());
}
if (userVm.getDisplayName() == null || userVm.getDisplayName().length() == 0) {
response.setDisplayName(userVm.getName());
} else {
response.setDisplayName(userVm.getDisplayName());
}
if (userVm.getState() != null) {
response.setState(userVm.getState().toString());
}
VMTemplateVO template = ApiDBUtils.findTemplateById(userVm.getTemplateId());
Account acct = ApiDBUtils.findAccountById(Long.valueOf(userVm.getAccountId()));
if (acct != null) {
response.setAccountName(acct.getAccountName());
response.setDomainId(acct.getDomainId());
response.setDomainName(ApiDBUtils.findDomainById(acct.getDomainId()).getName());
}
Long userId = UserContext.current().getUserId();
if (userId == null) {
userId = User.UID_SYSTEM;
}
//this is for the case where the admin deploys a vm for a normal user
User userExecutingCmd = ApiDBUtils.findUserById(userId);
Account acctForUserExecutingCmd = ApiDBUtils.findAccountById(Long.valueOf(userExecutingCmd.getAccountId()));
if ((BaseCmd.isAdmin(acctForUserExecutingCmd.getType()) && (userVm.getHostId() != null)) || (BaseCmd.isAdmin(acct.getType()) && (userVm.getHostId() != null))) {
response.setHostName(ApiDBUtils.findHostById(userVm.getHostId()).getName());
response.setHostId(userVm.getHostId());
}
String templateName = "none";
boolean templatePasswordEnabled = false;
String templateDisplayText = null;
if (template != null) {
templateName = template.getName();
templatePasswordEnabled = template.getEnablePassword();
templateDisplayText = template.getDisplayText();
if (templateDisplayText == null) {
templateDisplayText = templateName;
}
}
if (templatePasswordEnabled) { // FIXME: where will the password come from in this case?
response.setPassword(getPassword());
}
// ISO Info
Long isoId = userVm.getIsoId();
if (isoId != null) {
VMTemplateVO iso = ApiDBUtils.findTemplateById(isoId.longValue());
if (iso != null) {
response.setIsoId(isoId.longValue());
response.setIsoName(iso.getName());
response.setTemplateId(isoId.longValue());
response.setTemplateName(iso.getName());
templateDisplayText = iso.getDisplayText();
if(templateDisplayText == null)
templateDisplayText = iso.getName();
response.setIsoDisplayText(templateDisplayText);
response.setTemplateDisplayText(templateDisplayText);
}
} else {
response.setTemplateId(userVm.getTemplateId());
response.setTemplateName(templateName);
response.setTemplateDisplayText(templateDisplayText);
response.setPasswordEnabled(templatePasswordEnabled);
}
ServiceOffering offering = ApiDBUtils.findServiceOfferingById(userVm.getServiceOfferingId());
response.setServiceOfferingId(userVm.getServiceOfferingId());
response.setServiceOfferingName(offering.getName());
response.setCpuNumber(offering.getCpu());
response.setCpuSpeed(offering.getSpeed());
response.setMemory(offering.getRamSize());
response.setNetworkGroupList(ApiDBUtils.getNetworkGroupsNamesForVm(userVm.getId()));
response.setResponseName(getName());
return response;
}
}
|
package com.lothrazar.cyclicmagic.item;
import java.util.List;
import com.lothrazar.cyclicmagic.IHasRecipe;
import com.lothrazar.cyclicmagic.util.UtilChat;
import com.lothrazar.cyclicmagic.util.UtilNBT;
import com.lothrazar.cyclicmagic.util.UtilSound;
import net.minecraft.block.BlockLever;
import net.minecraft.block.BlockStoneSlab;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.SoundEvents;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ActionResult;
import net.minecraft.util.EnumActionResult;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.TextFormatting;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.registry.GameRegistry;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
public class ItemPasswordRemote extends BaseItem implements IHasRecipe {
public ItemPasswordRemote() {
this.setMaxStackSize(1);
}
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack stack, EntityPlayer playerIn, List<String> tooltip, boolean advanced) {
BlockPos pointer = UtilNBT.getItemStackBlockPos(stack);
if (pointer != null) {
tooltip.add(TextFormatting.RED + UtilChat.blockPosToString(pointer));
}
super.addInformation(stack, playerIn, tooltip, advanced);
}
@Override
public EnumActionResult onItemUse(EntityPlayer playerIn, World worldIn, BlockPos pos, EnumHand hand, EnumFacing facing, float hitX, float hitY, float hitZ) {
ItemStack stack = playerIn.getHeldItem(hand);
if (worldIn.getBlockState(pos).getBlock() instanceof BlockLever) {
UtilNBT.setItemStackBlockPos(stack, pos);
if (worldIn.isRemote) {
UtilChat.addChatMessage(playerIn, this.getUnlocalizedName() + ".saved");
}
UtilSound.playSound(playerIn, SoundEvents.BLOCK_LEVER_CLICK);
return EnumActionResult.SUCCESS;
}
else {
boolean success = false;
success = trigger(stack, worldIn, playerIn);
if (success)
return EnumActionResult.SUCCESS;
else
return EnumActionResult.FAIL;
}
}
@Override
public ActionResult<ItemStack> onItemRightClick(World worldIn, EntityPlayer playerIn, EnumHand hand) {
ItemStack stack = playerIn.getHeldItem(hand);
boolean success = false;
success = trigger(stack, worldIn, playerIn);
if (success)
return new ActionResult<ItemStack>(EnumActionResult.SUCCESS, stack);
else
return new ActionResult<ItemStack>(EnumActionResult.FAIL, stack);
}
private boolean trigger(ItemStack stack, World worldIn, EntityPlayer playerIn) {
BlockPos pointer = UtilNBT.getItemStackBlockPos(stack);
if (pointer == null) {
if (worldIn.isRemote) {
UtilChat.addChatMessage(playerIn, this.getUnlocalizedName() + ".invalid");
}
return false;
}
else {
IBlockState blockState = worldIn.getBlockState(pointer);
if (blockState == null || blockState.getBlock() != Blocks.LEVER) {
UtilChat.addChatMessage(playerIn, this.getUnlocalizedName() + ".invalid");
return false;
}
else {
boolean hasPowerHere = blockState.getValue(BlockLever.POWERED);//this.block.getStrongPower(blockState, worldIn, pointer, EnumFacing.UP) > 0;
worldIn.setBlockState(pointer, blockState.withProperty(BlockLever.POWERED, !hasPowerHere));
UtilSound.playSound(playerIn, SoundEvents.BLOCK_LEVER_CLICK);
return true;
}
}
}
@Override
public void addRecipe() {
GameRegistry.addShapelessRecipe(new ItemStack(this),
new ItemStack(Blocks.STONE_SLAB, 1, BlockStoneSlab.EnumType.STONE.getMetadata()),
Blocks.STONE_BUTTON,
Blocks.LEVER);
}
}
|
package com.valkryst.VTerminal.revamp.component.component;
import com.valkryst.VTerminal.revamp.component.Screen;
import com.valkryst.VTerminal.revamp.component.builder.CheckBoxBuilder;
import com.valkryst.VTerminal.revamp.component.palette.ColorPalette;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;
import javax.swing.event.MouseInputListener;
import java.awt.event.MouseEvent;
@ToString
public class CheckBox extends Button {
/** The character to display when the checkbox is not checked. */
@Getter private char emptyBoxChar;
/** The character to display when the checkbox is checked. */
@Getter private char checkedBoxChar;
/** Whether or not the check box is checked. */
@Getter private boolean isChecked;
/**
     * Constructs a new CheckBox.
*
* @param builder
* The builder to use.
*
* @throws NullPointerException
* If the builder is null.
*/
public CheckBox(final @NonNull CheckBoxBuilder builder) {
super(builder);
this.emptyBoxChar = builder.getEmptyBoxChar();
this.checkedBoxChar = builder.getCheckedBoxChar();
this.isChecked = builder.isChecked();
final ColorPalette colorPalette = builder.getColorPalette();
backgroundColor_normal = colorPalette.getCheckBox_defaultBackground();
foregroundColor_normal = colorPalette.getCheckBox_defaultForeground();
backgroundColor_hover = colorPalette.getCheckBox_hoverBackground();
foregroundColor_hover = colorPalette.getCheckBox_hoverForeground();
backgroundColor_pressed = colorPalette.getCheckBox_checkedBackground();
foregroundColor_pressed = colorPalette.getCheckBox_checkedForeground();
}
@Override
public void createEventListeners(final @NonNull Screen parentScreen) {
if (super.getEventListeners().size() > 0) {
return;
}
final MouseInputListener mouseListener = new MouseInputListener() {
@Override
public void mouseDragged(final MouseEvent e) {}
@Override
public void mouseMoved(final MouseEvent e) {
if (intersects(parentScreen.getMousePosition())) {
setStateHovered();
} else {
if (isChecked) {
setStatePressed();
} else {
setStateNormal();
}
}
}
@Override
public void mouseClicked(final MouseEvent e) {}
@Override
public void mousePressed(final MouseEvent e) {
if (e.getButton() == MouseEvent.BUTTON1) {
if (intersects(parentScreen.getMousePosition())) {
if (isChecked) {
setChecked(false);
} else {
CheckBox.super.getOnClickFunction().run();
setChecked(true);
}
}
}
}
@Override
public void mouseReleased(final MouseEvent e) {}
@Override
public void mouseEntered(final MouseEvent e) {}
@Override
public void mouseExited(final MouseEvent e) {}
};
super.eventListeners.add(mouseListener);
}
/**
* Sets the checked state.
*
* @param isChecked
* Whether or not the check box is checked.
*/
public void setChecked(final boolean isChecked) {
this.isChecked = isChecked;
if (isChecked) {
super.tiles.getTileAt(0, 0).setCharacter(checkedBoxChar);
} else {
super.tiles.getTileAt(0, 0).setCharacter(emptyBoxChar);
}
super.redrawFunction.run();
}
}
|
package edu.umd.cs.findbugs.ba.npe;
import edu.umd.cs.findbugs.SystemProperties;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.ba.Debug;
import edu.umd.cs.findbugs.ba.Location;
import edu.umd.cs.findbugs.ba.XMethod;
import edu.umd.cs.findbugs.ba.XMethodParameter;
/**
* A class to abstractly represent values in stack slots,
 * indicating whether those values can be null, non-null,
* null on some incoming path, or unknown.
*
* @author David Hovemeyer
* @see IsNullValueFrame
* @see IsNullValueAnalysis
*/
public class IsNullValue implements IsNullValueAnalysisFeatures, Debug {
private static final boolean DEBUG_EXCEPTION = SystemProperties.getBoolean("inv.debugException");
private static final boolean DEBUG_KABOOM = SystemProperties.getBoolean("inv.debugKaboom");
/** Definitely null. */
private static final int NULL = 0;
/** Definitely null because of a comparison to a known null value. */
private static final int CHECKED_NULL = 1;
/** Definitely not null. */
private static final int NN = 2;
/** Definitely not null because of a comparison to a known null value. */
private static final int CHECKED_NN = 3;
    /** Definitely not null: an NPE would have occurred and we would not be here if it were null. */
private static final int NO_KABOOM_NN = 4;
/** Null on some simple path (at most one branch) to current location. */
private static final int NSP = 5;
/** Unknown value (method param, value read from heap, etc.), assumed not null. */
private static final int NN_UNKNOWN = 6;
/** Null on some complex path (at least two branches) to current location. */
private static final int NCP2 = 7;
/** Null on some complex path (at least three branches) to current location. */
private static final int NCP3 = 8;
private static final int FLAG_SHIFT = 8;
/** Value was propagated along an exception path. */
private static final int EXCEPTION = 1 << FLAG_SHIFT;
/** Value is (potentially) null because of a parameter passed to the method. */
private static final int PARAM = 2 << FLAG_SHIFT;
/** Value is (potentially) null because of a value returned from a called method. */
private static final int RETURN_VAL = 4 << FLAG_SHIFT;
private static final int FLAG_MASK = EXCEPTION | PARAM | RETURN_VAL;
private static final int[][] mergeMatrix = {
// NULL, CHECKED_NULL, NN, CHECKED_NN, NO_KABOOM_NN, NSP, NN_UNKNOWN, NCP2, NCP3
{NULL}, // NULL
{NULL, CHECKED_NULL, }, // CHECKED_NULL
            {NSP, NSP, NN}, // NN
{NSP, NSP, NN, CHECKED_NN, }, // CHECKED_NN
{NSP, NSP, NN, NN, NO_KABOOM_NN}, // NO_KABOOM_NN
{NSP, NSP, NSP, NSP, NSP, NSP}, // NSP
{NSP, NSP, NN_UNKNOWN, NN_UNKNOWN, NN_UNKNOWN, NSP, NN_UNKNOWN, }, // NN_UNKNOWN
{NSP, NSP, NCP2, NCP2, NCP2, NCP2, NCP2, NCP2,}, // NCP2
{NSP, NSP, NCP3, NCP3, NCP3, NCP3, NCP3, NCP3, NCP3}// NCP3
};
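    // Reading the matrix: merge() orders the two base kinds so that the larger index
    // selects the row, e.g. merging NN (2) with NULL (0) looks up mergeMatrix[2][0]
    // and yields NSP (null on some simple path).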
private static final IsNullValue[][] instanceByFlagsList = createInstanceByFlagList();
private static IsNullValue[][] createInstanceByFlagList() {
final int max = FLAG_MASK >>> FLAG_SHIFT;
IsNullValue[][] result = new IsNullValue[max + 1][];
for (int i = 0; i <= max; ++i) {
final int flags = i << FLAG_SHIFT;
result[i] = new IsNullValue[]{
new IsNullValue(NULL | flags),
new IsNullValue(CHECKED_NULL | flags),
new IsNullValue(NN | flags),
new IsNullValue(CHECKED_NN | flags),
null, // NO_KABOOM_NN values must be allocated dynamically
new IsNullValue(NSP | flags),
new IsNullValue(NN_UNKNOWN | flags),
new IsNullValue(NCP2 | flags),
new IsNullValue(NCP3 | flags),
};
}
return result;
}
// Fields
private final int kind;
private final Location locationOfKaBoom;
private IsNullValue(int kind) {
this.kind = kind;
locationOfKaBoom = null;
if (VERIFY_INTEGRITY) checkNoKaboomNNLocation();
}
private IsNullValue(int kind, Location ins) {
this.kind = kind;
locationOfKaBoom = ins;
if (VERIFY_INTEGRITY) checkNoKaboomNNLocation();
}
private void checkNoKaboomNNLocation() {
if (getBaseKind() == NO_KABOOM_NN && locationOfKaBoom == null) {
throw new IllegalStateException("construction of no-KaBoom NN without Location");
}
}
@Override
public boolean equals(Object o) {
if (o == null || this.getClass() != o.getClass())
return false;
IsNullValue other = (IsNullValue) o;
if ( kind != other.kind) return false;
if (locationOfKaBoom == other.locationOfKaBoom) return true;
if (locationOfKaBoom == null || other.locationOfKaBoom == null) return false;
return locationOfKaBoom.equals(other.locationOfKaBoom);
}
@Override
public int hashCode() {
int hashCode = kind;
if (locationOfKaBoom != null)
hashCode += locationOfKaBoom.hashCode();
return hashCode;
}
private int getBaseKind() {
return kind & ~FLAG_MASK;
}
private int getFlags() {
return kind & FLAG_MASK;
}
/**
* Was this value propagated on an exception path?
*/
public boolean isException() {
return (kind & EXCEPTION) != 0;
}
/**
* Was this value marked as a possibly null return value?
*/
public boolean isReturnValue() {
return (kind & RETURN_VAL) != 0;
}
/**
* Was this value marked as a possibly null parameter?
*/
public boolean isParamValue() {
return (kind & PARAM) != 0;
}
/**
* Is this value known because of an explicit null check?
*/
public boolean isChecked() {
return getBaseKind() == CHECKED_NULL || getBaseKind() == CHECKED_NN;
}
/**
* Is this value known to be non null because a NPE would have occurred otherwise?
*/
public boolean wouldHaveBeenAKaboom() {
return getBaseKind() == NO_KABOOM_NN;
}
private IsNullValue toBaseValue() {
return instanceByFlagsList[0][getBaseKind()];
}
/**
* Convert to an exception path value.
*/
public IsNullValue toExceptionValue() {
if (getBaseKind() == NO_KABOOM_NN) return new IsNullValue(kind | EXCEPTION, locationOfKaBoom);
return instanceByFlagsList[(getFlags() | EXCEPTION) >> FLAG_SHIFT][getBaseKind()];
}
/**
* Convert to a value known because it was returned from a method
* in a method property database.
* @param methodInvoked TODO
*/
public IsNullValue markInformationAsComingFromReturnValueOfMethod(XMethod methodInvoked) {
if (getBaseKind() == NO_KABOOM_NN) return new IsNullValue(kind | RETURN_VAL, locationOfKaBoom);
return instanceByFlagsList[(getFlags() | RETURN_VAL) >> FLAG_SHIFT][getBaseKind()];
}
/**
* Get the instance representing values that are definitely null.
*/
public static IsNullValue nullValue() {
return instanceByFlagsList[0][NULL];
}
/**
* Get the instance representing a value known to be null
* because it was compared against null value, or because
* we saw that it was assigned the null constant.
*/
public static IsNullValue checkedNullValue() {
return instanceByFlagsList[0][CHECKED_NULL];
}
/**
* Get the instance representing values that are definitely not null.
*/
public static IsNullValue nonNullValue() {
return instanceByFlagsList[0][NN];
}
/**
* Get the instance representing a value known to be non-null
* because it was compared against null value, or because
* we saw the object creation.
*/
public static IsNullValue checkedNonNullValue() {
return instanceByFlagsList[0][CHECKED_NN];
}
/**
* Get the instance representing a value known to be non-null
* because a NPE would have occurred if it were null.
*/
public static IsNullValue noKaboomNonNullValue(@NonNull Location ins) {
if (ins == null)
throw new NullPointerException("ins cannot be null");
return new IsNullValue(NO_KABOOM_NN, ins);
}
/**
* Get the instance representing values that are definitely null
* on some simple (no branches) incoming path.
*/
public static IsNullValue nullOnSimplePathValue() {
return instanceByFlagsList[0][NSP];
}
/**
* Get instance representing a parameter marked as MightBeNull
*/
public static IsNullValue parameterMarkedAsMightBeNull(XMethodParameter mp) {
return instanceByFlagsList[PARAM >> FLAG_SHIFT][NSP];
}
/**
* Get non-reporting non-null value.
* This is what we use for unknown values.
*/
public static IsNullValue nonReportingNotNullValue() {
return instanceByFlagsList[0][NN_UNKNOWN];
}
/**
* Get null on complex path value.
* This is like null on simple path value, but there
* are at least two branches between the explicit null value
* and the current location. If the conditions are correlated,
* then the path on which the value is null may be infeasible.
*/
public static IsNullValue nullOnComplexPathValue() {
return instanceByFlagsList[0][NCP2];
}
/**
* Like "null on complex path" except that there are at least
* <em>three</em> branches between the explicit null value
* and the current location.
*/
public static IsNullValue nullOnComplexPathValue3() {
return instanceByFlagsList[0][NCP3];
}
/**
* Get null value resulting from comparison to explicit null.
*/
public static IsNullValue pathSensitiveNullValue() {
return instanceByFlagsList[0][CHECKED_NULL];
}
/**
* Get non-null value resulting from comparison to explicit null.
*/
public static IsNullValue pathSensitiveNonNullValue() {
return instanceByFlagsList[0][CHECKED_NN];
}
/**
* Merge two values.
*/
public static IsNullValue merge(IsNullValue a, IsNullValue b) {
if (a == b) return a;
if (a.equals(b)) return a;
int aKind = a.kind & 0xff;
int bKind = b.kind & 0xff;
int aFlags = a.getFlags();
int bFlags = b.getFlags();
int combinedFlags = aFlags & bFlags;
if (!(a.isNullOnSomePath() || a.isDefinitelyNull()) && b.isException())
combinedFlags |= EXCEPTION;
else
if (!(b.isNullOnSomePath() || b.isDefinitelyNull()) && a.isException())
combinedFlags |= EXCEPTION;
// Left hand value should be >=, since it is used
// as the first dimension of the matrix to index.
if (aKind < bKind) {
int tmp = aKind;
aKind = bKind;
bKind = tmp;
}
assert aKind >= bKind;
int result = mergeMatrix[aKind][bKind];
IsNullValue resultValue = (result == NO_KABOOM_NN)
? noKaboomNonNullValue(a.locationOfKaBoom)
: instanceByFlagsList[combinedFlags >> FLAG_SHIFT][result];
return resultValue;
}
/**
* Is this value definitely null?
*/
public boolean isDefinitelyNull() {
int baseKind = getBaseKind();
return baseKind == NULL || baseKind == CHECKED_NULL;
}
/**
* Is this value null on some path?
*/
public boolean isNullOnSomePath() {
int baseKind = getBaseKind();
if (NCP_EXTRA_BRANCH) {
// Note: NCP_EXTRA_BRANCH is an experimental feature
// to see how many false warnings we get when we allow
			// two branches between an explicit null and a dereference.
return baseKind == NSP || baseKind == NCP2;
} else {
return baseKind == NSP;
}
}
/**
* Return true if this value is either definitely null,
* or might be null on a simple path.
*
* @return true if this value is either definitely null,
* or might be null on a simple path, false otherwise
*/
public boolean mightBeNull() {
return isDefinitelyNull() || isNullOnSomePath();
}
/**
* Is this value definitely not null?
*/
public boolean isDefinitelyNotNull() {
int baseKind = getBaseKind();
return baseKind == NN || baseKind == CHECKED_NN || baseKind == NO_KABOOM_NN;
}
@Override
public String toString() {
String pfx = "";
if (DEBUG_EXCEPTION) {
int flags = getFlags();
if (flags == 0)
pfx = "_";
else {
if ((flags & EXCEPTION) != 0) pfx += "e";
if ((flags & PARAM) != 0) pfx += "p";
if ((flags & RETURN_VAL) != 0) pfx += "r";
}
}
if (DEBUG_KABOOM && locationOfKaBoom == null) {
pfx += "[?]";
}
switch (getBaseKind()) {
case NULL:
return pfx + "n" + ",";
case CHECKED_NULL:
return pfx + "w" + ",";
case NN:
return pfx + "N" + ",";
case CHECKED_NN:
return pfx + "W" + ",";
case NO_KABOOM_NN:
return pfx + "K" + ",";
case NSP:
return pfx + "s" + ",";
case NN_UNKNOWN:
return pfx + "-" + ",";
case NCP2:
return pfx + "/" + ",";
        case NCP3:
            return pfx + "//" + ","; // NCP3 previously fell through to the default case and threw
        default:
            throw new IllegalStateException("unknown kind of IsNullValue: " + kind);
}
}
public Location getLocationOfKaBoom() {
return locationOfKaBoom;
}
}
// vim:ts=4
|
package com.cloud.domain.dao;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import javax.ejb.Local;
import org.apache.log4j.Logger;
import com.cloud.domain.DomainVO;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={DomainDao.class})
public class DomainDaoImpl extends GenericDaoBase<DomainVO, Long> implements DomainDao {
private static final Logger s_logger = Logger.getLogger(DomainDaoImpl.class);
protected SearchBuilder<DomainVO> DomainNameLikeSearch;
protected SearchBuilder<DomainVO> ParentDomainNameLikeSearch;
protected SearchBuilder<DomainVO> DomainPairSearch;
protected SearchBuilder<DomainVO> ImmediateChildDomainSearch;
protected SearchBuilder<DomainVO> FindAllChildrenSearch;
public DomainDaoImpl () {
DomainNameLikeSearch = createSearchBuilder();
DomainNameLikeSearch.and("name", DomainNameLikeSearch.entity().getName(), SearchCriteria.Op.LIKE);
DomainNameLikeSearch.done();
ParentDomainNameLikeSearch = createSearchBuilder();
ParentDomainNameLikeSearch.and("name", ParentDomainNameLikeSearch.entity().getName(), SearchCriteria.Op.LIKE);
ParentDomainNameLikeSearch.and("parent", ParentDomainNameLikeSearch.entity().getName(), SearchCriteria.Op.EQ);
ParentDomainNameLikeSearch.done();
DomainPairSearch = createSearchBuilder();
DomainPairSearch.and("id", DomainPairSearch.entity().getId(), SearchCriteria.Op.IN);
DomainPairSearch.done();
ImmediateChildDomainSearch = createSearchBuilder();
ImmediateChildDomainSearch.and("parent", ImmediateChildDomainSearch.entity().getParent(), SearchCriteria.Op.EQ);
ImmediateChildDomainSearch.done();
FindAllChildrenSearch = createSearchBuilder();
FindAllChildrenSearch.and("path", FindAllChildrenSearch.entity().getPath(), SearchCriteria.Op.LIKE);
FindAllChildrenSearch.and("id", FindAllChildrenSearch.entity().getId(), SearchCriteria.Op.NEQ);
FindAllChildrenSearch.done();
}
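    // Builds a child domain's path by appending its name to the parent's path;
    // e.g. (illustrative values) a parent path of "/" and the name "dev" yield "/dev/".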
private static String allocPath(DomainVO parentDomain, String name) {
String parentPath = parentDomain.getPath();
return parentPath + name + "/";
}
@Override
public synchronized DomainVO create(DomainVO domain) {
// make sure domain name is valid
String domainName = domain.getName();
if (domainName != null) {
if (domainName.contains("/")) {
throw new IllegalArgumentException("Domain name contains one or more invalid characters. Please enter a name without '/' characters.");
}
} else {
throw new IllegalArgumentException("Domain name is null. Please specify a valid domain name.");
}
long parent = DomainVO.ROOT_DOMAIN;
if(domain.getParent() != null && domain.getParent().longValue() >= DomainVO.ROOT_DOMAIN) {
parent = domain.getParent().longValue();
}
DomainVO parentDomain = findById(parent);
if(parentDomain == null) {
s_logger.error("Unable to load parent domain: " + parent);
return null;
}
Transaction txn = Transaction.currentTxn();
try {
txn.start();
parentDomain = this.lockRow(parent, true);
if(parentDomain == null) {
s_logger.error("Unable to lock parent domain: " + parent);
return null;
}
domain.setPath(allocPath(parentDomain, domain.getName()));
domain.setLevel(parentDomain.getLevel() + 1);
parentDomain.setNextChildSeq(parentDomain.getNextChildSeq() + 1); // FIXME: remove sequence number?
parentDomain.setChildCount(parentDomain.getChildCount() + 1);
persist(domain);
update(parentDomain.getId(), parentDomain);
txn.commit();
return domain;
} catch(Exception e) {
s_logger.error("Unable to create domain due to " + e.getMessage(), e);
txn.rollback();
return null;
}
}
@Override
@DB
public boolean remove(Long id) {
// check for any active users / domains assigned to the given domain id and don't remove the domain if there are any
if (id != null && id.longValue() == DomainVO.ROOT_DOMAIN) {
s_logger.error("Can not remove domain " + id + " as it is ROOT domain");
return false;
}
DomainVO domain = findById(id);
if(domain == null) {
s_logger.error("Unable to remove domain as domain " + id + " no longer exists");
return false;
}
if(domain.getParent() == null) {
s_logger.error("Invalid domain " + id + ", orphan?");
return false;
}
String sql = "SELECT * from account where domain_id = " + id + " and removed is null";
String sql1 = "SELECT * from domain where parent = " + id + " and removed is null";
boolean success = false;
Transaction txn = Transaction.currentTxn();
try {
txn.start();
DomainVO parentDomain = super.lockRow(domain.getParent(), true);
if(parentDomain == null) {
s_logger.error("Unable to load parent domain: " + domain.getParent());
return false;
}
PreparedStatement stmt = txn.prepareAutoCloseStatement(sql);
ResultSet rs = stmt.executeQuery();
if (rs.next()) {
return false;
}
stmt = txn.prepareAutoCloseStatement(sql1);
rs = stmt.executeQuery();
if (rs.next()) {
return false;
}
parentDomain.setChildCount(parentDomain.getChildCount() - 1);
update(parentDomain.getId(), parentDomain);
success = super.remove(id);
txn.commit();
} catch (SQLException ex) {
success = false;
s_logger.error("error removing domain: " + id, ex);
txn.rollback();
}
return success;
}
@Override
public DomainVO findDomainByPath(String domainPath) {
SearchCriteria<DomainVO> sc = createSearchCriteria();
sc.addAnd("path", SearchCriteria.Op.EQ, domainPath);
return findOneBy(sc);
}
@Override
public DomainVO findImmediateChildForParent(Long parentId){
SearchCriteria<DomainVO> sc = ImmediateChildDomainSearch.create();
sc.setParameters("parent", parentId);
        List<DomainVO> children = listBy(sc);
        return (children.size() > 0 ? children.get(0) : null); // may need to revisit for the multiple children case
}
@Override
public List<DomainVO> findImmediateChildrenForParent(Long parentId){
SearchCriteria<DomainVO> sc = ImmediateChildDomainSearch.create();
sc.setParameters("parent", parentId);
return listBy(sc);
}
@Override
public List<DomainVO> findAllChildren(String path, Long parentId){
SearchCriteria<DomainVO> sc = FindAllChildrenSearch.create();
sc.setParameters("path", "%"+path+"%");
sc.setParameters("id", parentId);
return listBy(sc);
}
@Override
public boolean isChildDomain(Long parentId, Long childId) {
if ((parentId == null) || (childId == null)) {
return false;
}
if (parentId.equals(childId)) {
return true;
}
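        // A domain is a child when its path starts with the parent's path,
        // e.g. (illustrative paths) "/a/b/" starts with "/a/".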
boolean result = false;
SearchCriteria<DomainVO> sc = DomainPairSearch.create();
sc.setParameters("id", parentId, childId);
List<DomainVO> domainPair = listBy(sc);
if ((domainPair != null) && (domainPair.size() == 2)) {
DomainVO d1 = domainPair.get(0);
DomainVO d2 = domainPair.get(1);
if (d1.getId() == parentId) {
result = d2.getPath().startsWith(d1.getPath());
} else {
result = d1.getPath().startsWith(d2.getPath());
}
}
return result;
}
}
|
package com.lothrazar.samsbucketblocks;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.ITileEntityProvider;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.init.SoundEvents;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.BlockRenderLayer;
import net.minecraft.util.EnumBlockRenderType;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.TextComponentTranslation;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.registry.GameRegistry;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
public class BlockBucketStorage extends Block implements ITileEntityProvider{
private Item bucketItem;
public BlockBucketStorage(Item bucketIn){
super(Material.iron);
this.setHardness(7F);
this.setResistance(7F);
this.setStepSound(SoundType.GLASS);
this.setHarvestLevel("pickaxe", 1);
bucketItem = bucketIn;
}
public static final String NBTBUCKETS = "buckets";
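// Convention: a filled storage block implicitly holds one bucket, so the item NBT and the tile
// entity track only the additional buckets; the empty storage block always reports zero.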
public static int getBucketsStored(ItemStack item){
if(item.getItem() == Item.getItemFromBlock(BlockRegistry.block_storeempty))
return 0;
if(item.getTagCompound() == null){
item.setTagCompound(new NBTTagCompound());
}
return item.getTagCompound().getInteger(NBTBUCKETS) + 1;
}
public static int getItemStackBucketNBT(ItemStack item){
if(item.getTagCompound() == null){
item.setTagCompound(new NBTTagCompound());
}
return item.getTagCompound().getInteger(NBTBUCKETS);
}
@Override
public void onBlockPlacedBy(World worldIn, BlockPos pos, IBlockState state, EntityLivingBase placer, ItemStack stack){
if(stack.getTagCompound() != null){
int b = BlockBucketStorage.getItemStackBucketNBT(stack);
TileEntityBucketStorage container = (TileEntityBucketStorage) worldIn.getTileEntity(pos);
container.setBuckets(b);
}
}
@SideOnly(Side.CLIENT)
@Override
public BlockRenderLayer getBlockLayer(){
return BlockRenderLayer.TRANSLUCENT; // was EnumWorldBlockLayer.CUTOUT in older mappings
}
@Override
public boolean isOpaqueCube(IBlockState state){
return false;
}
@Override
public boolean hasComparatorInputOverride(IBlockState state){
return true;
}
@Override
public int getComparatorInputOverride(IBlockState blockState, World world, BlockPos pos){
TileEntityBucketStorage container = (TileEntityBucketStorage) world.getTileEntity(pos);
return container.getBuckets();
}
@Override
public TileEntity createNewTileEntity(World worldIn, int meta){
return new TileEntityBucketStorage(worldIn, meta);
}
@Override
public Item getItemDropped(IBlockState state, Random rand, int fortune){
return null;
// return Item.getItemFromBlock(BlockRegistry.block_storeempty);
}
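// Right-click with an empty hand dispenses one bucket (server side). Taking the implicit last
// bucket reverts the block to the empty storage variant; TileEntityBucketStorage.TIMEOUT
// throttles rapid re-activations.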
@Override
public boolean onBlockActivated(World world, BlockPos pos, IBlockState state, EntityPlayer entityPlayer, EnumHand hand, ItemStack heldItem, EnumFacing side, float hitX, float hitY, float hitZ){
// if(world.isRemote == false){System.out.println("Server.Right");}
ItemStack held = entityPlayer.getHeldItem(hand);
Block blockClicked = state.getBlock();
if((blockClicked instanceof BlockBucketStorage) == false){
return false;
}
BlockBucketStorage block = (BlockBucketStorage) blockClicked;
TileEntityBucketStorage container = (TileEntityBucketStorage) world.getTileEntity(pos);
long timeSince = world.getTotalWorldTime() - container.getTimeLast();
if(timeSince < TileEntityBucketStorage.TIMEOUT){
// System.out.println("SKIP"+timeSince);
return false;
}
if(held == null && block.bucketItem != null && block.bucketItem == this.bucketItem){
if(world.isRemote == false){
// server only
if(container.getBuckets() > 0){
removeBucket(entityPlayer, world, container, block.bucketItem);
}
else{
// that was the implicit last bucket: dispense it and revert to the empty storage block
removeBucket(entityPlayer, world, container, block.bucketItem);
world.setBlockState(pos, BlockRegistry.block_storeempty.getDefaultState());
}
container.setTimeLast(world.getTotalWorldTime());
world.updateComparatorOutputLevel(pos, blockClicked);
}
// both sides
world.playSound(pos.getX(), pos.getY(), pos.getZ(), SoundEvents.block_piston_extend, SoundCategory.BLOCKS, 1.0F, 1.0F, false);
spawnMyParticle(world, block.bucketItem, pos);// .offset(face)
}
return super.onBlockActivated(world, pos, state, entityPlayer, hand, heldItem, side, hitX, hitY, hitZ);
}
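// Left-click: when sneaking, report the stored bucket count in chat (server side). Otherwise a
// matching bucket in hand is absorbed; an empty storage block is first swapped for the filled
// variant that corresponds to the held lava/water/milk bucket.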
@Override
public void onBlockClicked(World world, BlockPos pos, EntityPlayer entityPlayer){
// only left click
// if(world.isRemote == false){System.out.println("Server.Left");}
EnumHand hand = entityPlayer.getActiveHand();
if(hand == null){
hand = EnumHand.MAIN_HAND;
}
ItemStack held = entityPlayer.getHeldItem(hand);
if(pos == null){
return;
}
IBlockState bstate = world.getBlockState(pos);
if(bstate == null){
return;
}
Block blockClicked = bstate.getBlock();
if(blockClicked == null || blockClicked == Blocks.air){
return;
}
if((blockClicked instanceof BlockBucketStorage) == false){
return;
}
BlockBucketStorage block = (BlockBucketStorage) blockClicked;
TileEntityBucketStorage container = (TileEntityBucketStorage) world.getTileEntity(pos);
if(entityPlayer.isSneaking() && world.isRemote == false){ // server has accurate number
int inside;
if(blockClicked == BlockRegistry.block_storeempty)
inside = 0;
else
inside = container.getBuckets() + 1;// the filled block itself counts as one bucket
entityPlayer.addChatMessage(new TextComponentTranslation(inside + ""));
return;// no sound; just report the count
}
if(held == null){
return;
}
// before adding a bucket, check whether the empty block must first be swapped for the matching filled variant
if(blockClicked == BlockRegistry.block_storeempty && block.bucketItem == null){
IBlockState state = null;
if(held.getItem() == Items.lava_bucket){
state = BlockRegistry.block_storelava.getDefaultState();
}
else if(held.getItem() == Items.water_bucket){
state = BlockRegistry.block_storewater.getDefaultState();
}
else if(held.getItem() == Items.milk_bucket){
state = BlockRegistry.block_storemilk.getDefaultState();
}
if(state != null){
if(world.isRemote == false){
// System.out.println("addBucket to EMPTY BLOCK");
// server only
world.setBlockState(pos, state);
container.addBucket();
// entityPlayer.destroyCurrentEquippedItem();
entityPlayer.inventory.decrStackSize(entityPlayer.inventory.currentItem, 1);
world.updateComparatorOutputLevel(pos, blockClicked);
}
// both sides
world.playSound(pos.getX(), pos.getY(), pos.getZ(), SoundEvents.block_piston_extend, SoundCategory.BLOCKS, 1.0F, 1.0F, false);
spawnMyParticle(world, held.getItem(), pos);// .offset(face)
}
return;
}
else if(held != null && held.getItem() == block.bucketItem){
if(world.isRemote == false){
// System.out.println("addBucket to EXISTING BLOCK"+world.isRemote);
// server only
container.addBucket();
// entityPlayer.destroyCurrentEquippedItem();
entityPlayer.inventory.decrStackSize(entityPlayer.inventory.currentItem, 1);
world.updateComparatorOutputLevel(pos, blockClicked);
}
// both sides
world.playSound(pos.getX(), pos.getY(), pos.getZ(), SoundEvents.block_piston_extend, SoundCategory.BLOCKS, 1.0F, 1.0F, false);
spawnMyParticle(world, block.bucketItem, pos);// .offset(face)
return;
}
super.onBlockClicked(world, pos, entityPlayer);
}
/*
* @SubscribeEvent public void onPlayerInteract(PlayerInteractEvent event) { EntityPlayer
* entityPlayer = event.getEntityPlayer(); BlockPos pos = event.getPos(); World world =
* event.getWorld(); EnumFacing face = event.getFace(); EnumHand hand =
* entityPlayer.getActiveHand(); ItemStack held = entityPlayer.getHeldItem(hand);
*
* if (pos == null) { return; } IBlockState bstate = world.getBlockState(pos); if (bstate ==
* null) { return; }
*
* Block blockClicked = bstate.getBlock();
*
* if (blockClicked == null || blockClicked == Blocks.air) { return; } if ((blockClicked
* instanceof BlockBucketStorage) == false) { return; }
*
* }
*/
private void spawnMyParticle(World world, Item item, BlockPos pos){
if(item == Items.milk_bucket)
ModBucketBlocks.spawnParticle(world, EnumParticleTypes.SNOW_SHOVEL, pos);
else if(item == Items.lava_bucket)
ModBucketBlocks.spawnParticle(world, EnumParticleTypes.LAVA, pos);
else if(item == Items.water_bucket)
ModBucketBlocks.spawnParticle(world, EnumParticleTypes.WATER_SPLASH, pos);
}
private void removeBucket(EntityPlayer entityPlayer, World world, TileEntityBucketStorage storage, Item bucketItem){
storage.removeBucket();
ModBucketBlocks.dropItemStackInWorld(world, entityPlayer.getPosition(), new ItemStack(bucketItem));
}
public void addRecipe(){
GameRegistry.addRecipe(new ItemStack(BlockRegistry.block_storeempty), "i i", " o ", "i i", 'o', Blocks.obsidian, 'i', Blocks.iron_block);
// the filled ones are not crafted, only obtained when filled and then
// harvested
}
}
|
package edu.umd.cs.findbugs.detect;
import java.math.BigDecimal;
import java.util.Iterator;
import org.apache.bcel.classfile.Attribute;
import org.apache.bcel.classfile.Code;
import org.apache.bcel.classfile.CodeException;
import org.apache.bcel.classfile.Constant;
import org.apache.bcel.classfile.ConstantClass;
import org.apache.bcel.classfile.ConstantDouble;
import org.apache.bcel.classfile.ConstantInteger;
import org.apache.bcel.classfile.ConstantLong;
import org.apache.bcel.classfile.ConstantPool;
import org.apache.bcel.classfile.ConstantValue;
import org.apache.bcel.classfile.Field;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.Method;
import org.apache.bcel.classfile.Synthetic;
import org.apache.bcel.generic.ObjectType;
import org.apache.bcel.generic.ReferenceType;
import org.apache.bcel.generic.Type;
import edu.umd.cs.findbugs.BugAccumulator;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.ClassAnnotation;
import edu.umd.cs.findbugs.IntAnnotation;
import edu.umd.cs.findbugs.LocalVariableAnnotation;
import edu.umd.cs.findbugs.MethodAnnotation;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.OpcodeStack.Item;
import edu.umd.cs.findbugs.Priorities;
import edu.umd.cs.findbugs.SourceLineAnnotation;
import edu.umd.cs.findbugs.StringAnnotation;
import edu.umd.cs.findbugs.SystemProperties;
import edu.umd.cs.findbugs.ba.AnalysisContext;
import edu.umd.cs.findbugs.ba.CFGBuilderException;
import edu.umd.cs.findbugs.ba.DataflowAnalysisException;
import edu.umd.cs.findbugs.ba.Hierarchy;
import edu.umd.cs.findbugs.ba.ObjectTypeFactory;
import edu.umd.cs.findbugs.ba.SignatureParser;
import edu.umd.cs.findbugs.ba.XField;
import edu.umd.cs.findbugs.ba.XMethod;
import edu.umd.cs.findbugs.ba.ch.Subtypes2;
import edu.umd.cs.findbugs.ba.type.TypeDataflow;
import edu.umd.cs.findbugs.bcel.OpcodeStackDetector;
import edu.umd.cs.findbugs.classfile.ClassDescriptor;
import edu.umd.cs.findbugs.classfile.DescriptorFactory;
import edu.umd.cs.findbugs.classfile.MethodDescriptor;
import edu.umd.cs.findbugs.util.ClassName;
import edu.umd.cs.findbugs.util.Util;
import edu.umd.cs.findbugs.visitclass.PreorderVisitor;
public class DumbMethods extends OpcodeStackDetector {
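// Each SubDetector is reset once per analyzed method via initMethod(Method) and is then fed
// every opcode from DumbMethods.sawOpcode(int), reporting through the shared BugAccumulator.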
private abstract class SubDetector {
public void initMethod(Method method) {}
abstract public void sawOpcode(int seen);
}
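// Detects nested Math.min/Math.max calls whose constant bounds are inverted, e.g.
// Math.min(Math.max(x, 100), 0), which always yields the same value regardless of x.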
private class InvalidMinMaxSubDetector extends SubDetector {
Number lowerBound, upperBound;
@Override
public void initMethod(Method method) {
lowerBound = upperBound = null;
}
@Override
public void sawOpcode(int seen) {
if(seen == INVOKESTATIC && getClassConstantOperand().equals("java/lang/Math") && (getMethodDescriptorOperand().getName().equals("max")
|| getMethodDescriptorOperand().getName().equals("min"))) {
Object const1 = stack.getStackItem(0).getConstant();
Object const2 = stack.getStackItem(1).getConstant();
Number n = null;
if(const1 != null ^ const2 != null) {
n = (const1 instanceof Number) ? (Number)const1 : (Number)const2;
if(getMethodDescriptorOperand().getName().equals("min")) {
upperBound = n;
} else {
lowerBound = n;
}
} else {
upperBound = lowerBound = null;
}
XMethod rvo1 = stack.getStackItem(0).getReturnValueOf();
XMethod rvo2 = stack.getStackItem(1).getReturnValueOf();
if(rvo1 != null ^ rvo2 != null) {
XMethod rvo = rvo1 == null ? rvo2 : rvo1;
if (lowerBound instanceof Comparable && upperBound != null && upperBound.getClass() == lowerBound.getClass()
&& rvo.getClassDescriptor().getClassName().equals("java/lang/Math")
&& (rvo.getName().equals("max") || rvo.getName().equals("min"))) {
@SuppressWarnings("unchecked")
int result = ((Comparable<Number>)lowerBound).compareTo(upperBound);
if(result > 0) {
accumulator.accumulateBug(
new BugInstance("DM_INVALID_MIN_MAX", HIGH_PRIORITY).addClassAndMethod(DumbMethods.this)
.addString(String.valueOf(n)),
DumbMethods.this);
}
}
}
}
}
}
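// Flags Guava Preconditions.checkNotNull / Strings helpers and JUnit assertNotNull calls whose
// value argument is a String constant, which usually means the message and value arguments
// were passed in the wrong order.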
private class NullMethodsSubDetector extends SubDetector {
@Override
public void sawOpcode(int seen) {
if (seen == INVOKESTATIC && ("com/google/common/base/Preconditions".equals(getClassConstantOperand())
&& "checkNotNull".equals(getNameConstantOperand())
|| "com/google/common/base/Strings".equals(getClassConstantOperand())
&& ("nullToEmpty".equals(getNameConstantOperand()) ||
"emptyToNull".equals(getNameConstantOperand()) ||
"isNullOrEmpty".equals(getNameConstantOperand())))
) {
int args = PreorderVisitor.getNumberArguments(getSigConstantOperand());
OpcodeStack.Item item = stack.getStackItem(args - 1);
Object o = item.getConstant();
if (o instanceof String) {
OpcodeStack.Item secondArgument = null;
String bugPattern = "DMI_DOH";
if (args > 1) {
secondArgument = stack.getStackItem(args - 2);
Object secondConstant = secondArgument.getConstant();
if (!(secondConstant instanceof String)) {
bugPattern = "DMI_ARGUMENTS_WRONG_ORDER";
}
}
BugInstance bug = new BugInstance(DumbMethods.this, bugPattern, NORMAL_PRIORITY).addClassAndMethod(DumbMethods.this)
.addCalledMethod(DumbMethods.this)
.addString("Passing String constant as value that should be null checked").describe(StringAnnotation.STRING_MESSAGE)
.addString((String) o).describe(StringAnnotation.STRING_CONSTANT_ROLE);
if (secondArgument != null) {
bug.addValueSource(secondArgument, DumbMethods.this);
}
accumulator.accumulateBug(bug, DumbMethods.this);
}
}
if (seen == INVOKESTATIC && ("junit/framework/Assert".equals(getClassConstantOperand()) || "org/junit/Assert".equals(getClassConstantOperand()))
&& "assertNotNull".equals(getNameConstantOperand())) {
int args = PreorderVisitor.getNumberArguments(getSigConstantOperand());
OpcodeStack.Item item = stack.getStackItem(0);
Object o = item.getConstant();
if (o instanceof String) {
OpcodeStack.Item secondArgument = null;
String bugPattern = "DMI_DOH";
if (args == 2) {
secondArgument = stack.getStackItem(1);
Object secondConstant = secondArgument.getConstant();
if (!(secondConstant instanceof String)) {
bugPattern = "DMI_ARGUMENTS_WRONG_ORDER";
}
}
BugInstance bug = new BugInstance(DumbMethods.this, bugPattern, NORMAL_PRIORITY).addClassAndMethod(DumbMethods.this)
.addCalledMethod(DumbMethods.this).addString("Passing String constant as value that should be null checked").describe(StringAnnotation.STRING_MESSAGE)
.addString((String) o).describe(StringAnnotation.STRING_CONSTANT_ROLE);
if (secondArgument != null) {
bug.addValueSource(secondArgument, DumbMethods.this);
}
accumulator.accumulateBug(bug, DumbMethods.this);
}
}
}
}
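// Calling setMaximumPoolSize on a ScheduledThreadPoolExecutor has no useful effect: it always
// runs with its core pool size and an unbounded work queue.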
private class FutilePoolSizeSubDetector extends SubDetector {
@Override
public void sawOpcode(int seen) {
if (seen == INVOKEVIRTUAL && "java/util/concurrent/ScheduledThreadPoolExecutor".equals(getClassConstantOperand())
&& "setMaximumPoolSize".equals(getNameConstantOperand())) {
accumulator.accumulateBug(new BugInstance(DumbMethods.this,
"DMI_FUTILE_ATTEMPT_TO_CHANGE_MAXPOOL_SIZE_OF_SCHEDULED_THREAD_POOL_EXECUTOR", HIGH_PRIORITY)
.addClassAndMethod(DumbMethods.this), DumbMethods.this);
}
}
}
static int saturatingIncrement(int value) {
if (value == Integer.MAX_VALUE) {
return Integer.MAX_VALUE;
}
return value+1;
}
private class RangeCheckSubDetector extends SubDetector {
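// minValue/maxValue may be a plain Number bound, a String whose length bounds an index, or an
// array OpcodeStack.Item whose known length bounds an index; anything else leaves that side
// unbounded.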
private void checkRange(Item item, Object minValue, Object maxValue, String pattern) {
if(!(item.getConstant() instanceof Number)) {
return;
}
int value = ((Number)item.getConstant()).intValue();
int intMin = Integer.MIN_VALUE;
int intMax = Integer.MAX_VALUE;
if(minValue instanceof Number) {
intMin = ((Number)minValue).intValue();
}
if(maxValue instanceof Number) {
intMax = ((Number)maxValue).intValue();
} else if(maxValue instanceof String) {
intMax = ((String)maxValue).length()-1;
} else if (maxValue instanceof OpcodeStack.Item){
OpcodeStack.Item maxItem = (OpcodeStack.Item ) maxValue;
if (maxItem.getSignature().charAt(0) == '[' && maxItem.getConstant() instanceof Integer) {
intMax = ((Integer)maxItem.getConstant())-1;
}
}
if(value < intMin || value > intMax) {
BugInstance bug = new BugInstance(pattern, NORMAL_PRIORITY ).addClassAndMethod(DumbMethods.this).addSourceLine(DumbMethods.this)
.addInt(value).describe(IntAnnotation.INT_VALUE);
if (intMin <= intMax) {
if (value < intMin) {
bug.addInt(intMin).describe(IntAnnotation.INT_MIN_VALUE);
}
if (value > intMax) {
bug.addInt(intMax) .describe(IntAnnotation.INT_MAX_VALUE);
}
}
if (isMethodCall()) {
bug.addCalledMethod(DumbMethods.this);
}
accumulator.accumulateBug(bug, DumbMethods.this);
}
}
@Override
public void sawOpcode(int seen) {
// System.out.printf("%4d %s%n", getPC(), OPCODE_NAMES[seen]);
switch(seen) {
case IALOAD:
case AALOAD:
case SALOAD:
case CALOAD:
case BALOAD:
case LALOAD:
case DALOAD:
case FALOAD: {
checkRange(stack.getStackItem(0), 0, stack.getStackItem(1), "RANGE_ARRAY_INDEX");
break;
}
case IASTORE:
case AASTORE:
case SASTORE:
case CASTORE:
case BASTORE:
case LASTORE:
case DASTORE:
case FASTORE: {
checkRange(stack.getStackItem(1), 0, stack.getStackItem(2), "RANGE_ARRAY_INDEX");
break;
}
case INVOKESTATIC: {
MethodDescriptor m = getMethodDescriptorOperand();
if(m.getSlashedClassName().equals("java/lang/System") && m.getName().equals("arraycopy")) {
// void arraycopy(Object src, int srcPos, Object dest, int destPos, int length)
Item length = stack.getStackItem(0);
Object constantLength = length.getConstant();
// if (constantLength instanceof Number && constantLength.equals(0)) {
// break;
Item srcPos = stack.getStackItem(3);
Item src = stack.getStackItem(4);
checkRange(srcPos, 0, src, "RANGE_ARRAY_OFFSET");
Item dest = stack.getStackItem(2);
Item destPos = stack.getStackItem(1);
checkRange(destPos, 0, dest, "RANGE_ARRAY_OFFSET");
if(constantLength instanceof Number) {
int length1 = Integer.MAX_VALUE;
if(src.getConstant() instanceof Integer) {
length1 = (int) src.getConstant();
}
if(srcPos.getConstant() instanceof Integer) {
length1 -= (int) srcPos.getConstant();
}
int length2 = Integer.MAX_VALUE;
if(dest.getConstant() instanceof Integer) {
length2 = (int) stack.getStackItem(2).getConstant();
}
if(destPos.getConstant() instanceof Integer) {
length2 -= (int) stack.getStackItem(1).getConstant();
}
checkRange(length, 0, Math.min(length1, length2), "RANGE_ARRAY_LENGTH");
}
}
break;
}
case INVOKEVIRTUAL:
case INVOKESPECIAL: {
MethodDescriptor m = getMethodDescriptorOperand();
if(m.getSlashedClassName().equals("java/lang/String")) {
if((m.getName().equals("charAt") || m.getName().equals("codePointAt")) && m.getSignature().startsWith("(I)")) {
checkRange(stack.getStackItem(0), 0, stack.getStackItem(1).getConstant(), "RANGE_STRING_INDEX");
}
if(m.getName().equals("substring") || m.getName().equals("subSequence")) {
int nArgs = getNumberArguments(m.getSignature());
Item thisArg = stack.getStackItem(nArgs);
Item firstArg = stack.getStackItem(nArgs-1);
Object thisConstantValue = thisArg.getConstant();
int maxLength = thisConstantValue instanceof String ? ((String)thisConstantValue).length() : Integer.MAX_VALUE;
checkRange(firstArg, 0,maxLength, "RANGE_STRING_INDEX");
if(nArgs == 2) {
Item secondArg = stack.getStackItem(0);
checkRange(secondArg, firstArg.getConstant() == null ? 0 : firstArg.getConstant(),
maxLength,
"RANGE_STRING_INDEX");
}
}
}
if ((m.getSignature().startsWith("([BII)") || m.getSignature().startsWith("([CII)") || m.getSignature().startsWith("([III)"))
&& (((m.getName().equals("write") || m.getName().equals("read")) && m.getSlashedClassName().startsWith(
"java/io/")) || (m.getName().equals("<init>") && m.getSlashedClassName().equals("java/lang/String")))) {
Item arrayArg = stack.getStackItem(2);
Item offsetArg = stack.getStackItem(1);
Item lengthArg = stack.getStackItem(0);
int length = Integer.MAX_VALUE;
if(arrayArg.getConstant() instanceof Integer) {
length = (int) arrayArg.getConstant();
}
if(offsetArg.getConstant() instanceof Integer) {
checkRange(offsetArg, 0, saturatingIncrement(length), "RANGE_ARRAY_OFFSET");
length -= (int) offsetArg.getConstant();
}
checkRange(lengthArg, 0, saturatingIncrement(length), "RANGE_ARRAY_LENGTH");
}
break;
}
default:
break;
}
}
}
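// java.net.URL.equals() and hashCode() resolve host names, so using URLs as map keys or set
// elements can trigger blocking DNS lookups.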
private class UrlCollectionSubDetector extends SubDetector {
@Override
public void sawOpcode(int seen) {
if ((seen == INVOKEVIRTUAL && "java/util/HashMap".equals(getClassConstantOperand()) && "get".equals(getNameConstantOperand()))
|| (seen == INVOKEINTERFACE && "java/util/Map".equals(getClassConstantOperand()) && "get".equals(getNameConstantOperand()))
|| (seen == INVOKEVIRTUAL && "java/util/HashSet".equals(getClassConstantOperand()) && "contains".equals(getNameConstantOperand()))
|| (seen == INVOKEINTERFACE && "java/util/Set".equals(getClassConstantOperand()) && "contains".equals(getNameConstantOperand()))) {
OpcodeStack.Item top = stack.getStackItem(0);
if ("Ljava/net/URL;".equals(top.getSignature())) {
accumulator.accumulateBug(new BugInstance(DumbMethods.this, "DMI_COLLECTION_OF_URLS", HIGH_PRIORITY)
.addClassAndMethod(DumbMethods.this), DumbMethods.this);
}
}
}
}
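// Flags integer comparisons that are always true or always false, such as
// x <= Integer.MAX_VALUE or x >= Integer.MIN_VALUE.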
private class VacuousComparisonSubDetector extends SubDetector {
@Override
public void sawOpcode(int seen) {
boolean foundVacuousComparison = false;
if (seen == IF_ICMPGT || seen == IF_ICMPLE) {
OpcodeStack.Item rhs = stack.getStackItem(0);
Object rhsConstant = rhs.getConstant();
if (rhsConstant instanceof Integer && ((Integer) rhsConstant).intValue() == Integer.MAX_VALUE) {
foundVacuousComparison = true;
}
OpcodeStack.Item lhs = stack.getStackItem(1);
Object lhsConstant = lhs.getConstant();
if (lhsConstant instanceof Integer && ((Integer) lhsConstant).intValue() == Integer.MIN_VALUE) {
foundVacuousComparison = true;
}
}
if (seen == IF_ICMPLT || seen == IF_ICMPGE) {
OpcodeStack.Item rhs = stack.getStackItem(0);
Object rhsConstant = rhs.getConstant();
if (rhsConstant instanceof Integer && ((Integer) rhsConstant).intValue() == Integer.MIN_VALUE) {
foundVacuousComparison = true;
}
OpcodeStack.Item lhs = stack.getStackItem(1);
Object lhsConstant = lhs.getConstant();
if (lhsConstant instanceof Integer && ((Integer) lhsConstant).intValue() == Integer.MAX_VALUE) {
foundVacuousComparison = true;
}
}
if (foundVacuousComparison) {
accumulator.accumulateBug(new BugInstance(DumbMethods.this, "INT_VACUOUS_COMPARISON", getBranchOffset() < 0 ? HIGH_PRIORITY
: NORMAL_PRIORITY).addClassAndMethod(DumbMethods.this), DumbMethods.this);
}
}
}
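// Inside equals(Object), a CHECKCAST of the argument that is not guarded by instanceof,
// getClass(), Class.isInstance() or a delegated equals call (and not wrapped in a try block)
// can throw ClassCastException for unrelated argument types.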
private class BadCastInEqualsSubDetector extends SubDetector {
private boolean isEqualsObject;
private boolean sawInstanceofCheck;
private boolean reportedBadCastInEquals;
@Override
public void initMethod(Method method) {
isEqualsObject = "equals".equals(getMethodName()) && "(Ljava/lang/Object;)Z".equals(getMethodSig()) && !method.isStatic();
sawInstanceofCheck = false;
reportedBadCastInEquals = false;
}
@Override
public void sawOpcode(int seen) {
if (isEqualsObject && !reportedBadCastInEquals) {
if (seen == INVOKEVIRTUAL && "isInstance".equals(getNameConstantOperand())
&& "java/lang/Class".equals(getClassConstantOperand())) {
OpcodeStack.Item item = stack.getStackItem(0);
if (item.getRegisterNumber() == 1) {
sawInstanceofCheck = true;
}
} else if (seen == INSTANCEOF || seen == INVOKEVIRTUAL && "getClass".equals(getNameConstantOperand())
&& "()Ljava/lang/Class;".equals(getSigConstantOperand())) {
OpcodeStack.Item item = stack.getStackItem(0);
if (item.getRegisterNumber() == 1) {
sawInstanceofCheck = true;
}
} else if (seen == INVOKESPECIAL && "equals".equals(getNameConstantOperand())
&& "(Ljava/lang/Object;)Z".equals(getSigConstantOperand())) {
OpcodeStack.Item item0 = stack.getStackItem(0);
OpcodeStack.Item item1 = stack.getStackItem(1);
if (item1.getRegisterNumber() + item0.getRegisterNumber() == 1) {
sawInstanceofCheck = true;
}
} else if (seen == CHECKCAST && !sawInstanceofCheck) {
OpcodeStack.Item item = stack.getStackItem(0);
if (item.getRegisterNumber() == 1) {
if (getSizeOfSurroundingTryBlock(getPC()) == Integer.MAX_VALUE) {
accumulator.accumulateBug(new BugInstance(DumbMethods.this, "BC_EQUALS_METHOD_SHOULD_WORK_FOR_ALL_OBJECTS",
NORMAL_PRIORITY).addClassAndMethod(DumbMethods.this), DumbMethods.this);
}
reportedBadCastInEquals = true;
}
}
}
}
}
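// Flags a freshly constructed java.util.Random that is used for a single call and then
// discarded (DMI_RANDOM_USED_ONLY_ONCE).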
private class RandomOnceSubDetector extends SubDetector {
private boolean freshRandomOnTos = false;
private boolean freshRandomOneBelowTos = false;
@Override
public void initMethod(Method method) {
freshRandomOnTos = false;
freshRandomOneBelowTos = false; // reset both flags so state cannot leak between methods
}
@Override
public void sawOpcode(int seen) {
if (seen == INVOKEVIRTUAL && "java/util/Random".equals(getClassConstantOperand())
&& (freshRandomOnTos || freshRandomOneBelowTos)) {
accumulator.accumulateBug(new BugInstance(DumbMethods.this, "DMI_RANDOM_USED_ONLY_ONCE", HIGH_PRIORITY)
.addClassAndMethod(DumbMethods.this).addCalledMethod(DumbMethods.this), DumbMethods.this);
}
freshRandomOneBelowTos = freshRandomOnTos && isRegisterLoad();
freshRandomOnTos = seen == INVOKESPECIAL && "java/util/Random".equals(getClassConstantOperand())
&& "<init>".equals(getNameConstantOperand());
}
}
private final SubDetector[] subDetectors = new SubDetector[] { new VacuousComparisonSubDetector(),
new RangeCheckSubDetector(), new BadCastInEqualsSubDetector(), new FutilePoolSizeSubDetector(),
new UrlCollectionSubDetector(), new RandomOnceSubDetector(), new NullMethodsSubDetector(),
new InvalidMinMaxSubDetector()};
private static final ObjectType CONDITION_TYPE = ObjectTypeFactory.getInstance("java.util.concurrent.locks.Condition");
private final BugReporter bugReporter;
private boolean sawCurrentTimeMillis;
private BugInstance gcInvocationBugReport;
private int gcInvocationPC;
private CodeException[] exceptionTable;
/*
* private boolean sawLDCEmptyString;
*/
private String primitiveObjCtorSeen;
private boolean ctorSeen;
private boolean prevOpcodeWasReadLine;
private int prevOpcode;
private boolean isPublicStaticVoidMain;
private int sawCheckForNonNegativeSignedByte;
private int sinceBufferedInputStreamReady;
private int randomNextIntState;
private boolean checkForBitIorofSignedByte;
/**
* A heuristic - how long a catch block for OutOfMemoryError might be.
*/
private static final int OOM_CATCH_LEN = 20;
private final boolean testingEnabled;
private final BugAccumulator accumulator;
private final BugAccumulator absoluteValueAccumulator;
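// 24 * 60 * 60 * 1000 * 1000 microseconds per day does not fit in an int; this constant holds
// the silently overflowed value so the detector can recognize it in analyzed code.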
private static final int MICROS_PER_DAY_OVERFLOWED_AS_INT
= 24 * 60 * 60 * 1000 * 1000;
public DumbMethods(BugReporter bugReporter) {
this.bugReporter = bugReporter;
accumulator = new BugAccumulator(bugReporter);
absoluteValueAccumulator = new BugAccumulator(bugReporter);
testingEnabled = SystemProperties.getBoolean("report_TESTING_pattern_in_standard_detectors");
}
boolean isSynthetic;
@Override
public void visit(JavaClass obj) {
String superclassName = obj.getSuperclassName();
isSynthetic = "java.rmi.server.RemoteStub".equals(superclassName);
Attribute[] attributes = obj.getAttributes();
if (attributes != null) {
for (Attribute a : attributes) {
if (a instanceof Synthetic) {
isSynthetic = true;
}
}
}
}
@Override
public void visitAfter(JavaClass obj) {
accumulator.reportAccumulatedBugs();
}
public static boolean isTestMethod(Method method) {
return method.getName().startsWith("test");
}
@Override
public void visit(Field field) {
ConstantValue value = field.getConstantValue();
if (value == null) {
return;
}
Constant c = getConstantPool().getConstant(value.getConstantValueIndex());
if (testingEnabled && c instanceof ConstantLong && ((ConstantLong)c).getBytes() == MICROS_PER_DAY_OVERFLOWED_AS_INT) {
bugReporter.reportBug( new BugInstance(this, "TESTING", HIGH_PRIORITY).addClass(this).addField(this)
.addString("Did you mean MICROS_PER_DAY")
.addInt(MICROS_PER_DAY_OVERFLOWED_AS_INT)
.describe(IntAnnotation.INT_VALUE));
}
}
@Override
public void visit(Method method) {
String cName = getDottedClassName();
for(SubDetector subDetector : subDetectors) {
subDetector.initMethod(method);
}
// System.out.println(getFullyQualifiedMethodName());
isPublicStaticVoidMain = method.isPublic() && method.isStatic() && "main".equals(getMethodName())
|| cName.toLowerCase().indexOf("benchmark") >= 0;
prevOpcodeWasReadLine = false;
Code code = method.getCode();
if (code != null) {
this.exceptionTable = code.getExceptionTable();
}
if (this.exceptionTable == null) {
this.exceptionTable = new CodeException[0];
}
primitiveObjCtorSeen = null;
ctorSeen = false;
randomNextIntState = 0;
checkForBitIorofSignedByte = false;
sinceBufferedInputStreamReady = 100000;
sawCheckForNonNegativeSignedByte = -1000;
sawLoadOfMinValue = false;
previousMethodCall = null;
}
int opcodesSincePendingAbsoluteValueBug;
BugInstance pendingAbsoluteValueBug;
SourceLineAnnotation pendingAbsoluteValueBugSourceLine;
boolean sawLoadOfMinValue = false;
MethodDescriptor previousMethodCall = null;
@Override
public void sawOpcode(int seen) {
if (isMethodCall()) {
MethodDescriptor called = getMethodDescriptorOperand();
if (previousMethodCall != null && !stack.isJumpTarget(getPC())) {
if ("toString".equals(called.getName())
&& "java/lang/Integer".equals(called.getClassDescriptor().getClassName())
&& "valueOf".equals(previousMethodCall.getName())
&& "(I)Ljava/lang/Integer;".equals(previousMethodCall.getSignature())
) {
MethodAnnotation preferred = new MethodAnnotation("java.lang.Integer", "toString", "(I)Ljava/lang/String;", true);
BugInstance bug = new BugInstance(this, "DM_BOXED_PRIMITIVE_TOSTRING", HIGH_PRIORITY).addClassAndMethod(this)
.addCalledMethod(this).addMethod(preferred).describe(MethodAnnotation.SHOULD_CALL);
accumulator.accumulateBug(bug, this);
} else if ("intValue".equals(called.getName())
&& "java/lang/Integer".equals(called.getClassDescriptor().getClassName())
&& "java/lang/Integer".equals(previousMethodCall.getSlashedClassName())
&& ("<init>".equals(previousMethodCall.getName())
&& "(Ljava/lang/String;)V".equals(previousMethodCall.getSignature())
|| "valueOf".equals(previousMethodCall.getName())
&& "(Ljava/lang/String;)Ljava/lang/Integer;".equals(previousMethodCall.getSignature())
)) {
MethodAnnotation preferred = new MethodAnnotation("java.lang.Integer", "parseInt", "(Ljava/lang/String;)I", true);
BugInstance bug = new BugInstance(this, "DM_BOXED_PRIMITIVE_FOR_PARSING", HIGH_PRIORITY).addClassAndMethod(this)
.addCalledMethod(this).addMethod(preferred).describe(MethodAnnotation.SHOULD_CALL);
accumulator.accumulateBug(bug, this);
} else if ("longValue".equals(called.getName())
&& "java/lang/Long".equals(called.getClassDescriptor().getClassName())
&& "java/lang/Long".equals(previousMethodCall.getSlashedClassName())
&& ( "<init>".equals(previousMethodCall.getName())
&& "(Ljava/lang/String;)V".equals(previousMethodCall.getSignature())
|| "valueOf".equals(previousMethodCall.getName())
&& "(Ljava/lang/String;)Ljava/lang/Long;".equals(previousMethodCall.getSignature()))
) {
MethodAnnotation preferred = new MethodAnnotation("java.lang.Long", "parseLong", "(Ljava/lang/String;)J", true);
BugInstance bug = new BugInstance(this, "DM_BOXED_PRIMITIVE_FOR_PARSING", HIGH_PRIORITY).addClassAndMethod(this)
.addCalledMethod(this).addMethod(preferred).describe(MethodAnnotation.SHOULD_CALL);
accumulator.accumulateBug(bug, this);
} else if("compareTo".equals(called.getName())
&& "valueOf".equals(previousMethodCall.getName())
&& called.getClassDescriptor().equals(previousMethodCall.getClassDescriptor()) && !previousMethodCall.getSignature().startsWith("(Ljava/lang/String;")
) {
String primitiveType = ClassName.getPrimitiveType(called.getClassDescriptor().getClassName());
XMethod rvo = stack.getStackItem(1).getReturnValueOf();
XField field = stack.getStackItem(1).getXField();
String signature;
if (rvo != null) {
signature = new SignatureParser(rvo.getSignature()).getReturnTypeSignature();
} else if (field != null) {
signature = field.getSignature();
} else {
signature = "";
}
if (primitiveType != null
&& (previousMethodCall.equals(rvo) || signature.equals(primitiveType))
&& (getThisClass().getMajor() >= MAJOR_1_7 || getThisClass().getMajor() >= MAJOR_1_4
&& (primitiveType.equals("D") || primitiveType.equals("F")))) {
MethodDescriptor shouldCall = new MethodDescriptor(called.getClassDescriptor().getClassName(), "compare",
"(" + primitiveType + primitiveType + ")I", true);
BugInstance bug = new BugInstance(this, "DM_BOXED_PRIMITIVE_FOR_COMPARE",
primitiveType.equals("Z") ? LOW_PRIORITY : primitiveType.equals("B") ? NORMAL_PRIORITY
: HIGH_PRIORITY).addClassAndMethod(this).addCalledMethod(this).addMethod(shouldCall)
.describe(MethodAnnotation.SHOULD_CALL);
accumulator.accumulateBug(bug, this);
}
}
}
previousMethodCall = called;
} else {
previousMethodCall = null;
}
if (seen == LDC || seen == LDC_W || seen == LDC2_W) {
Constant c = getConstantRefOperand();
if (testingEnabled && (c instanceof ConstantInteger && ((ConstantInteger) c).getBytes() == MICROS_PER_DAY_OVERFLOWED_AS_INT
|| c instanceof ConstantLong && ((ConstantLong) c).getBytes() == MICROS_PER_DAY_OVERFLOWED_AS_INT)) {
BugInstance bug = new BugInstance(this, "TESTING", HIGH_PRIORITY).addClassAndMethod(this)
.addString("Did you mean MICROS_PER_DAY").addInt(MICROS_PER_DAY_OVERFLOWED_AS_INT)
.describe(IntAnnotation.INT_VALUE);
accumulator.accumulateBug(bug, this);
}
if ((c instanceof ConstantInteger && ((ConstantInteger) c).getBytes() == Integer.MIN_VALUE
|| c instanceof ConstantLong && ((ConstantLong) c).getBytes() == Long.MIN_VALUE)) {
sawLoadOfMinValue = true;
pendingAbsoluteValueBug = null;
pendingAbsoluteValueBugSourceLine = null;
absoluteValueAccumulator.clearBugs();
}
}
if (seen == LCMP) {
OpcodeStack.Item left = stack.getStackItem(1);
OpcodeStack.Item right = stack.getStackItem(0);
checkForCompatibleLongComparison(left, right);
checkForCompatibleLongComparison(right, left);
}
if (stack.getStackDepth() >= 2) {
switch (seen) {
case IF_ICMPEQ:
case IF_ICMPNE:
case IF_ICMPLE:
case IF_ICMPGE:
case IF_ICMPLT:
case IF_ICMPGT:
OpcodeStack.Item item0 = stack.getStackItem(0);
OpcodeStack.Item item1 = stack.getStackItem(1);
if (item0.getConstant() instanceof Integer) {
OpcodeStack.Item tmp = item0;
item0 = item1;
item1 = tmp;
}
Object constant1 = item1.getConstant();
XMethod returnValueOf = item0.getReturnValueOf();
if (constant1 instanceof Integer
&& returnValueOf != null
&& "getYear".equals(returnValueOf.getName())
&& ("java.util.Date".equals(returnValueOf.getClassName()) || "java.sql.Date".equals(returnValueOf.getClassName()))) {
int year = (Integer) constant1;
if (testingEnabled && year > 1900) {
accumulator.accumulateBug(
new BugInstance(this, "TESTING", HIGH_PRIORITY).addClassAndMethod(this)
.addString("Comparison of getYear does understand that it returns year-1900")
.addMethod(returnValueOf).describe(MethodAnnotation.METHOD_CALLED).addInt(year)
.describe(IntAnnotation.INT_VALUE), this);
}
}
break;
default:
break;
}
}
// System.out.printf("%4d %10s: %s\n", getPC(), OPCODE_NAMES[seen],
// stack);
if (seen == IFLT && stack.getStackDepth() > 0 && stack.getStackItem(0).getSpecialKind() == OpcodeStack.Item.SIGNED_BYTE) {
sawCheckForNonNegativeSignedByte = getPC();
}
if (pendingAbsoluteValueBug != null) {
if (opcodesSincePendingAbsoluteValueBug == 0) {
opcodesSincePendingAbsoluteValueBug++;
} else {
if (seen == IREM) {
OpcodeStack.Item top = stack.getStackItem(0);
Object constantValue = top.getConstant();
if (constantValue instanceof Number && Util.isPowerOfTwo(((Number) constantValue).intValue())) {
pendingAbsoluteValueBug.setPriority(Priorities.LOW_PRIORITY);
}
}
/*
if (false)
try {
pendingAbsoluteValueBug.addString(OPCODE_NAMES[getPrevOpcode(1)] + ":" + OPCODE_NAMES[seen] + ":"
+ OPCODE_NAMES[getNextOpcode()]);
} catch (Exception e) {
pendingAbsoluteValueBug.addString(OPCODE_NAMES[getPrevOpcode(1)] + ":" + OPCODE_NAMES[seen]);
}
*/
absoluteValueAccumulator.accumulateBug(pendingAbsoluteValueBug, pendingAbsoluteValueBugSourceLine);
pendingAbsoluteValueBug = null;
pendingAbsoluteValueBugSourceLine = null;
}
}
if (seen == INVOKESTATIC
&& "org/easymock/EasyMock".equals(getClassConstantOperand())
&& ("replay".equals(getNameConstantOperand()) || "verify".equals(getNameConstantOperand()) || getNameConstantOperand()
.startsWith("reset")) && "([Ljava/lang/Object;)V".equals(getSigConstantOperand())
&& getPrevOpcode(1) == ANEWARRAY && getPrevOpcode(2) == ICONST_0) {
accumulator.accumulateBug(new BugInstance(this, "DMI_VACUOUS_CALL_TO_EASYMOCK_METHOD", NORMAL_PRIORITY)
.addClassAndMethod(this).addCalledMethod(this), this);
}
if ((seen == INVOKESTATIC || seen == INVOKEVIRTUAL || seen == INVOKESPECIAL || seen == INVOKEINTERFACE)
&& getSigConstantOperand().indexOf("Ljava/lang/Runnable;") >= 0) {
SignatureParser parser = new SignatureParser(getSigConstantOperand());
int count = 0;
for (Iterator<String> i = parser.parameterSignatureIterator(); i.hasNext(); count++) {
String parameter = i.next();
if ("Ljava/lang/Runnable;".equals(parameter)) {
OpcodeStack.Item item = stack.getStackItem(parser.getNumParameters() - 1 - count);
if ("Ljava/lang/Thread;".equals(item.getSignature())) {
accumulator.accumulateBug(new BugInstance(this, "DMI_THREAD_PASSED_WHERE_RUNNABLE_EXPECTED",
NORMAL_PRIORITY).addClassAndMethod(this).addCalledMethod(this), this);
}
}
}
}
if (prevOpcode == I2L && seen == INVOKESTATIC && "java/lang/Double".equals(getClassConstantOperand())
&& "longBitsToDouble".equals(getNameConstantOperand())) {
accumulator.accumulateBug(new BugInstance(this, "DMI_LONG_BITS_TO_DOUBLE_INVOKED_ON_INT", HIGH_PRIORITY)
.addClassAndMethod(this).addCalledMethod(this), this);
}
/**
* Since you can change the number of core threads for a scheduled
* thread pool executor, disabling this for now
*
if (false && seen == INVOKESPECIAL
&& getClassConstantOperand().equals("java/util/concurrent/ScheduledThreadPoolExecutor")
&& getNameConstantOperand().equals("<init>")) {
int arguments = getNumberArguments(getSigConstantOperand());
OpcodeStack.Item item = stack.getStackItem(arguments - 1);
Object value = item.getConstant();
if (value instanceof Integer && ((Integer) value).intValue() == 0)
accumulator.accumulateBug(new BugInstance(this, "DMI_SCHEDULED_THREAD_POOL_EXECUTOR_WITH_ZERO_CORE_THREADS",
HIGH_PRIORITY).addClassAndMethod(this), this);
}
*/
for(SubDetector subDetector : subDetectors) {
subDetector.sawOpcode(seen);
}
if (!sawLoadOfMinValue && seen == INVOKESTATIC &&
ClassName.isMathClass(getClassConstantOperand()) && "abs".equals(getNameConstantOperand())
) {
OpcodeStack.Item item0 = stack.getStackItem(0);
int special = item0.getSpecialKind();
if (special == OpcodeStack.Item.RANDOM_INT) {
pendingAbsoluteValueBug = new BugInstance(this, "RV_ABSOLUTE_VALUE_OF_RANDOM_INT", HIGH_PRIORITY)
.addClassAndMethod(this);
pendingAbsoluteValueBugSourceLine = SourceLineAnnotation.fromVisitedInstruction(this);
opcodesSincePendingAbsoluteValueBug = 0;
}
else if (special == OpcodeStack.Item.HASHCODE_INT) {
pendingAbsoluteValueBug = new BugInstance(this, "RV_ABSOLUTE_VALUE_OF_HASHCODE", HIGH_PRIORITY)
.addClassAndMethod(this);
pendingAbsoluteValueBugSourceLine = SourceLineAnnotation.fromVisitedInstruction(this);
opcodesSincePendingAbsoluteValueBug = 0;
}
}
try {
int stackLoc = stackEntryThatMustBeNonnegative(seen);
if (stackLoc >= 0) {
OpcodeStack.Item tos = stack.getStackItem(stackLoc);
switch (tos.getSpecialKind()) {
case OpcodeStack.Item.HASHCODE_INT_REMAINDER:
accumulator.accumulateBug(new BugInstance(this, "RV_REM_OF_HASHCODE", HIGH_PRIORITY).addClassAndMethod(this),
this);
break;
case OpcodeStack.Item.RANDOM_INT:
case OpcodeStack.Item.RANDOM_INT_REMAINDER:
accumulator.accumulateBug(
new BugInstance(this, "RV_REM_OF_RANDOM_INT", HIGH_PRIORITY).addClassAndMethod(this), this);
break;
default:
break;
}
}
if (seen == IREM) {
OpcodeStack.Item item0 = stack.getStackItem(0);
Object constant0 = item0.getConstant();
if (constant0 instanceof Integer && ((Integer) constant0).intValue() == 1) {
accumulator.accumulateBug(new BugInstance(this, "INT_BAD_REM_BY_1", HIGH_PRIORITY).addClassAndMethod(this),
this);
}
}
if (stack.getStackDepth() >= 1 && (seen == LOOKUPSWITCH || seen == TABLESWITCH)) {
OpcodeStack.Item item0 = stack.getStackItem(0);
if (item0.getSpecialKind() == OpcodeStack.Item.SIGNED_BYTE) {
int[] switchLabels = getSwitchLabels();
int[] switchOffsets = getSwitchOffsets();
for (int i = 0; i < switchLabels.length; i++) {
int v = switchLabels[i];
if (v <= -129 || v >= 128) {
accumulator.accumulateBug(new BugInstance(this, "INT_BAD_COMPARISON_WITH_SIGNED_BYTE", HIGH_PRIORITY)
.addClassAndMethod(this).addInt(v).describe(IntAnnotation.INT_VALUE),
SourceLineAnnotation.fromVisitedInstruction(this, getPC() + switchOffsets[i]));
}
}
}
}
// check for use of a signed byte where it is assumed the value can be outside
// the -128...127 range
if (stack.getStackDepth() >= 2) {
switch (seen) {
case IF_ICMPEQ:
case IF_ICMPNE:
case IF_ICMPLT:
case IF_ICMPLE:
case IF_ICMPGE:
case IF_ICMPGT:
OpcodeStack.Item item0 = stack.getStackItem(0);
OpcodeStack.Item item1 = stack.getStackItem(1);
int seen2 = seen;
if (item0.getConstant() != null) {
OpcodeStack.Item tmp = item0;
item0 = item1;
item1 = tmp;
switch (seen) {
case IF_ICMPLT:
seen2 = IF_ICMPGT;
break;
case IF_ICMPGE:
seen2 = IF_ICMPLE;
break;
case IF_ICMPGT:
seen2 = IF_ICMPLT;
break;
case IF_ICMPLE:
seen2 = IF_ICMPGE;
break;
default:
break;
}
}
Object constant1 = item1.getConstant();
if (item0.getSpecialKind() == OpcodeStack.Item.SIGNED_BYTE && constant1 instanceof Number) {
int v1 = ((Number) constant1).intValue();
if (v1 <= -129 || v1 >= 128 || v1 == 127 && !(seen2 == IF_ICMPEQ || seen2 == IF_ICMPNE)) {
int priority = HIGH_PRIORITY;
if (v1 == 127) {
switch (seen2) {
case IF_ICMPGT: // 127 > x
priority = LOW_PRIORITY;
break;
case IF_ICMPGE: // 127 >= x : always true
priority = NORMAL_PRIORITY;
break;
case IF_ICMPLT: // 127 < x : never true
priority = NORMAL_PRIORITY;
break;
case IF_ICMPLE: // 127 <= x
priority = LOW_PRIORITY;
break;
}
} else if (v1 == 128) {
switch (seen2) {
case IF_ICMPGT: // 128 > x; always true
priority = NORMAL_PRIORITY;
break;
case IF_ICMPGE: // 128 >= x
priority = HIGH_PRIORITY;
break;
case IF_ICMPLT: // 128 < x
priority = HIGH_PRIORITY;
break;
case IF_ICMPLE: // 128 <= x; never true
priority = NORMAL_PRIORITY;
break;
}
} else if (v1 <= -129) {
priority = NORMAL_PRIORITY;
}
if (getPC() - sawCheckForNonNegativeSignedByte < 10) {
priority++;
}
accumulator.accumulateBug(new BugInstance(this, "INT_BAD_COMPARISON_WITH_SIGNED_BYTE", priority)
.addClassAndMethod(this).addInt(v1).describe(IntAnnotation.INT_VALUE).addValueSource(item0, this), this);
}
} else if (item0.getSpecialKind() == OpcodeStack.Item.NON_NEGATIVE && constant1 instanceof Number) {
int v1 = ((Number) constant1).intValue();
if (v1 < 0) {
accumulator.accumulateBug(new BugInstance(this, "INT_BAD_COMPARISON_WITH_NONNEGATIVE_VALUE",
HIGH_PRIORITY).addClassAndMethod(this).addInt(v1).describe(IntAnnotation.INT_VALUE).addValueSource(item0, this), this);
}
}
}
}
switch (seen) {
case IFGE:
case IFLT:
if(stack.getStackDepth() > 0 && stack.getStackItem(0).getSpecialKind() == OpcodeStack.Item.NON_NEGATIVE) {
OpcodeStack.Item top = stack.getStackItem(0);
if (top.getRegisterNumber() != -1 && getMaxPC() > getNextPC() + 6) {
if (false) {
for(int i = -2; i <= 0; i++) {
int o = getPrevOpcode(-i);
System.out.printf("%2d %3d %2x %s%n", i, o, o, OPCODE_NAMES[o]);
}
for(int i = 0; i < 7; i++) {
int o = getNextCodeByte(i);
System.out.printf("%2d %3d %2x %s%n", i, o, o, OPCODE_NAMES[o]);
}
}
int jump1, jump2;
if (seen == IFGE) {
jump1 = IF_ICMPLT;
jump2 = IF_ICMPLE;
} else {
jump1 = IF_ICMPGE;
jump2 = IF_ICMPGT;
}
int nextCodeByte0 = getNextCodeByte(0);
int loadConstant = 1;
if (nextCodeByte0 == ILOAD) {
loadConstant = 2;
}
int nextCodeByte1 = getNextCodeByte(loadConstant);
int nextCodeByte2 = getNextCodeByte(loadConstant+1);
int nextJumpOffset = loadConstant+2;
if (nextCodeByte1 == SIPUSH) {
nextJumpOffset++;
}
int nextCodeByteJump = getNextCodeByte(nextJumpOffset);
if (nextCodeByte0 == getPrevOpcode(1)
&& (nextCodeByte1 == BIPUSH || nextCodeByte1 == SIPUSH)
&& (IF_ICMPLT <= nextCodeByteJump && nextCodeByteJump <= IF_ICMPLE))
{
break;
}
}
accumulator.accumulateBug(new BugInstance(this, "INT_BAD_COMPARISON_WITH_NONNEGATIVE_VALUE",
NORMAL_PRIORITY).addClassAndMethod(this).addInt(0).describe(IntAnnotation.INT_VALUE).addValueSource(top, this), this);
}
break;
case IAND:
case LAND:
case IOR:
case LOR:
case IXOR:
case LXOR:
long badValue = (seen == IAND || seen == LAND) ? -1 : 0;
OpcodeStack.Item rhs = stack.getStackItem(0);
OpcodeStack.Item lhs = stack.getStackItem(1);
int prevOpcode = getPrevOpcode(1);
int prevPrevOpcode = getPrevOpcode(2);
if (rhs.hasConstantValue(badValue)
&& (prevOpcode == LDC || prevOpcode == ICONST_0 || prevOpcode == ICONST_M1 || prevOpcode == LCONST_0)
&& prevPrevOpcode != GOTO) {
reportVacuousBitOperation(seen, lhs);
}
}
if (checkForBitIorofSignedByte && seen != I2B) {
String pattern = (prevOpcode == LOR || prevOpcode == IOR) ? "BIT_IOR_OF_SIGNED_BYTE" : "BIT_ADD_OF_SIGNED_BYTE";
int priority = (prevOpcode == LOR || prevOpcode == LADD) ? HIGH_PRIORITY : NORMAL_PRIORITY;
accumulator.accumulateBug(new BugInstance(this, pattern, priority).addClassAndMethod(this), this);
checkForBitIorofSignedByte = false;
} else if ((seen == IOR || seen == LOR || seen == IADD || seen == LADD) && stack.getStackDepth() >= 2) {
OpcodeStack.Item item0 = stack.getStackItem(0);
OpcodeStack.Item item1 = stack.getStackItem(1);
int special0 = item0.getSpecialKind();
int special1 = item1.getSpecialKind();
if (special0 == OpcodeStack.Item.SIGNED_BYTE && special1 == OpcodeStack.Item.LOW_8_BITS_CLEAR
&& !item1.hasConstantValue(256) || special0 == OpcodeStack.Item.LOW_8_BITS_CLEAR
&& !item0.hasConstantValue(256) && special1 == OpcodeStack.Item.SIGNED_BYTE) {
checkForBitIorofSignedByte = true;
} else {
checkForBitIorofSignedByte = false;
}
} else {
checkForBitIorofSignedByte = false;
}
if (prevOpcodeWasReadLine && sinceBufferedInputStreamReady >= 100 && seen == INVOKEVIRTUAL
&& "java/lang/String".equals(getClassConstantOperand()) && getSigConstantOperand().startsWith("()")) {
accumulator.accumulateBug(
new BugInstance(this, "NP_IMMEDIATE_DEREFERENCE_OF_READLINE", NORMAL_PRIORITY).addClassAndMethod(this),
this);
}
if (seen == INVOKEVIRTUAL && "java/io/BufferedReader".equals(getClassConstantOperand())
&& "ready".equals(getNameConstantOperand()) && "()Z".equals(getSigConstantOperand())) {
sinceBufferedInputStreamReady = 0;
} else {
sinceBufferedInputStreamReady++;
}
prevOpcodeWasReadLine = (seen == INVOKEVIRTUAL || seen == INVOKEINTERFACE)
&& "readLine".equals(getNameConstantOperand()) && "()Ljava/lang/String;".equals(getSigConstantOperand());
// System.out.println(randomNextIntState + " " + OPCODE_NAMES[seen]
// + " " + getMethodName());
switch (randomNextIntState) {
case 0:
if (seen == INVOKEVIRTUAL && "java/util/Random".equals(getClassConstantOperand())
&& "nextDouble".equals(getNameConstantOperand()) || seen == INVOKESTATIC
&& ClassName.isMathClass(getClassConstantOperand()) && "random".equals(getNameConstantOperand())) {
randomNextIntState = 1;
}
break;
case 1:
if (seen == D2I) {
accumulator.accumulateBug(new BugInstance(this, "RV_01_TO_INT", HIGH_PRIORITY).addClassAndMethod(this), this);
randomNextIntState = 0;
} else if (seen == DMUL) {
randomNextIntState = 4;
} else if (seen == LDC2_W && getConstantRefOperand() instanceof ConstantDouble
&& ((ConstantDouble) getConstantRefOperand()).getBytes() == Integer.MIN_VALUE) {
randomNextIntState = 0;
} else {
randomNextIntState = 2;
}
break;
case 2:
if (seen == I2D) {
randomNextIntState = 3;
} else if (seen == DMUL) {
randomNextIntState = 4;
} else {
randomNextIntState = 0;
}
break;
case 3:
if (seen == DMUL) {
randomNextIntState = 4;
} else {
randomNextIntState = 0;
}
break;
case 4:
if (seen == D2I) {
accumulator.accumulateBug(
new BugInstance(this, "DM_NEXTINT_VIA_NEXTDOUBLE", NORMAL_PRIORITY).addClassAndMethod(this), this);
}
randomNextIntState = 0;
break;
default:
throw new IllegalStateException();
}
if (isPublicStaticVoidMain
&& seen == INVOKEVIRTUAL
&& getClassConstantOperand().startsWith("javax/swing/")
&& ("show".equals(getNameConstantOperand()) && "()V".equals(getSigConstantOperand())
|| "pack".equals(getNameConstantOperand()) && "()V".equals(getSigConstantOperand()) || "setVisible".equals(getNameConstantOperand()) && "(Z)V".equals(getSigConstantOperand()))) {
accumulator.accumulateBug(
new BugInstance(this, "SW_SWING_METHODS_INVOKED_IN_SWING_THREAD", LOW_PRIORITY).addClassAndMethod(this),
this);
}
// if ((seen == INVOKEVIRTUAL)
// && getClassConstantOperand().equals("java/lang/String")
// && getNameConstantOperand().equals("substring")
// && getSigConstantOperand().equals("(I)Ljava/lang/String;")
// && stack.getStackDepth() > 1) {
// OpcodeStack.Item item = stack.getStackItem(0);
// Object o = item.getConstant();
// if (o != null && o instanceof Integer) {
// int v = ((Integer) o).intValue();
// if (v == 0)
// accumulator.accumulateBug(new BugInstance(this,
// "DMI_USELESS_SUBSTRING", NORMAL_PRIORITY)
// .addClassAndMethod(this)
// .addSourceLine(this));
if ((seen == INVOKEVIRTUAL) && "isAnnotationPresent".equals(getNameConstantOperand())
&& "(Ljava/lang/Class;)Z".equals(getSigConstantOperand()) && stack.getStackDepth() > 0) {
OpcodeStack.Item item = stack.getStackItem(0);
Object value = item.getConstant();
if (value instanceof String) {
String annotationClassName = (String) value;
boolean lacksClassfileRetention = AnalysisContext.currentAnalysisContext().getAnnotationRetentionDatabase()
.lacksRuntimeRetention(annotationClassName.replace('/', '.'));
if (lacksClassfileRetention) {
ClassDescriptor annotationClass = DescriptorFactory.createClassDescriptor(annotationClassName);
accumulator.accumulateBug(
new BugInstance(this, "DMI_ANNOTATION_IS_NOT_VISIBLE_TO_REFLECTION", HIGH_PRIORITY)
.addClassAndMethod(this).addCalledMethod(this).addClass(annotationClass)
.describe(ClassAnnotation.ANNOTATION_ROLE), this);
}
}
}
if ((seen == INVOKEVIRTUAL) && "next".equals(getNameConstantOperand())
&& "()Ljava/lang/Object;".equals(getSigConstantOperand()) && "hasNext".equals(getMethodName())
&& "()Z".equals(getMethodSig()) && stack.getStackDepth() > 0) {
OpcodeStack.Item item = stack.getStackItem(0);
accumulator.accumulateBug(new BugInstance(this, "DMI_CALLING_NEXT_FROM_HASNEXT", item.isInitialParameter()
&& item.getRegisterNumber() == 0 ? NORMAL_PRIORITY : LOW_PRIORITY).addClassAndMethod(this)
.addCalledMethod(this), this);
}
if ((seen == INVOKESPECIAL) && "java/lang/String".equals(getClassConstantOperand())
&& "<init>".equals(getNameConstantOperand()) && "(Ljava/lang/String;)V".equals(getSigConstantOperand())
&& !Subtypes2.isJSP(getThisClass())) {
accumulator.accumulateBug(new BugInstance(this, "DM_STRING_CTOR", NORMAL_PRIORITY).addClassAndMethod(this), this);
}
if (seen == INVOKESTATIC && "java/lang/System".equals(getClassConstantOperand())
&& "runFinalizersOnExit".equals(getNameConstantOperand()) || seen == INVOKEVIRTUAL
&& "java/lang/Runtime".equals(getClassConstantOperand())
&& "runFinalizersOnExit".equals(getNameConstantOperand())) {
accumulator.accumulateBug(
new BugInstance(this, "DM_RUN_FINALIZERS_ON_EXIT", HIGH_PRIORITY).addClassAndMethod(this), this);
}
if ((seen == INVOKESPECIAL) && "java/lang/String".equals(getClassConstantOperand())
&& "<init>".equals(getNameConstantOperand()) && "()V".equals(getSigConstantOperand())) {
accumulator.accumulateBug(new BugInstance(this, "DM_STRING_VOID_CTOR", NORMAL_PRIORITY).addClassAndMethod(this),
this);
}
if (!isPublicStaticVoidMain && seen == INVOKESTATIC && "java/lang/System".equals(getClassConstantOperand())
&& "exit".equals(getNameConstantOperand()) && !"processWindowEvent".equals(getMethodName())
&& !getMethodName().startsWith("windowClos") && getMethodName().indexOf("exit") == -1
&& getMethodName().indexOf("Exit") == -1 && getMethodName().indexOf("crash") == -1
&& getMethodName().indexOf("Crash") == -1 && getMethodName().indexOf("die") == -1
&& getMethodName().indexOf("Die") == -1 && getMethodName().indexOf("main") == -1) {
accumulator.accumulateBug(new BugInstance(this, "DM_EXIT", getMethod().isStatic() ? LOW_PRIORITY
: NORMAL_PRIORITY).addClassAndMethod(this), SourceLineAnnotation.fromVisitedInstruction(this));
}
if (((seen == INVOKESTATIC && "java/lang/System".equals(getClassConstantOperand())) || (seen == INVOKEVIRTUAL && "java/lang/Runtime".equals(getClassConstantOperand())))
&& "gc".equals(getNameConstantOperand())
&& "()V".equals(getSigConstantOperand())
&& !getDottedClassName().startsWith("java.lang")
&& !getMethodName().startsWith("gc") && !getMethodName().endsWith("gc")) {
if (gcInvocationBugReport == null) {
// System.out.println("Saw call to GC");
if (isPublicStaticVoidMain) {
// System.out.println("Skipping GC complaint in main method");
return;
}
if (isTestMethod(getMethod())) {
return;
}
// Just save this report in a field; it will be flushed
// IFF there were no calls to System.currentTimeMillis();
// in the method.
gcInvocationBugReport = new BugInstance(this, "DM_GC", HIGH_PRIORITY).addClassAndMethod(this).addSourceLine(
this);
gcInvocationPC = getPC();
// System.out.println("GC invocation at pc " + PC);
}
}
if (!isSynthetic && (seen == INVOKESPECIAL) && "java/lang/Boolean".equals(getClassConstantOperand())
&& "<init>".equals(getNameConstantOperand()) && !"java/lang/Boolean".equals(getClassName())) {
int majorVersion = getThisClass().getMajor();
if (majorVersion >= MAJOR_1_4) {
accumulator.accumulateBug(new BugInstance(this, "DM_BOOLEAN_CTOR", NORMAL_PRIORITY).addClassAndMethod(this),
this);
}
}
if ((seen == INVOKESTATIC) && "java/lang/System".equals(getClassConstantOperand())
&& ("currentTimeMillis".equals(getNameConstantOperand()) || "nanoTime".equals(getNameConstantOperand()))) {
sawCurrentTimeMillis = true;
}
if ((seen == INVOKEVIRTUAL) && "java/lang/String".equals(getClassConstantOperand())
&& "toString".equals(getNameConstantOperand()) && "()Ljava/lang/String;".equals(getSigConstantOperand())) {
accumulator
.accumulateBug(new BugInstance(this, "DM_STRING_TOSTRING", LOW_PRIORITY).addClassAndMethod(this), this);
}
if ((seen == INVOKEVIRTUAL) && "java/lang/String".equals(getClassConstantOperand())
&& ("toUpperCase".equals(getNameConstantOperand()) || "toLowerCase".equals(getNameConstantOperand()))
&& "()Ljava/lang/String;".equals(getSigConstantOperand())) {
accumulator.accumulateBug(new BugInstance(this, "DM_CONVERT_CASE", LOW_PRIORITY).addClassAndMethod(this), this);
}
if ((seen == INVOKESPECIAL) && "<init>".equals(getNameConstantOperand())) {
String cls = getClassConstantOperand();
String sig = getSigConstantOperand();
String primitiveType = ClassName.getPrimitiveType(cls);
if (primitiveType != null && sig.charAt(1) == primitiveType.charAt(0)) {
primitiveObjCtorSeen = cls;
} else {
primitiveObjCtorSeen = null;
}
} else if ((primitiveObjCtorSeen != null) && (seen == INVOKEVIRTUAL) && "toString".equals(getNameConstantOperand())
&& getClassConstantOperand().equals(primitiveObjCtorSeen)
&& "()Ljava/lang/String;".equals(getSigConstantOperand())) {
BugInstance bug = new BugInstance(this, "DM_BOXED_PRIMITIVE_TOSTRING", NORMAL_PRIORITY).addClassAndMethod(this).addCalledMethod(this);
MethodAnnotation preferred = new MethodAnnotation(ClassName.toDottedClassName(primitiveObjCtorSeen),
"toString", "("+ClassName.getPrimitiveType(primitiveObjCtorSeen)+")Ljava/lang/String;", true);
bug.addMethod(preferred).describe(MethodAnnotation.SHOULD_CALL);
accumulator.accumulateBug(
bug, this);
primitiveObjCtorSeen = null;
} else {
primitiveObjCtorSeen = null;
}
if ((seen == INVOKESPECIAL) && "<init>".equals(getNameConstantOperand())) {
ctorSeen = true;
} else if (ctorSeen && (seen == INVOKEVIRTUAL) && "java/lang/Object".equals(getClassConstantOperand())
&& "getClass".equals(getNameConstantOperand()) && "()Ljava/lang/Class;".equals(getSigConstantOperand())) {
accumulator.accumulateBug(new BugInstance(this, "DM_NEW_FOR_GETCLASS", NORMAL_PRIORITY).addClassAndMethod(this),
this);
ctorSeen = false;
} else {
ctorSeen = false;
}
if ((seen == INVOKEVIRTUAL) && isMonitorWait(getNameConstantOperand(), getSigConstantOperand())) {
checkMonitorWait();
}
if ((seen == INVOKESPECIAL) && "<init>".equals(getNameConstantOperand())
&& "java/lang/Thread".equals(getClassConstantOperand())) {
String sig = getSigConstantOperand();
if ("()V".equals(sig) || "(Ljava/lang/String;)V".equals(sig)
|| "(Ljava/lang/ThreadGroup;Ljava/lang/String;)V".equals(sig)) {
OpcodeStack.Item invokedOn = stack.getItemMethodInvokedOn(this);
if (!"<init>".equals(getMethodName()) || invokedOn.getRegisterNumber() != 0) {
accumulator.accumulateBug(
new BugInstance(this, "DM_USELESS_THREAD", LOW_PRIORITY).addClassAndMethod(this), this);
}
}
}
if (seen == INVOKESPECIAL && "java/math/BigDecimal".equals(getClassConstantOperand())
&& "<init>".equals(getNameConstantOperand()) && "(D)V".equals(getSigConstantOperand())) {
OpcodeStack.Item top = stack.getStackItem(0);
Object value = top.getConstant();
if (value instanceof Double && !((Double)value).isInfinite() && !((Double)value).isNaN()) {
double arg = ((Double) value).doubleValue();
String dblString = Double.toString(arg);
String bigDecimalString = new BigDecimal(arg).toString();
boolean ok = dblString.equals(bigDecimalString) || dblString.equals(bigDecimalString + ".0");
if (!ok) {
boolean scary = dblString.length() <= 8 && bigDecimalString.length() > 12
&& dblString.toUpperCase().indexOf('E') == -1;
bugReporter.reportBug(new BugInstance(this, "DMI_BIGDECIMAL_CONSTRUCTED_FROM_DOUBLE",
scary ? NORMAL_PRIORITY : LOW_PRIORITY).addClassAndMethod(this).addCalledMethod(this)
.addMethod("java.math.BigDecimal", "valueOf", "(D)Ljava/math/BigDecimal;", true)
.describe(MethodAnnotation.METHOD_ALTERNATIVE_TARGET).addString(dblString)
.addString(bigDecimalString).addSourceLine(this));
}
}
}
} finally {
prevOpcode = seen;
}
}
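// Flags comparisons of an int value (widened via i2l) against a long constant that can never
// equal an int, accumulating an INT_BAD_COMPARISON_WITH_INT_VALUE bug.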
private void checkForCompatibleLongComparison(OpcodeStack.Item left, OpcodeStack.Item right) {
if (left.getSpecialKind() == Item.RESULT_OF_I2L && right.getConstant() != null) {
long value = ((Number) right.getConstant()).longValue();
if (value > Integer.MAX_VALUE || value < Integer.MIN_VALUE) {
int priority = Priorities.HIGH_PRIORITY;
if (value == Integer.MAX_VALUE+1L || value == Integer.MIN_VALUE-1L) {
priority = Priorities.NORMAL_PRIORITY;
}
String stringValue = IntAnnotation.getShortInteger(value)+"L";
if (value == 0xffffffffL) {
stringValue = "0xffffffffL";
} else if (value == 0x80000000L) {
stringValue = "0x80000000L";
}
accumulator.accumulateBug(new BugInstance(this, "INT_BAD_COMPARISON_WITH_INT_VALUE", priority ).addClassAndMethod(this)
.addString(stringValue).describe(StringAnnotation.STRING_NONSTRING_CONSTANT_ROLE)
.addValueSource(left, this) , this);
}
}
}
/**
 * Accumulates an INT_VACUOUS_BIT_OPERATION bug for the given opcode, unless the supplied
 * operand already has a known constant value.
 *
 * @param seen the bit-operation opcode being visited
 * @param item the stack operand the operation is applied to
 */
private void reportVacuousBitOperation(int seen, OpcodeStack.Item item) {
if (item.getConstant() == null) {
accumulator
.accumulateBug(
new BugInstance(this, "INT_VACUOUS_BIT_OPERATION", NORMAL_PRIORITY)
.addClassAndMethod(this)
.addString(OPCODE_NAMES[seen])
.addOptionalAnnotation(
LocalVariableAnnotation.getLocalVariableAnnotation(getMethod(), item, getPC())), this);
}
}
/**
* Return index of stack entry that must be nonnegative.
*
* Return -1 if no stack entry is required to be nonnegative.
*/
private int stackEntryThatMustBeNonnegative(int seen) {
switch (seen) {
case INVOKEINTERFACE:
if ("java/util/List".equals(getClassConstantOperand())) {
return getStackEntryOfListCallThatMustBeNonnegative();
}
break;
case INVOKEVIRTUAL:
if ("java/util/LinkedList".equals(getClassConstantOperand())
|| "java/util/ArrayList".equals(getClassConstantOperand())) {
return getStackEntryOfListCallThatMustBeNonnegative();
}
break;
case IALOAD:
case AALOAD:
case SALOAD:
case CALOAD:
case BALOAD:
case LALOAD:
case DALOAD:
case FALOAD:
return 0;
case IASTORE:
case AASTORE:
case SASTORE:
case CASTORE:
case BASTORE:
case LASTORE:
case DASTORE:
case FASTORE:
return 1;
}
return -1;
}
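// For List add/set/get/remove calls that take an index, returns the stack offset of that index
// argument; returns -1 for any other method.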
private int getStackEntryOfListCallThatMustBeNonnegative() {
String name = getNameConstantOperand();
if (("add".equals(name) || "set".equals(name)) && getSigConstantOperand().startsWith("(I")) {
return 1;
}
if (("get".equals(name) || "remove".equals(name)) && getSigConstantOperand().startsWith("(I)")) {
return 0;
}
return -1;
}
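// Uses type dataflow to detect Object.wait() being invoked on a receiver whose type is a
// subtype of CONDITION_TYPE and reports DM_MONITOR_WAIT_ON_CONDITION when that is the case.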
private void checkMonitorWait() {
try {
TypeDataflow typeDataflow = getClassContext().getTypeDataflow(getMethod());
TypeDataflow.LocationAndFactPair pair = typeDataflow.getLocationAndFactForInstruction(getPC());
if (pair == null) {
return;
}
Type receiver = pair.frame.getInstance(pair.location.getHandle().getInstruction(), getClassContext()
.getConstantPoolGen());
if (!(receiver instanceof ReferenceType)) {
return;
}
if (Hierarchy.isSubtype((ReferenceType) receiver, CONDITION_TYPE)) {
accumulator.accumulateBug(
new BugInstance(this, "DM_MONITOR_WAIT_ON_CONDITION", HIGH_PRIORITY).addClassAndMethod(this), this);
}
} catch (ClassNotFoundException e) {
bugReporter.reportMissingClass(e);
} catch (DataflowAnalysisException e) {
bugReporter.logError("Exception caught by DumbMethods", e);
} catch (CFGBuilderException e) {
bugReporter.logError("Exception caught by DumbMethods", e);
}
}
private boolean isMonitorWait(String name, String sig) {
// System.out.println("Check call " + name + "," + sig);
return "wait".equals(name) && ("()V".equals(sig) || "(J)V".equals(sig) || "(JI)V".equals(sig));
}
@Override
public void visit(Code obj) {
super.visit(obj);
flush();
}
/**
* Flush out cached state at the end of a method.
*/
private void flush() {
if (pendingAbsoluteValueBug != null) {
absoluteValueAccumulator.accumulateBug(pendingAbsoluteValueBug, pendingAbsoluteValueBugSourceLine);
pendingAbsoluteValueBug = null;
pendingAbsoluteValueBugSourceLine = null;
}
accumulator.reportAccumulatedBugs();
if (sawLoadOfMinValue) {
absoluteValueAccumulator.clearBugs();
} else {
absoluteValueAccumulator.reportAccumulatedBugs();
}
if (gcInvocationBugReport != null && !sawCurrentTimeMillis) {
// Make sure the GC invocation is not in an exception handler
// for OutOfMemoryError.
boolean outOfMemoryHandler = false;
for (CodeException handler : exceptionTable) {
if (gcInvocationPC < handler.getHandlerPC() || gcInvocationPC > handler.getHandlerPC() + OOM_CATCH_LEN) {
continue;
}
int catchTypeIndex = handler.getCatchType();
if (catchTypeIndex > 0) {
ConstantPool cp = getThisClass().getConstantPool();
Constant constant = cp.getConstant(catchTypeIndex);
if (constant instanceof ConstantClass) {
String exClassName = (String) ((ConstantClass) constant).getConstantValue(cp);
if ("java/lang/OutOfMemoryError".equals(exClassName)) {
outOfMemoryHandler = true;
break;
}
}
}
}
if (!outOfMemoryHandler) {
bugReporter.reportBug(gcInvocationBugReport);
}
}
sawCurrentTimeMillis = false;
gcInvocationBugReport = null;
exceptionTable = null;
}
}
|
package com.metacodestudio.hotsuploader.files;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.metacodestudio.hotsuploader.models.ReplayFile;
import com.metacodestudio.hotsuploader.models.Status;
import com.metacodestudio.hotsuploader.models.UploadStatus;
import com.metacodestudio.hotsuploader.providers.Provider;
import com.metacodestudio.hotsuploader.utils.FileUtils;
import com.metacodestudio.hotsuploader.utils.OSUtils;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.concurrent.ScheduledService;
import javafx.concurrent.Task;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
public class FileHandler extends ScheduledService<ReplayFile> {
private final List<File> watchDirectories;
private final ObjectMapper mapper;
private Map<Status, ObservableList<ReplayFile>> fileMap;
private List<Provider> providers = Provider.getAll();
private BlockingQueue<ReplayFile> uploadQueue;
public FileHandler(final File root) throws IOException {
mapper = new ObjectMapper();
uploadQueue = new ArrayBlockingQueue<>(2500);
fileMap = new HashMap<>();
watchDirectories = OSUtils.getAccountDirectories(root);
cleanup();
registerInitial();
watchDirectories.stream().map(file -> Paths.get(file.toString())).forEach(path -> {
try {
WatchHandler watchHandler = new WatchHandler(path, fileMap, uploadQueue);
new Thread(watchHandler).start();
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
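// Removes orphaned sidecar .properties files whose corresponding replay file no longer exists.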
private void cleanup() {
List<File> accounts = OSUtils.getAccountDirectories(new File(OSUtils.getApplicationHome(), "Accounts"));
accounts.stream()
.flatMap(folder -> {
File[] children = folder.listFiles();
return Arrays.stream(children != null ? children : new File[0]);
}).map(OSUtils::getReplayFile)
.filter(file -> !file.exists())
.map(OSUtils::getPropertiesFile).forEach(File::delete);
}
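// Scans the watched account directories, loads or creates an upload-status sidecar for every
// replay, queues replays with status NEW for upload and groups all replays by status.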
private void registerInitial() {
List<ReplayFile> fileList = watchDirectories.stream()
.map(ReplayFile::fromDirectory)
.flatMap(List::stream)
.collect(Collectors.toList());
fileMap = fileList.stream()
.map(replay -> {
File propertiesFile = OSUtils.getPropertiesFile(replay.getFile());
try {
if (propertiesFile.exists()) {
String properties = FileUtils.readFileToString(propertiesFile);
replay.addStatuses(Arrays.asList(mapper.readValue(properties, UploadStatus[].class)));
} else {
replay.addStatuses(providers.stream()
.map(UploadStatus::new)
.collect(Collectors.toList()));
FileUtils.writeStringToFile(propertiesFile, mapper.writeValueAsString(replay.getUploadStatuses()));
}
} catch (IOException e) {
throw new RuntimeException(e);
}
if (replay.getStatus() == Status.NEW) {
uploadQueue.add(replay);
}
return replay;
}).collect(Collectors.groupingBy(ReplayFile::getStatus, ConcurrentHashMap::new,
Collectors.toCollection(FXCollections::observableArrayList)));
verifyMap();
}
private void verifyMap() {
Status[] keys = Status.values();
for (final Status key : keys) {
if (!fileMap.containsKey(key)) {
fileMap.put(key, FXCollections.observableArrayList());
}
}
}
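// Persists the replay's current upload statuses to its sidecar properties file.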
public void updateFile(ReplayFile file) throws IOException {
File propertiesFile = OSUtils.getPropertiesFile(file.getFile());
String data = mapper.writeValueAsString(file.getUploadStatuses());
FileUtils.writeStringToFile(propertiesFile, data);
}
@Override
protected Task<ReplayFile> createTask() {
if (isIdle()) {
return new Task<ReplayFile>() {
@Override
protected ReplayFile call() throws Exception {
Thread.sleep(20000);
return null;
}
};
}
try {
ReplayFile take = uploadQueue.take();
UploadTask uploadTask = new UploadTask(providers, take);
final Status oldStatus = take.getStatus();
uploadTask.setOnSucceeded(event -> {
try {
ReplayFile replayFile = uploadTask.get();
Status status = replayFile.getStatus();
if (status == oldStatus) {
return;
}
fileMap.get(oldStatus).remove(replayFile);
fileMap.get(status).add(replayFile);
updateFile(replayFile);
} catch (InterruptedException | ExecutionException | IOException e) {
e.printStackTrace();
}
});
uploadTask.setOnFailed(event -> uploadQueue.add(take));
return uploadTask;
} catch (InterruptedException e) {
// Restore the interrupt flag so callers can observe the interruption
Thread.currentThread().interrupt();
return null;
}
}
public Map<Status, ObservableList<ReplayFile>> getFileMap() {
return fileMap;
}
public boolean isIdle() {
return uploadQueue.isEmpty();
}
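// Resets all replays with the given status back to NEW, re-queues them for upload and moves
// them to the NEW list on the JavaFX application thread before restarting the service.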
public void invalidateByStatus(final Status status) {
ObservableList<ReplayFile> replayFiles = fileMap.get(status);
replayFiles.stream()
.flatMap(replayFile -> replayFile.getUploadStatuses()
.stream())
.forEach(uploadStatus -> uploadStatus.setStatus(Status.NEW));
uploadQueue.addAll(replayFiles);
Platform.runLater(() -> {
fileMap.get(Status.NEW).addAll(replayFiles);
replayFiles.clear();
});
restart();
}
}
|
package org.bimserver.plugins;
import java.io.Closeable;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;
import javax.xml.bind.JAXBException;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.model.Model;
import org.apache.maven.model.Repository;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.bimserver.interfaces.objects.SPluginBundle;
import org.bimserver.interfaces.objects.SPluginBundleType;
import org.bimserver.interfaces.objects.SPluginBundleVersion;
import org.bimserver.interfaces.objects.SPluginInformation;
import org.bimserver.plugins.classloaders.DelegatingClassLoader;
import org.bimserver.plugins.classloaders.EclipsePluginClassloader;
import org.bimserver.plugins.classloaders.FileJarClassLoader;
import org.bimserver.plugins.classloaders.JarClassLoader;
import org.bimserver.plugins.classloaders.PublicFindClassClassLoader;
import org.bimserver.plugins.web.WebModulePlugin;
import org.bimserver.shared.exceptions.PluginException;
import org.bimserver.shared.exceptions.UserException;
import org.bimserver.utils.FakeClosingInputStream;
import org.bimserver.utils.PathUtils;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.collection.CollectRequest;
import org.eclipse.aether.collection.CollectResult;
import org.eclipse.aether.collection.DependencyCollectionException;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.graph.DependencyNode;
import org.eclipse.aether.graph.Exclusion;
import org.eclipse.aether.resolution.ArtifactDescriptorException;
import org.eclipse.aether.resolution.ArtifactDescriptorRequest;
import org.eclipse.aether.resolution.ArtifactDescriptorResult;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import org.eclipse.aether.resolution.ArtifactResult;
import org.eclipse.aether.resolution.DependencyRequest;
import org.eclipse.aether.resolution.DependencyResolutionException;
import org.eclipse.aether.util.graph.visitor.PreorderNodeListGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PluginBundleManager implements AutoCloseable {
private static final Logger LOGGER = LoggerFactory.getLogger(PluginBundleManager.class);
private final Map<PluginBundleIdentifier, PluginBundle> pluginBundleIdentifierToPluginBundle = new HashMap<>();
private final Map<PluginBundleVersionIdentifier, PluginBundle> pluginBundleVersionIdentifierToPluginBundle = new HashMap<>();
private final Map<PluginBundleIdentifier, PluginBundleVersionIdentifier> pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier = new HashMap<>();
private PluginManager pluginManager;
private final Path pluginsDir;
private MavenPluginRepository mavenPluginRepository;
private final List<FileJarClassLoader> jarClassLoaders = new ArrayList<>();
public PluginBundleManager(PluginManager pluginManager, MavenPluginRepository mavenPluginRepository, Path pluginsDir) {
this.pluginManager = pluginManager;
this.mavenPluginRepository = mavenPluginRepository;
this.pluginsDir = pluginsDir;
if (pluginsDir != null) {
if (!Files.isDirectory(pluginsDir)) {
try {
Files.createDirectories(pluginsDir);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
public PluginBundle install(MavenPluginBundle mavenPluginBundle, boolean strictDependencyChecking) throws Exception {
return install(mavenPluginBundle, null, strictDependencyChecking);
}
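// Installs a plugin bundle from a Maven artifact: reads its pom, discovers plugins from
// plugin/plugin.xml when none are given, resolves and class-loads the declared dependencies,
// copies the jar into the plugins directory and finally loads the bundle.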
public PluginBundle install(MavenPluginBundle mavenPluginBundle, List<SPluginInformation> plugins, boolean strictDependencyChecking) throws Exception {
PluginBundleVersionIdentifier pluginBundleVersionIdentifier = mavenPluginBundle.getPluginVersionIdentifier();
MavenXpp3Reader mavenreader = new MavenXpp3Reader();
Model model = null;
try (InputStream pomInputStream = mavenPluginBundle.getPomInputStream()) {
model = mavenreader.read(pomInputStream);
}
if (plugins == null) {
try (InputStream inputStream = mavenPluginBundle.getJarInputStream()) {
try (JarInputStream jarInputStream = new JarInputStream(inputStream)) {
JarEntry nextJarEntry = jarInputStream.getNextJarEntry();
while (nextJarEntry != null) {
if (nextJarEntry.getName().equals("plugin/plugin.xml")) {
// Install all plugins
PluginDescriptor pluginDescriptor = pluginManager.getPluginDescriptor(new FakeClosingInputStream(jarInputStream));
plugins = new ArrayList<>();
pluginManager.processPluginDescriptor(pluginDescriptor, plugins);
for (SPluginInformation info : plugins) {
info.setInstallForAllUsers(true);
info.setInstallForNewUsers(true);
}
break;
}
nextJarEntry = jarInputStream.getNextJarEntry();
}
}
}
}
DelegatingClassLoader delegatingClassLoader = new DelegatingClassLoader(getClass().getClassLoader());
loadDependencies(mavenPluginBundle.getVersion(), strictDependencyChecking, model, delegatingClassLoader);
Path target = pluginsDir.resolve(pluginBundleVersionIdentifier.getFileName());
if (Files.exists(target)) {
throw new PluginException("This plugin has already been installed " + target.getFileName().toString());
}
try (InputStream jarInputStream = mavenPluginBundle.getJarInputStream()) {
Files.copy(jarInputStream, target);
}
return loadPlugin(pluginBundleVersionIdentifier, target, mavenPluginBundle.getPluginBundle(), mavenPluginBundle.getPluginBundleVersion(), plugins, delegatingClassLoader);
}
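// Resolves the bundle's Maven dependencies via Aether (excluding the org.opensourcebim core
// artifacts), verifies already-installed bundle versions when strict checking is enabled and
// adds a FileJarClassLoader for every remaining jar dependency to the delegating class loader.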
private void loadDependencies(String pluginBundleVersion, boolean strictDependencyChecking, Model model,
DelegatingClassLoader delegatingClassLoader)
throws DependencyCollectionException, InvalidVersionSpecificationException, Exception {
if (model.getRepositories() != null) {
for (Repository repository : model.getRepositories()) {
mavenPluginRepository.addRepository(repository.getId(), "default", repository.getUrl());
}
}
List<Dependency> dependenciesToResolve = new ArrayList<>();
for (org.apache.maven.model.Dependency dependency2 : model.getDependencies()) {
String scope = dependency2.getScope();
if (scope != null && (scope.contentEquals("test"))) {
// Skip
continue;
}
Dependency d = new Dependency(new DefaultArtifact(dependency2.getGroupId(), dependency2.getArtifactId(), dependency2.getType(), dependency2.getVersion()), dependency2.getScope());
Set<Exclusion> exclusions = new HashSet<>();
d.setExclusions(exclusions);
exclusions.add(new Exclusion("org.opensourcebim", "pluginbase", null, "jar"));
exclusions.add(new Exclusion("org.opensourcebim", "shared", null, "jar"));
exclusions.add(new Exclusion("org.opensourcebim", "ifcplugins", null, "jar"));
dependenciesToResolve.add(d);
}
CollectRequest collectRequest = new CollectRequest(dependenciesToResolve, null, null);
collectRequest.setRepositories(mavenPluginRepository.getRepositoriesAsList());
CollectResult collectDependencies = mavenPluginRepository.getSystem().collectDependencies(mavenPluginRepository.getSession(), collectRequest);
PreorderNodeListGenerator nlg = new PreorderNodeListGenerator();
DependencyNode rootDep = collectDependencies.getRoot();
rootDep.accept(nlg);
for (Dependency dependency : nlg.getDependencies(true)) {
if (dependency.getScope().contentEquals("test")) {
continue;
}
// LOGGER.info(dependency.getArtifact().getGroupId() + "." + dependency.getArtifact().getArtifactId());
Artifact dependencyArtifact = dependency.getArtifact();
PluginBundleIdentifier pluginBundleIdentifier = new PluginBundleIdentifier(dependencyArtifact.getGroupId(), dependencyArtifact.getArtifactId());
if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleIdentifier)) {
if (strictDependencyChecking) {
String version = dependencyArtifact.getVersion();
if (!version.contains("[") && !version.contains("(")) {
version = "[" + version + "]";
}
VersionRange versionRange = VersionRange.createFromVersionSpec(version);
// String version =
// pluginBundleIdentifierToPluginBundle.get(pluginBundleIdentifier).getPluginBundleVersion().getVersion();
ArtifactVersion artifactVersion = new DefaultArtifactVersion(pluginBundleVersion);
if (!versionRange.containsVersion(artifactVersion)) {
throw new Exception(
"Required dependency " + pluginBundleIdentifier + " is installed, but its version (" + pluginBundleVersion + ") does not comply with the required version (" + dependencyArtifact.getVersion() + ")");
}
} else {
LOGGER.info("Skipping strict dependency checking for dependency " + dependencyArtifact.getArtifactId());
}
} else {
try {
if (dependencyArtifact.getGroupId().contentEquals("com.sun.xml.ws")) {
continue;
}
MavenPluginLocation mavenPluginLocation = mavenPluginRepository.getPluginLocation(dependencyArtifact.getGroupId(), dependencyArtifact.getArtifactId());
if (dependencyArtifact.getExtension().contentEquals("jar")) {
Path depJarFile = mavenPluginLocation.getVersionJar(dependencyArtifact.getVersion());
FileJarClassLoader jarClassLoader = new FileJarClassLoader(pluginManager, delegatingClassLoader, depJarFile);
jarClassLoaders.add(jarClassLoader);
delegatingClassLoader.add(jarClassLoader);
}
} catch (Exception e) {
e.printStackTrace();
throw new Exception("Required dependency " + pluginBundleIdentifier + " is not installed");
}
}
}
}
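// Loads an already-downloaded plugin bundle jar from the plugins directory, reading its pom
// from META-INF/maven to rebuild the bundle metadata and wiring up its dependencies.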
public PluginBundle loadFromPluginDir(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, SPluginBundleVersion pluginBundleVersion, List<SPluginInformation> plugins, boolean strictDependencyChecking) throws Exception {
Path target = pluginsDir.resolve(pluginBundleVersionIdentifier.getFileName());
if (!Files.exists(target)) {
throw new PluginException(target.toString() + " not found");
}
SPluginBundle sPluginBundle = new SPluginBundle();
MavenXpp3Reader mavenreader = new MavenXpp3Reader();
Model model = null;
try (JarFile jarFile = new JarFile(target.toFile())) {
ZipEntry entry = jarFile.getEntry("META-INF/maven/" + pluginBundleVersion.getGroupId() + "/" + pluginBundleVersion.getArtifactId() + "/pom.xml");
try (InputStream inputStream = jarFile.getInputStream(entry)) {
model = mavenreader.read(inputStream);
}
}
sPluginBundle.setOrganization(model.getOrganization().getName());
sPluginBundle.setName(model.getName());
DelegatingClassLoader delegatingClassLoader = new DelegatingClassLoader(getClass().getClassLoader());
loadDependencies(model.getVersion(), strictDependencyChecking, model, delegatingClassLoader);
for (org.apache.maven.model.Dependency dependency : model.getDependencies()) {
if (dependency.getGroupId().equals("org.opensourcebim") && (dependency.getArtifactId().equals("shared") || dependency.getArtifactId().equals("pluginbase") || dependency.getArtifactId().equals("ifcplugins"))) {
// TODO Skip, we should also check the version though
} else {
PluginBundleIdentifier pluginBundleIdentifier = new PluginBundleIdentifier(dependency.getGroupId(), dependency.getArtifactId());
if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleIdentifier)) {
if (strictDependencyChecking) {
VersionRange versionRange = VersionRange.createFromVersion(dependency.getVersion());
String version = pluginBundleIdentifierToPluginBundle.get(pluginBundleIdentifier).getPluginBundleVersion().getVersion();
ArtifactVersion artifactVersion = new DefaultArtifactVersion(version);
if (!versionRange.containsVersion(artifactVersion)) {
throw new Exception("Required dependency " + pluginBundleIdentifier + " is installed, but its version (" + version + ") does not comply with the required version (" + dependency.getVersion() + ")");
}
} else {
LOGGER.info("Skipping strict dependency checking for dependency " + dependency.getArtifactId());
}
} else {
if (dependency.getGroupId().equals("org.opensourcebim") && (dependency.getArtifactId().equals("shared") || dependency.getArtifactId().equals("pluginbase"))) {
} else {
MavenPluginLocation mavenPluginLocation = mavenPluginRepository.getPluginLocation(dependency.getGroupId(), dependency.getArtifactId());
try {
Path depJarFile = mavenPluginLocation.getVersionJar(dependency.getVersion());
FileJarClassLoader jarClassLoader = new FileJarClassLoader(pluginManager, delegatingClassLoader, depJarFile);
jarClassLoaders.add(jarClassLoader);
delegatingClassLoader.add(jarClassLoader);
} catch (Exception e) {
// The dependency jar could not be resolved; continue loading the bundle without it
}
}
}
}
}
return loadPlugin(pluginBundleVersionIdentifier, target, sPluginBundle, pluginBundleVersion, plugins, delegatingClassLoader);
}
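// Instantiates every plugin declared in the descriptor through the bundle class loader, wraps
// web module plugins in JsonWebModule and registers the resulting PluginBundle in the lookup maps.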
@SuppressWarnings({ "unchecked", "rawtypes" })
private PluginBundle loadPlugins(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, ResourceLoader resourceLoader, ClassLoader classLoader, URI location, String classLocation, PluginDescriptor pluginDescriptor,
PluginSourceType pluginType, Set<org.bimserver.plugins.Dependency> dependencies, SPluginBundle sPluginBundle, SPluginBundleVersion sPluginBundleVersion) throws PluginException {
sPluginBundle.setInstalledVersion(sPluginBundleVersion);
PluginBundle pluginBundle = new PluginBundleImpl(pluginBundleVersionIdentifier, sPluginBundle, sPluginBundleVersion, pluginDescriptor);
if (classLoader instanceof Closeable) {
pluginBundle.addCloseable((Closeable) classLoader);
}
for (AbstractPlugin pluginImplementation : pluginDescriptor.getPlugins()) {
if (pluginImplementation instanceof JavaPlugin) {
JavaPlugin javaPlugin = (JavaPlugin) pluginImplementation;
String interfaceClassName = javaPlugin.getInterfaceClass().trim().replace("\n", "");
try {
Class interfaceClass = getClass().getClassLoader().loadClass(interfaceClassName);
if (javaPlugin.getImplementationClass() != null) {
String implementationClassName = javaPlugin.getImplementationClass().trim().replace("\n", "");
try {
Class implementationClass = classLoader.loadClass(implementationClassName);
Plugin plugin = (Plugin) implementationClass.newInstance();
pluginBundle.add(pluginManager.loadPlugin(pluginBundle, interfaceClass, location, classLocation, plugin, classLoader, pluginType, pluginImplementation, dependencies, plugin.getClass().getName()));
} catch (NoClassDefFoundError e) {
throw new PluginException("Implementation class '" + implementationClassName + "' not found", e);
} catch (ClassNotFoundException e) {
throw new PluginException("Implementation class '" + e.getMessage() + "' not found in " + location, e);
} catch (InstantiationException e) {
throw new PluginException(e);
} catch (IllegalAccessException e) {
throw new PluginException(e);
}
}
} catch (ClassNotFoundException e) {
throw new PluginException("Interface class '" + interfaceClassName + "' not found", e);
} catch (Error e) {
throw new PluginException(e);
}
} else if (pluginImplementation instanceof org.bimserver.plugins.WebModulePlugin) {
org.bimserver.plugins.WebModulePlugin webModulePlugin = (org.bimserver.plugins.WebModulePlugin) pluginImplementation;
JsonWebModule jsonWebModule = new JsonWebModule(webModulePlugin);
pluginBundle.add(pluginManager.loadPlugin(pluginBundle, WebModulePlugin.class, location, classLocation, jsonWebModule, classLoader, pluginType, pluginImplementation, dependencies, webModulePlugin.getIdentifier()));
}
}
pluginBundleIdentifierToPluginBundle.put(pluginBundleVersionIdentifier.getPluginBundleIdentifier(), pluginBundle);
pluginBundleVersionIdentifierToPluginBundle.put(pluginBundleVersionIdentifier, pluginBundle);
pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier.put(pluginBundleVersionIdentifier.getPluginBundleIdentifier(), pluginBundleVersionIdentifier);
return pluginBundle;
}
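// Opens the jar with a FileJarClassLoader, reads plugin/plugin.xml into a PluginDescriptor and
// delegates to loadPlugins; the class loader is closed again if loading fails.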
public PluginBundle loadPluginsFromJar(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, Path file, SPluginBundle sPluginBundle, SPluginBundleVersion pluginBundleVersion, ClassLoader parentClassLoader)
throws PluginException {
PluginBundleIdentifier pluginBundleIdentifier = pluginBundleVersionIdentifier.getPluginBundleIdentifier();
if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleIdentifier)) {
throw new PluginException("Plugin " + pluginBundleIdentifier.getHumanReadable() + " already loaded (version " + pluginBundleIdentifierToPluginBundle.get(pluginBundleIdentifier).getPluginBundleVersion().getVersion() + ")");
}
LOGGER.debug("Loading plugins from " + file.toString());
if (!Files.exists(file)) {
throw new PluginException("Not a file: " + file.toString());
}
FileJarClassLoader jarClassLoader = null;
try {
jarClassLoader = new FileJarClassLoader(pluginManager, parentClassLoader, file);
jarClassLoaders.add(jarClassLoader);
final JarClassLoader finalLoader = jarClassLoader;
URL resource = jarClassLoader.findResource("plugin/plugin.xml");
if (resource == null) {
throw new PluginException("No plugin/plugin.xml found in " + file.getFileName().toString());
}
PluginDescriptor pluginDescriptor = null;
try (InputStream pluginStream = resource.openStream()) {
pluginDescriptor = pluginManager.getPluginDescriptor(pluginStream);
if (pluginDescriptor == null) {
jarClassLoader.close();
throw new PluginException("No plugin descriptor could be created");
}
}
LOGGER.debug(pluginDescriptor.toString());
URI fileUri = file.toAbsolutePath().toUri();
URI jarUri = new URI("jar:" + fileUri.toString());
ResourceLoader resourceLoader = new ResourceLoader() {
@Override
public InputStream load(String name) {
return finalLoader.getResourceAsStream(name);
}
};
return loadPlugins(pluginBundleVersionIdentifier, resourceLoader, jarClassLoader, jarUri, file.toAbsolutePath().toString(), pluginDescriptor, PluginSourceType.JAR_FILE, new HashSet<org.bimserver.plugins.Dependency>(),
sPluginBundle, pluginBundleVersion);
} catch (Exception e) {
if (jarClassLoader != null) {
try {
jarClassLoader.close();
} catch (IOException e1) {
LOGGER.error("", e1);
}
}
throw new PluginException(e);
}
}
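// Loads a plugin bundle from a local Maven/Eclipse project checkout: parses its pom.xml,
// resolves its dependencies and loads the plugins from the project's target/classes folder.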
public PluginBundle loadJavaProject(Path projectRoot, Path pomFile, Path pluginFolder, PluginDescriptor pluginDescriptor, boolean resolveRemoteDependencies) throws PluginException, FileNotFoundException, IOException, XmlPullParserException {
MavenXpp3Reader mavenreader = new MavenXpp3Reader();
Model model = null;
try (FileReader reader = new FileReader(pomFile.toFile())) {
model = mavenreader.read(reader);
}
PluginBundleVersionIdentifier pluginBundleVersionIdentifier = new PluginBundleVersionIdentifier(model.getGroupId(), model.getArtifactId(), model.getVersion());
if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleVersionIdentifier.getPluginBundleIdentifier())) {
throw new PluginException("Plugin " + pluginBundleVersionIdentifier.getPluginBundleIdentifier().getHumanReadable() + " already loaded (version "
+ pluginBundleIdentifierToPluginBundle.get(pluginBundleVersionIdentifier.getPluginBundleIdentifier()).getPluginBundleVersion().getVersion() + ")");
}
DelegatingClassLoader delegatingClassLoader = new DelegatingClassLoader(getClass().getClassLoader());
PublicFindClassClassLoader previous = new PublicFindClassClassLoader(getClass().getClassLoader()) {
@Override
public Class<?> findClass(String name) throws ClassNotFoundException {
return null;
}
@Override
public URL findResource(String name) {
return null;
}
@Override
public void dumpStructure(int indent) {
}
};
Set<org.bimserver.plugins.Dependency> bimServerDependencies = new HashSet<>();
pluginBundleVersionIdentifier = new PluginBundleVersionIdentifier(new PluginBundleIdentifier(model.getGroupId(), model.getArtifactId()), model.getVersion());
previous = loadDependencies(bimServerDependencies, model, previous, resolveRemoteDependencies);
delegatingClassLoader.add(previous);
// Path libFolder = projectRoot.resolve("lib");
// loadDependencies(libFolder, delegatingClassLoader);
EclipsePluginClassloader pluginClassloader = new EclipsePluginClassloader(delegatingClassLoader, projectRoot);
// pluginClassloader.dumpStructure(0);
ResourceLoader resourceLoader = new ResourceLoader() {
@Override
public InputStream load(String name) {
try {
return Files.newInputStream(pluginFolder.resolve(name));
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
};
SPluginBundle sPluginBundle = new SPluginBundle();
if (model.getOrganization() == null) {
throw new PluginException("Plugis are required to have an organization in the pom.xml file");
}
sPluginBundle.setOrganization(model.getOrganization().getName());
sPluginBundle.setName(model.getName());
SPluginBundleVersion sPluginBundleVersion = createPluginBundleVersionFromMavenModel(model, true);
Path icon = projectRoot.resolve("icon.png");
if (Files.exists(icon)) {
byte[] iconBytes = Files.readAllBytes(icon);
sPluginBundleVersion.setIcon(iconBytes);
}
sPluginBundle.setInstalledVersion(sPluginBundleVersion);
return loadPlugins(pluginBundleVersionIdentifier, resourceLoader, pluginClassloader, projectRoot.toUri(), projectRoot.resolve("target/classes").toString(), pluginDescriptor, PluginSourceType.ECLIPSE_PROJECT, bimServerDependencies,
sPluginBundle, sPluginBundleVersion);
}
public PluginBundle loadPluginsFromEclipseProject(Path projectRoot) throws PluginException {
try {
if (!Files.isDirectory(projectRoot)) {
throw new PluginException("No directory: " + projectRoot.toString());
}
final Path pluginFolder = projectRoot.resolve("plugin");
if (!Files.isDirectory(pluginFolder)) {
throw new PluginException("No 'plugin' directory found in " + projectRoot.toString());
}
Path pluginFile = pluginFolder.resolve("plugin.xml");
if (!Files.exists(pluginFile)) {
throw new PluginException("No 'plugin.xml' found in " + pluginFolder.toString());
}
PluginDescriptor pluginDescriptor = null;
try (InputStream newInputStream = Files.newInputStream(pluginFile)) {
pluginDescriptor = pluginManager.getPluginDescriptor(newInputStream);
}
Path pomFile = projectRoot.resolve("pom.xml");
if (!Files.exists(pomFile)) {
throw new PluginException("No pom.xml found in " + projectRoot);
}
// Path packageFile = projectRoot.resolve("package.json");
// if (Files.exists(packageFile)) {
// return loadJavaScriptProject(projectRoot, packageFile,
// pluginFolder, pluginDescriptor);
// } else if (Files.exists(pomFile)) {
PluginBundle pluginBundle = loadJavaProject(projectRoot, pomFile, pluginFolder, pluginDescriptor, false);
// } else {
// throw new PluginException("No pom.xml or package.json found in "
// + projectRoot.toString());
List<SPluginInformation> plugins = new ArrayList<>();
pluginManager.processPluginDescriptor(pluginDescriptor, plugins);
for (SPluginInformation sPluginInformation : plugins) {
if (sPluginInformation.isEnabled()) {
// For local plugins, we assume to install for all users
sPluginInformation.setInstallForAllUsers(true);
sPluginInformation.setInstallForNewUsers(true);
PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
if (pluginContext == null) {
throw new PluginException("No plugin context found for " + sPluginInformation.getIdentifier());
}
}
}
try {
long pluginBundleVersionId = pluginManager.pluginBundleInstalled(pluginBundle);
for (SPluginInformation sPluginInformation : plugins) {
if (sPluginInformation.isEnabled()) {
PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
// PluginConfiguration pluginConfiguration = PluginConfiguration.fromDefaults(pluginContext.getPlugin().getSystemSettingsDefinition());
// pluginContext.initialize(pluginConfiguration);
pluginManager.pluginInstalled(pluginBundleVersionId, pluginContext, sPluginInformation);
}
}
} catch (Exception e) {
LOGGER.error("", e);
throw new PluginException(e);
}
return pluginBundle;
} catch (JAXBException e) {
throw new PluginException(e);
} catch (FileNotFoundException e) {
throw new PluginException(e);
} catch (IOException e) {
throw new PluginException(e);
} catch (XmlPullParserException e) {
throw new PluginException(e);
}
}
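// Recursively resolves a project's dependencies for local development: the workspace PluginBase
// and Shared class folders are added directly, all other artifacts are resolved through Aether
// and chained onto the previous class loader via jar class loaders.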
private PublicFindClassClassLoader loadDependencies(Set<org.bimserver.plugins.Dependency> bimServerDependencies, Model model, PublicFindClassClassLoader previous, boolean resolveRemoteDependencies) throws FileNotFoundException, IOException {
List<org.apache.maven.model.Dependency> dependencies = model.getDependencies();
Iterator<org.apache.maven.model.Dependency> it = dependencies.iterator();
Path workspaceDir = Paths.get("..");
bimServerDependencies.add(new org.bimserver.plugins.Dependency(workspaceDir.resolve("PluginBase/target/classes")));
bimServerDependencies.add(new org.bimserver.plugins.Dependency(workspaceDir.resolve("Shared/target/classes")));
while (it.hasNext()) {
org.apache.maven.model.Dependency depend = it.next();
try {
if (depend.getGroupId().equals("org.opensourcebim") && (depend.getArtifactId().equals("shared") || depend.getArtifactId().equals("pluginbase") || depend.getArtifactId().equals("ifcplugins"))) {
// Skip this one, because we have already
// TODO we might want to check the version though
continue;
}
if (depend.isOptional() || "test".equals(depend.getScope())) {
continue;
}
Dependency dependency2 = new Dependency(new DefaultArtifact(depend.getGroupId() + ":" + depend.getArtifactId() + ":jar:" + depend.getVersion()), "compile");
DelegatingClassLoader depDelLoader = new DelegatingClassLoader(previous);
if (!dependency2.getArtifact().isSnapshot()) {
if (dependency2.getArtifact().getFile() != null) {
bimServerDependencies.add(new org.bimserver.plugins.Dependency(dependency2.getArtifact().getFile().toPath()));
loadDependencies(dependency2.getArtifact().getFile().toPath(), depDelLoader);
} else {
ArtifactRequest request = new ArtifactRequest();
request.setArtifact(dependency2.getArtifact());
request.setRepositories(resolveRemoteDependencies ? mavenPluginRepository.getRepositoriesAsList() : mavenPluginRepository.getLocalRepositories());
try {
ArtifactResult resolveArtifact = mavenPluginRepository.getSystem().resolveArtifact(mavenPluginRepository.getSession(), request);
if (resolveArtifact.getArtifact().getFile() != null) {
bimServerDependencies.add(new org.bimserver.plugins.Dependency(resolveArtifact.getArtifact().getFile().toPath()));
loadDependencies(resolveArtifact.getArtifact().getFile().toPath(), depDelLoader);
} else {
// TODO error?
}
} catch (ArtifactResolutionException e) {
LOGGER.error(model.getGroupId() + "." + model.getArtifactId(), e);
}
}
} else {
// Snapshot projects linked in Eclipse
ArtifactRequest request = new ArtifactRequest();
if ((!"test".equals(dependency2.getScope()) && !dependency2.getArtifact().isSnapshot())) {
request.setArtifact(dependency2.getArtifact());
request.setRepositories(mavenPluginRepository.getLocalRepositories());
try {
ArtifactResult resolveArtifact = mavenPluginRepository.getSystem().resolveArtifact(mavenPluginRepository.getSession(), request);
if (resolveArtifact.getArtifact().getFile() != null) {
bimServerDependencies.add(new org.bimserver.plugins.Dependency(resolveArtifact.getArtifact().getFile().toPath()));
loadDependencies(resolveArtifact.getArtifact().getFile().toPath(), depDelLoader);
} else {
// TODO error?
}
} catch (Exception e) {
LOGGER.info(dependency2.getArtifact().toString());
e.printStackTrace();
}
// bimServerDependencies.add(new
// org.bimserver.plugins.Dependency(resolveArtifact.getArtifact().getFile().toPath()));
}
}
ArtifactDescriptorRequest descriptorRequest = new ArtifactDescriptorRequest();
descriptorRequest.setArtifact(dependency2.getArtifact());
descriptorRequest.setRepositories(mavenPluginRepository.getRepositoriesAsList());
ArtifactDescriptorResult descriptorResult = mavenPluginRepository.getSystem().readArtifactDescriptor(mavenPluginRepository.getSession(), descriptorRequest);
CollectRequest collectRequest = new CollectRequest();
collectRequest.setRootArtifact(descriptorResult.getArtifact());
collectRequest.setDependencies(descriptorResult.getDependencies());
collectRequest.setManagedDependencies(descriptorResult.getManagedDependencies());
collectRequest.setRepositories(descriptorResult.getRepositories());
DependencyNode node = mavenPluginRepository.getSystem().collectDependencies(mavenPluginRepository.getSession(), collectRequest).getRoot();
DependencyRequest dependencyRequest = new DependencyRequest();
dependencyRequest.setRoot(node);
CollectResult collectResult = mavenPluginRepository.getSystem().collectDependencies(mavenPluginRepository.getSession(), collectRequest);
PreorderNodeListGenerator nlg = new PreorderNodeListGenerator();
// collectResult.getRoot().accept(new
// ConsoleDependencyGraphDumper());
collectResult.getRoot().accept(nlg);
try {
mavenPluginRepository.getSystem().resolveDependencies(mavenPluginRepository.getSession(), dependencyRequest);
} catch (DependencyResolutionException e) {
// Ignore
}
for (DependencyNode dependencyNode : nlg.getNodes()) {
ArtifactRequest newRequest = new ArtifactRequest(dependencyNode);
newRequest.setRepositories(mavenPluginRepository.getRepositoriesAsList());
ArtifactResult resolveArtifact = mavenPluginRepository.getSystem().resolveArtifact(mavenPluginRepository.getSession(), newRequest);
Artifact artifact = resolveArtifact.getArtifact();
Path jarFile = Paths.get(artifact.getFile().getAbsolutePath());
loadDependencies(jarFile, depDelLoader);
Artifact versionArtifact = new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(), "pom", artifact.getVersion());
ArtifactRequest request = new ArtifactRequest();
request.setArtifact(versionArtifact);
request.setRepositories(mavenPluginRepository.getRepositoriesAsList());
// try {
// ArtifactResult resolveArtifact =
// mavenPluginRepository.getSystem().resolveArtifact(mavenPluginRepository.getSession(),
// request);
// File depPomFile =
// resolveArtifact.getArtifact().getFile();
// if (depPomFile != null) {
// MavenXpp3Reader mavenreader = new MavenXpp3Reader();
// Model depModel = null;
// try (FileReader reader = new FileReader(depPomFile)) {
// try {
// depModel = mavenreader.read(reader);
// } catch (XmlPullParserException e) {
// e.printStackTrace();
// previous = loadDependencies(bimServerDependencies,
// depModel, previous);
// } else {
// LOGGER.info("Artifact not found " + versionArtifact);
// } catch (ArtifactResolutionException e1) {
// LOGGER.error(e1.getMessage());
// EclipsePluginClassloader depLoader = new
// EclipsePluginClassloader(depDelLoader, projectRoot);
bimServerDependencies.add(new org.bimserver.plugins.Dependency(jarFile));
}
previous = depDelLoader;
} catch (DependencyCollectionException e) {
e.printStackTrace();
} catch (ArtifactDescriptorException e2) {
e2.printStackTrace();
} catch (ArtifactResolutionException e) {
e.printStackTrace();
}
}
return previous;
}
private void loadDependencies(Path libFile, DelegatingClassLoader classLoader) throws FileNotFoundException, IOException {
if (libFile.getFileName().toString().toLowerCase().endsWith(".jar")) {
FileJarClassLoader jarClassLoader = new FileJarClassLoader(pluginManager, classLoader, libFile);
jarClassLoaders.add(jarClassLoader);
classLoader.add(jarClassLoader);
}
}
public void loadPluginsFromEclipseProjectNoExceptions(Path projectRoot) {
try {
loadPluginsFromEclipseProject(projectRoot);
} catch (PluginException e) {
LOGGER.error("", e);
}
}
public void loadAllPluginsFromEclipseWorkspace(Path file, boolean showExceptions) throws PluginException, IOException {
if (file != null && Files.isDirectory(file)) {
for (Path project : PathUtils.list(file)) {
if (Files.isDirectory(project)) {
Path pluginDir = project.resolve("plugin");
if (Files.exists(pluginDir)) {
Path pluginFile = pluginDir.resolve("plugin.xml");
if (Files.exists(pluginFile)) {
if (showExceptions) {
loadPluginsFromEclipseProject(project);
} else {
loadPluginsFromEclipseProjectNoExceptions(project);
}
}
}
}
}
}
}
public void loadAllPluginsFromEclipseWorkspaces(Path directory, boolean showExceptions) throws PluginException, IOException {
if (!Files.isDirectory(directory)) {
return;
}
if (Files.exists(directory.resolve("plugin/plugin.xml"))) {
if (showExceptions) {
loadPluginsFromEclipseProject(directory);
} else {
loadPluginsFromEclipseProjectNoExceptions(directory);
}
}
loadAllPluginsFromEclipseWorkspace(directory, showExceptions);
for (Path workspace : PathUtils.list(directory)) {
if (Files.isDirectory(workspace)) {
loadAllPluginsFromEclipseWorkspace(workspace, showExceptions);
}
}
}
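// Two-stage load of an installed bundle jar: stage 1 loads and initializes the plugins (rolling
// back and deleting the jar on failure), stage 2 notifies the plugin manager and uninstalls the
// bundle again if any notification fails.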
public PluginBundle loadPlugin(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, Path target, SPluginBundle sPluginBundle, SPluginBundleVersion pluginBundleVersion, List<SPluginInformation> plugins,
ClassLoader parentClassLoader) throws Exception {
PluginBundle pluginBundle = null;
// Stage 1, load all plugins from the JAR file and initialize them
try {
pluginBundle = loadPluginsFromJar(pluginBundleVersionIdentifier, target, sPluginBundle, pluginBundleVersion, parentClassLoader);
if (plugins.isEmpty()) {
LOGGER.warn("No plugins given to install for bundle " + sPluginBundle.getName());
}
for (SPluginInformation sPluginInformation : plugins) {
if (sPluginInformation.isEnabled()) {
PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
if (pluginContext == null) {
LOGGER.info("No plugin context found for " + sPluginInformation.getIdentifier());
} else {
PluginConfiguration pluginConfiguration = PluginConfiguration.fromDefaults(pluginContext.getPlugin().getSystemSettingsDefinition());
pluginContext.initialize(pluginConfiguration);
}
}
}
} catch (Exception e) {
if (pluginBundle != null) {
pluginBundle.close();
}
pluginBundleVersionIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier);
pluginBundleIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier());
Files.deleteIfExists(target);
throw e;
}
// Stage 2, if all went well, notify the listeners of this plugin, if
// anything goes wrong in the notifications, the plugin bundle will be
// uninstalled
try {
long pluginBundleVersionId = pluginManager.pluginBundleInstalled(pluginBundle);
for (SPluginInformation sPluginInformation : plugins) {
if (sPluginInformation.isEnabled()) {
PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
if (pluginContext != null) {
pluginManager.pluginInstalled(pluginBundleVersionId, pluginContext, sPluginInformation);
}
}
}
return pluginBundle;
} catch (Exception e) {
uninstall(pluginBundleVersionIdentifier);
LOGGER.error("", e);
throw e;
}
}
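// Closes the bundle, removes it from the lookup maps, deletes its jar from the plugins directory
// and notifies the plugin manager about the uninstalled plugins and bundle.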
public void uninstall(PluginBundleVersionIdentifier pluginBundleVersionIdentifier) {
PluginBundle pluginBundle = pluginBundleVersionIdentifierToPluginBundle.get(pluginBundleVersionIdentifier);
if (pluginBundle == null) {
return;
}
try {
pluginBundle.close();
pluginBundleVersionIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier);
pluginBundleIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier());
pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier());
for (PluginContext pluginContext : pluginBundle) {
pluginManager.removeImplementation(pluginContext);
}
Path target = pluginsDir.resolve(pluginBundleVersionIdentifier.getFileName());
Files.delete(target);
for (PluginContext pluginContext : pluginBundle) {
pluginManager.pluginUninstalled(pluginContext);
}
pluginManager.pluginBundleUninstalled(pluginBundle);
} catch (IOException e) {
LOGGER.error("", e);
}
}
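// Replaces an installed bundle with a new version: closes and unregisters the old bundle, copies
// the new jar into place, rebuilds the dependency class loaders from the new pom and runs the
// same two-stage load/notify sequence as loadPlugin.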
public PluginBundle update(PluginBundleVersionIdentifier pluginBundleVersionIdentifier, SPluginBundle sPluginBundle, SPluginBundleVersion pluginBundleVersion, Path jarFile, Path pomFile, List<SPluginInformation> plugins)
throws Exception {
PluginBundle existingPluginBundle = pluginBundleIdentifierToPluginBundle.get(pluginBundleVersionIdentifier.getPluginBundleIdentifier());
if (existingPluginBundle == null) {
throw new UserException("No previous version of plugin bundle " + pluginBundleVersionIdentifier.getPluginBundleIdentifier() + " found");
}
try {
existingPluginBundle.close();
if (pluginBundleIdentifierToPluginBundle.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier()) == null) {
LOGGER.warn("Previous version of " + pluginBundleVersionIdentifier.getPluginBundleIdentifier() + " not found");
}
PluginBundleVersionIdentifier currentVersion = pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier.get(pluginBundleVersionIdentifier.getPluginBundleIdentifier());
if (pluginBundleIdentifierToCurrentPluginBundleVersionIdentifier.remove(pluginBundleVersionIdentifier.getPluginBundleIdentifier()) == null) {
LOGGER.warn("Previous version of " + pluginBundleVersionIdentifier.getPluginBundleIdentifier() + " not found");
}
if (pluginBundleVersionIdentifierToPluginBundle.remove(currentVersion) == null) {
LOGGER.warn("Previous version (" + currentVersion + ") of " + pluginBundleVersionIdentifier.getPluginBundleIdentifier() + " not found");
}
for (PluginContext pluginContext : existingPluginBundle) {
pluginManager.removeImplementation(pluginContext);
}
// TODO in case the update fails (new plugin does not load successfully), we need to be able to replace the removed file... So we should not remove it here but rename it and then remove it later on
if (existingPluginBundle.getPluginBundle().getInstalledVersion().getType() == SPluginBundleType.MAVEN) {
Path target = pluginsDir.resolve(currentVersion.getFileName());
Files.delete(target);
}
// for (PluginContext pluginContext : existingPluginBundle) {
// pluginChangeListener.pluginUninstalled(pluginContext);
} catch (IOException e) {
LOGGER.error("", e);
}
Path target = pluginsDir.resolve(pluginBundleVersionIdentifier.getFileName());
if (Files.exists(target)) {
throw new PluginException("This plugin has already been installed " + target.getFileName().toString());
}
Files.copy(jarFile, target);
MavenXpp3Reader mavenreader = new MavenXpp3Reader();
Model model = null;
try (FileReader fileReader = new FileReader(pomFile.toFile())) {
model = mavenreader.read(fileReader);
}
DelegatingClassLoader delegatingClassLoader = new DelegatingClassLoader(getClass().getClassLoader());
for (org.apache.maven.model.Dependency dependency : model.getDependencies()) {
if (dependency.getGroupId().equals("org.opensourcebim") && (dependency.getArtifactId().equals("shared") || dependency.getArtifactId().equals("pluginbase"))) {
// TODO Skip, we should also check the version though
} else {
PluginBundleIdentifier pluginBundleIdentifier = new PluginBundleIdentifier(dependency.getGroupId(), dependency.getArtifactId());
if (pluginBundleIdentifierToPluginBundle.containsKey(pluginBundleIdentifier)) {
// if (false) {
// VersionRange versionRange =
// VersionRange.createFromVersion(dependency.getVersion());
// String version =
// pluginBundleIdentifierToPluginBundle.get(pluginBundleIdentifier).getPluginBundleVersion().getVersion();
// ArtifactVersion artifactVersion = new
// DefaultArtifactVersion(version);
// if (versionRange.containsVersion(artifactVersion)) {
// } else {
// throw new Exception("Required dependency " +
// pluginBundleIdentifier + " is installed, but it's version
// (" + version + ") does not comply to the required version
// (" + dependency.getVersion() + ")");
// } else {
LOGGER.info("Skipping strict dependency checking for dependency " + dependency.getArtifactId());
} else {
if (dependency.getGroupId().equals("org.opensourcebim") && (dependency.getArtifactId().equals("shared") || dependency.getArtifactId().equals("pluginbase"))) {
throw new Exception("Required dependency " + pluginBundleIdentifier + " is not installed");
} else {
MavenPluginLocation mavenPluginLocation = mavenPluginRepository.getPluginLocation(model.getRepositories().get(0).getUrl(), dependency.getGroupId(), dependency.getArtifactId());
try {
Path depJarFile = mavenPluginLocation.getVersionJar(dependency.getVersion());
FileJarClassLoader jarClassLoader = new FileJarClassLoader(pluginManager, delegatingClassLoader, depJarFile);
jarClassLoaders.add(jarClassLoader);
delegatingClassLoader.add(jarClassLoader);
} catch (Exception e) {
// The dependency jar could not be resolved; continue loading the bundle without it
}
}
}
}
}
PluginBundle pluginBundle = null;
// Stage 1, load all plugins from the JAR file and initialize them
try {
pluginBundle = loadPluginsFromJar(pluginBundleVersionIdentifier, target, sPluginBundle, pluginBundleVersion, delegatingClassLoader);
for (SPluginInformation sPluginInformation : plugins) {
if (sPluginInformation.isEnabled()) {
PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
PluginContext previousContext = existingPluginBundle.getPluginContext(pluginContext.getIdentifier());
// TODO when there was no previous plugin (new plugin in bundle for example), we should use the default system settings of the particular plugin... not null
pluginContext.getPlugin().init(pluginContext, previousContext == null ? null : previousContext.getSystemSettings());
}
}
} catch (Exception e) {
Files.delete(target);
LOGGER.error("", e);
throw e;
}
// Stage 2, if all went well, notify the listeners of this plugin, if
// anything goes wrong in the notifications, the plugin bundle will be
// uninstalled
try {
long pluginBundleVersionId = pluginManager.pluginBundleUpdated(pluginBundle);
for (SPluginInformation sPluginInformation : plugins) {
if (sPluginInformation.isEnabled()) {
PluginContext pluginContext = pluginBundle.getPluginContext(sPluginInformation.getIdentifier());
pluginManager.pluginUpdated(pluginBundleVersionId, pluginContext, sPluginInformation);
}
}
return pluginBundle;
} catch (Exception e) {
uninstall(pluginBundleVersionIdentifier);
LOGGER.error("", e);
throw e;
}
}
public PluginBundle getPluginBundle(PluginBundleIdentifier pluginIdentifier) {
return pluginBundleIdentifierToPluginBundle.get(pluginIdentifier);
}
public Collection<PluginBundle> getPluginBundles() {
return pluginBundleVersionIdentifierToPluginBundle.values();
}
public void close() {
for (FileJarClassLoader fileJarClassLoader : jarClassLoaders) {
try {
fileJarClassLoader.close();
} catch (IOException e) {
LOGGER.error("", e);
}
}
}
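// Builds the version metadata from a Maven model; isLocalDev selects LOCAL_DEV instead of MAVEN
// as the bundle type.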
private SPluginBundleVersion createPluginBundleVersionFromMavenModel(Model model, boolean isLocalDev) {
SPluginBundleVersion sPluginBundleVersion = new SPluginBundleVersion();
sPluginBundleVersion.setType(isLocalDev ? SPluginBundleType.LOCAL_DEV : SPluginBundleType.MAVEN);
sPluginBundleVersion.setGroupId(model.getGroupId());
sPluginBundleVersion.setArtifactId(model.getArtifactId());
sPluginBundleVersion.setVersion(model.getVersion());
sPluginBundleVersion.setDescription(model.getDescription());
sPluginBundleVersion.setRepository("local");
sPluginBundleVersion.setMismatch(false); // TODO
sPluginBundleVersion.setOrganization(model.getOrganization().getName());
sPluginBundleVersion.setName(model.getName());
return sPluginBundleVersion;
}
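// Reads the pom.xml embedded under META-INF/maven in a plugin jar and builds the corresponding
// SPluginBundle metadata (organization and name).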
public SPluginBundle extractPluginBundleFromJar(Path jarFilePath) throws PluginException {
String filename = jarFilePath.getFileName().toString();
PluginBundleVersionIdentifier pluginBundleVersionIdentifier = PluginBundleVersionIdentifier.fromFileName(filename);
try (JarFile jarFile = new JarFile(jarFilePath.toFile())) {
String pomLocation = "META-INF/maven/" + pluginBundleVersionIdentifier.getPluginBundleIdentifier().getGroupId() + "/" + pluginBundleVersionIdentifier.getPluginBundleIdentifier().getArtifactId() + "/" + "pom.xml";
ZipEntry pomEntry = jarFile.getEntry(pomLocation);
if (pomEntry == null) {
throw new PluginException("No pom.xml found in JAR file " + jarFilePath.toString() + ", " + pomLocation);
}
MavenXpp3Reader mavenreader = new MavenXpp3Reader();
Model model = mavenreader.read(jarFile.getInputStream(pomEntry));
SPluginBundle sPluginBundle = new SPluginBundle();
sPluginBundle.setOrganization(model.getOrganization().getName());
sPluginBundle.setName(model.getName());
return sPluginBundle;
} catch (IOException e) {
throw new PluginException(e);
} catch (XmlPullParserException e) {
throw new PluginException(e);
}
}
}
|
package de.sormuras.bach.task;
import de.sormuras.bach.Bach;
import de.sormuras.bach.Task;
public class BuildTask implements Task {
@Override
public void execute(Bach bach) throws InterruptedException {
bach.execute(new SanityTask());
Thread.sleep(123); // Here be dragons!
bach.execute(new SummaryTask());
}
}
|
package org.jdesktop.swingx;
import java.awt.Dimension;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.Action;
import javax.swing.ActionMap;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import org.jdesktop.swingx.action.AbstractActionExt;
import org.jdesktop.swingx.action.ActionContainerFactory;
import org.jdesktop.swingx.action.BoundAction;
import org.jdesktop.swingx.plaf.LookAndFeelAddons;
/**
* Common base class of ui clients.
*
* Implements basic synchronization between PatternModel state and
* actions bound to it.
*
* PENDING: extending JXPanel is a convenience measure, should be extracted
* into a dedicated controller.
* PENDING: should be re-visited when swingx goes binding-aware
*
* @author Jeanette Winzenburg
*/
public abstract class AbstractPatternPanel extends JXPanel {
public static final String SEARCH_FIELD_LABEL = "searchFieldLabel";
public static final String SEARCH_FIELD_MNEMONIC = SEARCH_FIELD_LABEL + ".mnemonic";
public static final String SEARCH_TITLE = "searchTitle";
public static final String MATCH_ACTION_COMMAND = "match";
static {
// Hack to enforce loading of SwingX framework ResourceBundle
LookAndFeelAddons.getAddon();
}
protected JLabel searchLabel;
protected JTextField searchField;
protected JCheckBox matchCheck;
protected PatternModel patternModel;
private ActionContainerFactory actionFactory;
/**
* Callback action bound to MATCH_ACTION_COMMAND.
*/
public abstract void match();
/**
* convenience method for type-cast to AbstractActionExt.
*
* @param key Key to retrieve action
* @return Action bound to this key
* @see AbstractActionExt
*/
protected AbstractActionExt getAction(String key) {
// PENDING: outside clients might add different types?
return (AbstractActionExt) getActionMap().get(key);
}
/**
* creates and registers all actions for the default actionMap.
*/
protected void initActions() {
initPatternActions();
initExecutables();
}
/**
* creates and registers all "executable" actions.
* Meaning: the actions bound to a callback method on this.
*
* PENDING: not quite correctly factored? Name?
*
*/
protected void initExecutables() {
Action execute = createBoundAction(MATCH_ACTION_COMMAND, "match");
getActionMap().put(JXDialog.EXECUTE_ACTION_COMMAND,
execute);
getActionMap().put(MATCH_ACTION_COMMAND, execute);
refreshEmptyFromModel();
}
/**
* creates actions bound to PatternModel's state.
*/
protected void initPatternActions() {
ActionMap map = getActionMap();
map.put(PatternModel.MATCH_CASE_ACTION_COMMAND,
createModelStateAction(PatternModel.MATCH_CASE_ACTION_COMMAND,
"setCaseSensitive", getPatternModel().isCaseSensitive()));
map.put(PatternModel.MATCH_WRAP_ACTION_COMMAND,
createModelStateAction(PatternModel.MATCH_WRAP_ACTION_COMMAND,
"setWrapping", getPatternModel().isWrapping()));
map.put(PatternModel.MATCH_BACKWARDS_ACTION_COMMAND,
createModelStateAction(PatternModel.MATCH_BACKWARDS_ACTION_COMMAND,
"setBackwards", getPatternModel().isBackwards()));
map.put(PatternModel.MATCH_INCREMENTAL_ACTION_COMMAND,
createModelStateAction(PatternModel.MATCH_INCREMENTAL_ACTION_COMMAND,
"setIncremental", getPatternModel().isIncremental()));
}
/**
* tries to find a String value from the UIManager, prefixing the
* given key with the UIPREFIX.
*
* TODO: move to utilities?
*
* @param key <code>String</code> that specifies the value in UIManager
* @return the <code>String</code> as returned by the UIManager, or the key itself
*   if no value is bound to this key in UIManager
*/
protected String getUIString(String key) {
String text = UIManager.getString(PatternModel.SEARCH_PREFIX + key);
return text != null ? text : key;
}
/**
* creates, configures and returns a bound state action on a boolean property
* of the PatternModel.
*
* @param command the actionCommand - same as key to find localizable resources
* @param methodName the method on the PatternModel to call on item state changed
* @param initial the initial value of the property
* @return newly created action
*/
protected AbstractActionExt createModelStateAction(String command, String methodName, boolean initial) {
String actionName = getUIString(command);
BoundAction action = new BoundAction(actionName,
command);
action.setStateAction();
action.registerCallback(getPatternModel(), methodName);
action.setSelected(initial);
return action;
}
/**
* creates, configures and returns a bound action to the given method of
* this.
*
* @param actionCommand the actionCommand, same as key to find localizable resources
* @param methodName the method to call on actionPerformed.
* @return newly created action
*/
protected AbstractActionExt createBoundAction(String actionCommand, String methodName) {
String actionName = getUIString(actionCommand);
BoundAction action = new BoundAction(actionName,
actionCommand);
action.registerCallback(this, methodName);
return action;
}
/**
* called from listening to pattern property of PatternModel.
*
* This implementation calls match() if the model is in
* incremental state.
*
*/
protected void refreshPatternFromModel() {
if (getPatternModel().isIncremental()) {
match();
}
}
/**
* returns the patternModel. Lazily creates and registers a
* propertyChangeListener if null.
*
* @return current <code>PatternModel</code> if it exists or newly created
* one if it was not initialized before this call
*/
protected PatternModel getPatternModel() {
if (patternModel == null) {
patternModel = createPatternModel();
patternModel.addPropertyChangeListener(getPatternModelListener());
}
return patternModel;
}
/**
* factory method to create the PatternModel.
* Hook for subclasses to install custom models.
*
* @return newly created <code>PatternModel</code>
*/
protected PatternModel createPatternModel() {
PatternModel l = new PatternModel();
return l;
}
/**
* creates and returns a PropertyChangeListener to the PatternModel.
*
* NOTE: the patternModel is totally under control of this class - currently
* there's no need to keep a reference to the listener.
*
* @return a <code>PropertyChangeListener</code> created and bound to the
*   appropriate callback methods
*/
protected PropertyChangeListener getPatternModelListener() {
PropertyChangeListener l = new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
String property = evt.getPropertyName();
if ("pattern".equals(property)) {
refreshPatternFromModel();
} else if ("rawText".equals(property)) {
refreshDocumentFromModel();
} else if ("caseSensitive".equals(property)){
getAction(PatternModel.MATCH_CASE_ACTION_COMMAND).
setSelected(((Boolean) evt.getNewValue()).booleanValue());
} else if ("wrapping".equals(property)) {
getAction(PatternModel.MATCH_WRAP_ACTION_COMMAND).
setSelected(((Boolean) evt.getNewValue()).booleanValue());
} else if ("backwards".equals(property)) {
getAction(PatternModel.MATCH_BACKWARDS_ACTION_COMMAND).
setSelected(((Boolean) evt.getNewValue()).booleanValue());
} else if ("incremental".equals(property)) {
getAction(PatternModel.MATCH_INCREMENTAL_ACTION_COMMAND).
setSelected(((Boolean) evt.getNewValue()).booleanValue());
} else if ("empty".equals(property)) {
refreshEmptyFromModel();
}
}
};
return l;
}
/**
* called from listening to empty property of PatternModel.
*
* this implementation synchronizes the enabled state of the action bound to
* MATCH_ACTION_COMMAND to !empty.
*
*/
protected void refreshEmptyFromModel() {
boolean enabled = !getPatternModel().isEmpty();
getAction(MATCH_ACTION_COMMAND).setEnabled(enabled);
}
/**
* callback method from listening to searchField.
*
*/
protected void refreshModelFromDocument() {
getPatternModel().setRawText(searchField.getText());
}
/**
* callback method that updates the search field document from the model
*
*/
protected void refreshDocumentFromModel() {
if (searchField.getText().equals(getPatternModel().getRawText())) return;
SwingUtilities.invokeLater(new Runnable() {
public void run() {
searchField.setText(getPatternModel().getRawText());
}
});
}
/**
* Creates a <code>DocumentListener</code> for the search field that calls the
* corresponding callback method whenever the search field contents change
*
* @return newly created <code>DocumentListener</code>
*/
protected DocumentListener getSearchFieldListener() {
DocumentListener l = new DocumentListener() {
public void changedUpdate(DocumentEvent ev) {
// JW - really?? we've a PlainDoc without Attributes
refreshModelFromDocument();
}
public void insertUpdate(DocumentEvent ev) {
refreshModelFromDocument();
}
public void removeUpdate(DocumentEvent ev) {
refreshModelFromDocument();
}
};
return l;
}
/**
* configure and bind components to/from PatternModel
*/
protected void bind() {
bindSearchLabel();
searchField.getDocument().addDocumentListener(getSearchFieldListener());
getActionContainerFactory().configureButton(matchCheck,
(AbstractActionExt) getActionMap().get(PatternModel.MATCH_CASE_ACTION_COMMAND),
null);
}
/**
* Configures the searchLabel.
* Here: sets text and mnemonic properties from ui values,
* configures as label for searchField.
*/
protected void bindSearchLabel() {
searchLabel.setText(getUIString(SEARCH_FIELD_LABEL));
String mnemonic = getUIString(SEARCH_FIELD_MNEMONIC);
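// getUIString returns the key object itself when nothing is bound in the UIManager,
// so this reference comparison is how a missing resource is detected.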
if (mnemonic != SEARCH_FIELD_MNEMONIC) {
searchLabel.setDisplayedMnemonic(mnemonic.charAt(0));
}
searchLabel.setLabelFor(searchField);
}
/**
* @return current <code>ActionContainerFactory</code>.
* Will lazily create a new factory if it does not exist
*/
protected ActionContainerFactory getActionContainerFactory() {
if (actionFactory == null) {
actionFactory = new ActionContainerFactory(null);
}
return actionFactory;
}
/**
* Initialize all the incorporated components and models
*/
protected void initComponents() {
searchLabel = new JLabel();
searchField = new JTextField(getSearchFieldWidth()) {
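// Cap the maximum height at the preferred height so the text field
// does not stretch vertically in box-type layouts.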
public Dimension getMaximumSize() {
Dimension superMax = super.getMaximumSize();
superMax.height = getPreferredSize().height;
return superMax;
}
};
matchCheck = new JCheckBox();
}
/**
* @return width in characters of the search field
*/
protected int getSearchFieldWidth() {
return 15;
}
}
|
public class Test {
public static void main(String [] args) {
System.out.println("Hello, world");
System.out.println("vim is better...always");
}
}
|
package com.openhackathon.guacamole;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Connect2OpenHackathon {
private Logger logger = LoggerFactory.getLogger(Connect2OpenHackathon.class);
private URL url = null ;
private BufferedReader in = null;
private String urlString = null ;
public Connect2OpenHackathon(String urlString) throws Exception {
this.urlString = urlString;
}
/* check user with cookies */
public String getGuacamoleJSONString(String connectionName,String tokenString) {
String result = "" ;
HttpURLConnection conn = null ;
try {
url = new URL(urlString+"?id="+connectionName);
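// Note: HttpURLConnection.setFollowRedirects(boolean) is static and disables redirects
// for every HttpURLConnection in this JVM, not just this request.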
HttpURLConnection.setFollowRedirects(false);
conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
conn.setUseCaches(false);
conn.setRequestProperty("token", tokenString);
logger.info("======================Two request-parameters,connectionName:" + connectionName + ", token:" +tokenString);
logger.debug("======================send http-request to open-hackathon");
conn.connect();
int status = conn.getResponseCode();
if (status != 200) {
logger.error("OpenHackathon http reponse code is :" + conn.getResponseCode());
logger.debug("user may have not login , please do it before your request !!!");
return null ;
}
in = new BufferedReader(new InputStreamReader(conn.getInputStream()));
String line;
while ((line = in.readLine()) != null) {
result += line;
}
} catch (Exception e) {
logger.error("Exception when connect with OSSLAB to check User Cookies BBB");
e.printStackTrace();
}
finally {
try {
if (in != null) {
in.close();
}
if (conn != null) {
conn.disconnect();
}
} catch (Exception e2) {
e2.printStackTrace();
}
}
return result;
}
}
|
package org.flymine.web.widget;
import org.intermine.objectstore.ObjectStore;
import org.intermine.pathquery.Constraints;
import org.intermine.pathquery.PathQuery;
import org.intermine.web.logic.bag.InterMineBag;
import org.intermine.web.logic.widget.WidgetURLQuery;
/**
* Builds a query to get all the genes (in bag) associated with specified go term.
* @author Julie Sullivan
*/
public class TiffinURLQuery implements WidgetURLQuery
{
private InterMineBag bag;
private String key;
private ObjectStore os;
private static final String DATASET = "Tiffin";
/**
* @param key which bar the user clicked on
* @param bag bag
* @param os object store
*/
public TiffinURLQuery(ObjectStore os, InterMineBag bag, String key) {
this.bag = bag;
this.key = key;
this.os = os;
}
/**
* {@inheritDoc}
*/
public PathQuery generatePathQuery() {
PathQuery q = new PathQuery(os.getModel());
String viewStrings = "Gene.secondaryIdentifier";
String tiffin = "Gene.upstreamIntergenicRegion.overlappingFeatures[TFBindingSite].motif.primaryIdentifier";
// String tiffin = "TFBindingSite.motif.primaryIdentifier";
q.setView(viewStrings);
// q.addView(tiffin);
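// Constraint A: genes must be in the bag; B: the binding-site motif matches the clicked key;
// C: the data set title is "Tiffin".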
q.addConstraint(bag.getType(), Constraints.in(bag.getName()));
q.addConstraint(tiffin, Constraints.eq(key));
q.addConstraint("TFBindingSite.dataSets.title", Constraints.eq(DATASET));
q.setConstraintLogic("A and B and C");
q.syncLogicExpression("and");
q.setOrderBy(tiffin);
return q;
}
}
|
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;
public class LusciousRipper extends AbstractHTMLRipper {
public LusciousRipper(URL url) throws IOException {
super(url);
}
@Override
public String getDomain() {
return "luscious.net";
}
@Override
public String getHost() {
return "luscious";
}
@Override
public Document getFirstPage() throws IOException {
// "url" is an instance field of the superclass
Document page = Http.url(url).get();
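// Follow the first thumbnail/item link on the album page to reach the page the rip starts from.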
URL firstUrl = new URL("https://luscious.net" + page.select("div > div.item.thumbnail.ic_container > a").first().attr("href"));
LOGGER.info("First page is " + "https://luscious.net" + page.select("div > div.album_cover_item > a").first().attr("href"));
return Http.url(firstUrl).get();
}
@Override
public List<String> getURLsFromPage(Document page) {
List<String> urls = new ArrayList<>();
Elements urlElements = page.select(".icon-download");
for (Element e : urlElements) {
urls.add(e.attr("href"));
}
// This is here for pages with mp4s instead of images
String video_image = "";
video_image = page.select("div > video > source").attr("src");
if (!video_image.equals("")) {
urls.add(video_image);
}
return urls;
}
@Override
public Document getNextPage(Document doc) throws IOException {
// Find next page
String nextPageUrl = "https://luscious.net" + doc.select("a.image_link[rel=next]").attr("href");
// The more_like_this is here so we don't try to download the page that comes after the end of an album
if (nextPageUrl == "https://luscious.net" ||
nextPageUrl.contains("more_like_this")) {
throw new IOException("No more pages");
}
return Http.url(nextPageUrl).get();
}
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern
.compile("^https?://luscious\\.net/albums/([-_.0-9a-zA-Z]+).*$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
}
throw new MalformedURLException("Expected luscious.net URL format: "
+ "luscious.net/albums/albumname - got " + url
+ " instead");
}
@Override
public void downloadURL(URL url, int index) {
addURLToDownload(url, getPrefix(index));
}
}
|
package mockit;
import java.util.*;
import org.junit.*;
import static org.junit.Assert.*;
public final class ExpectationsWithInvocationCountsTest
{
private final CodeUnderTest codeUnderTest = new CodeUnderTest();
static class CodeUnderTest
{
private final Collaborator dependency = new Collaborator();
void doSomething()
{
dependency.provideSomeService();
}
void doSomethingElse()
{
dependency.simpleOperation(1, "b", null);
}
}
static class Collaborator
{
Collaborator() {}
@SuppressWarnings({"UnusedDeclaration"})
Collaborator(int value) {}
void provideSomeService() {}
@SuppressWarnings({"UnusedDeclaration"})
final void simpleOperation(int a, String b, Date c) {}
}
@Test
public void expectOnce()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService();
}
};
codeUnderTest.doSomething();
}
@Test(expected = AssertionError.class)
public void expectOnceButReplayTwice()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService();
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomething();
fail("Should not get here");
}
@Test(expected = AssertionError.class)
public void expectOnceButReplayThreeTimes()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService();
}
};
codeUnderTest.doSomething();
try {
codeUnderTest.doSomething();
}
finally {
codeUnderTest.doSomething();
}
fail("Should not get here");
}
@Test
public void expectTwiceByRepeatingTheExpectation()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService();
mock.provideSomeService();
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomething();
}
@Test
public void expectTwiceByUsingInvocationCount()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeats(2);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomething();
codeUnderTest.doSomethingElse();
}
@Test(expected = AssertionError.class)
public void expectTwiceByUsingInvocationCountButReplayOnlyOnce()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeats(2);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomethingElse();
}
@Test
public void expectAtLeastOnceAndReplayTwice()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeatsAtLeast(1);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomething();
codeUnderTest.doSomethingElse();
}
@Test(expected = AssertionError.class)
public void expectAtLeastTwiceButReplayOnceWithSingleExpectation()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeatsAtLeast(2);
}
};
codeUnderTest.doSomething();
}
@Test(expected = AssertionError.class)
public void expectAtLeastTwiceButReplayOnceWithTwoConsecutiveExpectations()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeatsAtLeast(2);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomethingElse();
}
@Test
public void repeatsAtLeastOverwritingUpperLimit()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeatsAtMost(2); repeatsAtLeast(1);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomething();
codeUnderTest.doSomething();
}
@Test
public void expectAtMostTwiceAndReplayOnce()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeatsAtMost(2);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomethingElse();
}
@Test(expected = AssertionError.class)
public void expectAtMostOnceButReplayTwice()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeatsAtMost(1);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomething();
codeUnderTest.doSomethingElse();
}
@Test
public void repeatsAtMostOverwritingLowerLimit()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeatsAtLeast(2); repeatsAtMost(3);
}
};
codeUnderTest.doSomething();
}
@Test
public void expectSameMethodOnceOrTwiceThenOnceButReplayEachExpectationOnlyOnce(
final Collaborator mock)
{
new Expectations()
{
{
mock.simpleOperation(1, "", null); repeats(1, 2);
mock.simpleOperation(2, "", null);
}
};
mock.simpleOperation(1, "", null);
mock.simpleOperation(2, "", null);
}
@Test
public void expectTwoOrThreeTimes()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeats(2, 3);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomething();
codeUnderTest.doSomethingElse();
}
@Test
public void expectZeroOrMoreTimesAndReplayTwice()
{
new Expectations()
{
Collaborator mock;
{
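// repeats(0, -1): judging from the test name, a negative upper limit means "zero or more" invocations.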
mock.provideSomeService(); repeats(0, -1);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomething();
codeUnderTest.doSomething();
codeUnderTest.doSomethingElse();
}
@Test
public void expectZeroOrMoreTimesAndReplayNone()
{
new Expectations()
{
Collaborator mock;
{
mock.provideSomeService(); repeats(0, 0);
mock.simpleOperation(1, "b", null);
}
};
codeUnderTest.doSomethingElse();
}
}
|
package cpw.mods.fml.common.network;
import java.lang.reflect.Method;
import java.util.Set;
import java.util.logging.Level;
import net.minecraft.item.Item;
import com.google.common.base.Strings;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.FMLLog;
import cpw.mods.fml.common.ModContainer;
import cpw.mods.fml.common.discovery.ASMDataTable;
import cpw.mods.fml.common.discovery.ASMDataTable.ASMData;
import cpw.mods.fml.common.versioning.DefaultArtifactVersion;
import cpw.mods.fml.common.versioning.InvalidVersionSpecificationException;
import cpw.mods.fml.common.versioning.VersionRange;
import cpw.mods.fml.relauncher.Side;
public class NetworkModHandler
{
private static Object connectionHandlerDefaultValue;
private static Object packetHandlerDefaultValue;
private static Object clientHandlerDefaultValue;
private static Object serverHandlerDefaultValue;
private static Object tinyPacketHandlerDefaultValue;
private static int assignedIds = 1;
private int localId;
private int networkId;
private ModContainer container;
private NetworkMod mod;
private Method checkHandler;
private VersionRange acceptableRange;
private ITinyPacketHandler tinyPacketHandler;
public NetworkModHandler(ModContainer container, NetworkMod modAnnotation)
{
this.container = container;
this.mod = modAnnotation;
this.localId = assignedIds++;
this.networkId = this.localId;
// Skip over the map object because it has special network id meaning
if (Item.field_77744_bd.field_77779_bT == assignedIds)
{
assignedIds++;
}
}
public NetworkModHandler(ModContainer container, Class<?> networkModClass, ASMDataTable table)
{
this(container, networkModClass.getAnnotation(NetworkMod.class));
if (this.mod == null)
{
return;
}
Set<ASMData> versionCheckHandlers = table.getAnnotationsFor(container).get(NetworkMod.VersionCheckHandler.class.getName());
String versionCheckHandlerMethod = null;
for (ASMData vch : versionCheckHandlers)
{
if (vch.getClassName().equals(networkModClass.getName()))
{
versionCheckHandlerMethod = vch.getObjectName();
break;
}
}
if (versionCheckHandlerMethod != null)
{
try
{
Method checkHandlerMethod = networkModClass.getDeclaredMethod(versionCheckHandlerMethod, String.class);
if (checkHandlerMethod.isAnnotationPresent(NetworkMod.VersionCheckHandler.class))
{
this.checkHandler = checkHandlerMethod;
}
}
catch (Exception e)
{
FMLLog.log(Level.WARNING, e, "The declared version check handler method %s on network mod id %s is not accessible", versionCheckHandlerMethod, container.getModId());
}
}
configureNetworkMod(container);
}
protected void configureNetworkMod(ModContainer container)
{
if (this.checkHandler == null)
{
String versionBounds = mod.versionBounds();
if (!Strings.isNullOrEmpty(versionBounds))
{
try
{
this.acceptableRange = VersionRange.createFromVersionSpec(versionBounds);
}
catch (InvalidVersionSpecificationException e)
{
FMLLog.log(Level.WARNING, e, "Invalid bounded range %s specified for network mod id %s", versionBounds, container.getModId());
}
}
}
FMLLog.finest("Testing mod %s to verify it accepts its own version in a remote connection", container.getModId());
boolean acceptsSelf = acceptVersion(container.getVersion());
if (!acceptsSelf)
{
FMLLog.severe("The mod %s appears to reject its own version number (%s) in its version handling. This is likely a severe bug in the mod!", container.getModId(), container.getVersion());
}
else
{
FMLLog.finest("The mod %s accepts its own version (%s)", container.getModId(), container.getVersion());
}
tryCreatingPacketHandler(container, mod.packetHandler(), mod.channels(), null);
if (FMLCommonHandler.instance().getSide().isClient())
{
if (mod.clientPacketHandlerSpec() != getClientHandlerSpecDefaultValue())
{
tryCreatingPacketHandler(container, mod.clientPacketHandlerSpec().packetHandler(), mod.clientPacketHandlerSpec().channels(), Side.CLIENT);
}
}
if (mod.serverPacketHandlerSpec() != getServerHandlerSpecDefaultValue())
{
tryCreatingPacketHandler(container, mod.serverPacketHandlerSpec().packetHandler(), mod.serverPacketHandlerSpec().channels(), Side.SERVER);
}
if (mod.connectionHandler() != getConnectionHandlerDefaultValue())
{
IConnectionHandler instance;
try
{
instance = mod.connectionHandler().newInstance();
}
catch (Exception e)
{
FMLLog.log(Level.SEVERE, e, "Unable to create connection handler instance %s", mod.connectionHandler().getName());
throw new FMLNetworkException(e);
}
NetworkRegistry.instance().registerConnectionHandler(instance);
}
if (mod.tinyPacketHandler()!=getTinyPacketHandlerDefaultValue())
{
try
{
tinyPacketHandler = mod.tinyPacketHandler().newInstance();
}
catch (Exception e)
{
FMLLog.log(Level.SEVERE, e, "Unable to create tiny packet handler instance %s", mod.tinyPacketHandler().getName());
throw new FMLNetworkException(e);
}
}
}
/**
* @param container the mod container registering the handler
* @param clazz the packet handler class declared by the mod
* @param channels the channel names to register the handler on
* @param side the side the handler applies to, or null for both sides
*/
private void tryCreatingPacketHandler(ModContainer container, Class<? extends IPacketHandler> clazz, String[] channels, Side side)
{
if (side!=null && side.isClient() && ! FMLCommonHandler.instance().getSide().isClient())
{
return;
}
if (clazz!=getPacketHandlerDefaultValue())
{
if (channels.length==0)
{
FMLLog.log(Level.WARNING, "The mod id %s attempted to register a packet handler without specifying channels for it", container.getModId());
}
else
{
IPacketHandler instance;
try
{
instance = clazz.newInstance();
}
catch (Exception e)
{
FMLLog.log(Level.SEVERE, e, "Unable to create a packet handler instance %s for mod %s", clazz.getName(), container.getModId());
throw new FMLNetworkException(e);
}
for (String channel : channels)
{
NetworkRegistry.instance().registerChannel(instance, channel, side);
}
}
}
else if (channels.length > 0)
{
FMLLog.warning("The mod id %s attempted to register channels without specifying a packet handler", container.getModId());
}
}
/**
* @return the default {@link NetworkMod#connectionHandler()} annotation value
*/
private Object getConnectionHandlerDefaultValue()
{
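// Lazily look up and cache the annotation's default value via reflection;
// configureNetworkMod compares it by identity against the mod-supplied value.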
try {
if (connectionHandlerDefaultValue == null)
{
connectionHandlerDefaultValue = NetworkMod.class.getMethod("connectionHandler").getDefaultValue();
}
return connectionHandlerDefaultValue;
}
catch (NoSuchMethodException e)
{
throw new RuntimeException("Derp?", e);
}
}
/**
* @return the default {@link NetworkMod#packetHandler()} annotation value
*/
private Object getPacketHandlerDefaultValue()
{
try {
if (packetHandlerDefaultValue == null)
{
packetHandlerDefaultValue = NetworkMod.class.getMethod("packetHandler").getDefaultValue();
}
return packetHandlerDefaultValue;
}
catch (NoSuchMethodException e)
{
throw new RuntimeException("Derp?", e);
}
}
/**
* @return the default {@link NetworkMod#tinyPacketHandler()} annotation value
*/
private Object getTinyPacketHandlerDefaultValue()
{
try {
if (tinyPacketHandlerDefaultValue == null)
{
tinyPacketHandlerDefaultValue = NetworkMod.class.getMethod("tinyPacketHandler").getDefaultValue();
}
return tinyPacketHandlerDefaultValue;
}
catch (NoSuchMethodException e)
{
throw new RuntimeException("Derp?", e);
}
}
/**
* @return the {@link NetworkMod#clientPacketHandlerSpec()} default annotation value
*/
private Object getClientHandlerSpecDefaultValue()
{
try {
if (clientHandlerDefaultValue == null)
{
clientHandlerDefaultValue = NetworkMod.class.getMethod("clientPacketHandlerSpec").getDefaultValue();
}
return clientHandlerDefaultValue;
}
catch (NoSuchMethodException e)
{
throw new RuntimeException("Derp?", e);
}
}
/**
* @return the default {@link NetworkMod#serverPacketHandlerSpec()} annotation value
*/
private Object getServerHandlerSpecDefaultValue()
{
try {
if (serverHandlerDefaultValue == null)
{
serverHandlerDefaultValue = NetworkMod.class.getMethod("serverPacketHandlerSpec").getDefaultValue();
}
return serverHandlerDefaultValue;
}
catch (NoSuchMethodException e)
{
throw new RuntimeException("Derp?", e);
}
}
public boolean requiresClientSide()
{
return mod.clientSideRequired();
}
public boolean requiresServerSide()
{
return mod.serverSideRequired();
}
public boolean acceptVersion(String version)
{
if (checkHandler != null)
{
try
{
return (Boolean)checkHandler.invoke(container.getMod(), version);
}
catch (Exception e)
{
FMLLog.log(Level.WARNING, e, "There was a problem invoking the checkhandler method %s for network mod id %s", checkHandler.getName(), container.getModId());
return false;
}
}
if (acceptableRange!=null)
{
return acceptableRange.containsVersion(new DefaultArtifactVersion(version));
}
return container.getVersion().equals(version);
}
public int getLocalId()
{
return localId;
}
public int getNetworkId()
{
return networkId;
}
public ModContainer getContainer()
{
return container;
}
public NetworkMod getMod()
{
return mod;
}
public boolean isNetworkMod()
{
return mod != null;
}
public void setNetworkId(int value)
{
this.networkId = value;
}
public boolean hasTinyPacketHandler()
{
return tinyPacketHandler != null;
}
public ITinyPacketHandler getTinyPacketHandler()
{
return tinyPacketHandler;
}
}
|
package org.helioviewer.jhv.layers;
import java.awt.Component;
import java.awt.geom.Rectangle2D;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import org.helioviewer.jhv.JHVGlobals;
import org.helioviewer.jhv.base.math.IcoSphere;
import org.helioviewer.jhv.base.math.Mat4;
import org.helioviewer.jhv.base.math.Quat;
import org.helioviewer.jhv.base.math.Vec3;
import org.helioviewer.jhv.base.scale.GridScale;
import org.helioviewer.jhv.base.time.JHVDate;
import org.helioviewer.jhv.camera.Camera;
import org.helioviewer.jhv.camera.CameraHelper;
import org.helioviewer.jhv.display.Displayer;
import org.helioviewer.jhv.display.Viewport;
import org.helioviewer.jhv.gui.ImageViewerGui;
import org.helioviewer.jhv.io.APIRequest;
import org.helioviewer.jhv.io.LoadRemoteTask;
import org.helioviewer.jhv.opengl.GLImage;
import org.helioviewer.jhv.opengl.GLImage.DifferenceMode;
import org.helioviewer.jhv.opengl.GLSLShader;
import org.helioviewer.jhv.opengl.GLSLSolarShader;
import org.helioviewer.jhv.opengl.GLText;
import org.helioviewer.jhv.renderable.gui.AbstractRenderable;
import org.helioviewer.jhv.viewmodel.imagedata.ImageData;
import org.helioviewer.jhv.viewmodel.imagedata.ImageDataHandler;
import org.helioviewer.jhv.viewmodel.metadata.MetaData;
import org.helioviewer.jhv.viewmodel.view.View;
import org.jetbrains.annotations.NotNull;
import com.jogamp.common.nio.Buffers;
import com.jogamp.opengl.GL2;
import com.jogamp.opengl.util.awt.TextRenderer;
public class ImageLayer extends AbstractRenderable implements ImageDataHandler {
private int positionBufferID;
private int indexBufferID;
private int indexBufferSize;
private final GLImage glImage = new GLImage();
private final ImageLayerOptions optionsPanel;
private LoadRemoteTask worker;
private View view;
private static final String loading = "Loading...";
public static ImageLayer createImageLayer() {
ImageLayer imageLayer = new ImageLayer();
ImageViewerGui.getRenderableContainer().addBeforeRenderable(imageLayer);
return imageLayer;
}
public void load(APIRequest req) {
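// Only start a new download when the request differs from the one already backing this layer;
// cancel any in-flight worker first.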
if (!req.equals(getAPIRequest())) {
if (worker != null)
worker.cancel(true);
worker = new LoadRemoteTask(this, req, 0);
JHVGlobals.getExecutorService().execute(worker);
}
}
public void unload() {
if (view == null) // not changing view
ImageViewerGui.getRenderableContainer().removeRenderable(this);
else {
worker = null;
Displayer.display();
}
}
private ImageLayer() {
optionsPanel = new ImageLayerOptions(this);
}
@Override
public void init(@NotNull GL2 gl) {
glImage.init(gl);
FloatBuffer positionBuffer = IcoSphere.IcoSphere.a;
IntBuffer indexBuffer = IcoSphere.IcoSphere.b;
positionBufferID = generate(gl);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, positionBufferID);
gl.glBufferData(GL2.GL_ARRAY_BUFFER, positionBuffer.capacity() * Buffers.SIZEOF_FLOAT, positionBuffer, GL2.GL_STATIC_DRAW);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, 0);
indexBufferID = generate(gl);
indexBufferSize = indexBuffer.capacity();
gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, indexBufferID);
gl.glBufferData(GL2.GL_ELEMENT_ARRAY_BUFFER, indexBufferSize * Buffers.SIZEOF_INT, indexBuffer, GL2.GL_STATIC_DRAW);
gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, 0);
}
@Override
public void setVisible(boolean isVisible) {
super.setVisible(isVisible);
if (Displayer.multiview) {
Layers.arrangeMultiView(true);
}
}
private float opacity = -1;
public void setView(View _view) {
if (view != null)
unsetView();
view = _view;
worker = null; // drop reference
setVisible(true); // enable optionsPanel
ImageViewerGui.getRenderableContainerPanel().setOptionsPanel(this);
view.setImageLayer(this);
view.setDataHandler(this);
Layers.addLayer(view);
ImageViewerGui.getRenderableContainer().fireListeners();
if (Displayer.multiview) {
Layers.arrangeMultiView(true);
} else if (opacity == -1) { // first time
if (Layers.isCor(view.getName()))
opacity = 1;
else {
int count = 0;
for (int i = 0; i < Layers.getNumLayers(); i++) {
if (!Layers.isCor(Layers.getLayer(i).getName()))
count++;
}
opacity = (float) (1. / (count == 0 ? 1 : count /* satisfy coverity */));
}
optionsPanel.setOpacity(opacity);
}
optionsPanel.setLUT(view.getDefaultLUT());
}
private void unsetView() {
if (view != null) {
Layers.removeLayer(view);
view.setDataHandler(null);
view.setImageLayer(null);
view.abolish();
view = null;
ImageViewerGui.getRenderableContainer().fireListeners();
}
imageData = prevImageData = baseImageData = null;
}
@Override
public void remove(@NotNull GL2 gl) {
if (worker != null) {
worker.cancel(true);
worker = null;
}
unsetView();
if (Displayer.multiview) {
Layers.arrangeMultiView(true);
}
dispose(gl);
}
@Override
public void prerender(@NotNull GL2 gl) {
if (imageData == null) {
return;
}
glImage.streamImage(gl, imageData, prevImageData, baseImageData);
}
@Override
public void render(@NotNull Camera camera, @NotNull Viewport vp, @NotNull GL2 gl) {
_render(camera, vp, gl, new double[] { 1., 1., 0., 1. }, GLSLSolarShader.ortho, GridScale.ortho);
}
@Override
public void renderMiniview(@NotNull Camera camera, @NotNull Viewport vp, @NotNull GL2 gl) {
_render(camera, vp, gl, new double[] { 0., 0., 0., 0. }, GLSLSolarShader.ortho, GridScale.ortho);
}
@Override
public void renderScale(@NotNull Camera camera, @NotNull Viewport vp, @NotNull GL2 gl, @NotNull GLSLSolarShader shader, @NotNull GridScale scale) {
_render(camera, vp, gl, new double[] { 1., 1., 1., 1. }, shader, scale);
}
private void _render(Camera camera, Viewport vp, GL2 gl, double[] depthrange, GLSLSolarShader shader, GridScale scale) {
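// depthrange holds two GL depth ranges: indices 2/3 are used for the sphere geometry drawn
// with isDisc enabled, indices 0/1 for the final 6-index quad.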
if (imageData == null) {
return;
}
if (!isVisible[vp.idx])
return;
shader.bind(gl);
{
glImage.applyFilters(gl, imageData, prevImageData, baseImageData, shader);
shader.setViewport(vp.x, vp.yGL, vp.width, vp.height);
shader.filter(gl);
camera.push(imageData.getViewpoint());
Mat4 vpmi = CameraHelper.getOrthoMatrixInverse(camera, vp);
if (Displayer.mode == Displayer.DisplayMode.ORTHO)
vpmi.translate(new Vec3(-camera.getCurrentTranslation().x, -camera.getCurrentTranslation().y, 0.));
else
vpmi.translate(new Vec3(-camera.getCurrentTranslation().x / vp.aspect, -camera.getCurrentTranslation().y, 0.));
Quat q = camera.getRotation();
shader.bindMatrix(gl, vpmi.getFloatArray());
shader.bindCameraDifferenceRotationQuat(gl, Quat.rotateWithConjugate(q, imageData.getMetaData().getCenterRotation()));
DifferenceMode diffMode = glImage.getDifferenceMode();
if (diffMode == DifferenceMode.BaseRotation) {
shader.bindDiffCameraDifferenceRotationQuat(gl, Quat.rotateWithConjugate(q, baseImageData.getMetaData().getCenterRotation()));
} else if (diffMode == DifferenceMode.RunningRotation) {
shader.bindDiffCameraDifferenceRotationQuat(gl, Quat.rotateWithConjugate(q, prevImageData.getMetaData().getCenterRotation()));
}
shader.bindAngles(gl, imageData.getMetaData().getViewpointL());
shader.setPolarRadii(gl, scale.getYstart(), scale.getYstop());
camera.pop();
enablePositionVBO(gl);
enableIndexVBO(gl);
{
gl.glVertexPointer(3, GL2.GL_FLOAT, 3 * Buffers.SIZEOF_FLOAT, 0);
if (shader == GLSLSolarShader.ortho) {
shader.bindIsDisc(gl, 1);
gl.glDepthRange(depthrange[2], depthrange[3]);
gl.glDrawElements(GL2.GL_TRIANGLES, indexBufferSize - 6, GL2.GL_UNSIGNED_INT, 0);
shader.bindIsDisc(gl, 0);
}
gl.glDepthRange(depthrange[0], depthrange[1]);
gl.glDrawElements(GL2.GL_TRIANGLES, 6, GL2.GL_UNSIGNED_INT, (indexBufferSize - 6) * Buffers.SIZEOF_INT);
gl.glDepthRange(0, 1);
}
disableIndexVBO(gl);
disablePositionVBO(gl);
gl.glColorMask(true, true, true, true);
}
GLSLShader.unbind(gl);
}
@Override
public void renderFullFloat(@NotNull Camera camera, @NotNull Viewport vp, @NotNull GL2 gl) {
if (imageData == null || worker != null) { // loading something
int delta = (int) (vp.height * 0.01);
TextRenderer renderer = GLText.getRenderer(GLText.TEXT_SIZE_LARGE);
Rectangle2D rect = renderer.getBounds(loading);
renderer.beginRendering(vp.width, vp.height, true);
renderer.draw(loading, (int) (vp.width - rect.getWidth() - delta), (int) (vp.height - rect.getHeight() - delta));
renderer.endRendering();
}
}
private static int generate(GL2 gl) {
int[] tmpId = new int[1];
gl.glGenBuffers(1, tmpId, 0);
return tmpId[0];
}
private void enableIndexVBO(GL2 gl) {
gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, indexBufferID);
}
private static void disableIndexVBO(GL2 gl) {
gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, 0);
}
private void enablePositionVBO(GL2 gl) {
gl.glEnableClientState(GL2.GL_VERTEX_ARRAY);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, positionBufferID);
}
private static void disablePositionVBO(GL2 gl) {
gl.glDisableClientState(GL2.GL_VERTEX_ARRAY);
gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, 0);
}
private void deletePositionVBO(GL2 gl) {
gl.glDeleteBuffers(1, new int[] { positionBufferID }, 0);
}
private void deleteIndexVBO(GL2 gl) {
gl.glDeleteBuffers(1, new int[] { indexBufferID }, 0);
}
@Override
public Component getOptionsPanel() {
return optionsPanel;
}
@NotNull
@Override
public String getName() {
return view == null || worker != null ? loading : view.getName();
}
@Override
public String getTimeString() {
if (imageData == null) {
return "N/A";
}
return imageData.getMetaData().getViewpoint().time.toString();
}
@Override
public boolean isDeletable() {
return true;
}
@Override
public void dispose(@NotNull GL2 gl) {
disablePositionVBO(gl);
disableIndexVBO(gl);
deletePositionVBO(gl);
deleteIndexVBO(gl);
glImage.dispose(gl);
}
public boolean isActiveImageLayer() {
return Layers.getActiveView() == view;
}
public void setActiveImageLayer() {
if (view != null)
Layers.setActiveView(view);
}
private ImageData imageData;
private ImageData prevImageData;
private ImageData baseImageData;
private void setImageData(ImageData newImageData) {
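// Keep frame 0 as the base image and the previously shown frame as the running-difference
// reference; both feed the difference modes used in _render.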
int frame = newImageData.getMetaData().getFrameNumber();
if (frame == 0) {
baseImageData = newImageData;
}
if (imageData == null || (prevImageData != null && prevImageData.getMetaData().getFrameNumber() - frame > 2)) {
prevImageData = newImageData;
} else if (frame != imageData.getMetaData().getFrameNumber()) {
prevImageData = imageData;
}
imageData = newImageData;
}
public ImageData getImageData() {
return imageData;
}
public MetaData getMetaData() {
return imageData == null ? view.getMetaData(new JHVDate(0)) : imageData.getMetaData();
}
@Override
public void handleData(@NotNull ImageData newImageData) {
setImageData(newImageData);
ImageViewerGui.getRenderableContainer().fireTimeUpdated(this);
Displayer.display();
}
@Override
public boolean isDownloading() {
return view != null && view.isDownloading();
}
void setOpacity(float opacity) {
optionsPanel.setOpacity(opacity);
}
GLImage getGLImage() {
return glImage;
}
View getView() {
return view;
}
public APIRequest getAPIRequest() {
return view == null ? null : view.getAPIRequest();
}
double getAutoBrightness() {
return imageData.getAutoBrightness();
}
}
|
package io.mangoo.utils;
import java.io.UnsupportedEncodingException;
import java.security.SecureRandom;
import java.util.Objects;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.commons.codec.binary.Base32;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.google.common.base.Charsets;
import io.mangoo.crypto.totp.TOTP;
import io.mangoo.enums.HmacShaAlgorithm;
import io.mangoo.enums.Required;
/**
*
* @author svenkubiak
*
*/
public class TotpUtils {
private static final Logger LOG = LogManager.getLogger(TotpUtils.class);
private static final Base32 base32 = new Base32();
private static final int DIGITS = 6;
private static final int MAX_CHARACTERS = 32;
private static final int THIRTY_SECONDS = 30;
private static final int ITERATIONS = 26;
private static final int BYTES_SECRET = 64;
private TotpUtils() {
}
/**
* Generates a 64-character random secret (Base32 alphabet), best used with HMAC_SHA512
*
* @return A 64-character random string based on SecureRandom
*/
public static Optional<String> createSecret() {
StringBuilder stringBuilder = new StringBuilder(BYTES_SECRET);
Random random = new SecureRandom();
for (int i = 0; i < BYTES_SECRET; i++) {
int value = random.nextInt(MAX_CHARACTERS);
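// Map 0-25 to 'A'-'Z' and 26-31 to '2'-'7', i.e. the RFC 4648 Base32 alphabet.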
if (value < ITERATIONS) {
stringBuilder.append((char) ('A' + value));
} else {
stringBuilder.append((char) ('2' + (value - ITERATIONS)));
}
}
return Optional.of(stringBuilder.toString());
}
/**
* Creates the current TOTP based on the given secret and HMAC algorithm
*
* @param secret The secret to use
* @param hmacShaAlgorithm The HMAC algorithm to use
* @return The current TOTP value, or an empty Optional if it could not be generated
*/
public static Optional<String> getTotp(String secret, HmacShaAlgorithm hmacShaAlgorithm) {
Objects.requireNonNull(secret, Required.SECRET.toString());
String value = null;
try {
TOTP builder = TOTP.key(secret.getBytes(Charsets.US_ASCII.toString()))
.timeStep(TimeUnit.SECONDS.toMillis(THIRTY_SECONDS))
.digits(DIGITS)
.hmacSha(hmacShaAlgorithm)
.build();
value = builder.value();
} catch (UnsupportedEncodingException e) {
LOG.error("Failed to create TOTP", e);
}
return Optional.ofNullable(value);
}
/**
* Verifies a given TOTP based on a given secret and HMAC algorithm
*
* @param secret The secret to use
* @param totp The TOTP to verify
* @param hmacShaAlgorithm The HMAC algorithm to use
*
* @return True if the TOTP is valid, false otherwise
*/
public static boolean verifiedTotp(String secret, String totp, HmacShaAlgorithm hmacShaAlgorithm) {
Objects.requireNonNull(secret, Required.SECRET.toString());
Objects.requireNonNull(totp, Required.TOTP.toString());
String value = null;
try {
TOTP builder = TOTP.key(secret.getBytes(Charsets.US_ASCII.toString()))
.timeStep(TimeUnit.SECONDS.toMillis(THIRTY_SECONDS))
.digits(DIGITS)
.hmacSha(hmacShaAlgorithm)
.build();
value = builder.value();
} catch (UnsupportedEncodingException e) {
LOG.error("Failed to verify TOTP", e);
}
return totp.equals(value);
}
/**
* Generates a QR code to share a secret with a user
*
* @param name The name of the account
* @param issuer The name of the issuer
* @param secret The secret to use
* @param hmacShaAlgorithm The HMAC algorithm to use
*
* @return A URL to the Google Charts API containing the QR code
*/
public static String getQRCode(String name, String issuer, String secret, HmacShaAlgorithm hmacShaAlgorithm) {
Objects.requireNonNull(name, Required.ACCOUNT_NAME.toString());
Objects.requireNonNull(secret, Required.SECRET.toString());
Objects.requireNonNull(issuer, Required.ISSUER.toString());
Objects.requireNonNull(hmacShaAlgorithm, Required.ALGORITHM.toString());
final StringBuilder buffer = new StringBuilder();
buffer.append("https://chart.googleapis.com/chart")
.append("?chs=200x200&cht=qr&chl=200x200&chld=M|0&cht=qr&chl=")
.append(getOtpauthURL(name, issuer, secret, hmacShaAlgorithm));
return buffer.toString();
}
/**
* Generates an otpauth URL to share a secret with a user
*
* @param name The name of the account
* @param issuer The name of the issuer
* @param secret The secret to use
* @param hmacShaAlgorithm The HMAC algorithm to use
*
* @return An otpauth url
*/
public static String getOtpauthURL(String name, String issuer, String secret, HmacShaAlgorithm hmacShaAlgorithm) {
final StringBuilder buffer = new StringBuilder();
buffer.append("otpauth://totp/")
.append(name)
.append("?secret=")
.append(base32.encodeAsString(secret.getBytes(Charsets.UTF_8)).replaceAll("=", ""))
.append("&algorithm=")
.append(hmacShaAlgorithm.getAlgorithm())
.append("&issuer=")
.append(issuer);
return buffer.toString();
}
}
|
package com.redhat.ceylon.compiler.js;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.redhat.ceylon.cmr.api.RepositoryManager;
import com.redhat.ceylon.cmr.ceylon.CeylonUtils;
import com.redhat.ceylon.cmr.ceylon.RepoUsingTool;
import com.redhat.ceylon.common.tool.Argument;
import com.redhat.ceylon.common.tool.Description;
import com.redhat.ceylon.common.tool.Option;
import com.redhat.ceylon.common.tool.OptionArgument;
import com.redhat.ceylon.common.tool.Summary;
import com.redhat.ceylon.compiler.Options;
import com.redhat.ceylon.compiler.loader.JsModuleManagerFactory;
import com.redhat.ceylon.compiler.typechecker.TypeChecker;
import com.redhat.ceylon.compiler.typechecker.TypeCheckerBuilder;
import com.redhat.ceylon.compiler.typechecker.context.PhasedUnit;
import com.redhat.ceylon.compiler.typechecker.io.VirtualFile;
@Summary("Compiles Ceylon source code to JavaScript and directly produces " +
"module and source archives in a module repository")
public class CeylonCompileJsTool extends RepoUsingTool {
private boolean profile = false;
private boolean optimize = false;
private boolean modulify = true;
private boolean indent = true;
private boolean comments = false;
private boolean skipSrc = false;
private String user = null;
private String pass = null;
private String out = "modules";
private String encoding;
private List<String> src = Collections.singletonList("source");
private List<String> files = Collections.emptyList();
public CeylonCompileJsTool() {
super(CeylonCompileJsMessages.RESOURCE_BUNDLE);
}
@OptionArgument(argumentName="encoding")
@Description("Sets the encoding used for reading source files (default: platform-specific)")
public void setEncoding(String encoding) {
this.encoding = encoding;
}
public String getEncoding(){
return encoding;
}
public boolean isProfile() {
return profile;
}
@Option
@Description("Time the compilation phases (results are printed to standard error)")
public void setProfile(boolean profile) {
this.profile = profile;
}
public boolean isOptimize() {
return optimize;
}
@Option
@Description("Create prototype-style JS code")
public void setOptimize(boolean optimize) {
this.optimize = optimize;
}
public boolean isModulify() {
return modulify;
}
@Option(longName="no-module")
@Description("Do **not** wrap generated code as CommonJS module")
public void setNoModulify(boolean nomodulify) {
this.modulify = !nomodulify;
}
public boolean isIndent() {
return indent;
}
@Option
@Description("Do **not** indent code")
public void setNoIndent(boolean noindent) {
this.indent = !noindent;
}
@Option
@Description("Equivalent to `--no-indent` `--no-comments`")
public void setCompact(boolean compact) {
this.setNoIndent(compact);
this.setNoComments(compact);
}
public boolean isComments() {
return comments;
}
@Option
@Description("Do **not** generate any comments")
public void setNoComments(boolean nocomments) {
this.comments = !nocomments;
}
public String getUser() {
return user;
}
@OptionArgument(argumentName="name")
@Description("Sets the user name for use with an authenticated output repository" +
"(no default).")
public void setUser(String user) {
this.user = user;
}
public String getPass() {
return pass;
}
@OptionArgument(argumentName="secret")
@Description("Sets the password for use with an authenticated output repository" +
"(no default).")
public void setPass(String pass) {
this.pass = pass;
}
public List<String> getRepos() {
return getRepositoryAsStrings();
}
public List<String> getSrc() {
return src;
}
@OptionArgument(longName="src", argumentName="dirs")
@Description("Path to source files. " +
"Can be specified multiple times; you can also specify several " +
"paths separated by your operating system's `PATH` separator." +
" (default: `./source`)")
public void setSrc(List<String> src) {
this.src = src;
}
@OptionArgument(longName="source", argumentName="dirs")
@Description("An alias for `--src`" +
" (default: `./source`)")
public void setSource(List<String> source) {
setSrc(source);
}
public String getOut() {
return out;
}
@Option
@Description("Do **not** generate .src archive - useful when doing joint compilation")
public void setSkipSrcArchive(boolean skip) {
skipSrc = skip;
}
public boolean isSkipSrcArchive() {
return skipSrc;
}
@OptionArgument(argumentName="url")
@Description("Specifies the output module repository (which must be publishable). " +
"(default: `./modules`)")
public void setOut(String out) {
this.out = out;
}
@Argument(argumentName="moduleOrFile", multiplicity="+")
public void setModule(List<String> moduleOrFile) {
this.files = moduleOrFile;
}
@Override
public void run() throws Exception {
final Options opts = new Options()
.cwd(cwd)
.repos(getRepos())
.sources(getSrc())
.systemRepo(systemRepo)
.cacheRepo(cacheRepo)
.outDir(getOut())
.user(getUser())
.pass(getPass())
.optimize(isOptimize())
.modulify(isModulify())
.indent(isIndent())
.comment(isComments())
.verbose(getVerbose())
.profile(isProfile())
.stdin(false)
.generateSourceArchive(!skipSrc)
.encoding(encoding)
.offline(offline)
.noDefaultRepos(noDefRepos);
run(opts, files);
}
private static void addFilesToCompilationSet(Options opts, File dir, List<String> onlyFiles) {
for (File e : dir.listFiles()) {
String n = e.getName().toLowerCase();
if (e.isFile() && (n.endsWith(".ceylon") || n.endsWith(".js"))) {
String path = normalizePath(e.getPath());
if (opts.isVerbose()) {
System.out.println("Adding to compilation set: " + path);
}
if (!onlyFiles.contains(path)) {
onlyFiles.add(path);
}
} else if (e.isDirectory()) {
addFilesToCompilationSet(opts, e, onlyFiles);
}
}
}
private static String normalizePath(String path) {
return path.replace('\\', '/');
}
public static void run(Options opts, List<String> files) throws IOException {
final TypeChecker typeChecker;
if (opts.hasVerboseFlag("cmr")) {
System.out.printf("Using repositories: %s%n", opts.getRepos());
}
final RepositoryManager repoman = CeylonUtils.repoManager()
.cwd(opts.getCwd())
.systemRepo(opts.getSystemRepo())
.cacheRepo(opts.getCacheRepo())
.noDefaultRepos(opts.getNoDefaultRepos())
.userRepos(opts.getRepos())
.outRepo(opts.getOutDir())
.offline(opts.getOffline())
.buildManager();
final List<String> onlyFiles = new ArrayList<String>();
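// Timestamps (nanoTime) for the optional profiling output printed at the end of this method.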
long t0, t1, t2, t3, t4;
final TypeCheckerBuilder tcb;
if (opts.isStdin()) {
VirtualFile src = new VirtualFile() {
@Override
public boolean isFolder() {
return false;
}
@Override
public String getName() {
return "SCRIPT.ceylon";
}
@Override
public String getPath() {
return getName();
}
@Override
public InputStream getInputStream() {
return System.in;
}
@Override
public List<VirtualFile> getChildren() {
return new ArrayList<VirtualFile>(0);
}
@Override
public int hashCode() {
return getPath().hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj instanceof VirtualFile) {
return ((VirtualFile) obj).getPath().equals(getPath());
}
else {
return super.equals(obj);
}
}
};
t0 = System.nanoTime();
tcb = new TypeCheckerBuilder()
.addSrcDirectory(src);
} else {
t0=System.nanoTime();
tcb = new TypeCheckerBuilder();
final List<File> roots = new ArrayList<File>(opts.getSrcDirs().size());
for (String _srcdir : opts.getSrcDirs()) {
roots.add(new File(_srcdir));
}
final Set<String> modfilters = new HashSet<String>();
for (String filedir : files) {
File f = new File(filedir);
boolean once=false;
if (f.exists() && f.isFile()) {
for (File root : roots) {
if (f.getAbsolutePath().startsWith(root.getAbsolutePath() + File.separatorChar)) {
if (opts.isVerbose()) {
System.out.printf("Adding %s to compilation set%n", filedir);
}
onlyFiles.add(normalizePath(filedir));
once=true;
break;
}
}
if (!once) {
throw new CompilerErrorException(String.format("%s is not in any source path: %n", f.getAbsolutePath()));
}
} else if ("default".equals(filedir)) {
//Default module: load every file in the source directories recursively,
//except any file that exists in directories and subdirectories where we find a module.ceylon file
//Typechecker takes care of all that if we add default to module filters
if (opts.isVerbose()) {
System.out.println("Adding default module filter");
}
modfilters.add("default");
f = null;
} else {
//Parse, may be a module name
String[] modpath = filedir.split("\\.");
f = null;
for (File root : roots) {
File _f = root;
for (String pe : modpath) {
_f = new File(_f, pe);
if (!(_f.exists() && _f.isDirectory())) {
System.err.printf("ceylonc-js: Could not find source files for module: %s%n", filedir);
_f=null;
break;
}
}
if (_f != null) {
f = _f;
}
}
if (f == null) {
throw new CompilerErrorException(String.format("ceylonc-js: file not found: %s%n", filedir));
} else {
if (opts.isVerbose()) {
System.out.println("Adding to module filters: " + filedir);
}
addFilesToCompilationSet(opts, f, onlyFiles);
modfilters.add(filedir);
f = null;
}
}
if (f != null) {
if ("module.ceylon".equals(f.getName().toLowerCase())) {
String _f = f.getParentFile().getAbsolutePath();
for (File root : roots) {
if (root.getAbsolutePath().startsWith(_f)) {
_f = _f.substring(root.getAbsolutePath().length()+1).replace(File.separator, ".");
modfilters.add(_f);
if (opts.isVerbose()) {
System.out.println("Adding to module filters: " + _f);
}
}
}
} else {
for (File root : roots) {
File middir = f.getParentFile();
while (middir != null && !middir.getAbsolutePath().equals(root.getAbsolutePath())) {
if (new File(middir, "module.ceylon").exists()) {
String _f = middir.getAbsolutePath().substring(root.getAbsolutePath().length()+1).replace(
File.separatorChar, '.');
modfilters.add(_f);
if (opts.isVerbose()) {
System.out.println("Adding to module filters: " + _f);
}
}
middir = middir.getParentFile();
}
}
}
} //f!= null
} //loop over files
for (File root : roots) {
tcb.addSrcDirectory(root);
}
if (!modfilters.isEmpty()) {
ArrayList<String> _modfilters = new ArrayList<String>(modfilters.size());
_modfilters.addAll(modfilters);
tcb.setModuleFilters(_modfilters);
}
tcb.statistics(opts.isProfile());
JsModuleManagerFactory.setVerbose(opts.isVerbose());
tcb.moduleManagerFactory(new JsModuleManagerFactory(opts.getEncoding()));
}
//getting the type checker does process all types in the source directory
tcb.verbose(opts.isVerbose()).setRepositoryManager(repoman);
tcb.usageWarnings(false);
typeChecker = tcb.getTypeChecker();
if (!onlyFiles.isEmpty()) {
for (PhasedUnit pu : typeChecker.getPhasedUnits().getPhasedUnits()) {
if (!onlyFiles.contains(normalizePath(pu.getUnitFile().getPath()))) {
typeChecker.getPhasedUnits().removePhasedUnitForRelativePath(pu.getPathRelativeToSrcDir());
}
}
}
t1=System.nanoTime();
typeChecker.process();
t2=System.nanoTime();
JsCompiler jsc = new JsCompiler(typeChecker, opts);
if (!onlyFiles.isEmpty()) { jsc.setFiles(onlyFiles); }
t3=System.nanoTime();
if (!jsc.generate()) {
int count = jsc.printErrors(System.out);
throw new CompilerErrorException(String.format("%d errors.", count));
}
t4=System.nanoTime();
if (opts.isProfile() || opts.hasVerboseFlag("benchmark")) {
System.err.println("PROFILING INFORMATION");
System.err.printf("TypeChecker creation: %6d nanos%n", t1-t0);
System.err.printf("TypeChecker processing: %6d nanos%n", t2-t1);
System.err.printf("JS compiler creation: %6d nanos%n", t3-t2);
System.err.printf("JS compilation: %6d nanos%n", t4-t3);
System.out.println("Compilation finished.");
}
}
}
|
package cpw.mods.fml.relauncher;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import net.minecraft.launchwrapper.ITweaker;
import net.minecraft.launchwrapper.Launch;
import net.minecraft.launchwrapper.LaunchClassLoader;
import org.apache.logging.log4j.Level;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.ObjectArrays;
import com.google.common.primitives.Ints;
import cpw.mods.fml.common.FMLLog;
import cpw.mods.fml.common.asm.transformers.ModAccessTransformer;
import cpw.mods.fml.common.launcher.FMLInjectionAndSortingTweaker;
import cpw.mods.fml.common.launcher.FMLTweaker;
import cpw.mods.fml.common.toposort.TopologicalSort;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin.DependsOn;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin.MCVersion;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin.Name;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin.SortingIndex;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin.TransformerExclusions;
public class CoreModManager {
private static final Attributes.Name COREMODCONTAINSFMLMOD = new Attributes.Name("FMLCorePluginContainsFMLMod");
private static final Attributes.Name MODTYPE = new Attributes.Name("ModType");
private static final Attributes.Name MODSIDE = new Attributes.Name("ModSide");
private static String[] rootPlugins = { "cpw.mods.fml.relauncher.FMLCorePlugin", "net.minecraftforge.classloading.FMLForgePlugin" };
private static List<String> loadedCoremods = Lists.newArrayList();
private static List<FMLPluginWrapper> loadPlugins;
private static boolean deobfuscatedEnvironment;
private static FMLTweaker tweaker;
private static File mcDir;
private static List<String> reparsedCoremods = Lists.newArrayList();
private static List<String> accessTransformers = Lists.newArrayList();
private static class FMLPluginWrapper implements ITweaker {
public final String name;
public final IFMLLoadingPlugin coreModInstance;
public final List<String> predepends;
public final File location;
public final int sortIndex;
public FMLPluginWrapper(String name, IFMLLoadingPlugin coreModInstance, File location, int sortIndex, String... predepends)
{
super();
this.name = name;
this.coreModInstance = coreModInstance;
this.location = location;
this.sortIndex = sortIndex;
this.predepends = Lists.newArrayList(predepends);
}
@Override
public String toString()
{
return String.format("%s {%s}", this.name, this.predepends);
}
@Override
public void acceptOptions(List<String> args, File gameDir, File assetsDir, String profile)
{
// NO OP
}
@Override
public void injectIntoClassLoader(LaunchClassLoader classLoader)
{
FMLRelaunchLog.fine("Injecting coremod %s {%s} class transformers", name, coreModInstance.getClass().getName());
if (coreModInstance.getASMTransformerClass() != null) for (String transformer : coreModInstance.getASMTransformerClass())
{
FMLRelaunchLog.finer("Registering transformer %s", transformer);
classLoader.registerTransformer(transformer);
}
FMLRelaunchLog.fine("Injection complete");
FMLRelaunchLog.fine("Running coremod plugin for %s {%s}", name, coreModInstance.getClass().getName());
Map<String, Object> data = new HashMap<String, Object>();
data.put("mcLocation", mcDir);
data.put("coremodList", loadPlugins);
data.put("runtimeDeobfuscationEnabled", !deobfuscatedEnvironment);
FMLRelaunchLog.fine("Running coremod plugin %s", name);
data.put("coremodLocation", location);
coreModInstance.injectData(data);
String setupClass = coreModInstance.getSetupClass();
if (setupClass != null)
{
try
{
IFMLCallHook call = (IFMLCallHook) Class.forName(setupClass, true, classLoader).newInstance();
Map<String, Object> callData = new HashMap<String, Object>();
callData.put("runtimeDeobfuscationEnabled", !deobfuscatedEnvironment);
callData.put("mcLocation", mcDir);
callData.put("classLoader", classLoader);
callData.put("coremodLocation", location);
callData.put("deobfuscationFileName", FMLInjectionData.debfuscationDataName());
call.injectData(callData);
call.call();
}
catch (Exception e)
{
throw new RuntimeException(e);
}
}
FMLRelaunchLog.fine("Coremod plugin class %s run successfully", coreModInstance.getClass().getSimpleName());
String modContainer = coreModInstance.getModContainerClass();
if (modContainer != null)
{
FMLInjectionData.containers.add(modContainer);
}
}
@Override
public String getLaunchTarget()
{
return "";
}
@Override
public String[] getLaunchArguments()
{
return new String[0];
}
}
public static void handleLaunch(File mcDir, LaunchClassLoader classLoader, FMLTweaker tweaker)
{
CoreModManager.mcDir = mcDir;
CoreModManager.tweaker = tweaker;
try
{
// Are we in a 'decompiled' environment?
byte[] bs = classLoader.getClassBytes("net.minecraft.world.World");
if (bs != null)
{
FMLRelaunchLog.info("Managed to load a deobfuscated Minecraft name- we are in a deobfuscated environment. Skipping runtime deobfuscation");
deobfuscatedEnvironment = true;
}
}
catch (IOException e1)
{
}
if (!deobfuscatedEnvironment)
{
FMLRelaunchLog.fine("Enabling runtime deobfuscation");
}
tweaker.injectCascadingTweak("cpw.mods.fml.common.launcher.FMLInjectionAndSortingTweaker");
try
{
classLoader.registerTransformer("cpw.mods.fml.common.asm.transformers.PatchingTransformer");
}
catch (Exception e)
{
FMLRelaunchLog.log(Level.ERROR, e, "The patch transformer failed to load! This is critical, loading cannot continue!");
throw Throwables.propagate(e);
}
loadPlugins = new ArrayList<FMLPluginWrapper>();
for (String rootPluginName : rootPlugins)
{
loadCoreMod(classLoader, rootPluginName, new File(FMLTweaker.getJarLocation()));
}
if (loadPlugins.isEmpty())
{
throw new RuntimeException("A fatal error has occured - no valid fml load plugin was found - this is a completely corrupt FML installation.");
}
FMLRelaunchLog.fine("All fundamental core mods are successfully located");
// Now that we have the root plugins loaded - lets see what else might
// be around
String commandLineCoremods = System.getProperty("fml.coreMods.load", "");
for (String coreModClassName : commandLineCoremods.split(","))
{
if (coreModClassName.isEmpty())
{
continue;
}
FMLRelaunchLog.info("Found a command line coremod : %s", coreModClassName);
loadCoreMod(classLoader, coreModClassName, null);
}
discoverCoreMods(mcDir, classLoader);
}
private static void discoverCoreMods(File mcDir, LaunchClassLoader classLoader)
{
ModListHelper.parseModList(mcDir);
FMLRelaunchLog.fine("Discovering coremods");
File coreMods = setupCoreModDir(mcDir);
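// Two filename filters: one selects candidate coremod jars, the other catches '*.jar.zip'
// files from bad downloads so the user can be warned about them.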
FilenameFilter ff = new FilenameFilter() {
@Override
public boolean accept(File dir, String name)
{
return name.endsWith(".jar");
}
};
FilenameFilter derpfilter = new FilenameFilter() {
@Override
public boolean accept(File dir, String name)
{
return name.endsWith(".jar.zip");
}
};
File[] derplist = coreMods.listFiles(derpfilter);
if (derplist != null && derplist.length > 0)
{
FMLRelaunchLog.severe("FML has detected several badly downloaded jar files, which have been named as zip files. You probably need to download them again, or they may not work properly");
for (File f : derplist)
{
FMLRelaunchLog.severe("Problem file : %s", f.getName());
}
}
File[] coreModList = coreMods.listFiles(ff);
File versionedModDir = new File(coreMods, FMLInjectionData.mccversion);
if (versionedModDir.isDirectory())
{
File[] versionedCoreMods = versionedModDir.listFiles(ff);
coreModList = ObjectArrays.concat(coreModList, versionedCoreMods, File.class);
}
coreModList = ObjectArrays.concat(coreModList, ModListHelper.additionalMods.values().toArray(new File[0]), File.class);
coreModList = FileListHelper.sortFileList(coreModList);
for (File coreMod : coreModList)
{
FMLRelaunchLog.fine("Examining for coremod candidacy %s", coreMod.getName());
JarFile jar = null;
Attributes mfAttributes;
try
{
jar = new JarFile(coreMod);
if (jar.getManifest() == null)
{
// Not a coremod and no access transformer list
continue;
}
ModAccessTransformer.addJar(jar);
mfAttributes = jar.getManifest().getMainAttributes();
}
catch (IOException ioe)
{
FMLRelaunchLog.log(Level.ERROR, ioe, "Unable to read the jar file %s - ignoring", coreMod.getName());
continue;
}
finally
{
if (jar != null)
{
try
{
jar.close();
}
catch (IOException e)
{
// Noise
}
}
}
String cascadedTweaker = mfAttributes.getValue("TweakClass");
if (cascadedTweaker != null)
{
FMLRelaunchLog.info("Loading tweaker %s from %s", cascadedTweaker, coreMod.getName());
Integer sortOrder = Ints.tryParse(Strings.nullToEmpty(mfAttributes.getValue("TweakOrder")));
sortOrder = (sortOrder == null ? Integer.valueOf(0) : sortOrder);
handleCascadingTweak(coreMod, jar, cascadedTweaker, classLoader, sortOrder);
loadedCoremods.add(coreMod.getName());
continue;
}
List<String> modTypes = mfAttributes.containsKey(MODTYPE) ? Arrays.asList(mfAttributes.getValue(MODTYPE).split(",")) : ImmutableList.of("FML");
if (!modTypes.contains("FML"))
{
FMLRelaunchLog.fine("Adding %s to the list of things to skip. It is not an FML mod, it has types %s", coreMod.getName(), modTypes);
loadedCoremods.add(coreMod.getName());
continue;
}
String modSide = mfAttributes.containsKey(MODSIDE) ? mfAttributes.getValue(MODSIDE) : "BOTH";
if (! ("BOTH".equals(modSide) || FMLLaunchHandler.side.name().equals(modSide)))
{
FMLRelaunchLog.fine("Mod %s has ModSide meta-inf value %s, and we're %s. It will be ignored", coreMod.getName(), modSide, FMLLaunchHandler.side.name());
loadedCoremods.add(coreMod.getName());
continue;
}
String fmlCorePlugin = mfAttributes.getValue("FMLCorePlugin");
if (fmlCorePlugin == null)
{
// Not a coremod
FMLRelaunchLog.fine("Not found coremod data in %s", coreMod.getName());
continue;
}
// Support things that are mod jars, but not FML mod jars
try
{
classLoader.addURL(coreMod.toURI().toURL());
if (!mfAttributes.containsKey(COREMODCONTAINSFMLMOD))
{
FMLRelaunchLog.finer("Adding %s to the list of known coremods, it will not be examined again", coreMod.getName());
loadedCoremods.add(coreMod.getName());
}
else
{
FMLRelaunchLog.finer("Found FMLCorePluginContainsFMLMod marker in %s, it will be examined later for regular @Mod instances",
coreMod.getName());
reparsedCoremods.add(coreMod.getName());
}
}
catch (MalformedURLException e)
{
FMLRelaunchLog.log(Level.ERROR, e, "Unable to convert file into a URL. weird");
continue;
}
loadCoreMod(classLoader, fmlCorePlugin, coreMod);
}
}
private static Method ADDURL;
private static void handleCascadingTweak(File coreMod, JarFile jar, String cascadedTweaker, LaunchClassLoader classLoader, Integer sortingOrder)
{
try
{
// Have to manually stuff the tweaker into the parent classloader
if (ADDURL == null)
{
ADDURL = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
ADDURL.setAccessible(true);
}
ADDURL.invoke(classLoader.getClass().getClassLoader(), coreMod.toURI().toURL());
classLoader.addURL(coreMod.toURI().toURL());
CoreModManager.tweaker.injectCascadingTweak(cascadedTweaker);
tweakSorting.put(cascadedTweaker,sortingOrder);
}
catch (Exception e)
{
FMLRelaunchLog.log(Level.INFO, e, "There was a problem trying to load the mod dir tweaker %s", coreMod.getAbsolutePath());
}
}
/**
* @param mcDir
* the minecraft home directory
* @return the coremod directory
*/
private static File setupCoreModDir(File mcDir)
{
File coreModDir = new File(mcDir, "mods");
try
{
coreModDir = coreModDir.getCanonicalFile();
}
catch (IOException e)
{
throw new RuntimeException(String.format("Unable to canonicalize the coremod dir at %s", mcDir.getName()), e);
}
if (!coreModDir.exists())
{
coreModDir.mkdir();
}
else if (coreModDir.exists() && !coreModDir.isDirectory())
{
throw new RuntimeException(String.format("Found a coremod file in %s that's not a directory", mcDir.getName()));
}
return coreModDir;
}
public static List<String> getLoadedCoremods()
{
return loadedCoremods;
}
public static List<String> getReparseableCoremods()
{
return reparsedCoremods;
}
private static FMLPluginWrapper loadCoreMod(LaunchClassLoader classLoader, String coreModClass, File location)
{
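// Default the coremod name to the plugin class's simple name; an @IFMLLoadingPlugin.Name
// annotation below may override it.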
String coreModName = coreModClass.substring(coreModClass.lastIndexOf('.') + 1);
try
{
FMLRelaunchLog.fine("Instantiating coremod class %s", coreModName);
classLoader.addTransformerExclusion(coreModClass);
Class<?> coreModClazz = Class.forName(coreModClass, true, classLoader);
Name coreModNameAnn = coreModClazz.getAnnotation(IFMLLoadingPlugin.Name.class);
if (coreModNameAnn != null && !Strings.isNullOrEmpty(coreModNameAnn.value()))
{
coreModName = coreModNameAnn.value();
FMLRelaunchLog.finer("coremod named %s is loading", coreModName);
}
MCVersion requiredMCVersion = coreModClazz.getAnnotation(IFMLLoadingPlugin.MCVersion.class);
if (!Arrays.asList(rootPlugins).contains(coreModClass) && (requiredMCVersion == null || Strings.isNullOrEmpty(requiredMCVersion.value())))
{
FMLRelaunchLog.log(Level.WARN, "The coremod %s does not have a MCVersion annotation, it may cause issues with this version of Minecraft",
coreModClass);
}
else if (requiredMCVersion != null && !FMLInjectionData.mccversion.equals(requiredMCVersion.value()))
{
FMLRelaunchLog.log(Level.ERROR, "The coremod %s is requesting minecraft version %s and minecraft is %s. It will be ignored.", coreModClass,
requiredMCVersion.value(), FMLInjectionData.mccversion);
return null;
}
else if (requiredMCVersion != null)
{
FMLRelaunchLog.log(Level.DEBUG, "The coremod %s requested minecraft version %s and minecraft is %s. It will be loaded.", coreModClass,
requiredMCVersion.value(), FMLInjectionData.mccversion);
}
TransformerExclusions trExclusions = coreModClazz.getAnnotation(IFMLLoadingPlugin.TransformerExclusions.class);
if (trExclusions != null)
{
for (String st : trExclusions.value())
{
classLoader.addTransformerExclusion(st);
}
}
DependsOn deplist = coreModClazz.getAnnotation(IFMLLoadingPlugin.DependsOn.class);
String[] dependencies = new String[0];
if (deplist != null)
{
dependencies = deplist.value();
}
SortingIndex index = coreModClazz.getAnnotation(IFMLLoadingPlugin.SortingIndex.class);
int sortIndex = index != null ? index.value() : 0;
IFMLLoadingPlugin plugin = (IFMLLoadingPlugin) coreModClazz.newInstance();
String accessTransformerClass = plugin.getAccessTransformerClass();
if (accessTransformerClass != null)
{
FMLRelaunchLog.log(Level.DEBUG, "Added access transformer class %s to enqueued access transformers", accessTransformerClass);
accessTransformers.add(accessTransformerClass);
}
FMLPluginWrapper wrap = new FMLPluginWrapper(coreModName, plugin, location, sortIndex, dependencies);
loadPlugins.add(wrap);
FMLRelaunchLog.fine("Enqueued coremod %s", coreModName);
return wrap;
}
catch (ClassNotFoundException cnfe)
{
if (!Lists.newArrayList(rootPlugins).contains(coreModClass))
FMLRelaunchLog.log(Level.ERROR, cnfe, "Coremod %s: Unable to class load the plugin %s", coreModName, coreModClass);
else
FMLRelaunchLog.fine("Skipping root plugin %s", coreModClass);
}
catch (ClassCastException cce)
{
FMLRelaunchLog.log(Level.ERROR, cce, "Coremod %s: The plugin %s is not an implementor of IFMLLoadingPlugin", coreModName, coreModClass);
}
catch (InstantiationException ie)
{
FMLRelaunchLog.log(Level.ERROR, ie, "Coremod %s: The plugin class %s was not instantiable", coreModName, coreModClass);
}
catch (IllegalAccessException iae)
{
FMLRelaunchLog.log(Level.ERROR, iae, "Coremod %s: The plugin class %s was not accessible", coreModName, coreModClass);
}
return null;
}
@SuppressWarnings("unused")
private static void sortCoreMods()
{
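// Build a directed graph with one node per plugin and an edge from each plugin to every
// coremod it pre-depends on, then topologically sort it to obtain a valid load order.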
TopologicalSort.DirectedGraph<FMLPluginWrapper> sortGraph = new TopologicalSort.DirectedGraph<FMLPluginWrapper>();
Map<String, FMLPluginWrapper> pluginMap = Maps.newHashMap();
for (FMLPluginWrapper plug : loadPlugins)
{
sortGraph.addNode(plug);
pluginMap.put(plug.name, plug);
}
for (FMLPluginWrapper plug : loadPlugins)
{
for (String dep : plug.predepends)
{
if (!pluginMap.containsKey(dep))
{
FMLRelaunchLog.log(Level.ERROR, "Missing coremod dependency - the coremod %s depends on coremod %s which isn't present.", plug.name, dep);
throw new RuntimeException();
}
sortGraph.addEdge(plug, pluginMap.get(dep));
}
}
try
{
loadPlugins = TopologicalSort.topologicalSort(sortGraph);
FMLRelaunchLog.fine("Sorted coremod list %s", loadPlugins);
}
catch (Exception e)
{
FMLLog.log(Level.ERROR, e, "There was a problem performing the coremod sort");
throw Throwables.propagate(e);
}
}
public static void injectTransformers(LaunchClassLoader classLoader)
{
Launch.blackboard.put("fml.deobfuscatedEnvironment", deobfuscatedEnvironment);
tweaker.injectCascadingTweak("cpw.mods.fml.common.launcher.FMLDeobfTweaker");
tweakSorting.put("cpw.mods.fml.common.launcher.FMLDeobfTweaker", Integer.valueOf(1000));
}
public static void injectCoreModTweaks(FMLInjectionAndSortingTweaker fmlInjectionAndSortingTweaker)
{
@SuppressWarnings("unchecked")
List<ITweaker> tweakers = (List<ITweaker>) Launch.blackboard.get("Tweaks");
// Add the sorting tweaker first - it'll appear twice in the list
tweakers.add(0, fmlInjectionAndSortingTweaker);
for (FMLPluginWrapper wrapper : loadPlugins)
{
tweakers.add(wrapper);
}
}
private static Map<String,Integer> tweakSorting = Maps.newHashMap();
public static void sortTweakList()
{
@SuppressWarnings("unchecked")
List<ITweaker> tweakers = (List<ITweaker>) Launch.blackboard.get("Tweaks");
// Basically a copy of Collections.sort pre 8u20, optimized as we know we're an array list.
ITweaker[] toSort = tweakers.toArray(new ITweaker[tweakers.size()]);
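// Ordering rules: the FMLInjectionAndSortingTweaker always sorts first (MIN_VALUE);
// FMLPluginWrappers use their sortIndex; other tweakers use any value recorded in
// tweakSorting; everything else defaults to 0.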
Arrays.sort(toSort, new Comparator<ITweaker>() {
@Override
public int compare(ITweaker o1, ITweaker o2)
{
Integer first = null;
Integer second = null;
if (o1 instanceof FMLInjectionAndSortingTweaker)
{
first = Integer.MIN_VALUE;
}
if (o2 instanceof FMLInjectionAndSortingTweaker)
{
second = Integer.MIN_VALUE;
}
if (o1 instanceof FMLPluginWrapper)
{
first = ((FMLPluginWrapper) o1).sortIndex;
}
else if (first == null)
{
first = tweakSorting.get(o1.getClass().getName());
}
if (o2 instanceof FMLPluginWrapper)
{
second = ((FMLPluginWrapper) o2).sortIndex;
}
else if (second == null)
{
second = tweakSorting.get(o2.getClass().getName());
}
if (first == null)
{
first = 0;
}
if (second == null)
{
second = 0;
}
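// Subtract as longs and saturate back to int so extreme sort indices cannot overflow the comparison.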
return Ints.saturatedCast((long)first - (long)second);
}
});
// Basically a copy of Collections.sort, optimized as we know we're an array list.
for (int j = 0; j < toSort.length; j++) {
tweakers.set(j, toSort[j]);
}
}
public static List<String> getAccessTransformers()
{
return accessTransformers;
}
}
|
package com.salesforce.storm.spout.sideline;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.salesforce.storm.spout.sideline.config.SidelineSpoutConfig;
import com.salesforce.storm.spout.sideline.filter.FilterChainStep;
import com.salesforce.storm.spout.sideline.filter.NegatingFilterChainStep;
import com.salesforce.storm.spout.sideline.consumer.ConsumerState;
import com.salesforce.storm.spout.sideline.persistence.SidelinePayload;
import com.salesforce.storm.spout.sideline.trigger.SidelineRequest;
import com.salesforce.storm.spout.sideline.trigger.SidelineRequestIdentifier;
import com.salesforce.storm.spout.sideline.trigger.SidelineType;
import com.salesforce.storm.spout.sideline.trigger.StartingTrigger;
import com.salesforce.storm.spout.sideline.trigger.StoppingTrigger;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Spout instance.
*/
public class SidelineSpout extends DynamicSpout {
private static final Logger logger = LoggerFactory.getLogger(SidelineSpout.class);
/**
* Starting Trigger
*
* This is an instance that is responsible for telling the sideline spout when to begin sidelining.
*/
private StartingTrigger startingTrigger;
/**
* Stopping Trigger
*
* This is an instance that is responsible for telling the sideline spout when to stop sidelining.
*/
private StoppingTrigger stoppingTrigger;
/**
* This is our main Virtual Spout instance which consumes from the configured namespace.
* TODO: Do we need access to this here? Could this be moved into the Coordinator?
*/
private VirtualSpout fireHoseSpout;
public SidelineSpout(Map config) {
super(config);
}
/**
* Set a starting trigger on the spout for starting a sideline request.
* @param startingTrigger An implementation of a starting trigger
*/
public void setStartingTrigger(StartingTrigger startingTrigger) {
this.startingTrigger = startingTrigger;
}
/**
* Set a trigger on the spout for stopping a sideline request.
* @param stoppingTrigger An implementation of a stopping trigger
*/
public void setStoppingTrigger(StoppingTrigger stoppingTrigger) {
this.stoppingTrigger = stoppingTrigger;
}
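// Illustrative usage sketch (not part of the original source; the trigger classes and the
// Storm TopologyBuilder wiring are assumptions for illustration only):
//   SidelineSpout spout = new SidelineSpout(config);
//   spout.setStartingTrigger(new MyStartingTrigger());   // hypothetical StartingTrigger implementation
//   spout.setStoppingTrigger(new MyStoppingTrigger());   // hypothetical StoppingTrigger implementation
//   builder.setSpout("sideline-spout", spout, 1);        // assuming DynamicSpout is a Storm spout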
/**
* Starts a sideline request.
* @param sidelineRequest A representation of the request that is being started
* @return Identifier of the sideline request that was started.
*/
public SidelineRequestIdentifier startSidelining(SidelineRequest sidelineRequest) {
logger.info("Received START sideline request");
// Store the offset that this request was made at, when the sideline stops we will begin processing at
// this offset
final ConsumerState startingState = fireHoseSpout.getCurrentState();
for (final ConsumerPartition consumerPartition : startingState.getConsumerPartitions()) {
// Store in request manager
getPersistenceAdapter().persistSidelineRequestState(
SidelineType.START,
sidelineRequest.id, // TODO: Now that this is in the request, we should change the persistence adapter
sidelineRequest,
consumerPartition.partition(),
startingState.getOffsetForNamespaceAndPartition(consumerPartition),
null
);
}
// Add our new filter steps
fireHoseSpout.getFilterChain().addStep(sidelineRequest.id, sidelineRequest.step);
// Update start count metric
getMetricsRecorder().count(getClass(), "start-sideline", 1L);
return sidelineRequest.id;
}
/**
* Stops a sideline request.
* @param sidelineRequest A representation of the request that is being stopped
*/
public void stopSidelining(SidelineRequest sidelineRequest) {
final SidelineRequestIdentifier id = fireHoseSpout.getFilterChain().findStep(sidelineRequest.step);
if (id == null) {
logger.error(
"Received STOP sideline request, but I don't actually have any filter chain steps for it! Make sure you check that your filter implements an equals() method. {} {}",
sidelineRequest.step,
fireHoseSpout.getFilterChain().getSteps()
);
return;
}
logger.info("Received STOP sideline request");
// Remove the steps associated with this sideline request
final FilterChainStep step = fireHoseSpout.getFilterChain().removeSteps(id);
// Create a negated version of the step we just pulled from the firehose
final FilterChainStep negatedStep = new NegatingFilterChainStep(step);
// This is the state that the VirtualSidelineSpout should end with
final ConsumerState endingState = fireHoseSpout.getCurrentState();
// We'll construct a consumer state from the various partition data stored for this sideline request
final ConsumerState.ConsumerStateBuilder startingStateBuilder = ConsumerState.builder();
// We are looping over the current partitions for the firehose; functionally this is the collection of partitions
// assigned to this particular sideline spout instance
for (final ConsumerPartition consumerPartition : endingState.getConsumerPartitions()) {
// This is the state that the VirtualSidelineSpout should start with
final SidelinePayload sidelinePayload = getPersistenceAdapter().retrieveSidelineRequest(id, consumerPartition.partition());
// Add this partition to the starting consumer state
startingStateBuilder.withPartition(consumerPartition, sidelinePayload.startingOffset);
// Persist the side line request state with the new negated version of the steps.
getPersistenceAdapter().persistSidelineRequestState(
SidelineType.STOP,
id,
new SidelineRequest(id, negatedStep), // Persist the negated steps, so they load properly on resume
consumerPartition.partition(),
sidelinePayload.startingOffset,
endingState.getOffsetForNamespaceAndPartition(consumerPartition)
);
}
// Build our starting state, this is a map of partition and offset
final ConsumerState startingState = startingStateBuilder.build();
openVirtualSpout(
id,
negatedStep,
startingState,
endingState
);
// Update stop count metric
getMetricsRecorder().count(getClass(), "stop-sideline", 1L);
}
/**
* Open a virtual spout (like when a sideline stop request is made)
* @param id Id of the sideline request
* @param step Filter chain step (already negated by the caller)
* @param startingState Starting consumer state
* @param endingState Ending consumer state
*/
private void openVirtualSpout(
final SidelineRequestIdentifier id,
final FilterChainStep step,
final ConsumerState startingState,
final ConsumerState endingState
) {
// Generate our virtualSpoutId using the payload id.
final VirtualSpoutIdentifier virtualSpoutId = generateVirtualSpoutId(id);
// This info is repeated in VirtualSidelineSpout.open(), not needed here.
logger.debug("Starting VirtualSidelineSpout {} with starting state {} and ending state", virtualSpoutId, startingState, endingState);
// Create spout instance.
final VirtualSpout spout = new VirtualSpout(
getSpoutConfig(),
getTopologyContext(),
getFactoryManager(),
getMetricsRecorder(),
startingState,
endingState
);
// TODO: Sort this out so that we can track the sideline request identifier inside of the virtual spout identifier
spout.setVirtualSpoutId(virtualSpoutId);
// Add the supplied filter chain step to the new virtual spout's filter chain
spout.getFilterChain().addStep(id, step);
// Now pass the new "resumed" spout over to the coordinator to open and run
addVirtualSpout(spout);
}
void onOpen(Map topologyConfig, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
// If we have a starting trigger (technically they're optional but if you don't have one why are you using this spout), set the spout proxy on it
if (startingTrigger != null) {
startingTrigger.setSidelineSpout(new SpoutTriggerProxy(this));
}
// If we have a stopping trigger (technically they're optional but if you don't have one why are you using this spout), set the spout proxy on it
if (stoppingTrigger != null) {
stoppingTrigger.setSidelineSpout(new SpoutTriggerProxy(this));
}
// Create the main spout for the namespace, we'll dub it the 'firehose'
fireHoseSpout = new VirtualSpout(
getSpoutConfig(),
getTopologyContext(),
getFactoryManager(),
getMetricsRecorder()
);
// TODO: Add callbacks for onOpen and onClose
/**
fireHoseSpout.onOpen((Consumer consumer, String identifier) -> {
})
**/
// This isn't a sideline request, so just use a fixed 'main' identifier for the firehose
fireHoseSpout.setVirtualSpoutId(generateVirtualSpoutId(new SidelineRequestIdentifier("main")));
// Our main firehose spout instance.
addVirtualSpout(fireHoseSpout);
final String topic = (String) getSpoutConfigItem(SidelineSpoutConfig.KAFKA_TOPIC);
final List<SidelineRequestIdentifier> existingRequestIds = getPersistenceAdapter().listSidelineRequests();
logger.info("Found {} existing sideline requests that need to be resumed", existingRequestIds.size());
for (SidelineRequestIdentifier id : existingRequestIds) {
final ConsumerState.ConsumerStateBuilder startingStateBuilder = ConsumerState.builder();
final ConsumerState.ConsumerStateBuilder endingStateStateBuilder = ConsumerState.builder();
SidelinePayload payload = null;
final Set<Integer> partitions = getPersistenceAdapter().listSidelineRequestPartitions(id);
for (final Integer partition : partitions) {
payload = getPersistenceAdapter().retrieveSidelineRequest(id, partition);
if (payload == null) {
continue;
}
startingStateBuilder.withPartition(topic, partition, payload.startingOffset);
// We only have an ending offset on STOP requests
if (payload.endingOffset != null) {
endingStateStateBuilder.withPartition(topic, partition, payload.endingOffset);
}
}
if (payload == null) {
logger.warn("Sideline request {} did not have any partitions persisted", id);
continue;
}
// Resuming a start request means we apply the previous filter chain to the fire hose
if (payload.type.equals(SidelineType.START)) {
logger.info("Resuming START sideline {} {}", payload.id, payload.request.step);
fireHoseSpout.getFilterChain().addStep(
payload.id,
payload.request.step
);
}
// Resuming a stopped request means we spin up a new sideline spout
if (payload.type.equals(SidelineType.STOP)) {
openVirtualSpout(
payload.id,
payload.request.step,
startingStateBuilder.build(),
endingStateStateBuilder.build()
);
}
}
// If we have a starting trigger (technically they're optional but if you don't have one why are you using this spout), open it
if (startingTrigger != null) {
startingTrigger.open(getSpoutConfig());
} else {
logger.warn("Sideline spout is configured without a starting trigger");
}
// If we have a stopping trigger (technically they're optional but if you don't have one why are you using this spout), open it
if (stoppingTrigger != null) {
stoppingTrigger.open(getSpoutConfig());
} else {
logger.warn("Sideline spout is configured without a stopping trigger");
}
}
void onClose() {
// If we have a starting trigger (technically they're optional but if you don't have one why are you using this spout), close it
if (startingTrigger != null) {
startingTrigger.close();
}
// If we have a stopping trigger (technically they're optional but if you don't have one why are you using this spout), close it
if (stoppingTrigger != null) {
stoppingTrigger.close();
}
}
void onActivate() {}
void onDeactivate() {}
/**
* Generates a VirtualSpoutId from a sideline request id.
*
* @param sidelineRequestIdentifier Sideline request to use for constructing the id
* @return Generated VirtualSpoutId.
*/
VirtualSpoutIdentifier generateVirtualSpoutId(final SidelineRequestIdentifier sidelineRequestIdentifier) {
Preconditions.checkArgument(
!Strings.isNullOrEmpty(sidelineRequestIdentifier.toString()),
"SidelineRequestIdentifier cannot be null or empty!"
);
// Also prefixed with our configured prefix
final String prefix = (String) getSpoutConfigItem(SidelineSpoutConfig.CONSUMER_ID_PREFIX);
// return it
return new SidelineVirtualSpoutIdentifier(prefix, sidelineRequestIdentifier);
}
}
|
package com.cradle.iitc_mobile.share;
import android.app.ActionBar;
import android.app.FragmentTransaction;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.NavUtils;
import android.support.v4.view.ViewPager;
import android.view.MenuItem;
import com.cradle.iitc_mobile.Log;
import com.cradle.iitc_mobile.R;
import java.io.File;
import java.util.ArrayList;
public class ShareActivity extends FragmentActivity implements ActionBar.TabListener {
private static final String EXTRA_TYPE = "share-type";
private static final int REQUEST_START_INTENT = 1;
private static final String TYPE_FILE = "file";
private static final String TYPE_PERMALINK = "permalink";
private static final String TYPE_PORTAL_LINK = "portal_link";
private static final String TYPE_STRING = "string";
public static Intent forFile(final Context context, final File file, final String type) {
return new Intent(context, ShareActivity.class)
.putExtra(EXTRA_TYPE, TYPE_FILE)
.putExtra("uri", Uri.fromFile(file))
.putExtra("type", type);
}
public static Intent forPosition(final Context context, final double lat, final double lng, final int zoom,
final String title, final boolean isPortal) {
return new Intent(context, ShareActivity.class)
.putExtra(EXTRA_TYPE, isPortal ? TYPE_PORTAL_LINK : TYPE_PERMALINK)
.putExtra("lat", lat)
.putExtra("lng", lng)
.putExtra("zoom", zoom)
.putExtra("title", title)
.putExtra("isPortal", isPortal);
}
public static Intent forString(final Context context, final String str) {
return new Intent(context, ShareActivity.class)
.putExtra(EXTRA_TYPE, TYPE_STRING)
.putExtra("shareString", str);
}
private IntentComparator mComparator;
private FragmentAdapter mFragmentAdapter;
private IntentGenerator mGenerator;
private SharedPreferences mSharedPrefs = null;
private ViewPager mViewPager;
private void addTab(final ArrayList<Intent> intents, final int label, final int icon) {
final IntentListFragment fragment = new IntentListFragment();
final Bundle args = new Bundle();
args.putParcelableArrayList("intents", intents);
args.putString("title", getString(label));
args.putInt("icon", icon);
fragment.setArguments(args);
mFragmentAdapter.add(fragment);
}
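// Builds an intel.ingress.com map URL for the given coordinates and zoom; for portals the
// same coordinates are also passed as the 'pll' (portal) parameter.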
private String getIntelUrl(final String ll, final int zoom, final boolean isPortal) {
final String scheme = mSharedPrefs.getBoolean("pref_force_https", true) ? "https" : "http";
String url = scheme + "://intel.ingress.com/intel?ll=" + ll + "&z=" + zoom;
if (isPortal) {
url += "&pll=" + ll;
}
return url;
}
private void setSelected(final int position) {
// Activity not fully loaded yet (may occur during tab creation)
if (mSharedPrefs == null) return;
mSharedPrefs
.edit()
.putInt("pref_share_selected_tab", position)
.apply();
}
@Override
protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
if (REQUEST_START_INTENT == requestCode) {
setResult(resultCode, data);
// parent activity can now clean up
finish();
return;
}
super.onActivityResult(requestCode, resultCode, data);
}
@Override
protected void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_share);
mComparator = new IntentComparator(this);
mGenerator = new IntentGenerator(this);
mFragmentAdapter = new FragmentAdapter(getSupportFragmentManager());
mSharedPrefs = PreferenceManager.getDefaultSharedPreferences(this);
final ActionBar actionBar = getActionBar();
actionBar.setDisplayHomeAsUpEnabled(true);
final Intent intent = getIntent();
final String type = intent.getStringExtra(EXTRA_TYPE);
// from portallinks/permalinks we build 3 intents (share / geo / vanilla-intel-link)
if (TYPE_PERMALINK.equals(type) || TYPE_PORTAL_LINK.equals(type)) {
final String title = intent.getStringExtra("title");
final String ll = intent.getDoubleExtra("lat", 0) + "," + intent.getDoubleExtra("lng", 0);
final int zoom = intent.getIntExtra("zoom", 0);
final String url = getIntelUrl(ll, zoom, TYPE_PORTAL_LINK.equals(type));
actionBar.setTitle(title);
addTab(mGenerator.getShareIntents(title, url),
R.string.tab_share,
R.drawable.ic_action_share);
addTab(mGenerator.getGeoIntents(title, ll, zoom),
R.string.tab_map,
R.drawable.ic_action_place);
addTab(mGenerator.getBrowserIntents(title, url),
R.string.tab_browser,
R.drawable.ic_action_web_site);
} else if (TYPE_STRING.equals(type)) {
final String title = getString(R.string.app_name);
final String shareString = intent.getStringExtra("shareString");
addTab(mGenerator.getShareIntents(title, shareString), R.string.tab_share, R.drawable.ic_action_share);
} else if (TYPE_FILE.equals(type)) {
final Uri uri = intent.getParcelableExtra("uri");
final String mime = intent.getStringExtra("type");
addTab(mGenerator.getShareIntents(uri, mime), R.string.tab_share, R.drawable.ic_action_share);
} else {
Log.w("Unknown sharing type: " + type);
setResult(RESULT_CANCELED);
finish();
return;
}
mViewPager = (ViewPager) findViewById(R.id.pager);
mViewPager.setAdapter(mFragmentAdapter);
mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
@Override
public void onPageSelected(final int position) {
if (actionBar.getNavigationMode() != ActionBar.NAVIGATION_MODE_STANDARD) {
actionBar.setSelectedNavigationItem(position);
}
setSelected(position);
}
});
for (int i = 0; i < mFragmentAdapter.getCount(); i++) {
final IntentListFragment fragment = (IntentListFragment) mFragmentAdapter.getItem(i);
actionBar.addTab(actionBar
.newTab()
.setText(fragment.getTitle())
.setIcon(fragment.getIcon())
.setTabListener(this));
}
// read the selected tab from prefs before enabling tab mode
// setNavigationMode calls our OnPageChangeListener, resetting the pref to 0
final int selected = mSharedPrefs.getInt("pref_share_selected_tab", 0);
if (mFragmentAdapter.getCount() > 1) {
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
}
if (selected < mFragmentAdapter.getCount()) {
mViewPager.setCurrentItem(selected);
if (actionBar.getNavigationMode() != ActionBar.NAVIGATION_MODE_STANDARD) {
actionBar.setSelectedNavigationItem(selected);
}
}
}
@Override
protected void onDestroy() {
super.onDestroy();
mComparator.save();
}
public IntentComparator getIntentComparator() {
return mComparator;
}
public void launch(final Intent intent) {
mComparator.trackIntentSelection(intent);
mGenerator.cleanup(intent);
// we should wait for the new intent to be finished so the calling activity (IITC_Mobile) can clean up
startActivityForResult(intent, REQUEST_START_INTENT);
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
NavUtils.navigateUpFromSameTask(this);
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onTabReselected(final ActionBar.Tab tab, final FragmentTransaction fragmentTransaction) {
}
@Override
public void onTabSelected(final ActionBar.Tab tab, final FragmentTransaction fragmentTransaction) {
final int position = tab.getPosition();
mViewPager.setCurrentItem(position);
setSelected(position);
}
@Override
public void onTabUnselected(final ActionBar.Tab tab, final FragmentTransaction fragmentTransaction) {
}
}
|
package org.dvb.event;
import java.awt.BDJHelper;
import java.util.Iterator;
import java.util.LinkedList;
import javax.tv.xlet.XletContext;
import org.davic.resources.ResourceClient;
import org.davic.resources.ResourceServer;
import org.davic.resources.ResourceStatusEvent;
import org.davic.resources.ResourceStatusListener;
import org.havi.ui.HScene;
import org.videolan.BDJAction;
import org.videolan.BDJXletContext;
import org.videolan.GUIManager;
import org.videolan.Logger;
public class EventManager implements ResourceServer {
private static final Object instanceLock = new Object();
public static EventManager getInstance() {
synchronized (instanceLock) {
if (instance == null)
instance = new EventManager();
return instance;
}
}
public static void shutdown() {
EventManager e;
synchronized (instanceLock) {
e = instance;
instance = null;
}
if (e != null) {
e.exclusiveUserEventListener.clear();
e.sharedUserEventListener.clear();
e.exclusiveAWTEventListener.clear();
e.resourceStatusEventListeners.clear();
}
}
public boolean addUserEventListener(UserEventListener listener, ResourceClient client, UserEventRepository userEvents)
throws IllegalArgumentException {
if (client == null)
throw new IllegalArgumentException();
BDJXletContext context = BDJXletContext.getCurrentContext();
synchronized (this) {
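// Exclusive reservation: first try to free any overlapping reservations held by other xlets;
// if that succeeds, record this reservation and announce the events as unavailable.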
if (!cleanupReservedEvents(userEvents))
return false;
exclusiveUserEventListener.add(new UserEventItem(context, listener, client, userEvents));
sendResourceStatusEvent(new UserEventUnavailableEvent(userEvents));
return true;
}
}
public void addUserEventListener(UserEventListener listener, UserEventRepository userEvents) {
if (listener == null || userEvents == null)
throw new NullPointerException();
BDJXletContext context = BDJXletContext.getCurrentContext();
synchronized (this) {
sharedUserEventListener.add(new UserEventItem(context, listener, null, userEvents));
}
}
public void removeUserEventListener(UserEventListener listener) {
BDJXletContext context = BDJXletContext.getCurrentContext();
synchronized (this) {
for (Iterator it = sharedUserEventListener.iterator(); it.hasNext(); ) {
UserEventItem item = (UserEventItem)it.next();
if ((item.context == context) && (item.listener == listener))
it.remove();
}
for (Iterator it = exclusiveUserEventListener.iterator(); it.hasNext(); ) {
UserEventItem item = (UserEventItem)it.next();
if ((item.context == context) && (item.listener == listener)) {
sendResourceStatusEvent(new UserEventAvailableEvent(item.userEvents));
it.remove();
}
}
}
}
public boolean addExclusiveAccessToAWTEvent(ResourceClient client, UserEventRepository userEvents)
throws IllegalArgumentException {
if (client == null)
throw new IllegalArgumentException();
BDJXletContext context = BDJXletContext.getCurrentContext();
synchronized (this) {
if (!cleanupReservedEvents(userEvents))
return false;
exclusiveAWTEventListener.add(new UserEventItem(context, null, client, userEvents));
sendResourceStatusEvent(new UserEventUnavailableEvent(userEvents));
return true;
}
}
public void removeExclusiveAccessToAWTEvent(ResourceClient client) {
BDJXletContext context = BDJXletContext.getCurrentContext();
synchronized (this) {
for (Iterator it = exclusiveAWTEventListener.iterator(); it.hasNext(); ) {
UserEventItem item = (UserEventItem)it.next();
if ((item.context == context) && (item.client == client)) {
sendResourceStatusEvent(new UserEventAvailableEvent(item.userEvents));
it.remove();
}
}
}
}
public void addResourceStatusEventListener(ResourceStatusListener listener) {
synchronized (this) {
resourceStatusEventListeners.add(listener);
}
}
public void removeResourceStatusEventListener(ResourceStatusListener listener) {
synchronized (this) {
resourceStatusEventListeners.remove(listener);
}
}
private void sendResourceStatusEvent(ResourceStatusEvent event) {
for (Iterator it = resourceStatusEventListeners.iterator(); it.hasNext(); )
((ResourceStatusListener)it.next()).statusChanged(event);
}
public void receiveKeyEvent(int type, int modifiers, int keyCode) {
receiveKeyEventN(type, modifiers, keyCode);
}
public boolean receiveKeyEventN(int type, int modifiers, int keyCode) {
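// Dispatch order: exclusive AWT reservations of the focused xlet first, then exclusive
// UserEvent listeners; otherwise the key is posted to AWT and also delivered to any
// shared listeners.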
UserEvent ue = new UserEvent(this, 1, type, keyCode, modifiers, System.currentTimeMillis());
HScene focusHScene = GUIManager.getInstance().getFocusHScene();
boolean result = false;
if (focusHScene != null) {
BDJXletContext context = focusHScene.getXletContext();
for (Iterator it = exclusiveAWTEventListener.iterator(); it.hasNext(); ) {
UserEventItem item = (UserEventItem)it.next();
if (item.context == null || item.context.isReleased()) {
logger.error("Removing exclusive AWT event listener for " + item.context);
it.remove();
continue;
}
if (item.context == context) {
if (item.userEvents.contains(ue)) {
result = BDJHelper.postKeyEvent(type, modifiers, keyCode);
logger.info("Key posted to exclusive AWT event listener, r=" + result);
return true;
}
}
}
} else {
logger.info("No focused HScene found !");
}
for (Iterator it = exclusiveUserEventListener.iterator(); it.hasNext(); ) {
UserEventItem item = (UserEventItem)it.next();
if (item.context == null || item.context.isReleased()) {
logger.error("Removing exclusive UserEvent listener for " + item.context);
it.remove();
continue;
}
if (item.userEvents.contains(ue)) {
item.context.putUserEvent(new UserEventAction(item, ue));
logger.info("Key posted to exclusive UE listener");
return true;
}
}
result = BDJHelper.postKeyEvent(type, modifiers, keyCode);
for (Iterator it = sharedUserEventListener.iterator(); it.hasNext(); ) {
UserEventItem item = (UserEventItem)it.next();
if (item.context == null || item.context.isReleased()) {
logger.error("Removing UserEvent listener for " + item.context);
it.remove();
continue;
}
if (item.userEvents.contains(ue)) {
item.context.putUserEvent(new UserEventAction(item, ue));
logger.info("Key posted to shared UE listener");
result = true;
}
}
return result;
}
private boolean cleanupReservedEvents(UserEventRepository userEvents) {
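// Ask every other xlet holding an exclusive reservation that overlaps the requested events
// to release it; fail if any client refuses or throws.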
BDJXletContext context = BDJXletContext.getCurrentContext();
for (Iterator it = exclusiveUserEventListener.iterator(); it.hasNext(); ) {
UserEventItem item = (UserEventItem)it.next();
if (item.context == context)
continue;
if (hasOverlap(userEvents, item.userEvents)) {
try {
if (!item.client.requestRelease(item.userEvents, null))
return false;
} catch (Exception e) {
logger.error("requestRelease() failed: " + e.getClass());
e.printStackTrace();
return false;
}
sendResourceStatusEvent(new UserEventAvailableEvent(item.userEvents));
it.remove();
}
}
for (Iterator it = exclusiveAWTEventListener.iterator(); it.hasNext(); ) {
UserEventItem item = (UserEventItem)it.next();
if (item.context == context)
continue;
if (hasOverlap(userEvents, item.userEvents)) {
try {
if (!item.client.requestRelease(item.userEvents, null))
return false;
} catch (Exception e) {
logger.error("requestRelease() failed: " + e.getClass());
e.printStackTrace();
return false;
}
sendResourceStatusEvent(new UserEventAvailableEvent(item.userEvents));
it.remove();
}
}
return true;
}
private boolean hasOverlap(UserEventRepository userEvents1, UserEventRepository userEvents2) {
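// Two repositories overlap when they both contain an event with the same family and the same code.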
UserEvent[] evts1 = userEvents1.getUserEvent();
UserEvent[] evts2 = userEvents2.getUserEvent();
for (int i = 0; i < evts1.length; i++) {
UserEvent evt1 = evts1[i];
for (int j = 0; j < evts2.length; j++) {
UserEvent evt2 = evts2[j];
if ((evt1.getFamily() == evt2.getFamily()) && (evt1.getCode() != evt2.getCode()))
return true;
}
}
return false;
}
private static class UserEventItem {
public UserEventItem(BDJXletContext context, UserEventListener listener,
ResourceClient client, UserEventRepository userEvents) {
this.context = context;
this.listener = listener;
this.client = client;
this.userEvents = userEvents.getNewInstance();
if (context == null) {
logger.error("Missing xlet context: " + Logger.dumpStack());
}
}
public final BDJXletContext context;
public final UserEventListener listener;
public final ResourceClient client;
public final UserEventRepository userEvents;
}
private static class UserEventAction extends BDJAction {
public UserEventAction(UserEventItem item, UserEvent event) {
this.listener = item.listener;
this.event = event;
}
protected void doAction() {
listener.userEventReceived(event);
}
private UserEventListener listener;
private UserEvent event;
}
private LinkedList exclusiveUserEventListener = new LinkedList();
private LinkedList sharedUserEventListener = new LinkedList();
private LinkedList exclusiveAWTEventListener = new LinkedList();
private LinkedList resourceStatusEventListeners = new LinkedList();
private static EventManager instance = null;
private static final Logger logger = Logger.getLogger(EventManager.class.getName());
}
|
package dr.app.beagle.evomodel.treelikelihood;
import beagle.*;
import dr.app.beagle.evomodel.parsers.TreeLikelihoodParser;
import dr.app.beagle.evomodel.sitemodel.BranchSiteModel;
import dr.app.beagle.evomodel.sitemodel.SiteRateModel;
import dr.app.beagle.evomodel.substmodel.EigenDecomposition;
import dr.evolution.alignment.PatternList;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evolution.util.TaxonList;
import dr.evomodel.branchratemodel.BranchRateModel;
import dr.evomodel.branchratemodel.DefaultBranchRateModel;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.Model;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
/**
* BeagleTreeLikelihood - implements a likelihood function for sequences on a tree.
*
* @author Andrew Rambaut
* @author Alexei Drummond
* @author Marc Suchard
* @version $Id$
*/
public class BeagleTreeLikelihood extends AbstractTreeLikelihood {
// This property is a comma-delimited list of resource numbers (0 == CPU) to
// allocate each BEAGLE instance to. If the list is shorter than the number of
// instances, it wraps around.
private static final String RESOURCE_ORDER_PROPERTY = "beagle.resource.order";
private static final String PREFERRED_FLAGS_PROPERTY = "beagle.preferred.flags";
private static final String REQUIRED_FLAGS_PROPERTY = "beagle.required.flags";
private static final String SCALING_PROPERTY = "beagle.scaling";
private static int instanceCount = 0;
private static List<Integer> resourceOrder = null;
private static final int RESCALE_FREQUENCY = 10000;
public BeagleTreeLikelihood(PatternList patternList,
TreeModel treeModel,
BranchSiteModel branchSiteModel,
SiteRateModel siteRateModel,
BranchRateModel branchRateModel,
boolean useAmbiguities,
PartialsRescalingScheme rescalingScheme
) {
super(TreeLikelihoodParser.TREE_LIKELIHOOD, patternList, treeModel);
try {
final Logger logger = Logger.getLogger("dr.evomodel");
logger.info("Using BEAGLE TreeLikelihood");
this.siteRateModel = siteRateModel;
addModel(this.siteRateModel);
this.branchSiteModel = branchSiteModel;
addModel(branchSiteModel);
if (branchRateModel != null) {
this.branchRateModel = branchRateModel;
logger.info(" Branch rate model used: " + branchRateModel.getModelName());
} else {
this.branchRateModel = new DefaultBranchRateModel();
}
addModel(this.branchRateModel);
this.categoryCount = this.siteRateModel.getCategoryCount();
this.tipCount = treeModel.getExternalNodeCount();
internalNodeCount = nodeCount - tipCount;
int compactPartialsCount = tipCount;
if (useAmbiguities) {
// if we are using ambiguities then tips get full partials rather than compact states
compactPartialsCount = 0;
}
// one partials buffer for each tip and two for each internal node (for store restore)
partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);
// two eigen buffers: for store and restore.
eigenBufferHelper = new BufferIndexHelper(1, 0);
// two matrices for each node less the root
matrixBufferHelper = new BufferIndexHelper(nodeCount, 0);
// one scaling buffer for each internal node plus an extra for the accumulation, then doubled for store/restore
scaleBufferHelper = new BufferIndexHelper(getScaleBufferCount(), 0);
// Attempt to get the resource order from the System Property
if (resourceOrder == null) {
resourceOrder = new ArrayList<Integer>();
String r = System.getProperty(RESOURCE_ORDER_PROPERTY);
if (r != null) {
String[] parts = r.split(",");
for (String part : parts) {
try {
int n = Integer.parseInt(part.trim());
resourceOrder.add(n);
} catch (NumberFormatException nfe) {
System.err.println("Invalid entry '"+part+"' in "+RESOURCE_ORDER_PROPERTY);
}
}
}
}
// first set the rescaling scheme to use from the parser
this.rescalingScheme = rescalingScheme;
// then allow it to be overriden from the command line
if (System.getProperty(SCALING_PROPERTY) != null) {
this.rescalingScheme = PartialsRescalingScheme.parseFromString(System.getProperty(SCALING_PROPERTY));
}
int[] resourceList = null;
long preferenceFlags = 0;
long requirementFlags = 0;
if (resourceOrder.size() > 0) {
// added the zero on the end so that a CPU is selected if requested resource fails
resourceList = new int[] { resourceOrder.get(instanceCount % resourceOrder.size()), 0 };
}
if (System.getProperty(PREFERRED_FLAGS_PROPERTY) != null) {
preferenceFlags = Long.valueOf(System.getProperty(PREFERRED_FLAGS_PROPERTY));
}
if (System.getProperty(REQUIRED_FLAGS_PROPERTY) != null) {
requirementFlags = Long.valueOf(System.getProperty(REQUIRED_FLAGS_PROPERTY));
}
if (this.rescalingScheme == PartialsRescalingScheme.DEFAULT) {
// the default is now to try and let BEAGLE do it
// preferenceFlags |= BeagleFlag.SCALING_AUTO.getMask(); // NOT WORKING YET
preferenceFlags |= BeagleFlag.SCALING_MANUAL.getMask();
} else {
preferenceFlags |= BeagleFlag.SCALING_MANUAL.getMask();
}
if (preferenceFlags == 0 && resourceList == null) { // else determine dataset characteristics
if ( stateCount == 4 && patternList.getPatternCount() < 1000) // TODO determine good cut-off
preferenceFlags |= BeagleFlag.PROCESSOR_CPU.getMask();
}
if (branchSiteModel.canReturnComplexDiagonalization()) {
requirementFlags |= BeagleFlag.EIGEN_COMPLEX.getMask();
}
instanceCount ++;
beagle = BeagleFactory.loadBeagleInstance(
tipCount,
partialBufferHelper.getBufferCount(),
compactPartialsCount,
stateCount,
patternCount,
eigenBufferHelper.getBufferCount(), // eigenBufferCount
matrixBufferHelper.getBufferCount(),
categoryCount,
scaleBufferHelper.getBufferCount(), // Always allocate; they may become necessary
resourceList,
preferenceFlags,
requirementFlags
);
InstanceDetails instanceDetails = beagle.getDetails();
ResourceDetails resourceDetails = null;
if (instanceDetails != null) {
resourceDetails = BeagleFactory.getResourceDetails(instanceDetails.getResourceNumber());
if (resourceDetails != null) {
StringBuilder sb = new StringBuilder(" Using BEAGLE resource ");
sb.append(resourceDetails.getNumber()).append(": ");
sb.append(resourceDetails.getName()).append("\n");
if (resourceDetails.getDescription() != null) {
String[] description = resourceDetails.getDescription().split("\\|");
for (String desc : description) {
if (desc.trim().length() > 0) {
sb.append(" ").append(desc.trim()).append("\n");
}
}
}
sb.append(" with instance flags: ").append(instanceDetails.toString());
logger.info(sb.toString());
} else {
logger.info(" Error retrieving BEAGLE resource for instance: " + instanceDetails.toString());
}
} else {
logger.info(" No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
}
logger.info(" " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
logger.info(" With " + patternList.getPatternCount() + " unique site patterns.");
for (int i = 0; i < tipCount; i++) {
// Find the id of tip i in the patternList
String id = treeModel.getTaxonId(i);
int index = patternList.getTaxonIndex(id);
if (index == -1) {
throw new TaxonList.MissingTaxonException("Taxon, " + id + ", in tree, " + treeModel.getId() +
", is not found in patternList, " + patternList.getId());
} else {
if (useAmbiguities) {
setPartials(beagle, patternList, index, i);
} else {
setStates(beagle, patternList, index, i);
}
}
}
beagle.setPatternWeights(patternWeights);
if (this.rescalingScheme == PartialsRescalingScheme.DEFAULT &&
resourceDetails != null &&
(resourceDetails.getFlags() & BeagleFlag.SCALING_AUTO.getMask()) == 0) {
// If auto scaling in BEAGLE is not supported then do it here
// this.rescalingScheme = PartialsRescalingScheme.NONE;
// this.rescalingScheme = PartialsRescalingScheme.ALWAYS;
this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
logger.info(" Auto rescaling not supported in BEAGLE, using : " + this.rescalingScheme.getText());
} else {
logger.info(" Using rescaling scheme : " + this.rescalingScheme.getText());
}
updateSubstitutionModel = true;
updateSiteModel = true;
} catch (TaxonList.MissingTaxonException mte) {
throw new RuntimeException(mte.toString());
}
hasInitialized = true;
}
public SiteRateModel getSiteRateModel() {
return siteRateModel;
}
public BranchRateModel getBranchRateModel() {
return branchRateModel;
}
protected int getScaleBufferCount() {
return internalNodeCount + 1;
}
/**
* Sets the partials from a sequence in an alignment.
* @param beagle beagle
* @param patternList patternList
* @param sequenceIndex sequenceIndex
* @param nodeIndex nodeIndex
*/
protected final void setPartials(Beagle beagle,
PatternList patternList,
int sequenceIndex,
int nodeIndex) {
double[] partials = new double[patternCount * stateCount * categoryCount];
boolean[] stateSet;
int v = 0;
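// Mark every state compatible with the observed (possibly ambiguous) state with weight 1.0;
// the filled block is replicated for each rate category below.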
for (int i = 0; i < patternCount; i++) {
int state = patternList.getPatternState(sequenceIndex, i);
stateSet = dataType.getStateSet(state);
for (int j = 0; j < stateCount; j++) {
if (stateSet[j]) {
partials[v] = 1.0;
} else {
partials[v] = 0.0;
}
v++;
}
}
// if there is more than one category then replicate the partials for each
int n = patternCount * stateCount;
int k = n;
for (int i = 1; i < categoryCount; i++) {
System.arraycopy(partials, 0, partials, k, n);
k += n;
}
beagle.setPartials(nodeIndex, partials);
}
/**
* Sets the partials from a sequence in an alignment.
* @param beagle beagle
* @param patternList patternList
* @param sequenceIndex sequenceIndex
* @param nodeIndex nodeIndex
*/
protected final void setStates(Beagle beagle,
PatternList patternList,
int sequenceIndex,
int nodeIndex) {
int i;
int[] states = new int[patternCount];
for (i = 0; i < patternCount; i++) {
states[i] = patternList.getPatternState(sequenceIndex, i);
}
beagle.setTipStates(nodeIndex, states);
}
// ModelListener IMPLEMENTATION
/**
* Handles model changed events from the submodels.
*/
protected void handleModelChangedEvent(Model model, Object object, int index) {
if (model == treeModel) {
if (object instanceof TreeModel.TreeChangedEvent) {
if (((TreeModel.TreeChangedEvent) object).isNodeChanged()) {
// If a node event occurs, the node and its two child nodes
// are flagged for updating (this will result in everything
// above being updated as well). Node events occur when a node
// is added to a branch, removed from a branch, or its height or
// rate changes.
updateNodeAndChildren(((TreeModel.TreeChangedEvent) object).getNode());
} else if (((TreeModel.TreeChangedEvent) object).isTreeChanged()) {
// Full tree events result in a complete updating of the tree likelihood
// This event type is now used for EmpiricalTreeDistributions.
// System.err.println("Full tree update event - these events currently aren't used\n" +
// "so either this is in error or a new feature is using them so remove this message.");
updateAllNodes();
} else {
// Other event types are ignored (probably trait changes).
//System.err.println("Another tree event has occured (possibly a trait change).");
}
}
} else if (model == branchRateModel) {
if (index == -1) {
if (COUNT_TOTAL_OPERATIONS)
totalRateUpdateAllCount++;
updateAllNodes();
} else {
if (COUNT_TOTAL_OPERATIONS)
totalRateUpdateSingleCount++;
updateNode(treeModel.getNode(index));
}
} else if (model == branchSiteModel) {
updateSubstitutionModel = true;
updateAllNodes();
} else if (model == siteRateModel) {
updateSiteModel = true;
updateAllNodes();
} else {
throw new RuntimeException("Unknown componentChangedEvent");
}
super.handleModelChangedEvent(model, object, index);
}
// Model IMPLEMENTATION
/**
* Stores the additional state other than model components
*/
protected void storeState() {
partialBufferHelper.storeState();
eigenBufferHelper.storeState();
matrixBufferHelper.storeState();
if (useScaleFactors) { // Only store when actually used
scaleBufferHelper.storeState();
System.arraycopy(scaleBufferIndices, 0, storedScaleBufferIndices, 0, scaleBufferIndices.length);
}
super.storeState();
}
/**
* Restore the additional stored state
*/
protected void restoreState() {
updateSiteModel = true; // this is required to upload the categoryRates to BEAGLE after the restore
partialBufferHelper.restoreState();
eigenBufferHelper.restoreState();
matrixBufferHelper.restoreState();
if (useScaleFactors ) {
scaleBufferHelper.restoreState();
int[] tmp = storedScaleBufferIndices;
storedScaleBufferIndices = scaleBufferIndices;
scaleBufferIndices = tmp;
}
super.restoreState();
}
// Likelihood IMPLEMENTATION
/**
* Calculate the log likelihood of the current state.
*
* @return the log likelihood.
*/
protected double calculateLogLikelihood() {
if (patternLogLikelihoods == null) {
patternLogLikelihoods = new double[patternCount];
}
if (matrixUpdateIndices == null) {
matrixUpdateIndices = new int[nodeCount];
branchLengths = new double[nodeCount];
scaleBufferIndices = new int[internalNodeCount];
storedScaleBufferIndices = new int[internalNodeCount];
}
if (operations == null) {
operations = new int[internalNodeCount * Beagle.OPERATION_TUPLE_SIZE];
}
recomputeScaleFactors = false;
if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS) {
useScaleFactors = true;
recomputeScaleFactors = true;
} else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
useScaleFactors = true;
if (rescalingCount == 0) {
recomputeScaleFactors = true;
// System.err.println("Recomputing scale factors");
}
rescalingCount ++;
if (rescalingCount > RESCALE_FREQUENCY) {
rescalingCount = 0;
}
}
branchUpdateCount = 0;
operationCount = 0;
final NodeRef root = treeModel.getRoot();
traverse(treeModel, root, null, true);
if (updateSubstitutionModel) {
// we are currently assuming a homogeneous model...
EigenDecomposition ed = branchSiteModel.getEigenDecomposition(0, 0);
eigenBufferHelper.flipOffset(0);
beagle.setEigenDecomposition(
eigenBufferHelper.getOffsetIndex(0),
ed.getEigenVectors(),
ed.getInverseEigenVectors(),
ed.getEigenValues());
}
if (updateSiteModel) {
double[] categoryRates = this.siteRateModel.getCategoryRates();
beagle.setCategoryRates(categoryRates);
}
beagle.updateTransitionMatrices(
eigenBufferHelper.getOffsetIndex(0),
matrixUpdateIndices,
null,
null,
branchLengths,
branchUpdateCount);
if (COUNT_TOTAL_OPERATIONS) {
totalMatrixUpdateCount += branchUpdateCount;
}
if (COUNT_TOTAL_OPERATIONS) {
totalOperationCount += operationCount;
}
double logL;
boolean done;
boolean firstAttempt = true; // has a rescaling retry already been attempted?
do {
beagle.updatePartials(operations, operationCount, -1);
int rootIndex = partialBufferHelper.getOffsetIndex(root.getNumber());
double[] categoryWeights = this.siteRateModel.getCategoryProportions();
double[] frequencies = branchSiteModel.getStateFrequencies(0);
int cumulateScaleBufferIndex = Beagle.NONE;
if (useScaleFactors) {
if (recomputeScaleFactors) {
scaleBufferHelper.flipOffset(internalNodeCount);
cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
beagle.resetScaleFactors(cumulateScaleBufferIndex);
beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, cumulateScaleBufferIndex);
} else {
cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
}
}
// these could be set only when they change but store/restore would need to be considered
beagle.setCategoryWeights(0, categoryWeights);
beagle.setStateFrequencies(0, frequencies);
double[] sumLogLikelihoods = new double[1];
beagle.calculateRootLogLikelihoods(new int[] { rootIndex }, new int[] { 0 }, new int[] { 0 },
new int[] { cumulateScaleBufferIndex }, 1, sumLogLikelihoods);
logL = sumLogLikelihoods[0];
if (Double.isNaN(logL) || Double.isInfinite(logL)) {
// logL = Double.NEGATIVE_INFINITY;
if (firstAttempt) {
if (rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
// we have had a potential under/over flow so attempt a rescaling
recomputeScaleFactors = true;
branchUpdateCount = 0;
operationCount = 0;
// traverse again but without flipping partials indices as we
// just want to overwrite the last attempt. We will flip the
// scale buffer indices though as we are recomputing them.
traverse(treeModel, root, null, false);
firstAttempt = false; // only retry the rescaling once
done = false;
} else {
// either we are not rescaling or always rescaling
// so just return the likelihood...
done = true;
}
} else {
// we have already tried a rescale
// so just return the likelihood...
done = true;
}
} else {
done = true;
}
} while (!done);
// If these are needed...
//beagle.getSiteLogLikelihoods(patternLogLikelihoods);
// Clear the per-node and model update flags now that everything has been
// recomputed, then return the log likelihood.
for (int i = 0; i < nodeCount; i++) {
updateNode[i] = false;
}
updateSubstitutionModel = false;
updateSiteModel = false;
return logL;
}
/**
* Traverse the tree calculating partial likelihoods.
* @param tree tree
* @param node node
* @param operatorNumber operatorNumber
* @param flip flip
* @return true if the branch above this node, or any branch in its subtree, required updating
*/
private boolean traverse(Tree tree, NodeRef node, int[] operatorNumber, boolean flip) {
boolean update = false;
int nodeNum = node.getNumber();
NodeRef parent = tree.getParent(node);
if (operatorNumber != null) {
operatorNumber[0] = -1;
}
// First update the transition probability matrix(ices) for this branch
if (parent != null && updateNode[nodeNum]) {
final double branchRate = branchRateModel.getBranchRate(tree, node);
// Get the operational time of the branch
final double branchTime = branchRate * (tree.getNodeHeight(parent) - tree.getNodeHeight(node));
if (branchTime < 0.0) {
throw new RuntimeException("Negative branch length: " + branchTime);
}
if (flip) {
// first flip the matrixBufferHelper
matrixBufferHelper.flipOffset(nodeNum);
}
// then set which matrix to update
matrixUpdateIndices[branchUpdateCount] = matrixBufferHelper.getOffsetIndex(nodeNum);
branchLengths[branchUpdateCount] = branchTime;
branchUpdateCount++;
update = true;
}
// If the node is internal, update the partial likelihoods.
if (!tree.isExternal(node)) {
// Traverse down the two child nodes
NodeRef child1 = tree.getChild(node, 0);
final int[] op1 = { -1 };
final boolean update1 = traverse(tree, child1, op1, flip);
NodeRef child2 = tree.getChild(node, 1);
final int[] op2 = { -1 };
final boolean update2 = traverse(tree, child2, op2, flip);
// If either child node was updated then update this node too
if (update1 || update2) {
int x = operationCount * Beagle.OPERATION_TUPLE_SIZE;
if (flip) {
// first flip the partialBufferHelper
partialBufferHelper.flipOffset(nodeNum);
}
operations[x] = partialBufferHelper.getOffsetIndex(nodeNum);
if (useScaleFactors) {
// get the index of this scaling buffer
int n = nodeNum - tipCount;
if (recomputeScaleFactors) {
// flip the indicator: can take either n or (internalNodeCount + 1) - n
scaleBufferHelper.flipOffset(n);
// store the index
scaleBufferIndices[n] = scaleBufferHelper.getOffsetIndex(n);
operations[x + 1] = scaleBufferIndices[n]; // Write new scaleFactor
operations[x + 2] = Beagle.NONE;
} else {
operations[x + 1] = Beagle.NONE;
operations[x + 2] = scaleBufferIndices[n]; // Read existing scaleFactor
}
} else {
operations[x + 1] = Beagle.NONE; // Not using scaleFactors
operations[x + 2] = Beagle.NONE;
}
operations[x + 3] = partialBufferHelper.getOffsetIndex(child1.getNumber()); // source node 1
operations[x + 4] = matrixBufferHelper.getOffsetIndex(child1.getNumber()); // source matrix 1
operations[x + 5] = partialBufferHelper.getOffsetIndex(child2.getNumber()); // source node 2
operations[x + 6] = matrixBufferHelper.getOffsetIndex(child2.getNumber()); // source matrix 2
operationCount ++;
update = true;
}
}
return update;
}
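// Layout note (added for clarity; derived from the loop above): each entry in
// the operations array occupies Beagle.OPERATION_TUPLE_SIZE consecutive ints:
//
//   operations[x + 0]  destination partials buffer (this internal node)
//   operations[x + 1]  scale buffer to write (or Beagle.NONE)
//   operations[x + 2]  scale buffer to read (or Beagle.NONE)
//   operations[x + 3]  partials buffer of child 1
//   operations[x + 4]  transition matrix buffer of child 1
//   operations[x + 5]  partials buffer of child 2
//   operations[x + 6]  transition matrix buffer of child 2
//
// beagle.updatePartials(operations, operationCount, ...) then processes
// operationCount such tuples in post-order.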
// INSTANCE VARIABLES
private int[] matrixUpdateIndices;
private double[] branchLengths;
private int branchUpdateCount;
private int[] scaleBufferIndices;
private int[] storedScaleBufferIndices;
private int[] operations;
private int operationCount;
protected BufferIndexHelper partialBufferHelper;
private final BufferIndexHelper eigenBufferHelper;
protected BufferIndexHelper matrixBufferHelper;
protected BufferIndexHelper scaleBufferHelper;
protected final int tipCount;
protected final int internalNodeCount;
private PartialsRescalingScheme rescalingScheme;
protected boolean useScaleFactors = false;
private boolean recomputeScaleFactors = false;
private int rescalingCount = 0;
/**
* the branch-site model for these sites
*/
protected final BranchSiteModel branchSiteModel;
/**
* the site model for these sites
*/
protected final SiteRateModel siteRateModel;
/**
* the branch rate model
*/
protected final BranchRateModel branchRateModel;
/**
* the pattern likelihoods
*/
protected double[] patternLogLikelihoods = null;
/**
* the number of rate categories
*/
protected int categoryCount;
/**
* an array used to transfer tip partials
*/
protected double[] tipPartials;
/**
* the BEAGLE library instance
*/
protected Beagle beagle;
/**
* Flag to specify that the substitution model has changed
*/
protected boolean updateSubstitutionModel;
protected boolean storedUpdateSubstitutionModel;
/**
* Flag to specify that the site model has changed
*/
protected boolean updateSiteModel;
protected boolean storedUpdateSiteModel;
// private boolean dynamicRescaling = false;
protected class BufferIndexHelper {
/**
*
* @param maxIndexValue the number of possible input values for the index
* @param minIndexValue the minimum index value to have the mirrored buffers
*/
BufferIndexHelper(int maxIndexValue, int minIndexValue) {
this.maxIndexValue = maxIndexValue;
this.minIndexValue = minIndexValue;
offsetCount = maxIndexValue - minIndexValue;
indexOffsets = new int[offsetCount];
storedIndexOffsets = new int[offsetCount];
}
public int getBufferCount() {
return 2 * offsetCount + minIndexValue;
}
void flipOffset(int i) {
if (i >= minIndexValue) {
indexOffsets[i - minIndexValue] = offsetCount - indexOffsets[i - minIndexValue];
} // else do nothing
}
int getOffsetIndex(int i) {
if (i < minIndexValue) {
return i;
}
return indexOffsets[i - minIndexValue] + i;
}
void getIndices(int[] outIndices) {
for (int i = 0; i < maxIndexValue; i++) {
outIndices[i] = getOffsetIndex(i);
}
}
void storeState() {
System.arraycopy(indexOffsets, 0, storedIndexOffsets, 0, indexOffsets.length);
}
void restoreState() {
int[] tmp = storedIndexOffsets;
storedIndexOffsets = indexOffsets;
indexOffsets = tmp;
}
private final int maxIndexValue;
private final int minIndexValue;
private final int offsetCount;
private int[] indexOffsets;
private int[] storedIndexOffsets;
}
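// Usage sketch (illustrative only; the values are made up and, since this is a
// non-static inner class, it must be instantiated from within the enclosing
// likelihood class). BufferIndexHelper implements a double-buffering scheme in
// which indices >= minIndexValue alternate between two banks of buffers:
//
//   BufferIndexHelper helper = new BufferIndexHelper(5, 2); // offsetCount = 3
//   helper.getOffsetIndex(3); // -> 3 (first bank)
//   helper.flipOffset(3);
//   helper.getOffsetIndex(3); // -> 6 (second bank: 3 + offsetCount)
//   helper.storeState();      // remember the current bank assignments
//   helper.flipOffset(3);
//   helper.restoreState();    // roll back to the stored assignments
//   helper.getOffsetIndex(1); // -> 1 (indices below minIndexValue never flip)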
}
|
package Ontology.Mappings.Tests;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import Ontology.Mappings.DataConverter;
import Ontology.Mappings.NestedSubAtomic;
import Ontology.Mappings.SpaceSeparation;
import Util.Pair;
import Util.SerializationConvenience;
import Util.SerializationFormatEnum;
import Util.StorageToken;
public class NestedSubAtomicTest
{
private NestedSubAtomic nestedSubAtomic;
private StorageToken sampleData;
@Before
public void setUp() throws Exception
{
List<Pair<Class<?>, String>> indices = new ArrayList<>();
indices.add(new Pair<Class<?>, String>(String.class, "foo"));
DataConverter converter = new SpaceSeparation();
nestedSubAtomic = new NestedSubAtomic(indices, converter, 1);
String foo = "test pass";
sampleData = new StorageToken();
sampleData.setItem("foo", foo);
}
@After
public void tearDown() throws Exception
{
this.nestedSubAtomic = null;
this.sampleData = null;
}
@Test
public void testSaveToToken()
{
String serialized = SerializationConvenience.serializeObject(nestedSubAtomic, SerializationFormatEnum.JSON_FORMAT);
NestedSubAtomic copy = (NestedSubAtomic) SerializationConvenience.nativeizeObject(serialized, SerializationFormatEnum.JSON_FORMAT);
Assert.assertEquals(nestedSubAtomic, copy);
}
@Test
public void testRetrieveFieldData()
{
String data = (String)nestedSubAtomic.retrieveFieldData(sampleData);
Assert.assertEquals("pass", data);
}
@Test
public void testStoreData()
{
StorageToken token = new StorageToken(new HashMap<>(), "testToken", "none");
nestedSubAtomic.storeData(token, "pass");
String result = (String)nestedSubAtomic.retrieveFieldData(token);
Assert.assertEquals("pass", result);
}
@Test
public void testOutOfOrderStorage()
{
List<Pair<Class<?>, String>> indices = new ArrayList<>();
indices.add(new Pair<Class<?>, String>(String.class, "foo"));
DataConverter converter = new SpaceSeparation();
NestedSubAtomic otherNestedSubAtomic = new NestedSubAtomic(indices, converter, 0);
StorageToken token = new StorageToken(new HashMap<>(), "testToken", "none");
nestedSubAtomic.storeData(token, "pass");
otherNestedSubAtomic.storeData(token, "test");
String firstResult = (String)nestedSubAtomic.retrieveFieldData(token);
String secondResult = (String)otherNestedSubAtomic.retrieveFieldData(token);
Assert.assertEquals("pass", firstResult);
Assert.assertEquals("test", secondResult);
}
}
|
package com.telekom.m2m.cot.restsdk.library.sensor;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.telekom.m2m.cot.restsdk.library.Fragment;
/**
* The Mobile fragment is part of the sensor library as well as the device control library. It has more attributes
* in the sensor lib. Just use the appropriate constructor to create one or the other.
*/
public class Mobile implements Fragment {
public final String imsi;
public final String imei;
public final String currentOperator;
public final String currentBand;
public final String connType;
public final String rssi;
public final String ecn0;
public final String rcsp;
public final String mnc;
public final String lac;
public final String cellId;
public final String msisdn;
public final String iccid;
// This flag differentiates between the simple version (device management lib; false) and the
// complete, verbose version (sensor lib; true).
private boolean isComplete = true;
/**
* This is the partial constructor for c8y_Mobile as described in the device management library.
*
* @param imei the International Mobile Equipment Number.
* @param cellId the identifier of the current cell tower.
* @param iccid the Integrated Circuit Identifier.
*/
public Mobile(String imei, String cellId, String iccid) {
this(null, imei, null, null, null, null, null, null, null, null, cellId, null, iccid);
isComplete = false;
}
/**
* This is the full constructor for c8y_Mobile as described in the sensor library.
*
* @param imsi the International Mobile Subscriber Identifier.
* @param imei the International Mobile Equipment Number.
* @param currentOperator the received mobile operator string, e.g. Telekom.
* @param currentBand the current mobile band, e.g. WCDMA2100.
* @param connType the current connection type, e.g. 3g
* @param rssi the Receive Signal Strength Indicator value as text.
* @param ecn0 EcNo is the RSCP divided by the RSSI
* @param rcsp Received Signal Code Power as text.
* @param mnc the Mobile Network Code, e.g. 1 (for Telekom Deutschland).
* @param lac Location Area Code, e.g. 38833 (for Bonn Area in Telekom Deutschland network).
* @param cellId the identifier of the current cell tower.
* @param msisdn the Mobile Station Integrated Services Digital Network Number.
* @param iccid the Integrated Circuit Identifier.
*/
public Mobile(String imsi,
String imei,
String currentOperator,
String currentBand,
String connType,
String rssi,
String ecn0,
String rcsp,
String mnc,
String lac,
String cellId,
String msisdn,
String iccid) {
this.imsi = imsi;
this.imei = imei;
this.currentOperator = currentOperator;
this.currentBand = currentBand;
this.connType = connType;
this.rssi = rssi;
this.ecn0 = ecn0;
this.rcsp = rcsp;
this.mnc = mnc;
this.lac = lac;
this.cellId = cellId;
this.msisdn = msisdn;
this.iccid = iccid;
}
@Override
public String getId() {
return "c8y_Mobile";
}
@Override
public JsonElement getJson() {
JsonObject object = new JsonObject();
object.addProperty("imei", imei);
object.addProperty("cellId", cellId);
object.addProperty("iccid", iccid);
if (isComplete) {
object.addProperty("imsi", imsi);
object.addProperty("currentOperator", currentOperator);
object.addProperty("currentBand", currentBand);
object.addProperty("connType", connType);
object.addProperty("rssi", rssi);
object.addProperty("ecn0", ecn0);
object.addProperty("rcsp", rcsp);
object.addProperty("mnc", mnc);
object.addProperty("lac", lac);
object.addProperty("msisdn", msisdn);
}
return object;
}
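// Illustrative usage (added as documentation; the values are made up):
//
//   Mobile shortForm = new Mobile("123456789012345", "4904-A496", "8949020012345678901");
//   shortForm.getJson();
//   // -> {"imei":"123456789012345","cellId":"4904-A496","iccid":"8949020012345678901"}
//
// The 13-argument constructor leaves isComplete == true, so getJson() then also
// emits imsi, currentOperator, currentBand, connType, rssi, ecn0, rcsp, mnc,
// lac and msisdn.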
}
|
package dr.evomodel.continuous;
import dr.evolution.tree.NodeAttributeProvider;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evomodel.branchratemodel.BranchRateModel;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.*;
import dr.inference.loggers.LogColumn;
import dr.inference.loggers.NumberColumn;
import dr.xml.*;
import dr.math.MathUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import java.util.*;
import java.util.logging.Logger;
/**
* @author Marc Suchard
*/
public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelLikelihood implements NodeAttributeProvider {
public static final String TRAIT_LIKELIHOOD = "multivariateTraitLikelihood";
public static final String TRAIT_NAME = "traitName";
public static final String ROOT_PRIOR = "rootPrior";
public static final String MODEL = "diffusionModel";
public static final String TREE = "tree";
public static final String TRAIT_PARAMETER = "traitParameter";
public static final String SET_TRAIT = "setOutcomes";
public static final String MISSING = "missingIndicator";
public static final String CACHE_BRANCHES = "cacheBranches";
public static final String REPORT_MULTIVARIATE = "reportAsMultivariate";
public static final String DEFAULT_TRAIT_NAME = "trait";
public static final String RANDOMIZE = "randomize";
public static final String RANDOMIZE_LOWER = "lower";
public static final String RANDOMIZE_UPPER = "upper";
public static final String CHECK = "check";
public static final String USE_TREE_LENGTH = "useTreeLength";
public static final String SCALE_BY_TIME = "scaleByTime";
public static final String SUBSTITUTIONS = "substitutions";
public static final String SAMPLING_DENSITY = "samplingDensity";
public static final String INTEGRATE = "integrateInternalTraits";
public static final String JITTER = "jitter";
public static final String WINDOW = "window";
public static final String DUPLICATES = "duplicatesOnly";
public AbstractMultivariateTraitLikelihood(String traitName,
TreeModel treeModel,
MultivariateDiffusionModel diffusionModel,
CompoundParameter traitParameter,
List<Integer> missingIndices,
boolean cacheBranches,
boolean scaleByTime,
boolean useTreeLength,
BranchRateModel rateModel,
Model samplingDensity,
boolean reportAsMultivariate) {
super(TRAIT_LIKELIHOOD);
this.traitName = traitName;
this.treeModel = treeModel;
this.rateModel = rateModel;
this.diffusionModel = diffusionModel;
this.traitParameter = traitParameter;
this.missingIndices = missingIndices;
addModel(treeModel);
addModel(diffusionModel);
if (rateModel != null) {
hasRateModel = true;
addModel(rateModel);
}
if (samplingDensity != null) {
addModel(samplingDensity);
}
if (traitParameter != null)
addVariable(traitParameter);
this.reportAsMultivariate = reportAsMultivariate;
this.cacheBranches = cacheBranches;
if (cacheBranches) {
cachedLogLikelihoods = new double[treeModel.getNodeCount()];
storedCachedLogLikelihood = new double[treeModel.getNodeCount()];
validLogLikelihoods = new boolean[treeModel.getNodeCount()];
storedValidLogLikelihoods = new boolean[treeModel.getNodeCount()];
}
this.scaleByTime = scaleByTime;
this.useTreeLength = useTreeLength;
StringBuffer sb = new StringBuffer("Creating multivariate diffusion model:\n");
sb.append("\tTrait: ").append(traitName).append("\n");
sb.append("\tDiffusion process: ").append(diffusionModel.getId()).append("\n");
sb.append("\tHeterogenity model: ").append(rateModel != null ? rateModel.getId() : "homogeneous").append("\n");
sb.append("\tTree normalization: ").append(scaleByTime ? (useTreeLength ? "length" : "height") : "off").append("\n");
if (scaleByTime) {
recalculateTreeLength();
if (useTreeLength) {
sb.append("\tInitial tree length: ").append(treeLength).append("\n");
} else {
sb.append("\tInitial tree height: ").append(treeLength).append("\n");
}
}
sb.append(extraInfo());
sb.append("\tPlease cite Suchard, Lemey and Rambaut (in preparation) if you publish results using this model.");
Logger.getLogger("dr.evomodel").info(sb.toString());
recalculateTreeLength();
}
protected abstract String extraInfo();
public String getTraitName() {
return traitName;
}
public double getRescaledBranchLength(NodeRef node) {
double length = treeModel.getBranchLength(node);
if (hasRateModel)
length *= rateModel.getBranchRate(treeModel, node);
if (scaleByTime)
return length / treeLength;
return length;
}
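// Worked example (illustrative): with a raw branch length of 0.2, a branch rate
// of 2.0 from the rate model and scaleByTime == true with a current treeLength
// of 10.0, the rescaled branch length is 0.2 * 2.0 / 10.0 = 0.04.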
// ModelListener IMPLEMENTATION
protected void handleModelChangedEvent(Model model, Object object, int index) {
if (!cacheBranches) {
likelihoodKnown = false;
if (model == treeModel)
recalculateTreeLength();
return;
}
if (model == diffusionModel) {
updateAllNodes();
}
// fireTreeEvents sends two events here when a node trait is changed,
// ignoring object instance Parameter case
else if (model == treeModel) {
if (object instanceof TreeModel.TreeChangedEvent) {
TreeModel.TreeChangedEvent event = (TreeModel.TreeChangedEvent) object;
if (event.isHeightChanged()) {
recalculateTreeLength();
if (useTreeLength || (scaleByTime && treeModel.isRoot(event.getNode())))
updateAllNodes();
else {
updateNodeAndChildren(event.getNode());
}
} else if (event.isNodeParameterChanged()) {
updateNodeAndChildren(event.getNode());
} else if (event.isNodeChanged()) {
recalculateTreeLength();
if (useTreeLength || (scaleByTime && treeModel.isRoot(event.getNode())))
updateAllNodes();
else {
updateNodeAndChildren(event.getNode());
}
} else {
throw new RuntimeException("Unexpected TreeModel TreeChangedEvent occuring in AbstractMultivariateTraitLikelihood");
}
} else if (object instanceof Parameter) {
// Ignoring
} else {
throw new RuntimeException("Unexpected TreeModel event occuring in AbstractMultivariateTraitLikelihood");
}
} else if (model == rateModel) {
if (index == -1) {
updateAllNodes();
} else {
if (((Parameter)object).getDimension() == 2*(treeModel.getNodeCount()-1))
updateNode(treeModel.getNode(index)); // This is a branch specific update
else
updateAllNodes(); // Probably an epoch model
}
} else {
throw new RuntimeException("Unknown componentChangedEvent");
}
}
private void updateAllNodes() {
for(int i=0; i<treeModel.getNodeCount(); i++)
validLogLikelihoods[i] = false;
likelihoodKnown = false;
}
private void updateNode(NodeRef node) {
validLogLikelihoods[node.getNumber()] = false;
likelihoodKnown = false;
}
private void updateNodeAndChildren(NodeRef node) {
validLogLikelihoods[node.getNumber()] = false;
for(int i=0; i<treeModel.getChildCount(node); i++)
validLogLikelihoods[treeModel.getChild(node,i).getNumber()] = false;
likelihoodKnown = false;
}
public void recalculateTreeLength() {
if (!scaleByTime)
return;
if (useTreeLength) {
treeLength = 0;
for (int i = 0; i < treeModel.getNodeCount(); i++) {
NodeRef node = treeModel.getNode(i);
if (!treeModel.isRoot(node))
treeLength += treeModel.getBranchLength(node); // Bug was here
}
} else { // Normalizing by tree height.
treeLength = treeModel.getNodeHeight(treeModel.getRoot());
}
}
// VariableListener IMPLEMENTATION
protected final void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
// All parameter changes are handled first by the treeModel
if (!cacheBranches)
likelihoodKnown = false;
}
// Model IMPLEMENTATION
/**
* Stores the precalculated state: the cached log likelihoods and the tree length.
*/
protected void storeState() {
storedLikelihoodKnown = likelihoodKnown;
storedLogLikelihood = logLikelihood;
storedTreeLength = treeLength;
if (cacheBranches) {
System.arraycopy(cachedLogLikelihoods,0,storedCachedLogLikelihood,0,treeModel.getNodeCount());
System.arraycopy(validLogLikelihoods,0,storedValidLogLikelihoods,0,treeModel.getNodeCount());
}
}
/**
* Restores the precalculated state: the cached log likelihoods and the tree length.
*/
protected void restoreState() {
likelihoodKnown = storedLikelihoodKnown;
logLikelihood = storedLogLikelihood;
treeLength = storedTreeLength;
if (cacheBranches) {
double[] tmp = storedCachedLogLikelihood;
storedCachedLogLikelihood = cachedLogLikelihoods;
cachedLogLikelihoods = tmp;
boolean[] tmp2 = storedValidLogLikelihoods;
storedValidLogLikelihoods = validLogLikelihoods;
validLogLikelihoods = tmp2;
}
}
protected void acceptState() {
} // nothing to do
public TreeModel getTreeModel() {
return treeModel;
}
public MultivariateDiffusionModel getDiffusionModel() {
return diffusionModel;
}
// public boolean getInSubstitutionTime() {
// return inSubstitutionTime;
// }
// Likelihood IMPLEMENTATION
public Model getModel() {
return this;
}
public String toString() {
return getClass().getName() + "(" + getLogLikelihood() + ")";
}
public final double getLogLikelihood() {
if (!likelihoodKnown) {
logLikelihood = calculateLogLikelihood();
likelihoodKnown = true;
}
return logLikelihood;
}
public abstract double getLogDataLikelihood();
public void makeDirty() {
likelihoodKnown = false;
if (cacheBranches)
updateAllNodes();
}
public LogColumn[] getColumns() {
return new LogColumn[]{
new LikelihoodColumn(getId()+".joint"),
new NumberColumn(getId()+".data") {
public double getDoubleValue() {
return getLogDataLikelihood();
}
}
};
}
public abstract double calculateLogLikelihood();
public double getMaxLogLikelihood() {
return maxLogLikelihood;
}
// Loggable IMPLEMENTATION
private String[] attributeLabel = null;
public String[] getNodeAttributeLabel() {
if (attributeLabel == null) {
double[] trait = treeModel.getMultivariateNodeTrait(treeModel.getRoot(), traitName);
if (trait.length == 1 || reportAsMultivariate)
attributeLabel = new String[]{traitName};
else {
attributeLabel = new String[trait.length];
for (int i = 1; i <= trait.length; i++)
attributeLabel[i - 1] = traitName + i;
}
}
return attributeLabel;
}
protected abstract double[] traitForNode(TreeModel tree, NodeRef node, String traitName);
public String[] getAttributeForNode(Tree tree, NodeRef node) {
// double trait[] = treeModel.getMultivariateNodeTrait(node, traitName);
double trait[] = traitForNode(treeModel, node, traitName);
String[] value;
if (!reportAsMultivariate || trait.length == 1) {
value = new String[trait.length];
for (int i = 0; i < trait.length; i++)
value[i] = Double.toString(trait[i]);
} else {
StringBuffer sb = new StringBuffer("{");
for (int i = 0; i < trait.length - 1; i++)
sb.append(Double.toString(trait[i])).append(",");
sb.append(Double.toString(trait[trait.length - 1])).append("}");
value = new String[]{sb.toString()};
}
return value;
}
public void randomize(Parameter trait, double[] lower, double[] upper) {
// Draws each dimension in each trait from U[lower, upper)
for(int i = 0; i < trait.getDimension(); i++) {
final int whichLower = i % lower.length;
final int whichUpper = i % upper.length;
final double newValue = MathUtils.uniform(lower[whichLower],upper[whichUpper]);
trait.setParameterValue(i, newValue);
}
//diffusionModel.randomize(trait);
}
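// Example (illustrative): the bounds arrays are recycled modulo their length,
// so for a 2-dimensional trait parameter
//   randomize(trait, new double[]{-90.0, -180.0}, new double[]{90.0, 180.0})
// draws latitude-like values in [-90, 90) for even dimensions and
// longitude-like values in [-180, 180) for odd dimensions, while a
// single-element array such as new double[]{-90.0} applies to every dimension.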
public void jitter(Parameter trait, int dim, double[] window, boolean duplicates, boolean verbose) {
int numTraits = trait.getDimension() / dim;
boolean[] update = new boolean[numTraits];
if (!duplicates) {
Arrays.fill(update, true);
} else {
DoubleArray[] traitArray = new DoubleArray[numTraits];
for (int i = 0; i < numTraits; i++) {
double[] x = new double[dim];
for (int j = 0; j < dim; j++) {
x[j] = trait.getParameterValue(i * dim + j);
}
traitArray[i] = new DoubleArray(x,i);
}
Arrays.sort(traitArray);
// Mark duplicates
for (int i = 1; i < numTraits; i++) {
if (traitArray[i].compareTo(traitArray[i-1]) == 0) {
update[traitArray[i-1].getIndex()] = true;
update[traitArray[i].getIndex()] = true;
}
}
}
for (int i = 0; i < numTraits; i++) {
if (update[i]) {
StringBuffer sb1 = null;
StringBuffer sb2 = null;
if (verbose) {
sb1 = new StringBuffer();
sb2 = new StringBuffer();
}
for (int j = 0; j < dim; j++) {
final double oldValue = trait.getParameterValue(i * dim + j);
final double newValue = window[j % window.length] * (MathUtils.nextDouble() - 0.5) +
oldValue;
trait.setParameterValue(i * dim + j, newValue);
if (verbose) {
sb1.append(" ").append(oldValue);
sb2.append(" ").append(newValue);
}
}
if (verbose) {
Logger.getLogger("dr.evomodel.continuous").info(
" Replacing trait #"+(i+1)+" Old:"+sb1.toString()+" New: "+sb2.toString()
);
}
}
}
}
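// Example (illustrative): jitter(traits, 2, new double[]{1.0}, true, false)
// treats the parameter as a list of 2-dimensional traits, leaves unique traits
// untouched (duplicatesOnly == true) and perturbs each coordinate of every
// duplicated trait by a uniform offset in (-0.5, +0.5), since the new value is
// window[j % window.length] * (MathUtils.nextDouble() - 0.5) + oldValue.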
class DoubleArray implements Comparable {
double[] value;
int index;
DoubleArray(double[] value, int index) {
this.value = value;
this.index = index;
}
public double[] getValues() {
return value;
}
public int getIndex() {
return index;
}
public int compareTo(Object o) {
double[] x = ((DoubleArray) o).getValues();
for(int i = 0; i < value.length; i++) {
if (value[i] > x[i]) {
return 1;
} else if (value[i] < x[i]) {
return -1;
}
}
return 0;
}
}
public void check(Parameter trait) throws XMLParseException {
diffusionModel.check(trait);
}
// XMLElement IMPLEMENTATION
public Element createElement(Document d) {
throw new RuntimeException("Not implemented yet!");
}
// XMLObjectParser
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return TRAIT_LIKELIHOOD;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
MultivariateDiffusionModel diffusionModel = (MultivariateDiffusionModel) xo.getChild(MultivariateDiffusionModel.class);
TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
CompoundParameter traitParameter = (CompoundParameter) xo.getElementFirstChild(TRAIT_PARAMETER);
boolean cacheBranches = xo.getAttribute(CACHE_BRANCHES, false);
boolean integrate = xo.getAttribute(INTEGRATE,false);
BranchRateModel rateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
List<Integer> missingIndices = null;
String traitName = DEFAULT_TRAIT_NAME;
if (xo.hasAttribute(TRAIT_NAME)) {
traitName = xo.getStringAttribute(TRAIT_NAME);
// Fill in attributeValues
int taxonCount = treeModel.getTaxonCount();
for (int i = 0; i < taxonCount; i++) {
String taxonName = treeModel.getTaxonId(i);
String paramName = taxonName + "." + traitName;
Parameter traitParam = getTraitParameterByName(traitParameter, paramName);
if (traitParam == null)
throw new RuntimeException("Missing trait parameters at tree tips");
String object = (String) treeModel.getTaxonAttribute(i, traitName);
if (object == null)
throw new RuntimeException("Trait \"" + traitName + "\" not found for taxa \"" + taxonName + "\"");
else {
StringTokenizer st = new StringTokenizer(object);
int count = st.countTokens();
if (count != traitParam.getDimension())
throw new RuntimeException("Trait length must match trait parameter dimension");
for (int j = 0; j < count; j++) {
String oneValue = st.nextToken();
double value = Double.NaN;
if (oneValue.compareTo("NA") == 0) {
// Missing values not yet handled.
} else {
try {
value = new Double(oneValue);
} catch (NumberFormatException e) {
throw new RuntimeException(e.getMessage());
}
}
traitParam.setParameterValue(j, value);
}
}
}
// Find missing values
double[] allValues = traitParameter.getParameterValues();
missingIndices = new ArrayList<Integer>();
for (int i = 0; i < allValues.length; i++) {
if ((new Double(allValues[i])).isNaN()) {
traitParameter.setParameterValue(i, 0);
missingIndices.add(i);
}
}
if (xo.hasChildNamed(MISSING)) {
XMLObject cxo = xo.getChild(MISSING);
Parameter missingParameter = new Parameter.Default(allValues.length, 0.0);
for (int i : missingIndices) {
missingParameter.setParameterValue(i, 1.0);
}
missingParameter.addBounds(new Parameter.DefaultBounds(1.0, 0.0, allValues.length));
/* CompoundParameter missingParameter = new CompoundParameter(MISSING);
System.err.println("TRAIT: "+traitParameter.toString());
System.err.println("CNT: "+traitParameter.getNumberOfParameters());
for(int i : missingIndices) {
Parameter thisParameter = traitParameter.getIndicatorParameter(i);
missingParameter.addVariable(thisParameter);
}*/
ParameterParser.replaceParameter(cxo, missingParameter);
}
}
Model samplingDensity = null;
if (xo.hasChildNamed(SAMPLING_DENSITY)) {
XMLObject cxo = xo.getChild(SAMPLING_DENSITY);
samplingDensity = (Model) cxo.getChild(Model.class);
}
boolean useTreeLength = xo.getAttribute(USE_TREE_LENGTH, false);
boolean scaleByTime = xo.getAttribute(SCALE_BY_TIME, false);
boolean reportAsMultivariate = false;
if (xo.hasAttribute(REPORT_MULTIVARIATE) && xo.getBooleanAttribute(REPORT_MULTIVARIATE))
reportAsMultivariate = true;
if (integrate)
return new IntegratedMultivariateTraitLikelihood(traitName, treeModel, diffusionModel,
traitParameter, missingIndices, cacheBranches,
scaleByTime, useTreeLength, rateModel, samplingDensity, reportAsMultivariate, null);
AbstractMultivariateTraitLikelihood like =
new SampledMultivariateTraitLikelihood(traitName, treeModel, diffusionModel,
traitParameter, missingIndices, cacheBranches,
scaleByTime, useTreeLength, rateModel, samplingDensity, reportAsMultivariate);
if (xo.hasChildNamed(RANDOMIZE)) {
XMLObject cxo = xo.getChild(RANDOMIZE);
Parameter traits = (Parameter) cxo.getChild(Parameter.class);
double[] randomizeLower;
double[] randomizeUpper;
if (cxo.hasAttribute(RANDOMIZE_LOWER)) {
randomizeLower = cxo.getDoubleArrayAttribute(RANDOMIZE_LOWER);
} else {
randomizeLower = new double[] { -90.0 };
}
if (cxo.hasAttribute(RANDOMIZE_UPPER)) {
randomizeUpper = cxo.getDoubleArrayAttribute(RANDOMIZE_UPPER);
} else {
randomizeUpper = new double[] { +90.0 };
}
like.randomize(traits, randomizeLower, randomizeUpper);
}
if (xo.hasChildNamed(JITTER)) {
XMLObject cxo = xo.getChild(JITTER);
Parameter traits = (Parameter) cxo.getChild(Parameter.class);
double[] window = cxo.getDoubleArrayAttribute(WINDOW); // Must be included, no default value
boolean duplicates = cxo.getAttribute(DUPLICATES,true); // default = true
like.jitter(traits, diffusionModel.getPrecisionmatrix().length, window, duplicates, true);
}
if (xo.hasChildNamed(CHECK)) {
XMLObject cxo = xo.getChild(CHECK);
Parameter check = (Parameter) cxo.getChild(Parameter.class);
like.check(check);
}
return like;
}
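// Illustrative (hypothetical) XML fragment for this parser. The element and
// attribute names for the likelihood, traitParameter and randomize blocks come
// from the constants above; the nested diffusion model and tree model are
// located by class rather than by name, so their element names and the idref
// values below are examples only:
//
//   <multivariateTraitLikelihood traitName="location" cacheBranches="true"
//                                scaleByTime="true" useTreeLength="false"
//                                reportAsMultivariate="true">
//       <traitParameter> <parameter idref="leafTraits"/> </traitParameter>
//       <randomize lower="-90 -180" upper="90 180">
//           <parameter idref="leafTraits"/>
//       </randomize>
//       <multivariateDiffusionModel idref="diffusion"/>
//       <treeModel idref="treeModel"/>
//   </multivariateTraitLikelihood>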
private Parameter getTraitParameterByName(CompoundParameter traits, String name) {
for (int i = 0; i < traits.getNumberOfParameters(); i++) {
Parameter found = traits.getParameter(i);
if (found.getStatisticName().compareTo(name) == 0)
return found;
}
return null;
}
|
package com.almasb.fxgl.physics;
import com.almasb.fxgl.physics.box2d.collision.shapes.ShapeType;
import javafx.geometry.Dimension2D;
import javafx.geometry.Point2D;
/**
* Defines bounding shapes to be used for hit boxes.
*
* @author Almas Baimagambetov (AlmasB) (almaslvl@gmail.com)
*/
public final class BoundingShape {
final ShapeType type;
final Object data;
final Dimension2D size;
private BoundingShape(ShapeType type, Object data, Dimension2D size) {
this.type = type;
this.data = data;
this.size = size;
}
/**
* @return 2d size of this bounding shape
*/
Dimension2D getSize() {
return size;
}
/**
* @return true if the type of this shape is a circle
*/
public boolean isCircle() {
return type == ShapeType.CIRCLE;
}
/**
* @return true if the type of this shape is a rectangle/square
*/
public boolean isRectangle() {
return type == ShapeType.POLYGON;
}
/**
* Constructs new circular bounding shape with given radius.
*
* @param radius circle radius
* @return circular bounding shape
*/
public static BoundingShape circle(double radius) {
return new BoundingShape(ShapeType.CIRCLE, new Dimension2D(radius * 2, radius * 2), new Dimension2D(radius * 2, radius * 2));
}
/**
* Constructs new rectangular bounding shape with given width and height.
*
* @param width box width
* @param height box height
* @return rectangular bounding shape
*/
public static BoundingShape box(double width, double height) {
return new BoundingShape(ShapeType.POLYGON, new Dimension2D(width, height), new Dimension2D(width, height));
}
public static BoundingShape chain(Point2D... points) {
if (points.length < 2)
throw new IllegalArgumentException("Chain shape requires at least 2 points. Given points: " + points.length);
double maxX = points[0].getX();
double maxY = points[0].getY();
for (Point2D p : points) {
if (p.getX() > maxX) {
maxX = p.getX();
}
if (p.getY() > maxY) {
maxY = p.getY();
}
}
return new BoundingShape(ShapeType.CHAIN, points, new Dimension2D(maxX, maxY));
}
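// Usage sketch (illustrative):
//
//   BoundingShape circle = BoundingShape.circle(15);   // 30 x 30 bounding size
//   BoundingShape box    = BoundingShape.box(40, 20);  // 40 x 20 bounding size
//   BoundingShape chain  = BoundingShape.chain(
//           new Point2D(0, 0), new Point2D(100, 0), new Point2D(100, 50));
//   // chain.getSize() -> 100 x 50 (max X and max Y over the supplied points)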
}
|
package io.reon.http;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
public class ResponseBuilder extends MessageBuilder<Response> {
private ResponseBuilder(Response response) {
that = response;
}
public static ResponseBuilder startWith(StatusCode statusCode) {
return new ResponseBuilder(new Response(statusCode));
}
public static ResponseBuilder with(Response response) {
return new ResponseBuilder(response);
}
public static ResponseBuilder ok() {
return startWith(StatusCode.OK);
}
public static ResponseBuilder notFound() {
return startWith(StatusCode.NOT_FOUND).withClose();
}
public static ResponseBuilder error(HttpException ex) {
return startWith(ex.getStatusCode()).withClose();
}
public static ResponseBuilder methodNotAllowed() {
return startWith(StatusCode.METHOD_NOT_ALLOWED).withClose();
}
public static ResponseBuilder unauthorized() {
return startWith(StatusCode.UNAUTHORIZED);
}
public static ResponseBuilder serviceUnavailable() {
return startWith(StatusCode.SERVICE_UNAVAILABLE).withClose();
}
public static ResponseBuilder internalError(Exception e) {
return startWith(StatusCode.INTERNAL_ERROR).withClose();
}
public static ResponseBuilder forbidden() {
return startWith(StatusCode.FORBIDDEN);
}
public static ResponseBuilder found(String location) {
return startWith(StatusCode.FOUND).withLocation(location);
}
private ResponseBuilder withLocation(String location) {
return (ResponseBuilder) withUpdatedHeader(Headers.RESPONSE.LOCATION, location);
}
@Override
public ResponseBuilder withId(String id) {
return (ResponseBuilder) super.withId(id);
}
public ResponseBuilder withKeepAlive() {
return (ResponseBuilder) super.withKeepAlive();
}
public ResponseBuilder withClose() {
return (ResponseBuilder) withUpdatedHeader(Headers.RESPONSE.CONNECTION, "close");
}
public ResponseBuilder withCookie(Cookie cookie) {
return (ResponseBuilder) withHeader(Headers.RESPONSE.SET_COOKIE, cookie.toString());
}
public ResponseBuilder withCookies(Cookies cookies) {
for (Cookie cookie : cookies.all()) withCookie(cookie);
return this;
}
public ResponseBuilder withReason(String reason) {
that.setReason(reason);
return this;
}
public ResponseBuilder withContentType(String contentType) {
return (ResponseBuilder) super.withContentType(contentType);
}
public ResponseBuilder withContentTypeFrom(String filename) {
return (ResponseBuilder) super.withContentTypeFrom(filename);
}
public ResponseBuilder withBody(File file) throws FileNotFoundException {
return (ResponseBuilder) super.withBody(file);
}
public ResponseBuilder withBody(String s) {
return (ResponseBuilder) super.withBody(s);
}
public ResponseBuilder withBody(InputStream is) {
return (ResponseBuilder) super.withBody(is);
}
public ResponseBuilder withBody(byte[] data) {
return (ResponseBuilder) super.withBody(data);
}
@Override
public ResponseBuilder withBody(JSONObject jsonObject) {
return (ResponseBuilder) super.withBody(jsonObject);
}
@Override
public ResponseBuilder withBody(JSONArray jsonObject) {
return (ResponseBuilder) super.withBody(jsonObject);
}
public ResponseBuilder withBody(Response response) {
return with(response);
}
public ResponseBuilder withLength(long length) {
return (ResponseBuilder) withUpdatedHeader(Headers.RESPONSE.CONTENT_LEN, Long.toString(length));
}
public ResponseBuilder withChunks() {
return (ResponseBuilder) super.withChunks();
}
public ResponseBuilder withIdentity() {
return (ResponseBuilder) super.withIdentity();
}
public ResponseBuilder withTransferEncoding(String value) {
return (ResponseBuilder) super.withTransferEncoding(value);
}
public ResponseBuilder withCharset(String charset) {
that.charset = charset;
return this;
}
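// Usage sketch (illustrative; retrieving the final Response instance is handled
// by the MessageBuilder base class, which is not shown here):
//
//   ResponseBuilder ok = ResponseBuilder.ok()
//           .withContentType("application/json")
//           .withBody(new JSONObject().put("status", "ok"))
//           .withKeepAlive();
//
//   ResponseBuilder missing = ResponseBuilder.notFound()   // already adds Connection: close
//           .withContentType("text/plain")
//           .withBody("No such resource");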
}
|
// RMG - Reaction Mechanism Generator
// RMG Team (rmg_dev@mit.edu)
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
package jing.rxnSys;
import java.io.*;
import jing.rxnSys.ReactionSystem;
import jing.rxn.*;
import jing.chem.*;
import java.util.*;
import jing.mathTool.UncertainDouble;
import jing.param.*;
import jing.chemUtil.*;
import jing.chemParser.*;
//## package jing::rxnSys
// jing\rxnSys\ReactionModelGenerator.java
//## class ReactionModelGenerator
public class ReactionModelGenerator {
protected LinkedList timeStep; //## attribute timeStep
protected ReactionModel reactionModel; //gmagoon 9/24/07
protected String workingDirectory; //## attribute workingDirectory
// protected ReactionSystem reactionSystem;
protected LinkedList reactionSystemList; //10/24/07 gmagoon: changed from reactionSystem to reactionSystemList
protected int paraInfor;//svp
protected boolean error;//svp
protected boolean sensitivity;//svp
protected LinkedList species;//svp
// protected InitialStatus initialStatus;//svp
protected LinkedList initialStatusList; //10/23/07 gmagoon: changed from initialStatus to initialStatusList
protected double rtol;//svp
protected static double atol;
protected PrimaryReactionLibrary primaryReactionLibrary;//9/24/07 gmagoon
protected ReactionModelEnlarger reactionModelEnlarger;//9/24/07 gmagoon
protected LinkedHashSet speciesSeed;//9/24/07 gmagoon;
protected ReactionGenerator reactionGenerator;//9/24/07 gmagoon
protected LibraryReactionGenerator lrg;// = new LibraryReactionGenerator();//9/24/07 gmagoon: moved from ReactionSystem.java;10/4/07 gmagoon: postponed initialization of lrg til later
//10/23/07 gmagoon: added additional variables
protected LinkedList tempList;
protected LinkedList presList;
protected LinkedList validList;//10/24/07 gmagoon: added
//10/25/07 gmagoon: moved variables from modelGeneration()
protected LinkedList initList = new LinkedList();
protected LinkedList beginList = new LinkedList();
protected LinkedList endList = new LinkedList();
protected LinkedList lastTList = new LinkedList();
protected LinkedList currentTList = new LinkedList();
protected LinkedList lastPList = new LinkedList();
protected LinkedList currentPList = new LinkedList();
protected LinkedList conditionChangedList = new LinkedList();
protected LinkedList reactionChangedList = new LinkedList();
protected int numConversions;//5/6/08 gmagoon: moved from initializeReactionSystem() to be an attribute so it can be accessed by modelGenerator()
protected String equationOfState;
// 24Jun2009 MRH: variable stores the first temperature encountered in the condition.txt file
// This temperature is used to select the "best" kinetics from the rxn library
protected static Temperature temp4BestKinetics;
// This is the new "PrimaryReactionLibrary"
protected SeedMechanism seedMechanism;
protected PrimaryThermoLibrary primaryThermoLibrary;
protected PrimaryTransportLibrary primaryTransportLibrary;
protected boolean restart = false;
protected boolean readrestart = false;
protected boolean writerestart = false;
protected LinkedHashSet restartCoreSpcs = new LinkedHashSet();
protected LinkedHashSet restartEdgeSpcs = new LinkedHashSet();
protected LinkedHashSet restartCoreRxns = new LinkedHashSet();
protected LinkedHashSet restartEdgeRxns = new LinkedHashSet();
// Constructors
private HashSet specs = new HashSet();
//public static native long getCpuTime();
//static {System.loadLibrary("cpuTime");}
public static boolean rerunFame = false;
protected static double tolerance;//can be interpreted as "coreTol" (vs. edgeTol)
protected static double termTol;
protected static double edgeTol;
protected static int minSpeciesForPruning;
protected static int maxEdgeSpeciesAfterPruning;
public int limitingReactantID = 1;
//## operation ReactionModelGenerator()
public ReactionModelGenerator() {
workingDirectory = System.getProperty("RMG.workingDirectory");
}
//## operation initializeReactionSystem()
//10/24/07 gmagoon: changed name to initializeReactionSystems
public void initializeReactionSystems() throws InvalidSymbolException, IOException {
//#[ operation initializeReactionSystem()
try {
String initialConditionFile = System.getProperty("jing.rxnSys.ReactionModelGenerator.conditionFile");
if (initialConditionFile == null) {
System.out.println("undefined system property: jing.rxnSys.ReactionModelGenerator.conditionFile");
System.exit(0);
}
//double sandeep = getCpuTime();
//System.out.println(getCpuTime()/1e9/60);
FileReader in = new FileReader(initialConditionFile);
BufferedReader reader = new BufferedReader(in);
//TemperatureModel temperatureModel = null;//10/27/07 gmagoon: commented out
//PressureModel pressureModel = null;//10/27/07 gmagoon: commented out
// ReactionModelEnlarger reactionModelEnlarger = null;//10/9/07 gmagoon: commented out: unneeded now and causes scope problems
FinishController finishController = null;
//DynamicSimulator dynamicSimulator = null;//10/27/07 gmagoon: commented out and replaced with following line
LinkedList dynamicSimulatorList = new LinkedList();
//PrimaryReactionLibrary primaryReactionLibrary = null;//10/14/07 gmagoon: see below
setPrimaryReactionLibrary(null);//10/14/07 gmagoon: changed to use setPrimaryReactionLibrary
double [] conversionSet = new double[50];
String line = ChemParser.readMeaningfulLine(reader);
/*if (line.startsWith("Restart")){
StringTokenizer st = new StringTokenizer(line);
String token = st.nextToken();
token = st.nextToken();
if (token.equalsIgnoreCase("true")) {
//Runtime.getRuntime().exec("cp Restart/allSpecies.txt Restart/allSpecies1.txt");
//Runtime.getRuntime().exec("echo >> allSpecies.txt");
restart = true;
}
else if (token.equalsIgnoreCase("false")) {
Runtime.getRuntime().exec("rm Restart/allSpecies.txt");
restart = false;
}
else throw new InvalidSymbolException("UnIdentified Symbol "+token+" after Restart:");
}
else throw new InvalidSymbolException("Can't find Restart!");*/
//line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Database")){//svp
line = ChemParser.readMeaningfulLine(reader);
}
else throw new InvalidSymbolException("Can't find database!");
// if (line.startsWith("PrimaryThermoLibrary")){//svp
// line = ChemParser.readMeaningfulLine(reader);
// else throw new InvalidSymbolException("Can't find primary thermo library!");
/*
* Added by MRH on 15-Jun-2009
* Give user the option to change the maximum carbon, oxygen,
* and/or radical number for all species. These lines will be
* optional in the condition.txt file. Values are hard-
* coded into RMG (in ChemGraph.java), but any user-
* defined input will override these values.
*/
/*
* Moved from before InitialStatus to before PrimaryThermoLibary
* by MRH on 27-Oct-2009
* Overriding default values of maximum number of "X" per
* chemgraph should come before RMG attempts to make any
* chemgraph. The first instance RMG will attempt to make a
* chemgraph is in reading the primary thermo library.
*/
line = readMaxAtomTypes(line,reader);
// if (line.startsWith("MaxCarbonNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxCarbonNumberPerSpecies:"
// int maxCNum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxCarbonNumber(maxCNum);
// System.out.println("Note: Overriding RMG-defined MAX_CARBON_NUM with user-defined value: " + maxCNum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxOxygenNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxOxygenNumberPerSpecies:"
// int maxONum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxOxygenNumber(maxONum);
// System.out.println("Note: Overriding RMG-defined MAX_OXYGEN_NUM with user-defined value: " + maxONum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxRadicalNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxRadicalNumberPerSpecies:"
// int maxRadNum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxRadicalNumber(maxRadNum);
// System.out.println("Note: Overriding RMG-defined MAX_RADICAL_NUM with user-defined value: " + maxRadNum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxSulfurNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxSulfurNumberPerSpecies:"
// int maxSNum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxSulfurNumber(maxSNum);
// System.out.println("Note: Overriding RMG-defined MAX_SULFUR_NUM with user-defined value: " + maxSNum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxSiliconNumber")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxSiliconNumberPerSpecies:"
// int maxSiNum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxSiliconNumber(maxSiNum);
// System.out.println("Note: Overriding RMG-defined MAX_SILICON_NUM with user-defined value: " + maxSiNum);
// line = ChemParser.readMeaningfulLine(reader);
// if (line.startsWith("MaxHeavyAtom")) {
// StringTokenizer st = new StringTokenizer(line);
// String dummyString = st.nextToken(); // This should hold "MaxHeavyAtomPerSpecies:"
// int maxHANum = Integer.parseInt(st.nextToken());
// ChemGraph.setMaxHeavyAtomNumber(maxHANum);
// System.out.println("Note: Overriding RMG-defined MAX_HEAVYATOM_NUM with user-defined value: " + maxHANum);
// line = ChemParser.readMeaningfulLine(reader);
/*
* Read in the Primary Thermo Library
* MRH 7-Jul-2009
*/
if (line.startsWith("PrimaryThermoLibrary:")) {
/*
* MRH 27Feb2010:
* Changing the "read in Primary Thermo Library information" code
* into it's own method.
*
* Other modules (e.g. PopulateReactions) will be utilizing the exact code.
* Rather than copying and pasting code into other modules, just have
* everything call this new method: readAndMakePTL
*/
readAndMakePTL(reader);
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate PrimaryThermoLibrary field");
line = ChemParser.readMeaningfulLine(reader);
/*
* MRH 17-May-2010:
* Added primary transport library field
*/
if (line.toLowerCase().startsWith("primarytransportlibrary")) {
readAndMakePTransL(reader);
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate PrimaryTransportLibrary field.");
line = ChemParser.readMeaningfulLine(reader);
// Extra forbidden structures may be specified after the Primary Thermo Library
if (line.startsWith("ForbiddenStructures:")) {
readExtraForbiddenStructures(reader);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.toLowerCase().startsWith("readrestart")) {
StringTokenizer st = new StringTokenizer(line);
String tempString = st.nextToken(); // "ReadRestart:"
tempString = st.nextToken();
if (tempString.toLowerCase().equals("yes")) {
readrestart = true;
readRestartSpecies();
} else readrestart = false;
line = ChemParser.readMeaningfulLine(reader);
} else throw new InvalidSymbolException("Cannot locate ReadRestart field");
if (line.toLowerCase().startsWith("writerestart")) {
StringTokenizer st = new StringTokenizer(line);
String tempString = st.nextToken(); // "WriteRestart:"
tempString = st.nextToken();
if (tempString.toLowerCase().equals("yes"))
writerestart = true;
else writerestart = false;
line = ChemParser.readMeaningfulLine(reader);
} else throw new InvalidSymbolException("Cannot locate WriteRestart field");
// read temperature model
//gmagoon 10/23/07: modified to handle multiple temperatures; note that this requires different formatting of units in condition.txt
if (line.startsWith("TemperatureModel:")) {
createTModel(line);
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken();
// String modelType = st.nextToken();
// //String t = st.nextToken();
// String unit = st.nextToken();
// unit = ChemParser.removeBrace(unit);
// if (modelType.equals("Constant")) {
// tempList = new LinkedList();
// //read first temperature
// double t = Double.parseDouble(st.nextToken());
// tempList.add(new ConstantTM(t, unit));
// Temperature temp = new Temperature(t, unit);//10/29/07 gmagoon: added this line and next two lines to set Global.lowTemperature and Global.highTemperature
// Global.lowTemperature = (Temperature)temp.clone();
// Global.highTemperature = (Temperature)temp.clone();
// //read remaining temperatures
// while (st.hasMoreTokens()) {
// t = Double.parseDouble(st.nextToken());
// tempList.add(new ConstantTM(t, unit));
// temp = new Temperature(t,unit);//10/29/07 gmagoon: added this line and next two "if" statements to set Global.lowTemperature and Global.highTemperature
// if(temp.getK() < Global.lowTemperature.getK())
// Global.lowTemperature = (Temperature)temp.clone();
// if(temp.getK() > Global.highTemperature.getK())
// Global.highTemperature = (Temperature)temp.clone();
// // Global.temperature = new Temperature(t,unit);
//10/23/07 gmagoon: commenting out; further updates needed to get this to work
//else if (modelType.equals("Curved")) {
// String t = st.nextToken();
// // add reading curved temperature function here
// temperatureModel = new CurvedTM(new LinkedList());
// else {
// throw new InvalidSymbolException("condition.txt: Unknown TemperatureModel = " + modelType);
}
else throw new InvalidSymbolException("condition.txt: can't find TemperatureModel!");
// read in pressure model
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("PressureModel:")) {
createPModel(line);
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken();
// String modelType = st.nextToken();
// //String p = st.nextToken();
// String unit = st.nextToken();
// unit = ChemParser.removeBrace(unit);
// if (modelType.equals("Constant")) {
// presList = new LinkedList();
// //read first pressure
// double p = Double.parseDouble(st.nextToken());
// Pressure pres = new Pressure(p, unit);
// Global.lowPressure = (Pressure)pres.clone();
// Global.highPressure = (Pressure)pres.clone();
// presList.add(new ConstantPM(p, unit));
// //read remaining temperatures
// while (st.hasMoreTokens()) {
// p = Double.parseDouble(st.nextToken());
// presList.add(new ConstantPM(p, unit));
// pres = new Pressure(p, unit);
// if(pres.getBar() < Global.lowPressure.getBar())
// Global.lowPressure = (Pressure)pres.clone();
// if(pres.getBar() > Global.lowPressure.getBar())
// Global.highPressure = (Pressure)pres.clone();
// //Global.pressure = new Pressure(p, unit);
// //10/23/07 gmagoon: commenting out; further updates needed to get this to work
// //else if (modelType.equals("Curved")) {
// // // add reading curved pressure function here
// // pressureModel = new CurvedPM(new LinkedList());
// else {
// throw new InvalidSymbolException("condition.txt: Unknown PressureModel = " + modelType);
}
else throw new InvalidSymbolException("condition.txt: can't find PressureModel!");
// after PressureModel comes an optional line EquationOfState
// if "EquationOfState: Liquid" is found then initial concentrations are assumed to be correct
// if it is omitted, then initial concentrations are normalised to ensure PV=NRT (ideal gas law)
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("EquationOfState")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String eosType = st.nextToken();
if (eosType.equals("Liquid")) {
equationOfState="Liquid";
System.out.println("Equation of state: Liquid. Relying on concentrations in input file to get density correct; not checking PV=NRT");
}
line = ChemParser.readMeaningfulLine(reader);
}
// Read in InChI generation
if (line.startsWith("InChIGeneration:")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String inchiOnOff = st.nextToken().toLowerCase();
if (inchiOnOff.equals("on")) {
Species.useInChI = true;
} else if (inchiOnOff.equals("off")) {
Species.useInChI = false;
}
else throw new InvalidSymbolException("condition.txt: Unknown InChIGeneration flag: " + inchiOnOff);
line = ChemParser.readMeaningfulLine(reader);
}
// Read in Solvation effects
if (line.startsWith("Solvation:")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String solvationOnOff = st.nextToken().toLowerCase();
if (solvationOnOff.equals("on")) {
Species.useSolvation = true;
} else if (solvationOnOff.equals("off")) {
Species.useSolvation = false;
}
else throw new InvalidSymbolException("condition.txt: Unknown solvation flag: " + solvationOnOff);
line = ChemParser.readMeaningfulLine(reader);
}
//line = ChemParser.readMeaningfulLine(reader);//read in reactants or thermo line
// Read in optional QM thermo generation
if (line.startsWith("ThermoMethod:")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String thermoMethod = st.nextToken().toLowerCase();
if (thermoMethod.equals("qm")) {
ChemGraph.useQM = true;
if(st.hasMoreTokens()){//override the default qmprogram ("both") if there are more; current options: "gaussian03" and "mopac" and of course, "both"
QMTP.qmprogram = st.nextToken().toLowerCase();
}
line=ChemParser.readMeaningfulLine(reader);
if(line.startsWith("QMForCyclicsOnly:")){
StringTokenizer st2 = new StringTokenizer(line);
String nameCyc = st2.nextToken();
String option = st2.nextToken().toLowerCase();
if (option.equals("on")) {
ChemGraph.useQMonCyclicsOnly = true;
}
}
else{
System.out.println("condition.txt: Can't find 'QMForCyclicsOnly:' field");
System.exit(0);
}
line=ChemParser.readMeaningfulLine(reader);
if(line.startsWith("MaxRadNumForQM:")){
StringTokenizer st3 = new StringTokenizer(line);
String nameRadNum = st3.nextToken();
Global.maxRadNumForQM = Integer.parseInt(st3.nextToken());
}
else{
System.out.println("condition.txt: Can't find 'MaxRadNumForQM:' field");
System.exit(0);
}
}//otherwise, the flag useQM will remain false by default and the traditional group additivity approach will be used
line = ChemParser.readMeaningfulLine(reader);//read in reactants
}
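/*
 * Hedged illustration (not taken from any actual input file): judging only from the
 * tokens parsed above, a QM ThermoMethod section might look like the sketch below;
 * the specific values shown (gaussian03, on, 5) are illustrative assumptions.
 *   ThermoMethod: QM gaussian03
 *   QMForCyclicsOnly: on
 *   MaxRadNumForQM: 5
 */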
// // Read in Solvation effects
// if (line.startsWith("Solvation:")) {
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken();
// String solvationOnOff = st.nextToken().toLowerCase();
// if (solvationOnOff.equals("on")) {
// Species.useSolvation = true;
// } else if (solvationOnOff.equals("off")) {
// Species.useSolvation = false;
// else throw new InvalidSymbolException("condition.txt: Unknown solvation flag: " + solvationOnOff);
// else throw new InvalidSymbolException("condition.txt: Cannot find solvation flag.");
// read in reactants
//10/4/07 gmagoon: moved to initializeCoreEdgeReactionModel
//LinkedHashSet p_speciesSeed = new LinkedHashSet();//gmagoon 10/4/07: changed to p_speciesSeed
//setSpeciesSeed(p_speciesSeed);//gmagoon 10/4/07: added
LinkedHashMap speciesSet = new LinkedHashMap();
/*
* 7/Apr/2010: MRH
* Neither of these variables are utilized
*/
// LinkedHashMap speciesStatus = new LinkedHashMap();
// int speciesnum = 1;
//System.out.println(line);
if (line.startsWith("InitialStatus")) {
speciesSet = populateInitialStatusListWithReactiveSpecies(reader);
// line = ChemParser.readMeaningfulLine(reader);
// while (!line.equals("END")) {
// StringTokenizer st = new StringTokenizer(line);
// String index = st.nextToken();
// String name = null;
// if (!index.startsWith("(")) name = index;
// else name = st.nextToken();
// //if (restart) name += "("+speciesnum+")";
// // 24Jun2009: MRH
// // Check if the species name begins with a number.
// // If so, terminate the program and inform the user to choose
// // a different name. This is implemented so that the chem.inp
// // file generated will be valid when run in Chemkin
// try {
// int doesNameBeginWithNumber = Integer.parseInt(name.substring(0,1));
// System.out.println("\nA species name should not begin with a number." +
// " Please rename species: " + name + "\n");
// System.exit(0);
// } catch (NumberFormatException e) {
// // We're good
// speciesnum ++;
// if (!(st.hasMoreTokens())) throw new InvalidSymbolException("Couldn't find concentration of species: "+name);
// String conc = st.nextToken();
// double concentration = Double.parseDouble(conc);
// String unit = st.nextToken();
// unit = ChemParser.removeBrace(unit);
// if (unit.equals("mole/l") || unit.equals("mol/l") || unit.equals("mole/liter") || unit.equals("mol/liter")) {
// concentration /= 1000;
// unit = "mol/cm3";
// else if (unit.equals("mole/m3") || unit.equals("mol/m3")) {
// concentration /= 1000000;
// unit = "mol/cm3";
// else if (unit.equals("molecule/cm3") || unit.equals("molecules/cm3")) {
// concentration /= 6.022e23;
// else if (!unit.equals("mole/cm3") && !unit.equals("mol/cm3")) {
// throw new InvalidUnitException("Species Concentration in condition.txt!");
// //GJB to allow "unreactive" species that only follow user-defined library reactions.
// // They will not react according to RMG reaction families
// boolean IsReactive = true;
// boolean IsConstantConcentration = false;
// while (st.hasMoreTokens()) {
// String reactive = st.nextToken().trim();
// if (reactive.equalsIgnoreCase("unreactive"))
// IsReactive = false;
// if (reactive.equalsIgnoreCase("constantconcentration"))
// IsConstantConcentration=true;
// Graph g = ChemParser.readChemGraph(reader);
// ChemGraph cg = null;
// try {
// cg = ChemGraph.make(g);
// catch (ForbiddenStructureException e) {
// System.out.println("Forbidden Structure:\n" + e.getMessage());
// throw new InvalidSymbolException("A species in the input file has a forbidden structure.");
// //System.out.println(name);
// Species species = Species.make(name,cg);
// species.setReactivity(IsReactive); // GJB
// species.setConstantConcentration(IsConstantConcentration);
// speciesSet.put(name, species);
// getSpeciesSeed().add(species);
// double flux = 0;
// int species_type = 1; // reacted species
// SpeciesStatus ss = new SpeciesStatus(species,species_type,concentration,flux);
// speciesStatus.put(species, ss);
// line = ChemParser.readMeaningfulLine(reader);
// ReactionTime initial = new ReactionTime(0,"S");
// //10/23/07 gmagoon: modified for handling multiple temperature, pressure conditions; note: concentration within speciesStatus (and list of conversion values) should not need to be modified for each T,P since this is done within isTPCconsistent in ReactionSystem
// initialStatusList = new LinkedList();
// for (Iterator iter = tempList.iterator(); iter.hasNext(); ) {
// TemperatureModel tm = (TemperatureModel)iter.next();
// for (Iterator iter2 = presList.iterator(); iter2.hasNext(); ){
// PressureModel pm = (PressureModel)iter2.next();
// // LinkedHashMap speStat = (LinkedHashMap)speciesStatus.clone();//10/31/07 gmagoon: trying creating multiple instances of speciesStatus to address issues with concentration normalization (last normalization seems to apply to all)
// Set ks = speciesStatus.keySet();
// LinkedHashMap speStat = new LinkedHashMap();
// for (Iterator iter3 = ks.iterator(); iter3.hasNext();){//11/1/07 gmagoon: perform deep copy; (is there an easier or more elegant way to do this?)
// SpeciesStatus ssCopy = (SpeciesStatus)speciesStatus.get(iter3.next());
// speStat.put(ssCopy.getSpecies(),new SpeciesStatus(ssCopy.getSpecies(),ssCopy.getSpeciesType(),ssCopy.getConcentration(),ssCopy.getFlux()));
// initialStatusList.add(new InitialStatus(speStat,tm.getTemperature(initial),pm.getPressure(initial)));
}
else throw new InvalidSymbolException("condition.txt: can't find InitialStatus!");
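/*
 * Hedged note, inferred from the commented-out parser above and the call to
 * populateInitialStatusListWithReactiveSpecies (not verified against that method):
 * each reactive species entry appears to consist of an optional "(index)" token, a
 * name that must not begin with a digit, a concentration with a braced unit
 * (mol/cm3, mol/l, mol/m3, or molecule/cm3), optional "unreactive" /
 * "ConstantConcentration" flags, and an adjacency-list ChemGraph; the section ends
 * with END.
 */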
// read in inert gas concentration
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("InertGas:")) {
populateInitialStatusListWithInertSpecies(reader);
// line = ChemParser.readMeaningfulLine(reader);
// while (!line.equals("END")) {
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken().trim();
// String conc = st.nextToken();
// double inertConc = Double.parseDouble(conc);
// String unit = st.nextToken();
// unit = ChemParser.removeBrace(unit);
// if (unit.equals("mole/l") || unit.equals("mol/l") || unit.equals("mole/liter") || unit.equals("mol/liter")) {
// inertConc /= 1000;
// unit = "mol/cm3";
// else if (unit.equals("mole/m3") || unit.equals("mol/m3")) {
// inertConc /= 1000000;
// unit = "mol/cm3";
// else if (unit.equals("molecule/cm3") || unit.equals("molecules/cm3")) {
// inertConc /= 6.022e23;
// unit = "mol/cm3";
// else if (!unit.equals("mole/cm3") && !unit.equals("mol/cm3")) {
// throw new InvalidUnitException("Inert Gas Concentration not recognized: " + unit);
// //SystemSnapshot.putInertGas(name,inertConc);
// for(Iterator iter=initialStatusList.iterator();iter.hasNext(); ){//6/23/09 gmagoon: needed to change this to accommodate non-static inertConc
// ((InitialStatus)iter.next()).putInertGas(name,inertConc);
// line = ChemParser.readMeaningfulLine(reader);
}
else throw new InvalidSymbolException("condition.txt: can't find Inert gas concentration!");
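/*
 * Hedged note, inferred from the commented-out parsing above (now handled by
 * populateInitialStatusListWithInertSpecies): each inert gas line appears to give a
 * name, a concentration, and a braced unit (mol/cm3, mol/l, mol/m3, or
 * molecule/cm3), with the section terminated by END.
 */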
// read in spectroscopic data estimator
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("SpectroscopicDataEstimator:")) {
setSpectroscopicDataMode(line);
// StringTokenizer st = new StringTokenizer(line);
// String name = st.nextToken();
// String sdeType = st.nextToken().toLowerCase();
// if (sdeType.equals("frequencygroups") || sdeType.equals("default")) {
// SpectroscopicData.mode = SpectroscopicData.Mode.FREQUENCYGROUPS;
// else if (sdeType.equals("therfit") || sdeType.equals("threefrequencymodel")) {
// SpectroscopicData.mode = SpectroscopicData.Mode.THREEFREQUENCY;
// else if (sdeType.equals("off") || sdeType.equals("none")) {
// SpectroscopicData.mode = SpectroscopicData.Mode.OFF;
// else throw new InvalidSymbolException("condition.txt: Unknown SpectroscopicDataEstimator = " + sdeType);
}
else throw new InvalidSymbolException("condition.txt: can't find SpectroscopicDataEstimator!");
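/*
 * Hedged note, inferred from the commented-out logic above (now handled by
 * setSpectroscopicDataMode): recognized estimator values appear to include
 * "frequencygroups"/"default", "therfit"/"threefrequencymodel", and "off"/"none".
 */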
// pressure dependence and related flags
line = ChemParser.readMeaningfulLine(reader);
if (line.toLowerCase().startsWith("pressuredependence:"))
line = setPressureDependenceOptions(line,reader);
else
throw new InvalidSymbolException("condition.txt: can't find PressureDependence flag!");
// include species (optional)
/*
*
* MRH 3-APR-2010:
* This if statement is no longer necessary and was causing an error
* when the PressureDependence field was set to "off"
*/
// if (!PDepRateConstant.getMode().name().equals("CHEBYSHEV") &&
// !PDepRateConstant.getMode().name().equals("PDEPARRHENIUS"))
// line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("IncludeSpecies")) {
StringTokenizer st = new StringTokenizer(line);
String iS = st.nextToken();
String fileName = st.nextToken();
HashSet includeSpecies = readIncludeSpecies(fileName);
((RateBasedRME)reactionModelEnlarger).addIncludeSpecies(includeSpecies);
line = ChemParser.readMeaningfulLine(reader);
}
// read in finish controller
if (line.startsWith("FinishController")) {
line = ChemParser.readMeaningfulLine(reader);
StringTokenizer st = new StringTokenizer(line);
String index = st.nextToken();
String goal = st.nextToken();
String type = st.nextToken();
TerminationTester tt;
if (type.startsWith("Conversion")) {
LinkedList spc = new LinkedList();
while (st.hasMoreTokens()) {
String name = st.nextToken();
Species spe = (Species)speciesSet.get(name);
if (spe == null) throw new InvalidConversionException("Unknown reactant: " + name);
setLimitingReactantID(spe.getID());
String conv = st.nextToken();
double conversion;
try {
if (conv.endsWith("%")) {
conversion = Double.parseDouble(conv.substring(0,conv.length()-1))/100;
}
else {
conversion = Double.parseDouble(conv);
}
conversionSet[49] = conversion;
}
catch (NumberFormatException e) {
throw new NumberFormatException("wrong number format for conversion in initial condition file!");
}
SpeciesConversion sc = new SpeciesConversion(spe, conversion);
spc.add(sc);
}
tt = new ConversionTT(spc);
}
else if (type.startsWith("ReactionTime")) {
double time = Double.parseDouble(st.nextToken());
String unit = ChemParser.removeBrace(st.nextToken());
ReactionTime rt = new ReactionTime(time, unit);
tt = new ReactionTimeTT(rt);
}
else {
throw new InvalidSymbolException("condition.txt: Unknown FinishController = " + type);
}
line = ChemParser.readMeaningfulLine(reader);
st = new StringTokenizer(line, ":");
String temp = st.nextToken();
String tol = st.nextToken();
try {
if (tol.endsWith("%")) {
tolerance = Double.parseDouble(tol.substring(0,tol.length()-1))/100;
}
else {
tolerance = Double.parseDouble(tol);
}
}
catch (NumberFormatException e) {
throw new NumberFormatException("wrong number format for error tolerance in initial condition file!");
}
ValidityTester vt = null;
if (reactionModelEnlarger instanceof RateBasedRME) vt = new RateBasedVT(tolerance);
else if (reactionModelEnlarger instanceof RateBasedPDepRME) vt = new RateBasedPDepVT(tolerance);
else throw new InvalidReactionModelEnlargerException();
finishController = new FinishController(tt, vt);
}
else throw new InvalidSymbolException("condition.txt: can't find FinishController!");
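/*
 * Hedged illustration (species name and numbers are made up): from the tokens read
 * above, a FinishController section consists of a goal line whose third token must
 * begin with "Conversion" or "ReactionTime", followed by a colon-separated tolerance
 * line, e.g.
 *   FinishController
 *   (1) Goal Conversion: C4H10 90%
 *   (2) Error Tolerance: 0.1
 * Only the third token of the goal line and the numeric fields are actually checked;
 * the surrounding labels shown here are illustrative.
 */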
// read in dynamic simulator
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("DynamicSimulator")) {
StringTokenizer st = new StringTokenizer(line,":");
String temp = st.nextToken();
String simulator = st.nextToken().trim();
//read in non-negative option if it exists: syntax would be something like this: "DynamicSimulator: DASSL: non-negative"
if (st.hasMoreTokens()){
if (st.nextToken().trim().toLowerCase().equals("non-negative")){
if(simulator.toLowerCase().equals("dassl")) JDAS.nonnegative = true;
else{
System.err.println("Non-negative option is currently only supported for DASSL. Switch to DASSL solver or remove non-negative option.");
System.exit(0);
}
}
}
numConversions = 0;//5/6/08 gmagoon: moved declaration from initializeReactionSystem() to be an attribute so it can be accessed by modelGenerator()
//int numConversions = 0;
boolean autoflag = false;//5/2/08 gmagoon: updating the following if/else-if block to consider input where we want to check model validity within the ODE solver at each time step; this will be indicated by the use of a string beginning with "AUTO" after the "TimeStep" or "Conversions" line
// read in time step
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("TimeStep:") && finishController.terminationTester instanceof ReactionTimeTT) {
st = new StringTokenizer(line);
temp = st.nextToken();
while (st.hasMoreTokens()) {
temp = st.nextToken();
if (temp.startsWith("AUTO")){//note potential opportunity for making case insensitive by using: temp.toUpperCase().startsWith("AUTO")
autoflag=true;
}
else if (!autoflag){//use "else if" to make sure additional numbers are not read in case numbers are erroneously used following AUTO; note that there could still be a problem if numbers come before "AUTO"
double tStep = Double.parseDouble(temp);
String unit = "sec";
setTimeStep(new ReactionTime(tStep, unit));
}
}
((ReactionTimeTT)finishController.terminationTester).setTimeSteps(timeStep);
}
else if (line.startsWith("Conversions:") && finishController.terminationTester instanceof ConversionTT){
st = new StringTokenizer(line);
temp = st.nextToken();
int i=0;
SpeciesConversion sc = (SpeciesConversion)((ConversionTT)finishController.terminationTester).speciesGoalConversionSet.get(0);
Species convSpecies = sc.species;
Iterator iter = ((InitialStatus)(initialStatusList.get(0))).getSpeciesStatus();//10/23/07 gmagoon: changed to use first element of initialStatusList, as subsequent operations should not be affected by which one is chosen
double initialConc = 0;
while (iter.hasNext()){
SpeciesStatus sps = (SpeciesStatus)iter.next();
if (sps.species.equals(convSpecies)) initialConc = sps.concentration;
}
while (st.hasMoreTokens()){
temp=st.nextToken();
if (temp.startsWith("AUTO")){
autoflag=true;
}
else if (!autoflag){
double conv = Double.parseDouble(temp);
conversionSet[i] = (1-conv) * initialConc;
i++;
}
}
conversionSet[i] = (1 - conversionSet[49])* initialConc;
numConversions = i+1;
}
else throw new InvalidSymbolException("condition.txt: can't find time step for dynamic simulator!");
if (temp.startsWith("AUTOPRUNE")){//for the AUTOPRUNE case, read in additional lines for termTol and edgeTol
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("TerminationTolerance:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
termTol = Double.parseDouble(st.nextToken());
}
else {
System.out.println("Cannot find TerminationTolerance in condition.txt");
System.exit(0);
}
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("PruningTolerance:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
edgeTol = Double.parseDouble(st.nextToken());
}
else {
System.out.println("Cannot find PruningTolerance in condition.txt");
System.exit(0);
}
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("MinSpeciesForPruning:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
minSpeciesForPruning = Integer.parseInt(st.nextToken());
}
else {
System.out.println("Cannot find MinSpeciesForPruning in condition.txt");
System.exit(0);
}
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("MaxEdgeSpeciesAfterPruning:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
maxEdgeSpeciesAfterPruning = Integer.parseInt(st.nextToken());
}
else {
System.out.println("Cannot find MaxEdgeSpeciesAfterPruning in condition.txt");
System.exit(0);
}
//print header for pruning log (based on restart format)
BufferedWriter bw = null;
try {
File f = new File("Pruning/edgeReactions.txt");
bw = new BufferedWriter(new FileWriter("Pruning/edgeReactions.txt", true));
String EaUnits = ArrheniusKinetics.getEaUnits();
bw.write("UnitsOfEa: " + EaUnits);
bw.newLine();
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
else if (temp.startsWith("AUTO")){//in the non-autoprune case (i.e. original AUTO functionality), we set the new parameters to values that should reproduce original functionality
termTol = tolerance;
edgeTol = 0;
minSpeciesForPruning = 999999;//arbitrarily high number (actually, the value here should not matter, since pruning should not be done)
maxEdgeSpeciesAfterPruning = 999999;
}
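/*
 * Hedged summary of the block above: with AUTOPRUNE the input must also supply
 * TerminationTolerance:, PruningTolerance:, MinSpeciesForPruning:, and
 * MaxEdgeSpeciesAfterPruning: lines (and a Pruning/edgeReactions.txt log header is
 * written); with plain AUTO the same parameters are set so that pruning is
 * effectively disabled (edgeTol = 0, very large species limits).
 */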
// read in atol
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Atol:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
atol = Double.parseDouble(st.nextToken());
}
else throw new InvalidSymbolException("condition.txt: can't find Atol for dynamic simulator!");
// read in rtol
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Rtol:")) {
st = new StringTokenizer(line);
temp = st.nextToken();
String rel_tol = st.nextToken();
if (rel_tol.endsWith("%"))
rtol = Double.parseDouble(rel_tol.substring(0,rel_tol.length()-1));
else
rtol = Double.parseDouble(rel_tol);
}
else throw new InvalidSymbolException("condition.txt: can't find Rtol for dynamic simulator!");
if (simulator.equals("DASPK")) {
paraInfor = 0;//svp
// read in SA
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Error bars")) {//svp
st = new StringTokenizer(line,":");
temp = st.nextToken();
String sa = st.nextToken().trim();
if (sa.compareToIgnoreCase("on")==0) {
paraInfor = 1;
error = true;
}
else if (sa.compareToIgnoreCase("off")==0) {
paraInfor = 0;
error = false;
}
else throw new InvalidSymbolException("condition.txt: can't find error on/off information!");
}
else throw new InvalidSymbolException("condition.txt: can't find SA information!");
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Display sensitivity coefficients")){//svp
st = new StringTokenizer(line,":");
temp = st.nextToken();
String sa = st.nextToken().trim();
if (sa.compareToIgnoreCase("on")==0){
paraInfor = 1;
sensitivity = true;
}
else if (sa.compareToIgnoreCase("off")==0){
if (paraInfor != 1){
paraInfor = 0;
}
sensitivity = false;
}
else throw new InvalidSymbolException("condition.txt: can't find SA on/off information!");
//10/23/07 gmagoon: changed below from dynamicSimulator to dynamicSimulatorList
//6/25/08 gmagoon: changed loop to use i index, and updated DASPK constructor to pass i (mirroring changes to DASSL)
//6/25/08 gmagoon: updated to pass autoflag and validity tester; this requires FinishController block of input file to be present before DynamicSimulator block, but this requirement may have already existed anyway, particularly in construction of conversion/time step lists; *perhaps we should formalize this requirement by checking to make sure validityTester is not null?
for (int i = 0;i < initialStatusList.size();i++) {
dynamicSimulatorList.add(new JDASPK(rtol, atol, 0, (InitialStatus)initialStatusList.get(i), i,finishController.getValidityTester(), autoflag, termTol, tolerance));
}
}
species = new LinkedList();
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Display sensitivity information") ){
line = ChemParser.readMeaningfulLine(reader);
System.out.println(line);
while (!line.equals("END")){
st = new StringTokenizer(line);
String name = st.nextToken();
if (name.toUpperCase().equals("ALL")) ReactionSystem.printAllSens = true; //gmagoon 12/22/09: if the line contains the word "all", turn on the flag to print out sensitivity information for everything
species.add(name);
line = ChemParser.readMeaningfulLine(reader);
}
}
}
else if (simulator.equals("DASSL")) {
//10/23/07 gmagoon: changed below from dynamicSimulator to dynamicSimulatorList
// for (Iterator iter = initialStatusList.iterator(); iter.hasNext(); ) {
// dynamicSimulatorList.add(new JDASSL(rtol, atol, 0, (InitialStatus)iter.next()));
//11/1/07 gmagoon: changed loop to use i index, and updated DASSL constructor to pass i
//5/5/08 gmagoon: updated to pass autoflag and validity tester; this requires FinishController block of input file to be present before DynamicSimulator block, but this requirement may have already existed anyway, particularly in construction of conversion/time step lists; *perhaps we should formalize this requirement by checking to make sure validityTester is not null?
for (int i = 0;i < initialStatusList.size();i++) {
dynamicSimulatorList.add(new JDASSL(rtol, atol, 0, (InitialStatus)initialStatusList.get(i), i, finishController.getValidityTester(), autoflag, termTol, tolerance));
}
}
else if (simulator.equals("Chemkin")) {
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("ReactorType")) {
st = new StringTokenizer(line, ":");
temp = st.nextToken();
String reactorType = st.nextToken().trim();
//10/23/07 gmagoon: changed below from dynamicSimulator to dynamicSimulatorList
for (Iterator iter = initialStatusList.iterator(); iter.hasNext(); ) {
//dynamicSimulatorList.add(new JDASPK(rtol, atol, 0, (InitialStatus)iter.next()));
dynamicSimulatorList.add(new Chemkin(rtol, atol, reactorType));//11/4/07 gmagoon: fixing apparent cut/paste error
}
}
}
else throw new InvalidSymbolException("condition.txt: Unknown DynamicSimulator = " + simulator);
//10/23/07 gmagoon: changed below from dynamicSimulator to dynamicSimulatorList; note: although conversionSet should actually be different for each T,P condition, it will be modified in isTPCconsistent within ReactionSystem
for (Iterator iter = dynamicSimulatorList.iterator(); iter.hasNext(); ) {
double [] cs = conversionSet.clone();//11/1/07 gmagoon: trying to make sure multiple instances of conversionSet are used
((DynamicSimulator)(iter.next())).addConversion(cs, numConversions);
}
}
else throw new InvalidSymbolException("condition.txt: can't find DynamicSimulator!");
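/*
 * Hedged illustration (numeric values are illustrative only): from the parsing
 * above, a minimal DASSL section appears to take the form
 *   DynamicSimulator: DASSL
 *   Conversions: AUTO        (or a TimeStep: line for ReactionTime termination)
 *   Atol: 1e-18
 *   Rtol: 1e-8
 * DASPK additionally reads "Error bars" and "Display sensitivity coefficients"
 * lines, and the Chemkin simulator additionally reads a "ReactorType" line.
 */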
// read in reaction model enlarger
/* Read in the Primary Reaction Library
* The user can specify as many PRLs,
* including none, as they like.
*/
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("PrimaryReactionLibrary:")) {
readAndMakePRL(reader);
} else throw new InvalidSymbolException("condition.txt: can't find PrimaryReactionLibrary");
/*
* Added by MRH 12-Jun-2009
*
* The SeedMechanism acts almost exactly as the old
* PrimaryReactionLibrary did. Whatever is in the SeedMechanism
* will be placed in the core at the beginning of the simulation.
* The user can specify as many seed mechanisms as they like, with
* the priority (in the case of duplicates) given to the first
* instance. There is no on/off flag.
*/
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("SeedMechanism:")) {
int numMechs = 0;
line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
String[] tempString = line.split("Name: ");
String name = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("Location: ");
String location = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("GenerateReactions: ");
String generateStr = tempString[tempString.length-1].trim();
boolean generate = true;
if (generateStr.equalsIgnoreCase("yes") ||
generateStr.equalsIgnoreCase("on") ||
generateStr.equalsIgnoreCase("true")){
generate = true;
System.out.println("Will generate cross-reactions between species in seed mechanism " + name);
} else if(generateStr.equalsIgnoreCase("no") ||
generateStr.equalsIgnoreCase("off") ||
generateStr.equalsIgnoreCase("false")) {
generate = false;
System.out.println("Will NOT initially generate cross-reactions between species in seed mechanism "+ name);
System.out.println("This may have unintended consequences");
}
else {
System.err.println("Input file invalid");
System.err.println("Please include a 'GenerateReactions: yes/no' line for seed mechanism "+name);
System.exit(0);
}
String path = System.getProperty("jing.rxn.ReactionLibrary.pathName");
path += "/" + location;
if (numMechs==0) {
setSeedMechanism(new SeedMechanism(name, path, generate));
++numMechs;
}
else {
getSeedMechanism().appendSeedMechanism(name, path, generate);
++numMechs;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (numMechs != 0) System.out.println("Seed Mechanisms in use: " + getSeedMechanism().getName());
else setSeedMechanism(null);
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate SeedMechanism field");
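/*
 * Hedged illustration (the name and location below are placeholder values): each
 * seed mechanism entry parsed above is three lines, and the section is closed by END:
 *   Name: someSeedMechanism
 *   Location: someDirectory
 *   GenerateReactions: yes
 * The location is resolved relative to the jing.rxn.ReactionLibrary.pathName system
 * property.
 */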
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("ChemkinUnits")) {
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Verbose:")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken();
String OnOff = st.nextToken().toLowerCase();
if (OnOff.equals("off")) {
ArrheniusKinetics.setVerbose(false);
} else if (OnOff.equals("on")) {
ArrheniusKinetics.setVerbose(true);
}
line = ChemParser.readMeaningfulLine(reader);
}
/*
* MRH 3MAR2010:
* Adding user option regarding chemkin file
*
* New field: If user would like the empty SMILES string
* printed with each species in the thermochemistry portion
* of the generated chem.inp file
*/
if (line.toUpperCase().startsWith("SMILES")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // Should be "SMILES:"
String OnOff = st.nextToken().toLowerCase();
if (OnOff.equals("off")) {
Chemkin.setSMILES(false);
} else if (OnOff.equals("on")) {
Chemkin.setSMILES(true);
/*
* MRH 9MAR2010:
* MRH decided not to generate an InChI for every new species
* during an RMG simulation (especially since it is not used
* for anything). Instead, they will only be generated in the
* post-processing, if the user asked for InChIs.
*/
//Species.useInChI = true;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("A")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // Should be "A:"
String units = st.nextToken();
if (units.equals("moles") || units.equals("molecules"))
ArrheniusKinetics.setAUnits(units);
else {
System.err.println("Units for A were not recognized: " + units);
System.exit(0);
}
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate Chemkin units A field.");
line = ChemParser.readMeaningfulLine(reader);
if (line.startsWith("Ea")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // Should be "Ea:"
String units = st.nextToken();
if (units.equals("kcal/mol") || units.equals("cal/mol") ||
units.equals("kJ/mol") || units.equals("J/mol") || units.equals("Kelvins"))
ArrheniusKinetics.setEaUnits(units);
else {
System.err.println("Units for Ea were not recognized: " + units);
System.exit(0);
}
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate Chemkin units Ea field.");
} else throw new InvalidSymbolException("Error reading condition.txt file: "
+ "Could not locate ChemkinUnits field.");
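/*
 * Hedged illustration of the ChemkinUnits section parsed above (Verbose and SMILES
 * lines are optional; A accepts only "moles"/"molecules", Ea only kcal/mol, cal/mol,
 * kJ/mol, J/mol, or Kelvins):
 *   ChemkinUnits
 *   Verbose: off
 *   SMILES: off
 *   A: moles
 *   Ea: kcal/mol
 */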
in.close();
// LinkedList temperatureArray = new LinkedList();
// LinkedList pressureArray = new LinkedList();
// Iterator iterIS = initialStatusList.iterator();
// for (Iterator iter = tempList.iterator(); iter.hasNext(); ) {
// TemperatureModel tm = (TemperatureModel)iter.next();
// for (Iterator iter2 = presList.iterator(); iter2.hasNext(); ){
// PressureModel pm = (PressureModel)iter2.next();
// InitialStatus is = (InitialStatus)iterIS.next();
// temperatureArray.add(tm.getTemperature(is.getTime()));
// pressureArray.add(pm.getPressure(is.getTime()));
// PDepNetwork.setTemperatureArray(temperatureArray);
// PDepNetwork.setPressureArray(pressureArray);
//10/4/07 gmagoon: moved to modelGeneration()
//ReactionGenerator p_reactionGenerator = new TemplateReactionGenerator();//10/4/07 gmagoon: changed to p_reactionGenerator from reactionGenerator
// setReactionGenerator(p_reactionGenerator);//10/4/07 gmagoon: added
/*
* MRH 12-Jun-2009
* A TemplateReactionGenerator now requires a Temperature be passed to it.
* This allows RMG to determine the "best" kinetic parameters to use
* in the mechanism generation. For now, I choose to pass the first
* temperature in the list of temperatures. RMG only outputs one mechanism,
* even for multiple temperature/pressure systems, so we can only have one
* set of kinetics.
*/
Temperature t = new Temperature(300,"K");
for (Iterator iter = tempList.iterator(); iter.hasNext();) {
TemperatureModel tm = (TemperatureModel)iter.next();
t = tm.getTemperature(new ReactionTime(0,"sec"));
setTemp4BestKinetics(t);
break;
}
setReactionGenerator(new TemplateReactionGenerator()); //11/4/07 gmagoon: moved from modelGeneration; mysteriously, moving this later moves "Father" lines up in output at runtime, immediately after condition file (as in original code); previously, these Father lines were just before "Can't read primary reaction library files!"
lrg = new LibraryReactionGenerator();//10/10/07 gmagoon: moved from modelGeneration (sequence lrg increases species id, and the different sequence was causing problems as main species id was 6 instead of 1); //10/31/07 gmagoon: restored this line from 10/10/07 backup: somehow it got lost along the way; 11/5/07 gmagoon: changed to use "lrg =" instead of setLibraryReactionGenerator
//10/24/07 gmagoon: updated to use multiple reactionSystem variables
reactionSystemList = new LinkedList();
// LinkedList temperatureArray = new LinkedList();//10/30/07 gmagoon: added temperatureArray variable for passing to PDepNetwork; 11/6/07 gmagoon: moved before initialization of lrg;
Iterator iter3 = initialStatusList.iterator();
Iterator iter4 = dynamicSimulatorList.iterator();
int i = 0;//10/30/07 gmagoon: added
for (Iterator iter = tempList.iterator(); iter.hasNext(); ) {
TemperatureModel tm = (TemperatureModel)iter.next();
//InitialStatus is = (InitialStatus)iter3.next();//10/31/07 gmagoon: fixing apparent bug by moving these inside inner "for loop"
//DynamicSimulator ds = (DynamicSimulator)iter4.next();
for (Iterator iter2 = presList.iterator(); iter2.hasNext(); ){
PressureModel pm = (PressureModel)iter2.next();
InitialStatus is = (InitialStatus)iter3.next();//10/31/07 gmagoon: moved from outer "for loop""
DynamicSimulator ds = (DynamicSimulator)iter4.next();
// temperatureArray.add(tm.getTemperature(is.getTime()));//10/30/07 gmagoon: added; //10/31/07 added .getTemperature(is.getTime()); 11/6/07 gmagoon: moved before initialization of lrg;
//11/1/07 gmagoon: trying to make a deep copy of terminationTester when it is instance of ConversionTT
// TerminationTester termTestCopy;
// if (finishController.getTerminationTester() instanceof ConversionTT){
// ConversionTT termTest = (ConversionTT)finishController.getTerminationTester();
// LinkedList spcCopy = (LinkedList)(termTest.getSpeciesGoalConversionSetList().clone());
// termTestCopy = new ConversionTT(spcCopy);
// else{
// termTestCopy = finishController.getTerminationTester();
FinishController fc = new FinishController(finishController.getTerminationTester(), finishController.getValidityTester());//10/31/07 gmagoon: changed to create new finishController instance in each case (apparently, the finish controller becomes associated with reactionSystem in setFinishController within ReactionSystem); alternatively, could use clone, but might need to change FinishController to be "cloneable"
// FinishController fc = new FinishController(termTestCopy, finishController.getValidityTester());
reactionSystemList.add(new ReactionSystem(tm, pm, reactionModelEnlarger, fc, ds, getPrimaryReactionLibrary(), getReactionGenerator(), getSpeciesSeed(), is, getReactionModel(),lrg, i, equationOfState));
i++;//10/30/07 gmagoon: added
System.out.println("Created reaction system "+i+"\n");
}
}
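/*
 * Hedged note: the nested loops above build one ReactionSystem per
 * (TemperatureModel, PressureModel) pair, consuming initialStatusList and
 * dynamicSimulatorList in the same order, so those lists are expected to hold one
 * entry per temperature/pressure combination.
 */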
// PDepNetwork.setTemperatureArray(temperatureArray);//10/30/07 gmagoon: passing temperatureArray to PDepNetwork; 11/6/07 gmagoon: moved before initialization of lrg;
}
catch (IOException e) {
System.err.println("Error reading reaction system initialization file.");
throw new IOException("Input file error: " + e.getMessage());
}
}
public void setReactionModel(ReactionModel p_ReactionModel) {
reactionModel = p_ReactionModel;
}
public void modelGeneration() {
//long begin_t = System.currentTimeMillis();
try{
ChemGraph.readForbiddenStructure();
setSpeciesSeed(new LinkedHashSet());//10/4/07 gmagoon moved from initializeCoreEdgeReactionModel
// setReactionGenerator(new TemplateReactionGenerator());//10/4/07 gmagoon: moved inside initializeReactionSystem; 11/3-4/07 gmagoon: probably reverted on or before 10/10/07 (although I have not investigated this change in detail); //11/4/07 gmagoon: moved inside initializeReactionSystems
// setLibraryReactionGenerator(new LibraryReactionGenerator());//10/10/07 gmagoon: moved after initializeReactionSystem
// initializeCoreEdgeReactionModel();//10/4/07 gmagoon moved from below to run initializeCoreEdgeReactionModel before initializeReactionSystem; 11/3-4/07 gmagoon: probably reverted on or before 10/10/07
initializeReactionSystems();
}
catch (IOException e) {
System.err.println(e.getMessage());
System.exit(0);
}
catch (InvalidSymbolException e) {
System.err.println(e.getMessage());
System.exit(0);
}
//10/31/07 gmagoon: initialize validList (to false) before initializeCoreEdgeReactionModel is called
validList = new LinkedList();
for (Integer i = 0; i<reactionSystemList.size();i++) {
validList.add(false);
}
initializeCoreEdgeReactionModel();//10/4/07 gmagoon: moved before initializeReactionSystem; 11/3-4/07 gmagoon: probably reverted on or before 10/10/07
//10/24/07 gmagoon: changed to use reactionSystemList
// LinkedList initList = new LinkedList();//10/25/07 gmagoon: moved these variables to apply to entire class
// LinkedList beginList = new LinkedList();
// LinkedList endList = new LinkedList();
// LinkedList lastTList = new LinkedList();
// LinkedList currentTList = new LinkedList();
// LinkedList lastPList = new LinkedList();
// LinkedList currentPList = new LinkedList();
// LinkedList conditionChangedList = new LinkedList();
// LinkedList reactionChangedList = new LinkedList();
//5/6/08 gmagoon: determine whether there are intermediate time/conversion steps, type of termination tester is based on characteristics of 1st reaction system (it is assumed that they are all identical in terms of type of termination tester)
boolean intermediateSteps = true;
ReactionSystem rs0 = (ReactionSystem)reactionSystemList.get(0);
if (rs0.finishController.terminationTester instanceof ReactionTimeTT){
if (timeStep == null){
intermediateSteps = false;
}
}
else if (numConversions==1){ //if we get to this block, we presumably have a conversion terminationTester; this required moving numConversions to be attribute...alternative to using numConversions is to access one of the DynamicSimulators and determine conversion length
intermediateSteps=false;
}
//10/24/07 gmagoon: note: each element of for loop could be done in parallel if desired; some modifications would be needed
for (Iterator iter = reactionSystemList.iterator(); iter.hasNext(); ) {
ReactionSystem rs = (ReactionSystem)iter.next();
if ((reactionModelEnlarger instanceof RateBasedPDepRME)) {//1/2/09 gmagoon and rwest: only call initializePDepNetwork for P-dep cases
rs.initializePDepNetwork();
}
ReactionTime init = rs.getInitialReactionTime();
initList.add(init);
ReactionTime begin = init;
beginList.add(begin);
ReactionTime end;
if (rs.finishController.terminationTester instanceof ReactionTimeTT){
//5/5/08 gmagoon: added below if statement to avoid null pointer exception in cases where there are no intermediate time steps specified
if (!(timeStep==null)){
end = (ReactionTime)timeStep.get(0);
}
else{
end= ((ReactionTimeTT)rs.finishController.terminationTester).finalTime;
}
//end = (ReactionTime)timeStep.get(0);
endList.add(end);
}
else{
end = new ReactionTime(1e6,"sec");
endList.add(end);
}
// int iterationNumber = 1;
lastTList.add(rs.getTemperature(init));
currentTList.add(rs.getTemperature(init));
lastPList.add(rs.getPressure(init));
currentPList.add(rs.getPressure(init));
conditionChangedList.add(false);
reactionChangedList.add(false);//10/31/07 gmagoon: added
//Chemkin.writeChemkinInputFile(reactionSystem.getReactionModel(),reactionSystem.getPresentStatus());
}
int iterationNumber = 1;
LinkedList terminatedList = new LinkedList();//10/24/07 gmagoon: this may not be necessary, as if one reactionSystem is terminated, I think all should be terminated
//validList = new LinkedList();//10/31/07 gmagoon: moved before initializeCoreEdgeReactionModel
//10/24/07 gmagoon: initialize allTerminated and allValid to true; these variables keep track of whether all the reactionSystem variables satisfy termination and validity, respectively
boolean allTerminated = true;
boolean allValid = true;
// IF RESTART IS TURNED ON
// Update the systemSnapshot for each ReactionSystem in the reactionSystemList
if (readrestart) {
for (Integer i=0; i<reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
InitialStatus is = rs.getInitialStatus();
putRestartSpeciesInInitialStatus(is,i);
rs.appendUnreactedSpeciesStatus((InitialStatus)initialStatusList.get(i), rs.getPresentTemperature());
}
}
//10/24/07 gmagoon: note: each element of for loop could be done in parallel if desired; some modifications would be needed
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
ReactionTime begin = (ReactionTime)beginList.get(i);
ReactionTime end = (ReactionTime)endList.get(i);
endList.set(i,rs.solveReactionSystem(begin, end, true, true, true, iterationNumber-1));
Chemkin.writeChemkinInputFile(rs);
boolean terminated = rs.isReactionTerminated();
terminatedList.add(terminated);
if(!terminated)
allTerminated = false;
boolean valid = rs.isModelValid();
//validList.add(valid);
validList.set(i, valid);//10/31/07 gmagoon: validList initialization moved before initializeCoreEdgeReactionModel
if(!valid)
allValid = false;
reactionChangedList.set(i,false);
}
//9/1/09 gmagoon: if we are using QM, output a file with the CHEMKIN name, the RMG name, the (modified) InChI, and the (modified) InChIKey
if (ChemGraph.useQM){
writeInChIs(getReactionModel());
}
writeDictionary(getReactionModel());
//System.exit(0);
System.out.println("The model core has " + ((CoreEdgeReactionModel)getReactionModel()).getReactedReactionSet().size() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
System.out.println("The model edge has " + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().size() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().size() + " species.");
StringBuilder print_info = Global.diagnosticInfo;
print_info.append("\nMolecule \t Flux\t\tTime\t \t\t \t Core \t \t Edge \t \t memory\n");
print_info.append(" \t molecular \t characteristic \t findspecies \t moveUnreactedToReacted \t enlarger \t restart1 \t totalEnlarger \t resetSystem \t readSolverFile\t writeSolverFile \t justSolver \t SolverIterations \t solverSpeciesStatus \t Totalsolver \t gc \t restart+diagnosis \t chemkin thermo \t chemkin reactions \t validitytester \t Species \t Reactions\t Species\t Reactions \t memory used \t allSpecies \t TotalTime \t findRateConstant\t identifyReactedSites \t reactChemGraph \t makespecies\t CheckReverseReaction \t makeTemplateReaction \t getReactionfromStruc \t genReverseFromReac");
print_info.append("\t\t\t\t\t\t\t" + ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size()+ "\t" + ((CoreEdgeReactionModel)getReactionModel()).getReactedReactionSet().size() + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().size() + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSetIncludingReverseSize() + "\t"+Global.makeSpecies+"\n");
double solverMin = 0;
double vTester = 0;
/*if (!restart){
writeRestartFile();
writeCoreReactions();
writeAllReactions();
}*/
//System.exit(0);
SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
System.out.println("Species dictionary size: "+dictionary.size());
double tAtInitialization = Global.tAtInitialization;
//10/24/07: changed to use allTerminated and allValid
// step 2: iteratively grow reaction system
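/*
 * Hedged overview of the loop below: while any reaction system is invalid or has
 * not reached its termination criterion, the model is pruned (AUTO case only),
 * enlarged, re-solved for every system, and re-tested for validity; once all
 * systems are valid, intermediate time/conversion steps (if any) are advanced and
 * termination is re-checked.
 */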
while (!allTerminated || !allValid) {
while (!allValid) {
//writeCoreSpecies();
double pt = System.currentTimeMillis();
//prune the reaction model (this will only do something in the AUTO case)
pruneReactionModel();
garbageCollect();
// ENLARGE THE MODEL!!! (this is where the good stuff happens)
enlargeReactionModel();
double totalEnlarger = (System.currentTimeMillis() - pt)/1000/60;
//PDepNetwork.completeNetwork(reactionSystem.reactionModel.getSpeciesSet());
//10/24/07 gmagoon: changed to use reactionSystemList
if ((reactionModelEnlarger instanceof RateBasedPDepRME)) {//1/2/09 gmagoon and rwest: only call initializePDepNetwork for P-dep cases
for (Iterator iter = reactionSystemList.iterator(); iter.hasNext(); ) {
ReactionSystem rs = (ReactionSystem)iter.next();
rs.initializePDepNetwork();
}
//reactionSystem.initializePDepNetwork();
}
pt = System.currentTimeMillis();
//10/24/07 gmagoon: changed to use reactionSystemList
for (Iterator iter = reactionSystemList.iterator(); iter.hasNext(); ) {
ReactionSystem rs = (ReactionSystem)iter.next();
rs.resetSystemSnapshot();
}
//reactionSystem.resetSystemSnapshot();
double resetSystem = (System.currentTimeMillis() - pt)/1000/60;
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
//reactionChanged = true;
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
reactionChangedList.set(i,true);
// begin = init;
beginList.set(i, (ReactionTime)initList.get(i));
if (rs.finishController.terminationTester instanceof ReactionTimeTT){
//5/5/08 gmagoon: added below if statement to avoid null pointer exception in cases where there are no intermediate time steps specified
if (!(timeStep==null)){
endList.set(i,(ReactionTime)timeStep.get(0));
}
else{
endList.set(i, ((ReactionTimeTT)rs.finishController.terminationTester).finalTime);
}
// endList.set(i, (ReactionTime)timeStep.get(0));
//end = (ReactionTime)timeStep.get(0);
}
else
endList.set(i, new ReactionTime(1e6,"sec"));
//end = new ReactionTime(1e6,"sec");
// iterationNumber = 1;//10/24/07 gmagoon: moved outside of loop
currentTList.set(i,rs.getTemperature((ReactionTime)beginList.get(i)));
currentPList.set(i,rs.getPressure((ReactionTime)beginList.get(i)));
conditionChangedList.set(i,!(((Temperature)currentTList.get(i)).equals((Temperature)lastTList.get(i))) || !(((Pressure)currentPList.get(i)).equals((Pressure)lastPList.get(i))));
//currentT = reactionSystem.getTemperature(begin);
//currentP = reactionSystem.getPressure(begin);
//conditionChanged = (!currentT.equals(lastT) || !currentP.equals(lastP));
}
iterationNumber = 1;
double startTime = System.currentTimeMillis();
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean reactionChanged = (Boolean)reactionChangedList.get(i);
boolean conditionChanged = (Boolean)conditionChangedList.get(i);
ReactionTime begin = (ReactionTime)beginList.get(i);
ReactionTime end = (ReactionTime)endList.get(i);
endList.set(i,rs.solveReactionSystem(begin, end, false, reactionChanged, conditionChanged, iterationNumber-1));
//end = reactionSystem.solveReactionSystem(begin, end, false, reactionChanged, conditionChanged, iterationNumber-1);
}
solverMin = solverMin + (System.currentTimeMillis()-startTime)/1000/60;
startTime = System.currentTimeMillis();
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
Chemkin.writeChemkinInputFile(rs);
//Chemkin.writeChemkinInputFile(reactionSystem);
}
//9/1/09 gmagoon: if we are using QM, output a file with the CHEMKIN name, the RMG name, the (modified) InChI, and the (modified) InChIKey
if (ChemGraph.useQM){
writeInChIs(getReactionModel());
}
writeDictionary(getReactionModel());
double chemkint = (System.currentTimeMillis()-startTime)/1000/60;
if (writerestart) {
/*
* Rename current restart files:
* In the event RMG fails while writing the restart files,
* user won't lose any information
*/
String[] restartFiles = {"Restart/coreReactions.txt", "Restart/coreSpecies.txt",
"Restart/edgeReactions.txt", "Restart/edgeSpecies.txt", "Restart/lindemannReactions.txt",
"Restart/pdepnetworks.txt", "Restart/thirdBodyReactions.txt", "Restart/troeReactions.txt"};
writeBackupRestartFiles(restartFiles);
writeCoreSpecies();
writeCoreReactions();
writeEdgeSpecies();
writeEdgeReactions();
if (PDepNetwork.generateNetworks == true) writePDepNetworks();
/*
* Remove backup restart files from Restart folder
*/
removeBackupRestartFiles(restartFiles);
}
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
System.out.println("For reaction system: "+(i+1)+" out of "+reactionSystemList.size());
System.out.println("At this time: " + ((ReactionTime)endList.get(i)).toString());
Species spe = SpeciesDictionary.getSpeciesFromID(getLimitingReactantID());
double conv = rs.getPresentConversion(spe);
System.out.print("Conversion of " + spe.getName() + " is:");
System.out.println(conv);
}
System.out.println("Running Time is: " + String.valueOf((System.currentTimeMillis()-tAtInitialization)/1000/60) + " minutes.");
System.out.println("The model edge has " + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().size() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().size() + " species.");
//10/24/07 gmagoon: note: all reaction systems should use the same core, but I will display for each reactionSystem for testing purposes:
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
System.out.println("For reaction system: "+(i+1)+" out of "+reactionSystemList.size());
if (rs.getDynamicSimulator() instanceof JDASPK){
JDASPK solver = (JDASPK)rs.getDynamicSimulator();
System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
}
else{
JDASSL solver = (JDASSL)rs.getDynamicSimulator();
System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
}
}
// if (reactionSystem.getDynamicSimulator() instanceof JDASPK){
// JDASPK solver = (JDASPK)reactionSystem.getDynamicSimulator();
// System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
//else{
// JDASSL solver = (JDASSL)reactionSystem.getDynamicSimulator();
// System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
startTime = System.currentTimeMillis();
double mU = memoryUsed();
double gc = (System.currentTimeMillis()-startTime)/1000/60;
startTime = System.currentTimeMillis();
//10/24/07 gmagoon: updating to use reactionSystemList
allValid = true;
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean valid = rs.isModelValid();
if(!valid)
allValid = false;
validList.set(i,valid);
//valid = reactionSystem.isModelValid();
}
vTester = vTester + (System.currentTimeMillis()-startTime)/1000/60;
startTime = System.currentTimeMillis();
writeDiagnosticInfo();
writeEnlargerInfo();
double restart2 = (System.currentTimeMillis()-startTime)/1000/60;
int allSpecies, allReactions;
allSpecies = SpeciesDictionary.getInstance().size();
print_info.append(totalEnlarger + "\t" + resetSystem + "\t" + Global.readSolverFile + "\t" + Global.writeSolverFile + "\t" + Global.solvertime + "\t" + Global.solverIterations + "\t" + Global.speciesStatusGenerator + "\t" + solverMin + "\t" + gc + "\t" + restart2 + "\t" + Global.chemkinThermo + '\t' + Global.chemkinReaction + "\t" + vTester + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size()+ "\t" + ((CoreEdgeReactionModel)getReactionModel()).getReactedReactionSet().size() + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().size() + "\t" + ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSetIncludingReverseSize() + "\t" + mU + "\t" + allSpecies + "\t" + (System.currentTimeMillis()-Global.tAtInitialization)/1000/60 + "\t"+ String.valueOf(Global.RT_findRateConstant)+"\t"+Global.RT_identifyReactedSites+"\t"+Global.RT_reactChemGraph+"\t"+Global.makeSpecies+"\t"+Global.checkReactionReverse+"\t"+Global.makeTR+ "\t" + Global.getReacFromStruc + "\t" + Global.generateReverse+"\n");
}
//5/6/08 gmagoon: in order to handle cases where no intermediate time/conversion steps are used, only evaluate the next block of code when there are intermediate time/conversion steps
double startTime = System.currentTimeMillis();
if(intermediateSteps){
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
reactionChangedList.set(i, false);
//reactionChanged = false;
Temperature currentT = (Temperature)currentTList.get(i);
Pressure currentP = (Pressure)currentPList.get(i);
lastTList.set(i,(Temperature)currentT.clone()) ;
lastPList.set(i,(Pressure)currentP.clone());
//lastT = (Temperature)currentT.clone();
//lastP = (Pressure)currentP.clone();
currentTList.set(i,rs.getTemperature((ReactionTime)beginList.get(i)));
currentPList.set(i,rs.getPressure((ReactionTime)beginList.get(i)));
conditionChangedList.set(i,!(((Temperature)currentTList.get(i)).equals((Temperature)lastTList.get(i))) || !(((Pressure)currentPList.get(i)).equals((Pressure)lastPList.get(i))));
//currentP = reactionSystem.getPressure(begin);
//conditionChanged = (!currentT.equals(lastT) || !currentP.equals(lastP));
beginList.set(i,((SystemSnapshot)(rs.getSystemSnapshotEnd().next())).time);
// begin=((SystemSnapshot)(reactionSystem.getSystemSnapshotEnd().next())).time;
if (rs.finishController.terminationTester instanceof ReactionTimeTT){
if (iterationNumber < timeStep.size()){
endList.set(i,(ReactionTime)timeStep.get(iterationNumber));
//end = (ReactionTime)timeStep.get(iterationNumber);
}
else
endList.set(i, ((ReactionTimeTT)rs.finishController.terminationTester).finalTime);
//end = ((ReactionTimeTT)reactionSystem.finishController.terminationTester).finalTime;
}
else
endList.set(i,new ReactionTime(1e6,"sec"));
//end = new ReactionTime(1e6,"sec");
}
iterationNumber++;
startTime = System.currentTimeMillis();//5/6/08 gmagoon: moved declaration outside of if statement so it can be accessed in subsequent vTester line; previous steps are probably so fast that I could eliminate this line without much effect on normal operation with intermediate steps
//double startTime = System.currentTimeMillis();
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean reactionChanged = (Boolean)reactionChangedList.get(i);
boolean conditionChanged = (Boolean)conditionChangedList.get(i);
ReactionTime begin = (ReactionTime)beginList.get(i);
ReactionTime end = (ReactionTime)endList.get(i);
endList.set(i,rs.solveReactionSystem(begin, end, false, reactionChanged, false, iterationNumber-1));
// end = reactionSystem.solveReactionSystem(begin, end, false, reactionChanged, false, iterationNumber-1);
}
solverMin = solverMin + (System.currentTimeMillis()-startTime)/1000/60;
startTime = System.currentTimeMillis();
//5/6/08 gmagoon: changed to separate validity and termination testing, and termination testing is done last...termination testing should be done even if there are no intermediate conversions; however, validity is guaranteed if there are no intermediate conversions based on previous conditional if statement
allValid = true;
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean valid = rs.isModelValid();
validList.set(i,valid);
if(!valid)
allValid = false;
}
}//5/6/08 gmagoon: end of block for intermediateSteps
allTerminated = true;
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
boolean terminated = rs.isReactionTerminated();
terminatedList.set(i,terminated);
if(!terminated){
allTerminated = false;
System.out.println("Reaction System "+(i+1)+" has not reached its termination criterion");
if (rs.isModelValid()&& runKillableToPreventInfiniteLoop(intermediateSteps, iterationNumber)) {
System.out.println("although it seems to be valid (complete), so it was not interrupted for being invalid.");
System.out.println("This probably means there was an error with the ODE solver, and we risk entering an endless loop.");
System.out.println("Stopping.");
throw new Error("Reaction system failed to reach termination; suspected ODE solver error, stopping to avoid an endless loop.");
}
}
}
// //10/24/07 gmagoon: changed to use reactionSystemList
// allTerminated = true;
// allValid = true;
// for (Integer i = 0; i<reactionSystemList.size();i++) {
// ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
// boolean terminated = rs.isReactionTerminated();
// terminatedList.set(i,terminated);
// if(!terminated)
// allTerminated = false;
// boolean valid = rs.isModelValid();
// validList.set(i,valid);
// if(!valid)
// allValid = false;
// //terminated = reactionSystem.isReactionTerminated();
// //valid = reactionSystem.isModelValid();
//10/24/07 gmagoon: changed to use reactionSystemList, allValid
if (allValid) {
//10/24/07 gmagoon: changed to use reactionSystemList
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
System.out.println("For reaction system: "+(i+1)+" out of "+reactionSystemList.size());
System.out.println("At this reaction time: " + ((ReactionTime)endList.get(i)).toString());
Species spe = SpeciesDictionary.getSpeciesFromID(getLimitingReactantID());
double conv = rs.getPresentConversion(spe);
System.out.print("Conversion of " + spe.getName() + " is:");
System.out.println(conv);
}
//System.out.println("At this time: " + end.toString());
//Species spe = SpeciesDictionary.getSpeciesFromID(1);
//double conv = reactionSystem.getPresentConversion(spe);
//System.out.print("current conversion = ");
//System.out.println(conv);
Runtime runTime = Runtime.getRuntime();
System.out.print("Memory used: ");
System.out.println(runTime.totalMemory());
System.out.print("Free memory: ");
System.out.println(runTime.freeMemory());
//runTime.gc();
/* if we're not calling runTime.gc() then don't bother printing this:
System.out.println("After garbage collection:");
System.out.print("Memory used: ");
System.out.println(runTime.totalMemory());
System.out.print("Free memory: ");
System.out.println(runTime.freeMemory());
*/
//10/24/07 gmagoon: note: all reaction systems should use the same core, but I will display for each reactionSystem for testing purposes:
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
System.out.println("For reaction system: "+(i+1)+" out of "+reactionSystemList.size());
if (rs.getDynamicSimulator() instanceof JDASPK){
JDASPK solver = (JDASPK)rs.getDynamicSimulator();
System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
}
else{
JDASSL solver = (JDASSL)rs.getDynamicSimulator();
System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
System.out.println("(although rs.getReactionModel().getReactionNumber() returns "+rs.getReactionModel().getReactionNumber()+")");
}
}
// if (reactionSystem.getDynamicSimulator() instanceof JDASPK){
// JDASPK solver = (JDASPK)reactionSystem.getDynamicSimulator();
// System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
// else{
// JDASSL solver = (JDASSL)reactionSystem.getDynamicSimulator();
// System.out.println("The model core has " + solver.getReactionSize() + " reactions and "+ ((CoreEdgeReactionModel)getReactionModel()).getReactedSpeciesSet().size() + " species.");
}
vTester = vTester + (System.currentTimeMillis()-startTime)/1000/60;//5/6/08 gmagoon: for case where intermediateSteps = false, this will use startTime declared just before intermediateSteps loop, and will only include termination testing, but no validity testing
}
//System.out.println("Performing model reduction");
if (paraInfor != 0){
System.out.println("Model Generation performed. Now generating sensitivity data.");
//10/24/07 gmagoon: updated to use reactionSystemList
LinkedList dynamicSimulator2List = new LinkedList();
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
//6/25/08 gmagoon: updated to pass index i
//6/25/08 gmagoon: updated to pass (dummy) finishController and autoflag (set to false here);
dynamicSimulator2List.add(new JDASPK(rtol, atol, paraInfor, (InitialStatus)initialStatusList.get(i),i));
//DynamicSimulator dynamicSimulator2 = new JDASPK(rtol, atol, paraInfor, initialStatus);
((DynamicSimulator)dynamicSimulator2List.get(i)).addConversion(((JDASPK)rs.dynamicSimulator).conversionSet, ((JDASPK)rs.dynamicSimulator).conversionSet.length);
//dynamicSimulator2.addConversion(((JDASPK)reactionSystem.dynamicSimulator).conversionSet, ((JDASPK)reactionSystem.dynamicSimulator).conversionSet.length);
rs.setDynamicSimulator((DynamicSimulator)dynamicSimulator2List.get(i));
//reactionSystem.setDynamicSimulator(dynamicSimulator2);
int numSteps = rs.systemSnapshot.size() -1;
rs.resetSystemSnapshot();
beginList.set(i, (ReactionTime)initList.get(i));
//begin = init;
if (rs.finishController.terminationTester instanceof ReactionTimeTT){
endList.set(i,((ReactionTimeTT)rs.finishController.terminationTester).finalTime);
//end = ((ReactionTimeTT)reactionSystem.finishController.terminationTester).finalTime;
}
else{
ReactionTime end = (ReactionTime)endList.get(i);
endList.set(i, end.add(end));
//end = end.add(end);
}
terminatedList.set(i, false);
//terminated = false;
ReactionTime begin = (ReactionTime)beginList.get(i);
ReactionTime end = (ReactionTime)endList.get(i);
rs.solveReactionSystemwithSEN(begin, end, true, false, false);
//reactionSystem.solveReactionSystemwithSEN(begin, end, true, false, false);
}
}
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
Chemkin.writeChemkinInputFile(getReactionModel(),rs.getPresentStatus());
}
//9/1/09 gmagoon: if we are using QM, output a file with the CHEMKIN name, the RMG name, the (modified) InChI, and the (modified) InChIKey
if (ChemGraph.useQM){
writeInChIs(getReactionModel());
}
writeDictionary(getReactionModel());
System.out.println("Model Generation Completed");
return;
}
//9/1/09 gmagoon: this function writes a "dictionary" with Chemkin name, RMG name, (modified) InChI, and InChIKey
//this is based off of writeChemkinFile in ChemkinInputFile.java
private void writeInChIs(ReactionModel p_reactionModel) {
StringBuilder result=new StringBuilder();
for (Iterator iter = ((CoreEdgeReactionModel)p_reactionModel).core.getSpecies(); iter.hasNext(); ) {
Species species = (Species) iter.next();
result.append(species.getChemkinName() + "\t"+species.getName() + "\t" + species.getChemGraph().getModifiedInChIAnew() + "\t" + species.getChemGraph().getModifiedInChIKeyAnew()+ "\n");
}
String file = "inchiDictionary.txt";
try {
FileWriter fw = new FileWriter(file);
fw.write(result.toString());
fw.close();
}
catch (Exception e) {
System.out.println("Error in writing InChI file inchiDictionary.txt!");
System.out.println(e.getMessage());
System.exit(0);
}
}
//9/14/09 gmagoon: function to write dictionary, based on code copied from RMG.java
private void writeDictionary(ReactionModel rm){
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)rm;
//Write core species to RMG_Dictionary.txt
StringBuilder coreSpecies = new StringBuilder();
Iterator iter = cerm.getSpecies();
if (Species.useInChI) {
while (iter.hasNext()){
int i=1;
Species spe = (Species) iter.next();
coreSpecies.append(spe.getChemkinName() + " " + spe.getInChI() + "\n" + spe.getChemGraph().toString(i) + "\n\n");
}
} else {
while (iter.hasNext()){
int i=1;
Species spe = (Species) iter.next();
coreSpecies.append(spe.getChemkinName() + "\n" + spe.getChemGraph().toString(i) + "\n\n");
}
}
try{
File rmgDictionary = new File("RMG_Dictionary.txt");
FileWriter fw = new FileWriter(rmgDictionary);
fw.write(coreSpecies.toString());
fw.close();
}
catch (IOException e) {
System.out.println("Could not write RMG_Dictionary.txt");
System.exit(0);
}
// If we have solvation on, then every time we write the dictionary, also write the solvation properties
if (Species.useSolvation) {
writeSolvationProperties(rm);
}
}
private void writeSolvationProperties(ReactionModel rm){
//Write core species to RMG_Solvation_Properties.txt
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)rm;
StringBuilder result = new StringBuilder();
result.append("ChemkinName\tChemicalFormula\tMolecularWeight\tRadius\tDiffusivity\tAbrahamS\tAbrahamB\tAbrahamE\tAbrahamL\tAbrahamA\tChemkinName\n\n");
Iterator iter = cerm.getSpecies();
while (iter.hasNext()){
Species spe = (Species)iter.next();
result.append(spe.getChemkinName() + "\t");
result.append(spe.getChemGraph().getChemicalFormula()+ "\t");
result.append(spe.getMolecularWeight() + "\t");
result.append(spe.getChemGraph().getRadius()+ "\t");
result.append(spe.getChemGraph().getDiffusivity()+ "\t");
result.append(spe.getChemGraph().getAbramData().toString()+ "\t");
result.append(spe.getChemkinName() + "\n");
}
try{
File rmgSolvationProperties = new File("RMG_Solvation_Properties.txt");
FileWriter fw = new FileWriter(rmgSolvationProperties);
fw.write(result.toString() );
fw.close();
}
catch (IOException e) {
System.out.println("Could not write RMG_Solvation_Properties.txt");
System.exit(0);
}
}
/*
* MRH 23MAR2010:
* Commenting out deprecated parseRestartFiles method
*/
// private void parseRestartFiles() {
// parseAllSpecies();
// parseCoreSpecies();
// parseEdgeSpecies();
// parseAllReactions();
// parseCoreReactions();
/*
* MRH 23MAR2010:
* Commenting out deprecated parseEdgeReactions method
*/
// private void parseEdgeReactions() {
// SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
// //HasMap speciesMap = dictionary.dictionary;
// try{
// File coreReactions = new File("Restart/edgeReactions.txt");
// FileReader fr = new FileReader(coreReactions);
// BufferedReader reader = new BufferedReader(fr);
// String line = ChemParser.readMeaningfulLine(reader);
// boolean found = false;
// LinkedHashSet reactionSet = new LinkedHashSet();
// while (line != null){
// Reaction reaction = ChemParser.parseEdgeArrheniusReaction(dictionary,line,1,1);
// boolean added = reactionSet.add(reaction);
// if (!added){
// if (reaction.hasResonanceIsomerAsReactant()){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"reactants", reactionSet);
// if (reaction.hasResonanceIsomerAsProduct() && !found){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"products", reactionSet);
// if (!found){
// System.out.println("Cannot add reaction "+line+" to the Reaction Edge. All resonance isomers have already been added");
// System.exit(0);
// else found = false;
// //Reaction reverse = reaction.getReverseReaction();
// //if (reverse != null) reactionSet.add(reverse);
// line = ChemParser.readMeaningfulLine(reader);
// ((CoreEdgeReactionModel)getReactionModel()).addReactionSet(reactionSet);
// catch (IOException e){
// System.out.println("Could not read the corespecies restart file");
// System.exit(0);
/*
* MRH 23MAR2010:
* Commenting out deprecated parseAllSpecies method
*/
// public void parseCoreReactions() {
// SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
// int i=1;
// //HasMap speciesMap = dictionary.dictionary;
// try{
// File coreReactions = new File("Restart/coreReactions.txt");
// FileReader fr = new FileReader(coreReactions);
// BufferedReader reader = new BufferedReader(fr);
// String line = ChemParser.readMeaningfulLine(reader);
// boolean found = false;
// LinkedHashSet reactionSet = new LinkedHashSet();
// while (line != null){
// Reaction reaction = ChemParser.parseCoreArrheniusReaction(dictionary,line,1,1);//,((CoreEdgeReactionModel)reactionSystem.reactionModel));
// boolean added = reactionSet.add(reaction);
// if (!added){
// if (reaction.hasResonanceIsomerAsReactant()){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"reactants", reactionSet);
// if (reaction.hasResonanceIsomerAsProduct() && !found){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"products", reactionSet);
// if (!found){
// System.out.println("Cannot add reaction "+line+" to the Reaction Core. All resonance isomers have already been added");
// //System.exit(0);
// else found = false;
// Reaction reverse = reaction.getReverseReaction();
// if (reverse != null) {
// reactionSet.add(reverse);
// //System.out.println(2 + "\t " + line);
// //else System.out.println(1 + "\t" + line);
// line = ChemParser.readMeaningfulLine(reader);
// i=i+1;
// ((CoreEdgeReactionModel)getReactionModel()).addReactedReactionSet(reactionSet);
// catch (IOException e){
// System.out.println("Could not read the coreReactions restart file");
// System.exit(0);
/*
* MRH 23MAR2010:
* Commenting out deprecated parseAllSpecies method
*/
// private void parseAllReactions() {
// SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
// int i=1;
// //HasMap speciesMap = dictionary.dictionary;
// try{
// File allReactions = new File("Restart/allReactions.txt");
// FileReader fr = new FileReader(allReactions);
// BufferedReader reader = new BufferedReader(fr);
// String line = ChemParser.readMeaningfulLine(reader);
// boolean found = false;
// LinkedHashSet reactionSet = new LinkedHashSet();
// OuterLoop:
// while (line != null){
// Reaction reaction = ChemParser.parseArrheniusReaction(dictionary,line,1,1,((CoreEdgeReactionModel)getReactionModel()));
// if (((CoreEdgeReactionModel)getReactionModel()).categorizeReaction(reaction)==-1){
// boolean added = reactionSet.add(reaction);
// if (!added){
// found = false;
// if (reaction.hasResonanceIsomerAsReactant()){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"reactants", reactionSet);
// if (reaction.hasResonanceIsomerAsProduct() && !found){
// //Structure reactionStructure = reaction.getStructure();
// found = getResonanceStructure(reaction,"products", reactionSet);
// if (!found){
// Iterator iter = reactionSet.iterator();
// while (iter.hasNext()){
// Reaction reacTemp = (Reaction)iter.next();
// if (reacTemp.equals(reaction)){
// reactionSet.remove(reacTemp);
// reactionSet.add(reaction);
// break;
// //System.out.println("Cannot add reaction "+line+" to the Reaction Core. All resonance isomers have already been added");
// //System.exit(0);
// //else found = false;
// /*Reaction reverse = reaction.getReverseReaction();
// if (reverse != null && ((CoreEdgeReactionModel)reactionSystem.reactionModel).isReactedReaction(reaction)) {
// reactionSet.add(reverse);
// //System.out.println(2 + "\t " + line);
// }*/
// //else System.out.println(1 + "\t" + line);
// i=i+1;
// line = ChemParser.readMeaningfulLine(reader);
// ((CoreEdgeReactionModel)getReactionModel()).addReactionSet(reactionSet);
// catch (IOException e){
// System.out.println("Could not read the corespecies restart file");
// System.exit(0);
private boolean getResonanceStructure(Reaction p_Reaction, String rOrP, LinkedHashSet reactionSet) {
Structure reactionStructure = p_Reaction.getStructure();
//Structure tempreactionStructure = new Structure(reactionStructure.getReactantList(),reactionStructure.getProductList());
boolean found = false;
if (rOrP.equals("reactants")){
Iterator originalreactants = reactionStructure.getReactants();
HashSet tempHashSet = new HashSet();
while(originalreactants.hasNext()){
tempHashSet.add(originalreactants.next());
}
Iterator reactants = tempHashSet.iterator();
while(reactants.hasNext() && !found){
ChemGraph reactant = (ChemGraph)reactants.next();
if (reactant.getSpecies().hasResonanceIsomers()){
Iterator chemGraphIterator = reactant.getSpecies().getResonanceIsomers();
ChemGraph newChemGraph ;//= (ChemGraph)chemGraphIterator.next();
while(chemGraphIterator.hasNext() && !found){
newChemGraph = (ChemGraph)chemGraphIterator.next();
reactionStructure.removeReactants(reactant);
reactionStructure.addReactants(newChemGraph);
reactant = newChemGraph;
if (reactionSet.add(p_Reaction)){
found = true;
}
}
}
}
}
else{
Iterator originalproducts = reactionStructure.getProducts();
HashSet tempHashSet = new HashSet();
while(originalproducts.hasNext()){
tempHashSet.add(originalproducts.next());
}
Iterator products = tempHashSet.iterator();
while(products.hasNext() && !found){
ChemGraph product = (ChemGraph)products.next();
if (product.getSpecies().hasResonanceIsomers()){
Iterator chemGraphIterator = product.getSpecies().getResonanceIsomers();
ChemGraph newChemGraph ;//= (ChemGraph)chemGraphIterator.next();
while(chemGraphIterator.hasNext() && !found){
newChemGraph = (ChemGraph)chemGraphIterator.next();
reactionStructure.removeProducts(product);
reactionStructure.addProducts(newChemGraph);
product = newChemGraph;
if (reactionSet.add(p_Reaction)){
found = true;
}
}
}
}
}
return found;
}
public void parseCoreSpecies() {
// String restartFileContent ="";
//int speciesCount = 0;
//boolean added;
SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
try{
File coreSpecies = new File ("Restart/coreSpecies.txt");
FileReader fr = new FileReader(coreSpecies);
BufferedReader reader = new BufferedReader(fr);
String line = ChemParser.readMeaningfulLine(reader);
//HashSet speciesSet = new HashSet();
// if (reactionSystem == null){//10/24/07 gmagoon: commenting out since contents of if was already commented out anyway
// //ReactionSystem reactionSystem = new ReactionSystem();
setReactionModel(new CoreEdgeReactionModel());//10/4/07 gmagoon:changed to setReactionModel
while (line!=null) {
StringTokenizer st = new StringTokenizer(line);
String index = st.nextToken();
int ID = Integer.parseInt(index);
Species spe = dictionary.getSpeciesFromID(ID);
if (spe == null)
System.out.println("There was no species with ID "+ID +" in the species dictionary");
else
((CoreEdgeReactionModel)getReactionModel()).addReactedSpecies(spe);
line = ChemParser.readMeaningfulLine(reader);
}
}
catch (IOException e){
System.out.println("Could not read the corespecies restart file");
System.exit(0);
}
}
public static void garbageCollect(){
System.gc();
}
public static long memoryUsed(){
garbageCollect();
Runtime rT = Runtime.getRuntime();
long uM, tM, fM;
tM = rT.totalMemory();
fM = rT.freeMemory();
uM = tM - fM;
System.out.println("After garbage collection:");
System.out.print("Memory used: ");
System.out.println(tM);
System.out.print("Free memory: ");
System.out.println(fM);
return uM;
}
private HashSet readIncludeSpecies(String fileName) {
HashSet speciesSet = new HashSet();
try {
File includeSpecies = new File (fileName);
FileReader fr = new FileReader(includeSpecies);
BufferedReader reader = new BufferedReader(fr);
String line = ChemParser.readMeaningfulLine(reader);
while (line!=null) {
StringTokenizer st = new StringTokenizer(line);
String index = st.nextToken();
String name = null;
if (!index.startsWith("(")) name = index;
else name = st.nextToken().trim();
Graph g = ChemParser.readChemGraph(reader);
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
}
catch (ForbiddenStructureException e) {
System.out.println("Forbidden Structure:\n" + e.getMessage());
System.out.println("Included species file "+fileName+" contains a forbidden structure.");
System.exit(0);
}
Species species = Species.make(name,cg);
//speciesSet.put(name, species);
speciesSet.add(species);
line = ChemParser.readMeaningfulLine(reader);
System.out.println(line);
}
}
catch (IOException e){
System.out.println("Could not read the included species file" + fileName);
System.exit(0);
}
return speciesSet;
}
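// Note (descriptive, based on the parsing above): the included-species file is expected to list,
// for each species, a header line of either "name" or "(index) name", followed by an adjacency-list
// graph readable by ChemParser.readChemGraph; a forbidden structure in this file aborts the run.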
/*
* MRH 23MAR2010:
* Commenting out deprecated parseAllSpecies method
*/
// public LinkedHashSet parseAllSpecies() {
// // String restartFileContent ="";
// int speciesCount = 0;
// LinkedHashSet speciesSet = new LinkedHashSet();
// boolean added;
// try{
// long initialTime = System.currentTimeMillis();
// File coreSpecies = new File ("allSpecies.txt");
// BufferedReader reader = new BufferedReader(new FileReader(coreSpecies));
// String line = ChemParser.readMeaningfulLine(reader);
// int i=0;
// while (line!=null) {
// StringTokenizer st = new StringTokenizer(line);
// String index = st.nextToken();
// String name = null;
// if (!index.startsWith("(")) name = index;
// else name = st.nextToken().trim();
// int ID = getID(name);
// name = getName(name);
// Graph g = ChemParser.readChemGraph(reader);
// ChemGraph cg = null;
// try {
// cg = ChemGraph.make(g);
// catch (ForbiddenStructureException e) {
// System.out.println("Forbidden Structure:\n" + e.getMessage());
// System.exit(0);
// Species species;
// if (ID == 0)
// species = Species.make(name,cg);
// else
// species = Species.make(name,cg,ID);
// speciesSet.add(species);
// double flux = 0;
// int species_type = 1;
// line = ChemParser.readMeaningfulLine(reader);
// System.out.println(line);
// catch (IOException e){
// System.out.println("Could not read the allSpecies restart file");
// System.exit(0);
// return speciesSet;
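// Illustrative examples for the two helpers below, which split a species label of the form
// "name(ID)": for a hypothetical label "CH4(5)", getName returns "CH4" and getID returns 5;
// a label without a trailing "(ID)" is returned unchanged by getName, while getID returns 0.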
private String getName(String name) {
// Strip a trailing "(ID)" suffix from a species label; labels without the suffix are returned unchanged.
if (!name.endsWith(")")) return name;
int index = 0;
int i = name.length()-2;
while (i>0){
if (name.charAt(i)== '(') {
index = i;
i = 0;
}
else i = i-1;
}
return name.substring(0,index);
}
private int getID(String name) {
int id;
String number = "";
if (!name.endsWith(")")) return 0;
else {
char [] nameChars = name.toCharArray();
int i=name.length()-2;
//char test = "(";
while (i>0){
if (name.charAt(i)== '(') i=0;
else{
number = name.charAt(i)+number;
i = i-1;
}
}
}
id = Integer.parseInt(number);
return id;
}
/*
* MRH 23MAR2010:
* Commenting out deprecated parseAllSpecies method
*/
// private void parseEdgeSpecies() {
// // String restartFileContent ="";
// SpeciesDictionary dictionary = SpeciesDictionary.getInstance();
// try{
// File edgeSpecies = new File ("Restart/edgeSpecies.txt");
// FileReader fr = new FileReader(edgeSpecies);
// BufferedReader reader = new BufferedReader(fr);
// String line = ChemParser.readMeaningfulLine(reader);
// //HashSet speciesSet = new HashSet();
// while (line!=null) {
// StringTokenizer st = new StringTokenizer(line);
// String index = st.nextToken();
// int ID = Integer.parseInt(index);
// Species spe = dictionary.getSpeciesFromID(ID);
// if (spe == null)
// System.out.println("There was no species with ID "+ID +" in the species dictionary");
// //reactionSystem.reactionModel = new CoreEdgeReactionModel();
// ((CoreEdgeReactionModel)getReactionModel()).addUnreactedSpecies(spe);
// line = ChemParser.readMeaningfulLine(reader);
// catch (IOException e){
// System.out.println("Could not read the edgepecies restart file");
// System.exit(0);
/*private int calculateAllReactionsinReactionTemplate() {
int totalnum = 0;
TemplateReactionGenerator trg = (TemplateReactionGenerator)reactionSystem.reactionGenerator;
Iterator iter = trg.getReactionTemplateLibrary().getReactionTemplate();
while (iter.hasNext()){
ReactionTemplate rt = (ReactionTemplate)iter.next();
totalnum += rt.getNumberOfReactions();
}
return totalnum;
}*/
private void writeEnlargerInfo() {
try {
File diagnosis = new File("enlarger.xls");
FileWriter fw = new FileWriter(diagnosis);
fw.write(Global.enlargerInfo.toString());
fw.close();
}
catch (IOException e) {
System.out.println("Cannot write enlarger file");
System.exit(0);
}
}
private void writeDiagnosticInfo() {
try {
File diagnosis = new File("diagnosis.xls");
FileWriter fw = new FileWriter(diagnosis);
fw.write(Global.diagnosticInfo.toString());
fw.close();
}
catch (IOException e) {
System.out.println("Cannot write diagnosis file");
System.exit(0);
}
}
//10/25/07 gmagoon: I don't think this is used, but I will update to use reactionSystem and reactionTime as parameter to access temperature; commented-out usage of writeRestartFile will need to be modified
//Is still incomplete.
public void writeRestartFile(ReactionSystem p_rs, ReactionTime p_time ) {
//writeCoreSpecies(p_rs);
//writeCoreReactions(p_rs, p_time);
//writeEdgeSpecies();
//writeAllReactions(p_rs, p_time);
//writeEdgeReactions(p_rs, p_time);
//String restartFileName;
//String restartFileContent="";
}
/*
* MRH 25MAR2010
* This method is no longer used
*/
/*Only write the forward reactions in the model core.
The reverse reactions are generated from the forward reactions.*/
//10/25/07 gmagoon: added reaction system and reaction time as parameters and eliminated use of Global.temperature
// private void writeEdgeReactions(ReactionSystem p_rs, ReactionTime p_time) {
// StringBuilder restartFileContent =new StringBuilder();
// int reactionCount = 1;
// try{
// File coreSpecies = new File ("Restart/edgeReactions.txt");
// FileWriter fw = new FileWriter(coreSpecies);
// for(Iterator iter=((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().iterator();iter.hasNext();){
// Reaction reaction = (Reaction) iter.next();
// //if (reaction.getDirection()==1){
// //restartFileContent = restartFileContent + "("+ reactionCount + ") "+species.getChemkinName() + " " + reactionSystem.getPresentConcentration(species) + " (mol/cm3) \n";
// restartFileContent = restartFileContent.append(reaction.toRestartString(p_rs.getTemperature(p_time)) + "\n");
// reactionCount = reactionCount + 1;
// //restartFileContent += "\nEND";
// fw.write(restartFileContent.toString());
// fw.close();
// catch (IOException e){
// System.out.println("Could not write the restart edgereactions file");
// System.exit(0);
/*
* MRH 25MAR2010:
* This method is no longer used
*/
//10/25/07 gmagoon: added reaction system and reaction time as parameters and eliminated use of Global.temperature
// private void writeAllReactions(ReactionSystem p_rs, ReactionTime p_time) {
// StringBuilder restartFileContent = new StringBuilder();
// int reactionCount = 1;
// try{
// File allReactions = new File ("Restart/allReactions.txt");
// FileWriter fw = new FileWriter(allReactions);
// for(Iterator iter=getReactionModel().getReaction();iter.hasNext();){
// Reaction reaction = (Reaction) iter.next();
// //restartFileContent = restartFileContent + "("+ reactionCount + ") "+species.getChemkinName() + " " + reactionSystem.getPresentConcentration(species) + " (mol/cm3) \n";
// restartFileContent = restartFileContent.append(reaction.toRestartString(p_rs.getTemperature(p_time)) + "\n");
// for(Iterator iter=((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().iterator();iter.hasNext();){
// Reaction reaction = (Reaction) iter.next();
// //if (reaction.getDirection()==1){
// //restartFileContent = restartFileContent + "("+ reactionCount + ") "+species.getChemkinName() + " " + reactionSystem.getPresentConcentration(species) + " (mol/cm3) \n";
// restartFileContent = restartFileContent.append(reaction.toRestartString(p_rs.getTemperature(p_time)) + "\n");
// //restartFileContent += "\nEND";
// fw.write(restartFileContent.toString());
// fw.close();
// catch (IOException e){
// System.out.println("Could not write the restart edgereactions file");
// System.exit(0);
private void writeEdgeSpecies() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Restart/edgeSpecies.txt"));
for(Iterator iter=((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().iterator();iter.hasNext();){
Species species = (Species) iter.next();
bw.write(species.getChemkinName());
bw.newLine();
int dummyInt = 0;
bw.write(species.getChemGraph().toStringWithoutH(dummyInt));
bw.newLine();
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
private void writePrunedEdgeSpecies(Species species) {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Pruning/edgeSpecies.txt", true));
bw.write(species.getChemkinName());
bw.newLine();
int dummyInt = 0;
bw.write(species.getChemGraph().toString(dummyInt));
bw.newLine();
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
/*
* MRH 25MAR2010:
* This method is no longer used
*/
//10/25/07 gmagoon: added reaction system and reaction time as parameters and eliminated use of Global.temperature
// private void writeCoreReactions(ReactionSystem p_rs, ReactionTime p_time) {
// StringBuilder restartFileContent = new StringBuilder();
// int reactionCount = 0;
// try{
// File coreSpecies = new File ("Restart/coreReactions.txt");
// FileWriter fw = new FileWriter(coreSpecies);
// for(Iterator iter=getReactionModel().getReaction();iter.hasNext();){
// Reaction reaction = (Reaction) iter.next();
// if (reaction.getDirection()==1){
// //restartFileContent = restartFileContent + "("+ reactionCount + ") "+species.getChemkinName() + " " + reactionSystem.getPresentConcentration(species) + " (mol/cm3) \n";
// restartFileContent = restartFileContent.append(reaction.toRestartString(p_rs.getTemperature(p_time)) + "\n");
// reactionCount = reactionCount + 1;
// //restartFileContent += "\nEND";
// fw.write(restartFileContent.toString());
// fw.close();
// catch (IOException e){
// System.out.println("Could not write the restart corereactions file");
// System.exit(0);
private void writeCoreSpecies() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Restart/coreSpecies.txt"));
for(Iterator iter=getReactionModel().getSpecies();iter.hasNext();){
Species species = (Species) iter.next();
bw.write(species.getChemkinName());
bw.newLine();
int dummyInt = 0;
bw.write(species.getChemGraph().toStringWithoutH(dummyInt));
bw.newLine();
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
private void writeCoreReactions() {
BufferedWriter bw_rxns = null;
BufferedWriter bw_troe = null;
BufferedWriter bw_lindemann = null;
BufferedWriter bw_thirdbody = null;
try {
bw_rxns = new BufferedWriter(new FileWriter("Restart/coreReactions.txt"));
bw_troe = new BufferedWriter(new FileWriter("Restart/troeReactions.txt"));
bw_lindemann = new BufferedWriter(new FileWriter("Restart/lindemannReactions.txt"));
bw_thirdbody = new BufferedWriter(new FileWriter("Restart/thirdBodyReactions.txt"));
String EaUnits = ArrheniusKinetics.getEaUnits();
String AUnits = ArrheniusKinetics.getAUnits();
bw_rxns.write("UnitsOfEa: " + EaUnits);
bw_rxns.newLine();
bw_troe.write("Unit:\nA: mol/cm3/s\nE: " + EaUnits + "\n\nReactions:");
bw_troe.newLine();
bw_lindemann.write("Unit:\nA: mol/cm3/s\nE: " + EaUnits + "\n\nReactions:");
bw_lindemann.newLine();
bw_thirdbody.write("Unit:\nA: mol/cm3/s\nE: " + EaUnits + "\n\nReactions :");
bw_thirdbody.newLine();
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)getReactionModel();
LinkedHashSet allcoreRxns = cerm.core.reaction;
for(Iterator iter=allcoreRxns.iterator(); iter.hasNext();){
Reaction reaction = (Reaction) iter.next();
if (reaction.isForward()) {
if (reaction instanceof TROEReaction) {
TROEReaction troeRxn = (TROEReaction) reaction;
bw_troe.write(troeRxn.toRestartString(new Temperature(298,"K")));
bw_troe.newLine();
}
else if (reaction instanceof LindemannReaction) {
LindemannReaction lindeRxn = (LindemannReaction) reaction;
bw_lindemann.write(lindeRxn.toRestartString(new Temperature(298,"K")));
bw_lindemann.newLine();
}
else if (reaction instanceof ThirdBodyReaction) {
ThirdBodyReaction tbRxn = (ThirdBodyReaction) reaction;
bw_thirdbody.write(tbRxn.toRestartString(new Temperature(298,"K")));
bw_thirdbody.newLine();
}
else {
//bw.write(reaction.toChemkinString(new Temperature(298,"K")));
bw_rxns.write(reaction.toRestartString(new Temperature(298,"K"),false));
bw_rxns.newLine();
}
}
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw_rxns != null) {
bw_rxns.flush();
bw_rxns.close();
}
if (bw_troe != null) {
bw_troe.flush();
bw_troe.close();
}
if (bw_lindemann != null) {
bw_lindemann.flush();
bw_lindemann.close();
}
if (bw_thirdbody != null) {
bw_thirdbody.flush();
bw_thirdbody.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
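// Note (descriptive, based on the writer above): Restart/coreReactions.txt starts with a
// "UnitsOfEa: <units>" header followed by one forward core reaction per line in restart-string
// form; TROE, Lindemann, and third-body reactions are instead written to their own Restart files,
// each headed by a "Unit:" block.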
private void writeEdgeReactions() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Restart/edgeReactions.txt"));
String EaUnits = ArrheniusKinetics.getEaUnits();
bw.write("UnitsOfEa: " + EaUnits);
bw.newLine();
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)getReactionModel();
LinkedHashSet alledgeRxns = cerm.edge.reaction;
for(Iterator iter=alledgeRxns.iterator(); iter.hasNext();){
Reaction reaction = (Reaction) iter.next();
if (reaction.isForward()) {
//bw.write(reaction.toChemkinString(new Temperature(298,"K")));
bw.write(reaction.toRestartString(new Temperature(298,"K"),false));
bw.newLine();
} else if (reaction.getReverseReaction().isForward()) {
//bw.write(reaction.getReverseReaction().toChemkinString(new Temperature(298,"K")));
bw.write(reaction.getReverseReaction().toRestartString(new Temperature(298,"K"),false));
bw.newLine();
} else
System.out.println("Could not determine forward direction for following rxn: " + reaction.toString());
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
//gmagoon 4/5/10: based on Mike's writeEdgeReactions
private void writePrunedEdgeReaction(Reaction reaction) {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Pruning/edgeReactions.txt", true));
if (reaction.isForward()) {
bw.write(reaction.toChemkinString(new Temperature(298,"K")));
// bw.write(reaction.toRestartString(new Temperature(298,"K")));
bw.newLine();
} else if (reaction.getReverseReaction().isForward()) {
bw.write(reaction.getReverseReaction().toChemkinString(new Temperature(298,"K")));
//bw.write(reaction.getReverseReaction().toRestartString(new Temperature(298,"K")));
bw.newLine();
} else
System.out.println("Could not determine forward direction for following rxn: " + reaction.toString());
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
private void writePDepNetworks() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter("Restart/pdepnetworks.txt"));
int numFameTemps = PDepRateConstant.getTemperatures().length;
int numFamePress = PDepRateConstant.getPressures().length;
int numChebyTemps = ChebyshevPolynomials.getNT();
int numChebyPress = ChebyshevPolynomials.getNP();
int numPlog = PDepArrheniusKinetics.getNumPressures();
String EaUnits = ArrheniusKinetics.getEaUnits();
bw.write("UnitsOfEa: " + EaUnits);
bw.newLine();
bw.write("NumberOfFameTemps: " + numFameTemps);
bw.newLine();
bw.write("NumberOfFamePress: " + numFamePress);
bw.newLine();
bw.write("NumberOfChebyTemps: " + numChebyTemps);
bw.newLine();
bw.write("NumberOfChebyPress: " + numChebyPress);
bw.newLine();
bw.write("NumberOfPLogs: " + numPlog);
bw.newLine();
bw.newLine();
LinkedList allNets = PDepNetwork.getNetworks();
int netCounter = 0;
for(Iterator iter=allNets.iterator(); iter.hasNext();){
PDepNetwork pdepnet = (PDepNetwork) iter.next();
++netCounter;
bw.write("PDepNetwork #" + netCounter);
bw.newLine();
// Write netReactionList
LinkedList netRxns = pdepnet.getNetReactions();
bw.write("netReactionList:");
bw.newLine();
for (Iterator iter2=netRxns.iterator(); iter2.hasNext();) {
PDepReaction currentPDepRxn = (PDepReaction)iter2.next();
bw.write(currentPDepRxn.toString());
bw.newLine();
bw.write(writeRatesAndParameters(currentPDepRxn,numFameTemps,
numFamePress,numChebyTemps,numChebyPress,numPlog));
PDepReaction currentPDepReverseRxn = currentPDepRxn.getReverseReaction();
bw.write(currentPDepReverseRxn.toString());
bw.newLine();
bw.write(writeRatesAndParameters(currentPDepReverseRxn,numFameTemps,
numFamePress,numChebyTemps,numChebyPress,numPlog));
}
// Write nonincludedReactionList
LinkedList nonIncludeRxns = pdepnet.getNonincludedReactions();
bw.write("nonIncludedReactionList:");
bw.newLine();
for (Iterator iter2=nonIncludeRxns.iterator(); iter2.hasNext();) {
PDepReaction currentPDepRxn = (PDepReaction)iter2.next();
bw.write(currentPDepRxn.toString());
bw.newLine();
bw.write(writeRatesAndParameters(currentPDepRxn,numFameTemps,
numFamePress,numChebyTemps,numChebyPress,numPlog));
PDepReaction currentPDepReverseRxn = currentPDepRxn.getReverseReaction();
bw.write(currentPDepReverseRxn.toString());
bw.newLine();
bw.write(writeRatesAndParameters(currentPDepReverseRxn,numFameTemps,
numFamePress,numChebyTemps,numChebyPress,numPlog));
}
// Write pathReactionList
LinkedList pathRxns = pdepnet.getPathReactions();
bw.write("pathReactionList:");
bw.newLine();
for (Iterator iter2=pathRxns.iterator(); iter2.hasNext();) {
PDepReaction currentPDepRxn = (PDepReaction)iter2.next();
bw.write(currentPDepRxn.getDirection() + "\t" + currentPDepRxn.toChemkinString(new Temperature(298,"K")));
bw.newLine();
}
bw.newLine();
bw.newLine();
}
} catch (FileNotFoundException ex) {
ex.printStackTrace();
} catch (IOException ex) {
ex.printStackTrace();
} finally {
try {
if (bw != null) {
bw.flush();
bw.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
public String writeRatesAndParameters(PDepReaction pdeprxn, int numFameTemps,
int numFamePress, int numChebyTemps, int numChebyPress, int numPlog) {
StringBuilder sb = new StringBuilder();
// Write the rate coefficients
double[][] rateConstants = pdeprxn.getPDepRate().getRateConstants();
for (int i=0; i<numFameTemps; i++) {
for (int j=0; j<numFamePress; j++) {
sb.append(rateConstants[i][j] + "\t");
}
sb.append("\n");
}
sb.append("\n");
// If chebyshev polynomials are present, write them
if (numChebyTemps != 0) {
ChebyshevPolynomials chebyPolys = pdeprxn.getPDepRate().getChebyshev();
for (int i=0; i<numChebyTemps; i++) {
for (int j=0; j<numChebyPress; j++) {
sb.append(chebyPolys.getAlpha(i,j) + "\t");
}
sb.append("\n");
}
sb.append("\n");
}
// If plog parameters are present, write them
else if (numPlog != 0) {
PDepArrheniusKinetics kinetics = pdeprxn.getPDepRate().getPDepArrheniusKinetics();
for (int i=0; i<numPlog; i++) {
double Hrxn = pdeprxn.calculateHrxn(new Temperature(298,"K"));
sb.append(kinetics.pressures[i].getPa() + "\t" + kinetics.getKinetics(i).toChemkinString(Hrxn,new Temperature(298,"K"),false) + "\n");
}
sb.append("\n");
}
return sb.toString();
}
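// Note (descriptive, based on writeRatesAndParameters above): each reaction entry in
// Restart/pdepnetworks.txt is followed by a numFameTemps x numFamePress block of rate
// coefficients, then either a numChebyTemps x numChebyPress block of Chebyshev coefficients or
// numPlog lines of "pressure <tab> Arrhenius kinetics"; readPDepNetworks below parses the same layout.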
public LinkedList getTimeStep() {
return timeStep;
}
public void setTimeStep(ReactionTime p_timeStep) {
if (timeStep == null)
timeStep = new LinkedList();
timeStep.add(p_timeStep);
}
public String getWorkingDirectory() {
return workingDirectory;
}
public void setWorkingDirectory(String p_workingDirectory) {
workingDirectory = p_workingDirectory;
}
//svp
public boolean getError(){
return error;
}
//svp
public boolean getSensitivity(){
return sensitivity;
}
public LinkedList getSpeciesList() {
return species;
}
//gmagoon 10/24/07: commented out getReactionSystem and setReactionSystem
// public ReactionSystem getReactionSystem() {
// return reactionSystem;
//11/2/07 gmagoon: adding accessor method for reactionSystemList
public LinkedList getReactionSystemList(){
return reactionSystemList;
}
//added by gmagoon 9/24/07
// public void setReactionSystem(ReactionSystem p_ReactionSystem) {
// reactionSystem = p_ReactionSystem;
//copied from ReactionSystem.java by gmagoon 9/24/07
public ReactionModel getReactionModel() {
return reactionModel;
}
public void readRestartSpecies() {
System.out.println("Reading in species from Restart folder");
// Read in core species -- NOTE code is almost duplicated in Read in edge species (second part of procedure)
try {
FileReader in = new FileReader("Restart/coreSpecies.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
// The first line of a new species is the user-defined name
String totalSpeciesName = line;
String[] splitString1 = totalSpeciesName.split("[(]");
String[] splitString2 = splitString1[splitString1.length-1].split("[)]");
// The remaining lines are the graph
Graph g = ChemParser.readChemGraph(reader);
// Make the ChemGraph, assuming it does not contain a forbidden structure
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
} catch (ForbiddenStructureException e) {
System.out.println("Error reading graph: Graph contains a forbidden structure.\n" + g.toString());
System.exit(0);
}
// Make the species
int intLocation = totalSpeciesName.indexOf("(" + splitString2[0] + ")");
Species species = Species.make(totalSpeciesName.substring(0,intLocation),cg,Integer.parseInt(splitString2[0]));
// Add the new species to the set of species
restartCoreSpcs.add(species);
/*int species_type = 1; // reacted species
for (int i=0; i<numRxnSystems; i++) {
SpeciesStatus ss = new SpeciesStatus(species,species_type,y[i],yprime[i]);
speciesStatus[i].put(species, ss);
}*/
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
// Read in edge species
try {
FileReader in = new FileReader("Restart/edgeSpecies.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
// The first line of a new species is the user-defined name
String totalSpeciesName = line;
String[] splitString1 = totalSpeciesName.split("[(]");
String[] splitString2 = splitString1[splitString1.length-1].split("[)]"); // Change JDM to reflect MRH 2-11-2010
// The remaining lines are the graph
Graph g = ChemParser.readChemGraph(reader);
// Make the ChemGraph, assuming it does not contain a forbidden structure
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
} catch (ForbiddenStructureException e) {
System.out.println("Error reading graph: Graph contains a forbidden structure.\n" + g.toString());
System.exit(0);
}
// Make the species
Species species = Species.make(splitString1[0],cg,Integer.parseInt(splitString2[0]));
// Add the new species to the set of species
restartEdgeSpcs.add(species);
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
public void readRestartReactions() {
// Grab the IDs from the core species
int[] coreSpcsIds = new int[restartCoreSpcs.size()];
int i = 0;
for (Iterator iter = restartCoreSpcs.iterator(); iter.hasNext();) {
Species spcs = (Species)iter.next();
coreSpcsIds[i] = spcs.getID();
++i;
}
System.out.println("Reading reactions from Restart folder");
// Read in core reactions
try {
FileReader in = new FileReader("Restart/coreReactions.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
// Determine units of Ea
StringTokenizer st = new StringTokenizer(line);
String tempString = st.nextToken();
String EaUnits = st.nextToken();
line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
if (!line.trim().equals("DUP")) {
Reaction r = ChemParser.parseRestartReaction(line,coreSpcsIds,"core",EaUnits);
Iterator rxnIter = restartCoreRxns.iterator();
boolean foundRxn = false;
while (rxnIter.hasNext()) {
Reaction old = (Reaction)rxnIter.next();
if (old.equals(r)) {
old.addAdditionalKinetics(r.getKinetics()[0],1);
foundRxn = true;
break;
}
}
if (!foundRxn) {
if (r.hasReverseReaction()) r.generateReverseReaction();
restartCoreRxns.add(r);
}
}
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
try {
SeedMechanism.readThirdBodyReactions("Restart/thirdBodyReactions.txt");
} catch (IOException e1) {
e1.printStackTrace();
}
try {
SeedMechanism.readLindemannReactions("Restart/lindemannReactions.txt");
} catch (IOException e1) {
e1.printStackTrace();
}
try {
SeedMechanism.readTroeReactions("Restart/troeReactions.txt");
} catch (IOException e1) {
e1.printStackTrace();
}
restartCoreRxns.addAll(SeedMechanism.reactionSet);
// Read in edge reactions
try {
FileReader in = new FileReader("Restart/edgeReactions.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
// Determine units of Ea
StringTokenizer st = new StringTokenizer(line);
String tempString = st.nextToken();
String EaUnits = st.nextToken();
line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
if (!line.trim().equals("DUP")) {
Reaction r = ChemParser.parseRestartReaction(line,coreSpcsIds,"edge",EaUnits);
Iterator rxnIter = restartEdgeRxns.iterator();
boolean foundRxn = false;
while (rxnIter.hasNext()) {
Reaction old = (Reaction)rxnIter.next();
if (old.equals(r)) {
old.addAdditionalKinetics(r.getKinetics()[0],1);
foundRxn = true;
break;
}
}
if (!foundRxn) {
r.generateReverseReaction();
restartEdgeRxns.add(r);
}
}
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
public LinkedHashMap getRestartSpeciesStatus(int i) {
LinkedHashMap speciesStatus = new LinkedHashMap();
try {
FileReader in = new FileReader("Restart/coreSpecies.txt");
BufferedReader reader = new BufferedReader(in);
Integer numRxnSystems = Integer.parseInt(ChemParser.readMeaningfulLine(reader));
String line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
// The first line of a new species is the user-defined name
String totalSpeciesName = line;
String[] splitString1 = totalSpeciesName.split("[(]");
String[] splitString2 = splitString1[1].split("[)]");
double y = 0.0;
double yprime = 0.0;
for (int j=0; j<numRxnSystems; j++) {
StringTokenizer st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
if (j == i) {
y = Double.parseDouble(st.nextToken());
yprime = Double.parseDouble(st.nextToken());
}
}
// The remaining lines are the graph
Graph g = ChemParser.readChemGraph(reader);
// Make the ChemGraph, assuming it does not contain a forbidden structure
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
} catch (ForbiddenStructureException e) {
System.out.println("Error reading graph: Graph contains a forbidden structure.\n" + g.toString());
System.exit(0);
}
// Make the species
Species species = Species.make(splitString1[0],cg);
// Add the new species to the set of species
//restartCoreSpcs.add(species);
int species_type = 1; // reacted species
SpeciesStatus ss = new SpeciesStatus(species,species_type,y,yprime);
speciesStatus.put(species, ss);
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return speciesStatus;
}
public void putRestartSpeciesInInitialStatus(InitialStatus is, int i) {
try {
FileReader in = new FileReader("Restart/coreSpecies.txt");
BufferedReader reader = new BufferedReader(in);
String line = ChemParser.readMeaningfulLine(reader);
while (line != null) {
// The first line of a new species is the user-defined name
String totalSpeciesName = line;
String[] splitString1 = totalSpeciesName.split("[(]");
String[] splitString2 = splitString1[1].split("[)]");
// The remaining lines are the graph
Graph g = ChemParser.readChemGraph(reader);
// Make the ChemGraph, assuming it does not contain a forbidden structure
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
} catch (ForbiddenStructureException e) {
System.out.println("Error reading graph: Graph contains a forbidden structure.\n" + g.toString());
System.exit(0);
}
// Make the species
Species species = Species.make(splitString1[0],cg);
// Add the new species to the set of species
//restartCoreSpcs.add(species);
if (is.getSpeciesStatus(species) == null) {
SpeciesStatus ss = new SpeciesStatus(species,1,0.0,0.0);
is.putSpeciesStatus(ss);
}
line = ChemParser.readMeaningfulLine(reader);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
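// Hedged sketch (not called by the existing code): the Ea unit conversion below in
// readPDepNetworks is repeated inline several times; a helper along these lines could
// consolidate it. The target unit is assumed to be RMG's internal kcal/mol, and the
// "Kelvins" branch simply mirrors the inline code (Ea * R with R = 1.987 cal/mol/K) as written.
private static double convertEaUnits(double Ea, String EaUnits) {
if (EaUnits.equals("cal/mol")) return Ea / 1000; // cal/mol -> kcal/mol
else if (EaUnits.equals("J/mol")) return Ea / 4.184 / 1000; // J/mol -> kcal/mol
else if (EaUnits.equals("kJ/mol")) return Ea / 4.184; // kJ/mol -> kcal/mol
else if (EaUnits.equals("Kelvins")) return Ea * 1.987; // Kelvins branch, as in the inline conversions
return Ea; // assumed to already be in kcal/mol
}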
public void readPDepNetworks() {
SpeciesDictionary sd = SpeciesDictionary.getInstance();
LinkedList allNetworks = PDepNetwork.getNetworks();
try {
FileReader in = new FileReader("Restart/pdepnetworks.txt");
BufferedReader reader = new BufferedReader(in);
StringTokenizer st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
String tempString = st.nextToken();
String EaUnits = st.nextToken();
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numFameTs = Integer.parseInt(st.nextToken());
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numFamePs = Integer.parseInt(st.nextToken());
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numChebyTs = Integer.parseInt(st.nextToken());
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numChebyPs = Integer.parseInt(st.nextToken());
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
tempString = st.nextToken();
int numPlogs = Integer.parseInt(st.nextToken());
double[][] rateCoefficients = new double[numFameTs][numFamePs];
double[][] chebyPolys = new double[numChebyTs][numChebyPs];
Kinetics[] plogKinetics = new Kinetics[numPlogs];
String line = ChemParser.readMeaningfulLine(reader); // line should be "PDepNetwork #_"
while (line != null) {
line = ChemParser.readMeaningfulLine(reader); // line should now be "netReactionList:"
PDepNetwork newNetwork = new PDepNetwork();
LinkedList netRxns = newNetwork.getNetReactions();
LinkedList nonincludeRxns = newNetwork.getNonincludedReactions();
line = ChemParser.readMeaningfulLine(reader); // line is either data or "nonIncludedReactionList"
// If line is "nonincludedreactionlist", we need to skip over this while loop
if (!line.toLowerCase().startsWith("nonincludedreactionlist")) {
while (!line.toLowerCase().startsWith("nonincludedreactionlist")) {
// Read in the forward rxn
String[] reactsANDprods = line.split("\\
PDepIsomer Reactants = null;
String reacts = reactsANDprods[0].trim();
if (reacts.contains("+")) {
String[] indivReacts = reacts.split("[+]");
String name = indivReacts[0].trim();
Species spc1 = sd.getSpeciesFromChemkinName(name);
if (spc1 == null) {
spc1 = getSpeciesBySPCName(name,sd);
}
name = indivReacts[1].trim();
Species spc2 = sd.getSpeciesFromChemkinName(name);
if (spc2 == null) {
spc2 = getSpeciesBySPCName(name,sd);
}
Reactants = new PDepIsomer(spc1,spc2);
} else {
String name = reacts.trim();
Species spc = sd.getSpeciesFromChemkinName(name);
if (spc == null) {
spc = getSpeciesBySPCName(name,sd);
}
Reactants = new PDepIsomer(spc);
}
PDepIsomer Products = null;
String prods = reactsANDprods[1].trim();
if (prods.contains("+")) {
String[] indivProds = prods.split("[+]");
String name = indivProds[0].trim();
Species spc1 = sd.getSpeciesFromChemkinName(name);
if (spc1 == null) {
spc1 = getSpeciesBySPCName(name,sd);
}
name = indivProds[1].trim();
Species spc2 = sd.getSpeciesFromChemkinName(name);
if (spc2 == null) {
spc2 = getSpeciesBySPCName(name,sd);
}
Products = new PDepIsomer(spc1,spc2);
} else {
String name = prods.trim();
Species spc = sd.getSpeciesFromChemkinName(name);
if (spc == null) {
spc = getSpeciesBySPCName(name,sd);
}
Products = new PDepIsomer(spc);
}
newNetwork.addIsomer(Reactants);
newNetwork.addIsomer(Products);
for (int i=0; i<numFameTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numFamePs; j++) {
rateCoefficients[i][j] = Double.parseDouble(st.nextToken());
}
}
PDepRateConstant pdepk = null;
if (numChebyTs > 0) {
for (int i=0; i<numChebyTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numChebyPs; j++) {
chebyPolys[i][j] = Double.parseDouble(st.nextToken());
}
}
ChebyshevPolynomials chebyshev = new ChebyshevPolynomials(numChebyTs,
ChebyshevPolynomials.getTlow(), ChebyshevPolynomials.getTup(),
numChebyPs, ChebyshevPolynomials.getPlow(), ChebyshevPolynomials.getPup(),
chebyPolys);
pdepk = new PDepRateConstant(rateCoefficients,chebyshev);
} else if (numPlogs > 0) {
for (int i=0; i<numPlogs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
Pressure p = new Pressure(Double.parseDouble(st.nextToken()),"Pa");
UncertainDouble dA = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
UncertainDouble dn = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
double Ea = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
Ea = Ea / 1000;
else if (EaUnits.equals("J/mol"))
Ea = Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
Ea = Ea / 4.184;
else if (EaUnits.equals("Kelvins"))
Ea = Ea * 1.987;
UncertainDouble dE = new UncertainDouble(Ea,0.0,"A");
ArrheniusKinetics k = new ArrheniusKinetics(dA, dn, dE, "", 1, "", "");
PDepArrheniusKinetics pdepAK = new PDepArrheniusKinetics(i);
pdepAK.setKinetics(i, p, k);
pdepk = new PDepRateConstant(rateCoefficients,pdepAK);
}
}
PDepReaction forward = new PDepReaction(Reactants, Products, pdepk);
// Read in the reverse reaction
line = ChemParser.readMeaningfulLine(reader);
for (int i=0; i<numFameTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numFamePs; j++) {
rateCoefficients[i][j] = Double.parseDouble(st.nextToken());
}
}
pdepk = null;
if (numChebyTs > 0) {
for (int i=0; i<numChebyTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numChebyPs; j++) {
chebyPolys[i][j] = Double.parseDouble(st.nextToken());
}
}
ChebyshevPolynomials chebyshev = new ChebyshevPolynomials(numChebyTs,
ChebyshevPolynomials.getTlow(), ChebyshevPolynomials.getTup(),
numChebyPs, ChebyshevPolynomials.getPlow(), ChebyshevPolynomials.getPup(),
chebyPolys);
pdepk = new PDepRateConstant(rateCoefficients,chebyshev);
} else if (numPlogs > 0) {
for (int i=0; i<numPlogs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
Pressure p = new Pressure(Double.parseDouble(st.nextToken()),"Pa");
UncertainDouble dA = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
UncertainDouble dn = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
double Ea = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
Ea = Ea / 1000;
else if (EaUnits.equals("J/mol"))
Ea = Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
Ea = Ea / 4.184;
else if (EaUnits.equals("Kelvins"))
Ea = Ea * 1.987;
UncertainDouble dE = new UncertainDouble(Ea,0.0,"A");
ArrheniusKinetics k = new ArrheniusKinetics(dA, dn, dE, "", 1, "", "");
PDepArrheniusKinetics pdepAK = new PDepArrheniusKinetics(i);
pdepAK.setKinetics(i, p, k);
pdepk = new PDepRateConstant(rateCoefficients,pdepAK);
}
}
PDepReaction reverse = new PDepReaction(Products, Reactants, pdepk);
reverse.setReverseReaction(forward);
forward.setReverseReaction(reverse);
netRxns.add(forward);
line = ChemParser.readMeaningfulLine(reader);
}
}
// This loop ends once line == "nonIncludedReactionList"
line = ChemParser.readMeaningfulLine(reader); // line is either data or "pathReactionList"
if (!line.toLowerCase().startsWith("pathreactionList")) {
while (!line.toLowerCase().startsWith("pathreactionlist")) {
// Read in the forward rxn
String[] reactsANDprods = line.split("\\
PDepIsomer Reactants = null;
String reacts = reactsANDprods[0].trim();
if (reacts.contains("+")) {
String[] indivReacts = reacts.split("[+]");
String name = indivReacts[0].trim();
Species spc1 = sd.getSpeciesFromChemkinName(name);
if (spc1 == null) {
spc1 = getSpeciesBySPCName(name,sd);
}
name = indivReacts[1].trim();
Species spc2 = sd.getSpeciesFromChemkinName(name);
if (spc2 == null) {
spc2 = getSpeciesBySPCName(name,sd);
}
Reactants = new PDepIsomer(spc1,spc2);
} else {
String name = reacts.trim();
Species spc = sd.getSpeciesFromChemkinName(name);
if (spc == null) {
spc = getSpeciesBySPCName(name,sd);
}
Reactants = new PDepIsomer(spc);
}
PDepIsomer Products = null;
String prods = reactsANDprods[1].trim();
if (prods.contains("+")) {
String[] indivProds = prods.split("[+]");
String name = indivProds[0].trim();
Species spc1 = sd.getSpeciesFromChemkinName(name);
if (spc1 == null) {
spc1 = getSpeciesBySPCName(name,sd);
}
name = indivProds[1].trim();
Species spc2 = sd.getSpeciesFromChemkinName(name);
if (spc2 == null) {
spc2 = getSpeciesBySPCName(name,sd);
}
Products = new PDepIsomer(spc1,spc2);
} else {
String name = prods.trim();
Species spc = sd.getSpeciesFromChemkinName(name);
if (spc == null) {
spc = getSpeciesBySPCName(name,sd);
}
Products = new PDepIsomer(spc);
}
newNetwork.addIsomer(Reactants);
newNetwork.addIsomer(Products);
for (int i=0; i<numFameTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numFamePs; j++) {
rateCoefficients[i][j] = Double.parseDouble(st.nextToken());
}
}
PDepRateConstant pdepk = null;
if (numChebyTs > 0) {
for (int i=0; i<numChebyTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numChebyPs; j++) {
chebyPolys[i][j] = Double.parseDouble(st.nextToken());
}
}
ChebyshevPolynomials chebyshev = new ChebyshevPolynomials(numChebyTs,
ChebyshevPolynomials.getTlow(), ChebyshevPolynomials.getTup(),
numChebyPs, ChebyshevPolynomials.getPlow(), ChebyshevPolynomials.getPup(),
chebyPolys);
pdepk = new PDepRateConstant(rateCoefficients,chebyshev);
} else if (numPlogs > 0) {
for (int i=0; i<numPlogs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
Pressure p = new Pressure(Double.parseDouble(st.nextToken()),"Pa");
UncertainDouble dA = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
UncertainDouble dn = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
double Ea = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
Ea = Ea / 1000;
else if (EaUnits.equals("J/mol"))
Ea = Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
Ea = Ea / 4.184;
else if (EaUnits.equals("Kelvins"))
Ea = Ea * 1.987;
UncertainDouble dE = new UncertainDouble(Ea,0.0,"A");
ArrheniusKinetics k = new ArrheniusKinetics(dA, dn, dE, "", 1, "", "");
PDepArrheniusKinetics pdepAK = new PDepArrheniusKinetics(i);
pdepAK.setKinetics(i, p, k);
pdepk = new PDepRateConstant(rateCoefficients,pdepAK);
}
}
PDepReaction forward = new PDepReaction(Reactants, Products, pdepk);
// Read in the reverse reaction
line = ChemParser.readMeaningfulLine(reader);
for (int i=0; i<numFameTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numFamePs; j++) {
rateCoefficients[i][j] = Double.parseDouble(st.nextToken());
}
}
pdepk = null;
if (numChebyTs > 0) {
for (int i=0; i<numChebyTs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
for (int j=0; j<numChebyPs; j++) {
chebyPolys[i][j] = Double.parseDouble(st.nextToken());
}
}
ChebyshevPolynomials chebyshev = new ChebyshevPolynomials(numChebyTs,
ChebyshevPolynomials.getTlow(), ChebyshevPolynomials.getTup(),
numChebyPs, ChebyshevPolynomials.getPlow(), ChebyshevPolynomials.getPup(),
chebyPolys);
pdepk = new PDepRateConstant(rateCoefficients,chebyshev);
} else if (numPlogs > 0) {
for (int i=0; i<numPlogs; i++) {
st = new StringTokenizer(ChemParser.readMeaningfulLine(reader));
Pressure p = new Pressure(Double.parseDouble(st.nextToken()),"Pa");
UncertainDouble dA = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
UncertainDouble dn = new UncertainDouble(Double.parseDouble(st.nextToken()),0.0,"A");
double Ea = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
Ea = Ea / 1000;
else if (EaUnits.equals("J/mol"))
Ea = Ea / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
Ea = Ea / 4.184;
else if (EaUnits.equals("Kelvins"))
Ea = Ea * 1.987;
UncertainDouble dE = new UncertainDouble(Ea,0.0,"A");
ArrheniusKinetics k = new ArrheniusKinetics(dA, dn, dE, "", 1, "", "");
PDepArrheniusKinetics pdepAK = new PDepArrheniusKinetics(i);
pdepAK.setKinetics(i, p, k);
pdepk = new PDepRateConstant(rateCoefficients,pdepAK);
}
}
PDepReaction reverse = new PDepReaction(Products, Reactants, pdepk);
reverse.setReverseReaction(forward);
forward.setReverseReaction(reverse);
nonincludeRxns.add(forward);
line = ChemParser.readMeaningfulLine(reader);
}
}
// This loop ends once line == "pathReactionList"
line = ChemParser.readMeaningfulLine(reader); // line is either data or "PDepNetwork #_" or null (end of file)
while (line != null && !line.toLowerCase().startsWith("pdepnetwork")) {
st = new StringTokenizer(line);
int direction = Integer.parseInt(st.nextToken());
// First token is the rxn structure: A+B=C+D
// Note: Up to 3 reactants/products allowed
// : Either "=" or "=>" will separate reactants and products
String structure = st.nextToken();
// Separate the reactants from the products
boolean generateReverse = false;
String[] reactsANDprods = structure.split("\\=>");
if (reactsANDprods.length == 1) {
reactsANDprods = structure.split("[=]");
generateReverse = true;
}
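// A "=>" separator marks an irreversible path reaction; a plain "=" means the reverse reaction
// should also be generated (generateReverse == true)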
sd = SpeciesDictionary.getInstance();
LinkedList r = ChemParser.parseReactionSpecies(sd, reactsANDprods[0]);
LinkedList p = ChemParser.parseReactionSpecies(sd, reactsANDprods[1]);
Structure s = new Structure(r,p);
s.setDirection(direction);
// Next three tokens are the modified Arrhenius parameters
double rxn_A = Double.parseDouble(st.nextToken());
double rxn_n = Double.parseDouble(st.nextToken());
double rxn_E = Double.parseDouble(st.nextToken());
if (EaUnits.equals("cal/mol"))
rxn_E = rxn_E / 1000;
else if (EaUnits.equals("J/mol"))
rxn_E = rxn_E / 4.184 / 1000;
else if (EaUnits.equals("kJ/mol"))
rxn_E = rxn_E / 4.184;
else if (EaUnits.equals("Kelvins"))
rxn_E = rxn_E * 1.987;
UncertainDouble uA = new UncertainDouble(rxn_A,0.0,"A");
UncertainDouble un = new UncertainDouble(rxn_n,0.0,"A");
UncertainDouble uE = new UncertainDouble(rxn_E,0.0,"A");
// The remaining tokens are comments
String comments = "";
while (st.hasMoreTokens()) {
comments += st.nextToken();
}
ArrheniusKinetics[] k = new ArrheniusKinetics[1];
k[0] = new ArrheniusKinetics(uA,un,uE,"",1,"",comments);
Reaction pathRxn = new Reaction();
// if (direction == 1)
// pathRxn = Reaction.makeReaction(s,k,generateReverse);
// else
// pathRxn = Reaction.makeReaction(s.generateReverseStructure(),k,generateReverse);
pathRxn = Reaction.makeReaction(s,k,generateReverse);
PDepIsomer Reactants = new PDepIsomer(r);
PDepIsomer Products = new PDepIsomer(p);
PDepReaction pdeppathrxn = new PDepReaction(Reactants,Products,pathRxn);
newNetwork.addReaction(pdeppathrxn,true);
line = ChemParser.readMeaningfulLine(reader);
}
PDepNetwork.getNetworks().add(newNetwork);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* MRH 14Jan2010
*
* getSpeciesBySPCName
*
* Input: String name - Name of species, normally chemical formula followed
* by "J"s for radicals, and then (#)
* SpeciesDictionary sd
*
* This method was originally written as a complement to the method readPDepNetworks.
* jdmo found a bug with the readrestart option. The bug was that the method was
* attempting to add a null species to the Isomer list. The null species resulted
* from searching the SpeciesDictionary by chemkinName (e.g. C4H8OJJ(48)), when the
* chemkinName present in the dictionary was SPC(48).
*
*/
public Species getSpeciesBySPCName(String name, SpeciesDictionary sd) {
String[] nameFromNumber = name.split("\\(");
String newName = "SPC(" + nameFromNumber[1];
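// e.g. "C4H8OJJ(48)" splits into ["C4H8OJJ", "48)"], giving newName = "SPC(48)"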
return sd.getSpeciesFromChemkinName(newName);
}
/**
* MRH 12-Jun-2009
*
* Function initializes the model's core and edge.
* The initial core species always consists of the species contained
* in the condition.txt file. If seed mechanisms exist, those species
* (and the reactions given in the seed mechanism) are also added to
* the core.
* The initial edge species/reactions are determined by reacting the core
* species by one full iteration.
*/
public void initializeCoreEdgeModel() {
LinkedHashSet allInitialCoreSpecies = new LinkedHashSet();
LinkedHashSet allInitialCoreRxns = new LinkedHashSet();
if (readrestart) {
readRestartReactions();
if (PDepNetwork.generateNetworks) readPDepNetworks();
allInitialCoreSpecies.addAll(restartCoreSpcs);
allInitialCoreRxns.addAll(restartCoreRxns);
}
// Add the species from the condition.txt (input) file
allInitialCoreSpecies.addAll(getSpeciesSeed());
// Add the species from the seed mechanisms, if they exist
if (hasSeedMechanisms()) {
allInitialCoreSpecies.addAll(getSeedMechanism().getSpeciesSet());
allInitialCoreRxns.addAll(getSeedMechanism().getReactionSet());
}
CoreEdgeReactionModel cerm = new CoreEdgeReactionModel(allInitialCoreSpecies, allInitialCoreRxns);
if (readrestart) {
cerm.addUnreactedSpeciesSet(restartEdgeSpcs);
cerm.addUnreactedReactionSet(restartEdgeRxns);
}
setReactionModel(cerm);
PDepNetwork.reactionModel = getReactionModel();
PDepNetwork.reactionSystem = (ReactionSystem) getReactionSystemList().get(0);
// Determine initial set of reactions and edge species using only the
// species enumerated in the input file and the seed mechanisms as the core
if (!readrestart) {
LinkedHashSet reactionSet;
if (hasSeedMechanisms() && getSeedMechanism().shouldGenerateReactions()) {
reactionSet = getReactionGenerator().react(allInitialCoreSpecies);
}
else {
reactionSet = new LinkedHashSet();
for (Iterator iter = speciesSeed.iterator(); iter.hasNext(); ) {
Species spec = (Species) iter.next();
reactionSet.addAll(getReactionGenerator().react(allInitialCoreSpecies, spec));
}
}
reactionSet.addAll(getLibraryReactionGenerator().react(allInitialCoreSpecies));
// Set initial core-edge reaction model based on above results
if (reactionModelEnlarger instanceof RateBasedRME) {
Iterator iter = reactionSet.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
cerm.addReaction(r);
}
}
else {
// Keep only reactions with more than one reactant and more than one product; the rest are sent to the pressure-dependent networks
Iterator iter = reactionSet.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
if (r.getReactantNumber() > 1 && r.getProductNumber() > 1){
cerm.addReaction(r);
}
else {
cerm.categorizeReaction(r.getStructure());
PDepNetwork.addReactionToNetworks(r);
}
}
}
}
for (Integer i = 0; i < reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem) reactionSystemList.get(i);
rs.setReactionModel(getReactionModel());
}
// We cannot return a system with no core reactions, so if this is the case we must add to the core
while (getReactionModel().isEmpty() && !PDepNetwork.hasCoreReactions((CoreEdgeReactionModel) getReactionModel())) {
for (Integer i = 0; i < reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem) reactionSystemList.get(i);
if (reactionModelEnlarger instanceof RateBasedPDepRME)
rs.initializePDepNetwork();
rs.appendUnreactedSpeciesStatus((InitialStatus)initialStatusList.get(i), rs.getPresentTemperature());
}
enlargeReactionModel();
}
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
rs.setReactionModel(getReactionModel());
}
return;
}
//## operation initializeCoreEdgeModelWithPRL()
//9/24/07 gmagoon: moved from ReactionSystem.java
public void initializeCoreEdgeModelWithPRL() {
//#[ operation initializeCoreEdgeModelWithPRL()
initializeCoreEdgeModelWithoutPRL();
CoreEdgeReactionModel cerm = (CoreEdgeReactionModel)getReactionModel();
LinkedHashSet primarySpeciesSet = getPrimaryReactionLibrary().getSpeciesSet(); //10/14/07 gmagoon: changed to use getPrimaryReactionLibrary
LinkedHashSet primaryReactionSet = getPrimaryReactionLibrary().getReactionSet();
cerm.addReactedSpeciesSet(primarySpeciesSet);
cerm.addPrimaryReactionSet(primaryReactionSet);
LinkedHashSet newReactions = getReactionGenerator().react(cerm.getReactedSpeciesSet());
if (reactionModelEnlarger instanceof RateBasedRME)
cerm.addReactionSet(newReactions);
else {
Iterator iter = newReactions.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
if (r.getReactantNumber() == 2 && r.getProductNumber() == 2){
cerm.addReaction(r);
}
}
}
return;
//
}
//## operation initializeCoreEdgeModelWithoutPRL()
//9/24/07 gmagoon: moved from ReactionSystem.java
protected void initializeCoreEdgeModelWithoutPRL() {
//#[ operation initializeCoreEdgeModelWithoutPRL()
CoreEdgeReactionModel cerm = new CoreEdgeReactionModel(new LinkedHashSet(getSpeciesSeed()));
setReactionModel(cerm);
PDepNetwork.reactionModel = getReactionModel();
PDepNetwork.reactionSystem = (ReactionSystem) getReactionSystemList().get(0);
// Determine initial set of reactions and edge species using only the
// species enumerated in the input file as the core
LinkedHashSet reactionSet = getReactionGenerator().react(getSpeciesSeed());
reactionSet.addAll(getLibraryReactionGenerator().react(getSpeciesSeed()));
// Set initial core-edge reaction model based on above results
if (reactionModelEnlarger instanceof RateBasedRME) {
// Add all reactions in the reaction set to the core-edge model
Iterator iter = reactionSet.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
cerm.addReaction(r);
}
}
else {
// Keep only reactions with more than one reactant and more than one product; the rest are sent to the pressure-dependent networks
Iterator iter = reactionSet.iterator();
while (iter.hasNext()){
Reaction r = (Reaction)iter.next();
if (r.getReactantNumber() > 1 && r.getProductNumber() > 1){
cerm.addReaction(r);
}
else {
cerm.categorizeReaction(r.getStructure());
PDepNetwork.addReactionToNetworks(r);
}
}
}
//10/9/07 gmagoon: copy reactionModel to reactionSystem; there may still be scope problems, particularly in above elseif statement
//10/24/07 gmagoon: want to copy same reaction model to all reactionSystem variables; should probably also make similar modifications elsewhere; may or may not need to copy in ...WithPRL function
for (Integer i = 0; i < reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem) reactionSystemList.get(i);
rs.setReactionModel(getReactionModel());
}
//reactionSystem.setReactionModel(getReactionModel());
// We cannot return a system with no core reactions, so if this is the case we must add to the core
while (getReactionModel().isEmpty()&&!PDepNetwork.hasCoreReactions((CoreEdgeReactionModel) getReactionModel())) {
for (Integer i = 0; i < reactionSystemList.size(); i++) {
ReactionSystem rs = (ReactionSystem) reactionSystemList.get(i);
if (reactionModelEnlarger instanceof RateBasedPDepRME)
rs.initializePDepNetwork();
rs.appendUnreactedSpeciesStatus((InitialStatus)initialStatusList.get(i), rs.getPresentTemperature());
}
enlargeReactionModel();
}
for (Integer i = 0; i<reactionSystemList.size();i++) {
ReactionSystem rs = (ReactionSystem)reactionSystemList.get(i);
rs.setReactionModel(getReactionModel());
}
return;
//
}
//## operation initializeCoreEdgeReactionModel()
//9/24/07 gmagoon: moved from ReactionSystem.java
public void initializeCoreEdgeReactionModel() {
System.out.println("\nInitializing core-edge reaction model");
// setSpeciesSeed(new LinkedHashSet());//10/4/07 gmagoon:moved from initializeReactionSystem; later moved to modelGeneration()
//#[ operation initializeCoreEdgeReactionModel()
// if (hasPrimaryReactionLibrary()) initializeCoreEdgeModelWithPRL();
// else initializeCoreEdgeModelWithoutPRL();
/*
* MRH 12-Jun-2009
*
* I've lumped the initializeCoreEdgeModel w/ and w/o a seed mechanism
* (which used to be the PRL) into one function. Before, RMG would
* complete one iteration (construct the edge species/rxns) before adding
* the seed mechanism to the rxn, thereby possibly estimating kinetic
* parameters for a rxn that exists in a seed mechanism
*/
initializeCoreEdgeModel();
//
}
//9/24/07 gmagoon: copied from ReactionSystem.java
public ReactionGenerator getReactionGenerator() {
return reactionGenerator;
}
//10/4/07 gmagoon: moved from ReactionSystem.java
public void setReactionGenerator(ReactionGenerator p_ReactionGenerator) {
reactionGenerator = p_ReactionGenerator;
}
//9/25/07 gmagoon: moved from ReactionSystem.java
//10/24/07 gmagoon: changed to use reactionSystemList
//## operation enlargeReactionModel()
public void enlargeReactionModel() {
//#[ operation enlargeReactionModel()
if (reactionModelEnlarger == null) throw new NullPointerException("ReactionModelEnlarger");
System.out.println("\nEnlarging reaction model");
reactionModelEnlarger.enlargeReactionModel(reactionSystemList, reactionModel, validList);
return;
//
}
public void pruneReactionModel() {
HashMap prunableSpeciesMap = new HashMap();
//check whether all the reaction systems reached target conversion/time
boolean allReachedTarget = true;
for (Integer i = 0; i < reactionSystemList.size(); i++) {
JDAS ds = (JDAS)((ReactionSystem) reactionSystemList.get(i)).getDynamicSimulator();
if (!ds.targetReached) allReachedTarget = false;
}
JDAS ds0 = (JDAS)((ReactionSystem) reactionSystemList.get(0)).getDynamicSimulator(); //get the first reactionSystem dynamic simulator
//prune the reaction model if AUTO is being used, and all reaction systems have reached target time/conversion, and edgeTol is non-zero (and positive, obviously), and if there are a sufficient number of species in the reaction model (edge + core)
if ( JDAS.autoflag &&
allReachedTarget &&
edgeTol>0 &&
(((CoreEdgeReactionModel)reactionModel).getEdge().getSpeciesNumber()+reactionModel.getSpeciesNumber())>= minSpeciesForPruning){
int numberToBePruned = ((CoreEdgeReactionModel)reactionModel).getEdge().getSpeciesNumber() - maxEdgeSpeciesAfterPruning;
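// numberToBePruned is the total number of edge species that must be pruned to reduce the edge to
// maxEdgeSpeciesAfterPruning; species below edgeTol are pruned regardless, and additional low-flux
// species are pruned only until this count is reached (see the sorted pruning loop below)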
Iterator iter = JDAS.edgeID.keySet().iterator();//determine the maximum edge flux ratio for each edge species
while(iter.hasNext()){
Species spe = (Species)iter.next();
Integer id = (Integer)JDAS.edgeID.get(spe);
double maxmaxRatio = ds0.maxEdgeFluxRatio[id-1];
boolean prunable = ds0.prunableSpecies[id-1];
for (Integer i = 1; i < reactionSystemList.size(); i++) {//go through the rest of the reaction systems to see if there are higher max flux ratios
JDAS ds = (JDAS)((ReactionSystem) reactionSystemList.get(i)).getDynamicSimulator();
if(ds.maxEdgeFluxRatio[id-1] > maxmaxRatio) maxmaxRatio = ds.maxEdgeFluxRatio[id-1];
if(prunable && !ds.prunableSpecies[id-1]) prunable = false;//I can't imagine a case where this would occur (if the conc. is zero at one condition, it should be zero at all conditions), but it is included for completeness
}
//if the species is "prunable" (i.e. it doesn't have any reactions producing it with zero flux), record it along with its maximum flux ratio; the edge inclusion threshold check is applied after sorting below
if( prunable){ // && maxmaxRatio < edgeTol
prunableSpeciesMap.put(spe, maxmaxRatio);
// at this point prunableSpecies includes ALL prunable species, no matter how large their flux
}
}
// sort the prunableSpecies by maxmaxRatio
// i.e. sort the map by values
List prunableSpeciesList = new LinkedList(prunableSpeciesMap.entrySet());
Collections.sort(prunableSpeciesList, new Comparator() {
public int compare(Object o1, Object o2) {
return ((Comparable) ((Map.Entry) (o1)).getValue())
.compareTo(((Map.Entry) (o2)).getValue());
}
});
List speciesToPrune = new LinkedList();
for (Iterator it = prunableSpeciesList.iterator(); it.hasNext();) {
Map.Entry entry = (Map.Entry)it.next();
Species spe = (Species)entry.getKey();
double maxmaxRatio = (Double)entry.getValue();
if (maxmaxRatio < edgeTol)
{
System.out.println("Edge species "+spe.getChemkinName() +" has a maximum flux ratio ("+maxmaxRatio+") lower than edge inclusion threshhold and will be pruned.");
speciesToPrune.add(spe);
}
else if ( numberToBePruned - speciesToPrune.size() > 0 ) {
System.out.println("Edge species "+spe.getChemkinName() +" has a low maximum flux ratio ("+maxmaxRatio+") and will be pruned to reduce the edge size to the maximum ("+maxEdgeSpeciesAfterPruning+").");
speciesToPrune.add(spe);
}
else break; // no more to be pruned
}
//now, speciesToPrune has been filled with species that should be pruned from the edge
System.out.println("Pruning...");
//prune species from the edge
//remove species from the edge and from the species dictionary and from edgeID
iter = speciesToPrune.iterator();
while(iter.hasNext()){
Species spe = (Species)iter.next();
writePrunedEdgeSpecies(spe);
((CoreEdgeReactionModel)getReactionModel()).getUnreactedSpeciesSet().remove(spe);
//SpeciesDictionary.getInstance().getSpeciesSet().remove(spe);
SpeciesDictionary.getInstance().remove(spe);
JDAS.edgeID.remove(spe);
}
//remove reactions from the edge involving pruned species
iter = ((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().iterator();
HashSet toRemove = new HashSet();
while(iter.hasNext()){
Reaction reaction = (Reaction)iter.next();
if (reactionPrunableQ(reaction, speciesToPrune)) toRemove.add(reaction);
}
iter = toRemove.iterator();
while(iter.hasNext()){
Reaction reaction = (Reaction)iter.next();
writePrunedEdgeReaction(reaction);
reaction.setReactionTemplate(null);//remove from ReactionTemplate's reactionDictionaryByStructure
((CoreEdgeReactionModel)getReactionModel()).getUnreactedReactionSet().remove(reaction);
//ReactionTemplate.getReactionDictionaryByStructure().remove(reaction.getStructure());//remove from ReactionTemplate's reactionDictionaryByStructure
}
//remove reactions from PDepNetworks in PDep cases
if (reactionModelEnlarger instanceof RateBasedPDepRME) {
iter = PDepNetwork.getNetworks().iterator();
HashSet pdnToRemove = new HashSet();
while (iter.hasNext()){
PDepNetwork pdn = (PDepNetwork)iter.next();
//remove path reactions
Iterator rIter = pdn.getPathReactions().iterator();
toRemove = new HashSet();
while(rIter.hasNext()){
Reaction reaction = (Reaction)rIter.next();
if (reactionPrunableQ(reaction, speciesToPrune)) toRemove.add(reaction);
}
Iterator iterRem = toRemove.iterator();
while(iterRem.hasNext()){
Reaction reaction = (Reaction)iterRem.next();
reaction.setReactionTemplate(null);//remove from ReactionTemplate's reactionDictionaryByStructure
pdn.getPathReactions().remove(reaction);
}
//remove net reactions
rIter = pdn.getNetReactions().iterator();
toRemove = new HashSet();
while(rIter.hasNext()){
Reaction reaction = (Reaction)rIter.next();
if (reactionPrunableQ(reaction, speciesToPrune)) toRemove.add(reaction);
}
iterRem = toRemove.iterator();
while(iterRem.hasNext()){
Reaction reaction = (Reaction)iterRem.next();
reaction.setReactionTemplate(null);//remove from ReactionTemplate's reactionDictionaryByStructure
pdn.getNetReactions().remove(reaction);
}
//remove isomers
Iterator iIter = pdn.getIsomers().iterator();
toRemove = new HashSet();
while(iIter.hasNext()){
PDepIsomer pdi = (PDepIsomer)iIter.next();
Iterator isIter = pdi.getSpeciesListIterator();
while(isIter.hasNext()){
Species spe = (Species)isIter.next();
if (speciesToPrune.contains(spe)&&!toRemove.contains(spe)) toRemove.add(pdi);
}
}
iterRem = toRemove.iterator();
while(iterRem.hasNext()){
PDepIsomer pdi = (PDepIsomer)iterRem.next();
pdn.getIsomers().remove(pdi);
}
//remove nonincluded reactions
rIter = pdn.getNonincludedReactions().iterator();
toRemove = new HashSet();
while(rIter.hasNext()){
Reaction reaction = (Reaction)rIter.next();
if (reactionPrunableQ(reaction, speciesToPrune)) toRemove.add(reaction);
}
iterRem = toRemove.iterator();
while(iterRem.hasNext()){
Reaction reaction = (Reaction)iterRem.next();
reaction.setReactionTemplate(null);//remove from ReactionTemplate's reactionDictionaryByStructure
pdn.getNonincludedReactions().remove(reaction);
}
//remove the entire network if the network has no path or net reactions
if(pdn.getPathReactions().size()==0&&pdn.getNetReactions().size()==0) pdnToRemove.add(pdn);
}
iter = pdnToRemove.iterator();
while (iter.hasNext()){
PDepNetwork pdn = (PDepNetwork)iter.next();
PDepNetwork.getNetworks().remove(pdn);
}
}
}
return;
}
//determines whether a reaction can be removed from the edge; cf. categorizeReaction() in CoreEdgeReactionModel
//returns true if the reaction involves any reactant or product that is in p_prunableSpecies; otherwise returns false
public boolean reactionPrunableQ(Reaction p_reaction, Collection p_prunableSpecies){
Iterator iter = p_reaction.getReactants();
while (iter.hasNext()) {
Species spe = (Species)iter.next();
if (p_prunableSpecies.contains(spe))
return true;
}
iter = p_reaction.getProducts();
while (iter.hasNext()) {
Species spe = (Species)iter.next();
if (p_prunableSpecies.contains(spe))
return true;
}
return false;
}
public boolean hasPrimaryReactionLibrary() {
if (primaryReactionLibrary == null) return false;
return (primaryReactionLibrary.size() > 0);
}
public boolean hasSeedMechanisms() {
if (getSeedMechanism() == null) return false;
return (seedMechanism.size() > 0);
}
//9/25/07 gmagoon: moved from ReactionSystem.java
public PrimaryReactionLibrary getPrimaryReactionLibrary() {
return primaryReactionLibrary;
}
//9/25/07 gmagoon: moved from ReactionSystem.java
public void setPrimaryReactionLibrary(PrimaryReactionLibrary p_PrimaryReactionLibrary) {
primaryReactionLibrary = p_PrimaryReactionLibrary;
}
//10/4/07 gmagoon: added
public LinkedHashSet getSpeciesSeed() {
return speciesSeed;
}
//10/4/07 gmagoon: added
public void setSpeciesSeed(LinkedHashSet p_speciesSeed) {
speciesSeed = p_speciesSeed;
}
//10/4/07 gmagoon: added
public LibraryReactionGenerator getLibraryReactionGenerator() {
return lrg;
}
//10/4/07 gmagoon: added
public void setLibraryReactionGenerator(LibraryReactionGenerator p_lrg) {
lrg = p_lrg;
}
public static Temperature getTemp4BestKinetics() {
return temp4BestKinetics;
}
public static void setTemp4BestKinetics(Temperature firstSysTemp) {
temp4BestKinetics = firstSysTemp;
}
public SeedMechanism getSeedMechanism() {
return seedMechanism;
}
public void setSeedMechanism(SeedMechanism p_seedMechanism) {
seedMechanism = p_seedMechanism;
}
public PrimaryThermoLibrary getPrimaryThermoLibrary() {
return primaryThermoLibrary;
}
public void setPrimaryThermoLibrary(PrimaryThermoLibrary p_primaryThermoLibrary) {
primaryThermoLibrary = p_primaryThermoLibrary;
}
public static double getAtol(){
return atol;
}
public boolean runKillableToPreventInfiniteLoop(boolean intermediateSteps, int iterationNumber) {
ReactionSystem rs0 = (ReactionSystem)reactionSystemList.get(0);
if (!intermediateSteps)//if there are no intermediate steps (for example when using AUTO method), return true;
return true;
//if there are intermediate steps, the run is killable if the iteration number exceeds the number of time steps / conversions
else if (rs0.finishController.terminationTester instanceof ReactionTimeTT){
if (iterationNumber - 1 > timeStep.size()){ //-1 correction needed since when this is called, iteration number has been incremented
return true;
}
}
else //the case where intermediate conversions are specified
if (iterationNumber - 1 > numConversions){ //see above; it is possible there is an off-by-one error here, so further testing will be needed
return true;
}
return false; //return false if none of the above criteria are met
}
public void readAndMakePRL(BufferedReader reader) throws IOException {
int Ilib = 0;
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
String[] tempString = line.split("Name: ");
String name = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("Location: ");
String location = tempString[tempString.length-1].trim();
String path = System.getProperty("jing.rxn.ReactionLibrary.pathName");
path += "/" + location;
if (Ilib==0) {
setPrimaryReactionLibrary(new PrimaryReactionLibrary(name, path));
Ilib++;
}
else {
getPrimaryReactionLibrary().appendPrimaryReactionLibrary(name, path);
Ilib++;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (Ilib==0) {
setPrimaryReactionLibrary(null);
}
else System.out.println("Primary Reaction Libraries in use: " + getPrimaryReactionLibrary().getName());
}
public void readAndMakePTL(BufferedReader reader) {
int numPTLs = 0;
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
String[] tempString = line.split("Name: ");
String name = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("Location: ");
String path = tempString[tempString.length-1].trim();
if (numPTLs==0) {
setPrimaryThermoLibrary(new PrimaryThermoLibrary(name,path));
++numPTLs;
}
else {
getPrimaryThermoLibrary().appendPrimaryThermoLibrary(name,path);
++numPTLs;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (numPTLs == 0) setPrimaryThermoLibrary(null);
}
public void readExtraForbiddenStructures(BufferedReader reader) throws IOException {
System.out.println("Reading extra forbidden structures from input file.");
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
StringTokenizer token = new StringTokenizer(line);
String fgname = token.nextToken();
Graph fgGraph = null;
try {
fgGraph = ChemParser.readFGGraph(reader);
}
catch (InvalidGraphFormatException e) {
System.out.println("Invalid functional group in "+fgname);
throw new InvalidFunctionalGroupException(fgname + ": " + e.getMessage());
}
if (fgGraph == null) throw new InvalidFunctionalGroupException(fgname);
FunctionalGroup fg = FunctionalGroup.makeForbiddenStructureFG(fgname, fgGraph);
ChemGraph.addForbiddenStructure(fg);
line = ChemParser.readMeaningfulLine(reader);
System.out.println(" Forbidden structure: "+fgname);
}
}
public void setSpectroscopicDataMode(String line) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String sdeType = st.nextToken().toLowerCase();
if (sdeType.equals("frequencygroups") || sdeType.equals("default")) {
SpectroscopicData.mode = SpectroscopicData.Mode.FREQUENCYGROUPS;
}
else if (sdeType.equals("therfit") || sdeType.equals("threefrequencymodel")) {
SpectroscopicData.mode = SpectroscopicData.Mode.THREEFREQUENCY;
}
else if (sdeType.equals("off") || sdeType.equals("none")) {
SpectroscopicData.mode = SpectroscopicData.Mode.OFF;
}
else throw new InvalidSymbolException("condition.txt: Unknown SpectroscopicDataEstimator = " + sdeType);
}
/**
* Sets the pressure dependence options to on or off. If on, checks for
* more options and sets them as well.
* @param line The current line in the condition file; should start with "PressureDependence:"
* @param reader The reader currently being used to parse the condition file
*/
public String setPressureDependenceOptions(String line, BufferedReader reader) throws InvalidSymbolException {
// Determine pressure dependence mode
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken(); // Should be "PressureDependence:"
String pDepType = st.nextToken();
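// Typical condition.txt entries handled by this method (illustrative values):
//   PressureDependence: ReservoirState
//   PDepKineticsModel: Chebyshev 4 4
//   TRange: (K) 300 2000 8
//   PRange: (bar) 0.01 100 5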
if (pDepType.toLowerCase().equals("off")) {
// No pressure dependence
reactionModelEnlarger = new RateBasedRME();
PDepNetwork.generateNetworks = false;
line = ChemParser.readMeaningfulLine(reader);
}
else if (pDepType.toLowerCase().equals("modifiedstrongcollision") ||
pDepType.toLowerCase().equals("reservoirstate") ||
pDepType.toLowerCase().equals("chemdis")) {
reactionModelEnlarger = new RateBasedPDepRME();
PDepNetwork.generateNetworks = true;
// Set pressure dependence method
if (pDepType.toLowerCase().equals("reservoirstate"))
((RateBasedPDepRME) reactionModelEnlarger).setPDepKineticsEstimator(new FastMasterEqn(FastMasterEqn.Mode.RESERVOIRSTATE));
else if (pDepType.toLowerCase().equals("modifiedstrongcollision"))
((RateBasedPDepRME) reactionModelEnlarger).setPDepKineticsEstimator(new FastMasterEqn(FastMasterEqn.Mode.STRONGCOLLISION));
//else if (pDepType.toLowerCase().equals("chemdis"))
// ((RateBasedPDepRME) reactionModelEnlarger).setPDepKineticsEstimator(new Chemdis());
else
throw new InvalidSymbolException("condition.txt: Unknown PressureDependence mode = " + pDepType);
RateBasedPDepRME pdepModelEnlarger = (RateBasedPDepRME) reactionModelEnlarger;
// Turn on spectroscopic data estimation if not already on
if (pdepModelEnlarger.getPDepKineticsEstimator() instanceof FastMasterEqn && SpectroscopicData.mode == SpectroscopicData.Mode.OFF) {
System.out.println("Warning: Spectroscopic data needed for pressure dependence; switching SpectroscopicDataEstimator to FrequencyGroups.");
SpectroscopicData.mode = SpectroscopicData.Mode.FREQUENCYGROUPS;
}
else if (pdepModelEnlarger.getPDepKineticsEstimator() instanceof Chemdis && SpectroscopicData.mode != SpectroscopicData.Mode.THREEFREQUENCY) {
System.out.println("Warning: Switching SpectroscopicDataEstimator to three-frequency model.");
SpectroscopicData.mode = SpectroscopicData.Mode.THREEFREQUENCY;
}
// Next line must be PDepKineticsModel
line = ChemParser.readMeaningfulLine(reader);
if (line.toLowerCase().startsWith("pdepkineticsmodel:")) {
st = new StringTokenizer(line);
name = st.nextToken();
String pDepKinType = st.nextToken();
if (pDepKinType.toLowerCase().equals("chebyshev")) {
PDepRateConstant.setMode(PDepRateConstant.Mode.CHEBYSHEV);
// Default is cubic order (four basis functions) in both temperature and pressure
FastMasterEqn.setNumTBasisFuncs(4);
FastMasterEqn.setNumPBasisFuncs(4);
}
else if (pDepKinType.toLowerCase().equals("pdeparrhenius"))
PDepRateConstant.setMode(PDepRateConstant.Mode.PDEPARRHENIUS);
else if (pDepKinType.toLowerCase().equals("rate"))
PDepRateConstant.setMode(PDepRateConstant.Mode.RATE);
else
throw new InvalidSymbolException("condition.txt: Unknown PDepKineticsModel = " + pDepKinType);
// For Chebyshev polynomials, optionally specify the number of
// temperature and pressure basis functions
// Such a line would read, e.g.: "PDepKineticsModel: Chebyshev 4 4"
if (st.hasMoreTokens() && PDepRateConstant.getMode() == PDepRateConstant.Mode.CHEBYSHEV) {
try {
int numTBasisFuncs = Integer.parseInt(st.nextToken());
int numPBasisFuncs = Integer.parseInt(st.nextToken());
FastMasterEqn.setNumTBasisFuncs(numTBasisFuncs);
FastMasterEqn.setNumPBasisFuncs(numPBasisFuncs);
}
catch (NoSuchElementException e) {
throw new InvalidSymbolException("condition.txt: Missing number of pressure basis functions for Chebyshev polynomials.");
}
}
}
else
throw new InvalidSymbolException("condition.txt: Missing PDepKineticsModel after PressureDependence line.");
// Determine temperatures and pressures to use
// These can be specified automatically using TRange and PRange or
// manually using Temperatures and Pressures
Temperature[] temperatures = null;
Pressure[] pressures = null;
String Tunits = "K";
Temperature Tmin = new Temperature(300.0, "K");
Temperature Tmax = new Temperature(2000.0, "K");
int Tnumber = 8;
String Punits = "bar";
Pressure Pmin = new Pressure(0.01, "bar");
Pressure Pmax = new Pressure(100.0, "bar");
int Pnumber = 5;
// Read next line of input
line = ChemParser.readMeaningfulLine(reader);
boolean done = !(line.toLowerCase().startsWith("trange:") ||
line.toLowerCase().startsWith("prange:") ||
line.toLowerCase().startsWith("temperatures:") ||
line.toLowerCase().startsWith("pressures:"));
// Parse lines containing pressure dependence options
// Possible options are "TRange:", "PRange:", "Temperatures:", and "Pressures:"
// You must specify either TRange or Temperatures and either PRange or Pressures
// The order does not matter
while (!done) {
st = new StringTokenizer(line);
name = st.nextToken();
if (line.toLowerCase().startsWith("trange:")) {
Tunits = ChemParser.removeBrace(st.nextToken());
Tmin = new Temperature(Double.parseDouble(st.nextToken()), Tunits);
Tmax = new Temperature(Double.parseDouble(st.nextToken()), Tunits);
Tnumber = Integer.parseInt(st.nextToken());
}
else if (line.toLowerCase().startsWith("prange:")) {
Punits = ChemParser.removeBrace(st.nextToken());
Pmin = new Pressure(Double.parseDouble(st.nextToken()), Punits);
Pmax = new Pressure(Double.parseDouble(st.nextToken()), Punits);
Pnumber = Integer.parseInt(st.nextToken());
}
else if (line.toLowerCase().startsWith("temperatures:")) {
Tnumber = Integer.parseInt(st.nextToken());
Tunits = ChemParser.removeBrace(st.nextToken());
temperatures = new Temperature[Tnumber];
for (int i = 0; i < Tnumber; i++) {
temperatures[i] = new Temperature(Double.parseDouble(st.nextToken()), Tunits);
}
Tmin = temperatures[0];
Tmax = temperatures[Tnumber-1];
}
else if (line.toLowerCase().startsWith("pressures:")) {
Pnumber = Integer.parseInt(st.nextToken());
Punits = ChemParser.removeBrace(st.nextToken());
pressures = new Pressure[Pnumber];
for (int i = 0; i < Pnumber; i++) {
pressures[i] = new Pressure(Double.parseDouble(st.nextToken()), Punits);
}
Pmin = pressures[0];
Pmax = pressures[Pnumber-1];
}
// Read next line of input
line = ChemParser.readMeaningfulLine(reader);
done = !(line.toLowerCase().startsWith("trange:") ||
line.toLowerCase().startsWith("prange:") ||
line.toLowerCase().startsWith("temperatures:") ||
line.toLowerCase().startsWith("pressures:"));
}
// Set temperatures and pressures (if not already set manually)
if (temperatures == null) {
temperatures = new Temperature[Tnumber];
if (PDepRateConstant.getMode() == PDepRateConstant.Mode.CHEBYSHEV) {
// Use the Gauss-Chebyshev points
// The formula for the Gauss-Chebyshev points was taken from
// the Chemkin theory manual
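// The Gauss-Chebyshev abscissas x_i = -cos((2i-1)*pi/(2N)) are mapped to temperatures via
// 1/T_i = ((1/Tmax - 1/Tmin)*x_i + 1/Tmax + 1/Tmin)/2, i.e. Chebyshev spacing in inverse temperature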
for (int i = 1; i <= Tnumber; i++) {
double T = -Math.cos((2 * i - 1) * Math.PI / (2 * Tnumber));
T = 2.0 / ((1.0/Tmax.getK() - 1.0/Tmin.getK()) * T + 1.0/Tmax.getK() + 1.0/Tmin.getK());
temperatures[i-1] = new Temperature(T, "K");
}
}
else {
// Distribute equally on a 1/T basis
double slope = (1.0/Tmax.getK() - 1.0/Tmin.getK()) / (Tnumber - 1);
for (int i = 0; i < Tnumber; i++) {
double T = 1.0/(slope * i + 1.0/Tmin.getK());
temperatures[i] = new Temperature(T, "K");
}
}
}
if (pressures == null) {
pressures = new Pressure[Pnumber];
if (PDepRateConstant.getMode() == PDepRateConstant.Mode.CHEBYSHEV) {
// Use the Gauss-Chebyshev points
// The formula for the Gauss-Chebyshev points was taken from
// the Chemkin theory manual
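// Same mapping as for the temperature points, but with Chebyshev spacing in log10(P) between Pmin and Pmax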
for (int i = 1; i <= Pnumber; i++) {
double P = -Math.cos((2 * i - 1) * Math.PI / (2 * Pnumber));
P = Math.pow(10, 0.5 * ((Math.log10(Pmax.getBar()) - Math.log10(Pmin.getBar())) * P + Math.log10(Pmax.getBar()) + Math.log10(Pmin.getBar())));
pressures[i-1] = new Pressure(P, "bar");
}
}
else {
// Distribute equally on a log P basis
double slope = (Math.log10(Pmax.getBar()) - Math.log10(Pmin.getBar())) / (Pnumber - 1);
for (int i = 0; i < Pnumber; i++) {
double P = Math.pow(10, slope * i + Math.log10(Pmin.getBar()));
pressures[i] = new Pressure(P, "bar");
}
}
}
FastMasterEqn.setTemperatures(temperatures);
PDepRateConstant.setTemperatures(temperatures);
PDepRateConstant.setTMin(Tmin);
PDepRateConstant.setTMax(Tmax);
ChebyshevPolynomials.setTlow(Tmin);
ChebyshevPolynomials.setTup(Tmax);
FastMasterEqn.setPressures(pressures);
PDepRateConstant.setPressures(pressures);
PDepRateConstant.setPMin(Pmin);
PDepRateConstant.setPMax(Pmax);
ChebyshevPolynomials.setPlow(Pmin);
ChebyshevPolynomials.setPup(Pmax);
/*
* New option for input file: DecreaseGrainSize
* User now has the option to re-run fame with additional grains
* (smaller grain size) when the p-dep rate exceeds the
* high-P-limit rate.
* Default value: off
*/
if (line.toLowerCase().startsWith("decreasegrainsize")) {
st = new StringTokenizer(line);
String tempString = st.nextToken(); // "DecreaseGrainSize:"
tempString = st.nextToken().trim().toLowerCase();
if (tempString.equals("on") || tempString.equals("yes") ||
tempString.equals("true")) {
rerunFame = true;
} else rerunFame = false;
line = ChemParser.readMeaningfulLine(reader);
}
}
else {
throw new InvalidSymbolException("condition.txt: Unknown PressureDependence = " + pDepType);
}
return line;
}
public void createTModel(String line) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String modelType = st.nextToken();
String unit = st.nextToken();
unit = ChemParser.removeBrace(unit);
if (modelType.equals("Constant")) {
tempList = new LinkedList();
//read first temperature
double t = Double.parseDouble(st.nextToken());
tempList.add(new ConstantTM(t, unit));
Temperature temp = new Temperature(t, unit);//10/29/07 gmagoon: added this line and next two lines to set Global.lowTemperature and Global.highTemperature
Global.lowTemperature = (Temperature)temp.clone();
Global.highTemperature = (Temperature)temp.clone();
//read remaining temperatures
while (st.hasMoreTokens()) {
t = Double.parseDouble(st.nextToken());
tempList.add(new ConstantTM(t, unit));
temp = new Temperature(t,unit);//10/29/07 gmagoon: added this line and next two "if" statements to set Global.lowTemperature and Global.highTemperature
if(temp.getK() < Global.lowTemperature.getK())
Global.lowTemperature = (Temperature)temp.clone();
if(temp.getK() > Global.highTemperature.getK())
Global.highTemperature = (Temperature)temp.clone();
}
}
else {
throw new InvalidSymbolException("condition.txt: Unknown TemperatureModel = " + modelType);
}
}
public void createPModel(String line) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken();
String modelType = st.nextToken();
String unit = st.nextToken();
unit = ChemParser.removeBrace(unit);
if (modelType.equals("Constant")) {
presList = new LinkedList();
//read first pressure
double p = Double.parseDouble(st.nextToken());
Pressure pres = new Pressure(p, unit);
Global.lowPressure = (Pressure)pres.clone();
Global.highPressure = (Pressure)pres.clone();
presList.add(new ConstantPM(p, unit));
//read remaining pressures
while (st.hasMoreTokens()) {
p = Double.parseDouble(st.nextToken());
presList.add(new ConstantPM(p, unit));
pres = new Pressure(p, unit);
if(pres.getBar() < Global.lowPressure.getBar())
Global.lowPressure = (Pressure)pres.clone();
if(pres.getBar() > Global.highPressure.getBar())
Global.highPressure = (Pressure)pres.clone();
}
}
else {
throw new InvalidSymbolException("condition.txt: Unknown PressureModel = " + modelType);
}
}
public LinkedHashMap populateInitialStatusListWithReactiveSpecies(BufferedReader reader) throws IOException {
LinkedHashMap speciesSet = new LinkedHashMap();
LinkedHashMap speciesStatus = new LinkedHashMap();
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
StringTokenizer st = new StringTokenizer(line);
String index = st.nextToken();
String name = null;
if (!index.startsWith("(")) name = index;
else name = st.nextToken();
//if (restart) name += "("+speciesnum+")";
// 24Jun2009: MRH
// Check if the species name begins with a number.
// If so, terminate the program and inform the user to choose
// a different name. This is implemented so that the chem.inp
// file generated will be valid when run in Chemkin
try {
int doesNameBeginWithNumber = Integer.parseInt(name.substring(0,1));
System.out.println("\nA species name should not begin with a number." +
" Please rename species: " + name + "\n");
System.exit(0);
} catch (NumberFormatException e) {
// We're good
}
if (!(st.hasMoreTokens())) throw new InvalidSymbolException("Couldn't find concentration of species: "+name);
String conc = st.nextToken();
double concentration = Double.parseDouble(conc);
String unit = st.nextToken();
unit = ChemParser.removeBrace(unit);
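// Normalize the concentration to mol/cm3: mole/l is divided by 1e3, mole/m3 by 1e6,
// and molecule/cm3 by Avogadro's number (6.022e23)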
if (unit.equals("mole/l") || unit.equals("mol/l") || unit.equals("mole/liter") || unit.equals("mol/liter")) {
concentration /= 1000;
unit = "mol/cm3";
}
else if (unit.equals("mole/m3") || unit.equals("mol/m3")) {
concentration /= 1000000;
unit = "mol/cm3";
}
else if (unit.equals("molecule/cm3") || unit.equals("molecules/cm3")) {
concentration /= 6.022e23;
}
else if (!unit.equals("mole/cm3") && !unit.equals("mol/cm3")) {
throw new InvalidUnitException("Species Concentration in condition.txt!");
}
//GJB to allow "unreactive" species that only follow user-defined library reactions.
// They will not react according to RMG reaction families
boolean IsReactive = true;
boolean IsConstantConcentration = false;
while (st.hasMoreTokens()) {
String reactive = st.nextToken().trim();
if (reactive.equalsIgnoreCase("unreactive"))
IsReactive = false;
if (reactive.equalsIgnoreCase("constantconcentration"))
IsConstantConcentration=true;
}
Graph g = ChemParser.readChemGraph(reader);
ChemGraph cg = null;
try {
cg = ChemGraph.make(g);
}
catch (ForbiddenStructureException e) {
System.out.println("Forbidden Structure:\n" + e.getMessage());
throw new InvalidSymbolException("A species in the input file has a forbidden structure.");
}
//System.out.println(name);
Species species = Species.make(name,cg);
species.setReactivity(IsReactive); // GJB
species.setConstantConcentration(IsConstantConcentration);
speciesSet.put(name, species);
getSpeciesSeed().add(species);
double flux = 0;
int species_type = 1; // reacted species
SpeciesStatus ss = new SpeciesStatus(species,species_type,concentration,flux);
speciesStatus.put(species, ss);
line = ChemParser.readMeaningfulLine(reader);
}
ReactionTime initial = new ReactionTime(0,"S");
//10/23/07 gmagoon: modified for handling multiple temperature, pressure conditions; note: concentration within speciesStatus (and list of conversion values) should not need to be modified for each T,P since this is done within isTPCconsistent in ReactionSystem
initialStatusList = new LinkedList();
for (Iterator iter = tempList.iterator(); iter.hasNext(); ) {
TemperatureModel tm = (TemperatureModel)iter.next();
for (Iterator iter2 = presList.iterator(); iter2.hasNext(); ){
PressureModel pm = (PressureModel)iter2.next();
// LinkedHashMap speStat = (LinkedHashMap)speciesStatus.clone();//10/31/07 gmagoon: trying creating multiple instances of speciesStatus to address issues with concentration normalization (last normalization seems to apply to all)
Set ks = speciesStatus.keySet();
LinkedHashMap speStat = new LinkedHashMap();
for (Iterator iter3 = ks.iterator(); iter3.hasNext();){//11/1/07 gmagoon: perform deep copy; (is there an easier or more elegant way to do this?)
SpeciesStatus ssCopy = (SpeciesStatus)speciesStatus.get(iter3.next());
speStat.put(ssCopy.getSpecies(),new SpeciesStatus(ssCopy.getSpecies(),ssCopy.getSpeciesType(),ssCopy.getConcentration(),ssCopy.getFlux()));
}
initialStatusList.add(new InitialStatus(speStat,tm.getTemperature(initial),pm.getPressure(initial)));
}
}
return speciesSet;
}
public void populateInitialStatusListWithInertSpecies(BufferedReader reader) {
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
StringTokenizer st = new StringTokenizer(line);
String name = st.nextToken().trim();
String conc = st.nextToken();
double inertConc = Double.parseDouble(conc);
String unit = st.nextToken();
unit = ChemParser.removeBrace(unit);
if (unit.equals("mole/l") || unit.equals("mol/l") || unit.equals("mole/liter") || unit.equals("mol/liter")) {
inertConc /= 1000;
unit = "mol/cm3";
}
else if (unit.equals("mole/m3") || unit.equals("mol/m3")) {
inertConc /= 1000000;
unit = "mol/cm3";
}
else if (unit.equals("molecule/cm3") || unit.equals("molecules/cm3")) {
inertConc /= 6.022e23;
unit = "mol/cm3";
}
else if (!unit.equals("mole/cm3") && !unit.equals("mol/cm3")) {
throw new InvalidUnitException("Inert Gas Concentration not recognized: " + unit);
}
//SystemSnapshot.putInertGas(name,inertConc);
for(Iterator iter=initialStatusList.iterator();iter.hasNext(); ){//6/23/09 gmagoon: needed to change this to accommodate non-static inertConc
((InitialStatus)iter.next()).putInertGas(name,inertConc);
}
line = ChemParser.readMeaningfulLine(reader);
}
}
public String readMaxAtomTypes(String line, BufferedReader reader) {
if (line.startsWith("MaxCarbonNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxCarbonNumberPerSpecies:"
int maxCNum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxCarbonNumber(maxCNum);
System.out.println("Note: Overriding RMG-defined MAX_CARBON_NUM with user-defined value: " + maxCNum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxOxygenNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxOxygenNumberPerSpecies:"
int maxONum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxOxygenNumber(maxONum);
System.out.println("Note: Overriding RMG-defined MAX_OXYGEN_NUM with user-defined value: " + maxONum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxRadicalNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxRadicalNumberPerSpecies:"
int maxRadNum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxRadicalNumber(maxRadNum);
System.out.println("Note: Overriding RMG-defined MAX_RADICAL_NUM with user-defined value: " + maxRadNum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxSulfurNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxSulfurNumberPerSpecies:"
int maxSNum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxSulfurNumber(maxSNum);
System.out.println("Note: Overriding RMG-defined MAX_SULFUR_NUM with user-defined value: " + maxSNum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxSiliconNumber")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxSiliconNumberPerSpecies:"
int maxSiNum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxSiliconNumber(maxSiNum);
System.out.println("Note: Overriding RMG-defined MAX_SILICON_NUM with user-defined value: " + maxSiNum);
line = ChemParser.readMeaningfulLine(reader);
}
if (line.startsWith("MaxHeavyAtom")) {
StringTokenizer st = new StringTokenizer(line);
String dummyString = st.nextToken(); // This should hold "MaxHeavyAtomPerSpecies:"
int maxHANum = Integer.parseInt(st.nextToken());
ChemGraph.setMaxHeavyAtomNumber(maxHANum);
System.out.println("Note: Overriding RMG-defined MAX_HEAVYATOM_NUM with user-defined value: " + maxHANum);
line = ChemParser.readMeaningfulLine(reader);
}
return line;
}
public ReactionModelEnlarger getReactionModelEnlarger() {
return reactionModelEnlarger;
}
public LinkedList getTempList() {
return tempList;
}
public LinkedList getPressList() {
return presList;
}
public LinkedList getInitialStatusList() {
return initialStatusList;
}
public void writeBackupRestartFiles(String[] listOfFiles) {
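// Back up any existing restart files by renaming each one to "<name>~"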
for (int i=0; i<listOfFiles.length; i++) {
File temporaryRestartFile = new File(listOfFiles[i]);
if (temporaryRestartFile.exists()) temporaryRestartFile.renameTo(new File(listOfFiles[i]+"~"));
}
}
public void removeBackupRestartFiles(String[] listOfFiles) {
for (int i=0; i<listOfFiles.length; i++) {
File temporaryRestartFile = new File(listOfFiles[i]+"~");
temporaryRestartFile.delete();
}
}
public static boolean rerunFameWithAdditionalGrains() {
return rerunFame;
}
public void setLimitingReactantID(int id) {
limitingReactantID = id;
}
public int getLimitingReactantID() {
return limitingReactantID;
}
public void readAndMakePTransL(BufferedReader reader) {
int numPTLs = 0;
String line = ChemParser.readMeaningfulLine(reader);
while (!line.equals("END")) {
String[] tempString = line.split("Name: ");
String name = tempString[tempString.length-1].trim();
line = ChemParser.readMeaningfulLine(reader);
tempString = line.split("Location: ");
String path = tempString[tempString.length-1].trim();
if (numPTLs==0) {
setPrimaryTransportLibrary(new PrimaryTransportLibrary(name,path));
++numPTLs;
}
else {
getPrimaryTransportLibrary().appendPrimaryTransportLibrary(name,path);
++numPTLs;
}
line = ChemParser.readMeaningfulLine(reader);
}
if (numPTLs == 0) setPrimaryTransportLibrary(null);
}
public PrimaryTransportLibrary getPrimaryTransportLibrary() {
return primaryTransportLibrary;
}
public void setPrimaryTransportLibrary(PrimaryTransportLibrary p_primaryTransportLibrary) {
primaryTransportLibrary = p_primaryTransportLibrary;
}
}
|
package edu.iu.grid.oim.view.divrep.form;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.LinkedHashMap;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.log4j.Logger;
import com.divrep.DivRep;
import com.divrep.DivRepEvent;
import com.divrep.DivRepEventListener;
import com.divrep.common.DivRepButton;
import com.divrep.common.DivRepDate;
import com.divrep.common.DivRepSelectBox;
import com.divrep.common.DivRepStaticContent;
import com.divrep.common.DivRepTextBox;
import edu.iu.grid.oim.model.CertificateRequestStatus;
import edu.iu.grid.oim.model.UserContext;
import edu.iu.grid.oim.model.db.CertificateRequestUserModel;
import edu.iu.grid.oim.model.db.VOModel;
import edu.iu.grid.oim.model.db.record.CertificateRequestUserRecord;
import edu.iu.grid.oim.model.db.record.VORecord;
import edu.iu.grid.oim.view.divrep.VOSelector;
public class CertificateSearchUserForm extends DivRep
{
private static final long serialVersionUID = -7040235927943722402L;
static Logger log = Logger.getLogger(CertificateSearchUserForm.class);
private UserContext context;
//private Authorization auth;
private String base_url;
private DivRepStaticContent message;
private DivRepTextBox request_id;
private DivRepTextBox serial_id;
private DivRepTextBox dn_contains;
private DivRepSelectBox status;
private VOSelector vo;
private DivRepDate request_after;
private DivRepDate request_before;
private DivRepSelectBox signer;
private DivRepButton submit;
private Tab active_tab;
private ArrayList<Tab> tabs;
private abstract class Tab {
String id;
String name;
public Tab(String id, String name) {
this.id = id;
this.name = name;
}
public String getID() { return id; }
public void renderTab(PrintWriter out) {
if(active_tab == this) {
out.write("<li class=\"active\">");
} else {
out.write("<li>");
}
out.write("<a href=\"#"+id+"\" data-toggle=\"tab\">"+name+"</a>");
out.write("</li>");
}
public void renderPane(PrintWriter out) {
if(active_tab == this) {
out.write("<div class=\"tab-pane active\" id=\""+id+"\">");
} else {
out.write("<div class=\"tab-pane\" id=\""+id+"\">");
}
render(out);
out.write("</div>");
}
abstract void render(PrintWriter out);
abstract ArrayList<CertificateRequestUserRecord> search(CertificateRequestUserModel model);
}
class OtherTab extends Tab {
public OtherTab() {
super("other", "Others");
}
@Override
void render(PrintWriter out) {
out.write("<div class=\"row-fluid\">");
out.write("<div class=\"span4\">");
dn_contains.render(out);
status.render(out);
vo.render(out);
signer.render(out);
out.write("</div>"); //span4
out.write("<div class=\"span4 duration\">");
request_after.render(out);
out.write("</div>");//span4
out.write("<div class=\"span4 duration\">");
request_before.render(out);
out.write("</div>"); //span4
out.write("</div>"); //row-fluid
}
ArrayList<CertificateRequestUserRecord> search(CertificateRequestUserModel model) {
ArrayList<CertificateRequestUserRecord> recs = new ArrayList<CertificateRequestUserRecord>();
try {
String status_str = null;
if(status.getValue() != null) {
status_str = CertificateRequestStatus.toStatus(status.getValue());
}
String dn_str = null;
if(dn_contains.getValue() != null && !dn_contains.getValue().trim().isEmpty()) {
dn_str = dn_contains.getValue();
}
recs = model.search(dn_str, status_str, vo.getValue(), request_after.getValue(), request_before.getValue());
if(recs.isEmpty()) {
message.setHtml("<p class=\"alert\">No matching user certificates.</p>");
}
} catch (SQLException e) {
log.error("Failed to search by dn_contains", e);
}
return recs;
}
}
class RequestTab extends Tab {
public RequestTab() {
super("request", "Request ID");
}
@Override
void render(PrintWriter out) {
request_id.render(out);
}
@Override
ArrayList<CertificateRequestUserRecord> search(CertificateRequestUserModel model) {
ArrayList<CertificateRequestUserRecord> recs = new ArrayList<CertificateRequestUserRecord>();
if(request_id.getValue() != null && !request_id.getValue().isEmpty()) {
try {
Integer int_id = Integer.parseInt(request_id.getValue());
//if a request id is specified, it takes precedence
CertificateRequestUserRecord rec = model.get(int_id);
if(rec != null) {
recs.add(rec);
} else {
message.setHtml("<p class=\"alert\">No matching user certificate with request ID: " + request_id.getValue() + "</p>");
}
} catch (NumberFormatException e) {
//the value was not a number
message.setHtml("<p class=\"alert\">Please specify an integer: " + request_id.getValue() + "</p>");
} catch (SQLException e) {
log.error("Failed to search by request_id", e);
}
}
return recs;
}
}
class SerialTab extends Tab {
public SerialTab() {
super("serial", "Serial Number");
}
@Override
void render(PrintWriter out) {
serial_id.render(out);
}
@Override
ArrayList<CertificateRequestUserRecord> search(CertificateRequestUserModel model) {
ArrayList<CertificateRequestUserRecord> recs = new ArrayList<CertificateRequestUserRecord>();
if(serial_id.getValue() != null && !serial_id.getValue().isEmpty()) {
try {
//if a serial number is specified, it takes precedence
CertificateRequestUserRecord rec = model.getBySerialID(serial_id.getValue());
if(rec != null) {
recs.add(rec);
} else {
message.setHtml("<p class=\"alert\">No matching user certificate with serial number: " + serial_id.getValue() + "</p>");
}
} catch (SQLException e) {
log.error("Failed to search by serial_id", e);
}
}
return recs;
}
}
public CertificateSearchUserForm(final HttpServletRequest request, final UserContext context) {
super(context.getPageRoot());
this.context = context;
base_url = request.getRequestURI();
message = new DivRepStaticContent(this, "");
request_id = new DivRepTextBox(this);
request_id.setLabel("Request ID");
//request_id.setWidth(200);
serial_id = new DivRepTextBox(this);
serial_id.setLabel("Serial Number");
dn_contains = new DivRepTextBox(this);
dn_contains.setLabel("DN Contains");
//dn_contains.setWidth(210);
status = new DivRepSelectBox(this);
status.setLabel("Status");
status.setNullLabel("(Any)");
LinkedHashMap<Integer, String> keyvalues = new LinkedHashMap<Integer, String>();
int i = 0;
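// Enumerate every defined certificate request status until toStatus() returns null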
while(true) {
String st = CertificateRequestStatus.toStatus(i);
if(st == null) break;
keyvalues.put(i, st);
++i;
}
status.setValues(keyvalues);
vo = new VOSelector(this, context);
vo.setLabel("VO");
vo.setNullLabel("(Any)");
signer = new DivRepSelectBox(this);
signer.setLabel("Signer");
signer.setNullLabel("(Any)");
LinkedHashMap<Integer, String> keyvalues2 = new LinkedHashMap<Integer, String>();
keyvalues2.put(0, "CILogon");
keyvalues2.put(1, "Digicert");
signer.setValues(keyvalues2);
request_after = new DivRepDate(this);
//Calendar today = new GregorianCalendar();
Calendar last_year = new GregorianCalendar();
last_year.add(Calendar.MONTH, -6);
request_after.setValue(last_year.getTime());
request_after.setLabel("Requested After");
request_before = new DivRepDate(this);
request_before.setLabel("Requested Before");
/* doesn't work
//set min date
try {
String string = "January 1, 2012";
Date date = new SimpleDateFormat("MMMM d, yyyy", Locale.ENGLISH).parse(string);
request_after.setMinDate(date);
request_before.setMinDate(date);
} catch (ParseException e1) {
log.error("Failed to set min date");
}
*/
submit = new DivRepButton(this, "Search");
submit.addClass("btn");
submit.addClass("btn-primary");
//submit.addClass("pull-right");
submit.addEventListener(new DivRepEventListener() {
@Override
public void handleEvent(DivRepEvent e) {
//construct parameter and refresh
StringBuffer url = new StringBuffer(base_url);
url.append("?");
if(request_id.getValue() != null) {
url.append("request_id="+request_id.getValue()+"&");
}
if(serial_id.getValue() != null) {
url.append("serial_id="+serial_id.getValue()+"&");
}
if(dn_contains.getValue() != null) {
url.append("dn_contains="+StringEscapeUtils.escapeHtml(dn_contains.getValue())+"&");
}
if(status.getValue() != null) {
url.append("status="+status.getValue()+"&");
}
if(vo.getValue() != null) {
url.append("vo="+vo.getValue()+"&");
}
if(signer.getValue() != null) {
url.append("signer="+signer.getValue()+"&");
}
if(request_after.getValue() != null) {
url.append("request_after="+request_after.getValue().getTime()+"&");
}
if(request_before.getValue() != null) {
url.append("request_before="+request_before.getValue().getTime()+"&");
}
url.append("active="+active_tab.getID());
submit.redirect(url.toString());
}
});
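/*
 * Note on the redirect URL built above: field values are concatenated into
 * the query string directly, and only dn_contains is HTML-escaped, which is
 * not the same as URL encoding. A minimal sketch of the safer form, assuming
 * the standard java.net.URLEncoder is acceptable here (its checked
 * UnsupportedEncodingException for "UTF-8" can simply be caught and logged):
 *
 *   url.append("dn_contains=" + java.net.URLEncoder.encode(dn_contains.getValue(), "UTF-8") + "&");
 *
 * This keeps characters such as '&', '=' and spaces in a DN from breaking
 * parameter parsing when the page reloads.
 */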
tabs = new ArrayList<Tab>();
tabs.add(new RequestTab());
tabs.add(new SerialTab());
tabs.add(new OtherTab());
//set current values
request_id.setValue(request.getParameter("request_id"));
serial_id.setValue(request.getParameter("serial_id"));
dn_contains.setValue(request.getParameter("dn_contains"));
if(request.getParameter("vo") != null) {
vo.setValue(Integer.parseInt(request.getParameter("vo")));
}
if(request.getParameter("signer") != null) {
signer.setValue(Integer.parseInt(request.getParameter("signer")));
}
//DateFormat df = new SimpleDateFormat("EEE MMM dd kk:mm:ss z yyyy");
if(request.getParameter("request_after") != null) {
long time = Long.parseLong(request.getParameter("request_after"));
Date d = new Date(time);
request_after.setValue(d);
}
if(request.getParameter("request_before") != null) {
long time = Long.parseLong(request.getParameter("request_before"));
Date d = new Date(time);
request_before.setValue(d);
}
if(request.getParameter("status") != null) {
status.setValue(Integer.parseInt(request.getParameter("status")));
}
if(request.getParameter("active") != null) {
active_tab = findTab(request.getParameter("active"));
} else {
active_tab = tabs.get(0);
}
}
protected void onEvent(DivRepEvent e) {
if(e.action.equals("shown")) {
active_tab = findTab(e.value.substring(1));//remove # from #tabid
}
log.debug("selecting tab:" + e.value);
}
private Tab findTab(String id) {
for(Tab tab : tabs) {
if(tab.getID().equals(id)) {
return tab;
}
}
return null;
}
public ArrayList<CertificateRequestUserRecord> search() {
//ArrayList<CertificateRequestUserRecord> recs = new ArrayList<CertificateRequestUserRecord>();
CertificateRequestUserModel model = new CertificateRequestUserModel(context);
return active_tab.search(model);
}
@Override
public void render(PrintWriter out) {
out.write("<div id=\""+getNodeID()+"\">");
//form
out.write("<div class=\"tabbable\">");
//tabs
out.write("<ul class=\"nav nav-tabs\">");
for(Tab tab : tabs) {
tab.renderTab(out);
}
out.write("</ul>");//nav-tabs
//tab panes
out.write("<div class=\"tab-content\">");
for(Tab tab : tabs) {
tab.renderPane(out);
}
out.write("</div>");//tab-content
out.write("</div>");//tabbable
out.write("<div class=\"form-actions\">");
submit.render(out);
out.write("</div>");
message.render(out);
out.write("<script>\n");
out.write("$('#"+getNodeID()+" a[data-toggle=\"tab\"]').on('shown', function (e) {\n");
out.write("divrep(\""+getNodeID()+"\", e, e.target.hash);");
out.write("})\n");
out.write("</script>\n");
out.write("</div>");
}
}
|
package com.jme.math;
import java.util.logging.Level;
import com.jme.system.JmeException;
import com.jme.util.LoggingSystem;
/**
* <code>Matrix</code> defines and maintains a 4x4 matrix. This matrix is
* intended for use in a translation and rotational capacity. It provides
* convenience methods for creating the matrix from a multitude of sources.
*
* @author Mark Powell
* @version $Id: Matrix4f.java,v 1.2 2004-02-01 17:29:37 mojomonkey Exp $
*/
public class Matrix4f {
private float matrix[][];
/**
* Constructor instantiates a new <code>Matrix</code> that is set to the
* identity matrix.
*
*/
public Matrix4f() {
matrix = new float[4][4];
loadIdentity();
}
/**
* Constructor instantiates a new <code>Matrix</code> that is set to the
* provided matrix. This constructor copies a given Matrix. If the
* provided matrix is null, the constructor sets the matrix to the
* identity.
* @param mat the matrix to copy.
*/
public Matrix4f(Matrix4f mat) {
copy(mat);
}
/**
* <code>copy</code> transfers the contents of a given matrix to this
* matrix. If a null matrix is supplied, this matrix is set to the
* identity matrix.
* @param matrix the matrix to copy.
*/
public void copy(Matrix4f matrix) {
if (null == matrix) {
loadIdentity();
} else {
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
this.matrix[i][j] = matrix.matrix[i][j];
}
}
}
}
/**
* <code>get</code> retrieves a value from the matrix at the given
* position. If the position is invalid a <code>JmeException</code>
* is thrown.
* @param i the row index.
* @param j the column index.
* @return the value at (i, j).
*/
public float get(int i, int j) {
if (i < 0 || i > 3 || j < 0 || j > 3) {
LoggingSystem.getLogger().log(
Level.WARNING,
"Invalid matrix index.");
throw new JmeException("Invalid indices into matrix.");
}
return matrix[i][j];
}
/**
* <code>getColumn</code> returns one of the four columns specified by the
* parameter. This column is returned as a float array of length 4.
*
* @param i the column to retrieve. Must be between 0 and 3.
* @return the column specified by the index.
*/
public float[] getColumn(int i) {
if (i < 0 || i > 3) {
LoggingSystem.getLogger().log(
Level.WARNING,
"Invalid column index.");
throw new JmeException("Invalid column index. " + i);
}
return new float[] { matrix[0][i], matrix[1][i], matrix[2][i], matrix[3][i] };
}
/**
*
* <code>setColumn</code> sets a particular column of this matrix to that
* represented by the provided vector.
* @param i the column to set.
* @param column the data to set.
*/
public void setColumn(int i, float[] column) {
if (i < 0 || i > 3) {
LoggingSystem.getLogger().log(
Level.WARNING,
"Invalid column index.");
throw new JmeException("Invalid column index. " + i);
}
if (column.length != 4) {
LoggingSystem.getLogger().log(
Level.WARNING,
"Column is not length 4. Ignoring.");
return;
}
matrix[0][i] = column[0];
matrix[1][i] = column[1];
matrix[2][i] = column[2];
matrix[3][i] = column[3];
}
/**
* <code>set</code> places a given value into the matrix at the given
* position. If the position is invalid a <code>JmeException</code>
* is thrown.
* @param i the row index.
* @param j the column index.
* @param value the value for (i, j).
*/
public void set(int i, int j, float value) {
if (i < 0 || i > 3 || j < 0 || j > 3) {
LoggingSystem.getLogger().log(
Level.WARNING,
"Invalid matrix index.");
throw new JmeException("Invalid indices into matrix.");
}
matrix[i][j] = value;
}
/**
* <code>set</code> sets the values of this matrix from an array of
* values.
* @param matrix the 4x4 array of values to use.
* @throws JmeException if the array is not 4x4.
*/
public void set(float[][] matrix) {
if (matrix.length != 4 || matrix[0].length != 4) {
throw new JmeException("Array must be 4x4.");
}
this.matrix = matrix;
}
/**
* <code>set</code> sets the values of this matrix from a flat array of
* 16 values, read in column-major order (the first four values fill the
* first column).
* @param matrix the array of values to copy into this matrix.
* @throws JmeException if the array is not of size 16.
*/
public void set(float[] matrix) {
if (matrix.length != 16) {
throw new JmeException("Array must be of size 16.");
}
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
this.matrix[i][j] = matrix[j * 4 + i];
}
}
}
/**
*
* <code>set</code> defines the values of the matrix based on a supplied
* <code>Quaternion</code>. It should be noted that all previous values
* will be overridden.
* @param quaternion the quaternion to create a rotational matrix from.
*/
public void set(Quaternion quaternion) {
loadIdentity();
matrix[0][0] =
(float) (1.0
- 2.0 * quaternion.y * quaternion.y
- 2.0 * quaternion.z * quaternion.z);
matrix[1][0] =
(float) (2.0 * quaternion.x * quaternion.y
+ 2.0 * quaternion.w * quaternion.z);
matrix[2][0] =
(float) (2.0 * quaternion.x * quaternion.z
- 2.0 * quaternion.w * quaternion.y);
matrix[0][1] =
(float) (2.0 * quaternion.x * quaternion.y
- 2.0 * quaternion.w * quaternion.z);
matrix[1][1] =
(float) (1.0
- 2.0 * quaternion.x * quaternion.x
- 2.0 * quaternion.z * quaternion.z);
matrix[2][1] =
(float) (2.0 * quaternion.y * quaternion.z
+ 2.0 * quaternion.w * quaternion.x);
matrix[0][2] =
(float) (2.0 * quaternion.x * quaternion.z
+ 2.0 * quaternion.w * quaternion.y);
matrix[1][2] =
(float) (2.0 * quaternion.y * quaternion.z
- 2.0 * quaternion.w * quaternion.x);
matrix[2][2] =
(float) (1.0
- 2.0 * quaternion.x * quaternion.x
- 2.0 * quaternion.y * quaternion.y);
}
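/*
 * Worked example (a sketch; it assumes only the public x, y, z, w fields of
 * Quaternion that are already used above): a unit quaternion for a 90 degree
 * rotation about the Z axis has (x, y, z, w) = (0, 0, sin 45, cos 45).
 * Substituting into the formulas above gives the rotation block
 *
 *   [ 0 -1  0 ]
 *   [ 1  0  0 ]
 *   [ 0  0  1 ]
 *
 * which maps the X axis onto the Y axis, as a 90 degree Z rotation should.
 */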
/**
* <code>loadIdentity</code> sets this matrix to the identity matrix,
* namely all zeros with ones along the diagonal.
*
*/
public void loadIdentity() {
matrix = new float[4][4];
matrix[0][0] = matrix[1][1] = matrix[2][2] = matrix[3][3] = 1;
}
/**
* <code>multiply</code> multiplies this matrix by a scalar.
* @param scalar the scalar to multiply this matrix by.
*/
public void multiply(float scalar) {
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
matrix[i][j] *= scalar;
}
}
}
/**
* <code>mult</code> multiplies this matrix with another matrix and returns
* the result as a new matrix. This matrix is on the left hand side, while
* the parameter matrix is on the right. The matrices are combined as affine
* transforms (a 3x3 rotation part plus a translation), and the bottom-right
* element of the result is set to 1.
* @param in2 the matrix to multiply this matrix by.
* @return the resultant matrix.
*/
public Matrix4f mult(Matrix4f in2) {
Matrix4f out = new Matrix4f();
out.matrix[0][0] =
matrix[0][0] * in2.matrix[0][0]
+ matrix[0][1] * in2.matrix[1][0]
+ matrix[0][2] * in2.matrix[2][0];
out.matrix[0][1] =
matrix[0][0] * in2.matrix[0][1]
+ matrix[0][1] * in2.matrix[1][1]
+ matrix[0][2] * in2.matrix[2][1];
out.matrix[0][2] =
matrix[0][0] * in2.matrix[0][2]
+ matrix[0][1] * in2.matrix[1][2]
+ matrix[0][2] * in2.matrix[2][2];
out.matrix[0][3] =
matrix[0][0] * in2.matrix[0][3]
+ matrix[0][1] * in2.matrix[1][3]
+ matrix[0][2] * in2.matrix[2][3]
+ matrix[0][3];
out.matrix[1][0] =
matrix[1][0] * in2.matrix[0][0]
+ matrix[1][1] * in2.matrix[1][0]
+ matrix[1][2] * in2.matrix[2][0];
out.matrix[1][1] =
matrix[1][0] * in2.matrix[0][1]
+ matrix[1][1] * in2.matrix[1][1]
+ matrix[1][2] * in2.matrix[2][1];
out.matrix[1][2] =
matrix[1][0] * in2.matrix[0][2]
+ matrix[1][1] * in2.matrix[1][2]
+ matrix[1][2] * in2.matrix[2][2];
out.matrix[1][3] =
matrix[1][0] * in2.matrix[0][3]
+ matrix[1][1] * in2.matrix[1][3]
+ matrix[1][2] * in2.matrix[2][3]
+ matrix[1][3];
out.matrix[2][0] =
matrix[2][0] * in2.matrix[0][0]
+ matrix[2][1] * in2.matrix[1][0]
+ matrix[2][2] * in2.matrix[2][0];
out.matrix[2][1] =
matrix[2][0] * in2.matrix[0][1]
+ matrix[2][1] * in2.matrix[1][1]
+ matrix[2][2] * in2.matrix[2][1];
out.matrix[2][2] =
matrix[2][0] * in2.matrix[0][2]
+ matrix[2][1] * in2.matrix[1][2]
+ matrix[2][2] * in2.matrix[2][2];
out.matrix[2][3] =
matrix[2][0] * in2.matrix[0][3]
+ matrix[2][1] * in2.matrix[1][3]
+ matrix[2][2] * in2.matrix[2][3]
+ matrix[2][3];
out.matrix[3][0] =
this.matrix[0][0] * in2.get(3, 0)
+ this.matrix[1][0] * in2.get(3, 1)
+ this.matrix[2][0] * in2.get(3, 2)
+ this.matrix[3][0];
out.matrix[3][1] =
this.matrix[0][1] * in2.get(3, 0)
+ this.matrix[1][1] * in2.get(3, 1)
+ this.matrix[2][1] * in2.get(3, 2)
+ this.matrix[3][1];
out.matrix[3][2] =
this.matrix[0][2] * in2.get(3, 0)
+ this.matrix[1][2] * in2.get(3, 1)
+ this.matrix[2][2] * in2.get(3, 2)
+ this.matrix[3][2];
out.matrix[3][3] = 1;
return out;
}
/**
* <code>mult</code> multiplies a vector by the rotational part of this
* matrix and returns the resulting vector.
* @param vec the vector to multiply.
* @return the rotated vector, or null if the supplied vector is null.
*/
public Vector3f mult(Vector3f vec) {
if (null == vec) {
LoggingSystem.getLogger().log(
Level.WARNING,
"Source vector is" + " null, null result returned.");
return null;
}
Vector3f product = new Vector3f();
product.x =
matrix[0][0] * vec.x + matrix[0][1] * vec.y + matrix[0][2] * vec.z;
product.y =
matrix[1][0] * vec.x + matrix[1][1] * vec.y + matrix[1][2] * vec.z;
product.z =
matrix[2][0] * vec.x + matrix[2][1] * vec.y + matrix[2][2] * vec.z;
return product;
}
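/*
 * Usage sketch, relying only on the no-argument Vector3f constructor and the
 * public x, y, z fields already used above. With the matrix set to the
 * 90 degree Z rotation from the quaternion example, the X axis is mapped
 * onto the Y axis:
 *
 *   Quaternion q = ...;            // a unit quaternion for a 90 degree rotation about Z
 *   Matrix4f m = new Matrix4f();
 *   m.set(q);
 *   Vector3f v = new Vector3f();
 *   v.x = 1; v.y = 0; v.z = 0;
 *   Vector3f rotated = m.mult(v);  // rotated is approximately (0, 1, 0)
 */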
/**
* <code>add</code> adds the values of a parameter matrix to this matrix.
* @param matrix the matrix to add to this.
*/
public void add(Matrix4f matrix) {
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
this.matrix[i][j] += matrix.get(i, j);
}
}
}
/**
* <code>setTranslation</code> will set the matrix's translation values.
* @param translation the new values for the translation.
* @throws JmeException if translation is not size 3.
*/
public void setTranslation(float[] translation) {
if (translation.length != 3) {
throw new JmeException("Translation size must be 3.");
}
matrix[3][0] = translation[0];
matrix[3][1] = translation[1];
matrix[3][2] = translation[2];
}
/**
* <code>setInverseTranslation</code> will set the matrix's inverse
* translation values.
* @param translation the new values for the inverse translation.
* @throws JmeException if translation is not size 3.
*/
public void setInverseTranslation(float[] translation) {
if (translation.length != 3) {
throw new JmeException("Translation size must be 3.");
}
matrix[3][0] = -translation[0];
matrix[3][1] = -translation[1];
matrix[3][2] = -translation[2];
}
/**
* <code>angleRotation</code> sets this matrix to that
* of a rotation about three axes (x, y, z). Where each
* axis has a specified rotation in degrees. These rotations
* are expressed in a single <code>Vector3f</code> object.
* @param angles the angles to rotate.
*/
public void angleRotation(Vector3f angles) {
float angle;
float sr, sp, sy, cr, cp, cy;
angle = (float) (angles.z * (Math.PI * 2 / 360));
sy = (float) java.lang.Math.sin(angle);
cy = (float) java.lang.Math.cos(angle);
angle = (float) (angles.y * (Math.PI * 2 / 360));
sp = (float) java.lang.Math.sin(angle);
cp = (float) java.lang.Math.cos(angle);
angle = (float) (angles.x * (Math.PI * 2 / 360));
sr = (float) java.lang.Math.sin(angle);
cr = (float) java.lang.Math.cos(angle);
// matrix = (Z * Y) * X
matrix[0][0] = cp * cy;
matrix[1][0] = cp * sy;
matrix[2][0] = -sp;
matrix[0][1] = sr * sp * cy + cr * -sy;
matrix[1][1] = sr * sp * sy + cr * cy;
matrix[2][1] = sr * cp;
matrix[0][2] = (cr * sp * cy + -sr * -sy);
matrix[1][2] = (cr * sp * sy + -sr * cy);
matrix[2][2] = cr * cp;
matrix[0][3] = 0.0f;
matrix[1][3] = 0.0f;
matrix[2][3] = 0.0f;
}
/**
* <code>setRotationQuaternion</code> builds a rotation from a
* <code>Quaternion</code>.
* @param quat the quaternion to build the rotation from.
* @throws JmeException if quat is null.
*/
public void setRotationQuaternion(Quaternion quat) {
if (null == quat) {
throw new JmeException("Quat may not be null.");
}
matrix[0][0] =
(float) (1.0 - 2.0 * quat.y * quat.y - 2.0 * quat.z * quat.z);
matrix[0][1] = (float) (2.0 * quat.x * quat.y + 2.0 * quat.w * quat.z);
matrix[0][2] = (float) (2.0 * quat.x * quat.z - 2.0 * quat.w * quat.y);
matrix[1][0] = (float) (2.0 * quat.x * quat.y - 2.0 * quat.w * quat.z);
matrix[1][1] =
(float) (1.0 - 2.0 * quat.x * quat.x - 2.0 * quat.z * quat.z);
matrix[1][2] = (float) (2.0 * quat.y * quat.z + 2.0 * quat.w * quat.x);
matrix[2][0] = (float) (2.0 * quat.x * quat.z + 2.0 * quat.w * quat.y);
matrix[2][1] = (float) (2.0 * quat.y * quat.z - 2.0 * quat.w * quat.x);
matrix[2][2] =
(float) (1.0 - 2.0 * quat.x * quat.x - 2.0 * quat.y * quat.y);
}
/**
* <code>setInverseRotationRadians</code> builds an inverted rotation
* from Euler angles that are in radians.
* @param angles the Euler angles in radians.
* @throws JmeException if angles is not size 3.
*/
public void setInverseRotationRadians(float[] angles) {
if (angles.length != 3) {
throw new JmeException("Angles must be of size 3.");
}
double cr = Math.cos(angles[0]);
double sr = Math.sin(angles[0]);
double cp = Math.cos(angles[1]);
double sp = Math.sin(angles[1]);
double cy = Math.cos(angles[2]);
double sy = Math.sin(angles[2]);
matrix[0][0] = (float) (cp * cy);
matrix[1][0] = (float) (cp * sy);
matrix[2][0] = (float) (-sp);
double srsp = sr * sp;
double crsp = cr * sp;
matrix[0][1] = (float) (srsp * cy - cr * sy);
matrix[1][1] = (float) (srsp * sy + cr * cy);
matrix[2][1] = (float) (sr * cp);
matrix[0][2] = (float) (crsp * cy + sr * sy);
matrix[1][2] = (float) (crsp * sy - sr * cy);
matrix[2][2] = (float) (cr * cp);
}
/**
* <code>setInverseRotationDegrees</code> builds an inverted rotation
* from Euler angles that are in degrees.
* @param angles the Euler angles in degrees.
* @throws JmeException if angles is not size 3.
*/
public void setInverseRotationDegrees(float[] angles) {
if (angles.length != 3) {
throw new JmeException("Angles must be of size 3.");
}
float vec[] = new float[3];
vec[0] = (float) (angles[0] * Math.PI / 180.0);
vec[1] = (float) (angles[1] * Math.PI / 180.0);
vec[2] = (float) (angles[2] * Math.PI / 180.0);
setInverseRotationRadians(vec);
}
/**
*
* <code>inverseTranslateVect</code> translates a given vector by the
* inverse of this matrix's translation part.
* @param vec the vector to be translated.
* @throws JmeException if the size of the vector is not 3.
*/
public void inverseTranslateVect(float[] vec) {
if (vec.length != 3) {
throw new JmeException("Vector must be of size 3.");
}
vec[0] = vec[0] - matrix[3][0];
vec[1] = vec[1] - matrix[3][1];
vec[2] = vec[2] - matrix[3][2];
}
/**
*
* <code>inverseRotateVect</code> rotates a given Vector3f by the rotation
* part of this matrix.
* @param vec the vector to be rotated.
* @throws JmeException if the size of the Vector3f is not 3.
*/
public void inverseRotateVect(float[] vec) {
if (vec.length != 3) {
throw new JmeException("Vector3f must be of size 3.");
}
float x = vec[0];
float y = vec[1];
float z = vec[2];
vec[0] = x * matrix[0][0] + y * matrix[0][1] + z * matrix[0][2];
vec[1] = x * matrix[1][0] + y * matrix[1][1] + z * matrix[1][2];
vec[2] = x * matrix[2][0] + y * matrix[2][1] + z * matrix[2][2];
}
/**
* <code>inverseRotate</code> uses the rotational part of
* the matrix to rotate a vector in the opposite direction.
* @param v the vector to rotate.
* @return the rotated vector.
*/
public Vector3f inverseRotate(Vector3f v) {
Vector3f out = new Vector3f();
out.x = v.x * matrix[0][0] + v.y * matrix[1][0] + v.z * matrix[2][0];
out.y = v.x * matrix[0][1] + v.y * matrix[1][1] + v.z * matrix[2][1];
out.z = v.x * matrix[0][2] + v.y * matrix[1][2] + v.z * matrix[2][2];
return out;
}
/**
* <code>toString</code> returns the string representation of this object.
* It is in a format of a 4x4 matrix. For example, an identity matrix would
* be represented by the following string.
* com.jme.math.Matrix4f<br>
* [<br>
* 1.0 0.0 0.0 0.0<br>
* 0.0 1.0 0.0 0.0<br>
* 0.0 0.0 1.0 0.0<br>
* 0.0 0.0 0.0 1.0 <br>
* ]<br>
*
* @return the string representation of this object.
*/
public String toString() {
String result = "com.jme.math.Matrix4f\n[\n";
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 4; j++) {
result += " " + matrix[i][j] + " ";
}
result += "\n";
}
result += "]";
return result;
}
}
|
package mod._streams.uno;
import com.sun.star.io.XActiveDataSink;
import com.sun.star.io.XActiveDataSource;
import com.sun.star.io.XDataOutputStream;
import com.sun.star.io.XInputStream;
import com.sun.star.io.XOutputStream;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;
import java.io.PrintWriter;
import java.util.Vector;
import lib.StatusException;
import lib.TestCase;
import lib.TestEnvironment;
import lib.TestParameters;
/**
* Test for object which is represented by service
* <code>com.sun.star.io.DataInputStream</code>.
* <ul>
* <li> <code>com::sun::star::io::XInputStream</code></li>
* <li> <code>com::sun::star::io::XDataInputStream</code></li>
* <li> <code>com::sun::star::io::XConnectable</code></li>
* <li> <code>com::sun::star::io::XActiveDataSink</code></li>
* </ul>
* @see com.sun.star.io.DataInputStream
* @see com.sun.star.io.XInputStream
* @see com.sun.star.io.XDataInputStream
* @see com.sun.star.io.XConnectable
* @see com.sun.star.io.XActiveDataSink
* @see ifc.io._XInputStream
* @see ifc.io._XDataInputStream
* @see ifc.io._XConnectable
* @see ifc.io._XActiveDataSink
*/
public class DataInputStream extends TestCase {
/**
* Creates a TestEnvironment for the interfaces to be tested.
* Creates <code>com.sun.star.io.DataInputStream</code> object,
* connects it to <code>com.sun.star.io.DataOutputStream</code>
* through <code>com.sun.star.io.Pipe</code>. All of the possible data
* types are written into <code>DataOutputStream</code>.
* Object relations created :
* <ul>
* <li> <code>'StreamData'</code> for
* {@link ifc.io._XDataInputStream}(the data that should be written into
* the stream) </li>
* <li> <code>'ByteData'</code> for
* {@link ifc.io._XInputStream}(the data that should be written into
* the stream) </li>
* <li> <code>'StreamWriter'</code> for
* {@link ifc.io._XDataInputStream}
* {@link ifc.io._XInputStream}(a stream to write data to) </li>
* <li> <code>'Connectable'</code> for
* {@link ifc.io._XConnectable}(another object that can be connected) </li>
* <li> <code>'InputStream'</code> for
* {@link ifc.io._XActiveDataSink}(an input stream to set and get) </li>
* </ul>
*/
protected TestEnvironment createTestEnvironment(TestParameters Param, PrintWriter log) {
Object oInterface = null;
XMultiServiceFactory xMSF = (XMultiServiceFactory)Param.getMSF();
try {
oInterface = xMSF.createInstance("com.sun.star.io.DataInputStream");
} catch(com.sun.star.uno.Exception e) {
e.printStackTrace(log);
throw new StatusException("Couldn't create instance", e);
}
XInterface oObj = (XInterface) oInterface;
// creating and connecting DataOutputStream to the
// DataInputStream created through the Pipe
XActiveDataSink xDataSink = (XActiveDataSink)
UnoRuntime.queryInterface(XActiveDataSink.class, oObj);
XInterface oPipe = null;
try {
oPipe = (XInterface)
xMSF.createInstance("com.sun.star.io.Pipe");
} catch(com.sun.star.uno.Exception e) {
e.printStackTrace(log);
throw new StatusException("Couldn't create instance", e);
}
XInputStream xPipeInput = (XInputStream)
UnoRuntime.queryInterface(XInputStream.class, oPipe);
XOutputStream xPipeOutput = (XOutputStream)
UnoRuntime.queryInterface(XOutputStream.class, oPipe);
XInterface oDataOutput = null;
try {
oDataOutput = (XInterface)
xMSF.createInstance("com.sun.star.io.DataOutputStream");
} catch(com.sun.star.uno.Exception e) {
e.printStackTrace(log);
throw new StatusException("Couldn't create instance", e);
}
XDataOutputStream xDataOutput = (XDataOutputStream)
UnoRuntime.queryInterface(XDataOutputStream.class, oDataOutput) ;
XActiveDataSource xDataSource = (XActiveDataSource)
UnoRuntime.queryInterface(XActiveDataSource.class, oDataOutput) ;
xDataSource.setOutputStream(xPipeOutput) ;
xDataSink.setInputStream(xPipeInput) ;
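// At this point the plumbing is:
//   DataOutputStream --(xPipeOutput)--> Pipe --(xPipeInput)--> DataInputStream (oObj)
// so anything written through xDataOutput can be read back from the tested object.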
// all data types for writing to an XDataInputStream
Vector data = new Vector() ;
data.add(new Boolean(true)) ;
data.add(new Byte((byte)123)) ;
data.add(new Character((char)1234)) ;
data.add(new Short((short)1234)) ;
data.add(new Integer(123456)) ;
data.add(new Float(1.234)) ;
data.add(new Double(1.23456)) ;
data.add("DataInputStream") ;
// information for writing to the pipe
byte[] byteData = new byte[] {
1, 2, 3, 4, 5, 6, 7, 8 } ;
// creating a connectable object for the XConnectable interface
XInterface xConnect = null;
try {
xConnect = (XInterface)xMSF.createInstance(
"com.sun.star.io.DataInputStream") ;
} catch (Exception e) {
log.println("Can't create DataInputStream");
e.printStackTrace(log);
throw new StatusException("Can't create DataInputStream", e);
}
// creating an input stream to set in XActiveDataSink
XInterface oDataInput = null;
try {
oDataInput = (XInterface) xMSF.createInstance(
"com.sun.star.io.Pipe" );
} catch (com.sun.star.uno.Exception e) {
log.println("Can't create new input stream") ;
e.printStackTrace(log) ;
throw new StatusException("Can't create input stream", e) ;
}
log.println("creating a new environment for object");
TestEnvironment tEnv = new TestEnvironment( oObj );
// adding sequence of data that must be read
// by XDataInputStream interface methods
tEnv.addObjRelation("StreamData", data) ;
// add a writer
tEnv.addObjRelation("StreamWriter", xDataOutput);
// add a connectable
tEnv.addObjRelation("Connectable", xConnect);
// add an inputStream
tEnv.addObjRelation("InputStream", oDataInput);
tEnv.addObjRelation("ByteData", byteData);
return tEnv;
} // finish method createTestEnvironment
}
|
package webCrawler;
import java.io.IOException;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
public class finalSearch {
static String[] google1 = new String[9];
static String[] google2 = new String[9];
//AAPL
public static final String GOOGLE_SEARCH_URL1 = "https:
//INTC
public static final String GOOGLE_SEARCH_URL2 = "https:
//DOW
public static final String GOOGLE_SEARCH_URL3 = "https:
//TSLA
public static final String GOOGLE_SEARCH_URL4 = "https:
//MSFT
public static final String GOOGLE_SEARCH_URL5 = "https:
public static final String GOOGLE_SEARCH_URL6 = "https:
//JPM
public static final String GOOGLE_SEARCH_URL7 = "https:
//AMZN
public static final String GOOGLE_SEARCH_URL8 = "https:
//XOM
public static final String GOOGLE_SEARCH_URL9 = "https:
private static void setter(){
google1[0] = GOOGLE_SEARCH_URL1;
google1[1] = GOOGLE_SEARCH_URL2;
google1[2] = GOOGLE_SEARCH_URL3;
google1[3] = GOOGLE_SEARCH_URL4;
google1[4] = GOOGLE_SEARCH_URL5;
google1[5] = GOOGLE_SEARCH_URL6;
google1[6] = GOOGLE_SEARCH_URL7;
google1[7] = GOOGLE_SEARCH_URL8;
google1[8] = GOOGLE_SEARCH_URL9;
google2[0] = "APPLE";
google2[1] = "INTEL";
google2[2] = "DOW JONES";
google2[3] = "TESLA";
google2[4] = "MICROSOFT";
google2[5] = "GENERAL ELECTRIC";
google2[6] = "JPMORGAN";
google2[7] = "AMAZON";
google2[8] = "EXXON MOBIL";
}
/**
* Search for one word in phrase. The search is not
* case
* sensitive. This method will check that the given goal
* is not a substring of a longer string (so, for
* example, "I know" does not contain "no").
*
* @param statement
* the string to search
* @param goal
* the string to search for
* @param startPos
* the character of the string to begin the
* search at
* @return the index of the first occurrence of goal in
* statement or -1 if it's not found
*/
private int findKeyword(String statement, String goal, int startPos){
String phrase = statement.trim().toLowerCase();
goal = goal.toLowerCase();
// The only change to incorporate the startPos is in
// the line below
int psn = phrase.indexOf(goal, startPos);
// Refinement--make sure the goal isn't part of a
// word
while (psn >= 0){
// Find the string of length 1 before and after
// the word
String before = " ", after = " ";
if (psn > 0){
before = phrase.substring(psn - 1, psn);
}
if (psn + goal.length() < phrase.length()){
after = phrase.substring(
psn + goal.length(),
psn + goal.length() + 1);
}
// If before and after aren't letters, we've
// found the word
if (((before.compareTo("a") < 0) || (before
.compareTo("z") > 0)) // before is not a
// letter
&& ((after.compareTo("a") < 0) || (after
.compareTo("z") > 0)))
{
return psn;
}
// The last position didn't work, so let's find
// the next, if there is one.
psn = phrase.indexOf(goal, psn + 1);
}
return -1;
}
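/*
 * Worked examples of the boundary check above:
 *   findKeyword("go run now", "run", 0)  returns 3, because the match is
 *     surrounded by spaces;
 *   findKeyword("I know", "no", 0)       returns -1, because the only match
 *     sits inside "know" (the character before it, 'k', is a letter).
 */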
/**
if (findKeyword(statement, "swim", 0) >= 0
|| findKeyword(statement, "run", 0) >= 0
|| findKeyword(statement, "football", 0) >= 0
|| findKeyword(statement, "soccer", 0) >= 0
|| findKeyword(statement, "baseball", 0) >= 0
|| findKeyword(statement, "badminton", 0) >= 0
|| findKeyword(statement, "tennis", 0) >= 0
|| findKeyword(statement, "basketball", 0) >= 0)
**/
public static void main(String[] args) throws IOException {
finalSearch.setter();
int counter = 0;
int num = 10;
for (String i : google1){
System.out.println(google2[counter]+" NEWS");
counter++;
String searchURL = i + "?q="+i+"&num="+num;
//without proper User-Agent, we will get 403 error
Document doc = Jsoup.connect(searchURL).userAgent("Chrome").get();
//below will print HTML data, save it to a file and open in browser to compare
//System.out.println(doc.html());
Elements results = doc.select("h3.r > a");
for (Element result : results) {
String linkHref = result.attr("href");
String linkText = result.text();
System.out.println("Text:: " + linkText + ", URL:: " + linkHref.substring(6, linkHref.indexOf("&")));
}
System.out.println();
}
}
}
|
package edu.psu.compbio.seqcode.projects.shaun;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import edu.psu.compbio.seqcode.genome.Genome;
import edu.psu.compbio.seqcode.genome.GenomeConfig;
import edu.psu.compbio.seqcode.genome.Species;
import edu.psu.compbio.seqcode.genome.location.Point;
import edu.psu.compbio.seqcode.genome.location.Region;
import edu.psu.compbio.seqcode.gse.datasets.motifs.CountsBackgroundModel;
import edu.psu.compbio.seqcode.gse.gsebricks.verbs.motifs.WeightMatrixScoreProfile;
import edu.psu.compbio.seqcode.gse.gsebricks.verbs.motifs.WeightMatrixScorer;
import edu.psu.compbio.seqcode.gse.tools.utils.Args;
import edu.psu.compbio.seqcode.gse.utils.ArgParser;
import edu.psu.compbio.seqcode.gse.utils.NotFoundException;
import edu.psu.compbio.seqcode.gse.utils.Pair;
import edu.psu.compbio.seqcode.gse.utils.RealValuedHistogram;
import edu.psu.compbio.seqcode.gse.utils.io.RegionFileUtilities;
import edu.psu.compbio.seqcode.gse.utils.sequence.SequenceUtils;
public class KmerAnalysisSandbox {
private Genome gen;
private int k=6;
private int win=200;
private List<Region> posRegions=null;
private List<Point> posPeaks=null;
private List<String> posSeq=null;
private List<String> posLines=null;
private boolean negLoaded=false;
private List<Region> negRegions=null;
private List<Point> negPeaks=null;
private List<String> negSeq=null;
private double negPseudoCount=1;
private int numRand=100000; //Only used if simulating or randomly picking regions
private int histoBinSize=5;
public static void main(String[] args) throws IOException, ParseException {
ArgParser ap = new ArgParser(args);
GenomeConfig gConfig = new GenomeConfig(args);
if(!ap.hasKey("species") || (!ap.hasKey("k"))|| (!ap.hasKey("win"))|| (!ap.hasKey("peaks"))) {
System.err.println("Usage:\n " +
"KmerAnalysisSandbox \n" +
"Required:\n" +
"\t--species <organism;genome>\n" +
"\t--k <kmer length>\n" +
"\t--win <window around peak to examine>\n" +
"\t--peaks <peaks file>\n" +
"Options:\n" +
"\t--seq <genome fasta seq directory>\n" +
"\t--model <ranked kmer file>\n" +
"\t--neg <filename or random>\n" +
"\t--numrand <number of random positions if random negative selected>\n" +
"\t--out <output filename>\n" +
"\t--rankthres <threshold for kmer matching>\n" +
"\t--enumerate [make a k-mer model]\n" +
"\t--roc [TP vs FP at the peaks level]\n" +
"\t--peakswithkmers [print peaks where the regions contain one of the top kmers]\n" +
"\t--kmerhits [print hits to the top kmers in the peaks]\n" +
"\t--kmerdisthist [histogram of top kmer distances from peaks]\n" +
"\t--recenterpeaks [recenter peaks to the nearest top kmer (where region contains kmer)]\n" +
"\n");
}else{
Genome currgen = gConfig.getGenome();
int k = ap.hasKey("k") ? new Integer(ap.getKeyValue("k")).intValue():6;
int win = ap.hasKey("win") ? new Integer(ap.getKeyValue("win")).intValue():200;
String peaksFile = ap.hasKey("peaks") ? ap.getKeyValue("peaks") : null;
String neg = ap.hasKey("neg") ? ap.getKeyValue("neg") : null;
String out = ap.hasKey("out") ? ap.getKeyValue("out") : "out";
int rankThres = ap.hasKey("rankthres") ? new Integer(ap.getKeyValue("rankthres")).intValue():-1;
int numrand = ap.hasKey("numrand") ? new Integer(ap.getKeyValue("numrand")).intValue():-1;
KmerAnalysisSandbox analyzer = new KmerAnalysisSandbox(currgen, k, win, peaksFile);
analyzer.setNumRand(numrand);
KmerModel model = null;
if(ap.hasKey("enumerate")){
model = analyzer.estimateKmerModel(neg);
analyzer.printKmerModel(model, out);
}else if(ap.hasKey("model")){
model = analyzer.loadKmerModel(ap.getKeyValue("model"));
}
if(model != null){
if(ap.hasKey("roc")){
analyzer.ROC(model, neg);
}
if(ap.hasKey("peakswithkmers") && rankThres>0){
analyzer.printPeaksWithKmers(model, rankThres);
}
if(ap.hasKey("kmerhits") && rankThres>0){
analyzer.printKmersAtPeaks(model, rankThres);
}
if(ap.hasKey("recenterpeaks") && rankThres>0){
analyzer.printRecenteredPeaksWithKmers(model, rankThres, out, neg);
}
if(ap.hasKey("kmerdisthist") && rankThres>0){
analyzer.peak2motifHisto(model, rankThres);
}
}
}
}
public KmerAnalysisSandbox(Genome g, int k, int win, String pFile){
gen = g;
this.k=k;
this.win=win;
posRegions = RegionFileUtilities.loadRegionsFromPeakFile(gen, pFile, win);
posPeaks = RegionFileUtilities.loadPeaksFromPeakFile(gen, pFile, win);
posLines = RegionFileUtilities.loadLinesFromFile(pFile);
posSeq = RegionFileUtilities.getSequencesForRegions(posRegions, null);
}
//Accessors
public void setNumRand(int r){if(r>0){numRand=r;}}
/**
* Make a new Kmer model, which is just a ranked list of Kmers.
* @param negative
* @return
*/
public KmerModel estimateKmerModel(String negative){
List<Kmer> kmerList = new ArrayList<Kmer>();
System.err.println("Estimating Kmer models");
loadNegativeRegions(negative);
//Enumerate all k-mers in positive and negative sequences.
//Reuse BackgroundModel code for this purpose
System.err.println("Counting kmers in positive sequences");
CountsBackgroundModel posCounts = CountsBackgroundModel.modelFromSeqList(gen, posSeq, k);
posCounts.degenerateStrands();
System.err.println("Counting kmers in negative sequences");
CountsBackgroundModel negCounts = CountsBackgroundModel.modelFromSeqList(gen, negSeq, k);
negCounts.degenerateStrands();
//Go through all k-mers, and calculate pos/neg enrichment
System.err.println("Positive vs negative enrichment");
List<String> kmers = RegionFileUtilities.getAllKmers(k);
double posTotal=0, negTotal=0;
for(String kmer : kmers){
String revkmer = SequenceUtils.reverseComplement(kmer);
if(RegionFileUtilities.seq2int(kmer) <= RegionFileUtilities.seq2int(revkmer)){//Count each k-mer once only
posTotal += posCounts.getKmerCount(kmer);
negTotal += negCounts.getKmerCount(kmer)+negPseudoCount;
}
}
for(String kmer : kmers){
String revkmer = SequenceUtils.reverseComplement(kmer);
if(RegionFileUtilities.seq2int(kmer) <= RegionFileUtilities.seq2int(revkmer)){//Count each k-mer once only
double posFreq = (double)(posCounts.getKmerCount(kmer)/posTotal);
double negFreq = (double)((negCounts.getKmerCount(kmer)+negPseudoCount)/negTotal);
double eScore = posFreq/negFreq;
Kmer currK = new Kmer(kmer, eScore, posFreq, negFreq);
kmerList.add(currK);
}
}
KmerModel model = new KmerModel(kmerList);
return model;
}
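/*
 * Worked example of the enrichment score computed in estimateKmerModel above
 * (illustrative numbers only): if a k-mer accounts for 0.002 of all counted
 * k-mers in the positive sequences and 0.0005 in the negatives (after the
 * pseudocount), its enrichment is 0.002 / 0.0005 = 4, so it ranks above
 * k-mers that are equally common in both sets (enrichment near 1).
 */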
/**
* TP vs FP at level of peaks
* @param model
*/
public void ROC(KmerModel model, String negative){
loadNegativeRegions(negative);
double ROCAUC=0;
double numPosPassed=0, numNegPassed=0;
double totalPos = (double) posRegions.size();
double totalNeg = (double) negRegions.size();
boolean[] posPassed = new boolean[posRegions.size()];
for(int i=0; i<posRegions.size(); i++){posPassed[i]=false;}
boolean[] negPassed = new boolean[negRegions.size()];
for(int i=0; i<negRegions.size(); i++){negPassed[i]=false;}
double TP=0, FP=0, lastTP=0, lastFP=0;
System.out.println("Kmer\tRevKmer\tEnrich\tPosFreq\tNegFreq\tTP\tFP");
//Iterate through ranked list
for(Kmer mer : model.getKmerList()){
//Look for matches to this mer and its reverse complement in the positive seqs
for(int s=0; s<posSeq.size(); s++){
String seq = posSeq.get(s);
if(!posPassed[s]){
if(seq.indexOf(mer.kmer)!=-1 || seq.indexOf(mer.revkmer)!=-1){
posPassed[s]=true;
numPosPassed++;
}
}
}
//Look for matches to this mer and its reverse complement in the negative seqs
for(int s=0; s<negSeq.size(); s++){
String seq = negSeq.get(s);
if(!negPassed[s]){
if(seq.indexOf(mer.kmer)!=-1 || seq.indexOf(mer.revkmer)!=-1){
negPassed[s]=true;
numNegPassed++;
}
}
}
TP = (numPosPassed/totalPos);
FP = (numNegPassed/totalNeg);
System.out.println(mer+"\t"+TP+"\t"+FP);
ROCAUC += ((FP-lastFP)*lastTP) + ((TP-lastTP)*(FP-lastFP)/2);
lastFP = FP;
lastTP = TP;
if(TP==1 && FP==1)
break;
}
System.out.println("\n\n#ROCAUC\t"+ROCAUC);
}
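/*
 * The ROCAUC accumulation in ROC() above is the trapezoid rule applied to
 * consecutive (FP, TP) points. For example, moving from (FP, TP) =
 * (0.1, 0.4) to (0.2, 0.7) adds
 * (0.2 - 0.1) * 0.4 + (0.7 - 0.4) * (0.2 - 0.1) / 2 = 0.04 + 0.015 = 0.055
 * to the running area.
 */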
/**
* Print the peaks that contain any of the top rankThres k-mers
* @param model
* @param rankThres
*/
public void printPeaksWithKmers(KmerModel model, int rankThres){
int kmerCount=0;
boolean[] posPassed = new boolean[posRegions.size()];
for(int i=0; i<posRegions.size(); i++){posPassed[i]=false;}
for(Kmer mer : model.getKmerList()){
//Look for matches to this mer and its reverse complement in the positive seqs
for(int s=0; s<posSeq.size(); s++){
String seq = posSeq.get(s);
if(!posPassed[s]){
if(seq.indexOf(mer.kmer)!=-1 || seq.indexOf(mer.revkmer)!=-1){
posPassed[s]=true;
System.out.println(posLines.get(s));
}
}
}
kmerCount++;
if(kmerCount>rankThres)
break;
}
}
/**
* Print the peaks that contain any of the top rankThres k-mers
* @param model
* @param rankThres
*/
public void printKmersAtPeaks(KmerModel model, int rankThres){
int kmerCount=0;
boolean[] posPassed = new boolean[posRegions.size()];
for(int i=0; i<posRegions.size(); i++){posPassed[i]=false;}
for(Kmer mer : model.getKmerList()){
//Look for matches to this mer and its reverse complement in the positive seqs
for(int s=0; s<posSeq.size(); s++){
String seq = posSeq.get(s);
Region r = posRegions.get(s);
Point p = posPeaks.get(s);
for(int z=0; z<=seq.length()-k; z++){
String subseq = seq.substring(z, z+k);
int start = r.getStart()+z;
int stop = r.getStart()+z+k-1;
if(subseq.equals(mer.kmer)){
System.out.println(r.getChrom()+":"+start+"-"+stop+":+\t"+mer.enrichment+"\t"+mer.kmer);
}else if(subseq.equals(mer.revkmer)){
System.out.println(r.getChrom()+":"+start+"-"+stop+":-\t"+mer.enrichment+"\t"+mer.kmer);
}
}
}
kmerCount++;
if(kmerCount>rankThres)
break;
}
}
//Histogram of all matches to the motif vs distance to peak
public void peak2motifHisto(KmerModel model, int rankThres){
int binSize = 10, kmerCount=0;
int numBins = (k+(win/2))/binSize;
double [] motifDistHisto = new double [numBins+1];
for(int h=0; h<=numBins; h++){motifDistHisto[h]=0;}
for(Kmer mer : model.getKmerList()){
//Look for matches to this mer and its reverse complement in the positive seqs
for(int s=0; s<posSeq.size(); s++){
String seq = posSeq.get(s);
Region r = posRegions.get(s);
Point p = posPeaks.get(s);
for(int z=0; z<=seq.length()-k; z++){
String subseq = seq.substring(z, z+k);
if(subseq.equals(mer.kmer) || subseq.equals(mer.revkmer)){
int dist = (p.getLocation()-r.getStart())-z;
motifDistHisto[(Math.abs(dist))/binSize]++;
}
}
}
kmerCount++;
if(kmerCount>rankThres)
break;
}
System.out.println("\nPeak-to-Motif Histogram\nDistance\tMotifCounts\tMotifCountsNorm");
for(int h=0; h<=numBins; h++){
int bin =h*binSize;
System.out.println(bin+"\t"+motifDistHisto[h]+"\t"+motifDistHisto[h]/(double)posRegions.size());
}
}
/**
* Print the peaks that contain any of the top rankThres k-mers, recentering the peak on the closest match.
* If negative is null, you just don't get a histogram of expected shifts.
* @param model
* @param rankThres
*/
public void printRecenteredPeaksWithKmers(KmerModel model, int rankThres, String outFile, String negative){
HashMap<Point,Integer> posClosestKmerShift = new HashMap<Point, Integer>();
HashMap<Point,Kmer> posClosestKmer = new HashMap<Point, Kmer>();
HashMap<Point,Integer> negClosestKmerShift = new HashMap<Point, Integer>();
RealValuedHistogram posShiftHisto = new RealValuedHistogram(-win/2, win/2, (win/histoBinSize));
RealValuedHistogram negShiftHisto = new RealValuedHistogram(-win/2, win/2, (win/histoBinSize));
int kmerCount=0;
for(Kmer mer : model.getKmerList()){
Pattern pfor = Pattern.compile(mer.kmer);
Pattern prev = Pattern.compile(mer.revkmer);
//Look for matches to this mer and its reverse complement in the positive seqs
for(int s=0; s<posRegions.size(); s++){
Region r = posRegions.get(s);
Point p = posPeaks.get(s);
int peakOffset = p.getLocation()-r.getStart();
int currShift = posClosestKmerShift.containsKey(p) ? posClosestKmerShift.get(p) : 10000000;
//Forward matches
Matcher m = pfor.matcher(posSeq.get(s));
while(m.find()){
int koffset = ((m.start()+m.end())/2)-peakOffset;
if(Math.abs(koffset)<Math.abs(currShift)){
posClosestKmerShift.put(p, koffset);
posClosestKmer.put(p, mer);
}
}
//Reverse matches
m = prev.matcher(posSeq.get(s));
while(m.find()){
int koffset = ((m.start()+m.end())/2)-peakOffset;
if(Math.abs(koffset)<Math.abs(currShift)){
posClosestKmerShift.put(p, koffset);
posClosestKmer.put(p, mer);
}
}
}
kmerCount++;
if(kmerCount>rankThres)
break;
}
//Print the shifted positive peaks
try{
FileWriter fw = new FileWriter(outFile);
for(Point p : posPeaks){
if(posClosestKmerShift.containsKey(p)){
Point shifted = new Point(p.getGenome(), p.getChrom(), p.getLocation()+posClosestKmerShift.get(p));
int shift = posClosestKmerShift.get(p);
fw.write(shifted+"\t"+shift+"\t"+posClosestKmer.get(p).kmer+"\n");
//Add to histogram
posShiftHisto.addValue(shift);
}
}
System.out.println("#Positive peak kmer shift histogram:");
posShiftHisto.printContents();
fw.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//Process negative sequences if provided
if(negative!=null){
loadNegativeRegions(negative);
kmerCount=0;
for(Kmer mer : model.getKmerList()){
Pattern pfor = Pattern.compile(mer.kmer);
Pattern prev = Pattern.compile(mer.revkmer);
//Look for matches to this mer and its reverse complement in the negative seqs
for(int s=0; s<negRegions.size(); s++){
Region r = negRegions.get(s);
Point p = negPeaks.get(s);
int peakOffset = p.getLocation()-r.getStart();
int currShift = negClosestKmerShift.containsKey(p) ? negClosestKmerShift.get(p) : 10000000;
//Forward matches
Matcher m = pfor.matcher(negSeq.get(s));
while(m.find()){
int koffset = ((m.start()+m.end())/2)-peakOffset;
if(Math.abs(koffset)<Math.abs(currShift))
negClosestKmerShift.put(p, koffset);
}
//Reverse matches
m = prev.matcher(negSeq.get(s));
while(m.find()){
int koffset = ((m.start()+m.end())/2)-peakOffset;
if(Math.abs(koffset)<Math.abs(currShift))
negClosestKmerShift.put(p, koffset);
}
}
kmerCount++;
if(kmerCount>rankThres)
break;
}
for(Point p : negClosestKmerShift.keySet()){
int shift = negClosestKmerShift.get(p);
//Add to histogram
negShiftHisto.addValue(shift);
}
System.out.println("#Negative peak kmer shift histogram:");
negShiftHisto.printContents();
}
}
/**
* Print the kmerModel to an output file
* @param out
*/
public void printKmerModel(KmerModel model, String out){
try{
FileWriter fw = new FileWriter(out);
for(Kmer curr : model.getKmerList()){
fw.write(curr.toString()+"\n");
}
fw.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* Load a kmer model from a file
* @param filename
* @return
*/
public KmerModel loadKmerModel(String filename){
List<Kmer> kList = new ArrayList<Kmer>();
try{
File kFile = new File(filename);
if(!kFile.isFile()){System.err.println("Invalid kmer model file name");System.exit(1);}
BufferedReader reader = new BufferedReader(new FileReader(kFile));
String line = null;
while ((line = reader.readLine()) != null) {
line = line.trim();
String[] words = line.split("\\s+");
String k = words[0];
Double enrich = new Double(words[2]);
kList.add(new Kmer(k, enrich));
}reader.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return new KmerModel(kList);
}
/**
* Load negative regions & sequences
* @param negative
*/
private void loadNegativeRegions(String negative){
if(negative==null){System.err.println("Negative sequences required for this functionality."); System.exit(1);}
System.err.println("Fetching negative sequences");
if(!negLoaded){
//Get the negative sequences first
if(negative==null || negative.equals("random")){
negRegions = RegionFileUtilities.randomRegionPick(gen, posRegions, numRand, win);
negPeaks = RegionFileUtilities.regions2midpoints(negRegions);
negSeq = RegionFileUtilities.getSequencesForRegions(negRegions, null);
}else{
negRegions = RegionFileUtilities.loadRegionsFromPeakFile(gen, negative, win);
negPeaks = RegionFileUtilities.loadPeaksFromPeakFile(gen, negative, win);
negSeq = RegionFileUtilities.getSequencesForRegions(negRegions, null);
}
negLoaded=true;
}
}
/**
* Kmer: represents a scored string.
* The score itself is implementation-specific, so don't rely on any given interpretation.
* Think of this instead as a rankable String.
* @author mahony
*
*/
public class Kmer implements Comparable<Kmer>{
public String kmer;
public String revkmer;
public Double enrichment;
public Double posFreq;
public Double negFreq;
public Kmer(String k, double e){ this(k, e, -1, -1);}
public Kmer(String k, double e, double p, double n){
kmer = k;
revkmer = SequenceUtils.reverseComplement(k);
enrichment = e;
posFreq=p;
negFreq=n;
}
public int compareTo(Kmer k){
if(enrichment > k.enrichment)
return -1;
else if (enrichment < k.enrichment)
return 1;
return 0;
}
public String toString(){
return kmer+"\t"+revkmer+"\t"+enrichment+"\t"+posFreq+"\t"+negFreq;
}
}
public class KmerModel{
private List<Kmer> kmers=null;
public KmerModel(List<Kmer> m){
kmers = m;
Collections.sort(kmers);
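// Kmer.compareTo orders by descending enrichment, so after this sort the
// most enriched k-mers come first in the ranked list.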
}
public List<Kmer> getKmerList(){return kmers;}
}
}
|
package com.rapid.utils;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLSession;
import javax.net.ssl.X509TrustManager;
public class Https {
public static class TrustAllCerts implements X509TrustManager {
@Override
public void checkClientTrusted(X509Certificate[] arg0, String arg1) throws CertificateException {
}
@Override
public void checkServerTrusted(X509Certificate[] arg0, String arg1) throws CertificateException {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
}
}
public static class InvalidCertificateHostVerifier implements HostnameVerifier {
@Override
public boolean verify(String paramString, SSLSession paramSSLSession) {
return true;
}
}
}
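/*
 * Usage sketch (test environments only -- these handlers disable all TLS
 * certificate and hostname validation, so they must not be used in
 * production). It assumes javax.net.ssl.SSLContext, javax.net.ssl.TrustManager
 * and javax.net.ssl.HttpsURLConnection:
 *
 *   SSLContext sc = SSLContext.getInstance("TLS");
 *   sc.init(null, new TrustManager[] { new Https.TrustAllCerts() }, new java.security.SecureRandom());
 *   HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
 *   // the hostname verifier can likewise be installed via
 *   // HttpsURLConnection.setDefaultHostnameVerifier(...)
 */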
|
package edu.stanford.prpl.junction.api.sample.datalog;
import java.net.URL;
import edu.stanford.prpl.junction.api.JunctionAPI;
import edu.stanford.prpl.junction.api.object.OutboundObjectStream;
import edu.stanford.prpl.junction.api.query.JunctionQuery;
import edu.stanford.prpl.junction.api.query.JunctionQueryHandler;
import edu.stanford.prpl.junction.impl.JunctionManager;
import edu.stanford.prpl.junction.impl.JunctionManagerFactory;
public class DatalogActor {
// JunctionManager extends/implements JunctionAPI
public static void main(String[] argv) {
try {
JunctionAPI jm = new JunctionManagerFactory().create(new URL("http://your.com/mine"));
jm.registerQueryHandler(
new JunctionQueryHandler() {
public boolean supportsQuery(JunctionQuery query) {
//return DatalogStoredQuery.supports(query);
return DatalogQuery.supports(query);
}
public void handleQuery(JunctionQuery query, OutboundObjectStream results) {
// query text:
/*
if (!(query instanceof StoredQuery)) {
results.close(); // todo: set error message
return;
}
*/
String queryText = query.getQueryText();
// get results
// when result comes in:
//results.write(jsonObject.getBytes());
// no more results
results.close();
}
}
);
} catch (Exception e) {
System.err.println("fail.");
e.printStackTrace();
}
}
}
|
package com.redomar.game;
import com.redomar.game.audio.AudioHandler;
import com.redomar.game.entities.Dummy;
import com.redomar.game.entities.Player;
import com.redomar.game.entities.Vendor;
import com.redomar.game.gfx.Screen;
import com.redomar.game.gfx.SpriteSheet;
import com.redomar.game.level.LevelHandler;
import com.redomar.game.lib.Font;
import com.redomar.game.lib.Time;
import com.redomar.game.script.PrintTypes;
import com.redomar.game.script.Printing;
import org.apache.commons.lang3.text.WordUtils;
import javax.swing.*;
import java.awt.*;
import java.awt.im.InputContext;
import java.awt.image.BufferStrategy;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
public class Game extends Canvas implements Runnable {
// Setting the size and name of the frame/canvas
private static final long serialVersionUID = 1L;
private static final String game_Version = "v1.8.3 Alpha";
private static final int WIDTH = 160;
private static final int HEIGHT = (WIDTH / 3 * 2);
private static final int SCALE = 3;
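// With WIDTH = 160 the internal canvas is 160x106 pixels (roughly 3:2),
// drawn at SCALE = 3, giving a 480x318 canvas.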
private static final String NAME = "Game";
private static Game game;
private static Time time = new Time();
private static int Jdata_Host;
private static String Jdata_UserName = "";
private static String Jdata_IP = "127.0.0.1";
private static boolean changeLevel = false;
private static boolean npc = false;
private static int map = 0;
private static int shirtCol;
private static int faceCol;
private static boolean[] alternateCols = new boolean[2];
private static int fps;
private static int tps;
private static int steps;
private static boolean devMode;
private static boolean closingMode;
private static JFrame frame;
private static AudioHandler backgroundMusic;
private static boolean running = false;
private static InputHandler input;
private static MouseHandler mouse;
private static InputContext context;
private int tickCount = 0;
private BufferedImage image = new BufferedImage(WIDTH, HEIGHT,
BufferedImage.TYPE_INT_RGB);
private int[] pixels = ((DataBufferInt) image.getRaster().getDataBuffer())
.getData();
private int[] colours = new int[6 * 6 * 6];
private BufferedImage image2 = new BufferedImage(WIDTH, HEIGHT - 30,
BufferedImage.TYPE_INT_RGB);
private Screen screen;
private WindowHandler window;
private LevelHandler level;
private Player player;
private Dummy dummy;
private Vendor vendor;
private Font font = new Font();
private String nowPlaying;
private boolean notActive = true;
private int trigger = 0;
private Printing print = new Printing();
/**
* @author Redomar
* @version Alpha 1.8.3
*/
public Game() {
context = InputContext.getInstance();
setMinimumSize(new Dimension(WIDTH * SCALE, HEIGHT * SCALE));
setMaximumSize(new Dimension(WIDTH * SCALE, HEIGHT * SCALE));
setPreferredSize(new Dimension(WIDTH * SCALE, HEIGHT * SCALE));
setFrame(new JFrame(NAME));
getFrame().setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
getFrame().setLayout(new BorderLayout());
getFrame().add(this, BorderLayout.CENTER);
getFrame().pack();
getFrame().setResizable(false);
getFrame().setLocationRelativeTo(null);
getFrame().setVisible(true);
requestFocus();
setDevMode(false);
setClosing(false);
}
public static void npcSpawn() {
if (isNpc() == true) {
game.setDummy(new Dummy(game.level, "Dummy", 100, 150, 500,
543));
game.level.addEntity(Game.getDummy());
}
}
public static void npcKill() {
if (isNpc() == false) {
game.level.removeEntity(Game.getDummy());
}
}
public static JFrame getFrame() {
return Game.frame;
}
public static void setFrame(JFrame frame) {
Game.frame = frame;
}
public static Player getPlayer() {
return game.player;
}
public void setPlayer(Player player) {
game.player = player;
}
public static LevelHandler getLevel() {
return game.level;
}
public void setLevel(LevelHandler level) {
this.level = level;
}
public static Time getTime() {
return Game.time;
}
public void setTime(Time time) {
Game.time = time;
}
public static Game getGame() {
return game;
}
public static void setGame(Game game) {
Game.game = game;
}
public static boolean isRunning() {
return running;
}
public static void setRunning(boolean running) {
Game.running = running;
}
public static boolean isChangeLevel() {
return changeLevel;
}
public static void setChangeLevel(boolean changeLevel) {
Game.changeLevel = changeLevel;
}
public static int getMap() {
return map;
}
public void setMap(String Map_str) {
setLevel(new LevelHandler(Map_str));
if (alternateCols[0]) {
Game.setShirtCol(240);
}
if (!alternateCols[0]) {
Game.setShirtCol(111);
}
if (alternateCols[1]) {
Game.setFaceCol(310);
}
if (!alternateCols[1]) {
Game.setFaceCol(543);
}
setPlayer(new Player(level, 100, 100, input,
getJdata_UserName(), shirtCol, faceCol));
level.addEntity(player);
}
public static void setMap(int map) {
Game.map = map;
}
public static boolean isNpc() {
return npc;
}
public static void setNpc(boolean npc) {
Game.npc = npc;
}
public static Dummy getDummy() {
return game.dummy;
}
public void setDummy(Dummy dummy) {
this.dummy = dummy;
}
public static String getJdata_IP() {
return Jdata_IP;
}
public static void setJdata_IP(String jdata_IP) {
Jdata_IP = jdata_IP;
}
public static int getJdata_Host() {
return Jdata_Host;
}
public static void setJdata_Host(int jdata_Host) {
Jdata_Host = jdata_Host;
}
public static String getJdata_UserName() {
return Jdata_UserName;
}
public static void setJdata_UserName(String jdata_UserName) {
Jdata_UserName = jdata_UserName;
}
public static String getGameVersion() {
return game_Version;
}
public static int getShirtCol() {
return shirtCol;
}
public static void setShirtCol(int shirtCol) {
Game.shirtCol = shirtCol;
}
public static int getFaceCol() {
return faceCol;
}
public static void setFaceCol(int faceCol) {
Game.faceCol = faceCol;
}
public static boolean[] getAlternateCols() {
return alternateCols;
}
public static void setAlternateCols(boolean[] alternateCols) {
Game.alternateCols = alternateCols;
}
public static void setAlternateColsR(boolean alternateCols) {
Game.alternateCols[1] = alternateCols;
}
public static void setAlternateColsS(boolean alternateCols) {
Game.alternateCols[0] = alternateCols;
}
public static void setBackgroundMusic(AudioHandler backgroundMusic) {
Game.backgroundMusic = backgroundMusic;
}
public static AudioHandler getBackgroundMusic(){
return Game.backgroundMusic;
}
public static InputHandler getInput() {
return input;
}
public void setInput(InputHandler input) {
Game.input = input;
}
public static MouseHandler getMouse() {
return mouse;
}
public static void setMouse(MouseHandler mouse) {
Game.mouse = mouse;
}
public static boolean isDevMode() {
return devMode;
}
public static void setDevMode(boolean devMode) {
Game.devMode = devMode;
}
public static boolean isClosing() {
return closingMode;
}
public static void setClosing(boolean closing) {
Game.closingMode = closing;
}
public void init() {
setGame(this);
int index = 0;
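// Build a 6x6x6 colour cube (216 entries): each RGB channel takes 6 evenly
// spaced levels between 0 and 255, packed into an int as 0xRRGGBB.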
for (int r = 0; r < 6; r++) {
for (int g = 0; g < 6; g++) {
for (int b = 0; b < 6; b++) {
int rr = (r * 255 / 5);
int gg = (g * 255 / 5);
int bb = (b * 255 / 5);
colours[index++] = rr << 16 | gg << 8 | bb;
}
}
}
screen = new Screen(WIDTH, HEIGHT, new SpriteSheet("/sprite_sheet.png"));
input = new InputHandler(this);
setMouse(new MouseHandler(this));
setWindow(new WindowHandler(this));
setMap("/levels/custom_level.png");
setMap(1);
game.setVendor(new Vendor(level, "Vendor", 215, 215, 304, 543));
level.addEntity(getVendor());
}
public synchronized void start() {
Game.setRunning(true);
new Thread(this, "GAME").start();
}
public synchronized void stop() {
Game.setRunning(false);
}
public void run() {
long lastTime = System.nanoTime();
double nsPerTick = 1000000000D / 60D;
int ticks = 0;
int frames = 0;
long lastTimer = System.currentTimeMillis();
double delta = 0;
init();
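// Fixed-timestep loop: delta accumulates elapsed time in tick units
// (1/60 s). Every whole unit runs one tick(), so game logic advances at a
// steady rate while rendering happens as often as time allows.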
while (Game.isRunning()) {
long now = System.nanoTime();
delta += (now - lastTime) / nsPerTick;
lastTime = now;
boolean shouldRender = false;
while (delta >= 1) {
ticks++;
tick();
delta -= 1;
shouldRender = true;
}
try {
Thread.sleep(2);
} catch (InterruptedException e) {
e.printStackTrace();
}
if (shouldRender) {
frames++;
render();
}
if (System.currentTimeMillis() - lastTimer >= 1000) {
lastTimer += 1000;
getFrame().setTitle(
"JavaGame - Version "
+ WordUtils.capitalize(game_Version).substring(
1, game_Version.length()));
fps = frames;
tps = ticks;
frames = 0;
ticks = 0;
}
}
}
public void tick() {
setTickCount(getTickCount() + 1);
level.tick();
}
public void render() {
BufferStrategy bs = getBufferStrategy();
if (bs == null) {
createBufferStrategy(3);
return;
}
int xOffset = (int) getPlayer().getX() - (screen.getWidth() / 2);
int yOffset = (int) getPlayer().getY() - (screen.getHeight() / 2);
level.renderTiles(screen, xOffset, yOffset);
/*
* for (int x = 0; x < level.width; x++) { int colour = Colours.get(-1,
* -1, -1, 000); if (x % 10 == 0 && x != 0) { colour = Colours.get(-1,
* -1, -1, 500); } Font.render((x % 10) + "", screen, 0 + (x * 8), 0,
* colour, 1); }
*/
level.renderEntities(screen);
level.renderProjectileEntities(screen);
for (int y = 0; y < screen.getHeight(); y++) {
for (int x = 0; x < screen.getWidth(); x++) {
int colourCode = screen.getPixels()[x + y * screen.getWidth()];
if (colourCode < 255) {
pixels[x + y * WIDTH] = colours[colourCode];
}
}
}
if (isChangeLevel() == true && getTickCount() % 60 == 0) {
Game.setChangeLevel(true);
setChangeLevel(false);
}
if (changeLevel == true) {
print.print("Teleported into new world", PrintTypes.GAME);
if (getMap() == 1) {
setMap("/levels/water_level.png");
if (getDummy() != null) { // guard: removing a null dummy previously
// threw a NullPointerException when entering the new world
level.removeEntity(getDummy());
setNpc(false);
}
level.removeEntity(getVendor());
setMap(2);
} else if (getMap() == 2) {
setMap("/levels/custom_level.png");
level.removeEntity(getDummy());
setNpc(false);
level.addEntity(getVendor());
setMap(1);
}
changeLevel = false;
}
Graphics g = bs.getDrawGraphics();
g.drawRect(0, 0, getWidth(), getHeight());
g.drawImage(image, 0, 0, getWidth(), getHeight() - 30, null);
status(g, isDevMode(), isClosing());
// Font.render("Hi", screen, 0, 0, Colours.get(-1, -1, -1, 555), 1);
g.drawImage(image2, 0, getHeight() - 30, getWidth(), getHeight(), null);
g.setColor(Color.WHITE);
g.setFont(font.getSegoe());
g.drawString(
"Welcome "
+ WordUtils.capitalizeFully(player
.getSanitisedUsername()), 3, getHeight() - 17);
g.setColor(Color.ORANGE);
if (context.getLocale().getCountry().equals("BE")
|| context.getLocale().getCountry().equals("FR")) {
g.drawString("Press A to quit", (getWidth() / 2)
- ("Press A to quit".length() * 3), getHeight() - 17);
} else {
g.drawString("Press Q to quit", (getWidth() / 2)
- ("Press Q to quit".length() * 3), getHeight() - 17);
}
g.setColor(Color.YELLOW);
g.drawString(time.getTime(), (getWidth() - 58), (getHeight() - 3));
g.setColor(Color.GREEN);
if(backgroundMusic.getActive()) {
g.drawString("MUSIC is ON ", 3, getHeight() - 3);
}
g.dispose();
bs.show();
}
private void status(Graphics g, boolean terminalMode, boolean terminalQuit) {
if (terminalMode) {
g.setColor(Color.CYAN);
g.drawString("JavaGame Stats", 0, 10);
g.drawString("FPS/TPS: " + fps + "/" + tps, 0, 25);
if ((player.getNumSteps() & 15) == 15) {
steps += 1;
}
g.drawString("Foot Steps: " + steps, 0, 40);
g.drawString(
"NPC: " + WordUtils.capitalize(String.valueOf(isNpc())), 0,
55);
g.drawString("Mouse: " + getMouse().getX() + "x |"
+ getMouse().getY() + "y", 0, 70);
if (getMouse().getButton() != -1)
g.drawString("Button: " + getMouse().getButton(), 0, 85);
g.setColor(Color.CYAN);
g.fillRect(getMouse().getX() - 12, getMouse().getY() - 12, 24, 24);
}
if (terminalQuit) {
g.setColor(Color.BLACK);
g.fillRect(0, 0, getWidth(), getHeight());
g.setColor(Color.RED);
g.drawString("Shutting down the Game", (getWidth() / 2) - 70,
(getHeight() / 2) - 8);
g.dispose();
}
}
public WindowHandler getWindow() {
return window;
}
public void setWindow(WindowHandler window) {
this.window = window;
}
public String getNowPlaying() {
return nowPlaying;
}
public void setNowPlaying(String nowPlaying) {
this.nowPlaying = nowPlaying;
}
public int getTickCount() {
return tickCount;
}
public void setTickCount(int tickCount) {
this.tickCount = tickCount;
}
public Vendor getVendor() {
return vendor;
}
public void setVendor(Vendor vendor) {
this.vendor = vendor;
}
}
|
// 2015 competition robot code.
// Cleaned up and reorganized in preparation for 2016. No changes to
// functionality other than adding support for xbox controller and
// demonstration code for gyro and internal accelerometer.
package Team4450.Robot8;
import java.io.IOException;
import java.util.Properties;
import Team4450.Lib.*;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.Gyro;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.SampleRobot;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.vision.AxisCamera;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.PowerDistributionPanel;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the SampleRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the build.properties file.
*/
public class Robot extends SampleRobot
{
static final String PROGRAM_NAME = "RAC8-11.24.15-01";
// Motor pwm port assignments (0=front-left, 1=rear-left, 2=front-right, 3=rear-right)
final RobotDrive robotDrive = new RobotDrive(0,1,2,3);
final Joystick utilityStick = new Joystick(2); // 0 old ds configuration
final Joystick leftStick = new Joystick(0);
final Joystick rightStick = new Joystick(1);
final Joystick launchPad = new Joystick(3);
final Joystick gamePad = new Joystick(4);
final Compressor compressor = new Compressor(0);
final Gyro gyro = new Gyro(0);
public Properties robotProperties;
AxisCamera camera = null;
CameraServer usbCameraServer = null;
DriverStation ds = null;
DriverStation.Alliance alliance;
int location;
Thread monitorBatteryThread, monitorDistanceThread, monitorCompressorThread;
public CameraFeed cameraThread;
static final String CAMERA_IP = "10.44.50.11";
static final int USB_CAMERA = 2;
static final int IP_CAMERA = 3;
public Robot() throws IOException
{
// Set up our custom logger.
try
{
Util.CustomLogger.setup();
}
catch (Throwable e) {e.printStackTrace(Util.logPrintStream);}
try
{
Util.consoleLog(PROGRAM_NAME);
robotDrive.stopMotor();
robotDrive.setExpiration(0.1);
ds = DriverStation.getInstance();
// IP Camera object used for vision processing.
//camera = AxisCamera.getInstance(CAMERA_IP);
robotDrive.setInvertedMotor(RobotDrive.MotorType.kFrontLeft, true);
robotDrive.setInvertedMotor(RobotDrive.MotorType.kRearLeft, true);
robotDrive.setInvertedMotor(RobotDrive.MotorType.kFrontRight, true);
robotDrive.setInvertedMotor(RobotDrive.MotorType.kRearRight, true);
Util.consoleLog("%s %s", PROGRAM_NAME, "end");
}
catch (Throwable e) {e.printStackTrace(Util.logPrintStream);}
}
public void robotInit()
{
try
{
Util.consoleLog();
LCD.clearAll();
LCD.printLine(1, "Mode: RobotInit");
// Read properties file from RoboRio "disk".
robotProperties = Util.readProperties();
SmartDashboard.putString("Program", PROGRAM_NAME);
//SmartDashboard.putBoolean("CompressorEnabled", false);
SmartDashboard.putBoolean("CompressorEnabled", Boolean.parseBoolean(robotProperties.getProperty("CompressorEnabledByDefault")));
// Reset PDB sticky faults.
PowerDistributionPanel PDP = new PowerDistributionPanel();
PDP.clearStickyFaults();
// Set starting camera feed on driver station to USB-HW.
SmartDashboard.putNumber("CameraSelect", USB_CAMERA);
// Start usb camera feed server on roboRIO. usb camera name is set by roboRIO.
// If camera feed stops working, check roboRIO name assignment.
// Note this is not used if we do dual usb cameras, which are handled by the
// cameraFeed class. The function below is the standard WpiLib server which
// can be used for a single usb camera.
//StartUSBCameraServer("cam0");
// Start the battery, compressor, camera feed and distance monitoring Tasks.
monitorBatteryThread = new MonitorBattery(ds);
monitorBatteryThread.start();
monitorCompressorThread = new MonitorCompressor();
monitorCompressorThread.start();
// Start camera server using our class for dual usb cameras.
cameraThread = new CameraFeed(this);
cameraThread.start();
// Start thread to monitor distance sensor.
monitorDistanceThread = new MonitorDistanceMBX(this);
monitorDistanceThread.start();
Util.consoleLog("end");
}
catch (Throwable e) {e.printStackTrace(Util.logPrintStream);}
}
public void disabled()
{
try
{
Util.consoleLog();
LCD.printLine(1, "Mode: Disabled");
// Reset driver station LEDs.
SmartDashboard.putBoolean("Disabled", true);
SmartDashboard.putBoolean("Auto Mode", false);
SmartDashboard.putBoolean("Teleop Mode", false);
SmartDashboard.putBoolean("FMS", ds.isFMSAttached());
Util.consoleLog("end");
}
catch (Throwable e) {e.printStackTrace(Util.logPrintStream);}
}
public void autonomous()
{
try
{
Util.consoleLog();
LCD.clearAll();
LCD.printLine(1, "Mode: Autonomous");
SmartDashboard.putBoolean("Disabled", false);
SmartDashboard.putBoolean("Auto Mode", true);
// Make available the alliance (red/blue) and starting position as
// set on the driver station or FMS.
alliance = ds.getAlliance();
location = ds.getLocation();
// This code turns off the automatic compressor management if requested by DS.
compressor.setClosedLoopControl(SmartDashboard.getBoolean("CompressorEnabled", true));
// Start autonomous process contained in the MyAutonomous class.
Autonomous autonomous = new Autonomous(this);
autonomous.execute();
autonomous.dispose();
SmartDashboard.putBoolean("Auto Mode", false);
Util.consoleLog("end");
}
catch (Throwable e) {e.printStackTrace(Util.logPrintStream);}
}
public void operatorControl()
{
try
{
Util.consoleLog();
LCD.clearAll();
LCD.printLine(1, "Mode: Teleop");
SmartDashboard.putBoolean("Disabled", false);
SmartDashboard.putBoolean("Teleop Mode", true);
alliance = ds.getAlliance();
location = ds.getLocation();
Util.consoleLog("Alliance=%s, Location=%d, FMS=%b", alliance.name(), location, ds.isFMSAttached());
// This code turns off the automatic compressor management if requested by DS.
compressor.setClosedLoopControl(SmartDashboard.getBoolean("CompressorEnabled", true));
// Start operator control process contained in the MyTeleop class.
Teleop teleOp = new Teleop(this);
teleOp.OperatorControl();
teleOp.dispose();
Util.consoleLog("end");
}
catch (Throwable e) {e.printStackTrace(Util.logPrintStream);}
}
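// Illustrative sketch (not part of the original source): the driver-station-driven compressor
// toggle used in both autonomous() and operatorControl() above, factored into one place. The
// default of true keeps closed-loop control enabled when the dashboard key has not been published.
private void applyCompressorSettingSketch()
{
compressor.setClosedLoopControl(SmartDashboard.getBoolean("CompressorEnabled", true));
}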
public void test()
{
}
// Start WpiLib usb camera server for single selected camera.
public void StartUSBCameraServer(String cameraName)
{
Util.consoleLog(cameraName);
usbCameraServer = CameraServer.getInstance();
usbCameraServer.setQuality(30);
usbCameraServer.startAutomaticCapture(cameraName);
}
}
|
package org.rakam.util;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
import org.joda.time.format.DateTimePrinter;
import org.joda.time.format.ISODateTimeFormat;
import java.time.ZoneOffset;
import java.util.concurrent.TimeUnit;
public class DateTimeUtils {
private DateTimeUtils() {
}
public static final java.time.format.DateTimeFormatter TIMESTAMP_FORMATTER = java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS").withZone(ZoneOffset.UTC);
private static final DateTimeFormatter DATE_FORMATTER = ISODateTimeFormat.date().withZoneUTC();
private static final DateTimeFormatter TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER;
private static final DateTimeFormatter TIMESTAMP_WITH_TIME_ZONE_FORMATTER;
public static int parseDate(String value) {
return (int) TimeUnit.MILLISECONDS.toDays(DATE_FORMATTER.parseMillis(value));
}
static {
DateTimeParser[] timestampWithoutTimeZoneParser = {
DateTimeFormat.forPattern("yyyy-M-d").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSS").getParser()};
DateTimePrinter timestampWithoutTimeZonePrinter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS").getPrinter();
TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
.append(timestampWithoutTimeZonePrinter, timestampWithoutTimeZoneParser)
.toFormatter()
.withOffsetParsed();
DateTimeParser[] timestampWithTimeZoneParser = {
DateTimeFormat.forPattern("yyyy-M-dZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d Z").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:mZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m Z").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:sZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s Z").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSSZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSS Z").getParser(),
DateTimeFormat.forPattern("yyyy-M-dZZZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d ZZZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:mZZZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m ZZZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:sZZZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s ZZZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSSZZZ").getParser(),
DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSS ZZZ").getParser()};
DateTimePrinter timestampWithTimeZonePrinter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS ZZZ").getPrinter();
TIMESTAMP_WITH_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
.append(timestampWithTimeZonePrinter, timestampWithTimeZoneParser)
.toFormatter()
.withOffsetParsed();
}
public static long parseTimestamp(Number timestampWithTimeZone) {
return timestampWithTimeZone.longValue();
}
public static long parseTimestamp(Object timestampWithTimeZone) {
return timestampWithTimeZone instanceof Number ?
parseTimestamp((Number) timestampWithTimeZone) :
parseTimestamp(timestampWithTimeZone.toString());
}
public static long parseTimestamp(String timestampWithTimeZone) {
// Try ISO-8601 first (the offset may be 'Z' or an explicit value such as -07:00)
try {
return ISODateTimeFormat.dateTimeParser().parseMillis(timestampWithTimeZone);
}
catch (Exception e) {
try {
return TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER.parseMillis(timestampWithTimeZone);
}
catch (Exception ex) {
return TIMESTAMP_WITH_TIME_ZONE_FORMATTER.parseMillis(timestampWithTimeZone);
}
}
}
public static void main(String[] args)
{
parseTimestamp("2014-06-23T01:05:30-07:00");
}
}
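// Illustrative usage sketch (not part of the original source; the sample values are assumptions).
// It exercises the three-level fallback in DateTimeUtils.parseTimestamp(): ISO-8601 first, then
// the patterns without a time zone, then the patterns with an explicit offset.
class DateTimeUtilsUsageSketch {
public static void main(String[] args) {
// ISO-8601 with an explicit offset, handled by the ISO parser
System.out.println(DateTimeUtils.parseTimestamp("2014-06-23T01:05:30-07:00"));
// no time zone, handled by TIMESTAMP_WITHOUT_TIME_ZONE_FORMATTER
System.out.println(DateTimeUtils.parseTimestamp("2014-6-23 1:05:30.250"));
// explicit offset in the non-ISO layout, handled by TIMESTAMP_WITH_TIME_ZONE_FORMATTER
System.out.println(DateTimeUtils.parseTimestamp("2014-6-23 1:05:30 +0200"));
// dates are returned as days since the epoch
System.out.println(DateTimeUtils.parseDate("2014-06-23"));
}
}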
|
package at.pria.koza.harmonic;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import at.pria.koza.harmonic.proto.HarmonicP.StateP;
import at.pria.koza.polybuf.PolybufConfig;
import at.pria.koza.polybuf.PolybufException;
import at.pria.koza.polybuf.PolybufIO;
import at.pria.koza.polybuf.PolybufInput;
import at.pria.koza.polybuf.PolybufOutput;
import at.pria.koza.polybuf.PolybufSerializable;
import at.pria.koza.polybuf.proto.Polybuf.Obj;
import com.google.protobuf.GeneratedMessage.GeneratedExtension;
/**
* <p>
* A {@code BranchManager} is a wrapper for an {@link Engine}. It enables easy and efficient synchronization
* between multiple BranchManagers by managing metadata for the states in the engine, and named branches for which
* updates may be published between engines. At the moment, a branch manager does not operate on existing and
* possibly already modified engines. Instead, it creates a new engine and controls all access to it. This may
* change in the future.
* </p>
* <p>
* This class does not provide network protocols for achieving these ends. It is the backend containing the logic to
* create and process the information necessary for such messages, but does not mandate any specific protocol
* formats to be used to transport that information.
* </p>
*
* @version V1.0 29.07.2013
* @author SillyFreak
*/
public class BranchManager {
public static final String BRANCH_DEFAULT = "default";
private final Engine engine;
private final Map<String, MetaState[]> branches = new HashMap<>();
private final Map<Long, MetaState> states = new HashMap<>();
private final List<BranchListener> branchListeners = new ArrayList<>();
private String currentBranch;
//ctors & misc
/**
* <p>
* Creates a new branch manager
* </p>
*
* @see Engine#Engine()
*/
public BranchManager() {
this(new Engine());
}
/**
* <p>
* Creates a new branch manager.
* </p>
*
* @param spectating whether the engine will only spectate or also execute actions
*
* @see Engine#Engine(boolean)
*/
public BranchManager(boolean spectating) {
this(new Engine(spectating));
}
/**
* <p>
* Creates a new branch manager.
* </p>
*
* @param id the ID to be used for the engine
*
* @see Engine#Engine(int)
*/
public BranchManager(int id) {
this(new Engine(id));
}
/**
* <p>
* Creates a new branch manager.
* </p>
*/
private BranchManager(Engine engine) {
this.engine = engine;
//put the root
currentBranch = BRANCH_DEFAULT;
createBranch(currentBranch, engine.getState(0l));
}
/**
* <p>
* Returns the engine underlying this BranchManager. The engine must not be modified manually.
* </p>
*
* @return the engine underlying this BranchManager
*/
public Engine getEngine() {
return engine;
}
//listeners
public void addBranchListener(BranchListener l) {
branchListeners.add(l);
}
public void removeBranchListener(BranchListener l) {
branchListeners.remove(l);
}
protected void fireBranchCreated(BranchManager mgr, String branch, State head) {
synchronized(branchListeners) {
for(ListIterator<BranchListener> it = branchListeners.listIterator(branchListeners.size()); it.hasPrevious();) {
it.previous().branchCreated(mgr, branch, head);
}
}
}
protected void fireBranchMoved(BranchManager mgr, String branch, State prevHead, State newHead) {
synchronized(branchListeners) {
for(ListIterator<BranchListener> it = branchListeners.listIterator(branchListeners.size()); it.hasPrevious();) {
it.previous().branchMoved(mgr, branch, prevHead, newHead);
}
}
}
//branch mgmt
public void createBranchHere(String branch) {
createBranch(branch, getBranchTip(currentBranch));
}
public void createBranch(String branch, State state) {
if(state.getEngine() != engine) throw new IllegalArgumentException();
if(branches.containsKey(branch)) throw new IllegalArgumentException();
branches.put(branch, new MetaState[] {put(state)});
fireBranchCreated(this, branch, state);
}
public State getBranchTip(String branch) {
MetaState[] tip = branches.get(branch);
if(tip == null) throw new IllegalArgumentException();
return tip[0].state;
}
public String getCurrentBranch() {
return currentBranch;
}
public void setCurrentBranch(String branch) {
State tip = getBranchTip(branch);
engine.setHead(tip);
currentBranch = branch;
}
public <T extends Action> T execute(T action) {
MetaState[] tip = branches.get(currentBranch);
State state = new State(tip[0].state, action);
engine.setHead(state);
tip[0] = put(state);
return action;
}
//receive branch sync
/**
* <p>
* Receives an update offer from a remote branch manager. The branch to be updated consists of the branch
* owner's ID in hex (16 digits), a slash and a branch name. The {@code state} contains the full information
* about the branch's tip as the remote BranchManager knows it, and the {@code ancestors} array contains state
* IDs that the remote BranchManager assumed this BranchManager already knew, so that deltas can be kept
* as small as possible. If this BranchManager was already up to date, or had the parent of the new state
* and could therefore update immediately, the new state's id is reported through the callback. Otherwise,
* the latest state id that this manager knows to be on the branch is reported; this is likely either an
* element of the {@code ancestors} array, or {@code 0}.
* </p>
*
* @param engine the id of the offering BranchManager's engine
* @param branch the branch this update belongs to
* @param state the state being the tip of this update
* @param ancestors a list of ancestor state IDs the remote branch manager thought this branch manager might
* already be aware of; most recent first
* @param callback a callback through which the most recent state id that this BranchManager knows for the
* given branch is reported: {@code 0} if the branch is unknown; the {@code state}'s id if the
* full branch is known
*/
public void receiveUpdate(int engine, String branch, Obj state, long[] ancestors, SyncCallback callback) {
MetaState newHead = deserialize(state);
put(newHead);
if(newHead.resolve()) {
//we have all we need
newHead.addEngine(engine);
MetaState[] head = branches.get(branch);
if(head == null) branches.put(branch, head = new MetaState[1]);
MetaState oldHead = head[0];
head[0] = newHead;
if(currentBranch.equals(branch)) this.engine.setHead(head[0].state);
if(oldHead == null) fireBranchCreated(this, branch, newHead.state);
else fireBranchMoved(this, branch, oldHead.state, newHead.state);
} else {
//we need additional states
for(long l:ancestors)
if(states.containsKey(l)) {
callback.receiveUpdateCallback(this.engine.getId(), branch, l);
return;
}
//we know none of the given ancestors
callback.receiveUpdateCallback(this.engine.getId(), branch, 0l);
}
}
/**
* <p>
* Receives the missing states for a previous {@link #receiveUpdate(int, String, Obj, long[], SyncCallback) receiveUpdate()}
* call. The BranchManager does not save any transient state between {@code receiveUpdate()} and
* {@code receiveMissing()}, so some information must be added to the parameters again: the source of the
* update; and the branch being updated. To find again the state which was already received, the id of the head
* state of the update must be transmitted again. In addition, a list of states containing the delta between
* the remote and this BranchManager's branch is transmitted.
* </p>
*
* @param engine the id of the offering BranchManager's engine
* @param branch the branch this update belongs to
* @param state the id of the state being the tip of this update
* @param ancestors a list of ancestor states that are missing from the local branch, in chronological order
*/
public void receiveMissing(int engine, String branch, long state, Obj[] ancestors) {
for(Obj obj:ancestors) {
MetaState s = deserialize(obj);
put(s);
if(!s.resolve()) throw new AssertionError();
s.addEngine(engine);
}
MetaState newHead = states.get(state);
if(!newHead.resolve()) throw new AssertionError();
newHead.addEngine(engine);
MetaState[] head = branches.get(branch);
if(head == null) branches.put(branch, head = new MetaState[1]);
MetaState oldHead = head[0];
head[0] = newHead;
if(currentBranch.equals(branch)) this.engine.setHead(head[0].state);
if(oldHead == null) fireBranchCreated(this, branch, newHead.state);
else fireBranchMoved(this, branch, oldHead.state, newHead.state);
}
//send branch sync
/**
* <p>
* Determines which data has to be sent to the {@link BranchManager} identified by {@code engine} to update the
* given branch. If there is anything to update, this method provides this data to the caller through
* {@link SyncCallback#sendUpdateCallback(int, String, Obj, long...) callback.sendUpdateCallback()}.
* </p>
*
* @param engine the engine which should be updated
* @param branch the branch for which updates should be provided
* @param callback a callback to provide the data to the caller
*/
public void sendUpdate(int engine, String branch, SyncCallback callback) {
MetaState[] head = branches.get(branch);
if(head == null || head[0] == null) throw new IllegalArgumentException();
MetaState state;
if(engine == 0) {
state = null;
} else {
state = head[0];
Integer id = engine;
while(state != null && !state.engines.contains(id))
state = state.parent;
if(state == head[0]) return;
head[0].addEngine(engine);
}
long[] ancestors = state == null? new long[0]:new long[] {state.stateId};
callback.sendUpdateCallback(this.engine.getId(), branch, serialize(head[0]), ancestors);
}
/**
* <p>
* Determines which states are missing at the {@link BranchManager} identified by {@code engine}, given the
* known ancestor. If there is anything to update, this method provides this data to the caller through
* {@link SyncCallback#sendMissingCallback(int, String, long, Obj...) callback.sendMissingCallback()}.
* </p>
*
* @param engine the engine which should be updated
* @param branch the branch for which updates should be provided
* @param ancestor the ancestor the remote branch manager reported it knew
* @param callback a callback to provide the data to the caller
*/
public void sendMissing(int engine, String branch, long ancestor, SyncCallback callback) {
MetaState[] head = branches.get(branch);
if(head == null || head[0] == null) throw new IllegalArgumentException();
long headId = head[0].stateId;
if(headId == ancestor) return;
LinkedList<Obj> ancestors = new LinkedList<>();
for(MetaState state = head[0].parent; state.stateId != ancestor; state = state.parent) {
state.addEngine(engine);
ancestors.addFirst(serialize(state));
}
callback.sendMissingCallback(this.engine.getId(), branch, headId,
ancestors.toArray(new Obj[ancestors.size()]));
}
public static interface SyncCallback {
/**
* <p>
* Reports the data needed to call {@link BranchManager#receiveUpdate(int, String, Obj, long[], SyncCallback)
* receiveUpdate()} on the receiving BranchManager.
* </p>
*/
public void sendUpdateCallback(int engine, String branch, Obj state, long... ancestors);
/**
* <p>
* Reports the data needed to call {@link BranchManager#sendMissing(int, String, long, SyncCallback)
* sendMissing()} on the sending BranchManager.
* </p>
*/
public void receiveUpdateCallback(int engine, String branch, long ancestor);
/**
* <p>
* Reports the data needed to call {@link BranchManager#receiveMissing(int, String, long, Obj...)
* receiveMissing()} on the receiving BranchManager.
* </p>
*/
public void sendMissingCallback(int engine, String branch, long state, Obj... ancestors);
}
//state mgmt
private MetaState deserialize(Obj state) {
try {
PolybufInput in = new PolybufInput(engine.getConfig());
return (MetaState) in.readObject(state);
} catch(PolybufException | ClassCastException ex) {
throw new IllegalArgumentException(ex);
}
}
private Obj serialize(MetaState state) {
try {
PolybufOutput out = new PolybufOutput(engine.getConfig());
return out.writeObject(state);
} catch(PolybufException ex) {
throw new IllegalArgumentException(ex);
}
}
private void put(MetaState state) {
states.put(state.stateId, state);
}
private MetaState put(State state) {
Long id = state.getId();
MetaState result = states.get(id);
if(result == null) {
states.put(id, result = new MetaState(state));
}
return result;
}
private class MetaState implements PolybufSerializable {
private final long stateId, parentId;
private Obj action;
private MetaState parent;
private State state;
//set of engines known to know this meta state
private final Set<Integer> engines = new HashSet<>();
public MetaState(State state) {
this.state = state;
addEngine(engine.getId());
stateId = state.getId();
if(stateId != 0) {
State parentState = state.getParent();
parentId = parentState.getId();
parent = put(parentState);
} else {
parentId = 0;
}
}
/**
* <p>
* Used to add states received from an engine other than the one managed by this branch manager.
* {@linkplain #resolve() Resolving} will be necessary before this MetaState can be used.
* </p>
*
* @param state the protobuf serialized form of the state to be added; the contained action is
* extracted from its protobuf extension
*/
public MetaState(Obj state) {
StateP p = state.getExtension(State.EXTENSION);
stateId = p.getId();
parentId = p.getParent();
this.action = p.getAction();
}
@Override
public int getTypeId() {
return State.FIELD;
}
/**
* <p>
* Resolves this state. Returns {@code true} when the MetaState is now fully initialized, {@code false} if it is not yet.
* This method must be called for states received from another engine, as the parent state may not be
* present at the time it is received. Once all necessary ancestor states have been received, resolving
* will succeed and the state will be added to the underlying engine.
* </p>
*
* @return {@code true} if the MetaState was resolved, so that there is now a corresponding {@link State}
* in the underlying engine; {@code false} otherwise
*/
public boolean resolve() {
if(state != null) return true;
assert stateId != 0;
if(parent == null) parent = states.get(parentId);
if(parent == null || !parent.resolve()) return false;
state = new State(engine, parent.state, stateId, action);
addEngine(engine.getId());
addEngine(state.getEngineId());
return true;
}
public void addEngine(int id) {
engines.add(id);
}
}
//polybuf
public PolybufIO<MetaState> getIO() {
return new IO();
}
public void configure(PolybufConfig config) {
config.add(getIO());
}
private class IO implements PolybufIO<MetaState> {
private final PolybufIO<State> delegate;
public IO() {
delegate = State.getIO(engine);
}
@Override
public int getType() {
return delegate.getType();
}
@Override
public GeneratedExtension<Obj, ?> getExtension() {
return delegate.getExtension();
}
@Override
public void serialize(PolybufOutput out, MetaState object, Obj.Builder obj) throws PolybufException {
delegate.serialize(out, object.state, obj);
}
@Override
public MetaState initialize(PolybufInput in, Obj obj) throws PolybufException {
return new MetaState(obj);
}
@Override
public void deserialize(PolybufInput in, Obj obj, MetaState object) throws PolybufException {}
}
}
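// Illustrative synchronization sketch (not part of the original source; the class name and wiring
// are assumptions for demonstration). It shows the round trip described in the javadoc above:
// sendUpdate() -> receiveUpdate() -> (if states are missing) receiveUpdateCallback() ->
// sendMissing() -> sendMissingCallback() -> receiveMissing().
class BranchSyncSketch implements BranchManager.SyncCallback {
private final BranchManager sender;
private final BranchManager receiver;
private final String branch;
BranchSyncSketch(BranchManager sender, BranchManager receiver, String branch) {
this.sender = sender;
this.receiver = receiver;
this.branch = branch;
}
// entry point: push the sender's branch tip towards the receiver
public void sync() {
sender.sendUpdate(receiver.getEngine().getId(), branch, this);
}
// the sender reported its tip; hand it to the receiver, which answers through this same callback
@Override
public void sendUpdateCallback(int engine, String branch, Obj state, long... ancestors) {
receiver.receiveUpdate(engine, branch, state, ancestors, this);
}
// the receiver reported the most recent ancestor it knows; ask the sender for the delta
@Override
public void receiveUpdateCallback(int engine, String branch, long ancestor) {
sender.sendMissing(engine, branch, ancestor, this);
}
// the sender reported the missing states; complete the update on the receiver
@Override
public void sendMissingCallback(int engine, String branch, long state, Obj... ancestors) {
receiver.receiveMissing(engine, branch, state, ancestors);
}
}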
|
package dk.aau.cs.qweb.airbase.provenance;
import java.time.LocalDate;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import dk.aau.cs.qweb.airbase.types.Quad;
import dk.aau.cs.qweb.airbase.types.Tuple;
public class ProvenanceIndex {
private static ProvenanceIndex instance = null;
private Map<ProvenanceSignature,String> provenanceMap = new HashMap<ProvenanceSignature,String>();
private Map<String,ProvenanceGraph> provenanceGraphMap = new HashMap<String,ProvenanceGraph>();
private ProvenanceIndex() { }
public static ProvenanceIndex getInstance() {
if(instance == null) {
instance = new ProvenanceIndex();
}
return instance;
}
public String getProvenanceIdentifier(Quad quad, String level, String file, Tuple tuple) {
ProvenanceSignature signature = new ProvenanceSignature(quad,level,file,LocalDate.now(),tuple);
if (provenanceMap.containsKey(signature)) {
return provenanceMap.get(signature);
} else {
String provenanceIdentifier = createProvenanceGraph(signature);
provenanceMap.put(signature, provenanceIdentifier);
return provenanceIdentifier;
}
}
private String createProvenanceGraph(ProvenanceSignature signature) {
ProvenanceGraph provenanceGraph = new ProvenanceGraph(signature);
provenanceGraphMap.put(provenanceGraph.getProvenanceIdentifier(), provenanceGraph);
return provenanceGraph.getProvenanceIdentifier();
}
public Set<Quad> getProvenanceTriples() {
Set<Quad> provenanceQuads = new HashSet<Quad>();
for (ProvenanceGraph provenanceGraph : provenanceGraphMap.values()) {
provenanceQuads.addAll(provenanceGraph.getQuads());
}
return provenanceQuads;
}
public Set<Quad> getProvenanceGraph(String provenanceIdentifier) {
Set<Quad> provenanceQuads = new HashSet<Quad>();
provenanceQuads.addAll(provenanceGraphMap.get(provenanceIdentifier).getQuads());
return provenanceQuads;
}
}
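// Illustrative sketch (not part of the original source; the String key stands in for
// ProvenanceSignature). It shows the lazy-singleton-plus-memoisation pattern used by
// ProvenanceIndex above: equal signatures always resolve to the same identifier, and a new
// identifier is minted only on the first lookup.
class MemoisedIndexSketch {
private static MemoisedIndexSketch instance = null;
private final Map<String, String> identifiers = new HashMap<String, String>();
private MemoisedIndexSketch() { }
public static MemoisedIndexSketch getInstance() {
if (instance == null) {
instance = new MemoisedIndexSketch();
}
return instance;
}
public String identifierFor(String signature) {
String id = identifiers.get(signature);
if (id == null) {
// stands in for ProvenanceGraph.getProvenanceIdentifier()
id = "provenance-" + identifiers.size();
identifiers.put(signature, id);
}
return id;
}
}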
|
package gov.nih.nci.calab.service.submit;
import gov.nih.nci.calab.db.DataAccessProxy;
import gov.nih.nci.calab.db.IDataAccess;
import gov.nih.nci.calab.domain.AssociatedFile;
import gov.nih.nci.calab.domain.DerivedDataFile;
import gov.nih.nci.calab.domain.InstrumentType;
import gov.nih.nci.calab.domain.Keyword;
import gov.nih.nci.calab.domain.LabFile;
import gov.nih.nci.calab.domain.Manufacturer;
import gov.nih.nci.calab.domain.OutputFile;
import gov.nih.nci.calab.domain.Report;
import gov.nih.nci.calab.domain.nano.characterization.Characterization;
import gov.nih.nci.calab.domain.nano.function.Agent;
import gov.nih.nci.calab.domain.nano.function.AgentTarget;
import gov.nih.nci.calab.domain.nano.function.Function;
import gov.nih.nci.calab.domain.nano.function.Linkage;
import gov.nih.nci.calab.domain.nano.particle.Nanoparticle;
import gov.nih.nci.calab.dto.characterization.composition.CompositionBean;
import gov.nih.nci.calab.dto.characterization.invitro.CFU_GMBean;
import gov.nih.nci.calab.dto.characterization.invitro.CYP450Bean;
import gov.nih.nci.calab.dto.characterization.invitro.Caspase3ActivationBean;
import gov.nih.nci.calab.dto.characterization.invitro.CellViabilityBean;
import gov.nih.nci.calab.dto.characterization.invitro.ChemotaxisBean;
import gov.nih.nci.calab.dto.characterization.invitro.CoagulationBean;
import gov.nih.nci.calab.dto.characterization.invitro.ComplementActivationBean;
import gov.nih.nci.calab.dto.characterization.invitro.CytokineInductionBean;
import gov.nih.nci.calab.dto.characterization.invitro.EnzymeInductionBean;
import gov.nih.nci.calab.dto.characterization.invitro.GlucuronidationSulphationBean;
import gov.nih.nci.calab.dto.characterization.invitro.HemolysisBean;
import gov.nih.nci.calab.dto.characterization.invitro.LeukocyteProliferationBean;
import gov.nih.nci.calab.dto.characterization.invitro.NKCellCytotoxicActivityBean;
import gov.nih.nci.calab.dto.characterization.invitro.OxidativeBurstBean;
import gov.nih.nci.calab.dto.characterization.invitro.OxidativeStressBean;
import gov.nih.nci.calab.dto.characterization.invitro.PhagocytosisBean;
import gov.nih.nci.calab.dto.characterization.invitro.PlasmaProteinBindingBean;
import gov.nih.nci.calab.dto.characterization.invitro.PlateAggregationBean;
import gov.nih.nci.calab.dto.characterization.invitro.ROSBean;
import gov.nih.nci.calab.dto.characterization.physical.MolecularWeightBean;
import gov.nih.nci.calab.dto.characterization.physical.MorphologyBean;
import gov.nih.nci.calab.dto.characterization.physical.PurityBean;
import gov.nih.nci.calab.dto.characterization.physical.ShapeBean;
import gov.nih.nci.calab.dto.characterization.physical.SizeBean;
import gov.nih.nci.calab.dto.characterization.physical.SolubilityBean;
import gov.nih.nci.calab.dto.characterization.physical.StabilityBean;
import gov.nih.nci.calab.dto.characterization.physical.SurfaceBean;
import gov.nih.nci.calab.dto.common.LabFileBean;
import gov.nih.nci.calab.dto.function.FunctionBean;
import gov.nih.nci.calab.exception.CalabException;
import gov.nih.nci.calab.service.security.UserService;
import gov.nih.nci.calab.service.util.CalabConstants;
import gov.nih.nci.calab.service.util.CananoConstants;
import gov.nih.nci.calab.service.util.PropertyReader;
import gov.nih.nci.calab.service.util.StringUtils;
import gov.nih.nci.calab.service.util.file.HttpFileUploadSessionData;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.struts.upload.FormFile;
/**
* This class includes service calls involved in creating nanoparticles and
* adding functions and characterizations for nanoparticles, as well as
* creating reports.
*
* @author pansu
*
*/
public class SubmitNanoparticleService {
private static Logger logger = Logger
.getLogger(SubmitNanoparticleService.class);
/**
* Update keywords and visibilities for the particle with the given name and
* type
*
* @param particleType
* @param particleName
* @param keywords
* @param visibilities
* @throws Exception
*/
public void createNanoparticle(String particleType, String particleName,
String[] keywords, String[] visibilities) throws Exception {
// save nanoparticle to the database
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
// get the existing particle from database created during sample
// creation
List results = ida.search("from Nanoparticle where name='"
+ particleName + "' and type='" + particleType + "'");
Nanoparticle particle = null;
for (Object obj : results) {
particle = (Nanoparticle) obj;
}
if (particle == null) {
throw new CalabException("No such particle in the database");
}
particle.getKeywordCollection().clear();
if (keywords != null) {
for (String keyword : keywords) {
Keyword keywordObj = new Keyword();
keywordObj.setName(keyword);
particle.getKeywordCollection().add(keywordObj);
}
}
} catch (Exception e) {
ida.rollback();
logger
.error("Problem updating particle with name: "
+ particleName);
throw e;
} finally {
ida.close();
}
// remove existing visibilities for the nanoparticle
UserService userService = new UserService(CalabConstants.CSM_APP_NAME);
List<String> currentVisibilities = userService.getAccessibleGroups(
particleName, CalabConstants.CSM_READ_ROLE);
for (String visibility : currentVisibilities) {
userService.removeAccessibleGroup(particleName, visibility,
CalabConstants.CSM_READ_ROLE);
}
// set new visibilities for the nanoparticle
// by default, always set visibility to NCL_PI and NCL_Researcher to
// be true
for (String defaultGroup : CananoConstants.DEFAULT_VISIBLE_GROUPS) {
userService.secureObject(particleName, defaultGroup,
CalabConstants.CSM_READ_ROLE);
}
if (visibilities != null) {
for (String visibility : visibilities) {
userService.secureObject(particleName, visibility,
CalabConstants.CSM_READ_ROLE);
}
}
}
/**
* Save characterization to the database.
*
* @param particleType
* @param particleName
* @param achar
* @throws Exception
*/
private void addParticleCharacterization(String particleType,
String particleName, Characterization achar) throws Exception {
// if ID is not set save to the database otherwise update
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
Nanoparticle particle = null;
int existingViewTitleCount = -1;
try {
ida.open();
/*
* if (achar.getInstrument() != null)
* ida.store(achar.getInstrument());
*/
if (achar.getInstrument() != null) {
Manufacturer manuf = achar.getInstrument().getManufacturer();
String manufacturerQuery = " from Manufacturer manufacturer where manufacturer.name = '"
+ manuf.getName() + "'";
List result = ida.search(manufacturerQuery);
Manufacturer manufacturer = null;
boolean newManufacturer = false;
for (Object obj : result) {
manufacturer = (Manufacturer) obj;
}
if (manufacturer == null) {
newManufacturer = true;
manufacturer = manuf;
ida.store(manufacturer);
}
InstrumentType iType = achar.getInstrument()
.getInstrumentType();
String instrumentTypeQuery = " from InstrumentType instrumentType left join fetch instrumentType.manufacturerCollection where instrumentType.name = '"
+ iType.getName() + "'";
result = ida.search(instrumentTypeQuery);
InstrumentType instrumentType = null;
for (Object obj : result) {
instrumentType = (InstrumentType) obj;
}
if (instrumentType == null) {
instrumentType = iType;
ida.createObject(instrumentType);
HashSet<Manufacturer> manufacturers = new HashSet<Manufacturer>();
manufacturers.add(manufacturer);
instrumentType.setManufacturerCollection(manufacturers);
} else {
if (newManufacturer) {
instrumentType.getManufacturerCollection().add(
manufacturer);
}
}
ida.store(instrumentType);
achar.getInstrument().setInstrumentType(instrumentType);
achar.getInstrument().setManufacturer(manufacturer);
ida.store(achar.getInstrument());
}
if (achar.getCharacterizationProtocol() != null) {
ida.store(achar.getCharacterizationProtocol());
}
// check if viewTitle is already used for the same type of
// characterization for the same particle
String viewTitleQuery = "";
if (achar.getId() == null) {
viewTitleQuery = "select count(achar.identificationName) from Nanoparticle particle join particle.characterizationCollection achar where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType
+ "' and achar.identificationName='"
+ achar.getIdentificationName()
+ "' and achar.name='"
+ achar.getName() + "'";
} else {
viewTitleQuery = "select count(achar.identificationName) from Nanoparticle particle join particle.characterizationCollection achar where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType
+ "' and achar.identificationName='"
+ achar.getIdentificationName()
+ "' and achar.name='"
+ achar.getName() + "' and achar.id!=" + achar.getId();
}
List viewTitleResult = ida.search(viewTitleQuery);
for (Object obj : viewTitleResult) {
existingViewTitleCount = ((Integer) (obj)).intValue();
}
if (existingViewTitleCount == 0) {
// if ID exists, do update
if (achar.getId() != null) {
ida.store(achar);
} else {// get the existing particle and compositions
// from database
// created
// during sample
// creation
List results = ida
.search("select particle from Nanoparticle particle left join fetch particle.characterizationCollection where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType + "'");
for (Object obj : results) {
particle = (Nanoparticle) obj;
}
if (particle != null) {
particle.getCharacterizationCollection().add(achar);
}
}
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving characterization: ");
throw e;
} finally {
ida.close();
}
if (existingViewTitleCount > 0) {
throw new CalabException(
"The view title is already in use. Please enter a different one.");
}
}
/**
* Save Instrument to the database.
*
* @param particleType
* @param particleName
* @param achar
* @throws Exception
*/
/*
* private Instrument addInstrument(Instrument instrument) throws Exception {
* Instrument rInstrument = null; // if ID is not set save to the database
* otherwise update IDataAccess ida = (new DataAccessProxy())
* .getInstance(IDataAccess.HIBERNATE);
*
* //int existingInstrumentCount = -1; Instrument existingInstrument = null;
* try { ida.open(); // check if instrument is already existed String
* viewQuery = ""; if (instrument.getId() == null) { viewQuery = "select
* instrument from Instrument instrument where instrument.type='" +
* instrument.getType() + "' and instrument.manufacturer='" +
* instrument.getManufacturer() + "'"; } else { viewQuery = "select
* instrument from Instrument instrument where instrument.type='" +
* instrument.getType() + "' and instrument.manufacturer='" +
* instrument.getManufacturer() + "' and instrument.id!=" +
* instrument.getId(); } List viewTitleResult = ida.search(viewQuery);
*
* for (Object obj : viewTitleResult) { existingInstrument = (Instrument)
* obj; } if (existingInstrument == null) { ida.store(instrument);
* rInstrument = instrument; } else { rInstrument = existingInstrument; } }
* catch (Exception e) { e.printStackTrace(); ida.rollback();
* logger.error("Problem saving characterization: "); throw e; } finally {
* ida.close(); } return rInstrument; }
*/
/**
* Saves the particle composition to the database
*
* @param particleType
* @param particleName
* @param composition
* @throws Exception
*/
public void addParticleComposition(String particleType,
String particleName, CompositionBean composition) throws Exception {
Characterization doComp = composition.getDomainObj();
addParticleCharacterization(particleType, particleName, doComp);
}
/**
* Saves the size characterization to the database
*
* @param particleType
* @param particleName
* @param size
* @throws Exception
*/
public void addParticleSize(String particleType, String particleName,
SizeBean size) throws Exception {
Characterization doSize = size.getDomainObj();
// TODO think about how to deal with characterization file.
/*
* if (doSize.getInstrument() != null) { Instrument instrument =
* addInstrument(doSize.getInstrument());
* doSize.setInstrument(instrument); }
*/
addParticleCharacterization(particleType, particleName, doSize);
}
/**
* Saves the surface characterization to the database
*
* @param particleType
* @param particleName
* @param surface
* @throws Exception
*/
public void addParticleSurface(String particleType, String particleName,
SurfaceBean surface) throws Exception {
Characterization doSurface = surface.getDomainObj();
// TODO think about how to deal with characterization file.
/*
* if (doSize.getInstrument() != null) { Instrument instrument =
* addInstrument(doSize.getInstrument());
* doSize.setInstrument(instrument); }
*/
addParticleCharacterization(particleType, particleName, doSurface);
}
/**
* Saves the molecular weight characterization to the database
*
* @param particleType
* @param particleName
* @param molecularWeight
* @throws Exception
*/
public void addParticleMolecularWeight(String particleType,
String particleName, MolecularWeightBean molecularWeight)
throws Exception {
Characterization doMolecularWeight = molecularWeight.getDomainObj();
// TODO think about how to deal with characterization file.
/*
* if (doSize.getInstrument() != null) { Instrument instrument =
* addInstrument(doSize.getInstrument());
* doSize.setInstrument(instrument); }
*/
addParticleCharacterization(particleType, particleName,
doMolecularWeight);
}
/**
* Saves the morphology characterization to the database
*
* @param particleType
* @param particleName
* @param morphology
* @throws Exception
*/
public void addParticleMorphology(String particleType, String particleName,
MorphologyBean morphology) throws Exception {
Characterization doMorphology = morphology.getDomainObj();
// TODO think about how to deal with characterization file.
/*
* if (doSize.getInstrument() != null) { Instrument instrument =
* addInstrument(doSize.getInstrument());
* doSize.setInstrument(instrument); }
*/
addParticleCharacterization(particleType, particleName, doMorphology);
}
/**
* Saves the shape characterization to the database
*
* @param particleType
* @param particleName
* @param shape
* @throws Exception
*/
public void addParticleShape(String particleType, String particleName,
ShapeBean shape) throws Exception {
Characterization doShape = shape.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doShape);
}
/**
* Saves the stability characterization to the database
*
* @param particleType
* @param particleName
* @param stability
* @throws Exception
*/
public void addParticleStability(String particleType, String particleName,
StabilityBean stability) throws Exception {
Characterization doStability = stability.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doStability);
}
/**
* Saves the purity characterization to the database
*
* @param particleType
* @param particleName
* @param purity
* @throws Exception
*/
public void addParticlePurity(String particleType, String particleName,
PurityBean purity) throws Exception {
Characterization doPurity = purity.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doPurity);
}
/**
* Saves the solubility characterization to the database
*
* @param particleType
* @param particleName
* @param solubility
* @throws Exception
*/
public void addParticleSolubility(String particleType, String particleName,
SolubilityBean solubility) throws Exception {
Characterization doSolubility = solubility.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doSolubility);
}
/**
* Saves the invitro hemolysis characterization to the database
*
* @param particleType
* @param particleName
* @param hemolysis
* @throws Exception
*/
public void addHemolysis(String particleType, String particleName,
HemolysisBean hemolysis) throws Exception {
Characterization doHemolysis = hemolysis.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doHemolysis);
}
/**
* Saves the invitro coagulation characterization to the database
*
* @param particleType
* @param particleName
* @param coagulation
* @throws Exception
*/
public void addCoagulation(String particleType, String particleName,
CoagulationBean coagulation) throws Exception {
Characterization doCoagulation = coagulation.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doCoagulation);
}
/**
* Saves the invitro plate aggregation characterization to the database
*
* @param particleType
* @param particleName
* @param plateAggregation
* @throws Exception
*/
public void addPlateAggregation(String particleType, String particleName,
PlateAggregationBean plateAggregation) throws Exception {
Characterization doPlateAggregation = plateAggregation.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doPlateAggregation);
}
/**
* Saves the invitro Complement Activation characterization to the database
*
* @param particleType
* @param particleName
* @param complementActivation
* @throws Exception
*/
public void addComplementActivation(String particleType,
String particleName, ComplementActivationBean complementActivation)
throws Exception {
Characterization doComplementActivation = complementActivation
.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doComplementActivation);
}
/**
* Saves the invitro chemotaxis characterization to the database
*
* @param particleType
* @param particleName
* @param chemotaxis
* @throws Exception
*/
public void addChemotaxis(String particleType, String particleName,
ChemotaxisBean chemotaxis) throws Exception {
Characterization doChemotaxis = chemotaxis.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doChemotaxis);
}
/**
* Saves the invitro NKCellCytotoxicActivity characterization to the
* database
*
* @param particleType
* @param particleName
* @param nkCellCytotoxicActivity
* @throws Exception
*/
public void addNKCellCytotoxicActivity(String particleType,
String particleName,
NKCellCytotoxicActivityBean nkCellCytotoxicActivity)
throws Exception {
Characterization doNKCellCytotoxicActivity = nkCellCytotoxicActivity
.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doNKCellCytotoxicActivity);
}
/**
* Saves the invitro LeukocyteProliferation characterization to the database
*
* @param particleType
* @param particleName
* @param leukocyteProliferation
* @throws Exception
*/
public void addLeukocyteProliferation(String particleType,
String particleName,
LeukocyteProliferationBean leukocyteProliferation) throws Exception {
Characterization doLeukocyteProliferation = leukocyteProliferation
.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doLeukocyteProliferation);
}
/**
* Saves the invitro CFU_GM characterization to the database
*
* @param particleType
* @param particleName
* @param cfu_gm
* @throws Exception
*/
public void addCFU_GM(String particleType, String particleName,
CFU_GMBean cfu_gm) throws Exception {
Characterization doCFU_GM = cfu_gm.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doCFU_GM);
}
/**
* Saves the invitro OxidativeBurst characterization to the database
*
* @param particleType
* @param particleName
* @param oxidativeBurst
* @throws Exception
*/
public void addOxidativeBurst(String particleType, String particleName,
OxidativeBurstBean oxidativeBurst) throws Exception {
Characterization doOxidativeBurst = oxidativeBurst.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doOxidativeBurst);
}
/**
* Saves the invitro Phagocytosis characterization to the database
*
* @param particleType
* @param particleName
* @param phagocytosis
* @throws Exception
*/
public void addPhagocytosis(String particleType, String particleName,
PhagocytosisBean phagocytosis) throws Exception {
Characterization doPhagocytosis = phagocytosis.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doPhagocytosis);
}
/**
* Saves the invitro CytokineInduction characterization to the database
*
* @param particleType
* @param particleName
* @param cytokineInduction
* @throws Exception
*/
public void addCytokineInduction(String particleType, String particleName,
CytokineInductionBean cytokineInduction) throws Exception {
Characterization doCytokineInduction = cytokineInduction.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doCytokineInduction);
}
/**
* Saves the invitro plasma protein binding characterization to the database
*
* @param particleType
* @param particleName
* @param plasmaProteinBinding
* @throws Exception
*/
public void addProteinBinding(String particleType, String particleName,
PlasmaProteinBindingBean plasmaProteinBinding) throws Exception {
Characterization doProteinBinding = plasmaProteinBinding.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doProteinBinding);
}
/**
* Saves the invitro CellViability characterization to the database
*
* @param particleType
* @param particleName
* @param cellViability
* @throws Exception
*/
public void addCellViability(String particleType, String particleName,
CellViabilityBean cellViability) throws Exception {
Characterization doCellViability = cellViability.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doCellViability);
}
/**
* Saves the invitro EnzymeInduction characterization to the
* database
*
* @param particleType
* @param particleName
* @param enzymeInduction
* @throws Exception
*/
public void addEnzymeInduction(String particleType, String particleName,
EnzymeInductionBean enzymeInduction) throws Exception {
Characterization doEnzymeInduction = enzymeInduction.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doEnzymeInduction);
}
/**
* Saves the invitro OxidativeStress characterization to the database
*
* @param particleType
* @param particleName
* @param oxidativeStress
* @throws Exception
*/
public void addOxidativeStress(String particleType, String particleName,
OxidativeStressBean oxidativeStress) throws Exception {
Characterization doOxidativeStress = oxidativeStress.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doOxidativeStress);
}
/**
* Saves the invitro Caspase3Activation characterization to the database
*
* @param particleType
* @param particleName
* @param caspase3Activation
* @throws Exception
*/
public void addCaspase3Activation(String particleType, String particleName,
Caspase3ActivationBean caspase3Activation) throws Exception {
Characterization doCaspase3Activation = caspase3Activation
.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doCaspase3Activation);
}
/**
* Saves the invitro GlucuronidationSulphation characterization to the
* database
*
* @param particleType
* @param particleName
* @param glucuronidationSulphation
* @throws Exception
*/
public void addGlucuronidationSulphation(String particleType,
String particleName,
GlucuronidationSulphationBean glucuronidationSulphation)
throws Exception {
Characterization doGlucuronidationSulphation = glucuronidationSulphation
.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName,
doGlucuronidationSulphation);
}
/**
* Saves the invitro CYP450 characterization to the database
*
* @param particleType
* @param particleName
* @param cyp450
* @throws Exception
*/
public void addCYP450(String particleType, String particleName,
CYP450Bean cyp450) throws Exception {
Characterization doCYP450 = cyp450.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doCYP450);
}
/**
* Saves the invitro ROS characterization to the database
*
* @param particleType
* @param particleName
* @param ros
* @throws Exception
*/
public void addROS(String particleType, String particleName, ROSBean ros)
throws Exception {
Characterization doROS = ros.getDomainObj();
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doROS);
}
/**
* Save the characterization file into the database and file system
*
* @param particleName
* @param file
* @param title
* @param description
* @param comments
* @param keywords
* @param visibilities
*/
public LabFileBean saveCharacterizationFile(
String particleName, FormFile file, String title,
String description, String comments, String[] keywords,
String[] visibilities, String path, String fileNumber,
String rootPath) throws Exception {
// TODO saves file to the file system
HttpFileUploadSessionData sData = new HttpFileUploadSessionData();
String tagFileName = sData.getTimeStamp() + "_" + file.getFileName();
String outputFilename = rootPath + path + tagFileName;
FileOutputStream oStream = new FileOutputStream(
new File(outputFilename));
this.saveFile(file.getInputStream(), oStream);
DerivedDataFile dataFile = new DerivedDataFile();
dataFile.setDescription(description);
dataFile.setFilename(file.getFileName());
if (keywords != null && keywords.length > 0) {
for (int i = 0; i < keywords.length; i++) {
Keyword keyword = new Keyword();
keyword.setName(keywords[i]);
dataFile.getKeywordCollection().add(keyword);
}
}
// TODO need to remove the predefined root path from outputFilename
dataFile.setPath(path + tagFileName);
dataFile.setTitle(title);
// TODO saves file to the database
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
ida.store(dataFile);
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving characterization File: ");
throw e;
} finally {
ida.close();
}
LabFileBean fileBean = new LabFileBean(
dataFile);
UserService userService = new UserService(CalabConstants.CSM_APP_NAME);
if (visibilities != null) {
for (String visibility : visibilities) {
// by default, always set visibility to NCL_PI and NCL_Researcher to be true
// TODO once the file is successfully saved, use fileId instead of fileName
for (String defaultGroup : CananoConstants.DEFAULT_VISIBLE_GROUPS) {
userService.secureObject(fileBean.getId(), defaultGroup,
CalabConstants.CSM_READ_ROLE);
}
userService.secureObject(fileBean.getId(), visibility,
CalabConstants.CSM_READ_ROLE);
}
}
return fileBean;
}
/**
* Save the characterization file into the database and file system The file
* is a workflow output file
*
* @param fileId
* @param title
* @param description
* @param keywords
* @param visibilities
*/
public LabFileBean saveCharacterizationFile(String fileId,
String title, String description, String[] keywords,
String[] visibilities) throws Exception {
LabFileBean fileBean = getFile(fileId);
fileBean.setTitle(title);
fileBean.setDescription(description);
DerivedDataFile dataFile = fileBean.getDomainObject();
// Retrieve all existing keywords
Collection<String> words = new ArrayList<String>();
for (Keyword keyword : dataFile.getKeywordCollection()) {
words.add(keyword.getName());
}
// only add the new keyword
if (keywords != null && keywords.length > 0) {
for (int i = 0; i < keywords.length; i++) {
if (!words.contains(keywords[i])) {
Keyword keyword = new Keyword();
keyword.setName(keywords[i]);
dataFile.getKeywordCollection().add(keyword);
}
}
}
// TODO saves file to the database
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
ida.createObject(dataFile);
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving characterization File: ");
throw e;
} finally {
ida.close();
}
UserService userService = new UserService(CalabConstants.CSM_APP_NAME);
fileBean = new LabFileBean(dataFile);
if (visibilities != null) {
for (String visibility : visibilities) {
// by default, always set visibility to NCL_PI and NCL_Researcher to be true
// TODO once the file is successfully saved, use fileId instead of fileName
for (String defaultGroup : CananoConstants.DEFAULT_VISIBLE_GROUPS) {
userService.secureObject(fileBean.getId(), defaultGroup,
CalabConstants.CSM_READ_ROLE);
}
userService.secureObject(fileBean.getId(), visibility,
CalabConstants.CSM_READ_ROLE);
}
}
return fileBean;
}
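/**
 * Copies the given input stream to the given output stream in 32 KB chunks, closing both
 * streams when done. I/O problems are logged rather than propagated.
 */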
public void saveFile(InputStream is, FileOutputStream os) {
    // copy the input stream to the output stream in 32 KB chunks
    byte[] bytes = new byte[32768];
    try {
        int numRead = 0;
        while ((numRead = is.read(bytes)) != -1) {
            os.write(bytes, 0, numRead);
        }
    } catch (Exception e) {
        logger.error("Problem saving file to the file system", e);
    } finally {
        try { is.close(); } catch (Exception ignored) { }
        try { os.close(); } catch (Exception ignored) { }
    }
}
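/**
 * Saves a function for the given particle to the database, storing any associated agents,
 * agent targets and linkages first. If the function's view title is already used by another
 * function of the same type for this particle, a CalabException is thrown instead.
 *
 * @param particleType
 * @param particleName
 * @param function
 * @throws Exception
 */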
public void addParticleFunction(String particleType, String particleName,
FunctionBean function) throws Exception {
Function doFunction = function.getDomainObj();
// if ID is not set save to the database otherwise update
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
Nanoparticle particle = null;
int existingViewTitleCount = -1;
try {
ida.open();
if (doFunction.getLinkageCollection() != null) {
for (Linkage linkage : doFunction.getLinkageCollection()) {
Agent agent = linkage.getAgent();
if (agent != null) {
for (AgentTarget agentTarget : agent
.getAgentTargetCollection()) {
ida.store(agentTarget);
}
ida.store(agent);
}
ida.store(linkage);
}
}
// check if the view title is already used for the same type of
// function for the same particle
String viewTitleQuery = "";
if (function.getId() == null) {
viewTitleQuery = "select count(function.identificationName) from Nanoparticle particle join particle.functionCollection function where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType
+ "' and function.identificationName='"
+ doFunction.getIdentificationName()
+ "' and function.type='" + doFunction.getType() + "'";
} else {
viewTitleQuery = "select count(function.identificationName) from Nanoparticle particle join particle.functionCollection function where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType
+ "' and function.identificationName='"
+ doFunction.getIdentificationName()
+ "' and function.id!="
+ function.getId()
+ " and function.type='" + doFunction.getType() + "'";
}
List viewTitleResult = ida.search(viewTitleQuery);
for (Object obj : viewTitleResult) {
existingViewTitleCount = ((Integer) (obj)).intValue();
}
if (existingViewTitleCount == 0) {
// if ID exists, do update
if (doFunction.getId() != null) {
ida.store(doFunction);
} else {
// get the existing particle and functions from the database created during sample creation
List results = ida
.search("select particle from Nanoparticle particle left join fetch particle.functionCollection where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType + "'");
for (Object obj : results) {
particle = (Nanoparticle) obj;
}
if (particle != null) {
// ida.store(doFunction);
particle.getFunctionCollection().add(doFunction);
}
}
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving characterization: ");
throw e;
} finally {
ida.close();
}
if (existingViewTitleCount > 0) {
throw new CalabException(
"The view title is already in use. Please enter a different one.");
}
}
/**
* Load the file for the given fileId from the database
*
* @param fileId
* @return
*/
public LabFileBean getFile(String fileId) throws Exception {
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
LabFileBean fileBean = null;
try {
ida.open();
LabFile charFile = (LabFile) ida.load(LabFile.class, StringUtils
.convertToLong(fileId));
fileBean = new LabFileBean(charFile);
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem getting file with file ID: " + fileId);
throw e;
} finally {
ida.close();
}
return fileBean;
}
/**
* Get the list of all run output files associated with a particle
*
* @param particleName
* @return
* @throws Exception
*/
public List<LabFileBean> getAllRunFiles(String particleName)
throws Exception {
List<LabFileBean> runFiles = new ArrayList<LabFileBean>();
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
String query = "select distinct outFile from Run run join run.outputFileCollection outFile join run.runSampleContainerCollection runContainer where runContainer.sampleContainer.sample.name='"
+ particleName + "'";
List results = ida.search(query);
for (Object obj : results) {
OutputFile file = (OutputFile) obj;
// active status only
if (file.getDataStatus() == null) {
LabFileBean fileBean = new LabFileBean();
fileBean.setId(file.getId().toString());
fileBean.setName(file.getFilename());
fileBean.setPath(file.getPath());
runFiles.add(fileBean);
}
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem getting run files for particle: "
+ particleName);
throw e;
} finally {
ida.close();
}
return runFiles;
}
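/**
 * Saves the uploaded report file to the file system and the database, associates it with each
 * of the given particles (as a Report for the NCL report type, otherwise as an AssociatedFile),
 * and applies the requested visibilities in addition to the default visible groups.
 *
 * @param particleNames
 * @param reportType
 * @param report
 * @param title
 * @param description
 * @param comment
 * @param visibilities
 * @throws Exception
 */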
public void createReport(String[] particleNames, String reportType,
FormFile report, String title, String description, String comment,
String[] visibilities) throws Exception {
// TODO saves reportFile to the file system
String rootPath = PropertyReader.getProperty(
CalabConstants.FILEUPLOAD_PROPERTY, "fileRepositoryDir");
if (rootPath.charAt(rootPath.length() - 1) == File.separatorChar)
rootPath = rootPath.substring(0, rootPath.length() - 1);
String path = File.separator + "reports" + File.separator;
File pathDir = new File(rootPath + path);
if (!pathDir.exists())
pathDir.mkdirs();
HttpFileUploadSessionData sData = new HttpFileUploadSessionData();
String tagFileName = sData.getTimeStamp() + "_" + report.getFileName();
String outputFilename = rootPath + path + tagFileName;
FileOutputStream oStream = new FileOutputStream(
new File(outputFilename));
this.saveFile(report.getInputStream(), oStream);
LabFile dataFile = null;
if (reportType.equalsIgnoreCase(CananoConstants.NCL_REPORT))
dataFile = new Report();
else
dataFile = new AssociatedFile();
dataFile.setDescription(description);
dataFile.setFilename(report.getFileName());
dataFile.setPath(path + tagFileName);
dataFile.setTitle(title.toUpperCase()); //convert to upper case
Date date = new Date();
dataFile.setCreatedDate(date);
dataFile.setComments(comment);
// TODO saves reportFile path to the database
// look up the samples for each particleNames
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
ida.store(dataFile);
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving report File: ");
throw e;
} finally {
ida.close();
}
Nanoparticle particle = null;
for (String particleName : particleNames) {
try {
ida.open();
List results = ida
.search("select particle from Nanoparticle particle left join fetch particle.reportCollection where particle.name='"
+ particleName + "'");
for (Object obj : results) {
particle = (Nanoparticle) obj;
}
if (particle != null) {
if (reportType.equalsIgnoreCase(CananoConstants.NCL_REPORT))
particle.getReportCollection().add((Report)dataFile);
else
particle.getAssociatedFileCollection().add((AssociatedFile)dataFile);
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving report File: ");
throw e;
} finally {
ida.close();
}
}
UserService userService = new UserService(CalabConstants.CSM_APP_NAME);
// String fileName = report.getFileName();
for (String visibility : visibilities) {
// by default, always set visibility to NCL_PI and NCL_Researcher to be true
// TODO once the file is successfully saved, use fileId instead of fileName
userService.secureObject(dataFile.getId().toString(), visibility,
CalabConstants.CSM_READ_ROLE);
}
for (String defaultGroup : CananoConstants.DEFAULT_VISIBLE_GROUPS) {
userService.secureObject(dataFile.getId().toString(), defaultGroup,
CalabConstants.CSM_READ_ROLE);
}
}
}
|
package cat.udl.eps.butterp.main;
import cat.udl.eps.butterp.data.*;
import cat.udl.eps.butterp.data.Integer;
import cat.udl.eps.butterp.environment.Environment;
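/**
 * Loads the butterp primitive functions (add, mult, car, cdr, cons, list, eq) and the quote
 * special form into a global environment.
 *
 * <p>Usage sketch, assuming some {@code Environment} implementation is available:
 * <pre>
 *     Environment env = ...; // any Environment implementation
 *     Primitives.loadPrimitives(env);
 * </pre>
 */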
public class Primitives {
public static void loadPrimitives(Environment env) {
env.bindGlobal(Symbol.NIL, Symbol.NIL);
env.bindGlobal(Symbol.TRUE, Symbol.TRUE);
loadPrimitiveFunctions(env);
loadPrimitiveSpecials(env);
/*
An example of a predefined Function:
env.bindGlobal(new Symbol("function"), new Function() {
@Override
public SExpression apply(SExpression evargs, Environment env) {
throw new UnsupportedOperationException("not implemented yet");
}
});
*/
/*
An example of a predefined Special:
env.bindGlobal(new Symbol("special"), new Special() {
@Override
public SExpression applySpecial(SExpression args, Environment env) {
throw new UnsupportedOperationException("not implemented yet");
}
});
*/
}
/**
* Loads all the primitive functions into the given environment
* @param env The environment that will hold the newly defined functions.
*/
private static void loadPrimitiveFunctions(Environment env) {
loadPrimitiveArithmeticFunctions(env);
loadPrimitiveListFunctions(env);
loadPrimitiveComparisonFunctions(env);
}
/**
* Adds the functions: add and mult
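     * <p>For example, applying {@code add} to the evaluated arguments 1, 2 and 3 yields 6, and
     * applying {@code mult} to 2, 3 and 4 yields 24; non-integer arguments raise an EvaluationError.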
*/
private static void loadPrimitiveArithmeticFunctions(Environment env) {
env.bindGlobal(new Symbol("add"), new Function() {
@Override
public SExpression apply(SExpression evargs, Environment env) {
if (ListOps.isListOf(evargs, Integer.class)) {
int sum = 0;
for (int i = 0; i < ListOps.length(evargs); ++i) {
sum += ((Integer)ListOps.nth(evargs, i)).value;
}
return new Integer(sum);
}
throw new EvaluationError("Invalid add arguments");
}
});
env.bindGlobal(new Symbol("mult"), new Function() {
@Override
public SExpression apply(SExpression evargs, Environment env) {
if (ListOps.isListOf(evargs, Integer.class)) {
int mult = 1;
for (int i = 0; i < ListOps.length(evargs); ++i) {
mult *= ((Integer)ListOps.nth(evargs, i)).value;
}
return new Integer(mult);
}
throw new EvaluationError("Invalid mult arguments");
}
});
}
/**
* Adds the functions: car, cdr, cons and list
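     * <p>For example, {@code car} applied to the list (1 2) yields 1, {@code cdr} applied to (1 2)
     * yields (2), and {@code cons} applied to 1 and nil yields the single-element list (1).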
*/
private static void loadPrimitiveListFunctions(Environment env) {
env.bindGlobal(new Symbol("car"), new Function() {
@Override
public SExpression apply(SExpression evargs, Environment env) {
int nargs = ListOps.length(evargs);
if (nargs != 1)
throw new EvaluationError(String.format("car: expected 1 arguments, %d given", nargs));
else if (ConsCell.class != ListOps.car(evargs).getClass())
throw new EvaluationError("car: the given argument is not a list");
return ListOps.car(ListOps.car(evargs));
}
});
env.bindGlobal(new Symbol("cdr"), new Function() {
@Override
public SExpression apply(SExpression evargs, Environment env) {
int nargs = ListOps.length(evargs);
if (nargs != 1)
throw new EvaluationError(String.format("car: expected 1 arguments, %d given", nargs));
else if (ConsCell.class != ListOps.car(evargs).getClass())
throw new EvaluationError("car: the given argument is not a list");
return ListOps.cdr(ListOps.car(evargs));
}
});
env.bindGlobal(new Symbol("cons"), new Function() {
@Override
public SExpression apply(SExpression evargs, Environment env) { // TODO: Simplify
int nargs = ListOps.length(evargs);
if (nargs != 2)
throw new EvaluationError(String.format("cons: expected 2 arguments, %d given", nargs));
SExpression firstArg = ListOps.nth(evargs, 0);
SExpression secondArg = ListOps.nth(evargs, 1);
if (Symbol.NIL.equals(secondArg)) {
return ListOps.list(firstArg);
} else if (secondArg instanceof ConsCell) {
return new ConsCell(firstArg, secondArg);
}
throw new EvaluationError("cons: the second argument must be either nil or a list");
}
});
env.bindGlobal(new Symbol("list"), new Function() {
@Override
public SExpression apply(SExpression evargs, Environment env) {
return evargs;
}
});
}
/**
* Adds the functions: eq
*/
private static void loadPrimitiveComparisonFunctions(Environment env) {
env.bindGlobal(new Symbol("eq"), new Function() {
@Override
public SExpression apply(SExpression evargs, Environment env) {
int nargs = ListOps.length(evargs);
if (nargs != 2)
throw new EvaluationError(String.format("cons: expected 2 arguments, %d given", nargs));
return ListOps.nth(evargs, 0).equals(ListOps.nth(evargs, 1)) ? Symbol.TRUE : Symbol.NIL;
}
});
}
/**
* Loads all the primitive special forms into the given environment
* @param env The environment that will hold the newly defined specials.
*/
private static void loadPrimitiveSpecials(Environment env) {
// The declarations are made in alphabetical order
env.bindGlobal(new Symbol("quote"), new Special() {
@Override
public SExpression applySpecial(SExpression args, Environment env) {
int nargs = ListOps.length(args);
if (nargs != 1)
throw new EvaluationError(String.format("quote: expected 1 argument, received %d", nargs));
return ListOps.car(args);
}
});
}
}
|
package net.fortuna.ical4j.model;
import java.io.Serializable;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.builder.HashCodeBuilder;
import edu.emory.mathcs.backport.java.util.concurrent.CopyOnWriteArrayList;
/**
* $Id$ [Apr 5, 2004]
*
* Defines a list of iCalendar parameters. A parameter list may be specified as unmodifiable at instantiation - useful
* for constant properties that you don't want modified.
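 * <p>
 * Usage sketch (the {@code parameter} variable below stands for any concrete {@link Parameter}
 * instance; the copy constructor may throw {@link java.net.URISyntaxException}):
 * <pre>
 *     ParameterList params = new ParameterList();
 *     params.add(parameter);
 *     ParameterList readOnly = new ParameterList(params, true); // unmodifiable deep copy
 * </pre>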
* @author Ben Fortuna
*/
public class ParameterList implements Serializable {
private static final long serialVersionUID = -1913059830016450169L;
private List parameters;
/**
* Default constructor. Creates a modifiable parameter list.
*/
public ParameterList() {
this(false);
}
/**
* Constructor.
 * @param unmodifiable indicates whether the list should be unmodifiable
*/
public ParameterList(final boolean unmodifiable) {
if (unmodifiable) {
parameters = Collections.unmodifiableList(new ArrayList());
}
else {
parameters = new CopyOnWriteArrayList();
}
}
/**
* Creates a deep copy of the specified parameter list. That is, copies of all parameters in the specified list are
* added to this list.
* @param list a parameter list to copy parameters from
 * @param unmodifiable indicates whether the list should be unmodifiable
* @throws URISyntaxException where a parameter in the list specifies an invalid URI value
*/
public ParameterList(final ParameterList list, final boolean unmodifiable)
throws URISyntaxException {
parameters = new CopyOnWriteArrayList();
for (final Iterator i = list.iterator(); i.hasNext();) {
final Parameter parameter = (Parameter) i.next();
parameters.add(parameter.copy());
}
if (unmodifiable) {
parameters = Collections.unmodifiableList(parameters);
}
}
/**
* {@inheritDoc}
*/
public final String toString() {
final StringBuffer buffer = new StringBuffer();
for (final Iterator i = parameters.iterator(); i.hasNext();) {
buffer.append(';');
buffer.append(i.next().toString());
}
return buffer.toString();
}
/**
* Returns the first parameter with the specified name.
* @param aName name of the parameter
* @return the first matching parameter or null if no matching parameters
*/
public final Parameter getParameter(final String aName) {
for (final Iterator i = parameters.iterator(); i.hasNext();) {
final Parameter p = (Parameter) i.next();
if (aName.equalsIgnoreCase(p.getName())) {
return p;
}
}
return null;
}
/**
* Returns a list of parameters with the specified name.
* @param name name of parameters to return
* @return a parameter list
*/
public final ParameterList getParameters(final String name) {
final ParameterList list = new ParameterList();
for (final Iterator i = parameters.iterator(); i.hasNext();) {
final Parameter p = (Parameter) i.next();
if (p.getName().equalsIgnoreCase(name)) {
list.add(p);
}
}
return list;
}
/**
* Add a parameter to the list. Note that this method will not remove existing parameters of the same type. To
 * achieve this use {@link ParameterList#replace(Parameter)}.
* @param parameter the parameter to add
* @return true
* @see List#add(java.lang.Object)
*/
public final boolean add(final Parameter parameter) {
if (parameter == null) {
throw new IllegalArgumentException("Trying to add null Parameter");
}
return parameters.add(parameter);
}
/**
* Replace any parameters of the same type with the one specified.
* @param parameter parameter to add to this list in place of all others with the same name
* @return true if successfully added to this list
*/
public final boolean replace(final Parameter parameter) {
for (final Iterator i = getParameters(parameter.getName()).iterator(); i.hasNext();) {
remove((Parameter) i.next());
}
return add(parameter);
}
/**
* @return boolean indicates if the list is empty
* @see List#isEmpty()
*/
public final boolean isEmpty() {
return parameters.isEmpty();
}
/**
* @return an iterator
* @see List#iterator()
*/
public final Iterator iterator() {
return parameters.iterator();
}
/**
* Remove a parameter from the list.
* @param parameter the parameter to remove
* @return true if the list contained the specified parameter
* @see List#remove(java.lang.Object)
*/
public final boolean remove(final Parameter parameter) {
return parameters.remove(parameter);
}
/**
* Remove all parameters with the specified name.
* @param paramName the name of parameters to remove
*/
public final void removeAll(final String paramName) {
final ParameterList params = getParameters(paramName);
parameters.removeAll(params.parameters);
}
/**
* @return the number of parameters in the list
* @see List#size()
*/
public final int size() {
return parameters.size();
}
/**
* {@inheritDoc}
*/
public final boolean equals(final Object arg0) {
if (arg0 instanceof ParameterList) {
final ParameterList p = (ParameterList) arg0;
return ObjectUtils.equals(parameters, p.parameters);
}
return super.equals(arg0);
}
/**
* {@inheritDoc}
*/
public final int hashCode() {
return new HashCodeBuilder().append(parameters).toHashCode();
}
}
|
package ecse321.fall2014.group3.bomberman.physics;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import com.flowpowered.math.vector.Vector2f;
import ecse321.fall2014.group3.bomberman.Direction;
import ecse321.fall2014.group3.bomberman.Game;
import ecse321.fall2014.group3.bomberman.SubscribableQueue;
import ecse321.fall2014.group3.bomberman.database.Leaderboard.Leader;
import ecse321.fall2014.group3.bomberman.event.EnemyDeathEvent;
import ecse321.fall2014.group3.bomberman.event.Event;
import ecse321.fall2014.group3.bomberman.event.ExitWayOrPowerUPDestroyedEvent;
import ecse321.fall2014.group3.bomberman.event.PlayerLostLifeEvent;
import ecse321.fall2014.group3.bomberman.event.PowerUPCollectedEvent;
import ecse321.fall2014.group3.bomberman.input.Key;
import ecse321.fall2014.group3.bomberman.input.KeyboardState;
import ecse321.fall2014.group3.bomberman.nterface.Interface;
import ecse321.fall2014.group3.bomberman.physics.entity.Entity;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.Player;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Balloom;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Doll;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Enemy;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Kondoria;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Minvo;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Oneal;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Ovapi;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Pass;
import ecse321.fall2014.group3.bomberman.physics.entity.mob.enemy.Pontan;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.Button;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.Slider;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.TextBox;
import ecse321.fall2014.group3.bomberman.physics.entity.ui.UIBox;
import ecse321.fall2014.group3.bomberman.ticking.TickingElement;
import ecse321.fall2014.group3.bomberman.world.Level;
import ecse321.fall2014.group3.bomberman.world.Map;
import ecse321.fall2014.group3.bomberman.world.World;
import ecse321.fall2014.group3.bomberman.world.tile.Air;
import ecse321.fall2014.group3.bomberman.world.tile.Tile;
import ecse321.fall2014.group3.bomberman.world.tile.powerup.BombPass;
import ecse321.fall2014.group3.bomberman.world.tile.powerup.FlamePass;
import ecse321.fall2014.group3.bomberman.world.tile.powerup.WallPass;
import ecse321.fall2014.group3.bomberman.world.tile.timed.Bomb;
import ecse321.fall2014.group3.bomberman.world.tile.timed.Fire;
import ecse321.fall2014.group3.bomberman.world.tile.wall.Breakable;
public class Physics extends TickingElement {
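    // Collision tuning constants: contacts thinner than PERPENDICULAR_CONTACT_THRESHOLD along the
    // perpendicular of the collision direction are ignored, contact ratios below
    // SLIDING_CONTACT_THRESHOLD let the player be nudged towards an adjacent free tile, and
    // collisions covering at least OVERLAP_CONTACT_THRESHOLD of the player's box are ignored to
    // avoid trapping the player inside a tile.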
private static final float PERPENDICULAR_CONTACT_THRESHOLD = 0.05f;
private static final float SLIDING_CONTACT_THRESHOLD = 0.9f;
private static final float OVERLAP_CONTACT_THRESHOLD = 0.5f;
private final Game game;
private final SubscribableQueue<Event> events = new SubscribableQueue<>(false);
private final SweepAndPruneAlgorithm collisionDetection = new SweepAndPruneAlgorithm();
private final Set<Tile> collidableTiles = new HashSet<>();
private final Set<Entity> entities = Collections.newSetFromMap(new ConcurrentHashMap<Entity, Boolean>());
private final Player player = new Player(Vector2f.ZERO);
private final List<Button> buttonOrder = Collections.synchronizedList(new ArrayList<Button>());
private volatile int selectedButtonIndex;
private Level currentLevel;
private long mapVersion = 0;
private TextBox levelStateText;
public Physics(Game game) {
super("Physics", 60);
this.game = game;
}
@Override
public void onStart() {
events.becomePublisher();
game.getWorld().subscribeToEvents();
}
@Override
public void onTick(long dt) {
final Level level = game.getWorld().getLevel();
if (currentLevel != level) {
currentLevel = level;
clearEntities();
if (currentLevel.isMenu()) {
setupMenu();
} else {
setupGame();
}
}
if (currentLevel.isMenu()) {
doMenuTick(dt);
} else {
doGameTick(dt);
}
}
private void clearEntities() {
// Clear collision detection
entities.clear();
collisionDetection.clear();
collidableTiles.clear();
// Clear UI
buttonOrder.clear();
}
private void setupMenu() {
// Add UI entities
final List<UIBox> uiEntities = game.getWorld().getLevel().buildUI(game.getSession().getLevel());
entities.addAll(uiEntities);
for (UIBox uiEntity : uiEntities) {
if (uiEntity instanceof Button) {
buttonOrder.add((Button) uiEntity);
}
}
selectedButtonIndex = 0;
// Add extra entities for leaderboard menu
if (currentLevel == Level.LEADER_BOARD) {
final Leader[] top = game.getLeaderboard().getTop(10);
for (int i = 0; i < top.length && top[i] != null; i++) {
entities.add(new TextBox(new Vector2f(4, Interface.VIEW_HEIGHT_TILE - (6 + i * 0.5f)), Vector2f.ONE, top[i].getFormatted()));
}
}
}
private void doMenuTick(long dt) {
final KeyboardState keyboardState = game.getInput().getKeyboardState();
final int selectedShift = keyboardState.getAndClearPressCount(Key.DOWN) - keyboardState.getAndClearPressCount(Key.UP);
final int sliderShift = keyboardState.getAndClearPressCount(Key.RIGHT) - keyboardState.getAndClearPressCount(Key.LEFT);
final int buttonCount = buttonOrder.size();
final int oldSelected = selectedButtonIndex;
        int newSelected = oldSelected;
        if (buttonCount > 0) {
            // Wrap the selection around the button list; the double modulo keeps the index non-negative for negative shifts
            newSelected = ((oldSelected + selectedShift) % buttonCount + buttonCount) % buttonCount;
            buttonOrder.get(oldSelected).setSelected(false);
            buttonOrder.get(newSelected).setSelected(true);
        }
        selectedButtonIndex = newSelected;
final Button selectedButton = getSelectedButton();
if (selectedButton instanceof Slider) {
((Slider) selectedButton).add(sliderShift);
}
}
private void setupGame() {
// Add player
entities.add(player);
player.setPosition(new Vector2f(1, 11));
player.clearPowerUPs();
game.getSession().getPowerUPs(player.getPowerUPs());
collisionDetection.add(player);
// Add UI
final World world = game.getWorld();
final String levelString =
currentLevel.isBonus() ? "Bonus level " + -currentLevel.getNumber() : "Level " + currentLevel.getNumber()
+ " | Score " + world.getScore() + " | Timer " + world.getTimer() + "| Lives " + world.getLives();
levelStateText = new TextBox(new Vector2f(Map.WIDTH / 6f, Map.HEIGHT - 1.25f), new Vector2f(2, 2), levelString);
entities.add(levelStateText);
// Add enemies
final List<Vector2f> freePositions = getFreePositions(world.getMap());
Collections.shuffle(freePositions);
// get the number of enemies on the level
int[] enemies = currentLevel.getEnemyForLevel();
int i = 0;
// add balloom
for (int j = 0; j < enemies[0] && i < freePositions.size(); j++, i++) {
Balloom balloom = new Balloom(freePositions.get(i));
entities.add(balloom);
collisionDetection.add(balloom);
}
// add oneal
for (int j = 0; j < enemies[1] && i < freePositions.size(); j++, i++) {
Oneal oneal = new Oneal(freePositions.get(i));
entities.add(oneal);
collisionDetection.add(oneal);
}
// add doll
for (int j = 0; j < enemies[2] && i < freePositions.size(); j++, i++) {
Doll doll = new Doll(freePositions.get(i));
entities.add(doll);
collisionDetection.add(doll);
}
// add minvo
for (int j = 0; j < enemies[3] && i < freePositions.size(); j++, i++) {
Minvo minvo = new Minvo(freePositions.get(i));
entities.add(minvo);
collisionDetection.add(minvo);
}
// add kondoria
for (int j = 0; j < enemies[4] && i < freePositions.size(); j++, i++) {
Kondoria kondoria = new Kondoria(freePositions.get(i));
entities.add(kondoria);
collisionDetection.add(kondoria);
}
// add ovapi
for (int j = 0; j < enemies[5] && i < freePositions.size(); j++, i++) {
Ovapi ovapi = new Ovapi(freePositions.get(i));
entities.add(ovapi);
collisionDetection.add(ovapi);
}
// add pass
for (int j = 0; j < enemies[6] && i < freePositions.size(); j++, i++) {
Pass pass = new Pass(freePositions.get(i));
entities.add(pass);
collisionDetection.add(pass);
}
// add pontan
for (int j = 0; j < enemies[7] && i < freePositions.size(); j++, i++) {
Pontan pontan = new Pontan(freePositions.get(i));
entities.add(pontan);
collisionDetection.add(pontan);
}
}
private void doGameTick(long dt) {
processGameEvents();
final World world = game.getWorld();
final Map map = world.getMap();
final long newVersion = map.getVersion();
if (mapVersion < newVersion) {
for (Tile tile : collidableTiles) {
collisionDetection.remove(tile);
}
collidableTiles.clear();
for (int y = 0; y < Map.HEIGHT; y++) {
for (int x = 0; x < Map.WIDTH; x++) {
final Tile tile = map.getTile(x, y);
if (tile.isCollisionEnabled()) {
collidableTiles.add(tile);
collisionDetection.add(tile);
}
}
}
mapVersion = newVersion;
}
collisionDetection.update();
final float timeSeconds = dt / 1e9f;
// Process player input
final Vector2f inputVector = getInputVector().mul(player.getSpeed() * timeSeconds);
// Compute the motion for the tick
Vector2f movement = inputVector;
for (Collidable collidable : player.getCollisionList()) {
// ghost collidables only report collisions, but don't actually collide
if (collidable.isGhost()) {
continue;
}
// Powerup collision exceptions
if (collidable instanceof Bomb && player.hasPowerUP(BombPass.class)) {
continue;
}
if (collidable instanceof Breakable && player.hasPowerUP(WallPass.class)) {
continue;
}
if (collidable instanceof Fire && player.hasPowerUP(FlamePass.class)) {
continue;
}
// Find the intersection of the collision (a box) and the direction
final Intersection intersection = getIntersection(player, collidable);
final Direction direction = getCollisionDirection(intersection, collidable);
// Allow for a small amount of contact on the sides to prevent the player from getting stuck in adjacent tiles
if (intersection.size.dot(direction.getPerpendicularUnit()) < PERPENDICULAR_CONTACT_THRESHOLD) {
continue;
}
            // When most of the player is intersecting, ignore the collision to prevent the player from getting stuck
if (intersection.area / player.getCollisionBox().getArea() >= OVERLAP_CONTACT_THRESHOLD) {
continue;
}
// Block the movement in the direction if sufficient contact
movement = blockDirection(movement, direction.getUnit());
// Attempt to shift the player to the nearest free tile when close to one to ease motion in tight spaces
if (collidable instanceof Tile) {
// Check if the percentage of collision is lower than a threshold, signifying that the player is colliding by a minimum amount
if (intersection.size.dot(direction.getPerpendicularUnit()) / player.getCollisionBox().getSize().dot(direction.getPerpendicularUnit()) < SLIDING_CONTACT_THRESHOLD) {
// Get the direction in which to attempt to shift as a unit
final Vector2f offset = intersection.center.sub(collidable.getPosition());
final Vector2f shiftDirection = direction.getPerpendicularUnit().mul(offset).normalize();
// Check if we can shift, by looking for a path around the tile in the shift direction
final Vector2f adjacentPosition = collidable.getPosition().add(shiftDirection);
if (map.isTile(adjacentPosition, Air.class) && map.isTile(adjacentPosition.sub(direction.getUnit()), Air.class)) {
// Redirect the blocked motion towards the free path
movement = movement.add(shiftDirection.mul(inputVector.dot(direction.getUnit())));
}
}
}
}
// Update player movement
player.setPosition(player.getPosition().add(movement));
player.setVelocity(movement.div(timeSeconds));
// Update enemy positions and remove dead ones
for (Iterator<Entity> iterator = entities.iterator(); iterator.hasNext(); ) {
final Entity entity = iterator.next();
if (entity instanceof Enemy) {
                if (entity.isCollidingWith(Fire.class)) {
                    iterator.remove();
                    // Adding the enemy score (unique to each enemy) to the total enemy score
                    events.add(new EnemyDeathEvent((Enemy) entity));
                    collisionDetection.remove(entity);
                    // Skip the movement update for the enemy that just died
                    continue;
                }
final Enemy enemy = (Enemy) entity;
final Vector2f currentPosition = enemy.getPosition();
final Vector2f nextPosition = enemy.getAI().nextPosition(enemy, dt, map, player);
enemy.setPosition(nextPosition);
enemy.setVelocity(nextPosition.sub(currentPosition).div(timeSeconds));
}
}
// Update UI
levelStateText.setText(currentLevel.isBonus() ? "Bonus level " + -currentLevel.getNumber() : "Level " + currentLevel.getNumber()
+ " | Score " + world.getScore() + " | Timer " + world.getTimer() + "| Lives " + world.getLives());
}
private void processGameEvents() {
final Queue<Event> worldEvents = game.getWorld().getEvents();
while (!worldEvents.isEmpty()) {
final Event event = worldEvents.poll();
if (event instanceof PlayerLostLifeEvent) {
player.setPosition(new Vector2f(1, 11));
player.onDeath();
} else if (event instanceof PowerUPCollectedEvent) {
player.addPowerUP(((PowerUPCollectedEvent) event).getPowerUP());
} else if (event instanceof ExitWayOrPowerUPDestroyedEvent) {
entities.clear();
entities.add(player);
collisionDetection.clear();
collisionDetection.add(player);
//get highest enemies
int[] enemies = currentLevel.getEnemyForLevel();
int highestEnemy = 0;
for (int i = 0; i < enemies.length; i++) {
if (enemies[i] > 0) {
highestEnemy = i;
}
}
//need to get one higher than highest of level
if (highestEnemy < 7){
highestEnemy++;
}
//get free positions
final World world = game.getWorld();
final List<Vector2f> freePositions = getFreePositions(world.getMap());
Collections.shuffle(freePositions);
int i = 0;
//add 8 of the highest enemy
switch (highestEnemy) {
case 0:
                        // add balloom
for (int j = 0; j < 8 && i < freePositions.size(); j++, i++) {
Balloom balloom = new Balloom(freePositions.get(i));
entities.add(balloom);
collisionDetection.add(balloom);
}
break;
case 1:
// add oneal
for (int j = 0; j < 8 && i < freePositions.size(); j++, i++) {
Oneal oneal = new Oneal(freePositions.get(i));
entities.add(oneal);
collisionDetection.add(oneal);
}
break;
case 2:
// add doll
for (int j = 0; j < 8 && i < freePositions.size(); j++, i++) {
Doll doll = new Doll(freePositions.get(i));
entities.add(doll);
collisionDetection.add(doll);
}
break;
case 3:
// add minvo
for (int j = 0; j < 8 && i < freePositions.size(); j++, i++) {
Minvo minvo = new Minvo(freePositions.get(i));
entities.add(minvo);
collisionDetection.add(minvo);
}
break;
case 4:
// add kondoria
for (int j = 0; j < 8 && i < freePositions.size(); j++, i++) {
Kondoria kondoria = new Kondoria(freePositions.get(i));
entities.add(kondoria);
collisionDetection.add(kondoria);
}
break;
case 5:
// add ovapi
for (int j = 0; j < 8 && i < freePositions.size(); j++, i++) {
Ovapi ovapi = new Ovapi(freePositions.get(i));
entities.add(ovapi);
collisionDetection.add(ovapi);
}
break;
case 6:
// add pass
for (int j = 0; j < 8 && i < freePositions.size(); j++, i++) {
Pass pass = new Pass(freePositions.get(i));
entities.add(pass);
collisionDetection.add(pass);
}
break;
case 7:
// add pontan
for (int j = 0; j < 8 && i < freePositions.size(); j++, i++) {
Pontan pontan = new Pontan(freePositions.get(i));
entities.add(pontan);
collisionDetection.add(pontan);
}
break;
}
}
}
}
private Vector2f getInputVector() {
final KeyboardState keyboardState = game.getInput().getKeyboardState();
Vector2f input = Vector2f.ZERO;
for (Direction direction : Direction.values()) {
final Key key = direction.getKey();
input = input.add(direction.getUnit().mul(keyboardState.getAndClearPressTime(key) / 1e9f));
}
// Make sure we're not trying to normalize the zero vector
if (input.lengthSquared() > 0) {
input = input.normalize();
}
return input;
}
@Override
public void onStop() {
clearEntities();
events.unsubscribeAll();
mapVersion = 0;
}
public void subscribeToEvents() {
events.subscribe();
}
public Queue<Event> getEvents() {
return events;
}
public Player getPlayer() {
return player;
}
public Set<Entity> getEntities() {
return entities;
}
public Button getSelectedButton() {
if (buttonOrder.size() <= selectedButtonIndex) {
return null;
}
return buttonOrder.get(selectedButtonIndex);
}
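    /**
     * Collects the positions of all Air tiles on the map, skipping the player spawn at (1, 11) and
     * the adjacent tiles (1, 10) and (2, 11) so enemies are never placed on top of the player.
     *
     * @param map The map to scan for free tiles
     * @return The list of positions usable as enemy spawn points
     */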
private static List<Vector2f> getFreePositions(Map map) {
final List<Vector2f> free = new ArrayList<>();
final List<Air> freeTiles = map.getTiles(Air.class);
for (Air freeTile : freeTiles) {
final Vector2f position = freeTile.getPosition();
// Reject player starting positions
if ((position.getFloorX() != 1 || position.getFloorY() != 11 && position.getFloorY() != 10) && (position.getFloorX() != 2 || position.getFloorY() != 11)) {
free.add(position);
}
}
return free;
}
/**
* Blocks the movement in the desired direction, which is represented as a unit vector.
*
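     * For example, with movement (1, 0.5) and unitDirection (1, 0), the result is (0, 0.5): the
     * component along the blocked direction is removed and the perpendicular component is kept.
     *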
* @param movement The movement as a vector
* @param unitDirection The unit direction to block. Must be a unit to function correctly!
* @return The new movement but with all motion in the given direction removed
*/
private static Vector2f blockDirection(Vector2f movement, Vector2f unitDirection) {
// Check if we have movement in the direction
if (movement.dot(unitDirection) > 0) {
// Get motion in the direction and subtracted from total movement
return movement.sub(movement.mul(unitDirection.abs()));
}
// If we don't have any motion, don't change anything
return movement;
}
/**
     * Gets the direction of a collision. This uses the intersection between the two collided objects, which can be obtained with {@link #getIntersection(Collidable, Collidable)}, and the object
     * that was collided with. The direction found points towards the collided object.
*
* @param intersection The intersection from the collision
* @param other The object that was collided
* @return The direction of the collision
*/
private static Direction getCollisionDirection(Intersection intersection, Collidable other) {
final Vector2f offset = other.getPosition().sub(intersection.center);
return Direction.fromUnit(offset);
}
/**
     * Gets the collision intersection information for two objects that are colliding. If the objects aren't colliding, the resulting information is undefined.
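     * The intersection box is the overlap of the two axis-aligned collision boxes: its max point is
     * the component-wise minimum of the two box max points and its min point is the component-wise
     * maximum of the two box min points.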
*
* @param object The first object of the collision
* @param other The second object of the collision
* @return An intersection object containing the collision information
*/
private static Intersection getIntersection(Collidable object, Collidable other) {
final Vector2f intersectMax = object.getBoxMaxPoint().min(other.getBoxMaxPoint());
final Vector2f intersectMin = object.getBoxMinPoint().max(other.getBoxMinPoint());
return new Intersection(intersectMax, intersectMin);
}
/**
* Represents an intersection between two colliding objects.
*/
private static class Intersection {
/**
* The size of the intersection box as the diagonal vector.
*/
private final Vector2f size;
/**
* The center of the intersection box (halfway up the diagonal).
*/
private final Vector2f center;
/**
* The area of the intersection box
*/
private final float area;
private Intersection(Vector2f max, Vector2f min) {
size = max.sub(min);
center = min.add(size.div(2));
area = size.getX() * size.getY();
}
}
}
|
package org.glassfish.grizzly.bm;
import org.glassfish.grizzly.Grizzly;
import org.glassfish.grizzly.http.server.HttpServer;
import org.glassfish.grizzly.http.server.NetworkListener;
import org.glassfish.grizzly.http.server.RequestExecutorProvider;
import org.glassfish.grizzly.nio.transport.TCPNIOTransport;
/**
* HttpServer
*/
public class Server {
public static final String SERVER_VERSION = "Grizzly/" + Grizzly.getDotedVersion();
// The RequestExecutorProvider, which will run HTTP request processing
// in the same thread
static final RequestExecutorProvider EXECUTOR_PROVIDER =
new RequestExecutorProvider.SameThreadProvider();
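    /**
     * Starts the benchmark HTTP server. The listen port may be given as the first command-line
     * argument and defaults to 8080; the main thread then blocks on {@code Server.class.wait()}
     * until it is notified or interrupted, at which point the server is shut down.
     */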
public static void main(String[] args) throws Exception {
final int port = args.length > 0
? Integer.parseInt(args[0]) : 8080;
final HttpServer httpServer = new HttpServer();
final NetworkListener networkListener = new NetworkListener(
"http-listener", "0.0.0.0", port);
final TCPNIOTransport transport = networkListener.getTransport();
// force to not initialize worker thread pool
transport.setWorkerThreadPoolConfig(null);
transport.setSelectorRunnersCount(Runtime.getRuntime().availableProcessors() * 2);
// always keep-alive
networkListener.getKeepAlive().setIdleTimeoutInSeconds(-1);
networkListener.getKeepAlive().setMaxRequestsCount(-1);
// disable file-cache
networkListener.getFileCache().setEnabled(false);
httpServer.addListener(networkListener);
httpServer.getServerConfiguration().addHttpHandler(
new RootHttpHandler(), "/");
// httpServer.getServerConfiguration().addHttpHandler(
// new PlainTextHttpHandler(), "/plaintext");
// httpServer.getServerConfiguration().addHttpHandler(
// new JsonHttpHandler(), "/json");
try {
httpServer.start();
System.err.print("Server started.\n");
synchronized (Server.class) {
Server.class.wait();
}
} finally {
httpServer.shutdown();
}
}
}
|
package gov.nih.nci.calab.service.submit;
import gov.nih.nci.calab.db.DataAccessProxy;
import gov.nih.nci.calab.db.HibernateDataAccess;
import gov.nih.nci.calab.db.IDataAccess;
import gov.nih.nci.calab.domain.Instrument;
import gov.nih.nci.calab.domain.InstrumentConfiguration;
import gov.nih.nci.calab.domain.Keyword;
import gov.nih.nci.calab.domain.LabFile;
import gov.nih.nci.calab.domain.OutputFile;
import gov.nih.nci.calab.domain.nano.characterization.Characterization;
import gov.nih.nci.calab.domain.nano.characterization.CharacterizationFileType;
import gov.nih.nci.calab.domain.nano.characterization.DerivedBioAssayData;
import gov.nih.nci.calab.domain.nano.characterization.invitro.CFU_GM;
import gov.nih.nci.calab.domain.nano.characterization.invitro.Caspase3Activation;
import gov.nih.nci.calab.domain.nano.characterization.invitro.CellViability;
import gov.nih.nci.calab.domain.nano.characterization.invitro.Chemotaxis;
import gov.nih.nci.calab.domain.nano.characterization.invitro.Coagulation;
import gov.nih.nci.calab.domain.nano.characterization.invitro.ComplementActivation;
import gov.nih.nci.calab.domain.nano.characterization.invitro.CytokineInduction;
import gov.nih.nci.calab.domain.nano.characterization.invitro.EnzymeInduction;
import gov.nih.nci.calab.domain.nano.characterization.invitro.Hemolysis;
import gov.nih.nci.calab.domain.nano.characterization.invitro.LeukocyteProliferation;
import gov.nih.nci.calab.domain.nano.characterization.invitro.NKCellCytotoxicActivity;
import gov.nih.nci.calab.domain.nano.characterization.invitro.OxidativeBurst;
import gov.nih.nci.calab.domain.nano.characterization.invitro.OxidativeStress;
import gov.nih.nci.calab.domain.nano.characterization.invitro.Phagocytosis;
import gov.nih.nci.calab.domain.nano.characterization.invitro.PlasmaProteinBinding;
import gov.nih.nci.calab.domain.nano.characterization.invitro.PlateletAggregation;
import gov.nih.nci.calab.domain.nano.characterization.physical.MolecularWeight;
import gov.nih.nci.calab.domain.nano.characterization.physical.Morphology;
import gov.nih.nci.calab.domain.nano.characterization.physical.Purity;
import gov.nih.nci.calab.domain.nano.characterization.physical.Shape;
import gov.nih.nci.calab.domain.nano.characterization.physical.Size;
import gov.nih.nci.calab.domain.nano.characterization.physical.Solubility;
import gov.nih.nci.calab.domain.nano.characterization.physical.Surface;
import gov.nih.nci.calab.domain.nano.characterization.physical.composition.ParticleComposition;
import gov.nih.nci.calab.domain.nano.function.Agent;
import gov.nih.nci.calab.domain.nano.function.AgentTarget;
import gov.nih.nci.calab.domain.nano.function.Function;
import gov.nih.nci.calab.domain.nano.function.Linkage;
import gov.nih.nci.calab.domain.nano.particle.Nanoparticle;
import gov.nih.nci.calab.dto.characterization.CharacterizationBean;
import gov.nih.nci.calab.dto.characterization.DerivedBioAssayDataBean;
import gov.nih.nci.calab.dto.characterization.composition.CompositionBean;
import gov.nih.nci.calab.dto.characterization.invitro.CytotoxicityBean;
import gov.nih.nci.calab.dto.characterization.physical.MorphologyBean;
import gov.nih.nci.calab.dto.characterization.physical.ShapeBean;
import gov.nih.nci.calab.dto.characterization.physical.SolubilityBean;
import gov.nih.nci.calab.dto.characterization.physical.SurfaceBean;
import gov.nih.nci.calab.dto.common.LabFileBean;
import gov.nih.nci.calab.dto.function.FunctionBean;
import gov.nih.nci.calab.exception.CalabException;
import gov.nih.nci.calab.service.common.FileService;
import gov.nih.nci.calab.service.security.UserService;
import gov.nih.nci.calab.service.util.CaNanoLabConstants;
import gov.nih.nci.calab.service.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.struts.upload.FormFile;
/**
* This class includes service calls involved in creating nanoparticle general
* info and adding functions and characterizations for nanoparticles, as well as
* creating reports.
*
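 * <p>
 * Usage sketch (argument values are placeholders supplied by the calling web action):
 * <pre>
 *     SubmitNanoparticleService service = new SubmitNanoparticleService();
 *     service.addParticleGeneralInfo(particleType, particleName, keywords, visibilities);
 * </pre>
 *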
* @author pansu
*
*/
public class SubmitNanoparticleService {
private static Logger logger = Logger
.getLogger(SubmitNanoparticleService.class);
// remove existing visibilities for the data
private UserService userService;
public SubmitNanoparticleService() throws Exception {
userService = new UserService(CaNanoLabConstants.CSM_APP_NAME);
}
/**
* Update keywords and visibilities for the particle with the given name and
* type
*
* @param particleType
* @param particleName
* @param keywords
* @param visibilities
* @throws Exception
*/
public void addParticleGeneralInfo(String particleType,
String particleName, String[] keywords, String[] visibilities)
throws Exception {
// save nanoparticle to the database
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
// get the existing particle from database created during sample
// creation
List results = ida.search("from Nanoparticle where name='"
+ particleName + "' and type='" + particleType + "'");
Nanoparticle particle = null;
for (Object obj : results) {
particle = (Nanoparticle) obj;
}
if (particle == null) {
throw new CalabException("No such particle in the database");
}
particle.getKeywordCollection().clear();
if (keywords != null) {
for (String keyword : keywords) {
Keyword keywordObj = new Keyword();
keywordObj.setName(keyword);
particle.getKeywordCollection().add(keywordObj);
}
}
} catch (Exception e) {
ida.rollback();
logger
.error("Problem updating particle with name: "
+ particleName);
throw e;
} finally {
ida.close();
}
userService.setVisiblity(particleName, visibilities);
}
/**
* Save characterization to the database.
*
* @param particleType
* @param particleName
 * @param achar
 * @param charBean
* @throws Exception
*/
private void addParticleCharacterization(String particleType,
String particleName, Characterization achar,
CharacterizationBean charBean) throws Exception {
// if ID is not set save to the database otherwise update
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
Nanoparticle particle = null;
try {
ida.open();
// check if the view title is already used for the same type of
// characterization for the same particle
boolean viewTitleUsed = isCharacterizationViewTitleUsed(ida,
particleType, particleName, achar);
if (!viewTitleUsed) {
if (achar.getInstrumentConfiguration() != null) {
addInstrumentConfig(achar.getInstrumentConfiguration(), ida);
}
// if ID exists, do update
if (achar.getId() != null) {
// check if ID is still valid
try {
Characterization storedChara = (Characterization) ida
.load(Characterization.class, achar.getId());
} catch (Exception e) {
throw new Exception(
"This characterization is no longer in the database. Please log in again to refresh.");
}
ida.store(achar);
} else {// get the existing particle and characterizations
// from database created during sample creation
List results = ida
.search("select particle from Nanoparticle particle left join fetch particle.characterizationCollection where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType + "'");
for (Object obj : results) {
particle = (Nanoparticle) obj;
}
if (particle != null) {
particle.getCharacterizationCollection().add(achar);
}
}
}
            if (viewTitleUsed) {
                throw new CalabException(
                        "The view title is already in use. Please enter a different one.");
            }
// add new characterization file type if necessary
if (!charBean.getDerivedBioAssayDataList().isEmpty()) {
for (DerivedBioAssayDataBean derivedBioAssayDataBean : charBean
.getDerivedBioAssayDataList()) {
if (derivedBioAssayDataBean.getType().length() > 0)
addCharacterizationFileType(ida,
derivedBioAssayDataBean.getType());
}
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving characterization: ");
throw e;
} finally {
ida.close();
}
// save file to the file system
// if this block of code is inside the db try/catch block, hibernate
// doesn't persist the derivedBioAssayData
if (!charBean.getDerivedBioAssayDataList().isEmpty()) {
for (DerivedBioAssayDataBean derivedBioAssayDataBean : charBean
.getDerivedBioAssayDataList()) {
saveCharacterizationFile(derivedBioAssayDataBean);
}
}
}
private void addCharacterizationFileType(IDataAccess ida, String type)
throws Exception {
List results = ida
.search("select count(distinct fileType.name) from CharacterizationFileType fileType where fileType.name='"
+ type + "'");
CharacterizationFileType fileType = new CharacterizationFileType();
fileType.setName(type);
int count = -1;
for (Object obj : results) {
count = ((Integer) (obj)).intValue();
}
if (count == 0) {
ida.createObject(fileType);
}
}
/*
 * check if the view title is already used for the same type of
 * characterization for the same particle
*/
private boolean isCharacterizationViewTitleUsed(IDataAccess ida,
String particleType, String particleName, Characterization achar)
throws Exception {
String viewTitleQuery = "";
if (achar.getId() == null) {
viewTitleQuery = "select count(achar.identificationName) from Nanoparticle particle join particle.characterizationCollection achar where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType
+ "' and achar.identificationName='"
+ achar.getIdentificationName()
+ "' and achar.name='"
+ achar.getName() + "'";
} else {
viewTitleQuery = "select count(achar.identificationName) from Nanoparticle particle join particle.characterizationCollection achar where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType
+ "' and achar.identificationName='"
+ achar.getIdentificationName()
+ "' and achar.name='"
+ achar.getName() + "' and achar.id!=" + achar.getId();
}
List viewTitleResult = ida.search(viewTitleQuery);
int existingViewTitleCount = -1;
for (Object obj : viewTitleResult) {
existingViewTitleCount = ((Integer) (obj)).intValue();
}
        return existingViewTitleCount > 0;
}
/**
* Save the file to the file system if newly uploaded
*
* @param fileBean
*/
public void saveCharacterizationFile(DerivedBioAssayDataBean fileBean)
throws Exception {
FormFile uploadedFile = fileBean.getUploadedFile();
if (uploadedFile != null) {
FileService fileService = new FileService();
String fileName = fileService.writeUploadedFile(uploadedFile,
fileBean.getFullPath(), true);
}
userService.setVisiblity(fileBean.getId(), fileBean
.getVisibilityGroups());
}
private void addInstrumentConfig(InstrumentConfiguration instrumentConfig,
IDataAccess ida) throws Exception {
Instrument instrument = instrumentConfig.getInstrument();
// check if instrument is already in database
List instrumentResults = ida
.search("select instrument from Instrument instrument where instrument.type='"
+ instrument.getType()
+ "' and instrument.manufacturer='"
+ instrument.getManufacturer() + "'");
Instrument storedInstrument = null;
for (Object obj : instrumentResults) {
storedInstrument = (Instrument) obj;
}
if (storedInstrument != null) {
instrument.setId(storedInstrument.getId());
} else {
ida.createObject(instrument);
}
// if new instrumentConfig, save it
if (instrumentConfig.getId() == null) {
ida.createObject(instrumentConfig);
} else {
InstrumentConfiguration storedInstrumentConfig = (InstrumentConfiguration) ida
.load(InstrumentConfiguration.class, instrumentConfig
.getId());
storedInstrumentConfig.setDescription(instrumentConfig
.getDescription());
storedInstrumentConfig.setInstrument(instrument);
}
}
/**
* Saves the particle composition to the database
*
* @param particleType
* @param particleName
* @param composition
* @throws Exception
*/
public void addParticleComposition(String particleType,
String particleName, CompositionBean composition) throws Exception {
ParticleComposition doComp = composition.getDomainObj();
addParticleCharacterization(particleType, particleName, doComp,
composition);
}
/**
 * Saves the size characterization to the database
*
* @param particleType
* @param particleName
* @param size
* @throws Exception
*/
public void addParticleSize(String particleType, String particleName,
CharacterizationBean size) throws Exception {
Size doSize = new Size();
size.updateDomainObj(doSize);
addParticleCharacterization(particleType, particleName, doSize, size);
}
/**
 * Saves the surface characterization to the database
*
* @param particleType
* @param particleName
* @param surface
* @throws Exception
*/
public void addParticleSurface(String particleType, String particleName,
SurfaceBean surface) throws Exception {
Surface doSurface = new Surface();
surface.updateDomainObj(doSurface);
addParticleCharacterization(particleType, particleName, doSurface,
surface);
}
/**
* Saves the molecular weight characterization to the database
*
* @param particleType
* @param particleName
* @param molecularWeight
* @throws Exception
*/
public void addParticleMolecularWeight(String particleType,
String particleName, CharacterizationBean molecularWeight)
throws Exception {
MolecularWeight doMolecularWeight = new MolecularWeight();
molecularWeight.updateDomainObj(doMolecularWeight);
addParticleCharacterization(particleType, particleName,
doMolecularWeight, molecularWeight);
}
/**
* Saves the morphology characterization to the database
*
* @param particleType
* @param particleName
* @param morphology
* @throws Exception
*/
public void addParticleMorphology(String particleType, String particleName,
MorphologyBean morphology) throws Exception {
Morphology doMorphology = new Morphology();
morphology.updateDomainObj(doMorphology);
addParticleCharacterization(particleType, particleName, doMorphology,
morphology);
}
/**
* Saves the shape characterization to the database
*
* @param particleType
* @param particleName
* @param shape
* @throws Exception
*/
public void addParticleShape(String particleType, String particleName,
ShapeBean shape) throws Exception {
Shape doShape = new Shape();
shape.updateDomainObj(doShape);
addParticleCharacterization(particleType, particleName, doShape, shape);
}
/**
* Saves the purity characterization to the database
*
* @param particleType
* @param particleName
* @param purity
* @throws Exception
*/
public void addParticlePurity(String particleType, String particleName,
CharacterizationBean purity) throws Exception {
Purity doPurity = new Purity();
purity.updateDomainObj(doPurity);
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doPurity,
purity);
}
/**
* Saves the solubility characterization to the database
*
* @param particleType
* @param particleName
* @param solubility
* @throws Exception
*/
public void addParticleSolubility(String particleType, String particleName,
SolubilityBean solubility) throws Exception {
Solubility doSolubility = new Solubility();
solubility.updateDomainObj(doSolubility);
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doSolubility,
solubility);
}
/**
* Saves the invitro hemolysis characterization to the database
*
* @param particleType
* @param particleName
* @param hemolysis
* @throws Exception
*/
public void addHemolysis(String particleType, String particleName,
CharacterizationBean hemolysis) throws Exception {
Hemolysis doHemolysis = new Hemolysis();
hemolysis.updateDomainObj(doHemolysis);
// TODO think about how to deal with characterization file.
addParticleCharacterization(particleType, particleName, doHemolysis,
hemolysis);
}
/**
* Saves the invitro coagulation characterization to the database
*
* @param particleType
* @param particleName
* @param coagulation
* @throws Exception
*/
public void addCoagulation(String particleType, String particleName,
CharacterizationBean coagulation) throws Exception {
Coagulation doCoagulation = new Coagulation();
coagulation.updateDomainObj(doCoagulation);
addParticleCharacterization(particleType, particleName, doCoagulation,
coagulation);
}
/**
 * Saves the invitro platelet aggregation characterization to the database
*
* @param particleType
* @param particleName
* @param plateletAggregation
* @throws Exception
*/
public void addPlateletAggregation(String particleType,
String particleName, CharacterizationBean plateletAggregation)
throws Exception {
PlateletAggregation doPlateletAggregation = new PlateletAggregation();
plateletAggregation.updateDomainObj(doPlateletAggregation);
addParticleCharacterization(particleType, particleName,
doPlateletAggregation, plateletAggregation);
}
/**
* Saves the invitro Complement Activation characterization to the database
*
* @param particleType
* @param particleName
* @param complementActivation
* @throws Exception
*/
public void addComplementActivation(String particleType,
String particleName, CharacterizationBean complementActivation)
throws Exception {
ComplementActivation doComplementActivation = new ComplementActivation();
complementActivation.updateDomainObj(doComplementActivation);
addParticleCharacterization(particleType, particleName,
doComplementActivation, complementActivation);
}
/**
* Saves the invitro chemotaxis characterization to the database
*
* @param particleType
* @param particleName
* @param chemotaxis
* @throws Exception
*/
public void addChemotaxis(String particleType, String particleName,
CharacterizationBean chemotaxis) throws Exception {
Chemotaxis doChemotaxis = new Chemotaxis();
chemotaxis.updateDomainObj(doChemotaxis);
addParticleCharacterization(particleType, particleName, doChemotaxis,
chemotaxis);
}
/**
* Saves the invitro NKCellCytotoxicActivity characterization to the
* database
*
* @param particleType
* @param particleName
* @param nkCellCytotoxicActivity
* @throws Exception
*/
public void addNKCellCytotoxicActivity(String particleType,
String particleName, CharacterizationBean nkCellCytotoxicActivity)
throws Exception {
NKCellCytotoxicActivity doNKCellCytotoxicActivity = new NKCellCytotoxicActivity();
nkCellCytotoxicActivity.updateDomainObj(doNKCellCytotoxicActivity);
addParticleCharacterization(particleType, particleName,
doNKCellCytotoxicActivity, nkCellCytotoxicActivity);
}
/**
* Saves the invitro LeukocyteProliferation characterization to the database
*
* @param particleType
* @param particleName
* @param leukocyteProliferation
* @throws Exception
*/
public void addLeukocyteProliferation(String particleType,
String particleName, CharacterizationBean leukocyteProliferation)
throws Exception {
LeukocyteProliferation doLeukocyteProliferation = new LeukocyteProliferation();
leukocyteProliferation.updateDomainObj(doLeukocyteProliferation);
addParticleCharacterization(particleType, particleName,
doLeukocyteProliferation, leukocyteProliferation);
}
/**
* Saves the invitro CFU_GM characterization to the database
*
* @param particleType
* @param particleName
* @param cfu_gm
* @throws Exception
*/
public void addCFU_GM(String particleType, String particleName,
CharacterizationBean cfu_gm) throws Exception {
CFU_GM doCFU_GM = new CFU_GM();
cfu_gm.updateDomainObj(doCFU_GM);
addParticleCharacterization(particleType, particleName, doCFU_GM,
cfu_gm);
}
/**
* Saves the invitro OxidativeBurst characterization to the database
*
* @param particleType
* @param particleName
* @param oxidativeBurst
* @throws Exception
*/
public void addOxidativeBurst(String particleType, String particleName,
CharacterizationBean oxidativeBurst) throws Exception {
OxidativeBurst doOxidativeBurst = new OxidativeBurst();
oxidativeBurst.updateDomainObj(doOxidativeBurst);
addParticleCharacterization(particleType, particleName,
doOxidativeBurst, oxidativeBurst);
}
/**
* Saves the invitro Phagocytosis characterization to the database
*
* @param particleType
* @param particleName
* @param phagocytosis
* @throws Exception
*/
public void addPhagocytosis(String particleType, String particleName,
CharacterizationBean phagocytosis) throws Exception {
Phagocytosis doPhagocytosis = new Phagocytosis();
phagocytosis.updateDomainObj(doPhagocytosis);
addParticleCharacterization(particleType, particleName, doPhagocytosis,
phagocytosis);
}
/**
* Saves the invitro CytokineInduction characterization to the database
*
* @param particleType
* @param particleName
* @param cytokineInduction
* @throws Exception
*/
public void addCytokineInduction(String particleType, String particleName,
CharacterizationBean cytokineInduction) throws Exception {
CytokineInduction doCytokineInduction = new CytokineInduction();
cytokineInduction.updateDomainObj(doCytokineInduction);
addParticleCharacterization(particleType, particleName,
doCytokineInduction, cytokineInduction);
}
/**
* Saves the invitro plasma protein binding characterization to the database
*
* @param particleType
* @param particleName
* @param plasmaProteinBinding
* @throws Exception
*/
public void addProteinBinding(String particleType, String particleName,
CharacterizationBean plasmaProteinBinding) throws Exception {
PlasmaProteinBinding doProteinBinding = new PlasmaProteinBinding();
plasmaProteinBinding.updateDomainObj(doProteinBinding);
addParticleCharacterization(particleType, particleName,
doProteinBinding, plasmaProteinBinding);
}
/**
	 * Saves the invitro CellViability characterization to the database
*
* @param particleType
* @param particleName
* @param cellViability
* @throws Exception
*/
public void addCellViability(String particleType, String particleName,
CytotoxicityBean cellViability) throws Exception {
CellViability doCellViability = new CellViability();
cellViability.updateDomainObj(doCellViability);
addParticleCharacterization(particleType, particleName,
doCellViability, cellViability);
}
/**
	 * Saves the invitro EnzymeInduction characterization to the database
*
* @param particleType
* @param particleName
* @param enzymeInduction
* @throws Exception
*/
public void addEnzymeInduction(String particleType, String particleName,
CharacterizationBean enzymeInduction) throws Exception {
EnzymeInduction doEnzymeInduction = new EnzymeInduction();
enzymeInduction.updateDomainObj(doEnzymeInduction);
addParticleCharacterization(particleType, particleName,
doEnzymeInduction, enzymeInduction);
}
/**
* Saves the invitro OxidativeStress characterization to the database
*
* @param particleType
* @param particleName
* @param oxidativeStress
* @throws Exception
*/
public void addOxidativeStress(String particleType, String particleName,
CharacterizationBean oxidativeStress) throws Exception {
OxidativeStress doOxidativeStress = new OxidativeStress();
oxidativeStress.updateDomainObj(doOxidativeStress);
addParticleCharacterization(particleType, particleName,
doOxidativeStress, oxidativeStress);
}
/**
* Saves the invitro Caspase3Activation characterization to the database
*
* @param particleType
* @param particleName
* @param caspase3Activation
* @throws Exception
*/
public void addCaspase3Activation(String particleType, String particleName,
CytotoxicityBean caspase3Activation) throws Exception {
Caspase3Activation doCaspase3Activation = new Caspase3Activation();
caspase3Activation.updateDomainObj(doCaspase3Activation);
addParticleCharacterization(particleType, particleName,
doCaspase3Activation, caspase3Activation);
}
public void setCharacterizationFile(String particleName,
String characterizationName, LabFileBean fileBean) {
}
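	/**
	 * Saves the function characterization to the database. Linkages whose
	 * persisted type differs from the submitted type are removed and re-saved,
	 * agents and agent targets are stored, and the function is only added to
	 * the particle if its view title is not already used by a function of the
	 * same type for that particle.
	 *
	 * @param particleType
	 * @param particleName
	 * @param function
	 * @throws Exception
	 */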
public void addParticleFunction(String particleType, String particleName,
FunctionBean function) throws Exception {
Function doFunction = function.getDomainObj();
// if ID is not set save to the database otherwise update
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
Nanoparticle particle = null;
int existingViewTitleCount = -1;
try {
			// Have to separate this section out into a different hibernate
// session.
// check linkage id object type
ida.open();
if (doFunction.getId() != null
&& doFunction.getLinkageCollection() != null) {
for (Linkage linkage : doFunction.getLinkageCollection()) {
// check linkage id object type
if (linkage.getId() != null) {
List result = ida
.search("from Linkage linkage where linkage.id = "
+ linkage.getId());
if (result != null && result.size() > 0) {
Linkage existingObj = (Linkage) result.get(0);
						// if the type is different, remove the stale object
						// and re-save the linkage as new
if (existingObj.getClass() != linkage.getClass()) {
linkage.setId(null);
ida.removeObject(existingObj);
}
}
}
}
}
ida.close();
ida.open();
if (doFunction.getLinkageCollection() != null) {
for (Linkage linkage : doFunction.getLinkageCollection()) {
Agent agent = linkage.getAgent();
if (agent != null) {
for (AgentTarget agentTarget : agent
.getAgentTargetCollection()) {
ida.store(agentTarget);
}
ida.store(agent);
}
ida.store(linkage);
}
}
			boolean viewTitleUsed = isFunctionViewTitleUsed(ida, particleType,
					particleName, doFunction);
			if (viewTitleUsed) {
				// flag the duplicate view title so the exception below is thrown
				existingViewTitleCount = 1;
			} else {
if (doFunction.getId() != null) {
ida.store(doFunction);
} else {// get the existing particle and compositions
// from database created during sample creation
List results = ida
.search("select particle from Nanoparticle particle left join fetch particle.functionCollection where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType + "'");
for (Object obj : results) {
particle = (Nanoparticle) obj;
}
if (particle != null) {
particle.getFunctionCollection().add(doFunction);
}
}
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving characterization: ");
throw e;
} finally {
ida.close();
}
if (existingViewTitleCount > 0) {
throw new CalabException(
"The view title is already in use. Please enter a different one.");
}
}
/*
	 * check if the viewTitle is already used by the same type of function for
	 * the same particle
*/
private boolean isFunctionViewTitleUsed(IDataAccess ida,
String particleType, String particleName, Function function)
throws Exception {
		// check if the viewTitle is already used by the same type of
// function for the same particle
String viewTitleQuery = "";
if (function.getId() == null) {
viewTitleQuery = "select count(function.identificationName) from Nanoparticle particle join particle.functionCollection function where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType
+ "' and function.identificationName='"
+ function.getIdentificationName()
+ "' and function.type='" + function.getType() + "'";
} else {
viewTitleQuery = "select count(function.identificationName) from Nanoparticle particle join particle.functionCollection function where particle.name='"
+ particleName
+ "' and particle.type='"
+ particleType
+ "' and function.identificationName='"
+ function.getIdentificationName()
+ "' and function.id!="
+ function.getId()
+ " and function.type='"
+ function.getType() + "'";
}
List viewTitleResult = ida.search(viewTitleQuery);
int existingViewTitleCount = -1;
for (Object obj : viewTitleResult) {
existingViewTitleCount = ((Integer) (obj)).intValue();
}
if (existingViewTitleCount > 0) {
return true;
} else {
return false;
}
}
/**
* Load the file for the given fileId from the database
*
* @param fileId
* @return
*/
public LabFileBean getFile(String fileId) throws Exception {
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
LabFileBean fileBean = null;
try {
ida.open();
LabFile file = (LabFile) ida.load(LabFile.class, StringUtils
.convertToLong(fileId));
fileBean = new LabFileBean(file);
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem getting file with file ID: " + fileId);
throw e;
} finally {
ida.close();
}
// get visibilities
UserService userService = new UserService(
CaNanoLabConstants.CSM_APP_NAME);
List<String> accessibleGroups = userService.getAccessibleGroups(
fileBean.getId(), CaNanoLabConstants.CSM_READ_ROLE);
String[] visibilityGroups = accessibleGroups.toArray(new String[0]);
fileBean.setVisibilityGroups(visibilityGroups);
return fileBean;
}
/**
* Load the derived data file for the given fileId from the database
*
* @param fileId
* @return
*/
public DerivedBioAssayDataBean getDerivedBioAssayData(String fileId)
throws Exception {
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
DerivedBioAssayDataBean fileBean = null;
try {
ida.open();
DerivedBioAssayData file = (DerivedBioAssayData) ida.load(
DerivedBioAssayData.class, StringUtils
.convertToLong(fileId));
// load keywords
file.getKeywordCollection();
fileBean = new DerivedBioAssayDataBean(file,
CaNanoLabConstants.OUTPUT);
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem getting file with file ID: " + fileId);
throw e;
} finally {
ida.close();
}
// get visibilities
UserService userService = new UserService(
CaNanoLabConstants.CSM_APP_NAME);
List<String> accessibleGroups = userService.getAccessibleGroups(
fileBean.getId(), CaNanoLabConstants.CSM_READ_ROLE);
String[] visibilityGroups = accessibleGroups.toArray(new String[0]);
fileBean.setVisibilityGroups(visibilityGroups);
return fileBean;
}
/**
* Get the list of all run output files associated with a particle
*
* @param particleName
* @return
* @throws Exception
*/
public List<LabFileBean> getAllRunFiles(String particleName)
throws Exception {
List<LabFileBean> runFiles = new ArrayList<LabFileBean>();
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
String query = "select distinct outFile from Run run join run.outputFileCollection outFile join run.runSampleContainerCollection runContainer where runContainer.sampleContainer.sample.name='"
+ particleName + "'";
List results = ida.search(query);
for (Object obj : results) {
OutputFile file = (OutputFile) obj;
// active status only
if (file.getDataStatus() == null) {
LabFileBean fileBean = new LabFileBean();
fileBean.setId(file.getId().toString());
fileBean.setName(file.getFilename());
fileBean.setUri(file.getUri());
runFiles.add(fileBean);
}
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem getting run files for particle: "
+ particleName);
throw e;
} finally {
ida.close();
}
return runFiles;
}
/**
* Update the meta data associated with a file stored in the database
*
* @param fileBean
* @throws Exception
*/
public void updateFileMetaData(LabFileBean fileBean) throws Exception {
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
LabFile file = (LabFile) ida.load(LabFile.class, StringUtils
.convertToLong(fileBean.getId()));
file.setTitle(fileBean.getTitle().toUpperCase());
file.setDescription(fileBean.getDescription());
file.setComments(fileBean.getComments());
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem updating file meta data: ");
throw e;
} finally {
ida.close();
}
userService.setVisiblity(fileBean.getId(), fileBean
.getVisibilityGroups());
}
/**
	 * Update the meta data associated with a derived data file stored in the database
*
* @param fileBean
* @throws Exception
*/
public void updateDerivedBioAssayDataMetaData(
DerivedBioAssayDataBean fileBean) throws Exception {
IDataAccess ida = (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
DerivedBioAssayData file = (DerivedBioAssayData) ida.load(
DerivedBioAssayData.class, StringUtils
.convertToLong(fileBean.getId()));
file.setTitle(fileBean.getTitle().toUpperCase());
file.setDescription(fileBean.getDescription());
file.getKeywordCollection().clear();
if (fileBean.getKeywords() != null) {
for (String keyword : fileBean.getKeywords()) {
Keyword keywordObj = new Keyword();
keywordObj.setName(keyword);
file.getKeywordCollection().add(keywordObj);
}
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem updating derived data file meta data: ");
throw e;
} finally {
ida.close();
}
userService.setVisiblity(fileBean.getId(), fileBean
.getVisibilityGroups());
}
/**
* Delete the characterization
*/
public void deleteCharacterization(String strCharId) throws Exception {
		// load the characterization by ID and delete it
HibernateDataAccess ida = (HibernateDataAccess) (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
// Get ID
Long charId = Long.parseLong(strCharId);
Object charObj = ida.load(Characterization.class, charId);
ida.delete(charObj);
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem saving characterization: ");
throw e;
} finally {
ida.close();
}
}
/**
* Delete the characterizations
*/
public void deleteCharacterizations(String particleName,
String particleType, String[] charIds) throws Exception {
		// load each characterization by ID, detach it from its particle, then delete it
HibernateDataAccess ida = (HibernateDataAccess) (new DataAccessProxy())
.getInstance(IDataAccess.HIBERNATE);
try {
ida.open();
// Get ID
for (String strCharId : charIds) {
Long charId = Long.parseLong(strCharId);
Object charObj = ida.load(Characterization.class, charId);
// deassociate first
String hqlString = "from Nanoparticle particle where particle.characterizationCollection.id = '"
+ strCharId + "'";
List results = ida.search(hqlString);
for (Object obj : results) {
Nanoparticle particle = (Nanoparticle) obj;
particle.getCharacterizationCollection().remove(charObj);
}
// then delete
ida.delete(charObj);
}
} catch (Exception e) {
e.printStackTrace();
ida.rollback();
logger.error("Problem deleting characterization: ");
throw new Exception(
"The characterization is no longer exist in the database, please login again to refresh the view.");
} finally {
ida.close();
}
}
}
|
package ccm.pay2spawn;
import ccm.pay2spawn.network.RedonatePacket;
import ccm.pay2spawn.util.EventHandler;
import ccm.pay2spawn.util.Helper;
import ccm.pay2spawn.util.JsonNBTHelper;
import com.google.common.base.Strings;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import net.minecraft.client.Minecraft;
import java.io.*;
import java.net.URL;
import java.util.ArrayList;
/**
 * The thread that does the actual polling of nightdev's donation tracker
*
* @author Dries007
*/
public class DonationCheckerThread extends Thread
{
final int interval;
final String channel;
final String API_Key;
final String URL;
boolean firstrun = true;
JsonArray latest;
public DonationCheckerThread(int interval, String channel, String API_Key)
{
super(DonationCheckerThread.class.getSimpleName());
this.interval = interval;
this.channel = channel;
this.API_Key = API_Key;
this.URL = "http://donationtrack.nightdev.com/api/poll?channel=" + channel + "&key=" + API_Key;
}
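    /*
     * A minimal usage sketch (illustrative; the channel and API key values are
     * placeholders, and the interval is in seconds since run() waits
     * interval * 1000 ms between polls):
     *
     *   DonationCheckerThread checker = new DonationCheckerThread(10, "myChannel", "myApiKey");
     *   checker.start();
     */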
ArrayList<String> doneIDs = new ArrayList<>();
ArrayList<JsonObject> backlog = new ArrayList<>();
public synchronized JsonObject getLatestById(int id)
{
return latest.get(id).getAsJsonObject();
}
@Override
public void run()
{
while (true)
{
try
{
for (JsonObject donation : backlog) process(donation);
String input = readUrl(URL);
JsonObject root = JsonNBTHelper.PARSER.parse(input).getAsJsonObject();
if (root.get("status").getAsString().equals("success"))
{
doFileAndHud(root);
latest = root.getAsJsonArray("mostRecent");
for (JsonElement donation : root.getAsJsonArray("mostRecent")) process(donation.getAsJsonObject());
}
else
{
throw new IllegalArgumentException("Could not fetch recent donations.\n Message:" + root.get("error").getAsString());
}
firstrun = false;
doWait(interval);
}
catch (Exception e)
{
if (Minecraft.getMinecraft().running) e.printStackTrace();
}
}
}
private void process(JsonObject donation)
{
if (Minecraft.getMinecraft().thePlayer == null || !Pay2Spawn.enable)
{
if (!backlog.contains(donation)) backlog.add(donation);
}
else if (Pay2Spawn.debug || !doneIDs.contains(donation.get("transactionID").getAsString()))
{
doneIDs.add(donation.get("transactionID").getAsString());
if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) return;
try
{
Pay2Spawn.getRewardsDB().process(donation);
}
catch (Exception e)
{
Pay2Spawn.getLogger().warning("Error processing a donation.");
e.printStackTrace();
}
}
}
private void doWait(int time)
{
try
{
synchronized (this)
{
this.wait(time * 1000);
}
}
catch (InterruptedException e)
{
e.printStackTrace();
}
}
@SuppressWarnings("ResultOfMethodCallIgnored")
private void doFileAndHud(JsonObject root)
{
/**
* Hud
*/
{
/**
* Top
*/
EventHandler.TOP.clear();
P2SConfig.HudSettings hudSettings = Pay2Spawn.getConfig().hud;
if (hudSettings.top != 0)
{
String header = hudSettings.top_header.trim();
if (!Strings.isNullOrEmpty(header)) EventHandler.TOP.add(header);
for (int i = 0; i < hudSettings.top_amount && i < root.getAsJsonArray("top").size(); i++)
{
JsonObject donation = root.getAsJsonArray("top").get(i).getAsJsonObject();
if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue;
EventHandler.TOP.add(Helper.formatText(hudSettings.top_format, donation));
}
}
/**
* Recent
*/
EventHandler.RECENT.clear();
if (hudSettings.recent != 0)
{
String header = hudSettings.recent_header.trim();
if (!Strings.isNullOrEmpty(header)) EventHandler.RECENT.add(header);
for (int i = 0; i < hudSettings.recent_amount && i < root.getAsJsonArray("mostRecent").size(); i++)
{
JsonObject donation = root.getAsJsonArray("mostRecent").get(i).getAsJsonObject();
if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue;
EventHandler.RECENT.add(Helper.formatText(hudSettings.recent_format, donation));
}
}
}
/**
* File
*/
{
P2SConfig.FileSettings fileSettings = Pay2Spawn.getConfig().file;
/**
* Top
*/
if (fileSettings.top != 0)
{
try
{
String end = (fileSettings.top == 1 ? "\n" : "");
File file = new File(Pay2Spawn.getFolder(), "topList.txt");
//file.delete();
file.createNewFile();
PrintWriter pw = new PrintWriter(file);
for (int i = 0; i < fileSettings.top_amount; i++)
{
if (i == fileSettings.top_amount - 1) end = "";
JsonObject donation = root.getAsJsonArray("top").get(i).getAsJsonObject();
if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue;
pw.print(Helper.formatText(fileSettings.top_format, donation) + end);
}
pw.close();
}
catch (IOException e)
{
e.printStackTrace();
}
}
/**
* Recent
*/
if (fileSettings.recent != 0)
{
try
{
String end = (fileSettings.recent == 1 ? "\n" : "");
File file = new File(Pay2Spawn.getFolder(), "recentList.txt");
//file.delete();
file.createNewFile();
PrintWriter pw = new PrintWriter(file);
for (int i = 0; i < fileSettings.recent_amount; i++)
{
if (i == fileSettings.recent_amount - 1) end = "";
JsonObject donation = root.getAsJsonArray("mostRecent").get(i).getAsJsonObject();
if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue;
pw.print(Helper.formatText(fileSettings.recent_format, donation) + end);
}
pw.close();
}
catch (IOException e)
{
e.printStackTrace();
}
}
}
}
private String readUrl(String urlString) throws Exception
{
BufferedReader reader = null;
try
{
URL url = new URL(urlString);
reader = new BufferedReader(new InputStreamReader(url.openStream()));
StringBuilder buffer = new StringBuilder();
int read;
char[] chars = new char[1024];
while ((read = reader.read(chars)) != -1) buffer.append(chars, 0, read);
return buffer.toString();
}
finally
{
if (reader != null) reader.close();
}
}
}
|
package VASSAL.i18n;
import java.awt.Component;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;
import javax.swing.DefaultListCellRenderer;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.UIManager;
import VASSAL.Info;
import VASSAL.build.module.gamepieceimage.StringEnumConfigurer;
import VASSAL.preferences.Prefs;
public class Resources {
/*
* Translation of VASSAL is handled by standard Java I18N tools.
*
   * vassalBundle - Resource Bundle for the VASSAL player interface
   * editorBundle - Resource Bundle for the Module Editor
*
* These are implemented as PropertyResourceBundles, normally to be found in the VASSAL jar file. VASSAL will search
* first in the VASSAL install directory for bundles, then follow the standard Java Class Path
*/
protected static BundleHelper vassalBundle;
protected static BundleHelper editorBundle;
private static VassalPropertyClassLoader bundleLoader = new VassalPropertyClassLoader();
public static final String LOCALE_PREF_KEY = "Locale"; // Preferences key for the user's Locale
protected static Collection<Locale> supportedLocales = Arrays.asList(new Locale[]{Locale.ENGLISH, Locale.GERMAN, Locale.FRENCH, Locale.ITALIAN,
Locale.JAPANESE});
protected static Locale locale = Locale.getDefault();
static {
// If the user has a resource bundle for their default language on their local machine, add it to the list of supported locales
if (ResourceBundle.getBundle("VASSAL.i18n.VASSAL", Locale.getDefault(), bundleLoader).getLocale().getLanguage().equals(Locale.getDefault().getLanguage())) {
List<Locale> tmp = new ArrayList<Locale>();
tmp.add(Locale.getDefault());
tmp.addAll(supportedLocales);
supportedLocales = tmp;
}
ArrayList<String> languages = new ArrayList<String>();
for (Locale l : getSupportedLocales()) {
languages.add(l.getLanguage());
}
Locale myLocale = Locale.getDefault();
String savedLocale = Prefs.getGlobalPrefs().getStoredValue(LOCALE_PREF_KEY);
if (savedLocale == null) {
myLocale = supportedLocales.iterator().next();
}
else {
myLocale = new Locale(savedLocale);
}
Resources.setLocale(myLocale);
StringEnumConfigurer localeConfig = new StringEnumConfigurer(Resources.LOCALE_PREF_KEY, getString("Prefs.language"), languages.toArray(new String[languages
.size()])) {
public Component getControls() {
if (box == null) {
Component c = super.getControls();
box.setRenderer(new DefaultListCellRenderer() {
private static final long serialVersionUID = 1L;
public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
JLabel l = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
l.setText(new Locale((String) value).getDisplayLanguage());
return l;
}
});
return c;
}
else {
return super.getControls();
}
}
};
localeConfig.setValue(Resources.getLocale().getLanguage());
Prefs.getGlobalPrefs().addOption(getString("Prefs.general_tab"), localeConfig);
}
public static Collection<Locale> getSupportedLocales() {
return supportedLocales;
}
public static Collection<String> getVassalKeys() {
return Collections.list(vassalBundle.getResourceBundle().getKeys());
}
public static Collection<String> getEditorKeys() {
return Collections.list(editorBundle.getResourceBundle().getKeys());
}
/*
* Translation of individual modules is handled differently. There may be multiple Module.properties file active -
* Potentially one in the module plus one in each Extension loaded. These will be read into UberProperties structures
* with each file loaded supplying defaults for subsequent files.
*/
protected static String MODULE_BUNDLE = "Module"; //$NON-NLS-1$
/*
* Commonly used i18n keys used in multiple components. By defining them centrally, they will only have to be
   * translated once. References to these strings should be made as follows:
*
* Resources.getString(Resources.VASSAL)
*/
public static final String VASSAL = "General.VASSAL"; //$NON-NLS-1$
public static final String ADD = "General.add"; //$NON-NLS-1$
public static final String REMOVE = "General.remove"; //$NON-NLS-1$
public static final String INSERT = "General.insert"; //$NON-NLS-1$
public static final String YES = "General.yes"; //$NON-NLS-1$
public static final String NO = "General.no"; //$NON-NLS-1$
public static final String CANCEL = "General.cancel"; //$NON-NLS-1$
public static final String SAVE = "General.save"; //$NON-NLS-1$
public static final String OK = "General.ok"; //$NON-NLS-1$
public static final String MENU = "General.menu"; //$NON-NLS-1$
public static final String LOAD = "General.load"; //$NON-NLS-1$
public static final String QUIT = "General.quit"; //$NON-NLS-1$
public static final String EDIT = "General.edit"; //$NON-NLS-1$
public static final String NEW = "General.new"; //$NON-NLS-1$
public static final String FILE = "General.file"; //$NON-NLS-1$
public static final String TOOLS = "General.tools"; //$NON-NLS-1$
public static final String HELP = "General.help"; //$NON-NLS-1$
public static final String CLOSE = "General.close"; //$NON-NLS-1$
public static final String DATE_DISPLAY = "General.date_display"; //$NON-NLS-1$
public static final String NEXT = "General.next"; //$NON-NLS-1$
public static final String REFRESH = "General.refresh"; //$NON-NLS-1$
public static final String SELECT = "General.select"; //$NON-NLS-1$
/*
   * All i18n keys for the Module Editor must commence with "Editor.". This allows us to use a single
* Resources.getString() call for both resource bundles.
*/
public static final String EDITOR_PREFIX = "Editor."; //$NON-NLS-1$
/*
* Common Editor labels that appear in many components.
*/
public static final String BUTTON_TEXT = "Editor.button_text_label"; //$NON-NLS-1$
public static final String TOOLTIP_TEXT = "Editor.tooltip_text_label"; //$NON-NLS-1$
public static final String BUTTON_ICON = "Editor.button_icon_label"; //$NON-NLS-1$
public static final String HOTKEY_LABEL = "Editor.hotkey_label"; //$NON-NLS-1$
public static final String COLOR_LABEL = "Editor.color_label"; //$NON-NLS-1$
public static final String NAME_LABEL = "Editor.name_label"; //$NON-NLS-1$
/**
* Localize a user interface String.
*
* @param id
* String Id
* @return Localized result
*/
public static String getString(String id) {
return getBundleForKey(id).getString(id);
}
protected static BundleHelper getBundleForKey(String id) {
return id.startsWith(EDITOR_PREFIX) ? getEditorBundle() : getVassalBundle();
}
protected static BundleHelper getEditorBundle() {
if (editorBundle == null) {
editorBundle = new BundleHelper(ResourceBundle.getBundle("VASSAL.i18n.Editor", locale, bundleLoader));
}
return editorBundle;
}
protected static BundleHelper getVassalBundle() {
if (vassalBundle == null) {
vassalBundle = new BundleHelper(ResourceBundle.getBundle("VASSAL.i18n.VASSAL", locale, bundleLoader));
}
return vassalBundle;
}
/**
* Localize a VASSAL user interface string
*
* @param id
* String id
* @return Localized result
*/
@Deprecated
public static String getVassalString(String id) {
return getVassalBundle().getString(id);
}
/**
* Localize a VASSAL Module Editor String
*
* @param id
* String Id
* @return Localized Result
*/
@Deprecated
public static String getEditorString(String id) {
return getEditorBundle().getString(id);
}
/**
* Localize a string using the supplied resource bundle
*
* @param bundle
* Resource bundle
* @param id
* String Id
* @return Localized result
*/
@Deprecated
public static String getString(ResourceBundle bundle, String id) {
String s = null;
try {
s = bundle.getString(id);
}
catch (Exception ex) {
System.err.println("No Translation: " + id);
}
// 2. Worst case, return the key
if (s == null) {
s = id;
}
return s;
}
public static String getString(String id, Object... params) {
return getBundleForKey(id).getString(id, params);
}
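  /*
   * Usage sketch (illustrative): keys starting with "Editor." are resolved
   * against the Editor bundle, all other keys against the VASSAL bundle.
   *
   *   String save = Resources.getString(Resources.SAVE);
   *   String buttonText = Resources.getString(Resources.BUTTON_TEXT); // Editor bundle
   */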
/**
* Custom Class Loader for loading VASSAL property files. Check first for files in the VASSAL home directory
*
* @author Brent Easton
*
*/
public static class VassalPropertyClassLoader extends ClassLoader {
public URL getResource(String name) {
URL url = null;
String propFileName = name.substring(name.lastIndexOf('/') + 1);
File propFile = new File(Info.getHomeDir(), propFileName);
if (propFile.exists()) {
try {
url = propFile.toURI().toURL();
}
catch (MalformedURLException e) {
}
}
/*
* No openable file in home dir, so let Java find one for us in the standard classpath.
*/
if (url == null) {
url = this.getClass().getClassLoader().getResource(name);
}
return url;
}
}
public static void setLocale(Locale l) {
locale = l;
editorBundle = null;
vassalBundle = null;
UIManager.put("OptionPane.yesButtonText", getString(YES)); //$NON-NLS-1$
UIManager.put("OptionPane.cancelButtonText", getString(CANCEL)); //$NON-NLS-1$
UIManager.put("OptionPane.noButtonText", getString(NO)); //$NON-NLS-1$
UIManager.put("OptionPane.okButtonText", getString(OK)); //$NON-NLS-1$
}
public static Locale getLocale() {
return locale;
}
}
|
package org.jasig.portal.layout.dlm;
import java.io.StringWriter;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
import java.util.Vector;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portal.AuthorizationException;
import org.jasig.portal.EntityIdentifier;
import org.jasig.portal.security.IAuthorizationPrincipal;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.services.AuthorizationService;
import org.jasig.portal.utils.DocumentFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/** Performs merging of layout fragments into a single document containing
* all incorporated layout fragment elements from the set of fragments
* passed in. This merge is trivial, appending all children of each
* fragment into the composite document and recording their identifiers
* in the document identifier cache. No changes are made to the source
* fragments passed in.
*
* @version $Revision$ $Date$
* @since uPortal 2.5
*/
public class ILFBuilder
{
public static final String RCS_ID = "@(#) $Header$";
private static final Log LOG = LogFactory.getLog(ILFBuilder.class);
public static Document constructILF( Document PLF, Vector sequence, IPerson person)
throws javax.xml.parsers.ParserConfigurationException, AuthorizationException
{
if (LOG.isDebugEnabled()) {
LOG.debug("Constructing ILF for IPerson='" + person + "'");
}
// first construct the destination document and root element. The root
// element should be a complete copy of the PLF's root including its
// node identifier in the new document. This requires the use of
// the implementation class to set the identifier for that node
// in the document.
Document result = DocumentFactory.getNewDocument();
Element plfLayout = PLF.getDocumentElement();
Element ilfLayout = (Element) result.importNode( plfLayout, false );
result.appendChild( ilfLayout );
Element plfRoot = (Element) plfLayout.getFirstChild();
Element ilfRoot = (Element) result.importNode( plfRoot, false);
ilfLayout.appendChild(ilfRoot);
if (ilfRoot.getAttribute(Constants.ATT_ID) != null)
ilfRoot.setIdAttribute(Constants.ATT_ID, true);
// build the auth principal for determining if pushed channels can be
// used by this user
EntityIdentifier ei = person.getEntityIdentifier();
AuthorizationService authS = AuthorizationService.instance();
IAuthorizationPrincipal ap = authS.newPrincipal(ei.getKey(),
ei.getType());
// now merge fragments one at a time into ILF document
Enumeration fragments = sequence.elements();
while( fragments.hasMoreElements() )
mergeFragment( (Document) fragments.nextElement(), result, ap );
return result;
}
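    /*
     * Usage sketch (illustrative; in practice the fragment sequence is
     * assembled by the distributed layout manager before this call):
     *
     *   Vector fragments = new Vector();
     *   fragments.add(fragmentDocument);
     *   Document ilf = ILFBuilder.constructILF(plfDocument, fragments, person);
     */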
/**
* Passes the layout root of each of these documents to mergeChildren
   * causing all children of the fragment document to be merged into the
   * composite document, following the merging protocol for distributed layout
   * management.
* @throws AuthorizationException
**/
public static void mergeFragment( Document fragment,
Document composite,
IAuthorizationPrincipal ap )
throws AuthorizationException
{
Element fragmentLayout = fragment.getDocumentElement();
Element fragmentRoot = (Element) fragmentLayout.getFirstChild();
Element compositeLayout = composite.getDocumentElement();
Element compositeRoot = (Element) compositeLayout.getFirstChild();
mergeChildren( fragmentRoot, compositeRoot, ap, new HashSet() );
}
/**
* @param source parent of children
* @param dest receiver of children
* @param ap User's authorization principal for determining if they can view a channel
* @param visitedNodes A Set of nodes from the source tree that have been visited to get to this node, used to ensure a loop doesn't exist in the source tree.
* @throws AuthorizationException
*/
private static void mergeChildren( Element source,
Element dest,
IAuthorizationPrincipal ap,
Set visitedNodes )
throws AuthorizationException
{
//Check for a loop in the DOM, this should never happen but it is good to protect against
if (visitedNodes.contains(source)) {
final String msg = "mergeChildren has encountered a loop in the source DOM. currentNode='" + source + "', currentDepth='" + visitedNodes.size() + "', visitedNodes='" + visitedNodes + "'";
final IllegalStateException ise = new IllegalStateException(msg);
LOG.error(msg, ise);
printNodeToDebug(source, "Source");
printNodeToDebug(dest, "Dest");
throw ise;
}
visitedNodes.add(source);
try {
Document destDoc = dest.getOwnerDocument();
Node item = source.getFirstChild();
while (item != null) {
if (item instanceof Element) {
Element child = (Element) item;
Element newChild = null;
if( null != child && mergeAllowed( child, ap ))
{
newChild = (Element) destDoc.importNode( child, false );
dest.appendChild( newChild );
String id = newChild.getAttribute(Constants.ATT_ID);
if (id != null && ! id.equals(""))
newChild.setIdAttribute(Constants.ATT_ID, true);
mergeChildren( child, newChild, ap, visitedNodes );
}
}
item = item.getNextSibling();
}
}
finally {
visitedNodes.remove(source);
}
}
/**
     * Tests whether a channel to be merged into the ILF can be rendered by the
     * end user. If not, it is discarded from the merge.
     *
     * @param child the element being considered for the merge
     * @param ap the user's authorization principal
     * @return true if the element may be merged into the composite layout
* @throws AuthorizationException
* @throws NumberFormatException
*/
private static boolean mergeAllowed( Element child,
IAuthorizationPrincipal ap )
throws AuthorizationException
{
if (! child.getTagName().equals("channel"))
return true;
String channelPublishId = child.getAttribute("chanID");
return ap.canRender(Integer.parseInt(channelPublishId));
}
private static void printNodeToDebug(Node n, String name) throws TransformerFactoryConfigurationError {
if (!LOG.isDebugEnabled()) {
return;
}
final StringWriter writer = new StringWriter();
try {
final TransformerFactory transFactory = TransformerFactory.newInstance();
final Transformer trans = transFactory.newTransformer();
final Source xmlSource = new DOMSource(n);
final Result transResult = new StreamResult(writer);
trans.transform(xmlSource, transResult);
final String xmlStr = writer.toString();
LOG.debug(name + " DOM Tree:\n\n" + xmlStr);
}
catch (Exception e) {
LOG.error("Error printing out " + name + " DOM Tree", e);
final String xmlStr = writer.toString();
LOG.debug("Partial " + name + " DOM Tree:\n\n" + xmlStr);
}
}
}
|
package com.illposed.osc;
import com.illposed.osc.utility.AddressSelector;
import com.illposed.osc.utility.OSCByteArrayToJavaConverter;
import com.illposed.osc.utility.OSCPacketDispatcher;
import com.illposed.osc.utility.OSCPatternAddressSelector;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import java.nio.charset.Charset;
/**
* OSCPortIn is the class that listens for OSC messages.
*
* An example based on
* {@link com.illposed.osc.OSCPortTest#testReceiving()}:
* <pre>
receiver = new OSCPortIn(OSCPort.DEFAULT_SC_OSC_PORT());
OSCListener listener = new OSCListener() {
public void acceptMessage(java.util.Date time, OSCMessage message) {
System.out.println("Message received!");
}
};
receiver.addListener("/message/receiving", listener);
receiver.startListening();
* </pre>
*
* Then, using a program such as SuperCollider or sendOSC, send a message
* to this computer, port {@link #DEFAULT_SC_OSC_PORT},
* with the address "/message/receiving".
*
* @author Chandrasekhar Ramakrishnan
*/
public class OSCPortIn extends OSCPort implements Runnable {
// state for listening
private boolean listening;
private final OSCByteArrayToJavaConverter converter;
private final OSCPacketDispatcher dispatcher;
/**
* Create an OSCPort that listens on the specified port.
	 * Strings will be decoded using the system's default character set.
* @param port UDP port to listen on.
* @throws SocketException if the port number is invalid,
* or there is already a socket listening on it
*/
public OSCPortIn(int port) throws SocketException {
super(new DatagramSocket(port), port);
this.converter = new OSCByteArrayToJavaConverter();
this.dispatcher = new OSCPacketDispatcher();
}
/**
* Create an OSCPort that listens on the specified port,
* and decodes strings with a specific character set.
* @param port UDP port to listen on.
	 * @param charset how to decode strings read from incoming packets.
* This includes message addresses and string parameters.
* @throws SocketException if the port number is invalid,
* or there is already a socket listening on it
*/
public OSCPortIn(int port, Charset charset) throws SocketException {
this(port);
this.converter.setCharset(charset);
}
/**
* Buffers were 1500 bytes in size, but were
* increased to 1536, as this is a common MTU.
*/
private static final int BUFFER_SIZE = 1536;
/**
* Run the loop that listens for OSC on a socket until
* {@link #isListening()} becomes false.
* @see java.lang.Runnable#run()
*/
public void run() {
byte[] buffer = new byte[BUFFER_SIZE];
DatagramPacket packet = new DatagramPacket(buffer, BUFFER_SIZE);
DatagramSocket socket = getSocket();
while (listening) {
try {
try {
socket.receive(packet);
} catch (SocketException ex) {
if (listening) {
throw ex;
} else {
// if we closed the socket while receiving data,
// the exception is expected/normal, so we hide it
continue;
}
}
OSCPacket oscPacket = converter.convert(buffer,
packet.getLength());
dispatcher.dispatchPacket(oscPacket);
} catch (IOException e) {
e.printStackTrace(); // XXX This may not be a good idea, as this could easily lead to a never ending series of exceptions thrown (due to the non-exited while loop), and because the user of the lib may want to handle this case himself
}
}
}
/**
* Start listening for incoming OSCPackets
*/
public void startListening() {
listening = true;
Thread thread = new Thread(this);
thread.start();
}
/**
* Stop listening for incoming OSCPackets
*/
public void stopListening() {
listening = false;
}
/**
* Am I listening for packets?
*/
public boolean isListening() {
return listening;
}
* like "/??/mixer/*", see {@link OSCPatternAddressSelector} for details
* @param listener will be notified of incoming packets, if they match
*/
public void addListener(String addressSelector, OSCListener listener) {
this.addListener(new OSCPatternAddressSelector(addressSelector), listener);
}
/**
* Registers a listener that will be notified of incoming messages,
* if their address matches the given selector.
* @param addressSelector a custom address selector
* @param listener will be notified of incoming packets, if they match
*/
public void addListener(AddressSelector addressSelector, OSCListener listener) {
dispatcher.addListener(addressSelector, listener);
}
}
|
package edu.ucdenver.ccp.common.properties;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import org.apache.commons.io.IOUtils;
/**
* Utility class for dealing with Java Properties objects
*
* @author bill
*
*/
public class PropertiesUtil {
/**
* Loads a properties file from the specified path
*
	 * @param propertiesFileName the path of the properties file to load
	 * @return the loaded Properties
*/
public static Properties loadProperties(String propertiesFileName) {
return loadProperties(new File(propertiesFileName));
}
/**
	 * Loads a Properties object from a File
	 *
	 * @param file the properties file to load
	 * @return the loaded Properties
*/
public static Properties loadProperties(File file) {
Properties properties = new Properties();
InputStream fis = null;
try {
fis = new FileInputStream(file);
properties.load(fis);
} catch (IOException ex) {
String path = file == null ? null : file.getAbsolutePath();
String message = "Problem loading properties file: " + path;
throw new RuntimeException(message, ex);
} finally {
IOUtils.closeQuietly(fis);
}
return properties;
}
/**
* Returns true if there is a property with the name specified in propertyName
*
* @param properties
* @param propertyName
* @return
*/
public static boolean hasProperty(Properties properties, String propertyName) {
return (properties.getProperty(propertyName) != null);
}
public static String getPropertyValue(Properties properties, String propertyName) throws IllegalArgumentException {
String propertyValue = properties.getProperty(propertyName);
if (propertyValue == null) {
throw new IllegalArgumentException(String.format(
"Expected to find property named \"%s\", but none could be found.", propertyName));
}
return propertyValue;
}
/**
* Converts the input Properties into a <code>Map<String, String></code>
*
* @param properties
* @return
*/
public static Map<String, String> getPropertiesMap(Properties properties) {
Map<String, String> propertiesMap = new HashMap<String, String>();
for (Entry<Object, Object> entry : properties.entrySet())
propertiesMap.put(entry.getKey().toString(), entry.getValue().toString());
return propertiesMap;
}
/**
* Loads a <code>Properties</code> object from the <code>InputStream</code> and returns a
	 * <code>Map<String, String></code> containing the property keys and values
*
* @param inputStream
* @return
* @throws IOException
*/
public static Map<String, String> getPropertiesMap(InputStream inputStream) throws IOException {
try {
Properties properties = new Properties();
properties.load(inputStream);
return getPropertiesMap(properties);
} finally {
inputStream.close();
}
}
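	/*
	 * Usage sketch (the file name and property key are placeholders):
	 *
	 *   Properties props = PropertiesUtil.loadProperties("config.properties");
	 *   if (PropertiesUtil.hasProperty(props, "db.url")) {
	 *       String url = PropertiesUtil.getPropertyValue(props, "db.url");
	 *   }
	 *   Map<String, String> asMap = PropertiesUtil.getPropertiesMap(props);
	 */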
}
|
package io.bitsquare.gui.main.overlays;
import io.bitsquare.common.Timer;
import io.bitsquare.common.UserThread;
import io.bitsquare.common.util.Utilities;
import io.bitsquare.gui.main.MainView;
import io.bitsquare.gui.util.Transitions;
import io.bitsquare.locale.BSResources;
import io.bitsquare.user.Preferences;
import javafx.animation.Interpolator;
import javafx.animation.KeyFrame;
import javafx.animation.KeyValue;
import javafx.animation.Timeline;
import javafx.beans.value.ChangeListener;
import javafx.collections.ObservableList;
import javafx.geometry.HPos;
import javafx.geometry.Insets;
import javafx.geometry.Orientation;
import javafx.scene.PerspectiveCamera;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.input.KeyCode;
import javafx.scene.layout.*;
import javafx.scene.paint.Color;
import javafx.scene.transform.Rotate;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import javafx.stage.Window;
import javafx.util.Duration;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static io.bitsquare.gui.util.FormBuilder.addCheckBox;
public abstract class Overlay<T extends Overlay> {
protected final Logger log = LoggerFactory.getLogger(this.getClass());
// Enum
private enum AnimationType {
FadeInAtCenter,
SlideDownFromCenterTop,
SlideFromRightTop,
ScaleDownToCenter,
ScaleFromCenter,
ScaleYFromCenter
}
private enum ChangeBackgroundType {
BlurLight,
BlurUltraLight,
Darken
}
protected enum Type {
Undefined(AnimationType.ScaleFromCenter, ChangeBackgroundType.BlurLight),
Notification(AnimationType.SlideFromRightTop, ChangeBackgroundType.BlurLight),
BackgroundInfo(AnimationType.SlideDownFromCenterTop, ChangeBackgroundType.BlurUltraLight),
Feedback(AnimationType.SlideDownFromCenterTop, ChangeBackgroundType.Darken),
Information(AnimationType.FadeInAtCenter, ChangeBackgroundType.BlurLight),
Instruction(AnimationType.ScaleFromCenter, ChangeBackgroundType.BlurLight),
Attention(AnimationType.ScaleFromCenter, ChangeBackgroundType.BlurLight),
Confirmation(AnimationType.ScaleYFromCenter, ChangeBackgroundType.BlurLight),
Warning(AnimationType.ScaleDownToCenter, ChangeBackgroundType.BlurLight),
Error(AnimationType.ScaleDownToCenter, ChangeBackgroundType.BlurLight);
public AnimationType animationType;
public ChangeBackgroundType changeBackgroundType;
Type(AnimationType animationType, ChangeBackgroundType changeBackgroundType) {
this.animationType = animationType;
this.changeBackgroundType = changeBackgroundType;
}
}
protected final static double DEFAULT_WIDTH = 600;
protected int rowIndex = -1;
protected String headLine;
protected String message;
protected String closeButtonText;
protected String actionButtonText;
protected double width = DEFAULT_WIDTH;
protected Pane owner;
protected GridPane gridPane;
protected Button closeButton;
protected Optional<Runnable> closeHandlerOptional = Optional.empty();
protected Optional<Runnable> actionHandlerOptional = Optional.empty();
protected Stage stage;
private boolean showReportErrorButtons;
protected Label messageLabel;
protected String truncatedMessage;
private ProgressIndicator progressIndicator;
private boolean showProgressIndicator;
private Button actionButton;
protected Label headLineLabel;
protected String dontShowAgainId;
protected String dontShowAgainText;
private Preferences preferences;
protected ChangeListener<Number> positionListener;
protected Timer centerTime;
protected double buttonDistance = 20;
protected Type type = Type.Undefined;
protected boolean hideCloseButton;
protected boolean useAnimation = true;
// Public API
public Overlay() {
}
public void show() {
if (dontShowAgainId == null || preferences == null || preferences.showAgain(dontShowAgainId)) {
createGridPane();
addHeadLine();
addSeparator();
if (showProgressIndicator)
addProgressIndicator();
addMessage();
if (showReportErrorButtons)
addReportErrorButtons();
addCloseButton();
addDontShowAgainCheckBox();
applyStyles();
onShow();
}
}
protected void onShow() {
}
public void hide() {
animateHide(() -> {
removeEffectFromBackground();
if (stage != null)
stage.hide();
else
log.warn("Stage is null");
cleanup();
onHidden();
});
}
protected void onHidden() {
}
protected void cleanup() {
if (centerTime != null)
centerTime.stop();
if (owner == null)
owner = MainView.getRootContainer();
Scene rootScene = owner.getScene();
if (rootScene != null) {
Window window = rootScene.getWindow();
if (window != null && positionListener != null) {
window.xProperty().removeListener(positionListener);
window.yProperty().removeListener(positionListener);
window.widthProperty().removeListener(positionListener);
}
}
}
public T onClose(Runnable closeHandler) {
this.closeHandlerOptional = Optional.of(closeHandler);
return (T) this;
}
public T onAction(Runnable actionHandler) {
this.actionHandlerOptional = Optional.of(actionHandler);
return (T) this;
}
public T headLine(String headLine) {
this.headLine = headLine;
return (T) this;
}
public T notification(String message) {
type = Type.Notification;
if (headLine == null)
this.headLine = "Notification";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T instruction(String message) {
type = Type.Instruction;
if (headLine == null)
this.headLine = "Please note:";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T attention(String message) {
type = Type.Attention;
if (headLine == null)
this.headLine = "Attention";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T backgroundInfo(String message) {
type = Type.BackgroundInfo;
if (headLine == null)
this.headLine = "Background information";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T feedback(String message) {
type = Type.Feedback;
if (headLine == null)
this.headLine = "Completed";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T confirmation(String message) {
type = Type.Confirmation;
if (headLine == null)
this.headLine = "Confirmation";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T information(String message) {
type = Type.Information;
if (headLine == null)
this.headLine = "Information";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T warning(String message) {
type = Type.Warning;
if (headLine == null)
this.headLine = "Warning";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T error(String message) {
type = Type.Error;
showReportErrorButtons();
if (headLine == null)
this.headLine = "Error";
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T showReportErrorButtons() {
this.showReportErrorButtons = true;
return (T) this;
}
public T message(String message) {
this.message = message;
setTruncatedMessage();
return (T) this;
}
public T closeButtonText(String closeButtonText) {
this.closeButtonText = closeButtonText;
return (T) this;
}
public T actionButtonText(String actionButtonText) {
this.actionButtonText = actionButtonText;
return (T) this;
}
public T width(double width) {
this.width = width;
return (T) this;
}
public T showProgressIndicator() {
this.showProgressIndicator = true;
return (T) this;
}
public T dontShowAgainId(String key, Preferences preferences) {
this.dontShowAgainId = key;
this.preferences = preferences;
return (T) this;
}
public T dontShowAgainText(String dontShowAgainText) {
this.dontShowAgainText = dontShowAgainText;
return (T) this;
}
public T hideCloseButton() {
this.hideCloseButton = true;
return (T) this;
}
public T useAnimation(boolean useAnimation) {
this.useAnimation = useAnimation;
return (T) this;
}
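    /*
     * Usage sketch (illustrative; "Popup" stands for any concrete Overlay
     * subclass, and the handler body is a placeholder):
     *
     *   new Popup<>().warning("Something went wrong")
     *           .closeButtonText("Close")
     *           .onClose(() -> log.debug("closed"))
     *           .show();
     */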
// Protected
protected void createGridPane() {
gridPane = new GridPane();
gridPane.setHgap(5);
gridPane.setVgap(5);
gridPane.setPadding(new Insets(30, 30, 30, 30));
gridPane.setPrefWidth(width);
ColumnConstraints columnConstraints1 = new ColumnConstraints();
columnConstraints1.setHalignment(HPos.RIGHT);
columnConstraints1.setHgrow(Priority.SOMETIMES);
ColumnConstraints columnConstraints2 = new ColumnConstraints();
columnConstraints2.setHgrow(Priority.ALWAYS);
gridPane.getColumnConstraints().addAll(columnConstraints1, columnConstraints2);
}
protected void blurAgain() {
UserThread.runAfter(MainView::blurLight, Transitions.DEFAULT_DURATION, TimeUnit.MILLISECONDS);
}
public void display() {
if (owner == null)
owner = MainView.getRootContainer();
Scene rootScene = owner.getScene();
if (rootScene != null) {
Scene scene = new Scene(gridPane);
scene.getStylesheets().setAll(rootScene.getStylesheets());
scene.setFill(Color.TRANSPARENT);
scene.setOnKeyPressed(e -> {
if (e.getCode() == KeyCode.ESCAPE || e.getCode() == KeyCode.ENTER) {
e.consume();
doClose();
}
});
stage = new Stage();
stage.setScene(scene);
Window window = rootScene.getWindow();
setModality();
stage.initStyle(StageStyle.TRANSPARENT);
stage.show();
layout();
addEffectToBackground();
// On Linux the owner stage does not move the child stage as it does on Mac
            // So we need to apply centerPopup. Further, with fast movements the handler loses
            // the latest position; re-applying it after a short delay fixes that.
            // Also on Mac the popups are sometimes positioned outside of the main app, so keep it for all OS
positionListener = (observable, oldValue, newValue) -> {
if (stage != null) {
layout();
if (centerTime != null)
centerTime.stop();
centerTime = UserThread.runAfter(this::layout, 3);
}
};
window.xProperty().addListener(positionListener);
window.yProperty().addListener(positionListener);
window.widthProperty().addListener(positionListener);
animateDisplay();
}
}
protected void animateDisplay() {
gridPane.setOpacity(0);
Interpolator interpolator = Interpolator.SPLINE(0.25, 0.1, 0.25, 1);
double duration = getDuration(400);
Timeline timeline = new Timeline();
ObservableList<KeyFrame> keyFrames = timeline.getKeyFrames();
if (type.animationType == AnimationType.SlideDownFromCenterTop) {
double startY = -gridPane.getHeight();
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 0, interpolator),
new KeyValue(gridPane.translateYProperty(), startY, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 1, interpolator),
new KeyValue(gridPane.translateYProperty(), -10, interpolator)
));
} else if (type.animationType == AnimationType.ScaleFromCenter) {
double startScale = 0.25;
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 0, interpolator),
new KeyValue(gridPane.scaleXProperty(), startScale, interpolator),
new KeyValue(gridPane.scaleYProperty(), startScale, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 1, interpolator),
new KeyValue(gridPane.scaleXProperty(), 1, interpolator),
new KeyValue(gridPane.scaleYProperty(), 1, interpolator)
));
} else if (type.animationType == AnimationType.ScaleYFromCenter) {
double startYScale = 0.25;
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 0, interpolator),
new KeyValue(gridPane.scaleYProperty(), startYScale, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 1, interpolator),
new KeyValue(gridPane.scaleYProperty(), 1, interpolator)
));
} else if (type.animationType == AnimationType.ScaleDownToCenter) {
double startScale = 1.1;
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 0, interpolator),
new KeyValue(gridPane.scaleXProperty(), startScale, interpolator),
new KeyValue(gridPane.scaleYProperty(), startScale, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 1, interpolator),
new KeyValue(gridPane.scaleXProperty(), 1, interpolator),
new KeyValue(gridPane.scaleYProperty(), 1, interpolator)
));
} else if (type.animationType == AnimationType.FadeInAtCenter) {
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 0, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 1, interpolator)
));
}
timeline.play();
}
protected void animateHide(Runnable onFinishedHandler) {
Interpolator interpolator = Interpolator.SPLINE(0.25, 0.1, 0.25, 1);
double duration = getDuration(200);
Timeline timeline = new Timeline();
ObservableList<KeyFrame> keyFrames = timeline.getKeyFrames();
if (type.animationType == AnimationType.SlideDownFromCenterTop) {
double endY = -gridPane.getHeight();
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 1, interpolator),
new KeyValue(gridPane.translateYProperty(), -10, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 0, interpolator),
new KeyValue(gridPane.translateYProperty(), endY, interpolator)
));
} else if (type.animationType == AnimationType.ScaleFromCenter) {
double endScale = 0.25;
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 1, interpolator),
new KeyValue(gridPane.scaleXProperty(), 1, interpolator),
new KeyValue(gridPane.scaleYProperty(), 1, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 0, interpolator),
new KeyValue(gridPane.scaleXProperty(), endScale, interpolator),
new KeyValue(gridPane.scaleYProperty(), endScale, interpolator)
));
} else if (type.animationType == AnimationType.ScaleYFromCenter) {
gridPane.setRotationAxis(Rotate.X_AXIS);
gridPane.getScene().setCamera(new PerspectiveCamera());
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.rotateProperty(), 0, interpolator),
new KeyValue(gridPane.opacityProperty(), 1, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.rotateProperty(), -90, interpolator),
new KeyValue(gridPane.opacityProperty(), 0, interpolator)
));
} else if (type.animationType == AnimationType.ScaleDownToCenter) {
double endScale = 0.1;
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 1, interpolator),
new KeyValue(gridPane.scaleXProperty(), 1, interpolator),
new KeyValue(gridPane.scaleYProperty(), 1, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 0, interpolator),
new KeyValue(gridPane.scaleXProperty(), endScale, interpolator),
new KeyValue(gridPane.scaleYProperty(), endScale, interpolator)
));
} else if (type.animationType == AnimationType.FadeInAtCenter) {
keyFrames.add(new KeyFrame(Duration.millis(0),
new KeyValue(gridPane.opacityProperty(), 1, interpolator)
));
keyFrames.add(new KeyFrame(Duration.millis(duration),
new KeyValue(gridPane.opacityProperty(), 0, interpolator)
));
}
timeline.setOnFinished(e -> onFinishedHandler.run());
timeline.play();
}
protected void layout() {
if (owner == null)
owner = MainView.getRootContainer();
Scene rootScene = owner.getScene();
if (rootScene != null) {
Window window = rootScene.getWindow();
double titleBarHeight = window.getHeight() - rootScene.getHeight();
stage.setX(Math.round(window.getX() + (owner.getWidth() - stage.getWidth()) / 2));
if (type.animationType == AnimationType.SlideDownFromCenterTop)
stage.setY(Math.round(window.getY() + titleBarHeight));
else
stage.setY(Math.round(window.getY() + titleBarHeight + (owner.getHeight() - stage.getHeight()) / 2));
}
}
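    // Worked example for the centering above (illustrative numbers, assuming the owner
    // container spans the window's content area): with window.getX() = 100, an owner width
    // of 800 and a stage width of 400, the popup is placed at x = 100 + (800 - 400) / 2 = 300.
    // titleBarHeight (window height minus scene height) shifts the y-coordinate below the
    // native title bar.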
protected void addEffectToBackground() {
if (type.changeBackgroundType == ChangeBackgroundType.BlurUltraLight)
MainView.blurUltraLight();
else if (type.changeBackgroundType == ChangeBackgroundType.BlurLight)
MainView.blurLight();
else
MainView.darken();
}
protected void applyStyles() {
if (type.animationType == AnimationType.SlideDownFromCenterTop)
gridPane.setId("popup-bg-top");
else
gridPane.setId("popup-bg");
if (headLineLabel != null)
headLineLabel.setId("popup-headline");
}
protected void setModality() {
stage.initOwner(owner.getScene().getWindow());
stage.initModality(Modality.WINDOW_MODAL);
}
protected void removeEffectFromBackground() {
MainView.removeEffect();
}
protected void addHeadLine() {
if (headLine != null) {
++rowIndex;
/* Label icon = AwesomeDude.createIconLabel(awesomeIcon, "40.0");
icon.getStyleClass().add("popup-icon-" + type);
GridPane.setHalignment(icon, HPos.RIGHT);
GridPane.setRowIndex(icon, ++rowIndex);
GridPane.setColumnIndex(icon, 1);
GridPane.setMargin(icon, new Insets(0, 0, -10, 0));
gridPane.getChildren().add(icon);*/
headLineLabel = new Label(BSResources.get(headLine));
headLineLabel.setMouseTransparent(true);
GridPane.setHalignment(headLineLabel, HPos.LEFT);
GridPane.setRowIndex(headLineLabel, rowIndex);
GridPane.setColumnSpan(headLineLabel, 2);
gridPane.getChildren().addAll(headLineLabel);
}
}
protected void addSeparator() {
if (headLine != null) {
Separator separator = new Separator();
separator.setMouseTransparent(true);
separator.setOrientation(Orientation.HORIZONTAL);
separator.setStyle("-fx-background: #ccc;");
GridPane.setHalignment(separator, HPos.CENTER);
GridPane.setRowIndex(separator, ++rowIndex);
GridPane.setColumnSpan(separator, 2);
gridPane.getChildren().add(separator);
}
}
protected void addMessage() {
if (message != null) {
messageLabel = new Label(truncatedMessage);
messageLabel.setMouseTransparent(true);
messageLabel.setWrapText(true);
GridPane.setHalignment(messageLabel, HPos.LEFT);
GridPane.setHgrow(messageLabel, Priority.ALWAYS);
GridPane.setMargin(messageLabel, new Insets(3, 0, 0, 0));
GridPane.setRowIndex(messageLabel, ++rowIndex);
GridPane.setColumnIndex(messageLabel, 0);
GridPane.setColumnSpan(messageLabel, 2);
gridPane.getChildren().add(messageLabel);
}
}
private void addReportErrorButtons() {
messageLabel.setText(truncatedMessage
+ "\n\nTo help us to improve the software please report the bug at our issue tracker at Github or send it by email to the developers.\n" +
"The error message will be copied to clipboard when you click the below buttons.\n" +
"It will make debugging easier if you can attach the bitsquare.log file which you can find in the application directory.");
Button githubButton = new Button("Report to Github issue tracker");
GridPane.setMargin(githubButton, new Insets(20, 0, 0, 0));
GridPane.setHalignment(githubButton, HPos.RIGHT);
GridPane.setRowIndex(githubButton, ++rowIndex);
GridPane.setColumnIndex(githubButton, 1);
gridPane.getChildren().add(githubButton);
githubButton.setOnAction(event -> {
Utilities.copyToClipboard(message);
Utilities.openWebPage("https://github.com/bitsquare/bitsquare/issues");
});
Button mailButton = new Button("Report by email");
GridPane.setHalignment(mailButton, HPos.RIGHT);
GridPane.setRowIndex(mailButton, ++rowIndex);
GridPane.setColumnIndex(mailButton, 1);
gridPane.getChildren().add(mailButton);
mailButton.setOnAction(event -> {
Utilities.copyToClipboard(message);
Utilities.openMail("manfred@bitsquare.io",
"Error report",
"Error message:\n" + message);
});
}
protected void addProgressIndicator() {
progressIndicator = new ProgressIndicator(-1);
progressIndicator.setMaxSize(36, 36);
progressIndicator.setMouseTransparent(true);
progressIndicator.setPadding(new Insets(0, 0, 20, 0));
GridPane.setHalignment(progressIndicator, HPos.CENTER);
GridPane.setRowIndex(progressIndicator, ++rowIndex);
GridPane.setColumnSpan(progressIndicator, 2);
gridPane.getChildren().add(progressIndicator);
}
protected void addDontShowAgainCheckBox() {
if (dontShowAgainId != null && preferences != null) {
if (dontShowAgainText == null)
dontShowAgainText = "Don't show again";
CheckBox dontShowAgainCheckBox = addCheckBox(gridPane, rowIndex, dontShowAgainText, buttonDistance - 1);
GridPane.setColumnIndex(dontShowAgainCheckBox, 0);
GridPane.setHalignment(dontShowAgainCheckBox, HPos.LEFT);
dontShowAgainCheckBox.setOnAction(e -> preferences.dontShowAgain(dontShowAgainId, dontShowAgainCheckBox.isSelected()));
}
}
protected void addCloseButton() {
closeButton = new Button(closeButtonText == null ? "Close" : closeButtonText);
closeButton.setOnAction(event -> doClose());
if (actionHandlerOptional.isPresent() || actionButtonText != null) {
actionButton = new Button(actionButtonText == null ? "Ok" : actionButtonText);
actionButton.setDefaultButton(true);
//TODO app wide focus
//actionButton.requestFocus();
actionButton.setOnAction(event -> {
hide();
actionHandlerOptional.ifPresent(Runnable::run);
});
Pane spacer = new Pane();
HBox hBox = new HBox();
hBox.setSpacing(10);
hBox.getChildren().addAll(spacer, closeButton, actionButton);
HBox.setHgrow(spacer, Priority.ALWAYS);
GridPane.setHalignment(hBox, HPos.RIGHT);
GridPane.setRowIndex(hBox, ++rowIndex);
GridPane.setColumnSpan(hBox, 2);
GridPane.setMargin(hBox, new Insets(buttonDistance, 0, 0, 0));
gridPane.getChildren().add(hBox);
} else if (!hideCloseButton) {
closeButton.setDefaultButton(true);
GridPane.setHalignment(closeButton, HPos.RIGHT);
if (!showReportErrorButtons)
GridPane.setMargin(closeButton, new Insets(buttonDistance, 0, 0, 0));
GridPane.setRowIndex(closeButton, ++rowIndex);
GridPane.setColumnIndex(closeButton, 1);
gridPane.getChildren().add(closeButton);
}
}
protected void doClose() {
hide();
closeHandlerOptional.ifPresent(Runnable::run);
}
protected void setTruncatedMessage() {
if (message != null && message.length() > 1500)
truncatedMessage = StringUtils.abbreviate(message, 1500);
else
truncatedMessage = message;
}
protected double getDuration(double duration) {
return useAnimation && Preferences.useAnimations() ? duration : 1;
}
@Override
public String toString() {
return "Popup{" +
"headLine='" + headLine + '\'' +
", message='" + message + '\'' +
'}';
}
}
|
package ch.ethz.geco.gecko.rest.api;
import ch.ethz.geco.gecko.ConfigManager;
import ch.ethz.geco.gecko.GECkO;
import ch.ethz.geco.gecko.rest.RequestBuilder;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
/**
* A wrapper for the GECO Web API.
*/
public class GecoAPI {
private static final String API_URL = "https://geco.ethz.ch/api/v1/";
private static final ObjectMapper objectMapper = new ObjectMapper();
/**
     * Tries to get the website user info of the user with the given Discord user ID via the API.
     *
     * @param discordID the Discord user ID we want to query
     * @return the user info of the website user linked to the given Discord ID, or null if an API or connection error occurred
     * @throws NoSuchElementException if there is no user linked to the given Discord ID
*/
@Nullable
public static UserInfo getUserInfoByDiscordID(long discordID) throws NoSuchElementException {
try {
HttpResponse response = new RequestBuilder(API_URL + "user/discord/" + discordID)
.addHeader("Authorization", "Token token=" + ConfigManager.getProperties().getProperty("geco_apiKey"))
.ignoreSSL().get();
StatusLine statusLine = response.getStatusLine();
switch (statusLine.getStatusCode()) {
case 200:
// Success
HttpEntity entity = response.getEntity();
StringWriter writer = new StringWriter();
IOUtils.copy(entity.getContent(), writer, StandardCharsets.UTF_8);
String json = writer.toString();
// Deserialize
return objectMapper.readValue(json, UserInfo.class);
case 404:
// User not found
throw new NoSuchElementException();
default:
// Other API errors
GECkO.logger.error("[GecoAPI] An API error occurred: " + statusLine.getStatusCode() + " " + statusLine.getReasonPhrase());
break;
}
} catch (IOException e) {
GECkO.logger.error("[GecoAPI] A Connection error occurred: ");
e.printStackTrace();
}
return null;
}
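    // Usage sketch (illustrative; the Discord ID below is a placeholder, not a real value):
    //
    //     try {
    //         GecoAPI.UserInfo info = GecoAPI.getUserInfoByDiscordID(123456789012345678L);
    //         if (info != null) {
    //             String name = info.getUsername(); // a null result means an API or connection error
    //         }
    //     } catch (NoSuchElementException e) {
    //         // no website account is linked to this Discord ID
    //     }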
/**
     * A nested class representing a user on the website.
*/
public static class UserInfo {
public static class Account {
@JsonProperty("type")
private String type;
@JsonProperty("id")
private String id;
public String getType() {
return type;
}
public String getID() {
return id;
}
}
@JsonProperty("id")
private int userID;
@JsonProperty("name")
private String username;
@JsonProperty("posts")
private int posts;
@JsonProperty("threads")
private int threads;
@JsonProperty("accounts")
private List<Account> accounts;
public int getUserID() {
return userID;
}
public String getUsername() {
return username;
}
public int getPosts() {
return posts;
}
public int getThreads() {
return threads;
}
public List<Account> getAccounts() {
return accounts;
}
/**
         * Returns an account by its type.
*
* @param type the type of the account to return
* @return the account
*/
public Account getAccountByType(String type) {
for (Account account : accounts) {
if (Objects.equals(account.type, type)) {
return account;
}
}
return null;
}
}
/**
     * Tries to get the LAN user information.
     *
     * @return a list of LAN users if successful, null otherwise
*/
@Nullable
public static List<LanUser> getLanUsers() {
try {
HttpResponse response = new RequestBuilder(API_URL + "lan/seats/").get();
StatusLine statusLine = response.getStatusLine();
switch (statusLine.getStatusCode()) {
case 200:
// Success
HttpEntity entity = response.getEntity();
StringWriter writer = new StringWriter();
IOUtils.copy(entity.getContent(), writer, StandardCharsets.UTF_8);
String json = writer.toString();
return objectMapper.readValue(json, objectMapper.getTypeFactory().constructCollectionType(List.class, LanUser.class));
default:
// Other API errors
GECkO.logger.error("[GecoAPI] An API error occurred: " + statusLine.getStatusCode() + " " + statusLine.getReasonPhrase());
break;
}
} catch (IOException e) {
GECkO.logger.error("[GecoAPI] A Connection error occurred: ");
e.printStackTrace();
}
return null;
}
/**
     * A nested class representing a LAN user on the website.
*/
public static class LanUser {
@JsonProperty("id")
private Integer seatID;
@JsonProperty("seatNumber")
private String seatName;
@JsonProperty("web_user_id")
private Integer webUserID;
@JsonProperty("lan_user_id")
private Integer lanUserID;
@JsonProperty("status")
private Integer status;
@JsonProperty("username")
private String userName;
/**
* Returns the seat ID of this lan user.
*
* @return the seat ID
*/
public Integer getSeatID() {
return seatID;
}
/**
* Returns the seat name of this lan user.
*
* @return the seat name
*/
public String getSeatName() {
return seatName;
}
/**
* Returns the web user ID of this lan user.
*
* @return the web user ID
*/
public Integer getWebUserID() {
return webUserID;
}
/**
* Returns the lan user ID.
*
* @return the lan user ID
*/
public Integer getLanUserID() {
return lanUserID;
}
/**
* Returns the payment status of this lan user.
*
* @return the payment status
*/
public Integer getStatus() {
return status;
}
/**
* Returns the user name of this lan user.
*
* @return the user name
*/
public String getUserName() {
return userName;
}
}
}
|
package VASSAL.launch;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import org.apache.commons.lang.SystemUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import VASSAL.Info;
import VASSAL.tools.ErrorDialog;
/**
* @author Joel Uckelman
* @since 3.1.0
*/
public class StartUp {
private static final Logger logger = LoggerFactory.getLogger(StartUp.class);
public void initSystemProperties() {
initHTTPProxyProperties();
initSystemSpecificProperties();
initUIProperties();
}
protected void initHTTPProxyProperties() {
final String httpProxyHost = "http.proxyHost"; //$NON-NLS-1$
final String proxyHost = "proxyHost"; //$NON-NLS-1$
if (System.getProperty(httpProxyHost) == null &&
System.getProperty(proxyHost) != null) {
System.setProperty(httpProxyHost, System.getProperty(proxyHost));
}
final String httpProxyPort = "http.proxyPort"; //$NON-NLS-1$
final String proxyPort = "proxyPort"; //$NON-NLS-1$
if (System.getProperty(httpProxyPort) == null &&
System.getProperty(proxyPort) != null) {
System.setProperty(httpProxyPort, System.getProperty(proxyPort));
}
}
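  // For example, launching the JVM with the legacy flags
  //     -DproxyHost=proxy.example.com -DproxyPort=8080
  // causes http.proxyHost/http.proxyPort to be set to the same values here, so the standard
  // Java HTTP stack picks up the proxy. (proxy.example.com and 8080 are placeholder values.)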
protected void initUIProperties() {
System.setProperty("swing.aatext", "true"); //$NON-NLS-1$ //$NON-NLS-2$
System.setProperty("swing.boldMetal", "false"); //$NON-NLS-1$ //$NON-NLS-2$
System.setProperty("awt.useSystemAAFontSettings", "on"); //$NON-NLS-1$ //$NON-NLS-2$
if (!SystemUtils.IS_OS_WINDOWS) {
// use native LookAndFeel
// NB: This must be after Mac-specific properties
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
}
catch (ClassNotFoundException e) {
ErrorDialog.bug(e);
}
catch (IllegalAccessException e) {
ErrorDialog.bug(e);
}
catch (InstantiationException e) {
ErrorDialog.bug(e);
}
catch (UnsupportedLookAndFeelException e) {
ErrorDialog.bug(e);
}
}
// Ensure consistent behavior in NOT consuming "mousePressed" events
// upon a JPopupMenu closing (added for Windows L&F, but others might
    // also be affected).
UIManager.put("PopupMenu.consumeEventOnClose", Boolean.FALSE);
}
protected void initSystemSpecificProperties() {}
public void startErrorLog() {
// begin the error log
logger.info("Starting"); //$NON-NLS-1$
logger.info("OS " + System.getProperty("os.name") + " " + System.getProperty("os.version")); //$NON-NLS-1$ //$NON-NLS-2$
logger.info("Java version " + System.getProperty("java.version")); //$NON-NLS-1$ //$NON-NLS-2$
logger.info("VASSAL version " + Info.getVersion()); //$NON-NLS-1$
}
}
|
package org.jpos.ee;
import org.hibernate.query.criteria.internal.OrderImpl;
import javax.persistence.NoResultException;
import javax.persistence.criteria.*;
import org.hibernate.query.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class DBManager<T> {
protected DB db;
private Class<T> clazz;
public DBManager(DB db, Class<T> clazz) {
this.db = db;
this.clazz = clazz;
}
/** Convenience method */
public T byId(Long id) {
return db.session().get(clazz, id);
}
public int getItemCount() {
CriteriaBuilder criteriaBuilder = db.session().getCriteriaBuilder();
CriteriaQuery<Long> query = criteriaBuilder.createQuery(Long.class);
Root<T> root = query.from(clazz);
Predicate[] predicates = buildFilters(root);
if (predicates != null)
query.where(predicates);
query.select(criteriaBuilder.count(root));
return db.session().createQuery(query).getSingleResult().intValue();
}
public List<T> getAll(int offset, int limit, Map<String,Boolean> orders) {
CriteriaBuilder criteriaBuilder = db.session().getCriteriaBuilder();
CriteriaQuery<T> query = criteriaBuilder.createQuery(clazz);
Root<T> root = query.from(clazz);
List<Order> orderList = new ArrayList<>();
//ORDERS
if (orders != null) {
for (Map.Entry<String, Boolean> entry : orders.entrySet()) {
OrderImpl order = new OrderImpl(root.get(entry.getKey()), entry.getValue());
orderList.add(order);
}
}
Predicate[] predicates = buildFilters(root);
if (predicates != null)
query.where(predicates);
query.select(root);
query.orderBy(orderList);
Query<T> queryImp = db.session().createQuery(query);
if (limit != -1) {
queryImp.setMaxResults(limit);
}
List<T> list = queryImp
.setFirstResult(offset)
.getResultList();
return list;
}
public List<T> getAll() {
return this.getAll(0,-1,null);
}
public T getItemByParam(String param, Object value) {
return getItemByParam(param,value,false);
}
public T getItemByParam(String param, Object value, boolean withFilter) {
try {
CriteriaQuery<T> query = createQueryByParam(param, value, withFilter);
return db.session().createQuery(query).getSingleResult();
} catch (NoResultException nre) {
return null;
}
}
public List<T> getItemsByParam(String param, Object value) {
return getItemsByParam(param,value,false);
}
public List<T> getItemsByParam(String param, Object value, boolean withFilter) {
try {
CriteriaQuery<T> query = createQueryByParam(param, value, withFilter);
return db.session().createQuery(query).list();
} catch (NoResultException nre) {
return null;
}
}
public List<T> getItemsByParam(int offset, int limit, String param, Object value, boolean withFilter) {
try {
CriteriaQuery<T> query = createQueryByParam(param, value, withFilter);
Query<T> queryImp = db.session().createQuery(query);
if (limit != -1) {
queryImp.setMaxResults(limit);
}
List<T> list = queryImp
.setFirstResult(offset)
.getResultList();
return list;
} catch (NoResultException nre) {
return null;
}
}
private CriteriaQuery<T> createQueryByParam(String param, Object value, boolean withFilter) {
CriteriaBuilder criteriaBuilder = db.session().getCriteriaBuilder();
CriteriaQuery<T> query = criteriaBuilder.createQuery(clazz);
Root<T> root = query.from(clazz);
Predicate equals = criteriaBuilder.equal(root.get(param), value);
query.where(equals);
if (withFilter) {
Predicate[] predicates = buildFilters(root);
if (predicates != null) {
                // where() replaces the earlier equals-only restriction, so re-apply it combined with the filters
query.where(criteriaBuilder.and(criteriaBuilder.and(predicates), equals));
}
}
query.select(root);
return query;
}
protected Predicate[] buildFilters(Root<T> root) { return null; }
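    // Sketch of a typical subclass that restricts every query to non-deleted rows by
    // overriding buildFilters. "UserManager", "User" and the "deleted" property are
    // illustrative names, not part of this class:
    //
    //     public class UserManager extends DBManager<User> {
    //         public UserManager(DB db) { super(db, User.class); }
    //
    //         @Override
    //         protected Predicate[] buildFilters(Root<User> root) {
    //             CriteriaBuilder cb = db.session().getCriteriaBuilder();
    //             return new Predicate[] { cb.equal(root.get("deleted"), false) };
    //         }
    //     }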
}
|
package focusedCrawler.link.classifier;
import focusedCrawler.link.frontier.LinkRelevance;
import focusedCrawler.util.parser.LinkNeighborhood;
import focusedCrawler.util.parser.PaginaURL;
public class LinkClassifierImpl implements LinkClassifier{
private final int[] weights = new int[]{2,1,0};
private final int intervalRandom = 100;
private final LNClassifier lnClassifier;
public LinkClassifierImpl(LNClassifier lnClassifier) {
this.lnClassifier = lnClassifier;
}
/**
* This method classifies links based on the priority set by the
* naive bayes link classifier.
* @param page Page
* @return LinkRelevance[]
* @throws LinkClassifierException
*/
public LinkRelevance[] classify(PaginaURL page) throws LinkClassifierException {
LinkRelevance[] linkRelevance = null;
LinkNeighborhood ln = null;
try {
LinkNeighborhood[] lns = page.getLinkNeighboor();
linkRelevance = new LinkRelevance[lns.length];
for (int i = 0; i < lns.length; i++) {
ln = lns[i];
linkRelevance[i] = classify(ln);
}
} catch (Exception ex) {
throw new LinkClassifierException("Failed to classify link [" + ln.getLink().toString()
+ "] from page: " + page.getURL().toString(), ex);
}
return linkRelevance;
}
public LinkRelevance classify(LinkNeighborhood ln) throws LinkClassifierException {
LinkRelevance linkRel = null;
try {
double[] prob = lnClassifier.classify(ln);
int classificationResult = -1;
double maxProb = -1;
for (int i = 0; i < prob.length; i++) {
if(prob[i] > maxProb){
maxProb = prob[i];
classificationResult = i;
}
}
double probability = prob[classificationResult]*100;
if(probability == 100){
probability = 99;
}
classificationResult = weights[classificationResult];
double result = (classificationResult * intervalRandom) + probability ;
linkRel = new LinkRelevance(ln.getLink(),result);
} catch (Exception ex) {
throw new LinkClassifierException("Failed to classify link: "+ln.getLink().toString(), ex);
}
return linkRel;
}
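    // Worked example of the relevance score (illustrative values): if the classifier returns
    // prob = {0.8, 0.15, 0.05}, the winning class is 0, weights[0] = 2 and the probability
    // becomes 80, so result = 2 * 100 + 80 = 280. Scores therefore fall into bands of width
    // 100 ordered by class weight, with the probability ranking links inside a band.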
}
|
package hex.deepwater;
import hex.DataInfo;
import hex.Model;
import water.*;
import water.fvec.Frame;
import water.gpu.ImageTrain;
import water.util.*;
/**
 * This class contains the state of the Deep Water model.
 * It will be shared: one instance per node.
*/
final public class DeepWaterModelInfo extends Iced {
transient ImageTrain _imageTrain; //each node needs to load its own native model
int _height;
int _width;
int _channels;
public TwoDimTable summaryTable;
// compute model size (number of model parameters required for making predictions)
// momenta are not counted here, but they are needed for model building
public long size() {
return 0;
}
Key<Model> _model_id;
public DeepWaterParameters parameters;
public final DeepWaterParameters get_params() { return parameters; }
private long processed_global;
public synchronized long get_processed_global() { return processed_global; }
public synchronized void set_processed_global(long p) { processed_global = p; }
public synchronized void add_processed_global(long p) { processed_global += p; }
private long processed_local;
public synchronized long get_processed_local() { return processed_local; }
public synchronized void set_processed_local(long p) { processed_local = p; }
public synchronized void add_processed_local(long p) { processed_local += p; }
public synchronized long get_processed_total() { return processed_global + processed_local; }
final boolean _classification; // Classification cache (nclasses>1)
final Frame _train; // Prepared training frame
final Frame _valid; // Prepared validation frame
/**
* Dummy constructor, only to be used for deserialization from autobuffer
*/
private DeepWaterModelInfo() {
super(); // key is null
_classification = false;
_train = _valid = null;
}
/**
* Main constructor
* @param params Model parameters
* @param dinfo Data Info
* @param nClasses number of classes (1 for regression, 0 for autoencoder)
* @param train User-given training data frame, prepared by AdaptTestTrain
* @param valid User-specified validation data frame, prepared by AdaptTestTrain
*/
public DeepWaterModelInfo(final DeepWaterParameters params, Key model_id, final DataInfo dinfo, int nClasses, Frame train, Frame valid) {
_classification = nClasses > 1;
_train = train;
_valid = valid;
parameters = (DeepWaterParameters) params.clone(); //make a copy, don't change model's parameters
_model_id = model_id;
DeepWaterParameters.Sanity.modifyParms(parameters, parameters, nClasses); //sanitize the model_info's parameters
_width = 224;
_height = 224;
_channels = 3;
_imageTrain = new ImageTrain(_width,_height,_channels);
_imageTrain.buildNet(nClasses, parameters._mini_batch_size, "inception_bn");
_imageTrain.loadParam(expandPath("~/deepwater/Inception/model.params"));
}
static String expandPath(String path) {
return path.replaceFirst("^~", System.getProperty("user.home"));
}
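  // For example, expandPath("~/deepwater/Inception/model.params") becomes
  // "/home/<user>/deepwater/Inception/model.params" when user.home is "/home/<user>";
  // paths without a leading "~" are returned unchanged. (<user> is a placeholder.)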
DeepWaterModelInfo deep_clone() {
AutoBuffer ab = new AutoBuffer();
this.write(ab);
ab.flipForReading();
return (DeepWaterModelInfo) new DeepWaterModelInfo().read(ab);
}
/**
* Create a summary table
* @return TwoDimTable with the summary of the model
*/
TwoDimTable createSummaryTable() {
return null;
}
/**
* Print a summary table
* @return String containing ASCII version of summary table
*/
@Override public String toString() {
StringBuilder sb = new StringBuilder();
if (!get_params()._quiet_mode) {
createSummaryTable();
if (summaryTable!=null) sb.append(summaryTable.toString(1));
}
return sb.toString();
}
/**
* Debugging printout
* @return String with useful info
*/
public String toStringAll() {
StringBuilder sb = new StringBuilder();
sb.append(toString());
sb.append("\nprocessed global: ").append(get_processed_global());
sb.append("\nprocessed local: ").append(get_processed_local());
sb.append("\nprocessed total: ").append(get_processed_total());
sb.append("\n");
return sb.toString();
}
public void add(DeepWaterModelInfo other) {
throw H2O.unimpl();
}
public void mult(double N) {
throw H2O.unimpl();
}
public void div(double N) {
throw H2O.unimpl();
}
}
|
package ch.rasc.sec.security;
import org.h2.server.web.WebServlet;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.embedded.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.annotation.Order;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.util.matcher.AntPathRequestMatcher;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
@Configuration
@EnableGlobalMethodSecurity(securedEnabled = true, prePostEnabled = true)
public class SecurityConfig {
@Bean
protected WebMvcConfigurerAdapter viewControllers() {
return new WebMvcConfigurerAdapter() {
@Override
public void addViewControllers(ViewControllerRegistry registry) {
registry.addViewController("/login").setViewName("login");
}
};
}
@Configuration
public static class DefaultWebSecurityConfigurerAdapter extends
WebSecurityConfigurerAdapter {
@Bean
public PasswordEncoder passwordEncoder() {
return new BCryptPasswordEncoder();
}
@Autowired
public void configureGlobal(UserDetailsService userDetailsService,
AuthenticationManagerBuilder auth) throws Exception {
TotpAuthenticationConfigurer configurer = new TotpAuthenticationConfigurer(
userDetailsService).passwordEncoder(passwordEncoder());
auth.apply(configurer);
}
@Override
public void configure(WebSecurity builder) throws Exception {
builder.ignoring().antMatchers("/robots.txt", "*.png");
}
@Override
protected void configure(HttpSecurity http) throws Exception {
//@formatter:off
http
.authorizeRequests()
.anyRequest()
.authenticated()
.and()
.formLogin()
.authenticationDetailsSource(new TotpWebAuthenticationDetailsSource())
.loginPage("/login").failureUrl("/login?error").permitAll()
.and()
.logout()
.logoutRequestMatcher(new AntPathRequestMatcher("/logout", "GET"));
//@formatter:on
}
}
@Configuration
@Order(1)
public static class H2ConsoleSecurityConfigurationAdapter extends
WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
//@formatter:off
http
|
package org.jfree.chart.panel;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.Line2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.jfree.chart.ChartPanel;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.ValueAxis;
import org.jfree.chart.plot.Crosshair;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.plot.XYPlot;
import org.jfree.text.TextUtilities;
import org.jfree.ui.RectangleAnchor;
import org.jfree.ui.RectangleEdge;
import org.jfree.ui.TextAnchor;
import org.jfree.util.ObjectUtilities;
import org.jfree.util.PublicCloneable;
/**
* An overlay for a {@link ChartPanel} that draws crosshairs on a plot.
*
* @since 1.0.13
*/
public class CrosshairOverlay extends AbstractOverlay implements Overlay,
PropertyChangeListener, PublicCloneable, Cloneable, Serializable {
/** Storage for the crosshairs along the x-axis. */
private List xCrosshairs;
/** Storage for the crosshairs along the y-axis. */
private List yCrosshairs;
/**
* Default constructor.
*/
public CrosshairOverlay() {
super();
this.xCrosshairs = new java.util.ArrayList();
this.yCrosshairs = new java.util.ArrayList();
}
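    // Usage sketch (illustrative; the chartPanel variable and the value 50.0 are assumptions,
    // not part of this class):
    //
    //     CrosshairOverlay overlay = new CrosshairOverlay();
    //     Crosshair xCrosshair = new Crosshair(50.0);
    //     overlay.addDomainCrosshair(xCrosshair);
    //     chartPanel.addOverlay(overlay); // the ChartPanel paints the overlay on top of the chart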
/**
* Adds a crosshair against the domain axis.
*
* @param crosshair the crosshair.
*/
public void addDomainCrosshair(Crosshair crosshair) {
if (crosshair == null) {
throw new IllegalArgumentException("Null 'crosshair' argument.");
}
this.xCrosshairs.add(crosshair);
crosshair.addPropertyChangeListener(this);
}
public void removeDomainCrosshair(Crosshair crosshair) {
if (crosshair == null) {
throw new IllegalArgumentException("Null 'crosshair' argument.");
}
if (this.xCrosshairs.remove(crosshair)) {
crosshair.removePropertyChangeListener(this);
fireOverlayChanged();
}
}
public void clearDomainCrosshairs() {
if (this.xCrosshairs.isEmpty()) {
return; // nothing to do
}
List crosshairs = getDomainCrosshairs();
for (int i = 0; i < crosshairs.size(); i++) {
Crosshair c = (Crosshair) crosshairs.get(i);
this.xCrosshairs.remove(c);
c.removePropertyChangeListener(this);
}
fireOverlayChanged();
}
public List getDomainCrosshairs() {
return new ArrayList(this.xCrosshairs);
}
/**
* Adds a crosshair against the range axis.
*
* @param crosshair the crosshair.
*/
public void addRangeCrosshair(Crosshair crosshair) {
if (crosshair == null) {
throw new IllegalArgumentException("Null 'crosshair' argument.");
}
this.yCrosshairs.add(crosshair);
crosshair.addPropertyChangeListener(this);
}
public void removeRangeCrosshair(Crosshair crosshair) {
if (crosshair == null) {
throw new IllegalArgumentException("Null 'crosshair' argument.");
}
if (this.yCrosshairs.remove(crosshair)) {
crosshair.removePropertyChangeListener(this);
fireOverlayChanged();
}
}
public void clearRangeCrosshairs() {
if (this.yCrosshairs.isEmpty()) {
return; // nothing to do
}
List crosshairs = getRangeCrosshairs();
for (int i = 0; i < crosshairs.size(); i++) {
Crosshair c = (Crosshair) crosshairs.get(i);
this.yCrosshairs.remove(c);
c.removePropertyChangeListener(this);
}
fireOverlayChanged();
}
public List getRangeCrosshairs() {
return new ArrayList(this.yCrosshairs);
}
/**
* Receives a property change event (typically a change in one of the
* crosshairs).
*
* @param e the event.
*/
public void propertyChange(PropertyChangeEvent e) {
fireOverlayChanged();
}
/**
* Paints the crosshairs in the layer.
*
* @param g2 the graphics target.
* @param chartPanel the chart panel.
*/
public void paintOverlay(Graphics2D g2, ChartPanel chartPanel) {
Shape savedClip = g2.getClip();
Rectangle2D dataArea = chartPanel.getScreenDataArea();
g2.clip(dataArea);
JFreeChart chart = chartPanel.getChart();
XYPlot plot = (XYPlot) chart.getPlot();
ValueAxis xAxis = plot.getDomainAxis();
RectangleEdge xAxisEdge = plot.getDomainAxisEdge();
Iterator iterator = this.xCrosshairs.iterator();
while (iterator.hasNext()) {
Crosshair ch = (Crosshair) iterator.next();
if (ch.isVisible()) {
double x = ch.getValue();
double xx = xAxis.valueToJava2D(x, dataArea, xAxisEdge);
if (plot.getOrientation() == PlotOrientation.VERTICAL) {
drawVerticalCrosshair(g2, dataArea, xx, ch);
}
else {
drawHorizontalCrosshair(g2, dataArea, xx, ch);
}
}
}
ValueAxis yAxis = plot.getRangeAxis();
RectangleEdge yAxisEdge = plot.getRangeAxisEdge();
iterator = this.yCrosshairs.iterator();
while (iterator.hasNext()) {
Crosshair ch = (Crosshair) iterator.next();
if (ch.isVisible()) {
double y = ch.getValue();
double yy = yAxis.valueToJava2D(y, dataArea, yAxisEdge);
if (plot.getOrientation() == PlotOrientation.VERTICAL) {
drawHorizontalCrosshair(g2, dataArea, yy, ch);
}
else {
drawVerticalCrosshair(g2, dataArea, yy, ch);
}
}
}
g2.setClip(savedClip);
}
/**
* Draws a crosshair horizontally across the plot.
*
* @param g2 the graphics target.
* @param dataArea the data area.
* @param y the y-value in Java2D space.
* @param crosshair the crosshair.
*/
protected void drawHorizontalCrosshair(Graphics2D g2, Rectangle2D dataArea,
double y, Crosshair crosshair) {
if (y >= dataArea.getMinY() && y <= dataArea.getMaxY()) {
Line2D line = new Line2D.Double(dataArea.getMinX(), y,
dataArea.getMaxX(), y);
Paint savedPaint = g2.getPaint();
Stroke savedStroke = g2.getStroke();
g2.setPaint(crosshair.getPaint());
g2.setStroke(crosshair.getStroke());
g2.draw(line);
if (crosshair.isLabelVisible()) {
String label = crosshair.getLabelGenerator().generateLabel(
crosshair);
RectangleAnchor anchor = crosshair.getLabelAnchor();
Point2D pt = calculateLabelPoint(line, anchor, 5, 5);
float xx = (float) pt.getX();
float yy = (float) pt.getY();
TextAnchor alignPt = textAlignPtForLabelAnchorH(anchor);
Shape hotspot = TextUtilities.calculateRotatedStringBounds(
label, g2, xx, yy, alignPt, 0.0, TextAnchor.CENTER);
if (!dataArea.contains(hotspot.getBounds2D())) {
anchor = flipAnchorV(anchor);
pt = calculateLabelPoint(line, anchor, 5, 5);
xx = (float) pt.getX();
yy = (float) pt.getY();
alignPt = textAlignPtForLabelAnchorH(anchor);
hotspot = TextUtilities.calculateRotatedStringBounds(
label, g2, xx, yy, alignPt, 0.0, TextAnchor.CENTER);
}
g2.setPaint(crosshair.getLabelBackgroundPaint());
g2.fill(hotspot);
g2.setPaint(crosshair.getLabelOutlinePaint());
g2.draw(hotspot);
TextUtilities.drawAlignedString(label, g2, xx, yy, alignPt);
}
g2.setPaint(savedPaint);
g2.setStroke(savedStroke);
}
}
/**
* Draws a crosshair vertically on the plot.
*
* @param g2 the graphics target.
* @param dataArea the data area.
* @param x the x-value in Java2D space.
* @param crosshair the crosshair.
*/
protected void drawVerticalCrosshair(Graphics2D g2, Rectangle2D dataArea,
double x, Crosshair crosshair) {
if (x >= dataArea.getMinX() && x <= dataArea.getMaxX()) {
Line2D line = new Line2D.Double(x, dataArea.getMinY(), x,
dataArea.getMaxY());
Paint savedPaint = g2.getPaint();
Stroke savedStroke = g2.getStroke();
g2.setPaint(crosshair.getPaint());
g2.setStroke(crosshair.getStroke());
g2.draw(line);
if (crosshair.isLabelVisible()) {
String label = crosshair.getLabelGenerator().generateLabel(
crosshair);
RectangleAnchor anchor = crosshair.getLabelAnchor();
Point2D pt = calculateLabelPoint(line, anchor, 5, 5);
float xx = (float) pt.getX();
float yy = (float) pt.getY();
TextAnchor alignPt = textAlignPtForLabelAnchorV(anchor);
Shape hotspot = TextUtilities.calculateRotatedStringBounds(
label, g2, xx, yy, alignPt, 0.0, TextAnchor.CENTER);
if (!dataArea.contains(hotspot.getBounds2D())) {
anchor = flipAnchorH(anchor);
pt = calculateLabelPoint(line, anchor, 5, 5);
xx = (float) pt.getX();
yy = (float) pt.getY();
alignPt = textAlignPtForLabelAnchorV(anchor);
hotspot = TextUtilities.calculateRotatedStringBounds(
label, g2, xx, yy, alignPt, 0.0, TextAnchor.CENTER);
}
g2.setPaint(crosshair.getLabelBackgroundPaint());
g2.fill(hotspot);
g2.setPaint(crosshair.getLabelOutlinePaint());
g2.draw(hotspot);
TextUtilities.drawAlignedString(label, g2, xx, yy, alignPt);
}
g2.setPaint(savedPaint);
g2.setStroke(savedStroke);
}
}
/**
* Calculates the anchor point for a label.
*
* @param line the line for the crosshair.
* @param anchor the anchor point.
* @param deltaX the x-offset.
* @param deltaY the y-offset.
*
* @return The anchor point.
*/
private Point2D calculateLabelPoint(Line2D line, RectangleAnchor anchor,
double deltaX, double deltaY) {
double x = 0.0;
double y = 0.0;
boolean left = (anchor == RectangleAnchor.BOTTOM_LEFT
|| anchor == RectangleAnchor.LEFT
|| anchor == RectangleAnchor.TOP_LEFT);
boolean right = (anchor == RectangleAnchor.BOTTOM_RIGHT
|| anchor == RectangleAnchor.RIGHT
|| anchor == RectangleAnchor.TOP_RIGHT);
boolean top = (anchor == RectangleAnchor.TOP_LEFT
|| anchor == RectangleAnchor.TOP
|| anchor == RectangleAnchor.TOP_RIGHT);
boolean bottom = (anchor == RectangleAnchor.BOTTOM_LEFT
|| anchor == RectangleAnchor.BOTTOM
|| anchor == RectangleAnchor.BOTTOM_RIGHT);
Rectangle rect = line.getBounds();
Point2D pt = RectangleAnchor.coordinates(rect, anchor);
// we expect the line to be vertical or horizontal
if (line.getX1() == line.getX2()) { // vertical
x = line.getX1();
y = (line.getY1() + line.getY2()) / 2.0;
if (left) {
x = x - deltaX;
}
if (right) {
x = x + deltaX;
}
if (top) {
y = Math.min(line.getY1(), line.getY2()) + deltaY;
}
if (bottom) {
y = Math.max(line.getY1(), line.getY2()) - deltaY;
}
}
else { // horizontal
x = (line.getX1() + line.getX2()) / 2.0;
y = line.getY1();
if (left) {
x = Math.min(line.getX1(), line.getX2()) + deltaX;
}
if (right) {
x = Math.max(line.getX1(), line.getX2()) - deltaX;
}
if (top) {
y = y - deltaY;
}
if (bottom) {
y = y + deltaY;
}
}
return new Point2D.Double(x, y);
}
/**
* Returns the text anchor that is used to align a label to its anchor
* point.
*
* @param anchor the anchor.
*
* @return The text alignment point.
*/
private TextAnchor textAlignPtForLabelAnchorV(RectangleAnchor anchor) {
TextAnchor result = TextAnchor.CENTER;
if (anchor.equals(RectangleAnchor.TOP_LEFT)) {
result = TextAnchor.TOP_RIGHT;
}
else if (anchor.equals(RectangleAnchor.TOP)) {
result = TextAnchor.TOP_CENTER;
}
else if (anchor.equals(RectangleAnchor.TOP_RIGHT)) {
result = TextAnchor.TOP_LEFT;
}
else if (anchor.equals(RectangleAnchor.LEFT)) {
result = TextAnchor.HALF_ASCENT_RIGHT;
}
else if (anchor.equals(RectangleAnchor.RIGHT)) {
result = TextAnchor.HALF_ASCENT_LEFT;
}
else if (anchor.equals(RectangleAnchor.BOTTOM_LEFT)) {
result = TextAnchor.BOTTOM_RIGHT;
}
else if (anchor.equals(RectangleAnchor.BOTTOM)) {
result = TextAnchor.BOTTOM_CENTER;
}
else if (anchor.equals(RectangleAnchor.BOTTOM_RIGHT)) {
result = TextAnchor.BOTTOM_LEFT;
}
return result;
}
/**
* Returns the text anchor that is used to align a label to its anchor
* point.
*
* @param anchor the anchor.
*
* @return The text alignment point.
*/
private TextAnchor textAlignPtForLabelAnchorH(RectangleAnchor anchor) {
TextAnchor result = TextAnchor.CENTER;
if (anchor.equals(RectangleAnchor.TOP_LEFT)) {
result = TextAnchor.BOTTOM_LEFT;
}
else if (anchor.equals(RectangleAnchor.TOP)) {
result = TextAnchor.BOTTOM_CENTER;
}
else if (anchor.equals(RectangleAnchor.TOP_RIGHT)) {
result = TextAnchor.BOTTOM_RIGHT;
}
else if (anchor.equals(RectangleAnchor.LEFT)) {
result = TextAnchor.HALF_ASCENT_LEFT;
}
else if (anchor.equals(RectangleAnchor.RIGHT)) {
result = TextAnchor.HALF_ASCENT_RIGHT;
}
else if (anchor.equals(RectangleAnchor.BOTTOM_LEFT)) {
result = TextAnchor.TOP_LEFT;
}
else if (anchor.equals(RectangleAnchor.BOTTOM)) {
result = TextAnchor.TOP_CENTER;
}
else if (anchor.equals(RectangleAnchor.BOTTOM_RIGHT)) {
result = TextAnchor.TOP_RIGHT;
}
return result;
}
private RectangleAnchor flipAnchorH(RectangleAnchor anchor) {
RectangleAnchor result = anchor;
if (anchor.equals(RectangleAnchor.TOP_LEFT)) {
result = RectangleAnchor.TOP_RIGHT;
}
else if (anchor.equals(RectangleAnchor.TOP_RIGHT)) {
result = RectangleAnchor.TOP_LEFT;
}
else if (anchor.equals(RectangleAnchor.LEFT)) {
result = RectangleAnchor.RIGHT;
}
else if (anchor.equals(RectangleAnchor.RIGHT)) {
result = RectangleAnchor.LEFT;
}
else if (anchor.equals(RectangleAnchor.BOTTOM_LEFT)) {
result = RectangleAnchor.BOTTOM_RIGHT;
}
else if (anchor.equals(RectangleAnchor.BOTTOM_RIGHT)) {
result = RectangleAnchor.BOTTOM_LEFT;
}
return result;
}
private RectangleAnchor flipAnchorV(RectangleAnchor anchor) {
RectangleAnchor result = anchor;
if (anchor.equals(RectangleAnchor.TOP_LEFT)) {
result = RectangleAnchor.BOTTOM_LEFT;
}
else if (anchor.equals(RectangleAnchor.TOP_RIGHT)) {
result = RectangleAnchor.BOTTOM_RIGHT;
}
else if (anchor.equals(RectangleAnchor.TOP)) {
result = RectangleAnchor.BOTTOM;
}
else if (anchor.equals(RectangleAnchor.BOTTOM)) {
result = RectangleAnchor.TOP;
}
else if (anchor.equals(RectangleAnchor.BOTTOM_LEFT)) {
result = RectangleAnchor.TOP_LEFT;
}
else if (anchor.equals(RectangleAnchor.BOTTOM_RIGHT)) {
result = RectangleAnchor.TOP_RIGHT;
}
return result;
}
/**
* Tests this overlay for equality with an arbitrary object.
*
* @param obj the object (<code>null</code> permitted).
*
* @return A boolean.
*/
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof CrosshairOverlay)) {
return false;
}
CrosshairOverlay that = (CrosshairOverlay) obj;
if (!this.xCrosshairs.equals(that.xCrosshairs)) {
return false;
}
if (!this.yCrosshairs.equals(that.yCrosshairs)) {
return false;
}
return true;
}
/**
* Returns a clone of this instance.
*
* @return A clone of this instance.
*
* @throws java.lang.CloneNotSupportedException if there is some problem
* with the cloning.
*/
public Object clone() throws CloneNotSupportedException {
CrosshairOverlay clone = (CrosshairOverlay) super.clone();
clone.xCrosshairs = (List) ObjectUtilities.deepClone(this.xCrosshairs);
clone.yCrosshairs = (List) ObjectUtilities.deepClone(this.yCrosshairs);
return clone;
}
}
|
package fr.noogotte.useful_commands.command;
import static fr.noogotte.useful_commands.LocationUtil.getDistantLocation;
import static fr.noogotte.useful_commands.LocationUtil.getTargetBlockLocation;
import java.util.ArrayList;
import java.util.List;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import fr.aumgn.bukkitutils.command.Command;
import fr.aumgn.bukkitutils.command.NestedCommands;
import fr.aumgn.bukkitutils.command.args.CommandArgs;
import fr.aumgn.bukkitutils.command.exception.CommandError;
import fr.aumgn.bukkitutils.command.exception.CommandUsageError;
import fr.aumgn.bukkitutils.geom.Vector;
import fr.aumgn.bukkitutils.geom.Vector2D;
import fr.aumgn.bukkitutils.util.Util;
@NestedCommands(name = "useful")
public class WorldCommands extends UsefulCommands {
@Command(name = "seed", min = 0, max = 1)
public void seed(CommandSender sender, CommandArgs args) {
List<World> worlds = args.getList(0, World.class).match(sender);
for (World world : worlds) {
sender.sendMessage(ChatColor.GREEN + "Seed de "
+ ChatColor.BLUE + world.getName()
+ ChatColor.GREEN + " : "
+ ChatColor.BLUE + world.getSeed());
}
}
@Command(name = "setspawn", min = 0, max = 2)
public void setSpawn(CommandSender sender, CommandArgs args) {
Vector position = args.getVector(0).value(sender);
World world = args.getWorld(0).value(sender);
world.setSpawnLocation(
position.getBlockX(),
position.getBlockY(),
position.getBlockZ());
sender.sendMessage(ChatColor.GREEN + "Vous avez défini le spawn !");
}
@Command(name = "time", min = 1, max = 2)
public void time(CommandSender sender, CommandArgs args) {
String arg = args.get(0);
List<World> worlds = args.getList(1, World.class).value(sender);
int time;
if (arg.equalsIgnoreCase("day")) {
time = 20 * 60;
} else if (arg.equalsIgnoreCase("night")) {
time = 20 * 60 * 11;
} else {
throw new CommandUsageError(
"Argument " + arg + " inconnu.");
}
for (World world : worlds) {
world.setTime(time);
if(time == 20 * 60) {
Util.broadcast("useful.world.time.broadcast", ChatColor.AQUA + sender.getName()
+ ChatColor.GOLD + " a mis le jour dans "
+ ChatColor.AQUA + world.getName());
} else if (time == 20 * 60 * 11) {
Util.broadcast("useful.world.time.broadcast", ChatColor.AQUA + sender.getName()
+ ChatColor.GOLD + " a mis la nuit dans "
+ ChatColor.AQUA + world.getName());
}
}
}
@Command(name = "weather", min = 1, max = 2)
public void weather(CommandSender sender, CommandArgs args) {
String arg = args.get(0);
List<World> worlds = args.getList(1, World.class).value(sender);
boolean storm;
if (arg.equalsIgnoreCase("sun")) {
storm = false;
} else if(arg.equalsIgnoreCase("storm")) {
storm = true;
} else {
throw new CommandUsageError(
"Argument " + arg + " inconnu.");
}
for (World world : worlds) {
world.setStorm(storm);
            if (storm) {
                Util.broadcast("useful.weather.broadcast", ChatColor.AQUA + sender.getName()
                        + ChatColor.GOLD + " a mis la pluie dans "
                        + ChatColor.AQUA + world.getName());
            } else {
                Util.broadcast("useful.weather.broadcast", ChatColor.AQUA + sender.getName()
                        + ChatColor.GOLD + " a arrêté la pluie dans "
                        + ChatColor.AQUA + world.getName());
            }
}
}
@Command(name = "spawnmob", flags = "tp", argsFlags = "d", min = 1, max = 3)
public void spawnmob(Player sender, CommandArgs args) {
EntityType entity = args.getEntityType(0).value();
if (!entity.isSpawnable() && isNotAMob(entity)) {
throw new CommandError("Vous ne pouvez pas spawner ce type d'entité");
}
int count = args.getInteger(1).value(1);
List<Location> locations = new ArrayList<Location>();
if (args.hasFlag('t')) {
for (Player target : args.getPlayers(2).value(sender)) {
Location location = getTargetBlockLocation(target, 180)
.toLocation(sender.getWorld());
locations.add(location);
}
} else if (args.hasArgFlag('d')) {
int distance = args.get('d', Integer.class).value();
for (Player target : args.getPlayers(2).value(sender)) {
Location location = getDistantLocation(target, distance)
.toLocation(sender.getWorld());
locations.add(location);
}
} else if (args.hasFlag('p')) {
Vector2D pos2D = args.getVector2D(2).value();
Vector pos = pos2D.toHighest(sender.getWorld());
locations.add(pos.toLocation(sender.getWorld()));
} else {
for (Player target : args.getPlayers(2).value(sender)) {
locations.add(target.getLocation());
}
}
int totalCount = 0;
for (int i = 0; i < count; i++) {
for (Location location : locations) {
location.getWorld().spawnCreature(location, entity);
totalCount++;
}
}
sender.sendMessage(ChatColor.GREEN + "Vous avez spawn "
+ ChatColor.GOLD + totalCount
+ ChatColor.GREEN + " " + entity.getName());
}
@Command(name = "removemob", argsFlags = "wcp", min = 0, max = 2)
public void removemob(CommandSender sender, CommandArgs args) {
List<EntityType> types;
if (args.length() == 0 || args.get(0).equals("*")) {
types = null;
} else {
types = args.getList(0, EntityType.class).value();
for (EntityType type : types) {
if (isNotAMob(type)) {
throw new CommandError(type.getName() + " n'est pas un mob.");
}
}
}
boolean hasRadius = args.length() > 1;
int radius = 0;
Vector from = null;
World world = null;
if (hasRadius) {
radius = args.getInteger(1).value(1);
radius *= radius;
if (args.hasArgFlag('c')) {
if (!args.hasArgFlag('w')) {
throw new CommandUsageError("Vous devez specifier un monde.");
}
from = args.get('c', Vector.class).value();
world = args.get('w', World.class).value();
} else {
Player target = args.get('p', Player.class).value(sender);
from = new Vector(target);
world = target.getWorld();
}
} else {
if (args.hasArgFlag('p')) {
world = args.get('p', Player.class).value().getWorld();
} else {
world = args.get('w', World.class).value(sender);
}
}
int count = 0;
for (Entity entity : world.getEntities()) {
EntityType entityType = entity.getType();
if (isNotAMob(entityType)) {
continue;
}
if (types != null
&& !types.contains(entityType)) {
continue;
}
if (hasRadius
&& new Vector(entity).distanceSq(from) > radius) {
continue;
}
count++;
entity.remove();
}
sender.sendMessage(ChatColor.GREEN + "Vous avez supprimé "
+ ChatColor.GOLD + count
+ ChatColor.GREEN + " mobs");
}
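    // Note on the radius check above (illustrative numbers): the radius argument is squared
    // once (radius *= radius), so a requested radius of 10 becomes 100 and is compared against
    // distanceSq, avoiding a square root per entity. An entity at squared distance 144
    // (12 blocks away) is skipped; one at 81 (9 blocks away) is removed if its type matches.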
@Command(name = "position", min = 0, max = 1)
public void getPos(CommandSender sender, CommandArgs args) {
List<Player> targets = args.getPlayers(0).match(sender, "useful.world.position.other");
for (Player target : targets) {
Location location = target.getLocation();
sender.sendMessage(ChatColor.UNDERLINE + "" + ChatColor.AQUA + target.getName() + " :");
sender.sendMessage(ChatColor.AQUA + "X: " +
ChatColor.GREEN + location.getX());
sender.sendMessage(ChatColor.AQUA + "Y: " +
ChatColor.GREEN + location.getY());
sender.sendMessage(ChatColor.AQUA + "Z: " +
ChatColor.GREEN + location.getZ());
sender.sendMessage(ChatColor.AQUA + "Pitch: " +
ChatColor.GREEN + location.getPitch());
sender.sendMessage(ChatColor.AQUA + "Yaw: " +
ChatColor.GREEN + location.getYaw());
}
}
private boolean isNotAMob(EntityType type) {
return type.equals(EntityType.ARROW)
|| type.equals(EntityType.BOAT)
|| type.equals(EntityType.COMPLEX_PART)
|| type.equals(EntityType.DROPPED_ITEM)
|| type.equals(EntityType.EGG)
|| type.equals(EntityType.ENDER_CRYSTAL)
|| type.equals(EntityType.ENDER_PEARL)
|| type.equals(EntityType.EXPERIENCE_ORB)
|| type.equals(EntityType.FALLING_BLOCK)
|| type.equals(EntityType.FIREBALL)
|| type.equals(EntityType.FISHING_HOOK)
|| type.equals(EntityType.LIGHTNING)
|| type.equals(EntityType.MINECART)
|| type.equals(EntityType.PAINTING)
|| type.equals(EntityType.PLAYER)
|| type.equals(EntityType.PRIMED_TNT)
|| type.equals(EntityType.SMALL_FIREBALL)
|| type.equals(EntityType.SNOWBALL)
|| type.equals(EntityType.SPLASH_POTION)
|| type.equals(EntityType.THROWN_EXP_BOTTLE)
|| type.equals(EntityType.UNKNOWN)
|| type.equals(EntityType.WEATHER);
}
}
|
package hex.ensemble;
import hex.Model;
import hex.StackedEnsembleModel;
import hex.genmodel.utils.DistributionFamily;
import hex.tree.drf.DRF;
import hex.tree.drf.DRFModel;
import hex.tree.gbm.GBM;
import hex.tree.gbm.GBMModel;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import water.DKV;
import water.Key;
import water.Scope;
import water.TestUtil;
import water.fvec.Frame;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import static hex.genmodel.utils.DistributionFamily.gaussian;
public class StackedEnsembleTest extends TestUtil {
@BeforeClass public static void stall() { stall_till_cloudsize(1); }
private abstract class PrepData { abstract int prep(Frame fr); }
static final String ignored_aircols[] = new String[] { "DepTime", "ArrTime", "AirTime", "ArrDelay", "DepDelay", "TaxiIn",
"TaxiOut", "Cancelled", "CancellationCode", "Diverted", "CarrierDelay", "WeatherDelay", "NASDelay", "SecurityDelay",
"LateAircraftDelay", "IsDepDelayed"};
@Test public void testBasicEnsemble() {
// Regression tests
basicEnsemble("./smalldata/junit/cars.csv",
null,
new StackedEnsembleTest.PrepData() { int prep(Frame fr ) {fr.remove("name").remove(); return ~fr.find("economy (mpg)"); }},
false, gaussian);
basicEnsemble("./smalldata/junit/test_tree_minmax.csv",
null,
new StackedEnsembleTest.PrepData() { int prep(Frame fr) { return fr.find("response"); }
},
false, DistributionFamily.bernoulli);
basicEnsemble("./smalldata/logreg/prostate.csv",
null,
new StackedEnsembleTest.PrepData() { int prep(Frame fr) { fr.remove("ID").remove(); return fr.find("CAPSULE"); }
},
false, DistributionFamily.bernoulli);
basicEnsemble("./smalldata/logreg/prostate_train.csv",
"./smalldata/logreg/prostate_test.csv",
new StackedEnsembleTest.PrepData() { int prep(Frame fr) { return fr.find("CAPSULE"); }
},
false, DistributionFamily.bernoulli);
basicEnsemble("./smalldata/gbm_test/alphabet_cattest.csv",
null,
new StackedEnsembleTest.PrepData() { int prep(Frame fr) { return fr.find("y"); }
},
false, DistributionFamily.bernoulli);
basicEnsemble("./smalldata/airlines/allyears2k_headers.zip",
null,
new StackedEnsembleTest.PrepData() { int prep(Frame fr) {
for( String s : ignored_aircols ) fr.remove(s).remove();
return fr.find("IsArrDelayed"); }
},
false, DistributionFamily.bernoulli);
}
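  // basicEnsemble() below trains both base models (GBM and DRF) with the same nfolds,
  // Modulo fold assignment and _keep_cross_validation_predictions = true, so the stacked
  // ensemble can combine consistent out-of-fold predictions from both base models when it
  // trains its metalearner.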
public StackedEnsembleModel.StackedEnsembleOutput basicEnsemble(String training_file, String validation_file, StackedEnsembleTest.PrepData prep, boolean dupeTrainingFrameToValidationFrame, DistributionFamily family) {
Set<Frame> framesBefore = new HashSet<>();
framesBefore.addAll(Arrays.asList( Frame.fetchAll()));
GBMModel gbm = null;
DRFModel drf = null;
StackedEnsembleModel stackedEnsembleModel = null;
Frame training_frame = null, validation_frame = null;
try {
Scope.enter();
training_frame = parse_test_file(training_file);
if (null != validation_file)
validation_frame = parse_test_file(validation_file);
int idx = prep.prep(training_frame); // hack frame per-test
if (null != validation_frame)
prep.prep(validation_frame);
if (family == DistributionFamily.bernoulli || family == DistributionFamily.multinomial || family == DistributionFamily.modified_huber) {
if (!training_frame.vecs()[idx].isCategorical()) {
Scope.track(training_frame.replace(idx, training_frame.vecs()[idx].toCategoricalVec()));
if (null != validation_frame)
Scope.track(validation_frame.replace(idx, validation_frame.vecs()[idx].toCategoricalVec()));
}
}
DKV.put(training_frame); // Update frames after preparing
if (null != validation_frame)
DKV.put(validation_frame);
// Build GBM
GBMModel.GBMParameters gbmParameters = new GBMModel.GBMParameters();
if( idx < 0 ) idx = ~idx;
// Configure GBM
gbmParameters._train = training_frame._key;
gbmParameters._valid = (validation_frame == null ? null : validation_frame._key);
gbmParameters._response_column = training_frame._names[idx];
gbmParameters._ntrees = 5;
gbmParameters._distribution = family;
gbmParameters._max_depth = 4;
gbmParameters._min_rows = 1;
gbmParameters._nbins = 50;
gbmParameters._learn_rate = .2f;
gbmParameters._score_each_iteration = true;
gbmParameters._fold_assignment = Model.Parameters.FoldAssignmentScheme.Modulo;
gbmParameters._keep_cross_validation_predictions = true;
gbmParameters._nfolds = 5;
if( dupeTrainingFrameToValidationFrame ) { // Make a validation frame that's a clone of the training data
validation_frame = new Frame(training_frame);
DKV.put(validation_frame);
gbmParameters._valid = validation_frame._key;
}
// Invoke GBM and block till the end
GBM gbmJob = new GBM(gbmParameters);
// Get the model
gbm = gbmJob.trainModel().get();
Assert.assertTrue(gbmJob.isStopped()); //HEX-1817
// Build DRF
DRFModel.DRFParameters drfParameters = new DRFModel.DRFParameters();
// Configure DRF
drfParameters._train = training_frame._key;
drfParameters._valid = (validation_frame == null ? null : validation_frame._key);
drfParameters._response_column = training_frame._names[idx];
drfParameters._distribution = family;
drfParameters._ntrees = 5;
drfParameters._max_depth = 4;
drfParameters._min_rows = 1;
drfParameters._nbins = 50;
drfParameters._score_each_iteration = true;
drfParameters._fold_assignment = Model.Parameters.FoldAssignmentScheme.Modulo;
drfParameters._keep_cross_validation_predictions = true;
drfParameters._nfolds = 5;
// Invoke DRF and block till the end
DRF drfJob = new DRF(drfParameters);
// Get the model
drf = drfJob.trainModel().get();
Assert.assertTrue(drfJob.isStopped()); //HEX-1817
// Build Stacked Ensemble of previous GBM and DRF
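        // Note (added for clarity): stacking relies on the base models above having been
        // cross-validated with the same fold assignment (Modulo, nfolds = 5) and with
        // keep_cross_validation_predictions enabled, so the metalearner can be trained
        // on their holdout predictions.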
StackedEnsembleModel.StackedEnsembleParameters stackedEnsembleParameters = new StackedEnsembleModel.StackedEnsembleParameters();
// Configure Stacked Ensemble
stackedEnsembleParameters._train = training_frame._key;
stackedEnsembleParameters._valid = (validation_frame == null ? null : validation_frame._key);
stackedEnsembleParameters._response_column = training_frame._names[idx];
stackedEnsembleParameters._base_models = new Key[] {gbm._key,drf._key};
// Invoke Stacked Ensemble and block till end
StackedEnsemble stackedEnsembleJob = new StackedEnsemble(stackedEnsembleParameters);
// Get the stacked ensemble
stackedEnsembleModel = stackedEnsembleJob.trainModel().get();
Assert.assertTrue(stackedEnsembleJob.isStopped());
//return
return stackedEnsembleModel._output;
} finally {
if( training_frame != null ) training_frame .remove();
if( validation_frame != null ) validation_frame.remove();
if( gbm != null ) {
gbm.delete();
for (Key k : gbm._output._cross_validation_predictions) k.remove();
gbm._output._cross_validation_holdout_predictions_frame_id.remove();
gbm.deleteCrossValidationModels();
}
if( drf != null ) {
drf.delete();
for (Key k : drf._output._cross_validation_predictions) k.remove();
drf._output._cross_validation_holdout_predictions_frame_id.remove();
drf.deleteCrossValidationModels();
}
Set<Frame> framesAfter = new HashSet<>(framesBefore);
framesAfter.removeAll(Arrays.asList( Frame.fetchAll()));
Assert.assertEquals("finish with the same number of Frames as we started: " + framesAfter, 0, framesAfter.size());
if( stackedEnsembleModel != null ) {
stackedEnsembleModel.delete();
stackedEnsembleModel.remove();
stackedEnsembleModel._output._metalearner._output._training_metrics.remove();
stackedEnsembleModel._output._metalearner.remove();
}
Scope.exit();
}
}
}
|
package co.phoenixlab.discord;
import co.phoenixlab.common.localization.LocaleStringProvider;
import co.phoenixlab.common.localization.Localizer;
import co.phoenixlab.common.localization.LocalizerImpl;
import co.phoenixlab.discord.api.DiscordApiClient;
import co.phoenixlab.discord.commands.Commands;
import com.google.gson.Gson;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.JsonNode;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Locale;
import java.util.Properties;
import java.util.ResourceBundle;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.nio.file.StandardOpenOption.*;
public class VahrhedralBot implements Runnable {
public static final Logger LOGGER = LoggerFactory.getLogger("VahrhedralBot");
public static final Path CONFIG_PATH = Paths.get("config/config.json");
public static final String USER_AGENT = "DiscordBot (https://github.com/vincentzhang96/VahrhedralBot, 12)";
private DiscordApiClient apiClient;
public static void main(String[] args) {
LOGGER.info("Starting Vahrhedral bot");
VahrhedralBot bot = new VahrhedralBot();
try {
bot.run();
} catch (Exception e) {
LOGGER.error("Fatal error while running bot", e);
}
bot.shutdown();
}
private Configuration config;
private Commands commands;
private CommandDispatcher commandDispatcher;
private EventListener eventListener;
private TaskQueue taskQueue;
private String versionInfo;
private Localizer localizer;
public VahrhedralBot() {
taskQueue = new TaskQueue();
eventListener = new EventListener(this);
}
@Override
public void run() {
// Set thread name
Thread.currentThread().setName("VahrhedralBotMain");
// Load Config
try {
config = loadConfiguration();
} catch (IOException e) {
LOGGER.error("Unable to load configuration", e);
return;
}
loadLocalization();
versionInfo = loadVersionInfo();
commandDispatcher = new CommandDispatcher(this, config.getCommandPrefix());
// Required User-Agent
Unirest.setDefaultHeader("User-Agent", USER_AGENT);
apiClient = new DiscordApiClient();
apiClient.getEventBus().register(eventListener);
commands = new Commands(this);
commands.register(commandDispatcher);
try {
apiClient.logIn(config.getEmail(), config.getPassword());
} catch (IOException e) {
LOGGER.error("Unable to log in", e);
}
taskQueue.executeWaiting();
}
private void loadLocalization() {
localizer = new LocalizerImpl(Locale.getDefault());
localizer.registerPluralityRules(LocalizerImpl.defaultPluralityRules());
LocaleStringProvider provider = new LocaleStringProvider() {
ResourceBundle bundle;
@Override
public void setActiveLocale(Locale locale) {
bundle = ResourceBundle.getBundle("co.phoenixlab.discord.resources.locale", locale);
}
@Override
public String get(String key) {
if (!contains(key)) {
return key;
}
return bundle.getString(key);
}
@Override
public boolean contains(String key) {
return bundle.containsKey(key);
}
};
localizer.addLocaleStringProvider(provider);
}
private String loadVersionInfo() {
try {
Properties properties = new Properties();
properties.load(getClass().getResourceAsStream("/git.properties"));
String gitHash = properties.getProperty("git-sha-1");
HttpResponse<JsonNode> ret =
Unirest.get("https://api.github.com/repos/vincentzhang96/VahrhedralBot/commits/" + gitHash).
asJson();
if (ret.getStatus() != 200) {
throw new IOException("Server returned " + ret.getStatus());
}
JSONObject node = ret.getBody().getObject();
JSONObject commitObj = node.getJSONObject("commit");
Instant time = Instant.parse(commitObj.getJSONObject("committer").getString("date"));
return String.format("Commit %s\nURL: %s\nMessage: %s\nDate: %s",
node.getString("sha"),
node.getString("html_url"), commitObj.getString("message"),
DateTimeFormatter.ofPattern("MM/dd HH:mm z").withZone(ZoneId.systemDefault()).format(time));
} catch (IOException | UnirestException | JSONException e) {
LOGGER.warn("Unable to load git commit version info", e);
}
return "N/A";
}
public String getVersionInfo() {
return versionInfo;
}
private Configuration loadConfiguration() throws IOException {
Gson configGson = new Gson();
try (Reader reader = Files.newBufferedReader(CONFIG_PATH, UTF_8)) {
return configGson.fromJson(reader, Configuration.class);
}
}
public boolean saveConfig() {
Gson configGson = new Gson();
try (BufferedWriter writer = Files.newBufferedWriter(CONFIG_PATH, UTF_8, CREATE, WRITE, TRUNCATE_EXISTING)) {
configGson.toJson(config, writer);
writer.flush();
return true;
} catch (IOException e) {
LOGGER.warn("Unable to save config", e);
return false;
}
}
public CommandDispatcher getMainCommandDispatcher() {
return commandDispatcher;
}
public Configuration getConfig() {
return config;
}
public TaskQueue getTaskQueue() {
return taskQueue;
}
public EventListener getEventListener() {
return eventListener;
}
public DiscordApiClient getApiClient() {
return apiClient;
}
public Localizer getLocalizer() {
return localizer;
}
public Commands getCommands() {
return commands;
}
public void shutdown() {
shutdown(0);
}
public void shutdown(int code) {
if (apiClient != null) {
apiClient.stop();
}
try {
Unirest.shutdown();
} catch (IOException e) {
LOGGER.warn("Was unable to cleanly shut down Unirest", e);
}
System.exit(code);
}
}
|
package org.jfree.data.time;
import java.util.Calendar;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import org.jfree.chart.util.ParamChecks;
import org.jfree.data.DefaultKeyedValues2D;
import org.jfree.data.DomainInfo;
import org.jfree.data.Range;
import org.jfree.data.general.DatasetChangeEvent;
import org.jfree.data.xy.AbstractIntervalXYDataset;
import org.jfree.data.xy.IntervalXYDataset;
import org.jfree.data.xy.TableXYDataset;
import org.jfree.util.PublicCloneable;
/**
* A dataset for regular time periods that implements the
* {@link TableXYDataset} interface. Note that the {@link TableXYDataset}
* interface requires all series to share the same set of x-values. When
* adding a new item <code>(x, y)</code> to one series, all other series
* automatically get a new item <code>(x, null)</code> unless a non-null item
* has already been specified.
*
* @see org.jfree.data.xy.TableXYDataset
*/
public class TimeTableXYDataset extends AbstractIntervalXYDataset
implements Cloneable, PublicCloneable, IntervalXYDataset, DomainInfo,
TableXYDataset {
/**
* The data structure to store the values. Each column represents
* a series (elsewhere in JFreeChart rows are typically used for series,
* but it doesn't matter that much since this data structure is private
* and symmetrical anyway), each row contains values for the same
* {@link RegularTimePeriod} (the rows are sorted into ascending order).
*/
private DefaultKeyedValues2D values;
/**
* A flag that indicates that the domain is 'points in time'. If this flag
* is true, only the x-value (and not the x-interval) is used to determine
* the range of values in the domain.
*/
private boolean domainIsPointsInTime;
/**
* The point within each time period that is used for the X value when this
* collection is used as an {@link org.jfree.data.xy.XYDataset}. This can
* be the start, middle or end of the time period.
*/
private TimePeriodAnchor xPosition;
/** A working calendar (to recycle) */
private Calendar workingCalendar;
/**
* Creates a new dataset.
*/
public TimeTableXYDataset() {
// defer argument checking
this(TimeZone.getDefault(), Locale.getDefault());
}
/**
* Creates a new dataset with the given time zone.
*
* @param zone the time zone to use (<code>null</code> not permitted).
*/
public TimeTableXYDataset(TimeZone zone) {
// defer argument checking
this(zone, Locale.getDefault());
}
/**
* Creates a new dataset with the given time zone and locale.
*
* @param zone the time zone to use (<code>null</code> not permitted).
* @param locale the locale to use (<code>null</code> not permitted).
*/
public TimeTableXYDataset(TimeZone zone, Locale locale) {
ParamChecks.nullNotPermitted(zone, "zone");
ParamChecks.nullNotPermitted(locale, "locale");
this.values = new DefaultKeyedValues2D(true);
this.workingCalendar = Calendar.getInstance(zone, locale);
this.xPosition = TimePeriodAnchor.START;
}
/**
* Returns a flag that controls whether the domain is treated as 'points in
* time'.
* <P>
* This flag is used when determining the max and min values for the domain.
* If true, then only the x-values are considered for the max and min
* values. If false, then the start and end x-values will also be taken
* into consideration.
*
* @return The flag.
*
* @see #setDomainIsPointsInTime(boolean)
*/
public boolean getDomainIsPointsInTime() {
return this.domainIsPointsInTime;
}
/**
* Sets a flag that controls whether the domain is treated as 'points in
* time', or time periods. A {@link DatasetChangeEvent} is sent to all
* registered listeners.
*
* @param flag the new value of the flag.
*
* @see #getDomainIsPointsInTime()
*/
public void setDomainIsPointsInTime(boolean flag) {
this.domainIsPointsInTime = flag;
notifyListeners(new DatasetChangeEvent(this, this));
}
/**
* Returns the position within each time period that is used for the X
* value.
*
* @return The anchor position (never <code>null</code>).
*
* @see #setXPosition(TimePeriodAnchor)
*/
public TimePeriodAnchor getXPosition() {
return this.xPosition;
}
/**
* Sets the position within each time period that is used for the X values,
* then sends a {@link DatasetChangeEvent} to all registered listeners.
*
* @param anchor the anchor position (<code>null</code> not permitted).
*
* @see #getXPosition()
*/
public void setXPosition(TimePeriodAnchor anchor) {
ParamChecks.nullNotPermitted(anchor, "anchor");
this.xPosition = anchor;
notifyListeners(new DatasetChangeEvent(this, this));
}
/**
* Adds a new data item to the dataset and sends a
* {@link DatasetChangeEvent} to all registered listeners.
*
* @param period the time period.
* @param y the value for this period.
     * @param seriesName the name of the series to which the value is added.
*
* @see #remove(TimePeriod, Comparable)
*/
public void add(TimePeriod period, double y, Comparable seriesName) {
add(period, new Double(y), seriesName, true);
}
/**
* Adds a new data item to the dataset and, if requested, sends a
* {@link DatasetChangeEvent} to all registered listeners.
*
* @param period the time period (<code>null</code> not permitted).
* @param y the value for this period (<code>null</code> permitted).
     * @param seriesName the name of the series to which the value is added
     * (<code>null</code> not permitted).
     * @param notify whether dataset listeners are notified or not.
*
* @see #remove(TimePeriod, Comparable, boolean)
*/
public void add(TimePeriod period, Number y, Comparable seriesName,
boolean notify) {
// here's a quirk - the API has been defined in terms of a plain
// TimePeriod, which cannot make use of the timezone and locale
// specified in the constructor...so we only do the time zone
// pegging if the period is an instanceof RegularTimePeriod
if (period instanceof RegularTimePeriod) {
RegularTimePeriod p = (RegularTimePeriod) period;
p.peg(this.workingCalendar);
}
this.values.addValue(y, period, seriesName);
if (notify) {
fireDatasetChanged();
}
}
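    // Minimal usage sketch (added for illustration; not part of the original source).
    // It demonstrates the TableXYDataset contract described in the class comment:
    // adding an item to one series implicitly leaves a null placeholder for every
    // other series at the same time period.
    private static void usageSketch() {
        TimeTableXYDataset dataset = new TimeTableXYDataset();
        dataset.add(new Day(1, 1, 2020), 10.0, "S1");
        dataset.add(new Day(2, 1, 2020), 20.0, "S2");
        // Both series now report getItemCount() == 2; the value that was never
        // supplied for "S1" at the second period comes back as null.
        Number missing = dataset.getY(0, 1);    // null
    }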
/**
* Removes an existing data item from the dataset.
*
* @param period the (existing!) time period of the value to remove
* (<code>null</code> not permitted).
     * @param seriesName the (existing!) name of the series from which the value is removed
* (<code>null</code> not permitted).
*
* @see #add(TimePeriod, double, Comparable)
*/
public void remove(TimePeriod period, Comparable seriesName) {
remove(period, seriesName, true);
}
/**
* Removes an existing data item from the dataset and, if requested,
* sends a {@link DatasetChangeEvent} to all registered listeners.
*
* @param period the (existing!) time period of the value to remove
* (<code>null</code> not permitted).
     * @param seriesName the (existing!) name of the series from which the value
     * is removed (<code>null</code> not permitted).
     * @param notify whether dataset listeners are notified or not.
*
* @see #add(TimePeriod, double, Comparable)
*/
public void remove(TimePeriod period, Comparable seriesName,
boolean notify) {
this.values.removeValue(period, seriesName);
if (notify) {
fireDatasetChanged();
}
}
/**
* Removes all data items from the dataset and sends a
* {@link DatasetChangeEvent} to all registered listeners.
*
* @since 1.0.7
*/
public void clear() {
if (this.values.getRowCount() > 0) {
this.values.clear();
fireDatasetChanged();
}
}
/**
* Returns the time period for the specified item. Bear in mind that all
* series share the same set of time periods.
*
     * @param item the item index (in the range 0 to {@link #getItemCount()} - 1).
*
* @return The time period.
*/
public TimePeriod getTimePeriod(int item) {
return (TimePeriod) this.values.getRowKey(item);
}
/**
* Returns the number of items in ALL series.
*
* @return The item count.
*/
@Override
public int getItemCount() {
return this.values.getRowCount();
}
/**
* Returns the number of items in a series. This is the same value
* that is returned by {@link #getItemCount()} since all series
* share the same x-values (time periods).
*
* @param series the series (zero-based index, ignored).
*
* @return The number of items within the series.
*/
@Override
public int getItemCount(int series) {
return getItemCount();
}
/**
* Returns the number of series in the dataset.
*
* @return The series count.
*/
@Override
public int getSeriesCount() {
return this.values.getColumnCount();
}
/**
* Returns the key for a series.
*
* @param series the series (zero-based index).
*
* @return The key for the series.
*/
@Override
public Comparable getSeriesKey(int series) {
return this.values.getColumnKey(series);
}
/**
* Returns the x-value for an item within a series. The x-values may or
     * may not be returned in ascending order; that is up to the class
* implementing the interface.
*
* @param series the series (zero-based index).
* @param item the item (zero-based index).
*
* @return The x-value.
*/
@Override
public Number getX(int series, int item) {
return new Double(getXValue(series, item));
}
/**
* Returns the x-value (as a double primitive) for an item within a series.
*
* @param series the series index (zero-based).
* @param item the item index (zero-based).
*
* @return The value.
*/
@Override
public double getXValue(int series, int item) {
TimePeriod period = (TimePeriod) this.values.getRowKey(item);
return getXValue(period);
}
/**
* Returns the starting X value for the specified series and item.
*
* @param series the series (zero-based index).
* @param item the item within a series (zero-based index).
*
* @return The starting X value for the specified series and item.
*
* @see #getStartXValue(int, int)
*/
@Override
public Number getStartX(int series, int item) {
return new Double(getStartXValue(series, item));
}
/**
* Returns the start x-value (as a double primitive) for an item within
* a series.
*
* @param series the series index (zero-based).
* @param item the item index (zero-based).
*
* @return The value.
*/
@Override
public double getStartXValue(int series, int item) {
TimePeriod period = (TimePeriod) this.values.getRowKey(item);
return period.getStart().getTime();
}
/**
* Returns the ending X value for the specified series and item.
*
* @param series the series (zero-based index).
* @param item the item within a series (zero-based index).
*
* @return The ending X value for the specified series and item.
*
* @see #getEndXValue(int, int)
*/
@Override
public Number getEndX(int series, int item) {
return new Double(getEndXValue(series, item));
}
/**
* Returns the end x-value (as a double primitive) for an item within
* a series.
*
* @param series the series index (zero-based).
* @param item the item index (zero-based).
*
* @return The value.
*/
@Override
public double getEndXValue(int series, int item) {
TimePeriod period = (TimePeriod) this.values.getRowKey(item);
return period.getEnd().getTime();
}
/**
* Returns the y-value for an item within a series.
*
* @param series the series (zero-based index).
* @param item the item (zero-based index).
*
* @return The y-value (possibly <code>null</code>).
*/
@Override
public Number getY(int series, int item) {
return this.values.getValue(item, series);
}
/**
* Returns the starting Y value for the specified series and item.
*
* @param series the series (zero-based index).
* @param item the item within a series (zero-based index).
*
* @return The starting Y value for the specified series and item.
*/
@Override
public Number getStartY(int series, int item) {
return getY(series, item);
}
/**
* Returns the ending Y value for the specified series and item.
*
* @param series the series (zero-based index).
* @param item the item within a series (zero-based index).
*
* @return The ending Y value for the specified series and item.
*/
@Override
public Number getEndY(int series, int item) {
return getY(series, item);
}
/**
* Returns the x-value for a time period.
*
* @param period the time period.
*
* @return The x-value.
*/
private long getXValue(TimePeriod period) {
long result = 0L;
if (this.xPosition == TimePeriodAnchor.START) {
result = period.getStart().getTime();
}
else if (this.xPosition == TimePeriodAnchor.MIDDLE) {
long t0 = period.getStart().getTime();
long t1 = period.getEnd().getTime();
result = t0 + (t1 - t0) / 2L;
}
else if (this.xPosition == TimePeriodAnchor.END) {
result = period.getEnd().getTime();
}
return result;
}
/**
* Returns the minimum x-value in the dataset.
*
* @param includeInterval a flag that determines whether or not the
* x-interval is taken into account.
*
* @return The minimum value.
*/
@Override
public double getDomainLowerBound(boolean includeInterval) {
double result = Double.NaN;
Range r = getDomainBounds(includeInterval);
if (r != null) {
result = r.getLowerBound();
}
return result;
}
/**
* Returns the maximum x-value in the dataset.
*
* @param includeInterval a flag that determines whether or not the
* x-interval is taken into account.
*
* @return The maximum value.
*/
@Override
public double getDomainUpperBound(boolean includeInterval) {
double result = Double.NaN;
Range r = getDomainBounds(includeInterval);
if (r != null) {
result = r.getUpperBound();
}
return result;
}
/**
* Returns the range of the values in this dataset's domain.
*
* @param includeInterval a flag that controls whether or not the
* x-intervals are taken into account.
*
* @return The range.
*/
@Override
public Range getDomainBounds(boolean includeInterval) {
List keys = this.values.getRowKeys();
if (keys.isEmpty()) {
return null;
}
TimePeriod first = (TimePeriod) keys.get(0);
TimePeriod last = (TimePeriod) keys.get(keys.size() - 1);
if (!includeInterval || this.domainIsPointsInTime) {
return new Range(getXValue(first), getXValue(last));
}
else {
return new Range(first.getStart().getTime(),
last.getEnd().getTime());
}
}
/**
* Tests this dataset for equality with an arbitrary object.
*
* @param obj the object (<code>null</code> permitted).
*
* @return A boolean.
*/
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof TimeTableXYDataset)) {
return false;
}
TimeTableXYDataset that = (TimeTableXYDataset) obj;
if (this.domainIsPointsInTime != that.domainIsPointsInTime) {
return false;
}
if (this.xPosition != that.xPosition) {
return false;
}
if (!this.workingCalendar.getTimeZone().equals(
that.workingCalendar.getTimeZone())
) {
return false;
}
if (!this.values.equals(that.values)) {
return false;
}
return true;
}
/**
* Returns a clone of this dataset.
*
* @return A clone.
*
* @throws CloneNotSupportedException if the dataset cannot be cloned.
*/
@Override
public Object clone() throws CloneNotSupportedException {
TimeTableXYDataset clone = (TimeTableXYDataset) super.clone();
clone.values = (DefaultKeyedValues2D) this.values.clone();
clone.workingCalendar = (Calendar) this.workingCalendar.clone();
return clone;
}
}
|
package info.u_team.u_team_core.energy;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.Direction;
import net.minecraftforge.common.util.INBTSerializable;
import net.minecraftforge.energy.*;
public class BasicEnergyStorage extends EnergyStorage implements INBTSerializable<CompoundNBT> {
public BasicEnergyStorage(int capacity) {
this(capacity, capacity, capacity, 0);
}
public BasicEnergyStorage(int capacity, int maxTransfer) {
this(capacity, maxTransfer, maxTransfer, 0);
}
public BasicEnergyStorage(int capacity, int maxReceive, int maxExtract) {
this(capacity, maxReceive, maxExtract, 0);
}
public BasicEnergyStorage(int capacity, int maxReceive, int maxExtract, int energy) {
super(capacity, maxReceive, maxExtract, energy);
}
public void setEnergy(int energy) {
this.energy = energy;
}
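	/**
	 * Adds (or, if negative, removes) energy directly, clamping the result to
	 * [0, capacity]. Unlike receiveEnergy / extractEnergy this bypasses the
	 * maxReceive / maxExtract transfer limits.
	 */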
public void addEnergy(int energy) {
this.energy += energy;
if (this.energy > getMaxEnergyStored()) {
this.energy = getMaxEnergyStored();
} else if (this.energy < 0) {
this.energy = 0;
}
}
@Override
public CompoundNBT serializeNBT() {
final CompoundNBT compound = new CompoundNBT();
compound.putInt("energy", getEnergyStored());
return compound;
}
@Override
public void deserializeNBT(CompoundNBT compound) {
setEnergy(compound.getInt("energy"));
}
public static void setTileEntityEnergy(TileEntity tileEntity, int energy) {
setTileEntityEnergy(tileEntity, null, energy);
}
public static void setTileEntityEnergy(TileEntity tileEntity, Direction side, int energy) {
tileEntity.getCapability(CapabilityEnergy.ENERGY, side).filter(handler -> handler instanceof BasicEnergyStorage).map(handler -> (BasicEnergyStorage) handler).ifPresent(handler -> handler.setEnergy(energy));
}
public static int getTileEntityEnergy(TileEntity tileEntity) {
return getTileEntityEnergy(tileEntity, null);
}
public static int getTileEntityEnergy(TileEntity tileEntity, Direction side) {
return tileEntity.getCapability(CapabilityEnergy.ENERGY, side).map(IEnergyStorage::getEnergyStored).orElse(0);
}
}
|
package codechicken.lib.util;
import codechicken.lib.vec.Matrix4;
import codechicken.lib.vec.Vector3;
import com.google.common.collect.ImmutableMap;
import net.minecraft.client.renderer.Quaternion;
import net.minecraft.client.renderer.TransformationMatrix;
import net.minecraft.client.renderer.Vector3f;
import net.minecraft.client.renderer.model.ItemCameraTransforms.TransformType;
import java.util.HashMap;
import java.util.Map;
public class TransformUtils {
private static final TransformationMatrix flipX = new TransformationMatrix(null, null, new Vector3f(-1, 1, 1), null);
public static final ImmutableMap<TransformType, TransformationMatrix> DEFAULT_BLOCK;
public static final ImmutableMap<TransformType, TransformationMatrix> DEFAULT_ITEM;
public static final ImmutableMap<TransformType, TransformationMatrix> DEFAULT_TOOL;
public static final ImmutableMap<TransformType, TransformationMatrix> DEFAULT_BOW;
public static final ImmutableMap<TransformType, TransformationMatrix> DEFAULT_HANDHELD_ROD;
static {
Map<TransformType, TransformationMatrix> map;
TransformationMatrix thirdPerson;
TransformationMatrix firstPerson;
//@formatter:off
map = new HashMap<>();
thirdPerson = create(0F,2.5F, 0F,75F, 45F, 0F,0.375F );
map.put(TransformType.GUI, create(0F, 0F, 0F,30F,225F, 0F,0.625F));
map.put(TransformType.GROUND, create(0F, 3F, 0F, 0F, 0F, 0F, 0.25F));
map.put(TransformType.FIXED, create(0F, 0F, 0F, 0F, 0F, 0F, 0.5F));
map.put(TransformType.THIRD_PERSON_RIGHT_HAND, thirdPerson);
map.put(TransformType.THIRD_PERSON_LEFT_HAND, flipLeft(thirdPerson));
map.put(TransformType.FIRST_PERSON_RIGHT_HAND, create(0F, 0F, 0F, 0F, 45F, 0F, 0.4F));
map.put(TransformType.FIRST_PERSON_LEFT_HAND, create(0F, 0F, 0F, 0F, 225F, 0F, 0.4F));
DEFAULT_BLOCK = ImmutableMap.copyOf(map);
map = new HashMap<>();
thirdPerson = create( 0F, 3F, 1F, 0F, 0F, 0F, 0.55F);
firstPerson = create(1.13F,3.2F,1.13F, 0F,-90F,25F, 0.68F);
map.put(TransformType.GROUND, create( 0F, 2F, 0F, 0F, 0F, 0F, 0.5F));
map.put(TransformType.HEAD, create( 0F, 13F, 7F, 0F,180F, 0F, 1F));
map.put(TransformType.THIRD_PERSON_RIGHT_HAND, thirdPerson);
map.put(TransformType.THIRD_PERSON_LEFT_HAND, flipLeft(thirdPerson));
map.put(TransformType.FIRST_PERSON_RIGHT_HAND, firstPerson);
map.put(TransformType.FIRST_PERSON_LEFT_HAND, flipLeft(firstPerson));
DEFAULT_ITEM = ImmutableMap.copyOf(map);
map = new HashMap<>();
map.put(TransformType.GROUND, create( 0F, 2F, 0F, 0F, 0F, 0F, 0.5F));
map.put(TransformType.FIXED, create( 0F, 0F, 0F, 0F,180F, 0F, 1F));
map.put(TransformType.THIRD_PERSON_RIGHT_HAND, create( 0F, 4F, 0.5F, 0F,-90F, 55,0.85F));
map.put(TransformType.THIRD_PERSON_LEFT_HAND, create( 0F, 4F, 0.5F, 0F, 90F,-55,0.85F));
map.put(TransformType.FIRST_PERSON_RIGHT_HAND, create(1.13F,3.2F,1.13F, 0F,-90F, 25,0.68F));
map.put(TransformType.FIRST_PERSON_LEFT_HAND, create(1.13F,3.2F,1.13F, 0F, 90F,-25,0.68F));
DEFAULT_TOOL = ImmutableMap.copyOf(map);
map = new HashMap<>();
map.put(TransformType.GROUND, create( 0F, 2F, 0F, 0F, 0F, 0F, 0.5F));
map.put(TransformType.FIXED, create( 0F, 0F, 0F, 0F, 180F, 0F, 1F));
map.put(TransformType.THIRD_PERSON_RIGHT_HAND, create( -1F, -2F, 2.5F,-80F, 260F,-40F, 0.9F));
map.put(TransformType.THIRD_PERSON_LEFT_HAND, create( -1F, -2F, 2.5F,-80F,-280F, 40F, 0.9F));
map.put(TransformType.FIRST_PERSON_RIGHT_HAND, create(1.13F,3.2F,1.13F, 0F, -90F, 25F,0.68F));
map.put(TransformType.FIRST_PERSON_LEFT_HAND, create(1.13F,3.2F,1.13F, 0F, 90F,-25F,0.68F));
DEFAULT_BOW = ImmutableMap.copyOf(map);
map = new HashMap<>();
map.put(TransformType.GROUND, create(0F, 2F, 0F, 0F, 0F, 0F, 0.5F));
map.put(TransformType.THIRD_PERSON_RIGHT_HAND, create(0F, 4F,2.5F, 0F, 90F, 55F,0.85F));
map.put(TransformType.THIRD_PERSON_LEFT_HAND, create(0F, 4F,2.5F, 0F,-90F,-55F,0.85F));
map.put(TransformType.FIRST_PERSON_RIGHT_HAND, create(0F,1.6F,0.8F, 0F, 90F, 25F,0.68F));
map.put(TransformType.FIRST_PERSON_LEFT_HAND, create(0F,1.6F,0.8F, 0F,-90F,-25F,0.68F));
DEFAULT_HANDHELD_ROD = ImmutableMap.copyOf(map);
//@formatter:on
}
/**
     * Creates a new TransformationMatrix.
     *
     * @param tx The x translation (in 1/16 block units).
     * @param ty The y translation (in 1/16 block units).
     * @param tz The z translation (in 1/16 block units).
     * @param rx The x axis rotation, in degrees.
     * @param ry The y axis rotation, in degrees.
     * @param rz The z axis rotation, in degrees.
     * @param s The uniform scale.
     * @return The new TransformationMatrix.
*/
public static TransformationMatrix create(float tx, float ty, float tz, float rx, float ry, float rz, float s) {
return create(new Vector3f(tx / 16, ty / 16, tz / 16), new Vector3f(rx, ry, rz), new Vector3f(s, s, s));
}
/**
     * Creates a new TransformationMatrix.
     *
     * @param transform The translation.
     * @param rotation The rotation, in degrees.
     * @param scale The scale.
     * @return The new TransformationMatrix.
*/
public static TransformationMatrix create(Vector3 transform, Vector3 rotation, Vector3 scale) {
return create(transform.vector3f(), rotation.vector3f(), scale.vector3f());
}
/**
     * Creates a new TransformationMatrix.
     *
     * @param transform The translation.
     * @param rotation The rotation, in degrees.
     * @param scale The scale.
     * @return The new TransformationMatrix.
*/
public static TransformationMatrix create(Vector3f transform, Vector3f rotation, Vector3f scale) {
return new TransformationMatrix(transform, new Quaternion(rotation.getX(), rotation.getY(), rotation.getZ(), true), scale, null)/*.blockCenterToCorner()*/;
}
/**
* Flips the transform for the left hand.
*
* @param transform The right hand transform.
* @return The new left hand transform.
*/
public static TransformationMatrix flipLeft(TransformationMatrix transform) {
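        // Conjugating by flipX (a -1 scale on the X axis applied on both sides)
        // mirrors the whole transform across the YZ plane, which is what turns a
        // right-hand item transform into its left-hand counterpart.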
return flipX.compose(transform).compose(flipX);
}
}
|
package algorithms.imageProcessing;
import algorithms.compGeometry.LinesAndAngles;
import algorithms.util.PairIntArray;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.set.TIntSet;
import gnu.trove.set.hash.TIntHashSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
*
* @author nichole
*/
public class PartialShapeMatcher {
/**
     * In sampling the boundaries of the shapes, one can
     * choose to use the same number for each (which can result
     * in very different spacings for different sized curves)
     * or one can choose a set distance between sampling
     * points.
     * dp is the set distance between sampling points.
     * The authors use 3 as an example.
*/
protected int dp = 5;
public void overrideSamplingDistance(int d) {
this.dp = d;
}
/**
* NOT READY FOR USE.
*
     * The spacings used are equidistant, so note that
     * any scale factor between p and q has to be
     * applied to the data before using this method.
     * This method will return a score when completed.
* @param p
* @param q
*/
public double match(PairIntArray p, PairIntArray q) {
if (p.getN() > q.getN()) {
throw new IllegalArgumentException(
"q must be <= p in length.");
}
float[][][] md = createDifferenceMatrices(p, q);
/*
the matrices in md can be analyzed for best
global solution and separately for best local
solution.
This method will return results for a local
solution to create the point correspondence list.
Note that the local best could be two different
kinds of models, so might write two
different methods for the results.
(1) the assumption of same object but with some
amount of occlusion, hence gaps in correspondence.
(2) the assumption of same object but with
some parts being differently oriented, for
an example, the scissors opened versus closed.
*/
List<Sequence> sequences = extractSimilar(md);
return matchArticulated(sequences, md[0].length);
//printing out results for md[0] and md[-3] and +3
// to look at known test data while checking logic
//print("md[0]", md[0]);
//print("md[-3]", md[md.length - 1]);
}
protected List<Sequence> extractSimilar(float[][][] md) {
/*
- choose reasonable upper limit to r, such as sqrt(n)
- for that rMax,
- find the best blocks and find the
avg diff of those and either the min and max of range
or st.dev.
- visit all blocks as currently do and find the best,
which may be more than one index of diff matrices,
within the avg and stdev.
(note, can build a std dev summed area table for this image alone)
- inspect the best results:
- form the data as nPart / nWhole for chains of similar indexes?
- (still reading Pareto frontier analysis, but I think that's what it
is composed of)
*/
int n1 = md[0].length;
int rMax = (int)Math.sqrt(n1);
if (rMax < 1) {
rMax = 1;
}
double thresh = 30;
MinDiffs mins = new MinDiffs(n1);
for (int r = 1; r <= rMax; ++r) {
findMinDifferenceMatrix(md, r, thresh, mins);
}
double tolerance = 3.;
DiffMatrixResults equivBest = new DiffMatrixResults(n1);
for (int r = 1; r <= rMax; ++r) {
findEquivalentBest(md, r, mins, tolerance,
equivBest);
}
/*
StringBuilder sb = new StringBuilder();
for (int i = 0; i < n1; ++i) {
sb.append(String.format("[%4d]: ", i));
TIntList list = equivBest.indexes[i];
if (list == null) {
sb.append(" NA");
} else {
list.sort();
for (int j = 0; j < list.size(); ++j) {
sb.append(Integer.toString(list.get(j)));
sb.append(",");
}
}
sb.append(" | ");
System.out.println(sb.toString());
sb.delete(0, sb.length());
}
*/
List<Sequence> sequences = new ArrayList<Sequence>();
for (int idx1 = 0; idx1 < n1; ++idx1) {
TIntList list = equivBest.indexes[idx1];
if (list == null) {
continue;
}
list.sort();
for (int j = 0; j < list.size(); ++j) {
int idx2 = list.get(j);
Sequence s = new Sequence();
s.startIdx1 = idx1;
s.startIdx2 = idx2;
s.stopIdx2 = idx2;
//search through higher index lists to aggregate
int nextLIdx = idx1 + 1;
while (nextLIdx < n1) {
TIntList list2 = equivBest.indexes[nextLIdx];
if (list2 == null) {
break;
}
TIntSet set2 = equivBest.indexSets[nextLIdx];
int idx3 = s.stopIdx2 + 1;
if (set2.contains(idx3)) {
s.stopIdx2 = idx3;
list2.remove(idx3);
set2.remove(idx3);
} else {
break;
}
nextLIdx++;
}
if (s.stopIdx2 - s.startIdx2 > 1) {
sequences.add(s);
}
}
}
System.out.println(sequences.size() + " sequences");
for (int i = 0; i < sequences.size(); ++i) {
Sequence s = sequences.get(i);
int len = s.stopIdx2 - s.startIdx2 + 2;
float frac = (float)len/(float)n1;
System.out.println(String.format(
"seq %d:%d to %d %.4f", s.startIdx1, s.startIdx2,
s.stopIdx2, frac));
}
return sequences;
}
protected double matchArticulated(List<Sequence> sequences,
int n1) {
throw new UnsupportedOperationException("not yet implemented");
}
protected double matchRigidWithOcclusion(List<Sequence> sequences,
int n1) {
throw new UnsupportedOperationException("not yet implemented");
}
// index0 is rotations of p.n, index1 is p.n, index2 is q.n
protected float[][][] createDifferenceMatrices(
PairIntArray p, PairIntArray q) {
if (p.getN() > q.getN()) {
throw new IllegalArgumentException(
"q must be <= p in length.");
}
/*
| a_1_1...a_1_N |
| a_2_1...a_2_N |
| a_N_1...a_N_N |
elements on the diagonal are zero
to shift to different first point as reference,
can shift up k-1 rows and left k-1 columns.
*/
//System.out.println("a1:");
float[][] a1 = createDescriptorMatrix(p);
//System.out.println("a2:");
float[][] a2 = createDescriptorMatrix(q);
int n1 = a1.length;
int n2 = a2.length;
float[][][] md = new float[n1][][];
float[][] prevA2Shifted = null;
for (int i = 0; i < md.length; ++i) {
float[][] shifted2;
if (prevA2Shifted == null) {
shifted2 = copy(a2);
} else {
// shifts by 1 to left and down by 1
rotate(prevA2Shifted);
shifted2 = prevA2Shifted;
}
//M_D^n = A_1(1:M,1:M) - A_2(n:n+M-1,n:n+M-1)
md[i] = subtract(a1, shifted2);
prevA2Shifted = shifted2;
}
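        // convert each difference matrix into a 2-D summed-area table (prefix sums)
        // so that the average difference over any r x r block can later be read
        // with a few lookups in findMinDifferenceMatrix and findEquivalentBest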
for (int i = 0; i < md.length; ++i) {
float[][] mdI = md[i];
for (int x = 0; x < mdI.length; ++x) {
for (int y = 0; y < mdI[x].length; ++y) {
if (x > 0 && y > 0) {
mdI[x][y] += (mdI[x-1][y] + mdI[x][y-1]
- mdI[x-1][y-1]);
} else if (x > 0) {
mdI[x][y] += mdI[x-1][y];
} else if (y > 0) {
mdI[x][y] += mdI[x][y-1];
}
}
}
}
System.out.println("md.length=" + md.length);
return md;
}
protected float[][] createDescriptorMatrix(PairIntArray p) {
int n = (int)Math.ceil((double)p.getN()/dp);
float[][] a = new float[n][];
for (int i = 0; i < n; ++i) {
a[i] = new float[n];
}
/*
P1 Pmid
P2
*/
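        // a[i1][i2] holds the clockwise angle computed from the three sampled points
        // P(i1), P(i2) and P(imid), where imid trails i2 by dp positions and indexes
        // wrap around the closed curve, following the chord diagram sketched above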
System.out.println("n=" + n);
for (int i1 = 0; i1 < n; ++i1) {
int start = i1 + 1 + dp;
for (int ii = start; ii < (start + n - 1 - dp); ++ii) {
int i2 = ii;
int imid = i2 - dp;
// wrap around
if (imid > (n - 1)) {
imid -= n;
}
// wrap around
if (i2 > (n - 1)) {
i2 -= n;
}
//System.out.println("i1=" + i1 + " imid=" + imid + " i2=" + i2);
double angleA = LinesAndAngles.calcClockwiseAngle(
p.getX(i1), p.getY(i1),
p.getX(i2), p.getY(i2),
p.getX(imid), p.getY(imid)
);
/*
String str = String.format(
"[%d](%d,%d) [%d](%d,%d) [%d](%d,%d) a=%.4f",
i1, p.getX(i1), p.getY(i1),
i2, p.getX(i2), p.getY(i2),
imid, p.getX(imid), p.getY(imid),
(float) angleA * 180. / Math.PI);
System.out.println(str);
*/
a[i1][i2] = (float)angleA;
}
}
return a;
}
protected int distanceSqEucl(int x1, int y1, int x2, int y2) {
int diffX = x1 - x2;
int diffY = y1 - y2;
return (diffX * diffX + diffY * diffY);
}
private float[][] copy(float[][] a) {
float[][] a2 = new float[a.length][];
for (int i = 0; i < a2.length; ++i) {
a2[i] = Arrays.copyOf(a[i], a[i].length);
}
return a2;
}
private void rotate(float[][] prevShifted) {
// shift x left by 1 first
for (int y = 0; y < prevShifted[0].length; ++y) {
float tmp0 = prevShifted[0][y];
for (int x = 0; x < (prevShifted.length- 1); ++x){
prevShifted[x][y] = prevShifted[x + 1][y];
}
prevShifted[prevShifted.length - 1][y] = tmp0;
}
// shift y down by 1
for (int x = 0; x < prevShifted.length; ++x) {
float tmp0 = prevShifted[x][0];
for (int y = 0; y < (prevShifted[x].length - 1); ++y){
prevShifted[x][y] = prevShifted[x][y + 1];
}
prevShifted[x][prevShifted[x].length - 1] = tmp0;
}
}
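    /*
     * Worked example (added for clarity) of the shift performed by rotate(),
     * for a 3x3 matrix where the first index is x and the second is y:
     *
     *   before          after one rotate()
     *   a b c           e f d
     *   d e f    ->     h i g
     *   g h i           b c a
     *
     * i.e. new[x][y] = old[x+1][y+1] with wrap-around, which re-references the
     * descriptor matrix to the next starting point on the closed curve.
     */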
private float[][] subtract(float[][] a1, float[][] a2) {
assert(a1.length <= a2.length);
assert(a1[0].length <= a2[0].length);
float[][] output = new float[a1.length][];
for (int i = 0; i < a1.length; ++i) {
output[i] = new float[a1[i].length];
for (int j = 0; j < a1[i].length; ++j) {
output[i][j] = a1[i][j] - a2[i][j];
}
}
return output;
}
private void print(String label, float[][] a) {
StringBuilder sb = new StringBuilder(label);
sb.append("\n");
for (int j = 0; j < a[0].length; ++j) {
sb.append(String.format("row: %3d", j));
for (int i = 0; i < a.length; ++i) {
sb.append(String.format(" %.4f,", a[i][j]));
}
System.out.println(sb.toString());
sb.delete(0, sb.length());
}
}
private class DiffMatrixResults {
private TIntSet[] indexSets = null;
private TIntList[] indexes = null;
public DiffMatrixResults(int n) {
indexes = new TIntList[n];
indexSets = new TIntSet[n];
}
public void add(int index, int value) {
if (indexes[index] == null) {
indexes[index] = new TIntArrayList();
indexSets[index] = new TIntHashSet();
}
if (!indexSets[index].contains(value)) {
indexSets[index].add(value);
indexes[index].add(value);
}
}
}
private class Sequence {
int startIdx1;
int startIdx2 = -1;
int stopIdx2 = -1;
}
private class MinDiffs {
int[] idxs1;
int[] idxs2;
float[] mins;
public MinDiffs(int n) {
idxs1 = new int[n];
idxs2 = new int[n];
mins = new float[n];
Arrays.fill(idxs1, -1);
Arrays.fill(idxs2, -1);
Arrays.fill(mins, Float.MAX_VALUE);
}
}
/**
*
* @param md 3 dimensional array of difference matrices
* @param r block size
     * @param threshold maximum absolute block average to keep
     * @param output the running minimum differences to update
*/
private void findMinDifferenceMatrix(
float[][][] md, int r, double threshold,
MinDiffs output) {
double c = 1./(double)(r*r);
int n1 = md[0].length;
/*
md[n2offset][n1][n2]
md[0 ][0 ][0 ]
[n1-1][0-n1][n2-1]
*/
int[] idxs0 = output.idxs1;
int[] idxs2 = output.idxs2;
float[] mins = output.mins;
for (int iOffset = 0; iOffset < md.length; iOffset++) {
System.out.println("md[" + iOffset + "]:");
float[][] a = md[iOffset];
float sum = 0;
for (int i = 0; i < a.length; i+=r) {
float s1;
if ((i - r) > -1) {
s1 = a[i][i] - a[i - r][i] - a[i][i - r] + a[r][r];
System.out.println(
String.format(
" [%d,%d] %.4f, %.4f, %.4f, %.4f => %.4f",
i, i, a[i][i], a[i - r][i], a[i][i - r],
a[r][r], s1*c));
} else {
s1 = a[i][i];
System.out.println(
String.format(
" [%d,%d] %.4f => %.4f",
i, i, a[i][i], s1*c));
}
s1 *= c;
float absS1 = s1;
if (absS1 < 0) {
absS1 *= -1;
}
if (absS1 > threshold) {
continue;
}
// note, idx from q is i + iOffset
sum += absS1;
if (absS1 < Math.abs(mins[i])) {
int idx2 = i + iOffset;
if (idx2 > n1) {
idx2 -= n1;
}
mins[i] = s1;
idxs0[i] = iOffset;
idxs2[i] = idx2;
// fill in the rest of the diagonal in this block
for (int k = (i-1); k > (i - r); k--) {
if (k < 0) {
break;
}
if (mins[i] < mins[k]) {
idx2 = k + iOffset;
if (idx2 > n1) {
idx2 -= n1;
}
mins[k] = s1;
idxs0[k] = iOffset;
idxs2[k] = idx2;
}
}
}
}
System.out.println("SUM=" + sum);
}
System.out.println("OFFSETS=" + Arrays.toString(idxs0));
System.out.println("idx2=" + Arrays.toString(idxs2));
}
private void findEquivalentBest(float[][][] md, int r,
MinDiffs mins, double tolerance,
DiffMatrixResults output) {
int n1 = mins.idxs1.length;
double c = 1./(double)(r*r);
// capture all "best" within minSigns[i] += 2*variances[i]
for (int iOffset = 0; iOffset < md.length; iOffset++) {
float[][] a = md[iOffset];
for (int i = 0; i < a.length; i+=r) {
if (mins.idxs1[i] == -1) {
continue;
}
double s1;
if ((i - r) > -1) {
s1 = a[i][i] - a[i - r][i] - a[i][i - r] + a[r][r];
} else {
s1 = a[i][i];
}
s1 *= c;
double avg = mins.mins[i];
if (Math.abs(s1 - avg) > tolerance) {
continue;
}
int idx2 = iOffset + i;
if (idx2 > n1) {
idx2 -= n1;
}
output.add(i, idx2);
// fill in the rest of the diagonal in this block
/*for (int k = (i-1); k > (i - r); k--) {
if (k < 0) {
break;
}
if ((k - r) > -1) {
s1 = a[k][k] - a[k - r][k] - a[k][k - r]
+ a[r][r];
} else {
s1 = a[k][k];
}
s1 *= c;
if (Math.abs(s1 - avg) > tolerance) {
continue;
}
idx2 = iOffset + k;
if (idx2 > n1) {
idx2 -= n1;
}
output.add(k, idx2);
}*/
}
}
}
}
|
package io.bdrc.xmltoldmigration.xml2files;
import static io.bdrc.libraries.Models.ADM;
import static io.bdrc.libraries.Models.BDA;
import static io.bdrc.libraries.Models.BDO;
import static io.bdrc.libraries.Models.BDR;
import static io.bdrc.libraries.Models.addReleased;
import static io.bdrc.libraries.Models.createAdminRoot;
import static io.bdrc.libraries.Models.createRoot;
import static io.bdrc.libraries.Models.getAdminRoot;
import static io.bdrc.libraries.Models.getFacetNode;
import static io.bdrc.libraries.Models.setPrefixes;
import static io.bdrc.libraries.GitHelpers.ensureGitRepo;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.namespace.NamespaceContext;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.jena.datatypes.xsd.XSDDatatype;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.vocabulary.RDF;
import org.apache.jena.vocabulary.SKOS;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import io.bdrc.libraries.Models.FacetType;
import io.bdrc.xmltoldmigration.MigrationApp;
import io.bdrc.xmltoldmigration.MigrationHelpers;
import io.bdrc.xmltoldmigration.helpers.ExceptionHelper;
import io.bdrc.xmltoldmigration.helpers.ImageListTranslation;
public class EtextMigration {
public static final String TEI_PREFIX = "http://www.tei-c.org/ns/1.0"; // standard TEI namespace URI
public static boolean testMode = false;
private static XPath xPath = initXpath();
public static final Map<String, String> distributorToUri = new HashMap<>();
public static boolean addEtextInItem = true;
static {
initDistributorToUri();
}
public static void initDistributorToUri() {
String prefix = BDA+"CP";
distributorToUri.put("DharmaDownload", prefix+"001");
distributorToUri.put("DrikungChetsang", prefix+"002");
distributorToUri.put("eKangyur", prefix+"003");
distributorToUri.put("GuruLamaWorks", prefix+"004");
distributorToUri.put("KarmaDelek", prefix+"005");
distributorToUri.put("PalriParkhang", prefix+"006");
distributorToUri.put("Shechen", prefix+"007");
distributorToUri.put("TulkuSangag", prefix+"008");
distributorToUri.put("UCB-OCR", prefix+"009");
distributorToUri.put("VajraVidya", prefix+"010");
distributorToUri.put("Various", prefix+"011");
}
public static XPath initXpath() {
XPathFactory factory = XPathFactory.newInstance();
XPath xPath = factory.newXPath();
@SuppressWarnings("serial")
HashMap<String, String> prefMap = new HashMap<String, String>() {{
put("tei", TEI_PREFIX);
}};
SimpleNamespaceContext namespaces = new SimpleNamespaceContext(prefMap);
xPath.setNamespaceContext(namespaces);
return xPath;
}
public static final List<String> paginatedProviders = Arrays.asList("UCB-OCR", "eKangyur");
public static final Map<String,Boolean> blackListL2 = new HashMap<>();
public static final Map<String,Boolean> blackListL3 = new HashMap<>();
public static final Map<String,Boolean> blackListL4 = new HashMap<>();
static {
blackListL3.put("UT1KG8475-WCSDT8_B", true); // nonsensical
blackListL3.put("UT1PD45495-012", true);
blackListL3.put("UT3JT13306-329", true);
blackListL4.put("UT22082_007_0014.xml", true); // empty
blackListL4.put("UT1KG14_008_0014.xml", true);
blackListL4.put("UT1KG14_036_0026.xml", true);
blackListL4.put("UT1KG14_053_0038.xml", true);
blackListL3.put("UT1GS53494-I1GS53496", true); // image file names changed too much
blackListL3.put("UT00KG0552-I1PD35566", true); // rest: work is withdrawn
blackListL3.put("UT00KG0549-I1PD35560", true);
blackListL3.put("UT00KG0553-I1PD35568", true);
blackListL3.put("UT00KG0550-I1PD35562", true);
blackListL3.put("UT00KG0554-I1PD35570", true);
blackListL3.put("UT1KG4237-I1PD97704", true);
blackListL2.put("UT1KG4239", true);
blackListL4.put("UT1PD45495-011-0002.xml", true);
blackListL4.put("UT1PD45495-011-0003.xml", true);
blackListL4.put("UT1PD45495-011-0004.xml", true);
blackListL4.put("UT1PD45495-011-0005.xml", true);
blackListL4.put("UT1PD45495-011-0007.xml", true);
blackListL4.put("UT1PD45495-011-0008.xml", true);
blackListL4.put("UT1PD45495-011-0009.xml", true);
blackListL4.put("UT1PD45495-011-00010.xml", true);
blackListL4.put("UT1PD45495-011-00011.xml", true);
blackListL4.put("UT1PD45495-011-00012.xml", true);
blackListL4.put("UT1PD45495-011-00013.xml", true);
blackListL4.put("UT1PD45495-011-00014.xml", true);
blackListL4.put("UT1PD45495-011-00015.xml", true);
blackListL4.put("UT1PD45495-011-00015.xml", true);
blackListL4.put("UT1KG4884-017-0001.xml", true);
blackListL4.put("UT1KG4884-017-0002.xml", true);
blackListL4.put("UT1KG4884-017-0003.xml", true);
blackListL4.put("UT1KG4884-017-0005.xml", true);
blackListL4.put("UT1KG4884-017-0007.xml", true);
blackListL4.put("UT1KG4884-018-0001.xml", true);
blackListL4.put("UT1KG4884-017-0002.xml", true);
blackListL4.put("UT1KG4884-017-0003.xml", true);
blackListL4.put("UT1KG4884-017-0008.xml", true);
}
public static void migrateEtexts() {
System.out.println("migrate etexts");
MigrationApp.createDirIfNotExists(MigrationApp.OUTPUT_DIR+"etexts");
ensureGitRepo("etext", MigrationApp.OUTPUT_DIR);
ensureGitRepo("einstance", MigrationApp.OUTPUT_DIR);
ensureGitRepo("etextcontent", MigrationApp.OUTPUT_DIR);
String dirName = MigrationApp.ETEXT_DIR;
File[] filesL1 = new File(dirName).listFiles();
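        // directory layout walked below (inferred from the loops and blacklists):
        // ETEXT_DIR/<distributor>/<etext instance>/<volume>/<UT...>.xml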
for (File fl1 : filesL1) {
if (!fl1.isDirectory())
continue;
String distributor = fl1.getName();
String distributorUri = distributorToUri.get(distributor);
boolean isPaginated = paginatedProviders.contains(distributor);
boolean needsPageNameTranslation = distributor.equals("UCB-OCR");
File[] filesL2 = fl1.listFiles();
for (File fl2 : filesL2) {
if (!fl2.isDirectory() || blackListL2.containsKey(fl2.getName()))
continue;
//System.out.println("migrating "+provider+"/"+fl2.getName());
String itemId = null;
Model itemModel = ModelFactory.createDefaultModel();
setPrefixes(itemModel, "item");
boolean firstItemModel = true;
File[] filesL3 = fl2.listFiles();
for (File fl3 : filesL3) {
if (!fl3.isDirectory() || blackListL3.containsKey(fl3.getName())) // skip blacklisted directories, which look erroneous
continue;
File[] filesL4 = fl3.listFiles();
for (File fl4 : filesL4) {
if (!fl4.isFile())
continue;
String name = fl4.getName();
if (name.startsWith("_") || !name.endsWith(".xml") || blackListL4.containsKey(name))
continue;
String id = name.substring(0, name.length()-4).replace('-', '_');
String dstName = MigrationApp.getDstFileName("etextcontent", id, ".txt");
File dstFile = new File(dstName);
EtextInfos ei;
try {
if (!dstFile.exists())
dstFile.createNewFile();
FileOutputStream dst = new FileOutputStream(dstFile);
ei = migrateOneEtext(fl4.getAbsolutePath(), isPaginated, dst, needsPageNameTranslation, itemModel, firstItemModel, distributorUri);
firstItemModel = false;
dst.close();
if (ei == null) {
continue;
}
} catch (IOException e1) {
e1.printStackTrace();
return;
}
if (itemId != null && !ei.eInstanceId.equals(itemId))
ExceptionHelper.logException(ExceptionHelper.ET_GEN, fl2.getName(), fl2.getName(), "got two different itemIds: "+itemId+" and "+ei.eInstanceId);
if (itemId == null) {
itemId = ei.eInstanceId;
}
if (itemId == null) {
System.err.println("arg!");
System.err.println(ei.toString());
continue;
}
String dst = MigrationApp.getDstFileName("etext", ei.etextId);
MigrationHelpers.outputOneModel(ei.etextModel, ei.etextId, dst, "etext");
}
}
if (itemId != null) { // null in the case of blacklisted works
String dst = MigrationApp.getDstFileName("einstance", itemId);
MigrationHelpers.outputOneModel(itemModel, itemId, dst, "einstance");
}
// TODO: write work->item link
}
}
}
public static class EtextInfos {
public Model etextModel;
public String indicatedWorkId;
public String eInstanceId;
public String etextId;
public String abstractWorkId;
public EtextInfos(Model etextModel, String indicatedWorkId, String eInstanceId, String etextId, String abstractWorkId) {
this.etextModel = etextModel;
this.indicatedWorkId = indicatedWorkId;
this.eInstanceId = eInstanceId;
this.etextId = etextId;
this.abstractWorkId = abstractWorkId;
}
}
public static String instanceIdFromWorkId(final String indicatedWorkId) {
return "IE"+indicatedWorkId.substring(1);
}
public static Literal getLiteral(String s, Model m, String etextId) {
// if the first character is ascii then bo-x-ewts, else tibetan
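        // e.g. a string whose first character is in the Tibetan block (U+0F00-U+0FFF)
        // is tagged "bo", while an ASCII transliteration such as "bod" gets "bo-x-ewts"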
int c = s.charAt(0);
if (c >= 0x0F00 && c <= 0x0FFF)
return m.createLiteral(s, "bo");
if (c <= 0x36F)
return m.createLiteral(s, "bo-x-ewts");
// TODO: replace q with ' ?
ExceptionHelper.logException(ExceptionHelper.ET_GEN, etextId, etextId, "cannot determine language of "+s);
return m.createLiteral(s);
}
public static int getVolumeNumber(String imageGroupId, Model m, String eTextId) {
if (imageGroupId.startsWith("i")) // UT1KG14557_i1KG14561_0000
imageGroupId = "I"+imageGroupId.substring(1);
if (!imageGroupId.startsWith("I")) // UT30012_5742_0000
imageGroupId = "I"+imageGroupId;
final Literal oldId = m.createLiteral(imageGroupId);
final Property volumeNumberP = m.getProperty(BDO, "volumeNumber");
final Property legacyIdP = m.getProperty(ADM, "legacyImageGroupRID");
final List<Statement> sl = m.listStatements(null, legacyIdP, oldId).toList();
if (sl.size() == 0) {
ExceptionHelper.logException(ExceptionHelper.ET_GEN, eTextId, eTextId, "cannot find volume with legacy RID "+imageGroupId);
return 1;
}
if (sl.size() > 1)
System.err.println("two volumes have the legacy ID!");
Resource volumeAdm = sl.get(0).getSubject().asResource();
Resource volume = volumeAdm.getPropertyResourceValue(m.getProperty(ADM, "adminAbout"));
Statement s = volume.getProperty(volumeNumberP);
if (s == null) {
ExceptionHelper.logException(ExceptionHelper.ET_GEN, eTextId, eTextId, "volume with legacy RID "+imageGroupId+" has no volume number");
return 1;
}
return s.getInt();
}
public static Pattern p = Pattern.compile("^UT[^_]+_([^_]+)_(\\d+)$");
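    // the pattern above captures, e.g., "UT1KG14557_i1KG14561_0000" ->
    // group(1) = "i1KG14561" (volume number or legacy image group),
    // group(2) = "0000" (sequence number within the volume)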
public static int[] fillInfosFromId(String eTextId, Model model, Model itemModel) {
Matcher m = p.matcher(eTextId);
if (!m.find()) {
return new int[] {1, 0}; // fallback to volume 1, seq 0; only happens in a few cases
}
int seqNum = Integer.parseInt(m.group(2));
int vol = 1;
boolean volumeIsImageGroup = false;
try {
vol = Integer.parseInt(m.group(1));
if (vol > 900) {
volumeIsImageGroup = true; // case of UT21871_4205_0000 : 4205 is not volume 4205, it's I4205
}
} catch (NumberFormatException e) {
volumeIsImageGroup = true;
}
if (volumeIsImageGroup) {
if (itemModel == null) {
ExceptionHelper.logException(ExceptionHelper.ET_ETEXT, eTextId, eTextId, "cannot understand volume name "+m.group(1));
} else {
vol = getVolumeNumber(m.group(1), itemModel, eTextId);
}
}
if (seqNum == 0) {
model.add(model.getResource(BDR+eTextId),
model.getProperty(BDO+"eTextIsVolume"),
model.createTypedLiteral(vol, XSDDatatype.XSDinteger));
} else {
model.add(model.getResource(BDR+eTextId),
model.getProperty(BDO+"eTextInVolume"),
model.createTypedLiteral(vol, XSDDatatype.XSDinteger));
model.add(model.getResource(BDR+eTextId),
model.getProperty(BDO+"eTextVolumeIndex"),
model.createTypedLiteral(seqNum, XSDDatatype.XSDinteger));
}
return new int[] {vol, seqNum};
}
private static String lastWorkId = null;
public static void addInstanceToWork(String workId, String instanceId, String etextId, boolean isPaginated) {
if (workId.equals(lastWorkId))
return;
final String workPath = MigrationApp.getDstFileName("work", workId, ".trig");
final Model workModel = MigrationHelpers.modelFromFileName(workPath);
if (workModel == null) {
ExceptionHelper.logException(ExceptionHelper.ET_GEN, etextId, etextId, "cannot read work model for image name translation on "+workPath);
return;
}
final Resource workR = workModel.getResource(BDR+workId);
Property p = workModel.getProperty(BDO, "workHasInstance");
workR.addProperty(p, workModel.createResource(BDR+instanceId));
MigrationHelpers.outputOneModel(workModel, workId, workPath, "work");
lastWorkId = workId;
}
private static String lastInstanceId = null;
public static void addReproToInstance(String iInstanceId, String eInstanceId, String etextId, boolean sameOriginAs, boolean isPaginated) {
if (iInstanceId.equals(lastInstanceId))
return;
final String workPath = MigrationApp.getDstFileName("iinstance", iInstanceId, ".trig");
final Model workModel = MigrationHelpers.modelFromFileName(workPath);
if (workModel == null) {
ExceptionHelper.logException(ExceptionHelper.ET_GEN, etextId, etextId, "cannot read image instance model for image name translation on "+workPath);
return;
}
final Resource iInstanceR = workModel.getResource(BDR+iInstanceId);
Property p = workModel.getProperty(BDO, "instanceHasReproduction");
iInstanceR.addProperty(p, workModel.createResource(BDR+eInstanceId));
MigrationHelpers.outputOneModel(workModel, iInstanceId, workPath, "iinstance");
lastInstanceId = iInstanceId;
}
public static Resource getItemEtextPart(Model itemModel, String itemId, int volume, int seqNum) {
final Resource item = itemModel.getResource(BDR+itemId);
final Property itemHasVolume = itemModel.getProperty(BDO, "instanceHasVolume");
final Property volumeHasEtext = itemModel.getProperty(BDO, "volumeHasEtext");
Resource volumeRes = null;
StmtIterator si = item.listProperties(itemHasVolume);
while (si.hasNext()) {
Statement s = si.next();
Resource r = s.getResource();
int i = r.getProperty(itemModel.getProperty(BDO, "volumeNumber")).getInt();
if (volume == i) {
volumeRes = r;
break;
}
}
if (volumeRes == null) {
volumeRes = getFacetNode(FacetType.VOLUME, item, itemModel.getResource(BDO+"VolumeEtextAsset"));
item.addProperty(itemHasVolume, volumeRes);
volumeRes.addProperty(itemModel.getProperty(BDO, "volumeNumber"),
itemModel.createTypedLiteral(volume, XSDDatatype.XSDinteger));
volumeRes.addProperty(itemModel.getProperty(BDO, "volumeOf"), item);
}
Resource seqRes = getFacetNode(FacetType.ETEXT_REF, item);
volumeRes.addProperty(volumeHasEtext, seqRes);
if (seqNum != 0)
seqRes.addProperty(itemModel.getProperty(BDO, "seqNum"),
itemModel.createTypedLiteral(seqNum, XSDDatatype.XSDinteger));
else
seqRes.addProperty(itemModel.getProperty(BDO, "seqNum"),
itemModel.createTypedLiteral(1, XSDDatatype.XSDinteger));
// TODO: check for duplicates
return seqRes;
}
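// single-entry cache of the image instance model, keyed by the derived image item id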
private static Model lastModel = null;
private static String lastModelId = null;
public static Model getItemModel(String workId, String etextId) {
String imageItemId = "I"+workId.substring(1)+CommonMigration.IMAGE_ITEM_SUFFIX;
if (lastModelId != null && lastModelId.equals(imageItemId)) {
return lastModel;
}
String imageItemPath = MigrationApp.getDstFileName("iinstance", imageItemId, ".trig");
Model imageItemModel = MigrationHelpers.modelFromFileName(imageItemPath);
if (imageItemModel == null) {
ExceptionHelper.logException(ExceptionHelper.ET_GEN, etextId, etextId, "cannot read item model for image name translation on "+imageItemPath);
return null;
}
lastModelId = imageItemId;
lastModel = imageItemModel;
return imageItemModel;
}
public static EtextInfos migrateOneEtext(String path, boolean isPaginated, OutputStream contentOut, boolean needsPageNameTranslation, Model itemModel, boolean first, String providerUri) {
final Document d = MigrationHelpers.documentFromFileName(path);
Element fileDesc;
try {
fileDesc = (Element) ((NodeList)xPath.evaluate("/tei:TEI/tei:teiHeader/tei:fileDesc",
d.getDocumentElement(), XPathConstants.NODESET)).item(0);
} catch (XPathExpressionException e1) {
// Having to catch this is utter stupidity
e1.printStackTrace();
return null;
}
final Element titleStmt = (Element) fileDesc.getElementsByTagNameNS(TEI_PREFIX, "titleStmt").item(0);
final Element publicationStmt = (Element) fileDesc.getElementsByTagNameNS(TEI_PREFIX, "publicationStmt").item(0);
final Element sourceDesc = (Element) fileDesc.getElementsByTagNameNS(TEI_PREFIX, "sourceDesc").item(0);
final Model etextModel = ModelFactory.createDefaultModel();
setPrefixes(etextModel, "etext");
Element e;
try {
e = (Element) ((NodeList)xPath.evaluate("tei:bibl/tei:idno[@type='TBRC_RID']",
sourceDesc, XPathConstants.NODESET)).item(0);
} catch (XPathExpressionException e1) {
e1.printStackTrace();
return null;
}
final String indicatedWorkId = e.getTextContent().trim();
String eInstanceId = instanceIdFromWorkId(indicatedWorkId);
String iInstanceId = "I"+indicatedWorkId.substring(1)+CommonMigration.IMAGE_ITEM_SUFFIX;
boolean bornDigital = false;
if (WorkMigration.etextInstances.containsKey(indicatedWorkId)) {
eInstanceId = indicatedWorkId;
bornDigital = true;
}
String abstractWorkId = WorkMigration.getAbstractForRid(indicatedWorkId);
String otherAbstractRID = CommonMigration.abstractClusters.get(abstractWorkId);
if (otherAbstractRID != null) {
abstractWorkId = otherAbstractRID;
}
try {
e = (Element) ((NodeList)xPath.evaluate("tei:idno[@type='TBRC_TEXT_RID']",
publicationStmt, XPathConstants.NODESET)).item(0);
} catch (XPathExpressionException e1) {
e1.printStackTrace();
return null;
}
final String etextId = e.getTextContent().trim().replace('-', '_');
Resource etext = createRoot(etextModel, BDR+etextId, BDO+"Etext"+(isPaginated?"Paginated":"NonPaginated"));
if (first) { // initialize the :ItemEtext
Resource workA = itemModel.getResource(BDR+abstractWorkId);
Resource iInstance = itemModel.getResource(BDR+iInstanceId);
Resource item = createRoot(itemModel, BDR+eInstanceId, BDO+"EtextInstance");
// TODO: +(isPaginated?"Paginated":"NonPaginated")
// Item AdminData
Resource admItem = createAdminRoot(item);
admItem.addProperty(itemModel.getProperty(ADM, "contentProvider"), itemModel.createResource(providerUri));
admItem.addProperty(itemModel.getProperty(ADM, "metadataLegal"), itemModel.createResource(BDA+"LD_BDRC_CC0"));
// TODO: not sure how it should work...
//MigrationApp.moveAdminInfo(itemModel, iInstance, admItem);
addReleased(itemModel, admItem);
// Item metadata
if (WorkMigration.addWorkHasItem) {
addInstanceToWork(abstractWorkId, eInstanceId, etextId, isPaginated);
}
if (!bornDigital) {
// false should be true in the case of KarmaDelek and GuruLama
addReproToInstance(iInstanceId, eInstanceId, etextId, false, isPaginated);
}
if (WorkMigration.addItemForWork) {
item.addProperty(itemModel.getProperty(BDO, "instanceOf"), workA);
}
}
if (addEtextInItem)
etextModel.add(etext,
etextModel.getProperty(BDO, "eTextInInstance"),
etextModel.getResource(BDR+eInstanceId));
etextModel.add(etext,
RDF.type,
etextModel.getResource(BDO+"Etext"+(isPaginated?"Paginated":"NonPaginated")));
Resource admEtext = getAdminRoot(etext, true);
addReleased(etextModel, admEtext);
Model imageItemModel = null;
if (isPaginated && !testMode) {
imageItemModel = getItemModel(indicatedWorkId, etextId);
if (imageItemModel == null) {
System.err.println("error: cannot retrieve item model for "+indicatedWorkId);
return null;
}
}
final int[] volSeqNumInfos = fillInfosFromId(etextId, etextModel, imageItemModel);
itemModel.add(getItemEtextPart(itemModel, eInstanceId, volSeqNumInfos[0], volSeqNumInfos[1]),
itemModel.getProperty(BDO, "eTextResource"),
itemModel.createResource(BDR+etextId));
Map<String,Integer> imageNumPageNum = null;
if (needsPageNameTranslation) {
imageNumPageNum = ImageListTranslation.getImageNums(imageItemModel, volSeqNumInfos[0]);
}
final NodeList titles = titleStmt.getElementsByTagNameNS(TEI_PREFIX, "title");
final List<String> titlesList = new ArrayList<String>();
for (int i = 0; i < titles.getLength(); i++) {
Element title = (Element) titles.item(i);
String titleStr = CommonMigration.normalizeString(title.getTextContent());
if (titleStr.isEmpty())
continue;
if (!titlesList.contains(titleStr)) {
etextModel.add(etext,
//etextModel.getProperty(BDO, "eTextTitle"),
SKOS.prefLabel,
getLiteral(titleStr, etextModel, etextId));
}
}
try {
e = (Element) ((NodeList)xPath.evaluate("tei:bibl/tei:idno[@type='SRC_PATH']",
sourceDesc, XPathConstants.NODESET)).item(0);
} catch (XPathExpressionException e1) {
e1.printStackTrace();
return null;
}
etextModel.add(etext,
etextModel.getProperty(BDO, "eTextSourcePath"),
etextModel.createLiteral(e.getTextContent().trim()));
EtextBodyMigration.MigrateBody(d, contentOut, etextModel, etextId, imageNumPageNum, needsPageNameTranslation, isPaginated);
return new EtextInfos(etextModel, indicatedWorkId, eInstanceId, etextId, abstractWorkId);
}
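/**
 * Minimal prefix-to-URI mapping for XPath evaluation; only getNamespaceURI
 * is implemented, reverse lookups are unsupported. A sketch of typical use
 * (the mapping shown is illustrative, not taken from this file):
 *
 *   Map<String, String> prefixes = new HashMap<String, String>();
 *   prefixes.put("tei", "http://www.tei-c.org/ns/1.0");
 *   xPath.setNamespaceContext(new SimpleNamespaceContext(prefixes));
 */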
public static class SimpleNamespaceContext implements NamespaceContext {
private final Map<String, String> PREF_MAP = new HashMap<String, String>();
public SimpleNamespaceContext(final Map<String, String> prefMap) {
PREF_MAP.putAll(prefMap);
}
public String getNamespaceURI(String prefix) {
return PREF_MAP.get(prefix);
}
public String getPrefix(String uri) {
throw new UnsupportedOperationException();
}
public Iterator getPrefixes(String uri) {
throw new UnsupportedOperationException();
}
}
}
|
package com.akiban.cserver;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;
public class CServerJmxManage {
private static final String jmxurl = "service:jmx:rmi:///jndi/rmi://localhost:7071/jmxrmi";
private static JMXServiceURL url;
private static JMXConnector jmxc;
private static MBeanServerConnection mbsc;
private static ObjectName mbean;
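/**
 * Lazily creates the JMX service URL, connector, server connection and
 * MBean name on first use; subsequent calls reuse the existing objects.
 */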
public static void initJmx() throws Exception
{
if (url==null) url = new JMXServiceURL(jmxurl);
if (jmxc==null) jmxc = JMXConnectorFactory.connect(url, null);
if (mbsc==null) mbsc = jmxc.getMBeanServerConnection();
if (mbean==null) mbean = new ObjectName("com.akiban:type=Manage");
}
public static String arrayToString(String[] a, String separator) {
// a[0] holds the method name, which is printed separately, so only the
// arguments a[1]..a[n-1] are joined here.
StringBuilder result = new StringBuilder();
for (int i = 1; i < a.length; i++) {
if (result.length() > 0) {
result.append(separator);
}
result.append(a[i]);
}
return result.toString();
}
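/**
 * Invokes an arbitrary operation on the com.akiban:type=Manage MBean over
 * the JMX connection. The first argument is the operation name and all
 * remaining arguments are passed as java.lang.String parameters.
 * Hypothetical invocation (the operation name is only an example and
 * depends on what the deployed MBean exposes):
 *
 *   java com.akiban.cserver.CServerJmxManage shutdown
 */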
public static void main(String[] args)
{
try{
initJmx();
if (args.length == 0 )
{
throw new Exception ("methodName must be the first argument");
}
String method = args[0];
int argcnt = args.length-1;
Object[] params = new Object[argcnt];
String[] signature = new String[argcnt];
for (int i = 1; i < args.length; i++)
{
params[i - 1] = args[i];
signature[i - 1] = "java.lang.String";
}
Object retObj = mbsc.invoke(mbean, method, params,signature);
System.out.println ( "JMX call " + method + "(" + arrayToString(args, ",")+ ")" + " returned : " + retObj);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
}
|
package algorithms.imageProcessing.matching;
import algorithms.MultiArrayMergeSort;
import algorithms.QuickSort;
import algorithms.compGeometry.FurthestPair;
import algorithms.imageProcessing.ColorHistogram;
import algorithms.imageProcessing.FixedSizeSortedVector;
import algorithms.imageProcessing.GreyscaleImage;
import algorithms.imageProcessing.Image;
import algorithms.imageProcessing.ImageIOHelper;
import algorithms.imageProcessing.ImageProcessor;
import algorithms.imageProcessing.SIGMA;
import algorithms.imageProcessing.VanishingPoints;
import algorithms.imageProcessing.features.CorrespondenceList;
import algorithms.imageProcessing.features.ORB;
import algorithms.imageProcessing.features.ORB.Descriptors;
import static algorithms.imageProcessing.features.ORB.convertToImage;
import algorithms.imageProcessing.matching.PartialShapeMatcher.Result;
import algorithms.imageProcessing.matching.ShapeFinder.ShapeFinderResult;
import algorithms.imageProcessing.transform.EpipolarTransformer;
import algorithms.imageProcessing.transform.MatchedPointsTransformationCalculator;
import algorithms.imageProcessing.transform.TransformationParameters;
import algorithms.imageProcessing.transform.Transformer;
import algorithms.misc.Misc;
import algorithms.misc.MiscDebug;
import algorithms.search.NearestNeighbor2D;
import algorithms.util.CorrespondencePlotter;
import algorithms.util.OneDIntArray;
import algorithms.util.PairFloatArray;
import algorithms.util.PairInt;
import algorithms.util.PairIntArray;
import algorithms.util.QuadInt;
import algorithms.util.TwoDFloatArray;
import algorithms.util.TwoDIntArray;
import algorithms.util.VeryLongBitString;
import gnu.trove.iterator.TIntIterator;
import gnu.trove.iterator.TIntObjectIterator;
import gnu.trove.list.TDoubleList;
import gnu.trove.list.TFloatList;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TDoubleArrayList;
import gnu.trove.list.array.TFloatArrayList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.TIntObjectMap;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import gnu.trove.set.TIntSet;
import gnu.trove.set.hash.TIntHashSet;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.ejml.simple.SimpleMatrix;
/**
* a class to hold various methods related to matching
* the descriptors of ORB.
* See also ObjectMatcher.
*
* @see ORB
* @see ObjectMatcher
*
* @author nichole
*/
public class ORBMatcher {
// vanishing points for dataset2
private VanishingPoints vp2 = null;
public void setVanishingPointsForSet2(VanishingPoints vp) {
vp2 = vp;
}
/**
* match the template image and shape given by orb1 and labeledPoints1
* with the same object somewhere within the
* segmented labeledPoints2 and orb2.
*
* NOTE that if the template, or its true match in dataset2, is smaller
* than 32 pixels across, the method may not find the object well, so
* alternative methods or pre-processing should be used in that case.
*
* NOTE also that if precise correspondence is needed, this method should
* probably be followed by the partial shape matcher to get a better
* transformation, and then transformed matching keypoints added to that
* correspondence.
*
* NOT READY FOR USE yet.
*
* @param orb1
* @param orb2
* @param labeledPoints1
* @param labeledPoints2
* @return
*/
public List<CorrespondenceList> match0(ORB orb1, ORB orb2,
Set<PairInt> labeledPoints1, List<Set<PairInt>> labeledPoints2) {
/*
uses the descriptors given and then optionally makes masks
for them using the labeled points.
-- visits each octave pair
-- calculates cost of descriptors
-- uses the segmentation to calculate every
permutation of 2 pairs of points.
-- filter out high cost pairs.
-- filters out 2 pair combinations with transformation scales not near 1
-- keeps only the top 10 percent cost of items
from the 2 pair list.
-- evaluates the transformation using the transformed
keypoints cost difference, distance from nearest
neighbor and number of matches
-- keeps the best of each j
-- further compares bestJs with SSDs of intersecting
transformed point sets of the matching keypoints
-- top of those best is the returned result
*/
if (!orb1.getDescrChoice().equals(orb2.getDescrChoice())) {
throw new IllegalStateException("orbs must contain same kind of descirptors");
}
int nBands = 3;
if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.HSV)) {
if (orb1.getDescriptorsH() == null || orb2.getDescriptorsH() == null) {
throw new IllegalStateException("hsv descriptors must be created first");
}
} else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.ALT)) {
if (orb1.getDescriptorsListAlt() == null || orb2.getDescriptorsListAlt() == null) {
throw new IllegalStateException("alt descriptors must be created first");
}
nBands = 1;
} else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.GREYSCALE)) {
if (orb1.getDescriptorsList() == null || orb2.getDescriptorsList() == null) {
throw new IllegalStateException("descriptors must be created first");
}
nBands = 1;
}
boolean useMasks = false;
if (useMasks) {
// initialize the masks, but discard the maps
TObjectIntMap<PairInt> pointLabels1 = new TObjectIntHashMap<PairInt>();
Set<PairInt> set = labeledPoints1;
for (PairInt p : set) {
pointLabels1.put(p, 0);
}
TObjectIntMap<PairInt> pointLabels2 = new TObjectIntHashMap<PairInt>();
for (int i = 0; i < labeledPoints2.size(); ++i) {
set = labeledPoints2.get(i);
for (PairInt p : set) {
pointLabels2.put(p, i);
}
}
orb1.createDescriptorMasks(pointLabels1);
orb2.createDescriptorMasks(pointLabels2);
}
//TODO: may need to revise this or allow it as a method argument:
int pixTolerance = 10;
MatchedPointsTransformationCalculator tc = new MatchedPointsTransformationCalculator();
Transformer transformer = new Transformer();
TFloatList scales1 = extractScales(orb1.getScalesList());
TFloatList scales2 = extractScales(orb2.getScalesList());
if (Math.abs(scales1.get(0) - 1) > 0.01) {
throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'");
}
if (Math.abs(scales2.get(0) - 1) > 0.01) {
throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'");
}
// a rough estimate of maximum number of matchable points in any
// scale dataset comparison
final int nMaxMatchable = Math.round(0.5F * calculateNMaxMatchable(orb1.getKeyPoint1List(), orb2.getKeyPoint1List()));
//TODO: allow a factor to be passed in
System.out.println("nMaxMatchable=" + nMaxMatchable);
int nMax1 = maxSize(orb1.getKeyPoint1List());
int nMax2 = maxSize(orb2.getKeyPoint1List());
int nMax = nMax1 * nMax2;
double minCostTotal = Double.MAX_VALUE;
double minCost1 = Double.MAX_VALUE;
double minCost2 = Double.MAX_VALUE;
double minCost3 = Double.MAX_VALUE;
float minCostTScale = Float.MAX_VALUE;
// the runtime complexity of this vector depends upon the number of items
// it is currently holding, so the capacity can be set high and the vector
// filled only with items within bitTolerance of the best, but too high a
// capacity might affect jvm performance.
// (note: this can be optimized for very large results by occasionally
// ejecting all values with cost > best + bitTolerance.)
// TODO: a safe size is to set the capacity to the number of unique
// transformation parameter sets, but since that isn't known until later
// without refactoring here, assume for now that a size of 100 is generous
// for the number of top solutions.
FixedSizeSortedVector<CObject3> minVec = new FixedSizeSortedVector<CObject3>(1, CObject3.class);
int templateSize = calculateObjectSize(labeledPoints1);
// populated on demand
TObjectIntMap<OneDIntArray> labeledPointsSizes2 =
new TObjectIntHashMap<OneDIntArray>();
for (int i = 0; i < scales1.size(); ++i) {
//for (int i = 2; i < 3; ++i) {
float scale1 = scales1.get(i);
// coords are in ref frame of scale=1 of their pyramids
TIntList kpX1 = orb1.getKeyPoint1List().get(i);
TIntList kpY1 = orb1.getKeyPoint0List().get(i);
int n1 = kpX1.size();
TwoDFloatArray octaveImg1 = orb1.getPyramidImages().get(i);
float diag1 = (float) Math.sqrt(octaveImg1.a.length * octaveImg1.a[0].length);
final double maxDist = diag1;
// create data structures in scaled reference frame
TObjectIntMap<PairInt> p1KPIndexMap = new TObjectIntHashMap<PairInt>();
TIntList kpX1_2 = new TIntArrayList(n1);
TIntList kpY1_2 = new TIntArrayList(n1);
for (int i3 = 0; i3 < n1; ++i3) {
int x = Math.round((float) kpX1.get(i3) / scale1);
int y = Math.round((float) kpY1.get(i3) / scale1);
kpX1_2.add(x);
kpY1_2.add(y);
p1KPIndexMap.put(new PairInt(x, y), i3);
}
List<TIntList> pointIndexLists1 = new ArrayList<TIntList>();
int ns = 1;
for (int i3 = 0; i3 < ns; ++i3) {
pointIndexLists1.add(new TIntArrayList());
}
TObjectIntMap<PairInt> pointLabels1 = new TObjectIntHashMap<PairInt>();
Set<PairInt> set = labeledPoints1;
Set<PairInt> setScaled = new HashSet<PairInt>();
TIntList list = pointIndexLists1.get(0);
assert (list != null);
for (PairInt p : set) {
int x = Math.round((float) p.getX() / scale1);
int y = Math.round((float) p.getY() / scale1);
PairInt p2 = new PairInt(x, y);
pointLabels1.put(p2, 0);
int idx = p1KPIndexMap.get(p2);
list.add(idx);
setScaled.add(p2);
}
Set<PairInt> shape = new HashSet<PairInt>(setScaled);
int objDimension = (int) Math.round((float) templateSize / (float) scale1);
int limit = Math.round(1.15F * objDimension);
int limitSq = limit * limit;
PairIntArray a1 = new PairIntArray(kpX1_2.size());
TIntList a1Indexes = new TIntArrayList(kpX1_2.size());
for (int ii = 0; ii < kpX1.size(); ++ii) {
int x = kpX1.get(ii);
int y = kpY1.get(ii);
a1.add(x, y);
a1Indexes.add(ii);
}
for (int j = 0; j < scales2.size(); ++j) {
//for (int j = 0; j < 1; ++j) {
float scale2 = scales2.get(j);
// coords are in ref frame of scale=1 of their pyramids
TIntList kpX2 = orb2.getKeyPoint1List().get(j);
TIntList kpY2 = orb2.getKeyPoint0List().get(j);
int n2 = kpX2.size();
// create data structures in scaled reference frame
TObjectIntMap<PairInt> p2KPIndexMap = new TObjectIntHashMap<PairInt>();
TObjectIntMap<PairInt> p2KPIndexMap_2 = new TObjectIntHashMap<PairInt>();
TIntList kpX2_2 = new TIntArrayList(n2);
TIntList kpY2_2 = new TIntArrayList(n2);
for (int j3 = 0; j3 < n2; ++j3) {
int x = Math.round((float) kpX2.get(j3) / scale2);
int y = Math.round((float) kpY2.get(j3) / scale2);
kpX2_2.add(x);
kpY2_2.add(y);
p2KPIndexMap_2.put(new PairInt(x, y), j3);
p2KPIndexMap.put(new PairInt(kpX2.get(j3), kpY2.get(j3)), j3);
}
List<TIntList> pointIndexLists2 = new ArrayList<TIntList>();
int ns2 = labeledPoints2.size();
for (int j3 = 0; j3 < ns2; ++j3) {
pointIndexLists2.add(new TIntArrayList());
}
TObjectIntMap<PairInt> pointLabels2 = new TObjectIntHashMap<PairInt>();
for (int j3 = 0; j3 < ns2; ++j3) {
Set<PairInt> set2 = labeledPoints2.get(j3);
TIntList list2 = pointIndexLists2.get(j3);
assert (list2 != null);
for (PairInt p : set2) {
int x = Math.round((float) p.getX() / scale2);
int y = Math.round((float) p.getY() / scale2);
PairInt p2 = new PairInt(x, y);
pointLabels2.put(p2, j3);
int idx = p2KPIndexMap_2.get(p2);
list2.add(idx);
}
}
TwoDFloatArray octaveImg2 = orb2.getPyramidImages().get(j);
debugPrint(octaveImg1, octaveImg2, kpX1_2, kpY1_2, kpX2_2, kpY2_2, i, j);
int maxX2 = orb2.getPyramidImages().get(0).a[0].length;
int maxY2 = orb2.getPyramidImages().get(0).a.length;
int maxX2_2 = octaveImg2.a[0].length;
int maxY2_2 = octaveImg2.a.length;
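// nearest-neighbor structure over the dataset-2 keypoints (in full-frame
// coordinates), used below to find matches for transformed dataset-1 keypoints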
NearestNeighbor2D nn2 = new NearestNeighbor2D(makeSet(kpX2, kpY2), maxX2 + limit, maxY2 + limit);
int nTot = n1 * n2;
//use descriptors with params here to reduce paramsList
int[][] cost = null;
if (useMasks) {
ORB.Descriptors[] desc1 = getDescriptors(orb1, i);
ORB.Descriptors[] desc2 = getDescriptors(orb2, j);
cost = ORB.calcMaskedDescriptorCostMatrixes(desc1, desc2, orb1.getDescriptorsMaskList().get(i), orb2.getDescriptorsMaskList().get(j))[1].a;
} else {
ORB.Descriptors[] desc1 = getDescriptors(orb1, i);
ORB.Descriptors[] desc2 = getDescriptors(orb2, j);
cost = ORB.calcDescriptorCostMatrix(desc1, desc2);
}
//combinations of pairs with same labels
// storing them all to reduce nesting
// quadint is idx1, idx2, idx3, idx4
//TODO: can use the cost to more quickly filter the
// pairs at creation time
List<QuadInt> pairIndexes = createPairLabelIndexes(cost, nBands, pointIndexLists1, kpX1_2, kpY1_2, pointIndexLists2, kpX2_2, kpY2_2);
System.out.println("i=" + i + " j=" + j + " nPairs=" + pairIndexes.size());
FixedSizeSortedVector<CObject4> vecP = new FixedSizeSortedVector<CObject4>(100, //Math.round(0.1f * pairIndexes.size()),
//Math.round(0.01f * pairIndexes.size()),
CObject4.class);
for (int ipi = 0; ipi < pairIndexes.size(); ++ipi) {
QuadInt q = pairIndexes.get(ipi);
int t1X = kpX1_2.get(q.getA());
int t1Y = kpY1_2.get(q.getA());
int t2X = kpX1_2.get(q.getB());
int t2Y = kpY1_2.get(q.getB());
int s1X = kpX2_2.get(q.getC());
int s1Y = kpY2_2.get(q.getC());
int s2X = kpX2_2.get(q.getD());
int s2Y = kpY2_2.get(q.getD());
// transform dataset 1 into frame 2
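// two matched point pairs are enough to determine a Euclidean
// transformation (rotation, scale, translation); candidates whose
// implied scale differs from 1 by more than 15% are discarded below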
TransformationParameters params = tc.calulateEuclidean(t1X, t1Y, t2X, t2Y, s1X, s1Y, s2X, s2Y, 0, 0);
float tScale = params.getScale();
if (Math.abs(tScale - 1.0) > 0.15) {
continue;
}
int idx1_1 = p1KPIndexMap.get(new PairInt(t1X, t1Y));
int idx1_2 = p1KPIndexMap.get(new PairInt(t2X, t2Y));
int idx2_1 = p2KPIndexMap_2.get(new PairInt(s1X, s1Y));
int idx2_2 = p2KPIndexMap_2.get(new PairInt(s2X, s2Y));
// a filter for objects too large to be the template object in
// dataset 1.
// caveat is that cannot use partial shape matcher on all
// results in same manner if filter this one out, but it's
// the right logic if not oversegmented or blended into
// other objects.
int label2 = pointLabels2.get(new PairInt(kpX2.get(q.getC()), kpY2.get(q.getC())));
if (labeledPoints2.get(label2).size() < 2) {
continue;
}
OneDIntArray key = new OneDIntArray(new int[]{label2});
if (!labeledPointsSizes2.containsKey(key)) {
Set<PairInt> set2 = labeledPoints2.get(label2);
if (set2.size() < 2) {
continue;
}
int sz = calculateObjectSize(set2);
labeledPointsSizes2.put(key, sz);
}
int regionSize = labeledPointsSizes2.get(key);
if (regionSize > (1.5 * templateSize)) {
continue;
}
int sum = cost[idx1_1][idx2_1] + cost[idx1_2][idx2_2];
CObject4 cObj = new CObject4(sum, params, q);
boolean added = vecP.add(cObj);
}
System.out.println("for i=" + i + " j=" + j + " filtered nPairs=" + vecP.getNumberOfItems());
double minCostJTotal = Double.MAX_VALUE;
double minCostJ1 = Double.MAX_VALUE;
double minCostJ2 = Double.MAX_VALUE;
double minCostJ3 = Double.MAX_VALUE;
float minCostJTScale = Float.MAX_VALUE;
FixedSizeSortedVector<CObject3> vecJ = new FixedSizeSortedVector<CObject3>(1, CObject3.class);
for (int ipi = 0; ipi < vecP.getNumberOfItems(); ++ipi) {
CObject4 c = vecP.getArray()[ipi];
TransformationParameters params = c.params;
float tScale = params.getScale();
QuadInt q = c.q;
int t1X = kpX1_2.get(q.getA());
int t1Y = kpY1_2.get(q.getA());
int t2X = kpX1_2.get(q.getB());
int t2Y = kpY1_2.get(q.getB());
int s1X = kpX2_2.get(q.getC());
int s1Y = kpY2_2.get(q.getC());
int s2X = kpX2_2.get(q.getD());
int s2Y = kpY2_2.get(q.getD());
PairIntArray tr1 = transformer.applyTransformation(params, a1);
// trim to image dimensions
tr1 = trimToImageBounds(octaveImg2, tr1);
if (tr1.getN() == 0) {
continue;
}
//the matched kpx1,kpy1 kpx2,kpy2 coordinate pairs
int[] mp1 = new int[kpX1.size()];
int[] mp2 = new int[kpX1.size()];
double[] distAndCount = sumKeypointDescAndDist(cost, 3, a1Indexes, tr1, kpX1, kpY1, nn2, p2KPIndexMap, maxX2, maxY2, pixTolerance, maxDist, mp1, mp2);
double sumDesc = distAndCount[0];
double sumDist = distAndCount[1];
int np = (int) distAndCount[2];
int count = np;
if (count < 2) {
continue;
}
if (count == 2 && (nMaxMatchable > 2 * count)) {
// TODO: may want to revise this while still discarding
// false positives
continue;
}
if (np < mp1.length) {
mp1 = Arrays.copyOf(mp1, np);
mp2 = Arrays.copyOf(mp2, np);
}
if (count > nMaxMatchable) {
count = nMaxMatchable;
}
double cf = count;
if (cf > nMaxMatchable) {
cf = nMaxMatchable;
}
cf /= nMaxMatchable;
double sum3 = 1.0 - cf;
//sumDesc /= (double)count;
//sumDist /= (double)count;
sumDesc /= distAndCount[2];
sumDist /= distAndCount[2];
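// the total cost combines the average descriptor cost, the average
// distance of transformed keypoints from their matched neighbors, and
// the fraction of the maximum matchable points left unmatched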
double sum = sumDesc + sumDist + sum3;
// if vecJ is full and this sum is not better (lower) than its worst
// kept item, skip it
if (vecJ.getNumberOfItems() == vecJ.getFixedCapacity()) {
if (sum >= vecJ.getArray()[vecJ.getNumberOfItems() - 1].cost) {
continue;
}
}
TIntSet labels2 = new TIntHashSet();
PairInt[] m1 = new PairInt[np];
PairInt[] m2 = new PairInt[np];
for (int j3 = 0; j3 < m1.length; ++j3) {
int idx1 = mp1[j3];
int idx2 = mp2[j3];
assert (idx1 < kpX1.size() && idx1 > -1);
assert (idx2 < kpX2.size() && idx2 > -1);
m1[j3] = new PairInt(kpX1.get(idx1), kpY1.get(idx1));
m2[j3] = new PairInt(kpX2.get(idx2), kpY2.get(idx2));
assert (labeledPoints1.contains(m1[j3]));
assert (p1KPIndexMap.get(new PairInt(kpX1_2.get(idx1), kpY1_2.get(idx1))) == idx1);
assert (p2KPIndexMap_2.get(new PairInt(kpX2_2.get(idx2), kpY2_2.get(idx2))) == idx2);
labels2.add(pointLabels2.get(m2[j3]));
}
// apply a size filter
OneDIntArray keys = new OneDIntArray(
labels2.toArray(new int[labels2.size()]));
Arrays.sort(keys.a);
if (!labeledPointsSizes2.containsKey(keys)) {
Set<PairInt> combined = new HashSet<PairInt>();
for (int k = 0; k < keys.a.length; ++k) {
combined.addAll(labeledPoints2.get(keys.a[k]));
}
if (combined.size() < 2) {
continue;
}
int sz = calculateObjectSize(combined);
labeledPointsSizes2.put(keys, sz);
}
int regionSize = labeledPointsSizes2.get(keys);
if (regionSize > (1.5 * templateSize)) {
continue;
}
CObject2 cObj2 = new CObject2(ipi, sum, sumDesc, sumDist, sum3, m1, m2);
CObject3 cObj = new CObject3(cObj2, sum, 0, params);
cObj.keypointCount = count;
boolean added = vecJ.add(cObj);
if (added) {
minCostJTotal = sum;
minCostJ1 = sumDesc;
minCostJ2 = sumDist;
minCostJ3 = sum3;
minCostJTScale = tScale;
System.out.println(String.format("i=%d j=%d ipi=%d ts=%.2f c=%.2f c1=%.2f c2=%.2f c3=%.2f count=%d", i, j, ipi, tScale, (float) sum, (float) sumDesc, (float) sumDist, (float) sum3, count));
if (true) {
CorrespondencePlotter plotter = new CorrespondencePlotter(ORB.convertToImage(orb1.getPyramidImages().get(i)), ORB.convertToImage(orb2.getPyramidImages().get(j)));
for (int ii = 0; ii < cObj.m1.length; ++ii) {
PairInt p1 = cObj.m1[ii];
PairInt p2 = cObj.m2[ii];
int x1 = Math.round((float) p1.getX() / scale1);
int y1 = Math.round((float) p1.getY() / scale1);
int x2 = Math.round((float) p2.getX() / scale2);
int y2 = Math.round((float) p2.getY() / scale2);
plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
}
String str = Integer.toString(i);
while (str.length() < 3) {
str = "0" + str;
}
String str2 = Integer.toString(j);
while (str2.length() < 3) {
str2 = "0" + str2;
}
str = str + "_" + str2;
try {
plotter.writeImage("_indiv_masked_corres2_" + str + "_" + ipi);
} catch (IOException ex) {
Logger.getLogger(ORB.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
} // end loop over paramsList
if (vecJ.getNumberOfItems() == 0) {
continue;
}
if (false) {
//DEBUG
for (int k = 0; k < vecJ.getNumberOfItems(); ++k) {
CObject3 cobj = vecJ.getArray()[k];
CorrespondencePlotter plotter = new CorrespondencePlotter(ORB.convertToImage(orb1.getPyramidImages().get(i)), ORB.convertToImage(orb2.getPyramidImages().get(j)));
for (int ii = 0; ii < cobj.m1.length; ++ii) {
PairInt p1 = cobj.m1[ii];
PairInt p2 = cobj.m2[ii];
int x1 = Math.round((float) p1.getX() / scale1);
int y1 = Math.round((float) p1.getY() / scale1);
int x2 = Math.round((float) p2.getX() / scale2);
int y2 = Math.round((float) p2.getY() / scale2);
plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
}
String str = Integer.toString(i);
while (str.length() < 3) {
str = "0" + str;
}
String str2 = Integer.toString(j);
while (str2.length() < 3) {
str2 = "0" + str2;
}
str = str + "_" + str2;
try {
plotter.writeImage("_mindiv_masked_corres3_" + str + "_" + MiscDebug.getCurrentTimeFormatted());
} catch (IOException ex) {
Logger.getLogger(ORB.class.getName()).log(Level.SEVERE, null, ex);
}
System.out.println(String.format("* %d %d ts=%.2f c=%.2f c1=%.2f c2=%.2f c3=%.2f", i, j, cobj.params.getScale(), (float) cobj.cost, (float) cobj.costDesc, (float) cobj.costDist, (float) cobj.costCount));
}
}
if (vecJ.getNumberOfItems() == 0) {
System.out.println("no matches for i=" + i + " j=" + j);
continue;
}
// if expand capacity of minVec, add up to capacity here
minVec.add(vecJ.getArray()[0]);
} // end loop over image j
}
if (minVec.getNumberOfItems() == 0) {
return null;
}
List<CorrespondenceList> topResults = new ArrayList<CorrespondenceList>();
for (int i = 0; i < minVec.getNumberOfItems(); ++i) {
CObject3 a = minVec.getArray()[i];
if (a.cost > minCostTotal) {
break;
}
CorrespondenceList cor = new CorrespondenceList(a.params, a.m1, a.m2);
topResults.add(cor);
}
return topResults;
}
/**
* match the template image and shape given by orb1 and labeledPoints1
* with the same object somewhere within the
* segmented labeledPoints2 and orb2.
*
* this method matches points on a segmented cell basis to calculate
* the minimum cost correspondence with an objective function
* consisting of the cost from an outer point chord difference matrix,
* the cost from hsv orb descriptors of keypoints, and an epipolar
* projection to remove outliers and find matching inner points (with the
* latter costs subsequently added to the total).
* NOT READY FOR USE yet.
*
* @param orb1
* @param orb2
* @param labeledPoints1
* @param labeledPoints2
* @return
*/
public List<CorrespondenceList> match0Epipolar(ORB orb1, ORB orb2,
Set<PairInt> labeledPoints1, List<Set<PairInt>> labeledPoints2) {
if (!orb1.getDescrChoice().equals(orb2.getDescrChoice())) {
throw new IllegalStateException("orbs must contain same kind of descirptors");
}
int nBands = 3;
if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.HSV)) {
if (orb1.getDescriptorsH() == null || orb2.getDescriptorsH() == null) {
throw new IllegalStateException("hsv descriptors must be created first");
}
} else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.ALT)) {
if (orb1.getDescriptorsListAlt() == null || orb2.getDescriptorsListAlt() == null) {
throw new IllegalStateException("alt descriptors must be created first");
}
nBands = 1;
} else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.GREYSCALE)) {
if (orb1.getDescriptorsList() == null || orb2.getDescriptorsList() == null) {
throw new IllegalStateException("descriptors must be created first");
}
nBands = 1;
}
// NOTE: keeping coords in full size reference frames
TFloatList scales1 = extractScales(orb1.getScalesList());
TFloatList scales2 = extractScales(orb2.getScalesList());
if (Math.abs(scales1.get(0) - 1) > 0.01) {
throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'");
}
if (Math.abs(scales2.get(0) - 1) > 0.01) {
throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'");
}
SIGMA sigma = SIGMA.ZEROPOINTFIVE;
float distTol = 5;
EpipolarTransformer eTransformer = new EpipolarTransformer();
List<PairIntArray> bounds1List = createOrderedBounds(orb1, scales1,
labeledPoints1, sigma);
TIntIntMap sizes2Maps = new TIntIntHashMap();
for (int i = 0; i < labeledPoints2.size(); ++i) {
Set<PairInt> set2 = labeledPoints2.get(i);
if (set2.size() < 7) {
continue;
}
int sz = calculateObjectSize(set2);
sizes2Maps.put(i, sz);
}
ImageProcessor imageProcessor = new ImageProcessor();
// -- initialize one bounds cache map per octave of dataset 2
List<TIntObjectMap<PairIntArray>> bounds2MapsList = new
ArrayList<TIntObjectMap<PairIntArray>>();
for (int octave2 = 0; octave2 < scales2.size(); ++octave2) {
bounds2MapsList.add(new TIntObjectHashMap<PairIntArray>());
}
List<TObjectIntMap<PairInt>> kp1IdxMapList
= new ArrayList<TObjectIntMap<PairInt>>();
for (int octave = 0; octave < scales1.size(); ++octave) {
TObjectIntMap<PairInt> keypoints1IndexMap = new TObjectIntHashMap<PairInt>();
for (int i = 0; i < orb1.getKeyPoint1List().get(octave).size(); ++i) {
int x = orb1.getKeyPoint1List().get(octave).get(i);
int y = orb1.getKeyPoint0List().get(octave).get(i);
keypoints1IndexMap.put(new PairInt(x, y), i);
}
kp1IdxMapList.add(keypoints1IndexMap);
}
List<TObjectIntMap<PairInt>> kp2IdxMapList
= new ArrayList<TObjectIntMap<PairInt>>();
for (int octave = 0; octave < scales2.size(); ++octave) {
TObjectIntMap<PairInt> keypoints2IndexMap = new TObjectIntHashMap<PairInt>();
for (int i = 0; i < orb2.getKeyPoint1List().get(octave).size(); ++i) {
int x = orb2.getKeyPoint1List().get(octave).get(i);
int y = orb2.getKeyPoint0List().get(octave).get(i);
keypoints2IndexMap.put(new PairInt(x, y), i);
}
kp2IdxMapList.add(keypoints2IndexMap);
}
// making a lookup map for keypoint indexes in points2 labeled sets
List<TIntObjectMap<TIntSet>> labels2KPIdxsList =
new ArrayList<TIntObjectMap<TIntSet>>();
for (int octave = 0; octave < scales2.size(); ++octave) {
float scale2 = scales2.get(octave);
TIntObjectMap<TIntSet> labels2KPIdxs = new TIntObjectHashMap<TIntSet>();
labels2KPIdxsList.add(labels2KPIdxs);
TObjectIntMap<PairInt> keypoints2IndexMap = kp2IdxMapList.get(octave);
for (int i = 0; i < labeledPoints2.size(); ++i) {
for (PairInt p : labeledPoints2.get(i)) {
if (keypoints2IndexMap.containsKey(p)) {
int kp2Idx = keypoints2IndexMap.get(p);
TIntSet kpIdxs = labels2KPIdxs.get(i);
if (kpIdxs == null) {
kpIdxs = new TIntHashSet();
labels2KPIdxs.put(i, kpIdxs);
}
kpIdxs.add(kp2Idx);
}
}
}
}
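// best result per octave of dataset 1 (when one is found): its cost, its
// correspondence list, and the octave pair that produced it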
float[] bestCosts = new float[scales1.size()];
List<List<QuadInt>> bestCorres = new ArrayList<List<QuadInt>>();
TIntList bestOctaves1 = new TIntArrayList();
TIntList bestOctaves2 = new TIntArrayList();
for (int octave1 = 0; octave1 < scales1.size(); ++octave1) {
//for (int octave1 = 1; octave1 < 2; ++octave1) {
PairIntArray bounds1 = bounds1List.get(octave1);
if (bounds1 == null || bounds1.getN() < 7) {
continue;
}
//float scale1 = scales1.get(octave1);
TObjectIntMap<PairInt> keypoints1IndexMap = kp1IdxMapList.get(octave1);
int sz1 = calculateObjectSize(bounds1);
int nkp1 = orb1.getKeyPoint0List().get(octave1).size();
int nb1 = bounds1.getN();
float normDesc = nkp1 * nBands * 256;
double maxChordAvg = Double.MIN_VALUE;
List<List<QuadInt>> correspondences = new ArrayList<List<QuadInt>>();
TDoubleList descCosts = new TDoubleArrayList();
TIntList descCounts = new TIntArrayList();
TDoubleList epCosts = new TDoubleArrayList();
TDoubleList chordCosts = new TDoubleArrayList();
TIntList octs2 = new TIntArrayList();
//for (int octave2 = 0; octave2 < scales2.size(); ++octave2) {
for (int octave2 = 0; octave2 < 1; ++octave2) {
//float scale2 = scales2.get(octave2);
TObjectIntMap<PairInt> keypoints2IndexMap = kp2IdxMapList.get(octave2);
TIntObjectMap<TIntSet> labels2KPIdxs =
labels2KPIdxsList.get(octave2);
TwoDFloatArray img2 = orb2.getPyramidImages().get(octave2);
for (int segIdx = 0; segIdx < labeledPoints2.size(); ++segIdx) {
int sz2 = sizes2Maps.get(segIdx);
if (sz2 == 0) {
continue;
}
if ((sz1 > sz2 && Math.abs((float)sz1 / (float)sz2) > 1.15) ||
(sz2 > sz1 && Math.abs((float)sz2 / (float)sz1) > 1.15)) {
continue;
}
System.out.println("octave1=" + octave1 + " octave2=" + octave2 +
" sz1=" + sz1 + " sz2=" + sz2 + " segIdx=" + segIdx);
PairIntArray bounds2 = getOrCreateOrderedBounds(img2,
bounds2MapsList.get(octave2), segIdx,
labeledPoints2.get(segIdx), sigma);
if (bounds2 == null || bounds2.getN() < 7) {
continue;
}
PartialShapeMatcher matcher = new PartialShapeMatcher();
matcher.overrideSamplingDistance(1);
matcher._overrideToThreshhold(0.2f);
matcher.setToRemoveOutliers();
matcher.overrideToStoreMatrix();
PartialShapeMatcher.Result result = matcher.match(
bounds1, bounds2);
if (result == null
|| (matcher.getStoredEpipolarFit() == null)
|| (result.getNumberOfMatches() < 3)) {
continue;
}
int nr = result.getNumberOfMatches();
PairIntArray m1 = new PairIntArray(nr);
PairIntArray m2 = new PairIntArray(nr);
Set<PairInt> matched1 = new HashSet<PairInt>();
Set<PairInt> matched2 = new HashSet<PairInt>();
for (int j = 0; j < nr; ++j) {
int idx1 = result.idx1s.get(j);
int idx2 = result.idx2s.get(j);
int x1 = bounds1.getX(idx1);
int y1 = bounds1.getY(idx1);
int x2 = bounds2.getX(idx2);
int y2 = bounds2.getY(idx2);
m1.add(x1, y1);
m2.add(x2, y2);
matched1.add(new PairInt(x1, y1));
matched2.add(new PairInt(x2, y2));
}
SimpleMatrix fm = matcher.getStoredEpipolarFit().getFundamentalMatrix();
SimpleMatrix matchedLeft =
eTransformer.rewriteInto3ColumnMatrix(m1);
SimpleMatrix matchedRight =
eTransformer.rewriteInto3ColumnMatrix(m2);
// this goes into total epipolar distances at end of block
PairFloatArray distances = eTransformer
.calculateDistancesFromEpipolar(fm,
matchedLeft, matchedRight);
TIntSet kp2Idxs = labels2KPIdxs.get(segIdx);
if (kp2Idxs == null) {
continue;
}
// key=keypoint in this labeled region, value=kp2Index
PairIntArray unmatchedKP2 = new PairIntArray();
TObjectIntMap<PairInt> unmatchedKP2Idxs =
new TObjectIntHashMap<PairInt>();
TIntIterator iter = kp2Idxs.iterator();
while (iter.hasNext()) {
int kp2Idx = iter.next();
int x = orb2.getKeyPoint1List().get(octave2).get(kp2Idx);
int y = orb2.getKeyPoint0List().get(octave2).get(kp2Idx);
PairInt p = new PairInt(x, y);
if (!matched2.contains(p)) {
unmatchedKP2Idxs.put(p, kp2Idx);
unmatchedKP2.add(x, y);
}
}
PairIntArray unmatchedKP1 = new PairIntArray();
TObjectIntMap<PairInt> unmatchedKP1Idxs =
new TObjectIntHashMap<PairInt>();
for (int j = 0; j < orb1.getKeyPoint0List().get(octave1).size();
++j) {
int x = orb1.getKeyPoint1List().get(octave1).get(j);
int y = orb1.getKeyPoint0List().get(octave1).get(j);
PairInt p = new PairInt(x, y);
if (!matched1.contains(p)) {
unmatchedKP1Idxs.put(p, j);
unmatchedKP1.add(x, y);
}
}
SimpleMatrix unmatchedLeft =
eTransformer.rewriteInto3ColumnMatrix(unmatchedKP1);
SimpleMatrix unmatchedRight =
eTransformer.rewriteInto3ColumnMatrix(unmatchedKP2);
SimpleMatrix rightEpipolarLines = fm.mult(unmatchedLeft);
SimpleMatrix leftEpipolarLines = fm.transpose().mult(unmatchedRight);
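// unmatched keypoints are paired below only when each point lies within
// distTol of the epipolar line induced by its candidate in the other image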
ORB.Descriptors[] desc1 = getDescriptors(orb1, octave1);
ORB.Descriptors[] desc2 = getDescriptors(orb2, octave2);
int[][] costD = ORB.calcDescriptorCostMatrix(desc1, desc2);
float[] outputDist = new float[2];
int nLeftUnmatched = unmatchedLeft.numCols();
int nRightUnmatched = unmatchedRight.numCols();
float dist, descCost;
float distMax = (float)(Math.sqrt(2) * distTol);
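// distMax is the largest possible combined perpendicular distance when
// both per-image distances equal distTol; it normalizes dist into [0, 1]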
TFloatList totalCost = new TFloatArrayList();
TIntList indexes = new TIntArrayList();
TFloatList eDist = new TFloatArrayList();
TIntList idx1s = new TIntArrayList();
TIntList idx2s = new TIntArrayList();
for (int i = 0; i < nLeftUnmatched; ++i) {
PairInt p1 = new PairInt(unmatchedKP1.getX(i),
unmatchedKP1.getY(i));
int kp1Idx = unmatchedKP1Idxs.get(p1);
for (int j = 0; j < nRightUnmatched; ++j) {
PairInt p2 = new PairInt(unmatchedKP2.getX(j),
unmatchedKP2.getY(j));
int kp2Idx = unmatchedKP2Idxs.get(p2);
eTransformer.calculatePerpDistFromLines(unmatchedLeft,
unmatchedRight, rightEpipolarLines,
leftEpipolarLines, i, j, outputDist);
if (outputDist[0] <= distTol && outputDist[1] <= distTol) {
dist = ((float)Math.sqrt(outputDist[0] * outputDist[0] +
outputDist[1] * outputDist[1]))/distMax;
// normalized descriptor cost
descCost = 1.f - ((nBands * 256.f -
costD[kp1Idx][kp2Idx])
/normDesc);
eDist.add(dist);
totalCost.add(dist + descCost);
indexes.add(indexes.size());
idx1s.add(kp1Idx);
idx2s.add(kp2Idx);
}
}
}
QuickSort.sortBy1stArg(totalCost, indexes);
// choose 2 reference points from result,
// preferably 2 that are keypoints w/ lowest descr costs
// and are far from each other
// returns results as 2 quadints of paired x1,y1,x2,y2
QuadInt[] resultRefs = choose2ReferencePoints(result,
bounds1, bounds2,
keypoints1IndexMap, keypoints2IndexMap, costD);
// these will be stored in the octave2 variables outside this block
double totalDistance = sumDistances(distances);
double totalChordDiffSum = result.chordDiffSum;
double totalDescrSum = 0;
int nDescr = 0;
// new matched keypoint indexes
List<PairInt> addedKPIdxs = new ArrayList<PairInt>();
TIntSet added1 = new TIntHashSet();
TIntSet added2 = new TIntHashSet();
for (int j = 0; j < totalCost.size(); ++j) {
int idx = indexes.get(j);
int kpIdx1 = idx1s.get(idx);
int kpIdx2 = idx2s.get(idx);
if (added1.contains(kpIdx1) || added2.contains(kpIdx2)) {
continue;
}
added1.add(kpIdx1);
added2.add(kpIdx2);
addedKPIdxs.add(new PairInt(kpIdx1, kpIdx2));
totalDistance += eDist.get(idx);
float descrCost = totalCost.get(j) - eDist.get(idx);
totalDescrSum += descrCost;
nDescr++;
int x1 = orb1.getKeyPoint1List().get(octave1).get(kpIdx1);
int y1 = orb1.getKeyPoint0List().get(octave1).get(kpIdx1);
int x2 = orb2.getKeyPoint1List().get(octave2).get(kpIdx2);
int y2 = orb2.getKeyPoint0List().get(octave2).get(kpIdx2);
// calc chord diff for the new points using 2 reference
// points from result.
double chordDiff = matcher.
calculateAChordDifference(
resultRefs[0].getA(), resultRefs[0].getB(),
resultRefs[1].getA(), resultRefs[1].getB(),
x1, y1,
resultRefs[0].getC(), resultRefs[0].getD(),
resultRefs[1].getC(), resultRefs[1].getD(),
x2, y2
);
totalChordDiffSum += chordDiff;
}
System.out.println("nAdded inner points=" + addedKPIdxs.size());
int nTot = result.getNumberOfMatches() + addedKPIdxs.size();
List<QuadInt> corres = new ArrayList<QuadInt>(nTot);
// for any point in result that is a keypoint,
// add the descriptor cost to totalDescrSum
for (int j = 0; j < result.getNumberOfMatches(); ++j) {
int idx1 = result.idx1s.get(j);
int idx2 = result.idx2s.get(j);
int x1 = bounds1.getX(idx1);
int y1 = bounds1.getY(idx1);
PairInt p1 = new PairInt(x1, y1);
int x2 = bounds2.getX(idx2);
int y2 = bounds2.getY(idx2);
PairInt p2 = new PairInt(x2, y2);
if (keypoints1IndexMap.containsKey(p1) &&
keypoints2IndexMap.containsKey(p2)) {
int kpIdx1 = keypoints1IndexMap.get(p1);
int kpIdx2 = keypoints2IndexMap.get(p2);
float c = costD[kpIdx1][kpIdx2];
totalDescrSum += c;
nDescr++;
}
// NOTE: storing the local reference frame to help
// with debugging w/ plotter,
// but this should be changed to p1_fs and p2_fs after
corres.add(new QuadInt(p1, p2));
}
for (int j = 0; j < addedKPIdxs.size(); ++j) {
int kpIdx1 = addedKPIdxs.get(j).getX();
int kpIdx2 = addedKPIdxs.get(j).getY();
int x1 = orb1.getKeyPoint1List().get(octave1).get(kpIdx1);
int y1 = orb1.getKeyPoint0List().get(octave1).get(kpIdx1);
int x2 = orb2.getKeyPoint1List().get(octave2).get(kpIdx2);
int y2 = orb2.getKeyPoint0List().get(octave2).get(kpIdx2);
corres.add(new QuadInt(x1, y1, x2, y2));
}
assert(corres.size() == nTot);
// -- update maxChordAvg
double avgChordDiff = totalChordDiffSum/(double)nTot;
if (avgChordDiff > maxChordAvg) {
maxChordAvg = avgChordDiff;
}
correspondences.add(corres);
descCosts.add(totalDescrSum);
descCounts.add(nDescr);
epCosts.add(totalDistance);
chordCosts.add(totalChordDiffSum);
octs2.add(octave2);
}
}// end loop over octave2
// determine best per octave pair
double bestCost = Double.MAX_VALUE;
List<QuadInt> best = null;
int bestOctave2 = -1;
int nI = correspondences.size();
for (int i = 0; i < nI; ++i) {
double n = correspondences.get(i).size();
double normalizedChord = (chordCosts.get(i)/n)/maxChordAvg;
double normalizedEPDist = (epCosts.get(i)/n)/distTol;
int nDesc = descCounts.get(i);
double normalizedDescr =
1.f - ((nBands * 256.f - (descCosts.get(i)/(double)nDesc))
/normDesc);
double totCost = normalizedChord + normalizedEPDist +
normalizedDescr;
if (totCost < bestCost) {
bestCost = totCost;
best = correspondences.get(i);
bestOctave2 = octs2.get(i);
}
}
if (best == null) {
continue;
}
bestCosts[bestCorres.size()] = (float)bestCost;
bestCorres.add(best);
bestOctaves1.add(octave1);
bestOctaves2.add(bestOctave2);
if (true) {
//DEBUG
for (int k = 0; k < bestCorres.size(); ++k) {
int oct1 = bestOctaves1.get(k);
int oct2 = bestOctaves2.get(k);
float s1 = scales1.get(oct1);
float s2 = scales2.get(oct2);
List<QuadInt> cor = bestCorres.get(k);
CorrespondencePlotter plotter = new CorrespondencePlotter(
ORB.convertToImage(orb1.getPyramidImages().get(oct1)),
ORB.convertToImage(orb2.getPyramidImages().get(oct2)));
for (int ii = 0; ii < cor.size(); ++ii) {
QuadInt q = cor.get(ii);
int x1 = Math.round((float)q.getA()/s1);
int y1 = Math.round((float)q.getB()/s1);
int x2 = Math.round((float)q.getC()/s2);
int y2 = Math.round((float)q.getD()/s2);
plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
}
String str = Integer.toString(oct1);
while (str.length() < 3) {
str = "0" + str;
}
String str2 = Integer.toString(oct2);
while (str2.length() < 3) {
str2 = "0" + str2;
}
str = str + "_" + str2;
try {
plotter.writeImage("_corres3_" + str + "_" +
MiscDebug.getCurrentTimeFormatted());
} catch (IOException ex) {
Logger.getLogger(ORB.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
}
int nl = bestCorres.size();
if (nl == 0) {
return null;
}
if (nl < scales1.size()) {
bestCosts = Arrays.copyOf(bestCosts, nl);
}
int[] indexes = new int[nl];
for (int i = 0; i < nl; ++i) {
indexes[i] = i;
}
QuickSort.sortBy1stArg(bestCosts, indexes, 0, bestCorres.size() - 1);
List<CorrespondenceList> results = new ArrayList<CorrespondenceList>();
for (int i = 0; i < bestCorres.size(); ++i) {
int idx = indexes[i];
List<QuadInt> qs = bestCorres.get(idx);
// points are in full reference frame
results.add(new CorrespondenceList(qs));
}
return results;
}
/**
*
* NOT READY FOR USE yet.
*
* needs the orbs to contain the theta pyramidal images.
* TODO: add usage here.
*
* @param orb1
* @param orb2
* @param labeledPoints1
* @param labeledPoints2
* @return
*/
public List<CorrespondenceList> matchSmall(ORB orb1, ORB orb2, Set<PairInt> labeledPoints1, List<Set<PairInt>> labeledPoints2) {
TFloatList scales1 = extractScales(orb1.getScalesList());
TFloatList scales2 = extractScales(orb2.getScalesList());
SIGMA sigma = SIGMA.ZEROPOINTFIVE;
ImageProcessor imageProcessor = new ImageProcessor();
ColorHistogram cHist = new ColorHistogram();
int templateSize = calculateObjectSize(labeledPoints1);
TIntObjectMap<Set<PairInt>> labeledPoints1Lists = new TIntObjectHashMap<Set<PairInt>>();
// key = octave number, value = histograms of cie luv
TIntObjectMap<TwoDIntArray> ch1s = new TIntObjectHashMap<TwoDIntArray>();
// key = octave number, value = ordered boundaries of sets
TIntObjectMap<PairIntArray> labeledBoundaries1 = new TIntObjectHashMap<PairIntArray>();
for (int octave1 = 0; octave1 < scales1.size(); ++octave1) {
float scale1 = scales1.get(octave1);
Set<PairInt> set1 = new HashSet<PairInt>();
for (PairInt p : labeledPoints1) {
PairInt p1 = new PairInt(Math.round((float) p.getX() / scale1), Math.round((float) p.getY() / scale1));
set1.add(p1);
}
labeledPoints1Lists.put(octave1, set1);
Image img = ORB.convertToImage(orb1.getPyramidImages().get(octave1));
int[][] ch = cHist.histogramCIELUV(img, set1);
ch1s.put(octave1, new TwoDIntArray(ch));
PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary(new HashSet<PairInt>(set1), sigma, img.getWidth(), img.getHeight());
labeledBoundaries1.put(octave1, bounds);
}
int dp = 1;
float intersectionLimit = 0.5F;
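// candidate regions whose CIELUV color histogram intersection with the
// template falls below this limit are skipped before shape matching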
// key = octave number, value = list of labeled sets
TIntObjectMap<List<Set<PairInt>>> labeledPoints2Lists = new TIntObjectHashMap<List<Set<PairInt>>>();
// key = octave number, value = list of histograms of cie luv
TIntObjectMap<List<TwoDIntArray>> ch2Lists
= new TIntObjectHashMap<List<TwoDIntArray>>();
// key = octave number, value = list of ordered points in labeled set
TIntObjectMap<List<PairIntArray>> labeledBoundaries2Lists = new TIntObjectHashMap<List<PairIntArray>>();
for (int k = 0; k < labeledPoints2.size(); ++k) {
Set<PairInt> set = labeledPoints2.get(k);
if (set.size() < 7) {
// NOTE: this means that subsequent datasets2 will not be
// lists having same indexes as labeledPoints2
continue;
}
assert(Math.abs(scales2.get(0) - 1) < 0.02);
PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary(
new HashSet<PairInt>(set), sigma,
orb2.getPyramidImages().get(0).a[0].length,
orb2.getPyramidImages().get(0).a.length);
for (int octave2 = 0; octave2 < scales2.size(); ++octave2) {
float scale2 = scales2.get(octave2);
Image img = ORB.convertToImage(
orb2.getPyramidImages().get(octave2));
int w2 = img.getWidth();
int h2 = img.getHeight();
Set<PairInt> set2 = new HashSet<PairInt>();
for (PairInt p : set) {
int x = Math.round((float) p.getX() / scale2);
int y = Math.round((float) p.getY() / scale2);
if (x == w2) {
x = w2 - 1;
}
if (y == h2) {
y = h2 - 1;
}
PairInt p2 = new PairInt(x, y);
set2.add(p2);
}
List<Set<PairInt>> list2 = labeledPoints2Lists.get(octave2);
if (list2 == null) {
list2 = new ArrayList<Set<PairInt>>();
labeledPoints2Lists.put(octave2, list2);
}
list2.add(set2);
// create histograms for later comparison w/ template at
// different scales
int[][] ch = cHist.histogramCIELUV(img, set2);
List<TwoDIntArray> ch2List = ch2Lists.get(octave2);
if (ch2List == null) {
ch2List = new ArrayList<TwoDIntArray>();
ch2Lists.put(octave2, ch2List);
}
ch2List.add(new TwoDIntArray(ch));
List<PairIntArray> list3 = labeledBoundaries2Lists.get(octave2);
if (list3 == null) {
list3 = new ArrayList<PairIntArray>();
labeledBoundaries2Lists.put(octave2, list3);
}
PairIntArray bounds2 = reduceBounds(bounds, scale2);
list3.add(bounds2);
assert(labeledBoundaries2Lists.get(octave2).size() ==
labeledPoints2Lists.get(octave2).size());
assert(labeledBoundaries2Lists.get(octave2).size() ==
ch2Lists.get(octave2).size());
}
}
// populated on demand; key = (octave number, segmented cell index), value = size
TObjectIntMap<PairInt> size2Map = new TObjectIntHashMap<PairInt>();
// -- compare sets over octaves, first by color histogram intersection,
// then by the partial shape matcher.
// evaluation of results is delayed until the end in order to get the
// maximum chord difference sum, needed for the Salukwdze distance.
// for each i, list of Results, chordDiffSums, bounds1, bounds2
// bundling Results and bounds into an object
TIntObjectMap<List<PObject>> resultsMap = new TIntObjectHashMap<List<PObject>>();
TIntObjectMap<TDoubleList> chordDiffSumsMap = new TIntObjectHashMap<TDoubleList>();
TIntObjectMap<TFloatList> intersectionsMap = new TIntObjectHashMap<TFloatList>();
double maxDiffChordSum = Double.MIN_VALUE;
double maxAvgDiffChord = Double.MIN_VALUE;
double maxAvgDist = Double.MIN_VALUE;
for (int i = 0; i < scales1.size(); ++i) {
//for (int i = 2; i < 3; ++i) {
float scale1 = scales1.get(i);
int[][] ch1 = ch1s.get(i).a;
//Set<PairInt> templateSet = labeledPoints1Lists.get(i);
PairIntArray bounds1 = labeledBoundaries1.get(i);
float sz1 = calculateObjectSize(bounds1);
List<PObject> results = new ArrayList<PObject>();
TDoubleList chordDiffSums = new TDoubleArrayList();
TFloatList intersections = new TFloatArrayList();
for (int j = 0; j < scales2.size(); ++j) {
//for (int j = 0; j < 1; ++j) {
float scale2 = scales2.get(j);
List<TwoDIntArray> listOfCH2s = ch2Lists.get(j);
if (listOfCH2s == null) {
continue;
}
List<Set<PairInt>> listOfSets2 = labeledPoints2Lists.get(j);
List<PairIntArray> listOfBounds2 = labeledBoundaries2Lists.get(j);
for (int k = 0; k < listOfCH2s.size(); ++k) {
PairIntArray bounds2 = listOfBounds2.get(k);
PairInt octLabelKey = new PairInt(j, k);
float sz2;
if (size2Map.containsKey(octLabelKey)) {
sz2 = size2Map.get(octLabelKey);
} else {
sz2 = calculateObjectSize(bounds2);
// cache the size so it is only computed once per (octave, cell)
size2Map.put(octLabelKey, (int) sz2);
}
if (sz2 == 0) {
continue;
}
if ((sz1 > sz2 && Math.abs((float)sz1 / (float)sz2) > 1.15) ||
(sz2 > sz1 && Math.abs((float)sz2 / (float)sz1) > 1.15)) {
continue;
}
int[][] ch2 = listOfCH2s.get(k).a;
float intersection = cHist.intersection(ch1, ch2);
if (intersection < intersectionLimit) {
continue;
}
System.out.println("p2=" +
listOfSets2.get(k).iterator().next()
+ " sz1=" + sz1 + " sz2=" + sz2
+ " nSet=" + listOfSets2.get(k).size());
PartialShapeMatcher matcher = new PartialShapeMatcher();
matcher.overrideSamplingDistance(dp);
//matcher.setToDebug();
//matcher.setToUseSameNumberOfPoints();
PartialShapeMatcher.Result r = matcher.match(bounds1, bounds2);
if (r == null) {
continue;
}
//NOTE: to increase the ability to find projected objects
// that have euclidean poses and skew, might consider
// fast ways to approximate an affine and evaluate it
// after the euclidean solution here.
// affine transformations leave parallel lines in the
// transformed space so could look for that in the
// unmatched portion:
// for example, if half of the object is matched,
// could determine the distance of the matched to the
// unmatched and use that with knowledge of the
// euclidean expected distance to approximate a shear.
// for the evaluations to remain easy to compare results
// with other results, would not want to allow too much
// shear...
// in order to add the chord differences, this additional
// calculation needs to be handled in the
// partial shape matcher (but can be left until the end)
double c = r.getChordDiffSum();
results.add(new PObject(r, bounds1, bounds2, scale1, scale2));
chordDiffSums.add(r.getChordDiffSum());
intersections.add(intersection);
if (r.getChordDiffSum() > maxDiffChordSum) {
maxDiffChordSum = r.getChordDiffSum();
}
double avgCD = r.getChordDiffSum() / (double) r.getNumberOfMatches();
if (avgCD > maxAvgDiffChord) {
maxAvgDiffChord = avgCD;
}
double avgDist = r.getDistSum() / (double) r.getNumberOfMatches();
if (avgDist > maxAvgDist) {
maxAvgDist = avgDist;
}
System.out.println(String.format(
"%d %d p in set=%s shape matcher c=%.2f np=%d inter=%.2f dist=%.2f avgDist=%.2f",
i, j, listOfSets2.get(k).iterator().next().toString(),
(float) c, r.getNumberOfMatches(), (float) intersection,
(float) r.getDistSum(), (float) avgDist));
try {
CorrespondencePlotter plotter = new CorrespondencePlotter(bounds1, bounds2);
for (int ii = 0; ii < r.getNumberOfMatches(); ++ii) {
int idx1 = r.getIdx1(ii);
int idx2 = r.getIdx2(ii);
int x1 = bounds1.getX(idx1);
int y1 = bounds1.getY(idx1);
int x2 = bounds2.getX(idx2);
int y2 = bounds2.getY(idx2);
if ((ii % 4) == 0) {
plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
}
}
String strI = Integer.toString(i);
while (strI.length() < 2) {
strI = "0" + strI;
}
String strJ = Integer.toString(j);
while (strJ.length() < 2) {
strJ = "0" + strJ;
}
String strK = Integer.toString(k);
while (strK.length() < 2) {
strK = "0" + strK;
}
String str = strI + strJ + strK;
String filePath = plotter.writeImage("_andr_" + str);
} catch (Throwable t) {
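// the plot is for debugging only; failures here are ignored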
}
} //end loop over k labeled sets of dataset 2
} // end loop over j datasets 2
if (!results.isEmpty()) {
resultsMap.put(i, results);
chordDiffSumsMap.put(i, chordDiffSums);
intersectionsMap.put(i, intersections);
}
} // end loop over i dataset 1
// calculate the Salukwdze distances
/*
for each i, need the max chord diff sum, nPoints in bound1, and best Results
*/
double minSD = Double.MAX_VALUE;
int minSDI = -1;
TIntObjectIterator<List<PObject>> iter = resultsMap.iterator();
for (int i = 0; i < resultsMap.size(); ++i) {
iter.advance();
int idx = iter.key();
//double maxDiffChordSum = chordDiffSumsMap.get(idx).max();
double minCost = Double.MAX_VALUE;
int minCostIdx = -1;
List<PObject> resultsList = resultsMap.get(idx);
for (int j = 0; j < resultsList.size(); ++j) {
PObject obj = resultsList.get(j);
float costIntersection = 1.0F - intersectionsMap.get(idx).get(j);
PartialShapeMatcher.Result r = obj.r;
int nb1 = Math.round((float) obj.bounds1.getN() / (float) dp);
float np = r.getNumberOfMatches();
float countComp = 1.0F - (np / (float) nb1);
float countCompSq = countComp * countComp;
double chordComp = ((float) r.getChordDiffSum() / np) / maxAvgDiffChord;
double chordCompSq = chordComp * chordComp;
double avgDist = r.getDistSum() / np;
double distComp = avgDist / maxAvgDist;
double distCompSq = distComp * distComp;
// Salukwdze uses square sums
//double sd = r.calculateSalukwdzeDistanceSquared(
// maxDiffChordSum, nb1);
// TODO: consider formal analysis of dependencies and hence
// error terms:
//double sd = chordCompSq*countCompSq
// + distCompSq*countCompSq;
//NOTE: The coverage of the matches is currently
// approximated as simply numberMatched/maxNumberMatchable,
// but a term representing the spatial distribution appears
// to be necessary also.
// will try largestNumberGap/maxNumberMatchable.
// TODO: need to improve this in detail later
int lGap = maxNumberOfGaps(obj.bounds1, r)/dp;
float gCountComp = (float)lGap/(float)nb1;
//double sd = chordCompSq + countCompSq + distCompSq;
double sd = chordComp + countComp + gCountComp + distComp
+ costIntersection;
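                // illustrative worked example (hypothetical values, not from the
                // source data): with chordComp=0.4, countComp=0.3, gCountComp=0.1,
                // distComp=0.2 and intersection=0.75 (so costIntersection=0.25),
                // sd = 0.4 + 0.3 + 0.1 + 0.2 + 0.25 = 1.25.
                // each term is normalized to roughly [0, 1], so a smaller sd means
                // better combined shape, coverage, distance, and color agreement.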
if (sd < minCost) {
minCost = sd;
minCostIdx = j;
}
if (sd < minSD) {
minSD = sd;
minSDI = idx;
}
System.out.println("sd=" + sd + " n1="
+ obj.bounds1.getN() + " n2=" + obj.bounds2.getN()
+ " origN1=" + r.getOriginalN1()
+ " nMatches=" + r.getNumberOfMatches()
+ String.format(
" chord=%.2f count=%.2f spatial=%.2f dist=%.2f inter=%.2f",
(float)chordComp, (float)countComp,
(float)gCountComp, (float)distComp,
(float)costIntersection)
);
}
assert (minCostIdx > -1);
TDoubleList cList = chordDiffSumsMap.get(idx);
TFloatList iList = intersectionsMap.get(idx);
for (int j = resultsList.size() - 1; j > -1; --j) {
if (j != minCostIdx) {
resultsList.remove(j);
cList.removeAt(j);
iList.removeAt(j);
}
}
}
if (resultsMap.size() > 1) {
// TODO: build a test for this.
// possibly need to transform results to same reference
// frame to compare.
// using best SD for now
TIntSet rm = new TIntHashSet();
iter = resultsMap.iterator();
for (int i = 0; i < resultsMap.size(); ++i) {
iter.advance();
int idx = iter.key();
if (idx != minSDI) {
rm.add(idx);
}
}
TIntIterator iter2 = rm.iterator();
while (iter2.hasNext()) {
int idx = iter2.next();
resultsMap.remove(idx);
}
}
List<CorrespondenceList> topResults = new ArrayList<CorrespondenceList>();
iter = resultsMap.iterator();
for (int i = 0; i < resultsMap.size(); ++i) {
iter.advance();
int idx = iter.key();
List<PObject> resultsList = resultsMap.get(idx);
assert (resultsList.size() == 1);
PObject obj = resultsList.get(0);
int n = obj.r.getNumberOfMatches();
if (obj.r.getTransformationParameters() == null) {
continue;
}
PairInt[] m1 = new PairInt[n];
PairInt[] m2 = new PairInt[n];
float scale1 = obj.scale1;
float scale2 = obj.scale2;
for (int ii = 0; ii < n; ++ii) {
int idx1 = obj.r.getIdx1(ii);
int idx2 = obj.r.getIdx2(ii);
int x1 = Math.round(obj.bounds1.getX(idx1) * scale1);
int y1 = Math.round(obj.bounds1.getY(idx1) * scale1);
int x2 = Math.round(obj.bounds2.getX(idx2) * scale2);
int y2 = Math.round(obj.bounds2.getY(idx2) * scale2);
m1[ii] = new PairInt(x1, y1);
m2[ii] = new PairInt(x2, y2);
}
CorrespondenceList cor
= new CorrespondenceList(obj.r.getTransformationParameters(), m1, m2);
topResults.add(cor);
}
return topResults;
}
    /**
     *
     * NOT READY FOR USE yet.
     *
     * searches among aggregated adjacent labeled points to find the best
     * fitting shape and color object, where the template is
     * dataset 1 and the searchable image is dataset 2.
     *
     * @param orb1
     * @param orb2
     * @param labeledPoints1
     * @param labeledPoints2
     * @return
     */
public List<CorrespondenceList> matchAggregatedShape(
ORB orb1, ORB orb2, Set<PairInt> labeledPoints1,
List<Set<PairInt>> labeledPoints2) {
TFloatList scales1 = extractScales(orb1.getScalesList());
TFloatList scales2 = extractScales(orb2.getScalesList());
if (Math.abs(scales1.get(0) - 1) > 0.01) {
throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'");
}
if (Math.abs(scales2.get(0) - 1) > 0.01) {
throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'");
}
SIGMA sigma = SIGMA.ZEROPOINTFIVE;
ImageProcessor imageProcessor = new ImageProcessor();
ColorHistogram cHist = new ColorHistogram();
int templateSize = calculateObjectSize(labeledPoints1);
TIntObjectMap<Set<PairInt>> labeledPoints1Lists = new TIntObjectHashMap<Set<PairInt>>();
        // key = octave number, value = histograms of CIE LUV
TIntObjectMap<TwoDIntArray> ch1s = new TIntObjectHashMap<TwoDIntArray>();
// key = octave number, value = ordered boundaries of sets
TIntObjectMap<PairIntArray> labeledBoundaries1 = new TIntObjectHashMap<PairIntArray>();
for (int octave1 = 0; octave1 < scales1.size(); ++octave1) {
float scale1 = scales1.get(octave1);
Set<PairInt> set1 = new HashSet<PairInt>();
for (PairInt p : labeledPoints1) {
PairInt p1 = new PairInt(Math.round((float) p.getX() / scale1), Math.round((float) p.getY() / scale1));
set1.add(p1);
}
labeledPoints1Lists.put(octave1, set1);
Image img = ORB.convertToImage(orb1.getPyramidImages().get(octave1));
int[][] ch = cHist.histogramCIELUV(img, set1);
ch1s.put(octave1, new TwoDIntArray(ch));
            PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary(new HashSet<PairInt>(set1), sigma, img.getWidth(), img.getHeight());
labeledBoundaries1.put(octave1, bounds);
}
int dp = 1;
float intersectionLimit = 0.5F;
// key = octave number, value = list of labeled sets
TIntObjectMap<List<Set<PairInt>>> labeledPoints2Lists = new TIntObjectHashMap<List<Set<PairInt>>>();
        // key = octave number, value = list of histograms of CIE LUV
TIntObjectMap<List<TwoDIntArray>> ch2Lists
= new TIntObjectHashMap<List<TwoDIntArray>>();
for (int k = 0; k < labeledPoints2.size(); ++k) {
Set<PairInt> set = labeledPoints2.get(k);
if (set.size() < 7) {
// NOTE: this means that subsequent datasets2 will not be
// lists having same indexes as labeledPoints2
continue;
}
assert(Math.abs(scales2.get(0) - 1) < 0.02);
            PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary(
                new HashSet<PairInt>(set), sigma,
orb2.getPyramidImages().get(0).a[0].length,
orb2.getPyramidImages().get(0).a.length);
for (int octave2 = 0; octave2 < scales2.size(); ++octave2) {
float scale2 = scales2.get(octave2);
Image img = ORB.convertToImage(
orb2.getPyramidImages().get(octave2));
int w2 = img.getWidth();
int h2 = img.getHeight();
Set<PairInt> set2 = new HashSet<PairInt>();
for (PairInt p : set) {
int x = Math.round((float) p.getX() / scale2);
int y = Math.round((float) p.getY() / scale2);
if (x == w2) {
x = w2 - 1;
}
if (y == h2) {
y = h2 - 1;
}
PairInt p2 = new PairInt(x, y);
set2.add(p2);
}
List<Set<PairInt>> list2 = labeledPoints2Lists.get(octave2);
if (list2 == null) {
list2 = new ArrayList<Set<PairInt>>();
labeledPoints2Lists.put(octave2, list2);
}
list2.add(set2);
// create histograms for later comparison w/ template at
// different scales
int[][] ch = cHist.histogramCIELUV(img, set2);
List<TwoDIntArray> ch2List = ch2Lists.get(octave2);
if (ch2List == null) {
ch2List = new ArrayList<TwoDIntArray>();
ch2Lists.put(octave2, ch2List);
}
ch2List.add(new TwoDIntArray(ch));
assert(labeledPoints2Lists.get(octave2).size() ==
ch2Lists.get(octave2).size());
}
}
// populated on demand, key=octave, key=segmented cell, value=size
TObjectIntMap<PairInt> size2Map = new TObjectIntHashMap<PairInt>();
// -- compare sets over octaves:
// aggregated search of adjacent labeled cells to compare their combined
// properties of color histogram and shape to the template.
// delaying evaluation of results until end in order to get the
        // maximum chord difference sum, needed for the Salukwdze distance.
// for each i, list of Results, chordDiffSums, bounds1, bounds2
// bundling Results and bounds into an object
TIntObjectMap<List<PObject>> resultsMap = new TIntObjectHashMap<List<PObject>>();
TIntObjectMap<TDoubleList> chordDiffSumsMap = new TIntObjectHashMap<TDoubleList>();
TIntObjectMap<TFloatList> intersectionsMap = new TIntObjectHashMap<TFloatList>();
double maxDiffChordSum = Double.MIN_VALUE;
double maxAvgDiffChord = Double.MIN_VALUE;
double maxAvgDist = Double.MIN_VALUE;
// maps to reuse the aggregated boundaries
// list is octave2 items
// each map key=segmented cell label indexes,
// value = index to map in octave2IndexBoundsMaps
List<Map<OneDIntArray, PairIntArray>> octave2KeyIndexMaps
= new ArrayList<Map<OneDIntArray, PairIntArray>>();
for (int j = 0; j < scales2.size(); ++j) {
octave2KeyIndexMaps.add(new HashMap<OneDIntArray, PairIntArray>());
}
for (int i = 0; i < scales1.size(); ++i) {
//for (int i = 0; i < 1; ++i) {
float scale1 = scales1.get(i);
int[][] ch1 = ch1s.get(i).a;
//Set<PairInt> templateSet = labeledPoints1Lists.get(i);
PairIntArray bounds1 = labeledBoundaries1.get(i);
float sz1 = calculateObjectSize(bounds1);
List<PObject> results = new ArrayList<PObject>();
TDoubleList chordDiffSums = new TDoubleArrayList();
TFloatList intersections = new TFloatArrayList();
for (int j = 0; j < scales2.size(); ++j) {
//for (int j = 0; j < 1; ++j) {
float scale2 = scales2.get(j);
List<TwoDIntArray> listOfCH2s = ch2Lists.get(j);
if (listOfCH2s == null) {
continue;
}
List<Set<PairInt>> listOfSets2 = labeledPoints2Lists.get(j);
Map<OneDIntArray, PairIntArray> keyBoundsMap
= octave2KeyIndexMaps.get(j);
ShapeFinder shapeFinder = new ShapeFinder(
bounds1, ch1, scale1, sz1,
orb1.getPyramidImages().get(i).a[0].length -1,
orb1.getPyramidImages().get(i).a.length - 1,
listOfSets2, listOfCH2s, scale2,
keyBoundsMap,
orb2.getPyramidImages().get(j).a[0].length -1,
orb2.getPyramidImages().get(j).a.length - 1,
intersectionLimit
);
shapeFinder.pyr1 = orb1.getPyramidImages().get(i);
shapeFinder.pyr2 = orb2.getPyramidImages().get(j);
shapeFinder.lbl = Integer.toString(i) + ":" + Integer.toString(j) + "_";
shapeFinder.oct1 = i;
shapeFinder.oct2 = j;
{
//if (i==2&&j==0) {
Image img1 = ORB.convertToImage(orb1.getPyramidImages().get(i));
Image img2 = ORB.convertToImage(orb2.getPyramidImages().get(j));
MiscDebug.writeImage(img2, "AAA_2_" + j);
MiscDebug.writeImage(img1, "AAA_1_" + i);
for (int i2 = 0; i2 < listOfSets2.size(); ++i2) {
int clr = ImageIOHelper.getNextColorRGB(i2);
Set<PairInt> set = listOfSets2.get(i2);
for (PairInt p : set) {
ImageIOHelper.addPointToImage(p.getX(), p.getY(),
img2, 1, clr);
}
}
MiscDebug.writeImage(img2,
"_AAA_2_s_" + j);
}
ShapeFinderResult r = shapeFinder.findAggregated();
if (r == null) {
continue;
}
double c = r.getChordDiffSum();
results.add(new PObject(r, r.bounds1, r.bounds2, scale1, scale2));
chordDiffSums.add(r.getChordDiffSum());
intersections.add(r.intersection);
if (r.getChordDiffSum() > maxDiffChordSum) {
maxDiffChordSum = r.getChordDiffSum();
}
double avgCD = r.getChordDiffSum() / (double) r.getNumberOfMatches();
if (avgCD > maxAvgDiffChord) {
maxAvgDiffChord = avgCD;
}
double avgDist = r.getDistSum() / (double) r.getNumberOfMatches();
if (avgDist > maxAvgDist) {
maxAvgDist = avgDist;
}
System.out.println(String.format(
"%d %d p in set=(%d,%d) shape matcher c=%.2f np=%d inter=%.2f dist=%.2f avgDist=%.2f",
i, j, r.bounds2.getX(0), r.bounds2.getY(0),
(float) c, r.getNumberOfMatches(), (float) r.intersection,
(float) r.getDistSum(), (float) avgDist));
} // end loop over j datasets 2
if (!results.isEmpty()) {
resultsMap.put(i, results);
chordDiffSumsMap.put(i, chordDiffSums);
intersectionsMap.put(i, intersections);
}
} // end loop over i dataset 1
// calculate the Salukwdze distances
/*
for each i, need the max chord diff sum, nPoints in bound1, and best Results
*/
double minSD = Double.MAX_VALUE;
int minSDI = -1;
TIntObjectIterator<List<PObject>> iter = resultsMap.iterator();
for (int i = 0; i < resultsMap.size(); ++i) {
iter.advance();
int idx = iter.key();
//double maxDiffChordSum = chordDiffSumsMap.get(idx).max();
double minCost = Double.MAX_VALUE;
int minCostIdx = -1;
List<PObject> resultsList = resultsMap.get(idx);
for (int j = 0; j < resultsList.size(); ++j) {
PObject obj = resultsList.get(j);
float costIntersection = 1.0F - intersectionsMap.get(idx).get(j);
PartialShapeMatcher.Result r = obj.r;
int nb1 = Math.round((float) obj.bounds1.getN() / (float) dp);
float np = r.getNumberOfMatches();
float countComp = 1.0F - (np / (float) nb1);
float countCompSq = countComp * countComp;
double chordComp = ((float) r.getChordDiffSum() / np) / maxAvgDiffChord;
double chordCompSq = chordComp * chordComp;
double avgDist = r.getDistSum() / np;
double distComp = avgDist / maxAvgDist;
double distCompSq = distComp * distComp;
                // Salukwdze uses square sums
//double sd = r.calculateSalukwdzeDistanceSquared(
// maxDiffChordSum, nb1);
// TODO: consider formal analysis of dependencies and hence
// error terms:
//double sd = chordCompSq*countCompSq
// + distCompSq*countCompSq;
//NOTE: The coverage of the matches is currently
// approximated as simply numberMatched/maxNumberMatchable,
// but a term representing the spatial distribution appears
// to be necessary also.
// will try largestNumberGap/maxNumberMatchable.
// TODO: need to improve this in detail later
int lGap = maxNumberOfGaps(obj.bounds1, r)/dp;
float gCountComp = (float)lGap/(float)nb1;
//double sd = chordCompSq + countCompSq + distCompSq;
double sd = chordComp + countComp + gCountComp + distComp
+ costIntersection;
if (sd < minCost) {
minCost = sd;
minCostIdx = j;
}
if (sd < minSD) {
minSD = sd;
minSDI = idx;
}
System.out.println("sd=" + sd + " n1="
+ obj.bounds1.getN() + " n2=" + obj.bounds2.getN()
+ " origN1=" + r.getOriginalN1()
+ " nMatches=" + r.getNumberOfMatches()
+ String.format(
" chord=%.2f count=%.2f spatial=%.2f dist=%.2f inter=%.2f",
(float)chordComp, (float)countComp,
(float)gCountComp, (float)distComp,
(float)costIntersection)
);
}
assert (minCostIdx > -1);
TDoubleList cList = chordDiffSumsMap.get(idx);
TFloatList iList = intersectionsMap.get(idx);
for (int j = resultsList.size() - 1; j > -1; --j) {
if (j != minCostIdx) {
resultsList.remove(j);
cList.removeAt(j);
iList.removeAt(j);
}
}
}
if (resultsMap.size() > 1) {
// TODO: build a test for this.
// possibly need to transform results to same reference
// frame to compare.
// using best SD for now
TIntSet rm = new TIntHashSet();
iter = resultsMap.iterator();
for (int i = 0; i < resultsMap.size(); ++i) {
iter.advance();
int idx = iter.key();
if (idx != minSDI) {
rm.add(idx);
}
}
TIntIterator iter2 = rm.iterator();
while (iter2.hasNext()) {
int idx = iter2.next();
resultsMap.remove(idx);
}
}
List<CorrespondenceList> topResults = new ArrayList<CorrespondenceList>();
iter = resultsMap.iterator();
for (int i = 0; i < resultsMap.size(); ++i) {
iter.advance();
int idx = iter.key();
List<PObject> resultsList = resultsMap.get(idx);
assert (resultsList.size() == 1);
PObject obj = resultsList.get(0);
int n = obj.r.getNumberOfMatches();
PairInt[] m1 = new PairInt[n];
PairInt[] m2 = new PairInt[n];
float scale1 = obj.scale1;
float scale2 = obj.scale2;
for (int ii = 0; ii < n; ++ii) {
int idx1 = obj.r.getIdx1(ii);
int idx2 = obj.r.getIdx2(ii);
int x1 = Math.round(obj.bounds1.getX(idx1) * scale1);
int y1 = Math.round(obj.bounds1.getY(idx1) * scale1);
int x2 = Math.round(obj.bounds2.getX(idx2) * scale2);
int y2 = Math.round(obj.bounds2.getY(idx2) * scale2);
m1[ii] = new PairInt(x1, y1);
m2[ii] = new PairInt(x2, y2);
}
CorrespondenceList cor = new CorrespondenceList(obj.r.getTransformationParameters(), m1, m2);
topResults.add(cor);
}
return topResults;
}
private TFloatList extractScales(List<TFloatList> scalesList) {
TFloatList scales = new TFloatArrayList();
for (int i = 0; i < scalesList.size(); ++i) {
scales.add(scalesList.get(i).get(0));
}
return scales;
}
private int calculateNMaxMatchable(List<TIntList> keypointsX1, List<TIntList> keypointsX2) {
int nMaxM = Integer.MIN_VALUE;
for (int i = 0; i < keypointsX1.size(); ++i) {
int n1 = keypointsX1.get(i).size();
for (int j = 0; j < keypointsX2.size(); ++j) {
int n2 = keypointsX2.get(j).size();
int min = Math.min(n1, n2);
if (min > nMaxM) {
nMaxM = min;
}
}
}
return nMaxM;
}
private int maxSize(List<TIntList> a) {
int maxSz = Integer.MIN_VALUE;
for (TIntList b : a) {
int sz = b.size();
if (sz > maxSz) {
maxSz = sz;
}
}
return maxSz;
}
public static double distance(int x, int y, PairInt b) {
int diffX = x - b.getX();
int diffY = y - b.getY();
double dist = Math.sqrt(diffX * diffX + diffY * diffY);
return dist;
}
public static int distance(PairInt p1, PairInt p2) {
int diffX = p1.getX() - p2.getX();
int diffY = p1.getY() - p2.getY();
return (int) Math.sqrt(diffX * diffX + diffY * diffY);
}
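    // note on the integer distance above: the (int) cast truncates toward zero,
    // e.g. distance((0,0), (1,1)) = (int) sqrt(2) = 1, while
    // distance((2,3), (5,7)) = (int) sqrt(9 + 16) = 5 (hypothetical values).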
/**
* greedy matching of d1 to d2 by min cost, with unique mappings for
* all indexes.
*
* @param d1
* @param d2
* @return matches - two dimensional int array of indexes in d1 and
* d2 which are matched.
*/
public static int[][] matchDescriptors(VeryLongBitString[] d1, VeryLongBitString[] d2, List<PairInt> keypoints1, List<PairInt> keypoints2) {
int n1 = d1.length;
int n2 = d2.length;
//[n1][n2]
int[][] cost = ORB.calcDescriptorCostMatrix(d1, d2);
int[][] matches = greedyMatch(keypoints1, keypoints2, cost);
// greedy or optimal match can be performed here.
// NOTE: some matching problems might benefit from using the spatial
// information at the same time. for those, will consider adding
// an evaluation term for these descriptors to a specialization of
// PartialShapeMatcher.java
return matches;
}
/**
* greedy matching of d1 to d2 by min difference, with unique mappings for
* all indexes.
* NOTE that if 2 descriptors match equally well, either one
* might get the assignment.
     * Consider using matchDescriptors2 instead, which matches
     * by descriptor and relative spatial location.
*
* @param d1
* @param d2
* @param keypoints2
* @param keypoints1
* @return matches - two dimensional int array of indexes in d1 and
* d2 which are matched.
*/
public static int[][] matchDescriptors(ORB.Descriptors[] d1, ORB.Descriptors[] d2, List<PairInt> keypoints1, List<PairInt> keypoints2) {
if (d1.length != d2.length) {
throw new IllegalArgumentException("d1 and d2 must" + " be same length");
}
int n1 = d1[0].descriptors.length;
int n2 = d2[0].descriptors.length;
if (n1 != keypoints1.size()) {
throw new IllegalArgumentException("number of descriptors in " + " d1 bitstrings must be same as keypoints1 length");
}
if (n2 != keypoints2.size()) {
throw new IllegalArgumentException("number of descriptors in " + " d2 bitstrings must be same as keypoints2 length");
}
//[n1][n2]
int[][] cost = ORB.calcDescriptorCostMatrix(d1, d2);
int[][] matches = greedyMatch(keypoints1, keypoints2, cost);
// greedy or optimal match can be performed here.
// NOTE: some matching problems might benefit from using the spatial
// information at the same time. for those, will consider adding
// an evaluation term for these descriptors to a specialization of
// PartialShapeMatcher.java
return matches;
}
private static void debugPrint(List<QuadInt> pairs, int i, int j, TwoDFloatArray pyr1, TwoDFloatArray pyr2, float s1, float s2) {
Image img1 = ORB.convertToImage(pyr1);
Image img2 = ORB.convertToImage(pyr2);
try {
for (int ii = 0; ii < pairs.size(); ++ii) {
QuadInt q = pairs.get(ii);
int x1 = Math.round(q.getA() / s1);
int y1 = Math.round(q.getB() / s1);
int x2 = Math.round(q.getC() / s2);
int y2 = Math.round(q.getD() / s2);
ImageIOHelper.addPointToImage(x1, y1, img1, 1, 255, 0, 0);
ImageIOHelper.addPointToImage(x2, y2, img2, 1, 255, 0, 0);
}
String strI = Integer.toString(i);
while (strI.length() < 3) {
strI = "0" + strI;
}
String strJ = Integer.toString(j);
while (strJ.length() < 3) {
strJ = "0" + strJ;
}
String str = "_pairs_" + strI + "_" + strJ + "_";
MiscDebug.writeImage(img1, str + "_" + strI);
MiscDebug.writeImage(img2, str + "_" + strJ);
} catch (Exception e) {
}
}
private static void debugPrint(TwoDFloatArray pyr1, TwoDFloatArray pyr2, TIntList kpX1, TIntList kpY1, TIntList kpX2, TIntList kpY2, float scale1, float scale2, int img1Idx, int img2Idx) {
Image img1 = ORB.convertToImage(pyr1);
Image img2 = ORB.convertToImage(pyr2);
try {
for (int i = 0; i < kpX1.size(); ++i) {
int x1 = (int) (kpX1.get(i) / scale1);
int y1 = (int) (kpY1.get(i) / scale1);
ImageIOHelper.addPointToImage(x1, y1, img1, 1, 255, 0, 0);
}
for (int i = 0; i < kpX2.size(); ++i) {
int x2 = (int) (kpX2.get(i) / scale2);
int y2 = (int) (kpY2.get(i) / scale2);
ImageIOHelper.addPointToImage(x2, y2, img2, 1, 255, 0, 0);
}
String strI = Integer.toString(img1Idx);
while (strI.length() < 3) {
strI = "0" + strI;
}
String strJ = Integer.toString(img2Idx);
while (strJ.length() < 3) {
strJ = "0" + strJ;
}
String str = "_kp_" + strI + "_" + strJ + "_";
MiscDebug.writeImage(img1, str + "_i");
MiscDebug.writeImage(img2, str + "_j");
} catch (Exception e) {
}
}
private static void debugPrint(TwoDFloatArray pyr1, TwoDFloatArray pyr2, TIntList kpX1, TIntList kpY1, TIntList kpX2, TIntList kpY2, int img1Idx, int img2Idx) {
Image img1 = ORB.convertToImage(pyr1);
Image img2 = ORB.convertToImage(pyr2);
try {
for (int i = 0; i < kpX1.size(); ++i) {
int x1 = kpX1.get(i);
int y1 = kpY1.get(i);
ImageIOHelper.addPointToImage(x1, y1, img1, 1, 255, 0, 0);
}
for (int i = 0; i < kpX2.size(); ++i) {
int x2 = kpX2.get(i);
int y2 = kpY2.get(i);
ImageIOHelper.addPointToImage(x2, y2, img2, 1, 255, 0, 0);
}
String strI = Integer.toString(img1Idx);
while (strI.length() < 3) {
strI = "0" + strI;
}
String strJ = Integer.toString(img2Idx);
while (strJ.length() < 3) {
strJ = "0" + strJ;
}
String str = "_kp_" + strI + "_" + strJ + "_";
MiscDebug.writeImage(img1, str + "_i");
MiscDebug.writeImage(img2, str + "_j");
} catch (Exception e) {
}
}
private static Set<PairInt> makeSet(TIntList kpX1, TIntList kpY1) {
Set<PairInt> set = new HashSet<PairInt>();
for (int i = 0; i < kpX1.size(); ++i) {
PairInt p = new PairInt(kpX1.get(i), kpY1.get(i));
set.add(p);
}
return set;
}
private static Descriptors[] getDescriptors(ORB orb, int i) {
ORB.Descriptors[] d = null;
if (orb.getDescrChoice().equals(ORB.DescriptorChoice.ALT)) {
d = new ORB.Descriptors[]{orb.getDescriptorsListAlt().get(i)};
} else if (orb.getDescrChoice().equals(ORB.DescriptorChoice.HSV)) {
d = new ORB.Descriptors[]{orb.getDescriptorsH().get(i), orb.getDescriptorsS().get(i), orb.getDescriptorsV().get(i)};
} else if (orb.getDescrChoice().equals(ORB.DescriptorChoice.GREYSCALE)) {
d = new ORB.Descriptors[]{orb.getDescriptorsList().get(i)};
}
return d;
}
private static List<QuadInt> createPairLabelIndexes(int[][] cost, int nBands, List<TIntList> pointIndexLists1, TIntList kpX1, TIntList kpY1, List<TIntList> pointIndexLists2, TIntList kpX2, TIntList kpY2) {
int costLimit = Math.round((float) (nBands * 256) * 0.65F);
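        // worked example (assuming, as elsewhere in this class, that 256 is the
        // per-band maximum descriptor cost): for nBands = 3 this is
        // Math.round(768 * 0.65F) = 499, so pairs whose descriptor cost exceeds
        // ~65% of the maximum possible difference are skipped.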
int minP1Diff = 3;
Set<QuadInt> exists = new HashSet<QuadInt>();
// pairs of idx from set1 and idx from set 2
Set<PairInt> skip = new HashSet<PairInt>();
List<QuadInt> pairIndexes = new ArrayList<QuadInt>();
List<PairInt> pair2Indexes = calculatePairIndexes(pointIndexLists2, kpX2, kpY2, minP1Diff);
for (int ii = 0; ii < pointIndexLists1.size(); ++ii) {
TIntList kpIndexes1 = pointIndexLists1.get(ii);
if (kpIndexes1.size() < 2) {
continue;
}
// draw 2 from kpIndexes1
for (int ii1 = 0; ii1 < kpIndexes1.size(); ++ii1) {
int idx1 = kpIndexes1.get(ii1);
int t1X = kpX1.get(idx1);
int t1Y = kpY1.get(idx1);
boolean skipIdx1 = false;
for (int ii2 = 0; ii2 < kpIndexes1.size(); ++ii2) {
if (ii1 == ii2) {
continue;
}
int idx2 = kpIndexes1.get(ii2);
int t2X = kpX1.get(idx2);
int t2Y = kpY1.get(idx2);
if (t1X == t2X && t1Y == t2Y) {
continue;
}
int diffX = t1X - t2X;
int diffY = t1Y - t2Y;
int distSq = diffX * diffX + diffY * diffY;
//if (distSq > limitSq) {
// continue;
if (distSq < minP1Diff * minP1Diff) {
continue;
}
for (PairInt p2Index : pair2Indexes) {
int idx3 = p2Index.getX();
int idx4 = p2Index.getY();
PairInt p13 = new PairInt(idx1, idx3);
if (skip.contains(p13)) {
skipIdx1 = true;
break;
}
PairInt p24 = new PairInt(idx2, idx4);
if (skip.contains(p24)) {
continue;
}
int c13 = cost[idx1][idx3];
// if idx1 and idx3 cost is above limit, skip
if (c13 > costLimit) {
skip.add(p13);
skipIdx1 = true;
break;
}
int c24 = cost[idx2][idx4];
if (c24 > costLimit) {
skip.add(p24);
continue;
}
QuadInt q = new QuadInt(idx1, idx2, idx3, idx4);
QuadInt qChk = new QuadInt(idx2, idx1, idx4, idx3);
if (exists.contains(q) || exists.contains(qChk)) {
continue;
}
/*
int s1X = kpX2.get(idx3);
int s1Y = kpY2.get(idx3);
int s2X = kpX2.get(idx4);
int s2Y = kpY2.get(idx4);
int diffX2 = s1X - s2X;
int diffY2 = s1Y - s2Y;
int distSq2 = diffX2 * diffX2 + diffY2 * diffY2;
//if (distSq2 > limitSq) {
// continue;
//}
if ((distSq2 < minP1Diff * minP1Diff)) {
continue;
}
*/
pairIndexes.add(q);
exists.add(q);
}
if (skipIdx1) {
break;
}
}
}
}
return pairIndexes;
}
public static int calculateObjectSize(Set<PairInt> points) {
// O(N*lg_2(N))
FurthestPair furthestPair = new FurthestPair();
PairInt[] fp = furthestPair.find(points);
if (fp == null || fp.length < 2) {
throw new IllegalArgumentException("did not find a furthest pair" + " in points");
}
double dist = ORBMatcher.distance(fp[0], fp[1]);
return (int) Math.round(dist);
}
public static int calculateObjectSize(PairIntArray points) {
return calculateObjectSize(Misc.convert(points));
}
private static float calculateDiagonal(List<TIntList> keypointsX1, List<TIntList> keypointsY1, int idx) {
TIntList x1 = keypointsX1.get(idx);
TIntList y1 = keypointsY1.get(idx);
int maxX = x1.max();
int maxY = y1.max();
return (float) Math.sqrt(maxX * maxX + maxY * maxY);
}
    /**
     * NOTE: preliminary results show that this matches the right pattern as
     * a subset of the object, but it needs to be followed by a slightly larger
     * aggregated search by segmentation cells, using the partial shape matcher
     * for example. This was started in ShapeFinder, but needs to be
     * adjusted for a search given seed cells and possibly improved for the
     * other TODO items.
* @param keypoints1
* @param keypoints2
* @param mT
* @param mS
* @param nn
* @param minMaxXY2
* @param limit
* @param tIndexes
* @param idx1P2CostMap
* @param indexes
* @param costs
* @return
*/
private static List<CorrespondenceList> completeUsingCombinations(List<PairInt> keypoints1, List<PairInt> keypoints2, PairIntArray mT, PairIntArray mS, NearestNeighbor2D nn, int[] minMaxXY2, int limit, TIntList tIndexes, TIntObjectMap<TObjectIntMap<PairInt>> idx1P2CostMap, PairInt[] indexes, int[] costs, int bitTolerance, int nBands) {
int nTop = mT.getN();
        System.out.println("have " + nTop + " sets of points for n of k=2 combinations");
// need to make pairs of combinations from mT,mS
        // to calculate euclidean transformations and evaluate them.
// -- can reduce the number of combinations by imposing a
// distance limit on separation of feasible pairs
int limitSq = limit * limit;
MatchedPointsTransformationCalculator tc = new MatchedPointsTransformationCalculator();
Transformer transformer = new Transformer();
// this fixed size sorted vector is faster for shorter arrays.
// TODO: consider ways to robustly set the size from the cost
// statistics to ensure the vector will always contain the
// correct solution even if not in top position.
int nt = mT.getN();
FixedSizeSortedVector<CObject> vec = new FixedSizeSortedVector<CObject>(nt, CObject.class);
double minCost = Double.MAX_VALUE;
//CorrespondenceList minCostCor = null;
//PairIntArray minCostTrT = null;
double[] minCostI = new double[nTop];
double[] minDistI = new double[nTop];
// temporary storage of corresp coords until object construction
int[] m1x = new int[nTop];
int[] m1y = new int[nTop];
int[] m2x = new int[nTop];
int[] m2y = new int[nTop];
int mCount = 0;
for (int i = 0; i < nTop; ++i) {
int t1X = mT.getX(i);
int t1Y = mT.getY(i);
int s1X = mS.getX(i);
int s1Y = mS.getY(i);
// choose all combinations of 2nd point within distance
// limit of point s1.
for (int j = i + 1; j < mS.getN(); ++j) {
int t2X = mT.getX(j);
int t2Y = mT.getY(j);
int s2X = mS.getX(j);
int s2Y = mS.getY(j);
if ((t1X == t2X && t1Y == t2Y) || (s1X == s2X && s1Y == s2Y)) {
continue;
}
int diffX = s1X - s2X;
int diffY = s1Y - s2Y;
int distSq = diffX * diffX + diffY * diffY;
if (distSq > limitSq) {
continue;
}
// -- calculate euclid transformation
// -- evaluate the fit
TransformationParameters params = tc.calulateEuclidean(t1X, t1Y, t2X, t2Y, s1X, s1Y, s2X, s2Y, 0, 0);
float scale = params.getScale();
mCount = 0;
// template object transformed
PairIntArray trT = transformer.applyTransformation(params, mT);
/*
two components to the evaluation and both need normalizations
so that their contributions to total result are
equally weighted.
(1) descriptors:
-- score is sum of each matched (3*256 - cost)
-- the normalization is the maximum possible score,
so will use the number of template points.
--> norm = nTemplate * 3 * 256
-- normalized score = (3*256 - cost)/norm
==> normalized cost = 1 - ((3*256 - cost)/norm)
(2) spatial distances from transformed points:
-- sum of distances within limit
and replacement of distance by limit if no matching
nearest neighbor is found.
-- divide each distance by the transformation scale
to compare same values
-- divide the total sum by the total max possible
--> norm = nTemplate * limit / scale
Then the total cost is (1) + (2) and the min cost
among all of these combinations is the resulting
correspondence list
*/
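                // hypothetical worked example of the normalization above
                // (illustrative numbers only): with nBands = 3, a matched
                // descriptor cost of 200 gives a normalized cost of
                // 1 - (768 - 200)/768 = 200/768 ~= 0.26, and a transformed point
                // found 5 pixels from its nearest neighbor with limit=10 and
                // scale=2 contributes a distance term of 5 / (10/2) = 1.0.
                // unmatched template points add 1 to each sum, keeping the two
                // components on comparable scales before they are added.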
double maxCost = nBands * 256;
double maxDist = limit / scale;
double sum1 = 0;
double sum2 = 0;
double sum = 0;
for (int k = 0; k < trT.getN(); ++k) {
int xTr = trT.getX(k);
int yTr = trT.getY(k);
int idx1 = tIndexes.get(k);
Set<PairInt> nearest = null;
if ((xTr >= 0) && (yTr >= 0) && (xTr <= (minMaxXY2[1] + limit)) && (yTr <= (minMaxXY2[3] + limit))) {
nearest = nn.findClosest(xTr, yTr, limit);
}
int minC = Integer.MAX_VALUE;
PairInt minCP2 = null;
if (nearest != null && !nearest.isEmpty()) {
TObjectIntMap<PairInt> cMap = idx1P2CostMap.get(idx1);
for (PairInt p2 : nearest) {
if (!cMap.containsKey(p2)) {
continue;
}
int c = cMap.get(p2);
if (c < minC) {
minC = c;
minCP2 = p2;
}
}
}
if (minCP2 != null) {
double scoreNorm = (3 * 256 - minC) / maxCost;
double costNorm = 1.0 - scoreNorm;
sum1 += costNorm;
double dist = ORBMatcher.distance(xTr, yTr, minCP2);
double distNorm = dist / maxDist;
sum2 += distNorm;
m1x[mCount] = keypoints1.get(idx1).getX();
m1y[mCount] = keypoints1.get(idx1).getY();
m2x[mCount] = minCP2.getX();
m2y[mCount] = minCP2.getY();
minCostI[mCount] = costNorm;
minDistI[mCount] = distNorm;
mCount++;
} else {
sum1 += 1;
sum2 += 1;
}
}
sum = sum1 + sum2;
if ((minCost == Double.MAX_VALUE) || (sum < (minCost + bitTolerance))) {
if (sum < minCost) {
minCost = sum;
}
List<PairInt> m1 = new ArrayList<PairInt>();
List<PairInt> m2 = new ArrayList<PairInt>();
CorrespondenceList corr = new CorrespondenceList(params.getScale(), Math.round(params.getRotationInDegrees()), Math.round(params.getTranslationX()), Math.round(params.getTranslationY()), 0, 0, 0, m1, m2);
for (int mi = 0; mi < mCount; ++mi) {
m1.add(new PairInt(m1x[mi], m1y[mi]));
m2.add(new PairInt(m2x[mi], m2y[mi]));
}
CObject cObj = new CObject(sum, corr, trT);
vec.add(cObj);
}
}
}
if (vec.getNumberOfItems() == 0) {
return null;
}
List<CorrespondenceList> topResults = new ArrayList<CorrespondenceList>();
for (int i = 0; i < vec.getNumberOfItems(); ++i) {
CObject a = vec.getArray()[i];
if (a.cost > (minCost + bitTolerance)) {
break;
}
topResults.add(a.cCor);
}
return topResults;
}
private static List<PairInt> calculatePairIndexes(List<TIntList> pointIndexLists2, TIntList kpX2, TIntList kpY2, int minPDiff) {
List<PairInt> pairIndexes = new ArrayList<PairInt>();
Set<PairInt> exists = new HashSet<PairInt>();
// draw 2 pairs from other dataset
for (int jj = 0; jj < pointIndexLists2.size(); ++jj) {
TIntList kpIndexes2 = pointIndexLists2.get(jj);
if (kpIndexes2.size() < 2) {
continue;
}
// draw 2 from kpIndexes2
for (int jj1 = 0; jj1 < kpIndexes2.size(); ++jj1) {
int idx3 = kpIndexes2.get(jj1);
int s1X = kpX2.get(idx3);
int s1Y = kpY2.get(idx3);
for (int jj2 = 0; jj2 < kpIndexes2.size(); ++jj2) {
if (jj1 == jj2) {
continue;
}
int idx4 = kpIndexes2.get(jj2);
int s2X = kpX2.get(idx4);
int s2Y = kpY2.get(idx4);
if (s1X == s2X && s1Y == s2Y) {
continue;
}
PairInt q = new PairInt(idx3, idx4);
if (exists.contains(q)) {
continue;
}
int diffX2 = s1X - s2X;
int diffY2 = s1Y - s2Y;
int distSq2 = diffX2 * diffX2 + diffY2 * diffY2;
//if (distSq2 > limitSq) {
// continue;
if (distSq2 < minPDiff * minPDiff) {
continue;
}
pairIndexes.add(q);
exists.add(q);
}
}
}
return pairIndexes;
}
private static float calculateDiagonal2(List<TwoDFloatArray> pyramidImages, int idx) {
int w = pyramidImages.get(idx).a.length;
int h = pyramidImages.get(idx).a[0].length;
double diag = Math.sqrt(w * w + h * h);
return (float) diag;
}
private static int[][] greedyMatch(List<PairInt> keypoints1, List<PairInt> keypoints2, int[][] cost) {
int n1 = keypoints1.size();
int n2 = keypoints2.size();
// for the greedy match, separating the index information from the cost
// and then sorting by cost
int nTot = n1 * n2;
PairInt[] indexes = new PairInt[nTot];
int[] costs = new int[nTot];
int count = 0;
for (int i = 0; i < n1; ++i) {
for (int j = 0; j < n2; ++j) {
indexes[count] = new PairInt(i, j);
costs[count] = cost[i][j];
count++;
}
}
assert (count == nTot);
QuickSort.sortBy1stArg(costs, indexes);
Set<PairInt> set1 = new HashSet<PairInt>();
Set<PairInt> set2 = new HashSet<PairInt>();
List<PairInt> matches = new ArrayList<PairInt>();
// visit lowest costs (== differences) first
for (int i = 0; i < nTot; ++i) {
PairInt index12 = indexes[i];
int idx1 = index12.getX();
int idx2 = index12.getY();
PairInt p1 = keypoints1.get(idx1);
PairInt p2 = keypoints2.get(idx2);
if (set1.contains(p1) || set2.contains(p2)) {
continue;
}
//System.out.println("p1=" + p1 + " " + " p2=" + p2 + " cost=" + costs[i]);
matches.add(index12);
set1.add(p1);
set2.add(p2);
}
int[][] results = new int[matches.size()][2];
for (int i = 0; i < matches.size(); ++i) {
results[i][0] = matches.get(i).getX();
results[i][1] = matches.get(i).getY();
}
return results;
}
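    // illustrative trace of the greedy matching above, using a hypothetical
    // 2x2 cost matrix {{5, 1}, {2, 9}} and distinct keypoints: the sorted
    // (cost, i, j) order is (1,0,1), (2,1,0), (5,0,0), (9,1,1); pair (0,1) is
    // accepted first, then (1,0), and the remaining entries are skipped because
    // their points are already matched, giving results = {{0,1}, {1,0}}.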
private static double[] sumKeypointDistanceDifference(TIntList a2Indexes, PairIntArray tr2, TIntList kpX2, TIntList kpY2, NearestNeighbor2D nn, TransformationParameters params, int maxX, int maxY, int pixTolerance, double maxDist, int[] m1x, int[] m1y, int[] m2x, int[] m2y) {
double sum2 = 0;
int mCount = 0;
for (int k = 0; k < tr2.getN(); ++k) {
int x2Tr = tr2.getX(k);
int y2Tr = tr2.getY(k);
int idx2 = a2Indexes.get(k);
Set<PairInt> nearest = null;
if ((x2Tr >= 0) && (y2Tr >= 0) && (x2Tr <= (maxX + pixTolerance)) && (y2Tr <= (maxY + pixTolerance))) {
nearest = nn.findClosest(x2Tr, y2Tr, pixTolerance);
}
double minDist = Double.MAX_VALUE;
PairInt minDistP1 = null;
if (nearest != null && !nearest.isEmpty()) {
for (PairInt p11 : nearest) {
double dist = ORBMatcher.distance(x2Tr, y2Tr, p11);
if (dist < minDist) {
minDist = dist;
minDistP1 = p11;
}
}
}
if (minDistP1 != null) {
double dist = minDist;
double distNorm = dist / maxDist;
sum2 += distNorm;
m2x[mCount] = kpX2.get(idx2);
m2y[mCount] = kpY2.get(idx2);
m1x[mCount] = minDistP1.getX();
m1y[mCount] = minDistP1.getY();
mCount++;
} else {
sum2 += 1;
}
} // end loop over trnsformed set 2
return new double[]{sum2, mCount};
}
private static double[] sumKeypointDescAndDist(int[][] cost, int nBands, TIntList a1Indexes, PairIntArray tr1, TIntList kpX1, TIntList kpY1, NearestNeighbor2D nn2, TObjectIntMap<PairInt> p2KPIndexMap, TransformationParameters params, int maxX2, int maxY2, int pixTolerance, double maxDist, PairInt[] m1, PairInt[] m2) {
double sumDesc = 0;
double sumDist = 0;
int count = 0;
double maxDesc = nBands * 256.0;
for (int k = 0; k < tr1.getN(); ++k) {
int x1Tr = tr1.getX(k);
int y1Tr = tr1.getY(k);
int idx1 = a1Indexes.get(k);
Set<PairInt> nearest = null;
if ((x1Tr >= 0) && (y1Tr >= 0) && (x1Tr <= (maxX2 + pixTolerance)) && (y1Tr <= (maxY2 + pixTolerance))) {
nearest = nn2.findClosest(x1Tr, y1Tr, pixTolerance);
}
int minC = Integer.MAX_VALUE;
PairInt minCP2 = null;
int minIdx2 = 0;
if (nearest != null && !nearest.isEmpty()) {
for (PairInt p2 : nearest) {
int idx2 = p2KPIndexMap.get(p2);
int c = cost[idx1][idx2];
if (c < minC) {
minC = c;
minCP2 = p2;
minIdx2 = idx2;
}
}
}
if (minCP2 != null) {
double scoreNorm = (nBands * 256 - minC) / maxDesc;
double costNorm = 1.0 - scoreNorm;
sumDesc += costNorm;
double dist = ORBMatcher.distance(x1Tr, y1Tr, minCP2);
double distNorm = dist / maxDist;
sumDist += distNorm;
m1[count] = new PairInt(kpX1.get(idx1), kpY1.get(idx1));
m2[count] = minCP2;
count++;
} else {
sumDesc += 1;
sumDist += 1;
}
}
return new double[]{sumDesc, sumDist, count};
}
private static double[] sumKeypointDescAndDist(int[][] cost, int nBands, TIntList a1Indexes, PairIntArray tr1, TIntList kpX1, TIntList kpY1, NearestNeighbor2D nn2, TObjectIntMap<PairInt> p2KPIndexMap, int maxX2, int maxY2, int pixTolerance, double maxDist, int[] m1, int[] m2) {
double sumDesc = 0;
double sumDist = 0;
int count = 0;
double maxDesc = nBands * 256.0;
//best first match, after nearest neighbors
// TODO: consider optimal bipartite matching when have an
// implementation of multi-level-buckets
float[] costA = new float[tr1.getN()];
float[] costDesc = new float[tr1.getN()];
float[] costDist = new float[tr1.getN()];
int[] indexes = new int[tr1.getN()];
for (int k = 0; k < tr1.getN(); ++k) {
int x1Tr = tr1.getX(k);
int y1Tr = tr1.getY(k);
int idx1 = a1Indexes.get(k);
Set<PairInt> nearest = null;
if ((x1Tr >= 0) && (y1Tr >= 0) && (x1Tr <= (maxX2 + pixTolerance)) && (y1Tr <= (maxY2 + pixTolerance))) {
nearest = nn2.findClosest(x1Tr, y1Tr, pixTolerance);
}
int minC = Integer.MAX_VALUE;
PairInt minCP2 = null;
int minIdx2 = 0;
if (nearest != null && !nearest.isEmpty()) {
for (PairInt p2 : nearest) {
int idx2 = p2KPIndexMap.get(p2);
int c = cost[idx1][idx2];
if (c < minC) {
minC = c;
minCP2 = p2;
minIdx2 = idx2;
}
}
}
if (minCP2 != null) {
double scoreNorm = (nBands * 256 - minC) / maxDesc;
double costNorm = 1.0 - scoreNorm;
sumDesc += costNorm;
double dist = ORBMatcher.distance(x1Tr, y1Tr, minCP2);
double distNorm = dist / maxDist;
sumDist += distNorm;
m1[count] = idx1;
m2[count] = minIdx2;
costA[count] = (float) (costNorm + distNorm);
costDesc[count] = (float) costNorm;
costDist[count] = (float) distNorm;
indexes[count] = count;
count++;
} else {
sumDesc += 1;
sumDist += 1;
}
}
if (count > 1) {
costA = Arrays.copyOf(costA, count);
indexes = Arrays.copyOf(indexes, count);
QuickSort.sortBy1stArg(costA, indexes);
TIntSet set1 = new TIntHashSet();
TIntSet set2 = new TIntHashSet();
List<PairInt> matched = new ArrayList<PairInt>();
TIntList idxs = new TIntArrayList();
for (int i = 0; i < count; ++i) {
int idx = indexes[i];
int idx1 = m1[idx];
int idx2 = m2[idx];
if (set1.contains(idx1) || set2.contains(idx2)) {
continue;
}
idxs.add(idx);
matched.add(new PairInt(idx1, idx2));
set1.add(idx1);
set2.add(idx2);
}
int nRedundant = count - matched.size();
if (nRedundant > 0) {
sumDesc = 0;
sumDist = 0;
for (int i = 0; i < matched.size(); ++i) {
m1[i] = matched.get(i).getX();
m2[i] = matched.get(i).getY();
int idx = idxs.get(i);
sumDesc += costDesc[idx];
sumDist += costDist[idx];
}
sumDesc += (tr1.getN() - matched.size());
sumDist += (tr1.getN() - matched.size());
count = matched.size();
}
}
return new double[]{sumDesc, sumDist, count};
}
private static PairIntArray trimToImageBounds(TwoDFloatArray octaveImg, PairIntArray a) {
int n0 = octaveImg.a.length;
int n1 = octaveImg.a[0].length;
PairIntArray b = new PairIntArray(a.getN());
for (int i = 0; i < a.getN(); ++i) {
int x = a.getX(i);
int y = a.getY(i);
if (x < 0 || x > (n1 - 1)) {
continue;
} else if (y < 0 || y > (n0 - 1)) {
continue;
}
b.add(x, y);
}
return b;
}
private static PairIntArray reduceBounds(PairIntArray bounds, float scale) {
Set<PairInt> added = new HashSet<PairInt>();
PairIntArray out = new PairIntArray(bounds.getN());
for (int i = 0; i < bounds.getN(); ++i) {
int x = Math.round((float)bounds.getX(i)/scale);
int y = Math.round((float)bounds.getY(i)/scale);
PairInt p = new PairInt(x, y);
if (added.contains(p)) {
continue;
}
out.add(x, y);
added.add(p);
}
return out;
}
public static int maxNumberOfGaps(PairIntArray bounds,
PartialShapeMatcher.Result r) {
TIntSet mIdxs = new TIntHashSet(r.getNumberOfMatches());
for (int i = 0; i < r.getNumberOfMatches(); ++i) {
mIdxs.add(r.getIdx1(i));
}
int maxGapStartIdx = -1;
int maxGap = 0;
int cStartIdx = -1;
int cGap = 0;
        // handle a gap that starts at index 0 so that a gap at the end of
        // the block can wrap around and be joined with it
int gap0 = 0;
for (int i = 0; i < bounds.getN(); ++i) {
if (!mIdxs.contains(i)) {
// is a gap
if (cStartIdx == -1) {
cStartIdx = i;
}
cGap++;
if (i == (bounds.getN() - 1)) {
if (gap0 > 0) {
// 0 1 2 3 4 5
// g g g g
// gap0=2
// cGap=2 cStartIdx=4
if (cStartIdx > (gap0 - 1)) {
gap0 += cGap;
}
}
if (cGap > maxGap) {
maxGap = cGap;
maxGapStartIdx = cStartIdx;
}
if (gap0 > maxGap) {
maxGap = gap0;
maxGapStartIdx = 0;
}
}
} else {
// is not a gap
if (cStartIdx > -1) {
if (cGap > maxGap) {
maxGap = cGap;
maxGapStartIdx = cStartIdx;
}
if (cStartIdx == 0) {
gap0 = cGap;
}
cStartIdx = -1;
cGap = 0;
}
}
}
return maxGap;
}
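    // worked example of the wraparound handling above: with bounds of length 6
    // and matched indexes {2, 3}, the gaps are {0,1} and {4,5}; the gap at the
    // end reaches the last index and another gap starts at index 0, so the two
    // are joined and maxGap = 4 is returned.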
private List<PairIntArray> createOrderedBounds(ORB orb1, TFloatList scales1,
Set<PairInt> labeledPoints1, SIGMA sigma) {
ImageProcessor imageProcessor = new ImageProcessor();
List<PairIntArray> boundsList = new ArrayList<PairIntArray>();
for (int octave1 = 0; octave1 < scales1.size(); ++octave1) {
float scale1 = scales1.get(octave1);
Set<PairInt> set = new HashSet<PairInt>();
if (octave1 == 0) {
set.addAll(labeledPoints1);
} else {
for (PairInt p : labeledPoints1) {
int x = Math.round((float) p.getX() / scale1);
int y = Math.round((float) p.getY() / scale1);
PairInt p2 = new PairInt(x, y);
set.add(p2);
}
}
PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary(
set, sigma,
orb1.getPyramidImages().get(octave1).a[0].length,
orb1.getPyramidImages().get(octave1).a.length);
boundsList.add(bounds);
}
return boundsList;
}
private PairIntArray getOrCreateOrderedBounds(TwoDFloatArray img,
TIntObjectMap<PairIntArray> boundsMap, int segIdx, Set<PairInt> set,
SIGMA sigma) {
PairIntArray bounds = boundsMap.get(segIdx);
if (bounds != null) {
return bounds;
}
ImageProcessor imageProcessor = new ImageProcessor();
bounds = imageProcessor.extractSmoothedOrderedBoundary(
set, sigma, img.a[0].length, img.a.length);
boundsMap.put(segIdx, bounds);
return bounds;
}
private double sumDistances(PairFloatArray distances) {
double sum = 0;
for (int i = 0; i < distances.getN(); ++i) {
float d1 = distances.getX(i);
float d2 = distances.getY(i);
sum += Math.sqrt(d1 * d1 + d2 * d2);
}
return sum;
}
/**
     * from matched points in list 1 to list 2, choose 2 pairs that have
     * small cost and are widely separated from one another.
* @param result
* @param costD
* @param xPoints1
* @param yPoints1
* @param xPoints2
* @param yPoints2
* @return
*/
private QuadInt[] choose2ReferencePoints(Result result,
PairIntArray bounds1, PairIntArray bounds2,
TObjectIntMap<PairInt> point1KP1Map,
TObjectIntMap<PairInt> point2KP1Map, int[][] costD) {
int n = result.getNumberOfMatches();
float[] costs = new float[n];
PairInt[] points1 = new PairInt[n];
int count = 0;
for (int i = 0; i < n; ++i) {
int idx1 = result.idx1s.get(i);
int idx2 = result.idx2s.get(i);
int x1 = bounds1.getX(idx1);
int y1 = bounds1.getY(idx1);
PairInt p1 = new PairInt(x1, y1);
int x2 = bounds2.getX(idx2);
int y2 = bounds2.getY(idx2);
PairInt p2 = new PairInt(x2, y2);
if (point1KP1Map.containsKey(p1) && point2KP1Map.containsKey(p2)) {
int kpIdx1 = point1KP1Map.get(p1);
int kpIdx2 = point2KP1Map.get(p2);
costs[count] = costD[kpIdx1][kpIdx2];
                // these points are in the full size reference frame
points1[count] = p1;
count++;
}
}
if (count > 1) {
if (count < n) {
costs = Arrays.copyOf(costs, count);
points1 = Arrays.copyOf(points1, count);
}
QuickSort.sortBy1stArg(costs, points1);
            int end = (int)(0.2 * n);
            if (end < 10) {
                end = n;
            }
            // guard against reading past the truncated arrays
            if (end > count) {
                end = count;
            }
Set<PairInt> points = new HashSet<PairInt>();
for (int i = 0; i < end; i++) {
PairInt p = points1[i];
points.add(p);
}
FurthestPair fp = new FurthestPair();
PairInt[] furthest = fp.find(points);
assert(furthest != null);
assert(furthest.length == 2);
PairInt[] furthest2 = new PairInt[2];
for (int i = 0; i < n; ++i) {
int idx1 = result.idx1s.get(i);
int idx2 = result.idx2s.get(i);
PairInt p1 = new PairInt(bounds1.getX(idx1),
bounds1.getY(idx1));
if (furthest2[0] == null) {
if (furthest[0].equals(p1)) {
furthest2[0] = new PairInt(bounds2.getX(idx2),
bounds2.getY(idx2));
}
}
if (furthest2[1] == null) {
if (furthest[1].equals(p1)) {
furthest2[1] = new PairInt(bounds2.getX(idx2),
bounds2.getY(idx2));
}
}
if (furthest2[0] != null && furthest2[1] != null) {
break;
}
}
if (furthest2 != null && furthest2.length == 2) {
QuadInt[] refs = new QuadInt[2];
refs[0] = new QuadInt(furthest[0], furthest2[0]);
refs[1] = new QuadInt(furthest[1], furthest2[1]);
return refs;
}
}
// re-do the calculation w/o trying to use descr cost.
Set<PairInt> points = new HashSet<PairInt>(n);
for (int i = 0; i < n; ++i) {
int idx1 = result.idx1s.get(i);
PairInt p1 = new PairInt(bounds1.getX(idx1),
bounds1.getY(idx1));
points.add(p1);
}
FurthestPair fp = new FurthestPair();
PairInt[] furthest = fp.find(points);
assert(furthest != null);
assert(furthest.length == 2);
PairInt[] furthest2 = new PairInt[2];
for (int i = 0; i < n; ++i) {
int idx1 = result.idx1s.get(i);
int idx2 = result.idx2s.get(i);
PairInt p1 = new PairInt(bounds1.getX(idx1),
bounds1.getY(idx1));
if (furthest2[0] == null) {
if (furthest[0].equals(p1)) {
furthest2[0] = new PairInt(bounds2.getX(idx2),
bounds2.getY(idx2));
}
}
if (furthest2[1] == null) {
if (furthest[1].equals(p1)) {
furthest2[1] = new PairInt(bounds2.getX(idx2),
bounds2.getY(idx2));
}
}
if (furthest2[0] != null && furthest2[1] != null) {
break;
}
}
assert (furthest2 != null && furthest2.length == 2);
QuadInt[] refs = new QuadInt[2];
refs[0] = new QuadInt(furthest[0], furthest2[0]);
refs[1] = new QuadInt(furthest[1], furthest2[1]);
return refs;
}
private static class PObject {
final PartialShapeMatcher.Result r;
final PairIntArray bounds1;
final PairIntArray bounds2;
final float scale1;
final float scale2;
public PObject(PartialShapeMatcher.Result result, PairIntArray b1, PairIntArray b2,
float s1, float s2) {
r = result;
bounds1 = b1;
bounds2 = b2;
scale1 = s1;
scale2 = s2;
}
}
private static class CObject implements Comparable<CObject> {
final double cost;
final CorrespondenceList cCor;
final PairIntArray transformedTemplate;
public CObject(double cost, CorrespondenceList cL,
PairIntArray templTr) {
this.cost = cost;
this.cCor = cL;
this.transformedTemplate = templTr;
}
@Override
public int compareTo(CObject other) {
if (cost < other.cost) {
return -1;
} else if (cost > other.cost) {
return 1;
} else {
int n1 = cCor.getPoints1().size();
int n2 = other.cCor.getPoints1().size();
if (n1 > n2) {
return -1;
} else if (n1 < n2) {
return 1;
}
}
return 0;
}
}
private static class CObject4 implements Comparable<CObject4> {
final double cost;
final TransformationParameters params;
final QuadInt q;
public CObject4(double sum, TransformationParameters params,
QuadInt q) {
this.cost = sum;
this.q = q;
this.params = params;
}
@Override
public int compareTo(CObject4 other) {
if (cost < other.cost) {
return -1;
} else if (cost > other.cost) {
return 1;
}
return 0;
}
}
private static class CObject3 implements Comparable<CObject3> {
final double cost;
final double costDesc;
final double costDist;
final double costCount;
final int index;
final PairInt[] m1;
final PairInt[] m2;
final double sumPatch;
final TransformationParameters params;
QuadInt q;
int keypointCount;
public CObject3(CObject2 cObject2, double sum, double sumPatch,
TransformationParameters params) {
this.sumPatch = sumPatch;
this.cost = sum;
this.costDesc = cObject2.costDesc;
this.costDist = cObject2.costDist;
this.costCount = cObject2.costCount;
this.index = cObject2.index;
this.m1 = cObject2.m1;
this.m2 = cObject2.m2;
this.params = params;
}
@Override
public int compareTo(CObject3 other) {
if (cost < other.cost) {
return -1;
} else if (cost > other.cost) {
return 1;
}
return 0;
}
}
private static class CObject2 implements Comparable<CObject2> {
final double cost;
final double costDesc;
final double costDist;
final double costCount;
final int index;
final PairInt[] m1;
final PairInt[] m2;
public CObject2(int index, double cost, double costDesc, double costDist,
double costCount, PairInt[] matched1, PairInt[] matched2) {
this.cost = cost;
this.index = index;
this.m1 = matched1;
this.m2 = matched2;
this.costDesc = costDesc;
this.costDist = costDist;
this.costCount = costCount;
}
@Override
public int compareTo(CObject2 other) {
if (cost < other.cost) {
return -1;
} else if (cost > other.cost) {
return 1;
}
return 0;
}
}
private static void debugPlot(int i, int j,
FixedSizeSortedVector<CObject> vec,
TwoDFloatArray pyr1, TwoDFloatArray pyr2, float s1, float s2) {
Image img1 = convertToImage(pyr1);
Image img2 = convertToImage(pyr2);
try {
CorrespondenceList cor = vec.getArray()[0].cCor;
Image img1Cp = img1.copyImage();
Image img2Cp = img2.copyImage();
CorrespondencePlotter plotter = new CorrespondencePlotter(
img1Cp, img2Cp);
for (int ii = 0; ii < cor.getPoints1().size(); ++ii) {
PairInt p1 = cor.getPoints1().get(ii);
PairInt p2 = cor.getPoints2().get(ii);
int x1 = Math.round(p1.getX() / s1);
int y1 = Math.round(p1.getY() / s1);
int x2 = Math.round(p2.getX() / s2);
int y2 = Math.round(p2.getY() / s2);
plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
}
String strI = Integer.toString(i);
while (strI.length() < 3) {
strI = "0" + strI;
}
String strJ = Integer.toString(j);
while (strJ.length() < 3) {
strJ = "0" + strJ;
}
String str = strI + "_" + strJ + "_";
plotter.writeImage("_MATCH_" + str);
} catch (Exception e) {
}
}
private static void debugPlot2(int i, int j,
FixedSizeSortedVector<CObject3> vec,
TwoDFloatArray pyr1, TwoDFloatArray pyr2, float s1, float s2) {
Image img1 = convertToImage(pyr1);
Image img2 = convertToImage(pyr2);
try {
PairInt[] m1 = vec.getArray()[0].m1;
PairInt[] m2 = vec.getArray()[0].m2;
Image img1Cp = img1.copyImage();
Image img2Cp = img2.copyImage();
CorrespondencePlotter plotter = new CorrespondencePlotter(
img1Cp, img2Cp);
for (int ii = 0; ii < m1.length; ++ii) {
PairInt p1 = m1[ii];
PairInt p2 = m2[ii];
int x1 = Math.round(p1.getX() / s1);
int y1 = Math.round(p1.getY() / s1);
int x2 = Math.round(p2.getX() / s2);
int y2 = Math.round(p2.getY() / s2);
plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
}
String strI = Integer.toString(i);
while (strI.length() < 3) {
strI = "0" + strI;
}
String strJ = Integer.toString(j);
while (strJ.length() < 3) {
strJ = "0" + strJ;
}
String str = strI + "_" + strJ + "_";
plotter.writeImage("_MATCH_" + str);
} catch (Exception e) {
}
}
private static void debugPlot(int i, int j,
FixedSizeSortedVector<CObject> vecD,
FixedSizeSortedVector<CObject> vecF,
TwoDFloatArray pyr1, TwoDFloatArray pyr2, float s1, float s2) {
Image img1 = convertToImage(pyr1);
Image img2 = convertToImage(pyr2);
try {
for (int i0 = 0; i0 < 2; ++i0) {
CorrespondenceList cor = null;
if (i0 == 0) {
cor = vecD.getArray()[0].cCor;
} else {
cor = vecF.getArray()[0].cCor;
}
Image img1Cp = img1.copyImage();
Image img2Cp = img2.copyImage();
CorrespondencePlotter plotter = new CorrespondencePlotter(
img1Cp, img2Cp);
for (int ii = 0; ii < cor.getPoints1().size(); ++ii) {
PairInt p1 = cor.getPoints1().get(ii);
PairInt p2 = cor.getPoints2().get(ii);
int x1 = Math.round(p1.getX()/s1);
int y1 = Math.round(p1.getY()/s1);
int x2 = Math.round(p2.getX()/s2);
int y2 = Math.round(p2.getY()/s2);
plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
}
String strI = Integer.toString(i);
while (strI.length() < 3) {
strI = "0" + strI;
}
String strJ = Integer.toString(j);
while (strJ.length() < 3) {
strJ = "0" + strJ;
}
String str = strI + "_" + strJ + "_";
if (i0 == 0) {
str = str + "factor";
} else {
str = str + "divisor";
}
plotter.writeImage("_MATCH_" + str);
}
} catch(Exception e) {}
}
}
package com.binatechnologies.varsim;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.Random;
import org.apache.log4j.Logger;
public class Variant {
private final static Logger log = Logger.getLogger(Variant.class.getName());
// use a seed for reproducibility, should be an option or global
    private static final Random _rand = new Random(3333L);
public int idx = 0; // this is hopefully a unique index, for the split variants
public int full_idx = 0; // this is hopefully a unique index, for the whole variants
// this is the type before the variant was split into canonical ones
public OverallType original_type = null;
private int _chr = -1, _pos = -1, _del = -1;
private byte[] _ref;
private String _chr_name;
private FlexSeq[] _alts;
    private byte _maternal = 0, _paternal = 0; // -1 for not available
private boolean _isPhased = false; // Phasing
private String _filter;
private String _var_id;
// this is when the reference base is deleted
// if it is the same as the first alt base
private String _ref_deleted;
public Variant() {
// TODO define some methods to determine if a Variant is uninitialised
}
public Variant(String chr_name, int chr, int pos, int del, byte[] ref,
FlexSeq[] alts, byte[] phase, boolean isPhased, String var_id, String filter,
String ref_deleted) {
_filter = filter;
_chr_name = chr_name;
_var_id = var_id;
_chr = chr;
_pos = pos;
_del = del;
// TODO we should put the reference matching code here
_ref = ref.clone();
_ref_deleted = ref_deleted;
_alts = new FlexSeq[alts.length];
for (int i = 0; i < alts.length; i++) {
if (alts[i] != null) {
_alts[i] = new FlexSeq(alts[i]);
} else {
_alts[i] = null;
}
}
_paternal = phase[0];
_maternal = phase[1];
_isPhased = isPhased;
}
public Variant(final Variant var) {
_filter = var._filter;
_chr_name = var._chr_name;
_var_id = var._var_id;
_chr = var._chr;
_pos = var._pos;
_del = var._del;
_ref = var._ref.clone();
_ref_deleted = var._ref_deleted;
_alts = new FlexSeq[var._alts.length];
for (int i = 0; i < var._alts.length; i++) {
if (var._alts[i] != null) {
_alts[i] = new FlexSeq(var._alts[i]);
} else {
_alts[i] = null;
}
}
_paternal = var._paternal;
_maternal = var._maternal;
_isPhased = var._isPhased;
}
/**
* @return Chromosome variant is on
*/
public int chromosome() {
return _chr;
}
/**
* @return Start position of variant
*/
public int position() {
return _pos;
}
    /**
     * Tries to move the variant to a new novel position. It checks that the new position is valid.
     * If this returns false, nothing is changed.
     *
     * @param pos new novel position
     * @param ref reference sequence
     * @return true if the position was changed, false otherwise
     */
public boolean setNovelPosition(int pos, SimpleReference ref) {
// replace ref
int len = _ref.length;
if (len > 0) {
byte[] temp_ref = ref.byteRange(_chr, pos, pos + len);
for (byte b : temp_ref) {
if (b == 'N') {
// don't allow N's
log.warn("N found at " + pos + " to " + (pos + len));
return false;
}
}
            for (FlexSeq f : _alts) {
                if (f.getSeq() != null) {
                    // make sure the alt does not share a prefix with the new
                    // reference (comparing the first base is sufficient)
                    for (int i = 0; i < temp_ref.length; i++) {
                        if (i < f.getSeq().length) {
                            if (temp_ref[i] == f.getSeq()[i]) {
                                log.warn("Same ref at alt at " + pos + " to " + (pos + len));
                                return false;
                            } else {
                                break;
                            }
                        }
                    }
                }
            }
_ref = temp_ref;
}
// replace ref_deleted
len = _ref_deleted.length();
if (len > 0) {
try {
byte[] deleted_temp = ref.byteRange(_chr, pos - len, pos);
_ref_deleted = new String(deleted_temp, "US-ASCII");
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
// do the replacing
_pos = pos;
return true;
}
/**
* @param id variant id. usually the dbSNP id
*/
public void setVarID(String id) {
_var_id = id;
}
/**
* return the length of the deletion.
* This is currently simply the length of the reference sequence that will
* be replaced
*
* @return the length of the deletion
*/
public int deletion() {
return _del;
}
/**
* @return maternal allele index, 0 if reference
*/
public int maternal() {
return _maternal;
}
/**
* @return paternal allele index, 0 if reference
*/
public int paternal() {
return _paternal;
}
/**
* @param ind index of allele
     * @return the insertion sequence as a byte array
*/
public byte[] insertion(int ind) {
if (ind <= 0 || ind > _alts.length)
return null;
return _alts[ind - 1].getSeq();
}
/**
     * The length of an alternate allele. This is usually the length of an
     * insertion sequence, but in the case of SVs, not necessarily.
*
* @param ind index of allele
* @return the length of that allele
*/
public int insertion_len(int ind) {
if (ind <= 0 || ind > _alts.length)
return 0;
return _alts[ind - 1].length();
}
// if it is a simple indel, it is just the length
// if it is a complex variant, this is the maximum length of the insertion
// and deletion
public int max_len(int ind) {
if (ind <= 0 || ind > _alts.length)
return 0;
return Math.max(_del, _alts[ind - 1].length());
}
public int max_len() {
int len = 0;
for (int i = 0; i < 2; i++) {
len = Math.max(max_len(get_allele(i)), len);
}
return len;
}
// this is the minimum length of the variants
public int min_len() {
int len = max_len(get_allele(0));
len = Math.min(max_len(get_allele(1)), len);
return len;
}
/*
gets the interval enclosing the variant on the reference genome
*/
public Interval1D get_interval(int ind) {
if (ind == 0 || _del == 0) {
return new Interval1D(_pos, _pos);
}
return new Interval1D(_pos, _pos + _del - 1);
}
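    // hypothetical example: for a variant with _pos = 100 and _del = 3, a
    // non-reference allele (ind != 0) yields the reference interval [100, 102],
    // while the reference allele (ind == 0) yields [100, 100].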
/*
gets the interval for the variant, accounting for variant size
*/
    public Interval1D get_var_interval(int ind) {
        try {
            if (ind == 0) {
                return new Interval1D(_pos, _pos);
            }
            return new Interval1D(_pos, _pos + max_len(ind) - 1);
        } catch (RuntimeException e) {
            log.error("Bad variant interval: " + toString());
            log.error("_pos: " + _pos);
            log.error("ind: " + ind);
            log.error("max_len(ind): " + max_len(ind));
            log.error(e.getStackTrace());
        }
        // fall through when the interval could not be constructed
        return null;
    }
// union of intervals from the genotypes
public Interval1D get_geno_interval() {
return get_interval(_paternal).union(get_interval(_maternal));
}
public Interval1D get_geno_var_interval() {
return get_var_interval(_paternal).union(get_var_interval(_maternal));
}
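/*
* Worked example (not in the original source): for a heterozygous 3 bp deletion at _pos = 100 with
* _paternal = 1 and _maternal = 0, get_interval(1) is [100, 102] and get_interval(0) is [100, 100],
* so get_geno_interval() is the union [100, 102]. If allele 1 were instead a 5 bp insertion (_del = 0),
* get_interval(1) would collapse to [100, 100] while get_var_interval(1) would span [100, 104], which is
* the difference get_geno_var_interval() captures.
*/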
public Genotypes getGeno() {
return new Genotypes(_paternal, _maternal);
}
/*
* 0 = paternal
* 1 = maternal
* otherwise returns -1
*/
public int get_allele(int parent) {
if (parent == 0) {
return _paternal;
} else if (parent == 1) {
return _maternal;
}
return -1;
}
public void set_allele(int parent, byte allele) {
if (parent == 0) {
_paternal = allele;
} else if (parent == 1) {
_maternal = allele;
}
}
// TODO this is wrong, but it only affects the count of variant bases
public int variantBases() {
int ret = _del;
for (int i = 0; i < _alts.length; i++) {
if (_del != _alts[i].length()) {
ret += _alts[i].length();
}
}
return ret;
}
/**
* @param ind index of allele (starts at 1, 0 is reference)
* @return type of allele at index ind
*/
public Type getType(int ind) {
if (ind == 0) {
return Type.Reference;
}
FlexSeq.Type type = _alts[ind - 1].getType();
switch (type) {
case DUP:
return Type.Tandem_Duplication;
case INS:
return Type.Insertion;
case INV:
return Type.Inversion;
default:
break;
}
int inslen = insertion_len(ind);
int dellen = _del;
if (inslen == 0 && dellen == 0) {
return Type.Reference;
} else if (inslen == 1 && dellen == 1) {
return Type.SNP;
} else if (inslen == 0 && dellen > 0) {
return Type.Deletion;
} else if (inslen > 0 && dellen == 0) {
return Type.Insertion;
} else if (inslen == dellen) {
return Type.MNP;
}
return Type.Complex;
}
/**
* @return overall type of the variant considering both alleles
*/
public OverallType getType() {
int[] allele = {get_allele(0), get_allele(1)};
// check Reference
boolean is_ref = true;
for (int a = 0; a < 2; a++) {
if (getType(allele[a]) != Type.Reference) {
is_ref = false;
break;
}
}
if (is_ref) {
return OverallType.Reference;
}
// check SNP
boolean is_snp = true;
for (int a = 0; a < 2; a++) {
if (allele[a] > 0 && getType(allele[a]) != Type.SNP) {
is_snp = false;
break;
}
}
if (is_snp) {
return OverallType.SNP;
}
// check INV
boolean is_inv = true;
for (int a = 0; a < 2; a++) {
if (allele[a] > 0 && getType(allele[a]) != Type.Inversion) {
is_inv = false;
break;
}
}
if (is_inv) {
return OverallType.Inversion;
}
// check DUP
boolean is_dup = true;
for (int a = 0; a < 2; a++) {
if (allele[a] > 0 && getType(allele[a]) != Type.Tandem_Duplication) {
is_dup = false;
break;
}
}
if (is_dup) {
return OverallType.Tandem_Duplication;
}
// check Deletion
boolean is_del = true;
for (int a = 0; a < 2; a++) {
if (allele[a] > 0 && getType(allele[a]) != Type.Deletion) {
is_del = false;
break;
}
}
if (is_del) {
return OverallType.Deletion;
}
// check Insertion
boolean is_ins = true;
for (int a = 0; a < 2; a++) {
if (allele[a] > 0 && getType(allele[a]) != Type.Insertion) {
is_ins = false;
break;
}
}
if (is_ins) {
return OverallType.Insertion;
}
/* Treat these as complex for now
// check INDEL
boolean is_indel = true;
for (int a = 0; a < 2; a++) {
if (allele[a] > 0 && !(getType(get_allele(a)) == Type.Deletion || getType(get_allele(a)) == Type.Insertion)) {
is_indel = false;
break;
}
}
if (is_indel) {
return OverallType.INDEL;
}
// check MNP
boolean is_mnp = true;
for (int a = 0; a < 2; a++) {
if (allele[a] > 0 && getType(get_allele(a)) != Type.MNP) {
is_mnp = false;
break;
}
}
if (is_mnp) {
return OverallType.MNP;
}
*/
// otherwise it is complex
return OverallType.Complex;
}
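/*
* Illustration (not in the original source, based on the checks above): a het SNP (paternal allele of
* Type.SNP, maternal allele 0/reference) yields OverallType.SNP, because reference alleles are skipped by
* the "allele[a] > 0" guard. Mixing allele types, e.g. one Insertion allele and one Deletion allele, falls
* through every check and returns OverallType.Complex.
*/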
public FlexSeq getAlt(int ind) {
if (ind <= 0 || ind > _alts.length)
return null;
return _alts[ind - 1];
}
public void setAlt(int ind, FlexSeq alt) {
if (ind <= 0 || ind > _alts.length) {
return;
}
_alts[ind - 1] = alt;
}
public String getFilter() {
return _filter;
}
public boolean isPhased() {
return _isPhased;
}
public boolean isRef() {
return _paternal == 0 && _maternal == 0;
}
public String getVar_id() {
return _var_id;
}
public byte[] getRef() {
return _ref;
}
public String getOrig_Ref() {
try {
return _ref_deleted + new String(_ref, "US-ASCII");
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
return "";
}
}
public String getChr_name() {
return _chr_name;
}
public String getRef_deleted() {
return _ref_deleted;
}
public int getCN(int ind) {
if (ind <= 0 || ind > _alts.length)
return 0;
return _alts[ind - 1].getCopy_num();
}
/**
* @return true if any of the alternate alleles has copy number greater than 1
*/
public boolean hasCN() {
boolean CN_positive = false;
for (int i = 0; i < _alts.length; i++) {
if (_alts[i].getCopy_num() > 1) {
CN_positive = true;
}
}
return CN_positive;
}
public StringBuilder alt_string() {
StringBuilder sbStr = new StringBuilder();
for (int i = 0; i < _alts.length; i++) {
if (i > 0) {
sbStr.append(",");
}
if (_alts[i].isSeq()) {
sbStr.append(_ref_deleted + _alts[i].toString());
} else {
sbStr.append(_alts[i].toString());
}
}
return sbStr;
}
public int get_num_alt() {
return _alts.length;
}
public void randomizeHaplotype() {
if (_rand.nextDouble() > 0.5) {
return;
}
byte tmp = _paternal;
_paternal = _maternal;
_maternal = tmp;
return;
}
/*
Returns true if the variant is homozygous
*/
public boolean isHom() {
return (_paternal == _maternal);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Variant variant = (Variant) o;
if (_chr != variant._chr) return false;
if (_del != variant._del) return false;
if (_isPhased != variant._isPhased) return false;
if (_maternal != variant._maternal) return false;
if (_paternal != variant._paternal) return false;
if (_pos != variant._pos) return false;
if (full_idx != variant.full_idx) return false;
if (idx != variant.idx) return false;
if (!Arrays.equals(_alts, variant._alts)) return false;
if (_chr_name != null ? !_chr_name.equals(variant._chr_name) : variant._chr_name != null) return false;
if (_filter != null ? !_filter.equals(variant._filter) : variant._filter != null) return false;
if (!Arrays.equals(_ref, variant._ref)) return false;
if (_ref_deleted != null ? !_ref_deleted.equals(variant._ref_deleted) : variant._ref_deleted != null)
return false;
if (_var_id != null ? !_var_id.equals(variant._var_id) : variant._var_id != null) return false;
return true;
}
@Override
public int hashCode() {
int result = _chr;
result = 31 * result + _pos;
result = 31 * result + _del;
result = 31 * result + (_ref != null ? Arrays.hashCode(_ref) : 0);
result = 31 * result + (_chr_name != null ? _chr_name.hashCode() : 0);
result = 31 * result + (_alts != null ? Arrays.hashCode(_alts) : 0);
result = 31 * result + (int) _maternal;
result = 31 * result + (int) _paternal;
result = 31 * result + (_isPhased ? 1 : 0);
result = 31 * result + (_filter != null ? _filter.hashCode() : 0);
result = 31 * result + (_var_id != null ? _var_id.hashCode() : 0);
result = 31 * result + (_ref_deleted != null ? _ref_deleted.hashCode() : 0);
result = 31 * result + idx;
result = 31 * result + full_idx;
return result;
}
public String getLength() {
StringBuilder len = new StringBuilder();
for (int i = 0; i < _alts.length; i++) {
if (i > 0) {
len.append(',');
}
Type t = getType(i + 1);
if (t == Type.Deletion) {
len.append(-_del); // negative for deletions
} else if (t == Type.Complex) {
int alt_len = _alts[i].length();
if (_del > alt_len) {
len.append(-_del);
} else {
len.append(alt_len);
}
} else {
len.append(_alts[i].length());
}
}
return len.toString();
}
/**
* Builds a VCF record (without the trailing genotype value) into the given StringBuilder.
*
* @param sbStr StringBuilder to append the VCF fields to
*/
// TODO this should be self-contained and output a complete VCF record
private void buildVCFstr(StringBuilder sbStr) {
// chromosome name
sbStr.append(_chr_name);
sbStr.append("\t");
// start position
sbStr.append(_pos - _ref_deleted.length());
sbStr.append('\t');
// variant id
sbStr.append(_var_id);
sbStr.append("\t");
// ref allele
sbStr.append(getOrig_Ref());
sbStr.append("\t");
// alt alleles
sbStr.append(alt_string().toString());
sbStr.append("\t");
// variant quality
sbStr.append(".\t");
// pass label
sbStr.append(_filter);
sbStr.append("\t");
// INFO
if (getType() == OverallType.Tandem_Duplication) {
sbStr.append("SVTYPE=DUP;");
sbStr.append("SVLEN=");
sbStr.append(getLength());
} else if (getType() == OverallType.Inversion) {
sbStr.append("SVTYPE=INV;");
sbStr.append("SVLEN=");
sbStr.append(getLength());
} else {
sbStr.append("SVLEN=");
sbStr.append(getLength());
}
sbStr.append("\t");
// label (GT)
if (hasCN()) {
sbStr.append("CN:GT\t");
} else {
sbStr.append("GT\t");
}
if (hasCN()) {
sbStr.append(String.valueOf(getCN(paternal())));
sbStr.append("|");
sbStr.append(String.valueOf(getCN(maternal())));
sbStr.append(":");
}
}
/**
* @return a VCF record of the variant
*/
public String toString() {
StringBuilder sbStr = new StringBuilder();
buildVCFstr(sbStr);
sbStr.append(paternal());
sbStr.append("|");
sbStr.append(maternal());
return sbStr.toString();
}
/**
* @param paternal specified paternal allele
* @param maternal specified maternal allele
* @return the VCF record with prespecified genotype
*/
public String toString(int paternal, int maternal) {
StringBuilder sbStr = new StringBuilder();
buildVCFstr(sbStr);
// append the requested genotype rather than the stored one
sbStr.append(paternal);
sbStr.append("|");
sbStr.append(maternal);
return sbStr.toString();
}
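/*
* Example of the record produced by toString() (illustrative values only, tabs shown as spaces): for a het
* 2 bp deletion on chromosome 1 at _pos = 1000 with _ref_deleted = "T", id rs1 and filter PASS, the output
* would look like
*   1   999   rs1   TAC   T   .   PASS   SVLEN=-2   GT   1|0
* since buildVCFstr shifts the position back by _ref_deleted.length() and prepends the deleted base to the
* REF and ALT columns.
*/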
// type for one allele
public enum Type {
Reference, SNP, Insertion, Deletion, MNP, Inversion, Tandem_Duplication, Complex;
}
// Type for whole variant
public enum OverallType {
//Reference, SNP, INDEL, Deletion, Insertion, MNP, Inversion, Tandem_Duplication, Complex;
Reference, SNP, Deletion, Insertion, Inversion, Tandem_Duplication, Complex;
}
}
|
package io.github.ryotan.code.util;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Optional;
import java.util.function.Predicate;
import java.util.stream.Stream;
import io.github.ryotan.code.CodeEnum;
import io.github.ryotan.code.CodeEnum.Filter;
import io.github.ryotan.code.CodeEnum.ShortLabel;
public final class CodeEnumReflectionUtil {
/**
* hidden constructor for utility class.
*/
private CodeEnumReflectionUtil() {
}
/**
* Checks whether {@code aClass} is an {@link Enum} type that implements {@link CodeEnum} with itself as
* the type parameter.
*
* @param aClass class to check
* @return {@code true} if {@code aClass} is a valid {@code CodeEnum} enum class, {@code false} otherwise
*/
public static boolean isValidCodeEnumClass(Class<?> aClass) {
if (!Enum.class.isAssignableFrom(aClass) || !CodeEnum.class.isAssignableFrom(aClass)) {
return false;
}
return Stream.of(aClass.getGenericInterfaces()).filter(ParameterizedType.class::isInstance).map(ParameterizedType.class::cast)
.filter(CodeEnumReflectionUtil::isCodeEnumType).anyMatch(type -> hasSameParameterizedType(type, aClass));
}
@SuppressWarnings("unchecked")
public static <C extends CodeEnum<C>> Class<C> getCodeEnumClass(Class<?> code) {
if (!isValidCodeEnumClass(code)) {
throw new IllegalArgumentException(String.format("%s is not a valid CodeEnum class. "
+ "CodeEnum class must be enum and implement CodeEnum<SELF_TYPE>.", code.getName()));
}
return (Class<C>) code;
}
public static <C extends CodeEnum<C>> Predicate<C> getCodeFilter(Class<C> code, String filter) {
return findCodePatternsFromField(code, filter).orElseGet(() -> findCodePatternsFromMethod(code, filter)
.orElseThrow(() -> new IllegalArgumentException(String.format("Code filter '%s' for %s is not found.", filter, code))));
}
/**
* Looks for a public field of {@code code} named {@code name} that is annotated with {@link Filter} and
* holds a {@link Predicate} over that {@link CodeEnum} type.
*
* @param code {@link CodeEnum} class to search
* @param name name of the filter field
* @return the filter {@link Predicate} if such a field exists, otherwise an empty {@link Optional}
*/
@SuppressWarnings("unchecked")
private static <C extends CodeEnum<C>> Optional<Predicate<C>> findCodePatternsFromField(Class<C> code, String name) {
try {
Field field = code.getField(name);
if (isTarget(field, Filter.class, Predicate.class) && isValidCodeFilter(field.getGenericType(), code)) {
return Optional.ofNullable((Predicate<C>) field.get(code));
}
} catch (NoSuchFieldException | IllegalAccessException e) {
// nop
}
return Optional.empty();
}
/**
* Looks for a public no-arg method of {@code code} named {@code name} that is annotated with {@link Filter}
* and returns a {@link Predicate} over that {@link CodeEnum} type.
*
* @param code {@link CodeEnum} class to search
* @param name name of the filter method
* @return the filter {@link Predicate} if such a method exists, otherwise an empty {@link Optional}
*/
@SuppressWarnings("unchecked")
private static <C extends CodeEnum<C>> Optional<Predicate<C>> findCodePatternsFromMethod(Class<C> code, String name) {
try {
Method method = code.getMethod(name);
if (isTarget(method, Filter.class, Predicate.class) && isValidCodeFilter(method.getGenericReturnType(), code)) {
return Optional.ofNullable((Predicate<C>) method.invoke(code));
}
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
// nop
}
return Optional.empty();
}
/**
* Checks whether {@code type} is a {@link ParameterizedType} whose single type argument is {@code code}.
*
* @param type generic type of the filter field or method
* @param code {@link CodeEnum} class the filter must be parameterized with
* @return {@code true} if {@code type} is a valid filter type for {@code code}
*/
private static boolean isValidCodeFilter(Type type, Class<?> code) {
if (type instanceof ParameterizedType) {
final Type[] actualTypeArguments = ((ParameterizedType) type).getActualTypeArguments();
return actualTypeArguments.length == 1 && actualTypeArguments[0].getTypeName().equals(code.getTypeName());
}
return false;
}
public static <C extends CodeEnum<C>> String getShortLabelValue(C code, String name) {
try {
final Field field = code.getClass().getField(name);
final ShortLabel[] shortLabel = field.getAnnotationsByType(ShortLabel.class);
if (shortLabel.length != 0) {
return shortLabel[0].value();
}
} catch (NoSuchFieldException e) {
// nop
}
throw new IllegalArgumentException(String.format("%s.%s is not annotated with '@%s'.",
code.getClass().getSimpleName(), code, ShortLabel.class.getSimpleName()));
}
public static <C extends CodeEnum<C>> String getAnnotatedStringValue(C code, Class<? extends Annotation> marker, String name) {
return findAnnotatedStringValueFromField(code, marker, name)
.orElseGet(() -> findAnnotatedStringValueFromMethod(code, marker, name)
.orElseThrow(() -> new IllegalArgumentException(String.format("The field or method annotated as '@%s' with name '%s'" +
" is not found in %s.%s", marker.getSimpleName(), name, code.getClass().getSimpleName(), code))));
}
/**
* Looks for a public field of {@code code} named {@code name} that is annotated with {@code marker} and
* holds a {@link String} value.
*
* @param code {@code CodeEnum} constant to inspect
* @param marker marker annotation the field must carry
* @param name name of the field
* @return the field value if found, otherwise an empty {@link Optional}
*/
private static <C extends CodeEnum<C>> Optional<String> findAnnotatedStringValueFromField(C code, Class<? extends Annotation> marker, String name) {
try {
Field field = code.getClass().getField(name);
if (isTarget(field, marker, String.class)) {
return Optional.ofNullable((String) field.get(code));
}
} catch (NoSuchFieldException | IllegalAccessException e) {
// nop
}
return Optional.empty();
}
/**
* Looks for a public no-arg method of {@code code} named {@code name} that is annotated with {@code marker}
* and returns a {@link String} value.
*
* @param code {@code CodeEnum} constant to inspect
* @param marker marker annotation the method must carry
* @param name name of the method
* @return the method's return value if found, otherwise an empty {@link Optional}
*/
private static <C extends CodeEnum<C>> Optional<String> findAnnotatedStringValueFromMethod(C code, Class<? extends Annotation> marker, String name) {
try {
Method method = code.getClass().getMethod(name);
if (isTarget(method, marker, String.class)) {
return Optional.ofNullable((String) method.invoke(code));
}
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
// nop
}
return Optional.empty();
}
/**
* Checks whether {@code field} is annotated with {@code marker} and its type is assignable to
* {@code expectedClass}.
*
* @param field field to check
* @param marker marker annotation
* @param expectedClass expected type of the field
* @return {@code true} if the field is a valid target
*/
private static boolean isTarget(Field field, Class<? extends Annotation> marker, Class<?> expectedClass) {
return isAnnotated(field, marker) && expectedClass.isAssignableFrom(field.getType());
}
/**
* Checks whether {@code method} is annotated with {@code marker} and its return type is assignable to
* {@code expectedClass}.
*
* @param method method to check
* @param marker marker annotation
* @param expectedClass expected return type of the method
* @return {@code true} if the method is a valid target
*/
private static boolean isTarget(Method method, Class<? extends Annotation> marker, Class<?> expectedClass) {
return isAnnotated(method, marker) && expectedClass.isAssignableFrom(method.getReturnType());
}
/**
* Checks whether {@code annotated} is annotated with {@code marker}.
*
* @param annotated element to check
* @param marker marker annotation
* @return {@code true} if the annotation is present on {@code annotated}
*/
private static boolean isAnnotated(AnnotatedElement annotated, Class<? extends Annotation> marker) {
return annotated.isAnnotationPresent(marker);
}
/**
* Checks whether {@code gif} is the {@link CodeEnum} interface as a parameterized type.
*
* @param gif generic interface type to check
* @return {@code true} if the raw type of {@code gif} is {@link CodeEnum}
*/
private static boolean isCodeEnumType(ParameterizedType gif) {
return gif.getRawType().getTypeName().equals(CodeEnum.class.getName());
}
/**
* Checks whether the single type argument of {@code gif} is {@code aClass} itself.
*
* @param gif parameterized {@link CodeEnum} interface type
* @param aClass implementing class
* @return {@code true} if {@code gif} is parameterized with {@code aClass}
*/
private static boolean hasSameParameterizedType(ParameterizedType gif, Class<?> aClass) {
final Type[] actualTypeArguments = gif.getActualTypeArguments();
return actualTypeArguments.length == 1 && actualTypeArguments[0].getTypeName().equals(aClass.getName());
}
}
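/*
* Usage sketch (hypothetical types, not part of this file): given an enum Gender implementing
* CodeEnum<Gender> that declares a public static field
*   @Filter public static final Predicate<Gender> ADULT_ONLY = ...;
* and constants annotated with @ShortLabel("M"), the helpers above would be used roughly as
*
*   Class<Gender> type = CodeEnumReflectionUtil.getCodeEnumClass(Gender.class);
*   Predicate<Gender> adults = CodeEnumReflectionUtil.getCodeFilter(Gender.class, "ADULT_ONLY");
*   String label = CodeEnumReflectionUtil.getShortLabelValue(Gender.MALE, "MALE");
*
* getCodeFilter falls back from fields to no-arg methods, and both getCodeFilter and getShortLabelValue
* throw IllegalArgumentException when nothing matching is found.
*/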
|
package com.bitpay.sdk.model.Invoice;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
import java.util.Date;
@JsonIgnoreProperties(ignoreUnknown = true)
public class Refund {
private String _guid;
private Double _amount;
private String _currency;
private String _invoice;
private Boolean _preview;
private Boolean _immediate;
private Boolean _buyerPaysRefundFee;
private String _reference;
private Double _refundFee;
private Date _lastRefundNotification;
/**
* Amount to be refunded in terms of the transaction currency.
*/
private BigDecimal _transactionAmount;
/**
* The refund fee expressed in terms of transaction currency.
*/
private BigDecimal _transactionRefundFee;
/**
* The currency used for the invoice transaction.
*/
private String _transactionCurrency;
private String _id;
private Date _requestDate;
private String _status;
public Refund() {
}
// Request fields
@JsonProperty("guid")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public String getGuid() {
return _guid;
}
@JsonProperty("guid")
public void setGuid(String guid) { this._guid = guid; }
@JsonProperty("amount")
public Double getAmount() {
return _amount;
}
@JsonProperty("amount")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setAmount(Double amount) {
this._amount = amount;
}
@JsonProperty("currency")
public String getCurrency() {
return _currency;
}
@JsonProperty("currency")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setCurrency(String currency) {
this._currency = currency;
}
@JsonProperty("invoice")
public String getInvoice() {
return _invoice;
}
@JsonProperty("invoice")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setInvoice(String invoice) {
this._invoice = invoice;
}
@JsonProperty("preview")
public Boolean getPreview() {
return _preview;
}
@JsonProperty("preview")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setPreview(Boolean preview) {
this._preview = preview;
}
@JsonProperty("immediate")
public Boolean getImmediate() {
return _immediate;
}
@JsonProperty("immediate")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setImmediate(Boolean immediate) {
this._immediate = immediate;
}
@JsonProperty("buyerPaysRefundFee")
public Boolean getBuyerPaysRefundFee() {
return _buyerPaysRefundFee;
}
@JsonProperty("buyerPaysRefundFee")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setBuyerPaysRefundFee(Boolean buyerPaysRefundFee) {
this._buyerPaysRefundFee = buyerPaysRefundFee;
}
@JsonProperty("reference")
public String getReference() {
return _reference;
}
@JsonProperty("reference")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setReference(String reference) {
this._reference = reference;
}
// Response fields
@JsonIgnore
public String getId() {
return _id;
}
@JsonProperty("id")
public void setId(String id) {
this._id = id;
}
@JsonIgnore
public Date getRequestDate() {
return _requestDate;
}
@JsonProperty("requestDate")
public void setRequestDate(Date requestDate) {
this._requestDate = requestDate;
}
@JsonIgnore
public String getStatus() {
return _status;
}
@JsonProperty("status")
public void setStatus(String status) {
this._status = status;
}
/**
* Gets the {@link #_transactionAmount} for a Refund.
*
* @return the transaction amount of the Refund
*
* @see Refund
*/
@JsonProperty("transactionAmount")
public BigDecimal getTransactionAmount() {
return _transactionAmount;
}
/**
* Sets the {@link #_transactionAmount} for a Refund.
*
* @param transactionAmount Amount to be refunded in terms of the transaction currency
*
* @see Refund
*/
@JsonProperty("transactionAmount")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setTransactionAmount(BigDecimal transactionAmount) {
this._transactionAmount = transactionAmount;
}
/**
* Gets the {@link #_transactionRefundFee} for a Refund.
*
* @return the transaction refund fee of the Refund
*
* @see Refund
*/
@JsonProperty("transactionRefundFee")
public BigDecimal getTransactionRefundFee() {
return _transactionRefundFee;
}
/**
* Sets the {@link #_transactionRefundFee} for a Refund.
*
* @param transactionRefundFee The refund fee expressed in terms of transaction currency
*
* @see Refund
*/
@JsonProperty("transactionRefundFee")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setTransactionRefundFee(BigDecimal transactionRefundFee) {
this._transactionRefundFee = transactionRefundFee;
}
/**
* Gets the {@link #_transactionCurrency} for a Refund.
*
* @return the transaction currency of the Refund
*
* @see Refund
*/
@JsonProperty("transactionCurrency")
public String getTransactionCurrency() {
return _transactionCurrency;
}
/**
* Sets the {@link #_transactionCurrency} for a Refund.
*
* @param transactionCurrency The currency used for the invoice transaction
*
* @see Refund
*/
@JsonProperty("transactionCurrency")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public void setTransactionCurrency(String transactionCurrency) {
this._transactionCurrency = transactionCurrency;
}
@JsonIgnore
public Date getLastRefundNotification() {
return _lastRefundNotification;
}
@JsonProperty("lastRefundNotification")
public void setLastRefundNotification(Date lastRefundNotification) {
this._lastRefundNotification = lastRefundNotification;
}
@JsonIgnore
public Double getRefundFee() {
return _refundFee;
}
@JsonProperty("refundFee")
public void setRefundFee(Double refundFee) {
this._refundFee = refundFee;
}
}
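/*
* Sketch of how this model is typically serialized (standard Jackson usage; the field values are made up):
*
*   Refund refund = new Refund();
*   refund.setInvoice("invoiceId123");
*   refund.setAmount(10.0);
*   refund.setCurrency("USD");
*   String json = new com.fasterxml.jackson.databind.ObjectMapper().writeValueAsString(refund);
*
* The JsonInclude.Include.NON_DEFAULT annotations on the request accessors are intended to omit unset
* request fields from the payload, while the response-only properties (id, status, requestDate, refundFee,
* lastRefundNotification) are @JsonIgnore on their getters and so are never serialized back to the API.
*/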
|
package info.guardianproject.otr.app.im.plugin.xmpp;
import info.guardianproject.otr.TorProxyInfo;
import info.guardianproject.otr.app.im.R;
import info.guardianproject.otr.app.im.app.DatabaseUtils;
import info.guardianproject.otr.app.im.app.ImApp;
import info.guardianproject.otr.app.im.engine.Address;
import info.guardianproject.otr.app.im.engine.ChatGroup;
import info.guardianproject.otr.app.im.engine.ChatGroupManager;
import info.guardianproject.otr.app.im.engine.ChatSession;
import info.guardianproject.otr.app.im.engine.ChatSessionManager;
import info.guardianproject.otr.app.im.engine.Contact;
import info.guardianproject.otr.app.im.engine.ContactList;
import info.guardianproject.otr.app.im.engine.ContactListListener;
import info.guardianproject.otr.app.im.engine.ContactListManager;
import info.guardianproject.otr.app.im.engine.ImConnection;
import info.guardianproject.otr.app.im.engine.ImEntity;
import info.guardianproject.otr.app.im.engine.ImErrorInfo;
import info.guardianproject.otr.app.im.engine.ImException;
import info.guardianproject.otr.app.im.engine.Invitation;
import info.guardianproject.otr.app.im.engine.Message;
import info.guardianproject.otr.app.im.engine.Presence;
import info.guardianproject.otr.app.im.plugin.xmpp.auth.GTalkOAuth2;
import info.guardianproject.otr.app.im.provider.Imps;
import info.guardianproject.otr.app.im.provider.ImpsErrorInfo;
import info.guardianproject.util.DNSUtil;
import info.guardianproject.util.Debug;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Random;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.X509TrustManager;
import org.apache.harmony.javax.security.auth.callback.Callback;
import org.apache.harmony.javax.security.auth.callback.CallbackHandler;
import org.jivesoftware.smack.ConnectionConfiguration;
import org.jivesoftware.smack.ConnectionConfiguration.SecurityMode;
import org.jivesoftware.smack.ConnectionListener;
import org.jivesoftware.smack.PacketCollector;
import org.jivesoftware.smack.PacketListener;
import org.jivesoftware.smack.Roster;
import org.jivesoftware.smack.RosterEntry;
import org.jivesoftware.smack.RosterGroup;
import org.jivesoftware.smack.RosterListener;
import org.jivesoftware.smack.SASLAuthentication;
import org.jivesoftware.smack.SmackConfiguration;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.filter.AndFilter;
import org.jivesoftware.smack.filter.PacketFilter;
import org.jivesoftware.smack.filter.PacketIDFilter;
import org.jivesoftware.smack.filter.PacketTypeFilter;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Message.Body;
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.packet.Presence.Mode;
import org.jivesoftware.smack.packet.Presence.Type;
import org.jivesoftware.smack.provider.PrivacyProvider;
import org.jivesoftware.smack.provider.ProviderManager;
import org.jivesoftware.smack.proxy.ProxyInfo;
import org.jivesoftware.smack.proxy.ProxyInfo.ProxyType;
import org.jivesoftware.smackx.Form;
import org.jivesoftware.smackx.FormField;
import org.jivesoftware.smackx.GroupChatInvitation;
import org.jivesoftware.smackx.PrivateDataManager;
import org.jivesoftware.smackx.ServiceDiscoveryManager;
import org.jivesoftware.smackx.bytestreams.socks5.provider.BytestreamsProvider;
import org.jivesoftware.smackx.muc.MultiUserChat;
import org.jivesoftware.smackx.muc.RoomInfo;
import org.jivesoftware.smackx.packet.ChatStateExtension;
import org.jivesoftware.smackx.packet.LastActivity;
import org.jivesoftware.smackx.packet.OfflineMessageInfo;
import org.jivesoftware.smackx.packet.OfflineMessageRequest;
import org.jivesoftware.smackx.packet.SharedGroupsInfo;
import org.jivesoftware.smackx.packet.VCard;
import org.jivesoftware.smackx.provider.AdHocCommandDataProvider;
import org.jivesoftware.smackx.provider.DataFormProvider;
import org.jivesoftware.smackx.provider.DelayInformationProvider;
import org.jivesoftware.smackx.provider.DiscoverInfoProvider;
import org.jivesoftware.smackx.provider.DiscoverItemsProvider;
import org.jivesoftware.smackx.provider.MUCAdminProvider;
import org.jivesoftware.smackx.provider.MUCOwnerProvider;
import org.jivesoftware.smackx.provider.MUCUserProvider;
import org.jivesoftware.smackx.provider.MessageEventProvider;
import org.jivesoftware.smackx.provider.MultipleAddressesProvider;
import org.jivesoftware.smackx.provider.RosterExchangeProvider;
import org.jivesoftware.smackx.provider.StreamInitiationProvider;
import org.jivesoftware.smackx.provider.VCardProvider;
import org.jivesoftware.smackx.provider.XHTMLExtensionProvider;
import org.jivesoftware.smackx.search.UserSearch;
import android.accounts.AccountManager;
import android.content.ContentResolver;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Build;
import android.os.RemoteException;
import android.text.TextUtils;
import android.util.Log;
import de.duenndns.ssl.MemorizingTrustManager;
public class XmppConnection extends ImConnection implements CallbackHandler {
private static final String DISCO_FEATURE = "http://jabber.org/protocol/disco#info";
final static String TAG = "GB.XmppConnection";
private final static boolean PING_ENABLED = true;
private XmppContactListManager mContactListManager;
private Contact mUser;
private boolean mUseTor;
// watch out, this is a different XMPPConnection class than XmppConnection! ;)
// Synchronized by executor thread
private MyXMPPConnection mConnection;
private XmppStreamHandler mStreamHandler;
private Roster mRoster;
private XmppChatSessionManager mSessionManager;
private ConnectionConfiguration mConfig;
// True if we are in the process of reconnecting. Reconnection is retried once per heartbeat.
// Synchronized by executor thread.
private boolean mNeedReconnect;
private boolean mRetryLogin;
private ThreadPoolExecutor mExecutor;
private Timer mTimerPresence;
private ProxyInfo mProxyInfo = null;
private long mAccountId = -1;
private long mProviderId = -1;
private boolean mIsGoogleAuth = false;
private final static String SSLCONTEXT_TYPE = "TLS";
private static SSLContext sslContext;
private Context aContext;
private final static String IS_GOOGLE = "google";
private final static int SOTIMEOUT = 60000;
private PacketCollector mPingCollector;
private String mUsername;
private String mPassword;
private String mResource;
private int mPriority;
private int mGlobalId;
private static int mGlobalCount;
private final Random rndForTorCircuits = new Random();
// Maintains a sequence counting up to the user configured heartbeat interval
private int heartbeatSequence = 0;
LinkedList<String> qAvatar = new LinkedList <String>();
LinkedList<org.jivesoftware.smack.packet.Presence> qPresence = new LinkedList<org.jivesoftware.smack.packet.Presence>();
LinkedList<org.jivesoftware.smack.packet.Packet> qPacket = new LinkedList<org.jivesoftware.smack.packet.Packet>();
public XmppConnection(Context context) throws IOException, KeyStoreException, NoSuchAlgorithmException, CertificateException {
super(context);
synchronized (XmppConnection.class) {
mGlobalId = mGlobalCount++;
}
aContext = context;
Debug.onConnectionStart();
SmackConfiguration.setPacketReplyTimeout(SOTIMEOUT);
// Create a single threaded executor. This will serialize actions on the underlying connection.
createExecutor();
addProviderManagerExtensions();
XmppStreamHandler.addExtensionProviders();
DeliveryReceipts.addExtensionProviders();
ServiceDiscoveryManager.setIdentityName("ChatSecure");
ServiceDiscoveryManager.setIdentityType("phone");
}
public void initUser(long providerId, long accountId) throws ImException
{
ContentResolver contentResolver = mContext.getContentResolver();
Cursor cursor = contentResolver.query(Imps.ProviderSettings.CONTENT_URI,new String[] {Imps.ProviderSettings.NAME, Imps.ProviderSettings.VALUE},Imps.ProviderSettings.PROVIDER + "=?",new String[] { Long.toString(providerId)},null);
if (cursor == null)
throw new ImException("unable to query settings");
Imps.ProviderSettings.QueryMap providerSettings = new Imps.ProviderSettings.QueryMap(
cursor, contentResolver, providerId, false, null);
mProviderId = providerId;
mAccountId = accountId;
mUser = makeUser(providerSettings, contentResolver);
mUseTor = providerSettings.getUseTor();
providerSettings.close();
}
private Contact makeUser(Imps.ProviderSettings.QueryMap providerSettings, ContentResolver contentResolver) {
String userName = Imps.Account.getUserName(contentResolver, mAccountId);
String domain = providerSettings.getDomain();
String xmppName = userName + '@' + domain + '/' + providerSettings.getXmppResource();
return new Contact(new XmppAddress(xmppName), userName);
}
private void createExecutor() {
mExecutor = new ThreadPoolExecutor(1, 1, 1L, TimeUnit.SECONDS,
new LinkedBlockingQueue<Runnable>());
}
private boolean execute(Runnable runnable) {
if (mExecutor == null)
createExecutor (); //if we disconnected, will need to recreate executor here, because join() made it null
try {
mExecutor.execute(runnable);
} catch (RejectedExecutionException ex) {
return false;
}
return true;
}
// Execute a runnable only if we are idle
private boolean executeIfIdle(Runnable runnable) {
if (mExecutor.getActiveCount() + mExecutor.getQueue().size() == 0) {
return execute(runnable);
}
return false;
}
// This runs in executor thread, and since there is only one such thread, we will definitely
// succeed in shutting down the executor if we get here.
public void join() {
final ExecutorService executor = mExecutor;
mExecutor = null;
// This will send us an interrupt, which we will ignore. We will terminate
// anyway after the caller is done. This also drains the executor queue.
if (executor != null)
executor.shutdownNow();
}
// For testing
boolean joinGracefully() throws InterruptedException {
final ExecutorService executor = mExecutor;
mExecutor = null;
// This will send us an interrupt, which we will ignore. We will terminate
// anyway after the caller is done. This also drains the executor queue.
if (executor != null) {
executor.shutdown();
return executor.awaitTermination(1, TimeUnit.SECONDS);
}
return false;
}
public void sendPacket(final org.jivesoftware.smack.packet.Packet packet) {
qPacket.add(packet);
}
void postpone(final org.jivesoftware.smack.packet.Packet packet) {
if (packet instanceof org.jivesoftware.smack.packet.Message) {
boolean groupChat = ((org.jivesoftware.smack.packet.Message) packet).getType().equals( org.jivesoftware.smack.packet.Message.Type.groupchat);
ChatSession session = findOrCreateSession(packet.getTo(), groupChat);
session.onMessagePostponed(packet.getPacketID());
}
}
private boolean mLoadingAvatars = false;
private void loadVCardsAsync ()
{
if (!mLoadingAvatars)
{
execute(new AvatarLoader());
}
}
private class AvatarLoader implements Runnable
{
@Override
public void run () {
mLoadingAvatars = true;
ContentResolver resolver = mContext.getContentResolver();
try
{
while (qAvatar.size()>0)
{
loadVCard (resolver, qAvatar.pop(), null);
}
}
catch (Exception e) {}
mLoadingAvatars = false;
}
}
private boolean loadVCard (ContentResolver resolver, String jid, String hash)
{
try {
boolean loadAvatar = false;
if (hash != null)
loadAvatar = (!DatabaseUtils.doesAvatarHashExist(resolver, Imps.Avatars.CONTENT_URI, jid, hash));
else
{
loadAvatar = DatabaseUtils.hasAvatarContact(resolver, Imps.Avatars.CONTENT_URI, jid);
}
if (!loadAvatar)
{
debug(ImApp.LOG_TAG, "loading vcard for: " + jid);
VCard vCard = new VCard();
// FIXME synchronize this to executor thread
vCard.load(mConnection, jid);
// If VCard is loaded, then save the avatar to the personal folder.
String avatarHash = vCard.getAvatarHash();
if (avatarHash != null)
{
byte[] avatarBytes = vCard.getAvatar();
if (avatarBytes != null)
{
debug(ImApp.LOG_TAG, "found avatar image in vcard for: " + jid);
debug(ImApp.LOG_TAG, "start avatar length: " + avatarBytes.length);
int width = ImApp.DEFAULT_AVATAR_WIDTH;
int height = ImApp.DEFAULT_AVATAR_HEIGHT;
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeByteArray(avatarBytes, 0, avatarBytes.length,options);
options.inSampleSize = DatabaseUtils.calculateInSampleSize(options, width, height);
options.inJustDecodeBounds = false;
Bitmap b = BitmapFactory.decodeByteArray(avatarBytes, 0, avatarBytes.length,options);
b = Bitmap.createScaledBitmap(b, ImApp.DEFAULT_AVATAR_WIDTH, ImApp.DEFAULT_AVATAR_HEIGHT, false);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
b.compress(Bitmap.CompressFormat.JPEG, 80, stream);
byte[] avatarBytesCompressed = stream.toByteArray();
debug(ImApp.LOG_TAG, "compressed avatar length: " + avatarBytesCompressed.length);
DatabaseUtils.insertAvatarBlob(resolver, Imps.Avatars.CONTENT_URI, mProviderId, mAccountId, avatarBytesCompressed, hash, jid);
// int providerId, int accountId, byte[] data, String hash,String contact
return true;
}
}
}
} catch (XMPPException e) {
// Log.d(ImApp.LOG_TAG,"err loading vcard");
if (e.getStreamError() != null)
{
String streamErr = e.getStreamError().getCode();
if (streamErr != null && (streamErr.contains("404") || streamErr.contains("503")))
{
return false;
}
}
}
return false;
}
@Override
protected void doUpdateUserPresenceAsync(Presence presence) {
org.jivesoftware.smack.packet.Presence packet = makePresencePacket(presence);
sendPacket(packet);
mUserPresence = presence;
notifyUserPresenceUpdated();
}
private org.jivesoftware.smack.packet.Presence makePresencePacket(Presence presence) {
String statusText = presence.getStatusText();
Type type = Type.available;
Mode mode = Mode.available;
int priority = mPriority;
final int status = presence.getStatus();
if (status == Presence.AWAY) {
priority = 10;
mode = Mode.away;
} else if (status == Presence.IDLE) {
priority = 15;
mode = Mode.away;
} else if (status == Presence.DO_NOT_DISTURB) {
priority = 5;
mode = Mode.dnd;
} else if (status == Presence.OFFLINE) {
priority = 0;
type = Type.unavailable;
statusText = "Offline";
}
// The user set priority is the maximum allowed
if (priority > mPriority)
priority = mPriority;
org.jivesoftware.smack.packet.Presence packet = new org.jivesoftware.smack.packet.Presence(
type, statusText, priority, mode);
return packet;
}
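/*
* Illustration of the priority clamping above (made-up numbers): with a configured mPriority of 8,
* Presence.AWAY maps to mode=away with packet priority min(10, 8) = 8, while Presence.DO_NOT_DISTURB maps
* to mode=dnd with priority 5, already below the cap. Presence.OFFLINE always sends an unavailable
* presence with priority 0.
*/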
@Override
public int getCapability() {
return ImConnection.CAPABILITY_SESSION_REESTABLISHMENT | ImConnection.CAPABILITY_GROUP_CHAT;
}
private XmppChatGroupManager mChatGroupManager = null;
@Override
public synchronized ChatGroupManager getChatGroupManager() {
if (mChatGroupManager == null)
mChatGroupManager = new XmppChatGroupManager();
return mChatGroupManager;
}
public class XmppChatGroupManager extends ChatGroupManager
{
private Hashtable<String,MultiUserChat> mMUCs = new Hashtable<String,MultiUserChat>();
public MultiUserChat getMultiUserChat (String chatRoomJid)
{
return mMUCs.get(chatRoomJid);
}
@Override
public boolean createChatGroupAsync(String chatRoomJid, String nickname) throws Exception {
RoomInfo roomInfo = null;
Address address = new XmppAddress (chatRoomJid);
try
{
//first check if the room already exists
roomInfo = MultiUserChat.getRoomInfo(mConnection, chatRoomJid);
}
catch (Exception e)
{
//lookup failed; leave roomInfo null and fall through to create the room
}
if (roomInfo == null)
{
//if the room does not exist, then create one
//should be room@server
String[] parts = chatRoomJid.split("@");
String room = parts[0];
String server = parts[1];
try {
// Create a MultiUserChat using a Connection for a room
MultiUserChat muc = new MultiUserChat(mConnection, chatRoomJid);
try
{
// Create the room
muc.create(nickname);
}
catch (XMPPException iae)
{
if (iae.getMessage().contains("Creation failed"))
{
//some servers don't return the proper 201 create code, so we can just assume the room was created!
}
else
{
throw iae;
}
}
try
{
Form form = muc.getConfigurationForm();
Form submitForm = form.createAnswerForm();
for (Iterator fields = form.getFields();fields.hasNext();){
FormField field = (FormField) fields.next();
if(!FormField.TYPE_HIDDEN.equals(field.getType()) && field.getVariable()!= null){
submitForm.setDefaultAnswer(field.getVariable());
}
}
submitForm.setAnswer("muc#roomconfig_publicroom", true);
muc.sendConfigurationForm(submitForm);
}
catch (XMPPException xe)
{
if (Debug.DEBUG_ENABLED)
Log.w(ImApp.LOG_TAG,"(ignoring) got an error configuring MUC room: " + xe.getLocalizedMessage());
}
muc.join(nickname);
ChatGroup chatGroup = new ChatGroup(address,room,this);
mGroups.put(address.getAddress(), chatGroup);
mMUCs.put(chatRoomJid, muc);
return true;
} catch (XMPPException e) {
Log.e(ImApp.LOG_TAG,"error creating MUC",e);
return false;
}
}
else
{
//otherwise, join the room!
joinChatGroupAsync(address);
return true;
}
}
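/*
* Example (hypothetical room address): createChatGroupAsync("myroom@conference.example.com", "alice")
* first checks whether the room already exists, creates and configures it as a public room if it does not,
* joins it with the nickname "alice", and registers the resulting ChatGroup/MultiUserChat pair in mGroups
* and mMUCs for later lookup.
*/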
@Override
public void deleteChatGroupAsync(ChatGroup group) {
String chatRoomJid = group.getAddress().getAddress();
if (mMUCs.containsKey(chatRoomJid))
{
MultiUserChat muc = mMUCs.get(chatRoomJid);
try {
muc.destroy("", null);
mMUCs.remove(chatRoomJid);
} catch (XMPPException e) {
Log.e(ImApp.LOG_TAG,"error destroying MUC",e);
}
}
}
@Override
protected void addGroupMemberAsync(ChatGroup group, Contact contact) {
String chatRoomJid = group.getAddress().getAddress();
if (mMUCs.containsKey(chatRoomJid))
{
MultiUserChat muc = mMUCs.get(chatRoomJid);
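// TODO actually add/invite the member to the room; the MUC looked up above is currently unused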
}
}
@Override
protected void removeGroupMemberAsync(ChatGroup group, Contact contact) {
// TODO Auto-generated method stub
}
@Override
public void joinChatGroupAsync(Address address) {
String chatRoomJid = address.getAddress();
String[] parts = chatRoomJid.split("@");
String room = parts[0];
String server = parts[1];
String nickname = mUser.getName().split("@")[0];
try {
// Create a MultiUserChat using a Connection for a room
MultiUserChat muc = new MultiUserChat(mConnection, chatRoomJid);
// Create the room
muc.join(nickname);
ChatGroup chatGroup = new ChatGroup(address,room,this);
mGroups.put(address.getAddress(), chatGroup);
mMUCs.put(chatRoomJid, muc);
} catch (XMPPException e) {
Log.e(ImApp.LOG_TAG,"error joining MUC",e);
}
}
@Override
public void leaveChatGroupAsync(ChatGroup group) {
String chatRoomJid = group.getAddress().getAddress();
if (mMUCs.containsKey(chatRoomJid))
{
MultiUserChat muc = mMUCs.get(chatRoomJid);
muc.leave();
mMUCs.remove(chatRoomJid);
}
}
@Override
public void inviteUserAsync(ChatGroup group, Contact invitee) {
String chatRoomJid = group.getAddress().getAddress();
if (mMUCs.containsKey(chatRoomJid))
{
MultiUserChat muc = mMUCs.get(chatRoomJid);
String reason = ""; //no reason for now
muc.invite(invitee.getAddress().getAddress(),reason);
}
}
@Override
public void acceptInvitationAsync(Invitation invitation) {
Address addressGroup = invitation.getGroupAddress();
joinChatGroupAsync (addressGroup);
}
@Override
public void rejectInvitationAsync(Invitation invitation) {
Address addressGroup = invitation.getGroupAddress();
String reason = ""; // no reason for now
MultiUserChat.decline(mConnection, addressGroup.getAddress(),invitation.getSender().getAddress(),reason);
}
};
@Override
public synchronized ChatSessionManager getChatSessionManager() {
if (mSessionManager == null)
mSessionManager = new XmppChatSessionManager();
return mSessionManager;
}
@Override
public synchronized XmppContactListManager getContactListManager() {
if (mContactListManager == null)
mContactListManager = new XmppContactListManager();
return mContactListManager;
}
@Override
public Contact getLoginUser() {
return mUser;
}
@Override
public Map<String, String> getSessionContext() {
// Empty state for now (but must have at least one key)
return Collections.singletonMap("state", "empty");
}
@Override
public int[] getSupportedPresenceStatus() {
return new int[] { Presence.AVAILABLE, Presence.AWAY, Presence.IDLE, Presence.OFFLINE,
Presence.DO_NOT_DISTURB, };
}
@Override
public boolean isUsingTor() {
return mUseTor;
}
@Override
public void loginAsync(long accountId, String passwordTemp, long providerId, boolean retry) {
mAccountId = accountId;
mPassword = passwordTemp;
mProviderId = providerId;
mRetryLogin = retry;
ContentResolver contentResolver = mContext.getContentResolver();
if (mPassword == null)
mPassword = Imps.Account.getPassword(contentResolver, mAccountId);
mIsGoogleAuth = mPassword.startsWith(GTalkOAuth2.NAME);
if (mIsGoogleAuth)
{
mPassword = mPassword.split(":")[1];
}
Cursor cursor = contentResolver.query(Imps.ProviderSettings.CONTENT_URI,new String[] {Imps.ProviderSettings.NAME, Imps.ProviderSettings.VALUE},Imps.ProviderSettings.PROVIDER + "=?",new String[] { Long.toString(mProviderId)},null);
if (cursor == null)
return;
Imps.ProviderSettings.QueryMap providerSettings = new Imps.ProviderSettings.QueryMap(
cursor, contentResolver, mProviderId, false, null);
mUser = makeUser(providerSettings, contentResolver);
providerSettings.close();
execute(new Runnable() {
@Override
public void run() {
do_login();
}
});
}
// Runs in executor thread
private void do_login() {
/*
if (mConnection != null) {
setState(getState(), new ImErrorInfo(ImErrorInfo.CANT_CONNECT_TO_SERVER,
"still trying..."));
return;
}*/
ContentResolver contentResolver = mContext.getContentResolver();
Cursor cursor = contentResolver.query(Imps.ProviderSettings.CONTENT_URI,new String[] {Imps.ProviderSettings.NAME, Imps.ProviderSettings.VALUE},Imps.ProviderSettings.PROVIDER + "=?",new String[] { Long.toString(mProviderId)},null);
if (cursor == null)
return; //not going to work
Imps.ProviderSettings.QueryMap providerSettings = new Imps.ProviderSettings.QueryMap(
cursor, contentResolver, mProviderId, false, null);
// providerSettings is closed in initConnection();
String userName = Imps.Account.getUserName(contentResolver, mAccountId);
String defaultStatus = null;
mNeedReconnect = true;
setState(LOGGING_IN, null);
mUserPresence = new Presence(Presence.AVAILABLE, defaultStatus, Presence.CLIENT_TYPE_MOBILE);
try {
if (userName == null || userName.length() == 0)
throw new XMPPException("empty username not allowed");
initConnectionAndLogin(providerSettings, userName);
setState(LOGGED_IN, null);
debug(TAG, "logged in");
mNeedReconnect = false;
} catch (XMPPException e) {
debug(TAG, "exception thrown on connection",e);
ImErrorInfo info = new ImErrorInfo(ImErrorInfo.CANT_CONNECT_TO_SERVER, e.getMessage());
mRetryLogin = true; // our default behavior is to retry
if (mConnection != null && mConnection.isConnected() && (!mConnection.isAuthenticated())) {
if (mIsGoogleAuth)
{
debug (TAG, "google failed; may need to refresh");
String newPassword = refreshGoogleToken (userName, mPassword,providerSettings.getDomain());
if (newPassword != null)
mPassword = newPassword;
mRetryLogin = true;
}
else
{
debug(TAG, "not authorized - will not retry");
info = new ImErrorInfo(ImErrorInfo.INVALID_USERNAME, "invalid user/password");
mRetryLogin = false;
mNeedReconnect = false;
}
}
if (mRetryLogin && getState() != SUSPENDED) {
debug(TAG, "will retry");
setState(LOGGING_IN, info);
maybe_reconnect();
} else {
debug(TAG, "will not retry");
disconnect();
disconnected(info);
}
} catch (Exception e) {
debug(TAG, "login failed",e);
mRetryLogin = true;
mNeedReconnect = true;
debug(TAG, "will retry");
ImErrorInfo info = new ImErrorInfo(ImErrorInfo.UNKNOWN_ERROR, "keymanagement exception");
setState(LOGGING_IN, info);
}
finally {
providerSettings.close();
if (!cursor.isClosed())
cursor.close();
}
}
private String refreshGoogleToken (String userName, String oldPassword, String domain)
{
String expiredToken = oldPassword;
if (expiredToken.startsWith(IS_GOOGLE))
{
expiredToken = expiredToken.split(":")[1];
}
//invalidate our old one, that is locally cached
AccountManager.get(mContext.getApplicationContext()).invalidateAuthToken("com.google", expiredToken);
//request a new one
String password = GTalkOAuth2.getGoogleAuthToken(userName + '@' + domain, mContext.getApplicationContext());
if (password != null)
{
//now store the new one, for future use until it expires
ImApp.insertOrUpdateAccount(mContext.getContentResolver(), mProviderId, userName,
GTalkOAuth2.NAME + ':' + password );
}
return password;
}
// TODO shouldn't setProxy be handled in Imps/settings?
public void setProxy(String type, String host, int port) {
if (type == null) {
mProxyInfo = ProxyInfo.forNoProxy();
} else {
ProxyInfo.ProxyType pType = ProxyType.valueOf(type);
String username = null;
String password = null;
if (type.equals(TorProxyInfo.PROXY_TYPE) //socks5
&& host.equals(TorProxyInfo.PROXY_HOST) //127.0.0.1
&& port == TorProxyInfo.PROXY_PORT) //9050
{
//if the proxy is for Orbot/Tor then generate random usr/pwd to isolate Tor streams
username = rndForTorCircuits.nextInt(100000)+"";
password = rndForTorCircuits.nextInt(100000)+"";
}
mProxyInfo = new ProxyInfo(pType, host, port, username, password);
}
}
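/*
* Example: the call setProxy(TorProxyInfo.PROXY_TYPE, TorProxyInfo.PROXY_HOST, TorProxyInfo.PROXY_PORT)
* made in initConnection below builds a SOCKS5 ProxyInfo pointing at Orbot on 127.0.0.1:9050 and generates
* a fresh random username/password pair so that each connection gets its own isolated Tor circuit.
*/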
public void initConnection(MyXMPPConnection connection, Contact user, int state) {
mConnection = connection;
mRoster = mConnection.getRoster();
mUser = user;
setState(state, null);
}
private void initConnectionAndLogin (Imps.ProviderSettings.QueryMap providerSettings,String userName) throws XMPPException, KeyManagementException, NoSuchAlgorithmException, IllegalStateException, RuntimeException
{
Debug.onConnectionStart(); //only activates if Debug TRUE is set, so you can leave this in!
initConnection(providerSettings, userName);
mResource = providerSettings.getXmppResource();
//disable compression based on statement by Ge0rg
mConfig.setCompressionEnabled(false);
if (mConnection.isConnected())
{
mConnection.login(mUsername, mPassword, mResource);
String fullJid = mConnection.getUser();
XmppAddress xa = new XmppAddress(fullJid);
mUser = new Contact(xa, xa.getUser());
mStreamHandler.notifyInitialLogin();
initServiceDiscovery();
sendPresencePacket();
mRoster = mConnection.getRoster();
mRoster.setSubscriptionMode(Roster.SubscriptionMode.manual);
getContactListManager().listenToRoster(mRoster);
}
else
{
disconnect();
disconnected(new ImErrorInfo(ImpsErrorInfo.SERVER_UNAVAILABLE,
"not connected on login"));
}
}
// Runs in executor thread
private void initConnection(Imps.ProviderSettings.QueryMap providerSettings, String userName) throws NoSuchAlgorithmException, KeyManagementException, XMPPException {
boolean allowPlainAuth = providerSettings.getAllowPlainAuth();
boolean requireTls = providerSettings.getRequireTls();
boolean doDnsSrv = providerSettings.getDoDnsSrv();
boolean tlsCertVerify = providerSettings.getTlsCertVerify();
boolean useSASL = true;//!allowPlainAuth;
String domain = providerSettings.getDomain();
mPriority = providerSettings.getXmppResourcePrio();
int serverPort = providerSettings.getPort();
String server = providerSettings.getServer();
if ("".equals(server))
server = null;
debug(TAG, "TLS required? " + requireTls);
debug(TAG, "cert verification? " + tlsCertVerify);
if (providerSettings.getUseTor()) {
setProxy(TorProxyInfo.PROXY_TYPE, TorProxyInfo.PROXY_HOST,
TorProxyInfo.PROXY_PORT);
}
else
{
setProxy(null, null, -1);
}
if (mProxyInfo == null)
mProxyInfo = ProxyInfo.forNoProxy();
// If DNS SRV lookup is enabled, resolve the XMPP host (and the port, if not overridden) via the SRV record
if (doDnsSrv) {
//java.lang.System.setProperty("java.net.preferIPv4Stack", "true");
//java.lang.System.setProperty("java.net.preferIPv6Addresses", "false");
debug(TAG, "(DNS SRV) resolving: " + domain);
DNSUtil.HostAddress srvHost = DNSUtil.resolveXMPPDomain(domain);
server = srvHost.getHost();
if (serverPort <= 0) {
// If user did not override port, use port from SRV record
serverPort = srvHost.getPort();
}
debug(TAG, "(DNS SRV) resolved: " + domain + "=" + server + ":" + serverPort);
}
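// Illustration (hypothetical domain): for domain "example.org", resolveXMPPDomain() might return
// xmpp.example.org:5222 from the _xmpp-client._tcp.example.org SRV record; the SRV port is only used
// when the user has not set serverPort explicitly.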
if (server != null && server.contains("google.com"))
{
mUsername = userName + '@' + domain;
}
else if (domain.contains("gmail.com"))
{
mUsername = userName + '@' + domain;
}
else if (mIsGoogleAuth)
{
mUsername = userName + '@' + domain;
}
else
{
mUsername = userName;
}
if (serverPort == 0) //if serverPort is set to 0 then use 5222 as default
serverPort = 5222;
// No server requested and SRV lookup wasn't requested or returned nothing - use domain
if (server == null) {
debug(TAG, "(use domain) ConnectionConfiguration(" + domain + ", " + serverPort + ", "
+ domain + ", mProxyInfo);");
if (mProxyInfo == null)
mConfig = new ConnectionConfiguration(domain, serverPort);
else
mConfig = new ConnectionConfiguration(domain, serverPort, mProxyInfo);
//server = domain;
} else {
debug(TAG, "(use server) ConnectionConfiguration(" + server + ", " + serverPort + ", "
+ domain + ", mProxyInfo);");
//String serviceName = domain;
//if (server != null && (!server.endsWith(".onion"))) //if a connect server was manually entered, and is not an .onion address
// serviceName = server;
if (mProxyInfo == null)
mConfig = new ConnectionConfiguration(server, serverPort, domain);
else
mConfig = new ConnectionConfiguration(server, serverPort, domain, mProxyInfo);
}
mConfig.setDebuggerEnabled(Debug.DEBUG_ENABLED);
mConfig.setSASLAuthenticationEnabled(useSASL);
// Android has no support for Kerberos or GSSAPI, so disable completely
SASLAuthentication.unregisterSASLMechanism("KERBEROS_V4");
SASLAuthentication.unregisterSASLMechanism("GSSAPI");
SASLAuthentication.registerSASLMechanism( GTalkOAuth2.NAME, GTalkOAuth2.class );
if (mIsGoogleAuth) //if using google auth enable sasl
SASLAuthentication.supportSASLMechanism( GTalkOAuth2.NAME, 0);
else if (domain.contains("google.com")||domain.contains("gmail.com")) //Google domain but OAuth not in use: make sure the OAuth mechanism isn't offered
SASLAuthentication.unsupportSASLMechanism( GTalkOAuth2.NAME);
SASLAuthentication.supportSASLMechanism("PLAIN", 1);
SASLAuthentication.supportSASLMechanism("DIGEST-MD5", 2);
if (requireTls) {
MemorizingTrustManager trustManager = ImApp.sImApp.getTrustManager();
if (sslContext == null)
{
sslContext = SSLContext.getInstance(SSLCONTEXT_TYPE);
SecureRandom secureRandom = new java.security.SecureRandom();
sslContext.init(null, new javax.net.ssl.TrustManager[] { trustManager },
secureRandom);
sslContext.getDefaultSSLParameters().getCipherSuites();
if (Build.VERSION.SDK_INT >= 20) {
sslContext.getDefaultSSLParameters().setCipherSuites(XMPPCertPins.SSL_IDEAL_CIPHER_SUITES_API_20);
}
else
{
sslContext.getDefaultSSLParameters().setCipherSuites(XMPPCertPins.SSL_IDEAL_CIPHER_SUITES);
}
}
int currentapiVersion = android.os.Build.VERSION.SDK_INT;
if (currentapiVersion >= 16){
// Enable TLS1.2 and TLS1.1 on supported versions of android
//mConfig.setEnabledProtocols(new String[] { "TLSv1.2", "TLSv1.1", "TLSv1" });
sslContext.getDefaultSSLParameters().setProtocols(new String[] { "TLSv1.2", "TLSv1.1", "TLSv1" });
}
if (currentapiVersion >= android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH){
mConfig.setEnabledCipherSuites(XMPPCertPins.SSL_IDEAL_CIPHER_SUITES);
}
HostnameVerifier hv = trustManager.wrapHostnameVerifier(HttpsURLConnection.getDefaultHostnameVerifier());
mConfig.setHostnameVerifier(hv);
mConfig.setCustomSSLContext(sslContext);
mConfig.setSecurityMode(SecurityMode.required);
mConfig.setVerifyChainEnabled(true);
mConfig.setVerifyRootCAEnabled(true);
mConfig.setExpiredCertificatesCheckEnabled(true);
mConfig.setNotMatchingDomainCheckEnabled(true);
mConfig.setSelfSignedCertificateEnabled(false);
mConfig.setCallbackHandler(this);
} else {
// if it finds a cert, still use it, but don't check anything since
// TLS errors are not expected by the user
mConfig.setSecurityMode(SecurityMode.enabled);
if (sslContext == null)
{
sslContext = SSLContext.getInstance(SSLCONTEXT_TYPE);
SecureRandom mSecureRandom = new java.security.SecureRandom();
sslContext.init(null, new javax.net.ssl.TrustManager[] { getDummyTrustManager () },
mSecureRandom);
sslContext.getDefaultSSLParameters().setCipherSuites(XMPPCertPins.SSL_IDEAL_CIPHER_SUITES);
}
mConfig.setCustomSSLContext(sslContext);
if (!allowPlainAuth)
SASLAuthentication.unsupportSASLMechanism("PLAIN");
mConfig.setVerifyChainEnabled(false);
mConfig.setVerifyRootCAEnabled(false);
mConfig.setExpiredCertificatesCheckEnabled(false);
mConfig.setNotMatchingDomainCheckEnabled(false);
mConfig.setSelfSignedCertificateEnabled(true);
}
// Don't use smack reconnection - not reliable
mConfig.setReconnectionAllowed(false);
mConfig.setSendPresence(true);
mConfig.setRosterLoadedAtLogin(true);
mConnection = new MyXMPPConnection(mConfig);
//debug(TAG,"is secure connection? " + mConnection.isSecureConnection());
//debug(TAG,"is using TLS? " + mConnection.isUsingTLS());
mConnection.addPacketListener(new PacketListener() {
@Override
public void processPacket(Packet packet) {
debug(TAG, "receive message: " + packet.getFrom() + " to " + packet.getTo());
org.jivesoftware.smack.packet.Message smackMessage = (org.jivesoftware.smack.packet.Message) packet;
String address = smackMessage.getFrom();
String body = smackMessage.getBody();
if (body == null)
{
Collection<Body> mColl = smackMessage.getBodies();
for (Body bodyPart : mColl)
{
String msg = bodyPart.getMessage();
if (msg != null)
{
body = msg;
break;
}
}
}
DeliveryReceipts.DeliveryReceipt dr = (DeliveryReceipts.DeliveryReceipt) smackMessage
.getExtension("received", DeliveryReceipts.NAMESPACE);
if (dr != null) {
debug(TAG, "got delivery receipt for " + dr.getId());
boolean groupMessage = smackMessage.getType() == org.jivesoftware.smack.packet.Message.Type.groupchat;
ChatSession session = findOrCreateSession(address, groupMessage);
session.onMessageReceipt(dr.getId());
}
if (body != null)
{
XmppAddress aFrom = new XmppAddress(smackMessage.getFrom());
boolean isGroupMessage = smackMessage.getType() == org.jivesoftware.smack.packet.Message.Type.groupchat;
ChatSession session = findOrCreateSession(address, isGroupMessage);
Message rec = new Message(body);
rec.setTo(mUser.getAddress());
rec.setFrom(aFrom);
rec.setDateTime(new Date());
rec.setType(Imps.MessageType.INCOMING);
// Detect if this was said by us, and mark message as outgoing
if (isGroupMessage && rec.getFrom().getResource().equals(rec.getTo().getUser())) {
rec.setType(Imps.MessageType.OUTGOING);
}
boolean good = session.onReceiveMessage(rec);
if (smackMessage.getExtension("request", DeliveryReceipts.NAMESPACE) != null) {
if (good) {
debug(TAG, "sending delivery receipt");
// got XEP-0184 request, send receipt
sendReceipt(smackMessage);
session.onReceiptsExpected();
} else {
debug(TAG, "not sending delivery receipt due to processing error");
}
} else if (!good) {
debug(TAG, "packet processing error");
}
}
}
}, new PacketTypeFilter(org.jivesoftware.smack.packet.Message.class));
mConnection.addPacketListener(new PacketListener() {
@Override
public void processPacket(Packet packet) {
org.jivesoftware.smack.packet.Presence presence = (org.jivesoftware.smack.packet.Presence) packet;
qPresence.push(presence);
}
}, new PacketTypeFilter(org.jivesoftware.smack.packet.Presence.class));
if (mTimerPackets == null)
initPacketProcessor();
if (mTimerPresence == null)
initPresenceProcessor ();
ConnectionListener connectionListener = new ConnectionListener() {
/**
* Called from smack when connect() is fully successful
*
* This is called on the executor thread while we are in reconnect()
*/
@Override
public void reconnectionSuccessful() {
if (mStreamHandler == null || !mStreamHandler.isResumePending()) {
debug(TAG, "Reconnection success");
onReconnectionSuccessful();
mRoster = mConnection.getRoster();
} else {
debug(TAG, "Ignoring reconnection callback due to pending resume");
}
}
@Override
public void reconnectionFailed(Exception e) {
// We are not using the reconnection manager
throw new UnsupportedOperationException();
}
@Override
public void reconnectingIn(int seconds) {
// We are not using the reconnection manager
// throw new UnsupportedOperationException();
}
@Override
public void connectionClosedOnError(final Exception e) {
/*
* This fires when:
* - Packet reader or writer detect an error
* - Stream compression failed
* - TLS fails but is required
* - Network error
* - We forced a socket shutdown
*/
debug(TAG, "reconnect on error: " + e.getMessage());
if (e.getMessage() != null && e.getMessage().contains("conflict")) {
execute(new Runnable() {
@Override
public void run() {
disconnect();
disconnected(new ImErrorInfo(ImpsErrorInfo.ALREADY_LOGGED,
"logged in from another location"));
}
});
} else if (!mNeedReconnect) {
execute(new Runnable() {
public void run() {
if (getState() == LOGGED_IN)
{
//Thread.sleep(1000);
mNeedReconnect = true;
setState(LOGGING_IN,
new ImErrorInfo(ImErrorInfo.NETWORK_ERROR, "network error"));
reconnect();
}
}
});
}
}
@Override
public void connectionClosed() {
debug(TAG, "connection closed");
/*
* This can be called in these cases:
* - Connection is shutting down
* - because we are calling disconnect
* - in do_logout
*
* - NOT
* - because server disconnected "normally"
* - we were trying to log in (initConnection), but are failing
* - due to network error
* - due to login failing
*/
}
};
mConnection.addConnectionListener(connectionListener);
mStreamHandler = new XmppStreamHandler(mConnection, connectionListener);
for (int i = 0; i < 3; i++)
{
try
{
mConnection.connect();
break;
}
catch (Exception uhe)
{
//sometimes DNS fails.. let's wait and try again a few times
try { Thread.sleep(500);} catch (Exception e){}
}
}
if (!mConnection.isConnected())
throw new XMPPException("Unable to connect to host");
}
private void sendPresencePacket() {
qPacket.add(makePresencePacket(mUserPresence));
}
public void sendReceipt(org.jivesoftware.smack.packet.Message msg) {
debug(TAG, "sending XEP-0184 ack to " + msg.getFrom() + " id=" + msg.getPacketID());
org.jivesoftware.smack.packet.Message ack = new org.jivesoftware.smack.packet.Message(
msg.getFrom(), msg.getType());
ack.addExtension(new DeliveryReceipts.DeliveryReceipt(msg.getPacketID()));
sendPacket(ack);
}
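/*
 * Illustrative sketch (added for clarity, not part of the original source): the ack
 * built above serializes to roughly the following XEP-0184 stanza; the addressee and
 * id values are placeholders.
 *
 *   <message to="juliet@example.com/balcony" type="chat">
 *     <received xmlns="urn:xmpp:receipts" id="original-message-id"/>
 *   </message>
 */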
public X509TrustManager getDummyTrustManager ()
{
return new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] arg0, String arg1)
throws CertificateException {
}
@Override
public void checkServerTrusted(X509Certificate[] arg0, String arg1)
throws CertificateException {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
}
};
}
protected int parsePresence(org.jivesoftware.smack.packet.Presence presence) {
int type = Imps.Presence.AVAILABLE;
Mode rmode = presence.getMode();
Type rtype = presence.getType();
//if a device sends something other than available, check if there is a higher priority one available on the server
/*
if (rmode != Mode.available)
{
if (mRoster != null)
{
org.jivesoftware.smack.packet.Presence npresence = mRoster.getPresence(XmppAddress.stripResource(presence.getFrom()));
rmode = npresence.getMode();
rtype = npresence.getType();
if (rmode == Mode.away || rmode == Mode.xa)
type = Presence.AWAY;
else if (rmode == Mode.dnd)
type = Presence.DO_NOT_DISTURB;
else if (rtype == Type.unavailable || rtype == Type.error)
type = Presence.OFFLINE;
}
}*/
if (rmode == Mode.chat)
type = Imps.Presence.AVAILABLE;
else if (rmode == Mode.away || rmode == Mode.xa)
type = Imps.Presence.AWAY;
else if (rmode == Mode.dnd)
type = Imps.Presence.DO_NOT_DISTURB;
else if (rtype == Type.unavailable || rtype == Type.error)
type = Imps.Presence.OFFLINE;
else if (rtype == Type.unsubscribed)
type = Imps.Presence.OFFLINE;
return type;
}
// We must release resources here, because we will not be reused
void disconnected(ImErrorInfo info) {
debug(TAG, "disconnected");
join();
setState(DISCONNECTED, info);
}
@Override
public void logoutAsync() {
new Thread(new Runnable() {
@Override
public void run() {
do_logout();
}
}).start();
}
// Force immediate logout
public void logout() {
logoutAsync();
}
// Usually runs in executor thread, unless called from logout()
private void do_logout() {
setState(LOGGING_OUT, null);
disconnect();
disconnected(null);
}
// Runs in executor thread
private void disconnect() {
clearPing();
XMPPConnection conn = mConnection;
mConnection = null;
try {
conn.disconnect();
} catch (Throwable th) {
// ignore
}
mNeedReconnect = false;
mRetryLogin = false;
}
@Override
public void reestablishSessionAsync(Map<String, String> sessionContext) {
execute(new Runnable() {
@Override
public void run() {
if (getState() == SUSPENDED) {
debug(TAG, "reestablish");
mNeedReconnect = false;
setState(LOGGING_IN, null);
maybe_reconnect();
}
}
});
}
@Override
public void suspend() {
execute(new Runnable() {
@Override
public void run() {
debug(TAG, "suspend");
setState(SUSPENDED, null);
mNeedReconnect = false;
clearPing();
// Do not try to reconnect anymore if we were asked to suspend
if (mStreamHandler != null)
mStreamHandler.quickShutdown();
}
});
}
private ChatSession findOrCreateSession(String address, boolean groupChat) {
ChatSession session = mSessionManager.findSession(address);
if (session == null) {
ImEntity participant = findOrCreateParticipant(address, groupChat);
session = mSessionManager.createChatSession(participant,false);
}
return session;
}
ImEntity findOrCreateParticipant(String address, boolean groupChat) {
ImEntity participant = mContactListManager.getContact(address);
if (participant == null) {
if (!groupChat) {
participant = makeContact(address);
}
else {
try {
mChatGroupManager.createChatGroupAsync(address, mUser.getName());
Address xmppAddress = new XmppAddress(address);
participant = mChatGroupManager.getChatGroup(xmppAddress);
}
catch (Exception e) {
Log.e(ImApp.LOG_TAG,"unable to join group chat",e);
}
}
}
return participant;
}
Contact findOrCreateContact(String address) {
return (Contact) findOrCreateParticipant(address, false);
}
private Contact makeContact(String address) {
Contact contact = null;
//load from roster if we don't have the contact
RosterEntry rEntry = null;
if (mConnection != null)
rEntry = mConnection.getRoster().getEntry(address);
if (rEntry != null)
{
XmppAddress xAddress = new XmppAddress(address);
String name = rEntry.getName();
if (name == null)
name = xAddress.getUser();
contact = new Contact(xAddress, name);
}
else
{
XmppAddress xAddress = new XmppAddress(address);
contact = new Contact(xAddress, xAddress.getUser());
}
return contact;
}
private final class XmppChatSessionManager extends ChatSessionManager {
@Override
public void sendMessageAsync(ChatSession session, Message message) {
String chatRoomJid = message.getTo().getAddress();
MultiUserChat muc = ((XmppChatGroupManager)getChatGroupManager()).getMultiUserChat(chatRoomJid);
org.jivesoftware.smack.packet.Message msgXmpp = null;
if (muc != null)
{
msgXmpp = muc.createMessage();
}
else
{
msgXmpp = new org.jivesoftware.smack.packet.Message(
message.getTo().getAddress(), org.jivesoftware.smack.packet.Message.Type.chat);
msgXmpp.addExtension(new DeliveryReceipts.DeliveryReceiptRequest());
Contact contact = mContactListManager.getContact(message.getTo().getBareAddress());
if (contact != null && contact.getPresence() !=null && (!contact.getPresence().isOnline()))
requestPresenceRefresh(message.getTo().getBareAddress());
}
msgXmpp.setFrom(message.getFrom().getAddress());
msgXmpp.setBody(message.getBody());
sendPacket(msgXmpp);
//set message ID value on internal message
message.setID(msgXmpp.getPacketID());
}
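/*
 * Illustrative sketch (added for clarity, not part of the original source): for a
 * one-to-one chat the message assembled above carries a XEP-0184 receipt request and
 * looks roughly like this on the wire; the address and body are placeholders.
 *
 *   <message to="juliet@example.com" type="chat" id="...">
 *     <body>hello</body>
 *     <request xmlns="urn:xmpp:receipts"/>
 *   </message>
 */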
ChatSession findSession(String address) {
return mSessions.get(Address.stripResource(address));
}
@Override
public ChatSession createChatSession(ImEntity participant, boolean isNewSession) {
qAvatar.push(participant.getAddress().getAddress());
requestPresenceRefresh(participant.getAddress().getAddress());
ChatSession session = super.createChatSession(participant,isNewSession);
// mSessions.put(Address.stripResource(participant.getAddress().getAddress()),session);
return session;
}
}
private void requestPresenceRefresh (String address)
{
org.jivesoftware.smack.packet.Presence p = new org.jivesoftware.smack.packet.Presence(Type.error);
p.setFrom(address);
qPresence.push(p);
}
public class XmppContactListManager extends ContactListManager {
@Override
protected void setListNameAsync(final String name, final ContactList list) {
execute(new Runnable() {
@Override
public void run() {
do_setListName(name, list);
}
});
}
// Runs in executor thread
private void do_setListName(String name, ContactList list) {
debug(TAG, "set list name");
mConnection.getRoster().getGroup(list.getName()).setName(name);
notifyContactListNameUpdated(list, name);
}
@Override
public String normalizeAddress(String address) {
return Address.stripResource(address);
}
@Override
public void loadContactListsAsync() {
execute(new Runnable() {
@Override
public void run() {
do_loadContactLists();
}
});
}
// For testing
/*
public void loadContactLists() {
do_loadContactLists();
}*/
/**
* Create new list of contacts from roster entries.
*
* Runs in executor thread
*
* @param entryIter iterator of roster entries to add to contact list
* @param skipList list of contacts which should be omitted; new
* contacts are added to this list automatically
* @return contacts from roster which were not present in skiplist.
*/
/*
private Collection<Contact> fillContacts(Collection<RosterEntry> entryIter,
Set<String> skipList) {
Roster roster = mConnection.getRoster();
Collection<Contact> contacts = new ArrayList<Contact>();
for (RosterEntry entry : entryIter) {
String address = entry.getUser();
if (skipList != null && !skipList.add(address))
continue;
String name = entry.getName();
if (name == null)
name = address;
XmppAddress xaddress = new XmppAddress(address);
org.jivesoftware.smack.packet.Presence presence = roster.getPresence(address);
String status = presence.getStatus();
String resource = null;
Presence p = new Presence(parsePresence(presence), status,
null, null, Presence.CLIENT_TYPE_DEFAULT);
String from = presence.getFrom();
if (from != null && from.lastIndexOf("/") > 0) {
resource = from.substring(from.lastIndexOf("/") + 1);
if (resource.indexOf('.')!=-1)
resource = resource.substring(0,resource.indexOf('.'));
p.setResource(resource);
}
Contact contact = mContactListManager.getContact(xaddress.getBareAddress());
if (contact == null)
contact = new Contact(xaddress, name);
contact.setPresence(p);
contacts.add(contact);
}
return contacts;
}
*/
// Runs in executor thread
private void do_loadContactLists() {
debug(TAG, "load contact lists");
if (mConnection == null)
return;
Roster roster = mConnection.getRoster();
//Set<String> seen = new HashSet<String>();
// This group will also contain all the unfiled contacts. We will create it locally if it
// does not exist.
/*
String generalGroupName = mContext.getString(R.string.buddies);
for (Iterator<RosterGroup> giter = roster.getGroups().iterator(); giter.hasNext();) {
RosterGroup group = giter.next();
debug(TAG, "loading group: " + group.getName() + " size:" + group.getEntryCount());
Collection<Contact> contacts = fillContacts(group.getEntries(), null);
if (group.getName().equals(generalGroupName) && roster.getUnfiledEntryCount() > 0) {
Collection<Contact> unfiled = fillContacts(roster.getUnfiledEntries(), null);
contacts.addAll(unfiled);
}
XmppAddress groupAddress = new XmppAddress(group.getName());
ContactList cl = new ContactList(groupAddress, group.getName(), group
.getName().equals(generalGroupName), contacts, this);
notifyContactListCreated(cl);
notifyContactsPresenceUpdated(contacts.toArray(new Contact[contacts.size()]));
}
Collection<Contact> contacts;
if (roster.getUnfiledEntryCount() > 0) {
contacts = fillContacts(roster.getUnfiledEntries(), null);
} else {
contacts = new ArrayList<Contact>();
}
ContactList cl = getContactList(generalGroupName);
cl = new ContactList(groupAddress, group.getName(), group
.getName().equals(generalGroupName), contacts, this);
// We might have already created the Buddies contact list above
if (cl == null) {
cl = new ContactList(mUser.getAddress(), generalGroupName, true, contacts, this);
notifyContactListCreated(cl);
notifyContactsPresenceUpdated(contacts.toArray(new Contact[contacts.size()]));
}
*/
//since we don't show lists anymore, let's just load all entries together
ContactList cl;
try {
cl = mContactListManager.getDefaultContactList();
} catch (ImException e1) {
debug(TAG,"couldn't read default list");
cl = null;
}
if (cl == null)
{
String generalGroupName = mContext.getString(R.string.buddies);
Collection<Contact> contacts = new ArrayList<Contact>();
XmppAddress groupAddress = new XmppAddress(generalGroupName);
cl = new ContactList(groupAddress,generalGroupName, true, contacts, this);
notifyContactListCreated(cl);
}
for (RosterEntry rEntry : roster.getEntries())
{
String address = rEntry.getUser();
String name = rEntry.getName();
if (mUser.getAddress().getBareAddress().equals(address)) //don't load a roster for yourself
continue;
Contact contact = getContact(address);
if (contact == null)
{
XmppAddress xAddr = new XmppAddress(address);
if (name == null || name.length() == 0)
name = xAddr.getUser();
contact = new Contact(xAddr,name);
}
requestPresenceRefresh(address);
if (!cl.containsContact(contact))
{
try {
cl.addExistingContact(contact);
} catch (ImException e) {
debug(TAG,"could not add contact to list: " + e.getLocalizedMessage());
}
}
}
notifyContactListLoaded(cl);
notifyContactListsLoaded();
}
// Runs in executor thread
public void addContactsToList(Collection<String> addresses) {
debug(TAG, "add contacts to lists");
if (mConnection == null)
return;
ContactList cl;
try {
cl = mContactListManager.getDefaultContactList();
} catch (ImException e1) {
debug(TAG,"couldn't read default list");
cl = null;
}
if (cl == null)
{
String generalGroupName = mContext.getString(R.string.buddies);
Collection<Contact> contacts = new ArrayList<Contact>();
XmppAddress groupAddress = new XmppAddress(generalGroupName);
cl = new ContactList(groupAddress,generalGroupName, true, contacts, this);
notifyContactListCreated(cl);
}
for (String address : addresses)
{
if (mUser.getAddress().getBareAddress().equals(address)) //don't load a roster for yourself
continue;
Contact contact = getContact(address);
if (contact == null)
{
XmppAddress xAddr = new XmppAddress(address);
contact = new Contact(xAddr,xAddr.getUser());
}
//org.jivesoftware.smack.packet.Presence p = roster.getPresence(contact.getAddress().getBareAddress());
//qPresence.push(p);
if (!cl.containsContact(contact))
{
try {
cl.addExistingContact(contact);
} catch (ImException e) {
debug(TAG,"could not add contact to list: " + e.getLocalizedMessage());
}
}
}
notifyContactListLoaded(cl);
notifyContactListsLoaded();
}
/*
* Iterates through a list of contacts to see if there were any Presence
* notifications sent before the contact was loaded
*/
/*
private void processQueuedPresenceNotifications (Collection<Contact> contacts)
{
Roster roster = mConnection.getRoster();
//now iterate through the list of queued up unprocessed presence changes
for (Contact contact : contacts)
{
String address = parseAddressBase(contact.getAddress().getFullName());
org.jivesoftware.smack.packet.Presence presence = roster.getPresence(address);
if (presence != null)
{
debug(TAG, "processing queued presence: " + address + " - " + presence.getStatus());
unprocdPresence.remove(address);
contact.setPresence(new Presence(parsePresence(presence), presence.getStatus(), null, null, Presence.CLIENT_TYPE_DEFAULT));
Contact[] updatedContact = {contact};
notifyContactsPresenceUpdated(updatedContact);
}
}
}*/
public void listenToRoster(final Roster roster) {
roster.addRosterListener(rListener);
}
RosterListener rListener = new RosterListener() {
@Override
public void presenceChanged(org.jivesoftware.smack.packet.Presence presence) {
qPresence.push(presence);
}
@Override
public void entriesUpdated(Collection<String> addresses) {
for (String address :addresses)
{
requestPresenceRefresh(address);
}
}
@Override
public void entriesDeleted(Collection<String> addresses) {
ContactList cl;
try {
cl = mContactListManager.getDefaultContactList();
for (String address : addresses)
{
requestPresenceRefresh(address);
Contact contact = mContactListManager.getContact(XmppAddress.stripResource(address));
mContactListManager.notifyContactListUpdated(cl, ContactListListener.LIST_CONTACT_REMOVED, contact);
}
} catch (ImException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@Override
public void entriesAdded(Collection<String> addresses) {
try
{
if (mContactListManager.getState() == LISTS_LOADED)
{
for (String address : addresses)
{
Contact contact = getContact(address);
requestPresenceRefresh(address);
if (contact == null)
{
XmppAddress xAddr = new XmppAddress(address);
contact = new Contact(xAddr,xAddr.getUser());
}
try
{
ContactList cl = mContactListManager.getDefaultContactList();
if (!cl.containsContact(contact))
cl.addExistingContact(contact);
}
catch (Exception e)
{
debug(TAG,"could not add contact to list: " + e.getLocalizedMessage());
}
}
}
}
catch (Exception e)
{
Log.d(ImApp.LOG_TAG,"error adding contacts",e);
}
}
};
@Override
protected ImConnection getConnection() {
return XmppConnection.this;
}
@Override
protected void doRemoveContactFromListAsync(Contact contact, ContactList list) {
// FIXME synchronize this to executor thread
if (mConnection == null)
return;
Roster roster = mConnection.getRoster();
String address = contact.getAddress().getAddress();
try {
RosterEntry entry = roster.getEntry(address);
RosterGroup group = roster.getGroup(list.getName());
if (group == null) {
debug(TAG, "could not find group " + list.getName() + " in roster");
roster.removeEntry(entry);
}
else
{
group.removeEntry(entry);
entry = roster.getEntry(address);
// Remove from Roster if this is the last group
if (entry != null && entry.getGroups().size() <= 1)
roster.removeEntry(entry);
}
} catch (XMPPException e) {
debug(TAG, "remove entry failed: " + e.getMessage());
throw new RuntimeException(e);
}
//otherwise, send unsub message and delete from local contact database
org.jivesoftware.smack.packet.Presence response = new org.jivesoftware.smack.packet.Presence(
org.jivesoftware.smack.packet.Presence.Type.unsubscribed);
response.setTo(address);
sendPacket(response);
notifyContactListUpdated(list, ContactListListener.LIST_CONTACT_REMOVED, contact);
}
@Override
protected void doDeleteContactListAsync(ContactList list) {
// TODO delete contact list
debug(TAG, "delete contact list " + list.getName());
}
@Override
protected void doCreateContactListAsync(String name, Collection<Contact> contacts,
boolean isDefault) {
// TODO create contact list
debug(TAG, "create contact list " + name + " default " + isDefault);
}
@Override
protected void doBlockContactAsync(String address, boolean block) {
// TODO block contact
}
@Override
protected void doAddContactToListAsync(Contact contact, ContactList list) throws ImException {
debug(TAG, "add contact to " + list.getName());
if (mConnection.isConnected())
{
org.jivesoftware.smack.packet.Presence reqSubscribe = new org.jivesoftware.smack.packet.Presence(
org.jivesoftware.smack.packet.Presence.Type.subscribe);
reqSubscribe.setTo(contact.getAddress().getBareAddress());
sendPacket(reqSubscribe);
org.jivesoftware.smack.packet.Presence reqSubscribed = new org.jivesoftware.smack.packet.Presence(
org.jivesoftware.smack.packet.Presence.Type.subscribed);
reqSubscribed.setTo(contact.getAddress().getBareAddress());
sendPacket(reqSubscribed);
Roster roster = mConnection.getRoster();
String[] groups = new String[] { list.getName() };
try {
RosterEntry rEntry = roster.getEntry(contact.getAddress().getBareAddress());
RosterGroup rGroup = roster.getGroup(list.getName());
if (rGroup == null)
{
if (rEntry == null)
roster.createEntry (contact.getAddress().getBareAddress(), contact.getName(), null);
}
else if (rEntry == null)
{
roster.createEntry(contact.getAddress().getBareAddress(), contact.getName(), groups);
}
} catch (XMPPException e) {
debug(TAG,"error updating remote roster",e);
throw new ImException("error updating remote roster");
} catch (IllegalStateException e) {
String msg = "Not logged in to server while updating remote roster";
debug(TAG, msg, e);
throw new ImException(msg);
}
do_loadContactLists();
notifyContactListUpdated(list, ContactListListener.LIST_CONTACT_ADDED, contact);
}
}
@Override
public void declineSubscriptionRequest(Contact contact) {
debug(TAG, "decline subscription");
org.jivesoftware.smack.packet.Presence response = new org.jivesoftware.smack.packet.Presence(
org.jivesoftware.smack.packet.Presence.Type.unsubscribed);
response.setTo(contact.getAddress().getBareAddress());
sendPacket(response);
try {
mContactListManager.getSubscriptionRequestListener().onSubscriptionDeclined(contact, mProviderId, mAccountId);
} catch (RemoteException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@Override
public void approveSubscriptionRequest(final Contact contact) {
debug(TAG, "approve subscription: " + contact.getAddress().getAddress());
org.jivesoftware.smack.packet.Presence response = new org.jivesoftware.smack.packet.Presence(
org.jivesoftware.smack.packet.Presence.Type.subscribed);
response.setTo(contact.getAddress().getBareAddress());
sendPacket(response);
try
{
mContactListManager.getSubscriptionRequestListener().onSubscriptionApproved(contact, mProviderId, mAccountId);
doAddContactToListAsync(contact, getContactListManager().getDefaultContactList());
} catch (ImException e) {
debug (TAG, "error responding to subscription approval: " + e.getLocalizedMessage());
}
catch (RemoteException e) {
debug (TAG, "error responding to subscription approval: " + e.getLocalizedMessage());
}
}
@Override
public Contact[] createTemporaryContacts(String[] addresses) {
// debug(TAG, "create temporary " + address);
Contact[] contacts = new Contact[addresses.length];
int i = 0;
for (String address : addresses)
{
contacts[i++] = makeContact(address);
}
notifyContactsPresenceUpdated(contacts);
return contacts;
}
@Override
protected void doSetContactName(String address, String name) throws ImException {
Roster roster = mConnection.getRoster();
RosterEntry entry = roster.getEntry(address);
// confirm entry still exists
if (entry == null) {
return;
}
// set name
entry.setName(name);
}
}
public void sendHeartbeat(final long heartbeatInterval) {
// Don't let heartbeats queue up if we have long running tasks - only
// do the heartbeat if executor is idle.
boolean success = executeIfIdle(new Runnable() {
@Override
public void run() {
debug(TAG, "heartbeat state = " + getState());
doHeartbeat(heartbeatInterval);
}
});
if (!success) {
debug(TAG, "failed to schedule heartbeat state = " + getState());
}
}
// Runs in executor thread
public void doHeartbeat(long heartbeatInterval) {
heartbeatSequence++;
if (getState() == SUSPENDED) {
debug(TAG, "heartbeat during suspend");
return;
}
if (mConnection == null && mRetryLogin) {
debug(TAG, "reconnect with login");
do_login();
return;
}
if (mConnection == null)
return;
if (mNeedReconnect) {
reconnect();
} else if (!mConnection.isConnected() && getState() == LOGGED_IN) {
// Smack failed to tell us about a disconnect
debug(TAG, "reconnect on unreported state change");
setState(LOGGING_IN, new ImErrorInfo(ImErrorInfo.NETWORK_ERROR, "network disconnected"));
force_reconnect();
} else if (getState() == LOGGED_IN) {
if (PING_ENABLED) {
// Check ping on every heartbeat. checkPing() will return true immediately if we already checked.
if (!checkPing()) {
debug(TAG, "reconnect on ping failed: " + mUser.getAddress().getAddress());
setState(LOGGING_IN, new ImErrorInfo(ImErrorInfo.NETWORK_ERROR, "network timeout"));
maybe_reconnect();
} else {
// Send pings only at intervals configured by the user
if (heartbeatSequence >= heartbeatInterval) {
heartbeatSequence = 0;
debug(TAG, "ping");
sendPing();
}
}
}
}
}
private void clearPing() {
debug(TAG, "clear ping");
mPingCollector = null;
heartbeatSequence = 0;
}
// Runs in executor thread
private void sendPing() {
IQ req = new IQ() {
public String getChildElementXML() {
return "<ping xmlns='urn:xmpp:ping'/>";
}
};
req.setType(IQ.Type.GET);
PacketFilter filter = new AndFilter(new PacketIDFilter(req.getPacketID()),
new PacketTypeFilter(IQ.class));
mPingCollector = mConnection.createPacketCollector(filter);
mConnection.sendPacket(req);
}
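/*
 * Illustrative sketch (added for clarity, not part of the original source): the IQ
 * above serializes to roughly this XEP-0199 ping, with the packet id generated by
 * Smack. checkPing() later polls the collector created here for the matching result.
 *
 *   <iq type="get" id="..."><ping xmlns="urn:xmpp:ping"/></iq>
 */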
// Runs in executor thread
private boolean checkPing() {
if (mPingCollector != null) {
IQ result = (IQ) mPingCollector.pollResult();
mPingCollector.cancel();
mPingCollector = null;
if (result == null) {
Log.e(TAG, "ping timeout");
return false;
}
}
return true;
}
// watch out, this is a different XMPPConnection class than XmppConnection! ;)
// org.jivesoftware.smack.XMPPConnection
// info.guardianproject.otr.app.im.plugin.xmpp.XmppConnection
public static class MyXMPPConnection extends XMPPConnection {
public MyXMPPConnection(ConnectionConfiguration config) {
super(config);
}
public void shutdown() {
if (socket != null)
{
try {
// Be forceful in shutting down since SSL can get stuck
try {
socket.shutdownInput();
} catch (Exception e) { }
socket.close();
shutdown(new org.jivesoftware.smack.packet.Presence(
org.jivesoftware.smack.packet.Presence.Type.unavailable));
} catch (Exception e) {
Log.e(TAG, "error on shutdown()", e);
}
}
}
}
@Override
public void networkTypeChanged() {
super.networkTypeChanged();
execute(new Runnable() {
@Override
public void run() {
debug(TAG, "network type changed");
mNeedReconnect = false;
setState(LOGGING_IN, null);
reconnect();
}
});
}
/*
* Force a shutdown and reconnect, unless we are already reconnecting.
*
* Runs in executor thread
*/
private void force_reconnect() {
debug(TAG, "force_reconnect mNeedReconnect=" + mNeedReconnect + " state=" + getState()
+ " connection?=" + (mConnection != null));
if (mConnection == null)
return;
if (mNeedReconnect)
return;
mNeedReconnect = true;
try {
if (mConnection != null && mConnection.isConnected()) {
mStreamHandler.quickShutdown();
}
} catch (Exception e) {
Log.w(TAG, "problem disconnecting on force_reconnect: " + e.getMessage());
}
reconnect();
}
/*
* Reconnect unless we are already in the process of doing so.
*
* Runs in executor thread.
*/
private void maybe_reconnect() {
debug(TAG, "maybe_reconnect mNeedReconnect=" + mNeedReconnect + " state=" + getState()
+ " connection?=" + (mConnection != null));
// This is checking whether we are already in the process of reconnecting. If we are,
// doHeartbeat will take care of reconnecting.
if (mNeedReconnect)
return;
if (getState() == SUSPENDED)
return;
if (mConnection == null)
return;
mNeedReconnect = true;
reconnect();
}
/*
* Retry connecting
*
* Runs in executor thread
*/
private void reconnect() {
if (getState() == SUSPENDED) {
debug(TAG, "reconnect during suspend, ignoring");
return;
}
if (mConnection != null) {
// It is safe to ask mConnection whether it is connected, because either:
// - We detected an error using ping and called force_reconnect, which did a shutdown
// - Smack detected an error, so it knows it is not connected
// so there are no cases where mConnection can be confused about being connected here.
// The only left over cases are reconnect() being called too many times due to errors
// reported multiple times or errors reported during a forced reconnect.
// The analysis above is incorrect in the case where Smack loses connectivity
// while trying to log in. This case is handled in a future heartbeat
// by checking ping responses.
clearPing();
if (mConnection.isConnected()) {
debug(TAG,"reconnect while already connected, assuming good: " + mConnection);
mNeedReconnect = false;
setState(LOGGED_IN, null);
return;
}
debug(TAG, "reconnect");
try {
if (mStreamHandler.isResumePossible()) {
// Connect without binding, will automatically trigger a resume
debug(TAG, "mStreamHandler resume");
mConnection.connect(false);
initServiceDiscovery();
} else {
debug(TAG, "reconnection on network change failed: " + mUser.getAddress().getAddress());
mConnection = null;
mNeedReconnect = true;
setState(LOGGING_IN, new ImErrorInfo(ImErrorInfo.NETWORK_ERROR, null));
while (mNeedReconnect)
{
do_login();
if (mNeedReconnect)
try { Thread.sleep(3000);}
catch (Exception e){}
}
}
} catch (Exception e) {
if (mStreamHandler != null)
mStreamHandler.quickShutdown();
mConnection = null;
debug(TAG, "reconnection attempt failed", e);
// Smack incorrectly notified us that reconnection was successful, reset in case it fails
mNeedReconnect = false;
setState(LOGGING_IN, new ImErrorInfo(ImErrorInfo.NETWORK_ERROR, e.getMessage()));
//while (mNeedReconnect)
// do_login();
}
} else {
mNeedReconnect = false;
mConnection = null;
debug(TAG, "reconnection on network change failed");
setState(LOGGING_IN, new ImErrorInfo(ImErrorInfo.NETWORK_ERROR,
"reconnection on network change failed"));
//while (mNeedReconnect)
// do_login();
}
}
@Override
protected void setState(int state, ImErrorInfo error) {
debug(TAG, "setState to " + state);
super.setState(state, error);
if (state == LOGGED_IN)
{
mUserPresence = new Presence(Presence.AVAILABLE, "", Presence.CLIENT_TYPE_MOBILE);
sendPresencePacket();
}
}
public void debug(String tag, String msg) {
// if (Log.isLoggable(TAG, Log.DEBUG)) {
if (Debug.DEBUG_ENABLED) {
Log.d(tag, "" + mGlobalId + " : " + msg);
}
}
public void debug(String tag, String msg, Exception e) {
if (Debug.DEBUG_ENABLED) {
Log.e(tag, "" + mGlobalId + " : " + msg,e);
}
}
@Override
public void handle(Callback[] arg0) throws IOException {
for (Callback cb : arg0) {
debug(TAG, cb.toString());
}
}
/*
public class MySASLDigestMD5Mechanism extends SASLMechanism
{
public MySASLDigestMD5Mechanism(SASLAuthentication saslAuthentication)
{
super(saslAuthentication);
}
protected void authenticate()
throws IOException, XMPPException
{
String mechanisms[] = {
getName()
};
java.util.Map props = new HashMap();
sc = Sasl.createSaslClient(mechanisms, null, "xmpp", hostname, props, this);
super.authenticate();
}
public void authenticate(String username, String host, String password)
throws IOException, XMPPException
{
authenticationId = username;
this.password = password;
hostname = host;
String mechanisms[] = {
getName()
};
java.util.Map props = new HashMap();
sc = Sasl.createSaslClient(mechanisms, null, "xmpp", host, props, this);
super.authenticate();
}
public void authenticate(String username, String host, CallbackHandler cbh)
throws IOException, XMPPException
{
String mechanisms[] = {
getName()
};
java.util.Map props = new HashMap();
sc = Sasl.createSaslClient(mechanisms, null, "xmpp", host, props, cbh);
super.authenticate();
}
protected String getName()
{
return "DIGEST-MD5";
}
public void challengeReceived(String challenge)
throws IOException
{
//StringBuilder stanza = new StringBuilder();
byte response[];
if(challenge != null)
response = sc.evaluateChallenge(Base64.decode(challenge));
else
//response = sc.evaluateChallenge(null);
response = sc.evaluateChallenge(new byte[0]);
//String authenticationText = "";
Packet responseStanza;
//if(response != null)
//{
//authenticationText = Base64.encodeBytes(response, 8);
//if(authenticationText.equals(""))
//authenticationText = "=";
if (response == null){
responseStanza = new Response();
} else {
responseStanza = new Response(Base64.encodeBytes(response,Base64.DONT_BREAK_LINES));
}
//}
//stanza.append("<response xmlns=\"urn:ietf:params:xml:ns:xmpp-sasl\">");
//stanza.append(authenticationText);
//stanza.append("</response>");
//getSASLAuthentication().send(stanza.toString());
getSASLAuthentication().send(responseStanza);
}
}
*/
private void initServiceDiscovery() {
debug(TAG, "init service discovery");
// register connection features
ServiceDiscoveryManager sdm = ServiceDiscoveryManager.getInstanceFor(mConnection);
if (sdm == null)
sdm = new ServiceDiscoveryManager(mConnection);
if (!sdm.includesFeature(DISCO_FEATURE))
sdm.addFeature(DISCO_FEATURE);
if (!sdm.includesFeature(DeliveryReceipts.NAMESPACE))
sdm.addFeature(DeliveryReceipts.NAMESPACE);
}
private void onReconnectionSuccessful() {
mNeedReconnect = false;
setState(LOGGED_IN, null);
}
private void addProviderManagerExtensions ()
{
ProviderManager pm = ProviderManager.getInstance();
// Private Data Storage
pm.addIQProvider("query","jabber:iq:private", new PrivateDataManager.PrivateDataIQProvider());
// Time
try {
pm.addIQProvider("query","jabber:iq:time", Class.forName("org.jivesoftware.smackx.packet.Time"));
} catch (ClassNotFoundException e) {
Log.w("TestClient", "Can't load class for org.jivesoftware.smackx.packet.Time");
}
// Roster Exchange
pm.addExtensionProvider("x","jabber:x:roster", new RosterExchangeProvider());
// Message Events
pm.addExtensionProvider("x","jabber:x:event", new MessageEventProvider());
// Chat State
pm.addExtensionProvider("active","http://jabber.org/protocol/chatstates", new ChatStateExtension.Provider());
pm.addExtensionProvider("composing","http://jabber.org/protocol/chatstates", new ChatStateExtension.Provider());
pm.addExtensionProvider("paused","http://jabber.org/protocol/chatstates", new ChatStateExtension.Provider());
pm.addExtensionProvider("inactive","http://jabber.org/protocol/chatstates", new ChatStateExtension.Provider());
pm.addExtensionProvider("gone","http://jabber.org/protocol/chatstates", new ChatStateExtension.Provider());
// XHTML
pm.addExtensionProvider("html","http://jabber.org/protocol/xhtml-im", new XHTMLExtensionProvider());
// Group Chat Invitations
pm.addExtensionProvider("x","jabber:x:conference", new GroupChatInvitation.Provider());
// Service Discovery # Items
pm.addIQProvider("query","http://jabber.org/protocol/disco#items", new DiscoverItemsProvider());
// Service Discovery # Info
pm.addIQProvider("query","http://jabber.org/protocol/disco#info", new DiscoverInfoProvider());
// Data Forms
pm.addExtensionProvider("x","jabber:x:data", new DataFormProvider());
// MUC User
pm.addExtensionProvider("x","http://jabber.org/protocol/muc#user", new MUCUserProvider());
// MUC Admin
pm.addIQProvider("query","http://jabber.org/protocol/muc#admin", new MUCAdminProvider());
// MUC Owner
pm.addIQProvider("query","http://jabber.org/protocol/muc#owner", new MUCOwnerProvider());
// Delayed Delivery
pm.addExtensionProvider("x","jabber:x:delay", new DelayInformationProvider());
// Version
try {
pm.addIQProvider("query","jabber:iq:version", Class.forName("org.jivesoftware.smackx.packet.Version"));
} catch (ClassNotFoundException e) {
// Version provider class not available in this Smack build; skip registering it.
}
// VCard
pm.addIQProvider("vCard","vcard-temp", new VCardProvider());
// Offline Message Requests
pm.addIQProvider("offline","http://jabber.org/protocol/offline", new OfflineMessageRequest.Provider());
// Offline Message Indicator
pm.addExtensionProvider("offline","http://jabber.org/protocol/offline", new OfflineMessageInfo.Provider());
// Last Activity
pm.addIQProvider("query","jabber:iq:last", new LastActivity.Provider());
// User Search
pm.addIQProvider("query","jabber:iq:search", new UserSearch.Provider());
// SharedGroupsInfo
pm.addIQProvider("sharedgroup","http:
// JEP-33: Extended Stanza Addressing
pm.addExtensionProvider("addresses","http://jabber.org/protocol/address", new MultipleAddressesProvider());
// FileTransfer
pm.addIQProvider("si","http://jabber.org/protocol/si", new StreamInitiationProvider());
pm.addIQProvider("query","http://jabber.org/protocol/bytestreams", new BytestreamsProvider());
// Privacy
pm.addIQProvider("query","jabber:iq:privacy", new PrivacyProvider());
pm.addIQProvider("command", "http://jabber.org/protocol/commands", new AdHocCommandDataProvider());
pm.addExtensionProvider("malformed-action", "http://jabber.org/protocol/commands", new AdHocCommandDataProvider.MalformedActionError());
pm.addExtensionProvider("bad-locale", "http://jabber.org/protocol/commands", new AdHocCommandDataProvider.BadLocaleError());
pm.addExtensionProvider("bad-payload", "http://jabber.org/protocol/commands", new AdHocCommandDataProvider.BadPayloadError());
pm.addExtensionProvider("bad-sessionid", "http://jabber.org/protocol/commands", new AdHocCommandDataProvider.BadSessionIDError());
pm.addExtensionProvider("session-expired", "http://jabber.org/protocol/commands", new AdHocCommandDataProvider.SessionExpiredError());
}
class NameSpace {
public static final String DISCO_INFO = "http://jabber.org/protocol/disco#info";
public static final String DISCO_ITEMS = "http://jabber.org/protocol/disco#items";
public static final String IQ_GATEWAY = "jabber:iq:gateway";
public static final String IQ_GATEWAY_REGISTER = "jabber:iq:gateway:register";
public static final String IQ_LAST = "jabber:iq:last";
public static final String IQ_REGISTER = "jabber:iq:register";
public static final String IQ_REGISTERED = "jabber:iq:registered";
public static final String IQ_ROSTER = "jabber:iq:roster";
public static final String IQ_VERSION = "jabber:iq:version";
public static final String CHATSTATES = "http://jabber.org/protocol/chatstates";
public static final String XEVENT = "jabber:x:event";
public static final String XDATA = "jabber:x:data";
public static final String MUC = "http://jabber.org/protocol/muc";
public static final String MUC_USER = MUC + "#user";
public static final String MUC_ADMIN = MUC + "#admin";
public static final String SPARKNS = "http://www.jivesoftware.com/spark";
public static final String DELAY = "urn:xmpp:delay";
public static final String OFFLINE = "http://jabber.org/protocol/offline";
public static final String X_DELAY = "jabber:x:delay";
public static final String VCARD_TEMP = "vcard-temp";
public static final String VCARD_TEMP_X_UPDATE = "vcard-temp:x:update";
public static final String ATTENTIONNS = "urn:xmpp:attention:0";
}
public boolean registerAccount (Imps.ProviderSettings.QueryMap providerSettings, String username, String password, Map<String,String> params) throws Exception
{
initConnection(providerSettings, username);
if (mConnection.getAccountManager().supportsAccountCreation())
{
mConnection.getAccountManager().createAccount(username, password, params);
return true;
}
else
{
return false;//not supported
}
}
private Contact handlePresenceChanged(org.jivesoftware.smack.packet.Presence presence) {
if (presence == null)
return null;
if (presence.getType() == Type.error)
{
if (mRoster == null)
return null;
presence = mRoster.getPresence(presence.getFrom());
}
String from = presence.getFrom();
if (TextUtils.isEmpty(from))
return null;
XmppAddress xaddress = new XmppAddress(from);
if (mUser.getAddress().getBareAddress().equals(xaddress.getBareAddress())) //ignore presence from yourself
return null;
String status = presence.getStatus();
Presence p = new Presence(parsePresence(presence), status, null, null,
Presence.CLIENT_TYPE_DEFAULT);
//this is only persisted in memory
p.setPriority(presence.getPriority());
// Get presence from the Roster to handle priorities and such
// TODO: this causes bad network and performance issues
// if (presence.getType() == Type.available) //get the latest presence for the highest priority
Contact contact = mContactListManager.getContact(xaddress.getBareAddress());
String[] presenceParts = presence.getFrom().split("/");
if (presenceParts.length > 1)
p.setResource(presenceParts[1]);
if (contact == null && presence.getType() == Type.subscribe) {
XmppAddress xAddr = new XmppAddress(presence.getFrom());
if (mRoster == null)
return null;
RosterEntry rEntry = mRoster.getEntry(xAddr.getBareAddress());
String name = null;
if (rEntry != null)
name = rEntry.getName();
if (name == null || name.length() == 0)
name = xAddr.getUser();
contact = new Contact(xAddr,name);
try {
if (!mContactListManager.getDefaultContactList().containsContact(contact.getAddress()))
{
mContactListManager.getDefaultContactList().addExistingContact(contact);
}
} catch (ImException e) {
debug(TAG,"unable to add new contact to default list: " + e.getLocalizedMessage());
}
}
else if (contact == null)
{
return null; //do nothing if we don't have a contact
}
if (presence.getType() == Type.subscribe) {
debug(TAG,"got subscribe request: " + presence.getFrom());
try
{
mContactListManager.getSubscriptionRequestListener().onSubScriptionRequest(contact, mProviderId, mAccountId);
}
catch (RemoteException e)
{
Log.e(TAG,"remote exception on subscription handling",e);
}
}
else if (presence.getType() == Type.subscribed) {
debug(TAG,"got subscribed confirmation request: " + presence.getFrom());
try
{
mContactListManager.getSubscriptionRequestListener().onSubscriptionApproved(contact, mProviderId, mAccountId);
}
catch (RemoteException e)
{
Log.e(TAG,"remote exception on subscription handling",e);
}
}
else if (presence.getType() == Type.unsubscribe) {
debug(TAG,"got unsubscribe request: " + presence.getFrom());
//TBD how to handle this
// mContactListManager.getSubscriptionRequestListener().onUnSubScriptionRequest(contact);
}
else if (presence.getType() == Type.unsubscribed) {
debug(TAG,"got unsubscribe request: " + presence.getFrom());
try
{
mContactListManager.getSubscriptionRequestListener().onSubscriptionDeclined(contact, mProviderId, mAccountId);
}
catch (RemoteException e)
{
Log.e(TAG,"remote exception on subscription handling",e);
}
}
else
{
//this is typical presence, let's get the latest/highest priority
debug(TAG,"got presence:: " + presence.getFrom() + "=" + p.getStatusText());
if (contact.getPresence() != null)
{
Presence pOld = contact.getPresence();
if (pOld.getResource() != null && pOld.getResource().equals(p.getResource())) //if the same resource as the existing one, then update it
{
contact.setPresence(p);
}
else if (p.getPriority() >= pOld.getPriority()) //if priority is higher, then override
{
contact.setPresence(p);
}
if (p.getStatus() != Imps.Presence.AVAILABLE)
{
//if offline, let's check for another online presence
presence = mRoster.getPresence(presence.getFrom());
// use the status from the refreshed roster presence rather than the stale local variable
p = new Presence(parsePresence(presence), presence.getStatus(), null, null,
Presence.CLIENT_TYPE_DEFAULT);
//this is only persisted in memory
p.setPriority(presence.getPriority());
contact.setPresence(p);
}
}
else
{
//we don't have a presence yet so set one
contact.setPresence(p);
}
}
return contact;
}
private void initPresenceProcessor ()
{
mTimerPresence = new Timer();
mTimerPresence.scheduleAtFixedRate(new TimerTask() {
public void run() {
if (qPresence.size() > 0)
{
ArrayList<Contact> alUpdate = new ArrayList<Contact>();
org.jivesoftware.smack.packet.Presence p = null;
Contact contact = null;
while (qPresence.peek() != null)
{
p = qPresence.pop();
contact = handlePresenceChanged(p);
if (contact != null)
alUpdate.add(contact);
}
//Log.d(ImApp.LOG_TAG,"XMPP processed presence q=" + alUpdate.size());
mContactListManager.notifyContactsPresenceUpdated(alUpdate.toArray(new Contact[alUpdate.size()]));
loadVCardsAsync();
}
}
}, 1000, 5000);
}
Timer mTimerPackets = null;
private void initPacketProcessor ()
{
mTimerPackets = new Timer();
mTimerPackets.scheduleAtFixedRate(new TimerTask() {
public void run() {
try
{
org.jivesoftware.smack.packet.Packet packet = null;
if (qPacket.size() > 0)
while ((packet = qPacket.poll())!=null)
{
if (mConnection == null || (!mConnection.isConnected())) {
debug(TAG, "postponed packet to " + packet.getTo()
+ " because we are not connected");
postpone(packet);
return;
}
try {
mConnection.sendPacket(packet);
} catch (IllegalStateException ex) {
postpone(packet);
debug(TAG, "postponed packet to " + packet.getTo()
+ " because socket is disconnected");
}
}
}
catch (Exception e)
{
Log.e(ImApp.LOG_TAG,"error processing presence",e);
}
}
}, 500, 500);
}
}
|
package com.checkmarx.jenkins;
import com.checkmarx.cxconsole.CxConsoleLauncher;
import com.checkmarx.cxconsole.commands.CxConsoleCommand;
import com.checkmarx.cxviewer.ws.generated.Credentials;
import com.checkmarx.cxviewer.ws.generated.CxCLIWebService;
import com.checkmarx.cxviewer.ws.generated.CxCLIWebServiceSoap;
import com.checkmarx.cxviewer.ws.generated.CxWSResponseLoginData;
import com.checkmarx.cxviewer.ws.resolver.CxClientType;
import com.checkmarx.cxviewer.ws.resolver.CxWSResolver;
import com.checkmarx.cxviewer.ws.resolver.CxWSResolverSoap;
import com.checkmarx.cxviewer.ws.resolver.CxWSResponseDiscovery;
import com.sun.xml.internal.ws.wsdl.parser.InaccessibleWSDLException;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.Builder;
import hudson.util.FormValidation;
import net.sf.json.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.*;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.LinkedList;
/**
* The main entry point for Checkmarx plugin. This class implements the Builder
* build stage that scans the source code.
*
* @author Denis Krivitski
* @since 3/10/13
*/
public class CxScanBuilder extends Builder {
// Static Initializer
static {
//PropertyConfigurator.configure(CxScanBuilder.class.getResource("log4j.properties"));
BasicConfigurator.configure(); // TODO: Find out why the property configuration does not work
}
// Persistent plugin configuration parameters
private String serverUrl;
private String username;
private String password;
private String projectName;
private String preset;
private boolean presetSpecified;
private String extensionsExclude;
private String locationPathExclude;
private boolean visibleToOthers;
private boolean incremental;
private String sourceEncoding;
private String comment;
// Private variables
private static Logger logger = Logger.getLogger(CxScanBuilder.class);
// Constructors
@DataBoundConstructor
public CxScanBuilder(String serverUrl,
String username,
String password,
String projectName,
String preset,
boolean presetSpecified,
String extensionsExclude,
String locationPathExclude,
boolean visibleToOthers,
boolean incremental,
String sourceEncoding,
String comment)
{
this.serverUrl = serverUrl;
this.username = username;
this.password = password;
this.projectName = projectName;
this.preset = preset;
this.presetSpecified = presetSpecified;
this.extensionsExclude = extensionsExclude;
this.locationPathExclude = locationPathExclude;
this.visibleToOthers = visibleToOthers;
this.incremental = incremental;
this.sourceEncoding = sourceEncoding;
this.comment = comment;
}
// Configuration fields getters
public String getServerUrl() {
return serverUrl;
}
public String getUsername() {
return username;
}
public String getPassword() {
return password;
}
public String getProjectName() {
return projectName;
}
public String getPreset() {
return preset;
}
public boolean isPresetSpecified() {
return presetSpecified;
}
public String getExtensionsExclude() {
return extensionsExclude;
}
public String getLocationPathExclude() {
return locationPathExclude;
}
public boolean isVisibleToOthers() {
return visibleToOthers;
}
public boolean isIncremental() {
return incremental;
}
public String getSourceEncoding() {
return sourceEncoding;
}
public String getComment() {
return comment;
}
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
WriterAppender appender = new WriterAppender(new PatternLayout(),listener.getLogger());
appender.setThreshold(Level.INFO);
Logger.getLogger("com.checkmarx.cxconsole.commands").addAppender(appender);
// Debug appenders
ConsoleAppender debugAppender = new ConsoleAppender();
debugAppender.setThreshold(Level.DEBUG);
Logger.getLogger("com.checkmarx.cxconsole").addAppender(debugAppender);
Logger.getLogger("com.checkmarx.cxviewer").addAppender(debugAppender);
int cxConsoleLauncherExitCode = CxConsoleLauncher.runCli(createCliCommandLineArgs(build));
return cxConsoleLauncherExitCode == CxConsoleCommand.CODE_OK; // Return true if exit code == CODE_OK
}
private String[] createCliCommandLineArgs(AbstractBuild<?, ?> build) throws IOException
{
LinkedList<String> args = new LinkedList<String>();
args.add("Scan");
args.add("-comment"); args.add(this.getComment());
if ("1".equals(getSourceEncoding()))
{
args.add("-Configuration");
args.add("Japanese (Shift-JIS)");
}
args.add("-CxPassword"); args.add(this.getPassword());
args.add("-CxServer"); args.add(this.getServerUrl());
args.add("-CxUser"); args.add(this.getUsername());
if (this.isIncremental())
{
args.add("-incremental");
}
if (build.getWorkspace().isRemote())
{
logger.error("Workspace is on remote machine");
throw new IOException("Workspace is on remote machine");
}
args.add("-LocationPath"); args.add(build.getWorkspace().getRemote());
String[] excludeFolders = StringUtils.split(getLocationPathExclude()," ,;:");
if (excludeFolders.length > 0)
{
args.add("-LocationPathExclude");
for (String excludeFolder : excludeFolders)
{
args.add(excludeFolder);
}
}
String[] excludeExtensions = StringUtils.split(getExtensionsExclude()," ,;:");
if (excludeExtensions.length > 0)
{
args.add("-ExtensionsExclude");
for (String excludeExtension : excludeExtensions)
{
args.add(excludeExtension);
}
}
args.add("-LocationType"); args.add("folder");
if (this.isPresetSpecified())
{
args.add("-Preset"); args.add(this.getPreset());
}
if (!this.isVisibleToOthers())
{
args.add("-private");
}
args.add("-ProjectName"); args.add(this.getProjectName());
File reportFile = new File(new File(build.getRootDir(),"checkmarx"),"ScanReport.xml");
args.add("-ReportXML"); args.add(reportFile.getAbsolutePath());
args.add("-v");
File logFile = new File(new File(build.getRootDir(),"checkmarx"),"cx_scan.log");
args.add("-log"); args.add(logFile.getAbsolutePath());
String[] result = args.toArray(new String[0]);
logger.debug("CLI Command Arguments: " + StringUtils.join(result," "));
return result;
}
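/*
 * Illustrative sketch (added for clarity, not part of the original source): for a
 * typical build the argument list assembled above resolves to something like the
 * following, where all values are placeholders:
 *
 *   Scan -comment "nightly" -CxPassword **** -CxServer http://cx.example.com
 *        -CxUser jenkins -LocationPath /var/lib/jenkins/workspace/job
 *        -LocationType folder -ProjectName MyProject
 *        -ReportXML <build>/checkmarx/ScanReport.xml -v -log <build>/checkmarx/cx_scan.log
 */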
@Override
public DescriptorImpl getDescriptor() {
return (DescriptorImpl)super.getDescriptor();
}
/*public String getIconPath() {
PluginWrapper wrapper = Hudson.getInstance().getPluginManager().getPlugin([YOUR-PLUGIN-MAIN-CLASS].class);
return Hudson.getInstance().getRootUrl() + "plugin/"+ wrapper.getShortName()+"/";
}*/
@Extension
public static final class DescriptorImpl extends BuildStepDescriptor<Builder> {
public final static String DEFAULT_EXCLUDE_EXTENSION = "DS_Store, ipr, iws, bak, tmp, aac, aif, iff, m3u, mid, mp3, mpa, ra, wav, wma, 3g2, 3gp, asf, asx, avi, flv, mov, mp4, mpg, rm, swf, vob, wmv, bmp, gif, jpg, png, psd, tif, swf, jar, zip, rar, exe, dll, pdb, 7z, gz, tar.gz, tar, gz,ahtm, ahtml, fhtml, hdm, hdml, hsql, ht, hta, htc, htd, htm, html, htmls, ihtml, mht, mhtm, mhtml, ssi, stm, stml, ttml, txn, xhtm, xhtml, class, iml";
public final static String DEFAULT_EXCLUDE_FOLDERS = "_cvs, .svn, .hg, .git, .bzr, bin, obj, backup, .idea";
private static Logger logger = Logger.getLogger(DescriptorImpl.class);
private String webServiceUrl;
public boolean isApplicable(Class<? extends AbstractProject> aClass) {
return true;
}
// Field value validators
public FormValidation doCheckServerUrl(@QueryParameter String value) {
try {
this.webServiceUrl = null;
this.webServiceUrl = resolveWebServiceURL(value);
return FormValidation.ok("Server Validated Successfully");
} catch (Exception e)
{
return FormValidation.error(e.getMessage());
}
}
public FormValidation doCheckPassword(@QueryParameter String value, @QueryParameter String username) {
final int LCID = 1033;
String password = value;
if (this.webServiceUrl==null) {
return FormValidation.warning("Server URL not set");
}
try {
CxCLIWebService cxCLIWebService = new CxCLIWebService(new URL(this.webServiceUrl));
CxCLIWebServiceSoap cxCLIWebServiceSoap = cxCLIWebService.getCxCLIWebServiceSoap();
Credentials credentials = new Credentials();
credentials.setUser(username);
credentials.setPass(password);
CxWSResponseLoginData cxWSResponseLoginData = cxCLIWebServiceSoap.login(credentials, LCID);
if (cxWSResponseLoginData.isIsSuccesfull())
{
return FormValidation.ok("Login Successful");
} else {
return FormValidation.error(cxWSResponseLoginData.getErrorMessage());
}
} catch (InaccessibleWSDLException e)
{
return FormValidation.error("Error connecting to the server");
} catch (Exception e)
{
return FormValidation.error(e.getMessage());
}
}
private String resolveWebServiceURL(String serverUrl) throws Exception
{
final String WS_RESOLVER_PATH = "/cxwebinterface/cxWSResolver.asmx";
final int WS_CLI_INTERFACE_VERSION = 0;
final String NO_CONNECTION_ERROR_MESSAGE = "Checkmarx server did not respond on the specified URL";
try {
if (serverUrl==null || serverUrl.isEmpty())
{
throw new Exception("Provide Server URL");
}
URL url = new URL(serverUrl);
if (!url.getPath().isEmpty())
{
throw new Exception("URL Must not contain path");
}
if (url.getQuery()!=null)
{
throw new Exception("URL Must not contain query parameters");
}
url = new URL(url.toString() + WS_RESOLVER_PATH);
CxWSResolver cxWSResolver = new CxWSResolver(url);
CxWSResolverSoap cxWSResolverSoap = cxWSResolver.getCxWSResolverSoap();
CxWSResponseDiscovery cxWSResponseDiscovery = cxWSResolverSoap.getWebServiceUrl(CxClientType.CLI,WS_CLI_INTERFACE_VERSION);
if (cxWSResponseDiscovery.isIsSuccesfull())
{
return cxWSResponseDiscovery.getServiceURL();
} else {
throw new Exception(NO_CONNECTION_ERROR_MESSAGE);
}
} catch (InaccessibleWSDLException e)
{
logger.debug(e);
throw new Exception(NO_CONNECTION_ERROR_MESSAGE);
}
}
/**
* This human readable name is used in the configuration screen.
*/
public String getDisplayName() {
return "Execute Checkmarx Scan";
}
@Override
public boolean configure(StaplerRequest req, JSONObject formData) throws FormException {
// To persist global configuration information,
// set that to properties and call save().
// Can also use req.bindJSON(this, formData);
// (easier when there are many fields; need set* methods for this, like setUseFrench)
// save();
return super.configure(req,formData);
}
}
}
|
package paddlefish.protocol;
import java.io.IOException;
import java.util.ArrayList;
import jssc.SerialPortException;
import jssc.SerialPortTimeoutException;
import paddlefish.hal.HAL;
import paddlefish.protocol.CommConstants;
/*Singleton class Pattern is used*/
public class CommController
{
private static CommController instance = null;
private static HAL hal;
protected CommController() throws Exception
{
// Exists only to defeat instantiation.
if(hal==null)
hal = new HAL();
// TODO : implement CRC
// TODO : timeout control needed
// TODO : create a thread for serial read and write
}
public static CommController getInstance() throws Exception {
if(instance == null)
{
instance = new CommController();
}
return instance;
}
public boolean isConnected()
{
return hal.isConnected();
}
public ArrayList<String> listPorts()
{
ArrayList<String> ports = hal.listAvailablePorts();
return ports;
}
public boolean connect(String port, int baud)
{
return hal.connect(port, baud);
}
public void disconnect()
{
hal.disconnect();
}
public byte[] readByteArray(byte deviceAddress, byte registerAddress, int length) throws IOException, InterruptedException, SerialPortException, SerialPortTimeoutException
{
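// Frame layout: START, READ_BYTES, device address, register address, byte count, 0x00 (presumably reserved for the CRC noted in the TODO above), END.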
byte cmd[] = new byte[7];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_READ_BYTES;
cmd[2] = deviceAddress;
cmd[3] = registerAddress;
cmd[4] = (byte) length;
cmd[5] = 0x00;
cmd[6] = CommConstants.CMD_END;
hal.txData(cmd);
Thread.sleep(50);
byte[] receivedData = hal.rxData();
return receivedData;
}
public boolean writeSingleByte(byte deviceAddress, byte registerAddress, byte data) throws IOException, SerialPortException, SerialPortTimeoutException
{
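// Frame layout: START, WRITE_BYTES, device address, register address, count (0x01), END marker, data byte, END.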
byte cmd[] = new byte[8];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_WRITE_BYTES;
cmd[2] = deviceAddress;
cmd[3] = registerAddress;
cmd[4] = 0x01;
cmd[5] = CommConstants.CMD_END;
cmd[6] = data;
cmd[7] = CommConstants.CMD_END;
hal.txData(cmd);
byte[] receivedData = hal.rxData();
return checkOK(receivedData);
}
public boolean writeByteArray(byte deviceAddress, byte registerAddress, int length, byte data[]) throws IOException, SerialPortException, SerialPortTimeoutException
{
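// Frame layout: START, WRITE_BYTES, device address, register address, count, END marker, payload bytes, END; the final byte of the buffer is left at its default 0x00.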
byte cmd[] = new byte[8+length];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_WRITE_BYTES;
cmd[2] = deviceAddress;
cmd[3] = registerAddress;
cmd[4] = (byte)length;
cmd[5] = CommConstants.CMD_END;
cmd[6+length] = CommConstants.CMD_END;
System.arraycopy(data, 0, cmd, 6, length);
hal.txData(cmd);
byte[] receivedData = hal.rxData();
return checkOK(receivedData);
}
public boolean writeBits(byte deviceAddress, byte registerAddress, byte data, byte mask) throws IOException, InterruptedException, SerialPortException, SerialPortTimeoutException
{
byte cmd[] = new byte[8];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_WRITE_BITS;
cmd[2] = deviceAddress;
cmd[3] = registerAddress;
cmd[4] = data;
cmd[5] = mask;
cmd[6] = 0x00;
cmd[7] = CommConstants.CMD_END;
hal.txData(cmd);
Thread.sleep(50);
byte[] receivedData = hal.rxData();
return checkOK(receivedData);
}
public boolean addDevice(byte deviceAddress, byte registerAddress, byte length, int period) throws IOException, InterruptedException, SerialPortException, SerialPortTimeoutException
{
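// Frame layout: START, STREAM_ADD, device address, register address, byte count, period low byte, period high byte (little-endian), 0x00, END.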
byte cmd[] = new byte[9];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_STREAM_ADD;
cmd[2] = deviceAddress;
cmd[3] = registerAddress;
cmd[4] = (byte) length;
cmd[5] = (byte)( period & 0xFF );
cmd[6] = (byte)( ( period >> 8 ) & 0xFF );
cmd[7] = 0x00;
cmd[8] = CommConstants.CMD_END;
hal.txData(cmd);
Thread.sleep(50);
byte[] receivedData = hal.rxData();
return checkOK(receivedData);
}
public boolean setPeriod(int period) throws IOException, InterruptedException, SerialPortException, SerialPortTimeoutException
{
byte cmd[] = new byte[6];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_STREAM_ADD;
cmd[2] = (byte)( period & 0xFF );
cmd[3] = (byte)( ( period >> 8 ) & 0xFF );
cmd[4] = 0x00;
cmd[5] = CommConstants.CMD_END;
hal.txData(cmd);
Thread.sleep(50);
byte[] receivedData = hal.rxData();
return checkOK(receivedData);
}
public boolean start() throws IOException, InterruptedException, SerialPortException, SerialPortTimeoutException
{
byte cmd[] = new byte[5];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_STREAM_ADD;
cmd[2] = 0x01;
cmd[3] = 0x00;
cmd[4] = CommConstants.CMD_END;
hal.txData(cmd);
Thread.sleep(50);
byte[] receivedData = hal.rxData();
return checkOK(receivedData);
}
public boolean stop() throws IOException, InterruptedException, SerialPortException, SerialPortTimeoutException
{
byte cmd[] = new byte[5];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_STREAM_ADD;
cmd[2] = 0x00;
cmd[3] = 0x00;
cmd[4] = CommConstants.CMD_END;
hal.txData(cmd);
Thread.sleep(50);
byte[] receivedData = hal.rxData();
return checkOK(receivedData);
}
public boolean reset() throws IOException, InterruptedException, SerialPortException, SerialPortTimeoutException
{
byte cmd[] = new byte[4];
cmd[0] = CommConstants.CMD_START;
cmd[1] = CommConstants.CMD_STREAM_ADD;
cmd[2] = 0x00;
cmd[3] = CommConstants.CMD_END;
hal.txData(cmd);
Thread.sleep(50);
byte[] receivedData = hal.rxData();
return checkOK(receivedData);
}
public void close()
{
if(hal!=null)
{
hal.close();
}
else
{
// TODO: log instead of printing to stdout
System.out.println("No HAL available");
}
}
private static boolean checkOK(byte ans[])
{
// Guard against null or truncated responses before inspecting the frame markers.
return ans != null && ans.length >= 3
&& ans[0] == CommConstants.CMD_START
&& ans[1] == CommConstants.CMD_OK
&& ans[2] == CommConstants.CMD_END;
}
}
|
package org.xins.client;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.UnknownHostException;
import java.net.URL;
import java.util.Iterator;
import java.util.Map;
import java.util.zip.Checksum;
import java.util.zip.CRC32;
import org.apache.log4j.Logger;
import org.jdom.Document;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.xins.util.MandatoryArgumentChecker;
import org.xins.util.http.HTTPRequester;
import org.xins.util.net.URLEncoding;
import org.xins.util.text.HexConverter;
import org.xins.util.text.FastStringBuffer;
/**
* Function caller implementation that actually sends an HTTP request to a
* remote XINS API.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>)
*
* @since XINS 0.41
*/
public class ActualFunctionCaller
extends AbstractFunctionCaller {
// Class fields
/**
* The logging category used by this class. This class field is never
* <code>null</code>.
*/
private final static Logger LOG = Logger.getLogger(ActualFunctionCaller.class.getName());
/**
* Initial buffer size for a parameter string. See
* {@link #createParameterString(String,String,Map)}.
*/
private static final int PARAMETER_STRING_BUFFER_SIZE = 256;
/**
* Flag that indicates if at the construction of an
* <code>ActualFunctionCaller</code> the backend API is called to see if it
* is up.
*/
private static boolean CALL_AT_CONSTRUCTION = true;
// Class functions
/**
* Set the flag that indicates if at the construction of an
* <code>ActualFunctionCaller</code> the backend API is called to see if it
* is up.
*
* @param b
* the new value for the flag.
*/
public static void setCallAtConstruction(boolean b) {
CALL_AT_CONSTRUCTION = b;
}
private long computeCRC32(URL url)
throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("url", url);
// Get the string
String string = url.toString();
Checksum checksum = new CRC32();
byte[] bytes;
final String ENCODING = "US-ASCII";
try {
bytes = string.getBytes(ENCODING);
} catch (UnsupportedEncodingException exception) {
throw new Error("Encoding \"" + ENCODING + "\" is not supported.");
}
checksum.update(bytes, 0, bytes.length);
return checksum.getValue();
}
// Constructors
public ActualFunctionCaller(URL url)
throws IllegalArgumentException,
SecurityException,
UnknownHostException,
MultipleIPAddressesException {
this(url, null);
}
public ActualFunctionCaller(URL url, String hostName)
throws IllegalArgumentException,
SecurityException,
UnknownHostException,
MultipleIPAddressesException {
// Check preconditions
MandatoryArgumentChecker.check("url", url);
boolean debugEnabled = LOG.isDebugEnabled();
String urlString = url.toString();
if (debugEnabled) {
if (hostName == null) {
LOG.debug("Creating ActualFunctionCaller for URL: " + urlString);
} else {
LOG.debug("Creating ActualFunctionCaller for URL: " + urlString + ", hostname: " + hostName);
}
}
// Perform DNS lookup
String urlHostName = url.getHost();
InetAddress[] addresses = InetAddress.getAllByName(urlHostName);
if (addresses.length > 1) {
throw new MultipleIPAddressesException(urlHostName, addresses);
}
// Construct the internal URL, with absolute IP address, so no DNS
// lookups will be necessary anymore
try {
_url = new URL(url.getProtocol(), // protocol
addresses[0].getHostAddress(), // host
url.getPort(), // port
url.getFile()); // file
} catch (MalformedURLException mue) {
throw new Error("Caught MalformedURLException for a protocol that was previously accepted: \"" + url.getProtocol() + "\".");
}
// Initialize fields
_hostName = (hostName != null) ? hostName : urlHostName;
_callResultParser = new CallResultParser();
_crc32 = computeCRC32(_url);
_crc32String = HexConverter.toHexString(_crc32);
_urlString = urlString;
// Call the API to make sure it's up
if (CALL_AT_CONSTRUCTION) {
if (debugEnabled) {
LOG.debug("Checking if API at " + urlString + " is up.");
}
try {
call(null, "_NoOp", null);
LOG.info("API at " + urlString + " is up.");
} catch (IOException exception) {
LOG.error("API at " + urlString + " is not accessible.");
} catch (InvalidCallResultException exception) {
LOG.error("API at " + urlString + " returned an invalid call result.");
}
} else {
if (debugEnabled) {
LOG.debug("Not checking if API at " + urlString + " is up.");
}
}
}
// Fields
/**
* The URL for the API this object represents. This field is never
* <code>null</code>.
*/
private final URL _url;
/**
* The URL for the API, as a string. This field is never <code>null</code>.
*/
private final String _urlString;
/**
* The host name as passed to the constructor in the URL. This field is
* never <code>null</code>.
*/
private final String _hostName;
/**
* Call result parser. This field cannot be <code>null</code>.
*/
private final CallResultParser _callResultParser;
/**
* The CRC-32 checksum for the URL.
*/
private final long _crc32;
/**
* The CRC-32 checksum for the URL, as a String.
*/
private final String _crc32String;
// Methods
/**
* Returns the URL for the API this object represents.
*
* @return
* the URL, never <code>null</code>.
*/
public URL getURL() {
return _url;
}
private HTTPRequester.Result performRequest(String parameterString)
throws IllegalArgumentException, IOException {
// Check precondition
MandatoryArgumentChecker.check("parameterString", parameterString);
HTTPRequester requester = new HTTPRequester();
byte[] parameterStringBytes = parameterString.getBytes("US-ASCII");
if (LOG.isDebugEnabled()) {
LOG.debug("Calling " + _url.toString() + '?' + parameterString);
}
try {
return requester.post(_url, parameterStringBytes, _hostName);
} catch (Throwable exception) {
String message = "Failed to call " + _url.toString() + '?' + parameterString + " due to " + exception.getClass().getName() + ", message is: \"" + exception.getMessage() + "\".";
LOG.error(message);
throw new IOException(message);
}
}
public CallResult call(String sessionID, String functionName, Map parameters)
throws IllegalArgumentException, IOException, InvalidCallResultException {
// Check preconditions
MandatoryArgumentChecker.check("functionName", functionName);
boolean debugEnabled = LOG.isDebugEnabled();
// Prepare an HTTP request
String parameterString = createParameterString(sessionID, functionName, parameters);
// Execute the request
if (debugEnabled) {
LOG.debug("Posting to API: " + _url + '?' + parameterString);
}
HTTPRequester.Result result = performRequest(parameterString);
int httpCode = result.getCode();
// Evaluate the HTTP response code
if (httpCode != 200) {
throw new InvalidCallResultException("HTTP return code is " + httpCode + '.');
}
// Parse the result of the HTTP call
try {
return _callResultParser.parse(this, result.getString());
} catch (ParseException exception) {
throw new InvalidCallResultException(exception.getMessage(), exception.getCauseException());
}
}
private final String createParameterString(String sessionID, String functionName, Map parameters)
throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("functionName", functionName);
// Initialize a buffer
// TODO: Use _function
FastStringBuffer buffer = new FastStringBuffer(PARAMETER_STRING_BUFFER_SIZE);
buffer.append("function=");
buffer.append(functionName);
// If there is a session identifier, process it
if (sessionID != null) {
buffer.append("&_session=");
buffer.append(sessionID);
}
// If there are parameters, then process them
if (parameters != null) {
// Loop through them all
Iterator keys = parameters.keySet().iterator();
while (keys.hasNext()) {
// Get the parameter key
String key = (String) keys.next();
// The key cannot equal 'function'
if ("function".equals(key)) {
throw new IllegalArgumentException("The function parameter \"function\" cannot be used for a normal parameter.");
}
// TODO: Make sure the key does not start with an underscore
// TODO: Make sure the key is properly formatted
// TODO: URL encode the value
// Add this parameter key/value combination
Object value = parameters.get(key);
if (value != null) {
buffer.append('&');
buffer.append(key);
buffer.append('=');
buffer.append(URLEncoding.encode(value.toString()));
}
}
}
return buffer.toString();
}
/**
* Returns the CRC-32 checksum for the URL of this function caller.
*
* @return
* the CRC-32 checksum.
*/
public long getCRC32() {
// TODO: Store the CRC-32 value in an int ?
return _crc32;
}
/**
* Returns the CRC-32 checksum for the URL of this function caller, as a
* String.
*
* @return
* the CRC-32 checksum, as a {@link String} containing an unsigned hex
* number.
*/
public String getCRC32String() {
return _crc32String;
}
public ActualFunctionCaller getActualFunctionCallerByCRC32(String crc32)
throws IllegalArgumentException {
MandatoryArgumentChecker.check("crc32", crc32);
return _crc32String.equals(crc32) ? this : null;
}
}
|
package io.scalecube.gateway.config;
import io.scalecube.config.ConfigRegistry;
import io.scalecube.config.ConfigRegistrySettings;
import io.scalecube.config.audit.Slf4JConfigEventListener;
import io.scalecube.config.source.ClassPathConfigSource;
import io.scalecube.config.source.DirectoryConfigSource;
import io.scalecube.config.source.SystemEnvironmentConfigSource;
import io.scalecube.config.source.SystemPropertiesConfigSource;
import java.nio.file.Path;
import java.util.function.Predicate;
import java.util.regex.Pattern;
public class GatewayConfigRegistry {
public static final String JMX_MBEAN_NAME = "io.scalecube.gateway.config:name=ConfigRegistry";
public static final Pattern CONFIG_PATTERN = Pattern.compile(".*[\\\\|/]?config[\\\\|/](.*)config(.*)?\\.properties");
public static final Predicate<Path> PATH_PREDICATE = path -> CONFIG_PATTERN.matcher(path.toString()).matches();
public static final int RELOAD_INTERVAL_SEC = 1;
private final ConfigRegistry configRegistry;
private GatewayConfigRegistry() {
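// Register configuration sources (system properties, environment variables, a local "config" directory, and the classpath) with SLF4J change auditing, JMX exposure and periodic reloading.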
this.configRegistry = ConfigRegistry.create(ConfigRegistrySettings.builder()
.addListener(new Slf4JConfigEventListener())
.addLastSource("sys_prop", new SystemPropertiesConfigSource())
.addLastSource("env_var", new SystemEnvironmentConfigSource())
.addLastSource("dir", new DirectoryConfigSource("config", PATH_PREDICATE))
.addLastSource("cp", new ClassPathConfigSource(PATH_PREDICATE))
.jmxMBeanName(JMX_MBEAN_NAME)
.reloadIntervalSec(RELOAD_INTERVAL_SEC)
.build());
}
public static ConfigRegistry configRegistry() {
return new GatewayConfigRegistry().configRegistry;
}
}
|
package io.car.server.rest.provider;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.Random;
import org.junit.Test;
import com.fasterxml.jackson.databind.JsonNode;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiLineString;
import com.vividsolutions.jts.geom.MultiPoint;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Point;
import com.vividsolutions.jts.geom.Polygon;
import io.car.server.core.exception.GeometryConverterException;
import io.car.server.rest.coding.GeoJSON;
/**
* @author Christian Autermann <autermann@uni-muenster.de>
*/
public class GeoJSONTest {
private final Random random = new Random();
private Coordinate randomCoordinate() {
return new Coordinate(random.nextInt(), random.nextInt());
}
protected LineString randomLineString(GeometryFactory fac) {
return fac.createLineString(new Coordinate[] { randomCoordinate(),
randomCoordinate(),
randomCoordinate() });
}
protected MultiLineString randomMultiLineString(GeometryFactory fac) {
return fac.createMultiLineString(new LineString[] { randomLineString(fac),
randomLineString(fac),
randomLineString(fac) });
}
protected Point randomPoint(GeometryFactory fac) {
return fac.createPoint(randomCoordinate());
}
protected LinearRing randomLinearRing(GeometryFactory fac) {
Coordinate p = randomCoordinate();
LinearRing linearRing = fac.createLinearRing(new Coordinate[] { p,
randomCoordinate(),
randomCoordinate(),
randomCoordinate(),
p });
return linearRing;
}
protected Polygon randomPolygon(GeometryFactory fac) {
return fac.createPolygon(randomLinearRing(fac), new LinearRing[] { randomLinearRing(fac),
randomLinearRing(fac),
randomLinearRing(fac) });
}
protected MultiPoint randomMultiPoint(GeometryFactory fac) {
return fac.createMultiPoint(new Coordinate[] { randomCoordinate(),
randomCoordinate(),
randomCoordinate(),
randomCoordinate(),
randomCoordinate(),
randomCoordinate() });
}
protected MultiPolygon randomMultiPolygon(GeometryFactory fac) {
return fac.createMultiPolygon(new Polygon[] { randomPolygon(fac),
randomPolygon(fac),
randomPolygon(fac) });
}
protected GeometryCollection randomGeometryCollection(GeometryFactory fac) {
return fac.createGeometryCollection(new Geometry[] { randomPoint(fac), randomMultiPoint(fac),
randomLineString(fac), randomMultiLineString(fac),
randomPolygon(fac), randomMultiPolygon(fac) });
}
@Test
public void readWriteTest() throws GeometryConverterException {
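// Round-trip a randomly generated nested geometry collection through GeoJSON encoding and verify the decoded geometry equals the original.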
GeometryFactory fac = new GeometryFactory();
Geometry col = fac.createGeometryCollection(new Geometry[] { randomGeometryCollection(fac),
randomGeometryCollection(fac) });
GeoJSON conv = new GeoJSON(fac);
JsonNode json = conv.encode(col);
Geometry geom = conv.decode(json);
assertThat(geom, is(equalTo(col)));
}
}
|
package ai.h2o.automl;
import hex.Model;
import hex.ModelMetrics;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.util.Log;
import java.util.Arrays;
import java.util.List;
import static water.DKV.getGet;
import static water.Key.make;
/**
* Utility to track all the models built for a given dataset type.
* <p>
* Note that if a new Leaderboard is made for the same project it'll
* keep using the old model list, which allows us to run AutoML multiple
* times and keep adding to the leaderboard.
* <p>
* TODO: make this robust against removal of models from the DKV.
*/
public class Leaderboard extends Keyed {
/**
* Identifier for the models that should be grouped together in the leaderboard
* (e.g., "airlines" and "iris").
*/
private final String project;
/**
* List of models for this leaderboard, sorted by metric so that the best is on top,
* according to the standard metric for the given model type. NOTE: callers should
* access this through #models() to make sure they don't get a stale copy.
*/
private Key<Model>[] models;
/**
* Metric used to sort this leaderboard.
*/
private String metric;
/** HIDEME! */
private Leaderboard() {
throw new NotImplementedException();
}
public Leaderboard(String project) {
super(make("AutoML_Leaderboard_" + project, (byte) 0, (byte) 2 /*builtin key*/, false));
this.project = project;
this.models = new Key[0];
DKV.put(this);
}
public String getProject() {
return project;
}
public void addModels(final Key<Model>[] newModels) {
if (null == this._key)
throw new H2OIllegalArgumentException("Can't add models to a Leaderboard which isn't in the DKV.");
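// Update the leaderboard atomically in the DKV: append the new keys, then re-sort the combined list by the default metric.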
new TAtomic<Leaderboard>() {
@Override
public Leaderboard atomic(Leaderboard old) {
if (old == null) old = new Leaderboard();
Key<Model>[] oldModels = old.models;
old.models = new Key[oldModels.length + newModels.length];
System.arraycopy(oldModels, 0, old.models, 0, oldModels.length);
System.arraycopy(newModels, 0, old.models, oldModels.length, newModels.length);
Model m = DKV.getGet(old.models[0]);
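// Use the first model to decide which default sort metric applies (AUC, mean per-class error, or mean residual deviance).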
// Sort by metric.
// TODO: allow the metric to be passed in. Note that this assumes the validation (or training) frame is the same.
// If we want to train on different frames and then compare we need to score all the models and sort on the new metrics.
List<Key<Model>> newModelsSorted = null;
try {
if (m._output.isBinomialClassifier())
newModelsSorted = ModelMetrics.sortModelsByMetric("auc", true, Arrays.asList(old.models));
else if (m._output.isClassifier())
newModelsSorted = ModelMetrics.sortModelsByMetric("mean_per_class_error", false, Arrays.asList(old.models));
else if (m._output.isSupervised())
newModelsSorted = ModelMetrics.sortModelsByMetric("mean_residual_deviance", false, Arrays.asList(old.models));
}
catch (H2OIllegalArgumentException e) {
Log.warn("ModelMetrics.sortModelsByMetric failed: " + e);
throw e;
}
old.models = newModelsSorted.toArray(new Key[0]);
return old;
} // atomic
}.invoke(this._key);
}
public void addModel(final Key<Model> key) {
addModels(new Key[] { key });
}
public void addModel(final Model model) {
addModels(new Key[] { model._key });
}
private static Model[] modelsForModelKeys(Key<Model>[] modelKeys, Model[] models) {
assert models.length >= modelKeys.length;
int i = 0;
for (Key<Model> modelKey : modelKeys)
models[i++] = getGet(modelKey);
return models;
}
/**
* @return list of keys of models sorted by the default metric for the model category, fetched from the DKV
*/
public Key<Model>[] modelKeys() {
return ((Leaderboard)DKV.getGet(this._key)).models;
}
/**
* @return list of models sorted by the default metric for the model category
*/
public Model[] models() {
Key<Model>[] modelKeys = modelKeys();
if (modelKeys == null || 0 == modelKeys.length) return new Model[0];
Model[] models = new Model[modelKeys.length];
return modelsForModelKeys(modelKeys, models);
}
public Model leader() {
Key<Model>[] modelKeys = modelKeys();
if (modelKeys == null || 0 == modelKeys.length) return null;
return modelKeys[0].get();
}
/**
* Delete everything in the DKV that this points to. We currently need to be able to call this after deleteWithChildren().
*/
public void delete() {
remove();
}
public void deleteWithChildren() {
for (Model m : models())
m.delete();
delete();
}
public static String toString(Model[] models) {
return toString(null, models, "\n");
}
public static String toString(String project, Model[] models) {
return toString(project, models, "\n");
}
public static String toString(String project, Model[] models, String separator) {
StringBuilder sb = new StringBuilder("Leaderboard for project \"" + project + "\": ");
if (models.length == 0) {
sb.append("<empty>");
return sb.toString();
}
sb.append(separator);
for (Model m : models) {
sb.append(m._key.toString());
sb.append(" ");
// TODO: allow the metric to be passed in. Note that this assumes the validation (or training) frame is the same.
// TODO: if validation metrics are available, print those.
if (m._output.isBinomialClassifier()) {
sb.append("auc: ");
sb.append(m.auc());
} else if (m._output.isClassifier()) {
sb.append("mean per class error: ");
sb.append(m.mean_per_class_error());
} else if (m._output.isSupervised()) {
sb.append("mean residual deviance: ");
sb.append(m.deviance());
}
sb.append(separator);
}
return sb.toString();
}
public String toString(String separator) {
return toString(project, models(), separator);
}
@Override
public String toString() {
return toString(" | ");
}
}
|
// $Id: TileSetBundler.java,v 1.13 2003/01/24 21:51:26 mdb Exp $
package com.threerings.media.tile.bundle.tools;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import javax.imageio.ImageIO;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import org.xml.sax.SAXException;
import org.apache.commons.digester.Digester;
import org.apache.commons.digester.Rule;
import org.apache.commons.digester.RuleSetBase;
import org.apache.commons.io.StreamUtils;
import com.samskivert.io.NestableIOException;
import com.samskivert.io.PersistenceException;
import com.threerings.media.Log;
import com.threerings.media.image.Colorization;
import com.threerings.media.image.ImageUtil;
import com.threerings.media.image.Mirage;
import com.threerings.media.tile.ObjectTileSet;
import com.threerings.media.tile.SimpleCachingImageProvider;
import com.threerings.media.tile.TileSet;
import com.threerings.media.tile.TileSetIDBroker;
import com.threerings.media.tile.TrimmedObjectTileSet;
import com.threerings.media.tile.UniformTileSet;
import com.threerings.media.tile.bundle.BundleUtil;
import com.threerings.media.tile.bundle.TileSetBundle;
import com.threerings.media.tile.tools.xml.TileSetRuleSet;
/**
* The tileset bundler is used to create tileset bundles from a set of XML
* tileset descriptions in a bundle description file. The bundles contain
* a serialized representation of the tileset objects along with the
* actual image files referenced by those tilesets.
*
* <p> The organization of the bundle description file is customizable
* based on an XML configuration file provided to the tileset bundler
* when constructed. The bundler configuration maps XML paths to tileset
* parsers. An example configuration follows:
*
* <pre>
* <bundler-config>
* <mapping>
* <path>bundle.tilesets.uniform</path>
* <ruleset>
* com.threerings.media.tile.tools.xml.UniformTileSetRuleSet
* </ruleset>
* </mapping>
* <mapping>
* <path>bundle.tilesets.object</path>
* <ruleset>
* com.threerings.media.tile.tools.xml.ObjectTileSetRuleSet
* </ruleset>
* </mapping>
* </bundler-config>
* </pre>
*
* This configuration would be used to parse a bundle description that
* looked something like the following:
*
* <pre>
* <bundle>
* <tilesets>
* <uniform>
* <tileset>
* <!-- ... -->
* </tileset>
* </uniform>
* <object>
* <tileset>
* <!-- ... -->
* </tileset>
* </object>
* </tilesets>
* </pre>
*
* The class specified in the <code>ruleset</code> element must derive
* from {@link TileSetRuleSet}. The images that will be included in the
* bundle must be in the same directory as the bundle description file and
* the tileset descriptions must reference the images without a preceding
* path.
*/
public class TileSetBundler
{
/**
* Constructs a tileset bundler with the specified path to a bundler
* configuration file. The configuration file will be loaded and used
* to configure this tileset bundler.
*/
public TileSetBundler (String configPath)
throws IOException
{
this(new File(configPath));
}
/**
* Constructs a tileset bundler with the specified bundler config
* file.
*/
public TileSetBundler (File configFile)
throws IOException
{
// we parse our configuration with a digester
Digester digester = new Digester();
// push our mappings array onto the stack
ArrayList mappings = new ArrayList();
digester.push(mappings);
// create a mapping object for each mapping entry and append it to
// our mapping list
digester.addObjectCreate("bundler-config/mapping",
Mapping.class.getName());
digester.addSetNext("bundler-config/mapping",
"add", "java.lang.Object");
// configure each mapping object with the path and ruleset
digester.addCallMethod("bundler-config/mapping", "init", 2);
digester.addCallParam("bundler-config/mapping/path", 0);
digester.addCallParam("bundler-config/mapping/ruleset", 1);
// now go like the wind
FileInputStream fin = new FileInputStream(configFile);
try {
digester.parse(fin);
} catch (SAXException saxe) {
String errmsg = "Failure parsing bundler config file " +
"[file=" + configFile.getPath() + "]";
throw new NestableIOException(errmsg, saxe);
} finally {
// ensure the stream is closed even if parsing fails
fin.close();
}
// create our digester
_digester = new Digester();
// use the mappings we parsed to configure our actual digester
int msize = mappings.size();
for (int i = 0; i < msize; i++) {
Mapping map = (Mapping)mappings.get(i);
try {
TileSetRuleSet ruleset = (TileSetRuleSet)
Class.forName(map.ruleset).newInstance();
// configure the ruleset
ruleset.setPrefix(map.path);
// add it to the digester
_digester.addRuleSet(ruleset);
// and add a rule to stick the parsed tilesets onto the
// end of an array list that we'll put on the stack
_digester.addSetNext(map.path + TileSetRuleSet.TILESET_PATH,
"add", "java.lang.Object");
} catch (Exception e) {
String errmsg = "Unable to create tileset rule set " +
"instance [mapping=" + map + "].";
throw new NestableIOException(errmsg, e);
}
}
}
/**
* Creates a tileset bundle at the location specified by the
* <code>targetPath</code> parameter, based on the description
* provided via the <code>bundleDesc</code> parameter.
*
* @param idBroker the tileset id broker that will be used to map
* tileset names to tileset ids.
* @param bundleDesc a file object pointing to the bundle description
* file.
* @param targetPath the path of the tileset bundle file that will be
* created.
*
* @exception IOException thrown if an error occurs reading, writing
* or processing anything.
*/
public void createBundle (
TileSetIDBroker idBroker, File bundleDesc, String targetPath)
throws IOException
{
createBundle(idBroker, bundleDesc, new File(targetPath));
}
/**
* Creates a tileset bundle at the location specified by the
* <code>targetPath</code> parameter, based on the description
* provided via the <code>bundleDesc</code> parameter.
*
* @param idBroker the tileset id broker that will be used to map
* tileset names to tileset ids.
* @param bundleDesc a file object pointing to the bundle description
* file.
* @param target the tileset bundle file that will be created.
*
* @exception IOException thrown if an error occurs reading, writing
* or processing anything.
*/
public void createBundle (
TileSetIDBroker idBroker, final File bundleDesc, File target)
throws IOException
{
// stick an array list on the top of the stack into which we will
// collect parsed tilesets
ArrayList sets = new ArrayList();
_digester.push(sets);
// parse the tilesets
FileInputStream fin = new FileInputStream(bundleDesc);
try {
_digester.parse(fin);
} catch (SAXException saxe) {
String errmsg = "Failure parsing bundle description file " +
"[path=" + bundleDesc.getPath() + "]";
throw new NestableIOException(errmsg, saxe);
} finally {
fin.close();
}
// create a tileset bundle to hold our tilesets
TileSetBundle bundle = new TileSetBundle();
// add all of the parsed tilesets to the tileset bundle
try {
for (int i = 0; i < sets.size(); i++) {
TileSet set = (TileSet)sets.get(i);
String name = set.getName();
// let's be robust
if (name == null) {
Log.warning("Tileset was parsed, but received no name " +
"[set=" + set + "]. Skipping.");
continue;
}
// assign a tileset id to the tileset and bundle it
try {
int tileSetId = idBroker.getTileSetID(name);
bundle.addTileSet(tileSetId, set);
} catch (PersistenceException pe) {
String errmsg = "Failure obtaining a tileset id for " +
"tileset [set=" + set + "].";
throw new NestableIOException(errmsg, pe);
}
}
// clear out our array list in preparation for another go
sets.clear();
} finally {
// before we go, we have to commit our brokered tileset ids
// back to the broker's persistent store
try {
idBroker.commit();
} catch (PersistenceException pe) {
Log.warning("Failure committing brokered tileset ids " +
"back to broker's persistent store " +
"[error=" + pe + "].");
}
}
// now we have to create the actual bundle file
FileOutputStream fout = new FileOutputStream(target);
Manifest manifest = new Manifest();
JarOutputStream jar = new JarOutputStream(fout, manifest);
// create an image provider for loading our tileset images
SimpleCachingImageProvider improv = new SimpleCachingImageProvider() {
protected BufferedImage loadImage (String path)
throws IOException {
return ImageIO.read(new File(bundleDesc.getParent(), path));
}
};
try {
// write all of the image files to the bundle, converting the
// tilesets to trimmed tilesets in the process
Iterator iditer = bundle.enumerateTileSetIds();
while (iditer.hasNext()) {
int tileSetId = ((Integer)iditer.next()).intValue();
TileSet set = bundle.getTileSet(tileSetId);
String imagePath = set.getImagePath();
// sanity checks
if (imagePath == null) {
Log.warning("Tileset contains no image path " +
"[set=" + set + "]. It ain't gonna work.");
continue;
}
// let the jar file know what's coming
jar.putNextEntry(new JarEntry(imagePath));
// if this is an object tileset, we can't trim it!
if (set instanceof ObjectTileSet) {
// set the tileset up with an image provider; we
// need to do this so that we can trim it!
set.setImageProvider(improv);
try {
// create a trimmed object tileset, which will
// write the trimmed tileset image to the jar
// output stream
TrimmedObjectTileSet tset =
TrimmedObjectTileSet.trimObjectTileSet(
(ObjectTileSet)set, jar);
tset.setImagePath(imagePath);
// replace the original set with the trimmed
// tileset in the tileset bundle
bundle.addTileSet(tileSetId, tset);
} catch (Exception e) {
System.err.println("Error adding tileset to bundle " +
"[set=" + set.getName() +
", ipath=" + imagePath + "].");
e.printStackTrace(System.err);
// replace the tileset with an error tileset
UniformTileSet ets = new UniformTileSet();
ets.setName(set.getName());
ets.setWidth(50);
ets.setHeight(50);
ets.setTileCount(1);
ets.setImagePath(imagePath);
bundle.addTileSet(tileSetId, ets);
// and write an error image to the jar file
ImageIO.write(ImageUtil.createErrorImage(50, 50),
"PNG", jar);
}
} else {
// open the image and pipe it into the jar file
File imgfile = new File(
bundleDesc.getParent(), imagePath);
FileInputStream imgin = new FileInputStream(imgfile);
StreamUtils.pipe(imgin, jar);
}
}
// now write a serialized representation of the tileset bundle
// object to the bundle jar file
JarEntry entry = new JarEntry(BundleUtil.METADATA_PATH);
jar.putNextEntry(entry);
ObjectOutputStream oout = new ObjectOutputStream(jar);
oout.writeObject(bundle);
oout.flush();
// finally close up the jar file and call ourself done
jar.close();
} catch (IOException ioe) {
// remove the incomplete jar file and rethrow the exception
fout.close();
target.delete();
throw ioe;
}
}
/** Used to parse our configuration. */
public static class Mapping
{
public String path;
public String ruleset;
public void init (String path, String ruleset)
{
this.path = path;
this.ruleset = ruleset;
}
public String toString ()
{
return "[path=" + path + ", ruleset=" + ruleset + "]";
}
}
/** The digester we use to parse bundle descriptions. */
protected Digester _digester;
}
|
package io.webfolder.cdp.event.network;
import java.util.HashMap;
import java.util.Map;
import io.webfolder.cdp.annotation.Domain;
import io.webfolder.cdp.annotation.EventName;
import io.webfolder.cdp.annotation.Experimental;
import io.webfolder.cdp.type.network.Request;
import io.webfolder.cdp.type.page.ResourceType;
/**
* Details of an intercepted HTTP request, which must be either allowed, blocked, modified or mocked
*/
@Experimental
@Domain("Network")
@EventName("requestIntercepted")
public class RequestIntercepted {
private String interceptionId;
private Request request;
private ResourceType resourceType;
private Map<String, Object> redirectHeaders = new HashMap<>();
private Integer redirectStatusCode;
private String redirectUrl;
/**
* Each request the page makes will have a unique id; however, if any redirects are encountered while processing that fetch, they will be reported with the same id as the original fetch.
*/
public String getInterceptionId() {
return interceptionId;
}
/**
* Each request the page makes will have a unique id; however, if any redirects are encountered while processing that fetch, they will be reported with the same id as the original fetch.
*/
public void setInterceptionId(String interceptionId) {
this.interceptionId = interceptionId;
}
public Request getRequest() {
return request;
}
public void setRequest(Request request) {
this.request = request;
}
/**
* How the requested resource will be used.
*/
public ResourceType getResourceType() {
return resourceType;
}
/**
* How the requested resource will be used.
*/
public void setResourceType(ResourceType resourceType) {
this.resourceType = resourceType;
}
/**
* HTTP response headers, only sent if a redirect was intercepted.
*/
public Map<String, Object> getRedirectHeaders() {
return redirectHeaders;
}
/**
* HTTP response headers, only sent if a redirect was intercepted.
*/
public void setRedirectHeaders(Map<String, Object> redirectHeaders) {
this.redirectHeaders = redirectHeaders;
}
/**
* HTTP response code, only sent if a redirect was intercepted.
*/
public Integer getRedirectStatusCode() {
return redirectStatusCode;
}
/**
* HTTP response code, only sent if a redirect was intercepted.
*/
public void setRedirectStatusCode(Integer redirectStatusCode) {
this.redirectStatusCode = redirectStatusCode;
}
/**
* Redirect location, only sent if a redirect was intercepted.
*/
public String getRedirectUrl() {
return redirectUrl;
}
/**
* Redirect location, only sent if a redirect was intercepted.
*/
public void setRedirectUrl(String redirectUrl) {
this.redirectUrl = redirectUrl;
}
}
|
package rhogenwizard.launcher;
import java.util.concurrent.atomic.AtomicBoolean;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.debug.core.DebugEvent;
import org.eclipse.debug.core.IDebugEventSetListener;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.core.ILaunchManager;
import org.eclipse.debug.core.model.IProcess;
import org.eclipse.debug.core.model.LaunchConfigurationDelegate;
import org.eclipse.debug.internal.ui.DebugUIPlugin;
import org.eclipse.debug.internal.ui.preferences.IDebugPreferenceConstants;
import org.eclipse.jface.preference.IPreferenceStore;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
import rhogenwizard.Activator;
import rhogenwizard.ConsoleHelper;
import rhogenwizard.DialogUtils;
import rhogenwizard.LogFileHelper;
import rhogenwizard.OSHelper;
import rhogenwizard.PlatformType;
import rhogenwizard.ProcessListViewer;
import rhogenwizard.RunExeHelper;
import rhogenwizard.RunType;
import rhogenwizard.ShowPerspectiveJob;
import rhogenwizard.constants.ConfigurationConstants;
import rhogenwizard.constants.DebugConstants;
import rhogenwizard.debugger.model.RhogenDebugTarget;
import rhogenwizard.rhohub.RhoHub;
import rhogenwizard.sdk.task.CleanPlatformTask;
import rhogenwizard.sdk.task.RunTask;
import rhogenwizard.sdk.task.run.RunDebugRhodesAppTask;
import rhogenwizard.sdk.task.run.RunReleaseRhodesAppTask;
public class LaunchDelegateBase extends LaunchConfigurationDelegate implements IDebugEventSetListener
{
private static LogFileHelper rhodesLogHelper = new LogFileHelper();
protected String m_projectName = null;
private String m_runType = null;
private String m_platformType = null;
private boolean m_isClean = false;
private boolean m_isReloadCode = false;
private boolean m_isTrace = false;
private boolean m_isRhohubBuild = false;
private AtomicBoolean m_buildFinished = new AtomicBoolean();
private IProcess m_debugProcess = null;
private final String m_startPathOverride;
private final String[] m_additionalRubyExtensions;
public LaunchDelegateBase(String startPathOverride, String[] additionalRubyExtensions)
{
m_startPathOverride = startPathOverride;
m_additionalRubyExtensions = additionalRubyExtensions;
}
private void setProcessFinished(boolean b)
{
m_buildFinished.set(b);
}
private boolean getProcessFinished()
{
return m_buildFinished.get();
}
private void releaseBuild(IProject project, RunType type) throws Exception
{
ConsoleHelper.Stream stream = ConsoleHelper.getBuildConsole().getStream();
Activator activator = Activator.getDefault();
activator.killProcessesForForRunReleaseRhodesAppTask();
ProcessListViewer rhosims = new ProcessListViewer("/RhoSimulator/rhosimulator.exe -approot=\'");
if (!runSelectedBuildConfiguration(project, type))
{
stream.println("Error in build application");
setProcessFinished(true);
return;
}
activator.storeProcessesForForRunReleaseRhodesAppTask(rhosims.getNewProcesses());
}
private IProcess debugBuild(IProject project, RunType type, ILaunch launch) throws Exception
{
ConsoleHelper.Stream stream = ConsoleHelper.getBuildConsole().getStream();
m_debugProcess = debugSelectedBuildConfiguration(project, type, launch);
if (m_debugProcess == null)
{
stream.println("Error in build application");
setProcessFinished(true);
}
return m_debugProcess;
}
public void startBuildThread(final IProject project, final String mode, final ILaunch launch)
{
final RunType type = RunType.fromString(m_runType);
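// Run the build on a background thread; launch() polls m_buildFinished and the progress monitor so the build can be cancelled.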
Thread cancelingThread = new Thread(new Runnable()
{
@Override
public void run()
{
try
{
ConsoleHelper.Stream stream = ConsoleHelper.getBuildConsole().getStream();
stream.println("build started");
if (mode.equals(ILaunchManager.DEBUG_MODE))
{
debugBuild(project, type, launch);
}
else
{
releaseBuild(project, type);
}
rhodesLogHelper.startLog(PlatformType.fromString(m_platformType), project, type);
}
catch (Exception e)
{
e.printStackTrace();
}
ConsoleHelper.getAppConsole().show();
setProcessFinished(true);
}
});
cancelingThread.start();
}
private boolean runSelectedBuildConfiguration(IProject currProject, RunType selType) throws Exception
{
RunTask task = new RunReleaseRhodesAppTask(currProject.getLocation().toOSString(),
PlatformType.fromString(m_platformType), selType, m_isReloadCode, m_isTrace, m_startPathOverride,
m_additionalRubyExtensions);
task.run();
return task.isOk();
}
private IProcess debugSelectedBuildConfiguration(IProject currProject, RunType selType, ILaunch launch) throws Exception
{
RunDebugRhodesAppTask task = new RunDebugRhodesAppTask(launch, currProject.getLocation().toOSString(),
currProject.getName(), PlatformType.fromString(m_platformType), m_isReloadCode, m_isTrace,
m_startPathOverride, m_additionalRubyExtensions);
task.run();
return task.getDebugProcess();
}
protected void setupConfigAttributes(ILaunchConfiguration configuration) throws CoreException
{
m_projectName = configuration.getAttribute(ConfigurationConstants.projectNameCfgAttribute, "");
m_platformType = configuration.getAttribute(ConfigurationConstants.platforrmCfgAttribute, "");
m_isClean = configuration.getAttribute(ConfigurationConstants.isCleanAttribute, false);
m_runType = configuration.getAttribute(ConfigurationConstants.simulatorType, "");
m_isReloadCode = configuration.getAttribute(ConfigurationConstants.isReloadCodeAttribute, false);
m_isTrace = configuration.getAttribute(ConfigurationConstants.isTraceAttribute, false);
}
private void cleanSelectedPlatform(IProject project, boolean isClean, IProgressMonitor monitor)
{
if (isClean)
{
RunTask task = new CleanPlatformTask(project.getLocation().toOSString(), PlatformType.fromString(m_platformType));
task.run(monitor);
}
}
@SuppressWarnings("deprecation")
public synchronized void launch(ILaunchConfiguration configuration, String mode, ILaunch launch, final IProgressMonitor monitor) throws CoreException
{
setupConfigAttributes(configuration);
final IProject project = ResourcesPlugin.getWorkspace().getRoot().getProject(m_projectName);
// if (m_isRhohubBuild)
// launchRemoteProject(project, configuration, mode, launch, monitor);
// return; // TODO: this is a temporary statement
launchLocalProject(project, configuration, mode, launch, monitor);
}
@SuppressWarnings("deprecation")
public synchronized void launchRemoteProject(IProject project, ILaunchConfiguration configuration, String mode, ILaunch launch, final IProgressMonitor monitor) throws CoreException
{
//RhoHub.getInstance(configuration).findRemoteApp(project);
}
@SuppressWarnings("deprecation")
public synchronized void launchLocalProject(IProject project, ILaunchConfiguration configuration, String mode, ILaunch launch, final IProgressMonitor monitor) throws CoreException
{
try
{
RhogenDebugTarget target = null;
setProcessFinished(false);
rhodesLogHelper.stopLog();
setStandartConsoleOutputIsOff();
ConsoleHelper.getBuildConsole().clear();
ConsoleHelper.getBuildConsole().show();
setupConfigAttributes(configuration);
PlatformType currPlType = PlatformType.fromString(m_platformType);
// stop blackberry simulator
if (OSHelper.isWindows() && currPlType == PlatformType.eBb)
{
RunExeHelper.killBbSimulator();
}
if (m_projectName == null || m_projectName.length() == 0 || m_runType == null || m_runType.length() == 0)
{
throw new IllegalArgumentException("Platform and project name should be assigned");
}
if (!project.isOpen())
{
throw new IllegalArgumentException("Project " + project.getName() + " not found");
}
if (mode.equals(ILaunchManager.DEBUG_MODE))
{
ShowPerspectiveJob job = new ShowPerspectiveJob("show debug perspective", DebugConstants.debugPerspectiveId);
job.schedule();
try
{
OSHelper.killProcess("rhosimulator");
}
catch (Exception e)
{
e.printStackTrace();
}
target = new RhogenDebugTarget(launch, null, project);
}
try
{
cleanSelectedPlatform(project, m_isClean, monitor);
startBuildThread(project, mode, launch);
while(true)
{
try
{
if (monitor.isCanceled())
{
OSHelper.killProcess("ruby");
return;
}
if (getProcessFinished())
{
break;
}
Thread.sleep(100);
}
catch (InterruptedException e)
{
e.printStackTrace();
}
}
}
catch(IllegalArgumentException e)
{
Activator.logError(e);
}
catch (Exception e)
{
e.printStackTrace();
}
monitor.done();
if (mode.equals(ILaunchManager.DEBUG_MODE))
{
target.setProcess(m_debugProcess);
launch.addDebugTarget(target);
}
}
catch (IllegalArgumentException e)
{
DialogUtils.error("Error", e.getMessage());
}
}
void setStandartConsoleOutputIsOff()
{
IPreferenceStore prefs = DebugUIPlugin.getDefault().getPreferenceStore();
prefs.setDefault(IDebugPreferenceConstants.CONSOLE_OPEN_ON_OUT, false);
prefs.setDefault(IDebugPreferenceConstants.CONSOLE_OPEN_ON_ERR, false);
prefs.setValue(IDebugPreferenceConstants.CONSOLE_OPEN_ON_OUT, false);
prefs.setValue(IDebugPreferenceConstants.CONSOLE_OPEN_ON_ERR, false);
}
@Override
public void handleDebugEvents(DebugEvent[] events)
{
}
}
|
package com.couchbase.cblite;
import com.couchbase.cblite.internal.CBLBody;
import java.util.HashMap;
import java.util.Map;
public class CBLNewRevision extends CBLRevisionBase {
// private CBLBody body;
private String parentRevID;
private Map<String, Object> properties;
protected CBLNewRevision(CBLDocument document, CBLRevision parentRevision) {
super(document);
parentRevID = parentRevision.getId();
Map<String, Object> parentRevisionProperties = parentRevision.getProperties();
if (parentRevisionProperties == null) {
properties = new HashMap<String, Object>();
properties.put("_id", document.getId());
properties.put("_rev", parentRevID);
}
else {
properties = new HashMap<String, Object>(parentRevisionProperties);
}
}
public void setProperties(Map<String,Object> properties) {
this.properties = properties;
}
public Map<String, Object> getProperties() {
return properties;
}
public void setDeleted(boolean deleted) {
if (deleted) {
properties.put("_deleted", true);
} else {
properties.remove("_deleted");
}
}
public CBLRevision getParentRevision() {
if (parentRevID == null || parentRevID.length() == 0) {
return null;
}
return document.getRevision(parentRevID);
}
public String getParentRevisionId() {
return parentRevID;
}
public CBLRevision save() throws CBLiteException {
return document.putProperties(properties, parentRevID);
}
public void addAttachment(CBLAttachment attachment, String name) {
Map<String, Object> attachments = (Map<String, Object>) properties.get("_attachments");
if (attachments == null) attachments = new HashMap<String, Object>();
if (attachment == null) {
// a null attachment removes the named entry (see removeAttachmentNamed)
attachments.remove(name);
} else {
attachments.put(name, attachment);
attachment.setName(name);
attachment.setRevision(this);
}
properties.put("_attachments", attachments);
}
public void removeAttachmentNamed(String name) {
addAttachment(null, name);
}
}
|
package roart.util;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.ParseException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import roart.iclij.config.IclijConfig;
import roart.iclij.config.IclijXMLConfig;
import roart.iclij.config.Market;
import roart.iclij.config.MarketFilter;
import roart.iclij.factory.actioncomponentconfig.ActionComponentConfigFactory;
import roart.iclij.factory.actioncomponentconfig.ComponentMap;
import roart.iclij.util.TrendUtil;
import roart.iclij.util.VerifyProfitUtil;
import roart.common.config.ConfigConstants;
import roart.common.config.MyConfig;
import roart.common.constants.Constants;
import roart.common.model.MetaItem;
import roart.common.pipeline.PipelineConstants;
import roart.common.util.JsonUtil;
import roart.common.util.MathUtil;
import roart.common.util.MetaUtil;
import roart.common.util.TimeUtil;
import roart.constants.IclijConstants;
import roart.constants.IclijPipelineConstants;
import roart.db.IclijDbDao;
import roart.iclij.model.ConfigItem;
import roart.iclij.model.IncDecItem;
import roart.iclij.model.MLMetricsItem;
import roart.iclij.model.MapList;
import roart.iclij.model.MemoryItem;
import roart.iclij.model.Parameters;
import roart.iclij.model.TimingItem;
import roart.iclij.model.Trend;
import roart.iclij.model.WebData;
import roart.iclij.model.action.DatasetActionData;
import roart.iclij.model.action.EvolveActionData;
import roart.iclij.model.action.ImproveFilterActionData;
import roart.iclij.model.action.ImproveProfitActionData;
import roart.iclij.model.action.MachineLearningActionData;
import roart.iclij.model.action.FindProfitActionData;
import roart.iclij.model.action.CrossTestActionData;
import roart.iclij.model.action.MarketActionData;
import roart.iclij.model.component.ComponentInput;
import roart.iclij.model.config.ActionComponentConfig;
import roart.iclij.util.MLUtil;
import roart.iclij.util.MarketUtil;
import roart.iclij.util.MiscUtil;
import roart.iclij.util.RelationUtil;
import roart.iclij.service.ControlService;
import roart.iclij.service.IclijServiceList;
import roart.iclij.service.IclijServiceResult;
public class ServiceUtil {
private static Logger log = LoggerFactory.getLogger(ServiceUtil.class);
public static double median(Set<Double> set) {
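// Median of the set: sort the values and take the middle element, or the mean of the two middle elements for an even count.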
Double[] scores = set.toArray(new Double[set.size()]);
Arrays.sort(scores);
//System.out.print("Sorted Scores: ");
for (double x : scores) {
//System.out.print(x + " ");
}
//System.out.println("");
// Calculate median (middle number)
double median = 0;
double pos1 = Math.floor((scores.length - 1.0) / 2.0);
double pos2 = Math.ceil((scores.length - 1.0) / 2.0);
if (pos1 == pos2 ) {
median = scores[(int)pos1];
} else {
median = (scores[(int)pos1] + scores[(int)pos2]) / 2.0 ;
}
return median;
}
private static double median2(Set<Double> set) {
Double[] numArray = set.toArray(new Double[set.size()]);
Arrays.sort(numArray);
int middle = numArray.length / 2;
double medianValue;
if (numArray.length % 2 == 1) {
medianValue = numArray[middle];
} else {
medianValue = (numArray[middle - 1] + numArray[middle]) / 2;
}
return medianValue;
}
@Deprecated
public static String getWantedCategory(Map<String, Map<String, Object>> maps, String type) throws Exception {
List<String> wantedList = new ArrayList<>();
wantedList.add(Constants.PRICE);
wantedList.add(Constants.INDEX);
//wantedList.add("cy");
String cat = null;
for (String wanted : wantedList) {
Map<String, Object> map = maps.get(wanted);
if (map != null) {
if (map.containsKey(type)) {
LinkedHashMap<String, Object> tmpMap = (LinkedHashMap<String, Object>) map.get(type);
if (tmpMap.get(PipelineConstants.RESULT) != null) {
return wanted;
}
}
}
}
return cat;
}
public static String getWantedCategory2(Map<String, Map<String, Object>> maps, String type) throws Exception {
System.out.println(maps.keySet());
List<String> wantedList = new ArrayList<>();
wantedList.add(Constants.PRICE);
wantedList.add(Constants.INDEX);
//wantedList.add("cy");
String cat = null;
for (String wanted : wantedList) {
Map<String, Object> map = maps.get(wanted + " " + type.toUpperCase());
if (map != null) {
return wanted;
}
}
for (String key : maps.keySet()) {
if (key.endsWith(" " + type.toUpperCase())) {
return key.substring(0, key.length() - 1 - type.length());
}
}
return cat;
}
public static IclijServiceResult getConfig() throws Exception {
IclijXMLConfig conf = IclijXMLConfig.instance();
IclijConfig instance = IclijXMLConfig.getConfigInstance();
IclijServiceResult result = new IclijServiceResult();
result.setIclijConfig(instance);
return result;
}
public static IclijServiceResult getContent(ComponentInput componentInput) throws Exception {
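// Builds the "content" report: per-market trends, current increase/decrease recommendations, timings, memory updates and relations.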
IclijServiceResult result = new IclijServiceResult();
FindProfitActionData findProfitActionData = new FindProfitActionData();
LocalDate date = componentInput.getEnddate();
IclijXMLConfig i = new IclijXMLConfig();
IclijXMLConfig conf = IclijXMLConfig.instance();
IclijConfig instance = IclijXMLConfig.getConfigInstance();
List<IncDecItem> listAll = IclijDbDao.getAllIncDecs();
List<IncDecItem> listRel = new ArrayList<>();
List<IclijServiceList> lists = new ArrayList<>();
lists.add(getHeader("Content"));
Map<String, Object> trendMap = new HashMap<>();
List<Market> markets = conf.getMarkets(instance);
markets = new MarketUtil().filterMarkets(markets, findProfitActionData.isDataset());
for (Market market : markets) {
ControlService srv = new ControlService();
srv.getConfig();
srv.conf.setMarket(market.getConfig().getMarket());
// the market may be incomplete; catch the exception and skip it
try {
Short mystartoffset = market.getConfig().getStartoffset();
short startoffset = mystartoffset != null ? mystartoffset : 0;
Trend trend = new TrendUtil().getTrend(instance.verificationDays(), date, srv, startoffset);
trendMap.put(market.getConfig().getMarket(), trend);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
}
List<IncDecItem> allCurrentIncDecs = new MiscUtil().getCurrentIncDecs(date, listAll, market, market.getConfig().getFindtime());
listRel.addAll(allCurrentIncDecs);
Map<String, List<IncDecItem>> currentIncDecMap = splitParam(allCurrentIncDecs);
for (Entry<String, List<IncDecItem>> entry : currentIncDecMap.entrySet()) {
String key = entry.getKey();
List<IncDecItem> currentIncDecs = entry.getValue();
List<IncDecItem> listInc = currentIncDecs.stream().filter(m -> m.isIncrease()).collect(Collectors.toList());
List<IncDecItem> listDec = currentIncDecs.stream().filter(m -> !m.isIncrease()).collect(Collectors.toList());
listInc = mergeList(listInc, false);
listDec = mergeList(listDec, false);
List<IncDecItem> listIncDec = new MiscUtil().moveAndGetCommon(listInc, listDec, true);
List<IclijServiceList> subLists = getServiceList(market.getConfig().getMarket(), key, listInc, listDec, listIncDec);
lists.addAll(subLists);
}
}
result.setLists(lists);
getContentTimings(date, lists, markets, findProfitActionData);
Map<String, Map<String, Object>> updateMarketMap = new HashMap<>();
Map<String, Object> updateMap = new HashMap<>();
//getUpdateMarkets(componentInput, param, updateMarketMap, updateMap, findProfitActionData);
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
getContentMemoriesUpdates(componentInput, lists, updateMarketMap, findProfitActionData, true);
IclijServiceList trends = convert(trendMap);
lists.add(trends);
listRel = mergeList(listRel, false);
roundList(listRel);
addRelations(componentInput, lists, listRel);
new MiscUtil().print(result);
return result;
}
private static void addRelations(ComponentInput componentInput, List<IclijServiceList> lists, List<IncDecItem> listIncDecs) throws Exception {
List[] objects = new RelationUtil().method(componentInput, listIncDecs);
IclijServiceList incdecs = new IclijServiceList();
incdecs.setTitle("Incdecs with relations");
incdecs.setList(objects[0]);
IclijServiceList relations = new IclijServiceList();
relations.setTitle("Relations");
relations.setList(objects[1]);
lists.add(incdecs);
lists.add(relations);
}
public static IclijServiceResult getContentImprove(ComponentInput componentInput) throws Exception {
ImproveProfitActionData improveProfitActionData = new ImproveProfitActionData();
LocalDate date = componentInput.getEnddate();
IclijXMLConfig i = new IclijXMLConfig();
IclijXMLConfig conf = IclijXMLConfig.instance();
IclijConfig instance = IclijXMLConfig.getConfigInstance();
List<IncDecItem> listAll = IclijDbDao.getAllIncDecs();
List<IclijServiceList> lists = new ArrayList<>();
lists.add(getHeader("Content"));
List<Market> markets = conf.getMarkets(instance);
markets = new MarketUtil().filterMarkets(markets, improveProfitActionData.isDataset());
IclijServiceResult result = new IclijServiceResult();
result.setLists(lists);
ControlService srv = null;
try {
srv = getService(componentInput, 0);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
return result;
}
getContentTimings(date, lists, markets, improveProfitActionData);
Map<String, Map<String, Object>> updateMarketMap = new HashMap<>();
Map<String, Object> updateMap = new HashMap<>();
getUpdateMarkets(componentInput, srv, updateMarketMap, updateMap, improveProfitActionData);
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
getContentMemoriesUpdates(componentInput, lists, updateMarketMap, improveProfitActionData, false);
new MiscUtil().print(result);
return result;
}
public static IclijServiceResult getContentFilter(ComponentInput componentInput) throws Exception {
ImproveFilterActionData improveFilterActionData = new ImproveFilterActionData();
LocalDate date = componentInput.getEnddate();
IclijXMLConfig i = new IclijXMLConfig();
IclijXMLConfig conf = IclijXMLConfig.instance();
IclijConfig instance = IclijXMLConfig.getConfigInstance();
List<IncDecItem> listAll = IclijDbDao.getAllIncDecs();
List<IclijServiceList> lists = new ArrayList<>();
lists.add(getHeader("Content"));
List<Market> markets = conf.getMarkets(instance);
markets = new MarketUtil().filterMarkets(markets, improveFilterActionData.isDataset());
IclijServiceResult result = new IclijServiceResult();
result.setLists(lists);
ControlService srv = null;
try {
srv = getService(componentInput, 0);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
return result;
}
getContentTimings(date, lists, markets, improveFilterActionData);
Map<String, Map<String, Object>> updateMarketMap = new HashMap<>();
Map<String, Object> updateMap = new HashMap<>();
getUpdateMarkets(componentInput, srv, updateMarketMap, updateMap, improveFilterActionData);
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
getContentMemoriesUpdates(componentInput, lists, updateMarketMap, improveFilterActionData, false);
new MiscUtil().print(result);
return result;
}
private static void getContentTimings(LocalDate date, List<IclijServiceList> lists, List<Market> markets, MarketActionData action)
throws Exception {
List<TimingItem> listAllTimings = IclijDbDao.getAllTiming();
for (Market market : markets) {
List<TimingItem> currentTimings = new MiscUtil().getCurrentTimings(date, listAllTimings, market, action.getName(), action.getTime(market));
List<IclijServiceList> subLists = getServiceList(market.getConfig().getMarket(), currentTimings);
lists.addAll(subLists);
}
}
private static void getContentMLTest(LocalDate date, List<IclijServiceList> lists, List<Market> markets, MarketActionData action)
throws Exception {
List<MLMetricsItem> listAllTimings = IclijDbDao.getAllMLMetrics();
for (Market market : markets) {
List<MLMetricsItem> currentTimings = new MiscUtil().getCurrentMLMetrics(date, listAllTimings, market, action.getTime(market));
List<IclijServiceList> subLists = getServiceList2(market.getConfig().getMarket(), currentTimings);
lists.addAll(subLists);
}
}
private static void getUpdateMarkets(ComponentInput componentInput, ControlService srv,
Map<String, Map<String, Object>> updateMarketMap, Map<String, Object> updateMap, MarketActionData actionData)
throws Exception {
//Market market = findProfitActionData.findMarket(param);
//String marketName = market.getConfig().getMarket();
long time0 = System.currentTimeMillis();
List<MetaItem> metas = srv.getMetas();
for (Market market : new MarketUtil().getMarkets(actionData.isDataset())) {
String marketName = market.getConfig().getMarket();
MetaItem meta = new MetaUtil().findMeta(metas, marketName);
boolean wantThree = meta != null && Boolean.TRUE.equals(meta.isLhc());
List<String> componentList = actionData.getComponents(componentInput.getConfig(), wantThree);
Map<Boolean, String> booleanTexts = actionData.getBooleanTexts();
Boolean[] booleans = actionData.getBooleans();
for (Boolean bool : booleans) {
updateMarketMap.put(market.getConfig().getMarket() + " " + booleanTexts.get(bool), new HashMap<>());
Map<String, ActionComponentConfig> componentMap = new ComponentMap().getComponentMap(componentList, actionData.getName());
for (ActionComponentConfig component : componentMap.values()) {
List<String> subcomponents = component.getSubComponents(market, componentInput.getConfig(), null);
for (String subcomponent : subcomponents) {
Map<String, Object> anUpdateMap = new MiscUtil().loadConfig(srv, componentInput, market, market.getConfig().getMarket(), actionData.getName(), actionData.getName(), false, bool, subcomponent, actionData, null);
updateMarketMap.get(market.getConfig().getMarket() + " " + booleanTexts.get(bool)).putAll(anUpdateMap);
updateMap.putAll(anUpdateMap);
}
}
}
}
log.info("Gettings {}", (System.currentTimeMillis() - time0) / 1000);
}
public static IclijServiceResult getContentEvolve(ComponentInput componentInput) throws Exception {
EvolveActionData evolveActionData = new EvolveActionData();
LocalDate date = componentInput.getEnddate();
IclijXMLConfig conf = IclijXMLConfig.instance();
IclijConfig instance = IclijXMLConfig.getConfigInstance();
List<IclijServiceList> lists = new ArrayList<>();
lists.add(getHeader("Content"));
List<Market> markets = conf.getMarkets(instance);
markets = new MarketUtil().filterMarkets(markets, evolveActionData.isDataset());
IclijServiceResult result = new IclijServiceResult();
result.setLists(lists);
ControlService srv = null;
try {
srv = getService(componentInput, 0);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
return result;
}
getContentTimings(date, lists, markets, evolveActionData);
Map<String, Map<String, Object>> updateMarketMap = new HashMap<>();
Map<String, Object> updateMap = new HashMap<>();
getUpdateMarkets(componentInput, srv, updateMarketMap, updateMap, evolveActionData);
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
getContentMemoriesUpdates(componentInput, lists, updateMarketMap, evolveActionData, false);
new MiscUtil().print(result);
return result;
}
public static IclijServiceResult getContentDataset(ComponentInput componentInput) throws Exception {
DatasetActionData datasetActionData = new DatasetActionData();
LocalDate date = componentInput.getEnddate();
IclijXMLConfig conf = IclijXMLConfig.instance();
IclijConfig instance = IclijXMLConfig.getConfigInstance();
List<IclijServiceList> lists = new ArrayList<>();
lists.add(getHeader("Content"));
List<Market> markets = conf.getMarkets(instance);
markets = new MarketUtil().filterMarkets(markets, datasetActionData.isDataset());
IclijServiceResult result = new IclijServiceResult();
result.setLists(lists);
ControlService srv = null;
try {
srv = getService(componentInput, 0);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
return result;
}
getContentTimings(date, lists, markets, datasetActionData);
Map<String, Map<String, Object>> updateMarketMap = new HashMap<>();
Map<String, Object> updateMap = new HashMap<>();
getUpdateMarkets(componentInput, srv, updateMarketMap, updateMap, datasetActionData);
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
getContentMemoriesUpdates(componentInput, lists, updateMarketMap, datasetActionData, false);
new MiscUtil().print(result);
return result;
}
public static IclijServiceResult getContentCrosstest(ComponentInput componentInput) throws Exception {
CrossTestActionData crossTestActionData = new CrossTestActionData();
LocalDate date = componentInput.getEnddate();
IclijXMLConfig conf = IclijXMLConfig.instance();
IclijConfig instance = IclijXMLConfig.getConfigInstance();
List<IclijServiceList> lists = new ArrayList<>();
lists.add(getHeader("Content"));
List<Market> markets = conf.getMarkets(instance);
markets = new MarketUtil().filterMarkets(markets, crossTestActionData.isDataset());
IclijServiceResult result = new IclijServiceResult();
result.setLists(lists);
ControlService srv = null;
try {
srv = getService(componentInput, 0);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
return result;
}
getContentTimings(date, lists, markets, crossTestActionData);
Map<String, Map<String, Object>> updateMarketMap = new HashMap<>();
Map<String, Object> updateMap = new HashMap<>();
getUpdateMarkets(componentInput, srv, updateMarketMap, updateMap, crossTestActionData);
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
getContentMemoriesUpdates(componentInput, lists, updateMarketMap, crossTestActionData, false);
new MiscUtil().print(result);
return result;
}
private static void getContentMemoriesUpdates(ComponentInput componentInput, List<IclijServiceList> lists,
Map<String, Map<String, Object>> updateMarketMap, MarketActionData actionData, boolean useMemory) {
for (Market market : new MarketUtil().getMarkets(actionData.isDataset())) {
//for (Entry<String, Map<String, Object>> entry : updateMarketMap.entrySet()) {
String marketName = market.getConfig().getMarket();
Map<Boolean, String> booleanTexts = actionData.getBooleanTexts();
Boolean[] booleans = actionData.getBooleans();
for (Boolean bool : booleans) {
Map<String, Object> anUpdateMap = updateMarketMap.get(marketName + " " + booleanTexts.get(bool));
if (anUpdateMap != null) {
IclijServiceList updates = convert(marketName + " " + booleanTexts.get(bool), anUpdateMap);
lists.add(updates);
}
}
if (!useMemory) {
continue;
}
List<MemoryItem> marketMemory = new MarketUtil().getMarketMemory(market);
List<MemoryItem> currentList = new MiscUtil().filterKeepRecent(marketMemory, componentInput.getEnddate(), actionData.getTime(market));
IclijServiceList memories = new IclijServiceList();
memories.setTitle("Memories " + marketName);
roundList3(currentList);
memories.setList(currentList);
lists.add(memories);
}
}
public static IclijServiceResult getContentMachineLearning(ComponentInput componentInput) throws Exception {
MachineLearningActionData mlActionData = new MachineLearningActionData();
LocalDate date = componentInput.getEnddate();
IclijXMLConfig conf = IclijXMLConfig.instance();
IclijConfig instance = IclijXMLConfig.getConfigInstance();
List<IclijServiceList> lists = new ArrayList<>();
lists.add(getHeader("Content"));
List<Market> markets = conf.getMarkets(instance);
markets = new MarketUtil().filterMarkets(markets, mlActionData.isDataset());
IclijServiceResult result = new IclijServiceResult();
result.setLists(lists);
getContentTimings(date, lists, markets, mlActionData);
getContentMLTest(date, lists, markets, mlActionData);
Map<String, Map<String, Object>> updateMarketMap = new HashMap<>();
Map<String, Object> updateMap = new HashMap<>();
//getUpdateMarkets(componentInput, param, updateMarketMap, updateMap, mlActionData);
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
getContentMemoriesUpdates(componentInput, lists, updateMarketMap, mlActionData, false);
new MiscUtil().print(result);
return result;
}
static IclijServiceList getHeader(String title) {
IclijServiceList header = new IclijServiceList();
header.setTitle(title);
return header;
}
private static List<IclijServiceList> getServiceList(String market, List<TimingItem> listIncDec) {
List<IclijServiceList> subLists = new ArrayList<>();
roundList2(listIncDec);
if (!listIncDec.isEmpty()) {
IclijServiceList incDec = new IclijServiceList();
incDec.setTitle(market + " " + "timing" + " " + listIncDec.stream().mapToDouble(TimingItem::getMytime).summaryStatistics());
incDec.setList(listIncDec);
subLists.add(incDec);
}
return subLists;
}
private static List<IclijServiceList> getServiceList2(String market, List<MLMetricsItem> listTest) {
List<IclijServiceList> subLists = new ArrayList<>();
roundList4(listTest);
if (!listTest.isEmpty()) {
IclijServiceList incDec = new IclijServiceList();
incDec.setTitle(market + " mlstats");
incDec.setList(listTest);
subLists.add(incDec);
}
return subLists;
}
static List<IclijServiceList> getServiceList(String market, String text, List<IncDecItem> listInc, List<IncDecItem> listDec,
List<IncDecItem> listIncDec) {
List<IclijServiceList> subLists = new ArrayList<>();
roundList(listInc);
roundList(listDec);
roundList(listIncDec);
if (!listInc.isEmpty()) {
List<Boolean> listIncBoolean = listInc.stream().map(IncDecItem::getVerified).filter(Objects::nonNull).collect(Collectors.toList());
long count = listIncBoolean.stream().filter(i -> i).count();
IclijServiceList inc = new IclijServiceList();
String trendStr = "";
inc.setTitle(market + " " + "Increase " + text + " ( verified " + count + " / " + listIncBoolean.size() + " )" + trendStr);
inc.setList(listInc);
subLists.add(inc);
}
if (!listDec.isEmpty()) {
List<Boolean> listDecBoolean = listDec.stream().map(IncDecItem::getVerified).filter(Objects::nonNull).collect(Collectors.toList());
long count = listDecBoolean.stream().filter(i -> i).count();
IclijServiceList dec = new IclijServiceList();
dec.setTitle(market + " " + "Decrease " + text + " ( verified " + count + " / " + listDecBoolean.size() + " )");
dec.setList(listDec);
subLists.add(dec);
}
if (!listIncDec.isEmpty()) {
List<Boolean> listIncDecBoolean = listIncDec.stream().map(IncDecItem::getVerified).filter(Objects::nonNull).collect(Collectors.toList());
long count = listIncDecBoolean.stream().filter(i -> i).count();
IclijServiceList incDec = new IclijServiceList();
incDec.setTitle(market + " " + "Increase and decrease " + text + "( verified " + count + " / " + listIncDecBoolean.size() + " )" );
incDec.setList(listIncDec);
subLists.add(incDec);
}
return subLists;
}
private static List<IncDecItem> mergeList(List<IncDecItem> itemList, boolean splitid) {
Map<String, IncDecItem> map = new HashMap<>();
for (IncDecItem item : itemList) {
String id;
if (!splitid) {
id = item.getId();
} else {
id = item.getId() + item.getDate().toString();
}
IncDecItem getItem = map.get(id);
if (getItem == null) {
map.put(id, item);
} else {
getItem.setScore(getItem.getScore() + item.getScore());
getItem.setDescription(getItem.getDescription() + ", " + item.getDescription());
}
}
return new ArrayList<>(map.values());
}
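/*
 * Illustrative sketch (not part of the original source): mergeList() folds items that
 * share an id by summing their scores and concatenating their descriptions; with
 * splitid == true the key becomes id + date, so same-id items from different dates
 * stay separate. The no-argument IncDecItem constructor and setId() setter below are
 * assumptions made for this example.
 *
 *   IncDecItem a = new IncDecItem();
 *   a.setId("NHY"); a.setScore(0.4); a.setDescription("rnn");
 *   IncDecItem b = new IncDecItem();
 *   b.setId("NHY"); b.setScore(0.2); b.setDescription("lstm");
 *   List<IncDecItem> merged = mergeList(Arrays.asList(a, b), false);
 *   // merged contains one item: id "NHY", score 0.6, description "rnn, lstm"
 */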
public static IclijServiceResult getVerify(ComponentInput componentInput) throws Exception {
String type = "Verify";
componentInput.setDoSave(componentInput.getConfig().wantVerificationSave());
componentInput.setDoSave(false);
int verificationdays = componentInput.getConfig().verificationDays();
boolean rerun = componentInput.getConfig().verificationRerun();
IclijServiceResult result = getFindProfitVerify(componentInput, type, verificationdays, rerun);
new MiscUtil().print(result);
return result;
}
private static IclijServiceResult getFindProfitVerify(ComponentInput componentInput, String type, int verificationdays, boolean rerun) throws Exception {
IclijServiceResult result = new IclijServiceResult();
result.setLists(new ArrayList<>());
List<IclijServiceList> retLists = result.getLists();
ControlService srv = null;
try {
srv = getService(componentInput, verificationdays);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
return result;
}
//FindProfitActionData findProfitActionData = new FindProfitActionData();
// this calculates, does not read from db
List<MemoryItem> allMemoryItems = new ArrayList<>();
//ProfitData picks = findProfitActionData.getPicks(param, allMemoryItems);
//getMemoryItems(componentInput.getConfig(), param, verificationdays, getFindProfitComponents(componentInput.getConfig()));
IclijServiceList memories = new IclijServiceList();
memories.setTitle("Memories");
roundList3(allMemoryItems);
memories.setList(allMemoryItems);
Map<String, Object> updateMap = new HashMap<>();
//param.setUpdateMap(updateMap);
Market market = new MarketUtil().findMarket(srv.conf.getMarket());
boolean evolve = new MiscUtil().getEvolve(verificationdays, componentInput);
WebData myData;
if (rerun) {
myData = srv.getRun(IclijConstants.FINDPROFIT, componentInput);
//ProfitData buysells = myData.profitData; // findProfitActionData.getPicks(param, allMemoryItems);
} else {
myData = srv.getVerify(IclijConstants.FINDPROFIT, componentInput);
}
updateMap = myData.getUpdateMap();
allMemoryItems.addAll(myData.getMemoryItems());
List<IncDecItem> allListInc = new ArrayList<>(myData.getIncs());
List<IncDecItem> allListDec = new ArrayList<>(myData.getDecs());
allListInc = mergeList(allListInc, !rerun);
allListDec = mergeList(allListDec, !rerun);
List<IncDecItem> allListIncDec;
if (rerun) {
allListIncDec = new MiscUtil().moveAndGetCommon(allListInc, allListDec, verificationdays > 0);
} else {
allListIncDec = new MiscUtil().moveAndGetCommon2(allListInc, allListDec, verificationdays > 0);
}
Map<String, Object> trendMap = new HashMap<>();
Short mystartoffset = market.getConfig().getStartoffset();
short startoffset = mystartoffset != null ? mystartoffset : 0;
int loopoffset = componentInput.getLoopoffset() != null ? componentInput.getLoopoffset() : 0;
String dateString = TimeUtil.convertDate2(componentInput.getEnddate());
List<String> stockDates = srv.getDates(market.getConfig().getMarket());
int dateIndex = TimeUtil.getIndexEqualBefore(stockDates, dateString);
String aDate = stockDates.get(dateIndex - loopoffset);
LocalDate endDate = TimeUtil.convertDate(aDate);
Trend trend;
Trend trend2;
if (rerun) {
String endDateString2 = stockDates.get(dateIndex - market.getConfig().getFindtime());
LocalDate endDate2 = TimeUtil.convertDate(endDateString2);
trend = new TrendUtil().getTrend(componentInput.getConfig().verificationDays(), endDate2, srv, startoffset);
trend2 = new TrendUtil().getTrend(componentInput.getConfig().verificationDays(), endDate, srv, startoffset);
} else {
LocalDate prevdate = componentInput.getEnddate();
String prevdateString = TimeUtil.convertDate2(prevdate);
int prevdateIndex = TimeUtil.getIndexEqualBefore(stockDates, prevdateString);
prevdateIndex = prevdateIndex - componentInput.getLoopoffset();
Short startoffset2 = market.getConfig().getStartoffset();
startoffset2 = startoffset2 != null ? startoffset2 : 0;
prevdateIndex = prevdateIndex - verificationdays - startoffset2;
prevdateString = stockDates.get(prevdateIndex);
String olddateString = stockDates.get(prevdateIndex - market.getConfig().getFindtime());
LocalDate olddate = null;
try {
prevdate = TimeUtil.convertDate(prevdateString);
olddate = TimeUtil.convertDate(olddateString);
} catch (ParseException e) {
log.error(Constants.EXCEPTION, e);
}
trend = new TrendUtil().getTrend(componentInput.getConfig().verificationDays(), olddate, srv, startoffset2, stockDates, loopoffset);
trend2 = new TrendUtil().getTrend(componentInput.getConfig().verificationDays(), prevdate, srv, startoffset2, stockDates, loopoffset);
}
trendMap.put(market.getConfig().getMarket(), trend);
trendMap.put(market.getConfig().getMarket() + "end", trend2);
int offset = 0;
int futuredays = 0;
List<String> stockdates = srv.getDates(market.getConfig().getMarket());
List<String> list = new TimeUtil().setDates(TimeUtil.convertDate2(componentInput.getEnddate()), stockdates, offset, componentInput.getLoopoffset(), futuredays);
String baseDateStr = list.get(0);
String futureDateStr = list.get(1);
log.info("Base future date {} {}", baseDateStr, futureDateStr);
LocalDate baseDate = TimeUtil.convertDate(baseDateStr);
LocalDate futureDate = TimeUtil.convertDate(futureDateStr);
Map<String, List<IncDecItem>> allListIncDecMap = splitParam(allListIncDec);
Map<String, List<IncDecItem>> allListIncMap = splitParam(allListInc);
Map<String, List<IncDecItem>> allListDecMap = splitParam(allListDec);
for (Entry<String, List<IncDecItem>> entry : allListIncDecMap.entrySet()) {
String key = entry.getKey();
List<IncDecItem> listIncDec = entry.getValue();
List<IncDecItem> listInc = allListIncMap.get(key);
List<IncDecItem> listDec = allListDecMap.get(key);
if (listIncDec == null) {
listIncDec = new ArrayList<>();
}
if (listDec == null) {
listDec = new ArrayList<>();
}
if (listInc == null) {
listInc = new ArrayList<>();
}
if (verificationdays > 0) {
try {
//srv.setFuturedays(0);
//srv.setOffset(0);
//srv.setDates(0, 0, TimeUtil.convertDate2(componentInput.getEnddate()));
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
}
if (rerun) {
new VerifyProfitUtil().getVerifyProfit(verificationdays, futureDate, srv, baseDate, listInc, listDec, listIncDec, startoffset, componentInput.getConfig().getFindProfitManualThreshold());
} else {
new VerifyProfitUtil().getVerifyProfit(verificationdays, futureDate, srv, baseDate, listInc, listDec, listIncDec, startoffset, componentInput.getConfig().getFindProfitManualThreshold(), stockDates, loopoffset);
}
/*
List<MapList> inc = new ArrayList<>();
List<MapList> dec = new ArrayList<>();
IclijServiceList incMap = new IclijServiceList();
incMap.setTitle("Increase verify");
incMap.setList(inc);
IclijServiceList decMap = new IclijServiceList();
decMap.setTitle("Decrease verify");
decMap.setList(dec);
retLists.add(incMap);
retLists.add(decMap);
*/
}
addHeader(componentInput, type, result, baseDateStr, futureDateStr);
List<IclijServiceList> subLists = getServiceList(srv.conf.getMarket(), key, listInc, listDec, listIncDec);
retLists.addAll(subLists);
}
retLists.add(memories);
{
List<TimingItem> currentTimings = (List<TimingItem>) myData.getTimingMap().get(market.getConfig().getMarket());
if (currentTimings != null) {
List<IclijServiceList> subLists2 = getServiceList(market.getConfig().getMarket(), currentTimings);
retLists.addAll(subLists2);
}
}
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
IclijServiceList updates = convert(market.getConfig().getMarket(), updateMap);
retLists.add(updates);
IclijServiceList trends = convert(trendMap);
retLists.add(trends);
List<IncDecItem> listIncDecs = new ArrayList<>(myData.getIncs());
listIncDecs.addAll(myData.getDecs());
roundList(listIncDecs);
addRelations(componentInput, retLists, listIncDecs);
return result;
}
private static void addHeader(ComponentInput componentInput, String type, IclijServiceResult result,
String basedate, String futuredate) {
IclijServiceList header = new IclijServiceList();
result.getLists().add(header);
header.setTitle(type + " " + "Market: " + componentInput.getConfig().getMarket() + " Date: " + componentInput.getConfig().getDate() + " Offset: " + componentInput.getLoopoffset() + " Threshold: " + componentInput.getConfig().getFindProfitManualThreshold());
IclijServiceList header2 = new IclijServiceList();
result.getLists().add(header2);
header2.setTitle(type + " " + "ML market: " + componentInput.getConfig().getMlmarket() + " Date: " + componentInput.getConfig().getDate() + " Offset: " + componentInput.getLoopoffset());
List<MapList> aList = new ArrayList<>();
header.setList(aList);
MapList mapList = new MapList();
mapList.setKey("Dates");
mapList.setValue("Base " + basedate + " Future " + futuredate);
aList.add(mapList);
}
public static ControlService getService(ComponentInput input, int days) throws Exception {
String market = input.getConfig().getMarket();
String mlmarket = input.getConfig().getMlmarket();
/*
if (market == null) {
throw new Exception("Market null");
}
*/
//LocalDate date = input.getConfig().getDate();
ControlService srv = new ControlService();
srv.getConfig();
if (market != null) {
srv.conf.setMarket(market);
srv.conf.setMLmarket(mlmarket);
}
return srv;
}
private static Map<String, List<List<Double>>> getSimpleContent(String market) throws Exception {
ControlService srv = new ControlService();
srv.getConfig();
if (market != null) {
srv.conf.setMarket(market);
}
new MLUtil().disabler(srv.conf.getConfigValueMap());
srv.conf.getConfigValueMap().put(ConfigConstants.MISCTHRESHOLD, null);
Map<String, Map<String, Object>> result = srv.getContent();
Integer cat = (Integer) result.get(PipelineConstants.META).get(PipelineConstants.WANTEDCAT);
Map<String, List<List<Double>>> listMap = (Map<String, List<List<Double>>>) result.get("" + cat).get(PipelineConstants.LIST);
return listMap;
}
private static boolean anythingHere3(Map<String, List<List<Double>>> listMap2, int size) {
for (List<List<Double>> array : listMap2.values()) {
if (size != 3 || size != array.size()) {
return false;
}
out:
for (int i = 0; i < array.get(0).size(); i++) {
for (int j = 0; j < array.size(); j++) {
if (array.get(j).get(i) == null) {
continue out;
}
}
return true;
}
}
return false;
}
private static boolean wantThree(String market) {
try {
Map<String, List<List<Double>>> listMap = getSimpleContent(market);
return anythingHere3(listMap, 3);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
return false;
}
}
/*
if (config.wantsImproveProfit()) {
ImproveProfitActionData improveProfitActionData = new ImproveProfitActionData();
getImprovements(retLists, market, date, save, improveProfitActionData, allMemoryItems);
}
*/
private static IclijServiceList convert(String marketName, Map<String, Object> updateMap) {
IclijServiceList list = new IclijServiceList();
list.setTitle("Updates for " + marketName);
List<MapList> aList = new ArrayList<>();
for (Entry<String, Object> map : updateMap.entrySet()) {
MapList m = new MapList();
m.setKey(map.getKey());
m.setValue(map.getValue().toString());
aList.add(m);
}
list.setList(aList);
return list;
}
private static IclijServiceList convert(Map<String, Object> map) {
IclijServiceList list = new IclijServiceList();
list.setTitle("Trends");
List<MapList> aList = new ArrayList<>();
for (Entry<String, Object> entry : map.entrySet()) {
MapList m = new MapList();
m.setKey(entry.getKey());
m.setValue(entry.getValue().toString());
aList.add(m);
}
list.setList(aList);
return list;
}
private static LocalDate getDateIndex(List<String> stocks, int index) throws ParseException {
String newDate = stocks.get(index);
return TimeUtil.convertDate(newDate);
}
/*
@Deprecated
private static void getImprovements(List<IclijServiceList> retLists, ComponentData param,
ImproveProfitActionData improveProfitActionData, List<MemoryItem> allMemoryItems) throws Exception {
Map<String, String> map = improveProfitActionData.getMarket(param, allMemoryItems);
List<MapList> mapList = improveProfitActionData.getList(map);
IclijServiceList resultMap = new IclijServiceList();
resultMap.setTitle("Improve Profit Info");
resultMap.setList(mapList);
retLists.add(resultMap);
}
*/
public static IclijServiceResult getFindProfit(ComponentInput componentInput) throws Exception {
String type = "FindProfit";
int days = 0; // config.verificationDays();
componentInput.setDoSave(false);
boolean rerun = componentInput.getConfig().singlemarketRerun();
IclijServiceResult result = getFindProfitVerify(componentInput, type, days, rerun);
new MiscUtil().print(result);
return result;
}
public static IclijServiceResult getImproveProfit(ComponentInput componentInput) throws Exception {
try {
int loopOffset = 0;
int days = 0; // config.verificationDays();
IclijServiceResult result = new IclijServiceResult();
result.setLists(new ArrayList<>());
List<IclijServiceList> retLists = result.getLists();
ControlService srv = null;
try {
srv = getService(componentInput, days);
} catch (Exception e) {
log.error(Constants.EXCEPTION, e);
return result;
}
componentInput.setDoSave(false);
//FindProfitActionData findProfitActionData = new FindProfitActionData();
ImproveProfitActionData improveProfitActionData = new ImproveProfitActionData();
List<MemoryItem> allMemoryItems = new ArrayList<>(); // getMemoryItems(componentInput.getConfig(), param, days, getImproveProfitComponents(componentInput.getConfig()));
//IclijServiceList memories = new IclijServiceList();
//memories.setTitle("Memories");
//memories.setList(allMemoryItems);
Map<String, Object> updateMap = new HashMap<>();
Market market = new MarketUtil().findMarket(srv.conf.getMarket());
WebData webData = srv.getRun(IclijConstants.IMPROVEPROFIT, componentInput);
List<MapList> mapList = new MiscUtil().getList(webData.getUpdateMap());
IclijServiceList resultMap = new IclijServiceList();
resultMap.setTitle("Improve Profit Info");
resultMap.setList(mapList);
retLists.add(resultMap);
//retLists.add(memories);
List<IclijServiceList> lists = new ArrayList<>();
Map<String, List<TimingItem>> timingMap = webData.getTimingMap();
for (Entry<String, List<TimingItem>> entry : timingMap.entrySet()) {
String marketName = entry.getKey();
List<TimingItem> list = (List<TimingItem>) entry.getValue();
List<IclijServiceList> subLists = getServiceList(marketName, list);
lists.addAll(subLists);
}
Map<String, List<TimingItem>> timingMap2 = webData.getTimingMap2();
for (Entry<String, List<TimingItem>> entry : timingMap2.entrySet()) {
String marketName = entry.getKey();
List<TimingItem> list = (List<TimingItem>) entry.getValue();
List<IclijServiceList> subLists = getServiceList(marketName + " sell", list);
lists.addAll(subLists);
}
result.setLists(lists);
updateMap = webData.getUpdateMap();
Map<String, Map<String, Object>> mapmaps = new HashMap<>();
mapmaps.put("ml", updateMap);
result.setMaps(mapmaps);
IclijServiceList updates = convert(null, updateMap);
lists.add(updates);
updateMap = webData.getUpdateMap2();
updates = convert(null, updateMap);
lists.add(updates);
return result;
} catch (Exception e) {
log.error("Ex", e);
}
return null;
}
private static LocalDate getLastDate(List<String> stocks) throws ParseException {
String aDate = stocks.get(stocks.size() - 1);
return TimeUtil.convertDate(aDate);
}
private static int getDateIndex(LocalDate date, List<String> stocks) {
int index;
if (date == null) {
index = stocks.size() - 1;
} else {
String aDate = TimeUtil.convertDate2(date);
index = stocks.indexOf(aDate);
}
return index;
}
private static int getDateOffset(LocalDate date, List<String> stocks) {
int offset = 0;
if (date != null) {
String aDate = TimeUtil.convertDate2(date);
int index = stocks.indexOf(aDate);
if (index >= 0) {
offset = stocks.size() - 1 - index;
}
}
return offset;
}
/*
public static List<MemoryItem> getMemoryItemsNot(IclijConfig config, ComponentData param, int days, List<String> components) throws InterruptedException {
List<MemoryItem> allMemoryItems = new ArrayList<>();
UpdateDBActionData updateDbActionData = new UpdateDBActionData();
Queue<ActionData> serviceActionDatas = updateDbActionData.findAllMarketComponentsToCheck(param, days, config, components);
for (ActionData serviceActionData : serviceActionDatas) {
serviceActionData.goal(null, param, null);
Map<String, Object> resultMap = serviceActionData.getLocalResultMap();
List<MemoryItem> memoryItems = (List<MemoryItem>) resultMap.get(IclijPipelineConstants.MEMORY);
if (memoryItems != null) {
allMemoryItems.addAll(memoryItems);
} else {
log.error("Memory null");
}
}
return allMemoryItems;
}
*/
private static void roundList(List<IncDecItem> list) {
for (IncDecItem item : list) {
Double score = item.getScore();
if (score != null) {
item.setScore(MathUtil.round2(score, 3));
}
}
}
private static void roundList2(List<TimingItem> list) {
for (TimingItem item : list) {
Double score = item.getScore();
if (score != null) {
item.setScore(MathUtil.round2(score, 3));
}
}
}
private static void roundList3(List<MemoryItem> list) {
for (MemoryItem item : list) {
Double testaccuracy = item.getTestaccuracy();
if (testaccuracy != null) {
item.setTestaccuracy(MathUtil.round2(testaccuracy, 3));
}
Double testloss = item.getTestloss();
if (testloss != null) {
item.setTestloss(MathUtil.round2(testloss, 3));
}
Double confidence = item.getConfidence();
if (confidence != null) {
item.setConfidence(MathUtil.round2(confidence, 3));
}
Double learnConfidence = item.getLearnConfidence();
if (learnConfidence != null) {
item.setLearnConfidence(MathUtil.round2(learnConfidence, 3));
}
Double tpConf = item.getTpConf();
if (tpConf != null) {
item.setTpConf(MathUtil.round2(tpConf, 3));
}
Double tpProb = item.getTpProb();
if (tpProb != null) {
item.setTpProb(MathUtil.round2(tpProb, 3));
}
Double tpProbConf = item.getTpProbConf();
if (tpProbConf != null) {
item.setTpProbConf(MathUtil.round2(tpProbConf, 3));
}
Double tnConf = item.getTnConf();
if (tnConf != null) {
item.setTnConf(MathUtil.round2(tnConf, 3));
}
Double tnProb = item.getTnProb();
if (tnProb != null) {
item.setTnProb(MathUtil.round2(tnProb, 3));
}
Double tnProbConf = item.getTnProbConf();
if (tnProbConf != null) {
item.setTnProbConf(MathUtil.round2(tnProbConf, 3));
}
Double fpConf = item.getFpConf();
if (fpConf != null) {
item.setFpConf(MathUtil.round2(fpConf, 3));
}
Double fpProb = item.getFpProb();
if (fpProb != null) {
item.setFpProb(MathUtil.round2(fpProb, 3));
}
Double fpProbConf = item.getFpProbConf();
if (fpProbConf != null) {
item.setFpProbConf(MathUtil.round2(fpProbConf, 3));
}
Double fnConf = item.getFnConf();
if (fnConf != null) {
item.setFnConf(MathUtil.round2(fnConf, 3));
}
Double fnProb = item.getFnProb();
if (fnProb != null) {
item.setFnProb(MathUtil.round2(fnProb, 3));
}
Double fnProbConf = item.getFnProbConf();
if (fnProbConf != null) {
item.setFnProbConf(MathUtil.round2(fnProbConf, 3));
}
}
}
private static void roundList4(List<MLMetricsItem> list) {
for (MLMetricsItem item : list) {
Double loss = item.getLoss();
if (loss != null) {
item.setLoss(MathUtil.round2(loss, 3));
}
Double accuracy = item.getTestAccuracy();
if (accuracy != null) {
item.setTestAccuracy(MathUtil.round2(accuracy, 3));
}
}
}
public static Map<String, List<IncDecItem>> splitParam(List<IncDecItem> items) {
Map<String, List<IncDecItem>> mymap = new HashMap<String, List<IncDecItem>>();
for (IncDecItem item : items) {
String key = item.getParameters();
List<IncDecItem> itemlist = mymap.get(key);
if (itemlist == null) {
itemlist = new ArrayList<IncDecItem>();
mymap.put(key, itemlist);
}
itemlist.add(item);
}
return mymap;
}
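/*
 * Illustrative sketch (not part of the original source): splitParam() groups items by
 * their getParameters() string, so results produced under different configurations end
 * up in separate report lists. The parameter values "indicator" and "predictor" below
 * are made up for illustration.
 *
 *   Map<String, List<IncDecItem>> byParam = splitParam(items);
 *   // byParam.get("indicator") and byParam.get("predictor") hold the respective items
 */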
}
|
package me.prettyprint.cassandra.service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import me.prettyprint.cassandra.model.HectorException;
import me.prettyprint.cassandra.model.HectorTransportException;
import me.prettyprint.cassandra.model.InvalidRequestException;
import me.prettyprint.cassandra.model.PoolExhaustedException;
import me.prettyprint.cassandra.model.TimedOutException;
import me.prettyprint.cassandra.model.UnavailableException;
import me.prettyprint.cassandra.service.CassandraClient.FailoverPolicy;
import me.prettyprint.cassandra.service.CassandraClientMonitor.Counter;
import org.apache.cassandra.thrift.Cassandra;
import org.perf4j.StopWatch;
import org.perf4j.slf4j.Slf4JStopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A fail-over operation executor.
*
* @author Ran Tavory (rantav@gmail.com)
*
*/
/*package*/ class FailoverOperator {
private static final Logger log = LoggerFactory.getLogger(FailoverOperator.class);
private static final Logger perf4jLogger =
LoggerFactory.getLogger("me.prettyprint.cassandra.hector.TimingLogger");
private final FailoverPolicy failoverPolicy;
/** List of all known remote cassandra nodes */
private final List<CassandraHost> knownHosts;
private final CassandraClientMonitor monitor;
private CassandraClient client;
private final CassandraClientPool clientPools;
/**
* A reference to the keyspace operating in this context, if there is one.
* This may be null if no keyspace is in context.
*/
private final Keyspace keyspace;
/**
*
* @param policy The failover policy for this operator.
* @param hosts The list of known hosts it can failover to.
* @param keyspace The keyspace performing this operation (if a keyspace is performing it). May
* be null.
*/
public FailoverOperator(FailoverPolicy policy, CassandraClientMonitor monitor,
CassandraClient client, CassandraClientPool clientPools, Keyspace keyspace) {
this.failoverPolicy = policy;
this.knownHosts = new ArrayList<CassandraHost>(clientPools.getKnownHosts());
this.monitor = monitor;
this.client = client;
this.clientPools = clientPools;
this.keyspace = keyspace;
}
/**
* Performs the operation and retries in case the class is configured for
* retries, there are enough hosts left to try, and the error was a
* {@link TimedOutException}.
*/
public CassandraClient operate(Operation<?> op) throws HectorException {
final StopWatch stopWatch = new Slf4JStopWatch(perf4jLogger);
int retries = Math.min(failoverPolicy.numRetries + 1, knownHosts.size());
boolean isFirst = true;
try {
while (retries > 0) {
if (!isFirst) {
--retries;
}
try {
boolean success = operateSingleIteration(op, stopWatch, retries, isFirst);
if (success) {
return client;
}
} catch (SkipHostException e) {
log.warn("Skip-host failed ", e);
// continue the loop to the next host.
}
sleepBetweenHostSkips();
isFirst = false;
}
} catch (InvalidRequestException e) {
monitor.incCounter(op.failCounter);
stopWatch.stop(op.stopWatchTagName + ".fail_");
throw e;
} catch (UnavailableException e) {
invalidate();
stopWatch.stop(op.stopWatchTagName + ".fail_");
monitor.incCounter(op.failCounter);
throw e;
} catch (HectorTransportException e) {
invalidate();
stopWatch.stop(op.stopWatchTagName + ".fail_");
monitor.incCounter(op.failCounter);
throw e;
} catch (TimedOutException e) {
invalidate();
stopWatch.stop(op.stopWatchTagName + ".fail_");
monitor.incCounter(op.failCounter);
throw e;
} catch (PoolExhaustedException e) {
log.warn("Pool is exhausted", e);
monitor.incCounter(op.failCounter);
monitor.incCounter(Counter.POOL_EXHAUSTED);
stopWatch.stop(op.stopWatchTagName + ".fail_");
throw e;
} catch (IllegalStateException e) {
log.error("Client Pool is already closed, cannot obtain new clients.", e);
monitor.incCounter(op.failCounter);
stopWatch.stop(op.stopWatchTagName + ".fail_");
throw new HectorException(e);
} catch (IOException e) {
invalidate();
monitor.incCounter(op.failCounter);
stopWatch.stop(op.stopWatchTagName + ".fail_");
throw new HectorTransportException(e);
} catch (Exception e) {
log.error("Cannot retry failover, got an Exception", e);
monitor.incCounter(op.failCounter);
stopWatch.stop(op.stopWatchTagName + ".fail_");
throw new HectorException(e);
}
return client;
}
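  /*
   * Worked example (not from the original source): with failoverPolicy.numRetries == 3
   * and two known hosts, retries starts at Math.min(3 + 1, 2) == 2. The first pass does
   * not decrement the counter, so the operation is attempted up to three times in total;
   * once the counter passed to operateSingleIteration() reaches 0, a recoverable failure
   * such as TimedOutException is rethrown instead of skipping to another host.
   */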
/**
* Sleeps for the specified time as determined by sleepBetweenHostsMilli.
* In many cases failing over to other hosts is done because the cluster is too busy, so sleeping
* between hosts may help reduce load on the cluster.
*/
private void sleepBetweenHostSkips() {
if (failoverPolicy.sleepBetweenHostsMilli > 0) {
if ( log.isDebugEnabled() ) {
log.debug("Will sleep for {} millisec", failoverPolicy.sleepBetweenHostsMilli);
}
try {
Thread.sleep(failoverPolicy.sleepBetweenHostsMilli);
} catch (InterruptedException e) {
log.warn("Sleep between hosts interrupted", e);
}
}
}
/**
* Runs a single iteration of the operation.
* If successful, then returns true.
* If unsuccessful but skipping to another host succeeded, returns false. If the skip operation
* failed or retries == 0, throws an exception.
* @param op the operation to perform
* @param stopWatch the stop watch measuring performance of this operation.
* @param retries the number of retries left.
* @param isFirst is this the first iteration?
*/
private boolean operateSingleIteration(Operation<?> op, final StopWatch stopWatch,
int retries, boolean isFirst) throws HectorException,
PoolExhaustedException, Exception, UnavailableException, HectorTransportException {
if ( log.isDebugEnabled() ) {
log.debug("Performing operation on {}; retries: {}", client.getCassandraHost().getUrl(), retries);
}
try {
// Perform operation and save its result value
Cassandra.Client c = client.getCassandra();
if (keyspace != null) {
c.set_keyspace(keyspace.getName());
}
op.executeAndSetResult(c);
// hmmm don't count success, there are too many...
// monitor.incCounter(op.successCounter);
if ( log.isDebugEnabled() ) {
log.debug("Operation succeeded on {}", client.getCassandraHost().getUrl());
}
stopWatch.stop(op.stopWatchTagName + ".success_");
return true;
} catch (TimedOutException e) {
log.warn("Got a TimedOutException from {}. Num of retries: {} (thread={})",
new Object[]{client.getCassandraHost().getUrl(), retries, Thread.currentThread().getName()});
if (retries == 0) {
throw e;
} else {
skipToNextHost(isFirst, false);
monitor.incCounter(Counter.RECOVERABLE_TIMED_OUT_EXCEPTIONS);
}
} catch (UnavailableException e) {
log.warn("Got a UnavailableException from {}. Num of retries: {} (thread={})",
new Object[]{client.getCassandraHost().getUrl(), retries, Thread.currentThread().getName()});
if (retries == 0) {
throw e;
} else {
skipToNextHost(isFirst, true);
monitor.incCounter(Counter.RECOVERABLE_UNAVAILABLE_EXCEPTIONS);
}
} catch (HectorTransportException e) {
log.warn("Got a HectorTException from {}. Num of retries: {} (thread={})",
new Object[]{client.getCassandraHost().getUrl(), retries, Thread.currentThread().getName()});
if (retries == 0) {
throw e;
} else {
skipToNextHost(isFirst, true);
monitor.incCounter(Counter.RECOVERABLE_TRANSPORT_EXCEPTIONS);
}
}
return false;
}
/**
* Updates the client member and cassandra member to the next host in the
* ring.
*
* Returns the current client to the pool and retrieves a new client from the
* next pool.
* @param isRetrySameHostAgain should the skip operation try the same current host, or should it
* really skip to the next host in the ring?
* @param invalidateAllConnectionsToCurrentHost If true, all connections to the current host
* should be invalidated.
*/
private void skipToNextHost(boolean isRetrySameHostAgain,
boolean invalidateAllConnectionsToCurrentHost) throws SkipHostException {
if ( log.isInfoEnabled() ) {
log.info("Skipping to next host (thread={}). Current host is: {}",
Thread.currentThread().getName(), client.getCassandraHost().getUrl());
}
invalidate();
if (invalidateAllConnectionsToCurrentHost) {
clientPools.invalidateAllConnectionsToHost(client);
}
CassandraHost nextHost = isRetrySameHostAgain ? client.getCassandraHost() :
getNextHost(client.getCassandraHost());
if (nextHost == null) {
log.error("Unable to find next host to skip to at {}", toString());
throw new SkipHostException("Unable to failover to next host");
}
// assume all hosts in the ring use the same port (cassandra's API only provides IPs, not ports)
try {
client = clientPools.borrowClient(nextHost);
} catch (IllegalStateException e) {
throw new SkipHostException(e);
} catch (PoolExhaustedException e) {
throw new SkipHostException(e);
} catch (Exception e) {
throw new SkipHostException(e);
}
monitor.incCounter(Counter.SKIP_HOST_SUCCESS);
if ( log.isInfoEnabled() ) {
log.info("Skipped host (thread={}). New client is {}", Thread.currentThread().getName(),
client);
}
}
/**
* Invalidates this keyspace and client associated with it.
* This method should be used when the keyspace had errors.
* It returns the client to the pool and marks it as invalid (essentially taking the client
* out of the pool indefinitely) and removes the keyspace from the client.
*/
private void invalidate() {
if ( log.isInfoEnabled() ) {
log.info("Invalidating client {} (thread={})", client, Thread.currentThread().getName());
}
try {
clientPools.invalidateClient(client);
if (keyspace != null) {
client.removeKeyspace(keyspace);
}
} catch (Exception e) {
log.error("Unable to invalidate client {}. Will continue anyhow. (thread={})", client,
Thread.currentThread().getName());
}
}
/**
* Finds the next host in knownHosts. Next is the one after the given host
* (modulo the number of elements in the list).
*
* @return URL of the next presumably available host. null if none can be
* found.
*/
private CassandraHost getNextHost(CassandraHost cassandraHost) {
int size = knownHosts.size();
if (size < 1) {
return null;
}
for (int i = 0; i < knownHosts.size(); ++i) {
if (cassandraHost.equals(knownHosts.get(i))) {
// found this host. Return the next one in the array
return knownHosts.get((i + 1) % size);
}
}
log.error("The host {} wasn't found in the knownHosts ({}). Will try to choose a random " +
"host from the known host list. (thread={})",
new Object[]{cassandraHost, knownHosts, Thread.currentThread().getName()});
return chooseRandomHost(knownHosts);
}
/**
* Chooses a random host from the list.
* @param knownHosts
* @return
*/
private CassandraHost chooseRandomHost(List<CassandraHost> knownHosts) {
// Math.random() is strictly less than 1.0, so flooring keeps the index within [0, size).
int rnd = (int) (Math.random() * knownHosts.size());
CassandraHost host = knownHosts.get(rnd);
if ( log.isInfoEnabled() ) {
log.info("Choosing random host to skip to: {}", host);
}
return host;
}
}
/**
* Defines the interface of an operation performed on cassandra
*
* @param <T>
* The result type of the operation (if it has a result), such as the
* result of get_count or get_column
*
* Oh closures, how I wish you were here...
*/
/*package*/ abstract class Operation<T> {
/** Counts failed attempts */
protected final Counter failCounter;
/** The stopwatch used to measure operation performance */
protected final String stopWatchTagName;
protected T result;
private HectorException exception;
public Operation(OperationType operationType) {
this.failCounter = operationType.equals(OperationType.READ) ? Counter.READ_FAIL :
Counter.WRITE_FAIL;
this.stopWatchTagName = operationType.name();
}
public void setResult(T executionResult) {
result = executionResult;
}
/**
*
* @return The result of the operation, if this is an operation that has a
* result (such as getColumn, etc.)
*/
public T getResult() {
return result;
}
/**
* Performs the operation on the given cassandra instance.
*/
public abstract T execute(Cassandra.Client cassandra) throws HectorException;
public void executeAndSetResult(Cassandra.Client cassandra) throws HectorException {
setResult(execute(cassandra));
}
public void setException(HectorException e) {
exception = e;
}
public boolean hasException() {
return exception != null;
}
public HectorException getException() {
return exception;
}
}
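/**
 * Illustrative sketch only (not part of the original source): a minimal concrete
 * Operation that fetches the cluster name. The describe_cluster_name() thrift call and
 * the wrapping of checked exceptions are assumptions made for this example. It would
 * typically be executed via FailoverOperator.operate(...) and read back with getResult().
 */
/*package*/ class DescribeClusterNameOperation extends Operation<String> {
  public DescribeClusterNameOperation() {
    super(OperationType.META_READ);
  }
  @Override
  public String execute(Cassandra.Client cassandra) throws HectorException {
    try {
      // A lightweight meta-read; any thrift failure is wrapped as a HectorException.
      return cassandra.describe_cluster_name();
    } catch (Exception e) {
      throw new HectorException(e);
    }
  }
}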
/**
* Specifies the "type" of operation - read or write.
* It's used for perf4j, so should be in sync with hectorLog4j.xml
* @author Ran Tavory (ran@outbain.com)
*
*/
/*package*/ enum OperationType {
/** Read operations*/
READ,
/** Write operations */
WRITE,
/** Meta read operations, such as describe*() */
META_READ;
}
/**
* An internal implementation exception used to signal that the skip-host action has failed.
* @author Ran Tavory (ran@outbain.com)
*
*/
/*package*/ class SkipHostException extends HectorException {
private static final long serialVersionUID = -6099636388926769255L;
public SkipHostException(String msg) {
super(msg);
}
public SkipHostException(Throwable t) {
super(t);
}
}
|
package uk.org.ponder.rsf.components;
import uk.org.ponder.rsf.components.decorators.DecoratorList;
import uk.org.ponder.rsf.components.decorators.UIDecorator;
import uk.org.ponder.rsf.util.RSFUtil;
/**
* UIComponent is the base of the entire RSF component hierarchy. Components
* derived from this class may either be containers derived from UIContainer,
* or else leaf components peering with target dialect tags.
* Note that Components form a containment hierarchy ONLY to allow nested
* repetitive domains. This class is mutually referential with UIContainer.
* @author Antranig Basman (antranig@caret.cam.ac.uk)
*
*/
public class UIComponent {
/** This ID corresponds to the rsf:id in the view template, and is parsed
* by use of the class SplitID.
*/
public String ID;
// fullid is the full path to this component
// structure: ID1-prefix:ID1-suffix:localID1:ID2-prefix:ID2-suffix:localID2:etc.
private String fullID;
/** The algorithm used here must deterministically generate a string ID
* globally unique within the containing component tree (the View), by
* appending path components derived from IDs and local IDs found at each
* level of UIContainer. This algorithm should be "accessible" to simple
* environments such as XSLTs since they will need to operate it to generate
* inter-component references within a view (for example to express any
* EL dependencies).
* <p>
* The structure of the ID forms colon-separated "triples", one for each
* container in the path, ending with the rsf:id of any leaf component, e.g.
* ID1-prefix:ID1-suffix:localID1:ID2-prefix:ID2-suffix:localID2:etc.
*/
public String getFullID() {
if (fullID == null) {
fullID = RSFUtil.computeFullID(this);
}
return fullID;
}
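  /* Illustrative example (not from the original source): for a leaf component with
   * rsf:id "name-display" rendered inside a branch container with ID "member-row:"
   * and local ID "3", the computed full ID takes the form "member-row::3:name-display",
   * following the prefix:suffix:localID triple scheme described above. The concrete
   * IDs are made up for illustration.
   */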
/** Updates the full ID of this component with the supplied value. This
* is an "emergency" method to be used only as a last resort. Within RSF
* it is necessary to ensure that UIBound components arising as direct children
* of "composite" parents can have their IDs set correctly before value
* fixup.
*/
public void updateFullID(String fullID) {
this.fullID = fullID;
}
/** The containing parent of this component, or <code>null</code> for the
* UIContainer representing the view root.
*/
public UIContainer parent;
/** A list of "decorators" which alter the rendering behaviour of this
* component, orthogonal to its binding behaviour. Usually <code>null</code>
*/
public DecoratorList decorators;
/** Add the supplied decorator to the list for this component, initialising
* the list if necessary.
*/
public UIComponent decorate(UIDecorator decorator) {
if (decorators == null) {
decorators = new DecoratorList();
}
decorators.add(decorator);
return this;
}
}
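/*
 * Usage sketch (not part of the original source): decorate() returns the component
 * itself, so decorators can be chained fluently. UIFreeAttributeDecorator is assumed
 * here as a representative UIDecorator implementation.
 *
 *   UIComponent link = ...;
 *   link.decorate(new UIFreeAttributeDecorator("target", "_blank"))
 *       .decorate(new UIFreeAttributeDecorator("rel", "noopener"));
 */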
|
package com.dailystudio.app.utils;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.regex.Pattern;
import org.mozilla.universalchardet.UniversalDetector;
import android.annotation.TargetApi;
import android.content.ContentUris;
import android.content.Context;
import android.content.res.AssetManager;
import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import android.text.TextUtils;
import com.dailystudio.development.Logger;
import com.dailystudio.utils.ResourcesUtils;
public class FileUtils {
private final static String SCHEME_CONTENT = "content";
private final static String SCHEME_FILE = "file";
private final static String COLUMN_DATA = "_data";
private final static String MEDIA_DOC_AUTHORITY = "com.android.providers.media.documents";
private final static String EXT_STORAGE_DOC_AUTHORITY = "com.android.externalstorage.documents";
private final static String DOWNLOAD_DOC_AUTHORITY = "com.android.providers.downloads.documents";
private final static String MEIDA_TYPE_VIDEO = "video";
private final static String MEIDA_TYPE_AUDIO = "audio";
private final static String MEIDA_TYPE_IMAGE = "image";
private final static String NO_MEDIA_TAG_FILE = ".nomedia";
private static final int DOWNLOAD_CONNECTION_TIMEOUT = (3 * 1000);
private static final int DOWNLOAD_READ_TIMEOUT = (20 * 1000);
public static final long SIZE_KB = 1024;
public static final long SIZE_MB = (1024 * SIZE_KB);
public static final long SIZE_GB = (1024 * SIZE_MB);
public static final long SIZE_TB = (1024 * SIZE_GB);
public static boolean checkOrCreateNoMediaDirectory(String directory) {
if (directory == null) {
return false;
}
File dir = new File(directory);
return checkOrCreateNoMediaDirectory(dir);
}
public static boolean checkOrCreateNoMediaDirectory(File directory) {
return checkOrCreateDirectory(directory, true);
}
public static boolean checkOrCreateDirectory(String directory) {
if (directory == null) {
return false;
}
File dir = new File(directory);
return checkOrCreateDirectory(dir);
}
public static boolean checkOrCreateDirectory(File directory) {
return checkOrCreateDirectory(directory, false);
}
public static boolean checkOrCreateDirectory(File directory, boolean nomedia) {
if (directory == null) {
return false;
}
if (directory.exists()) {
if (directory.isDirectory()) {
return true;
} else {
Logger.warn("%s is NOT a directory", directory);
}
}
final boolean success = directory.mkdirs();
if (success == false) {
return false;
}
if (!nomedia) {
return success;
}
return checkOrCreateNoMediaTagInDirectory(directory);
}
public static boolean checkOrCreateNoMediaTagInDirectory(String directory) {
if (directory == null) {
return false;
}
File dir = new File(directory);
return checkOrCreateNoMediaTagInDirectory(dir);
}
public static boolean checkOrCreateNoMediaTagInDirectory(File dir) {
if (dir == null) {
return false;
}
File tagFile = new File(dir, NO_MEDIA_TAG_FILE);
if (tagFile.exists()) {
return true;
}
boolean success = false;
try {
success = tagFile.createNewFile();
} catch (IOException e) {
Logger.warn("could not create tag[%s] in dir[%s]: %s",
NO_MEDIA_TAG_FILE,
dir.getAbsoluteFile(),
e.toString());
success = false;
}
return success;
}
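    /*
     * Usage sketch (not part of the original source): creating an application cache
     * directory that the Android media scanner should ignore. The path is an example only.
     *
     *   File cacheDir = new File(Environment.getExternalStorageDirectory(), "myapp/cache");
     *   if (FileUtils.checkOrCreateNoMediaDirectory(cacheDir)) {
     *       // the directory exists and contains a ".nomedia" tag file
     *   }
     */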
public static boolean isFileExisted(String filename) {
if (filename == null) {
return false;
}
File dstFile = new File(filename);
if (dstFile.exists()) {
return true;
}
return false;
}
public static String getDirectory(String filename) {
if (filename == null) {
return null;
}
File dstFile = new File(filename);
return dstFile.getParent();
}
public static boolean checkOrCreateFile(String filename) {
if (filename == null) {
return false;
}
File file = new File(filename);
return checkOrCreateFile(file);
}
public static boolean checkOrCreateFile(File file) {
if (file == null) {
return false;
}
if (file.exists()) {
return true;
}
boolean success = false;
try {
success = file.createNewFile();
} catch (IOException e) {
e.printStackTrace();
success = false;
}
return success;
}
public static boolean deleteFiles(String path) {
return deleteFiles(path, true);
}
public static boolean deleteFiles(String path, boolean includeFolder) {
if (path == null) {
return false;
}
File file = new File(path);
boolean success = false;
if (file.exists()) {
StringBuilder deleteCmd =
new StringBuilder("rm -r ");
deleteCmd.append(path);
if (!includeFolder) {
    // Assumed: keep the folder itself and only remove its children.
    deleteCmd.append("/*");
}
try {
    // Delegate recursive deletion to the shell "rm -r" command.
    final Process proc = Runtime.getRuntime().exec(deleteCmd.toString());
    success = (proc.waitFor() == 0);
} catch (IOException e) {
    Logger.warn("delete files [%s] failed: %s", path, e.toString());
    success = false;
} catch (InterruptedException e) {
    Logger.warn("delete files [%s] interrupted: %s", path, e.toString());
    success = false;
}
}
return success;
}

/*
 * NOTE: the original download helper's signature is not available in this copy; the
 * name and parameters below are assumptions consistent with the stream-copy and
 * cleanup code that follows.
 */
public static boolean downloadFile(String urlString, String dstFilename) {
    if (TextUtils.isEmpty(urlString) || TextUtils.isEmpty(dstFilename)) {
        return false;
    }
    boolean success = false;
    InputStream is = null;
    OutputStream os = null;
    try {
        URL u = new URL(urlString);
        URLConnection connection = u.openConnection();
        os = new FileOutputStream(dstFilename);
        /*
         * XXX: we could not use u.openStream() here.
         * the default connect/read timeout is infinite.
         * we need to set an acceptable value
         */
connection.setConnectTimeout(DOWNLOAD_CONNECTION_TIMEOUT);
connection.setReadTimeout(DOWNLOAD_READ_TIMEOUT);
is = connection.getInputStream();
DataInputStream dis = new DataInputStream(is);
DataOutputStream dos = new DataOutputStream(os);
@SuppressWarnings("unused")
int bytesReceived = 0;
int bytesRead = 0;
byte[] buffer = new byte[2048];
while ((bytesRead = dis.read(buffer, 0, 2048)) > 0) {
bytesReceived += bytesRead;
// Logger.debug("bytes received = %d", bytesReceived);
dos.write(buffer, 0, bytesRead);
dos.flush();
}
success = true;
} catch (MalformedURLException mue) {
mue.printStackTrace();
success = false;
} catch (IOException ioe) {
ioe.printStackTrace();
success = false;
} catch (NullPointerException ne) {
/*
* XXX: sometime, here will be thrown
* a NULL-pointer exception for address
* resolving.
*/
ne.printStackTrace();
success = false;
} finally {
try {
if (is != null) {
is.close();
}
if (os != null) {
os.close();
}
} catch (IOException ioe) {
}
}
return success;
}
private static class RegexFilter implements FileFilter {
private Pattern mFilePattern;
private RegexFilter(String pattern) {
if (!TextUtils.isEmpty(pattern)) {
mFilePattern = Pattern.compile(pattern);
}
}
@Override
public boolean accept(File file) {
if (file == null) {
return false;
}
if (mFilePattern == null) {
return true;
}
return mFilePattern.matcher(file.getName()).matches();
}
}
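    /*
     * Usage sketch (not part of the original source): RegexFilter is a plain
     * java.io.FileFilter, so a listing helper (not shown in this copy) would apply it
     * roughly as below; the ".*\\.log" pattern is an example only.
     *
     *   File[] logs = new File("/sdcard/myapp").listFiles(new RegexFilter(".*\\.log"));
     */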
public static boolean saveFile(byte[] bytes, String filename) {
if (TextUtils.isEmpty(filename)) {
return false;
}
return saveFile(bytes, new File(filename));
}
public static boolean saveFile(byte[] bytes, File file) {
if (bytes == null || file == null) {
return false;
}
boolean success = false;
try {
FileOutputStream out = new FileOutputStream(file);
out.write(bytes);
out.flush();
out.close();
success = true;
} catch (IOException e) {
Logger.debug("save bytes failure: %s", e.toString());
success = false;
}
return success;
}
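    /*
     * Alternative sketch (not part of the original source): on API 19+ (or with
     * desugaring) a try-with-resources form closes the stream even when write()
     * throws, which the version above does not guarantee.
     *
     *   try (FileOutputStream out = new FileOutputStream(file)) {
     *       out.write(bytes);
     *       out.flush();
     *       return true;
     *   } catch (IOException e) {
     *       Logger.debug("save bytes failure: %s", e.toString());
     *       return false;
     *   }
     */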
}
|