answer stringlengths 17 10.2M |
|---|
package org.pentaho.reporting.engine.classic.core.elementfactory;
import java.util.ArrayList;
import java.util.Locale;
import org.pentaho.reporting.engine.classic.core.AttributeNames;
import org.pentaho.reporting.engine.classic.core.CrosstabCell;
import org.pentaho.reporting.engine.classic.core.CrosstabCellBody;
import org.pentaho.reporting.engine.classic.core.CrosstabColumnGroup;
import org.pentaho.reporting.engine.classic.core.CrosstabColumnGroupBody;
import org.pentaho.reporting.engine.classic.core.CrosstabGroup;
import org.pentaho.reporting.engine.classic.core.CrosstabOtherGroup;
import org.pentaho.reporting.engine.classic.core.CrosstabOtherGroupBody;
import org.pentaho.reporting.engine.classic.core.CrosstabRowGroup;
import org.pentaho.reporting.engine.classic.core.CrosstabRowGroupBody;
import org.pentaho.reporting.engine.classic.core.DetailsHeader;
import org.pentaho.reporting.engine.classic.core.Element;
import org.pentaho.reporting.engine.classic.core.GroupBody;
import org.pentaho.reporting.engine.classic.core.MasterReport;
import org.pentaho.reporting.engine.classic.core.designtime.DesignTimeDataSchemaModel;
import org.pentaho.reporting.engine.classic.core.filter.types.LabelType;
import org.pentaho.reporting.engine.classic.core.filter.types.NumberFieldType;
import org.pentaho.reporting.engine.classic.core.filter.types.TextFieldType;
import org.pentaho.reporting.engine.classic.core.metadata.ElementType;
import org.pentaho.reporting.engine.classic.core.style.BandStyleKeys;
import org.pentaho.reporting.engine.classic.core.style.ElementStyleKeys;
import org.pentaho.reporting.engine.classic.core.wizard.AutoGeneratorUtility;
import org.pentaho.reporting.engine.classic.core.wizard.DataAttributeContext;
import org.pentaho.reporting.engine.classic.core.wizard.DataAttributes;
import org.pentaho.reporting.libraries.base.util.StringUtils;
/**
 * Declarative builder that assembles a crosstab report structure from
 * row/column/other dimensions and detail fields. Dimensions are added
 * outermost-first; {@link #create()} nests them into a
 * {@link CrosstabGroup} in the order: other-groups, row-groups,
 * column-groups, cell body.
 */
public class CrosstabBuilder
{
  // Dimension/field collections; insertion order = outermost-first nesting.
  private ArrayList<CrosstabDimension> rows;
  private ArrayList<CrosstabDimension> columns;
  private ArrayList<String> others;
  private ArrayList<CrosstabDetail> details;
  // Optional design-time schema; when present, generated field elements are
  // typed from the schema instead of defaulting to plain text fields.
  private DesignTimeDataSchemaModel dataSchemaModel;
  private String groupNamePrefix;
  // Geometry applied to every generated element; null means "unset".
  private Float minimumWidth;
  private Float minimumHeight;
  private Float maximumWidth;
  private Float maximumHeight;
  private Float prefWidth;
  private Float prefHeight;
  // Wizard attributes propagated onto every generated element.
  private Boolean allowMetaDataStyling;
  private Boolean allowMetaDataAttributes;

  /**
   * Creates a builder with default element geometry (80 x 20) and an empty
   * group-name prefix.
   *
   * @param dataSchemaModel design-time schema used to derive field element
   *        types; may be null (fields then default to text fields).
   */
  public CrosstabBuilder(final DesignTimeDataSchemaModel dataSchemaModel)
  {
    rows = new ArrayList<CrosstabDimension>();
    columns = new ArrayList<CrosstabDimension>();
    others = new ArrayList<String>();
    details = new ArrayList<CrosstabDetail>();
    this.dataSchemaModel = dataSchemaModel;
    this.groupNamePrefix = "";
    this.minimumHeight = 20f;
    this.maximumHeight = 20f;
    this.maximumWidth = 80f;
    this.minimumWidth = 80f;
  }

  /** @return the minimum width applied to generated elements, or null if unset. */
  public Float getMinimumWidth()
  {
    return minimumWidth;
  }

  /** @param minimumWidth the minimum width for generated elements (may be null). */
  public void setMinimumWidth(final Float minimumWidth)
  {
    this.minimumWidth = minimumWidth;
  }

  /** @return the minimum height applied to generated elements, or null if unset. */
  public Float getMinimumHeight()
  {
    return minimumHeight;
  }

  /** @param minimumHeight the minimum height for generated elements (may be null). */
  public void setMinimumHeight(final Float minimumHeight)
  {
    this.minimumHeight = minimumHeight;
  }

  /** @return the maximum width applied to generated elements, or null if unset. */
  public Float getMaximumWidth()
  {
    return maximumWidth;
  }

  /** @param maximumWidth the maximum width for generated elements (may be null). */
  public void setMaximumWidth(final Float maximumWidth)
  {
    this.maximumWidth = maximumWidth;
  }

  /** @return the maximum height applied to generated elements, or null if unset. */
  public Float getMaximumHeight()
  {
    return maximumHeight;
  }

  /** @param maximumHeight the maximum height for generated elements (may be null). */
  public void setMaximumHeight(final Float maximumHeight)
  {
    this.maximumHeight = maximumHeight;
  }

  /** @return the preferred width applied to generated elements, or null if unset. */
  public Float getPrefWidth()
  {
    return prefWidth;
  }

  /** @param prefWidth the preferred width for generated elements (may be null). */
  public void setPrefWidth(final Float prefWidth)
  {
    this.prefWidth = prefWidth;
  }

  /** @return the preferred height applied to generated elements, or null if unset. */
  public Float getPrefHeight()
  {
    return prefHeight;
  }

  /** @param prefHeight the preferred height for generated elements (may be null). */
  public void setPrefHeight(final Float prefHeight)
  {
    this.prefHeight = prefHeight;
  }

  /** @return whether generated elements allow metadata-driven styling (may be null). */
  public Boolean getAllowMetaDataStyling()
  {
    return allowMetaDataStyling;
  }

  /** @param allowMetaDataStyling whether generated elements allow metadata styling. */
  public void setAllowMetaDataStyling(final Boolean allowMetaDataStyling)
  {
    this.allowMetaDataStyling = allowMetaDataStyling;
  }

  /** @return whether generated labels allow metadata attributes (may be null). */
  public Boolean getAllowMetaDataAttributes()
  {
    return allowMetaDataAttributes;
  }

  /** @param allowMetaDataAttributes whether generated labels allow metadata attributes. */
  public void setAllowMetaDataAttributes(final Boolean allowMetaDataAttributes)
  {
    this.allowMetaDataAttributes = allowMetaDataAttributes;
  }

  /** @return the prefix prepended to every generated group name. */
  public String getGroupNamePrefix()
  {
    return groupNamePrefix;
  }

  /** @param groupNamePrefix the prefix prepended to every generated group name. */
  public void setGroupNamePrefix(final String groupNamePrefix)
  {
    this.groupNamePrefix = groupNamePrefix;
  }

  /** Adds an "other" dimension, grouped outside the row/column matrix. */
  public void addOtherDimension(final String field)
  {
    others.add(field);
  }

  /** Adds a fully configured row dimension (outermost-first). */
  public void addRowDimension(final CrosstabDimension dimension)
  {
    rows.add(dimension);
  }

  /** Adds a row dimension for the given field, without a summary row. */
  public void addRowDimension(final String field)
  {
    addRowDimension(new CrosstabDimension(field, field, false, "Summary"));
  }

  /** Adds a row dimension for the given field, optionally printing a "Summary" row. */
  public void addRowDimension(final String field, final boolean addSummary)
  {
    addRowDimension(new CrosstabDimension(field, field, addSummary, "Summary"));
  }

  /** Adds a fully configured column dimension (outermost-first). */
  public void addColumnDimension(final CrosstabDimension dimension)
  {
    columns.add(dimension);
  }

  /** Adds a column dimension for the given field, without a summary column. */
  public void addColumnDimension(final String field)
  {
    addColumnDimension(new CrosstabDimension(field, field, false, "Summary"));
  }

  /** Adds a column dimension for the given field, optionally printing a "Summary" column. */
  public void addColumnDimension(final String field, final boolean addSummary)
  {
    addColumnDimension(new CrosstabDimension(field, field, addSummary, "Summary"));
  }

  /** Adds a fully configured detail (measure) cell definition. */
  public void addDetails(final CrosstabDetail detail)
  {
    details.add(detail);
  }

  /** Adds a detail for the given field with the given aggregation type (may be null). */
  public void addDetails(final String field, final Class aggregation)
  {
    details.add(new CrosstabDetail(field, field, aggregation));
  }

  /**
   * Convenience wrapper: builds the crosstab and installs it as the root
   * group of a fresh {@link MasterReport}.
   *
   * @return a new report whose root group is the built crosstab.
   */
  public MasterReport createReport()
  {
    final MasterReport report = new MasterReport();
    report.setRootGroup(create());
    return report;
  }

  /**
   * Builds the crosstab group hierarchy from the configured dimensions.
   * Nesting (outer to inner): other-groups, row-groups, column-groups,
   * then the shared cell body.
   *
   * @return the assembled crosstab group.
   * @throws IllegalStateException if no column or no row dimension was added.
   */
  public CrosstabGroup create()
  {
    if (columns.size() == 0)
    {
      throw new IllegalStateException();
    }
    if (rows.size() == 0)
    {
      throw new IllegalStateException();
    }
    final CrosstabCellBody cellBody = new CrosstabCellBody();
    // The default (unnamed) detail cell plus its header row.
    cellBody.addElement(createDetailsCell());
    setupDetailsHeader(cellBody.getHeader());
    // Build from the innermost structure outwards.
    GroupBody body = createColumnGroups(cellBody);
    body = createRowGroups(cellBody, body);
    body = createOtherGroups(body);
    return new CrosstabGroup(body);
  }

  /**
   * Wraps the given body in one CrosstabOtherGroup per "other" field.
   * Iterates in reverse so the first-added field becomes the outermost group.
   */
  private GroupBody createOtherGroups(GroupBody body)
  {
    for (int other = others.size() - 1; other >= 0; other -= 1)
    {
      final String column = others.get(other);
      final CrosstabOtherGroup columnGroup = new CrosstabOtherGroup(body);
      columnGroup.setField(column);
      columnGroup.getHeader().addElement(createFieldItem(column));
      body = new CrosstabOtherGroupBody(columnGroup);
    }
    return body;
  }

  /**
   * Wraps the given body in one CrosstabRowGroup per row dimension
   * (reverse iteration: first-added dimension ends up outermost). For each
   * row dimension with printSummary enabled, registers extra summary cells
   * in the cell body - one row-summary cell, plus one combined cell for
   * every column dimension that also prints a summary.
   */
  private GroupBody createRowGroups(final CrosstabCellBody cellBody, GroupBody body)
  {
    for (int row = rows.size() - 1; row >= 0; row -= 1)
    {
      final CrosstabDimension rowDimension = rows.get(row);
      final CrosstabRowGroup rowGroup = new CrosstabRowGroup(body);
      rowGroup.setName(groupNamePrefix + rowDimension.getField());
      rowGroup.setField(rowDimension.getField());
      // NOTE(review): -100f appears to request a relative (percentage) height
      // from the layouter - TODO confirm against the engine's style handling.
      rowGroup.getTitleHeader().getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, -100f);
      rowGroup.getTitleHeader().addElement(createLabel(rowDimension.getTitle(), rowDimension.getField()));
      rowGroup.getHeader().getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, -100f);
      rowGroup.getHeader().addElement(createFieldItem(rowDimension.getField()));
      rowGroup.getSummaryHeader().getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, -100f);
      rowGroup.getSummaryHeader().addElement(createLabel(rowDimension.getSummaryTitle(), null));
      rowGroup.setPrintSummary(rowDimension.isPrintSummary());
      if (rowDimension.isPrintSummary())
      {
        // Summary cell for this row dimension alone.
        final CrosstabCell cell = createDetailsCell();
        cell.setRowField(rowDimension.getField());
        cell.setName(rowDimension.getField());
        cellBody.addElement(cell);
        // Intersection summary cells (row summary x column summary).
        for (int col = columns.size() - 1; col >= 0; col -= 1)
        {
          final CrosstabDimension column = columns.get(col);
          if (column.isPrintSummary())
          {
            final CrosstabCell crosstabCell = createDetailsCell();
            crosstabCell.setColumnField(column.getField());
            crosstabCell.setRowField(rowDimension.getField());
            crosstabCell.setName(column.getField() + "," + rowGroup.getField());
            cellBody.addElement(crosstabCell);
          }
        }
      }
      body = new CrosstabRowGroupBody(rowGroup);
    }
    return body;
  }

  /**
   * Wraps the cell body in one CrosstabColumnGroup per column dimension
   * (reverse iteration: first-added dimension ends up outermost). Column
   * dimensions with printSummary enabled get a named summary cell.
   */
  private GroupBody createColumnGroups(final CrosstabCellBody cellBody)
  {
    GroupBody body = cellBody;
    for (int col = columns.size() - 1; col >= 0; col -= 1)
    {
      final CrosstabDimension column = columns.get(col);
      final CrosstabColumnGroup columnGroup = new CrosstabColumnGroup(body);
      columnGroup.setName(groupNamePrefix + column.getField());
      columnGroup.setField(column.getField());
      columnGroup.getTitleHeader().getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, -100f);
      columnGroup.getTitleHeader().addElement(createLabel(column.getTitle(), column.getField()));
      columnGroup.getHeader().getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, -100f);
      columnGroup.getHeader().addElement(createFieldItem(column.getField()));
      columnGroup.getSummaryHeader().getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, -100f);
      columnGroup.getSummaryHeader().addElement(createLabel(column.getSummaryTitle(), null));
      columnGroup.setPrintSummary(column.isPrintSummary());
      if (column.isPrintSummary())
      {
        final CrosstabCell cell = createDetailsCell();
        cell.setColumnField(column.getField());
        cell.setName(column.getField());
        cellBody.addElement(cell);
      }
      body = new CrosstabColumnGroupBody(columnGroup);
    }
    return body;
  }

  /**
   * Creates a row-layouted crosstab cell holding one field element per
   * configured detail. Used for the default cell and for every summary cell.
   */
  private CrosstabCell createDetailsCell()
  {
    final CrosstabCell cell = new CrosstabCell();
    cell.getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, -100f);
    cell.getStyle().setStyleProperty(BandStyleKeys.LAYOUT, BandStyleKeys.LAYOUT_ROW);
    for (int i = 0; i < details.size(); i += 1)
    {
      final CrosstabDetail crosstabDetail = details.get(i);
      // split=true: the configured widths are shared evenly among details.
      cell.addElement(createFieldItem(crosstabDetail.getField(), crosstabDetail.getAggregation(), true));
    }
    return cell;
  }

  /**
   * Populates the details header with one label per detail, falling back to
   * the field name when a detail has no explicit title.
   */
  private void setupDetailsHeader(final DetailsHeader cell)
  {
    cell.getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, -100f);
    cell.getStyle().setStyleProperty(BandStyleKeys.LAYOUT, BandStyleKeys.LAYOUT_ROW);
    for (int i = 0; i < details.size(); i += 1)
    {
      final CrosstabDetail crosstabDetail = details.get(i);
      String title = crosstabDetail.getTitle();
      if (StringUtils.isEmpty(title))
      {
        title = crosstabDetail.getField();
      }
      cell.addElement(createLabel(title, crosstabDetail.getField(), true));
    }
  }

  /** Creates an unaggregated, unsplit field element for the given field. */
  private Element createFieldItem(final String text)
  {
    return createFieldItem(text, null, false);
  }

  /**
   * Creates a data-field element. The element type is derived from the
   * data schema when one is available, otherwise a plain text field is used.
   *
   * @param fieldName       the field the element reads.
   * @param aggregationType wizard aggregation class, or null for none.
   * @param split           when true, width styles are divided by the
   *                        number of details (see {@link #split}).
   */
  private Element createFieldItem(final String fieldName,
                                  final Class aggregationType,
                                  final boolean split)
  {
    final ElementType targetType;
    if (dataSchemaModel != null)
    {
      final DataAttributeContext context = dataSchemaModel.getDataAttributeContext();
      final DataAttributes attributes = dataSchemaModel.getDataSchema().getAttributes(fieldName);
      targetType = AutoGeneratorUtility.createFieldType(attributes, context);
    }
    else
    {
      targetType = TextFieldType.INSTANCE;
    }
    final Element element = new Element();
    element.setElementType(targetType);
    element.getElementType().configureDesignTimeDefaults(element, Locale.getDefault());
    if (targetType instanceof NumberFieldType)
    {
      // Default numeric format; negatives render without parentheses.
      element.setAttribute(AttributeNames.Core.NAMESPACE, AttributeNames.Core.FORMAT_STRING, "0.00;-0.00");
    }
    element.setAttribute(AttributeNames.Core.NAMESPACE, AttributeNames.Core.FIELD, fieldName);
    // Only horizontal sizes are split among details; heights apply as-is.
    element.getStyle().setStyleProperty(ElementStyleKeys.MIN_WIDTH, split(split, minimumWidth));
    element.getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, minimumHeight);
    element.getStyle().setStyleProperty(ElementStyleKeys.WIDTH, split(split, prefWidth));
    element.getStyle().setStyleProperty(ElementStyleKeys.HEIGHT, prefHeight);
    element.getStyle().setStyleProperty(ElementStyleKeys.MAX_WIDTH, split(split, maximumWidth));
    element.getStyle().setStyleProperty(ElementStyleKeys.MAX_HEIGHT, maximumHeight);
    element.setAttribute(AttributeNames.Wizard.NAMESPACE, AttributeNames.Wizard.AGGREGATION_TYPE, aggregationType);
    element.setAttribute(AttributeNames.Wizard.NAMESPACE, AttributeNames.Wizard.ALLOW_METADATA_STYLING, allowMetaDataStyling);
    return element;
  }

  /** Creates an unsplit label element with the given text. */
  private Element createLabel(final String text, final String labelFor)
  {
    return createLabel(text, labelFor, false);
  }

  /**
   * Creates a label element.
   *
   * @param text      the static label text.
   * @param labelFor  the field this label titles, or null for standalone
   *                  labels (e.g. summary titles).
   * @param splitArea when true, width styles are divided by the number of
   *                  details (see {@link #split}).
   */
  private Element createLabel(final String text, final String labelFor, final boolean splitArea)
  {
    final Element element = new Element();
    element.setElementType(LabelType.INSTANCE);
    element.setAttribute(AttributeNames.Core.NAMESPACE, AttributeNames.Core.VALUE, text);
    element.getStyle().setStyleProperty(ElementStyleKeys.MIN_WIDTH, split(splitArea, minimumWidth));
    element.getStyle().setStyleProperty(ElementStyleKeys.MIN_HEIGHT, minimumHeight);
    element.getStyle().setStyleProperty(ElementStyleKeys.WIDTH, split(splitArea, prefWidth));
    element.getStyle().setStyleProperty(ElementStyleKeys.HEIGHT, prefHeight);
    element.getStyle().setStyleProperty(ElementStyleKeys.MAX_WIDTH, split(splitArea, maximumWidth));
    element.getStyle().setStyleProperty(ElementStyleKeys.MAX_HEIGHT, maximumHeight);
    element.setAttribute(AttributeNames.Wizard.NAMESPACE, AttributeNames.Wizard.ALLOW_METADATA_STYLING, allowMetaDataStyling);
    // Labels that title a concrete field always allow metadata attributes.
    element.setAttribute(AttributeNames.Wizard.NAMESPACE, AttributeNames.Wizard.ALLOW_METADATA_ATTRIBUTES,
        StringUtils.isEmpty(labelFor) == false || allowMetaDataAttributes);
    element.setAttribute(AttributeNames.Wizard.NAMESPACE, AttributeNames.Wizard.LABEL_FOR, labelFor);
    return element;
  }

  /**
   * Divides a width value evenly among the configured details when
   * {@code split} is true; passes the value through otherwise. Null values
   * are preserved (style stays unset).
   */
  private Float split(final boolean split, final Float value)
  {
    if (split == false)
    {
      return value;
    }
    if (value == null)
    {
      return null;
    }
    final float f = value;
    return f / Math.max(1, details.size());
  }
}
package de.charite.compbio.exomiser.core.filters;
import de.charite.compbio.exomiser.core.model.Filterable;
import de.charite.compbio.exomiser.core.model.Gene;
import de.charite.compbio.exomiser.core.model.VariantEvaluation;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Jules Jacobsen <jules.jacobsen@sanger.ac.uk>
*/
public class SimpleGeneFilterRunner implements GeneFilterRunner {
private static final Logger logger = LoggerFactory.getLogger(SimpleGeneFilterRunner.class);
@Override
public List<Gene> run(List<GeneFilter> filters, List<Gene> genes) {
logger.info("Filtering {} genes using non-destructive simple filtering", genes.size());
for (Gene gene : genes) {
//Gene filtering needs to happen after variant filtering and only on genes which have passed the variant filtering steps
//TODO: does this really have to be the case???
if (gene.passedFilters()) {
runAllFiltersOverGene(filters, gene);
}
}
logger.info("Ran {} filters over {} genes using non-destructive simple filtering.", getFilterTypes(filters), genes.size());
return genes;
}
@Override
public List<Gene> run(GeneFilter filter, List<Gene> genes) {
for (Gene gene : genes) {
if (gene.passedFilters()) {
FilterResult filterResult = runFilterAndAddResult(filter, gene);
addFilterResultToVariants(filterResult, gene.getVariantEvaluations());
}
}
return genes;
}
private void runAllFiltersOverGene(List<GeneFilter> filters, Gene gene) {
for (Filter filter : filters) {
FilterResult filterResult = runFilterAndAddResult(filter, gene);
//TODO: should this always be the case?
addFilterResultToVariants(filterResult, gene.getVariantEvaluations());
}
}
private void addFilterResultToVariants(FilterResult filterResult, List<VariantEvaluation> variantEvaluations) {
for (VariantEvaluation variantEvaluation : variantEvaluations) {
variantEvaluation.addFilterResult(filterResult);
}
}
private FilterResult runFilterAndAddResult(Filter filter, Filterable filterable) {
FilterResult filterResult = filter.runFilter(filterable);
if (filterResult.getResultStatus() != FilterResultStatus.NOT_RUN) {
filterable.addFilterResult(filterResult);
}
return filterResult;
}
private Set<FilterType> getFilterTypes(List<GeneFilter> filters) {
Set<FilterType> filtersRun = new LinkedHashSet<>();
for (Filter filter : filters) {
filtersRun.add(filter.getFilterType());
}
return filtersRun;
}
} |
package jade.domain.FIPAAgentManagement;
import jade.util.leap.List;
import jade.util.leap.ArrayList;
import jade.util.leap.Iterator;
import java.util.Date;
import jade.util.leap.Properties;
import jade.core.AID;
import jade.content.Concept;
/**
* This class models an envelope.
* @see jade.domain.FIPAAgentManagement.FIPAManagementOntology
* @author Fabio Bellifemine - CSELT S.p.A.
* @version $Date$ $Revision$
*/
public class Envelope implements Concept, jade.util.leap.Serializable {

    // Initial capacity for the slot collections; most envelopes carry only
    // one or two entries per list.
    private final static int EXPECTED_LIST_SIZE = 2;
    /**
     Collection of primary receivers.
     @serial
     */
    private ArrayList to = new ArrayList(EXPECTED_LIST_SIZE);
    /**
     Envelope sender.
     @serial
     */
    private AID from;
    /**
     @serial
     */
    private String comments;
    /**
     @serial
     */
    private String aclRepresentation;
    /**
     Payload length in bytes; initialized to -1 (unknown) by the constructor.
     @serial
     */
    private Long payloadLength;
    /**
     @serial
     */
    private String payloadEncoding;
    /**
     @serial
     */
    private Date date;
    /**
     @serial
     */
    private ArrayList intendedReceiver = new ArrayList(EXPECTED_LIST_SIZE);
    /**
     @serial
     */
    private Properties transportBehaviour;
    /**
     Received-object stamps, appended in routing order (oldest first).
     @serial
     */
    private ArrayList stamps = new ArrayList(EXPECTED_LIST_SIZE);
    /**
     @serial
     */
    private ArrayList properties = new ArrayList(EXPECTED_LIST_SIZE);

    /**
     * Default constructor. Initializes the payloadLength to -1.
     **/
    public Envelope () {
        payloadLength = new Long(-1);
    }

    /**
     Add an agent identifier to the <code>to</code> slot collection
     of this object.
     @param id The agent identifier to add to the collection.
     */
    public void addTo(AID id) {
        to.add(id);
    }

    /**
     Remove an agent identifier from the <code>to</code> slot
     collection of this object.
     @param id The agent identifier to remove from the collection.
     @return A boolean, telling whether the element was present in
     the collection or not.
     */
    public boolean removeTo(AID id) {
        return to.remove(id);
    }

    /**
     Remove all agent identifiers from the <code>to</code> slot
     collection of this object.
     */
    public void clearAllTo() {
        to.clear();
    }

    /**
     Access all agent identifiers from the <code>to</code> slot
     collection of this object.
     @return An iterator over the agent identifiers collection.
     */
    public Iterator getAllTo() {
        return to.iterator();
    }

    /**
     Set the <code>from</code> slot of this object.
     @param id The agent identifier for the envelope sender.
     */
    public void setFrom(AID id) {
        from = id;
    }

    /**
     Retrieve the <code>from</code> slot of this object.
     @return The value of the <code>from</code> slot of this
     envelope, or <code>null</code> if no value was set.
     */
    public AID getFrom() {
        return from;
    }

    /**
     Set the <code>comments</code> slot of this object.
     @param c The string for the envelope comments.
     */
    public void setComments(String c) {
        comments = c;
    }

    /**
     Retrieve the <code>comments</code> slot of this object.
     @return The value of the <code>comments</code> slot of this
     envelope, or <code>null</code> if no value was set.
     */
    public String getComments() {
        return comments;
    }

    /**
     Set the <code>acl-representation</code> slot of this object.
     @param r The string for the ACL representation.
     */
    public void setAclRepresentation(String r) {
        aclRepresentation = r;
    }

    /**
     Retrieve the <code>acl-representation</code> slot of this
     object.
     @return The value of the <code>acl-representation</code> slot
     of this envelope, or <code>null</code> if no value was set.
     */
    public String getAclRepresentation() {
        return aclRepresentation;
    }

    /**
     Set the <code>payload-length</code> slot of this object.
     @param l The payload length, in bytes.
     */
    public void setPayloadLength(Long l) {
        payloadLength = l;
    }

    /**
     Retrieve the <code>payload-length</code> slot of this object.
     @return The value of the <code>payload-length</code> slot of
     this envelope, or <code>null</code> if no value was set.
     */
    public Long getPayloadLength() {
        return payloadLength;
    }

    /**
     Set the <code>payload-encoding</code> slot of this object.
     @param e The string for the payload encoding.
     */
    public void setPayloadEncoding(String e) {
        payloadEncoding = e;
    }

    /**
     Retrieve the <code>payload-encoding</code> slot of this object.
     @return The value of the <code>payload-encoding</code> slot of
     this envelope, or <code>null</code> if no value was set.
     */
    public String getPayloadEncoding() {
        return payloadEncoding;
    }

    /**
     Set the <code>date</code> slot of this object.
     @param d The envelope date.
     */
    public void setDate(Date d) {
        date = d;
    }

    /**
     Retrieve the <code>date</code> slot of this object.
     @return The value of the <code>date</code> slot of this
     envelope, or <code>null</code> if no value was set.
     */
    public Date getDate() {
        return date;
    }

    /**
     Add an agent identifier to the <code>intended-receiver</code>
     slot collection of this object.
     @param id The agent identifier to add to the collection.
     */
    public void addIntendedReceiver(AID id) {
        intendedReceiver.add(id);
    }

    /**
     Remove an agent identifier from the
     <code>intended-receiver</code> slot collection of this object.
     @param id The agent identifier to remove from the collection.
     @return A boolean, telling whether the element was present in
     the collection or not.
     */
    public boolean removeIntendedReceiver(AID id) {
        return intendedReceiver.remove(id);
    }

    /**
     Remove all agent identifiers from the
     <code>intended-receiver</code> slot collection of this object.
     */
    public void clearAllIntendedReceiver() {
        intendedReceiver.clear();
    }

    /**
     Access all agent identifiers from the <code>intended
     receiver</code> slot collection of this object.
     @return An iterator over the agent identifiers collection.
     */
    public Iterator getAllIntendedReceiver() {
        return intendedReceiver.iterator();
    }

    /**
     Set the <code>received</code> slot of this object. Note that this
     does not replace earlier values: it appends a new stamp to the
     routing-stamp list (see {@link #addStamp(ReceivedObject)}).
     @param ro The received object for the <code>received</code>
     slot.
     */
    public void setReceived(ReceivedObject ro) {
        addStamp(ro);
    }

    /**
     Retrieve the <code>received</code> slot of this object, i.e. the
     most recently added stamp.
     @return The value of the <code>received</code> slot of this
     envelope, or <code>null</code> if no value was set.
     */
    public ReceivedObject getReceived() {
        if(stamps.isEmpty())
            return null;
        else
            return (ReceivedObject)stamps.get(stamps.size() - 1);
    }

    /**
     Add a <code>received-object</code> stamp to this message
     envelope. This method is used by the ACC to add a new stamp to
     the envelope at every routing hop.
     @param ro The <code>received-object</code> to add.
     */
    public void addStamp(ReceivedObject ro) {
        stamps.add(ro);
    }

    /**
     Access the list of all the stamps. The
     <code>received-object</code> stamps are sorted according to the
     routing path, from the oldest to the newest.
     */
    public ReceivedObject[] getStamps() {
        ReceivedObject[] ret = new ReceivedObject[stamps.size()];
        int counter = 0;
        for(Iterator it = stamps.iterator(); it.hasNext(); )
            ret[counter++] = (ReceivedObject)it.next();
        return ret;
    }

    /**
     Add a property to the <code>properties</code> slot collection
     of this object.
     @param p The property to add to the collection.
     */
    public void addProperties(Property p) {
        properties.add(p);
    }

    /**
     Remove a property from the <code>properties</code> slot
     collection of this object.
     @param p The property to remove from the collection.
     @return A boolean, telling whether the element was present in
     the collection or not.
     */
    public boolean removeProperties(Property p) {
        return properties.remove(p);
    }

    /**
     Remove all properties from the <code>properties</code> slot
     collection of this object.
     */
    public void clearAllProperties(){
        properties.clear();
    }

    /**
     Access all properties from the <code>properties</code> slot
     collection of this object.
     @return An iterator over the properties collection.
     */
    public Iterator getAllProperties() {
        return properties.iterator();
    }

    //#MIDP_EXCLUDE_BEGIN
    /**
     * Retrieve a string representation for this platform description.
     * @return an SL0-like String representation of this object
     **/
    public String toString() {
        // NOTE(review): string concatenation in loops is inefficient; left as-is
        // because this is a debugging aid only and the code targets legacy VMs.
        String s = new String("(Envelope ");
        Iterator i = getAllTo();
        if (i.hasNext()) {
            s = s + " :to (sequence ";
            for (Iterator ii=i; ii.hasNext(); )
                s = s+" "+ii.next().toString();
            s = s + ") ";
        }
        if (getFrom() != null)
            s = s + " :from " + getFrom().toString();
        if (getComments() != null)
            s = s + " :comments " + getComments();
        if (getAclRepresentation() != null)
            s = s + " :acl-representation " + getAclRepresentation();
        if (getPayloadLength() != null)
            s = s + " :payload-length " + getPayloadLength().toString();
        if (getPayloadEncoding() != null)
            s = s + " :payload-encoding " + getPayloadEncoding();
        if (getDate() != null)
            s = s + " :date " + getDate().toString();
        i = getAllIntendedReceiver();
        if (i.hasNext()) {
            s = s + " :intended-receiver (sequence ";
            for (Iterator ii=i; ii.hasNext(); )
                s = s+" "+ ii.next().toString();
            s = s + ") ";
        }
        ReceivedObject[] ro = getStamps();
        if (ro.length > 0 ) {
            s = s + " :received-object (sequence ";
            for (int j=0; j<ro.length; j++) {
                if (ro[j] != null) {
                    s = s + " "+ ro[j].toString();
                }
            }
            s = s + ") ";
        }
        if (properties.size() > 0) {
            s = s + " :properties (set";
            for (int j=0; j<properties.size(); j++) {
                Property p = (Property)properties.get(j);
                s = s + " " + p.getName() + " " + p.getValue();
            }
            s = s + ")";
        }
        return s+")";
    }
    //#MIDP_EXCLUDE_END

    //#APIDOC_EXCLUDE_BEGIN
    /**
     * Shallow-ish copy: the list containers are cloned so the copy can be
     * mutated independently, but the list elements and all scalar slots
     * (AID, Date, Properties, ...) are shared with the original.
     */
    public Object clone(){
        Envelope env = new Envelope();
        env.to = (ArrayList)to.clone();
        env.intendedReceiver= (ArrayList)intendedReceiver.clone();
        env.stamps = (ArrayList)stamps.clone();
        env.from = from;
        env.comments = comments;
        env.aclRepresentation = aclRepresentation;
        env.payloadLength = payloadLength;
        env.payloadEncoding = payloadEncoding;
        env.date = date;
        env.transportBehaviour = transportBehaviour;
        env.properties = (ArrayList)properties.clone();
        return env;
    }
    //#APIDOC_EXCLUDE_END

    //#MIDP_EXCLUDE_BEGIN
    // For persistence service - replaces the receiver list wholesale.
    private void setTo(ArrayList al) {
        to = al;
    }
    // For persistence service - exposes the live list, not a copy.
    private ArrayList getTo() {
        return to;
    }
    // For persistence service
    private void setIntendedReceivers(ArrayList al) {
        intendedReceiver = al;
    }
    // For persistence service
    private ArrayList getIntendedReceivers() {
        return intendedReceiver;
    }
    // For persistence service
    private void setProperties(ArrayList al) {
        properties = al;
    }
    // For persistence service
    private ArrayList getProperties() {
        return properties;
    }
    //#MIDP_EXCLUDE_END
}
// $Id: SafeScrollPane.java,v 1.4 2002/10/06 20:57:36 mdb Exp $
package com.threerings.media;
import java.awt.Component;
import javax.swing.JScrollPane;
import javax.swing.JViewport;
/**
* A scroll pane that is safe to use in frame managed views.
*/
/**
 * A scroll pane that is safe to use in frame managed views.
 * <p>
 * The viewport is forced into {@link JViewport#SIMPLE_SCROLL_MODE},
 * presumably because the default blit/backing-store scroll modes conflict
 * with the frame manager's own repainting — TODO confirm.
 */
public class SafeScrollPane extends JScrollPane
{
    /** Creates an empty safe scroll pane with no view component. */
    public SafeScrollPane ()
    {
    }

    /**
     * Creates a safe scroll pane displaying the supplied view.
     *
     * @param view the component to display in the viewport.
     */
    public SafeScrollPane (Component view)
    {
        super(view);
    }

    @Override // from JScrollPane; called by the superclass constructor
    protected JViewport createViewport ()
    {
        JViewport vp = new JViewport();
        vp.setScrollMode(JViewport.SIMPLE_SCROLL_MODE);
        return vp;
    }
}
package io.fabianterhorst.fastlayout.converters;
import java.util.List;
import io.fabianterhorst.fastlayout.annotations.Converter;
@Converter
public class LayoutConverter {
public LayoutAttribute convert(Object attributeValue, String attributeName) {
return onConvertLayoutAttributeValue(attributeValue, attributeName);
}
public List<LayoutAttribute> finish() {
return onFinish();
}
public LayoutAttribute onConvertLayoutAttributeValue(Object attributeValue, String attributeName) {
String attribute = String.valueOf(attributeValue);
boolean isString = true;
if((attribute.startsWith("@") || attribute.startsWith("?")) && attribute.contains("/")) {
String[] attributeSplit = attribute.split("/");
String type = attributeSplit[0].replace("@+", "").replace("@", "").replace("?", "");
attribute = "R." + type + "." + attributeSplit[1];
isString = false;
}
if (attribute.startsWith("R.dimen.")) {
return onConvertLayoutAttribute("(int) getContext().getResources().getDimension(" + attribute + ")", attributeName, false);
} else if (attribute.startsWith("R.string.")) {
return onConvertLayoutAttribute("getContext().getString(" + attribute + ")", attributeName, false);
} else if (attribute.endsWith("dp") && isNumber(attribute.replace("dp", ""))) {
return onConvertLayoutAttribute("(int)TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, " + attribute.replace("dp", "") + ", getResources().getDisplayMetrics())", attributeName, false);
} else if (attribute.equals("false") || attribute.equals("true")) {
return onConvertLayoutAttribute(attribute, attributeName, false);
} else if (attribute.endsWith("sp") && isNumber(attribute.replace("sp", ""))) {
return onConvertLayoutAttribute("(int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, " + attribute.replace("sp", "") + ", Resources.getSystem().getDisplayMetrics())", attributeName, false);
} else if (isNumber(attribute)) {
return onConvertLayoutAttribute(attributeValue, attributeName, false);
}
return onConvertLayoutAttribute(attribute, attributeName, isString);
}
public LayoutAttribute onConvertLayoutAttribute(Object attributeValue, String attributeName, boolean isString) {
attributeName = attributeToName(attributeName);
return new LayoutAttribute(setter(attributeName, attributeValue, isString));
}
public String attributeToName(String attribute) {
attribute = attribute.split(":")[1];
String[] split = attribute.split("_");
attribute = "";
for (String refactor : split) {
attribute += capitalize(refactor);
}
return attribute;
}
public List<LayoutAttribute> onFinish() {
return null;
}
public String setter(String name, Object value, boolean isString) {
return "set" + name + (isString ? "(\"" : "(") + value + (isString ? "\")" : ")");
}
public String attribute(String name, Object value) {
return name + " = " + value;
}
private boolean isNumber(Object text) {
try {
Integer.parseInt(String.valueOf(text));
return true;
} catch (NumberFormatException ignore) {
return false;
}
}
private static String capitalize(String name) {
if (name != null && name.length() != 0) {
char[] chars = name.toCharArray();
chars[0] = Character.toUpperCase(chars[0]);
return new String(chars);
} else {
return name;
}
}
} |
package org.jasig.portal;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.jasig.portal.channels.portlet.CPortletAdapter;
import org.jasig.portal.container.services.information.PortletStateManager;
import org.jasig.portal.jndi.JNDIManager;
import org.jasig.portal.services.LogService;
import org.jasig.portal.utils.ResourceLoader;
/**
* This is an entry point into the uPortal.
* @author Peter Kharchenko <pkharchenko@interactivebusiness.com>
* @version $Revision$
*/
public class PortalSessionManager extends HttpServlet {
public static final String INTERNAL_TAG_VALUE=Long.toHexString((new Random()).nextLong());
public static final String IDEMPOTENT_URL_TAG="idempotent";
private static boolean initialized = false;
private static ServletContext servletContext = null;
private static PortalSessionManager instance = null;
private static boolean fatalError = false;
public static final ErrorID initPortalContext = new ErrorID("config","JNDI","Cannot initialize JNDI context");
    /**
     * Provides access to the servlet instance ultimately to provide access
     * to the servlet context of the portal.
     * @return instance, the PortalSessionManager servlet instance
     *         ({@code null} until the container has called {@code init()})
     */
    public static final PortalSessionManager getInstance() {
        return instance;
    }
// Following flag allows to disable features that prevent
// repeated requests from going through. This is useful
// when debugging and typing things in on a command line.
// Otherwise, the flag should be set to false.
private static final boolean ALLOW_REPEATED_REQUESTS = PropertiesManager.getPropertyAsBoolean("org.jasig.portal.PortalSessionManager.allow_repeated_requests");
// random number generator
private static final Random randomGenerator = new Random();
static {
LogService.log(LogService.INFO, "uPortal started");
}
/**
* Initialize the PortalSessionManager servlet
* @throws ServletException
*/
    /**
     * Initialize the PortalSessionManager servlet. The body is guarded by a
     * static flag so the one-time portal bootstrap (portlet adapter config,
     * JNDI context, URL-cache tuning, shutdown hook, SAX driver) runs only
     * for the first servlet instance the container creates.
     * @throws ServletException if the container returns no ServletConfig
     */
    public void init() throws ServletException {
        if(!initialized) {
            instance = this;
            // Retrieve the servlet configuration object from the servlet container
            // and make sure it's available
            ServletConfig sc = getServletConfig();
            if (sc == null) {
                throw new ServletException("PortalSessionManager.init(): ServletConfig object was returned as null");
            }
            // Supply PortletContainer with ServletConfig
            CPortletAdapter.setServletConfig(sc);
            servletContext = sc.getServletContext();
            try {
                JNDIManager.initializePortalContext();
            } catch (Exception pe) {
                // JNDI bootstrap failure is recorded (fatalError) but init continues
                ExceptionHelper.genericTopHandler(initPortalContext,pe);
                fatalError=true;
            }
            // Turn off URL caching if it has been requested
            if (!PropertiesManager.getPropertyAsBoolean("org.jasig.portal.PortalSessionManager.url_caching")) {
                // strangely, we have to instantiate a URLConnection to turn off caching, so we'll get something we know is there
                try {
                    URL url = ResourceLoader.getResourceAsURL(PortalSessionManager.class, "/properties/portal.properties");
                    URLConnection conn = url.openConnection();
                    conn.setDefaultUseCaches(false);
                } catch (Exception e) {
                    LogService.log(LogService.WARN, "PortalSessionManager.init(): Caught Exception trying to disable URL Caching");
                }
            }
            // Log orderly shutdown time
            Runtime.getRuntime().addShutdownHook(new Thread("uPortal shutdown hook") {
                public void run() {
                    LogService.log(LogService.INFO, "uPortal stopped");
                }
            });
            // Flag that the portal has been initialized
            initialized = true;
            // Get the SAX implementation; only set a driver if none was configured
            if (System.getProperty("org.xml.sax.driver") == null) {
                System.setProperty("org.xml.sax.driver", PropertiesManager.getProperty("org.xml.sax.driver"));
            }
        }
    }
/**
* Process HTTP POST request
*
* @param req an incoming <code>HttpServletRequest</code> value
* @param res an outgoing <code>HttpServletResponse</code> value
* @exception ServletException if an error occurs
* @exception IOException if an error occurs
*/
public void doPost(HttpServletRequest req, HttpServletResponse res) {
doGet(req, res);
}
/**
* Process HTTP GET request.
*
* @param req an incoming <code>HttpServletRequest</code>
* @param res an outgoing <code>HttpServletResponse</code>
* @exception ServletException if an error occurs
* @exception IOException if an error occurs
*/
public void doGet(HttpServletRequest req, HttpServletResponse res) {
// Send the uPortal version in a header
res.setHeader("uPortal-version", "uPortal_rel-2-3+");
if (fatalError) {
try {
res.sendRedirect("error/fatal.htm");
} catch (IOException e) {
ExceptionHelper.genericTopHandler(Errors.bug,e);
}
return;
}
HttpSession session = req.getSession(false);
if (session != null) {
Set requestTags=null;
boolean request_verified=false;
if(!ALLOW_REPEATED_REQUESTS) {
// obtain a tag table
synchronized(session) {
requestTags=(Set)session.getAttribute("uP_requestTags");
if(requestTags==null) {
requestTags=Collections.synchronizedSet(new HashSet());
session.setAttribute("uP_requestTags",requestTags);
}
}
// determine current tag
UPFileSpec upfs=new UPFileSpec(req);
String tag=upfs.getTagId();
// see if the tag was registered
if(tag!=null) {
request_verified=(tag.equals(IDEMPOTENT_URL_TAG) || requestTags.remove(tag));
}
LogService.log(LogService.DEBUG, "PortalSessionManager::doGet() : request verified: "+request_verified);
}
try {
UserInstance userInstance = null;
try {
// Retrieve the user's UserInstance object
userInstance = UserInstanceManager.getUserInstance(req);
} catch(Exception e) {
ExceptionHelper.genericTopHandler(Errors.bug,e);
ExceptionHelper.generateErrorPage(res,e);
return;
}
// fire away
if(ALLOW_REPEATED_REQUESTS) {
userInstance.writeContent(new RequestParamWrapper(req,true),res);
} else {
// generate and register a new tag
String newTag=Long.toHexString(randomGenerator.nextLong());
LogService.log(LogService.DEBUG,"PortalSessionManager::doGet() : generated new tag \""+newTag+"\" for the session "+session.getId());
// no need to check for duplicates :) we'd have to wait a lifetime of a universe for this time happen
if(!requestTags.add(newTag)) {
LogService.log(LogService.ERROR,"PortalSessionManager::doGet() : a duplicate tag has been generated ! Time's up !");
}
RequestParamWrapper wrappedRequest = new RequestParamWrapper(req,request_verified);
wrappedRequest.getParameterMap().putAll(PortletStateManager.getURLDecodedParameters(wrappedRequest));
userInstance.writeContent(wrappedRequest, new ResponseSubstitutionWrapper(res,INTERNAL_TAG_VALUE,newTag));
}
} catch (Exception e) {
ExceptionHelper.genericTopHandler(Errors.bug,e);
ExceptionHelper.generateErrorPage(res,e);
return;
}
} else {
try {
//throw new ServletException("Session object is null !");
res.sendRedirect(req.getContextPath() + "/Login" );
} catch (Exception e) {
ExceptionHelper.genericTopHandler(Errors.bug,e);
ExceptionHelper.generateErrorPage(res,e);
return;
}
}
}
/**
* Gets a URL associated with the named resource.
* Call this to access files with paths relative to the
* document root. Paths should begin with a "/".
* @param resource relative to the document root
* @return a URL associated with the named resource or null if the URL isn't accessible
* @throws java.net.MalformedURLException
*/
public static URL getResourceAsURL(String resource) {
//Make sure resource string starts with a "/"
if (!resource.startsWith("/"))
resource = "/" + resource;
URL url = null;
try {
url = servletContext.getResource(resource);
} catch (java.net.MalformedURLException murle) {
// if the URL is bad, just return null
}
return url;
}
/**
* Gets an input stream associated with the named resource.
* Call this to access files with paths relative to the
* document root. Paths should begin with a "/".
* @param resource relative to the document root
* @return an input stream assosiated with the named resource
*/
public static java.io.InputStream getResourceAsStream(String resource) {
//Make sure resource string starts with a "/"
if (!resource.startsWith("/"))
resource = "/" + resource;
return servletContext.getResourceAsStream(resource);
}
} |
package com.google.sps.data;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.Entity;
import java.util.ArrayList;
import java.util.List;
import com.google.sps.service.DatabaseService;
public class Room {
public static final String ROOM_ENTITY_NAME = "Room";
public static final String TITLE_PROPERTY_KEY = "title";
public static final String DESCRIPTION_PROPERTY_KEY = "description";
public static final String HOST_PROPERTY_KEY = "host";
public static final String FOLLOWERS_PROPERTY_KEY = "followers";
private Entity entity;
public Room(Entity entity) {
this.entity = entity;
}
public Room(User host, String title, String description) {
this.entity = new Entity(Room.ROOM_ENTITY_NAME);
this.entity.setProperty(Room.TITLE_PROPERTY_KEY, title);
this.entity.setProperty(Room.DESCRIPTION_PROPERTY_KEY, description);
this.entity.setProperty(Room.HOST_PROPERTY_KEY, host.getUserKey());
}
public Entity getRoomEntity() {
return this.entity;
}
public Key getHost() {
return (Key) this.entity.getProperty(Room.HOST_PROPERTY_KEY);
}
public void setHost(User host) {
this.entity.setProperty(Room.HOST_PROPERTY_KEY, host.getUserKey());
}
public String getTitle() {
return (String) this.entity.getProperty(Room.TITLE_PROPERTY_KEY);
}
public void setTitle(String title){
this.entity.setProperty(Room.TITLE_PROPERTY_KEY, title);
}
@SuppressWarnings("unchecked")
public List<Key> getAllFollowers() {
if (this.entity.getProperty(Room.FOLLOWERS_PROPERTY_KEY) == null) {
return new ArrayList<Key>();
}
return (ArrayList<Key>) this.entity.getProperty(Room.FOLLOWERS_PROPERTY_KEY);
}
@SuppressWarnings("unchecked")
public void addFollower(User follower) {
if (this.entity.getProperty(Room.FOLLOWERS_PROPERTY_KEY) == null) {
this.entity.setProperty(Room.FOLLOWERS_PROPERTY_KEY, new ArrayList<Key>());
}
ArrayList<Key> followers = (ArrayList<Key>) this.entity.getProperty(Room.FOLLOWERS_PROPERTY_KEY);
followers.add(follower.getUserKey());
}
@SuppressWarnings("unchecked")
public void removeFollower(User follower) {
if (this.entity.getProperty(Room.FOLLOWERS_PROPERTY_KEY) == null) {
this.entity.setProperty(Room.FOLLOWERS_PROPERTY_KEY, new ArrayList<Key>());
}
ArrayList<Key> followers = (ArrayList<Key>) this.entity.getProperty(Room.FOLLOWERS_PROPERTY_KEY);
followers.remove(follower.getUserKey());
}
@SuppressWarnings("unchecked")
public boolean isFollowerInRoom(User follower) {
ArrayList<Key> followers = (ArrayList<Key>) this.entity.getProperty(Room.FOLLOWERS_PROPERTY_KEY);
return (!(followers.lastIndexOf(follower.getUserKey()) == -1));
}
} |
package org.jasig.portal;
import java.io.Serializable;
import java.io.PrintWriter;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.StringWriter;
import java.util.Enumeration;
import java.util.Map;
import java.util.Hashtable;
import java.util.HashSet;
import java.util.Collections;
import java.util.Locale;
import java.util.Random;
import java.util.Set;
import java.net.URL;
import java.lang.SecurityManager;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.security.AccessController;
import org.jasig.portal.services.ExternalServices;
import org.jasig.portal.services.LogService;
import org.jasig.portal.jndi.JNDIManager;
import org.jasig.portal.utils.SubstitutionWriter;
import org.jasig.portal.utils.SubstitutionServletOutputStream;
import com.oreilly.servlet.multipart.MultipartParser;
import com.oreilly.servlet.multipart.FilePart;
import com.oreilly.servlet.multipart.ParamPart;
public class PortalSessionManager extends HttpServlet {
// public static final String SESSION_TAG_VARIABLE="uP_session_tag";
public static final String INTERNAL_TAG_VALUE=Long.toHexString((new Random()).nextLong());
private static final int sizeLimit = PropertiesManager.getPropertyAsInt("org.jasig.portal.PortalSessionManager.File_upload_max_size");
private static boolean initialized = false;
private static ServletContext servletContext = null;
// Following flag allows to disable features that prevent
// repeated requests from going through. This is useful
// when debugging and typing things in on a command line.
// Otherwise, the flag should be set to false.
private static final boolean ALLOW_REPEATED_REQUESTS=false;
// random number generator
private static final Random randomGenerator= new Random();
static {
LogService.instance().log(LogService.INFO, "uPortal started");
}
/**
* Initialize the PortalSessionManager servlet
* @throws ServletException
*/
public void init() throws ServletException {
if(!initialized) {
// Retrieve the servlet configuration object from the servlet container
// and make sure it's available
ServletConfig sc = getServletConfig();
if (sc == null) {
throw new ServletException("PortalSessionManager.init(): ServletConfig object was returned as null");
}
servletContext = sc.getServletContext();
JNDIManager.initializePortalContext();
// Start any portal services configured in services.xml
try {
ExternalServices.startServices();
} catch (Exception ex) {
LogService.instance().log(LogService.ERROR, ex);
throw new ServletException ("Failed to start external portal services.");
}
// Flag that the portal has been initialized
initialized = true;
// Get the SAX implementation
if (System.getProperty("org.xml.sax.driver") == null) {
System.setProperty("org.xml.sax.driver", PropertiesManager.getProperty("org.xml.sax.driver"));
}
}
}
/**
* Process HTTP POST request
*
* @param req an incoming <code>HttpServletRequest</code> value
* @param res an outgoing <code>HttpServletResponse</code> value
* @exception ServletException if an error occurs
* @exception IOException if an error occurs
*/
public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
doGet(req, res);
}
/**
* Process HTTP GET request.
*
* @param req an incoming <code>HttpServletRequest</code>
* @param res an outgoing <code>HttpServletResponse</code>
* @exception ServletException if an error occurs
* @exception IOException if an error occurs
*/
public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
// Disable page caching
res.setHeader("pragma", "no-cache");
res.setHeader("Cache-Control", "no-cache, max-age=0, must-revalidate");
res.setHeader("uPortal-version", "uPortal_2-0-pre-release-2002-01-14");
res.setDateHeader("Expires", 0);
HttpSession session = req.getSession();
if (session != null) {
// obtain a tag table
Set requestTags=null;
synchronized(session) {
requestTags=(Set)session.getAttribute("uP_requestTags");
if(requestTags==null) {
requestTags=Collections.synchronizedSet(new HashSet());
session.setAttribute("uP_requestTags",requestTags);
}
}
// determine current tag
UPFileSpec upfs=new UPFileSpec(req);
String tag=upfs.getTagId();
// see if the tag was registered
boolean request_verified=false;
if(tag!=null) {
request_verified=requestTags.remove(tag);
}
LogService.instance().log(LogService.DEBUG, "PortalSessionManager::doGet() : request verified: "+request_verified);
try {
UserInstance userInstance = null;
try {
// Retrieve the user's UserInstance object
userInstance = UserInstanceManager.getUserInstance(req);
} catch(Exception e) {
// NOTE: Should probably be forwarded to error page if the user instance could not be properly retrieved.
LogService.instance().log(LogService.ERROR, e);
}
// generate and register a new tag
String newTag=Long.toHexString(randomGenerator.nextLong());
LogService.instance().log(LogService.DEBUG,"PortalSessionManager::doGet() : generated new tag \""+newTag+"\" for the session "+req.getSession(false).getId());
// no need to check for duplicates :) we'd have to wait a lifetime of a universe for this time happen
if(!requestTags.add(newTag)) {
LogService.instance().log(LogService.ERROR,"PortalSessionManager::doGet() : a duplicate tag has been generated ! Time's up !");
}
// fire away
if(ALLOW_REPEATED_REQUESTS) {
userInstance.writeContent(new RequestParamWrapper(req,true),res);
} else {
userInstance.writeContent(new RequestParamWrapper(req,request_verified), new ResponseSubstitutionWrapper(res,INTERNAL_TAG_VALUE,newTag));
}
} catch (PortalException pe) {
if(pe.getRecordedException()!=null) {
StringWriter sw=new StringWriter();
pe.getRecordedException().printStackTrace(new PrintWriter(sw));
sw.flush();
LogService.instance().log(LogService.ERROR,"PortalSessionManager::doGet() : a PortalException has occurred : "+sw.toString());
throw new ServletException(pe.getRecordedException());
} else {
StringWriter sw=new StringWriter();
pe.printStackTrace(new PrintWriter(sw));
sw.flush();
LogService.instance().log(LogService.ERROR,"PortalSessionManager::doGet() : an unknown exception occurred : "+sw.toString());
throw new ServletException(pe);
}
} catch (Exception e) {
StringWriter sw=new StringWriter();
e.printStackTrace(new PrintWriter(sw));
sw.flush();
LogService.instance().log(LogService.ERROR,"PortalSessionManager::doGet() : an unknown exception occurred : "+sw.toString());
throw new ServletException(e);
}
} else {
throw new ServletException("Session object is null !");
}
}
/**
* Gets a URL associated with the named resource.
* Call this to access files with paths relative to the
* document root. Paths should begin with a "/".
* @param resource relative to the document root
* @return a URL associated with the named resource or null if the URL isn't accessible
* @throws java.net.MalformedURLException
*/
public static URL getResourceAsURL(String resource) {
//Make sure resource string starts with a "/"
if (!resource.startsWith("/"))
resource = "/" + resource;
URL url = null;
try {
url = servletContext.getResource(resource);
} catch (java.net.MalformedURLException murle) {
// if the URL is bad, just return null
}
return url;
}
/**
* Gets an input stream associated with the named resource.
* Call this to access files with paths relative to the
* document root. Paths should begin with a "/".
* @param resource relative to the document root
* @return an input stream assosiated with the named resource
*/
public static java.io.InputStream getResourceAsStream(String resource) {
//Make sure resource string starts with a "/"
if (!resource.startsWith("/"))
resource = "/" + resource;
return servletContext.getResourceAsStream(resource);
}
public class ResponseSubstitutionWrapper implements HttpServletResponse {
protected final HttpServletResponse res;
protected String sessionTag;
protected String newTag;
public ResponseSubstitutionWrapper(HttpServletResponse res,String sessionTag, String newTag) {
this.res=res;
this.sessionTag=sessionTag;
this.newTag=newTag;
}
public ServletOutputStream getOutputStream() throws IOException {
String encoding=this.getCharacterEncoding();
byte[] target,substitute;
if(encoding!=null) {
// use specified encoding
target=sessionTag.getBytes(encoding);
substitute=newTag.getBytes(encoding);
} else {
// use default system encoding
target=sessionTag.getBytes();
substitute=newTag.getBytes();
}
return new SubstitutionServletOutputStream(res.getOutputStream(),target,substitute);
}
public PrintWriter getWriter() throws IOException {
return new PrintWriter(new SubstitutionWriter(res.getWriter(),sessionTag.toCharArray(),newTag.toCharArray()));
}
// pass-through implementation methods
// implementation of javax.servlet.ServletResponse interface
public String getCharacterEncoding() {
return res.getCharacterEncoding();
}
public void reset() {
res.reset();
}
public void flushBuffer() throws IOException {
res.flushBuffer();
}
public void setContentType(String type) {
res.setContentType(type);
}
public void setContentLength(int len) {
res.setContentLength(len);
}
public void setBufferSize(int size) {
res.setBufferSize(size);
}
public int getBufferSize() {
return res.getBufferSize();
}
public boolean isCommitted() {
return res.isCommitted();
}
public void setLocale(Locale loc) {
res.setLocale(loc);
}
public Locale getLocale() {
return res.getLocale();
}
// servlet 2.3 methods, inderect invokation:
public void resetBuffer() {
try {
java.lang.reflect.Method m = res.getClass().getMethod("reseBuffer", null);
m.invoke(res, null);
} catch (Exception e) {
}
}
// implementation of javax.servlet.http.HttpServletResponse interface
public void addCookie(Cookie cookie) {
this.res.addCookie(cookie);
}
public boolean containsHeader(String name) {
return this.res.containsHeader(name);
}
public String encodeURL(String url) {
return this.res.encodeURL(url);
}
public String encodeRedirectURL(String url) {
return this.res.encodeRedirectURL(url);
}
public String encodeUrl(String url) {
return this.res.encodeUrl(url);
}
public String encodeRedirectUrl(String url) {
return this.res.encodeRedirectUrl(url);
}
public void sendError(int sc, String msg) throws IOException {
this.res.sendError(sc, msg);
}
public void sendError(int sc) throws IOException {
this.res.sendError(sc);
}
public void sendRedirect(String location) throws IOException {
this.res.sendRedirect(location);
}
public void setDateHeader(String name, long date) {
this.res.setDateHeader(name, date);
}
public void addDateHeader(String name, long date) {
this.res.addDateHeader(name, date);
}
public void setHeader(String name, String value) {
this.res.setHeader(name, value);
}
public void addHeader(String name, String value) {
this.res.addHeader(name, value);
}
public void setIntHeader(String name, int value) {
this.res.setIntHeader(name, value);
}
public void addIntHeader(String name, int value) {
this.res.addIntHeader(name, value);
}
public void setStatus(int sc) {
this.res.setStatus(sc);
}
public void setStatus(int sc, String sm) {
this.res.setStatus(sc, sm);
}
}
/**
* A wrapper around http request object to prevent unverified requests from
* accessing any of the request parameters.
*
* @author <a href="mailto:pkharchenko@interactivebusiness.com">Peter Kharchenko</a>
*/
public class RequestParamWrapper implements HttpServletRequest {
// the request being wrapped
protected final HttpServletRequest req;
protected Hashtable parameters;
protected boolean request_verified;
/**
* Creates a new <code>RequestParamWrapper</code> instance.
*
* @param source an <code>HttpServletRequest</code> value that's being wrapped.
* @param request_verified a <code>boolean</code> flag that determines if the request params should be accessable.
*/
public RequestParamWrapper (HttpServletRequest source,boolean request_verified) {
// leech all of the information from the source request
this.req=source;
this.request_verified=request_verified;
parameters = new Hashtable();
// only bother with parameter work if should be accessable
if(request_verified) {
// parse request body
String contentType = source.getContentType();
if (contentType != null && contentType.startsWith("multipart/form-data")) {
com.oreilly.servlet.multipart.Part attachmentPart;
try {
MultipartParser multi = new MultipartParser(source, sizeLimit, true, true);
while ((attachmentPart = multi.readNextPart()) != null) {
String partName = attachmentPart.getName();
if (attachmentPart.isParam()) {
ParamPart parameterPart = (ParamPart)attachmentPart;
String paramValue = parameterPart.getStringValue();
if (parameters.containsKey(partName)) {
/* Assume they meant a multivalued tag, like a checkbox */
String[] oldValueArray = (String[])parameters.get(partName);
String[] valueArray = new String[oldValueArray.length + 1];
for (int i = 0; i < oldValueArray.length; i++) {
valueArray[i] = oldValueArray[i];
}
valueArray[oldValueArray.length] = paramValue;
parameters.put(partName, valueArray);
}
else {
String[] valueArray = new String[1];
valueArray[0] = paramValue;
parameters.put(partName, valueArray);
}
}
else if (attachmentPart.isFile()) {
FilePart filePart = (FilePart)attachmentPart;
String filename = filePart.getFileName();
if (filename != null) {
MultipartDataSource fileUpload = new MultipartDataSource(filePart);
if (parameters.containsKey(partName)) {
MultipartDataSource[] oldValueArray = (MultipartDataSource[])parameters.get(partName);
MultipartDataSource[] valueArray = new MultipartDataSource[oldValueArray.length + 1];
for (int i = 0; i < oldValueArray.length; i++) {
valueArray[i] = oldValueArray[i];
}
valueArray[oldValueArray.length] = fileUpload;
parameters.put(partName, valueArray);
}
else {
MultipartDataSource[] valueArray = new MultipartDataSource[1];
valueArray[0] = fileUpload;
parameters.put(partName, valueArray);
}
}
}
}
} catch (Exception e) {
LogService.instance().log(LogService.ERROR, e);
}
}
// regular params
Enumeration en = source.getParameterNames();
if (en != null) {
while (en.hasMoreElements()) {
String pName = (String)en.nextElement();
parameters.put(pName, source.getParameterValues(pName));
}
}
}
}
/**
* Overloaded method
* @param name
* @return parameter
*/
public String getParameter(String name) {
String[] value_array = this.getParameterValues(name);
if ((value_array != null) && (value_array.length > 0)) {
return value_array[0];
} else {
return null;
}
}
/**
* Overloaded method
* @return parameter names
*/
public Enumeration getParameterNames() {
return this.parameters.keys();
}
/**
* Return a String[] for this parameter
* @param parameter name
* @result String[] if parameter is not an Object[]
*/
public String[] getParameterValues(String name) {
Object[] pars = (Object[])this.parameters.get(name);
if (pars!=null && pars instanceof String[]) {
return (String[])this.parameters.get(name);
} else {
return null;
}
}
/**
* Overloaded method
*
* @return a <code>Map</code> value
*/
public Map getParameterMap() {
return parameters;
}
/**
* Return the Object represented by this parameter name
* @param parameter name
* @result Object
*/
public Object[] getObjectParameterValues(String name) {
return (Object[])this.parameters.get(name);
}
// this method should be overloaded, otherwise params will be visible
public String getRequestURI() {
return req.getRequestURI();
}
// pass through methods
public String getPathInfo() {
return req.getPathInfo();
}
public String getPathTranslated() {
return req.getPathTranslated();
}
public String getContextPath() {
return req.getContextPath();
}
public String getQueryString() {
return req.getQueryString();;
}
public String getServletPath() {
return req.getServletPath();
}
// This method is new in Servlet 2.3.
// java.lang.reflect methods are used here in an effort
// to be compatible with older servlet APIs.
public StringBuffer getRequestURL() {
try {
java.lang.reflect.Method m = req.getClass().getMethod("getRequestURL", null);
return (StringBuffer)m.invoke(req, null);
} catch (Exception e) {
return null;
}
}
// peterk: this won't work. Spec says that this method has to be executed prior to
// reading request body, and we do exactly this in the constructor of this class :(
// This method is new in Servlet 2.3.
// java.lang.reflect methods are used here in an effort
// to be compatible with older servlet APIs.
public void setCharacterEncoding(String env) throws java.io.UnsupportedEncodingException {
try {
Class[] paramTypes = new Class[] { new String().getClass() };
java.lang.reflect.Method m = req.getClass().getMethod("setCharacterEncoding", paramTypes);
Object[] args = new Object[] { env };
m.invoke(req, args);
} catch (Exception e) {
}
}
public String getAuthType() {
return req.getAuthType();
}
public Cookie[] getCookies() {
return req.getCookies();
}
public long getDateHeader(String name) {
return req.getDateHeader(name);
}
public String getHeader(String name) {
return req.getHeader(name);
}
public Enumeration getHeaders(String name) {
return req.getHeaders(name);
}
public Enumeration getHeaderNames() {
return req.getHeaderNames();
}
public int getIntHeader(String name) {
return req.getIntHeader(name);
}
public String getMethod() {
return req.getMethod();
}
public String getRemoteUser() {
return req.getRemoteUser();
}
public boolean isUserInRole(String role) {
return req.isUserInRole(role);
}
public java.security.Principal getUserPrincipal() {
return req.getUserPrincipal();
}
public String getRequestedSessionId() {
return req.getRequestedSessionId();
}
public HttpSession getSession(boolean create) {
return req.getSession(create);
}
public HttpSession getSession() {
return req.getSession();
}
public boolean isRequestedSessionIdValid() {
return req.isRequestedSessionIdValid();
}
public boolean isRequestedSessionIdFromCookie() {
return req.isRequestedSessionIdFromCookie();
}
public boolean isRequestedSessionIdFromURL() {
return req.isRequestedSessionIdFromURL();
}
public boolean isRequestedSessionIdFromUrl() {
return req.isRequestedSessionIdFromURL();
}
public Object getAttribute(String name) {
return req.getAttribute(name);
}
public Enumeration getAttributeNames() {
return req.getAttributeNames();
}
public String getCharacterEncoding() {
return req.getCharacterEncoding();
}
public int getContentLength() {
return req.getContentLength();
}
public String getContentType() {
return req.getContentType();
}
public ServletInputStream getInputStream() throws IOException {
return req.getInputStream();
}
public String getProtocol() {
return req.getProtocol();
}
public String getScheme() {
return req.getScheme();
}
public String getServerName() {
return req.getServerName();
}
public int getServerPort() {
return req.getServerPort();
}
public BufferedReader getReader() throws IOException {
return req.getReader();
}
public String getRemoteAddr() {
return req.getRemoteAddr();
}
public String getRemoteHost() {
return req.getRemoteHost();
}
public void setAttribute(String name, Object o) {
req.setAttribute(name, o);
}
public void removeAttribute(String name) {
req.removeAttribute(name);
}
public Locale getLocale() {
return req.getLocale();
}
public Enumeration getLocales() {
return req.getLocales();
}
public boolean isSecure() {
return req.isSecure();
}
public RequestDispatcher getRequestDispatcher(String path) {
return req.getRequestDispatcher(path);
}
public String getRealPath(String path) {
throw new RuntimeException("HttpServletRequest.getRealPath(String path) is deprectated! Use ServletContext.getRealPath(String path) instead.");
}
}
} |
package com.promenadevt;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.sql.Date;
import java.util.concurrent.ExecutionException;
import org.apache.commons.io.IOUtils;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.ContentResolver;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.provider.OpenableColumns;
import android.view.KeyEvent;
import android.view.View;
import android.widget.ViewSwitcher;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.DeleteObjectRequest;
import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.ResponseHeaderOverrides;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.promenadevt.android.R;
import com.promenadevt.library.Constants;
import com.promenadevt.library.UserFunctions;
public class EditActivity extends Activity
{
EditText inputName;
Button btnChangeName;
Button btnTakePhoto;
Button btnAddConnection;
Button btnViewRoom;
Button btnDelete;
Button btnDeleteYes;
Button btnDeleteNo;
ViewSwitcher switcher;
ImageView roomImage;
UserFunctions userFunctions;
private static String username;
private static String roomName;
private static String dbID;
private static String propID;
private static String addr;
private static String roomURL;
int CAMERA_PIC_REQUEST = 1337;
private static final int PHOTO_SELECTED = 1;
Constants Constants;
private AmazonS3Client s3Client = new AmazonS3Client(
new BasicAWSCredentials(Constants.ACCESS_KEY_ID,
Constants.SECRET_KEY));
/*Intent res = new Intent();
String mPackage = "com.google.android.gallery3d";
String mClass = "com.google.android.apps.lightcycle.ProtectedPanoramaCaptureActivity";
res.setComponent(new ComponentName(mPackage,mClass));
startActivityForResult(res,CAMERA_PIC_REQUEST);
}*/
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data){
if( requestCode == CAMERA_PIC_REQUEST){
Bitmap thumbnail = (Bitmap) data.getExtras().get("data");
roomImage.setImageBitmap(thumbnail);
}
else if(requestCode == PHOTO_SELECTED){
if (resultCode == RESULT_OK) {
Uri selectedImage = data.getData();
//Bitmap thumbnail = (Bitmap) data.getExtras().get("data");
roomImage.setImageURI(selectedImage);
new S3PutObjectTask().execute(selectedImage);
userFunctions.changeURL(dbID, "https://s3-us-west-2.amazonaws.com/promenadevt-1/room"+dbID);
}
}
super.onActivityResult(requestCode, resultCode, data);
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK ) {
// do something on back.
//DatabaseHandler db = new DatabaseHandler(getApplicationContext());
Intent rooms = new Intent(getApplicationContext(), RoomsActivity.class);
//HashMap<String, String> loginInfo = db.getUserDetails();
rooms.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
rooms.putExtra("user", username);
rooms.putExtra("id",propID);
rooms.putExtra("addr", addr);
startActivity(rooms);
return true;
}
return super.onKeyDown(keyCode, event);
}
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.edit_room);
s3Client.setRegion(Region.getRegion(Regions.US_WEST_2));
//new S3GeneratePresignedUrlTask().execute();
// may need to account for newly registered user here
Intent intent = getIntent();
// pull info from previous page
username = intent.getStringExtra("user");
roomName = intent.getStringExtra("name");
propID = intent.getStringExtra("propID");
addr = intent.getStringExtra("addr");
inputName = (EditText) findViewById(R.id.nameRoom);
inputName.setText(roomName);
dbID = intent.getStringExtra("id");
roomURL = "https://s3-us-west-2.amazonaws.com/promenadevt-1/room"+dbID;
Constants = new Constants(propID,dbID);
btnChangeName = (Button) findViewById(R.id.btnUpdateR);
btnTakePhoto = (Button) findViewById(R.id.btnPhoto);
btnAddConnection = (Button) findViewById(R.id.btnConnection);
btnViewRoom = (Button) findViewById(R.id.btnView);
btnDelete = (Button) findViewById(R.id.btnDelete);
btnDeleteYes = (Button) findViewById(R.id.btnDeleteRoomYes);
btnDeleteNo = (Button) findViewById(R.id.btnDeleteRoomNo);
switcher = (ViewSwitcher) findViewById(R.id.editRoomsSwitch);
roomImage =(ImageView) findViewById(R.id.PhotoCaptured);
userFunctions = new UserFunctions();
if(roomURL != null){
try {
Bitmap bitmap = new S3GetObjectTask().execute().get();
roomImage.setImageBitmap(bitmap);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
btnChangeName.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View arg0) {
// change room name in database
UserFunctions userFunction = new UserFunctions();
String newName = inputName.getText().toString();
userFunction.renameRoom(dbID,newName);
}
});
btnTakePhoto.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View arg0) {
Intent intent = new Intent(Intent.ACTION_GET_CONTENT); |
package ar.glyphsets.implicitgeometry;
import java.awt.Color;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.io.Serializable;
import java.lang.reflect.Array;
import ar.util.ColorNames;
import ar.util.Util;
import ar.util.memoryMapping.MemMapEncoder;
/**Interface designating something has an integer-valued "get" function.
* This interface is the basis for array-based and file-record conversions
* where the only reliable field accessor is the index.
*
* The subclasses are common ways of working with indexed records.
*
* **/
public interface Indexed extends Serializable {
/**What value is at index i? */
public Object get(int i);
/**Wrap an array as an Indexed item.**/
public static class ArrayWrapper implements Indexed {
private static final long serialVersionUID = -7081805779069559306L;
private final Object array;
@SuppressWarnings("javadoc")
public ArrayWrapper(Object parts) {this.array = parts;}
public Object get(int i) {return Array.get(array, i);}
}
/**Converts the elements of the passed array to the given types.
* Uses toString and primitive parsers.
*/
public static class Converter implements Indexed {
/**Types the converter understands. "X" means skip.**/
public enum TYPE{INT, DOUBLE, LONG, SHORT, BYTE, CHAR, FLOAT, X, COLOR}
private static final long serialVersionUID = 9142589107863879237L;
private final TYPE[] types;
private final Indexed values;
public Converter(MemMapEncoder.TYPE... types) {this(Util.transcodeTypes(types));}
/**Instantiate a converter with a null value-array.
* This converter is essentially a template for future converts built for specific data.
*/
public Converter(TYPE... types) {this(null, types);}
/**Instantiate a converter for a specific set of values.*/
public Converter(Indexed values, TYPE... types) {
this.values = values;
this.types = types;
}
@Override
public Object get(int i) {
Object v = values.get(i);
switch (types[i]) {
case INT: return v instanceof Integer ? (Integer) v : Integer.valueOf(v.toString());
case SHORT: return v instanceof Short ? (Short) v : Short.valueOf(v.toString());
case LONG: return v instanceof Long ? (Long) v : Long.valueOf(v.toString());
case FLOAT: return v instanceof Float ? (Float) v : Float.valueOf(v.toString());
case DOUBLE: return v instanceof Double ? (Double) v : Double.valueOf(v.toString());
case COLOR: return v instanceof Color ? (Color) v : ColorNames.byName(v.toString(), null);
default: throw new UnsupportedOperationException("Cannot perform conversion to " + types[i]);
}
}
@SuppressWarnings("unchecked")
public <T> T get(int f, Class<T> type) {
Object val = get(f);
if (type.isInstance(val)) {return (T) val;}
throw new IllegalArgumentException("Requested type that does not match encoded type.");
}
/**Get the type array associated with this converter.**/
public TYPE[] types() {return types;}
public Converter applyTo(Object[] values) {return new Converter(new ArrayWrapper(values), types);}
public Converter applyTo(Indexed values) {return new Converter(values, types);}
}
/**Apply the passed valuer to the value at the indicated index.
* The default value is the "IdentityValuer" found in the valuer class.
* **/
public static class ToValue<I,V> implements Valuer<Indexed,V>, Serializable {
private static final long serialVersionUID = -3420649810382539859L;
private final int vIdx;
private final Valuer<I,V> basis;
/**Extract a value from an indexed item without conversion.**/
@SuppressWarnings({ "unchecked", "rawtypes" })
public ToValue(int vIdx) {this(vIdx, new IdentityValuer());}
/**Extract a value from an indexed item, but do conversion using the valuer.**/
public ToValue(int vIdx, Valuer<I, V> basis) {
this.vIdx = vIdx;
this.basis = basis;
}
@SuppressWarnings("unchecked")
public V value(Indexed from) {
return basis.value((I) from.get(vIdx));
}
}
/**Convert an item to a fixed-sized rectangle at a variable
* position. The passed value determines the position, but the size
* is set by the ToRect constructor.
*/
public static class ToPoint implements Shaper.SafeApproximate<Indexed, Point2D>, Serializable {
private static final long serialVersionUID = 2509334944102906705L;
private final boolean flipY;
private final int xIdx, yIdx;
/** @param flipY Multiply Y-values by -1 (essentially flip up and down directions)**/
public ToPoint(boolean flipY, int xIdx, int yIdx) {
this.flipY=flipY;
this.xIdx = xIdx;
this.yIdx = yIdx;
}
public Point2D shape(Indexed from) {
double x=((Number) from.get(xIdx)).doubleValue();
double y=((Number) from.get(yIdx)).doubleValue();
y = flipY ? -y : y;
return new Point2D.Double(x, y);
}
}
/**Convert an item to a fixed-sized rectangle at a variable
* position. The passed value determines the position, but the size
* is set by the ToRect constructor.
*/
public static class ToRect implements Shaper.SafeApproximate<Indexed, Rectangle2D>, Serializable {
private static final long serialVersionUID = 2509334944102906705L;
private final double width,height;
private final boolean flipY;
private final int xIdx, yIdx;
/**Square construction using the indexed values directly for x/y**/
public ToRect(double size, int xIdx, int yIdx) {this(size,size,false,xIdx,yIdx);}
/**Full control constructor for creating rectangles.
*
* @param flipY Multiply Y-values by -1 (essentially flip up and down directions)
* **/
public ToRect(double width, double height, boolean flipY, int xIdx, int yIdx) {
this.width=width;
this.height=height;
this.flipY=flipY;
this.xIdx = xIdx;
this.yIdx = yIdx;
}
public Rectangle2D shape(Indexed from) {
double x=((Number) from.get(xIdx)).doubleValue();
double y=((Number) from.get(yIdx)).doubleValue();
y = flipY ? -y : y;
return new Rectangle2D.Double(x, y, width, height);
}
}
} |
package liquibase.dbdoc;
import liquibase.change.Change;
import liquibase.database.Database;
import liquibase.structure.core.*;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class TableWriter extends HTMLWriter {
public TableWriter(File rootOutputDir, Database database) {
super(new File(rootOutputDir, "tables"), database);
}
@Override
protected String createTitle(Object object) {
return "Changes affecting table \""+object.toString() + "\"";
}
@Override
protected void writeCustomHTML(Writer fileWriter, Object object, List<Change> changes, Database database) throws IOException {
final Table table = (Table) object;
writeTableRemarks(fileWriter, table, database);
writeColumns(fileWriter, table, database);
writeTableIndexes(fileWriter, table, database);
writeTableForeignKeys(fileWriter, table, database);
}
private void writeColumns(Writer fileWriter, Table table, Database database) throws IOException {
List<List<String>> cells = new ArrayList<List<String>>();
for (Column column : table.getColumns()) {
String remarks = column.getRemarks();
cells.add(Arrays.asList(column.getType().toString(),
column.isNullable() ? "NULL" : "NOT NULL",
"<A HREF=\"../columns/" + table.getSchema().getName().toLowerCase() + "." + table.getName().toLowerCase() + "." + column.getName().toLowerCase() + ".html" + "\">" + column.getName() + "</A>", (remarks != null) ? remarks : ""));
//todo: add foreign key info to columns?
}
writeTable("Current Columns", cells, fileWriter);
}
private void writeTableRemarks(Writer fileWriter, Table table, Database database) throws IOException {
final String tableRemarks = table.getRemarks();
if (tableRemarks != null && tableRemarks.length() > 0) {
final List<List<String>> cells = new ArrayList<List<String>>();
cells.add(Arrays.asList(tableRemarks));
writeTable("Table Description", cells, fileWriter);
}
}
private void writeTableIndexes(Writer fileWriter, Table table, Database database) throws IOException {
final List<List<String>> cells = new ArrayList<List<String>>();
final PrimaryKey primaryKey = table.getPrimaryKey();
if (!table.getIndexes().isEmpty()) {
for (Index index : table.getIndexes()) {
cells.add(Arrays.asList((primaryKey != null && primaryKey.getBackingIndex() == index ? "Primary Key " : index.isUnique() ? "Unique " : "Non-Unique ") +
(index.getClustered() == null ? "" : (index.getClustered() ? "Clustered" : "Non-Clustered")),
index.getName(),
index.getColumnNames().replace(index.getTable().getName() + ".","")));
}
writeTable("Current Table Indexes", cells, fileWriter);
}
}
private void writeTableForeignKeys(Writer fileWriter, Table table, Database database) throws IOException {
final List<List<String>> cells = new ArrayList<List<String>>();
if(!table.getOutgoingForeignKeys().isEmpty())
{
for (ForeignKey outgoingForeignKey : table.getOutgoingForeignKeys()) {
cells.add(Arrays.asList(outgoingForeignKey.getName(),
outgoingForeignKey.getForeignKeyColumns().toString().replace(table.getName() + ".", "").replaceAll("[\\[\\]]", ""),
outgoingForeignKey.getPrimaryKeyTable().toString(),
outgoingForeignKey.getPrimaryKeyColumns().toString().replace(outgoingForeignKey.getPrimaryKeyTable().toString() + ".", "").replaceAll("[\\[\\]]", "")));
}
writeTable("Current Table Foreign Keys", cells, fileWriter);
}
}
} |
package org.apache.velocity.runtime;
import java.io.InputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Hashtable;
import java.util.Properties;
import org.apache.log.LogKit;
import org.apache.log.Logger;
import org.apache.log.LogTarget;
import org.apache.log.Formater;
import org.apache.log.output.FileOutputLogTarget;
import org.apache.velocity.runtime.log.VelocityFormater;
import org.apache.velocity.Template;
import org.apache.velocity.runtime.parser.Parser;
import org.apache.velocity.runtime.parser.ParseException;
import org.apache.velocity.runtime.parser.node.SimpleNode;
import org.apache.velocity.runtime.loader.TemplateFactory;
import org.apache.velocity.runtime.loader.TemplateLoader;
import org.apache.velocity.runtime.directive.Foreach;
import org.apache.velocity.runtime.directive.Dummy;
/**
* This is the Runtime system for Velocity. It is the
* single access point for all functionality in Velocity.
* It adheres to the mediator pattern and is the only
* structure that developers need to be familiar with
* in order to get Velocity to perform.
*
* <pre>
* Runtime.init(properties);
*
* Template template = Runtime.getTemplate("template.vm");
*
* Runtime.warn(message);
* Runtime.info(message);
* Runtime.error(message);
* Runtime.debug(message);
* </pre>
*
* The Runtime will also cooperate with external
* systems like Turbine. Normally the Runtime will
* be fully initialized from a properties file, but
* certain phases of initialization can be delayed
* if vital pieces of information are provided by
* an external system.
*
* Turbine for example knows where the templates
* are to be loaded from, and where the velocity
* log file should be placed.
*
* In order for this to happen the velocity.properties
* file must look something like the following:
*
* runtime.log = system
* template.path = system
*
* Having these properties set to 'system' lets the
* Velocity Runtime know that an external system
* will set these properties and initialized
* the appropriates sub systems when these properties
* are set.
*
* So in the case of Velocity cooperating with Turbine
* the code might look something like the following:
*
* <pre>
* Runtime.setProperty(Runtime.RUNTIME_LOG, pathToVelocityLog);
* Runtime.initializeLogger();
*
* Runtime.setProperty(Runtime.TEMPLATE_PATH, pathToTemplates);
* Runtime.initializeTemplateLoader();
* </pre>
*
* It is simply a matter of setting the appropriate property
* an initializing the matching sub system.
*
* @author <a href="mailto:jvanzyl@periapt.com">Jason van Zyl</a>
* @author <a href="mailto:jlb@houseofdistraction.com">Jeff Bowden</a>
* @version $Id: Runtime.java,v 1.26 2000/10/23 18:27:48 jvanzyl Exp $
*/
public class Runtime
{
/** Location of the log file */
public static final String RUNTIME_LOG = "runtime.log";
/** Location of templates */
public static final String TEMPLATE_PATH = "template.path";
/** Template loader to be used */
public static final String TEMPLATE_LOADER = "template.loader";
/** Specify template caching true/false */
public static final String TEMPLATE_CACHE = "template.cache";
/** The encoding to use for the template */
public static final String TEMPLATE_ENCODING = "template.encoding";
/** Enable the speed up provided by FastWriter */
public static final String TEMPLATE_ASCIIHACK = "template.asciihack";
/** How often to check for modified templates. */
public static final String TEMPLATE_MOD_CHECK_INTERVAL =
"template.modificationCheckInterval";
/** Initial counter value in #foreach directives */
public static final String COUNTER_NAME = "counter.name";
/** Initial counter value in #foreach directives */
public static final String COUNTER_INITIAL_VALUE = "counter.initial.value";
/** Content type */
public static final String DEFAULT_CONTENT_TYPE = "default.contentType";
/** Prefix for warning messages */
private final static String WARN = " [warn] ";
/** Prefix for info messages */
private final static String INFO = " [info] ";
/** Prefix for debug messages */
private final static String DEBUG = " [debug] ";
/** Prefix for error messages */
private final static String ERROR = " [error] ";
/** TemplateLoader used by the Runtime */
private static TemplateLoader templateLoader;
/** Turn Runtime debugging on with this field */
private final static boolean DEBUG_ON = true;
/** Default Runtime properties */
private final static String DEFAULT_RUNTIME_PROPERTIES =
"org/apache/velocity/runtime/defaults/velocity.properties";
/** The Runtime logger */
private static Logger logger;
/**
* The Runtime parser. This has to be changed to
* a pool of parsers!
*/
private static Parser parser;
/** Indicate whether the Runtime has been fully initialized */
private static boolean initialized;
/**
* The logging systems initialization may be defered if
* it is to be initialized by an external system. There
* may be messages that need to be stored until the
* logger is instantiated. They will be stored here
* until the logger is alive.
*/
private static StringBuffer pendingMessages = new StringBuffer();
private static Properties properties;
/**
* Initializes the Velocity Runtime with a set of
* values from a default velocity.properties that
* is on the classpath. This default properties file
* will be included in the distribution jar file to
* make the Velocity Runtime easy to init.
*/
public synchronized static void init() throws Exception
{
if (properties == null)
setDefaultProperties();
init(properties);
}
/**
* Get the default properties for the Velocity Runtime.
* This would allow the retrieval and modification of
* the base properties before initializing the Velocity
* Runtime.
*/
public static void setDefaultProperties()
{
properties = new Properties();
ClassLoader classLoader = Runtime.class.getClassLoader();
try
{
InputStream inputStream = classLoader.getResourceAsStream(
DEFAULT_RUNTIME_PROPERTIES);
properties.load(inputStream);
}
catch (IOException ioe)
{
System.err.println("Cannot get Velocity Runtime default properties!");
}
}
/**
* Initializes the Velocity Runtime.
*/
public synchronized static void init(String propertiesFileName)
throws Exception
{
Properties properties = new Properties();
File file = new File( propertiesFileName );
/*
* Try loading the properties from the named properties
* file. If that fails then set the default values.
* From the command line and for testing the default
* values should work fine, and makes initializing
* the Velocity runtime as easy as Runtime.init();
*/
try
{
properties.load( new FileInputStream(file) );
}
catch(Exception ex)
{
init();
}
init( properties );
}
public synchronized static void init(Properties properties)
throws Exception
{
if (! initialized)
{
try
{
Runtime.properties = properties;
initializeLogger();
initializeTemplateLoader();
initializeParserPool();
info("Velocity successfully started.");
initialized = true;
}
catch (Exception e)
{
System.out.println(e);
e.printStackTrace();
}
}
}
/**
* Allows an external system to set a property in
* the Velocity Runtime.
*/
public static void setProperty(String key, String value)
{
properties.setProperty(key, value);
}
/**
* Initialize the Velocity logging system.
*/
public static void initializeLogger() throws
MalformedURLException
{
if (!getString(RUNTIME_LOG).equals("system"))
{
// Let's look at the log file entry and
// correct it if it is not a property
// fomratted URL.
String logFile = getString(RUNTIME_LOG);
if (! logFile.startsWith("file"))
logFile = "file://" + logFile;
// Initialize the logger.
logger = LogKit.createLogger("velocity",
getString(RUNTIME_LOG), "DEBUG");
LogTarget[] t = logger.getLogTargets();
((FileOutputLogTarget)t[0])
.setFormater((Formater) new VelocityFormater());
((FileOutputLogTarget)t[0])
.setFormat("%{time} %{message}\\n%{throwable}" );
if (pendingMessages.length() > 0)
{
logger.info(pendingMessages.toString());
}
}
}
/**
* Initialize the template loader if there
* is a real path set for the template.path
* property. Otherwise defer initialization
* of the template loader because it is going
* to be set by some external mechanism: Turbine
* for example.
*/
public static void initializeTemplateLoader()
throws Exception
{
if (!getString(TEMPLATE_PATH).equals("system"))
{
templateLoader = TemplateFactory
.getLoader(getString(TEMPLATE_LOADER));
templateLoader.init();
}
}
/**
* Initializes the Velocity parser pool.
* This still needs to be implemented.
*/
private static void initializeParserPool()
{
// put this in a method and make a pool of
// parsers.
parser = new Parser();
Hashtable directives = new Hashtable();
directives.put("foreach", new Foreach());
directives.put("dummy", new Dummy());
parser.setDirectives(directives);
}
/**
* Parse the input stream and return the root of
* AST node structure.
*/
public synchronized static SimpleNode parse(InputStream inputStream)
throws ParseException
{
return parser.parse(inputStream);
}
/**
* Get a template via the TemplateLoader.
*/
public static Template getTemplate(String template)
{
try
{
return templateLoader.getTemplate(template);
}
catch (Exception e)
{
error(e);
return null;
}
}
/**
* Get a boolean property.
*/
public static boolean getBoolean(String property)
{
String prop = properties.getProperty( property );
return (prop != null && Boolean.valueOf( prop ).booleanValue());
}
/**
* Get a string property.
*/
public static String getString(String property)
{
return properties.getProperty( property );
}
/**
* Get a string property. with a default value
*/
public static String getString(String property, String defaultValue)
{
String prop = getString( property );
return (prop == null ? defaultValue : prop);
}
private static void log(String message)
{
if (logger != null)
logger.info(message);
else
pendingMessages.append(message);
}
/** Log a warning message */
public static void warn(Object message)
{
log(WARN + message.toString());
}
/** Log an info message */
public static void info(Object message)
{
log(INFO + message.toString());
}
/** Log an error message */
public static void error(Object message)
{
log(ERROR + message.toString());
}
/** Log a debug message */
public static void debug(Object message)
{
if (DEBUG_ON)
log(DEBUG + message.toString());
}
} |
package org.tensorics.core.tensor;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Set;
import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.collect.Sets.SetView;
/**
* Contains utility methods for position objects.
*
* @author kfuchsbe
*/
public final class Positions {
/**
* private constructor to avoid instantiation
*/
private Positions() {
/* Only static methods */
}
public static Position union(Position left, Position right) {
checkNotNull(left, "left position must not be null.");
checkNotNull(right, "right position must not be null.");
checkArgument(Sets.intersection(left.dimensionSet(), right.dimensionSet()).isEmpty(),
"Positions have overlapping dimensions. It is not possible to create the union of them.");
SetView<Object> coordinates = Sets.union(left.coordinates(), right.coordinates());
return Position.of(coordinates);
}
/**
* Copies the given positions to a set, to be sure that each element is
* contained only once
*
* @param positions the positions, which shall be ensured that they are
* unique
* @return a set of unique positions
*/
public static Set<Position> unique(Iterable<Position> positions) {
return ImmutableSet.copyOf(positions);
}
/**
* Factory method for a dimension stripper. The naming ('by') is done so
* that its calls together with 'transform' sounds somehow fluent.
*
* @param dimensionsToStrip the dimensions which shall be stripped from the
* positions passed to the stripper.
* @return a function object that can strip the given dimensions from
* positions.
*/
public static Positions.DimensionStripper stripping(final Set<? extends Class<?>> dimensionsToStrip) {
return new Positions.DimensionStripper(dimensionsToStrip);
}
/**
* A functional object to transform positions to other positions with the
* dimensions stripped as given in the constructor.
*
* @author kaifox
*/
public static class DimensionStripper implements Function<Position, Position> {
private final Set<? extends Class<?>> dimensionsToStrip;
DimensionStripper(Set<? extends Class<?>> dimensionsToStrip) {
this.dimensionsToStrip = ImmutableSet.copyOf(dimensionsToStrip);
}
@Override
public Position apply(Position position) {
Builder<Object> builder = ImmutableSet.builder();
for (Object coordinate : position.coordinates()) {
if (!dimensionsToStrip.contains(coordinate.getClass())) {
builder.add(coordinate);
}
}
return Position.of(builder.build());
}
}
public static void assertConsistentDimensions(Position position, Set<? extends Class<?>> dimensions) {
if (!position.isConsistentWith(dimensions)) {
throw new IllegalArgumentException(
"The given coordinates are not consistent with the dimensions of the tensor! position='" + position
+ "'; required dimensions='" + dimensions + "'.");
}
}
/**
* Combines the both positions of the pair in such a way, that for each
* coordinate of the types given in the given set of dimensions have to be
* <ul>
* <li>either present in both positions of the pair, and then have to be the
* same
* <li>or be present in only one of the both positions
* </ul>
*
* @param pair the pair, whose dimensions should be united
* @param targetDimensions the dimension in which the positions shall be
* united
* @return a new position, containing the coordinates of the pair, merged by
* the above rules
*/
/* Similar to union ... maybe to be unified at some point */
public static Position combineDimensions(PositionPair pair, Set<Class<?>> targetDimensions) {
Position left = pair.left();
Position right = pair.right();
return combineDimensions(left, right, targetDimensions);
}
/**
* Combines the both positions in such a way, that for each coordinate of
* the types given in the given set of dimensions have to be
* <ul>
* <li>either present in both positions of the pair, and then have to be the
* same
* <li>or be present in only one of the both positions
* </ul>
*
* @param left the first of the two positions, whose dimensions should be
* united
* @param right the second of the two positions whose dimensions should be
* combined
* @param targetDimensions the dimension in which the positions shall be
* united
* @return a new position, with the coordinates merged according to the
* above rules
*/
public static Position combineDimensions(Position left, Position right, Set<Class<?>> targetDimensions) {
ImmutableSet.Builder<Object> builder = ImmutableSet.builder();
for (Class<?> dimension : targetDimensions) {
Object leftCoordinate = left.coordinateFor(dimension);
Object rightCoordinate = right.coordinateFor(dimension);
if (Objects.equal(leftCoordinate, rightCoordinate) || oneIsNull(leftCoordinate, rightCoordinate)) {
builder.add(MoreObjects.firstNonNull(leftCoordinate, rightCoordinate));
} else {
throw new IllegalArgumentException("Coordinates for dimension '" + dimension
+ "' are neither the same in both positions (" + left + " and " + right
+ "), nor present only in one. Cannot consistently combine.");
}
}
return Position.of(builder.build());
}
public static <T> boolean oneIsNull(T left, T right) {
return ((left == null) || (right == null));
}
/**
* Combines all position pairs into positions containing the given
* dimensions and returns a map from the combined positions to the original
* position pairs.
*
* @param positionPairs the position pairs to combine the final positions
* @param targetDimensions the dimensions in which to combine the positions
* @return a map from the combined dimensions to the original pairs
*/
public static ImmutableSetMultimap<Position, PositionPair> combineAll(Set<PositionPair> positionPairs,
Set<Class<?>> targetDimensions) {
ImmutableSetMultimap.Builder<Position, PositionPair> builder = ImmutableSetMultimap.builder();
for (PositionPair pair : positionPairs) {
builder.put(combineDimensions(pair, targetDimensions), pair);
}
return builder.build();
}
public static Multimap<Position, Position> mapByStripping(Iterable<Position> positions,
Set<Class<?>> dimensionsToStrip) {
DimensionStripper stripper = stripping(dimensionsToStrip);
ImmutableMultimap.Builder<Position, Position> builder = ImmutableMultimap.builder();
for (Position position : positions) {
builder.put(stripper.apply(position), position);
}
return builder.build();
}
} |
package <%=packageName%>.config;
<%_ if(authenticationType == 'jwt') { _%>
import <%=packageName%>.security.AuthoritiesConstants;
import <%=packageName%>.security.jwt.JWTConfigurer;
import <%=packageName%>.security.jwt.TokenProvider;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpMethod;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.data.repository.query.SecurityEvaluationContextExtension;
import javax.inject.Inject;
@Configuration
@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true, securedEnabled = true)
public class MicroserviceSecurityConfiguration extends WebSecurityConfigurerAdapter {
@Inject
private TokenProvider tokenProvider;
@Override
public void configure(WebSecurity web) throws Exception {
web.ignoring()
.antMatchers("/app*.{js,html}") |
package org.jaxen.function;
import java.io.IOException;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import junit.framework.TestCase;
import org.jaxen.BaseXPath;
import org.jaxen.FunctionCallException;
import org.jaxen.JaxenException;
import org.jaxen.dom.DOMXPath;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
/**
* @author Elliotte Rusty Harold
*
*/
public class BooleanTest extends TestCase {
private Document doc;
public void setUp() throws ParserConfigurationException, SAXException, IOException
{
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
DocumentBuilder builder = factory.newDocumentBuilder();
doc = builder.newDocument();
}
public BooleanTest(String name) {
super(name);
}
// test case for JAXEN-55
public void testNonEmptyNodeSetsAreTrue()
throws JaxenException {
BaseXPath xpath = new DOMXPath("boolean(
org.w3c.dom.Element a = doc.createElementNS("", "a");
org.w3c.dom.Element b = doc.createElementNS("", "b");
doc.appendChild(a);
a.appendChild(b);
org.w3c.dom.Element x2 = doc.createElementNS("", "x");
org.w3c.dom.Element x3 = doc.createElementNS("", "x");
org.w3c.dom.Element x4 = doc.createElementNS("", "x");
a.appendChild(x4);
b.appendChild(x2);
b.appendChild(x3);
x2.appendChild(doc.createTextNode("false"));
x3.appendChild(doc.createTextNode("false"));
x4.appendChild(doc.createTextNode("false"));
List result = xpath.selectNodes(doc);
assertEquals(1, result.size());
assertEquals(Boolean.TRUE, result.get(0));
}
public void testEmptyNodeSetsAreFalse()
throws JaxenException {
BaseXPath xpath = new DOMXPath("boolean(
org.w3c.dom.Element a = doc.createElementNS("", "a");
org.w3c.dom.Element b = doc.createElementNS("", "b");
doc.appendChild(a);
a.appendChild(b);
org.w3c.dom.Element x2 = doc.createElementNS("", "x");
org.w3c.dom.Element x3 = doc.createElementNS("", "x");
org.w3c.dom.Element x4 = doc.createElementNS("", "x");
a.appendChild(x4);
b.appendChild(x2);
b.appendChild(x3);
x2.appendChild(doc.createTextNode("false"));
x3.appendChild(doc.createTextNode("false"));
x4.appendChild(doc.createTextNode("false"));
List result = xpath.selectNodes(doc);
assertEquals(1, result.size());
assertEquals(Boolean.FALSE, result.get(0));
}
public void testZeroIsFalse()
throws JaxenException {
BaseXPath xpath = new DOMXPath("boolean(0)");
org.w3c.dom.Element a = doc.createElementNS("", "a");
List result = xpath.selectNodes(a);
assertEquals(1, result.size());
assertEquals(Boolean.FALSE, result.get(0));
}
public void testEmptyStringIsFalse()
throws JaxenException {
BaseXPath xpath = new DOMXPath("boolean('')");
org.w3c.dom.Element a = doc.createElementNS("", "a");
List result = xpath.selectNodes(a);
assertEquals(1, result.size());
assertEquals(Boolean.FALSE, result.get(0));
}
/**
 * Any non-empty string converts to boolean true — even the string
 * "false" (XPath 1.0 section 4.3).
 */
public void testNonEmptyStringIsTrue() throws JaxenException {
    BaseXPath expr = new DOMXPath("boolean('false')");
    org.w3c.dom.Element context = doc.createElementNS("", "a");
    List results = expr.selectNodes(context);
    assertEquals(1, results.size());
    assertEquals(Boolean.TRUE, results.get(0));
}
/**
 * boolean() with zero arguments must be rejected at evaluation time.
 */
public void testBooleanFunctionRequiresOneArgument() throws JaxenException {
    BaseXPath expr = new DOMXPath("boolean()");
    org.w3c.dom.Element context = doc.createElementNS("", "a");
    try {
        expr.selectNodes(context);
        fail("Allowed boolean function with no arguments");
    } catch (FunctionCallException expected) {
        // The failure must carry a diagnostic message.
        assertNotNull(expected.getMessage());
    }
}
/**
 * boolean() with two arguments must be rejected at evaluation time.
 */
public void testBooleanFunctionRequiresExactlyOneArgument() throws JaxenException {
    BaseXPath expr = new DOMXPath("boolean('', '')");
    org.w3c.dom.Element context = doc.createElementNS("", "a");
    try {
        expr.selectNodes(context);
        fail("Allowed boolean function with two arguments");
    } catch (FunctionCallException expected) {
        // The failure must carry a diagnostic message.
        assertNotNull(expected.getMessage());
    }
}
} |
package com.jme3.scene;
import com.jme3.asset.AssetNotFoundException;
import com.jme3.bounding.BoundingVolume;
import com.jme3.collision.Collidable;
import com.jme3.collision.CollisionResults;
import com.jme3.export.InputCapsule;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.export.OutputCapsule;
import com.jme3.material.Material;
import com.jme3.math.Matrix4f;
import com.jme3.renderer.Camera;
import com.jme3.scene.VertexBuffer.Type;
import com.jme3.util.TempVars;
import java.io.IOException;
import java.util.Queue;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* <code>Geometry</code> defines a leaf node of the scene graph. The leaf node
* contains the geometric data for rendering objects. It manages all rendering
* information such as a {@link Material} object to define how the surface
* should be shaded and the {@link Mesh} data to contain the actual geometry.
*
* @author Kirill Vainer
*/
public class Geometry extends Spatial {
    // Version #1: removed shared meshes.
    // models loaded with shared mesh will be automatically fixed.
    public static final int SAVABLE_VERSION = 1;
    private static final Logger logger = Logger.getLogger(Geometry.class.getName());
    // Geometric data rendered for this leaf node; may be null until set.
    protected Mesh mesh;
    // Active LOD level; 0 selects the mesh's default index buffer. Transient: reset on load.
    protected transient int lodLevel = 0;
    // Surface/shading definition; may be null until set.
    protected Material material;
    /**
     * When true, the geometry's transform will not be applied.
     */
    protected boolean ignoreTransform = false;
    // Cached model-to-world matrix; recomputed by computeWorldMatrix(). Transient: rebuilt on load.
    protected transient Matrix4f cachedWorldMat = new Matrix4f();
    /**
     * Specifies which {@link GeometryGroupNode} this <code>Geometry</code>
     * is managed by. Null when this geometry is not grouped.
     */
    protected GeometryGroupNode groupNode;
    /**
     * The start index of this <code>Geometry's</code> inside
     * the {@link GeometryGroupNode}. -1 when not grouped.
     */
    protected int startIndex = -1;
    /**
     * Serialization only. Do not use.
     */
    public Geometry() {
        // Delegates to Geometry(String) with a null name; mesh/material stay null.
        this(null);
    }
    /**
     * Create a geometry node without any mesh data.
     * Both the mesh and the material are null, the geometry
     * cannot be rendered until those are set.
     *
     * @param name The name of this geometry
     */
    public Geometry(String name) {
        super(name);
        // For backwards compatibility, only clear the "requires
        // update" flag if we are not a subclass of Geometry.
        // This prevents subclass from silently failing to receive
        // updates when they upgrade.
        setRequiresUpdates(Geometry.class != getClass());
    }
    /**
     * Create a geometry node with mesh data.
     * The material of the geometry is null, it cannot
     * be rendered until it is set.
     *
     * @param name The name of this geometry
     * @param mesh The mesh data for this geometry
     * @throws IllegalArgumentException if mesh is null
     */
    public Geometry(String name, Mesh mesh) {
        this(name);
        if (mesh == null) {
            throw new IllegalArgumentException("mesh cannot be null");
        }
        this.mesh = mesh;
    }
    /**
     * A grouped geometry is never rendered directly — its group node renders
     * it — so it always reports itself as outside the frustum here.
     */
    @Override
    public boolean checkCulling(Camera cam) {
        if (isGrouped()) {
            setLastFrustumIntersection(Camera.FrustumIntersect.Outside);
            return false;
        }
        return super.checkCulling(cam);
    }
    /**
     * @return If ignoreTransform mode is set (world transform is not applied).
     *
     * @see Geometry#setIgnoreTransform(boolean)
     */
    public boolean isIgnoreTransform() {
        return ignoreTransform;
    }
    /**
     * @param ignoreTransform If true, the geometry's transform will not be applied.
     *                        See {@link #updateWorldBound()} where the model bound
     *                        is then used as-is.
     */
    public void setIgnoreTransform(boolean ignoreTransform) {
        this.ignoreTransform = ignoreTransform;
    }
    /**
     * Sets the LOD level to use when rendering the mesh of this geometry.
     * Level 0 indicates that the default index buffer should be used,
     * levels [1, LodLevels + 1] represent the levels set on the mesh
     * with {@link Mesh#setLodLevels(com.jme3.scene.VertexBuffer[]) }.
     *
     * @param lod The lod level to set
     * @throws IllegalStateException    if the mesh has no LOD levels
     * @throws IllegalArgumentException if lod is outside [0, numLodLevels)
     */
    @Override
    public void setLodLevel(int lod) {
        // NOTE(review): throws NPE when mesh is null (mesh may legitimately be
        // null for a Geometry created via the no-mesh constructors) — confirm.
        if (mesh.getNumLodLevels() == 0) {
            throw new IllegalStateException("LOD levels are not set on this mesh");
        }
        if (lod < 0 || lod >= mesh.getNumLodLevels()) {
            throw new IllegalArgumentException("LOD level is out of range: " + lod);
        }
        lodLevel = lod;
        // The managing group node must re-batch with the new index buffer.
        if (isGrouped()) {
            groupNode.onMeshChange(this);
        }
    }
    /**
     * Returns the LOD level set with {@link #setLodLevel(int) }.
     *
     * @return the LOD level set (0 = default index buffer)
     */
    public int getLodLevel() {
        return lodLevel;
    }
    /**
     * Returns this geometry's mesh vertex count.
     * NOTE(review): throws NPE if the mesh has not been set — confirm callers
     * guarantee a mesh is present.
     *
     * @return this geometry's mesh vertex count.
     *
     * @see Mesh#getVertexCount()
     */
    public int getVertexCount() {
        return mesh.getVertexCount();
    }
    /**
     * Returns this geometry's mesh triangle count.
     * NOTE(review): throws NPE if the mesh has not been set — confirm callers
     * guarantee a mesh is present.
     *
     * @return this geometry's mesh triangle count.
     *
     * @see Mesh#getTriangleCount()
     */
    public int getTriangleCount() {
        return mesh.getTriangleCount();
    }
public void setMesh(Mesh mesh) {
if (mesh == null) {
throw new IllegalArgumentException();
}
this.mesh = mesh;
setBoundRefresh();
if (isGrouped()) {
groupNode.onMeshChange(this);
}
}
    /**
     * Returns the mesh to use for this geometry
     *
     * @return the mesh to use for this geometry (may be null if never set)
     *
     * @see #setMesh(com.jme3.scene.Mesh)
     */
    public Mesh getMesh() {
        return mesh;
    }
    /**
     * Sets the material to use for this geometry.
     *
     * @param material the material to use for this geometry
     */
    @Override
    public void setMaterial(Material material) {
        this.material = material;
        // A grouped geometry's batches are material-dependent; notify the group.
        if (isGrouped()) {
            groupNode.onMaterialChange(this);
        }
    }
    /**
     * Returns the material that is used for this geometry.
     *
     * @return the material that is used for this geometry (may be null if never set)
     *
     * @see #setMaterial(com.jme3.material.Material)
     */
    public Material getMaterial() {
        return material;
    }
    /**
     * @return The bounding volume of the mesh, in model space.
     *         NOTE(review): throws NPE if the mesh has not been set.
     */
    public BoundingVolume getModelBound() {
        return mesh.getBound();
    }
    /**
     * Updates the bounding volume of the mesh. Should be called when the
     * mesh has been modified. Also flags the world bound for recomputation.
     */
    public void updateModelBound() {
        mesh.updateBound();
        setBoundRefresh();
    }
    /**
     * <code>updateWorldBound</code> updates the bounding volume that contains
     * this geometry. The location of the geometry is based on the location of
     * all this node's parents.
     *
     * @throws NullPointerException if this geometry has no mesh
     * @see Spatial#updateWorldBound()
     */
    @Override
    protected void updateWorldBound() {
        super.updateWorldBound();
        if (mesh == null) {
            throw new NullPointerException("Geometry: " + getName() + " has null mesh");
        }
        if (mesh.getBound() != null) {
            if (ignoreTransform) {
                // we do not transform the model bound by the world transform,
                // just use the model bound as-is
                worldBound = mesh.getBound().clone(worldBound);
            } else {
                worldBound = mesh.getBound().transform(worldTransform, worldBound);
            }
        }
    }
    /**
     * Recomputes the world transform, refreshes the cached world matrix,
     * notifies the managing group node (if any), and re-sorts world lights.
     */
    @Override
    protected void updateWorldTransforms() {
        super.updateWorldTransforms();
        computeWorldMatrix();
        if (isGrouped()) {
            groupNode.onTransformChange(this);
        }
        // geometry requires lights to be sorted
        worldLights.sort(true);
    }
    /**
     * Refreshes the inherited world light list and keeps it sorted, as
     * required for rendering a geometry.
     */
    @Override
    protected void updateWorldLightList() {
        super.updateWorldLightList();
        // geometry requires lights to be sorted
        worldLights.sort(true);
    }
    /**
     * Associate this <code>Geometry</code> with a {@link GeometryGroupNode}.
     *
     * Should only be called by the parent {@link GeometryGroupNode}.
     *
     * @param node Which {@link GeometryGroupNode} to associate with.
     * @param startIndex The starting index of this geometry in the group.
     */
    public void associateWithGroupNode(GeometryGroupNode node, int startIndex) {
        // A geometry can belong to at most one group; detach from any previous one.
        if (isGrouped()) {
            unassociateFromGroupNode();
        }
        this.groupNode = node;
        this.startIndex = startIndex;
    }
    /**
     * Removes the {@link GeometryGroupNode} association from this
     * <code>Geometry</code>.
     *
     * Should only be called by the parent {@link GeometryGroupNode}.
     */
    public void unassociateFromGroupNode() {
        if (groupNode != null) {
            // Once the geometry is removed
            // from the parent, the group node needs to be updated.
            // (NOTE: "onGeoemtryUnassociated" is the actual — misspelled —
            // callback name in the GeometryGroupNode API; do not "fix" it here.)
            groupNode.onGeoemtryUnassociated(this);
            groupNode = null;
            // change the default to -1 to make error detection easier
            startIndex = -1;
        }
    }
    // NOTE(review): this override only delegates to super and is redundant;
    // group-node cleanup happens via setParent(null) below. Kept as-is.
    @Override
    public boolean removeFromParent() {
        return super.removeFromParent();
    }
    /**
     * Detaches from the managing group node when this geometry is removed
     * from the scene graph (parent becomes null).
     */
    @Override
    protected void setParent(Node parent) {
        super.setParent(parent);
        // If the geometry is managed by group node we need to unassociate.
        if (parent == null && isGrouped()) {
            unassociateFromGroupNode();
        }
    }
    // setTransformRefresh() — would indicate that this spatial's transform
    // changed and that a refresh is required. Commented out because Spatial
    // already provides an identical implementation; kept for reference only.
    // @Override
    // protected void setTransformRefresh() {
    //     refreshFlags |= RF_TRANSFORM;
    //     setBoundRefresh();
    // }
    /**
     * Recomputes the matrix returned by {@link Geometry#getWorldMatrix() }.
     * This will require a localized transform update for this geometry.
     */
    public void computeWorldMatrix() {
        // Force a local update of the geometry's transform
        checkDoTransformUpdate();
        // Compute the cached world matrix
        cachedWorldMat.loadIdentity();
        cachedWorldMat.setRotationQuaternion(worldTransform.getRotation());
        cachedWorldMat.setTranslation(worldTransform.getTranslation());
        // Scale is applied via a temporary matrix multiply; TempVars avoids
        // allocating a Matrix4f per call and must be released.
        TempVars vars = TempVars.get();
        Matrix4f scaleMat = vars.tempMat4;
        scaleMat.loadIdentity();
        scaleMat.scale(worldTransform.getScale());
        cachedWorldMat.multLocal(scaleMat);
        vars.release();
    }
    /**
     * A {@link Matrix4f matrix} that transforms the {@link Geometry#getMesh() mesh}
     * from model space to world space. This matrix is computed based on the
     * {@link Geometry#getWorldTransform() world transform} of this geometry.
     * In order to receive updated values, you must call {@link Geometry#computeWorldMatrix() }
     * before using this method.
     *
     * @return Matrix to transform from local space to world space
     *         (the internal cache, not a copy — do not modify)
     */
    public Matrix4f getWorldMatrix() {
        return cachedWorldMat;
    }
    /**
     * Sets the model bound to use for this geometry.
     * This alters the bound used on the mesh as well via
     * {@link Mesh#setBound(com.jme3.bounding.BoundingVolume) } and
     * forces the world bounding volume to be recomputed.
     *
     * @param modelBound The model bound to set
     */
    @Override
    public void setModelBound(BoundingVolume modelBound) {
        // Drop the stale world bound; it is rebuilt from the new model bound.
        this.worldBound = null;
        mesh.setBound(modelBound);
        setBoundRefresh();
        // NOTE: Calling updateModelBound() would cause the mesh
        // to recompute the bound based on the geometry thus making
        // this call useless!
        //updateModelBound();
    }
public int collideWith(Collidable other, CollisionResults results) {
// Force bound to update
checkDoBoundUpdate();
// Update transform, and compute cached world matrix
computeWorldMatrix();
assert (refreshFlags & (RF_BOUND | RF_TRANSFORM)) == 0;
if (mesh != null) {
// NOTE: BIHTree in mesh already checks collision with the
// mesh's bound
int prevSize = results.size();
int added = mesh.collideWith(other, cachedWorldMat, worldBound, results);
int newSize = results.size();
for (int i = prevSize; i < newSize; i++) {
results.getCollisionDirect(i).setGeometry(this);
}
return added;
}
return 0;
}
    /**
     * Visits this geometry. As a leaf node there are no children to recurse into.
     */
    @Override
    public void depthFirstTraversal(SceneGraphVisitor visitor) {
        visitor.visit(this);
    }
    /**
     * Intentionally a no-op: a geometry is a leaf node and contributes no
     * children to the traversal queue (the visitor call is handled by the
     * traversal driver).
     */
    @Override
    protected void breadthFirstTraversal(SceneGraphVisitor visitor, Queue<Spatial> queue) {
    }
    /**
     * Determine whether this <code>Geometry</code> is managed by a
     * {@link GeometryGroupNode} or not.
     *
     * @return True if managed by a {@link GeometryGroupNode}.
     */
    public boolean isGrouped() {
        return groupNode != null;
    }
    /**
     * @return True if managed by a {@link GeometryGroupNode}.
     * @deprecated Use {@link #isGrouped()} instead.
     */
    @Deprecated
    public boolean isBatched() {
        return isGrouped();
    }
    /**
     * This version of clone is a shallow clone, in other words, the
     * same mesh is referenced as the original geometry.
     * Exception: if the mesh is marked as being a software
     * animated mesh, (bind pose is set) then the positions
     * and normals are deep copied.
     *
     * @param cloneMaterial true to clone the material, false to share it
     * @return the cloned geometry (never managed by a group node)
     */
    @Override
    public Geometry clone(boolean cloneMaterial) {
        Geometry geomClone = (Geometry) super.clone(cloneMaterial);
        // This geometry is managed,
        // but the cloned one is not attached to anything, hence not managed.
        if (geomClone.isGrouped()) {
            geomClone.groupNode = null;
            geomClone.startIndex = -1;
        }
        // The world-matrix cache must not be shared between instances.
        geomClone.cachedWorldMat = cachedWorldMat.clone();
        if (material != null) {
            if (cloneMaterial) {
                geomClone.material = material.clone();
            } else {
                geomClone.material = material;
            }
        }
        // Bind pose present => software-animated mesh: clone position/normal buffers.
        if (mesh != null && mesh.getBuffer(Type.BindPosePosition) != null) {
            geomClone.mesh = mesh.cloneForAnim();
        }
        return geomClone;
    }
    /**
     * This version of clone is a shallow clone, in other words, the
     * same mesh is referenced as the original geometry.
     * Exception: if the mesh is marked as being a software
     * animated mesh, (bind pose is set) then the positions
     * and normals are deep copied.
     *
     * Equivalent to {@code clone(true)} — the material is cloned.
     */
    @Override
    public Geometry clone() {
        return clone(true);
    }
/**
* Create a deep clone of the geometry. This creates an identical copy of
* the mesh with the vertex buffer data duplicated.
*/
@Override
public Spatial deepClone() {
Geometry geomClone = clone(true);
geomClone.mesh = mesh.deepClone();
return geomClone;
}
    /**
     * Serializes this geometry: mesh, material asset name (when available,
     * so it can be reloaded as a J3M), the material itself, and the
     * ignoreTransform flag.
     */
    @Override
    public void write(JmeExporter ex) throws IOException {
        super.write(ex);
        OutputCapsule oc = ex.getCapsule(this);
        oc.write(mesh, "mesh", null);
        if (material != null) {
            // Stored so read() can first try to reload the material by asset name.
            oc.write(material.getAssetName(), "materialName", null);
        }
        oc.write(material, "material", null);
        oc.write(ignoreTransform, "ignoreTransform", false);
    }
    /**
     * Deserializes this geometry. Material resolution order: (1) load by
     * asset name via the asset manager, (2) fall back to the embedded
     * material savable. Version-0 models with a shared mesh are fixed up
     * by extracting the shared vertex data.
     */
    @Override
    public void read(JmeImporter im) throws IOException {
        super.read(im);
        InputCapsule ic = im.getCapsule(this);
        mesh = (Mesh) ic.readSavable("mesh", null);
        material = null;
        String matName = ic.readString("materialName", null);
        if (matName != null) {
            // Material name is set,
            // Attempt to load material via J3M
            try {
                material = im.getAssetManager().loadMaterial(matName);
            } catch (AssetNotFoundException ex) {
                // Cannot find J3M file.
                // ("key" is presumably the asset key field inherited from Spatial — confirm.)
                logger.log(Level.FINE, "Cannot locate {0} for geometry {1}", new Object[]{matName, key});
            }
        }
        // If material is NULL, try to load it from the geometry
        if (material == null) {
            material = (Material) ic.readSavable("material", null);
        }
        ignoreTransform = ic.readBoolean("ignoreTransform", false);
        if (ic.getSavableVersion(Geometry.class) == 0) {
            // Fix shared mesh (if set)
            Mesh sharedMesh = getUserData(UserData.JME_SHAREDMESH);
            if (sharedMesh != null) {
                getMesh().extractVertexData(sharedMesh);
                setUserData(UserData.JME_SHAREDMESH, null);
            }
        }
    }
} |
package com.servinglynk.hmis.warehouse.service;
import java.nio.charset.Charset;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.web.client.RestTemplate;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.servinglynk.hmis.warehouse.model.Client;
import com.servinglynk.hmis.warehouse.model.EnrollmentModel;
import com.servinglynk.hmis.warehouse.model.GlobalEnrollment;
import com.servinglynk.hmis.warehouse.model.GlobalEnrollmentMap;
import com.servinglynk.hmis.warehouse.model.GlobalEnrollmentsMap;
import com.servinglynk.hmis.warehouse.model.GlobalProjectMap;
import com.servinglynk.hmis.warehouse.model.GlobalProjectModel;
import com.servinglynk.hmis.warehouse.model.GlobalProjectsMap;
import com.servinglynk.hmis.warehouse.model.HmisPostingModel;
import com.servinglynk.hmis.warehouse.model.QuestionResponseModel;
import com.servinglynk.hmis.warehouse.model.SessionModel;
@Service
public class HmisPostingServiceImpl implements HmisPostingService {
    // Commons-logging logger for this service.
    protected final Log logger = LogFactory.getLog(getClass());
    // REST client used for all HMIS / global-API / survey-API calls.
    @Autowired RestTemplate restTemplate;
    // Factory for version-specific sub-services (e.g. enrollment creation).
    @Autowired protected ServiceFactory serviceFactory;
    /**
     * Entry point for HMIS posting: ensures an enrollment exists for the
     * client (creating one when needed), links the client survey submission
     * to it, then posts the survey's question responses and assessment
     * questions against that enrollment.
     */
    @Override
    public void postHmis(HmisPostingModel hmisPostingModel, SessionModel session) {
        if(hmisPostingModel != null) {
            UUID enrollmentId = null;
            String hmisPostingStatus = hmisPostingModel.getHmisPostingStatus();
            HttpHeaders httpHeader = getHttpHeader(session.getClientId(), session.getSessionToken());
            // "CREATE" with no global enrollment: always make a fresh enrollment.
            if(StringUtils.equals("CREATE", hmisPostingStatus) && hmisPostingModel.getGlobalEnrollmentId() == null) {
                enrollmentId = createEnrollment(hmisPostingModel, httpHeader);
            } else {
                // When a global enrollment is provided we need to get the version specific enrollment to complete Hmis Posting.
                enrollmentId = getVersionSpecificEnrollmentId(hmisPostingModel, httpHeader);
                if(enrollmentId == null ) {
                    // Create a version specific enrollment if one does not exists from the global household
                    enrollmentId = createEnrollment(hmisPostingModel, httpHeader);
                }
            }
            // NOTE(review): if enrollment creation failed, enrollmentId may still
            // be null here and the link/posting below proceed with "null" in the
            // URL — confirm whether an early return is intended.
            String hmisLink = "/hmis-clientapi/rest/v"+hmisPostingModel.getSchemaVersion()+"/clients/"+hmisPostingModel.getClientId()+"/enrollments/"+enrollmentId;
            updateClientSurveySubmission(hmisPostingModel, httpHeader, hmisLink, HttpMethod.PUT);
            postQuestionsToHmis(hmisPostingModel, httpHeader, enrollmentId, null);
            postAssessmentQuestions(hmisPostingModel, httpHeader, enrollmentId);
        }
    }
/***
* Get Global project by id
* @param globalProjectId
* @param headers
* @return
*/
private GlobalProjectModel getGlobalProjectById(UUID globalProjectId, HttpHeaders headers) {
try {
HttpEntity<Client> requestEntity = new HttpEntity<Client>(headers);
ResponseEntity<GlobalProjectModel> responseEntity = restTemplate.exchange("http://hmiselb.aws.hmislynk.com/hmis-globalapi/rest/globalprojects/"+globalProjectId, HttpMethod.GET,requestEntity, GlobalProjectModel.class);
logger.info("Project created "+responseEntity.getStatusCodeValue());
return responseEntity.getBody();
} catch (Exception e) {
logger.error(" Error when getGlobalProjectById is called", e.getCause());
}
return null;
}
/***
* Get Global project by id
* @param globalProjectId
* @param headers
* @return
*/
private GlobalEnrollment getGlobalEnrollmentById(UUID globalEnrollmentId, HttpHeaders headers) {
try {
HttpEntity<Client> requestEntity = new HttpEntity<Client>(headers);
ResponseEntity<GlobalEnrollment> responseEntity = restTemplate.exchange("http://hmiselb.aws.hmislynk.com/hmis-globalapi/rest/globalenrollments/"+globalEnrollmentId, HttpMethod.GET,requestEntity, GlobalEnrollment.class);
logger.info("Project created "+responseEntity.getStatusCodeValue());
return responseEntity.getBody();
} catch (Exception e) {
logger.error(" Error when getGlobalEnrollmentById is called", e.getCause());
}
return null;
}
/***
* Get Version specific projectId to create the enrollment.
* If a version specific projectId does not exists, create one and return it.
* @param hmisPostingModel
* @param headers
* @return
*/
private UUID getVersionSpecificProjectId(HmisPostingModel hmisPostingModel, HttpHeaders headers) {
GlobalProjectModel globalProjectById = getGlobalProjectById(hmisPostingModel.getGlobalProjectId(), headers);
String schemaVersion = hmisPostingModel.getSchemaVersion();
GlobalProjectsMap projectsMap = globalProjectById.getProjects();
if(projectsMap != null) {
List<GlobalProjectMap> globalProjectMaps = projectsMap.getGlobalProjectMaps();
if(!CollectionUtils.isEmpty(globalProjectMaps)) {
for(GlobalProjectMap globalProjectMap : globalProjectMaps) {
if(StringUtils.equals(schemaVersion, globalProjectMap.getSource())) {
return globalProjectMap.getProjectId();
}
}
}
}
return createVersionSpecificProject(globalProjectById, headers, hmisPostingModel);
}
/***
* Get Version specific projectId to create the enrollment.
* If a version specific projectId does not exists, create one and return it.
* @param hmisPostingModel
* @param headers
* @return
*/
private UUID getVersionSpecificEnrollmentId(HmisPostingModel hmisPostingModel, HttpHeaders headers) {
GlobalEnrollment globalEnrollmentById = getGlobalEnrollmentById(hmisPostingModel.getGlobalEnrollmentId(), headers);
String schemaVersion = hmisPostingModel.getSchemaVersion();
GlobalEnrollmentsMap enrollments = globalEnrollmentById.getEnrollments();
if(enrollments != null) {
List<GlobalEnrollmentMap> globalEnrollmentMaps = enrollments.getGlobalEnrollmentMaps();
if(!CollectionUtils.isEmpty(globalEnrollmentMaps)) {
for(GlobalEnrollmentMap globalEnrollmentMap : globalEnrollmentMaps) {
if(StringUtils.equals(schemaVersion, globalEnrollmentMap.getSource())) {
return globalEnrollmentMap.getEnrollmentId();
}
}
}
}
return null;
}
/****
* Create a version specific project from the gloabl project id
* @param globalProjectById
* @param headers
* @param hmisPostingModel
* @return
*/
private UUID createVersionSpecificProject(GlobalProjectModel globalProjectById, HttpHeaders headers,
HmisPostingModel hmisPostingModel) {
Map<String, Object> map = new HashMap<>();
map.put("projectName", globalProjectById.getProjectName());
map.put("sourceSystemId", globalProjectById.getSourceSystemId());
map.put("description", globalProjectById.getDescription());
map.put("projectCommonName", globalProjectById.getProjectCommonName());
ObjectMapper objectMapper = new ObjectMapper();
try {
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String jsonObj = objectMapper.writer().withRootName("project").writeValueAsString(map);
String url = "http://hmiselb.aws.hmislynk.com/hmis-clientapi-v"+hmisPostingModel.getSchemaVersion()+"/rest/projects";
Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
LinkedHashMap<Object, Map> persons = (LinkedHashMap<Object, Map>) responseObject;
Set<Entry<Object, Map>> entrySet = persons.entrySet();
Entry<Object, Map> next = entrySet.iterator().next();
logger.info("key:"+next.getKey() + " value:"+next.getValue());
Map value = next.getValue();
return UUID.fromString((String)value.get("projectId"));
} catch (JsonMappingException e) {
logger.error(" Error when createVersionSpecificProject is called", e.getCause());
} catch (JsonProcessingException e) {
logger.error(" Error when createVersionSpecificProject is called", e.getCause());
}
return null;
}
/****
* Create a version specific project from the gloabl project id
* @param globalProjectById
* @param headers
* @param hmisPostingModel
* @return
*/
private UUID createAssessment(HttpHeaders headers, HmisPostingModel hmisPostingModel, UUID enrollmentId) {
Map<String, Object> map = new HashMap<>();
map.put("assessmentType", "1");
map.put("assessmentLevel", "1");
ObjectMapper objectMapper = new ObjectMapper();
try {
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String jsonObj = objectMapper.writer().withRootName("assessment").writeValueAsString(map);
String requestPath = hmisPostingModel.getSchemaVersion()+"/{clientid}/enrollments/{enrollmentid}/assessments";
String url = getAssessmentUrl(requestPath, hmisPostingModel.getClientId(), enrollmentId, null, null);
Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
LinkedHashMap<Object, Map> persons = (LinkedHashMap<Object, Map>) responseObject;
Set<Entry<Object, Map>> entrySet = persons.entrySet();
Entry<Object, Map> next = entrySet.iterator().next();
logger.info("key:"+next.getKey() + " value:"+next.getValue());
Map value = next.getValue();
return UUID.fromString((String)value.get("assessmentId"));
} catch (JsonMappingException e) {
logger.error(" Error when createAssessment is called", e.getCause());
} catch (JsonProcessingException e) {
logger.error(" Error when createAssessment is called", e.getCause());
}
return null;
}
/***
* The first step for Hmis posting is to make sure an enrollment is created.
* @param hmisPostingModel
* @param headers
* @param schemaYear
*/
private UUID createEnrollment(HmisPostingModel hmisPostingModel, HttpHeaders headers) {
UUID enrollmentId = null;
try {
EnrollmentModel enrollmentModel = new EnrollmentModel();
enrollmentModel.setClientId(hmisPostingModel.getClientId());
LocalDateTime entryDate = hmisPostingModel.getEntryDate();
if(entryDate != null) {
Date entryUtilDate = Date.from(entryDate.atZone(ZoneId.systemDefault()).toInstant());
enrollmentModel.setEntryDate(entryUtilDate);
}
UUID projectId = getVersionSpecificProjectId(hmisPostingModel, headers);
enrollmentModel.setProjectId(projectId);
enrollmentId = serviceFactory.getEnrollmentService().createEnrollment(hmisPostingModel.getSchemaVersion(), hmisPostingModel.getClientId(), enrollmentModel, headers);
} catch (Exception e) {
logger.error(" Error when createEnrollment is called", e.getCause());
}
return enrollmentId;
}
/***
* Create Exit if the survey category is Exit
* @param hmisPostingModel
* @param headers
* @param enrollmentId
*/
private UUID createExit(HmisPostingModel hmisPostingModel, HttpHeaders headers, UUID enrollmentId) {
Map<String, Object> map = new HashMap<>();
map.put("exitDate", hmisPostingModel.getExitDate());
ObjectMapper objectMapper = new ObjectMapper();
try {
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String jsonObj = objectMapper.writer().withRootName("exit").writeValueAsString(map);
String url = "http://hmiselb.aws.hmislynk.com/hmis-clientapi-v"+hmisPostingModel.getSchemaVersion()+"/rest/enrollments/"+enrollmentId+"/exits";
Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
LinkedHashMap<Object, Map> persons = (LinkedHashMap<Object, Map>) responseObject;
Set<Entry<Object, Map>> entrySet = persons.entrySet();
Entry<Object, Map> next = entrySet.iterator().next();
logger.info("key:"+next.getKey() + " value:"+next.getValue());
Map value = next.getValue();
return UUID.fromString((String)value.get("exitId"));
} catch (JsonMappingException e) {
logger.error(" Error when createExit is called", e.getCause());
} catch (JsonProcessingException e) {
logger.error(" Error when createExit is called", e.getCause());
}
return null;
}
    /***
     * Post survey question responses to HMIS, grouped by target REST
     * resource. Creates an Exit first when the survey category is "2"
     * (exit survey), then POSTs one payload per resource group and
     * writes the resulting hmisLink back onto each source response.
     * @param hmisPostingModel
     * @param headers
     * @param enrollmentId
     * @param projectId may be null; forwarded to getUrl()
     */
    private void postQuestionsToHmis(HmisPostingModel hmisPostingModel, HttpHeaders headers, UUID enrollmentId, UUID projectId) {
        UUID exitId = null;
        // Survey category "2" denotes an exit survey — create the Exit up front.
        if(StringUtils.equals("2", hmisPostingModel.getSurveyCategory())) {
            exitId = createExit(hmisPostingModel, headers, enrollmentId);
        }
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        List<QuestionResponseModel> questionResponses = hmisPostingModel.getQuestionResponses();
        if(!CollectionUtils.isEmpty(questionResponses)) {
            // Group responses by the REST resource they update.
            Map<String,List<QuestionResponseModel>> questionResponseMap = new HashMap<>();
            for( QuestionResponseModel questionResponseModel : questionResponses) {
                logger.info(questionResponseModel.getUriObjectField());
                List<QuestionResponseModel> list = questionResponseMap.get(questionResponseModel.getUriObjectField());
                if(!CollectionUtils.isEmpty(list)) {
                    list.add(questionResponseModel);
                    questionResponseMap.put(questionResponseModel.getUriObjectField(), list);
                } else {
                    List<QuestionResponseModel> questionResponseModels = new ArrayList<QuestionResponseModel>();
                    questionResponseModels.add(questionResponseModel);
                    // NOTE(review): new groups are keyed by getUpdateUrlTemplate()
                    // while the lookup above uses getUriObjectField(). Unless the
                    // two always return the same value, grouping never merges and
                    // each response lands in its own singleton group — confirm
                    // which key is intended.
                    questionResponseMap.put(questionResponseModel.getUpdateUrlTemplate(), questionResponseModels);
                }
            }
            Set<String> keySet = questionResponseMap.keySet();
            for(String questionResponseKey : keySet) {
                try {
                    List<QuestionResponseModel> list = questionResponseMap.get(questionResponseKey);
                    String rootName = "";
                    List<UUID> responseIds = new ArrayList<>();
                    if(!CollectionUtils.isEmpty(list)) {
                        Map<String, Object> map = new HashMap<>();
                        for(QuestionResponseModel questionResponseModel : list) {
                            // uriObjectField is "<rootName>.<fieldName>"; split it.
                            String uriObjectField = questionResponseModel.getUriObjectField();
                            String[] split = StringUtils.split(uriObjectField, ".");
                            rootName = split[0];
                            responseIds.add(questionResponseModel.getResponseId());
                            // Prefer the coded pick-list value over free text.
                            map.put(split[1], StringUtils.isNotBlank(questionResponseModel.getPickListValueCode()) ? questionResponseModel.getPickListValueCode() : questionResponseModel.getResponseText());
                        }
                        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
                        String jsonObj = objectMapper.writer().withRootName(rootName).writeValueAsString(map);
                        String url = getUrl(questionResponseKey,hmisPostingModel.getClientId(), enrollmentId, exitId, projectId, null);
                        // Create the url from the uriObjectFied.
                        Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
                        // Response body is a single-entry wrapper map keyed by root name.
                        LinkedHashMap<Object, Map> persons = (LinkedHashMap<Object, Map>) responseObject;
                        Set<Entry<Object, Map>> entrySet = persons.entrySet();
                        Entry<Object, Map> next = entrySet.iterator().next();
                        logger.info("key:"+next.getKey() + " value:"+next.getValue());
                        Map value = next.getValue();
                        String rootId = (String)value.get(rootName+"Id");
                        // Record the created resource's link on every contributing response.
                        updateResponse(hmisPostingModel, responseIds, headers, url+"/"+rootId, HttpMethod.PUT);
                    }
                }catch(Exception e) {
                    logger.error(" Error when postQuestionsToHmis is called", e.getCause());
                }
            }
        }
    }
/***
* Update HmisLink in the response table.
* @param hmisPostingModel
* @param responseIds
* @param headers
* @param hmisLink
* @param httpMethod
*/
private void updateResponse(HmisPostingModel hmisPostingModel,List<UUID> responseIds, HttpHeaders headers, String hmisLink, HttpMethod httpMethod) {
Map<String, Object> map = new HashMap<>();
map.put("hmisLink", StringUtils.replace(hmisLink, "http://hmiselb.aws.hmislynk.com", ""));
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
for(UUID responseId : responseIds) {
try {
String url = "http://ceselb.aws.hmislynk.com/survey-api/rest/v3/clients/"+hmisPostingModel.getDedupClientId()+"/surveys/"+hmisPostingModel.getSurveyId()+"/responses/"+responseId;
String jsonObj = objectMapper.writer().withRootName("response").writeValueAsString(map);
Object responseObject = makeAPICall(jsonObj, headers, url, httpMethod);
LinkedHashMap<Object, Map> persons = (LinkedHashMap<Object, Map>) responseObject;
Set<Entry<Object, Map>> entrySet = persons.entrySet();
Entry<Object, Map> next = entrySet.iterator().next();
logger.info("key:"+next.getKey() + " value:"+next.getValue());
} catch (JsonMappingException e) {
logger.error(" Error when updateResponse is called", e.getCause());
} catch (JsonProcessingException e) {
logger.error(" Error when updateResponse is called", e.getCause());
}
}
}
/***
 * Updates the hmisLink and globalEnrollmentId columns of the client_survey_submission row.
 * @param hmisPostingModel carries the submission id and global enrollment id
 * @param headers authenticated HTTP headers
 * @param hmisLink absolute link to the HMIS enrollment
 * @param httpMethod HTTP method to use (callers pass PUT)
 */
private void updateClientSurveySubmission(HmisPostingModel hmisPostingModel, HttpHeaders headers, String hmisLink, HttpMethod httpMethod) {
    Map<String, Object> map = new HashMap<>();
    // Persist the link relative to the HMIS host.
    map.put("hmisLink", StringUtils.replace(hmisLink, "http://hmiselb.aws.hmislynk.com", ""));
    map.put("globalEnrollmentId", hmisPostingModel.getGlobalEnrollmentId());
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    try {
        String url = "http://ceselb.aws.hmislynk.com/survey-api/rest/clientsurveysubmissions/" + hmisPostingModel.getClientSurveySubmissionId();
        String jsonObj = objectMapper.writer().withRootName("clientsurveysubmission").writeValueAsString(map);
        makeAPICall(jsonObj, headers, url, httpMethod);
    } catch (JsonProcessingException e) {
        // Single catch: JsonMappingException extends JsonProcessingException. Also fixed the
        // copy-pasted message (previously "updateResponse") and log the exception itself.
        logger.error(" Error when updateClientSurveySubmission is called", e);
    }
}
/***
 * Posts CES-classified question/answer pairs to HMIS as assessment questions.
 * A single assessment is created lazily on the first CES question and reused for the
 * remaining ones; a failure for one question is logged and does not stop the loop.
 * @param hmisPostingModel posting payload holding the question responses
 * @param headers authenticated HTTP headers
 * @param enrollmentId enrollment the assessment belongs to
 */
private void postAssessmentQuestions(HmisPostingModel hmisPostingModel, HttpHeaders headers, UUID enrollmentId) {
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    List<QuestionResponseModel> questionResponses = hmisPostingModel.getQuestionResponses();
    boolean isAssessmentCreated = false;
    // Hoisted out of the loop so the assessment created for the first CES question is reused.
    UUID assessmentId = null;
    if (!CollectionUtils.isEmpty(questionResponses)) {
        for (QuestionResponseModel questionResponseModel : questionResponses) {
            logger.info(questionResponseModel.getUriObjectField());
            if (StringUtils.equals(questionResponseModel.getQuestionClassification(), "CES")) {
                try {
                    // BUG FIX: the flag test was inverted (if(isAssessmentCreated)), so with the
                    // flag starting false no assessment was ever created and assessmentId stayed
                    // null. The sibling implementation of this method uses !isAssessmentCreated.
                    if (!isAssessmentCreated) {
                        assessmentId = createAssessment(headers, hmisPostingModel, enrollmentId);
                        isAssessmentCreated = true;
                    }
                    Map<String, Object> map = new HashMap<>();
                    map.put("assessmentQuestion", questionResponseModel.getQuestionText());
                    map.put("assessmentAnswer", questionResponseModel.getResponseText());
                    if (hmisPostingModel.getDedupClientId() != null)
                        map.put("dedupClientId", String.valueOf(hmisPostingModel.getDedupClientId()));
                    String jsonObj = objectMapper.writer().withRootName("assessmentQuestion").writeValueAsString(map);
                    // NOTE(review): this template does not start with "/", so getAssessmentUrl
                    // leaves the placeholders unresolved unless the schema version itself begins
                    // with "/" — confirm the schema version format against real data.
                    String requestPath = hmisPostingModel.getSchemaVersion() + "/{clientid}/enrollments/{enrollmentid}/assessments/{assessmentid}/assessmentquestions";
                    String url = getAssessmentUrl(requestPath, hmisPostingModel.getClientId(), enrollmentId, assessmentId, null);
                    makeAPICall(jsonObj, headers, url, HttpMethod.POST);
                } catch (Exception e) {
                    // Log the exception itself; getCause() is often null and loses the trace.
                    logger.error(" Error when creating assessment questions", e);
                }
            }
        }
    }
}
/***
 * Builds the version-specific HMIS client-API URL from a URI template.
 * The template's leading slash and last path segment are dropped, "/rest" is spliced in
 * after the schema year when missing, and the {clientid}/{enrollmentid}/{exitid}/{projectid}
 * placeholders are substituted with the supplied ids (a null id leaves its placeholder).
 * NOTE(review): a template that does not start with "/" is returned completely unresolved —
 * confirm every update-URL template in the question metadata begins with "/".
 * @param requestPath URI template, expected to start with "/"
 * @param clientId substituted for {clientid} when non-null
 * @param enrollmentId substituted for {enrollmentid} when non-null
 * @param exitId substituted for {exitid} when non-null
 * @param projectId substituted for {projectid} when non-null
 * @param id optional trailing path segment appended to the truncated template
 * @return absolute URL on the HMIS host
 */
private String getUrl(String requestPath, UUID clientId, UUID enrollmentId, UUID exitId, UUID projectId, UUID id) {
    if (StringUtils.startsWith(requestPath, "/")) {
        // Drop the leading slash and the trailing path segment of the template.
        requestPath = StringUtils.substring(requestPath, 1);
        requestPath = StringUtils.substring(requestPath, 0, StringUtils.lastIndexOf(requestPath, "/"));
        if (id != null) {
            requestPath = requestPath + "/" + id;
        }
        // Ensure the "/rest" prefix follows the schema year exactly once.
        if (StringUtils.indexOf(requestPath, "/2020/rest") == -1) {
            requestPath = StringUtils.replace(requestPath, "2020", "2020/rest");
        }
        // The former "if (requestPath != null)" was dead code: substring of a
        // non-null string never returns null. Removed.
        if (enrollmentId != null)
            requestPath = StringUtils.replace(requestPath, "{enrollmentid}", enrollmentId.toString());
        if (exitId != null)
            requestPath = StringUtils.replace(requestPath, "{exitid}", exitId.toString());
        if (projectId != null)
            requestPath = StringUtils.replace(requestPath, "{projectid}", projectId.toString());
        if (clientId != null)
            requestPath = StringUtils.replace(requestPath, "{clientid}", clientId.toString());
    }
    return "http://hmiselb.aws.hmislynk.com/hmis-clientapi-" + requestPath;
}
/***
 * Builds the version-specific assessment URL from a URI template. Mirrors getUrl but
 * substitutes {assessmentid} instead of {exitid}/{projectid}.
 * NOTE(review): a template that does not start with "/" is returned unresolved —
 * the callers in this class pass schema-version-prefixed templates; confirm the format.
 * @param requestPath URI template, expected to start with "/"
 * @param clientId substituted for {clientid} when non-null
 * @param enrollmentId substituted for {enrollmentid} when non-null
 * @param assessmentId substituted for {assessmentid} when non-null
 * @param id optional trailing path segment appended to the truncated template
 * @return absolute URL on the HMIS host
 */
private String getAssessmentUrl(String requestPath, UUID clientId, UUID enrollmentId, UUID assessmentId, UUID id) {
    if (StringUtils.startsWith(requestPath, "/")) {
        // Drop the leading slash and the trailing path segment of the template.
        requestPath = StringUtils.substring(requestPath, 1);
        requestPath = StringUtils.substring(requestPath, 0, StringUtils.lastIndexOf(requestPath, "/"));
        if (id != null) {
            requestPath = requestPath + "/" + id;
        }
        // Ensure the "/rest" prefix follows the schema year exactly once.
        if (StringUtils.indexOf(requestPath, "/2020/rest") == -1) {
            requestPath = StringUtils.replace(requestPath, "2020", "2020/rest");
        }
        // The former "if (requestPath != null)" was dead code: substring of a
        // non-null string never returns null. Removed.
        if (enrollmentId != null)
            requestPath = StringUtils.replace(requestPath, "{enrollmentid}", enrollmentId.toString());
        if (assessmentId != null)
            requestPath = StringUtils.replace(requestPath, "{assessmentid}", assessmentId.toString());
        if (clientId != null)
            requestPath = StringUtils.replace(requestPath, "{clientid}", clientId.toString());
    }
    return "http://hmiselb.aws.hmislynk.com/hmis-clientapi-" + requestPath;
}
/***
 * Executes a single HTTP exchange against the given URL and returns the parsed body.
 * All failures are logged and swallowed so callers can treat a null return as "failed".
 * @param objectJson JSON request body
 * @param headers authenticated HTTP headers
 * @param url absolute target URL
 * @param httpMethod HTTP method to execute
 * @return the response body, or null when the call fails for any reason
 */
private Object makeAPICall(String objectJson, HttpHeaders headers, String url, HttpMethod httpMethod) {
    try {
        RestTemplate rest = new RestTemplate();
        HttpEntity<Object> requestEntity = new HttpEntity<Object>(objectJson, headers);
        logger.info("Make API URL " + url);
        logger.info("Make API objectJSON " + objectJson);
        ResponseEntity<Object> responseEntity = rest.exchange(url, httpMethod, requestEntity, Object.class);
        logger.info("Make API call " + responseEntity.getStatusCodeValue());
        return responseEntity.getBody();
    } catch (Exception e) {
        // Log the exception itself, not getCause(): the cause is frequently null
        // (e.g. for HTTP status exceptions) and logging it discards the stack trace.
        logger.error(" Error when makeAPICall is called", e);
    }
    return null;
}
/***
 * Builds the HTTP headers required by the HMIS trusted-app APIs: JSON content
 * negotiation plus the trusted-app id and session-token authentication headers.
 * @param clientId trusted application id
 * @param sessionToken HMIS session token
 * @return fully populated headers for an authenticated JSON request
 */
public HttpHeaders getHttpHeader(String clientId, String sessionToken) {
    HttpHeaders headers = new HttpHeaders();
    headers.add("Accept", "application/json");
    headers.add("Content-Type", "application/json; charset=UTF-8");
    headers.add("X-HMIS-TrustedApp-Id", clientId);
    headers.add("Authorization", "HMISUserAuth session_token=" + sessionToken);
    // Set the typed media type last so the final Content-Type value is authoritative.
    MediaType jsonUtf8 = new MediaType("application", "json", Charset.forName("UTF-8"));
    headers.setContentType(jsonUtf8);
    return headers;
}
}
// modification, are permitted provided that the following conditions are met:
// documentation and/or other materials provided with the distribution.
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.db.oom;
import jodd.db.DbManager;
import jodd.db.DbQuery;
import jodd.db.DbSession;
import jodd.db.pool.CoreConnectionPool;
import jodd.exception.UncheckedException;
import jodd.log.Logger;
import jodd.log.LoggerFactory;
import jodd.log.impl.NOPLogger;
import jodd.log.impl.NOPLoggerFactory;
import static org.junit.Assert.assertTrue;
/**
* Abstract common DB integration test class.
*/
public abstract class DbBaseTest {
public static final String DB_NAME = "jodd-test";
protected CoreConnectionPool connectionPool;
protected DbOomManager dboom;
/**
 * Initializes the DbOom test infrastructure. Installs a logger factory whose
 * warn() methods throw, so any warning emitted during a test fails it immediately,
 * then resets the DbOomManager singleton and creates a fresh, uninitialized pool.
 */
protected void init() {
// Any warn() call raises UncheckedException: tests must run warning-free.
LoggerFactory.setLoggerFactory(new NOPLoggerFactory() {
@Override
public Logger getLogger(String name) {
return new NOPLogger("") {
@Override
public boolean isWarnEnabled() {
// Report warnings as enabled so warn() is actually invoked (and throws).
return true;
}
@Override
public void warn(String message) {
throw new UncheckedException("NO WARNINGS ALLOWED: " + message);
}
@Override
public void warn(String message, Throwable throwable) {
throw new UncheckedException("NO WARNINGS ALLOWED: " + message);
}
};
}
});
// Start every test from a clean DbOomManager and a fresh (not yet initialized) pool;
// subclasses configure the pool in initDb() and connect() initializes it.
DbOomManager.resetAll();
dboom = DbOomManager.getInstance();
connectionPool = new CoreConnectionPool();
}
/**
 * Initializes the configured connection pool and registers it as the global
 * connection provider for DbManager.
 */
protected void connect() {
    connectionPool.init();
    DbManager dbManager = DbManager.getInstance();
    dbManager.setConnectionProvider(connectionPool);
}
/**
 * Template for a database-specific test fixture: concrete subclasses configure the
 * connection pool and supply the DDL for the table under test.
 */
public abstract class DbAccess {

    /** Configures the connection pool and naming strategy for the target database. */
    public abstract void initDb();

    /** Returns the CREATE TABLE statement for the table under test. */
    public abstract String getCreateTableSql();

    /** Returns the name of the table under test. */
    public abstract String getTableName();

    /** Creates the test table and verifies the query was closed with its session. */
    public final void createTables() {
        DbSession dbSession = new DbSession();
        String createSql = getCreateTableSql();
        DbQuery createQuery = new DbQuery(dbSession, createSql);
        createQuery.executeUpdate();
        dbSession.closeSession();
        assertTrue(createQuery.isClosed());
    }

    /** Drops the test table and shuts the shared connection pool down. */
    protected void close() {
        DbSession dbSession = new DbSession();
        DbQuery dropQuery = new DbQuery(dbSession, "drop table " + getTableName());
        dropQuery.executeUpdate();
        dbSession.closeSession();
        assertTrue(dropQuery.isClosed());
        connectionPool.close();
    }
}
/**
 * Returns the host name of the database server used by the integration tests.
 */
public static String dbhost() {
    return "localhost";
}
/**
 * MySql fixture. Creates the "jodd-test" database (if missing) through a temporarily
 * initialized pool, then re-targets the pool URL at that database. Table and column
 * names are uppercased.
 */
public abstract class MySqlDbAccess extends DbAccess {
public final void initDb() {
connectionPool.setDriver("com.mysql.jdbc.Driver");
// Connect without a database segment first so the test database can be created.
connectionPool.setUrl("jdbc:mysql://" + dbhost() + ":3306");
connectionPool.setUser("root");
connectionPool.setPassword("root!");
dboom.getTableNames().setUppercase(true);
dboom.getColumnNames().setUppercase(true);
//dboom.getTableNames().setLowercase(true);
//dboom.getColumnNames().setLowercase(true);
// Temporarily bring the pool up just to issue the CREATE DATABASE statement.
connectionPool.init();
DbSession session = new DbSession(connectionPool);
DbQuery query = new DbQuery(session, "create database IF NOT EXISTS `jodd-test` CHARACTER SET utf8 COLLATE utf8_general_ci;");
query.executeUpdate();
session.closeSession();
// Close the temporary pool and re-point the URL at the created database;
// the caller's connect() re-initializes the pool against it.
connectionPool.close();
connectionPool.setUrl("jdbc:mysql://" + dbhost() + ":3306/" + DB_NAME);
}
}
/**
 * PostgreSql fixture. Assumes the "jodd-test" database already exists on the server.
 * Table and column names are lowercased to match PostgreSql's identifier folding.
 */
public abstract class PostgreSqlDbAccess extends DbAccess {
public void initDb() {
connectionPool.setDriver("org.postgresql.Driver");
connectionPool.setUrl("jdbc:postgresql://" + dbhost() + "/" + DB_NAME);
connectionPool.setUser("postgres");
connectionPool.setPassword("root!");
dboom.getTableNames().setLowercase(true);
dboom.getColumnNames().setLowercase(true);
}
}
/**
 * HsqlDB fixture backed by an in-memory database (no server required).
 * NOTE(review): this replaces the shared connectionPool instance created by init();
 * any reference captured before initDb() still points at the old pool — confirm intended.
 */
public abstract class HsqlDbAccess extends DbAccess {
public final void initDb() {
connectionPool = new CoreConnectionPool();
connectionPool.setDriver("org.hsqldb.jdbcDriver");
connectionPool.setUrl("jdbc:hsqldb:mem:test");
connectionPool.setUser("sa");
connectionPool.setPassword("");
dboom.getTableNames().setUppercase(true);
dboom.getColumnNames().setUppercase(true);
}
}
}
package com.servinglynk.hmis.warehouse.service;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.web.client.RestTemplate;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.servinglynk.hmis.warehouse.model.Client;
import com.servinglynk.hmis.warehouse.model.ClientSurveySubmission;
import com.servinglynk.hmis.warehouse.model.ClientSurveySubmissions;
import com.servinglynk.hmis.warehouse.model.EnrollmentModel;
import com.servinglynk.hmis.warehouse.model.EnrollmentsModel;
import com.servinglynk.hmis.warehouse.model.GlobalEnrollments;
import com.servinglynk.hmis.warehouse.model.GlobalProjectMap;
import com.servinglynk.hmis.warehouse.model.GlobalProjectModel;
import com.servinglynk.hmis.warehouse.model.GlobalProjectsMap;
import com.servinglynk.hmis.warehouse.model.HmisPostingModel;
import com.servinglynk.hmis.warehouse.model.QuestionResponseModel;
import com.servinglynk.hmis.warehouse.model.SessionModel;
@Service
public class HmisPostingServiceImpl implements HmisPostingService {
protected final Log logger = LogFactory.getLog(getClass());
private static final String HMIS_HOST ="http://hmiselb.aws.hmislynk.com";
private static final String CES_HOST = "http://ceselb.aws.hmislynk.com";
@Autowired RestTemplate restTemplate;
@Autowired protected ServiceFactory serviceFactory;
/**
 * Entry point: posts a completed survey submission to HMIS. Resolves (or creates) the
 * version-specific enrollment, posts the question and assessment responses under it,
 * and finally writes the resulting HMIS link back onto the survey submission.
 * @param hmisPostingModel the submission payload to post; no-op when null
 * @param session supplies the trusted-app id and session token for authentication
 */
@Override
public void postHmis(HmisPostingModel hmisPostingModel, SessionModel session) {
if(hmisPostingModel != null) {
UUID enrollmentId = null;
String hmisPostingStatus = hmisPostingModel.getHmisPostingStatus();
HttpHeaders httpHeader = getHttpHeader(session.getTrustedAppId(), session.getSessionToken());
// Fresh intake (category "1") with no global enrollment yet: create the enrollment.
if(StringUtils.equals("CREATE", hmisPostingStatus) && hmisPostingModel.getGlobalEnrollmentId() == null && StringUtils.equals("1", hmisPostingModel.getSurveyCategory())) {
enrollmentId = createEnrollment(hmisPostingModel, httpHeader);
} else {
// Otherwise try to reuse the enrollment recorded on a previously posted intake
// submission for the same client and survey.
String enrollmentAtEntry = getEnrollmentFromSurveySubmissionByClientId(hmisPostingModel, httpHeader);
if(enrollmentAtEntry != null) {
enrollmentId = UUID.fromString(enrollmentAtEntry);
}
//enrollmentId = getVersionSpecificEnrollmentId(hmisPostingModel, httpHeader);
if(enrollmentId == null ) {
// Create a version specific enrollment if one does not exists from the global household
enrollmentId = createEnrollment(hmisPostingModel, httpHeader);
}
}
// NOTE(review): this link uses the un-versioned "hmis-clientapi" service name while
// the API calls above use "hmis-clientapi-<version>" — confirm which form consumers expect.
String hmisLink = "/hmis-clientapi/rest/"+hmisPostingModel.getSchemaVersion()+"/clients/"+hmisPostingModel.getClientId()+"/enrollments/"+enrollmentId;
postQuestionsToHmis(hmisPostingModel, httpHeader, enrollmentId, null);
postAssessmentQuestions(hmisPostingModel, httpHeader, enrollmentId);
updateClientSurveySubmission(hmisPostingModel, httpHeader, hmisLink, HttpMethod.PUT,enrollmentId);
}
}
/***
 * Looks up an existing enrollment id for the client by scanning the client's survey
 * submissions: a submission qualifies when it is an intake (surveyCategory "1") for the
 * same survey and has already been posted to HMIS (status "DONE"). The id is taken from
 * the submission's globalEnrollmentId when present, otherwise from the last path segment
 * of its hmisLink.
 * @param hmisPostingModel supplies the client id and survey id to match
 * @param headers authenticated HTTP headers
 * @return the enrollment id as a string, or null when no matching submission exists
 */
private String getEnrollmentFromSurveySubmissionByClientId(HmisPostingModel hmisPostingModel, HttpHeaders headers) {
    String enrollmentId = null;
    try {
        HttpEntity<Client> requestEntity = new HttpEntity<Client>(headers);
        ResponseEntity<ClientSurveySubmissions> responseEntity = restTemplate.exchange(CES_HOST + "/survey-api/rest/clientsurveysubmissions?q=" + hmisPostingModel.getClientId(), HttpMethod.GET, requestEntity, ClientSurveySubmissions.class);
        logger.info("Get Survey Submission " + responseEntity.getStatusCodeValue());
        ClientSurveySubmissions clientSurveySubmissions = responseEntity.getBody();
        if (clientSurveySubmissions != null) {
            List<ClientSurveySubmission> clientSurveySubmissionsList = clientSurveySubmissions.getClientSurveySubmissions();
            if (!CollectionUtils.isEmpty(clientSurveySubmissionsList)) {
                for (ClientSurveySubmission clientSurveySubmission : clientSurveySubmissionsList) {
                    if (StringUtils.equals("1", clientSurveySubmission.getSurveyCategory()) && StringUtils.equals("DONE", clientSurveySubmission.getHmisPostingStatus())
                            && hmisPostingModel.getSurveyId().compareTo(clientSurveySubmission.getSurveyId()) == 0) {
                        if (clientSurveySubmission.getGlobalEnrollmentId() != null) {
                            enrollmentId = String.valueOf(clientSurveySubmission.getGlobalEnrollmentId());
                            break;
                        }
                        String hmisLink = clientSurveySubmission.getHmisLink();
                        if (StringUtils.isNotBlank(hmisLink)) {
                            // BUG FIX: the enrollment id is the last segment of hmisLink; the
                            // previous code split the literal "/" (StringUtils.split("/"))
                            // instead of the link, so the "id" was always "/".
                            String[] split = StringUtils.split(hmisLink, "/");
                            enrollmentId = split[split.length - 1];
                            break;
                        }
                    }
                }
            }
        }
    } catch (Exception e) {
        // Fixed the copy-pasted message (previously named getGlobalProjectById) and
        // log the exception itself rather than a possibly-null cause.
        logger.error(" Error when getEnrollmentFromSurveySubmissionByClientId is called", e);
    }
    return enrollmentId;
}
/***
 * Fetches a global project by id from the HMIS global API.
 * @param globalProjectId id of the global project to fetch
 * @param headers authenticated HTTP headers
 * @return the global project, or null when the lookup fails
 */
private GlobalProjectModel getGlobalProjectById(UUID globalProjectId, HttpHeaders headers) {
    try {
        HttpEntity<Client> requestEntity = new HttpEntity<Client>(headers);
        ResponseEntity<GlobalProjectModel> responseEntity = restTemplate.exchange(HMIS_HOST + "/hmis-globalapi/rest/globalprojects/" + globalProjectId, HttpMethod.GET, requestEntity, GlobalProjectModel.class);
        // Fixed misleading copy-pasted log text ("Project created"): this is a GET lookup.
        logger.info("Get global project " + responseEntity.getStatusCodeValue());
        return responseEntity.getBody();
    } catch (Exception e) {
        // Log the exception itself; getCause() is often null and discards the trace.
        logger.error(" Error when getGlobalProjectById is called", e);
    }
    return null;
}
/***
 * Fetches the global enrollments of a dedup client from the HMIS global API.
 * @param dedupClientId dedup client whose enrollments are fetched
 * @param headers authenticated HTTP headers
 * @return the global enrollments, or null when the lookup fails
 */
private GlobalEnrollments getGlobalEnrollmentByDedupClientId(UUID dedupClientId, HttpHeaders headers) {
    try {
        HttpEntity<Client> requestEntity = new HttpEntity<Client>(headers);
        ResponseEntity<GlobalEnrollments> responseEntity = restTemplate.exchange(HMIS_HOST + "/hmis-globalapi/rest/clients/" + dedupClientId + "/globalenrollments", HttpMethod.GET, requestEntity, GlobalEnrollments.class);
        // Fixed misleading copy-pasted log text ("Project created"): this is a GET lookup.
        logger.info("Get global enrollments " + responseEntity.getStatusCodeValue());
        return responseEntity.getBody();
    } catch (Exception e) {
        // Log the exception itself; getCause() is often null and discards the trace.
        logger.error(" Error when getGlobalEnrollmentByDedupClientId is called", e);
    }
    return null;
}
/***
 * Resolves the project id for the posting's schema version. The mapping is looked up
 * on the global project; when no mapping for this version exists yet, a version-specific
 * project is created and its id returned.
 * @param hmisPostingModel supplies the global project id and schema version
 * @param headers authenticated HTTP headers
 * @return the version-specific project id, or null when the global project is unknown
 */
private UUID getVersionSpecificProjectId(HmisPostingModel hmisPostingModel, HttpHeaders headers) {
    GlobalProjectModel globalProject = getGlobalProjectById(hmisPostingModel.getGlobalProjectId(), headers);
    if (globalProject == null) {
        return null;
    }
    // Mapping sources are stored without the version's leading character.
    String schemaVersion = StringUtils.substring(hmisPostingModel.getSchemaVersion(), 1);
    GlobalProjectsMap projectsMap = globalProject.getProjects();
    if (projectsMap != null) {
        List<GlobalProjectMap> mappings = projectsMap.getGlobalProjectMaps();
        if (!CollectionUtils.isEmpty(mappings)) {
            for (GlobalProjectMap mapping : mappings) {
                if (StringUtils.equals(schemaVersion, mapping.getSource())) {
                    return mapping.getProjectId();
                }
            }
        }
    }
    // No mapping for this schema version yet: create the version-specific project.
    return createVersionSpecificProject(globalProject, headers, hmisPostingModel);
}
/***
 * Resolves the version-specific project id for the posting from the global project's
 * version mappings, creating a version-specific project when no mapping exists.
 * NOTE(review): despite its name this returns a PROJECT id (same logic as
 * getVersionSpecificProjectId), not an enrollment id — confirm callers' expectations.
 * @param hmisPostingModel supplies the global project id and schema version
 * @param headers authenticated HTTP headers
 * @return the version-specific project id, or null when the global project is unknown
 */
private UUID getEnrollmentAtEntry(HmisPostingModel hmisPostingModel, HttpHeaders headers) {
    GlobalProjectModel globalProjectById = getGlobalProjectById(hmisPostingModel.getGlobalProjectId(), headers);
    // BUG FIX: getGlobalProjectById returns null on failure; the previous code
    // dereferenced it unconditionally and would throw a NullPointerException.
    if (globalProjectById == null) {
        return null;
    }
    // Mapping sources are stored without the version's leading character.
    String schemaVersion = StringUtils.substring(hmisPostingModel.getSchemaVersion(), 1);
    GlobalProjectsMap projectsMap = globalProjectById.getProjects();
    if (projectsMap != null) {
        List<GlobalProjectMap> globalProjectMaps = projectsMap.getGlobalProjectMaps();
        if (!CollectionUtils.isEmpty(globalProjectMaps)) {
            for (GlobalProjectMap globalProjectMap : globalProjectMaps) {
                if (StringUtils.equals(schemaVersion, globalProjectMap.getSource())) {
                    return globalProjectMap.getProjectId();
                }
            }
        }
    }
    return createVersionSpecificProject(globalProjectById, headers, hmisPostingModel);
}
/***
 * Finds the client's enrollment that belongs to the version-specific project resolved
 * for this posting.
 * @param hmisPostingModel supplies the client id and schema version
 * @param headers authenticated HTTP headers
 * @return the matching enrollment id, or null when none is found or the lookup fails
 */
private UUID getVersionSpecificEnrollmentId(HmisPostingModel hmisPostingModel, HttpHeaders headers) {
    try {
        UUID projectId = getVersionSpecificProjectId(hmisPostingModel, headers);
        HttpEntity<Client> requestEntity = new HttpEntity<Client>(headers);
        String url = HMIS_HOST + "/hmis-clientapi-" + hmisPostingModel.getSchemaVersion() + "/rest/clients/" + hmisPostingModel.getClientId() + "/enrollments";
        ResponseEntity<EnrollmentsModel> responseEntity = restTemplate.exchange(url, HttpMethod.GET, requestEntity, EnrollmentsModel.class);
        EnrollmentsModel enrollmentsModel = responseEntity.getBody();
        if (projectId != null && enrollmentsModel != null) {
            List<EnrollmentModel> enrollments = enrollmentsModel.getEnrollments();
            if (!CollectionUtils.isEmpty(enrollments)) {
                for (EnrollmentModel model : enrollments) {
                    // BUG FIX: UUIDs must be compared with equals(); the previous "=="
                    // compared object references and virtually never matched two UUIDs
                    // deserialized from separate API responses.
                    if (projectId.equals(model.getProjectId())) {
                        return model.getEnrollmentId();
                    }
                }
            }
        }
    } catch (Exception e) {
        // Log the exception itself; getCause() is often null and discards the trace.
        logger.error(" Error when getVersionSpecificEnrollmentId is called", e);
    }
    return null;
}
/****
 * Creates a version-specific project from the global project's attributes and returns
 * the new project's id.
 * @param globalProjectById global project supplying name/description attributes
 * @param headers authenticated HTTP headers
 * @param hmisPostingModel supplies the schema version for the target API
 * @return the new project id, or null when creation fails
 */
private UUID createVersionSpecificProject(GlobalProjectModel globalProjectById, HttpHeaders headers,
        HmisPostingModel hmisPostingModel) {
    Map<String, Object> map = new HashMap<>();
    map.put("projectName", globalProjectById.getProjectName());
    map.put("sourceSystemId", globalProjectById.getSourceSystemId());
    map.put("description", globalProjectById.getDescription());
    map.put("projectCommonName", globalProjectById.getProjectCommonName());
    ObjectMapper objectMapper = new ObjectMapper();
    try {
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        String jsonObj = objectMapper.writer().withRootName("project").writeValueAsString(map);
        String url = HMIS_HOST + "/hmis-clientapi-" + hmisPostingModel.getSchemaVersion() + "/rest/projects";
        Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
        // makeAPICall returns null on failure; the previous code dereferenced the result
        // unconditionally and the resulting NPE escaped the JSON-only catch blocks.
        if (responseObject instanceof LinkedHashMap) {
            LinkedHashMap<Object, Map> envelope = (LinkedHashMap<Object, Map>) responseObject;
            if (!envelope.isEmpty()) {
                Entry<Object, Map> next = envelope.entrySet().iterator().next();
                logger.info("key:" + next.getKey() + " value:" + next.getValue());
                Map value = next.getValue();
                return UUID.fromString((String) value.get("projectId"));
            }
        }
    } catch (JsonProcessingException e) {
        // JsonMappingException extends JsonProcessingException; log the exception itself.
        logger.error(" Error when createVersionSpecificProject is called", e);
    }
    return null;
}
/****
 * Creates an HMIS assessment (type 1, level 1) for the enrollment and returns its id.
 * @param headers authenticated HTTP headers
 * @param hmisPostingModel supplies the client id and schema version
 * @param enrollmentId enrollment the assessment is attached to
 * @return the new assessment id, or null when creation fails
 */
private UUID createAssessment(HttpHeaders headers, HmisPostingModel hmisPostingModel, UUID enrollmentId) {
    Map<String, Object> map = new HashMap<>();
    map.put("assessmentType", "1");
    map.put("assessmentLevel", "1");
    ObjectMapper objectMapper = new ObjectMapper();
    try {
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        String jsonObj = objectMapper.writer().withRootName("assessment").writeValueAsString(map);
        String requestPath = hmisPostingModel.getSchemaVersion() + "/rest/clients/{clientid}/enrollments/{enrollmentid}/assessments";
        String url = getAssessmentUrl(requestPath, hmisPostingModel.getClientId(), enrollmentId, null, null);
        Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
        // makeAPICall returns null on failure; the previous code dereferenced the result
        // unconditionally and the resulting NPE escaped the JSON-only catch blocks.
        if (responseObject instanceof LinkedHashMap) {
            LinkedHashMap<Object, Map> envelope = (LinkedHashMap<Object, Map>) responseObject;
            if (!envelope.isEmpty()) {
                Entry<Object, Map> next = envelope.entrySet().iterator().next();
                logger.info("key:" + next.getKey() + " value:" + next.getValue());
                Map value = next.getValue();
                return UUID.fromString((String) value.get("assessmentId"));
            }
        }
    } catch (JsonProcessingException e) {
        // JsonMappingException extends JsonProcessingException; log the exception itself.
        logger.error(" Error when createAssessment is called", e);
    }
    return null;
}
/***
 * First step of HMIS posting: creates a version-specific enrollment for the client
 * under the version-specific project and returns its id.
 * @param hmisPostingModel supplies the entry date, client id and schema version
 * @param headers authenticated HTTP headers
 * @return the new enrollment id, or null when creation fails
 */
private UUID createEnrollment(HmisPostingModel hmisPostingModel, HttpHeaders headers) {
    Map<String, Object> map = new HashMap<>();
    try {
        map.put("entryDate", hmisPostingModel.getEntryDate());
        UUID projectId = getVersionSpecificProjectId(hmisPostingModel, headers);
        map.put("projectId", projectId);
        map.put("clientId", hmisPostingModel.getClientId());
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        String jsonObj = objectMapper.writer().withRootName("enrollment").writeValueAsString(map);
        String url = HMIS_HOST + "/hmis-clientapi-" + hmisPostingModel.getSchemaVersion() + "/rest/clients/" + hmisPostingModel.getClientId() + "/enrollments";
        Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
        // makeAPICall returns null on failure; guard before unwrapping the response envelope.
        if (responseObject instanceof LinkedHashMap) {
            LinkedHashMap<Object, Map> envelope = (LinkedHashMap<Object, Map>) responseObject;
            if (!envelope.isEmpty()) {
                Entry<Object, Map> next = envelope.entrySet().iterator().next();
                logger.info("key:" + next.getKey() + " value:" + next.getValue());
                Map value = next.getValue();
                return UUID.fromString((String) value.get("enrollmentId"));
            }
        }
    } catch (JsonProcessingException e) {
        // Fixed copy-pasted message (previously said "createExit"); a single catch covers
        // JsonMappingException too, and the exception itself is logged for the full trace.
        logger.error(" Error when createEnrollment is called", e);
    }
    return null;
}
/***
 * Creates an HMIS exit for the enrollment (used for survey category "3") and returns
 * its id.
 * @param hmisPostingModel supplies the exit date, client id and schema version
 * @param headers authenticated HTTP headers
 * @param enrollmentId enrollment being exited
 * @return the new exit id, or null when creation fails
 */
private UUID createExit(HmisPostingModel hmisPostingModel, HttpHeaders headers, UUID enrollmentId) {
    Map<String, Object> map = new HashMap<>();
    // Use the local value (the previous code fetched the exit date and then called the
    // getter a second time, leaving the local unused).
    String exitDate = hmisPostingModel.getExitDate();
    map.put("exitDate", converDate(exitDate));
    ObjectMapper objectMapper = new ObjectMapper();
    try {
        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        String jsonObj = objectMapper.writer().withRootName("exit").writeValueAsString(map);
        String url = HMIS_HOST + "/hmis-clientapi-" + hmisPostingModel.getSchemaVersion() + "/rest/clients/" + hmisPostingModel.getClientId() + "/enrollments/" + enrollmentId + "/exits";
        Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
        // makeAPICall returns null on failure; guard before unwrapping the response envelope.
        if (responseObject instanceof LinkedHashMap) {
            LinkedHashMap<Object, Map> envelope = (LinkedHashMap<Object, Map>) responseObject;
            if (!envelope.isEmpty()) {
                Entry<Object, Map> next = envelope.entrySet().iterator().next();
                logger.info("key:" + next.getKey() + " value:" + next.getValue());
                Map value = next.getValue();
                return UUID.fromString((String) value.get("exitId"));
            }
        }
    } catch (JsonProcessingException e) {
        // JsonMappingException extends JsonProcessingException; log the exception itself.
        logger.error(" Error when createExit is called", e);
    }
    return null;
}
/***
 * Posts the grouped question responses to the version-specific HMIS client API.
 * Responses are bucketed by their update-URL template so all answers that belong to the
 * same HMIS entity are sent in one POST; the created entity's link is then written back
 * onto each contributing response row. For survey category "3" (exit surveys) an HMIS
 * exit is created first so {exitid} templates can be resolved.
 * @param hmisPostingModel posting payload holding the question responses
 * @param headers authenticated HTTP headers
 * @param enrollmentId enrollment the data is posted under
 * @param projectId substituted into {projectid} templates; callers currently pass null
 */
private void postQuestionsToHmis(HmisPostingModel hmisPostingModel, HttpHeaders headers, UUID enrollmentId, UUID projectId) {
UUID exitId = null;
// Survey category "3" is an exit survey: the exit record must exist before posting.
if(StringUtils.equals("3", hmisPostingModel.getSurveyCategory())) {
exitId = createExit(hmisPostingModel, headers, enrollmentId);
}
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
List<QuestionResponseModel> questionResponses = hmisPostingModel.getQuestionResponses();
if(!CollectionUtils.isEmpty(questionResponses)) {
// Bucket the responses by update-URL template: one bucket == one HMIS POST.
Map<String,List<QuestionResponseModel>> questionResponseMap = new HashMap<>();
for( QuestionResponseModel questionResponseModel : questionResponses) {
logger.info(questionResponseModel.getUriObjectField());
List<QuestionResponseModel> list = questionResponseMap.get(questionResponseModel.getUpdateUrlTemplate());
if(!CollectionUtils.isEmpty(list)) {
list.add(questionResponseModel);
// NOTE(review): this put is redundant — "list" is already the map's value for this key.
questionResponseMap.put(questionResponseModel.getUpdateUrlTemplate(), list);
} else {
List<QuestionResponseModel> questionResponseModels = new ArrayList<QuestionResponseModel>();
questionResponseModels.add(questionResponseModel);
questionResponseMap.put(questionResponseModel.getUpdateUrlTemplate(), questionResponseModels);
}
}
Set<String> keySet = questionResponseMap.keySet();
for(String questionResponseKey : keySet) {
try {
List<QuestionResponseModel> list = questionResponseMap.get(questionResponseKey);
String rootName = "";
List<UUID> responseIds = new ArrayList<>();
if(!CollectionUtils.isEmpty(list)) {
Map<String, Object> map = new HashMap<>();
for(QuestionResponseModel questionResponseModel : list) {
// uriObjectField is "<rootName>.<fieldName>"; the field carries the pick-list
// code when one exists, otherwise the free-text response.
String uriObjectField = questionResponseModel.getUriObjectField();
String[] split = StringUtils.split(uriObjectField, ".");
rootName = split[0];
responseIds.add(questionResponseModel.getResponseId());
map.put(split[1], StringUtils.isNotBlank(questionResponseModel.getPickListValueCode()) ? questionResponseModel.getPickListValueCode() : questionResponseModel.getResponseText());
}
map.put("dataCollectionStage", hmisPostingModel.getSurveyCategory());
map.put("informationDate", hmisPostingModel.getInformationDate());
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String jsonObj = objectMapper.writer().withRootName(rootName).writeValueAsString(map);
String url = getUrl(questionResponseKey,hmisPostingModel.getClientId(), enrollmentId, exitId, projectId, null);
// Create the url from the uriObjectFied.
Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
// NOTE(review): makeAPICall returns null on failure, making the next line throw an
// NPE that is swallowed by the broad catch below — confirm this is intended.
LinkedHashMap<Object, Map> persons = (LinkedHashMap<Object, Map>) responseObject;
Set<Entry<Object, Map>> entrySet = persons.entrySet();
Entry<Object, Map> next = entrySet.iterator().next();
logger.info("key:"+next.getKey() + " value:"+next.getValue());
Map value = next.getValue();
// The created entity's id is returned under "<rootName>Id"; appending it yields the
// entity link that is written back onto each contributing response row.
String rootId = (String)value.get(rootName+"Id");
updateResponse(hmisPostingModel, responseIds, headers, url+"/"+rootId, HttpMethod.PUT);
}
}catch(Exception e) {
logger.error(" Error when postQuestionsToHmis is called", e.getCause());
}
}
}
}
/***
 * Updates the hmisLink column of each posted response row in the CES response table.
 * The link is stored relative to the HMIS host; one call is made per response id and
 * a failure for one id is logged and does not stop the remaining updates.
 * @param hmisPostingModel carries the dedup client id and survey id used to build the URL
 * @param responseIds ids of the response rows to update
 * @param headers authenticated HTTP headers
 * @param hmisLink absolute link to the HMIS entity that was just created
 * @param httpMethod HTTP method to use (callers pass PUT)
 */
private void updateResponse(HmisPostingModel hmisPostingModel, List<UUID> responseIds, HttpHeaders headers, String hmisLink, HttpMethod httpMethod) {
    Map<String, Object> map = new HashMap<>();
    // HMIS_HOST is already a String; the previous HMIS_HOST+"" concatenation was a no-op.
    map.put("hmisLink", StringUtils.replace(hmisLink, HMIS_HOST, ""));
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    for (UUID responseId : responseIds) {
        try {
            String url = CES_HOST + "/survey-api/rest/v3/clients/" + hmisPostingModel.getDedupClientId() + "/surveys/" + hmisPostingModel.getSurveyId() + "/responses/" + responseId;
            String jsonObj = objectMapper.writer().withRootName("response").writeValueAsString(map);
            makeAPICall(jsonObj, headers, url, httpMethod);
        } catch (JsonProcessingException e) {
            // JsonMappingException extends JsonProcessingException, so one catch covers both;
            // log the exception itself — getCause() is often null and discards the trace.
            logger.error(" Error when updateResponse is called", e);
        }
    }
}
/***
 * Marks the client_survey_submission row as posted: stores the relative hmisLink,
 * the enrollment id and the "DONE" posting status.
 * @param hmisPostingModel carries the submission id
 * @param headers authenticated HTTP headers
 * @param hmisLink absolute link to the HMIS enrollment
 * @param httpMethod HTTP method to use (callers pass PUT)
 * @param enrollmentId enrollment id recorded as the submission's globalEnrollmentId
 */
private void updateClientSurveySubmission(HmisPostingModel hmisPostingModel, HttpHeaders headers, String hmisLink, HttpMethod httpMethod, UUID enrollmentId) {
    Map<String, Object> map = new HashMap<>();
    // HMIS_HOST is already a String; the previous HMIS_HOST+"" concatenation was a no-op.
    map.put("hmisLink", StringUtils.replace(hmisLink, HMIS_HOST, ""));
    map.put("globalEnrollmentId", enrollmentId);
    map.put("hmisPostingStatus", "DONE");
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    try {
        String url = CES_HOST + "/survey-api/rest/clientsurveysubmissions/" + hmisPostingModel.getClientSurveySubmissionId();
        String jsonObj = objectMapper.writer().withRootName("clientsurveysubmission").writeValueAsString(map);
        makeAPICall(jsonObj, headers, url, httpMethod);
    } catch (JsonProcessingException e) {
        // Fixed copy-pasted message (previously said "updateResponse"); a single catch
        // covers JsonMappingException too, and the exception itself is logged.
        logger.error(" Error when updateClientSurveySubmission is called", e);
    }
}
/***
 * Post CES-classified question responses to HMIS as assessment questions.
 * An assessment is created lazily when the first CES question is encountered
 * and reused for the remaining questions; each posted question's HMIS link is
 * then written back to the originating response row via updateResponse.
 * @param hmisPostingModel posting context (client, survey, question responses)
 * @param headers HTTP headers carrying the HMIS auth information
 * @param enrollmentId enrollment under which the assessment is created
 */
private void postAssessmentQuestions(HmisPostingModel hmisPostingModel, HttpHeaders headers, UUID enrollmentId) {
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
List<QuestionResponseModel> questionResponses = hmisPostingModel.getQuestionResponses();
// Tracks whether the shared assessment has been created yet.
boolean isAssessmentCreated = false;
UUID assessmentId = null;
if(!CollectionUtils.isEmpty(questionResponses)) {
// NOTE(review): questionResponseMap is never used in this method — candidate for removal.
Map<String,List<QuestionResponseModel>> questionResponseMap = new HashMap<>();
for( QuestionResponseModel questionResponseModel : questionResponses) {
logger.info(questionResponseModel.getUriObjectField());
// Only questions classified as "CES" are posted as assessment questions.
if(StringUtils.equals(questionResponseModel.getQuestionClassification(), "CES")) {
try {
// Create the assessment once, on the first CES question, and reuse its id.
if(!isAssessmentCreated) {
assessmentId = createAssessment(headers, hmisPostingModel, enrollmentId);
isAssessmentCreated = true;
}
Map<String, Object> map = new HashMap<>();
map.put("assessmentQuestion", questionResponseModel.getQuestionText());
map.put("assessmentAnswer", questionResponseModel.getResponseText());
if(hmisPostingModel.getDedupClientId() != null)
map.put("dedupClientId", String.valueOf(hmisPostingModel.getDedupClientId()));
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String jsonObj = objectMapper.writer().withRootName("assessmentQuestion").writeValueAsString(map);
String requestPath = hmisPostingModel.getSchemaVersion()+"/rest/clients/{clientid}/enrollments/{enrollmentid}/assessments/{assessmentid}/assessmentquestions";
String url = getAssessmentUrl(requestPath,hmisPostingModel.getClientId(), enrollmentId, assessmentId, null);
// Create the url from the uriObjectFied.
Object responseObject = makeAPICall(jsonObj, headers, url, HttpMethod.POST);
// NOTE(review): the response is assumed to deserialize to a single-entry map whose
// value contains "assessmentQuestionId" — confirm against the HMIS API contract.
LinkedHashMap<Object, Map> persons = (LinkedHashMap<Object, Map>) responseObject;
Set<Entry<Object, Map>> entrySet = persons.entrySet();
Entry<Object, Map> next = entrySet.iterator().next();
logger.info("key:"+next.getKey() + " value:"+next.getValue());
Map value = next.getValue();
String rootId = (String)value.get("assessmentQuestionId");
List<UUID> responses = new ArrayList<>();
responses.add(questionResponseModel.getResponseId());
// Record the created question's HMIS link on the originating response.
updateResponse(hmisPostingModel,responses , headers, url+"/"+rootId, HttpMethod.PUT);
} catch (Exception e) {
logger.error(" Error when creating assessment questions", e.getCause());
}
}
}
}
}
/***
 * Build the HMIS client API URL from a URI template.
 * The template's leading slash and trailing segment are stripped, an optional
 * entity id is appended, the legacy "2020" path gains a "/rest" infix, and the
 * {clientid}/{enrollmentid}/{exitid}/{projectid} placeholders are substituted.
 * @param requestPath URI template (normalization applies when it starts with "/")
 * @param clientId value for {clientid}, may be null
 * @param enrollmentId value for {enrollmentid}, may be null
 * @param exitId value for {exitid}, may be null
 * @param projectId value for {projectid}, may be null
 * @param id optional entity id appended to the path
 * @return absolute URL rooted at HMIS_HOST
 */
private String getUrl(String requestPath, UUID clientId, UUID enrollmentId, UUID exitId, UUID projectId, UUID id) {
    if (StringUtils.startsWith(requestPath, "/")) {
        // Drop the leading "/" and the last path segment of the template.
        requestPath = StringUtils.substring(requestPath, 1);
        requestPath = StringUtils.substring(requestPath, 0, StringUtils.lastIndexOf(requestPath, "/"));
        if (id != null) {
            requestPath = requestPath + "/" + id;
        }
        // Legacy 2020 schema paths need the "/rest" infix.
        if (StringUtils.indexOf(requestPath, "/2020/rest") == -1) {
            requestPath = StringUtils.replace(requestPath, "2020", "2020/rest");
        }
    }
    // Substitute the placeholders unconditionally (previously this happened only for
    // templates starting with "/", leaving placeholders unresolved otherwise). This
    // makes the method consistent with getAssessmentUrl.
    if (requestPath != null) {
        if (enrollmentId != null)
            requestPath = StringUtils.replace(requestPath, "{enrollmentid}", enrollmentId.toString());
        if (exitId != null)
            requestPath = StringUtils.replace(requestPath, "{exitid}", exitId.toString());
        if (projectId != null)
            requestPath = StringUtils.replace(requestPath, "{projectid}", projectId.toString());
        if (clientId != null)
            requestPath = StringUtils.replace(requestPath, "{clientid}", clientId.toString());
    }
    return HMIS_HOST + "/hmis-clientapi-" + requestPath;
}
/***
 * Build the HMIS client API URL for assessment resources from a URI template.
 * Strips the leading slash and trailing segment, appends an optional entity id,
 * adds the "/rest" infix to legacy 2020 paths, and substitutes the
 * {clientid}/{enrollmentid}/{assessmentid} placeholders.
 * @param requestPath URI template (normalization applies when it starts with "/")
 * @param clientId value for {clientid}, may be null
 * @param enrollmentId value for {enrollmentid}, may be null
 * @param assessmentId value for {assessmentid}, may be null
 * @param id optional entity id appended to the path
 * @return absolute URL rooted at HMIS_HOST
 */
private String getAssessmentUrl(String requestPath, UUID clientId, UUID enrollmentId, UUID assessmentId, UUID id) {
    if (StringUtils.startsWith(requestPath, "/")) {
        // Remove the leading "/" and the final path segment of the template.
        String trimmed = StringUtils.substring(requestPath, 1);
        trimmed = StringUtils.substring(trimmed, 0, StringUtils.lastIndexOf(trimmed, "/"));
        if (id != null) {
            trimmed = trimmed + "/" + id;
        }
        // Legacy 2020 schema paths need the "/rest" infix.
        if (StringUtils.indexOf(trimmed, "/2020/rest") == -1) {
            trimmed = StringUtils.replace(trimmed, "2020", "2020/rest");
        }
        requestPath = trimmed;
    }
    if (requestPath != null) {
        // Placeholder substitutions are independent, so their order is irrelevant.
        if (clientId != null) {
            requestPath = StringUtils.replace(requestPath, "{clientid}", clientId.toString());
        }
        if (enrollmentId != null) {
            requestPath = StringUtils.replace(requestPath, "{enrollmentid}", enrollmentId.toString());
        }
        if (assessmentId != null) {
            requestPath = StringUtils.replace(requestPath, "{assessmentid}", assessmentId.toString());
        }
    }
    return HMIS_HOST + "/hmis-clientapi-" + requestPath;
}
/***
 * Make an API call to the version specific API.
 * Failures are logged and swallowed; callers must treat a null return as an error.
 * @param objectJson JSON request body
 * @param headers HTTP headers (auth, content type)
 * @param url absolute target URL
 * @param httpMethod HTTP method to execute
 * @return the deserialized response body, or null when the call failed
 */
private Object makeAPICall(String objectJson, HttpHeaders headers, String url, HttpMethod httpMethod) {
    try {
        RestTemplate rest = new RestTemplate();
        HttpEntity<Object> requestEntity = new HttpEntity<Object>(objectJson, headers);
        logger.info("Make API URL " + url);
        logger.info("Make API objectJSON " + objectJson);
        ResponseEntity<Object> responseEntity = rest.exchange(url, httpMethod, requestEntity, Object.class);
        logger.info("Make API call " + responseEntity.getStatusCodeValue());
        return responseEntity.getBody();
    } catch (Exception e) {
        // Log the exception itself: e.getCause() is frequently null for client-side
        // errors and would hide the stack trace entirely.
        logger.error(" Error when makeAPICall is called", e);
    }
    return null;
}
/***
 * Convert a "yyyy-MM-dd" date string to an epoch-millisecond timestamp, as a String.
 * @param inputDate date in "yyyy-MM-dd" format
 * @return epoch milliseconds as a String; falls back to the current time when the
 *         input cannot be parsed (pre-existing behavior, kept for compatibility)
 */
private String converDate(String inputDate) {
    // Default to "now" so an unparseable input still yields a value (legacy behavior).
    Date date = new Date();
    try {
        // SimpleDateFormat is created per call, so there is no thread-safety concern here.
        date = new SimpleDateFormat("yyyy-MM-dd").parse(inputDate);
    } catch (ParseException e) {
        // Route through the class logger instead of printStackTrace so the failure
        // is visible in the application logs.
        logger.error(" Error parsing date: " + inputDate, e);
    }
    return String.valueOf(date.getTime());
}
/***
 * Build the HTTP headers required by the HMIS client API.
 * @param clientId trusted-app id sent in the X-HMIS-TrustedApp-Id header
 * @param sessionToken token used to build the HMISUserAuth Authorization header
 * @return headers configured for JSON content negotiation and HMIS authentication
 */
public HttpHeaders getHttpHeader(String clientId, String sessionToken) {
    HttpHeaders httpHeaders = new HttpHeaders();
    httpHeaders.add("Accept", "application/json");
    httpHeaders.add("Content-Type", "application/json; charset=UTF-8");
    httpHeaders.add("X-HMIS-TrustedApp-Id", clientId);
    httpHeaders.add("Authorization", "HMISUserAuth session_token=" + sessionToken);
    // setContentType installs the typed media type, replacing the raw Content-Type entry.
    httpHeaders.setContentType(new MediaType("application", "json", Charset.forName("UTF-8")));
    return httpHeaders;
}
} |
package org.eclipse.che.ide.api.project;
import com.google.inject.Inject;
import org.eclipse.che.api.project.shared.dto.CopyOptions;
import org.eclipse.che.api.project.shared.dto.ItemReference;
import org.eclipse.che.api.project.shared.dto.MoveOptions;
import org.eclipse.che.api.project.shared.dto.SourceEstimation;
import org.eclipse.che.api.project.shared.dto.TreeElement;
import org.eclipse.che.api.promises.client.Promise;
import org.eclipse.che.api.workspace.shared.dto.NewProjectConfigDto;
import org.eclipse.che.api.workspace.shared.dto.ProjectConfigDto;
import org.eclipse.che.api.workspace.shared.dto.SourceStorageDto;
import org.eclipse.che.ide.MimeType;
import org.eclipse.che.ide.api.app.AppContext;
import org.eclipse.che.ide.api.machine.WsAgentStateController;
import org.eclipse.che.ide.dto.DtoFactory;
import org.eclipse.che.ide.resource.Path;
import org.eclipse.che.ide.rest.AsyncRequestFactory;
import org.eclipse.che.ide.rest.DtoUnmarshallerFactory;
import org.eclipse.che.ide.rest.StringUnmarshaller;
import org.eclipse.che.ide.rest.UrlBuilder;
import org.eclipse.che.ide.ui.loaders.request.LoaderFactory;
import org.eclipse.che.ide.websocket.Message;
import org.eclipse.che.ide.websocket.MessageBuilder;
import org.eclipse.che.ide.websocket.WebSocketException;
import org.eclipse.che.ide.websocket.rest.RequestCallback;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.gwt.http.client.RequestBuilder.DELETE;
import static com.google.gwt.http.client.RequestBuilder.POST;
import static com.google.gwt.http.client.RequestBuilder.PUT;
import static com.google.gwt.safehtml.shared.UriUtils.encodeAllowEscapes;
import static org.eclipse.che.api.promises.client.callback.AsyncPromiseHelper.createFromAsyncRequest;
import static org.eclipse.che.ide.MimeType.APPLICATION_JSON;
import static org.eclipse.che.ide.rest.HTTPHeader.ACCEPT;
import static org.eclipse.che.ide.rest.HTTPHeader.CONTENTTYPE;
import static org.eclipse.che.ide.rest.HTTPHeader.CONTENT_TYPE;
/**
 * Implementation of {@link ProjectServiceClient}.
 * <p>
 * TODO need to remove interface as this component is internal one and couldn't have more than one instance
 *
 * @author Vitaly Parfonov
 * @author Artem Zatsarynnyi
 * @author Valeriy Svydenko
 * @see ProjectServiceClient
 */
public class ProjectServiceClientImpl implements ProjectServiceClient {
// REST path fragments of the workspace-agent project service, appended to getBaseUrl().
private static final String PROJECT = "/project";
private static final String BATCH_PROJECTS = "/batch";
private static final String ITEM = "/item";
private static final String TREE = "/tree";
private static final String MOVE = "/move";
private static final String COPY = "/copy";
private static final String FOLDER = "/folder";
private static final String FILE = "/file";
private static final String SEARCH = "/search";
private static final String IMPORT = "/import";
private static final String RESOLVE = "/resolve";
private static final String ESTIMATE = "/estimate";
// Injected collaborators; the client is stateless apart from these.
private final WsAgentStateController wsAgentStateController;
private final LoaderFactory loaderFactory;
private final AsyncRequestFactory reqFactory;
private final DtoFactory dtoFactory;
private final DtoUnmarshallerFactory unmarshaller;
private final AppContext appContext;
@Inject
protected ProjectServiceClientImpl(WsAgentStateController wsAgentStateController,
LoaderFactory loaderFactory,
AsyncRequestFactory reqFactory,
DtoFactory dtoFactory,
DtoUnmarshallerFactory unmarshaller,
AppContext appContext) {
this.wsAgentStateController = wsAgentStateController;
this.loaderFactory = loaderFactory;
this.reqFactory = reqFactory;
this.dtoFactory = dtoFactory;
this.unmarshaller = unmarshaller;
this.appContext = appContext;
}
/** {@inheritDoc} */
@Override
public Promise<List<ProjectConfigDto>> getProjects() {
final String url = getBaseUrl();
return reqFactory.createGetRequest(url)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader("Getting projects..."))
.send(unmarshaller.newListUnmarshaller(ProjectConfigDto.class));
}
/** {@inheritDoc} */
@Override
public Promise<SourceEstimation> estimate(Path path, String pType) {
final String url = encodeAllowEscapes(getBaseUrl() + ESTIMATE + path(path.toString()) + "?type=" + pType);
return reqFactory.createGetRequest(url)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader("Estimating project..."))
.send(unmarshaller.newUnmarshaller(SourceEstimation.class));
}
/** {@inheritDoc} */
@Override
public Promise<List<SourceEstimation>> resolveSources(Path path) {
final String url = encodeAllowEscapes(getBaseUrl() + RESOLVE + path(path.toString()));
return reqFactory.createGetRequest(url)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader("Resolving sources..."))
.send(unmarshaller.newListUnmarshaller(SourceEstimation.class));
}
/** {@inheritDoc} */
@Override
public Promise<Void> importProject(final Path path,
final SourceStorageDto source) {
// Import goes over the websocket message bus rather than the plain REST factory;
// the promise is bridged from the websocket RequestCallback.
return createFromAsyncRequest(callback -> {
final String url = encodeAllowEscapes(PROJECT + IMPORT + path(path.toString()));
final Message message = new MessageBuilder(POST, url).data(dtoFactory.toJson(source))
.header(CONTENTTYPE, APPLICATION_JSON)
.build();
wsAgentStateController.getMessageBus().then(messageBus -> {
try {
messageBus.send(message, new RequestCallback<Void>() {
@Override
protected void onSuccess(Void result) {
callback.onSuccess(result);
}
@Override
protected void onFailure(Throwable exception) {
callback.onFailure(exception);
}
});
} catch (WebSocketException e) {
callback.onFailure(e);
}
}).catchError(error -> {
callback.onFailure(error.getCause());
});
});
}
/** {@inheritDoc} */
@Override
public Promise<List<ItemReference>> search(QueryExpression expression) {
final String url =
encodeAllowEscapes(getBaseUrl() + SEARCH + (isNullOrEmpty(expression.getPath()) ? Path.ROOT : path(expression.getPath())));
// Query parameters are accumulated with a leading '&'; the first '&' is rewritten
// to '?' below when the request URL is assembled.
StringBuilder queryParameters = new StringBuilder();
if (expression.getName() != null && !expression.getName().isEmpty()) {
queryParameters.append("&name=").append(expression.getName());
}
if (expression.getText() != null && !expression.getText().isEmpty()) {
queryParameters.append("&text=").append(expression.getText());
}
if (expression.getMaxItems() != 0) {
queryParameters.append("&maxItems=").append(expression.getMaxItems());
}
if (expression.getSkipCount() != 0) {
queryParameters.append("&skipCount=").append(expression.getSkipCount());
}
return reqFactory.createGetRequest(url + queryParameters.toString().replaceFirst("&", "?"))
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader("Searching..."))
.send(unmarshaller.newListUnmarshaller(ItemReference.class));
}
/** {@inheritDoc} */
@Override
public Promise<ProjectConfigDto> createProject(ProjectConfigDto configuration, Map<String, String> options) {
// Each entry of 'options' becomes a query parameter on the create request.
UrlBuilder urlBuilder = new UrlBuilder(getBaseUrl());
for (String key : options.keySet()) {
urlBuilder.setParameter(key, options.get(key));
}
return reqFactory.createPostRequest(urlBuilder.buildString(), configuration)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader("Creating project..."))
.send(unmarshaller.newUnmarshaller(ProjectConfigDto.class));
}
@Override
public Promise<List<ProjectConfigDto>> createBatchProjects(List<NewProjectConfigDto> configurations) {
final String url = encodeAllowEscapes(getBaseUrl() + BATCH_PROJECTS);
final String loaderMessage = configurations.size() > 1 ? "Creating the batch of projects..." : "Creating project...";
return reqFactory.createPostRequest(url, configurations)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader(loaderMessage))
.send(unmarshaller.newListUnmarshaller(ProjectConfigDto.class));
}
/** {@inheritDoc} */
@Override
public Promise<ItemReference> createFile(Path path, String content) {
// The file name travels as a query parameter; the parent folder is in the path.
final String url = encodeAllowEscapes(getBaseUrl() + FILE + path(path.parent().toString()) + "?name=" + path.lastSegment());
return reqFactory.createPostRequest(url, null)
.data(content)
.loader(loaderFactory.newLoader("Creating file..."))
.send(unmarshaller.newUnmarshaller(ItemReference.class));
}
/** {@inheritDoc} */
@Override
public Promise<String> getFileContent(Path path) {
final String url = encodeAllowEscapes(getBaseUrl() + FILE + path(path.toString()));
return reqFactory.createGetRequest(url)
.loader(loaderFactory.newLoader("Loading file content..."))
.send(new StringUnmarshaller());
}
/** {@inheritDoc} */
@Override
public Promise<Void> setFileContent(Path path, String content) {
final String url = encodeAllowEscapes(getBaseUrl() + FILE + path(path.toString()));
return reqFactory.createRequest(PUT, url, null, false)
.data(content)
.loader(loaderFactory.newLoader("Updating file..."))
.send();
}
/** {@inheritDoc} */
@Override
public Promise<ItemReference> createFolder(Path path) {
final String url = encodeAllowEscapes(getBaseUrl() + FOLDER + path(path.toString()));
return reqFactory.createPostRequest(url, null)
.loader(loaderFactory.newLoader("Creating folder..."))
.send(unmarshaller.newUnmarshaller(ItemReference.class));
}
/** {@inheritDoc} */
@Override
public Promise<Void> deleteItem(Path path) {
final String url = encodeAllowEscapes(getBaseUrl() + path(path.toString()));
return reqFactory.createRequest(DELETE, url, null, false)
.loader(loaderFactory.newLoader("Deleting project..."))
.send();
}
/** {@inheritDoc} */
@Override
public Promise<Void> copy(Path source, Path target, String newName, boolean overwrite) {
final String url = encodeAllowEscapes(getBaseUrl() + COPY + path(source.toString()) + "?to=" + target.toString());
final CopyOptions copyOptions = dtoFactory.createDto(CopyOptions.class);
copyOptions.setName(newName);
copyOptions.setOverWrite(overwrite);
return reqFactory.createPostRequest(url, copyOptions)
.loader(loaderFactory.newLoader("Copying..."))
.send();
}
/** {@inheritDoc} */
@Override
public Promise<Void> move(Path source, Path target, String newName, boolean overwrite) {
final String url = encodeAllowEscapes(getBaseUrl() + MOVE + path(source.toString()) + "?to=" + target.toString());
final MoveOptions moveOptions = dtoFactory.createDto(MoveOptions.class);
moveOptions.setName(newName);
moveOptions.setOverWrite(overwrite);
return reqFactory.createPostRequest(url, moveOptions)
.loader(loaderFactory.newLoader("Moving..."))
.send();
}
/** {@inheritDoc} */
@Override
public Promise<TreeElement> getTree(Path path, int depth, boolean includeFiles) {
final String url =
encodeAllowEscapes(getBaseUrl() + TREE + path(path.toString()) + "?depth=" + depth + "&includeFiles=" + includeFiles);
// temporary workaround for CHE-3467, remove loader for disable UI blocking
// later this loader should be added with the new mechanism of client-server synchronization
return reqFactory.createGetRequest(url)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.send(unmarshaller.newUnmarshaller(TreeElement.class));
}
/** {@inheritDoc} */
@Override
public Promise<ItemReference> getItem(Path path) {
final String url = encodeAllowEscapes(getBaseUrl() + ITEM + path(path.toString()));
return reqFactory.createGetRequest(url)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader("Getting item..."))
.send(unmarshaller.newUnmarshaller(ItemReference.class));
}
/** {@inheritDoc} */
@Override
public Promise<ProjectConfigDto> getProject(Path path) {
final String url = encodeAllowEscapes(getBaseUrl() + path(path.toString()));
return reqFactory.createGetRequest(url)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader("Getting project..."))
.send(unmarshaller.newUnmarshaller(ProjectConfigDto.class));
}
/** {@inheritDoc} */
@Override
public Promise<ProjectConfigDto> updateProject(ProjectConfigDto configuration) {
final String url = encodeAllowEscapes(getBaseUrl() + path(configuration.getPath()));
return reqFactory.createRequest(PUT, url, configuration, false)
.header(CONTENT_TYPE, MimeType.APPLICATION_JSON)
.header(ACCEPT, MimeType.APPLICATION_JSON)
.loader(loaderFactory.newLoader("Updating project..."))
.send(unmarshaller.newUnmarshaller(ProjectConfigDto.class));
}
/**
 * Returns the base url for the project service. It consists of workspace agent base url plus project prefix.
 *
 * @return base url for project service
 * @since 4.4.0
 */
private String getBaseUrl() {
return appContext.getDevMachine().getWsAgentBaseUrl() + PROJECT;
}
/**
 * Normalizes the path by adding a leading '/' if it doesn't exist.
 * Also escapes some special characters.
 * <p/>
 * See following javascript functions for details:
 * escape() will not encode: @ * / +
 * encodeURI() will not encode: ~ ! @ # $ & * ( ) = : / , ; ? + '
 * encodeURIComponent() will not encode: ~ ! * ( ) '
 *
 * @param path
 * path to normalize
 * @return normalized path
 */
private String path(String path) {
// '+' is escaped manually because encodeAllowEscapes (UriUtils) leaves it intact.
while (path.indexOf('+') >= 0) {
path = path.replace("+", "%2B");
}
return path.startsWith("/") ? path : '/' + path;
}
}
package org.torquebox.integration.arquillian;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import org.jboss.arquillian.api.Deployment;
import org.jboss.arquillian.api.Run;
import org.jboss.arquillian.api.RunModeType;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
@Run(RunModeType.AS_CLIENT)
public class FrozenRails_2_3_8_Test extends AbstractIntegrationTest {
private static final String[] GEM_NAMES = { "railties", "activesupport", "actionpack", "activerecord", "actionmailer", "activeresource", };
@Deployment
public static JavaArchive createDeployment() {
return createDeployment("rails/2.3.8/frozen-rails.yml");
}
@Test
public void testHighLevel() {
driver.get("http://localhost:8080/frozen-rails");
System.err.println("RESULT: ");
System.err.println(driver.getPageSource());
WebElement element = driver.findElementById("success");
assertNotNull(element);
assertEquals("frozen-rails", element.getAttribute("class"));
List<WebElement> elements = driver.findElements(By.className("load_path_element"));
for (WebElement each : elements) {
String pathElement = each.getText();
// Ensure that the mentioned gems are loaded absolutely from our frozen
// vendored Rails, and not from system gems. Inspect the paths for
// known elements that indicate frozenness.
for (int i = 0; i < GEM_NAMES.length; ++i) {
if (pathElement.contains( "/" + GEM_NAMES[i] + "/" ) ) {
assert (pathElement.contains("frozen/vendor/rails/" + GEM_NAMES[i] + "/lib"));
}
}
}
}
} |
package io.prometheus.client.smoketest;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.images.builder.ImageFromDockerfile;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Smoke test with different Java versions.
*/
@RunWith(Parameterized.class)
public class JavaVersionsIT {
private final int port = 9000;
private final OkHttpClient client = new OkHttpClient();
@Rule
public JavaContainer javaContainer;
public JavaVersionsIT(String baseImage) {
javaContainer = new JavaContainer(baseImage).withExposedPorts(port);
}
@Parameterized.Parameters(name="{0}")
public static String[] images() {
return new String[] {
// HotSpot
"openjdk:8-jre",
"openjdk:11-jre",
"openjdk:17",
"ticketfly/java:6",
"adoptopenjdk/openjdk16:ubi-minimal-jre",
// OpenJ9
"ibmjava:8-jre",
"adoptopenjdk/openjdk11-openj9",
};
}
private final List<String> exampleMetrics = Arrays.asList(
"test_total{path=\"/hello-world\"}",
"jvm_memory_bytes_used{area=\"heap\"}"
);
@Test
public void testExampleMetrics() {
List<String> metrics = scrapeMetrics(TimeUnit.SECONDS.toMillis(10));
System.out.println(javaContainer.getLogs());
for (String metric : exampleMetrics) {
Assert.assertTrue(metric + " not found", metrics.stream()
.filter(m -> m.startsWith(metric + " "))
.peek(System.out::println)
.findAny()
.isPresent());
}
}
private static class JavaContainer extends GenericContainer<JavaContainer> {
JavaContainer(String baseImage) {
super(new ImageFromDockerfile("prometheus-client-java-example-application")
.withDockerfileFromBuilder(builder ->
builder
.from(baseImage)
.run("mkdir /app")
.workDir("/app")
.copy("example_application.jar", "/app/")
.cmd("java -version && java -jar example_application.jar")
.build())
.withFileFromPath("example_application.jar",
Paths.get("../example_application/target/example_application.jar")));
}
}
private List<String> scrapeMetrics(long timeoutMillis) {
long start = System.currentTimeMillis();
Exception exception = null;
String host = javaContainer.getHost();
Integer mappedPort = javaContainer.getMappedPort(port);
String metricsUrl = "http://" + host + ":" + mappedPort + "/metrics";
while (System.currentTimeMillis() - start < timeoutMillis) {
try {
Request request = new Request.Builder()
.header("Accept", "application/openmetrics-text; version=1.0.0; charset=utf-8")
.url(metricsUrl)
.build();
try (Response response = client.newCall(request).execute()) {
return Arrays.asList(response.body().string().split("\\n"));
}
} catch (Exception e) {
exception = e;
try {
Thread.sleep(100);
} catch (InterruptedException ignored) {
}
}
}
if (exception != null) {
exception.printStackTrace();
}
Assert.fail("Timeout while getting metrics from " + metricsUrl + " (orig port: " + port + ")");
return null; // will not happen
}
} |
package com.intellij.codeInspection;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.projectRoots.JavaSdk;
import com.intellij.openapi.projectRoots.JavaSdkVersion;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;
/**
* @author yole
*/
public class JavaCommandLineInspectionProjectConfigurator implements CommandLineInspectionProjectConfigurator {
@Override
public boolean isApplicable(@NotNull Path projectPath, @NotNull CommandLineInspectionProgressReporter logger) {
JavaSdk javaSdk = JavaSdk.getInstance();
List<Sdk> sdks = ProjectJdkTable.getInstance().getSdksOfType(javaSdk);
if (!sdks.isEmpty()) return false;
try {
boolean hasAnyJavaFiles = Files.walk(projectPath).anyMatch(f -> f.toString().endsWith(".java") || f.toString().endsWith(".kt"));
if (!hasAnyJavaFiles) {
logger.reportMessage(3, "Skipping JDK autodetection because the project doesn't contain any Java files");
}
return hasAnyJavaFiles;
}
catch (IOException e) {
return false;
}
}
@Override
public void configureEnvironment(@NotNull Path projectPath, @NotNull CommandLineInspectionProgressReporter logger) {
JavaSdk javaSdk = JavaSdk.getInstance();
List<Sdk> sdks = ProjectJdkTable.getInstance().getSdksOfType(javaSdk);
if (!sdks.isEmpty()) {
return;
}
Collection<String> homePaths = javaSdk.suggestHomePaths();
Set<JavaSdkVersion> existingVersions = EnumSet.noneOf(JavaSdkVersion.class);
for (String path : homePaths) {
String jdkVersion = javaSdk.getVersionString(path);
if (jdkVersion == null) continue;
JavaSdkVersion version = JavaSdkVersion.fromVersionString(jdkVersion);
if (existingVersions.contains(version)) continue;
existingVersions.add(version);
String name = javaSdk.suggestSdkName(null, path);
logger.reportMessage(2, "Detected JDK with name " + name + " at " + path);
Sdk jdk = javaSdk.createJdk(name, path, false);
ApplicationManager.getApplication().runWriteAction(() -> ProjectJdkTable.getInstance().addJdk(jdk));
}
}
} |
package com.jenjinstudios.world.client;
import com.jenjinstudios.client.net.ClientUser;
import com.jenjinstudios.core.MessageIO;
import com.jenjinstudios.core.io.Message;
import com.jenjinstudios.core.io.MessageInputStream;
import com.jenjinstudios.core.io.MessageOutputStream;
import com.jenjinstudios.core.io.MessageRegistry;
import com.jenjinstudios.world.World;
import com.jenjinstudios.world.io.ChecksumUtil;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.security.KeyFactory;
import java.security.PublicKey;
import java.security.spec.X509EncodedKeySpec;
/**
* @author Caleb Brinkman
*/
public class ServerWorldFileTrackerTest
{
private static final MessageRegistry messageRegistry = MessageRegistry.getInstance();
private static final String validWorldString =
"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n" +
"<world>\n" +
" <zone id=\"0\" xSize=\"15\" ySize=\"15\">\n" +
" <location walkable=\"false\" x=\"1\" y=\"1\"/>\n" +
" </zone>\n" +
"</world>\n";
private WorldClient worldClient;
/**
 * Recursively deletes {@code path} and everything beneath it.
 * Files whose visit failed are still deleted; a directory whose traversal
 * failed causes the original exception to be rethrown.
 *
 * @param path root of the tree to remove
 * @throws IOException if a file or directory cannot be deleted
 */
private static void removeRecursive(Path path) throws IOException {
Files.walkFileTree(path, new SimpleFileVisitor<Path>()
{
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
throws IOException
{
Files.delete(file);
return FileVisitResult.CONTINUE;
}
// Invoked when a file could not be visited (e.g. attributes unreadable);
// attempt the delete anyway instead of aborting the walk.
@Override
public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
// Delete the (now empty) directory itself after its children have been removed.
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
if (exc == null)
{
Files.delete(dir);
return FileVisitResult.CONTINUE;
} else
{
throw exc;
}
}
});
}
@Test(timeOut = 5000)
public void testRequestWorldServerFileChecksum() throws Exception {
    // Expected checksum for the canned world definition served by the mocked server.
    byte[] worldBytes = validWorldString.getBytes(StandardCharsets.UTF_8);
    byte[] expectedChecksum = ChecksumUtil.getMD5Checksum(worldBytes);
    worldClient.start();
    // Spin until the client handshake completes; bounded by the test timeout.
    while (!worldClient.isInitialized()) {
        Thread.sleep(10);
    }
    ServerWorldFileTracker tracker = worldClient.getServerWorldFileTracker();
    tracker.requestServerWorldFileChecksum();
    Assert.assertEquals(tracker.getChecksum(), expectedChecksum);
    worldClient.shutdown();
}
@Test(timeOut = 5000)
public void testRequestServerWorldFile() throws Exception {
    // Expected payload and checksum for the canned world definition.
    byte[] worldBytes = validWorldString.getBytes(StandardCharsets.UTF_8);
    byte[] expectedChecksum = ChecksumUtil.getMD5Checksum(worldBytes);
    worldClient.start();
    // Spin until the client handshake completes; bounded by the test timeout.
    while (!worldClient.isInitialized()) {
        Thread.sleep(10);
    }
    ServerWorldFileTracker tracker = worldClient.getServerWorldFileTracker();
    tracker.requestServerWorldFileChecksum();
    tracker.requestServerWorldFile();
    // Both request phases must have completed before checking the payload.
    Assert.assertFalse(tracker.isWaitingForChecksum());
    Assert.assertFalse(tracker.isWaitingForFile());
    Assert.assertEquals(tracker.getChecksum(), expectedChecksum);
    Assert.assertEquals(tracker.getBytes(), worldBytes);
    worldClient.shutdown();
}
@Test(timeOut = 5000)
public void testWriteServerWorldToFile() throws Exception {
    byte[] expectedBytes = validWorldString.getBytes(StandardCharsets.UTF_8);
    worldClient.start();
    // Spin until the client handshake completes; bounded by the test timeout.
    while (!worldClient.isInitialized()) {
        Thread.sleep(10);
    }
    ServerWorldFileTracker tracker = worldClient.getServerWorldFileTracker();
    tracker.requestServerWorldFileChecksum();
    tracker.requestServerWorldFile();
    tracker.writeReceivedWorldToFile();
    // The client was constructed to write the world to this path.
    Path writtenFile = new File("resources/ServerWorldFileTracker.xml").toPath();
    byte[] writtenBytes = Files.readAllBytes(writtenFile);
    Assert.assertEquals(writtenBytes, expectedBytes);
    worldClient.shutdown();
}
@Test(timeOut = 5000)
public void testReadWorldFromServer() throws Exception {
    worldClient.start();
    // Spin until the client handshake completes; bounded by the test timeout.
    while (!worldClient.isInitialized()) {
        Thread.sleep(10);
    }
    ServerWorldFileTracker tracker = worldClient.getServerWorldFileTracker();
    tracker.requestServerWorldFileChecksum();
    tracker.requestServerWorldFile();
    // Parsing the downloaded bytes must yield a usable World instance.
    World world = tracker.readWorldFromServer();
    Assert.assertNotNull(world);
    worldClient.shutdown();
}
@Test(timeOut = 5000)
public void testReadWorldFromFile() throws Exception {
    worldClient.start();
    // Spin until the client handshake completes; bounded by the test timeout.
    while (!worldClient.isInitialized()) {
        Thread.sleep(10);
    }
    ServerWorldFileTracker tracker = worldClient.getServerWorldFileTracker();
    tracker.requestServerWorldFileChecksum();
    tracker.requestServerWorldFile();
    // Persist the downloaded world, then read it back from disk.
    tracker.writeReceivedWorldToFile();
    World world = tracker.readWorldFromFile();
    Assert.assertNotNull(world);
    worldClient.shutdown();
}
// Fresh fixture per test: wipe leftover files, then build a new mocked client.
@BeforeMethod
public void setUpWorldClient() throws Exception {
cleanResources();
prepareWorldClient();
}
// Also runs after each test so no files or running client leak between tests.
@AfterMethod
public void cleanResources() {
try
{
removeRecursive(new File("resources/").toPath());
} catch (IOException ignored) { }  // best-effort cleanup; directory may not exist yet
if (worldClient != null)
worldClient.shutdown();
}
// Builds a WorldClient whose input stream is a Mockito mock scripted to return,
// in order: connect response, AES key exchange, world checksum, world file —
// each followed by a large burst of blank messages so readMessage() never blocks.
private void prepareWorldClient() throws Exception {
byte[] file = validWorldString.getBytes(StandardCharsets.UTF_8);
byte[] checksum = ChecksumUtil.getMD5Checksum(file);
MessageInputStream inputStream = Mockito.mock(MessageInputStream.class);
MessageOutputStream outputStream = Mockito.mock(MessageOutputStream.class);
MessageIO messageIO = new MessageIO(inputStream, outputStream);
ClientUser clientUser = new ClientUser("Foo", "Bar");
WorldClient wc = new WorldClient(messageIO, clientUser, new File("resources/ServerWorldFileTracker.xml"));
Message firstConnectResponse = messageRegistry.createMessage("FirstConnectResponse");
firstConnectResponse.setArgument("ups", 123);
Message blankMessage = messageRegistry.createMessage("BlankMessage");
Message worldChecksumResponse = messageRegistry.createMessage("WorldChecksumResponse");
// 1500 no-op messages act as padding between the meaningful responses.
Message[] blankMessageSpam = new Message[1500];
for (int i = 0; i < blankMessageSpam.length; i++) { blankMessageSpam[i] = blankMessage; }
worldChecksumResponse.setArgument("checksum", checksum);
Message worldFileResponse = messageRegistry.createMessage("WorldFileResponse");
worldFileResponse.setArgument("fileBytes", file);
Message aesMessage = getAesKeyMessage(messageRegistry, wc);
Mockito.when(inputStream.readMessage()).
thenReturn(firstConnectResponse, blankMessageSpam).
thenReturn(aesMessage, blankMessageSpam).
thenReturn(worldChecksumResponse, blankMessageSpam).
thenReturn(worldFileResponse, blankMessageSpam);
this.worldClient = wc;
}
// Simulates the server side of the key exchange: generate a fresh AES-128 key,
// encrypt it with the client's RSA public key (PKCS#1 v1.5 padding), and wrap
// the ciphertext in an AESKeyMessage the client can decrypt.
private Message getAesKeyMessage(MessageRegistry messageRegistry, WorldClient wc) throws Exception {
byte[] clientKey = wc.getPublicKey().getEncoded();
KeyGenerator keyGenerator = KeyGenerator.getInstance("AES");
keyGenerator.init(128);
byte[] aesKeyBytes = keyGenerator.generateKey().getEncoded();
// Rebuild the client's public key from its X.509-encoded bytes.
PublicKey publicKey = KeyFactory.getInstance("RSA").generatePublic(new X509EncodedKeySpec(clientKey));
Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding");
cipher.init(Cipher.ENCRYPT_MODE, publicKey);
byte[] encryptedAESKey = cipher.doFinal(aesKeyBytes);
Message aesMessage = messageRegistry.createMessage("AESKeyMessage");
aesMessage.setArgument("key", encryptedAESKey);
return aesMessage;
}
} |
package me.xdrop.jrand.annotation.processing;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.PackageElement;
import java.nio.file.Path;
import java.nio.file.Paths;
public abstract class BaseProcessor extends AbstractProcessor {
public static String GENERATED_PACKAGE = "me.xdrop.jrand.generated.generators";
public static String packageName = "me.xdrop.jrand";
public static String generatorPath = "generators";
private static ProcessorRepository repository;
private Messager messager;
private Filer filer;
private Path outputPathGenerators;
@Override
public synchronized void init(ProcessingEnvironment processingEnv) {
super.init(processingEnv);
this.messager = processingEnv.getMessager();
this.filer = processingEnv.getFiler();
repository = new ProcessorRepository();
this.outputPathGenerators = Paths.get("jrand-core", "src", "generated",
"java", "me", "xdrop", "jrand", "generators");
}
public Messager getMessager() {
return messager;
}
public Filer getFiler() {
return filer;
}
public ProcessorRepository getRepository() {
return repository;
}
public Path getOutputPathGenerators() {
return outputPathGenerators;
}
protected String getLastPackageName(PackageElement pkg) {
String[] subpackageParts = pkg.getQualifiedName().toString()
.split("\\.");
return subpackageParts[subpackageParts.length - 1];
}
} |
package org.sonatype.guice.bean.binders;
import java.util.Arrays;
import java.util.List;
import com.google.inject.Binder;
import com.google.inject.Module;
@Deprecated
public class WireModule
implements Module
{
private final Module delegate;
public WireModule( final Module... modules )
{
this( Arrays.asList( modules ) );
}
public WireModule( final List<Module> modules )
{
delegate = new org.eclipse.sisu.wire.WireModule( modules );
}
public void configure( final Binder binder )
{
delegate.configure( binder );
}
} |
package org.pentaho.di.core.row;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.net.SocketTimeoutException;
import java.nio.charset.Charset;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import org.pentaho.di.compatibility.Value;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Messages;
import org.pentaho.di.core.exception.KettleEOFException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.xml.XMLHandler;
import org.w3c.dom.Node;
/**
* @author jb
*
*/
public class ValueMeta implements ValueMetaInterface
{
public static final String DEFAULT_DATE_FORMAT_MASK = "yyyy/MM/dd HH:mm:ss.SSS";
public static final String XML_META_TAG = "value-meta";
public static final String XML_DATA_TAG = "value-data";
// --- Identity and basic metadata ---
private String name;
private int length;
private int precision;
private int type;       // one of the ValueMetaInterface.TYPE_* constants
private int trimType;   // index into trimTypeCode / trimTypeDesc
private int storageType; // STORAGE_TYPE_NORMAL / _BINARY_STRING / _INDEXED
private String origin;
private String comments;
// Lookup table for STORAGE_TYPE_INDEXED: data objects are Integer indexes into this array.
private Object[] index;
// --- Formatting configuration ---
private String conversionMask;
private String stringEncoding;
private String decimalSymbol;
private String groupingSymbol;
private String currencySymbol;
private boolean caseInsensitive;
private boolean sortedDescending;
private boolean outputPaddingEnabled;
private boolean largeTextField;
private Locale dateFormatLocale;
private boolean dateFormatLenient;
// --- Lazily-built format caches; *Changed flags mark them dirty ---
private SimpleDateFormat dateFormat;
private boolean dateFormatChanged;
private DecimalFormat decimalFormat;
private boolean decimalFormatChanged;
// Metadata describing how binary-string data was originally stored/formatted.
private ValueMetaInterface storageMetadata;
// True when storage format and actual format are interchangeable (fast path).
private boolean identicalFormat;
// When set, its date/decimal formats take precedence over this object's own.
private ValueMetaInterface conversionMetadata;
// Package-visible: true when the effective string encoding is single-byte.
boolean singleByteEncoding;
// Counts calls to convertBinaryStringToNativeType (diagnostics/lazy-conversion heuristics).
private long numberOfBinaryStringConversions;
// get & store original result set meta data for later use
// @see java.sql.ResultSetMetaData
private int originalColumnType;
private String originalColumnTypeName;
private int originalPrecision;
private int originalScale;
private boolean originalAutoIncrement;
private int originalNullable;
private boolean originalSigned;
/**
 * The trim type codes
 */
public final static String trimTypeCode[] = { "none", "left", "right", "both" };
/**
 * The trim description
 */
public final static String trimTypeDesc[] = { Messages.getString("ValueMeta.TrimType.None"), Messages.getString("ValueMeta.TrimType.Left"),
Messages.getString("ValueMeta.TrimType.Right"), Messages.getString("ValueMeta.TrimType.Both") };
/** Creates a nameless, typeless value meta with unspecified length/precision. */
public ValueMeta()
{
this(null, ValueMetaInterface.TYPE_NONE, -1, -1);
}
/** Creates a typeless value meta with the given name. */
public ValueMeta(String name)
{
this(name, ValueMetaInterface.TYPE_NONE, -1, -1);
}
/** Creates a value meta with the given name and data type. */
public ValueMeta(String name, int type)
{
this(name, type, -1, -1);
}
/** Creates a value meta with the given name, data type and storage type. */
public ValueMeta(String name, int type, int storageType)
{
this(name, type, -1, -1);
this.storageType = storageType;
// NOTE: the chained constructor already calls setDefaultConversionMask();
// this second call is redundant but harmless.
setDefaultConversionMask();
}
/**
 * Full constructor: all other constructors chain to this one.
 * -1 for length/precision means "unspecified".
 */
public ValueMeta(String name, int type, int length, int precision)
{
this.name = name;
this.type = type;
this.length = length;
this.precision = precision;
this.storageType=STORAGE_TYPE_NORMAL;
this.sortedDescending=false;
this.outputPaddingEnabled=false;
this.decimalSymbol = ""+Const.DEFAULT_DECIMAL_SEPARATOR;
this.groupingSymbol = ""+Const.DEFAULT_GROUPING_SEPARATOR;
this.dateFormatLocale = Locale.getDefault();
this.identicalFormat = true;
determineSingleByteEncoding();
setDefaultConversionMask();
}
/**
 * Charset names known to encode every character in a single byte.
 * Scanned by determineSingleByteEncoding() to enable faster binary-string handling.
 */
public static final String[] SINGLE_BYTE_ENCODINGS = new String[] {
    "ISO8859_1", "Cp1252", "ASCII", "Cp037", "Cp273", "Cp277", "Cp278", "Cp280", "Cp284", "Cp285",
    "Cp297", "Cp420", "Cp424", "Cp437", "Cp500", "Cp737", "Cp775", "Cp850", "Cp852", "Cp855", "Cp856", "Cp857", "Cp858", "Cp860",
    "Cp861", "Cp862", "Cp863", "Cp865", "Cp866", "Cp869", "Cp870", "Cp871", "Cp875", "Cp918", "Cp921", "Cp922",
    "Cp1140", "Cp1141", "Cp1142", "Cp1143", "Cp1144", "Cp1145", "Cp1146", "Cp1147", "Cp1148", "Cp1149",
    "Cp1250", "Cp1251", "Cp1253", "Cp1254", "Cp1255", "Cp1257",
    // NOTE(review): the original list contained "ISO8859_5" twice; the duplicate was
    // removed (no behavior change). Possibly "ISO8859_4" was intended — confirm.
    "ISO8859_2", "ISO8859_3", "ISO8859_5", "ISO8859_6", "ISO8859_7", "ISO8859_8", "ISO8859_9", "ISO8859_13", "ISO8859_15", "ISO8859_15_FDIS",
    "MacCentralEurope", "MacCroatian", "MacCyrillic", "MacDingbat", "MacGreek", "MacHebrew", "MacIceland", "MacRoman", "MacRomania", "MacSymbol", "MacTurkish", "MacUkraine",
};
/**
 * Applies a sensible default numeric conversion mask based on the value type.
 * Types other than integer and number are left without a mask.
 */
private void setDefaultConversionMask()
{
    if (type == TYPE_INTEGER)
    {
        setConversionMask("#;-#");
    }
    else if (type == TYPE_NUMBER)
    {
        setConversionMask("#.#;-#.#");
    }
}
/**
 * Determines whether the effective string encoding (the configured one, or the
 * platform default charset when none is set) is single-byte, caching the result
 * in the {@code singleByteEncoding} field.
 */
private void determineSingleByteEncoding()
{
    singleByteEncoding = false;
    Charset cs;
    if (Const.isEmpty(stringEncoding))
    {
        cs = Charset.defaultCharset();
    }
    else
    {
        cs = Charset.forName(stringEncoding);
    }
    // Hoist the invariant charset name and stop at the first match — the old
    // code recomputed cs.toString() and kept scanning after a hit.
    String charsetName = cs.toString();
    for (String charSetEncoding : SINGLE_BYTE_ENCODINGS)
    {
        if (charsetName.equalsIgnoreCase(charSetEncoding))
        {
            singleByteEncoding = true;
            break;
        }
    }
}
/**
 * Deep-ish clone: locale, storage metadata and conversion metadata are cloned;
 * the cached date/decimal formats are reset so the copy rebuilds them lazily.
 * Returns null if super.clone() throws CloneNotSupportedException
 * (NOTE(review): silently swallowing that looks like a legacy convention — confirm).
 */
public ValueMeta clone()
{
try
{
ValueMeta valueMeta = (ValueMeta) super.clone();
valueMeta.dateFormat = null;
valueMeta.decimalFormat = null;
if (dateFormatLocale!=null) valueMeta.dateFormatLocale = (Locale) dateFormatLocale.clone();
if (storageMetadata!=null) valueMeta.storageMetadata = storageMetadata.clone();
if (conversionMetadata!=null) valueMeta.conversionMetadata = conversionMetadata.clone();
valueMeta.compareStorageAndActualFormat();
return valueMeta;
}
catch (CloneNotSupportedException e)
{
return null;
}
}
/**
 * @return the comments
 */
public String getComments()
{
return comments;
}
/**
 * @param comments the comments to set
 */
public void setComments(String comments)
{
this.comments = comments;
}
/**
 * @return the index lookup table used by STORAGE_TYPE_INDEXED
 */
public Object[] getIndex()
{
return index;
}
/**
 * @param index the index lookup table to set (used by STORAGE_TYPE_INDEXED)
 */
public void setIndex(Object[] index)
{
this.index = index;
}
/**
 * @return the length
 */
public int getLength()
{
return length;
}
/**
 * @param length the length to set
 */
public void setLength(int length)
{
this.length = length;
}
/**
 * Sets length and precision in one call.
 * @param length the length to set
 * @param precision the precision to set
 */
public void setLength(int length, int precision)
{
this.length = length;
this.precision = precision;
}
/**
 * @return the name
 */
public String getName()
{
return name;
}
/**
 * @param name the name to set
 */
public void setName(String name)
{
this.name = name;
}
/**
 * @return the origin
 */
public String getOrigin()
{
return origin;
}
/**
 * @param origin the origin to set
 */
public void setOrigin(String origin)
{
this.origin = origin;
}
/**
 * @return the precision; forced to 0 for integer/binary and -1 for
 *         string/boolean types for backward compatibility
 */
public int getPrecision()
{
// For backward compatibility we need to tweak a bit...
if (isInteger() || isBinary()) return 0;
if (isString() || isBoolean()) return -1;
return precision;
}
/**
 * @param precision the precision to set
 */
public void setPrecision(int precision)
{
this.precision = precision;
}
/**
 * @return the storageType
 */
public int getStorageType()
{
return storageType;
}
/**
 * @param storageType the storageType to set
 */
public void setStorageType(int storageType)
{
this.storageType = storageType;
}
/** @return true when values are stored as plain native objects */
public boolean isStorageNormal()
{
return storageType == STORAGE_TYPE_NORMAL;
}
/** @return true when values are Integer indexes into the index table */
public boolean isStorageIndexed()
{
return storageType == STORAGE_TYPE_INDEXED;
}
/** @return true when values are stored as encoded byte arrays */
public boolean isStorageBinaryString()
{
return storageType == STORAGE_TYPE_BINARY_STRING;
}
/**
 * @return the type
 */
public int getType()
{
return type;
}
/**
 * @param type the type to set
 */
public void setType(int type)
{
this.type = type;
}
/**
 * @return the conversionMask
 */
public String getConversionMask()
{
return conversionMask;
}
/**
 * Sets the conversion mask and invalidates both cached formats, then
 * re-evaluates whether storage and actual format are identical.
 * @param conversionMask the conversionMask to set
 */
public void setConversionMask(String conversionMask)
{
this.conversionMask = conversionMask;
dateFormatChanged = true;
decimalFormatChanged = true;
compareStorageAndActualFormat();
}
/**
 * @return the encoding
 */
public String getStringEncoding()
{
return stringEncoding;
}
/**
 * @param encoding the encoding to set
 */
public void setStringEncoding(String encoding)
{
this.stringEncoding = encoding;
determineSingleByteEncoding();
compareStorageAndActualFormat();
}
/**
 * @return the decimalSymbol
 */
public String getDecimalSymbol()
{
return decimalSymbol;
}
/**
 * @param decimalSymbol the decimalSymbol to set
 */
public void setDecimalSymbol(String decimalSymbol)
{
this.decimalSymbol = decimalSymbol;
decimalFormatChanged = true;
compareStorageAndActualFormat();
}
/**
 * @return the groupingSymbol
 */
public String getGroupingSymbol()
{
return groupingSymbol;
}
/**
 * @param groupingSymbol the groupingSymbol to set
 */
public void setGroupingSymbol(String groupingSymbol)
{
this.groupingSymbol = groupingSymbol;
decimalFormatChanged = true;
compareStorageAndActualFormat();
}
/**
 * @return the currencySymbol
 */
public String getCurrencySymbol()
{
return currencySymbol;
}
/**
 * @param currencySymbol the currencySymbol to set
 */
public void setCurrencySymbol(String currencySymbol)
{
this.currencySymbol = currencySymbol;
decimalFormatChanged = true;
// NOTE(review): unlike the other symbol setters this one does not call
// compareStorageAndActualFormat() — confirm whether that is intentional.
}
/**
 * @return the caseInsensitive
 */
public boolean isCaseInsensitive()
{
return caseInsensitive;
}
/**
 * @param caseInsensitive the caseInsensitive to set
 */
public void setCaseInsensitive(boolean caseInsensitive)
{
this.caseInsensitive = caseInsensitive;
}
/**
 * @return the sortedDescending
 */
public boolean isSortedDescending()
{
return sortedDescending;
}
/**
 * @param sortedDescending the sortedDescending to set
 */
public void setSortedDescending(boolean sortedDescending)
{
this.sortedDescending = sortedDescending;
}
/**
 * @return true if output padding is enabled (padding to specified length)
 */
public boolean isOutputPaddingEnabled()
{
return outputPaddingEnabled;
}
/**
 * @param outputPaddingEnabled Set to true if output padding is to be enabled (padding to specified length)
 */
public void setOutputPaddingEnabled(boolean outputPaddingEnabled)
{
this.outputPaddingEnabled = outputPaddingEnabled;
}
/**
 * @return true if this is a large text field (CLOB, TEXT) with arbitrary length.
 */
public boolean isLargeTextField()
{
return largeTextField;
}
/**
 * @param largeTextField Set to true if this is to be a large text field (CLOB, TEXT) with arbitrary length.
 */
public void setLargeTextField(boolean largeTextField)
{
this.largeTextField = largeTextField;
}
/**
 * @return the dateFormatLenient
 */
public boolean isDateFormatLenient()
{
return dateFormatLenient;
}
/**
 * Sets date-parsing leniency and invalidates the cached date format.
 * @param dateFormatLenient the dateFormatLenient to set
 */
public void setDateFormatLenient(boolean dateFormatLenient)
{
this.dateFormatLenient = dateFormatLenient;
dateFormatChanged=true;
}
/**
 * @return the dateFormatLocale
 */
public Locale getDateFormatLocale()
{
return dateFormatLocale;
}
/**
 * Sets the date format locale and invalidates the cached date format.
 * @param dateFormatLocale the dateFormatLocale to set
 */
public void setDateFormatLocale(Locale dateFormatLocale)
{
this.dateFormatLocale = dateFormatLocale;
dateFormatChanged=true;
}
// DATE + STRING
/** Formats the date with this meta's (or the conversion meta's) date format; null-safe. */
private synchronized String convertDateToString(Date date)
{
if (date==null) return null;
return getDateFormat().format(date);
}
/**
 * Fixed 2.5.x-compatible date format. SimpleDateFormat is not thread-safe and
 * this instance is shared by ALL ValueMeta instances, so it must be guarded by
 * its own lock — the instance-level "synchronized" on the method below does not
 * protect against two different ValueMeta objects formatting concurrently.
 */
private static final SimpleDateFormat compatibleDateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss.SSS");
/** Formats the date in the fixed pre-3.0 compatible format; null-safe. */
private synchronized String convertDateToCompatibleString(Date date)
{
    if (date==null) return null;
    synchronized (compatibleDateFormat)
    {
        return compatibleDateFormat.format(date);
    }
}
/** Parses a string to a Date using the configured format; trimmed first, null/empty yields null. */
private synchronized Date convertStringToDate(String string) throws KettleValueException
{
string = Const.trimToType(string, getTrimType()); // see if trimming needs to be performed before conversion
if (Const.isEmpty(string)) return null;
try
{
return getDateFormat().parse(string);
}
catch (ParseException e)
{
String dateFormat = (getDateFormat() != null) ? getDateFormat().toPattern() : "null";
throw new KettleValueException(toString()+" : couldn't convert string ["+string+"] to a date using format ["+dateFormat+"]", e);
}
}
// DATE + NUMBER: dates map to/from their epoch-millisecond value.
private Double convertDateToNumber(Date date)
{
return new Double( date.getTime() );
}
private Date convertNumberToDate(Double number)
{
return new Date( number.longValue() );
}
// DATE + INTEGER: epoch milliseconds as Long.
private Long convertDateToInteger(Date date)
{
return new Long( date.getTime() );
}
private Date convertIntegerToDate(Long number)
{
return new Date( number.longValue() );
}
// DATE + BIGNUMBER: epoch milliseconds as BigDecimal.
private BigDecimal convertDateToBigNumber(Date date)
{
return new BigDecimal( date.getTime() );
}
private Date convertBigNumberToDate(BigDecimal number)
{
return new Date( number.longValue() );
}
/**
 * Formats a Double with the configured decimal format. A null number normally
 * yields null, except when output padding is enabled: then a space-padded
 * string of the specified length is returned (2.5.x compatibility).
 */
private synchronized String convertNumberToString(Double number) throws KettleValueException
{
if (number==null) {
if (!outputPaddingEnabled || length<1) {
return null;
}
else {
// Return strings padded to the specified length...
// This is done for backward compatibility with 2.5.x
// We just optimized this a bit...
String[] emptyPaddedStrings = Const.getEmptyPaddedStrings();
if (length<emptyPaddedStrings.length) {
return emptyPaddedStrings[length];
}
else {
return Const.rightPad("", length);
}
}
}
try
{
return getDecimalFormat().format(number);
}
catch(Exception e)
{
throw new KettleValueException(toString()+" : couldn't convert Number to String ", e);
}
}
/** 2.5.x-compatible rendering: plain Double.toString(), ignoring masks/symbols. */
private synchronized String convertNumberToCompatibleString(Double number) throws KettleValueException
{
if (number==null) return null;
return Double.toString(number);
}
/** Parses a string to a Double using the configured decimal format; trimmed first, null/empty yields null. */
private synchronized Double convertStringToNumber(String string) throws KettleValueException
{
string = Const.trimToType(string, getTrimType()); // see if trimming needs to be performed before conversion
if (Const.isEmpty(string)) return null;
try
{
return new Double( getDecimalFormat().parse(string).doubleValue() );
}
catch(Exception e)
{
throw new KettleValueException(toString()+" : couldn't convert String to number ", e);
}
}
/**
 * Returns the SimpleDateFormat for this value meta, building it lazily and
 * rebuilding only when mask, locale or leniency changed. When a conversion
 * metadata object is set, its date format takes precedence so that
 * Date &lt;-&gt; String round-trips use the original mask.
 */
public synchronized SimpleDateFormat getDateFormat()
{
// If we have a Date that is represented as a String
// In that case we can set the format of the original Date on the String value metadata in the form of a conversion metadata object.
// That way, we can always convert from Date to String and back without a problem, no matter how complex the format was.
// As such, we should return the date SimpleDateFormat of the conversion metadata.
if (conversionMetadata!=null ) {
return conversionMetadata.getDateFormat();
}
if (dateFormat==null || dateFormatChanged)
{
// This may not become static as the class is not thread-safe!
dateFormat = new SimpleDateFormat();
String mask;
if (Const.isEmpty(conversionMask))
{
mask = DEFAULT_DATE_FORMAT_MASK;
}
else
{
mask = conversionMask;
}
// Only pass the locale when it differs from the default (avoids needless work).
if (dateFormatLocale==null || dateFormatLocale.equals(Locale.getDefault()))
{
dateFormat = new SimpleDateFormat(mask);
}
else
{
dateFormat = new SimpleDateFormat(mask, dateFormatLocale);
}
// Set the conversion leniency as well
dateFormat.setLenient(dateFormatLenient);
dateFormatChanged=false;
}
return dateFormat;
}
public synchronized DecimalFormat getDecimalFormat()
{
// If we have an Integer that is represented as a String
// In that case we can set the format of the original Integer on the String value metadata in the form of a conversion metadata object.
// That way, we can always convert from Integer to String and back without a problem, no matter how complex the format was.
// As such, we should return the decimal format of the conversion metadata.
if (conversionMetadata!=null ) {
return conversionMetadata.getDecimalFormat();
}
// Calculate the decimal format as few times as possible.
// That is because creating or changing a DecimalFormat object is very CPU hungry.
if (decimalFormat==null || decimalFormatChanged)
{
decimalFormat = (DecimalFormat)NumberFormat.getInstance();
DecimalFormatSymbols decimalFormatSymbols = decimalFormat.getDecimalFormatSymbols();
if (!Const.isEmpty(currencySymbol)) decimalFormatSymbols.setCurrencySymbol( currencySymbol );
if (!Const.isEmpty(groupingSymbol)) decimalFormatSymbols.setGroupingSeparator( groupingSymbol.charAt(0) );
if (!Const.isEmpty(decimalSymbol)) decimalFormatSymbols.setDecimalSeparator( decimalSymbol.charAt(0) );
decimalFormat.setDecimalFormatSymbols(decimalFormatSymbols);
// Apply the conversion mask if we have one...
if (!Const.isEmpty(conversionMask)) {
decimalFormat.applyPattern(conversionMask);
}
else {
switch(type) {
case TYPE_INTEGER:
{
if (length<1) {
decimalFormat.applyPattern("
}
else {
StringBuffer integerPattern=new StringBuffer();
// First the format for positive integers...
integerPattern.append(" ");
for (int i=0;i<getLength();i++) integerPattern.append('0'); // all zeroes.
integerPattern.append(";");
// Then the format for the negative numbers...
integerPattern.append("-");
for (int i=0;i<getLength();i++) integerPattern.append('0'); // all zeroes.
decimalFormat.applyPattern(integerPattern.toString());
}
}
break;
case TYPE_NUMBER:
{
if (length<1) {
decimalFormat.applyPattern("
}
else {
StringBuffer numberPattern=new StringBuffer();
// First do the format for positive numbers...
numberPattern.append(' '); // to compensate for minus sign.
if (precision<0) // Default: two decimals
{
for (int i=0;i<length;i++) numberPattern.append('0');
numberPattern.append(".00"); // for the .00
}
else // Floating point format 00001234,56 --> (12,2)
{
for (int i=0;i<=length;i++) numberPattern.append('0'); // all zeroes.
int pos = length-precision+1;
if (pos>=0 && pos <numberPattern.length())
{
numberPattern.setCharAt(length-precision+1, '.'); // one 'comma'
}
}
// Now do the format for negative numbers...
StringBuffer negativePattern = new StringBuffer(numberPattern);
negativePattern.setCharAt(0, '-');
numberPattern.append(";");
numberPattern.append(negativePattern);
// Apply the pattern...
decimalFormat.applyPattern(numberPattern.toString());
}
}
}
}
decimalFormatChanged=false;
}
return decimalFormat;
}
/**
 * Formats a Long with the configured decimal format. A null value normally
 * yields null, except when output padding is enabled: then a space-padded
 * string of the specified length is returned (2.5.x compatibility).
 */
private synchronized String convertIntegerToString(Long integer) throws KettleValueException
{
if (integer==null) {
if (!outputPaddingEnabled || length<1) {
return null;
}
else {
// Return strings padded to the specified length...
// This is done for backward compatibility with 2.5.x
// We just optimized this a bit...
String[] emptyPaddedStrings = Const.getEmptyPaddedStrings();
if (length<emptyPaddedStrings.length) {
return emptyPaddedStrings[length];
}
else {
return Const.rightPad("", length);
}
}
}
try
{
return getDecimalFormat().format(integer);
}
catch(Exception e)
{
throw new KettleValueException(toString()+" : couldn't convert Long to String ", e);
}
}
/** 2.5.x-compatible rendering: plain Long.toString(), ignoring masks/symbols. */
private synchronized String convertIntegerToCompatibleString(Long integer) throws KettleValueException
{
if (integer==null) return null;
return Long.toString(integer);
}
/** Parses a string to a Long using the configured decimal format; trimmed first, null/empty yields null. */
private synchronized Long convertStringToInteger(String string) throws KettleValueException
{
string = Const.trimToType(string, getTrimType()); // see if trimming needs to be performed before conversion
if (Const.isEmpty(string)) return null;
try
{
return new Long( getDecimalFormat().parse(string).longValue() );
}
catch(Exception e)
{
throw new KettleValueException(toString()+" : couldn't convert String to Integer", e);
}
}
/** Renders a BigDecimal with BigDecimal.toString(); conversion mask and symbols are not applied. */
private synchronized String convertBigNumberToString(BigDecimal number) throws KettleValueException
{
if (number==null) return null;
String string = number.toString();
return string;
}
/**
 * Parses a string to a BigDecimal using BigDecimal's own (locale-independent)
 * parsing; trimmed first, null/empty yields null. The configured decimal symbol
 * is deliberately NOT applied here.
 *
 * @throws KettleValueException when the string is not a valid number
 */
private synchronized BigDecimal convertStringToBigNumber(String string) throws KettleValueException
{
    string = Const.trimToType(string, getTrimType()); // see if trimming needs to be performed before conversion
    if (Const.isEmpty(string)) return null;
    try
    {
        return new BigDecimal( string );
    }
    catch(NumberFormatException e)
    {
        // Preserve the original exception as the cause (it was dropped before).
        throw new KettleValueException(toString()+" : couldn't convert string value '" + string + "' to a number.", e);
    }
}
// BOOLEAN + STRING
/** Renders "true"/"false" when length >= 3, otherwise the short "Y"/"N" form; null-safe. */
private String convertBooleanToString(Boolean bool)
{
if (bool==null) return null;
if (length>=3)
{
return bool.booleanValue()?"true":"false";
}
else
{
return bool.booleanValue()?"Y":"N";
}
}
/** Accepts Y/TRUE/YES (case-insensitive) or "1" as true; null/empty yields null, anything else false. */
public static Boolean convertStringToBoolean(String string)
{
if (Const.isEmpty(string)) return null;
return Boolean.valueOf( "Y".equalsIgnoreCase(string) || "TRUE".equalsIgnoreCase(string) || "YES".equalsIgnoreCase(string) || "1".equals(string) );
}
// BOOLEAN + NUMBER: true <-> 1.0, false <-> 0.0 (any non-zero is true).
private Double convertBooleanToNumber(Boolean bool)
{
if (bool==null) return null;
return new Double( bool.booleanValue() ? 1.0 : 0.0 );
}
private Boolean convertNumberToBoolean(Double number)
{
if (number==null) return null;
return Boolean.valueOf( number.intValue() != 0 );
}
// BOOLEAN + INTEGER: true <-> 1L, false <-> 0L (any non-zero is true).
private Long convertBooleanToInteger(Boolean bool)
{
if (bool==null) return null;
return Long.valueOf( bool.booleanValue() ? 1L : 0L );
}
private Boolean convertIntegerToBoolean(Long number)
{
if (number==null) return null;
return Boolean.valueOf( number.longValue() != 0 );
}
// BOOLEAN + BIGNUMBER: true <-> ONE, false <-> ZERO (any non-zero int part is true).
private BigDecimal convertBooleanToBigNumber(Boolean bool)
{
if (bool==null) return null;
return bool.booleanValue() ? BigDecimal.ONE : BigDecimal.ZERO;
}
private Boolean convertBigNumberToBoolean(BigDecimal number)
{
if (number==null) return null;
return Boolean.valueOf( number.intValue() != 0 );
}
/**
 * Converts a byte[] stored in a binary string storage type into a String.
 * When storage and actual format are identical our own encoding applies;
 * otherwise the bytes were written with the storage metadata's encoding.
 *
 * @param binary the binary string
 * @return the String in the correct encoding, or null for null/empty input
 * @throws KettleValueException when the effective encoding is unsupported
 */
private String convertBinaryStringToString(byte[] binary) throws KettleValueException
{
    // OK, so we have an internal representation of the original object, read from file.
    // Before we release it back, we have to see if we don't have to do a String-<type>-String
    // conversion with different masks.
    // This obviously only applies to numeric data and dates.
    // We verify if this is true or false in advance for performance reasons
    if (binary==null || binary.length==0) return null;
    String encoding;
    if (identicalFormat) encoding = getStringEncoding();
    else encoding = storageMetadata.getStringEncoding();
    if (Const.isEmpty(encoding))
    {
        return new String(binary);
    }
    else
    {
        try
        {
            return new String(binary, encoding);
        }
        catch(UnsupportedEncodingException e)
        {
            // Report the encoding that was actually used (the old message always
            // printed this.stringEncoding, which is wrong when the storage
            // metadata's encoding applied).
            throw new KettleValueException(toString()+" : couldn't convert binary value to String with specified string encoding ["+encoding+"]", e);
        }
    }
}
/**
 * Converts the specified data object to the normal storage type:
 * binary strings are decoded to the native type, indexed values are
 * resolved through the index table, normal values pass through unchanged.
 * @param object the data object to convert
 * @return the data in a normal storage type
 * @throws KettleValueException In case there is a data conversion error.
 */
public Object convertToNormalStorageType(Object object) throws KettleValueException
{
if (object==null) return null;
switch(storageType)
{
case STORAGE_TYPE_NORMAL:
return object;
case STORAGE_TYPE_BINARY_STRING :
return convertBinaryStringToNativeType((byte[])object);
case STORAGE_TYPE_INDEXED :
return index[(Integer)object];
default:
throw new KettleValueException(toStringMeta()+" : Unknown storage type ["+storageType+"] while converting to normal storage type");
}
}
/**
 * Converts the specified data object to the binary string storage type:
 * normal values are encoded, indexed values are resolved first and then
 * encoded, binary strings pass through unchanged.
 * @param object the data object to convert
 * @return the data in a binary string storage type
 * @throws KettleValueException In case there is a data conversion error.
 */
public Object convertToBinaryStringStorageType(Object object) throws KettleValueException
{
    if (object==null) return null;
    switch(storageType)
    {
    case STORAGE_TYPE_NORMAL:
        return convertNormalStorageTypeToBinaryString(object);
    case STORAGE_TYPE_BINARY_STRING :
        return object;
    case STORAGE_TYPE_INDEXED :
        return convertNormalStorageTypeToBinaryString( index[(Integer)object] );
    default:
        // Fixed copy-pasted message: this method converts TO binary string storage.
        throw new KettleValueException(toStringMeta()+" : Unknown storage type ["+storageType+"] while converting to binary string storage type");
    }
}
/**
 * Convert the binary data to the actual data type.<br>
 * - byte[] --> Long (Integer)
 * - byte[] --> Double (Number)
 * - byte[] --> BigDecimal (BigNumber)
 * - byte[] --> Date (Date)
 * - byte[] --> Boolean (Boolean)
 * - byte[] --> byte[] (Binary)
 *
 * @param binary the raw bytes to decode; null yields null
 * @return the decoded native value
 * @throws KettleValueException when decoding or parsing fails
 */
public Object convertBinaryStringToNativeType(byte[] binary) throws KettleValueException
{
if (binary==null) return null;
// Track how often we pay for this conversion (diagnostics).
numberOfBinaryStringConversions++;
// OK, so we have an internal representation of the original object, read from file.
// First we decode it in the correct encoding
String string = convertBinaryStringToString(binary);
// In this method we always must convert the data.
// We use the storageMetadata object to convert the binary string object.
// --> Convert from the String format to the current data type...
return convertData(storageMetadata, string);
}
/** Encodes a native value as bytes: render to String, then encode with the configured encoding. */
public Object convertNormalStorageTypeToBinaryString(Object object) throws KettleValueException
{
if (object==null) return null;
String string = getString(object);
return convertStringToBinaryString(string);
}
/**
 * Encodes a String to bytes using the configured string encoding,
 * or the platform default when none is set; null-safe.
 */
private byte[] convertStringToBinaryString(String string) throws KettleValueException
{
if (string==null) return null;
if (Const.isEmpty(stringEncoding))
{
return string.getBytes();
}
else
{
try
{
return string.getBytes(stringEncoding);
}
catch(UnsupportedEncodingException e)
{
throw new KettleValueException(toString()+" : couldn't convert String to Binary with specified string encoding ["+stringEncoding+"]", e);
}
}
}
/**
 * Clones the data. Normally, we don't have to do anything here, but just for arguments and safety,
 * we do a little extra work in case of binary blobs and Date objects.
 * We should write a programmers manual later on to specify in all clarity that
 * "we always overwrite/replace values in the Object[] data rows, we never modify them".
 *
 * Non-normal storage types (binary string, indexed) are returned as-is.
 *
 * @return a cloned data object if needed
 */
public Object cloneValueData(Object object) throws KettleValueException
{
if (object==null) return null;
if (storageType==STORAGE_TYPE_NORMAL)
{
switch(getType())
{
case ValueMeta.TYPE_STRING:
case ValueMeta.TYPE_NUMBER:
case ValueMeta.TYPE_INTEGER:
case ValueMeta.TYPE_BOOLEAN:
case ValueMeta.TYPE_BIGNUMBER: // primitive data types: we can only overwrite these, not change them
return object;
case ValueMeta.TYPE_DATE:
return new Date( ((Date)object).getTime() ); // just to make sure: very inexpensive too.
case ValueMeta.TYPE_BINARY:
// Defensive copy of the byte array so callers can't mutate shared data.
byte[] origin = (byte[]) object;
byte[] target = new byte[origin.length];
System.arraycopy(origin, 0, target, 0, origin.length);
return target;
default: throw new KettleValueException(toString()+": unable to make copy of value type: "+getType());
}
}
else {
return object;
}
}
/**
 * Converts the supplied data object to a String using the legacy ("compatible")
 * formatting rules: TYPE_DATE, TYPE_NUMBER and TYPE_INTEGER go through the
 * convert*ToCompatibleString() renderings, every other type simply delegates to
 * {@link #getString(Object)}.
 *
 * @param object the data object, stored according to this metadata's storage type
 *        (native object, binary string bytes, or an Integer index into index[])
 * @return the compatible String representation, or null when the data is null
 * @throws KettleValueException when the storage type is unknown or the object's
 *         class does not match this value metadata
 */
public String getCompatibleString(Object object) throws KettleValueException
{
try
{
String string;
// Outer switch: logical data type. Inner switch: storage format --
// NORMAL holds the native object, BINARY_STRING holds undecoded bytes,
// INDEXED holds an Integer position into the index[] dictionary.
switch(type)
{
case TYPE_DATE:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertDateToCompatibleString((Date)object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertDateToCompatibleString((Date)convertBinaryStringToNativeType((byte[])object)); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertDateToCompatibleString((Date)index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
case TYPE_NUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertNumberToCompatibleString((Double)object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertNumberToCompatibleString((Double)convertBinaryStringToNativeType((byte[])object)); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertNumberToCompatibleString((Double)index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
case TYPE_INTEGER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertIntegerToCompatibleString((Long)object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertIntegerToCompatibleString((Long)convertBinaryStringToNativeType((byte[])object)); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertIntegerToCompatibleString((Long)index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
default:
// All other types have no special "compatible" rendering.
return getString(object);
}
return string;
}
catch(ClassCastException e)
{
throw new KettleValueException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]");
}
}
/**
 * Converts the supplied data object to its String representation using this
 * metadata's conversion settings (trim type for plain strings, and optional
 * right-padding to the declared field length).
 *
 * @param object the data object, stored according to this metadata's storage type
 *        (native object, binary string bytes, or an Integer index into index[])
 * @return the String representation, or null when the data is null
 * @throws KettleValueException when the type or storage type is unknown, or when
 *         the object's class does not match this value metadata
 */
public String getString(Object object) throws KettleValueException
{
try
{
String string;
// Outer switch: logical data type. Inner switch: storage format --
// NORMAL holds the native object, BINARY_STRING holds undecoded bytes,
// INDEXED holds an Integer position into the index[] dictionary.
switch(type)
{
case TYPE_STRING:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = (String)object; break;
case STORAGE_TYPE_BINARY_STRING: string = (String)convertBinaryStringToNativeType((byte[])object); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : (String) index[((Integer)object).intValue()]; break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
// Only plain String values receive the configured trim treatment.
if ( string != null )
string = trim(string);
break;
case TYPE_DATE:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertDateToString((Date)object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertDateToString((Date)convertBinaryStringToNativeType((byte[])object)); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertDateToString((Date)index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
case TYPE_NUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertNumberToString((Double)object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertNumberToString((Double)convertBinaryStringToNativeType((byte[])object)); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertNumberToString((Double)index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
case TYPE_INTEGER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertIntegerToString((Long)object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertIntegerToString((Long)convertBinaryStringToNativeType((byte[])object)); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertIntegerToString((Long)index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
case TYPE_BIGNUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertBigNumberToString((BigDecimal)object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertBigNumberToString((BigDecimal)convertBinaryStringToNativeType((byte[])object)); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertBigNumberToString((BigDecimal)index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
case TYPE_BOOLEAN:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertBooleanToString((Boolean)object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertBooleanToString((Boolean)convertBinaryStringToNativeType((byte[])object)); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertBooleanToString((Boolean)index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
case TYPE_BINARY:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = convertBinaryStringToString((byte[])object); break;
case STORAGE_TYPE_BINARY_STRING: string = convertBinaryStringToString((byte[])object); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : convertBinaryStringToString((byte[])index[((Integer)object).intValue()]); break;
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
case TYPE_SERIALIZABLE:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: string = object.toString(); break; // just go for the default toString()
case STORAGE_TYPE_BINARY_STRING: string = convertBinaryStringToString((byte[])object); break;
case STORAGE_TYPE_INDEXED: string = object==null ? null : index[((Integer)object).intValue()].toString(); break; // just go for the default toString()
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
break;
default:
throw new KettleValueException(toString()+" : Unknown type "+type+" specified.");
}
// Optionally right-pad the rendered value out to the declared field length.
if (isOutputPaddingEnabled() && getLength()>0)
{
string = ValueDataUtil.rightPad(string, getLength());
}
return string;
}
catch(ClassCastException e)
{
throw new KettleValueException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]");
}
}
/**
 * Applies this metadata's configured trim type (none, left, right or both)
 * to the given String.
 *
 * @param string the String to trim; callers guarantee it is non-null
 * @return the trimmed String (the input itself for TRIM_TYPE_NONE or an
 *         unrecognized trim type)
 */
private String trim(String string) {
    switch (getTrimType()) {
        case TRIM_TYPE_RIGHT:
            return Const.rtrim(string);
        case TRIM_TYPE_LEFT:
            return Const.ltrim(string);
        case TRIM_TYPE_BOTH:
            return Const.trim(string);
        case TRIM_TYPE_NONE:
        default:
            return string;
    }
}
/**
 * Converts the supplied data object to a Double.
 * Binary and serializable values cannot be converted to numbers.
 *
 * @param object the data object, stored according to this metadata's storage type
 *        (native object, binary string bytes, or an Integer index into index[])
 * @return the Double value, or null when the data is null
 * @throws KettleValueException when the conversion is impossible or fails, or when
 *         the type or storage type is unknown
 */
public Double getNumber(Object object) throws KettleValueException
{
try
{
if (object==null) // NULL
{
return null;
}
// Outer switch: logical data type. Inner switch: storage format --
// NORMAL holds the native object, BINARY_STRING holds undecoded bytes,
// INDEXED holds an Integer position into the index[] dictionary.
switch(type)
{
case TYPE_NUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return (Double)object;
case STORAGE_TYPE_BINARY_STRING: return (Double)convertBinaryStringToNativeType((byte[])object);
case STORAGE_TYPE_INDEXED: return (Double)index[((Integer)object).intValue()];
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_STRING:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToNumber((String)object);
case STORAGE_TYPE_BINARY_STRING: return convertStringToNumber((String)convertBinaryStringToNativeType((byte[])object));
case STORAGE_TYPE_INDEXED: return convertStringToNumber((String) index[((Integer)object).intValue()]);
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_DATE:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertDateToNumber((Date)object);
case STORAGE_TYPE_BINARY_STRING: return convertDateToNumber((Date)convertBinaryStringToNativeType((byte[])object));
case STORAGE_TYPE_INDEXED: return new Double( ((Date)index[((Integer)object).intValue()]).getTime() );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_INTEGER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return new Double( ((Long)object).doubleValue() );
case STORAGE_TYPE_BINARY_STRING: return new Double( ((Long)convertBinaryStringToNativeType((byte[])object)).doubleValue() );
case STORAGE_TYPE_INDEXED: return new Double( ((Long)index[((Integer)object).intValue()]).doubleValue() );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BIGNUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return new Double( ((BigDecimal)object).doubleValue() );
case STORAGE_TYPE_BINARY_STRING: return new Double( ((BigDecimal)convertBinaryStringToNativeType((byte[])object)).doubleValue() );
case STORAGE_TYPE_INDEXED: return new Double( ((BigDecimal)index[((Integer)object).intValue()]).doubleValue() );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BOOLEAN:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertBooleanToNumber( (Boolean)object );
case STORAGE_TYPE_BINARY_STRING: return convertBooleanToNumber( (Boolean)convertBinaryStringToNativeType((byte[])object) );
case STORAGE_TYPE_INDEXED: return convertBooleanToNumber( (Boolean)index[((Integer)object).intValue()] );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BINARY:
throw new KettleValueException(toString()+" : I don't know how to convert binary values to numbers.");
case TYPE_SERIALIZABLE:
throw new KettleValueException(toString()+" : I don't know how to convert serializable values to numbers.");
default:
throw new KettleValueException(toString()+" : Unknown type "+type+" specified.");
}
}
catch(Exception e)
{
// Wrap any parse/cast failure so the caller gets the value's context in the message.
throw new KettleValueException("Unexpected conversion error while converting value ["+toString()+"] to a Number", e);
}
}
/**
 * Converts the supplied data object to a Long.
 * Numbers are rounded (Math.round), big numbers are truncated (longValue), and
 * dates become their epoch-millisecond value.  Binary and serializable values
 * cannot be converted.
 *
 * @param object the data object, stored according to this metadata's storage type
 *        (native object, binary string bytes, or an Integer index into index[])
 * @return the Long value, or null when the data is null
 * @throws KettleValueException when the conversion is impossible or fails, or when
 *         the type or storage type is unknown
 */
public Long getInteger(Object object) throws KettleValueException
{
try
{
if (object==null) // NULL
{
return null;
}
// Outer switch: logical data type. Inner switch: storage format --
// NORMAL holds the native object, BINARY_STRING holds undecoded bytes,
// INDEXED holds an Integer position into the index[] dictionary.
switch(type)
{
case TYPE_INTEGER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return (Long)object;
case STORAGE_TYPE_BINARY_STRING: return (Long)convertBinaryStringToNativeType((byte[])object);
case STORAGE_TYPE_INDEXED: return (Long)index[((Integer)object).intValue()];
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_STRING:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToInteger((String)object);
case STORAGE_TYPE_BINARY_STRING: return convertStringToInteger((String)convertBinaryStringToNativeType((byte[])object));
case STORAGE_TYPE_INDEXED: return convertStringToInteger((String) index[((Integer)object).intValue()]);
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_NUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return new Long( Math.round(((Double)object).doubleValue()) );
case STORAGE_TYPE_BINARY_STRING: return new Long( Math.round(((Double)convertBinaryStringToNativeType((byte[])object)).doubleValue()) );
case STORAGE_TYPE_INDEXED: return new Long( Math.round(((Double)index[((Integer)object).intValue()]).doubleValue()) );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_DATE:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertDateToInteger( (Date)object);
case STORAGE_TYPE_BINARY_STRING: return new Long( ((Date)convertBinaryStringToNativeType((byte[])object)).getTime() );
case STORAGE_TYPE_INDEXED: return convertDateToInteger( (Date)index[((Integer)object).intValue()]);
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BIGNUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return new Long( ((BigDecimal)object).longValue() );
case STORAGE_TYPE_BINARY_STRING: return new Long( ((BigDecimal)convertBinaryStringToNativeType((byte[])object)).longValue() );
case STORAGE_TYPE_INDEXED: return new Long( ((BigDecimal)index[((Integer)object).intValue()]).longValue() );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BOOLEAN:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertBooleanToInteger( (Boolean)object );
case STORAGE_TYPE_BINARY_STRING: return convertBooleanToInteger( (Boolean)convertBinaryStringToNativeType((byte[])object) );
case STORAGE_TYPE_INDEXED: return convertBooleanToInteger( (Boolean)index[((Integer)object).intValue()] );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BINARY:
throw new KettleValueException(toString()+" : I don't know how to convert binary values to integers.");
case TYPE_SERIALIZABLE:
throw new KettleValueException(toString()+" : I don't know how to convert serializable values to integers.");
default:
throw new KettleValueException(toString()+" : Unknown type "+type+" specified.");
}
}
catch(Exception e)
{
// Wrap any parse/cast failure so the caller gets the value's context in the message.
throw new KettleValueException("Unexpected conversion error while converting value ["+toString()+"] to an Integer", e);
}
}
/**
 * Converts the supplied data object to a BigDecimal.
 * Binary and serializable values cannot be converted to big numbers.
 *
 * NOTE(review): unlike getNumber()/getInteger(), this method does not wrap
 * runtime conversion failures; callers see cast exceptions directly.
 *
 * @param object the data object, stored according to this metadata's storage type
 *        (native object, binary string bytes, or an Integer index into index[])
 * @return the BigDecimal value, or null when the data is null
 * @throws KettleValueException when the conversion is impossible or fails, or when
 *         the type or storage type is unknown
 */
public BigDecimal getBigNumber(Object object) throws KettleValueException
{
    if (object == null) // NULL
    {
        return null;
    }
    // Outer switch: logical data type. Inner switch: storage format --
    // NORMAL holds the native object, BINARY_STRING holds undecoded bytes,
    // INDEXED holds an Integer position into the index[] dictionary.
    switch (type)
    {
        case TYPE_BIGNUMBER:
            switch (storageType)
            {
                case STORAGE_TYPE_NORMAL: return (BigDecimal)object;
                case STORAGE_TYPE_BINARY_STRING: return (BigDecimal)convertBinaryStringToNativeType((byte[])object);
                case STORAGE_TYPE_INDEXED: return (BigDecimal)index[((Integer)object).intValue()];
                default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
            }
        case TYPE_STRING:
            switch (storageType)
            {
                case STORAGE_TYPE_NORMAL: return convertStringToBigNumber( (String)object );
                case STORAGE_TYPE_BINARY_STRING: return convertStringToBigNumber( (String)convertBinaryStringToNativeType((byte[])object) );
                case STORAGE_TYPE_INDEXED: return convertStringToBigNumber((String) index[((Integer)object).intValue()]);
                default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
            }
        case TYPE_INTEGER:
            switch (storageType)
            {
                case STORAGE_TYPE_NORMAL: return BigDecimal.valueOf( ((Long)object).longValue() );
                case STORAGE_TYPE_BINARY_STRING: return BigDecimal.valueOf( ((Long)convertBinaryStringToNativeType((byte[])object)).longValue() );
                case STORAGE_TYPE_INDEXED: return BigDecimal.valueOf( ((Long)index[((Integer)object).intValue()]).longValue() );
                default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
            }
        case TYPE_NUMBER:
            switch (storageType)
            {
                case STORAGE_TYPE_NORMAL: return BigDecimal.valueOf( ((Double)object).doubleValue() );
                case STORAGE_TYPE_BINARY_STRING: return BigDecimal.valueOf( ((Double)convertBinaryStringToNativeType((byte[])object)).doubleValue() );
                case STORAGE_TYPE_INDEXED: return BigDecimal.valueOf( ((Double)index[((Integer)object).intValue()]).doubleValue() );
                default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
            }
        case TYPE_DATE:
            switch (storageType)
            {
                case STORAGE_TYPE_NORMAL: return convertDateToBigNumber( (Date)object );
                case STORAGE_TYPE_BINARY_STRING: return convertDateToBigNumber( (Date)convertBinaryStringToNativeType((byte[])object) );
                case STORAGE_TYPE_INDEXED: return convertDateToBigNumber( (Date)index[((Integer)object).intValue()] );
                default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
            }
        case TYPE_BOOLEAN:
            switch (storageType)
            {
                case STORAGE_TYPE_NORMAL: return convertBooleanToBigNumber( (Boolean)object );
                case STORAGE_TYPE_BINARY_STRING: return convertBooleanToBigNumber( (Boolean)convertBinaryStringToNativeType((byte[])object) );
                case STORAGE_TYPE_INDEXED: return convertBooleanToBigNumber( (Boolean)index[((Integer)object).intValue()] );
                default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
            }
        case TYPE_BINARY:
            // Fixed copy-paste error: these messages previously said "to integers".
            throw new KettleValueException(toString()+" : I don't know how to convert binary values to big numbers.");
        case TYPE_SERIALIZABLE:
            throw new KettleValueException(toString()+" : I don't know how to convert serializable values to big numbers.");
        default:
            throw new KettleValueException(toString()+" : Unknown type "+type+" specified.");
    }
}
/**
 * Converts the supplied data object to a Boolean.
 * Strings are trimmed before interpretation; dates, binary and serializable
 * values cannot be converted to booleans.
 *
 * @param object the data object, stored according to this metadata's storage type
 *        (native object, binary string bytes, or an Integer index into index[])
 * @return the Boolean value, or null when the data is null
 * @throws KettleValueException when the conversion is impossible, or when the
 *         type or storage type is unknown
 */
public Boolean getBoolean(Object object) throws KettleValueException
{
if (object==null) // NULL
{
return null;
}
// Outer switch: logical data type. Inner switch: storage format --
// NORMAL holds the native object, BINARY_STRING holds undecoded bytes,
// INDEXED holds an Integer position into the index[] dictionary.
switch(type)
{
case TYPE_BOOLEAN:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return (Boolean)object;
case STORAGE_TYPE_BINARY_STRING: return (Boolean)convertBinaryStringToNativeType((byte[])object);
case STORAGE_TYPE_INDEXED: return (Boolean)index[((Integer)object).intValue()];
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_STRING:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToBoolean( trim((String)object) );
case STORAGE_TYPE_BINARY_STRING: return convertStringToBoolean( trim((String)convertBinaryStringToNativeType((byte[])object)) );
case STORAGE_TYPE_INDEXED: return convertStringToBoolean( trim((String) index[((Integer)object).intValue()] ));
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_INTEGER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertIntegerToBoolean( (Long)object );
case STORAGE_TYPE_BINARY_STRING: return convertIntegerToBoolean( (Long)convertBinaryStringToNativeType((byte[])object) );
case STORAGE_TYPE_INDEXED: return convertIntegerToBoolean( (Long)index[((Integer)object).intValue()] );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_NUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertNumberToBoolean( (Double)object );
case STORAGE_TYPE_BINARY_STRING: return convertNumberToBoolean( (Double)convertBinaryStringToNativeType((byte[])object) );
case STORAGE_TYPE_INDEXED: return convertNumberToBoolean( (Double)index[((Integer)object).intValue()] );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BIGNUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertBigNumberToBoolean( (BigDecimal)object );
case STORAGE_TYPE_BINARY_STRING: return convertBigNumberToBoolean( (BigDecimal)convertBinaryStringToNativeType((byte[])object) );
case STORAGE_TYPE_INDEXED: return convertBigNumberToBoolean( (BigDecimal)index[((Integer)object).intValue()] );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_DATE:
throw new KettleValueException(toString()+" : I don't know how to convert date values to booleans.");
case TYPE_BINARY:
throw new KettleValueException(toString()+" : I don't know how to convert binary values to booleans.");
case TYPE_SERIALIZABLE:
throw new KettleValueException(toString()+" : I don't know how to convert serializable values to booleans.");
default:
throw new KettleValueException(toString()+" : Unknown type "+type+" specified.");
}
}
/**
 * Converts the supplied data object to a Date.
 * Booleans, binary and serializable values cannot be converted to dates.
 *
 * @param object the data object, stored according to this metadata's storage type
 *        (native object, binary string bytes, or an Integer index into index[])
 * @return the Date value, or null when the data is null
 * @throws KettleValueException when the conversion is impossible or fails, or when
 *         the type or storage type is unknown
 */
public Date getDate(Object object) throws KettleValueException
{
if (object==null) // NULL
{
return null;
}
// Outer switch: logical data type. Inner switch: storage format --
// NORMAL holds the native object, BINARY_STRING holds undecoded bytes,
// INDEXED holds an Integer position into the index[] dictionary.
switch(type)
{
case TYPE_DATE:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return (Date)object;
case STORAGE_TYPE_BINARY_STRING: return (Date)convertBinaryStringToNativeType((byte[])object);
case STORAGE_TYPE_INDEXED: return (Date)index[((Integer)object).intValue()];
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_STRING:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToDate( (String)object );
case STORAGE_TYPE_BINARY_STRING: return convertStringToDate( (String)convertBinaryStringToNativeType((byte[])object) );
case STORAGE_TYPE_INDEXED: return convertStringToDate( (String) index[((Integer)object).intValue()] );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_NUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertNumberToDate((Double)object);
case STORAGE_TYPE_BINARY_STRING: return convertNumberToDate((Double)convertBinaryStringToNativeType((byte[])object) );
case STORAGE_TYPE_INDEXED: return convertNumberToDate((Double)index[((Integer)object).intValue()]);
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_INTEGER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertIntegerToDate((Long)object);
case STORAGE_TYPE_BINARY_STRING: return convertIntegerToDate((Long)convertBinaryStringToNativeType((byte[])object));
case STORAGE_TYPE_INDEXED: return convertIntegerToDate((Long)index[((Integer)object).intValue()]);
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BIGNUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertBigNumberToDate((BigDecimal)object);
case STORAGE_TYPE_BINARY_STRING: return convertBigNumberToDate((BigDecimal)convertBinaryStringToNativeType((byte[])object));
case STORAGE_TYPE_INDEXED: return convertBigNumberToDate((BigDecimal)index[((Integer)object).intValue()]);
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BOOLEAN:
throw new KettleValueException(toString()+" : I don't know how to convert a boolean to a date.");
case TYPE_BINARY:
throw new KettleValueException(toString()+" : I don't know how to convert a binary value to date.");
case TYPE_SERIALIZABLE:
throw new KettleValueException(toString()+" : I don't know how to convert a serializable value to date.");
default:
throw new KettleValueException(toString()+" : Unknown type "+type+" specified.");
}
}
/**
 * Converts the supplied data object to a byte array.
 * Only TYPE_BINARY and TYPE_STRING values can be converted; every other
 * data type raises an exception.
 *
 * @param object the data object, stored according to this metadata's storage type
 * @return the byte array, or null when the data is null
 * @throws KettleValueException when the conversion is impossible, or when the
 *         type or storage type is unknown
 */
public byte[] getBinary(Object object) throws KettleValueException
{
    if (object == null) // NULL
    {
        return null;
    }
    switch (type)
    {
        case TYPE_BINARY:
            switch (storageType)
            {
                case STORAGE_TYPE_NORMAL:
                case STORAGE_TYPE_BINARY_STRING:
                    // Both storage forms already hold the raw bytes.
                    return (byte[]) object;
                case STORAGE_TYPE_INDEXED:
                    return (byte[]) index[((Integer) object).intValue()];
                default:
                    throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
            }
        case TYPE_DATE:
            throw new KettleValueException(toString()+" : I don't know how to convert a date to binary.");
        case TYPE_STRING:
            switch (storageType)
            {
                case STORAGE_TYPE_NORMAL:
                    return convertStringToBinaryString( (String) object );
                case STORAGE_TYPE_BINARY_STRING:
                    return (byte[]) object;
                case STORAGE_TYPE_INDEXED:
                    return convertStringToBinaryString( (String) index[((Integer) object).intValue()] );
                default:
                    throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
            }
        case TYPE_NUMBER:
            throw new KettleValueException(toString()+" : I don't know how to convert a number to binary.");
        case TYPE_INTEGER:
            throw new KettleValueException(toString()+" : I don't know how to convert an integer to binary.");
        case TYPE_BIGNUMBER:
            throw new KettleValueException(toString()+" : I don't know how to convert a bignumber to binary.");
        case TYPE_BOOLEAN:
            throw new KettleValueException(toString()+" : I don't know how to convert a boolean to binary.");
        case TYPE_SERIALIZABLE:
            throw new KettleValueException(toString()+" : I don't know how to convert a serializable to binary.");
        default:
            throw new KettleValueException(toString()+" : Unknown type "+type+" specified.");
    }
}
/**
 * Converts the supplied data object into its binary-string representation:
 * the value rendered as a String with this metadata's formatting rules and then
 * encoded to bytes.  When the data is already a binary string and the formatting
 * options match (identicalFormat), the bytes are passed through untouched.
 *
 * @param object the data object, stored according to this metadata's storage type
 *        (native object, binary string bytes, or an Integer index into index[])
 * @return the encoded byte array, or null when the data is null
 * @throws KettleValueException when the type or storage type is unknown, or when
 *         the object's class does not match this value metadata
 */
public byte[] getBinaryString(Object object) throws KettleValueException
{
// If the input is a binary string, we should return the exact same binary object IF
// and only IF the formatting options for the storage metadata and this object are the same.
if (isStorageBinaryString() && identicalFormat)
{
return (byte[]) object; // shortcut it directly for better performance.
}
try
{
if (object==null) // NULL
{
return null;
}
// Outer switch: logical data type. Inner switch: storage format --
// NORMAL holds the native object, BINARY_STRING holds undecoded bytes,
// INDEXED holds an Integer position into the index[] dictionary.
switch(type)
{
case TYPE_STRING:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToBinaryString((String)object);
case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString((String)convertBinaryStringToNativeType((byte[])object));
case STORAGE_TYPE_INDEXED: return convertStringToBinaryString((String) index[((Integer)object).intValue()]);
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_DATE:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertDateToString((Date)object));
case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertDateToString((Date)convertBinaryStringToNativeType((byte[])object)));
case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertDateToString((Date)index[((Integer)object).intValue()]));
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_NUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertNumberToString((Double)object));
case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertNumberToString((Double)convertBinaryStringToNativeType((byte[])object)));
case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertNumberToString((Double)index[((Integer)object).intValue()]));
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_INTEGER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertIntegerToString((Long)object));
case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertIntegerToString((Long)convertBinaryStringToNativeType((byte[])object)));
case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertIntegerToString((Long)index[((Integer)object).intValue()]));
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BIGNUMBER:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertBigNumberToString((BigDecimal)object));
case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertBigNumberToString((BigDecimal)convertBinaryStringToNativeType((byte[])object)));
case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertBigNumberToString((BigDecimal)index[((Integer)object).intValue()]));
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BOOLEAN:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(convertBooleanToString((Boolean)object));
case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString(convertBooleanToString((Boolean)convertBinaryStringToNativeType((byte[])object)));
case STORAGE_TYPE_INDEXED: return convertStringToBinaryString(convertBooleanToString((Boolean)index[((Integer)object).intValue()]));
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_BINARY:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return (byte[])object;
case STORAGE_TYPE_BINARY_STRING: return (byte[])object;
case STORAGE_TYPE_INDEXED: return (byte[])index[((Integer)object).intValue()];
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
case TYPE_SERIALIZABLE:
switch(storageType)
{
case STORAGE_TYPE_NORMAL: return convertStringToBinaryString(object.toString());
case STORAGE_TYPE_BINARY_STRING: return (byte[])object;
case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( index[((Integer)object).intValue()].toString() );
default: throw new KettleValueException(toString()+" : Unknown storage type "+storageType+" specified.");
}
default:
throw new KettleValueException(toString()+" : Unknown type "+type+" specified.");
}
}
catch(ClassCastException e)
{
throw new KettleValueException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]");
}
}
/**
 * Tells whether this metadata describes a String value.
 *
 * @return true when the data type is TYPE_STRING
 */
public boolean isString()
{
    return TYPE_STRING == type;
}
/**
 * Tells whether this metadata describes a Date value.
 *
 * @return true when the data type is TYPE_DATE
 */
public boolean isDate()
{
    return TYPE_DATE == type;
}
/**
 * Tells whether this metadata describes a BigNumber (arbitrary-precision) value.
 *
 * @return true when the data type is TYPE_BIGNUMBER
 */
public boolean isBigNumber()
{
    return TYPE_BIGNUMBER == type;
}
/**
 * Tells whether this metadata describes a (floating-point) Number value.
 *
 * @return true when the data type is TYPE_NUMBER
 */
public boolean isNumber()
{
    return TYPE_NUMBER == type;
}
/**
 * Tells whether this metadata describes a Boolean value.
 *
 * @return true when the data type is TYPE_BOOLEAN
 */
public boolean isBoolean()
{
    return TYPE_BOOLEAN == type;
}
/**
 * Tells whether this metadata describes a Serializable value.
 *
 * @return true when the data type is TYPE_SERIALIZABLE
 */
public boolean isSerializableType()
{
    return TYPE_SERIALIZABLE == type;
}
/**
 * Tells whether this metadata describes a Binary (byte array) value.
 *
 * @return true when the data type is TYPE_BINARY
 */
public boolean isBinary()
{
    return TYPE_BINARY == type;
}
/**
 * Tells whether this metadata describes an Integer value.
 *
 * @return true when the data type is TYPE_INTEGER
 */
public boolean isInteger()
{
    return TYPE_INTEGER == type;
}
/**
 * Tells whether this metadata describes any numeric value:
 * Number, Integer or BigNumber.
 *
 * @return true when the data type is TYPE_NUMBER, TYPE_INTEGER or TYPE_BIGNUMBER
 */
public boolean isNumeric()
{
    return isNumber() || isBigNumber() || isInteger();
}
/**
 * Tells whether the given data type constant denotes a numeric type:
 * Integer, Number or BigNumber.
 *
 * @param t the data type constant to check
 * @return true when t is TYPE_INTEGER, TYPE_NUMBER or TYPE_BIGNUMBER
 */
public static final boolean isNumeric(int t)
{
    switch (t)
    {
        case TYPE_INTEGER:
        case TYPE_NUMBER:
        case TYPE_BIGNUMBER:
            return true;
        default:
            return false;
    }
}
/**
 * Tells whether values of this metadata sort in ascending order;
 * this is simply the negation of {@link #isSortedDescending()}.
 *
 * @return true when the sort order is ascending
 */
public boolean isSortedAscending()
{
return !isSortedDescending();
}
/**
 * Return the type of a value in a textual form: "String", "Number", "Integer", "Boolean", "Date", ...
 * Looks the current type up in the typeCodes[] table.
 * NOTE(review): assumes type is always a valid index into typeCodes[] -- an
 * unknown type would raise ArrayIndexOutOfBoundsException here.
 *
 * @return A String describing the type of value.
 */
public String getTypeDesc()
{
return typeCodes[type];
}
/**
 * Return the storage type of a value in a textual form: "normal", "binary-string", "indexes"
 * Looks the current storage type up in the storageTypeCodes[] table.
 *
 * @return A String describing the storage type of the value metadata
 */
public String getStorageTypeDesc()
{
return storageTypeCodes[storageType];
}
/**
 * Returns the value's name followed by its metadata description,
 * e.g. "myfield String(50)".
 *
 * @return the name and metadata description of this value
 */
public String toString()
{
return name+" "+toStringMeta();
}
/**
 * Builds a textual description of this value metadata: the type name, an
 * optional "(length[, precision])" suffix for String/Integer/Number/BigNumber
 * types, and a "&lt;storage-type&gt;" suffix for non-normal storage.
 *
 * Implementation note (Sven Boden): explicit performance testing showed
 * StringBuffer barely beats plain String concatenation here (a few ms over
 * 100000 iterations), but the reduced object creation may pay off long term.
 *
 * @return the metadata description, e.g. "Number(10, 2)&lt;binary-string&gt;"
 */
public String toStringMeta()
{
    StringBuffer description = new StringBuffer(getTypeDesc());
    switch (getType())
    {
        case TYPE_STRING:
        case TYPE_INTEGER:
            // Length only, no precision.
            if (getLength() > 0)
            {
                description.append('(').append(getLength()).append(')');
            }
            break;
        case TYPE_NUMBER:
        case TYPE_BIGNUMBER:
            // Length with optional precision.
            if (getLength() > 0)
            {
                description.append('(').append(getLength());
                if (getPrecision() > 0)
                {
                    description.append(", ").append(getPrecision());
                }
                description.append(')');
            }
            break;
        default:
            break;
    }
    if (!isStorageNormal())
    {
        description.append('<').append(getStorageTypeDesc()).append('>');
    }
    return description.toString();
}
/**
 * Serializes a single data value to the output stream according to this
 * value's data type and storage type. The wire format is a boolean
 * null-marker followed by the optional payload; the read counterpart is
 * {@link #readData(DataInputStream)}.
 *
 * @param outputStream the stream to write to
 * @param object the value to serialize (may be null)
 * @throws KettleFileException when writing fails or the type is unknown
 */
public void writeData(DataOutputStream outputStream, Object object) throws KettleFileException
{
  try
  {
    // Is the value NULL?
    outputStream.writeBoolean(object==null);
    if (object!=null) // otherwise there is no point
    {
      switch(storageType)
      {
      case STORAGE_TYPE_NORMAL:
        // Handle Content -- only when not NULL
        switch(getType())
        {
        case TYPE_STRING : writeString(outputStream, (String)object); break;
        case TYPE_NUMBER : writeNumber(outputStream, (Double)object); break;
        case TYPE_INTEGER : writeInteger(outputStream, (Long)object); break;
        case TYPE_DATE : writeDate(outputStream, (Date)object); break;
        case TYPE_BIGNUMBER : writeBigNumber(outputStream, (BigDecimal)object); break;
        case TYPE_BOOLEAN : writeBoolean(outputStream, (Boolean)object); break;
        case TYPE_BINARY : writeBinary(outputStream, (byte[])object); break;
        default: throw new KettleFileException(toString()+" : Unable to serialize data type "+getType());
        }
        break;
      case STORAGE_TYPE_BINARY_STRING:
        // Handle binary string content -- only when not NULL
        // In this case, we opt not to convert anything at all for speed.
        // That way, we can save on CPU power.
        // Since the streams can be compressed, volume shouldn't be an issue at all.
        writeBinaryString(outputStream, (byte[])object);
        break;
      case STORAGE_TYPE_INDEXED:
        writeInteger(outputStream, (Integer)object); // just an index
        break;
      default: throw new KettleFileException(toString()+" : Unknown storage type "+getStorageType());
      }
    }
  }
  catch(ClassCastException e) {
    // FIX: propagate the original exception as the cause; it was previously
    // dropped, hiding the real cast failure from the stack trace.
    throw new RuntimeException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]", e);
  }
  catch(IOException e)
  {
    throw new KettleFileException(toString()+" : Unable to write value data to output stream", e);
  }
}
/**
 * De-serializes a single data value from the input stream, using this
 * value's data type and storage type to select the wire format.
 * This is the read counterpart of writeData(): a boolean null-marker
 * followed by the optional payload.
 *
 * @param inputStream the stream to read from
 * @return the de-serialized value, or null when the null-marker was set
 * @throws KettleFileException when reading fails or the type is unknown
 * @throws KettleEOFException when the end of the stream is reached
 * @throws SocketTimeoutException on socket read timeouts, re-thrown
 *         unchanged so callers can distinguish timeouts from hard failures
 */
public Object readData(DataInputStream inputStream) throws KettleFileException, KettleEOFException, SocketTimeoutException
{
  try
  {
    // Is the value NULL?
    if (inputStream.readBoolean()) return null; // done
    switch(storageType)
    {
    case STORAGE_TYPE_NORMAL:
      // Handle Content -- only when not NULL
      switch(getType())
      {
      case TYPE_STRING : return readString(inputStream);
      case TYPE_NUMBER : return readNumber(inputStream);
      case TYPE_INTEGER : return readInteger(inputStream);
      case TYPE_DATE : return readDate(inputStream);
      case TYPE_BIGNUMBER : return readBigNumber(inputStream);
      case TYPE_BOOLEAN : return readBoolean(inputStream);
      case TYPE_BINARY : return readBinary(inputStream);
      default: throw new KettleFileException(toString()+" : Unable to de-serialize data of type "+getType());
      }
    case STORAGE_TYPE_BINARY_STRING:
      // Binary-string storage is passed through without any conversion,
      // mirroring the speed optimization in writeData().
      return readBinaryString(inputStream);
    case STORAGE_TYPE_INDEXED:
      return readSmallInteger(inputStream); // just an index: 4-bytes should be enough.
    default: throw new KettleFileException(toString()+" : Unknown storage type "+getStorageType());
    }
  }
  catch(EOFException e)
  {
    // End-of-stream is a distinct, expected condition for callers.
    throw new KettleEOFException(e);
  }
  catch(SocketTimeoutException e)
  {
    // Deliberately passed through unchanged (declared on the signature).
    throw e;
  }
  catch(IOException e)
  {
    throw new KettleFileException(toString()+" : Unable to read value data from input stream", e);
  }
}
/**
 * Writes a string as a length-prefixed byte block in the XML encoding.
 * A length of -1 marks a null string.
 *
 * @param outputStream the stream to write to
 * @param string the string to write, may be null
 * @throws IOException when writing fails
 */
private void writeString(DataOutputStream outputStream, String string) throws IOException
{
  if (string==null)
  {
    outputStream.writeInt(-1); // null marker
    return;
  }
  byte[] encoded = string.getBytes(Const.XML_ENCODING);
  outputStream.writeInt(encoded.length);
  outputStream.write(encoded);
}
/**
 * Writes a raw binary string as a length-prefixed byte block.
 * A length of -1 marks a null value.
 *
 * @param outputStream the stream to write to
 * @param binaryString the bytes to write, may be null
 * @throws IOException when writing fails
 */
private void writeBinaryString(DataOutputStream outputStream, byte[] binaryString) throws IOException
{
  if (binaryString==null)
  {
    outputStream.writeInt(-1); // null marker
    return;
  }
  outputStream.writeInt(binaryString.length);
  outputStream.write(binaryString);
}
/**
 * Reads a length-prefixed string written by writeString().
 *
 * @param inputStream the stream to read from
 * @return the string, or null when the length prefix was negative
 * @throws IOException when reading fails
 */
private String readString(DataInputStream inputStream) throws IOException
{
  int length = inputStream.readInt();
  if (length < 0)
  {
    return null; // -1 marks a null string
  }
  byte[] encoded = new byte[length];
  inputStream.readFully(encoded);
  return new String(encoded, Const.XML_ENCODING);
}
/**
 * Reads a length-prefixed binary string written by writeBinaryString().
 *
 * @param inputStream the stream to read from
 * @return the bytes, or null when the length prefix was negative
 * @throws IOException when reading fails
 */
private byte[] readBinaryString(DataInputStream inputStream) throws IOException
{
  int length = inputStream.readInt();
  if (length < 0)
  {
    return null; // -1 marks a null value
  }
  byte[] payload = new byte[length];
  inputStream.readFully(payload);
  return payload;
}
/**
 * Writes a BigDecimal as its (length-prefixed) string representation;
 * a BigDecimal round-trips exactly through toString()/new BigDecimal(String).
 */
private void writeBigNumber(DataOutputStream outputStream, BigDecimal number) throws IOException
{
  writeString(outputStream, number.toString());
}
/**
 * Reads a BigDecimal written by writeBigNumber().
 */
private BigDecimal readBigNumber(DataInputStream inputStream) throws IOException
{
  return new BigDecimal(readString(inputStream));
}
/**
 * Writes a Date as its epoch offset in milliseconds.
 */
private void writeDate(DataOutputStream outputStream, Date date) throws IOException
{
  long millis = date.getTime();
  outputStream.writeLong(millis);
}
/**
 * Reads a Date written by writeDate() (epoch milliseconds).
 */
private Date readDate(DataInputStream inputStream) throws IOException
{
  long millis = inputStream.readLong();
  return new Date(millis);
}
/**
 * Writes a Boolean as a single primitive boolean.
 */
private void writeBoolean(DataOutputStream outputStream, Boolean bool) throws IOException
{
  boolean value = bool.booleanValue();
  outputStream.writeBoolean(value);
}
/**
 * Reads a Boolean written by writeBoolean().
 */
private Boolean readBoolean(DataInputStream inputStream) throws IOException
{
  return Boolean.valueOf(inputStream.readBoolean());
}
/**
 * Writes a Double as a single primitive double.
 */
private void writeNumber(DataOutputStream outputStream, Double number) throws IOException
{
  double value = number.doubleValue();
  outputStream.writeDouble(value);
}
/**
 * Reads a Double written by writeNumber().
 */
private Double readNumber(DataInputStream inputStream) throws IOException
{
  return Double.valueOf(inputStream.readDouble());
}
/**
 * Writes a Long as a single primitive long (8 bytes).
 */
private void writeInteger(DataOutputStream outputStream, Long number) throws IOException
{
  long value = number.longValue();
  outputStream.writeLong(value);
}
/**
 * Reads a Long written by writeInteger(DataOutputStream, Long).
 */
private Long readInteger(DataInputStream inputStream) throws IOException
{
  return Long.valueOf(inputStream.readLong());
}
/**
 * Writes an Integer index as a single primitive int (4 bytes);
 * used for indexed storage.
 */
private void writeInteger(DataOutputStream outputStream, Integer number) throws IOException
{
  int value = number.intValue();
  outputStream.writeInt(value);
}
/**
 * Reads a 4-byte index written by writeInteger(DataOutputStream, Integer).
 */
private Integer readSmallInteger(DataInputStream inputStream) throws IOException
{
  return Integer.valueOf(inputStream.readInt());
}
/**
 * Writes a byte array as a length-prefixed block. Unlike writeBinaryString(),
 * a null argument is not supported here (callers pass non-null values only).
 */
private void writeBinary(DataOutputStream outputStream, byte[] binary) throws IOException
{
  int size = binary.length;
  outputStream.writeInt(size);
  outputStream.write(binary);
}
/**
 * Reads a length-prefixed byte array written by writeBinary().
 */
private byte[] readBinary(DataInputStream inputStream) throws IOException
{
  int size = inputStream.readInt();
  byte[] payload = new byte[size];
  inputStream.readFully(payload);
  return payload;
}
/**
 * Serializes this value's metadata (type, storage details, name, formatting
 * settings, ...) to the output stream. The read counterpart is the
 * {@code ValueMeta(DataInputStream)} constructor, which must consume the
 * fields in exactly this order.
 *
 * @param outputStream the stream to write to
 * @throws KettleFileException when the metadata can not be written
 */
public void writeMeta(DataOutputStream outputStream) throws KettleFileException
{
  try
  {
    int type=getType();
    // Handle type
    outputStream.writeInt(type);
    // Handle storage type
    outputStream.writeInt(storageType);
    switch(storageType) {
    case STORAGE_TYPE_INDEXED:
      {
        // Save the indexed values; -1 marks a null index array.
        if (index==null)
        {
          outputStream.writeInt(-1); // null
        }
        else
        {
          outputStream.writeInt(index.length);
          for (int i=0;i<index.length;i++)
          {
            try {
              switch(type)
              {
              case TYPE_STRING: writeString(outputStream, (String)index[i]); break;
              case TYPE_NUMBER: writeNumber(outputStream, (Double)index[i]); break;
              case TYPE_INTEGER: writeInteger(outputStream, (Long)index[i]); break;
              case TYPE_DATE: writeDate(outputStream, (Date)index[i]); break;
              case TYPE_BIGNUMBER: writeBigNumber(outputStream, (BigDecimal)index[i]); break;
              case TYPE_BOOLEAN: writeBoolean(outputStream, (Boolean)index[i]); break;
              case TYPE_BINARY: writeBinary(outputStream, (byte[])index[i]); break;
              // FIX: corrected "indexe" typo in the error message.
              default: throw new KettleFileException(toString()+" : Unable to serialize indexed storage type for data type "+getType());
              }
            } catch (ClassCastException e) {
              // FIX: keep the original exception as the cause; it was previously dropped.
              throw new RuntimeException(toString()+" : There was a data type error: the data type of "+index[i].getClass().getName()+" object ["+index[i]+"] does not correspond to value meta ["+toStringMeta()+"]", e);
            }
          }
        }
      }
      break;
    case STORAGE_TYPE_BINARY_STRING:
      {
        // Save the storage meta data (optional, guarded by a presence flag)...
        outputStream.writeBoolean(storageMetadata!=null);
        if (storageMetadata!=null) {
          storageMetadata.writeMeta(outputStream);
        }
      }
      break;
    default:
      break;
    }
    // Handle name-length
    writeString(outputStream, name);
    // length & precision
    outputStream.writeInt(getLength());
    outputStream.writeInt(getPrecision());
    // Origin
    writeString(outputStream, origin);
    // Comments
    writeString(outputStream, comments);
    // formatting Mask, decimal, grouping, currency
    writeString(outputStream, conversionMask);
    writeString(outputStream, decimalSymbol);
    writeString(outputStream, groupingSymbol);
    writeString(outputStream, currencySymbol);
    outputStream.writeInt(trimType);
    // Case sensitivity of compare
    outputStream.writeBoolean(caseInsensitive);
    // Sorting information
    outputStream.writeBoolean(sortedDescending);
    // Padding information
    outputStream.writeBoolean(outputPaddingEnabled);
    // date format lenient?
    outputStream.writeBoolean(dateFormatLenient);
    // date format locale? (optional: null is written as a null string)
    writeString(outputStream, dateFormatLocale!=null ? dateFormatLocale.toString() : null);
  }
  catch(IOException e)
  {
    throw new KettleFileException(toString()+" : Unable to write value metadata to output stream", e);
  }
}
/**
 * De-serializing constructor: reads this value's metadata from the input
 * stream, consuming the fields in exactly the order written by writeMeta().
 *
 * @param inputStream the stream to read the metadata from
 * @throws KettleFileException when the metadata can not be read
 * @throws KettleEOFException when the end of the stream is reached
 */
public ValueMeta(DataInputStream inputStream) throws KettleFileException, KettleEOFException
{
  this();
  try
  {
    // Handle type
    type=inputStream.readInt();
    // Handle storage type
    storageType = inputStream.readInt();
    // Read the data in the index
    switch(storageType) {
    case STORAGE_TYPE_INDEXED:
      {
        // An index size of -1 means the index array itself was null.
        int indexSize = inputStream.readInt();
        if (indexSize<0)
        {
          index=null;
        }
        else
        {
          index=new Object[indexSize];
          for (int i=0;i<indexSize;i++)
          {
            switch(type)
            {
            case TYPE_STRING: index[i] = readString(inputStream); break;
            case TYPE_NUMBER: index[i] = readNumber(inputStream); break;
            case TYPE_INTEGER: index[i] = readInteger(inputStream); break;
            case TYPE_DATE: index[i] = readDate(inputStream); break;
            case TYPE_BIGNUMBER: index[i] = readBigNumber(inputStream); break;
            case TYPE_BOOLEAN: index[i] = readBoolean(inputStream); break;
            case TYPE_BINARY: index[i] = readBinary(inputStream); break;
            default: throw new KettleFileException(toString()+" : Unable to de-serialize indexed storage type for data type "+getType());
            }
          }
        }
      }
      break;
    case STORAGE_TYPE_BINARY_STRING:
      {
        // In case we do have storage metadata defined, we read that back in as well..
        if (inputStream.readBoolean()) {
          storageMetadata = new ValueMeta(inputStream);
        }
      }
      break;
    default:
      break;
    }
    // name
    name = readString(inputStream);
    // length & precision
    length = inputStream.readInt();
    precision = inputStream.readInt();
    // Origin
    origin = readString(inputStream);
    // Comments
    comments=readString(inputStream);
    // formatting Mask, decimal, grouping, currency
    conversionMask=readString(inputStream);
    decimalSymbol=readString(inputStream);
    groupingSymbol=readString(inputStream);
    currencySymbol=readString(inputStream);
    trimType=inputStream.readInt();
    // Case sensitivity
    caseInsensitive = inputStream.readBoolean();
    // Sorting type
    sortedDescending = inputStream.readBoolean();
    // Output padding?
    outputPaddingEnabled = inputStream.readBoolean();
    // is date parsing lenient?
    dateFormatLenient = inputStream.readBoolean();
    // The locale is optional: an empty/null string means "not set".
    String strDateFormatLocale = readString(inputStream);
    if (Const.isEmpty(strDateFormatLocale))
    {
      dateFormatLocale = null;
    }
    else
    {
      dateFormatLocale = new Locale(strDateFormatLocale);
    }
  }
  catch(EOFException e)
  {
    // End-of-stream is a distinct, expected condition for callers.
    throw new KettleEOFException(e);
  }
  catch(IOException e)
  {
    throw new KettleFileException(toString()+" : Unable to read value metadata from input stream", e);
  }
}
/**
 * Builds the XML representation of this value's metadata.
 * The read counterpart is the {@code ValueMeta(Node)} constructor, which
 * must parse the same tags.
 *
 * @return the metadata as an XML fragment wrapped in the XML_META_TAG element
 * @throws IOException when the metadata can not be serialized to XML
 */
public String getMetaXML() throws IOException
{
  StringBuffer xml = new StringBuffer();
  xml.append(XMLHandler.openTag(XML_META_TAG));
  xml.append( XMLHandler.addTagValue("type", getTypeDesc()) ) ;
  xml.append( XMLHandler.addTagValue("storagetype", getStorageType()) );
  switch(storageType) {
  case STORAGE_TYPE_INDEXED:
    {
      xml.append( XMLHandler.openTag("index"));
      // Save the indexed strings...
      if (index!=null)
      {
        for (int i=0;i<index.length;i++)
        {
          try {
            switch(type)
            {
            case TYPE_STRING: xml.append( XMLHandler.addTagValue( "value", (String)index[i]) ); break;
            case TYPE_NUMBER: xml.append( XMLHandler.addTagValue( "value", (Double)index[i]) ); break;
            case TYPE_INTEGER: xml.append( XMLHandler.addTagValue( "value", (Long)index[i]) ); break;
            case TYPE_DATE: xml.append( XMLHandler.addTagValue( "value", (Date)index[i]) ); break;
            case TYPE_BIGNUMBER: xml.append( XMLHandler.addTagValue( "value", (BigDecimal)index[i]) ); break;
            case TYPE_BOOLEAN: xml.append( XMLHandler.addTagValue( "value", (Boolean)index[i]) ); break;
            case TYPE_BINARY: xml.append( XMLHandler.addTagValue( "value", (byte[])index[i]) ); break;
            // FIX: corrected "indexe" typo in the error message.
            default: throw new IOException(toString()+" : Unable to serialize indexed storage type to XML for data type "+getType());
            }
          } catch (ClassCastException e) {
            // FIX: keep the original exception as the cause; it was previously dropped.
            throw new RuntimeException(toString()+" : There was a data type error: the data type of "+index[i].getClass().getName()+" object ["+index[i]+"] does not correspond to value meta ["+toStringMeta()+"]", e);
          }
        }
      }
      xml.append( XMLHandler.closeTag("index"));
    }
    break;
  case STORAGE_TYPE_BINARY_STRING:
    {
      // Save the storage meta data...
      if (storageMetadata!=null)
      {
        xml.append(XMLHandler.openTag("storage-meta"));
        xml.append(storageMetadata.getMetaXML());
        xml.append(XMLHandler.closeTag("storage-meta"));
      }
    }
    break;
  default:
    break;
  }
  xml.append( XMLHandler.addTagValue("name", name) );
  xml.append( XMLHandler.addTagValue("length", length) );
  xml.append( XMLHandler.addTagValue("precision", precision) );
  xml.append( XMLHandler.addTagValue("origin", origin) );
  xml.append( XMLHandler.addTagValue("comments", comments) );
  xml.append( XMLHandler.addTagValue("conversion_Mask", conversionMask) );
  xml.append( XMLHandler.addTagValue("decimal_symbol", decimalSymbol) );
  xml.append( XMLHandler.addTagValue("grouping_symbol", groupingSymbol) );
  xml.append( XMLHandler.addTagValue("currency_symbol", currencySymbol) );
  xml.append( XMLHandler.addTagValue("trim_type", getTrimTypeCode(trimType)) );
  xml.append( XMLHandler.addTagValue("case_insensitive", caseInsensitive) );
  xml.append( XMLHandler.addTagValue("sort_descending", sortedDescending) );
  xml.append( XMLHandler.addTagValue("output_padding", outputPaddingEnabled) );
  xml.append( XMLHandler.addTagValue("date_format_lenient", dateFormatLenient) );
  // FIX: dateFormatLocale is optional (writeMeta() and the Node constructor
  // both treat it as nullable); guard against a NullPointerException here.
  xml.append( XMLHandler.addTagValue("date_format_locale", dateFormatLocale!=null ? dateFormatLocale.toString() : null) );
  xml.append(XMLHandler.closeTag(XML_META_TAG));
  return xml.toString();
}
/**
 * De-serializing constructor: parses this value's metadata from an XML node
 * produced by getMetaXML(); tag names and formats must stay in sync with
 * that method.
 *
 * @param node the XML node to parse the metadata from
 * @throws IOException when a value in the XML can not be converted
 */
public ValueMeta(Node node) throws IOException
{
  this();
  type = getType( XMLHandler.getTagValue(node, "type") ) ;
  storageType = getStorageType( XMLHandler.getTagValue(node, "storagetype") );
  switch(storageType) {
  case STORAGE_TYPE_INDEXED:
    {
      // Rebuild the index array from the nested <value> tags.
      Node indexNode = XMLHandler.getSubNode(node, "index");
      int nrIndexes = XMLHandler.countNodes(indexNode, "value");
      index = new Object[nrIndexes];
      for (int i=0;i<index.length;i++)
      {
        Node valueNode = XMLHandler.getSubNodeByNr(indexNode, "value", i);
        String valueString = XMLHandler.getNodeValue(valueNode);
        if (Const.isEmpty(valueString))
        {
          index[i] = null;
        }
        else
        {
          switch(type)
          {
          case TYPE_STRING: index[i] = valueString; break;
          case TYPE_NUMBER: index[i] = Double.parseDouble( valueString ); break;
          case TYPE_INTEGER: index[i] = Long.parseLong( valueString ); break;
          case TYPE_DATE: index[i] = XMLHandler.stringToDate( valueString ); ; break;
          case TYPE_BIGNUMBER: index[i] = new BigDecimal( valueString ); ; break;
          case TYPE_BOOLEAN: index[i] = Boolean.valueOf("Y".equalsIgnoreCase( valueString)); break;
          case TYPE_BINARY: index[i] = XMLHandler.stringToBinary( valueString ); break;
          default: throw new IOException(toString()+" : Unable to de-serialize indexe storage type from XML for data type "+getType());
          }
        }
      }
    }
    break;
  case STORAGE_TYPE_BINARY_STRING:
    {
      // Read back the optional storage metadata (written by getMetaXML).
      Node storageMetaNode = XMLHandler.getSubNode(node, "storage-meta");
      if (storageMetaNode!=null)
      {
        storageMetadata = new ValueMeta(storageMetaNode);
      }
    }
    break;
  default:
    break;
  }
  name = XMLHandler.getTagValue(node, "name");
  // NOTE(review): parseInt will throw NumberFormatException if the
  // "length"/"precision" tags are absent — assumes well-formed metadata XML.
  length = Integer.parseInt( XMLHandler.getTagValue(node, "length") );
  precision = Integer.parseInt( XMLHandler.getTagValue(node, "precision") );
  origin = XMLHandler.getTagValue(node, "origin");
  comments = XMLHandler.getTagValue(node, "comments");
  conversionMask = XMLHandler.getTagValue(node, "conversion_Mask");
  decimalSymbol = XMLHandler.getTagValue(node, "decimal_symbol");
  groupingSymbol = XMLHandler.getTagValue(node, "grouping_symbol");
  currencySymbol = XMLHandler.getTagValue(node, "currency_symbol");
  trimType = getTrimTypeByCode( XMLHandler.getTagValue(node, "trim_type") );
  caseInsensitive = "Y".equalsIgnoreCase( XMLHandler.getTagValue(node, "case_insensitive") );
  sortedDescending = "Y".equalsIgnoreCase( XMLHandler.getTagValue(node, "sort_descending") );
  outputPaddingEnabled = "Y".equalsIgnoreCase( XMLHandler.getTagValue(node, "output_padding") );
  dateFormatLenient = "Y".equalsIgnoreCase( XMLHandler.getTagValue(node, "date_format_lenient") );
  // The locale is optional: an absent/empty tag leaves dateFormatLocale null.
  String dateFormatLocaleString = XMLHandler.getTagValue(node, "date_format_locale");
  if (!Const.isEmpty( dateFormatLocaleString ))
  {
    dateFormatLocale = new Locale(dateFormatLocaleString);
  }
}
/**
 * Builds the XML representation of a single data value, tagged according to
 * this value's data type and storage type. The read counterpart is
 * {@link #getValue(Node)}.
 *
 * @param object the value to serialize (null produces an empty data tag)
 * @return the value as an XML fragment wrapped in the XML_DATA_TAG element
 * @throws IOException when the value can not be serialized to XML
 */
public String getDataXML(Object object) throws IOException
{
  StringBuffer xml = new StringBuffer();
  xml.append(XMLHandler.openTag(XML_DATA_TAG));
  if (object!=null) // otherwise there is no point
  {
    try {
      switch(storageType)
      {
      case STORAGE_TYPE_NORMAL:
        // Handle Content -- only when not NULL
        switch(getType())
        {
        case TYPE_STRING : xml.append( XMLHandler.addTagValue("string-value", (String)object) ); break;
        case TYPE_NUMBER : xml.append( XMLHandler.addTagValue("number-value", (Double)object) ); break;
        case TYPE_INTEGER : xml.append( XMLHandler.addTagValue("integer-value", (Long)object) ); break;
        case TYPE_DATE : xml.append( XMLHandler.addTagValue("date-value", (Date)object) ); break;
        case TYPE_BIGNUMBER : xml.append( XMLHandler.addTagValue("bignumber-value", (BigDecimal)object) ); break;
        case TYPE_BOOLEAN : xml.append( XMLHandler.addTagValue("boolean-value", (Boolean)object) ); break;
        case TYPE_BINARY : xml.append( XMLHandler.addTagValue("binary-value", (byte[])object) ); break;
        default: throw new IOException(toString()+" : Unable to serialize data type to XML "+getType());
        }
        break;
      case STORAGE_TYPE_BINARY_STRING:
        // Handle binary string content -- only when not NULL
        // In this case, we opt not to convert anything at all for speed.
        // That way, we can save on CPU power.
        // Since the streams can be compressed, volume shouldn't be an issue at all.
        xml.append( XMLHandler.addTagValue("binary-string", (byte[])object) );
        break;
      case STORAGE_TYPE_INDEXED:
        xml.append( XMLHandler.addTagValue("index-value", (Integer)object) ); // just an index
        break;
      default: throw new IOException(toString()+" : Unknown storage type "+getStorageType());
      }
    } catch (ClassCastException e) {
      // FIX: keep the original exception as the cause; it was previously dropped.
      throw new RuntimeException(toString()+" : There was a data type error: the data type of "+object.getClass().getName()+" object ["+object+"] does not correspond to value meta ["+toStringMeta()+"]", e);
    }
  }
  xml.append(XMLHandler.closeTag(XML_DATA_TAG));
  return xml.toString();
}
/**
 * Converts a data XML node back into a native Object matching this metadata.
 * This is basically a String-to-Object conversion.
 *
 * @param node the node to retrieve the data value from
 * @return the converted data value, or null when the stored value was empty
 * @throws IOException thrown in case there is a problem with the XML to object conversion
 */
public Object getValue(Node node) throws IOException {
  if (storageType == STORAGE_TYPE_NORMAL)
  {
    String text = XMLHandler.getTagValue(node, "value");
    if (Const.isEmpty(text))
    {
      return null; // an empty tag represents a null value
    }
    switch (getType())
    {
      case TYPE_STRING: return text;
      case TYPE_NUMBER: return Double.parseDouble( text );
      case TYPE_INTEGER: return Long.parseLong( text );
      case TYPE_DATE: return XMLHandler.stringToDate( text );
      case TYPE_BIGNUMBER: return new BigDecimal( text );
      case TYPE_BOOLEAN: return Boolean.valueOf("Y".equalsIgnoreCase( text ));
      case TYPE_BINARY: return XMLHandler.stringToBinary( text );
      default:
        throw new IOException(toString()+" : Unable to de-serialize '"+text+"' from XML for data type "+getType());
    }
  }
  if (storageType == STORAGE_TYPE_BINARY_STRING)
  {
    // Binary-string storage is kept unconverted for speed; just decode the bytes.
    String binaryText = XMLHandler.getTagValue(node, "binary-string");
    if (Const.isEmpty(binaryText))
    {
      return null;
    }
    return XMLHandler.stringToBinary(binaryText);
  }
  if (storageType == STORAGE_TYPE_INDEXED)
  {
    String indexText = XMLHandler.getTagValue(node, "index-value");
    if (Const.isEmpty(indexText))
    {
      return null;
    }
    return Integer.parseInt(indexText);
  }
  throw new IOException(toString()+" : Unknown storage type "+getStorageType());
}
/**
 * Gets the descriptions of the value types a Value can have, excluding the
 * first (internal placeholder) entry of the type-code table.
 *
 * @return an array of String describing the possible types a Value can have
 */
public static final String[] getTypes()
{
  String[] descriptions = new String[typeCodes.length-1];
  for (int i = 1; i < typeCodes.length; i++)
  {
    descriptions[i - 1] = typeCodes[i];
  }
  return descriptions;
}
/**
 * Gets the descriptions of all value types a Value can have, including the
 * first (internal placeholder) entry.
 *
 * @return an array of String describing the possible types a Value can have
 */
public static final String[] getAllTypes()
{
  // clone() on an array returns a shallow copy of the correct array type.
  return (String[]) typeCodes.clone();
}
/**
 * Looks up the description (code) of a value type.
 * TODO: change Desc to Code all over the place. Make sure we can localise this stuff later on.
 *
 * @param type the type code to look up
 * @return the description (code) of the type
 */
public static final String getTypeDesc(int type)
{
  final String description = typeCodes[type];
  return description;
}
/**
 * Converts the String description of a type to an integer type code.
 *
 * @param desc The description of the type to convert
 * @return The integer type of the given String (ValueMetaInterface.TYPE_...),
 *         or TYPE_NONE when the description is not recognized
 */
public static final int getType(String desc)
{
  // Entry 0 is skipped: it is the internal placeholder for "no type".
  int i = 1;
  while (i < typeCodes.length)
  {
    if (typeCodes[i].equalsIgnoreCase(desc))
    {
      return i;
    }
    i++;
  }
  return TYPE_NONE;
}
/**
 * Converts the String description of a storage type to an integer code.
 *
 * @param desc The description of the storage type to convert
 * @return The integer storage type of the given String
 *         (ValueMetaInterface.STORAGE_TYPE_...), or -1 when not recognized
 */
public static final int getStorageType(String desc)
{
  int i = 0;
  while (i < storageTypeCodes.length)
  {
    if (storageTypeCodes[i].equalsIgnoreCase(desc))
    {
      return i;
    }
    i++;
  }
  return -1;
}
/**
 * Looks up the code of a storage type, returning null for codes outside
 * the known range instead of throwing.
 *
 * @param storageType the storage type code to look up
 * @return the storage type description, or null for an unknown code
 */
public static final String getStorageTypeCode(int storageType)
{
  boolean inRange = storageType >= STORAGE_TYPE_NORMAL && storageType <= STORAGE_TYPE_INDEXED;
  return inRange ? storageTypeCodes[storageType] : null;
}
/**
 * Determines whether a data object should be treated as null.
 * That is the case when the reference itself is null, or — for string
 * values — when it is a zero-length String (normal storage) or a
 * zero-length byte array (binary-string storage).
 *
 * @param data the object to test
 * @return true if the object is considered null
 */
public boolean isNull(Object data)
{
  try
  {
    if (data == null)
    {
      return true;
    }
    if (!isString())
    {
      return false; // only string values have an "empty means null" rule
    }
    if (isStorageNormal())
    {
      return ((String) data).length() == 0;
    }
    if (isStorageBinaryString())
    {
      return ((byte[]) data).length == 0;
    }
    return false;
  }
  catch (ClassCastException e)
  {
    throw new RuntimeException("Unable to verify if ["+toString()+"] is null or not because of an error:"+e.toString(), e);
  }
}
/*
* Compare 2 binary strings, one byte at a time.<br>
* This algorithm is very fast but most likely wrong as well.<br>
*
* @param one The first binary string to compare with
* @param two the second binary string to compare to
* @return -1 if <i>one</i> is smaller than <i>two</i>, 0 is both byte arrays are identical and 1 if <i>one</i> is larger than <i>two</i>
private int compareBinaryStrings(byte[] one, byte[] two) {
for (int i=0;i<one.length;i++)
{
if (i>=two.length) return 1; // larger
if (one[i]>two[i]) return 1; // larger
if (one[i]<two[i]) return -1; // smaller
}
if (one.length>two.length) return 1; // larger
if (one.length<two.length) return -1; // smaller
return 0;
}
*/
/**
 * Compare 2 values of the same data type.
 * Null sorts before any non-null value; when the descending flag is set the
 * final result is negated.
 *
 * @param data1 the first value
 * @param data2 the second value
 * @return 0 if the values are equal, -1 if data1 is smaller than data2 and +1 if it's larger.
 * @throws KettleValueException In case we get conversion errors
 */
public int compare(Object data1, Object data2) throws KettleValueException
{
  boolean n1 = isNull(data1);
  boolean n2 = isNull(data2);
  // null is always smaller!
  if (n1 && !n2) return -1;
  if (!n1 && n2) return 1;
  if (n1 && n2) return 0;
  int cmp=0;
  switch (getType())
  {
  case TYPE_STRING:
    {
      // Trailing whitespace is ignored in string comparisons.
      String one = Const.rtrim(getString(data1));
      String two = Const.rtrim(getString(data2));
      if (caseInsensitive)
      {
        cmp = one.compareToIgnoreCase(two);
      }
      else
      {
        cmp = one.compareTo(two);
      }
    }
    break;
  case TYPE_INTEGER:
    {
      long a = getInteger(data1).longValue();
      long b = getInteger(data2).longValue();
      // FIX: compare directly instead of subtracting; (a - b) can overflow
      // and yield a result with the wrong sign for large values.
      cmp = a < b ? -1 : (a > b ? 1 : 0);
    }
    break;
  case TYPE_NUMBER:
    {
      cmp=Double.compare(getNumber(data1).doubleValue(), getNumber(data2).doubleValue());
    }
    break;
  case TYPE_DATE:
    {
      long t1 = getDate(data1).getTime();
      long t2 = getDate(data2).getTime();
      // FIX: direct comparison avoids long-subtraction overflow.
      cmp = t1 < t2 ? -1 : (t1 > t2 ? 1 : 0);
    }
    break;
  case TYPE_BIGNUMBER:
    {
      cmp=getBigNumber(data1).compareTo(getBigNumber(data2));
    }
    break;
  case TYPE_BOOLEAN:
    {
      boolean b1 = getBoolean(data1).booleanValue();
      boolean b2 = getBoolean(data2).booleanValue();
      if (b1 == b2) cmp = 0;      // true == true, false == false
      else if (b1) cmp = 1;       // true > false
      else cmp = -1;              // false < true
    }
    break;
  case TYPE_BINARY:
    {
      // Lexicographic byte-array comparison.
      byte[] b1 = (byte[]) data1;
      byte[] b2 = (byte[]) data2;
      int length = b1.length < b2.length ? b1.length : b2.length;
      for (int i=0;i<length;i++)
      {
        int diff = b1[i] - b2[i];
        if (diff != 0)
        {
          // FIX: keep the sign of the first differing byte. The previous code
          // applied Math.abs(), which reported every mismatch as "larger" and
          // broke the comparator's antisymmetry.
          cmp = diff < 0 ? -1 : 1;
          break;
        }
      }
      if (cmp == 0)
      {
        // FIX: when one array is a prefix of the other, the shorter one sorts
        // first (previously such arrays incorrectly compared as equal).
        cmp = b1.length < b2.length ? -1 : (b1.length > b2.length ? 1 : 0);
      }
    }
    break;
  default:
    throw new KettleValueException(toString()+" : Comparing values can not be done with data type : "+getType());
  }
  if (isSortedDescending())
  {
    return -cmp;
  }
  return cmp;
}
/**
 * Compare 2 values that may carry different metadata.
 * When the types match but the storage types differ, one side is converted
 * to the other's storage representation before the two-argument compare()
 * is used; when the data types differ, data2 is converted to this value's
 * data type first (the first metadata "drives" the comparison).
 *
 * @param data1 the first value
 * @param meta2 the second value's metadata
 * @param data2 the second value
 * @return 0 if the values are equal, -1 if data1 is smaller than data2 and +1 if it's larger.
 * @throws KettleValueException In case we get conversion errors
 */
public int compare(Object data1, ValueMetaInterface meta2, Object data2) throws KettleValueException
{
  if (meta2==null) {
    throw new KettleValueException(toStringMeta()+" : Second meta data (meta2) is null, please check one of the previous steps.");
  }
  try
  {
    // Before we can compare data1 to data2 we need to make sure they have the same data type etc.
    if (getType()==meta2.getType())
    {
      if (getStorageType()==meta2.getStorageType()) return compare(data1, data2);
      // Convert the storage type to compare the data.
      switch(getStorageType())
      {
      case STORAGE_TYPE_NORMAL :
        return compare(data1, meta2.convertToNormalStorageType(data2));
      case STORAGE_TYPE_BINARY_STRING :
        return compare(data1, meta2.convertToBinaryStringStorageType(data2));
      case STORAGE_TYPE_INDEXED :
        switch(meta2.getStorageType())
        {
        case STORAGE_TYPE_INDEXED:
          return compare(data1, data2); // not accessible, just to make sure.
        case STORAGE_TYPE_NORMAL:
          // Delegate with the operands swapped, so negate the result to
          // keep the comparison oriented as (data1 vs data2).
          return -meta2.compare(data2, convertToNormalStorageType(data1));
        case STORAGE_TYPE_BINARY_STRING:
          return -meta2.compare(data2, convertToBinaryStringStorageType(data1));
        default:
          throw new KettleValueException(meta2.toStringMeta()+" : Unknown storage type : "+meta2.getStorageType());
        }
      default: throw new KettleValueException(toStringMeta()+" : Unknown storage type : "+getStorageType());
      }
    }
    // If the data types are not the same, the first one is the driver...
    // The second data type is converted to the first one.
    return compare(data1, convertData(meta2, data2));
  }
  catch(Exception e)
  {
    throw new KettleValueException(toStringMeta()+" : Unable to compare with value ["+meta2.toStringMeta()+"]", e);
  }
}
/**
 * Converts data from another metadata's representation to the data type
 * specified in this object.
 *
 * @param meta2 the metadata of the object to be converted
 * @param data2 the data of the object to be converted
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException in case there is a data conversion error
 */
public Object convertData(ValueMetaInterface meta2, Object data2) throws KettleValueException
{
  // The source metadata performs the actual conversion; we only pick the target type.
  final int targetType = getType();
  switch (targetType)
  {
    case TYPE_STRING:    return meta2.getString(data2);
    case TYPE_NUMBER:    return meta2.getNumber(data2);
    case TYPE_INTEGER:   return meta2.getInteger(data2);
    case TYPE_DATE:      return meta2.getDate(data2);
    case TYPE_BIGNUMBER: return meta2.getBigNumber(data2);
    case TYPE_BOOLEAN:   return meta2.getBoolean(data2);
    case TYPE_BINARY:    return meta2.getBinary(data2);
    default:
      throw new KettleValueException(toString()+" : I can't convert the specified value to data type : "+getType());
  }
}
/**
 * Converts data from another metadata's representation to the data type
 * specified in this object, staying compatible with version 2.5.2 for
 * String conversions (getCompatibleString instead of getString).
 *
 * @param meta2 the metadata of the object to be converted
 * @param data2 the data of the object to be converted
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException in case there is a data conversion error
 */
public Object convertDataCompatible(ValueMetaInterface meta2, Object data2) throws KettleValueException
{
  final int targetType = getType();
  switch (targetType)
  {
    case TYPE_STRING:    return meta2.getCompatibleString(data2);
    case TYPE_NUMBER:    return meta2.getNumber(data2);
    case TYPE_INTEGER:   return meta2.getInteger(data2);
    case TYPE_DATE:      return meta2.getDate(data2);
    case TYPE_BIGNUMBER: return meta2.getBigNumber(data2);
    case TYPE_BOOLEAN:   return meta2.getBoolean(data2);
    case TYPE_BINARY:    return meta2.getBinary(data2);
    default:
      throw new KettleValueException(toString()+" : I can't convert the specified value to data type : "+getType());
  }
}
/**
 * Converts an object to the data type specified in the conversion metadata.
 *
 * @param data2 the data to convert
 * @return the data converted to the conversion metadata's data type
 * @throws KettleValueException in case there is a conversion error or no
 *         conversion metadata was set
 */
public Object convertDataUsingConversionMetaData(Object data2) throws KettleValueException {
  if (conversionMetadata==null) {
    throw new KettleValueException("API coding error: please specify the conversion metadata before attempting to convert value "+name);
  }
  // Suppose we have an Integer 123, length 5
  // The string variation of this is " 00123"
  // To convert this back to an Integer we use the storage metadata
  // Specifically, in method convertStringToInteger() we consult the storageMetaData to get the correct conversion mask
  // That way we're always sure that a conversion works both ways.
  switch(conversionMetadata.getType()) {
  case TYPE_STRING : return getString(data2);
  case TYPE_INTEGER : return getInteger(data2);
  case TYPE_NUMBER : return getNumber(data2);
  case TYPE_DATE : return getDate(data2);
  case TYPE_BIGNUMBER : return getBigNumber(data2);
  case TYPE_BOOLEAN : return getBoolean(data2);
  case TYPE_BINARY : return getBinary(data2);
  default:
    // FIX: report the type of the conversion target that actually failed.
    // The previous message dereferenced storageMetadata, which is unrelated
    // here and may be null (NullPointerException while reporting the error).
    throw new KettleValueException(toString()+" : I can't convert the specified value to data type : "+conversionMetadata.getType());
  }
}
/**
 * Convert the specified string to the data type specified in this object.
 *
 * The conversion runs in three stages: (1) map the string to null if it
 * matches the configured (or type-default) null representation, (2) trim
 * according to trim_type, (3) delegate the actual type conversion to
 * convertData() using the supplied (String-typed) conversion metadata.
 *
 * @param pol the string to be converted
 * @param convertMeta the metadata of the object (only string type) to be converted
 * @param nullIf set the result to null if pol equals nullIf (case-insensitive, space-padded compare)
 * @param ifNull substitute this value when pol is empty/null or matches the null representation
 * @param trim_type the trim type to be used (ValueMetaInterface.TRIM_TYPE_XXX)
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException in case there is a data conversion error
 */
public Object convertDataFromString(String pol, ValueMetaInterface convertMeta, String nullIf, String ifNull, int trim_type) throws KettleValueException
{
// null handling and conversion of value to null
String null_value = nullIf;
if (null_value == null)
{
// No explicit null representation supplied: fall back to the
// type-specific default null string from Const.
switch (convertMeta.getType())
{
case Value.VALUE_TYPE_BOOLEAN:
null_value = Const.NULL_BOOLEAN;
break;
case Value.VALUE_TYPE_STRING:
null_value = Const.NULL_STRING;
break;
case Value.VALUE_TYPE_BIGNUMBER:
null_value = Const.NULL_BIGNUMBER;
break;
case Value.VALUE_TYPE_NUMBER:
null_value = Const.NULL_NUMBER;
break;
case Value.VALUE_TYPE_INTEGER:
null_value = Const.NULL_INTEGER;
break;
case Value.VALUE_TYPE_DATE:
null_value = Const.NULL_DATE;
break;
case Value.VALUE_TYPE_BINARY:
null_value = Const.NULL_BINARY;
break;
default:
null_value = Const.NULL_NONE;
break;
}
}
// See if we need to convert a null value into a String
// For example, we might want to convert null into "Empty".
if (!Const.isEmpty(ifNull)) {
// Note that you can't pull the pad method up here as a nullComp variable because you could get an NPE since you haven't checked isEmpty(pol) yet!
if (Const.isEmpty(pol) || pol.equalsIgnoreCase(Const.rightPad(new StringBuffer(null_value), pol.length())))
{
pol = ifNull;
}
}
// See if the polled value is empty
// In that case, we have a null value on our hands...
if (Const.isEmpty(pol))
{
return null;
}
else
{
// if the null_value is specified, we try to match with that.
if (!Const.isEmpty(null_value))
{
if (null_value.length()<=pol.length())
{
// If the polled value is equal to the spaces right-padded null_value, we have a match
if (pol.equalsIgnoreCase(Const.rightPad(new StringBuffer(null_value), pol.length())))
{
return null;
}
}
}
else
{
// Verify if there are only spaces in the polled value...
// We consider that empty as well...
if (Const.onlySpaces(pol))
{
return null;
}
}
}
// Trimming: strip leading and/or trailing spaces (only ' ', not all whitespace),
// depending on the requested trim type.
switch (trim_type)
{
case ValueMetaInterface.TRIM_TYPE_LEFT:
{
StringBuffer strpol = new StringBuffer(pol);
while (strpol.length() > 0 && strpol.charAt(0) == ' ')
strpol.deleteCharAt(0);
pol=strpol.toString();
}
break;
case ValueMetaInterface.TRIM_TYPE_RIGHT:
{
StringBuffer strpol = new StringBuffer(pol);
while (strpol.length() > 0 && strpol.charAt(strpol.length() - 1) == ' ')
strpol.deleteCharAt(strpol.length() - 1);
pol=strpol.toString();
}
break;
case ValueMetaInterface.TRIM_TYPE_BOTH:
StringBuffer strpol = new StringBuffer(pol);
{
while (strpol.length() > 0 && strpol.charAt(0) == ' ')
strpol.deleteCharAt(0);
while (strpol.length() > 0 && strpol.charAt(strpol.length() - 1) == ' ')
strpol.deleteCharAt(strpol.length() - 1);
pol=strpol.toString();
}
break;
default:
break;
}
// On with the regular program...
// Simply call the ValueMeta routines to do the conversion
// We need to do some effort here: copy all
return convertData(convertMeta, pol);
}
/**
 * Calculate the hashcode of the specified data object.
 *
 * Null values hash to a small, type-dependent constant so that nulls of
 * different types do not collide; non-null values delegate to the hashCode
 * of the converted native representation.
 *
 * @param object the data value to calculate a hashcode for
 * @return the calculated hashcode
 * @throws KettleValueException in case of a data conversion problem
 */
public int hashCode(Object object) throws KettleValueException
{
    if (isNull(object)) {
        switch (getType()) {
            case TYPE_BOOLEAN:   return 1;
            case TYPE_DATE:      return 2;
            case TYPE_NUMBER:    return 4;
            case TYPE_STRING:    return 8;
            case TYPE_INTEGER:   return 16;
            case TYPE_BIGNUMBER: return 32;
            default:             return 0; // TYPE_NONE and anything unknown
        }
    }
    switch (getType()) {
        case TYPE_BOOLEAN:   return getBoolean(object).hashCode();
        case TYPE_DATE:      return getDate(object).hashCode();
        case TYPE_INTEGER:   return getInteger(object).hashCode();
        case TYPE_NUMBER:    return getNumber(object).hashCode();
        case TYPE_STRING:    return getString(object).hashCode();
        case TYPE_BIGNUMBER: return getBigNumber(object).hashCode();
        default:             return 0; // TYPE_NONE and anything unknown
    }
}
/**
 * Create an old-style (V2) Value for backward compatibility reasons.
 *
 * @param data the data to store in the value
 * @return a newly created Value object carrying this metadata's name,
 *         type, length and precision
 * @throws KettleValueException in case there is a data conversion problem
 */
public Value createOriginalValue(Object data) throws KettleValueException
{
    final Value value = new Value(name, type);
    value.setLength(length, precision);

    if (isNull(data)) {
        value.setNull();
        return value;
    }

    switch (value.getType()) {
        case TYPE_STRING:
            value.setValue(getString(data));
            break;
        case TYPE_NUMBER:
            value.setValue(getNumber(data).doubleValue());
            break;
        case TYPE_INTEGER:
            value.setValue(getInteger(data).longValue());
            break;
        case TYPE_DATE:
            value.setValue(getDate(data));
            break;
        case TYPE_BOOLEAN:
            value.setValue(getBoolean(data).booleanValue());
            break;
        case TYPE_BIGNUMBER:
            value.setValue(getBigNumber(data));
            break;
        case TYPE_BINARY:
            value.setValue(getBinary(data));
            break;
        default:
            throw new KettleValueException(toString()+" : We can't convert data type "+getTypeDesc()+" to an original (V2) Value");
    }
    return value;
}
/**
 * Extracts the primitive data from an old-style (V2) Value object.
 *
 * Note that we switch on this metadata object's own type: the old-style
 * Value converts to the requested representation on the fly and does not
 * throw or complain on mismatch (unfortunately).
 *
 * @param value the old style Value object
 * @return the value's data, NOT the metadata; null for a null Value
 * @throws KettleValueException in case there is a data conversion problem
 */
public Object getValueData(Value value) throws KettleValueException
{
    if (value == null || value.isNull()) {
        return null;
    }
    // So far the old types and the new types map to the same thing,
    // so for compatibility we just ask the old-style value to convert.
    switch (getType()) {
        case ValueMetaInterface.TYPE_STRING:
            return value.getString();
        case ValueMetaInterface.TYPE_NUMBER:
            return value.getNumber();
        case ValueMetaInterface.TYPE_INTEGER:
            return value.getInteger();
        case ValueMetaInterface.TYPE_DATE:
            return value.getDate();
        case ValueMetaInterface.TYPE_BOOLEAN:
            return value.getBoolean();
        case ValueMetaInterface.TYPE_BIGNUMBER:
            return value.getBigNumber();
        case ValueMetaInterface.TYPE_BINARY:
            return value.getBytes();
        default:
            throw new KettleValueException(toString()+" : We can't convert original data type "+value.getTypeDesc()+" to a primitive data type");
    }
}
/**
 * @return the storage metadata describing how values are kept in their
 *         (binary string) storage form, or null if none was set
 */
public ValueMetaInterface getStorageMetadata() {
return storageMetadata;
}
/**
 * Sets the storage metadata and re-evaluates whether storage and actual
 * format are identical (which allows pass-through without conversion).
 *
 * @param storageMetadata the storageMetadata to set
 */
public void setStorageMetadata(ValueMetaInterface storageMetadata) {
this.storageMetadata = storageMetadata;
compareStorageAndActualFormat();
}
/**
 * Compares this value's formatting metadata against its storage metadata
 * and sets the identicalFormat flag: when the formats are identical, a
 * lazily-stored binary string can be passed through without conversion.
 *
 * Note: when the string encodings differ, or the type is neither date nor
 * numeric, identicalFormat is deliberately left untouched — this matches
 * the historical behavior of this method.
 */
private void compareStorageAndActualFormat() {
    if (storageMetadata == null) {
        // No storage representation at all: pass-through is trivially safe.
        identicalFormat = true;
        return;
    }
    // If a trim type is set, we need to at least try to trim the strings.
    // In that case, the identical-format shortcut is off.
    if (trimType != TRIM_TYPE_NONE) {
        identicalFormat = false;
        return;
    }
    // If there is a string encoding set and it's the same encoding as the
    // binary string's, we don't have to convert. If neither side has an
    // encoding set, we're certain we don't have to convert either.
    if (!equalsHandlingNull(getStringEncoding(), storageMetadata.getStringEncoding())) {
        // Encodings differ: leave identicalFormat unchanged (historical behavior).
        return;
    }
    // However, perhaps the conversion mask changed since we read the binary
    // string? The output can be different from the input; if the mask is
    // different we need to convert, otherwise we can just ignore it.
    if (isDate()) {
        identicalFormat =
            equalsHandlingNull(getConversionMask(), storageMetadata.getConversionMask());
    } else if (isNumeric()) {
        // Check the lengths first
        if (getLength() != storageMetadata.getLength()
            || getPrecision() != storageMetadata.getPrecision()) {
            identicalFormat = false;
            return;
        }
        // If the conversion mask, the grouping symbol or the decimal format
        // changed, we need to convert from the binary string to the target
        // data type and then back to a string in the required format.
        identicalFormat =
            equalsHandlingNull(getConversionMask(), storageMetadata.getConversionMask())
            // BUG FIX: the null/null fallback here previously re-tested the
            // conversion masks instead of the grouping symbols.
            && equalsHandlingNull(getGroupingSymbol(), storageMetadata.getGroupingSymbol())
            && equalsHandlingNull(getDecimalFormat(), storageMetadata.getDecimalFormat());
    }
}

/** Null-safe equality: true when both arguments are null or a.equals(b). */
private static boolean equalsHandlingNull(Object a, Object b) {
    return (a == null) ? (b == null) : a.equals(b);
}
/**
 * @return the trim type applied when converting from string storage
 *         (one of the ValueMetaInterface.TRIM_TYPE_XXX constants)
 */
public int getTrimType() {
return trimType;
}
/**
 * @param trimType the trim type to set
 *        (one of the ValueMetaInterface.TRIM_TYPE_XXX constants)
 */
public void setTrimType(int trimType) {
this.trimType = trimType;
}
/**
 * Looks up a trim type by its code string (case-insensitive).
 *
 * @param tt the trim type code to look up; may be null
 * @return the index into trimTypeCode, or 0 (none) when tt is null or unknown
 */
public final static int getTrimTypeByCode(String tt)
{
    if (tt != null) {
        for (int index = 0; index < trimTypeCode.length; index++) {
            if (trimTypeCode[index].equalsIgnoreCase(tt)) {
                return index;
            }
        }
    }
    return 0;
}
/**
 * Looks up a trim type by its description (case-insensitive), falling back
 * to a lookup by code when no description matches.
 *
 * @param tt the trim type description (or code) to look up; may be null
 * @return the index into trimTypeDesc/trimTypeCode, or 0 (none) when tt is
 *         null or unknown
 */
public final static int getTrimTypeByDesc(String tt)
{
    if (tt != null) {
        for (int index = 0; index < trimTypeDesc.length; index++) {
            if (trimTypeDesc[index].equalsIgnoreCase(tt)) {
                return index;
            }
        }
        // If this fails, try to match using the code.
        return getTrimTypeByCode(tt);
    }
    return 0;
}
/**
 * @param i the trim type index
 * @return the code for trim type i, or the code for "none" (index 0) when
 *         i is out of range
 */
public final static String getTrimTypeCode(int i)
{
    final boolean inRange = (i >= 0 && i < trimTypeCode.length);
    return trimTypeCode[inRange ? i : 0];
}
/**
 * @param i the trim type index
 * @return the description for trim type i, or the description for "none"
 *         (index 0) when i is out of range
 */
public final static String getTrimTypeDesc(int i)
{
    final boolean inRange = (i >= 0 && i < trimTypeDesc.length);
    return trimTypeDesc[inRange ? i : 0];
}
/**
 * @return the conversion metadata used by
 *         convertDataUsingConversionMetaData(), or null if none was set
 */
public ValueMetaInterface getConversionMetadata()
{
return conversionMetadata;
}
/**
 * @param conversionMetadata the conversion metadata to set; specifies the
 *        target data type for convertDataUsingConversionMetaData()
 */
public void setConversionMetadata(ValueMetaInterface conversionMetadata)
{
this.conversionMetadata = conversionMetadata;
}
/**
 * @return true if the String encoding used (storage) is single byte encoded.
 */
public boolean isSingleByteEncoding()
{
return singleByteEncoding;
}
/**
 * @return the number of binary string to native data type conversions done
 *         with this object
 */
public long getNumberOfBinaryStringConversions() {
return numberOfBinaryStringConversions;
}
/**
 * @param numberOfBinaryStringConversions the number of binary string to
 *        native data type conversions done with this object
 */
public void setNumberOfBinaryStringConversions(long numberOfBinaryStringConversions) {
this.numberOfBinaryStringConversions = numberOfBinaryStringConversions;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @see java.sql.ResultSetMetaData#isAutoIncrement(int)
 * @return whether the original JDBC column was auto-increment
 */
public boolean isOriginalAutoIncrement() {
return originalAutoIncrement;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @param originalAutoIncrement whether the original JDBC column was auto-increment
 */
public void setOriginalAutoIncrement(boolean originalAutoIncrement) {
this.originalAutoIncrement=originalAutoIncrement;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @see java.sql.ResultSetMetaData#getColumnType(int)
 * @return the original JDBC column type (java.sql.Types constant)
 */
public int getOriginalColumnType() {
return originalColumnType;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @param originalColumnType the original JDBC column type (java.sql.Types constant)
 */
public void setOriginalColumnType(int originalColumnType) {
this.originalColumnType=originalColumnType;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @see java.sql.ResultSetMetaData#getColumnTypeName(int)
 * @return the original JDBC column type name
 */
public String getOriginalColumnTypeName() {
return originalColumnTypeName;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @param originalColumnTypeName the original JDBC column type name
 */
public void setOriginalColumnTypeName(String originalColumnTypeName) {
this.originalColumnTypeName=originalColumnTypeName;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @see java.sql.ResultSetMetaData#isNullable(int)
 * @return the original JDBC nullability indicator (int-valued, per JDBC)
 */
public int isOriginalNullable() {
return originalNullable;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @param originalNullable the original JDBC nullability indicator
 */
public void setOriginalNullable(int originalNullable) {
this.originalNullable=originalNullable;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @see java.sql.ResultSetMetaData#getPrecision(int)
 * @return the original JDBC column precision
 */
public int getOriginalPrecision() {
return originalPrecision;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @param originalPrecision the original JDBC column precision
 */
public void setOriginalPrecision(int originalPrecision) {
this.originalPrecision=originalPrecision;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @see java.sql.ResultSetMetaData#getScale(int)
 * @return the original JDBC column scale
 */
public int getOriginalScale() {
return originalScale;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @param originalScale the original JDBC column scale
 */
public void setOriginalScale(int originalScale) {
this.originalScale=originalScale;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @see java.sql.ResultSetMetaData#isSigned(int)
 * @return whether the original JDBC column was signed
 */
public boolean isOriginalSigned() {
return originalSigned;
}
/**
 * Original JDBC RecordSetMetaData property.
 * @param originalSigned whether the original JDBC column was signed
 */
public void setOriginalSigned(boolean originalSigned) {
this.originalSigned=originalSigned;
}
} |
package joshua.decoder.hypergraph;
import joshua.decoder.ff.lm.LMFFDPState;
import joshua.decoder.ff.tm.Grammar;
import joshua.decoder.ff.tm.Rule;
import joshua.decoder.ff.tm.HieroGrammar.MemoryBasedBatchGrammar;
import joshua.decoder.ff.FFDPState;
import joshua.decoder.ff.FeatureFunction;
import joshua.corpus.SymbolTable;
import joshua.util.FileUtility;
import joshua.util.Regex;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.HashMap;
/**
* this class implements functions of writting/reading hypergraph
* on disk. Limitations of this version
* (1) cannot recover each individual feature, notably the LM feature
* (2) assume we only have one stateful featuure, which must be a
* LM feature
*
* @author Zhifei Li, <zhifei.work@gmail.com>
* @author wren ng thornton <wren@users.sourceforge.net>
* @version $LastChangedDate$
*/
//Bottom-up
//line: SENTENCE_TAG, sent_id, sent_len, num_items, num_deductions (in average, num_deductions is about 10 times larger than the num_items, which is in average about 4000)
//line: ITEM_TAG, item id, i, j, lhs, num_deductions, tbl_state;
//line: best_cost, num_items, item_ids, rule id, OOV-Non-Terminal (optional), OOV (optional),
public class DiskHyperGraph {
// Fields
private int LMFeatureID = 0;
private SymbolTable symbolTable;
//when saving the hg, we simply compute all the model cost on the fly and store them on the disk
//TODO: when reading the hg, we read thm into a WithModelCostsHyperEdge; now, we let a program outside this class to figure out which model cost corresponds which feature function, we should avoid this in the future
private ArrayList<FeatureFunction> featureFunctions;
// Whether to store the costs at each HyperEdge
private boolean storeModelCosts = false;
// This will be set if the previous sentence is skipped
private String startLine;
private HashMap<HGNode,Integer> itemToID
= new HashMap<HGNode,Integer>(); // for saving hypergraph
private HashMap<Integer,HGNode> idToItem
= new HashMap<Integer,HGNode>(); // for reading hypergraph
private int currentItemID = 1;
private int qtyDeductions = 0;
// Shared by many hypergraphs, via the initialization functions
private HashMap<Integer,Rule> associatedGrammar = new HashMap<Integer,Rule>();
private BufferedWriter writer;
private BufferedReader reader;
private HyperGraphPruning pruner;
// Set in init_read(...), used in read_hyper_graph()
private HashMap<Integer,?> selectedSentences;
// Static Fields
private static final String SENTENCE_TAG = "#SENT: ";
private static final String ITEM_TAG = "
private static final String ITEM_STATE_TAG = " ST ";
private static final String NULL_ITEM_STATE = "nullstate";
private static final String RULE_TBL_SEP = " -LZF- ";
// TODO: this should be changed
// TODO: use joshua.util.Regex to avoid recompiling all the time
private static final String nonterminalRegexp
= "^\\[[A-Z]+\\,[0-9]*\\]$";
private static final String nonterminalReplaceRegexp
= "[\\[\\]\\,0-9]+";
/* three kinds of rule:
* (>0) regular rule
* (0) oov rule
* (-1) null rule
*/
private static int NULL_RULE_ID = -1;
//TODO: this is a hack for us to create OOVRule, and OOVRuleID
private static Grammar pGrammar = new MemoryBasedBatchGrammar();
private static final Logger logger =
Logger.getLogger(DiskHyperGraph.class.getName());
// Constructors
/**
* For saving purpose, one needs to specify the featureFunctions.
* For reading purpose, one does not need to provide the list.
*/
public DiskHyperGraph(SymbolTable symbolTable, int LMFeatureID,
boolean storeModelCosts, ArrayList<FeatureFunction> featureFunctions
) {
this.symbolTable = symbolTable;
this.LMFeatureID = LMFeatureID;
this.storeModelCosts = storeModelCosts;
this.featureFunctions = featureFunctions;
}
// Initialization Methods
//for writting hyper-graph: (1) saving each hyper-graph; (2) remember each regualar rule used; (3) dump the rule jointly (in case parallel decoding)
public void init_write(String itemsFile, boolean useForestPruning, double threshold)
throws IOException {
this.writer =
(null == itemsFile)
? new BufferedWriter(new OutputStreamWriter(System.out))
: FileUtility.getWriteFileStream(itemsFile) ;
if (useForestPruning) {
this.pruner = new HyperGraphPruning(
this.symbolTable, true, threshold, threshold, 1, 1);
}
}
public void init_read(String hypergraphsFile, String rulesFile, HashMap<Integer,?> selectedSentences) {
try {
this.reader = FileUtility.getReadFileStream(hypergraphsFile);
this.selectedSentences = selectedSentences;
/* Reload the rule table */
if (logger.isLoggable(Level.INFO))
logger.info("Reading rules from file " + rulesFile);
this.associatedGrammar.clear();
BufferedReader rulesReader =
FileUtility.getReadFileStream(rulesFile);
String line;
while ((line = FileUtility.read_line_lzf(rulesReader)) != null) {
// line format: ruleID owner RULE_TBL_SEP rule
String[] fds = line.split(RULE_TBL_SEP); // TODO: use joshua.util.Regex
if (fds.length != 2) {
logger.severe("wrong RULE line");
System.exit(1);
}
String[] words = Regex.spaces.split(fds[0]);
int ruleID = Integer.parseInt(words[0]);
int defaultOwner = this.symbolTable.addTerminal(words[1]);
// stateless cost is not properly set, so cannot extract individual features during kbest extraction
this.associatedGrammar.put(ruleID,
MemoryBasedBatchGrammar.createRule(this.symbolTable, nonterminalRegexp,
nonterminalReplaceRegexp, ruleID, fds[1], defaultOwner));
}
rulesReader.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public HashMap<Integer,Rule> getAssocatedGrammar(){
return associatedGrammar;
}
private void resetStates() {
this.itemToID.clear();
this.idToItem.clear();
this.currentItemID = 1;
this.qtyDeductions = 0;
}
// Methods
public void save_hyper_graph(HyperGraph hg) throws IOException {
resetStates();
if (null != this.pruner) this.pruner.pruning_hg(hg);
constructItemTables(hg);
if (logger.isLoggable(Level.INFO))
logger.info("Number of Items is: " + this.itemToID.size());
this.writer.write(
SENTENCE_TAG + hg.sent_id
+ " " + hg.sent_len
+ " " + this.itemToID.size()
+ " " + this.qtyDeductions
+ "\n" );
// we save the hypergraph in a bottom-up way: so that reading is easy
if (this.idToItem.size() != this.itemToID.size()) {
logger.severe("Number of Items is not equal");
System.exit(1);
}
for (int i = 1; i <= this.idToItem.size(); i++) {
writeItem(this.idToItem.get(i));
}
if (null != this.pruner) this.pruner.clearState();
}
/**
* Assign IDs to all HGNodes in the hypergraph. We do a
* depth-first traversal starting at the goal item, and
* assign IDs from the bottom up. BUG: this code could stack
* overflow for deep trees.
*/
private void constructItemTables(HyperGraph hg) {
resetStates();
constructItemTables(hg.goal_item);
}
/**
* This method is <i>really</i> private, and should only
* be called by constructItemTables(HyperGraph).
*/
private void constructItemTables(HGNode item) {
if (this.itemToID.containsKey(item)) return;
// first: assign IDs to all my antecedents
for (HyperEdge hyperEdge : item.l_hyperedges) {
this.qtyDeductions++;
if (null != hyperEdge.get_ant_items()) {
for (HGNode antecedentItem : hyperEdge.get_ant_items()) {
constructItemTables(antecedentItem);
}
}
}
// second: assign ID to "myself"
this.idToItem.put(this.currentItemID, item);
this.itemToID.put(item, this.currentItemID);
this.currentItemID++;
}
private void writeItem(HGNode item) throws IOException {
this.writer.write(
new StringBuffer()
.append(ITEM_TAG)
.append(" ")
.append(this.itemToID.get(item))
.append(" ")
.append(item.i)
.append(" ")
.append(item.j)
.append(" ")
.append(this.symbolTable.getWord(item.lhs))
.append(" ")
.append(
null == item.l_hyperedges
? 0
: item.l_hyperedges.size() )
.append(ITEM_STATE_TAG)
.append(
// Assume LM is the only stateful feature
null != item.getTblFeatDPStates()
? item.getTblFeatDPStates()
.get(this.LMFeatureID)
.getSignature(this.symbolTable, true)
: NULL_ITEM_STATE )
.append("\n")
.toString()
);
if (null != item.l_hyperedges) {
for (HyperEdge hyperEdge : item.l_hyperedges) {
writeDeduction(item, hyperEdge);
}
}
this.writer.flush();
}
private final boolean isOutOfVocabularyRule(Rule rl) {
return (rl.getRuleID() == pGrammar.getOOVRuleID());
}
private void writeDeduction(HGNode item, HyperEdge deduction)
throws IOException {
//get rule id
int ruleID = NULL_RULE_ID;
final Rule deduction_rule = deduction.get_rule();
if (null != deduction_rule) {
ruleID = deduction_rule.getRuleID();
if (! isOutOfVocabularyRule(deduction_rule)) {
this.associatedGrammar.put(ruleID, deduction_rule); //remember used regular rule
}
}
StringBuffer s = new StringBuffer();
//line: best_cost, num_items, item_ids, rule id, OOV-Non-Terminal (optional), OOV (optional),
s.append(String.format("%.4f ", deduction.best_cost));
//s.append(" ").append(cur_d.best_cost).append(" ");//this 1.2 faster than the previous statement
//s.append(String.format("%.4f ", cur_d.get_transition_cost(false)));
//s.append(cur_d.get_transition_cost(false)).append(" ");//this 1.2 faster than the previous statement, but cost 1.4 larger disk space
if (null == deduction.get_ant_items()) {
s.append(0);
} else {
final int qtyItems = deduction.get_ant_items().size();
s.append(qtyItems);
for (int i = 0; i < qtyItems; i++) {
s.append(" ")
.append(this.itemToID.get(
deduction.get_ant_items().get(i) ));
}
}
s.append(" ")
.append(ruleID);
if (ruleID == pGrammar.getOOVRuleID()) {
s.append(" ")
.append(this.symbolTable.getWord(deduction_rule.getLHS()))
.append(" ")
.append(this.symbolTable.getWords(deduction_rule.getEnglish()));
}
s.append("\n");
// save model cost as a seprate line; optional
if (this.storeModelCosts) {
for (int k = 0; k < this.featureFunctions.size(); k++) {
FeatureFunction m = this.featureFunctions.get(k);
s.append(String.format("%.4f ",
null != deduction.get_rule()
? // deductions under goal item do not have rules
HyperGraph
.computeTransition(deduction, m, item.i, item.j)
.getTransitionCost()
: HyperGraph.computeFinalTransition(deduction, m)
))
.append(
k < this.featureFunctions.size() - 1
? " "
: "\n");
}
}
this.writer.write(s.toString());
}
// End save_hyper_graph()
public HyperGraph read_hyper_graph() {
resetStates();
//read first line: SENTENCE_TAG, sent_id, sent_len, num_items, num_deduct
String line = null;
if (null != this.startLine) { // the previous sentence is skipped
line = this.startLine;
this.startLine = null;
} else {
line = FileUtility.read_line_lzf(this.reader);
}
if (! line.startsWith(SENTENCE_TAG)) {
logger.severe("wrong sent tag line: " + line);
System.exit(1);
}
// Test if we should skip this sentence
if (null != this.selectedSentences
&& (! this.selectedSentences.containsKey(
Integer.parseInt(Regex.spaces.split(line)[1]) ))
) {
while ((line = FileUtility.read_line_lzf(this.reader)) != null) {
if (line.startsWith(SENTENCE_TAG)) break;
}
this.startLine = line;
System.out.println("sentence is skipped");
return null;
} else {
String[] fds = Regex.spaces.split(line);
int sentenceID = Integer.parseInt(fds[1]);
int sentenceLength = Integer.parseInt(fds[2]);
int qtyItems = Integer.parseInt(fds[3]);
int qtyDeductions = Integer.parseInt(fds[4]);
System.out.println(
"num_items: " + qtyItems
+ "; num_deducts: " + qtyDeductions);
for (int i = 0; i < qtyItems; i++) readItem();
//TODO check if the file reaches EOF, or if the num_deducts matches
//create hyper graph
HGNode goalItem = this.idToItem.get(qtyItems);
if (null == goalItem) {
logger.severe("no goal item");
System.exit(1);
}
return new HyperGraph(goalItem, qtyItems, qtyDeductions, sentenceID, sentenceLength);
}
}
private HGNode readItem() {
//line: ITEM_TAG itemID i j lhs qtyDeductions ITEM_STATE_TAG item_state
String line = FileUtility.read_line_lzf(this.reader);
String[] fds = line.split(ITEM_STATE_TAG); // TODO: use joshua.util.Regex
if (fds.length != 2) {
logger.severe("wrong item line");
System.exit(1);
}
String[] words = Regex.spaces.split(fds[0]);
int itemID = Integer.parseInt(words[1]);
int i = Integer.parseInt(words[2]);
int j = Integer.parseInt(words[3]);
int lhs = this.symbolTable.addNonterminal(words[4]);
int qtyDeductions = Integer.parseInt(words[5]);
//item state: signature (created from HashMap tbl_states)
HashMap<Integer,FFDPState> dpStates = null;
if (fds[1].compareTo(NULL_ITEM_STATE) != 0) {
// Assume the only stateful feature is lm feature
dpStates = new HashMap<Integer,FFDPState>();
dpStates.put(this.LMFeatureID,
new LMFFDPState(this.symbolTable, fds[1]));
}
ArrayList<HyperEdge> deductions = null;
HyperEdge bestDeduction = null;
double bestCost = Double.POSITIVE_INFINITY;
if (qtyDeductions > 0) {
deductions = new ArrayList<HyperEdge>();
for (int t = 0; t < qtyDeductions; t++) {
HyperEdge deduction = readDeduction();
deductions.add(deduction);
if (deduction.best_cost < bestCost) {
bestCost = deduction.best_cost;
bestDeduction = deduction;
}
}
}
HGNode item = new HGNode(i, j, lhs, deductions, bestDeduction, dpStates);
this.idToItem.put(itemID, item);
return item;
}
// Assumption: has this.associatedGrammar and this.idToItem
private HyperEdge readDeduction() {
//line: flag, best_cost, num_items, item_ids, rule id, OOV-Non-Terminal (optional), OOV (optional)
String line = FileUtility.read_line_lzf(this.reader);
String[] fds = Regex.spaces.split(line);
//best_cost transition_cost num_items item_ids
double bestCost = Double.parseDouble(fds[0]);
ArrayList<HGNode> antecedentItems = null;
final int qtyAntecedents = Integer.parseInt(fds[1]);
if (qtyAntecedents > 0) {
antecedentItems = new ArrayList<HGNode>();
for (int t = 0; t < qtyAntecedents; t++) {
final int itemID = Integer.parseInt(fds[2+t]);
HGNode item = this.idToItem.get(itemID);
if (null == item) {
logger.severe("item is null for id: " + itemID);
System.exit(1);
}
antecedentItems.add(item);
}
}
//rule_id
Rule rule = null;
final int ruleID = Integer.parseInt(fds[2+qtyAntecedents]);
if (ruleID != NULL_RULE_ID) {
if (ruleID != pGrammar.getOOVRuleID()) {
rule = this.associatedGrammar.get(ruleID);
if (null == rule) {
logger.severe("rule is null but id is " + ruleID);
System.exit(1);
}
} else {
//stateless cost is not properly set, so cannot extract individual features during kbest extraction
rule = pGrammar.constructOOVRule(
1,
this.symbolTable.addTerminal(fds[4+qtyAntecedents]),
false);
}
} else {
// Do nothing: goal item has null rule
}
HyperEdge hyperEdge;
//read model costs
if (this.storeModelCosts) {
String[] costs_s =
Regex.spaces.split(FileUtility.read_line_lzf(this.reader));
double[] costs = new double[costs_s.length];
for (int i = 0; i < costs_s.length; i++) {
costs[i] = Double.parseDouble(costs_s[i]);
}
hyperEdge = new WithModelCostsHyperEdge(rule, bestCost, null, antecedentItems, costs);
} else {
hyperEdge = new HyperEdge(rule, bestCost, null, antecedentItems);
}
hyperEdge.get_transition_cost(true); // to set the transition cost
return hyperEdge;
}
// End read_hyper_graph()
public void write_rules_non_parallel(String rulesFile)
throws IOException {
BufferedWriter out =
(null == rulesFile)
? new BufferedWriter(new OutputStreamWriter(System.out))
: FileUtility.getWriteFileStream(rulesFile) ;
logger.info("writing rules");
for (int ruleID : this.associatedGrammar.keySet()) {
writeRule(out, this.associatedGrammar.get(ruleID), ruleID);
}
out.flush();
out.close();
}
// writtenRules: remember what kind of rules have already been saved
public void write_rules_parallel(
BufferedWriter out, HashMap<Integer,Integer> writtenRules
) throws IOException {
logger.info("writing rules in a partition");
for (int ruleID : this.associatedGrammar.keySet()) {
if (! writtenRules.containsKey(ruleID)) {
writtenRules.put(ruleID, 1);
writeRule(out, this.associatedGrammar.get(ruleID), ruleID);
}
}
out.flush();
}
private void writeRule(BufferedWriter out, Rule rule, int ruleID)
throws IOException {
out.write(
ruleID
+ " "
+ this.symbolTable.getWord(rule.getOwner())
+ RULE_TBL_SEP
+ rule.toString(this.symbolTable)
+ "\n");
}
} |
package com.airbnb.lottie;
import android.graphics.Canvas;
import android.graphics.DashPathEffect;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathMeasure;
import android.graphics.RectF;
import android.support.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
abstract class BaseStrokeContent implements DrawingContent, BaseKeyframeAnimation.AnimationListener {
private final PathMeasure pm = new PathMeasure();
private final Path path = new Path();
private final Path trimPathPath = new Path();
private final RectF rect = new RectF();
private final LottieDrawable lottieDrawable;
private final List<PathGroup> pathGroups = new ArrayList<>();
private final float[] dashPatternValues;
final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
private final BaseKeyframeAnimation<?, Float> widthAnimation;
private final BaseKeyframeAnimation<?, Integer> opacityAnimation;
private final List<BaseKeyframeAnimation<?, Float>> dashPatternAnimations;
@Nullable private final BaseKeyframeAnimation<?, Float> dashPatternOffsetAnimation;
BaseStrokeContent(final LottieDrawable lottieDrawable, BaseLayer layer, Paint.Cap cap,
    Paint.Join join, AnimatableIntegerValue opacity, AnimatableFloatValue width,
    List<AnimatableFloatValue> dashPattern, AnimatableFloatValue offset) {
  this.lottieDrawable = lottieDrawable;

  // Configure the stroke paint once; color/shader is set by subclasses.
  paint.setStyle(Paint.Style.STROKE);
  paint.setStrokeCap(cap);
  paint.setStrokeJoin(join);

  // Create the keyframe animations backing each animatable property.
  opacityAnimation = opacity.createAnimation();
  widthAnimation = width.createAnimation();
  dashPatternOffsetAnimation = offset == null ? null : offset.createAnimation();

  dashPatternAnimations = new ArrayList<>(dashPattern.size());
  dashPatternValues = new float[dashPattern.size()];
  for (AnimatableFloatValue dash : dashPattern) {
    dashPatternAnimations.add(dash.createAnimation());
  }

  // Register every animation with the layer so it participates in progress updates.
  layer.addAnimation(opacityAnimation);
  layer.addAnimation(widthAnimation);
  for (BaseKeyframeAnimation<?, Float> dashAnimation : dashPatternAnimations) {
    layer.addAnimation(dashAnimation);
  }
  if (dashPatternOffsetAnimation != null) {
    layer.addAnimation(dashPatternOffsetAnimation);
  }

  // Listen for value changes so the drawable can be invalidated.
  opacityAnimation.addUpdateListener(this);
  widthAnimation.addUpdateListener(this);
  for (BaseKeyframeAnimation<?, Float> dashAnimation : dashPatternAnimations) {
    dashAnimation.addUpdateListener(this);
  }
  if (dashPatternOffsetAnimation != null) {
    dashPatternOffsetAnimation.addUpdateListener(this);
  }
}
// Called whenever any registered keyframe animation changes value;
// schedules a redraw of the owning drawable.
@Override public void onValueChanged() {
lottieDrawable.invalidateSelf();
}
@Override public void setContents(List<Content> contentsBefore, List<Content> contentsAfter) {
TrimPathContent trimPathContentBefore = null;
for (int i = contentsBefore.size() - 1; i >= 0; i
Content content = contentsBefore.get(i);
if (content instanceof TrimPathContent &&
((TrimPathContent) content).getType() == ShapeTrimPath.Type.Individually) {
trimPathContentBefore = (TrimPathContent) content;
}
}
if (trimPathContentBefore != null) {
trimPathContentBefore.addListener(this);
}
PathGroup currentPathGroup = null;
for (int i = contentsAfter.size() - 1; i >= 0; i
Content content = contentsAfter.get(i);
if (content instanceof TrimPathContent &&
((TrimPathContent) content).getType() == ShapeTrimPath.Type.Individually) {
if (currentPathGroup != null) {
pathGroups.add(currentPathGroup);
}
currentPathGroup = new PathGroup((TrimPathContent) content);
((TrimPathContent) content).addListener(this);
} else if (content instanceof PathContent) {
if (currentPathGroup == null) {
currentPathGroup = new PathGroup(trimPathContentBefore);
}
currentPathGroup.paths.add((PathContent) content);
}
}
if (currentPathGroup != null) {
pathGroups.add(currentPathGroup);
}
}
@Override public void draw(Canvas canvas, Matrix parentMatrix, int parentAlpha) {
int alpha = (int) ((parentAlpha / 255f * opacityAnimation.getValue() / 100f) * 255);
paint.setAlpha(alpha);
paint.setStrokeWidth(widthAnimation.getValue() * Utils.getScale(parentMatrix));
if (paint.getStrokeWidth() <= 0) {
// Android draws a hairline stroke for 0, After Effects doesn't.
return;
}
applyDashPatternIfNeeded();
for (int i = 0; i < pathGroups.size(); i++) {
PathGroup pathGroup = pathGroups.get(i);
if (pathGroup.trimPath != null) {
applyTrimPath(canvas, pathGroup, parentMatrix);
} else {
path.reset();
for (int j = pathGroup.paths.size() - 1; j >= 0; j
path.addPath(pathGroup.paths.get(j).getPath(), parentMatrix);
}
canvas.drawPath(path, paint);
}
}
}
private void applyTrimPath(Canvas canvas, PathGroup pathGroup, Matrix parentMatrix) {
if (pathGroup.trimPath == null) {
return;
}
path.reset();
for (int j = pathGroup.paths.size() - 1; j >= 0; j
path.addPath(pathGroup.paths.get(j).getPath(), parentMatrix);
}
pm.setPath(path, false);
float totalLength = pm.getLength();
while (pm.nextContour()) {
totalLength += pm.getLength();
}
float offsetLength = totalLength * pathGroup.trimPath.getOffset().getValue() / 360f;
float startLength =
totalLength * pathGroup.trimPath.getStart().getValue() / 100f + offsetLength;
float endLength =
totalLength * pathGroup.trimPath.getEnd().getValue() / 100f + offsetLength;
float currentLength = 0;
for (int j = pathGroup.paths.size() - 1; j >= 0; j
trimPathPath.set(pathGroup.paths.get(j).getPath());
trimPathPath.transform(parentMatrix);
pm.setPath(trimPathPath, false);
float length = pm.getLength();
if (endLength > totalLength && endLength - totalLength < currentLength + length &&
currentLength < endLength - totalLength) {
// Draw the segment when the end is greater than the length which wraps around to the
// beginning.
float startValue;
if (startLength > totalLength) {
startValue = (startLength - totalLength) / length;
} else {
startValue = 0;
}
float endValue = Math.min((endLength - totalLength) / length, 1);
Utils.applyTrimPathIfNeeded(trimPathPath, startValue, endValue, 0);
canvas.drawPath(trimPathPath, paint);
} else //noinspection StatementWithEmptyBody
if (currentLength + length < startLength || currentLength > endLength) {
// Do nothing
} else if (currentLength + length <= endLength && startLength < currentLength) {
canvas.drawPath(trimPathPath, paint);
} else {
float startValue;
if (startLength < currentLength) {
startValue = 0;
} else {
startValue = (startLength - currentLength) / length;
}
float endValue;
if (endLength > currentLength + length) {
endValue = 1f;
} else {
endValue = (endLength - currentLength) / length;
}
Utils.applyTrimPathIfNeeded(trimPathPath, startValue, endValue, 0);
canvas.drawPath(trimPathPath, paint);
}
currentLength += length;
}
}
/**
 * Computes the stroked bounds of all paths in this content.
 *
 * Bug fix: the inner loop indexed {@code pathGroup.paths.get(i)} with the OUTER group
 * index instead of {@code j}, adding the wrong path (and risking IndexOutOfBounds when a
 * group has more paths than there are groups). It now uses {@code j}.
 */
@Override public void getBounds(RectF outBounds, Matrix parentMatrix) {
  path.reset();
  for (int i = 0; i < pathGroups.size(); i++) {
    PathGroup pathGroup = pathGroups.get(i);
    for (int j = 0; j < pathGroup.paths.size(); j++) {
      path.addPath(pathGroup.paths.get(j).getPath(), parentMatrix);
    }
  }
  path.computeBounds(rect, false);
  // Expand by half the stroke width on each side since the stroke is centered on the path.
  float width = widthAnimation.getValue();
  rect.set(rect.left - width / 2f, rect.top - width / 2f,
      rect.right + width / 2f, rect.bottom + width / 2f);
  outBounds.set(rect);
  // Add padding to account for rounding errors.
  outBounds.set(
      outBounds.left - 1,
      outBounds.top - 1,
      outBounds.right + 1,
      outBounds.bottom + 1
  );
}
/**
 * Installs a DashPathEffect on the paint from the animated dash values, or does nothing
 * when the stroke has no dash pattern. Even-indexed entries are dash lengths, odd-indexed
 * entries are gaps; all values are scaled by the drawable's current scale.
 */
private void applyDashPatternIfNeeded() {
  if (dashPatternAnimations.isEmpty()) {
    return;
  }
  float scale = lottieDrawable.getScale();
  for (int i = 0; i < dashPatternAnimations.size(); i++) {
    dashPatternValues[i] = dashPatternAnimations.get(i).getValue();
    // If the value of the dash pattern or gap is too small, the number of individual sections
    // approaches infinity as the value approaches 0.
    // To mitigate this, we essentially put a minimum value on the dash pattern size of 1px
    // and a minimum gap size of 0.1px (the original comment said 0.01, which did not match
    // the code below).
    if (i % 2 == 0) {
      if (dashPatternValues[i] < 1f) {
        dashPatternValues[i] = 1f;
      }
    } else {
      if (dashPatternValues[i] < 0.1f) {
        dashPatternValues[i] = 0.1f;
      }
    }
    dashPatternValues[i] *= scale;
  }
  float offset = dashPatternOffsetAnimation == null ? 0f : dashPatternOffsetAnimation.getValue();
  paint.setPathEffect(new DashPathEffect(dashPatternValues, offset));
}
/**
 * Data class to help drawing trim paths individually.
 */
private static final class PathGroup {
  // Paths that share the same trim path (or the fallback trim path from contentsBefore).
  private final List<PathContent> paths = new ArrayList<>();
  // Null when the group is drawn untrimmed.
  @Nullable private final TrimPathContent trimPath;

  private PathGroup(@Nullable TrimPathContent trimPath) {
    this.trimPath = trimPath;
  }
}
} |
package com.airbnb.lottie;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Rect;
import android.os.AsyncTask;
import android.support.annotation.Nullable;
import android.support.annotation.RawRes;
import android.support.annotation.RestrictTo;
import android.support.v4.util.LongSparseArray;
import android.support.v4.util.SparseArrayCompat;
import android.util.Log;
import com.airbnb.lottie.model.FileCompositionLoader;
import com.airbnb.lottie.model.Font;
import com.airbnb.lottie.model.FontCharacter;
import com.airbnb.lottie.model.JsonCompositionLoader;
import com.airbnb.lottie.model.layer.Layer;
import com.airbnb.lottie.utils.Utils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static com.airbnb.lottie.utils.Utils.closeQuietly;
/**
* After Effects/Bodymovin composition model. This is the serialized model from which the
* animation will be created.
* It can be used with a {@link com.airbnb.lottie.LottieAnimationView} or
* {@link com.airbnb.lottie.LottieDrawable}.
*/
public class LottieComposition {
private final Map<String, List<Layer>> precomps = new HashMap<>();
private final Map<String, LottieImageAsset> images = new HashMap<>();
/** Map of font names to fonts */
private final Map<String, Font> fonts = new HashMap<>();
private final SparseArrayCompat<FontCharacter> characters = new SparseArrayCompat<>();
private final LongSparseArray<Layer> layerMap = new LongSparseArray<>();
private final List<Layer> layers = new ArrayList<>();
// This is stored as a set to avoid duplicates.
private final HashSet<String> warnings = new HashSet<>();
private final PerformanceTracker performanceTracker = new PerformanceTracker();
private final Rect bounds;
private final long startFrame;
private final long endFrame;
private final float frameRate;
private final float dpScale;
/* Bodymovin version */
private final int majorVersion;
private final int minorVersion;
private final int patchVersion;
private LottieComposition(Rect bounds, long startFrame, long endFrame, float frameRate,
float dpScale, int major, int minor, int patch) {
this.bounds = bounds;
this.startFrame = startFrame;
this.endFrame = endFrame;
this.frameRate = frameRate;
this.dpScale = dpScale;
this.majorVersion = major;
this.minorVersion = minor;
this.patchVersion = patch;
if (!Utils.isAtLeastVersion(this, 4, 5, 0)) {
addWarning("Lottie only supports bodymovin >= 4.5.0");
}
}
@RestrictTo(RestrictTo.Scope.LIBRARY)
public void addWarning(String warning) {
Log.w(L.TAG, warning);
warnings.add(warning);
}
public ArrayList<String> getWarnings() {
return new ArrayList<>(Arrays.asList(warnings.toArray(new String[warnings.size()])));
}
public void setPerformanceTrackingEnabled(boolean enabled) {
performanceTracker.setEnabled(enabled);
}
public PerformanceTracker getPerformanceTracker() {
return performanceTracker;
}
@RestrictTo(RestrictTo.Scope.LIBRARY)
public Layer layerModelForId(long id) {
return layerMap.get(id);
}
@SuppressWarnings("WeakerAccess") public Rect getBounds() {
return bounds;
}
@SuppressWarnings("WeakerAccess") public long getDuration() {
long frameDuration = endFrame - startFrame;
return (long) (frameDuration / frameRate * 1000);
}
@RestrictTo(RestrictTo.Scope.LIBRARY)
public int getMajorVersion() {
return majorVersion;
}
@RestrictTo(RestrictTo.Scope.LIBRARY)
public int getMinorVersion() {
return minorVersion;
}
@RestrictTo(RestrictTo.Scope.LIBRARY)
public int getPatchVersion() {
return patchVersion;
}
@RestrictTo(RestrictTo.Scope.LIBRARY)
public long getStartFrame() {
return startFrame;
}
@RestrictTo(RestrictTo.Scope.LIBRARY)
public long getEndFrame() {
return endFrame;
}
public List<Layer> getLayers() {
return layers;
}
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Nullable
public List<Layer> getPrecomps(String id) {
return precomps.get(id);
}
public SparseArrayCompat<FontCharacter> getCharacters() {
return characters;
}
public Map<String, Font> getFonts() {
return fonts;
}
public boolean hasImages() {
return !images.isEmpty();
}
@SuppressWarnings("WeakerAccess") public Map<String, LottieImageAsset> getImages() {
return images;
}
public float getDurationFrames() {
return getDuration() * frameRate / 1000f;
}
public float getDpScale() {
return dpScale;
}
@Override public String toString() {
final StringBuilder sb = new StringBuilder("LottieComposition:\n");
for (Layer layer : layers) {
sb.append(layer.toString("\t"));
}
return sb.toString();
}
public static class Factory {
private Factory() {
}
/**
* Loads a composition from a file stored in /assets.
*/
public static Cancellable fromAssetFileName(Context context, String fileName,
OnCompositionLoadedListener loadedListener) {
InputStream stream;
try {
stream = context.getAssets().open(fileName);
} catch (IOException e) {
throw new IllegalStateException("Unable to find file " + fileName, e);
}
return fromInputStream(context, stream, loadedListener);
}
/**
* Loads a composition from a file stored in res/raw.
*/
public static Cancellable fromRawFile(Context context, @RawRes int resId,
OnCompositionLoadedListener loadedListener) {
return fromInputStream(context, context.getResources().openRawResource(resId), loadedListener);
}
/**
* Loads a composition from an arbitrary input stream.
* <p>
* ex: fromInputStream(context, new FileInputStream(filePath), (composition) -> {});
*/
public static Cancellable fromInputStream(Context context, InputStream stream,
OnCompositionLoadedListener loadedListener) {
FileCompositionLoader loader =
new FileCompositionLoader(context.getResources(), loadedListener);
loader.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, stream);
return loader;
}
@SuppressWarnings("WeakerAccess")
public static LottieComposition fromFileSync(Context context, String fileName) {
InputStream stream;
try {
stream = context.getAssets().open(fileName);
} catch (IOException e) {
throw new IllegalStateException("Unable to find file " + fileName, e);
}
return fromInputStream(context.getResources(), stream);
}
/**
* Loads a composition from a raw json object. This is useful for animations loaded from the
* network.
*/
public static Cancellable fromJson(Resources res, JSONObject json,
OnCompositionLoadedListener loadedListener) {
JsonCompositionLoader loader = new JsonCompositionLoader(res, loadedListener);
loader.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, json);
return loader;
}
@Nullable
public static LottieComposition fromInputStream(Resources res, InputStream stream) {
try {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(stream));
StringBuilder total = new StringBuilder();
String line;
while ((line = bufferedReader.readLine()) != null) {
total.append(line);
}
JSONObject jsonObject = new JSONObject(total.toString());
return fromJsonSync(res, jsonObject);
} catch (IOException e) {
Log.e(L.TAG, "Failed to load composition.",
new IllegalStateException("Unable to find file.", e));
} catch (JSONException e) {
Log.e(L.TAG, "Failed to load composition.",
new IllegalStateException("Unable to load JSON.", e));
} finally {
closeQuietly(stream);
}
return null;
}
public static LottieComposition fromJsonSync(Resources res, JSONObject json) {
Rect bounds = null;
float scale = res.getDisplayMetrics().density;
int width = json.optInt("w", -1);
int height = json.optInt("h", -1);
if (width != -1 && height != -1) {
int scaledWidth = (int) (width * scale);
int scaledHeight = (int) (height * scale);
bounds = new Rect(0, 0, scaledWidth, scaledHeight);
}
long startFrame = json.optLong("ip", 0);
long endFrame = json.optLong("op", 0);
float frameRate = (float) json.optDouble("fr", 0);
String version = json.optString("v");
String[] versions = version.split("[.]");
int major = Integer.parseInt(versions[0]);
int minor = Integer.parseInt(versions[1]);
int patch = Integer.parseInt(versions[2]);
LottieComposition composition = new LottieComposition(
bounds, startFrame, endFrame, frameRate, scale, major, minor, patch);
JSONArray assetsJson = json.optJSONArray("assets");
parseImages(assetsJson, composition);
parsePrecomps(assetsJson, composition);
parseFonts(json.optJSONObject("fonts"), composition);
parseChars(json.optJSONArray("chars"), composition);
parseLayers(json, composition);
return composition;
}
private static void parseLayers(JSONObject json, LottieComposition composition) {
JSONArray jsonLayers = json.optJSONArray("layers");
// This should never be null. Bodymovin always exports at least an empty array.
// However, it seems as if the unmarshalling from the React Native library sometimes
// causes this to be null. The proper fix should be done there but this will prevent a crash.
if (jsonLayers == null) {
return;
}
int length = jsonLayers.length();
int imageCount = 0;
for (int i = 0; i < length; i++) {
Layer layer = Layer.Factory.newInstance(jsonLayers.optJSONObject(i), composition);
if (layer.getLayerType() == Layer.LayerType.Image) {
imageCount++;
}
addLayer(composition.layers, composition.layerMap, layer);
}
if (imageCount > 4) {
composition.addWarning("You have " + imageCount + " images. Lottie should primarily be " +
"used with shapes. If you are using Adobe Illustrator, convert the Illustrator layers" +
" to shape layers.");
}
}
private static void parsePrecomps(
@Nullable JSONArray assetsJson, LottieComposition composition) {
if (assetsJson == null) {
return;
}
int length = assetsJson.length();
for (int i = 0; i < length; i++) {
JSONObject assetJson = assetsJson.optJSONObject(i);
JSONArray layersJson = assetJson.optJSONArray("layers");
if (layersJson == null) {
continue;
}
List<Layer> layers = new ArrayList<>(layersJson.length());
LongSparseArray<Layer> layerMap = new LongSparseArray<>();
for (int j = 0; j < layersJson.length(); j++) {
Layer layer = Layer.Factory.newInstance(layersJson.optJSONObject(j), composition);
layerMap.put(layer.getId(), layer);
layers.add(layer);
}
String id = assetJson.optString("id");
composition.precomps.put(id, layers);
}
}
private static void parseImages(
@Nullable JSONArray assetsJson, LottieComposition composition) {
if (assetsJson == null) {
return;
}
int length = assetsJson.length();
for (int i = 0; i < length; i++) {
JSONObject assetJson = assetsJson.optJSONObject(i);
if (!assetJson.has("p")) {
continue;
}
LottieImageAsset image = LottieImageAsset.Factory.newInstance(assetJson);
composition.images.put(image.getId(), image);
}
}
private static void parseFonts(@Nullable JSONObject fonts, LottieComposition composition) {
if (fonts == null) {
return;
}
JSONArray fontsList = fonts.optJSONArray("list");
if (fontsList == null) {
return;
}
int length = fontsList.length();
for (int i = 0; i < length; i++) {
Font font = Font.Factory.newInstance(fontsList.optJSONObject(i));
composition.fonts.put(font.getName(), font);
}
}
private static void parseChars(@Nullable JSONArray charsJson, LottieComposition composition) {
if (charsJson == null) {
return;
}
int length = charsJson.length();
for (int i = 0; i < length; i++) {
FontCharacter character =
FontCharacter.Factory.newInstance(charsJson.optJSONObject(i), composition);
composition.characters.put(character.hashCode(), character);
}
}
private static void addLayer(List<Layer> layers, LongSparseArray<Layer> layerMap, Layer layer) {
layers.add(layer);
layerMap.put(layer.getId(), layer);
}
}
} |
package org.jfree.data;
import java.io.Serializable;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.jfree.chart.util.ParamChecks;
import org.jfree.util.ObjectUtilities;
import org.jfree.util.PublicCloneable;
/**
* A data structure that stores zero, one or many values, where each value
* is associated with two keys (a 'row' key and a 'column' key). The keys
* should be (a) instances of {@link Comparable} and (b) immutable.
*/
public class DefaultKeyedValues2D implements KeyedValues2D, PublicCloneable,
Cloneable, Serializable {
/** For serialization. */
private static final long serialVersionUID = -5514169970951994748L;
/** The row keys. */
private List rowKeys;
/** The column keys. */
private List columnKeys;
/** The row data. */
private List rows;
/** If the row keys should be sorted by their comparable order. */
private boolean sortRowKeys;
/**
* Creates a new instance (initially empty).
*/
public DefaultKeyedValues2D() {
this(false);
}
/**
* Creates a new instance (initially empty).
*
* @param sortRowKeys if the row keys should be sorted.
*/
public DefaultKeyedValues2D(boolean sortRowKeys) {
this.rowKeys = new java.util.ArrayList();
this.columnKeys = new java.util.ArrayList();
this.rows = new java.util.ArrayList();
this.sortRowKeys = sortRowKeys;
}
/**
* Returns the row count.
*
* @return The row count.
*
* @see #getColumnCount()
*/
@Override
public int getRowCount() {
return this.rowKeys.size();
}
/**
* Returns the column count.
*
* @return The column count.
*
* @see #getRowCount()
*/
@Override
public int getColumnCount() {
return this.columnKeys.size();
}
/**
* Returns the value for a given row and column.
*
* @param row the row index.
* @param column the column index.
*
* @return The value.
*
* @see #getValue(Comparable, Comparable)
*/
@Override
public Number getValue(int row, int column) {
Number result = null;
DefaultKeyedValues rowData = (DefaultKeyedValues) this.rows.get(row);
if (rowData != null) {
Comparable columnKey = (Comparable) this.columnKeys.get(column);
// the row may not have an entry for this key, in which case the
// return value is null
int index = rowData.getIndex(columnKey);
if (index >= 0) {
result = rowData.getValue(index);
}
}
return result;
}
/**
* Returns the key for a given row.
*
* @param row the row index (in the range 0 to {@link #getRowCount()} - 1).
*
* @return The row key.
*
* @see #getRowIndex(Comparable)
* @see #getColumnKey(int)
*/
@Override
public Comparable getRowKey(int row) {
return (Comparable) this.rowKeys.get(row);
}
/**
* Returns the row index for a given key.
*
* @param key the key (<code>null</code> not permitted).
*
* @return The row index.
*
* @see #getRowKey(int)
* @see #getColumnIndex(Comparable)
*/
@Override
public int getRowIndex(Comparable key) {
ParamChecks.nullNotPermitted(key, "key");
if (this.sortRowKeys) {
return Collections.binarySearch(this.rowKeys, key);
}
else {
return this.rowKeys.indexOf(key);
}
}
/**
* Returns the row keys in an unmodifiable list.
*
* @return The row keys.
*
* @see #getColumnKeys()
*/
@Override
public List getRowKeys() {
return Collections.unmodifiableList(this.rowKeys);
}
/**
* Returns the key for a given column.
*
* @param column the column (in the range 0 to {@link #getColumnCount()}
* - 1).
*
* @return The key.
*
* @see #getColumnIndex(Comparable)
* @see #getRowKey(int)
*/
@Override
public Comparable getColumnKey(int column) {
return (Comparable) this.columnKeys.get(column);
}
/**
* Returns the column index for a given key.
*
* @param key the key (<code>null</code> not permitted).
*
* @return The column index.
*
* @see #getColumnKey(int)
* @see #getRowIndex(Comparable)
*/
@Override
public int getColumnIndex(Comparable key) {
ParamChecks.nullNotPermitted(key, "key");
return this.columnKeys.indexOf(key);
}
/**
* Returns the column keys in an unmodifiable list.
*
* @return The column keys.
*
* @see #getRowKeys()
*/
@Override
public List getColumnKeys() {
return Collections.unmodifiableList(this.columnKeys);
}
/**
* Returns the value for the given row and column keys. This method will
* throw an {@link UnknownKeyException} if either key is not defined in the
* data structure.
*
* @param rowKey the row key (<code>null</code> not permitted).
* @param columnKey the column key (<code>null</code> not permitted).
*
* @return The value (possibly <code>null</code>).
*
* @see #addValue(Number, Comparable, Comparable)
* @see #removeValue(Comparable, Comparable)
*/
@Override
public Number getValue(Comparable rowKey, Comparable columnKey) {
ParamChecks.nullNotPermitted(rowKey, "rowKey");
ParamChecks.nullNotPermitted(columnKey, "columnKey");
// check that the column key is defined in the 2D structure
if (!(this.columnKeys.contains(columnKey))) {
throw new UnknownKeyException("Unrecognised columnKey: "
+ columnKey);
}
// now fetch the row data - need to bear in mind that the row
// structure may not have an entry for the column key, but that we
// have already checked that the key is valid for the 2D structure
int row = getRowIndex(rowKey);
if (row >= 0) {
DefaultKeyedValues rowData
= (DefaultKeyedValues) this.rows.get(row);
int col = rowData.getIndex(columnKey);
return (col >= 0 ? rowData.getValue(col) : null);
}
else {
throw new UnknownKeyException("Unrecognised rowKey: " + rowKey);
}
}
/**
* Adds a value to the table. Performs the same function as
* #setValue(Number, Comparable, Comparable).
*
* @param value the value (<code>null</code> permitted).
* @param rowKey the row key (<code>null</code> not permitted).
* @param columnKey the column key (<code>null</code> not permitted).
*
* @see #setValue(Number, Comparable, Comparable)
* @see #removeValue(Comparable, Comparable)
*/
public void addValue(Number value, Comparable rowKey,
Comparable columnKey) {
// defer argument checking
setValue(value, rowKey, columnKey);
}
/**
* Adds or updates a value.
*
* @param value the value (<code>null</code> permitted).
* @param rowKey the row key (<code>null</code> not permitted).
* @param columnKey the column key (<code>null</code> not permitted).
*
* @see #addValue(Number, Comparable, Comparable)
* @see #removeValue(Comparable, Comparable)
*/
public void setValue(Number value, Comparable rowKey,
Comparable columnKey) {
DefaultKeyedValues row;
int rowIndex = getRowIndex(rowKey);
if (rowIndex >= 0) {
row = (DefaultKeyedValues) this.rows.get(rowIndex);
}
else {
row = new DefaultKeyedValues();
if (this.sortRowKeys) {
rowIndex = -rowIndex - 1;
this.rowKeys.add(rowIndex, rowKey);
this.rows.add(rowIndex, row);
}
else {
this.rowKeys.add(rowKey);
this.rows.add(row);
}
}
row.setValue(columnKey, value);
int columnIndex = this.columnKeys.indexOf(columnKey);
if (columnIndex < 0) {
this.columnKeys.add(columnKey);
}
}
/**
* Removes a value from the table by setting it to <code>null</code>. If
* all the values in the specified row and/or column are now
* <code>null</code>, the row and/or column is removed from the table.
*
* @param rowKey the row key (<code>null</code> not permitted).
* @param columnKey the column key (<code>null</code> not permitted).
*
* @see #addValue(Number, Comparable, Comparable)
*/
public void removeValue(Comparable rowKey, Comparable columnKey) {
setValue(null, rowKey, columnKey);
// 1. check whether the row is now empty.
boolean allNull = true;
int rowIndex = getRowIndex(rowKey);
DefaultKeyedValues row = (DefaultKeyedValues) this.rows.get(rowIndex);
for (int item = 0, itemCount = row.getItemCount(); item < itemCount;
item++) {
if (row.getValue(item) != null) {
allNull = false;
break;
}
}
if (allNull) {
this.rowKeys.remove(rowIndex);
this.rows.remove(rowIndex);
}
// 2. check whether the column is now empty.
allNull = true;
//int columnIndex = getColumnIndex(columnKey);
for (int item = 0, itemCount = this.rows.size(); item < itemCount;
item++) {
row = (DefaultKeyedValues) this.rows.get(item);
int columnIndex = row.getIndex(columnKey);
if (columnIndex >= 0 && row.getValue(columnIndex) != null) {
allNull = false;
break;
}
}
if (allNull) {
for (int item = 0, itemCount = this.rows.size(); item < itemCount;
item++) {
row = (DefaultKeyedValues) this.rows.get(item);
int columnIndex = row.getIndex(columnKey);
if (columnIndex >= 0) {
row.removeValue(columnIndex);
}
}
this.columnKeys.remove(columnKey);
}
}
/**
* Removes a row.
*
* @param rowIndex the row index.
*
* @see #removeRow(Comparable)
* @see #removeColumn(int)
*/
public void removeRow(int rowIndex) {
this.rowKeys.remove(rowIndex);
this.rows.remove(rowIndex);
}
/**
* Removes a row from the table.
*
* @param rowKey the row key (<code>null</code> not permitted).
*
* @see #removeRow(int)
* @see #removeColumn(Comparable)
*
* @throws UnknownKeyException if <code>rowKey</code> is not defined in the
* table.
*/
public void removeRow(Comparable rowKey) {
ParamChecks.nullNotPermitted(rowKey, "rowKey");
int index = getRowIndex(rowKey);
if (index >= 0) {
removeRow(index);
}
else {
throw new UnknownKeyException("Unknown key: " + rowKey);
}
}
/**
* Removes a column.
*
* @param columnIndex the column index.
*
* @see #removeColumn(Comparable)
* @see #removeRow(int)
*/
public void removeColumn(int columnIndex) {
Comparable columnKey = getColumnKey(columnIndex);
removeColumn(columnKey);
}
public void removeColumn(Comparable columnKey) {
ParamChecks.nullNotPermitted(columnKey, "columnKey");
if (!this.columnKeys.contains(columnKey)) {
throw new UnknownKeyException("Unknown key: " + columnKey);
}
Iterator iterator = this.rows.iterator();
while (iterator.hasNext()) {
DefaultKeyedValues rowData = (DefaultKeyedValues) iterator.next();
int index = rowData.getIndex(columnKey);
if (index >= 0) {
rowData.removeValue(columnKey);
}
}
this.columnKeys.remove(columnKey);
}
/**
* Clears all the data and associated keys.
*/
public void clear() {
this.rowKeys.clear();
this.columnKeys.clear();
this.rows.clear();
}
/**
* Tests if this object is equal to another.
*
* @param o the other object (<code>null</code> permitted).
*
* @return A boolean.
*/
public boolean equals(Object o) {
if (o == null) {
return false;
}
if (o == this) {
return true;
}
if (!(o instanceof KeyedValues2D)) {
return false;
}
KeyedValues2D kv2D = (KeyedValues2D) o;
if (!getRowKeys().equals(kv2D.getRowKeys())) {
return false;
}
if (!getColumnKeys().equals(kv2D.getColumnKeys())) {
return false;
}
int rowCount = getRowCount();
if (rowCount != kv2D.getRowCount()) {
return false;
}
int colCount = getColumnCount();
if (colCount != kv2D.getColumnCount()) {
return false;
}
for (int r = 0; r < rowCount; r++) {
for (int c = 0; c < colCount; c++) {
Number v1 = getValue(r, c);
Number v2 = kv2D.getValue(r, c);
if (v1 == null) {
if (v2 != null) {
return false;
}
}
else {
if (!v1.equals(v2)) {
return false;
}
}
}
}
return true;
}
/**
* Returns a hash code.
*
* @return A hash code.
*/
public int hashCode() {
int result;
result = this.rowKeys.hashCode();
result = 29 * result + this.columnKeys.hashCode();
result = 29 * result + this.rows.hashCode();
return result;
}
/**
* Returns a clone.
*
* @return A clone.
*
* @throws CloneNotSupportedException this class will not throw this
* exception, but subclasses (if any) might.
*/
public Object clone() throws CloneNotSupportedException {
DefaultKeyedValues2D clone = (DefaultKeyedValues2D) super.clone();
// for the keys, a shallow copy should be fine because keys
// should be immutable...
clone.columnKeys = new java.util.ArrayList(this.columnKeys);
clone.rowKeys = new java.util.ArrayList(this.rowKeys);
// but the row data requires a deep copy
clone.rows = (List) ObjectUtilities.deepClone(this.rows);
return clone;
}
} |
package com.zyeeda.framework.ws;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import com.zyeeda.framework.entities.User;
import com.zyeeda.framework.ldap.LdapService;
import com.zyeeda.framework.managers.UserPersistException;
import com.zyeeda.framework.managers.internal.LdapUserManager;
import com.zyeeda.framework.sync.UserSyncService;
import com.zyeeda.framework.viewmodels.UserVo;
import com.zyeeda.framework.ws.base.ResourceService;
@Path("/users")
public class UserService extends ResourceService {
// private static final Logger logger = LoggerFactory.getLogger(LdapDepartmentManager.class);
/**
 * Creates the resource, passing the servlet context to the base class, which
 * uses it to look up framework services (LDAP, user sync, ...).
 *
 * @param ctx the current servlet context
 */
public UserService(@Context ServletContext ctx) {
    super(ctx);
}
/**
 * Creates a new user under the given parent department and synchronizes the
 * result to the downstream user store.
 *
 * Fix: the duplicate-id case previously threw {@code new RuntimeException("")}
 * with an empty message, making failures impossible to diagnose.
 *
 * @param user   the user bound from the submitted form
 * @param parent the parent department path from the URL
 * @return the persisted user, re-read from LDAP
 * @throws UserPersistException if the LDAP operation fails
 */
@POST
@Path("/{parent:.*}")
@Produces("application/json")
public User createUser(@FormParam("") User user, @PathParam("parent") String parent) throws UserPersistException {
    LdapService ldapSvc = this.getLdapService();
    UserSyncService userSyncService = this.getUserSynchService();
    LdapUserManager userMgr = new LdapUserManager(ldapSvc);
    if (userMgr.findById(user.getId()) != null) {
        throw new RuntimeException("User already exists: " + user.getId());
    }
    user.setDepartmentName(parent);
    userMgr.persist(user);
    // Re-read so generated/normalized attributes are present before syncing.
    user = userMgr.findById(user.getId());
    userSyncService.persist(user);
    return user;
}
/**
 * Deletes the user with the given id from LDAP.
 *
 * @param id the user id from the URL path
 * @throws UserPersistException if the LDAP operation fails
 */
@DELETE
@Path("/{id}")
public void removeUser(@PathParam("id") String id) throws UserPersistException {
    // Delegate straight to the LDAP-backed manager.
    new LdapUserManager(this.getLdapService()).remove(id);
}
/**
 * Updates an existing user and synchronizes the change downstream.
 *
 * Fix: the id-mismatch case previously threw {@code new RuntimeException("")}
 * with an empty message, making failures impossible to diagnose.
 *
 * @param user the user bound from the submitted form
 * @param id   the full DN-style path of the user (e.g. "uid=jdoe,ou=dept,...")
 * @return the updated user, re-read from LDAP
 * @throws UserPersistException if the LDAP operation fails
 */
@PUT
@Path("/{id}")
@Produces("application/json")
public User editUser(@FormParam("") User user, @PathParam("id") String id) throws UserPersistException {
    LdapService ldapSvc = this.getLdapService();
    UserSyncService userSyncService = this.getUserSynchService();
    LdapUserManager userMgr = new LdapUserManager(ldapSvc);
    user.setDeptFullPath(id);
    // Extract the uid between the first '=' and the first ',' of the DN.
    String uid = user.getDeptFullPath().substring(user.getDeptFullPath().indexOf("=") + 1,
            user.getDeptFullPath().indexOf(","));
    if (!uid.equals(user.getId())) {
        throw new RuntimeException("User id mismatch: path uid '" + uid
                + "' does not match form user id '" + user.getId() + "'");
    }
    userMgr.update(user);
    user = userMgr.findById(user.getId());
    userSyncService.update(user);
    return user;
}
/**
 * Looks up a single user by the DN-style id in the URL.
 *
 * @param id a DN fragment such as "uid=jdoe,ou=dept,..."
 * @return the matching user, or null if none exists
 * @throws UserPersistException if the LDAP lookup fails
 */
@GET
@Path("/{id}")
@Produces("application/json")
public User getUserById(@PathParam("id") String id) throws UserPersistException {
    // The uid sits between the first '=' and the first ',' of the DN.
    String uid = id.substring(id.indexOf("=") + 1, id.indexOf(","));
    LdapUserManager userMgr = new LdapUserManager(this.getLdapService());
    return userMgr.findById(uid);
}
/**
 * Searches users by name and converts the matches to view models.
 *
 * @param name the (partial) name to search for
 * @return the matching users as view objects
 * @throws UserPersistException if the LDAP search fails
 */
@GET
@Path("/search/{name}")
@Produces("application/json")
public List<UserVo> getUserListByName(@PathParam("name") String name) throws UserPersistException {
    LdapUserManager userMgr = new LdapUserManager(this.getLdapService());
    return fillUserListPropertiesToVo(userMgr.findByName(name));
}
/** Lists the users of one department, converted to view objects. */
@GET
@Path("/userList/{deptId}")
@Produces("application/json")
public List<UserVo> getUserListByDepartmentId(@PathParam("deptId") String deptId) throws UserPersistException {
    LdapUserManager manager = new LdapUserManager(getLdapService());
    List<User> found = manager.findByDepartmentId(deptId);
    return UserService.fillUserListPropertiesToVo(found);
}
/**
 * Changes a user's password after verifying the old one.
 * Stored passwords carry an "{MD5}" prefix (see the comparison below).
 * A new password identical to the old one is silently ignored — behavior
 * kept from the original implementation.
 *
 * @throws RuntimeException if the old password does not match
 */
@PUT
@Path("/{id}/update_password")
@Produces("application/json")
public User updatePassword(@PathParam("id") String id, @FormParam("oldPassword") String oldPassword,
        @FormParam("newPassword") String newPassword) throws UserPersistException {
    LdapService ldapSvc = this.getLdapService();
    LdapUserManager userMgr = new LdapUserManager(ldapSvc);
    // The path parameter embeds the uid between the first '=' and the first ','.
    String uid = id.substring(id.indexOf("=") + 1, id.indexOf(","));
    User u = userMgr.findById(uid);
    if (!("{MD5}" + oldPassword).equals(u.getPassword())) {
        // Was thrown with an empty message, which made failures undiagnosable.
        throw new RuntimeException("Old password does not match for user " + uid);
    }
    if (!newPassword.equals(oldPassword)) {
        userMgr.updatePassword(id, newPassword);
    }
    return userMgr.findById(uid);
}
/** Marks a user enabled in LDAP and in the synchronization store. */
@PUT
@Path("/{id}/enable")
@Produces("application/json")
public User enable(@PathParam("id") String id, @FormParam("status") Boolean visible)
        throws UserPersistException {
    // NOTE(review): the "visible" form parameter is accepted but never used — confirm intent.
    LdapUserManager manager = new LdapUserManager(getLdapService());
    manager.enable(id);
    getUserSynchService().enable(id);
    return manager.findById(id.substring(id.indexOf("=") + 1, id.indexOf(",")));
}
/** Marks a user disabled in LDAP and in the synchronization store. */
@PUT
@Path("/{id}/unenable")
@Produces("application/json")
public User disable(@PathParam("id") String id, @FormParam("status") Boolean visible)
        throws UserPersistException {
    // NOTE(review): the "visible" form parameter is accepted but never used — confirm intent.
    LdapUserManager manager = new LdapUserManager(getLdapService());
    manager.disable(id);
    getUserSynchService().disable(id);
    return manager.findById(id.substring(id.indexOf("=") + 1, id.indexOf(",")));
}
/**
 * Reads the raw request body (intended as a user photo upload).
 *
 * NOTE(review): the "id" path parameter is ignored and a hard-coded id is
 * updated; photo persistence itself is commented out. This looks like stub
 * or test code — confirm before release.
 */
@POST
@Path("/{id}")
@Produces("application/json")
public void uploadPhoto(@Context HttpServletRequest request, @PathParam("id") String id) throws Throwable {
    // The request stream was previously never closed (resource leak).
    InputStream in = request.getInputStream();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        byte[] b = new byte[1024];
        int len;
        while ((len = in.read(b, 0, 1024)) != -1) {
            baos.write(b, 0, len);
        }
        baos.flush();
    } finally {
        in.close();
    }
    // byte[] bytes = baos.toByteArray();
    LdapService ldapSvc = this.getLdapService();
    LdapUserManager userMgr = new LdapUserManager(ldapSvc);
    User user = new User();
    user.setId("china");
    // user.setPhoto(bytes);
    userMgr.update(user);
}
// private User setVisible(String id, Boolean visible) throws UserPersistException {
// LdapService ldapSvc = this.getLdapService();
// UserSyncService userSyncService = this.getUserSynchService();
// LdapUserManager userMgr = new LdapUserManager(ldapSvc);
// userMgr.setVisible(visible, id);
// userSyncService.enable(id);
// return userMgr.findById(id.substring(id.indexOf("=") + 1, id.indexOf(",")));
/**
 * Converts a User into its view-object form using the default node type "io".
 *
 * @param user source user; must not be null
 * @return a populated UserVo
 */
public static UserVo fillUserPropertiesToVo(User user) {
    // Delegate to the type-aware overload so the two conversions stay in sync
    // (the previous copy duplicated every setter call).
    return fillUserPropertiesToVo(user, "io");
}
/**
 * Converts a User into its view-object form.
 *
 * @param user source user; must not be null
 * @param type node type to assign to the VO
 * @return a populated UserVo
 */
public static UserVo fillUserPropertiesToVo(User user, String type) {
    final UserVo vo = new UserVo();
    vo.setId(user.getId());
    vo.setType(type);
    vo.setLabel(user.getId());
    vo.setCheckName(user.getId());
    vo.setLeaf(true);
    vo.setUid(user.getId());
    vo.setDeptFullPath(user.getDeptFullPath());
    vo.setKind("user");
    return vo;
}
// public static UserVo fillUserPropertiesToVo(User user, String type) {
// UserVo userVo = new UserVo();
// userVo.setId(user.getId());
// userVo.setType(type);
// userVo.setLabel("<a>" + user.getId() + "<a>");
// userVo.setCheckName(user.getId());
// userVo.setLeaf(true);
// userVo.setUid(user.getId());
// userVo.setDeptFullPath(user.getDeptFullPath());
// userVo.setKind("user");
// return userVo;
/** Converts a list of users to view objects, preserving order. */
public static List<UserVo> fillUserListPropertiesToVo(List<User> userList) {
    List<UserVo> result = new ArrayList<UserVo>(userList.size());
    for (User user : userList) {
        result.add(UserService.fillUserPropertiesToVo(user));
    }
    return result;
}
/** Converts a list of users to view objects with the given node type, preserving order. */
public static List<UserVo> fillUserListPropertiesToVo(List<User> userList, String type) {
    List<UserVo> result = new ArrayList<UserVo>(userList.size());
    for (User user : userList) {
        result.add(UserService.fillUserPropertiesToVo(user, type));
    }
    return result;
}
} |
package net.oauth.client;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
/** A decorator that retains a copy of the first few bytes of data. */
public class ExcerptInputStream extends FilterInputStream
{
    /**
     * A marker that's appended to the excerpt if it's less than the complete
     * stream.
     */
    public static final byte[] ELLIPSIS = " ...".getBytes();

    public ExcerptInputStream(InputStream in)
    {
        super(in);
    }

    /** Maximum number of data bytes retained in the excerpt. */
    private static final int LIMIT = 1024;

    /** Holds up to LIMIT data bytes, with room for the trailing ELLIPSIS marker. */
    private byte[] excerpt = new byte[LIMIT + ELLIPSIS.length];

    /** Bytes received from in (== excerpt.length once ELLIPSIS has been appended). */
    private int taken = 0;

    /**
     * Replay cursor into excerpt for read-ahead bytes not yet delivered to
     * callers, or Integer.MAX_VALUE when callers are fully caught up.
     */
    private int given = Integer.MAX_VALUE;

    @Override
    public void close() throws IOException
    {
        super.close();
        trimExcerpt();
    }

    /** The first few bytes of data, plus ELLIPSIS if there are more bytes. */
    public byte[] getExcerpt() throws IOException
    {
        if (taken < excerpt.length) {
            // Remember the caller's current position, read ahead to fill the
            // excerpt, then restore the position so callers still see every byte.
            final int mark = Math.min(given, taken);
            given = Integer.MAX_VALUE;
            while (taken < excerpt.length) {
                read(excerpt, taken, LIMIT - taken);
            }
            given = mark;
        }
        return excerpt;
    }

    @Override
    public int read(byte[] b, int offset, int length) throws IOException
    {
        // Replay phase: getExcerpt() may have read ahead, so deliver buffered
        // bytes first. Only the first min(taken, LIMIT) bytes of the buffer
        // are real data; the tail may hold the ELLIPSIS marker, which must
        // never be handed to callers. (The previous version replayed up to
        // 'taken', injecting " ..." into the stream content — a corruption bug.)
        final int dataEnd = Math.min(taken, LIMIT);
        if (given < dataEnd) {
            final int e = Math.min(length, dataEnd - given);
            if (e > 0) {
                System.arraycopy(excerpt, given, b, offset, e);
                given += e;
                if (given >= dataEnd) {
                    given = Integer.MAX_VALUE; // replay finished
                }
            }
            return e;
        }
        final int r = super.read(b, offset, length);
        if (r > 0) {
            // Copy the first LIMIT bytes that flow through into the excerpt.
            final int e = Math.min(r, LIMIT - taken);
            if (e >= 0) {
                System.arraycopy(b, offset, excerpt, taken, e);
                taken += e;
                if (taken >= LIMIT) {
                    // Excerpt buffer is full: mark it as truncated.
                    System.arraycopy(ELLIPSIS, 0, excerpt, LIMIT, ELLIPSIS.length);
                    taken = excerpt.length;
                }
            }
        } else if (length > 0) {
            // There's no more data to take.
            trimExcerpt();
        }
        return r;
    }

    @Override
    public int read(byte[] b) throws IOException
    {
        return read(b, 0, b.length);
    }

    @Override
    public int read() throws IOException
    {
        byte[] b = new byte[1];
        return (read(b) <= 0) ? -1 : unsigned(b[0]);
    }

    /** Shrink the excerpt to exactly the bytes received so far. */
    private void trimExcerpt()
    {
        if (taken < excerpt.length) {
            byte[] complete = new byte[taken];
            System.arraycopy(excerpt, 0, complete, 0, taken);
            excerpt = complete;
        }
    }

    /** Convert a byte to its unsigned value (0..255). */
    private static int unsigned(byte b)
    {
        return (b >= 0) ? b : ((int) b) + 256;
    }
}
package glideme;
import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.control.Button;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.Pane;
import javafx.scene.layout.VBox;
import javafx.scene.shape.Circle;
import javafx.scene.shape.Line;
import javafx.scene.shape.Rectangle;
import javafx.util.Duration;
import java.net.URL;
import java.util.EventListener;
import java.util.ResourceBundle;
public class MainWindow extends VBox implements Initializable {
    // NOTE(review): static, yet assigned from an instance constructor below —
    // only the most recently constructed window's world is visible. Confirm intent.
    private static World world;
    @FXML
    private Rectangle trolley;
    @FXML
    private Circle wheel;
    @FXML
    private Line rail;
    @FXML
    private Line rope;
    @FXML
    private Circle weight;
    @FXML
    private Button bStart;
    @FXML
    private Button bStop;
    @FXML
    private Pane pane;
    // Background task that periodically redraws the scene while the simulation runs.
    private Task task;
    // Rope length in pixels, derived from the FXML rope endpoints in initialize().
    // NOTE(review): "lineLenght" is a typo for "lineLength"; kept as-is (code unchanged).
    private int lineLenght;
    public MainWindow(){
    };
    public MainWindow(World world){
        this.world = world;
    };
    // Wires up the start/stop buttons and the click-to-set-destination handler.
    @Override
    public void initialize(URL location, ResourceBundle resources) {
        bStart.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                // Start a daemon thread that redraws the crane every TIME_QUANTUM ms
                // until the task is cancelled.
                task = new Task() {
                    @Override
                    protected Object call() throws Exception {
                        while(true) {
                            // System.out.println(task.isCancelled());
                            if (!task.isCancelled()) {
                                // UI mutations must run on the JavaFX application thread.
                                Platform.runLater(new Runnable() {
                                    public void run() {
                                        drawWindow();
                                    }
                                });
                                try {
                                    Thread.sleep(world.TIME_QUANTUM);
                                }
                                // NOTE(review): '_' is a reserved identifier since Java 9;
                                // this only compiles on Java 8 and older.
                                catch (InterruptedException _) {
                                    return null;
                                }
                            }
                            else
                                return null;
                        }
                    }
                };
                Thread th = new Thread(task);
                th.setDaemon(true);
                th.start();
            }
        });
        bStop.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                task.cancel();
            }
        });
        // Rope length = |startY| + |endY| of the FXML-defined rope line.
        lineLenght=(int)(Math.abs(rope.getStartY())+Math.abs(rope.getEndY()));
        pane.setOnMouseClicked(new EventHandler<MouseEvent>() {
            @Override
            public void handle(MouseEvent event) {
                // Only accept new destinations while the simulation task is running.
                if(task!=null && !task.isCancelled()) {
                    world.setDestination((int) (event.getSceneX() - 300)); //300 is a position of trolley - to change
                }
            }
        });
    }
    // Repositions trolley, wheel, rope and weight from the current crane state.
    // Must be invoked on the JavaFX application thread (see Platform.runLater above).
    public void drawWindow(){
        trolley.setX(world.getCraneState().position);
        wheel.setCenterX(world.getCraneState().position);
        weight.setCenterX(world.getCraneState().position+lineLenght*Math.sin(world.getCraneState().angle));
        weight.setCenterY(-lineLenght * (1 - Math.cos(world.getCraneState().angle)));
        rope.setStartX(world.getCraneState().position);
        rope.setEndX(world.getCraneState().position + lineLenght * Math.sin(world.getCraneState().angle));
        rope.setEndY(lineLenght * (Math.cos(world.getCraneState().angle)));
        System.out.println(rope.getEndY());
        rail.setStartX(-World.TRACK_LENGTH);
        rail.setEndX(World.TRACK_LENGTH);
    };
}
package velir.intellij.cq5.ui;
import velir.intellij.cq5.jcr.model.AbstractProperty;
import javax.swing.*;
// Swing text field used to edit a plain string property value; implements the
// project's ValueInput interface so callers can read the edited value and type.
public class StringField extends JTextField implements ValueInput {
    public StringField(String s) {
        super(s);
        // Shared preferred size so all property inputs line up in the form.
        setPreferredSize(RegexTextField.GOOD_SIZE);
    }
    // Returns the current text content of the field.
    public Object getValue() {
        return getText();
    }
    // Returns the property-type prefix identifying a string property.
    public String getType() {
        return AbstractProperty.STRING_PREFIX;
    }
    // Returns this component itself for embedding in a container.
    public JComponent getComponent() {
        return this;
    }
}
package com.ctrip.xpipe.simpleserver;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.LoggerFactory;
import com.ctrip.xpipe.lifecycle.AbstractLifecycle;
import org.slf4j.Logger;
/**
 * A small blocking socket server: accepts connections on a background thread
 * and hands each one to an IoAction produced by the configured factory.
 *
 * @author wenchao.meng
 * 2016-04-15 (date garbled in the original header)
 */
public class Server extends AbstractLifecycle{
    protected Logger logger = LoggerFactory.getLogger(getClass());
    // Port to listen on.
    private int port;
    // Produces the per-connection read/write handler.
    private IoActionFactory ioActionFactory;
    // Runs the accept loop plus one Task per connection.
    // NOTE(review): this pool is never shut down; doStop only closes the socket.
    private ExecutorService executors = Executors.newCachedThreadPool();
    private ServerSocket ss;
    // Currently open connections.
    private AtomicInteger connected = new AtomicInteger(0);
    // Connections accepted since start (never decremented).
    private AtomicInteger totalConnected = new AtomicInteger(0);
    public Server(int port, IoActionFactory ioActionFactory){
        this.port = port;
        this.ioActionFactory = ioActionFactory;
    }
    public int getPort() {
        return port;
    }
    public int getConnected() {
        return connected.get();
    }
    public int getTotalConnected() {
        return totalConnected.get();
    }
    @Override
    protected void doStart() throws Exception {
        // The latch makes doStart return only after the listening socket is bound
        // (or after 10s). NOTE(review): the await() result is ignored, so a bind
        // failure is only visible in the log.
        final CountDownLatch latch = new CountDownLatch(1);
        executors.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    try{
                        ss = new ServerSocket(port);
                        if(logger.isInfoEnabled()){
                            logger.info("[run][listening]" + port);
                        }
                    }finally{
                        // Count down even if binding failed so doStart never hangs.
                        latch.countDown();
                    }
                    // Accept loop: runs until doStop() closes the socket, which
                    // makes accept() throw and exits through the catch below.
                    while(true){
                        Socket socket = ss.accept();
                        connected.incrementAndGet();
                        totalConnected.incrementAndGet();
                        if(logger.isInfoEnabled()){
                            logger.info("[run][new socket]" + socket);
                        }
                        IoAction ioAction = ioActionFactory.createIoAction(socket);
                        executors.execute(new Task(socket, ioAction));
                    }
                } catch (IOException e) {
                    logger.warn("[run]" + port + "," + e.getMessage());
                }finally{
                }
            }
        });
        latch.await(10, TimeUnit.SECONDS);
    }
    @Override
    protected void doStop() throws Exception {
        // Closing the server socket unblocks accept() and ends the accept loop.
        if(ss != null){
            ss.close();
        }
    }
    // Per-connection worker: pumps reads/writes through the IoAction until EOF.
    public class Task implements Runnable{
        private Socket socket;
        private IoAction ioAction;
        public Task(Socket socket, IoAction ioAction){
            this.socket = socket;
            this.ioAction = ioAction;
        }
        @Override
        public void run() {
            try {
                while(true){
                    Object read = ioAction.read();
                    // A null read signals the peer closed the connection.
                    if(read == null){
                        break;
                    }
                    ioAction.write();
                }
            } catch (IOException e) {
                logger.error("[run]" + socket, e);
            }finally{
                try {
                    if(ioAction instanceof DeadAware){
                        ((DeadAware) ioAction).setDead();
                    }
                    connected.decrementAndGet();
                    socket.close();
                } catch (IOException e) {
                    logger.error("[close]", e);
                }
            }
        }
    }
}
package org.mwg.core.utility;
import org.junit.Assert;
import org.junit.Test;
import org.mwg.core.CoreConstants;
import org.mwg.utility.HashHelper;
public class HashHelperTest {
// Integer.MIN_VALUE == 0x80000000
public static final int MIN_INT = -2147483648;
// Integer.MAX_VALUE == 0x7FFFFFFF
public static final int MAX_INT = 2147483647;
/* MAX TESTS */
@Test
public void stringHash_0Test() {
int hash = HashHelper.hash("helloMWG");
//System.out.println("stringHash_0Test: " + hash);
Assert.assertTrue(hash == -792688181L);
}
@Test
public void stringHash_1Test() {
int hash = HashHelper.hash("aVeryLongStringThatCanGoOverTheIntegerLimitAfterAHash");
System.out.println("stringHash_0Test: " + hash);
Assert.assertTrue(hash == -302989728);
}
@Test
public void longHash_0Test() {
try {
HashHelper.longHash(1, 0);
Assert.fail("This should have thrown an exception");
} catch (Exception e) {
}
}
@Test
public void longHash_1Test() {
try {
HashHelper.longHash(1, CoreConstants.BEGINNING_OF_TIME);
Assert.fail("This should have thrown an exception");
} catch (Exception e) {
}
}
@Test
public void tripleHash_0Test() {
try {
HashHelper.tripleHash((byte) 1, 2, 3, 4, 0);
Assert.fail("This should have thrown an exception");
} catch (Exception e) {
}
}
@Test
public void tripleHash_1Test() {
try {
HashHelper.tripleHash((byte) 1, 2, 3, 4, CoreConstants.BEGINNING_OF_TIME);
Assert.fail("This should have thrown an exception");
} catch (Exception e) {
}
}
/* HASH TESTS */
@Test
public void longHash_3Test() {
long hash = HashHelper.longHash(CoreConstants.END_OF_TIME, CoreConstants.END_OF_TIME);
//System.out.println("longHash_3Test: " + hash);
Assert.assertTrue(hash < CoreConstants.END_OF_TIME);
//Assert.assertTrue(hash == 673163482434621L);
}
@Test
public void longHash_4Test() {
long hash = HashHelper.longHash(CoreConstants.END_OF_TIME, 10000);
//System.out.println("longHash_4Test: " + hash);
Assert.assertTrue(hash < 10000);
//Assert.assertTrue(hash == 271);
}
@Test
public void longHash_5Test() {
long hash = HashHelper.longHash(-156487, 10000);
//System.out.println("longHash_5Test: " + hash);
Assert.assertTrue(hash < 10000);
//Assert.assertTrue(hash == 9854);
}
@Test
public void longHash_6Test() {
long hash = HashHelper.longHash(0, 10000);
//System.out.println("longHash_6Test: " + hash);
Assert.assertTrue(hash < 10000);
//Assert.assertTrue(hash == 8147);
}
@Test
public void tripleHash_3Test() {
long hash = HashHelper.tripleHash((byte) 1, 1, 2, 3, CoreConstants.END_OF_TIME);
//System.out.println("tripleHash_3Test: " + hash);
Assert.assertTrue(hash < CoreConstants.END_OF_TIME);
//Assert.assertTrue(hash == 6324531823975995L);
}
@Test
public void tripleHash_4Test() {
long hash = HashHelper.tripleHash((byte) 2, 1, -1, 3, CoreConstants.END_OF_TIME);
//System.out.println("tripleHash_4Test: " + hash);
Assert.assertTrue(hash < CoreConstants.END_OF_TIME);
//Assert.assertTrue(hash == 2261661239301336L);
}
@Test
public void tripleHash_5Test() {
long hash = HashHelper.tripleHash((byte) 3, 1, 2, 0, CoreConstants.END_OF_TIME);
//System.out.println("tripleHash_5Test: " + hash);
Assert.assertTrue(hash < CoreConstants.END_OF_TIME);
//Assert.assertTrue(hash == 914239194442175L);
}
@Test
public void tripleHash_6Test() {
long hash = HashHelper.tripleHash((byte) 4, 0, 0, 0, CoreConstants.END_OF_TIME);
//System.out.println("tripleHash_6Test: " + hash);
Assert.assertTrue(hash < CoreConstants.END_OF_TIME);
//Assert.assertTrue(hash == 1254293488547125L);
}
@Test
public void tripleHash_7Test() {
long hash = HashHelper.tripleHash((byte) 4, -1, -1, -1, 200);
//System.out.println("tripleHash_7Test: " + hash);
Assert.assertTrue(hash < 200);
//Assert.assertTrue(hash == 169);
}
@Test
public void tripleHash_8Test() {
long hash = HashHelper.tripleHash((byte) 1, 16, 500000, -132654987, 5000);
//System.out.println("tripleHash_8Test: " + hash);
Assert.assertTrue(hash < 5000);
//Assert.assertTrue(hash == 1380);
}
/*
@Test
public void stringHashPerfTest() {
final String val = "myAttributeNamett";
long before = System.currentTimeMillis();
long hash = 0;
for (int i = 0; i < 1000000000; i++) {
hash += val.hashCode();
}
System.out.println("Time:" + (System.currentTimeMillis() - before) + " L:" + hash);
before = System.currentTimeMillis();
hash = 0;
for (int i = 0; i < 1000000000; i++) {
hash += HashHelper.stringHash2(val);
}
System.out.println("Time:" + (System.currentTimeMillis() - before) + " L:" + hash);
before = System.currentTimeMillis();
hash = 0;
byte[] toBytes = val.getBytes();
for (int i = 0; i < 100000000; i++) {
hash += DataHasher.hash(toBytes);
}
System.out.println("Time:" + (System.currentTimeMillis() - before) + " L:" + hash);
}
*/
/*
@Test
public void bench() {
//System.out.println(HashHelper.tripleHash((byte) 0, 10, 10, 10, 1000000000));
long before = System.currentTimeMillis();
long sum = 0;
for (long i = 0; i < 1000000000; i++) {
sum += HashHelper.tripleHash((byte) 0, i, i * 2, i * 3, 1000000000);
}
long after = System.currentTimeMillis();
System.out.println(sum+"/"+(after - before) + " ms");
}
*/
} |
package soot.jimple.toolkits.scalar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import soot.Body;
import soot.BodyTransformer;
import soot.G;
import soot.Local;
import soot.NullType;
import soot.RefLikeType;
import soot.Scene;
import soot.Singletons;
import soot.Timers;
import soot.Unit;
import soot.ValueBox;
import soot.jimple.AssignStmt;
import soot.jimple.CastExpr;
import soot.jimple.Constant;
import soot.jimple.DefinitionStmt;
import soot.jimple.IntConstant;
import soot.jimple.LongConstant;
import soot.jimple.NullConstant;
import soot.jimple.Stmt;
import soot.jimple.StmtBody;
import soot.options.CPOptions;
import soot.options.Options;
import soot.toolkits.exceptions.ThrowAnalysis;
import soot.toolkits.graph.ExceptionalUnitGraph;
import soot.toolkits.graph.PseudoTopologicalOrderer;
import soot.toolkits.scalar.LocalDefs;
import soot.toolkits.scalar.SimpleLiveLocals;
import soot.toolkits.scalar.SmartLocalDefs;
import soot.toolkits.scalar.SmartLocalDefsPool;
import soot.util.Chain;
public class CopyPropagator extends BodyTransformer {
protected ThrowAnalysis throwAnalysis = null;
protected boolean forceOmitExceptingUnitEdges = false;
public CopyPropagator(Singletons.Global g) {
}
public CopyPropagator(ThrowAnalysis ta) {
this.throwAnalysis = ta;
}
public CopyPropagator(ThrowAnalysis ta, boolean forceOmitExceptingUnitEdges) {
this.throwAnalysis = ta;
this.forceOmitExceptingUnitEdges = forceOmitExceptingUnitEdges;
}
public static CopyPropagator v() {
return G.v().soot_jimple_toolkits_scalar_CopyPropagator();
}
/**
* Cascaded copy propagator.
*
* If it encounters situations of the form: A: a = ...; B: ... x = a; C:...
* use (x); where a has only one definition, and x has only one definition
* (B), then it can propagate immediately without checking between B and C
* for redefinitions of a (namely) A because they cannot occur. In this case
* the propagator is global.
*
* Otherwise, if a has multiple definitions then it only checks for
* redefinitions of Propagates constants and copies in extended basic
* blocks.
*
* Does not propagate stack locals when the "only-regular-locals" option is
* true.
*/
protected void internalTransform(Body b, String phaseName, Map<String, String> opts) {
CPOptions options = new CPOptions(opts);
StmtBody stmtBody = (StmtBody) b;
int fastCopyPropagationCount = 0;
int slowCopyPropagationCount = 0;
if (Options.v().verbose())
G.v().out.println("[" + stmtBody.getMethod().getName() + "] Propagating copies...");
if (Options.v().time())
Timers.v().propagatorTimer.start();
Chain<Unit> units = stmtBody.getUnits();
Map<Local, Integer> localToDefCount = new HashMap<Local, Integer>();
// Count number of definitions for each local.
for (Unit u : units) {
Stmt s = (Stmt) u;
if (s instanceof DefinitionStmt && ((DefinitionStmt) s).getLeftOp() instanceof Local) {
Local l = (Local) ((DefinitionStmt) s).getLeftOp();
if (!localToDefCount.containsKey(l))
localToDefCount.put(l, new Integer(1));
else
localToDefCount.put(l, new Integer(localToDefCount.get(l).intValue() + 1));
}
}
if (this.throwAnalysis == null)
this.throwAnalysis = Scene.v().getDefaultThrowAnalysis();
ExceptionalUnitGraph graph = new ExceptionalUnitGraph(stmtBody, throwAnalysis,
forceOmitExceptingUnitEdges || Options.v().omit_excepting_unit_edges());
LocalDefs localDefs;
localDefs = new SmartLocalDefs(graph, new SimpleLiveLocals(graph));
// Perform a local propagation pass.
{
Iterator<Unit> stmtIt = (new PseudoTopologicalOrderer<Unit>()).newList(graph, false).iterator();
while (stmtIt.hasNext()) {
Stmt stmt = (Stmt) stmtIt.next();
for (ValueBox useBox : stmt.getUseBoxes()) {
if (useBox.getValue() instanceof Local) {
Local l = (Local) useBox.getValue();
// We force propagating nulls. If a target can only be
// null due to typing, we always inline that constant.
if (!(l.getType() instanceof NullType)) {
if (options.only_regular_locals() && l.getName().startsWith("$"))
continue;
if (options.only_stack_locals() && !l.getName().startsWith("$"))
continue;
}
List<Unit> defsOfUse = localDefs.getDefsOfAt(l, stmt);
// We can propagate the definition if we either only
// have
// one definition or all definitions are side-effect
// free
// and equal. For starters, we only support constants in
// the case of multiple definitions.
boolean propagateDef = defsOfUse.size() == 1;
if (!propagateDef && defsOfUse.size() > 0) {
boolean agrees = true;
Constant constVal = null;
for (Unit defUnit : defsOfUse) {
boolean defAgrees = false;
if (defUnit instanceof AssignStmt) {
AssignStmt assign = (AssignStmt) defUnit;
if (assign.getRightOp() instanceof Constant) {
if (constVal == null) {
constVal = (Constant) assign.getRightOp();
defAgrees = true;
} else if (constVal.equals(assign.getRightOp()))
defAgrees = true;
}
}
agrees &= defAgrees;
}
propagateDef = agrees;
}
if (propagateDef) {
DefinitionStmt def = (DefinitionStmt) defsOfUse.get(0);
if (def.getRightOp() instanceof Constant) {
if (useBox.canContainValue(def.getRightOp())) {
useBox.setValue(def.getRightOp());
}
}
else if (def.getRightOp() instanceof CastExpr) {
CastExpr ce = (CastExpr) def.getRightOp();
if (ce.getCastType() instanceof RefLikeType) {
boolean isConstNull = ce.getOp() instanceof IntConstant
&& ((IntConstant) ce.getOp()).value == 0;
isConstNull |= ce.getOp() instanceof LongConstant
&& ((LongConstant) ce.getOp()).value == 0;
if (isConstNull) {
if (useBox.canContainValue(NullConstant.v())) {
useBox.setValue(NullConstant.v());
}
}
}
}
else if (def.getRightOp() instanceof Local) {
Local m = (Local) def.getRightOp();
if (l != m) {
Integer defCount = localToDefCount.get(m);
if (defCount == null || defCount == 0)
throw new RuntimeException("Variable " + m + " used without definition!");
if (defCount == 1) {
useBox.setValue(m);
fastCopyPropagationCount++;
continue;
}
List<Unit> path = graph.getExtendedBasicBlockPathBetween(def, stmt);
if (path == null) {
// no path in the extended basic block
continue;
}
Iterator<Unit> pathIt = path.iterator();
// Skip first node
pathIt.next();
// Make sure that m is not redefined along
// path
{
boolean isRedefined = false;
while (pathIt.hasNext()) {
Stmt s = (Stmt) pathIt.next();
if (stmt == s) {
// Don't look at the last
// statement
// since it is evaluated after
// the uses
break;
}
if (s instanceof DefinitionStmt) {
if (((DefinitionStmt) s).getLeftOp() == m) {
isRedefined = true;
break;
}
}
}
if (isRedefined)
continue;
}
useBox.setValue(m);
slowCopyPropagationCount++;
}
}
}
}
}
}
}
if (Options.v().verbose())
G.v().out.println("[" + stmtBody.getMethod().getName() + "] Propagated: " + fastCopyPropagationCount
+ " fast copies " + slowCopyPropagationCount + " slow copies");
if (Options.v().time())
Timers.v().propagatorTimer.end();
SmartLocalDefsPool.v().invalidate(b);
}
} |
package spark;
import java.io.File;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function0;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.api.java.JavaStreamingContextFactory;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;
import eventsimulator.ObjectSerializer;
import kafka.serializer.DefaultDecoder;
import scala.Tuple2;
/**
* read from a folder. Keep adding new file with data into the folder specified.
*/
public final class StreamingKafkaRecoverableDirectEvent {
private static String checkpointDir = "hdfs://idcdvstl233:8020/tmp/StreamingKafkaDirectEvent";
private static boolean streamCheckPoint = false;
private static int duration = 10;
public static void main(String[] args) {
Logger.getLogger("org").setLevel(Level.WARN);
Logger.getLogger("akka").setLevel(Level.WARN);
if (args.length > 0) {
try {
duration = Integer.parseInt(args[0]);
System.out.println("duration changed to " + duration);
} catch (Exception e) {
System.out.println("Duration reset to defaults");
}
if(args.length>1){
try{
streamCheckPoint = Boolean.getBoolean(args[1]);
System.out.println("streamCheckPoint changed to " + streamCheckPoint);
}catch (Exception e) {
System.out.println("streamCheckPoint reset to defaults");
}
}
if(args.length>2){
checkpointDir = args[2];
System.out.println("checkpointDir changed to " + checkpointDir);
}
}
JavaStreamingContext ssc = JavaStreamingContext.getOrCreate(checkpointDir,new JavaStreamingContextFactory() {
@Override
public JavaStreamingContext create() {
return createContext(checkpointDir, streamCheckPoint, duration);
}
});
ssc.start();
ssc.awaitTermination();
}
public static JavaStreamingContext createContext(String checkpointDirectory, boolean streamCheckPoint, int duration) {
SparkConf sparkConf = new SparkConf().setAppName("StreamingKafkaRecoverableDirectEvent");
// Only for running from eclipse
if (System.getProperty("dev") != null)
sparkConf.setJars(new String[] { "target\\TestProjects-1.0-SNAPSHOT.jar" });
sparkConf.set("spark.executor.memory", "4G");
// for elasticsearch
sparkConf.set("es.nodes", "10.204.102.200");
sparkConf.set("es.index.auto.create", "true");
final int streamingDuration = duration;
JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(streamingDuration));
ssc.checkpoint(checkpointDir);
HashSet<String> topicsSet = new HashSet<String>();
topicsSet.add("loadtest");
HashMap<String, String> kafkaParams = new HashMap<String, String>();
kafkaParams.put("metadata.broker.list", "10.204.100.180:19092");
JavaPairInputDStream<byte[], byte[]> messages = KafkaUtils.createDirectStream(ssc, byte[].class, byte[].class,
DefaultDecoder.class, DefaultDecoder.class, kafkaParams, topicsSet);
if(streamCheckPoint){
messages.checkpoint(Durations.minutes(1));
}
JavaDStream<Map<String, String>> lines = messages
.map(new Function<Tuple2<byte[], byte[]>, Map<String, String>>() {
@Override
public Map<String, String> call(Tuple2<byte[], byte[]> tuple2) {
Map<String, String> ret = (Map<String, String>) ObjectSerializer.getEvent(tuple2._2());
process(ret);
return ret;
}
private void process(Map<String, String> ret) {
ret.put("obscountry", "US");
}
});
lines.foreachRDD(new Function<JavaRDD<Map<String, String>>, Void>() {
@Override
public Void call(JavaRDD<Map<String, String>> rdd) throws Exception {
long start = System.currentTimeMillis();
long count = 1; // rdd.count();
long countTime = System.currentTimeMillis() - start;
start = System.currentTimeMillis();
// System.out.println(new Date() + " Total records read: "
// +count );
if (count > 0) {
try {
JavaEsSpark.saveToEs(rdd, "events/event");
long esSaveTime = System.currentTimeMillis() - start;
// System.out.println(new Date() + " Stats: countTime:"
// +countTime + " esSaveTime:" + esSaveTime + " total: "
// + count);
System.out.println(new Date() + " Stats: esSaveTime:" + esSaveTime);
} catch (Exception es) {
es.printStackTrace();
}
}
return null;
}
});
return ssc;
}
} |
package org.apache.batik.bridge;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Composite;
import java.awt.Paint;
import java.awt.RenderingHints;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.GeneralPath;
import java.awt.geom.Rectangle2D;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
import java.util.HashMap;
import org.apache.batik.css.AbstractViewCSS;
import org.apache.batik.css.CSSOMReadOnlyStyleDeclaration;
import org.apache.batik.css.CSSOMReadOnlyValue;
import org.apache.batik.css.value.ImmutableString;
import org.apache.batik.dom.svg.SVGOMDocument;
import org.apache.batik.dom.util.XLinkSupport;
import org.apache.batik.ext.awt.MultipleGradientPaint;
import org.apache.batik.ext.awt.color.ICCColorSpaceExt;
import org.apache.batik.ext.awt.image.renderable.ClipRable;
import org.apache.batik.ext.awt.image.renderable.Filter;
import org.apache.batik.gvt.CompositeGraphicsNode;
import org.apache.batik.gvt.CompositeShapePainter;
import org.apache.batik.gvt.GraphicsNode;
import org.apache.batik.gvt.filter.Mask;
import org.apache.batik.util.SVGConstants;
import org.apache.batik.util.CSSConstants;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.css.CSSPrimitiveValue;
import org.w3c.dom.css.CSSStyleDeclaration;
import org.w3c.dom.css.CSSValue;
import org.w3c.dom.css.CSSValueList;
import org.w3c.dom.css.RGBColor;
import org.w3c.dom.css.ViewCSS;
import org.w3c.dom.css.Rect;
import org.w3c.dom.svg.SVGICCColor;
import org.w3c.dom.svg.SVGColor;
import org.w3c.dom.svg.SVGNumberList;
import org.w3c.dom.svg.SVGPaint;
/**
* A collection of utility method involving CSS property. The listed
* methods bellow could be used as convenient methods to create
* concrete objects regarding to CSS properties.
*
* @author <a href="mailto:tkormann@apache.org">Thierry Kormann</a>
* @version $Id$
*/
public abstract class CSSUtilities implements CSSConstants, ErrorConstants {
/**
 * Utility class: the protected constructor prevents direct instantiation
 * while still allowing subclassing. No instance of this class is required.
 */
protected CSSUtilities() {}
// Global methods
/**
 * Returns the CSS view attached to the document that owns the given element.
 *
 * @param e the element whose owner document's view is wanted
 * @return the document's default view, as an AbstractViewCSS
 */
public static AbstractViewCSS getViewCSS(Element e) {
    SVGOMDocument doc = (SVGOMDocument) e.getOwnerDocument();
    return (AbstractViewCSS) doc.getDefaultView();
}
/**
 * Returns the computed style declaration of the specified element
 * (no pseudo-element).
 *
 * @param e the element
 */
public static CSSOMReadOnlyStyleDeclaration getComputedStyle(Element e) {
    AbstractViewCSS view = getViewCSS(e);
    return view.getComputedStyleInternal(e, null);
}
// 'enable-background'
/**
 * Returns the subregion of user space where access to the
 * background image is allowed to happen.
 *
 * @param e the container element
 * @param uctx context used to resolve CSS lengths to user-space values
 */
public static
    Rectangle2D convertEnableBackground(Element e,
                                        UnitProcessor.Context uctx) {
    CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
    CSSValue v
        = decl.getPropertyCSSValueInternal(CSS_ENABLE_BACKGROUND_PROPERTY);
    // A non-list value corresponds to 'accumulate' — background disabled.
    if (v.getCssValueType() != v.CSS_VALUE_LIST) {
        return null; // accumulate
    }
    CSSValueList l = (CSSValueList)v;
    int length = l.getLength();
    switch (length) {
    case 1:
        // Bare 'new': the whole viewport is the background region.
        return CompositeGraphicsNode.VIEWPORT; // new
    case 5: // new <x>,<y>,<width>,<height>
        // Items 1..4 are the explicit region; item 0 is the 'new' keyword.
        v = l.item(1);
        float x = UnitProcessor.cssHorizontalCoordinateToUserSpace
            (v, CSS_ENABLE_BACKGROUND_PROPERTY, uctx);
        v = l.item(2);
        float y = UnitProcessor.cssVerticalCoordinateToUserSpace
            (v, CSS_ENABLE_BACKGROUND_PROPERTY, uctx);
        v = l.item(3);
        float w = UnitProcessor.cssHorizontalLengthToUserSpace
            (v, CSS_ENABLE_BACKGROUND_PROPERTY, uctx);
        v = l.item(4);
        float h = UnitProcessor.cssVerticalLengthToUserSpace
            (v, CSS_ENABLE_BACKGROUND_PROPERTY, uctx);
        return new Rectangle2D.Float(x, y, w, h);
    default:
        // If more than zero but less than four of the values
        // <x>,<y>,<width> and <height> are specified or if zero
        // values are specified for <width> or <height>,
        // BackgroundImage and BackgroundAlpha are processed as if
        // background image processing were not enabled.
        return null;
    }
}
// 'color-interpolation'
/**
* Returns the color space for the specified element. Checks the
* 'color-interpolation' property
*
* @param e the element
*/
public static MultipleGradientPaint.ColorSpaceEnum
convertColorInterpolation(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSPrimitiveValue v
= (CSSPrimitiveValue) decl.getPropertyCSSValueInternal
(CSS_COLOR_INTERPOLATION_PROPERTY);
return CSS_LINEARRGB_VALUE.equals(v.getStringValue())
? MultipleGradientPaint.LINEAR_RGB
: MultipleGradientPaint.SRGB;
}
// 'color-rendering', 'text-rendering', 'image-rendering', 'shape-rendering'
/**
* Returns the rendering hints for the specified shape element or null
* none has been specified. Checks the 'shape-rendering' property.
*
* @param e the element
*/
public static Map convertShapeRendering(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSPrimitiveValue v = (CSSPrimitiveValue)
decl.getPropertyCSSValueInternal(CSS_SHAPE_RENDERING_PROPERTY);
String s = v.getStringValue();
if (s.charAt(0) == 'a') { // auto
return null;
}
Map hints = new HashMap();
switch(s.charAt(0)) {
case 'o': // optimizeSpeed
hints.put(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_SPEED);
hints.put(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
break;
case 'c': // crispEdges
hints.put(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_DEFAULT);
hints.put(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
break;
case 'g': // geometricPrecision
hints.put(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_QUALITY);
hints.put(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
break;
}
return hints;
}
/**
* Returns the rendering hints for the specified text element or null
* none has been specified. Checks the 'text-rendering' property.
*
* @param e the element
*/
public static Map convertTextRendering(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSPrimitiveValue v = (CSSPrimitiveValue)
decl.getPropertyCSSValueInternal(CSS_TEXT_RENDERING_PROPERTY);
String s = v.getStringValue();
if (s.charAt(0) == 'a') { // auto
return null;
}
Map hints = new HashMap();
switch(s.charAt(8)) {
case 's': // optimizeSpeed
hints.put(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_SPEED);
hints.put(RenderingHints.KEY_TEXT_ANTIALIASING,
RenderingHints.VALUE_TEXT_ANTIALIAS_OFF);
hints.put(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
hints.put(RenderingHints.KEY_FRACTIONALMETRICS,
RenderingHints.VALUE_FRACTIONALMETRICS_OFF);
break;
case 'l': // optimizeLegibility
hints.put(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_QUALITY);
hints.put(RenderingHints.KEY_TEXT_ANTIALIASING,
RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
hints.put(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
hints.put(RenderingHints.KEY_FRACTIONALMETRICS,
RenderingHints.VALUE_FRACTIONALMETRICS_OFF);
break;
case 'c': // geometricPrecision
hints.put(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_QUALITY);
hints.put(RenderingHints.KEY_TEXT_ANTIALIASING,
RenderingHints.VALUE_TEXT_ANTIALIAS_DEFAULT);
hints.put(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_DEFAULT);
hints.put(RenderingHints.KEY_FRACTIONALMETRICS,
RenderingHints.VALUE_FRACTIONALMETRICS_ON);
break;
}
return hints;
}
/**
* Returns the rendering hints for the specified image element or null
* none has been specified. Checks the 'image-rendering' property.
*
* @param e the element
*/
public static Map convertImageRendering(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSPrimitiveValue v = (CSSPrimitiveValue)
decl.getPropertyCSSValueInternal(CSS_IMAGE_RENDERING_PROPERTY);
String s = v.getStringValue();
if (s.charAt(0) == 'a') { // auto
return null;
}
Map hints = new HashMap();
switch(s.charAt(9)) {
case 'S': // optimizeSpeed
hints.put(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_SPEED);
hints.put(RenderingHints.KEY_INTERPOLATION,
RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR);
break;
case 'Q': // optimizeQuality
hints.put(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_QUALITY);
hints.put(RenderingHints.KEY_INTERPOLATION,
RenderingHints.VALUE_INTERPOLATION_BICUBIC);
break;
}
return hints;
}
/**
* Returns the rendering hints for the specified element or null
* none has been specified. Checks the 'color-rendering' property.
*
* @param e the element
*/
public static Map convertColorRendering(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSPrimitiveValue v = (CSSPrimitiveValue)
decl.getPropertyCSSValueInternal(CSS_COLOR_RENDERING_PROPERTY);
String s = v.getStringValue();
if (s.charAt(0) == 'a') { // auto
return null;
}
Map hints = new HashMap();
switch(v.getStringValue().charAt(9)) {
case 'S': // optimizeSpeed
hints.put(RenderingHints.KEY_COLOR_RENDERING,
RenderingHints.VALUE_COLOR_RENDER_SPEED);
hints.put(RenderingHints.KEY_ALPHA_INTERPOLATION,
RenderingHints.VALUE_ALPHA_INTERPOLATION_SPEED);
break;
case 'Q': // optimizeQuality
hints.put(RenderingHints.KEY_COLOR_RENDERING,
RenderingHints.VALUE_COLOR_RENDER_QUALITY);
hints.put(RenderingHints.KEY_ALPHA_INTERPOLATION,
RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
break;
}
return hints;
}
// 'display'
/**
* Returns true if the specified element has to be displayed, false
* otherwise. Checks the 'display' property.
*
* @param e the element
*/
public static boolean convertDisplay(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSValue v = decl.getPropertyCSSValueInternal(CSS_DISPLAY_PROPERTY);
return (((CSSPrimitiveValue)v).getStringValue().charAt(0) != 'n');
}
// 'visibility'
/**
* Returns true if the specified element is visible, false
* otherwise. Checks the 'visibility' property.
*
* @param e the element
*/
public static boolean convertVisibility(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSValue v = decl.getPropertyCSSValueInternal(CSS_VISIBILITY_PROPERTY);
if (v.getCssValueType() == CSSValue.CSS_INHERIT) {
// workaround for the CSS2 spec which indicates that the
// initial value is 'inherit'. So if we get 'inherit' it
// means that we are on the outermost svg element and we
// always return true.
return true;
} else {
return (((CSSPrimitiveValue)v).getStringValue().charAt(0) == 'v');
}
}
// 'opacity'
/**
* Returns a composite object that represents the 'opacity' of the
* specified element.
*
* @param e the element
*/
public static Composite convertOpacity(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSValue v =
getComputedStyle(e).getPropertyCSSValueInternal
(CSS_OPACITY_PROPERTY);
float opacity = PaintServer.convertOpacity(v);
if (opacity <= 0f) {
return null;
} else if (opacity >= 1f) {
return AlphaComposite.SrcOver;
} else {
return AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opacity);
}
}
// 'overflow' and 'clip'
/**
* Returns true if the 'overflow' property indicates that an
* additional clip is required, false otherwise. An additional
* clip is needed if the 'overflow' property is 'scroll' or
* 'hidden'.
*
* @param e the element with the 'overflow' property
*/
public static boolean convertOverflow(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSPrimitiveValue overflow =
(CSSPrimitiveValue)decl.getPropertyCSSValueInternal
(CSS_OVERFLOW_PROPERTY);
String s = overflow.getStringValue();
// clip if 'hidden' or 'scroll'
return (s.charAt(0) == 'h') || (s.charAt(0) == 's');
}
/**
* Returns an array of floating offsets representing the 'clip'
* property or null if 'auto'. The offsets are specified in the
* order top, right, bottom, left.
*
* @param e the element with the 'clip' property
*/
public static float[] convertClip(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSPrimitiveValue clip =
(CSSPrimitiveValue)decl.getPropertyCSSValueInternal
(CSS_CLIP_PROPERTY);
switch (clip.getPrimitiveType()) {
case CSSPrimitiveValue.CSS_RECT:
float [] off = new float[4];
Rect r = clip.getRectValue();
off[0] = r.getTop().getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
off[1] = r.getRight().getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
off[2] = r.getBottom().getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
off[3] = r.getLeft().getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
return off;
case CSSPrimitiveValue.CSS_IDENT:
return null; // 'auto' means no offsets
default:
throw new Error(); // can't be reached
}
}
// 'filter'
/**
* Returns a <tt>Filter</tt> referenced by the specified element
* and which applies on the specified graphics node.
* Handle the 'filter' property.
*
* @param filteredElement the element that references the filter
* @param filteredNode the graphics node associated to the element to filter
* @param ctx the bridge context
*/
public static Filter convertFilter(Element filteredElement,
GraphicsNode filteredNode,
BridgeContext ctx) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(filteredElement);
CSSPrimitiveValue filterValue =
(CSSPrimitiveValue)decl.getPropertyCSSValueInternal
(CSS_FILTER_PROPERTY);
switch(filterValue.getPrimitiveType()){
case CSSPrimitiveValue.CSS_IDENT:
return null; // 'filter:none'
case CSSPrimitiveValue.CSS_URI:
String uri = filterValue.getStringValue();
Element filter = ctx.getReferencedElement(filteredElement, uri);
Bridge bridge = ctx.getBridge(filter);
if (bridge == null || !(bridge instanceof FilterBridge)) {
throw new BridgeException(filteredElement,
ERR_CSS_URI_BAD_TARGET,
new Object[] {uri});
}
return ((FilterBridge)bridge).createFilter(ctx,
filter,
filteredElement,
filteredNode);
default:
throw new Error(); // can't be reached
}
}
// 'clip-path' and 'clip-rule'
/**
* Returns a <tt>Clip</tt> referenced by the specified element and
* which applies on the specified graphics node.
* Handle the 'clip-path' property.
*
* @param clipedElement the element that references the clip
* @param clipedNode the graphics node associated to the element to clip
* @param ctx the bridge context
*/
public static ClipRable convertClipPath(Element clipedElement,
GraphicsNode clipedNode,
BridgeContext ctx) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(clipedElement);
CSSPrimitiveValue clipValue =
(CSSPrimitiveValue)decl.getPropertyCSSValueInternal
(CSS_CLIP_PATH_PROPERTY);
switch(clipValue.getPrimitiveType()){
case CSSPrimitiveValue.CSS_IDENT:
return null; // 'clip-path:none'
case CSSPrimitiveValue.CSS_URI:
String uri = clipValue.getStringValue();
Element clipPath = ctx.getReferencedElement(clipedElement, uri);
Bridge bridge = ctx.getBridge(clipPath);
if (bridge == null || !(bridge instanceof ClipBridge)) {
throw new BridgeException(clipedElement,
ERR_CSS_URI_BAD_TARGET,
new Object[] {uri});
}
return ((ClipBridge)bridge).createClip(ctx,
clipPath,
clipedElement,
clipedNode);
default:
throw new Error(); // can't be reached
}
}
/**
* Returns the 'clip-rule' for the specified element.
*
* @param e the element interested in its a 'clip-rule'
* @return GeneralPath.WIND_NON_ZERO | GeneralPath.WIND_EVEN_ODD
*/
public static int convertClipRule(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
return rule(decl.getPropertyCSSValueInternal(CSS_CLIP_RULE_PROPERTY));
}
// 'mask'
/**
* Returns a <tt>Mask</tt> referenced by the specified element and
* which applies on the specified graphics node.
* Handle the 'mask' property.
*
* @param maskedElement the element that references the mask
* @param maskedNode the graphics node associated to the element to mask
* @param ctx the bridge context
*/
public static Mask convertMask(Element maskedElement,
GraphicsNode maskedNode,
BridgeContext ctx) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(maskedElement);
CSSPrimitiveValue maskValue =
(CSSPrimitiveValue)decl.getPropertyCSSValueInternal
(CSS_MASK_PROPERTY);
switch(maskValue.getPrimitiveType()){
case CSSPrimitiveValue.CSS_IDENT:
return null; // 'mask:none'
case CSSPrimitiveValue.CSS_URI:
String uri = maskValue.getStringValue();
Element mask = ctx.getReferencedElement(maskedElement, uri);
Bridge bridge = ctx.getBridge(mask);
if (bridge == null || !(bridge instanceof MaskBridge)) {
throw new BridgeException(maskedElement,
ERR_CSS_URI_BAD_TARGET,
new Object[] {uri});
}
return ((MaskBridge)bridge).createMask(ctx,
mask,
maskedElement,
maskedNode);
default:
throw new Error(); // can't be reached
}
}
/**
* Returns the 'fill-rule' for the specified element.
*
* @param e the element interested in its a 'fill-rule'
* @return GeneralPath.WIND_NON_ZERO | GeneralPath.WIND_EVEN_ODD
*/
public static int convertFillRule(Element e) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
return rule(decl.getPropertyCSSValueInternal
(CSS_FILL_RULE_PROPERTY));
}
// 'lighting-color'
/**
* Converts the color defined on the specified lighting filter element
* to a <tt>Color</tt>.
*
* @param e the lighting filter element
* @param ctx the bridge context
*/
public static Color convertLightingColor(Element e, BridgeContext ctx) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
CSSValue colorDef = decl.getPropertyCSSValueInternal
(CSS_LIGHTING_COLOR_PROPERTY);
if (colorDef.getCssValueType() == CSSValue.CSS_PRIMITIVE_VALUE) {
CSSPrimitiveValue v = (CSSPrimitiveValue)colorDef;
return PaintServer.convertColor(v.getRGBColorValue(), 1);
} else {
return PaintServer.convertRGBICCColor
(e, (SVGColor)colorDef, 1, ctx);
}
}
// 'flood-color' and 'flood-opacity'
/**
* Converts the color defined on the specified <feFlood>
* element to a <tt>Color</tt>.
*
* @param e the feFlood element
* @param ctx the bridge context
*/
public static Color convertFloodColor(Element e, BridgeContext ctx) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(e);
float opacity = PaintServer.convertOpacity
(decl.getPropertyCSSValueInternal(CSS_FLOOD_OPACITY_PROPERTY));
CSSValue colorDef
= decl.getPropertyCSSValueInternal(CSS_FLOOD_COLOR_PROPERTY);
if (colorDef.getCssValueType() == CSSValue.CSS_PRIMITIVE_VALUE) {
CSSPrimitiveValue v = (CSSPrimitiveValue)colorDef;
return PaintServer.convertColor(v.getRGBColorValue(), opacity);
} else {
return PaintServer.convertRGBICCColor
(e, (SVGColor)colorDef, opacity, ctx);
}
}
// 'stop-color'
/**
* Converts the color defined on the specified <stop> element
* to a <tt>Color</tt>.
*
* @param stopElement the stop element
* @param opacity the paint opacity
* @param ctx the bridge context to use
*/
public static Color convertStopColor(Element stopElement,
float opacity,
BridgeContext ctx) {
CSSOMReadOnlyStyleDeclaration decl = getComputedStyle(stopElement);
CSSValue colorDef
= decl.getPropertyCSSValueInternal(CSS_STOP_COLOR_PROPERTY);
float stopOpacity = PaintServer.convertOpacity
(decl.getPropertyCSSValueInternal(CSS_STOP_OPACITY_PROPERTY));
opacity *= stopOpacity;
if (colorDef.getCssValueType() == CSSValue.CSS_PRIMITIVE_VALUE) {
CSSPrimitiveValue v = (CSSPrimitiveValue)colorDef;
return PaintServer.convertColor(v.getRGBColorValue(), opacity);
} else {
return PaintServer.convertRGBICCColor
(stopElement, (SVGColor)colorDef, opacity, ctx);
}
}
// CSS support for <use>
/**
* Partially computes the style in the 'def' tree and set it in the 'use'
* tree.
* <p>Note: This method must be called only when 'use' has been
* added to the DOM tree.
*
* @param refElement the referenced element
* @param localRefElement the referenced element in the current document
*/
public static void computeStyleAndURIs(Element refElement,
Element localRefElement) {
SVGOMDocument document
= (SVGOMDocument)localRefElement.getOwnerDocument();
ViewCSS view = (ViewCSS)document.getDefaultView();
SVGOMDocument refDocument
= (SVGOMDocument)refElement.getOwnerDocument();
ViewCSS refView = (ViewCSS)refDocument.getDefaultView();
URL url = refDocument.getURLObject();
computeStyleAndURIs(refElement,
refView,
localRefElement,
view,
url);
}
/**
* Partially computes the style in the use tree and set it in
* the target tree.
* Note: This method must be called only when 'def' has been added
* to the tree.
*/
static void computeStyleAndURIs(Element use, ViewCSS uv,
Element def, ViewCSS dv,
URL url) {
String href = XLinkSupport.getXLinkHref(def);
if (!href.equals("")) {
try {
XLinkSupport.setXLinkHref(def, new URL(url, href).toString());
} catch (MalformedURLException e) { }
}
CSSOMReadOnlyStyleDeclaration usd;
AbstractViewCSS uview = (AbstractViewCSS)uv;
usd = (CSSOMReadOnlyStyleDeclaration)uview.computeStyle(use, null);
try {
updateURIs(usd, url);
} catch (MalformedURLException ex) { }
((AbstractViewCSS)dv).setComputedStyle(def, null, usd);
for (Node un = use.getFirstChild(), dn = def.getFirstChild();
un != null;
un = un.getNextSibling(), dn = dn.getNextSibling()) {
if (un.getNodeType() == Node.ELEMENT_NODE) {
computeStyleAndURIs((Element)un, uv, (Element)dn, dv, url);
}
}
}
/**
* Updates the URIs in the given style declaration.
*/
public static void updateURIs(CSSOMReadOnlyStyleDeclaration sd, URL url)
throws MalformedURLException {
int len = sd.getLength();
for (int i = 0; i < len; i++) {
String name = sd.item(i);
CSSValue val = sd.getLocalPropertyCSSValue(name);
if (val != null &&
val.getCssValueType() ==
CSSPrimitiveValue.CSS_PRIMITIVE_VALUE) {
CSSPrimitiveValue pv = (CSSPrimitiveValue)val;
if (pv.getPrimitiveType() == CSSPrimitiveValue.CSS_URI) {
CSSOMReadOnlyValue v = new CSSOMReadOnlyValue
(new ImmutableString
(CSSPrimitiveValue.CSS_URI,
new URL(url, pv.getStringValue()).toString()));
sd.setPropertyCSSValue(name, v,
sd.getLocalPropertyPriority(name),
sd.getLocalPropertyOrigin(name));
}
}
}
}
// Additional utility methods used internally
/**
* Returns the winding rule represented by the specified CSSValue.
*
* @param v the value that represents the rule
* @return GeneralPath.WIND_NON_ZERO | GeneralPath.WIND_EVEN_ODD
*/
protected static int rule(CSSValue v) {
return (((CSSPrimitiveValue)v).getStringValue().charAt(0) == 'n')
? GeneralPath.WIND_NON_ZERO
: GeneralPath.WIND_EVEN_ODD;
}
} |
package com.xenonteam.xenonlib.util.java;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
/**
* @author tim4242
* @author philipas
*
*/
public class StorageHelper
{
/**
* Serializes an {@link java.lang.Object Object} to a byte[]
*
* @param obj The {@link java.lang.Object Object}
* @return A byte[] representation of the {@link java.lang.Object Object}
* @throws IOException
*/
public static byte[] serialize(Object obj) throws IOException
{
ByteArrayOutputStream out = new ByteArrayOutputStream();
ObjectOutputStream os = new ObjectOutputStream(out);
os.writeObject(obj);
byte[] b = out.toByteArray();
out.close();
os.close();
return b;
}
/**
* Deserializes {@link java.lang.Object Objects} serialized by {@link com.xenonteam.xenonlib.util.java.StorageHelper#serialize(java.lang.Object) serialize(Object)}
*
* @param data The Object as a byte[]
* @return an
* @throws IOException
* @throws ClassNotFoundException
*/
public static Object deserialize(byte[] data) throws IOException, ClassNotFoundException
{
ByteArrayInputStream in = new ByteArrayInputStream(data);
ObjectInputStream is = new ObjectInputStream(in);
Object o = is.readObject();
in.close();
is.close();
return o;
}
/**
* Writes an {@link java.lang.Object Object} to the specified {@link java.io.File File}
*
* @param obj The Object to write
* @param f The {@link java.io.File File}
* @throws IOException
*/
public static void writeSerialized(Object obj, File f) throws IOException
{
File temp = f;
if(!f.toString().contains("."))
{
temp = new File(f + ".jobj");
}
FileOutputStream out = new FileOutputStream(temp);
byte[] ser = serialize(obj);
byte[] ba = new byte[ser.length + 1];
ba[0] = new Integer(ser.length).byteValue();
for(int i = 0; i < ser.length; i++)
{
ba[i + 1] = ser[i];
}
out.write(ba);
out.close();
}
public static Object readSearialized(File f) throws ClassNotFoundException, IOException
{
File temp = f;
if(!f.toString().contains("."))
{
temp = new File(f + ".jobj");
}
byte[] lenB = new byte[1];
FileInputStream in = new FileInputStream(temp);
in.read(lenB);
int len = new Byte(lenB[0]).intValue();
byte[] obj = new byte[len];
in.read(obj);
in.close();
return deserialize(obj);
}
} |
package com.bloatit.web.scgiserver;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Map;
import com.bloatit.common.FatalErrorException;
import com.bloatit.common.Log;
import com.bloatit.web.server.DispatchServer;
import com.bloatit.web.server.HttpResponse;
import com.bloatit.web.server.SessionManager;
import com.bloatit.web.utils.url.AccountChargingPageUrl;
import com.bloatit.web.utils.url.CommentCommentActionUrl;
import com.bloatit.web.utils.url.CommentReplyPageUrl;
import com.bloatit.web.utils.url.ContributePageUrl;
import com.bloatit.web.utils.url.ContributionActionUrl;
import com.bloatit.web.utils.url.CreateIdeaActionUrl;
import com.bloatit.web.utils.url.CreateIdeaPageUrl;
import com.bloatit.web.utils.url.GlobalSearchPageUrl;
import com.bloatit.web.utils.url.IdeaCommentActionUrl;
import com.bloatit.web.utils.url.IdeaPageUrl;
import com.bloatit.web.utils.url.IdeasListUrl;
import com.bloatit.web.utils.url.IndexPageUrl;
import com.bloatit.web.utils.url.KudoActionUrl;
import com.bloatit.web.utils.url.LoginActionUrl;
import com.bloatit.web.utils.url.LoginPageUrl;
import com.bloatit.web.utils.url.LogoutActionUrl;
import com.bloatit.web.utils.url.MemberPageUrl;
import com.bloatit.web.utils.url.MembersListPageUrl;
import com.bloatit.web.utils.url.MyAccountPageUrl;
import com.bloatit.web.utils.url.OfferActionUrl;
import com.bloatit.web.utils.url.OfferPageUrl;
import com.bloatit.web.utils.url.PaylineActionUrl;
import com.bloatit.web.utils.url.PaylineNotifyActionUrl;
import com.bloatit.web.utils.url.PaylinePageUrl;
import com.bloatit.web.utils.url.RegisterActionUrl;
import com.bloatit.web.utils.url.RegisterPageUrl;
import com.bloatit.web.utils.url.SpecialsPageUrl;
import com.bloatit.web.utils.url.TestPageUrl;
public final class SCGIServer {
private static final String SCGI_HOST = "127.0.0.1";
private static final int SCGI_PORT = 4000;
public static void main(final String[] args) {
try {
new SCGIServer().run();
} catch (final IOException e) {
Log.server().fatal(e);
}
}
private ServerSocket providerSocket;
private Socket clientSocket;
private final DispatchServer dispatchServer;
public SCGIServer() {
clientSocket = null;
providerSocket = null;
dispatchServer = new DispatchServer();
dispatchServer.addLinkable(IndexPageUrl.getName(), IndexPageUrl.class);
dispatchServer.addLinkable(LoginPageUrl.getName(), LoginPageUrl.class);
dispatchServer.addLinkable(IdeasListUrl.getName(), IdeasListUrl.class);
dispatchServer.addLinkable(CreateIdeaPageUrl.getName(), CreateIdeaPageUrl.class);
dispatchServer.addLinkable(IdeaPageUrl.getName(), IdeaPageUrl.class);
dispatchServer.addLinkable(MyAccountPageUrl.getName(), MyAccountPageUrl.class);
dispatchServer.addLinkable(SpecialsPageUrl.getName(), SpecialsPageUrl.class);
dispatchServer.addLinkable(MembersListPageUrl.getName(), MembersListPageUrl.class);
dispatchServer.addLinkable(MemberPageUrl.getName(), MemberPageUrl.class);
dispatchServer.addLinkable(GlobalSearchPageUrl.getName(), GlobalSearchPageUrl.class);
dispatchServer.addLinkable(ContributePageUrl.getName(), ContributePageUrl.class);
dispatchServer.addLinkable(OfferPageUrl.getName(), OfferPageUrl.class);
dispatchServer.addLinkable(TestPageUrl.getName(), TestPageUrl.class);
dispatchServer.addLinkable(AccountChargingPageUrl.getName(), AccountChargingPageUrl.class);
dispatchServer.addLinkable(RegisterPageUrl.getName(), RegisterPageUrl.class);
dispatchServer.addLinkable(PaylinePageUrl.getName(), PaylinePageUrl.class);
dispatchServer.addLinkable(CommentReplyPageUrl.getName(), CommentReplyPageUrl.class);
dispatchServer.addLinkable(LoginActionUrl.getName(), LoginActionUrl.class);
dispatchServer.addLinkable(LogoutActionUrl.getName(), LogoutActionUrl.class);
dispatchServer.addLinkable(ContributionActionUrl.getName(), ContributionActionUrl.class);
dispatchServer.addLinkable(OfferActionUrl.getName(), OfferActionUrl.class);
dispatchServer.addLinkable(CreateIdeaActionUrl.getName(), CreateIdeaActionUrl.class);
dispatchServer.addLinkable(RegisterActionUrl.getName(), RegisterActionUrl.class);
dispatchServer.addLinkable(KudoActionUrl.getName(), KudoActionUrl.class);
dispatchServer.addLinkable(IdeaCommentActionUrl.getName(), IdeaCommentActionUrl.class);
dispatchServer.addLinkable(PaylineActionUrl.getName(), PaylineActionUrl.class);
dispatchServer.addLinkable(PaylineNotifyActionUrl.getName(), PaylineNotifyActionUrl.class);
dispatchServer.addLinkable(IdeaCommentActionUrl.getName(), IdeaCommentActionUrl.class);
dispatchServer.addLinkable(CommentCommentActionUrl.getName(), CommentCommentActionUrl.class);
}
private void init() throws IOException {
SessionManager.loadSessions();
Runtime.getRuntime().addShutdownHook(new ShutdownHook(clientSocket));
try {
Thread.sleep(100);
} catch (final InterruptedException ex) {
Log.server().warn("Init: Waiting has been interupted.", ex);
}
Log.server().info("Init: Start BloatIt serveur");
providerSocket = new ServerSocket(SCGI_PORT);
}
private void run() throws IOException {
init();
while (true) {
// Wait for connection
Log.server().info("Waiting connection");
// Load the SCGI headers.
clientSocket = providerSocket.accept();
final long startTime = System.nanoTime();
final BufferedInputStream bis = new BufferedInputStream(clientSocket.getInputStream(), 4096);
final Map<String, String> env = SCGIUtils.parse(bis);
final HttpHeader header = new HttpHeader(env);
final HttpPost post = new HttpPost(bis, header.getContentLength());
SessionManager.clearExpiredSessions();
try {
dispatchServer.process(header, post, new HttpResponse(clientSocket.getOutputStream()));
} catch (final FatalErrorException e) {
webPrintException(e);
Log.web().fatal("Unknown Fatal exception", e);
} catch (final SCGIRequestAbordedException e) {
webPrintException(e);
Log.web().info("SCGIUtils request aborded", e);
} catch (final Exception e) {
webPrintException(e);
Log.web().fatal("Unknown exception", e);
}
clientSocket.close();
final long endTime = System.nanoTime();
final double duration = ((endTime - startTime)) / 1000000.;
Log.server().debug("Page generated in " + duration + " ms");
}
}
private void webPrintException(final Exception e) {
final StringBuilder display = new StringBuilder();
display.append("Content-type: text/plain\r\n\r\n");
display.append(e.toString());
display.append(" :\n");
for (final StackTraceElement s : e.getStackTrace()) {
display.append("\t");
display.append(s);
display.append("\n");
}
try {
clientSocket.getOutputStream().write(display.toString().getBytes());
} catch (final IOException e1) {
Log.web().fatal("Cannot send exception through the SCGI soket.", e1);
}
}
private static final class ShutdownHook extends Thread {
private final Socket clientSocket;
public ShutdownHook(final Socket clientSocket) {
super();
this.clientSocket = clientSocket;
}
@Override
public void run() {
// TODO: lock to wait transaction end
try {
if (clientSocket != null) {
clientSocket.close();
}
} catch (final IOException e) {
Log.server().error("Fail to close the socket on shutdown.", e);
}
SessionManager.saveSessions();
}
}
} |
package grundkurs_java;
public class OOP {
public static void main(String[] args) {
Student studi = Student.createStudent();
students(studi);
}
private static void students(Student studi) {
studi.setName("Karla Karlsson");
studi.setNumber(12345);
System.out.println(studi.validateNumber());
System.out.println(studi); // every object has toString()
System.out.println(Student.getCounter()); // every object has toString()
}
}
class Student {
// encapsulation.
private String name;
private int number;
private static int counter = 0;
public static int getCounter() {
return counter;
}
public static Student createStudent() {
counter++;
return new Student();
}
public String toString() {
return name + " (" + number + ')';
}
public int getNumber() {
return this.number;
}
public void setNumber(int num) {
int oldNumber = number;
number = num;
if (!validateNumber())
number = oldNumber;
}
public String getName() {
return this.name;
}
public void setName(String str) {
this.name = str;
}
public boolean validateNumber() {
return (number >= 10000 && number <= 99999 && number % 2 != 0);
}
}
class Test extends OOP {
public static void main(String[] args) {
students(); // not visible
}
} |
package org.opencps.api.controller.util;
import java.io.File;
import java.util.List;
import org.opencps.api.constants.ConstantUtils;
import org.opencps.api.v21.model.ActionConfigList;
import org.opencps.api.v21.model.ActionConfigList.ActionConfig;
import org.opencps.api.v21.model.Actions;
import org.opencps.api.v21.model.Actions.ProcessAction;
import org.opencps.api.v21.model.Configs;
import org.opencps.api.v21.model.Configs.ServiceConfig;
import org.opencps.api.v21.model.DeliverableTypeList;
import org.opencps.api.v21.model.DeliverableTypeList.DeliverableType;
import org.opencps.api.v21.model.DictCollection;
import org.opencps.api.v21.model.DocumentTypeList;
import org.opencps.api.v21.model.DocumentTypeList.DocumentType;
import org.opencps.api.v21.model.DossierTemplate;
import org.opencps.api.v21.model.FileTemplates;
import org.opencps.api.v21.model.FileTemplates.FileTemplate;
import org.opencps.api.v21.model.Groups;
import org.opencps.api.v21.model.Groups.DictGroup;
import org.opencps.api.v21.model.Items;
import org.opencps.api.v21.model.Items.DictItem;
import org.opencps.api.v21.model.MenuConfigList;
import org.opencps.api.v21.model.MenuConfigList.MenuConfig;
import org.opencps.api.v21.model.NotificationTemplateList;
import org.opencps.api.v21.model.NotificationTemplateList.NotificationTemplate;
import org.opencps.api.v21.model.Parts;
import org.opencps.api.v21.model.Parts.DossierPart;
import org.opencps.api.v21.model.PaymentConfigList;
import org.opencps.api.v21.model.PaymentConfigList.PaymentConfig;
import org.opencps.api.v21.model.Processes;
import org.opencps.api.v21.model.Processes.ProcessOption;
import org.opencps.api.v21.model.Sequences;
import org.opencps.api.v21.model.Sequences.ProcessSequence;
import org.opencps.api.v21.model.ServerConfigList;
import org.opencps.api.v21.model.ServerConfigList.ServerConfig;
import org.opencps.api.v21.model.ServiceInfo;
import org.opencps.api.v21.model.ServiceProcess;
import org.opencps.api.v21.model.ServiceProcess.Roles;
import org.opencps.api.v21.model.ServiceProcess.Roles.ProcessRole;
import org.opencps.api.v21.model.StepConfigList;
import org.opencps.api.v21.model.StepConfigList.StepConfig;
import org.opencps.api.v21.model.Steps;
import org.opencps.api.v21.model.Steps.ProcessStep;
import org.opencps.api.v21.model.Steps.ProcessStep.Roles.StepRole;
import org.opencps.api.v21.model.UserManagement;
import org.opencps.communication.action.NotificationTemplateInterface;
import org.opencps.communication.action.impl.NotificationTemplateActions;
import org.opencps.communication.service.ServerConfigLocalServiceUtil;
import org.opencps.datamgt.action.DictcollectionInterface;
import org.opencps.datamgt.action.impl.DictCollectionActions;
import org.opencps.dossiermgt.action.ActionConfigActions;
import org.opencps.dossiermgt.action.DeliverableTypesActions;
import org.opencps.dossiermgt.action.DocumentTypeActions;
import org.opencps.dossiermgt.action.DossierTemplateActions;
import org.opencps.dossiermgt.action.FileUploadUtils;
import org.opencps.dossiermgt.action.MenuConfigActions;
import org.opencps.dossiermgt.action.PaymentConfigActions;
import org.opencps.dossiermgt.action.ServiceConfigActions;
import org.opencps.dossiermgt.action.ServiceInfoActions;
import org.opencps.dossiermgt.action.ServiceProcessActions;
import org.opencps.dossiermgt.action.StepConfigActions;
import org.opencps.dossiermgt.action.impl.ActionConfigActionsImpl;
import org.opencps.dossiermgt.action.impl.DeliverableTypesActionsImpl;
import org.opencps.dossiermgt.action.impl.DocumentTypeActionsImpl;
import org.opencps.dossiermgt.action.impl.DossierTemplateActionsImpl;
import org.opencps.dossiermgt.action.impl.MenuConfigActionsImpl;
import org.opencps.dossiermgt.action.impl.PaymentConfigActionsImpl;
import org.opencps.dossiermgt.action.impl.ServiceConfigActionImpl;
import org.opencps.dossiermgt.action.impl.ServiceInfoActionsImpl;
import org.opencps.dossiermgt.action.impl.ServiceProcessActionsImpl;
import org.opencps.dossiermgt.action.impl.StepConfigActionsImpl;
import org.opencps.dossiermgt.constants.ProcessActionTerm;
import org.opencps.dossiermgt.exception.NoSuchServiceConfigException;
import com.liferay.asset.kernel.exception.DuplicateCategoryException;
import com.liferay.portal.kernel.exception.NoSuchUserException;
import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.repository.model.FileEntry;
import com.liferay.portal.kernel.service.ServiceContext;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.kernel.util.Validator;
import backend.auth.api.exception.UnauthenticationException;
import backend.auth.api.exception.UnauthorizationException;
public class ProcessUpdateDBUtils {
private static Log _log = LogFactoryUtil.getLog(ProcessUpdateDBUtils.class);
//LamTV_Update ActionConfig to DB
/**
 * Rebuilds the ActionConfig table for a site: deletes every existing record
 * for the group, then inserts the entries supplied in {@code actList}.
 * Any failure is logged and swallowed (best-effort import).
 *
 * @param actList parsed action-config list from the import XML (may be null)
 * @param folderPath import folder root (currently unused by this method)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateActionConfig(ActionConfigList actList, String folderPath, long groupId,
        long userId, ServiceContext serviceContext) {
    try {
        ActionConfigActions actions = new ActionConfigActionsImpl();
        // Wipe the existing ActionConfig rows before re-importing.
        boolean cleared = actions.deleteAllActionConfig(groupId, userId, serviceContext);
        if (actList == null || !cleared) {
            return;
        }
        List<ActionConfig> items = actList.getActionConfig();
        if (items == null || items.isEmpty()) {
            return;
        }
        for (ActionConfig item : items) {
            String actionCode = item.getActionCode();
            // actionCode is the natural key; skip entries without one.
            if (!Validator.isNotNull(actionCode)) {
                continue;
            }
            actions.updateActionConfigDB(userId, groupId, actionCode, item.getActionName(),
                    item.isExtraForm(), item.getSampleData(), item.isInsideProcess(),
                    item.getUserNote(), item.getSyncType(), item.getEventType(), item.getInfoType(),
                    item.isRollbackable(), item.getNotificationType(), item.getDocumentType(),
                    item.getFormConfig(), item.getMappingAction());
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Update StepConfig to DB
/**
 * Rebuilds the StepConfig table for a site: deletes all existing records for
 * the group, then inserts the entries from {@code stepList}. Failures are
 * logged and swallowed.
 *
 * @param stepList parsed step-config list from the import XML (may be null)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateStepConfig(StepConfigList stepList, long groupId,
        long userId, ServiceContext serviceContext) {
    try {
        StepConfigActions actions = new StepConfigActionsImpl();
        // Clear existing step configuration before re-importing.
        boolean cleared = actions.deleteAllStepConfig(groupId, userId, serviceContext);
        if (stepList == null || !cleared) {
            return;
        }
        List<StepConfig> items = stepList.getStepConfig();
        if (items == null || items.isEmpty()) {
            return;
        }
        for (StepConfig item : items) {
            String stepCode = item.getStepCode();
            // stepCode is the lookup key; skip entries without one.
            if (!Validator.isNotNull(stepCode)) {
                continue;
            }
            actions.updateStepConfigDB(userId, groupId, stepCode, item.getStepName(),
                    item.getStepType(), item.getDossierStatus(), item.getDossierSubStatus(),
                    item.getMenuGroup(), item.getMenuStepName(), item.getButtonConfig());
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Update MenuConfig to DB
/**
 * Rebuilds the MenuConfig table for a site: deletes all existing records,
 * inserts the entries from {@code menuList}, and wires up the menu roles for
 * each newly created record. Failures are logged and swallowed.
 *
 * @param menuList parsed menu-config list from the import XML (may be null)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateMenuConfig(MenuConfigList menuList, long groupId,
        long userId, ServiceContext serviceContext) {
    try {
        MenuConfigActions actions = new MenuConfigActionsImpl();
        // Clear existing menu configuration before re-importing.
        boolean cleared = actions.deleteAllMenuConfig(groupId, userId, serviceContext);
        if (menuList == null || !cleared) {
            return;
        }
        List<MenuConfig> items = menuList.getMenuConfig();
        if (items == null || items.isEmpty()) {
            return;
        }
        for (MenuConfig item : items) {
            String menuGroup = item.getMenuGroup();
            // menuGroup is the lookup key; skip entries without one.
            if (!Validator.isNotNull(menuGroup)) {
                continue;
            }
            long menuConfigId = actions.updateMenuConfigDB(userId, groupId, menuGroup,
                    item.getMenuName(), item.getOrder(), item.getMenuType(), item.getQueryParams(),
                    item.getTableConfig(), item.getButtonConfig());
            // Only attach roles once the record exists.
            if (menuConfigId > 0) {
                actions.updateMenuRoles(groupId, menuConfigId, item.getRoles());
            }
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Update DocumentType to DB
/**
 * Upserts each document type from {@code docList} into the DB, attaching the
 * report script read from {@code folderPath}/reports/&lt;typeCode&gt;.xml when
 * that file exists. Failures are logged and swallowed.
 *
 * @param docList parsed document-type list from the import XML (may be null)
 * @param folderPath import folder root containing the reports subfolder
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context (unused here but kept for symmetry)
 */
public static void processUpdateDocumentType(DocumentTypeList docList, String folderPath, long groupId,
        long userId, ServiceContext serviceContext) {
    try {
        if (docList == null) {
            return;
        }
        List<DocumentType> items = docList.getDocumentType();
        if (items == null || items.isEmpty()) {
            return;
        }
        for (DocumentType item : items) {
            String typeCode = item.getTypeCode();
            // typeCode is the lookup key; skip entries without one.
            if (!Validator.isNotNull(typeCode)) {
                continue;
            }
            // Report template ships alongside the XML as reports/<typeCode>.xml.
            File xmlfile = new File(folderPath + ConstantUtils.SOURCE_REPORTS + StringPool.FORWARD_SLASH
                    + typeCode + ConstantUtils.EXTENTION_XML);
            String documentScript = StringPool.BLANK;
            if (xmlfile.exists() && !xmlfile.isDirectory()) {
                documentScript = ReadXMLFileUtils.convertFiletoString(xmlfile);
            }
            DocumentTypeActions actions = new DocumentTypeActionsImpl();
            actions.updateDocumentTypeDB(userId, groupId, typeCode, 0, item.getDocumentName(),
                    item.getCodePattern(), item.getDocSync(), documentScript);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Update DeliverableType to DB
/**
 * Upserts each deliverable type from {@code deliTypeList} into the DB,
 * attaching the report script (reports/&lt;typeCode&gt;.xml) and form script
 * (forms/&lt;typeCode&gt;.json) when those files exist under {@code folderPath}.
 * Failures are logged and swallowed.
 *
 * @param deliTypeList parsed deliverable-type list from the import XML (may be null)
 * @param folderPath import folder root containing reports/ and forms/
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context (unused here but kept for symmetry)
 */
public static void processUpdateDeliverableType(DeliverableTypeList deliTypeList, String folderPath, long groupId,
        long userId, ServiceContext serviceContext) {
    try {
        if (deliTypeList == null) {
            return;
        }
        List<DeliverableType> items = deliTypeList.getDeliverableType();
        if (items == null || items.isEmpty()) {
            return;
        }
        for (DeliverableType item : items) {
            String typeCode = item.getTypeCode();
            // typeCode is the lookup key; skip entries without one.
            if (!Validator.isNotNull(typeCode)) {
                continue;
            }
            File xmlFile = new File(folderPath + ConstantUtils.SOURCE_REPORTS + StringPool.FORWARD_SLASH
                    + typeCode + ConstantUtils.EXTENTION_XML);
            File jsonFile = new File(folderPath + ConstantUtils.SOURCE_FORMS + StringPool.FORWARD_SLASH
                    + typeCode + ConstantUtils.EXTENTION_JSON);
            String formReport = StringPool.BLANK;
            String formScript = StringPool.BLANK;
            if (xmlFile.exists() && !xmlFile.isDirectory()) {
                formReport = ReadXMLFileUtils.convertFiletoString(xmlFile);
            }
            if (jsonFile.exists() && !jsonFile.isDirectory()) {
                formScript = ReadXMLFileUtils.convertFiletoString(jsonFile);
            }
            DeliverableTypesActions actions = new DeliverableTypesActionsImpl();
            actions.updateDeliverableTypeDB(userId, groupId, typeCode, item.getTypeName(),
                    item.getCodePattern(), item.getDocSync(), item.getMappingData(),
                    item.getGovAgencies(), formReport, formScript);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Update PaymentConfig to DB
/**
 * Rebuilds the PaymentConfig table for a site: deletes all existing records,
 * then inserts the entries from {@code paymentList}. Failures are logged and
 * swallowed.
 *
 * @param paymentList parsed payment-config list from the import XML (may be null)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdatePaymentConfig(PaymentConfigList paymentList, long groupId, long userId,
        ServiceContext serviceContext) {
    try {
        PaymentConfigActions actions = new PaymentConfigActionsImpl();
        // Clear existing payment configuration before re-importing.
        boolean cleared = actions.deleteAllPaymentConfig(groupId, userId, serviceContext);
        if (paymentList == null || !cleared) {
            return;
        }
        List<PaymentConfig> items = paymentList.getPaymentConfig();
        if (items == null || items.isEmpty()) {
            return;
        }
        for (PaymentConfig item : items) {
            String govAgencyCode = item.getGovAgencyCode();
            // govAgencyCode is the lookup key; skip entries without one.
            if (!Validator.isNotNull(govAgencyCode)) {
                continue;
            }
            actions.updatePaymentConfigDB(userId, groupId, govAgencyCode, item.getGovAgencyName(),
                    item.getGovAgencyTaxNo(), item.getInvoiceTemplateNo(), item.getInvoiceIssueNo(),
                    item.getInvoiceLastNo(), item.getBankInfo(), item.getEpaymentConfig(),
                    serviceContext);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Update ServerConfig to DB
/**
 * Rebuilds the ServerConfig records for a site: deletes all existing records
 * via the local service, then inserts the entries from {@code serverList}.
 * Failures are logged and swallowed.
 *
 * @param serverList parsed server-config list from the import XML (may be null)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateServerConfig(ServerConfigList serverList, long groupId, long userId,
        ServiceContext serviceContext) {
    try {
        // Clear existing server configuration before re-importing.
        ServerConfigLocalServiceUtil.deleteByGroupId(groupId, userId, serviceContext);
        if (serverList == null) {
            return;
        }
        List<ServerConfig> items = serverList.getServerConfig();
        if (items == null || items.isEmpty()) {
            return;
        }
        for (ServerConfig item : items) {
            String govAgencyCode = item.getGovAgencyCode();
            // govAgencyCode is the lookup key; skip entries without one.
            if (!Validator.isNotNull(govAgencyCode)) {
                continue;
            }
            // 0 = create new record; null = no lastSync date.
            ServerConfigLocalServiceUtil.updateServerConfig(groupId, 0, govAgencyCode,
                    item.getServerNo(), item.getServerName(), item.getProtocol(),
                    item.getConfigs(), null, serviceContext);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Update NotificationTemplate to DB
/**
 * Rebuilds the NotificationTemplate table for a site: deletes all existing
 * records, then inserts the entries from {@code notiTempList}. Failures are
 * logged and swallowed.
 *
 * @param notiTempList parsed notification-template list from the import XML (may be null)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateNotificationTemplate(NotificationTemplateList notiTempList, long groupId,
        long userId, ServiceContext serviceContext) {
    try {
        NotificationTemplateInterface actions = new NotificationTemplateActions();
        // Clear existing templates before re-importing.
        boolean cleared = actions.deleteAllNotificationTemplate(groupId, userId, serviceContext);
        if (notiTempList == null || !cleared) {
            return;
        }
        List<NotificationTemplate> items = notiTempList.getNotificationTemplate();
        if (items == null || items.isEmpty()) {
            return;
        }
        for (NotificationTemplate item : items) {
            String notificationType = item.getNotificationType();
            // notificationType is the lookup key; skip entries without one.
            if (!Validator.isNotNull(notificationType)) {
                continue;
            }
            actions.updateNotificationTemplateDB(userId, groupId, notificationType,
                    item.isSendEmail(), item.getEmailSubject(), item.getEmailBody(),
                    item.getTextMessage(), item.isSendSMS(), item.getExpireDuration(),
                    serviceContext);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Update UserManagement to DB
/**
 * Imports the user-management section of the XML: roles are forwarded to the
 * JobPos import and users to the Employee import. Both downstream methods are
 * currently stubs (their bodies are commented out). Failures are logged and
 * swallowed.
 *
 * @param userManagement parsed user-management section (may be null)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateUser(UserManagement userManagement, long groupId, long userId,
        ServiceContext serviceContext) {
    try {
        if (userManagement == null) {
            return;
        }
        org.opencps.api.v21.model.UserManagement.Roles roles = userManagement.getRoles();
        if (roles != null) {
            processUpdateJobPos(userId, groupId, roles, serviceContext);
        }
        org.opencps.api.v21.model.UserManagement.Users users = userManagement.getUsers();
        if (users != null) {
            processUpdateEmployee(userId, groupId, users, serviceContext);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
/**
 * Stub: intended to import Employee records from the user-management section.
 * The entire implementation is commented out, so this method currently does
 * nothing. The {@code throws} clause is kept so existing callers that catch
 * these checked exceptions keep compiling.
 *
 * NOTE(review): either restore the commented-out import logic or remove this
 * stub once the Employee import path is finalized.
 */
private static void processUpdateEmployee(long userId, long groupId,
org.opencps.api.v21.model.UserManagement.Users users, ServiceContext serviceContext) throws NoSuchUserException, UnauthenticationException, UnauthorizationException, DuplicateCategoryException, PortalException {
// List<Employee> employeeList = users.getEmployee();
// if (employeeList != null && employeeList.size() > 0) {
// EmployeeInterface actionEmployee = new EmployeeActions();
// String employeeNo = StringPool.BLANK;
// String fullname = StringPool.BLANK;
// String title = StringPool.BLANK;
// Integer gender = 0;
// String birthdate = StringPool.BLANK;
// String telNo = StringPool.BLANK;
// String email = StringPool.BLANK;
// Integer workingStatus = 0;
// String jobTitle = StringPool.BLANK;
// String roles = StringPool.BLANK;
// for (Employee employee : employeeList) {
// employeeNo = employee.getEmployeeNo();
// fullname = employee.getFullname();
// title = employee.getTitle();
// gender = employee.getGender();
// birthdate = employee.getBirthdate();
// telNo = employee.getTelNo();
// email = employee.getEmail();
// workingStatus = employee.getWorkingStatus();
// jobTitle = employee.getJobTitle();
// roles = employee.getRoles();
// if (Validator.isNotNull(employeeNo)) {
// // Check record exits DB
// actionEmployee.updateEmployeeDB(userId, groupId, employeeNo, fullname, title, gender, birthdate,
// telNo, email, workingStatus, jobTitle, roles, serviceContext);
}
/**
 * Stub: intended to import JobPos (role) records from the user-management
 * section. The entire implementation is commented out, so this method
 * currently does nothing. The {@code throws} clause is kept so existing
 * callers that catch {@link NoSuchUserException} keep compiling.
 *
 * NOTE(review): either restore the commented-out import logic or remove this
 * stub once the JobPos import path is finalized.
 */
private static void processUpdateJobPos(long userId, long groupId,
org.opencps.api.v21.model.UserManagement.Roles roles, ServiceContext serviceContext)
throws NoSuchUserException {
// List<JobPos> jobPosList = roles.getJobPos();
// if (jobPosList != null && jobPosList.size() > 0) {
// JobposInterface actionJob = new JobposActions();
// String jobCode = StringPool.BLANK;
// String title = StringPool.BLANK;
// String description = StringPool.BLANK;
// for (JobPos jobPos : jobPosList) {
// jobCode = jobPos.getCode();
// title = jobPos.getTitle();
// description = jobPos.getDescription();
// if (Validator.isNotNull(jobCode)) {
// // Check record exits DB
// actionJob.updateJobPosDB(userId, groupId, jobCode, title, description, serviceContext);
}
//LamTV_Update Dictcollection to DB
/**
 * Upserts a dictionary collection and, once the collection record exists,
 * imports its items and groups. Failures are logged and swallowed.
 *
 * @param dicts parsed dict-collection section (may be null)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateDictCollection(DictCollection dicts, long groupId, long userId, ServiceContext serviceContext) {
    try {
        if (dicts == null) {
            return;
        }
        String collectionCode = dicts.getCollectionCode();
        _log.info("collectionCode: "+collectionCode);
        DictcollectionInterface actionCollection = new DictCollectionActions();
        long dictCollectionId = actionCollection.updateDictCollectionDB(userId, groupId, collectionCode,
                dicts.getCollectionName(), dicts.getCollectionNameEN(), dicts.getDescription());
        // Items and groups hang off the collection id, so only import them
        // when the collection upsert succeeded.
        if (dictCollectionId > 0) {
            processUpdateDictItem(userId, groupId, dictCollectionId, dicts, actionCollection);
            processUpdateDictGroup(userId, groupId, dictCollectionId, dicts, actionCollection, serviceContext);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_ Process service to DB
/**
 * Upserts a ServiceInfo record from the parsed XML, then imports its attached
 * file templates and service configs. Failures are logged and swallowed.
 *
 * @param service parsed service-info section (may be null)
 * @param folderPath import folder for this service (unused here)
 * @param folderParentPath parent folder containing the shared files/ directory
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateServiceInfo(ServiceInfo service, String folderPath, String folderParentPath,
        long groupId, long userId, ServiceContext serviceContext) {
    try {
        if (service == null) {
            return;
        }
        ServiceInfoActions actionService = new ServiceInfoActionsImpl();
        // Upsert the service-info record itself.
        long serviceInfoId = actionService.updateServiceInfoDB(userId, groupId,
                service.getServiceCode(), service.getServiceName(), service.getProcessText(),
                service.getMethodText(), service.getDossierText(), service.getConditionText(),
                service.getDurationText(), service.getApplicantText(), service.getResultText(),
                service.getRegularText(), service.getFeeText(), service.getAdministrationCode(),
                service.getAdministrationName(), service.getDomainCode(), service.getDomainName(),
                service.getMaxLevel());
        // Attach file templates (uploads referenced files from disk).
        FileTemplates fileTemplate = service.getFileTemplates();
        if (fileTemplate != null) {
            processFileTemplate(userId, groupId, serviceInfoId, fileTemplate, folderParentPath, actionService, serviceContext);
        }
        // Attach the per-agency service configurations.
        Configs configs = service.getConfigs();
        if (configs != null) {
            processServiceConfig(userId, groupId, serviceInfoId, configs, actionService, serviceContext);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_Process service to DB
/**
 * Upserts a DossierTemplate record from the parsed XML, then imports its
 * dossier parts (forms/reports are loaded from disk inside the part import).
 * Failures are logged and swallowed.
 *
 * @param template parsed dossier-template section (may be null)
 * @param folderPath import folder for this template (unused here)
 * @param folderParentPath parent folder containing forms/ and reports/
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateDossierTemplate(DossierTemplate template, String folderPath, String folderParentPath, long groupId,
        long userId, ServiceContext serviceContext) {
    try {
        if (template == null) {
            return;
        }
        String templateNo = template.getTemplateNo();
        DossierTemplateActions actionTemp = new DossierTemplateActionsImpl();
        // Upsert the template record itself.
        actionTemp.updateDossierTemplateDB(userId, groupId, templateNo, template.getTemplateName(),
                template.getDescription(), serviceContext);
        // Import the template's parts, if any were supplied.
        Parts parts = template.getParts();
        if (parts != null) {
            processDossierPart(userId, groupId, parts, actionTemp, folderParentPath, templateNo,
                    serviceContext);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
/**
 * Upserts a ServiceProcess record from the parsed XML, then imports its
 * roles, steps, actions, and sequences (in that order). Failures are logged
 * and swallowed.
 *
 * @param process parsed service-process section (may be null)
 * @param folderPath import folder for this process (unused here)
 * @param groupId site group id
 * @param userId acting user id
 * @param serviceContext Liferay service context
 */
public static void processUpdateServiceProcess(ServiceProcess process, String folderPath, long groupId,
        long userId, ServiceContext serviceContext) {
    try {
        if (process == null) {
            return;
        }
        ServiceProcessActions actionService = new ServiceProcessActionsImpl();
        // Upsert the service-process record itself.
        long serviceProcessId = actionService.updateServiceProcessDB(userId, groupId,
                process.getProcessNo(), process.getProcessName(), process.getDescription(),
                process.getDurationCount(), process.getDurationUnit(), process.isGeneratePassword(),
                process.getServerNo(), process.getServerName(), process.getDossierNoPattern(),
                process.getDueDatePattern(), serviceContext);
        // Import the process roles.
        Roles processRoles = process.getRoles();
        if (processRoles != null) {
            processProcessRole(userId, groupId, serviceProcessId, processRoles, actionService, serviceContext);
        }
        // Import the process steps.
        Steps steps = process.getSteps();
        if (steps != null) {
            processProcessStep(userId, groupId, serviceProcessId, steps, actionService, serviceContext);
        }
        // Import the process actions (transitions between steps).
        Actions actions = process.getActions();
        if (actions != null) {
            processProcessAction(userId, groupId, serviceProcessId, actions, actionService, serviceContext);
        }
        // Import the process sequences.
        Sequences sequences = process.getSequences();
        if (sequences != null) {
            processProcessSequence(userId, groupId, serviceProcessId, sequences, actionService, serviceContext);
        }
    } catch (Exception e) {
        _log.error(e);
    }
}
//LamTV_ Process output ServiceFileTemplate to DB
/**
 * Re-imports the ServiceFileTemplate records for a service info: deletes all
 * existing records for the service, then, for each template in the XML whose
 * referenced file exists under {@code folderParentPath}/files/, uploads the
 * file and stores the resulting file-entry id with the template record.
 * Templates whose file is missing on disk or whose upload fails are skipped.
 *
 * @param userId acting user id
 * @param groupId site group id
 * @param serviceInfoId owner ServiceInfo primary key
 * @param fileTemplate parsed file-template list (caller guarantees non-null)
 * @param folderParentPath parent folder containing the files/ directory
 * @param actionService service-info action facade
 * @param serviceContext Liferay service context
 */
private static void processFileTemplate(long userId, long groupId, long serviceInfoId, FileTemplates fileTemplate,
        String folderParentPath, ServiceInfoActions actionService, ServiceContext serviceContext) {
    // Remove existing ServiceFileTemplate rows before re-adding.
    boolean flagTemplate = actionService.deleteAllFileTemplate(userId, groupId, serviceInfoId, serviceContext);
    List<FileTemplate> fileTempList = fileTemplate.getFileTemplate();
    if (fileTempList == null || fileTempList.isEmpty() || !flagTemplate) {
        return;
    }
    for (FileTemplate fileTemp : fileTempList) {
        String fileTemplateNo = fileTemp.getFileTemplateNo();
        String fileTemplateName = fileTemp.getTemplateName();
        String fileName = fileTemp.getFilename();
        if (!Validator.isNotNull(fileName)) {
            continue;
        }
        String filePathTemplate = folderParentPath + ConstantUtils.SOURCE_FILES + StringPool.FORWARD_SLASH
                + fileName;
        File file = new File(filePathTemplate);
        FileEntry fileEntry = null;
        if (file.exists() && !file.isDirectory()) {
            try {
                fileEntry = FileUploadUtils.uploadDossierFile(userId, groupId, file, fileName, serviceContext);
            } catch (Exception e) {
                // FIX: was e.printStackTrace() — route through the class logger
                // like every other error path in this class.
                _log.error(e);
            }
        }
        // Only persist the template row when the upload produced a file entry.
        if (fileEntry != null) {
            actionService.updateServiceFileTemplateDB(serviceInfoId, fileTemplateNo, fileTemplateName,
                    fileName, fileEntry.getFileEntryId());
        }
    }
}
//LamTV_Process output ServiceConfig to DB
/**
 * Re-imports the ServiceConfig records for a service info: deletes all
 * existing records, then upserts each config from the XML and imports its
 * process options.
 *
 * @param userId acting user id
 * @param groupId site group id
 * @param serviceInfoId owner ServiceInfo primary key
 * @param configs parsed service-config list (caller guarantees non-null)
 * @param actionService service-info action facade
 * @param serviceContext Liferay service context
 * @throws NoSuchServiceConfigException if a referenced config cannot be found
 */
private static void processServiceConfig(long userId, long groupId, long serviceInfoId, Configs configs,
        ServiceInfoActions actionService, ServiceContext serviceContext) throws NoSuchServiceConfigException {
    // Remove existing ServiceConfig rows before re-adding.
    boolean flagConfig = actionService.deleteAllServiceConfig(userId, groupId, serviceInfoId, serviceContext);
    List<ServiceConfig> configList = configs.getServiceConfig();
    if (configList == null || configList.isEmpty() || !flagConfig) {
        return;
    }
    // FIX: the action impl is stateless and loop-invariant — construct it once
    // instead of once per iteration (was inside the for loop).
    ServiceConfigActions actionConfig = new ServiceConfigActionImpl();
    for (ServiceConfig config : configList) {
        long serviceConfigId = actionConfig.updateServiceConfigDB(userId, groupId, serviceInfoId,
                config.getGovAgencyCode(), config.getGovAgencyName(), config.getServiceInstruction(),
                config.getServiceLevel(), config.getServiceUrl(), config.isForCitizen(),
                config.isForBusiness(), config.isPostalService(), config.isRegistration(),
                serviceContext);
        // Process options hang off the config id, so only import them when the
        // config upsert succeeded.
        if (serviceConfigId > 0) {
            Processes process = config.getProcesses();
            if (process != null) {
                processProcessOption(userId, groupId, serviceConfigId, process, actionConfig,
                        serviceContext);
            }
        }
    }
}
//LamTV_Process output ProcessOption to DB
/**
 * Upserts every ProcessOption attached to a service config. Each option links
 * a dossier template and a service process to the config.
 *
 * @param userId acting user id
 * @param groupId site group id
 * @param serviceConfigId owner ServiceConfig primary key
 * @param process parsed process-option list (caller guarantees non-null)
 * @param actionConfig service-config action facade
 * @param serviceContext Liferay service context
 */
private static void processProcessOption(long userId, long groupId, long serviceConfigId, Processes process,
        ServiceConfigActions actionConfig, ServiceContext serviceContext) {
    List<ProcessOption> optionList = process.getProcessOption();
    if (optionList == null || optionList.isEmpty()) {
        return;
    }
    _log.info("optionList: "+optionList.size());
    for (ProcessOption option : optionList) {
        actionConfig.updateOptionDB(userId, groupId, option.getOptionCode(), option.getOptionName(),
                serviceConfigId, option.getSeqOrder(), option.getAutoSelect(),
                option.getInstructionNote(), option.getSubmissionNote(), option.getTemplateNo(),
                option.getTemplateName(), option.getProcessNo(), option.getProcessName(),
                option.getRegisterBookCode(), serviceContext);
    }
}
//LamTV_Process DossierPart to DB
/**
 * Re-imports the DossierPart records of a dossier template: deletes all
 * existing parts for the template, then upserts each part from the XML. For
 * eForm parts the form report ({@code reports/<templateNo>_<partNo>.xml}) and
 * form script ({@code forms/<templateNo>_<partNo>.json}) are read from disk
 * when present.
 *
 * @param userId acting user id
 * @param groupId site group id
 * @param parts parsed dossier-part list (caller guarantees non-null)
 * @param actionTemp dossier-template action facade
 * @param folderParentPath parent folder containing forms/ and reports/
 * @param templateNo owning template number (part of the file naming scheme)
 * @param serviceContext Liferay service context
 * @throws PortalException on persistence failure
 */
private static void processDossierPart(long userId, long groupId, Parts parts, DossierTemplateActions actionTemp,
        String folderParentPath, String templateNo, ServiceContext serviceContext) throws PortalException {
    // Remove existing DossierPart rows before re-adding.
    boolean flagPart = actionTemp.deleteAllDossierPart(userId, groupId, templateNo, serviceContext);
    List<DossierPart> dossierPartList = parts.getDossierPart();
    if (dossierPartList == null || dossierPartList.isEmpty() || !flagPart) {
        return;
    }
    for (DossierPart dossierPart : dossierPartList) {
        String partNo = dossierPart.getPartNo();
        boolean eForm = dossierPart.isEForm();
        // FIX: declared per iteration. Previously these were declared outside
        // the loop and, in the eForm branch, only assigned when the template
        // files existed — so a part with missing files silently inherited the
        // previous part's formScript/formReport.
        String formScript = StringPool.BLANK;
        String formReport = StringPool.BLANK;
        if (eForm) {
            _log.info("eform: "+eForm);
            String filePathReport = folderParentPath + ConstantUtils.SOURCE_REPORTS + StringPool.FORWARD_SLASH
                    + templateNo + StringPool.UNDERLINE + partNo + ConstantUtils.EXTENTION_XML;
            String filePathForm = folderParentPath + ConstantUtils.SOURCE_FORMS + StringPool.FORWARD_SLASH
                    + templateNo + StringPool.UNDERLINE + partNo + ConstantUtils.EXTENTION_JSON;
            File xmlFile = new File(filePathReport);
            File jsonFile = new File(filePathForm);
            if (xmlFile.exists() && !xmlFile.isDirectory()) {
                formReport = ReadXMLFileUtils.convertFiletoString(xmlFile);
            }
            if (jsonFile.exists() && !jsonFile.isDirectory()) {
                formScript = ReadXMLFileUtils.convertFiletoString(jsonFile);
            }
        }
        actionTemp.updateDossierPartDB(userId, groupId, templateNo, partNo, dossierPart.getPartName(),
                dossierPart.getPartTip(), dossierPart.getPartType(), dossierPart.isMultiple(),
                formScript, formReport, dossierPart.isRequired(), dossierPart.isEsign(),
                dossierPart.getFileTemplateNo(), dossierPart.getDeliverableType(),
                dossierPart.getDeliverableAction(), eForm, dossierPart.getSampleData(),
                serviceContext);
    }
}
//LamTV_Process output ProcessAction to DB
/**
 * Re-imports the ProcessAction records of a service process: deletes all
 * existing actions for the process, then upserts each action from the XML.
 *
 * @param userId acting user id
 * @param groupId site group id
 * @param serviceProcessId owner ServiceProcess primary key
 * @param actions parsed process-action list (caller guarantees non-null)
 * @param actionService service-process action facade
 * @param serviceContext Liferay service context
 * @throws PortalException on persistence failure
 */
private static void processProcessAction(long userId, long groupId, long serviceProcessId, Actions actions,
        ServiceProcessActions actionService, ServiceContext serviceContext) throws PortalException {
    // Remove existing ProcessAction rows before re-adding.
    boolean cleared = actionService.deleteAllProcessAction(userId, groupId, serviceProcessId, serviceContext);
    List<ProcessAction> items = actions.getProcessAction();
    if (items == null || items.isEmpty() || !cleared) {
        return;
    }
    for (ProcessAction item : items) {
        actionService.updateProcessActionDB(userId, groupId, serviceProcessId, item.getActionCode(),
                item.getActionName(), item.getPreStepCode(), item.getPostStepCode(),
                item.getAutoEvent(), item.getPreCondition(), item.getAllowAssignUser(),
                item.getAssignUserId(), item.getAssignUserName(), item.getRequestPayment(),
                item.getPaymentFee(), item.getCreateDossierFiles(), item.getReturnDossierFiles(),
                item.isESignature(), item.getSignatureType(), item.getCreateDossiers(),
                serviceContext);
    }
}
/**
 * Replaces all ProcessRole records of a service process with the roles parsed
 * from the import payload (LamTV: process output ProcessRole to DB).
 *
 * All existing roles are deleted first; new rows are only written when that
 * wipe succeeds and the payload contains at least one role.
 */
private static void processProcessRole(long userId, long groupId, long serviceProcessId, Roles processRoles,
        ServiceProcessActions actionService, ServiceContext serviceContext) {
    // Delete every existing ProcessRole of this service process before re-importing
    boolean flagProRole = actionService.deleteAllProcessRole(userId, groupId, serviceProcessId, serviceContext);
    List<ProcessRole> processRoleList = processRoles.getProcessRole();
    // Only re-create roles when the wipe succeeded and the payload has entries
    if (flagProRole && processRoleList != null && !processRoleList.isEmpty()) {
        for (ProcessRole processRole : processRoleList) {
            // Per-iteration locals instead of mutable variables shared across iterations
            String roleCode = processRole.getRoleCode();
            String roleName = processRole.getRoleName();
            boolean moderator = processRole.isModerator();
            String condition = processRole.getCondition();
            if (Validator.isNotNull(roleCode)) {
                // Resolve the persistent roleId from its code before writing the row
                long roleId = actionService.getByRoleCode(groupId, roleCode);
                actionService.updateServiceProcessRoleDB(userId, groupId, serviceProcessId, roleId, roleCode,
                        roleName, moderator, condition, serviceContext);
            }
        }
    }
}
/**
 * Replaces all ProcessStep records of a service process with the steps parsed
 * from the import payload (LamTV: process output ProcessStep to DB), and
 * recursively imports each step's roles via {@link #processStepRole}.
 *
 * @throws PortalException propagated from the step/role persistence services
 */
private static void processProcessStep(long userId, long groupId, long serviceProcessId, Steps steps,
        ServiceProcessActions actionService, ServiceContext serviceContext) throws PortalException {
    // Delete every existing ProcessStep of this service process before re-importing
    boolean flagStep = actionService.deleteAllProcessStep(userId, groupId, serviceProcessId, serviceContext);
    _log.info("flagStep: "+flagStep);
    List<ProcessStep> proStepList = steps.getProcessStep();
    // Only re-create steps when the wipe succeeded and the payload has entries
    if (flagStep && proStepList != null && !proStepList.isEmpty()) {
        for (ProcessStep step : proStepList) {
            // Per-iteration locals instead of mutable variables shared across iterations
            String stepCode = step.getStepCode();
            String stepName = step.getStepName();
            String sequenceNo = step.getSequenceNo();
            String groupName = step.getGroupName();
            String dossierStatus = step.getDossierStatus();
            String dossierSubStatus = step.getDossierSubStatus();
            Integer durationCount = step.getDurationCount();
            String instructionNote = step.getInstructionNote();
            String briefNote = step.getBriefNote();
            String roleAsStep = step.getRoleAsStep();
            long processStepId = actionService.updateProcessStepDB(userId, groupId, serviceProcessId, stepCode,
                    stepName, sequenceNo, groupName, dossierStatus, dossierSubStatus, durationCount,
                    instructionNote, briefNote, roleAsStep, serviceContext);
            // Import the roles nested under this step, if any
            org.opencps.api.v21.model.Steps.ProcessStep.Roles stepRoles = step.getRoles();
            if (stepRoles != null) {
                processStepRole(userId, groupId, processStepId, stepRoles, actionService,
                        serviceContext);
            }
        }
    }
}
/**
 * Replaces all ProcessSequence records of a service process with the sequences
 * parsed from the import payload (LamTV: process output ProcessSequence to DB).
 *
 * @throws PortalException propagated from the sequence persistence service
 */
private static void processProcessSequence(long userId, long groupId, long serviceProcessId, Sequences sequences,
        ServiceProcessActions actionService, ServiceContext serviceContext) throws PortalException {
    // Delete every existing ProcessSequence of this service process before re-importing
    boolean flagSequence = actionService.deleteAllProcessSequence(userId, groupId, serviceProcessId, serviceContext);
    List<ProcessSequence> sequenceList = sequences.getProcessSequence();
    // Only re-create sequences when the wipe succeeded and the payload has entries
    if (flagSequence && sequenceList != null && !sequenceList.isEmpty()) {
        for (ProcessSequence sequence : sequenceList) {
            // Per-iteration locals instead of mutable variables shared across iterations
            String sequenceNo = sequence.getSequenceNo();
            String sequenceName = sequence.getSequenceName();
            String sequenceRole = sequence.getSequenceRole();
            Integer durationCount = sequence.getDurationCount();
            actionService.updateProcessSequenceDB(userId, groupId, serviceProcessId, sequenceNo, sequenceName,
                    sequenceRole, durationCount, serviceContext);
        }
    }
}
/**
 * Imports the role assignments of a single process step (LamTV: process output
 * ProcessStepRole to DB). Rows without a role code are skipped.
 */
private static void processStepRole(long userId, long groupId, long processStepId,
        org.opencps.api.v21.model.Steps.ProcessStep.Roles stepRoles, ServiceProcessActions actionService,
        ServiceContext serviceContext) {
    List<StepRole> stepRoleList = stepRoles.getStepRole();
    if (stepRoleList != null && !stepRoleList.isEmpty()) {
        for (StepRole stepRole : stepRoleList) {
            // Per-iteration locals instead of mutable variables shared across iterations
            String roleCode = stepRole.getRoleCode();
            String roleName = stepRole.getRoleName();
            boolean moderator = stepRole.isModerator();
            String condition = stepRole.getCondition();
            if (Validator.isNotNull(roleCode)) {
                // Resolve the persistent roleId from its code before writing the row
                long roleId = actionService.getByRoleCode(groupId, roleCode);
                actionService.updateProcessStepRoleDB(userId, groupId, processStepId, roleId, roleCode, roleName,
                        moderator, condition, serviceContext);
            }
        }
    }
}
/**
 * Replaces all DictItem records of a dictionary collection with the items
 * parsed from the import payload (LamTV: process DictItem). Each item's parent
 * is resolved by its item code within the same collection.
 */
private static void processUpdateDictItem(long userId, long groupId, long dictCollectionId, DictCollection dicts,
        DictcollectionInterface actionCollection) {
    Items itemList = dicts.getItems();
    if (itemList != null) {
        // Delete every existing DictItem of this collection before re-importing
        boolean flagItem = actionCollection.deleteAllDictItem(userId, groupId, dictCollectionId);
        _log.info("flagItem: "+flagItem);
        List<DictItem> dictItemList = itemList.getDictItem();
        // Only re-create items when the wipe succeeded and the payload has entries
        if (flagItem && dictItemList != null && !dictItemList.isEmpty()) {
            for (DictItem dictItem : dictItemList) {
                // Per-iteration locals instead of mutable variables shared across iterations
                String itemCode = dictItem.getItemCode();
                String itemName = dictItem.getItemName();
                String itemNameEN = dictItem.getItemNameEN();
                String itemDescription = dictItem.getItemDescription();
                String parent = dictItem.getParent();
                Integer level = dictItem.getLevel();
                Integer sibling = dictItem.getSibling();
                String metadata = dictItem.getMetadata();
                // Resolve the parent item's id from its code (0/none when absent)
                long dictItemParentId = actionCollection.getDictItemByItemCode(dictCollectionId, parent, groupId);
                actionCollection.updateDictItemDB(userId, groupId, dictCollectionId, itemCode, itemName, itemNameEN,
                        itemDescription, dictItemParentId, level, sibling, metadata);
            }
        }
    }
}
/**
 * Replaces all DictGroup records of a dictionary collection with the groups
 * parsed from the import payload (LamTV: process DictGroup).
 */
private static void processUpdateDictGroup(long userId, long groupId, long dictCollectionId, DictCollection dicts,
        DictcollectionInterface actionCollection, ServiceContext serviceContext) {
    Groups groupList = dicts.getGroups();
    if (groupList != null) {
        // Delete every existing DictGroup of this collection before re-importing
        boolean flagGroup = actionCollection.deleteAllDictGroup(userId, groupId, dictCollectionId);
        List<DictGroup> dictGroupList = groupList.getDictGroup();
        // Only re-create groups when the wipe succeeded and the payload has entries
        if (flagGroup && dictGroupList != null && !dictGroupList.isEmpty()) {
            for (DictGroup dictGroup : dictGroupList) {
                // Per-iteration locals instead of mutable variables shared across iterations
                String groupCode = dictGroup.getGroupCode();
                String groupName = dictGroup.getGroupName();
                String groupNameEN = dictGroup.getGroupNameEN();
                String groupDescription = dictGroup.getGroupDescription();
                actionCollection.updateDictGroupDB(userId, groupId, dictCollectionId, groupCode, groupName, groupNameEN,
                        groupDescription, serviceContext);
            }
        }
    }
}
} |
package org.hibernate.ogm.datastore.neo4j.test;
import java.io.File;
import java.net.URL;
import java.util.Properties;
import org.fest.util.Files;
import org.hibernate.HibernateException;
import org.hibernate.ogm.datastore.neo4j.Neo4jProperties;
import org.hibernate.ogm.datastore.neo4j.embedded.impl.EmbeddedNeo4jGraphDatabaseFactory;
import org.hibernate.ogm.datastore.neo4j.utils.EmbeddedNeo4jTestHelperDelegate;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Test that it is possible to create and initialize the {@link EmbeddedNeo4jGraphDatabaseFactory} without exceptions.
*
* @author Davide D'Alto <davide@hibernate.org>
*/
public class EmbeddedGraphDatabaseFactoryTest {
String dbLocation = null;
@Before
public void setup() {
dbLocation = EmbeddedNeo4jTestHelperDelegate.dbLocation();
}
@After
public void tearDown() {
Files.delete( new File( dbLocation ) );
}
@Test
public void testLoadPropertiesFromUrl() throws Exception {
EmbeddedNeo4jGraphDatabaseFactory factory = new EmbeddedNeo4jGraphDatabaseFactory();
Properties properties = new Properties();
properties.put( Neo4jProperties.DATABASE_PATH, dbLocation );
properties.put( Neo4jProperties.CONFIGURATION_RESOURCE_NAME, neo4jPropertiesUrl().toExternalForm() );
factory.initialize( properties );
factory.create().shutdown();
}
@Test
public void testLoadPropertiesFromFilePath() throws Exception {
EmbeddedNeo4jGraphDatabaseFactory factory = new EmbeddedNeo4jGraphDatabaseFactory();
Properties properties = new Properties();
properties.put( Neo4jProperties.DATABASE_PATH, dbLocation );
properties.put( Neo4jProperties.CONFIGURATION_RESOURCE_NAME, neo4jPropertiesUrl().getFile() );
factory.initialize( properties );
factory.create().shutdown();
}
@Test(expected = HibernateException.class)
public void testLoadMalformedPropertiesLocation() throws Exception {
EmbeddedNeo4jGraphDatabaseFactory factory = new EmbeddedNeo4jGraphDatabaseFactory();
Properties properties = new Properties();
properties.put( Neo4jProperties.DATABASE_PATH, dbLocation );
properties.put( Neo4jProperties.CONFIGURATION_RESOURCE_NAME, "aKDJSAGFKJAFLASFlaLfsfaf" );
factory.initialize( properties );
factory.create().shutdown();
}
private URL neo4jPropertiesUrl() {
return Thread.currentThread().getContextClassLoader().getResource( "neo4j-embedded-test.properties" );
}
} |
import javax.swing.*;
import java.awt.*;
public class GUI {
public static void main(String[] args){
SwingUtilities.invokeLater( new Runnable() {
@Override
public void run () {
new MainFrame("Flugsimulator");
}
});
}
} |
package org.nuxeo.ecm.platform.ui.web.restAPI;
import static org.jboss.seam.ScopeType.EVENT;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import org.apache.myfaces.custom.fileupload.UploadedFileConverter;
import org.apache.myfaces.custom.fileupload.UploadedFileDefaultFileImpl;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.nuxeo.ecm.core.api.Blob;
import org.nuxeo.ecm.core.api.ClientException;
import org.nuxeo.ecm.core.api.CoreSession;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.IdRef;
import org.nuxeo.ecm.core.api.impl.blob.StreamingBlob;
import org.nuxeo.ecm.platform.mimetype.interfaces.MimetypeRegistry;
import org.nuxeo.ecm.platform.ui.web.api.NavigationContext;
import org.nuxeo.ecm.platform.ui.web.tag.fn.LiveEditConstants;
import org.nuxeo.ecm.platform.util.RepositoryLocation;
import org.nuxeo.runtime.api.Framework;
import org.restlet.data.Request;
import org.restlet.data.Response;
/**
* Restlet to help LiveEdit clients update the blob content of a document
*
* @author Sun Tan <stan@nuxeo.com>
* @author Olivier Grisel <ogrisel@nuxeo.com>
*/
@Name("uploadFileRestlet")
@Scope(EVENT)
public class UploadFileRestlet extends BaseNuxeoRestlet implements
LiveEditConstants {
@In(create = true)
protected NavigationContext navigationContext;
protected CoreSession documentManager;
@Override
public void handle(Request req, Response res) {
String repo = (String) req.getAttributes().get("repo");
String docid = (String) req.getAttributes().get("docid");
String fileName = (String) req.getAttributes().get("filename");
try {
fileName = URLDecoder.decode(fileName, URL_ENCODE_CHARSET);
} catch (UnsupportedEncodingException e) {
handleError(res, e);
return;
}
if (repo == null || repo.equals("*")) {
handleError(res, "you must specify a repository");
return;
}
DocumentModel dm = null;
try {
navigationContext.setCurrentServerLocation(new RepositoryLocation(
repo));
documentManager = navigationContext.getOrCreateDocumentManager();
if (docid != null) {
dm = documentManager.getDocument(new IdRef(docid));
}
} catch (ClientException e) {
handleError(res, e);
return;
}
// find the names of the fields from the optional request parameters
// with fallback to defaults if none is provided
String schemaName = getQueryParamValue(req, SCHEMA, DEFAULT_SCHEMA);
String blobFieldName = getQueryParamValue(req, BLOB_FIELD,
DEFAULT_BLOB_FIELD);
String filenameFieldName = getQueryParamValue(req, FILENAME_FIELD,
DEFAULT_FILENAME_FIELD);
try {
// the stream can be read only once, thus save it first into the
// core before attempting to detect the mimetype that might rely on
// binary sniffing
Blob blob = StreamingBlob.createFromStream(req.getEntity().getStream());
dm.setProperty(schemaName, blobFieldName, blob);
dm.setProperty(schemaName, filenameFieldName, fileName);
dm = documentManager.saveDocument(dm);
// ask the mimetype service for the blob mimetype first according to
// filename extension with a fallback to binary sniffing
blob = (Blob) dm.getProperty(schemaName, blobFieldName);
MimetypeRegistry mimeService = Framework.getService(MimetypeRegistry.class);
String mimetype = mimeService.getMimetypeFromFilenameAndBlobWithDefault(
fileName, blob, "application/octet-stream");
blob.setMimeType(mimetype);
// reset the blob source before saving back since it might have
// exhausted by the binary sniffing of the MimetypeRegistry service
// (NB: LazyBlobs fetched from the Core are always resetable)
blob.getStream().reset();
// re-save the blob with the detected mimetype
dm.setProperty(schemaName, blobFieldName, blob);
documentManager.saveDocument(dm);
documentManager.save();
} catch (Exception e) {
handleError(res, e);
}
}
} |
package org.opencb.opencga.storage.variant.mongodb;
import com.mongodb.*;
import java.net.UnknownHostException;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.opencb.biodata.models.variant.ArchivedVariantFile;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.VariantSource;
import org.opencb.biodata.models.variant.effect.ConsequenceTypeMappings;
import org.opencb.biodata.models.variant.effect.VariantEffect;
import org.opencb.opencga.lib.auth.MongoCredentials;
import org.opencb.opencga.storage.variant.VariantDBWriter;
/**
* @author Alejandro Aleman Ramos <aaleman@cipf.es>
* @author Cristina Yenyxe Gonzalez Garcia <cyenyxe@ebi.ac.uk>
*/
public class VariantMongoWriter extends VariantDBWriter {

    // Suggested batch sizes for callers deciding how many variants to buffer per write() call
    public static final int CHUNK_SIZE_SMALL = 1000;
    public static final int CHUNK_SIZE_BIG = 10000;

    // Source file whose variants are being loaded; used to filter ArchivedVariantFiles in buildBatchRaw
    private VariantSource file;

    private MongoClient mongoClient;
    private DB db;

    private String filesCollectionName;
    private String variantsCollectionName;
    private DBCollection filesCollection;
    private DBCollection variantsCollection;

    // Per-batch caches, cleared at the end of writeBatch:
    // storage id -> variant document, and "storageId_fileId" -> file sub-document
    private Map<String, DBObject> mongoMap;
    private Map<String, DBObject> mongoFileMap;

    private MongoCredentials credentials;

    // Feature toggles controlling how much data is serialized per variant
    private boolean includeStats;
    private boolean includeEffect;
    private boolean includeSamples;

    // Sample names, populated once from the first variant that carries sample data
    private List<String> samples;

    // Running tally of consequence-type term occurrences (term -> count)
    private Map<String, Integer> conseqTypes;

    // Converters between model objects and their MongoDB (DBObject) representation
    private DBObjectToVariantConverter variantConverter;
    private DBObjectToVariantStatsConverter statsConverter;
    private DBObjectToVariantSourceConverter sourceConverter;
    private DBObjectToArchivedVariantFileConverter archivedVariantFileConverter;

    // Total variants written so far, used for progress logging
    private long numVariantsWritten;

    /**
     * Creates a writer using the default collection names "variants" and "files".
     */
    public VariantMongoWriter(VariantSource source, MongoCredentials credentials) {
        this(source, credentials, "variants", "files");
    }

    /**
     * Creates a writer with custom collection names; samples, stats and effects
     * are all excluded by default.
     */
    public VariantMongoWriter(VariantSource source, MongoCredentials credentials, String variantsCollection, String filesCollection) {
        this(source, credentials, variantsCollection, filesCollection, false, false, false);
    }

    /**
     * Full constructor.
     *
     * @param source            the variant source (file) being loaded
     * @param credentials       MongoDB connection credentials; must not be null
     * @param variantsCollection name of the collection storing variants
     * @param filesCollection   name of the collection storing file summaries
     * @param includeSamples    whether per-sample data is serialized
     * @param includeStats      whether variant statistics are serialized
     * @param includeEffect     whether variant effect annotations are serialized
     * @throws IllegalArgumentException if credentials is null
     */
    public VariantMongoWriter(VariantSource source, MongoCredentials credentials, String variantsCollection, String filesCollection,
            boolean includeSamples, boolean includeStats, boolean includeEffect) {
        if (credentials == null) {
            throw new IllegalArgumentException("Credentials for accessing the database must be specified");
        }
        this.file = source;
        this.credentials = credentials;
        this.filesCollectionName = filesCollection;
        this.variantsCollectionName = variantsCollection;
        this.mongoMap = new HashMap<>();
        this.mongoFileMap = new HashMap<>();

        this.includeSamples = includeSamples;
        this.includeStats = includeStats;
        this.includeEffect = includeEffect;

        conseqTypes = new LinkedHashMap<>();
        samples = new ArrayList<>();

        setConverters(this.includeStats, this.includeSamples, this.includeEffect);

        numVariantsWritten = 0;
    }

    /**
     * Opens the MongoDB connection described by the credentials.
     *
     * @return true if the database handle was obtained, false on unknown host
     */
    @Override
    public boolean open() {
        try {
            // Mongo configuration
            ServerAddress address = new ServerAddress(credentials.getMongoHost(), credentials.getMongoPort());
            if (credentials.getMongoCredentials() != null) {
                mongoClient = new MongoClient(address, Arrays.asList(credentials.getMongoCredentials()));
            } else {
                mongoClient = new MongoClient(address);
            }
            db = mongoClient.getDB(credentials.getMongoDbName());
        } catch (UnknownHostException ex) {
            Logger.getLogger(VariantMongoWriter.class.getName()).log(Level.SEVERE, null, ex);
            return false;
        }

        return db != null;
    }

    /**
     * Resolves the two collections; Mongo creates them lazily on first write.
     */
    @Override
    public boolean pre() {
        // Mongo collection creation
        filesCollection = db.getCollection(filesCollectionName);
        variantsCollection = db.getCollection(variantsCollectionName);

        return variantsCollection != null && filesCollection != null;
    }

    /** Convenience single-variant write; delegates to the batch overload. */
    @Override
    public boolean write(Variant variant) {
        return write(Arrays.asList(variant));
    }

    /**
     * Writes a batch: builds the raw documents, optionally attaches effects,
     * (re)creates indexes, then persists everything.
     */
    @Override
    public boolean write(List<Variant> data) {
        buildBatchRaw(data);
        if (this.includeEffect) {
            buildEffectRaw(data);
        }
        buildBatchIndex(data);
        return writeBatch(data);
    }

    /**
     * Builds the in-memory DBObjects for each variant in the batch.
     *
     * If the variant already exists in the collection, only a stub document
     * (containing just "_id") is cached, so writeBatch later appends the file
     * sub-documents to the existing record instead of inserting a new one.
     */
    @Override
    protected boolean buildBatchRaw(List<Variant> data) {
        for (Variant v : data) {
            // Check if this variant is already stored
            String rowkey = variantConverter.buildStorageId(v);
            DBObject mongoVariant = new BasicDBObject("_id", rowkey);

            if (variantsCollection.count(mongoVariant) == 0) {
                mongoVariant = variantConverter.convertToStorageType(v);
            } /*else {
                System.out.println("Variant " + v.getChromosome() + ":" + v.getStart() + "-" + v.getEnd() + " already found");
            }*/

            BasicDBList mongoFiles = new BasicDBList();
            for (ArchivedVariantFile archiveFile : v.getFiles().values()) {
                // Only serialize data belonging to the source file being loaded
                if (!archiveFile.getFileId().equals(file.getFileId())) {
                    continue;
                }

                if (this.includeSamples && samples.isEmpty() && archiveFile.getSamplesData().size() > 0) {
                    // First time a variant is loaded, the list of samples is populated.
                    // This guarantees that samples are loaded only once to keep order among variants,
                    // and that they are loaded before needed by the ArchivedVariantFileConverter
                    samples.addAll(archiveFile.getSampleNames());
                }

                DBObject mongoFile = archivedVariantFileConverter.convertToStorageType(archiveFile);
                mongoFiles.add(mongoFile);
                mongoFileMap.put(rowkey + "_" + archiveFile.getFileId(), mongoFile);
            }

            mongoVariant.put(DBObjectToVariantConverter.FILES_FIELD, mongoFiles);
            mongoMap.put(rowkey, mongoVariant);
        }

        return true;
    }

    /**
     * Attaches effect annotations ("effects") and query-optimization fields
     * ("_at.gn" gene names, "_at.ct" consequence-type terms) to the cached
     * variant documents. Variants already present in the collection (stub
     * documents without a chromosome field) are skipped.
     *
     * NOTE(review): returns false unconditionally, unlike the other build
     * methods which return true — confirm whether callers rely on this.
     */
    @Override
    protected boolean buildEffectRaw(List<Variant> variants) {
        for (Variant v : variants) {
            DBObject mongoVariant = mongoMap.get(variantConverter.buildStorageId(v));

            if (!mongoVariant.containsField(DBObjectToVariantConverter.CHROMOSOME_FIELD)) {
                // TODO It means that the same position was already found in this file, so __for now__ it won't be processed again
                continue;
            }

            Set<String> genesSet = new HashSet<>();
            Set<String> soSet = new HashSet<>();

            // Add effects to file
            if (!v.getAnnotation().getEffects().isEmpty()) {
                // Deduplicate effect documents before storing them
                Set<BasicDBObject> effectsSet = new HashSet<>();

                for (List<VariantEffect> effects : v.getAnnotation().getEffects().values()) {
                    for (VariantEffect effect : effects) {
                        BasicDBObject object = getVariantEffectDBObject(effect);
                        effectsSet.add(object);

                        addConsequenceType(effect);
                        soSet.addAll(Arrays.asList((String[]) object.get("so")));
                        if (object.containsField("geneName")) {
                            genesSet.add(object.get("geneName").toString());
                        }
                    }
                }

                BasicDBList effectsList = new BasicDBList();
                effectsList.addAll(effectsSet);
                mongoVariant.put("effects", effectsList);
            }

            // Add gene fields directly to the variant, for query optimization purposes
            BasicDBObject _at = (BasicDBObject) mongoVariant.get("_at");
            if (!genesSet.isEmpty()) {
                BasicDBList genesList = new BasicDBList(); genesList.addAll(genesSet);
                _at.append("gn", genesList);
            }
            if (!soSet.isEmpty()) {
                BasicDBList soList = new BasicDBList(); soList.addAll(soSet);
                _at.append("ct", soList);
            }
        }

        return false;
    }

    /**
     * Converts one VariantEffect into its storage form: consequence-type
     * accessions are mapped to their term names under "so", plus "featureId"
     * and (when present) "geneName".
     */
    private BasicDBObject getVariantEffectDBObject(VariantEffect effect) {
        String[] consequenceTypes = new String[effect.getConsequenceTypes().length];
        for (int i = 0; i < effect.getConsequenceTypes().length; i++) {
            consequenceTypes[i] = ConsequenceTypeMappings.accessionToTerm.get(effect.getConsequenceTypes()[i]);
        }

        BasicDBObject object = new BasicDBObject("so", consequenceTypes).append("featureId", effect.getFeatureId());
        if (effect.getGeneName() != null && !effect.getGeneName().isEmpty()) {
            object.append("geneName", effect.getGeneName());
        }
        return object;
    }

    /**
     * Ensures the query indexes exist. createIndex is idempotent, so calling
     * this once per batch only incurs a server round-trip, not a rebuild.
     */
    @Override
    protected boolean buildBatchIndex(List<Variant> data) {
        variantsCollection.createIndex(new BasicDBObject("_at.chunkIds", 1));
        variantsCollection.createIndex(new BasicDBObject("_at.gn", 1));
        variantsCollection.createIndex(new BasicDBObject("_at.ct", 1));
        variantsCollection.createIndex(new BasicDBObject(DBObjectToVariantConverter.ID_FIELD, 1));
        variantsCollection.createIndex(new BasicDBObject(DBObjectToVariantConverter.CHROMOSOME_FIELD, 1));
        variantsCollection.createIndex(new BasicDBObject(DBObjectToVariantConverter.FILES_FIELD + "." + DBObjectToArchivedVariantFileConverter.STUDYID_FIELD, 1)
                .append(DBObjectToVariantConverter.FILES_FIELD + "." + DBObjectToArchivedVariantFileConverter.FILEID_FIELD, 1));
        return true;
    }

    /**
     * Persists the cached documents: fully-built variants are inserted; stubs
     * (variants that already existed) get their file sub-documents appended
     * via $addToSet upserts. Clears both caches and logs progress afterwards.
     */
    @Override
    protected boolean writeBatch(List<Variant> batch) {
        for (Variant v : batch) {
            String rowkey = variantConverter.buildStorageId(v);
            DBObject mongoVariant = mongoMap.get(rowkey);
            DBObject query = new BasicDBObject("_id", rowkey);
            WriteResult wr;

            if (mongoVariant.containsField(DBObjectToVariantConverter.CHROMOSOME_FIELD)) {
                // Was fully built in this run because it didn't exist, and must be inserted
                try {
                    wr = variantsCollection.insert(mongoVariant);
                    if (!wr.getLastError().ok()) {
                        // TODO If not correct, retry?
                        Logger.getLogger(VariantMongoWriter.class.getName()).log(Level.SEVERE, wr.getError(), wr.getLastError());
                    }
                } catch(MongoInternalException ex) {
                    System.out.println(v);
                    Logger.getLogger(VariantMongoWriter.class.getName()).log(Level.SEVERE, v.getChromosome() + ":" + v.getStart(), ex);
                } catch(DuplicateKeyException ex) {
                    // Concurrent insert of the same variant: log and continue
                    Logger.getLogger(VariantMongoWriter.class.getName()).log(Level.WARNING,
                            "Variant already existed: {0}:{1}", new Object[]{v.getChromosome(), v.getStart()});
                }

            } else { // It existed previously, was not fully built in this run and only files need to be updated
                // TODO How to do this efficiently, inserting all files at once?
                for (ArchivedVariantFile archiveFile : v.getFiles().values()) {
                    DBObject mongoFile = mongoFileMap.get(rowkey + "_" + archiveFile.getFileId());
                    BasicDBObject changes = new BasicDBObject().append("$addToSet",
                            new BasicDBObject(DBObjectToVariantConverter.FILES_FIELD, mongoFile));

                    wr = variantsCollection.update(query, changes, true, false);
                    if (!wr.getLastError().ok()) {
                        // TODO If not correct, retry?
                        Logger.getLogger(VariantMongoWriter.class.getName()).log(Level.SEVERE, wr.getError(), wr.getLastError());
                    }
                }
            }

        }

        mongoMap.clear();
        mongoFileMap.clear();

        numVariantsWritten += batch.size();
        Variant lastVariantInBatch = batch.get(batch.size()-1);
        Logger.getLogger(VariantMongoWriter.class.getName()).log(Level.INFO, "{0}\tvariants written upto position {1}:{2}",
                new Object[]{numVariantsWritten, lastVariantInBatch.getChromosome(), lastVariantInBatch.getStart()});

        return true;
    }

    /**
     * Upserts the source-file summary document, keyed by file name.
     */
    private boolean writeSourceSummary(VariantSource source) {
        DBObject studyMongo = sourceConverter.convertToStorageType(source);
        DBObject query = new BasicDBObject(DBObjectToVariantSourceConverter.FILENAME_FIELD, source.getFileName());
        WriteResult wr = filesCollection.update(query, studyMongo, true, false);

        return wr.getLastError().ok(); // TODO Is this a proper return statement?
    }

    /** Writes the source summary after all variants have been loaded. */
    @Override
    public boolean post() {
        writeSourceSummary(file);
        return true;
    }

    /** Closes the MongoDB client connection. */
    @Override
    public boolean close() {
        mongoClient.close();
        return true;
    }

    // The three toggles below rebuild the converters so the new setting takes
    // effect on subsequent writes.

    @Override
    public final void includeStats(boolean b) {
        includeStats = b;
        setConverters(includeStats, includeSamples, includeEffect);
    }

    @Override
    public final void includeSamples(boolean b) {
        includeSamples = b;
        setConverters(includeStats, includeSamples, includeEffect);
    }

    @Override
    public final void includeEffect(boolean b) {
        includeEffect = b;
        setConverters(includeStats, includeSamples, includeEffect);
    }

    /**
     * (Re)creates the converter chain according to the current feature toggles.
     * Passing null to the ArchivedVariantFile converter disables the
     * corresponding piece of serialization.
     */
    private void setConverters(boolean includeStats, boolean includeSamples, boolean includeEffect) {
        sourceConverter = new DBObjectToVariantSourceConverter();
        statsConverter = new DBObjectToVariantStatsConverter();
        archivedVariantFileConverter = new DBObjectToArchivedVariantFileConverter(
                includeSamples ? samples : null,
                includeStats ? statsConverter : null);
        // TODO Not sure about commenting this, but otherwise it looks like the ArchiveVariantFile will be processed twice
//        variantConverter = new DBObjectToVariantConverter(archivedVariantFileConverter);
        variantConverter = new DBObjectToVariantConverter();
    }

    /**
     * Increments the occurrence counter of every consequence-type term carried
     * by the given effect.
     */
    private void addConsequenceType(VariantEffect effect) {
        for (int so : effect.getConsequenceTypes()) {
            String ct = ConsequenceTypeMappings.accessionToTerm.get(so);
            int ctCount = conseqTypes.containsKey(ct) ? conseqTypes.get(ct)+1 : 1;
            conseqTypes.put(ct, ctCount);
        }
    }
}
// This file is part of the OpenNMS(R) Application.
//
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// code that was published under the GNU General Public License. Copyrights for modified
// and included code are below.
//
// OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information contact:
//      OpenNMS Licensing       <license@opennms.org>
//      http://www.opennms.org/
//      http://www.opennms.com/
package org.opennms.web.svclayer.support;
import java.awt.Color;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
import org.opennms.netmgt.dao.AggregateStatusViewDao;
import org.opennms.netmgt.dao.NodeDao;
import org.opennms.netmgt.model.AggregateStatusDefinition;
import org.opennms.netmgt.model.AggregateStatusView;
import org.opennms.netmgt.model.OnmsIpInterface;
import org.opennms.netmgt.model.OnmsMonitoredService;
import org.opennms.netmgt.model.OnmsNode;
import org.opennms.web.svclayer.AggregateStatus;
import org.opennms.web.svclayer.AggregateStatusColor;
import org.opennms.web.svclayer.AggregateStatusService;
/**
* This service layer class creates a collection that represents the current
* status of devices per site (a column from the asset table such as building,
* floor, etc.) The status per site is broken down into rows of categories from
* the categories table.
*
* example:
*
* site: HQBLDB
*
* |Routers/Switches | 1 of 20 |
* |Servers | 0 of 200 |
* |Hubs/APs | 5 of 30 |
*
*
* @author david hustace
*
*/
public class DefaultAggregateStatusService implements AggregateStatusService {
private NodeDao m_nodeDao;
private AggregateStatusViewDao m_statusViewDao;
public Collection<AggregateStatus> createAggregateStatusView(String statusViewName) {
AggregateStatusView statusView = m_statusViewDao.find(statusViewName);
return createAggreateStatus(statusView);
}
public Collection<AggregateStatus> createAggreateStatus(AggregateStatusView statusView) {
return createAggregateStatus(statusView.getTableName(), statusView.getColumnName(), statusView.getColumnValue(), statusView.getStatusDefinitions());
}
private Collection<AggregateStatus> createAggregateStatus(String tableName, String columnName, String columnValue, Collection<AggregateStatusDefinition> statusDefinitions) {
if (tableName != null && !tableName.equalsIgnoreCase("assets")) {
throw new UnsupportedOperationException("This service currently only implmented for aggregation on asset columns.");
}
return createAggregateStatusUsingAssetColumn(columnName, columnValue, statusDefinitions);
}
public Collection<AggregateStatus> createAggregateStatusUsingAssetColumn(String assetColumn,
String columnValue, Collection<AggregateStatusDefinition> categoryGrouping) {
/*
* We'll return this collection populated with all the aggregated statuss for the
* devices in the building (site) by for each group of categories.
*/
Collection<AggregateStatus> stati = new ArrayList<AggregateStatus>();
/*
* Iterate over the status definitions and create aggregated statuss
*/
for (AggregateStatusDefinition statusDef : categoryGrouping) {
AggregateStatus status = new AggregateStatus();
status.setLabel(statusDef.getAggrStatusLabel());
Collection<OnmsNode> nodes = m_nodeDao.findAllByVarCharAssetColumnCategoryList(assetColumn, columnValue, statusDef.getCategories());
status.setDownEntityCount(computeDownCount(nodes));
status.setTotalEntityCount(nodes.size());
status.setColor(computeColor(nodes, status));
stati.add(status);
}
return stati;
}
private Color computeColor(Collection<OnmsNode> nodes, AggregateStatus status) {
Color color = AggregateStatusColor.ALL_NODES_UP;
if (status.getDownEntityCount() >= 1) {
color = AggregateStatusColor.NODES_ARE_DOWN;
return color;
}
for (Iterator it = nodes.iterator(); it.hasNext();) {
OnmsNode node = (OnmsNode) it.next();
Set<OnmsIpInterface> ifs = node.getIpInterfaces();
for (Iterator ifIter = ifs.iterator(); ifIter.hasNext();) {
OnmsIpInterface ipIf = (OnmsIpInterface) ifIter.next();
Set<OnmsMonitoredService> svcs = ipIf.getMonitoredServices();
for (Iterator svcIter = svcs.iterator(); svcIter.hasNext();) {
OnmsMonitoredService svc = (OnmsMonitoredService) svcIter.next();
if (svc.isDown()) {
color = AggregateStatusColor.ONE_SERVICE_DOWN;
return color; //quick exit this mess
}
}
}
}
return color;
}
private Integer computeDownCount(Collection<OnmsNode> nodes) {
int totalNodesDown = 0;
for (OnmsNode node : nodes) {
if (node.isDown()) {
totalNodesDown += 1;
}
}
return new Integer(totalNodesDown);
}
public NodeDao getNodeDao() {
return m_nodeDao;
}
public void setNodeDao(NodeDao nodeDao) {
m_nodeDao = nodeDao;
}
} |
package org.caleydo.core.view.opengl.canvas.storagebased;
import static org.caleydo.core.view.opengl.canvas.storagebased.HeatMapRenderStyle.SELECTION_Z;
import static org.caleydo.core.view.opengl.renderstyle.GeneralRenderStyle.MOUSE_OVER_COLOR;
import static org.caleydo.core.view.opengl.renderstyle.GeneralRenderStyle.SELECTED_COLOR;
import gleem.linalg.Vec3f;
import java.awt.Point;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import javax.media.opengl.GL;
import org.caleydo.core.command.ECommandType;
import org.caleydo.core.command.view.opengl.CmdCreateGLEventListener;
import org.caleydo.core.data.collection.storage.EDataRepresentation;
import org.caleydo.core.data.mapping.EIDType;
import org.caleydo.core.data.selection.ESelectionCommandType;
import org.caleydo.core.data.selection.ESelectionType;
import org.caleydo.core.data.selection.GenericSelectionManager;
import org.caleydo.core.data.selection.Group;
import org.caleydo.core.data.selection.GroupList;
import org.caleydo.core.data.selection.IGroupList;
import org.caleydo.core.data.selection.IVirtualArray;
import org.caleydo.core.data.selection.SelectedElementRep;
import org.caleydo.core.data.selection.SelectionCommand;
import org.caleydo.core.data.selection.delta.ISelectionDelta;
import org.caleydo.core.data.selection.delta.IVirtualArrayDelta;
import org.caleydo.core.data.selection.delta.SelectionDelta;
import org.caleydo.core.data.selection.delta.VADeltaItem;
import org.caleydo.core.data.selection.delta.VirtualArrayDelta;
import org.caleydo.core.manager.event.view.group.InterchangeGroupsEvent;
import org.caleydo.core.manager.event.view.group.MergeGroupsEvent;
import org.caleydo.core.manager.event.view.storagebased.UpdateViewEvent;
import org.caleydo.core.manager.general.GeneralManager;
import org.caleydo.core.manager.id.EManagedObjectType;
import org.caleydo.core.manager.picking.EPickingMode;
import org.caleydo.core.manager.picking.EPickingType;
import org.caleydo.core.manager.picking.Pick;
import org.caleydo.core.util.clusterer.ClusterState;
import org.caleydo.core.util.clusterer.EClustererType;
import org.caleydo.core.util.mapping.color.ColorMapping;
import org.caleydo.core.util.mapping.color.ColorMappingManager;
import org.caleydo.core.util.mapping.color.EColorMappingType;
import org.caleydo.core.util.preferences.PreferenceConstants;
import org.caleydo.core.view.opengl.camera.EProjectionMode;
import org.caleydo.core.view.opengl.camera.IViewFrustum;
import org.caleydo.core.view.opengl.canvas.AGLEventListener;
import org.caleydo.core.view.opengl.canvas.EDetailLevel;
import org.caleydo.core.view.opengl.canvas.GLCaleydoCanvas;
import org.caleydo.core.view.opengl.canvas.listener.UpdateViewListener;
import org.caleydo.core.view.opengl.canvas.remote.IGLCanvasRemoteRendering;
import org.caleydo.core.view.opengl.canvas.remote.listener.GroupInterChangingActionListener;
import org.caleydo.core.view.opengl.canvas.remote.listener.GroupMergingActionListener;
import org.caleydo.core.view.opengl.canvas.remote.receiver.IGroupsInterChangingActionReceiver;
import org.caleydo.core.view.opengl.canvas.remote.receiver.IGroupsMergingActionReceiver;
import org.caleydo.core.view.opengl.mouse.GLMouseListener;
import org.caleydo.core.view.opengl.util.GLCoordinateUtils;
import org.caleydo.core.view.opengl.util.overlay.contextmenu.container.GroupContextMenuItemContainer;
import org.caleydo.core.view.opengl.util.overlay.infoarea.GLInfoAreaManager;
import org.caleydo.core.view.opengl.util.texture.EIconTextures;
import org.caleydo.core.view.opengl.util.texture.TextureManager;
import org.caleydo.core.view.serialize.ASerializedView;
import org.caleydo.core.view.serialize.SerializedDummyView;
import com.sun.opengl.util.BufferUtil;
import com.sun.opengl.util.texture.Texture;
import com.sun.opengl.util.texture.TextureCoords;
import com.sun.opengl.util.texture.TextureData;
import com.sun.opengl.util.texture.TextureIO;
/**
 * Rendering the GLHierarchicalHeatMap with remote rendering support.
 *
 * @author Bernhard Schlegl
 * @author Marc Streit
 * @author Alexander Lex
 */
public class GLHierarchicalHeatMap
    extends AStorageBasedView
    implements IGroupsMergingActionReceiver, IGroupsInterChangingActionReceiver {

    // Horizontal gaps between hierarchy levels 1/2 and 2/3 (GL units)
    private final static float GAP_LEVEL1_2 = 0.6f;
    private final static float GAP_LEVEL2_3 = 0.4f;
    // private final static float MAX_NUM_SAMPLES = 8f;
    // Bounds for the number of samples shown in the embedded heat map (level 3)
    private final static int MIN_SAMPLES_PER_HEATMAP = 14;
    private final static int MAX_SAMPLES_PER_HEATMAP = 100;

    // Total number of content elements in the current virtual array
    private int iNumberOfElements = 0;
    private int iSamplesPerTexture = 0;
    private int iSamplesPerHeatmap = 0;

    private ColorMapping colorMapper;
    private EIDType eFieldDataType = EIDType.EXPRESSION_INDEX;
    private EIDType eExperimentDataType = EIDType.EXPERIMENT_INDEX;
    private TextureManager iconTextureManager;
    private ArrayList<Float> fAlXDistances;

    // for external selections of experiments
    private ArrayList<Integer> AlExpMouseOver = new ArrayList<Integer>();
    private ArrayList<Integer> AlExpSelected = new ArrayList<Integer>();

    // selector for texture in overviewBar (1-based)
    private int iSelectorBar = 1;
    // number of partitions for selection in overViewBar
    private int iNrSelBar = 0;

    // array of textures for holding the data samples
    private ArrayList<Texture> AlTextures = new ArrayList<Texture>();
    // per-texture sample counts, parallel to AlTextures
    private ArrayList<Integer> iAlNumberSamples = new ArrayList<Integer>();

    // last picking location in window coordinates (null when nothing picked)
    private Point PickingPoint = null;
    // sample window currently shown in the embedded heat map
    private int iPickedSample = 0;
    private int iFirstSample = 0;
    private int iLastSample = 0;
    private int iNrSamplesPerTexture = 0;

    // cached selections mapped to (texture, position) coordinates
    private ArrayList<HeatMapSelection> AlSelection = new ArrayList<HeatMapSelection>();
    private boolean bRenderCaption;
    private float fAnimationScale = 1.0f;

    // embedded heat map
    private GLHeatMap glHeatMapView;
    private boolean bIsHeatmapInFocus = false;
    private float fWidthEHM = 0;

    // embedded dendrogram
    private GLDendrogramVertical glDendrogram;

    // set when the level textures must be re-created from the data
    private boolean bRedrawTextures = false;

    // if only a small number of genes is in the data set, level_1 (overViewBar) should not be rendered
    private boolean bSkipLevel1 = false;

    // dragging stuff
    private boolean bIsDraggingActive = false;
    private boolean bIsDraggingWholeBlock = false;
    private boolean bDisableCursorDragging = false;
    private boolean bDisableBlockDragging = false;
    private int iDraggedCursor = 0;
    private float fPosCursorFirstElement = 0;
    private float fPosCursorLastElement = 0;

    // clustering/grouping stuff
    private ClusterState clusterstate = new ClusterState();
    private boolean bSplitGroupExp = false;
    private boolean bSplitGroupGene = false;
    private int iGroupToSplit = 0;
    private Point DraggingPoint = null;

    // event listeners registered by this view
    private GroupMergingActionListener groupMergingActionListener;
    private GroupInterChangingActionListener groupInterChangingActionListener;
    private UpdateViewListener updateViewListener;
/**
 * Constructor. Sets up the selection managers for content (genes) and storage
 * (experiments), the color mapper, the key listener and creates the embedded
 * heat map view.
 *
 * @param glCanvas canvas this view renders into
 * @param sLabel view label
 * @param viewFrustum view frustum defining the projection
 */
public GLHierarchicalHeatMap(GLCaleydoCanvas glCanvas, final String sLabel, final IViewFrustum viewFrustum) {
    super(glCanvas, sLabel, viewFrustum);
    viewType = EManagedObjectType.GL_HIER_HEAT_MAP;
    ArrayList<ESelectionType> alSelectionTypes = new ArrayList<ESelectionType>();
    alSelectionTypes.add(ESelectionType.NORMAL);
    alSelectionTypes.add(ESelectionType.MOUSE_OVER);
    alSelectionTypes.add(ESelectionType.SELECTION);
    contentSelectionManager = new GenericSelectionManager.Builder(EIDType.EXPRESSION_INDEX).build();
    storageSelectionManager = new GenericSelectionManager.Builder(EIDType.EXPERIMENT_INDEX).build();
    colorMapper = ColorMappingManager.get().getColorMapping(EColorMappingType.GENE_EXPRESSION);
    fAlXDistances = new ArrayList<Float>();
    // NOTE(review): despite the original "activate clustering" comment, this
    // DISABLES use of a clustered virtual array by default - confirm intent.
    bUseClusteredVA = false;
    glKeyListener = new GLHierarchicalHeatMapKeyListener(this);
    createHeatMap();
}
@Override
public void init(GL gl) {
    // Initialize the embedded heat map as a remotely rendered child view.
    glHeatMapView.initRemote(gl, this, glMouseListener, null, null);
    // glDendrogram.initRemote(gl, this, glMouseListener, null, null);
    iconTextureManager = TextureManager.get();
    initTextures(gl);
}
/**
 * Function responsible for initialization of hierarchy levels. Depending on the amount of samples in the
 * data set 2 or 3 levels are used. Also clamps the embedded heat map's sample
 * count to [MIN_SAMPLES_PER_HEATMAP, MAX_SAMPLES_PER_HEATMAP].
 */
private void initHierarchy() {
    if (set == null)
        return;
    createDendrogram();
    iNumberOfElements = set.getVA(iContentVAID).size();
    if (iNumberOfElements < MIN_SAMPLES_PER_HEATMAP) {
        System.out.println("Number of elements not supported!! Problems with visualization may occur!");
    }
    if (iNumberOfElements < 100) {
        // Small data set: skip level 1 (overview bar), one texture holds everything.
        bSkipLevel1 = true;
        iSelectorBar = 1;
        iSamplesPerTexture = iNumberOfElements;
        // Integer division already truncates; no Math.floor needed.
        iSamplesPerHeatmap = iSamplesPerTexture / 3;
    }
    else {
        bSkipLevel1 = false;
        iSelectorBar = 1;
        iSamplesPerTexture = iNumberOfElements / 5;
        if (iSamplesPerTexture > 250)
            iSamplesPerTexture = 250;
        iSamplesPerHeatmap = iSamplesPerTexture / 3;
    }
    // BUGFIX: the upper clamp previously overwrote iSamplesPerTexture with 100
    // instead of limiting iSamplesPerHeatmap (asymmetric with the MIN clamp below).
    if (iSamplesPerHeatmap > MAX_SAMPLES_PER_HEATMAP)
        iSamplesPerHeatmap = MAX_SAMPLES_PER_HEATMAP;
    if (iSamplesPerHeatmap < MIN_SAMPLES_PER_HEATMAP)
        iSamplesPerHeatmap = MIN_SAMPLES_PER_HEATMAP;
}
@Override
public void initLocal(GL gl) {
    bRenderStorageHorizontally = false;
    // Register keyboard listener to GL canvas
    // parentGLCanvas.getParentComposite().addKeyListener(glKeyListener);
    iGLDisplayListIndexLocal = gl.glGenLists(1);
    iGLDisplayListToCall = iGLDisplayListIndexLocal;
    init(gl);
}
@Override
public void initRemote(GL gl, final AGLEventListener glParentView, GLMouseListener glMouseListener,
    IGLCanvasRemoteRendering remoteRenderingGLCanvas, GLInfoAreaManager infoAreaManager) {
    this.remoteRenderingGLView = remoteRenderingGLCanvas;
    // Register keyboard listener to GL canvas; must happen on the SWT display
    // thread, hence the asyncExec.
    glParentView.getParentGLCanvas().getParentComposite().getDisplay().asyncExec(new Runnable() {
        public void run() {
            glParentView.getParentGLCanvas().getParentComposite().addKeyListener(glKeyListener);
        }
    });
    bRenderStorageHorizontally = false;
    this.glMouseListener = glMouseListener;
    iGLDisplayListIndexRemote = gl.glGenLists(1);
    iGLDisplayListToCall = iGLDisplayListIndexRemote;
    init(gl);
}
/**
* If no selected elements are in the current texture, the function switches the texture
*/
private void setTexture() {
boolean bSetCurrentTexture = true;
if (AlSelection.size() > 0) {
for (HeatMapSelection selection : AlSelection) {
if (selection.getTexture() == iSelectorBar && selection.getPos() >= iFirstSample
&& selection.getPos() <= iLastSample || selection.getTexture() == iSelectorBar - 1
&& selection.getPos() >= iFirstSample && selection.getPos() <= iLastSample) {
bSetCurrentTexture = false;
break;
}
}
if (bSetCurrentTexture) {
iSelectorBar = AlSelection.get(0).getTexture() + 1;
if (iSelectorBar == iNrSelBar) {
iSelectorBar
}
initPosCursor();
}
}
}
/**
 * Init (reset) the positions of cursors used for highlighting selected elements in stage 2 (texture).
 * If a selection lies in the current texture (or the one after it), the sample
 * window is centered on that selection; otherwise it is reset to the top.
 */
private void initPosCursor() {
    if (iSamplesPerHeatmap > iAlNumberSamples.get(iSelectorBar - 1) / 2)
        iSamplesPerHeatmap = iAlNumberSamples.get(iSelectorBar - 1) / 3;
    if (AlSelection.size() > 0) {
        int iNumberSample = iAlNumberSamples.get(iSelectorBar - 1);
        // int iNumberSample = iNrSamplesPerTexture; // * 2;
        for (HeatMapSelection iter : AlSelection) {
            if (iter.getTexture() == iSelectorBar - 1) {
                // Selection is in the current texture.
                centerWindowAroundSample(iter.getPos(), iNumberSample);
                break;
            }
            else if (iter.getTexture() == iSelectorBar) {
                // Selection is in the following texture: offset its position by
                // the size of the current texture.
                centerWindowAroundSample(iter.getPos() + iAlNumberSamples.get(iSelectorBar - 1),
                    iNumberSample);
                break;
            }
            // No match for this selection: fall back to the default window
            // (overwritten by a later iteration that matches).
            iPickedSample = iSamplesPerHeatmap / 2;
            iFirstSample = 0;
            iLastSample = iSamplesPerHeatmap - 1;
        }
    }
    else {
        // No selections at all: show the first iSamplesPerHeatmap samples.
        iPickedSample = iSamplesPerHeatmap / 2;
        iFirstSample = 0;
        iLastSample = iSamplesPerHeatmap - 1;
    }
}

/**
 * Centers the level-3 sample window (iFirstSample..iLastSample) around the given
 * picked sample, clamping at the texture borders. Extracted from the two
 * formerly duplicated branches of initPosCursor().
 *
 * @param iPicked sample index to center on (relative to the current texture)
 * @param iNumberSample total number of samples in the current texture
 */
private void centerWindowAroundSample(int iPicked, int iNumberSample) {
    iPickedSample = iPicked;
    if (iSamplesPerHeatmap % 2 == 0) {
        iFirstSample = iPickedSample - (int) Math.floor(iSamplesPerHeatmap / 2) + 1;
        iLastSample = iPickedSample + (int) Math.floor(iSamplesPerHeatmap / 2);
    }
    else {
        // NOTE(review): Math.ceil over an integer division is a no-op here (the
        // division truncates first); kept to preserve the original behavior.
        iFirstSample = iPickedSample - (int) Math.ceil(iSamplesPerHeatmap / 2);
        iLastSample = iPickedSample + (int) Math.floor(iSamplesPerHeatmap / 2);
    }
    if (iPickedSample < iSamplesPerHeatmap / 2) {
        // Clamp at the top border.
        iPickedSample = (int) Math.floor(iSamplesPerHeatmap / 2);
        iFirstSample = 0;
        iLastSample = iSamplesPerHeatmap - 1;
    }
    else if (iPickedSample > iNumberSample - 1 - iSamplesPerHeatmap / 2) {
        // Clamp at the bottom border.
        iPickedSample = (int) Math.ceil(iNumberSample - iSamplesPerHeatmap / 2);
        iLastSample = iNumberSample - 1;
        iFirstSample = iNumberSample - iSamplesPerHeatmap;
    }
}
/**
 * (Re)computes the number of level-1 textures (iNrSelBar) and the per-texture
 * sample counts. Only bookkeeping is done here; the lists are filled with null
 * texture placeholders and the actual textures are built in initTextures().
 */
private void calculateTextures() {
    // less than 100 elements in VA, level 1 (overview bar) will not be rendered
    if (bSkipLevel1) {
        iNrSelBar = 1;
        AlTextures.clear();
        iAlNumberSamples.clear();
        Texture tempTextur = null;
        AlTextures.add(tempTextur);
        iAlNumberSamples.add(iSamplesPerTexture);
    }
    else {
        if (set.getVA(iContentVAID).getGroupList() != null) {
            // Clustered data: one texture per group.
            IGroupList groupList = set.getVA(iContentVAID).getGroupList();
            iNrSelBar = groupList.size();
            AlTextures.clear();
            iAlNumberSamples.clear();
            Texture tempTextur = null;
            for (int i = 0; i < iNrSelBar; i++) {
                AlTextures.add(tempTextur);
                iAlNumberSamples.add(groupList.get(i).getNrElements());
            }
        }
        else {
            // Unclustered data: partition the VA into equally sized textures.
            // BUGFIX: cast to double before Math.ceil - the previous integer
            // division truncated first, making the ceil a no-op and dropping the
            // trailing partial texture.
            iNrSelBar = (int) Math.ceil((double) set.getVA(iContentVAID).size() / iSamplesPerTexture);
            AlTextures.clear();
            iAlNumberSamples.clear();
            Texture tempTextur = null;
            int iTextureHeight = set.getVA(iContentVAID).size();
            iNrSamplesPerTexture = iTextureHeight / iNrSelBar;
            for (int i = 0; i < iNrSelBar; i++) {
                AlTextures.add(tempTextur);
                iAlNumberSamples.add(iNrSamplesPerTexture);
            }
        }
    }
}
/**
 * Init textures, build array of textures used for holding the whole examples from contentSelectionManager.
 * One RGBA float texel is written per (content, storage) cell; deselected
 * content rows are rendered with reduced opacity.
 *
 * @param gl the GL context textures are created in
 */
private void initTextures(final GL gl) {
    fAlXDistances.clear();
    if (bSkipLevel1) {
        // Single texture covering the whole data set.
        AlTextures.clear();
        iAlNumberSamples.clear();
        Texture tempTextur;
        int iTextureHeight = set.getVA(iContentVAID).size();
        int iTextureWidth = set.getVA(iStorageVAID).size();
        float fLookupValue = 0;
        float fOpacity = 0;
        FloatBuffer FbTemp = BufferUtil.newFloatBuffer(iTextureWidth * iTextureHeight * 4);
        for (Integer iContentIndex : set.getVA(iContentVAID)) {
            IVirtualArray storageVA = set.getVA(iStorageVAID);
            for (Integer iStorageIndex : storageVA) {
                // Deselected rows are rendered semi-transparent.
                if (contentSelectionManager.checkStatus(ESelectionType.DESELECTED, iContentIndex)) {
                    fOpacity = 0.3f;
                }
                else {
                    fOpacity = 1.0f;
                }
                fLookupValue =
                    set.get(iStorageIndex).getFloat(EDataRepresentation.NORMALIZED, iContentIndex);
                float[] fArMappingColor = colorMapper.getColor(fLookupValue);
                float[] fArRgba =
                    { fArMappingColor[0], fArMappingColor[1], fArMappingColor[2], fOpacity };
                FbTemp.put(fArRgba);
            }
        }
        FbTemp.rewind();
        TextureData texData =
            new TextureData(GL.GL_RGBA /* internalFormat */, iTextureWidth /* height */,
                iTextureHeight /* width */, 0 /* border */, GL.GL_RGBA /* pixelFormat */,
                GL.GL_FLOAT /* pixelType */, false /* mipmap */, false /* dataIsCompressed */,
                false /* mustFlipVertically */, FbTemp, null);
        tempTextur = TextureIO.newTexture(0);
        tempTextur.updateImage(texData);
        AlTextures.add(tempTextur);
        iAlNumberSamples.add(iSamplesPerTexture);
    }
    else {
        if (set.getVA(iContentVAID).getGroupList() != null) {
            // Clustered data: one texture per group, sized by group element count.
            IGroupList groupList = set.getVA(iContentVAID).getGroupList();
            iNrSelBar = groupList.size();
            AlTextures.clear();
            iAlNumberSamples.clear();
            Texture tempTextur;
            // int iTextureHeight = set.getVA(iContentVAID).size();
            int iTextureWidth = set.getVA(iStorageVAID).size();
            float fLookupValue = 0;
            float fOpacity = 0;
            int iCount = 0;
            int iTextureCounter = 0;
            int iGroupNr = 0;
            FloatBuffer FbTemp =
                BufferUtil.newFloatBuffer(groupList.get(iGroupNr).getNrElements() * iTextureWidth * 4);
            for (Integer iContentIndex : set.getVA(iContentVAID)) {
                iCount++;
                IVirtualArray storageVA = set.getVA(iStorageVAID);
                for (Integer iStorageIndex : storageVA) {
                    if (contentSelectionManager.checkStatus(ESelectionType.DESELECTED, iContentIndex)) {
                        fOpacity = 0.3f;
                    }
                    else {
                        fOpacity = 1.0f;
                    }
                    fLookupValue =
                        set.get(iStorageIndex).getFloat(EDataRepresentation.NORMALIZED, iContentIndex);
                    float[] fArMappingColor = colorMapper.getColor(fLookupValue);
                    float[] fArRgba =
                        { fArMappingColor[0], fArMappingColor[1], fArMappingColor[2], fOpacity };
                    FbTemp.put(fArRgba);
                }
                // Group complete: flush the buffer into a texture and start the next group.
                if (iCount >= groupList.get(iGroupNr).getNrElements()) {
                    FbTemp.rewind();
                    TextureData texData =
                        new TextureData(GL.GL_RGBA /* internalFormat */,
                            set.getVA(iStorageVAID).size() /* height */,
                            groupList.get(iGroupNr).getNrElements() /* width */,
                            // set.getVA(iContentVAID).size()/ iNrSelBar /* width */,
                            0 /* border */, GL.GL_RGBA /* pixelFormat */, GL.GL_FLOAT /* pixelType */,
                            false /* mipmap */, false /* dataIsCompressed */, false /* mustFlipVertically */,
                            FbTemp, null);
                    tempTextur = TextureIO.newTexture(0);
                    tempTextur.updateImage(texData);
                    AlTextures.add(tempTextur);
                    iAlNumberSamples.add(groupList.get(iGroupNr).getNrElements());
                    if (iGroupNr < iNrSelBar - 1) {
                        iGroupNr++;
                        FbTemp =
                            BufferUtil.newFloatBuffer(groupList.get(iGroupNr).getNrElements()
                                * iTextureWidth * 4);
                    }
                    iTextureCounter++;
                    iCount = 0;
                }
            }
        }
        else {
            // Unclustered data: partition the VA into equally sized textures.
            // BUGFIX: cast to double before Math.ceil - the previous integer
            // division truncated first, making the ceil a no-op and undercounting
            // the number of textures (must match calculateTextures()).
            iNrSelBar = (int) Math.ceil((double) set.getVA(iContentVAID).size() / iSamplesPerTexture);
            AlTextures.clear();
            iAlNumberSamples.clear();
            Texture tempTextur;
            int iTextureHeight = set.getVA(iContentVAID).size();
            int iTextureWidth = set.getVA(iStorageVAID).size();
            iNrSamplesPerTexture = iTextureHeight / iNrSelBar;
            float fLookupValue = 0;
            float fOpacity = 0;
            FloatBuffer FbTemp =
                BufferUtil.newFloatBuffer(iTextureWidth * iTextureHeight * 4 / iNrSelBar);
            int iCount = 0;
            int iTextureCounter = 0;
            for (Integer iContentIndex : set.getVA(iContentVAID)) {
                iCount++;
                IVirtualArray storageVA = set.getVA(iStorageVAID);
                for (Integer iStorageIndex : storageVA) {
                    if (contentSelectionManager.checkStatus(ESelectionType.DESELECTED, iContentIndex)) {
                        fOpacity = 0.3f;
                    }
                    else {
                        fOpacity = 1.0f;
                    }
                    fLookupValue =
                        set.get(iStorageIndex).getFloat(EDataRepresentation.NORMALIZED, iContentIndex);
                    float[] fArMappingColor = colorMapper.getColor(fLookupValue);
                    float[] fArRgba =
                        { fArMappingColor[0], fArMappingColor[1], fArMappingColor[2], fOpacity };
                    FbTemp.put(fArRgba);
                }
                // Texture slice complete: flush the buffer into a texture.
                if (iCount >= iNrSamplesPerTexture) {
                    FbTemp.rewind();
                    TextureData texData =
                        new TextureData(GL.GL_RGBA /* internalFormat */,
                            set.getVA(iStorageVAID).size() /* height */, set.getVA(iContentVAID).size()
                            / iNrSelBar /* width */, 0 /* border */, GL.GL_RGBA /* pixelFormat */,
                            GL.GL_FLOAT /* pixelType */, false /* mipmap */,
                            false /* dataIsCompressed */, false /* mustFlipVertically */, FbTemp, null);
                    tempTextur = TextureIO.newTexture(0);
                    tempTextur.updateImage(texData);
                    AlTextures.add(tempTextur);
                    iAlNumberSamples.add(iCount);
                    iTextureCounter++;
                    iCount = 0;
                }
            }
        }
    }
}
/**
 * Create embedded heatmap, register heatmap as a sender and receiver.
 * The created view is added to the use case and flagged as remotely rendered.
 */
private void createHeatMap() {
    CmdCreateGLEventListener cmdView =
        (CmdCreateGLEventListener) generalManager.getCommandManager().createCommandByType(
            ECommandType.CREATE_GL_HEAT_MAP_3D);
    float fHeatMapHeight = viewFrustum.getHeight();
    float fHeatMapWidth = viewFrustum.getWidth();
    cmdView.setAttributes(EProjectionMode.ORTHOGRAPHIC, 0, fHeatMapHeight, 0, fHeatMapWidth, -20, 20,
        set, -1);
    cmdView.doCommand();
    glHeatMapView = (GLHeatMap) cmdView.getCreatedObject();
    GeneralManager.get().getUseCase().addView(glHeatMapView);
    glHeatMapView.setUseCase(useCase);
    glHeatMapView.setRenderedRemote(true);
}
/**
 * Creates the embedded vertical dendrogram view, registers it with the use
 * case, marks it as remotely rendered and initializes its data.
 */
private void createDendrogram() {
    CmdCreateGLEventListener cmdView =
        (CmdCreateGLEventListener) generalManager.getCommandManager().createCommandByType(
            ECommandType.CREATE_GL_DENDROGRAM_VERTICAL);
    float fHeatMapHeight = viewFrustum.getHeight();
    float fHeatMapWidth = viewFrustum.getWidth();
    cmdView.setAttributes(EProjectionMode.ORTHOGRAPHIC, 0, fHeatMapHeight, 0, fHeatMapWidth, -20, 20,
        set, -1);
    cmdView.doCommand();
    glDendrogram = (GLDendrogramVertical) cmdView.getCreatedObject();
    GeneralManager.get().getUseCase().addView(glDendrogram);
    glDendrogram.setUseCase(useCase);
    glDendrogram.setRenderedRemote(true);
    glDendrogram.initData();
}
@Override
public void setDetailLevel(EDetailLevel detailLevel) {
    // No view-specific handling; delegates entirely to the base class.
    super.setDetailLevel(detailLevel);
}
@Override
public void displayLocal(GL gl) {
    if (set == null)
        return;
    pickingManager.handlePicking(this, gl);
    // Rebuild the display list only when flagged dirty.
    if (bIsDisplayListDirtyLocal) {
        buildDisplayList(gl, iGLDisplayListIndexLocal);
        bIsDisplayListDirtyLocal = false;
    }
    iGLDisplayListToCall = iGLDisplayListIndexLocal;
    display(gl);
    checkForHits(gl);
    if (eBusyModeState != EBusyModeState.OFF) {
        renderBusyMode(gl);
    }
}
@Override
public void displayRemote(GL gl) {
    if (set == null)
        return;
    // Rebuild the remote display list only when flagged dirty.
    if (bIsDisplayListDirtyRemote) {
        buildDisplayList(gl, iGLDisplayListIndexRemote);
        bIsDisplayListDirtyRemote = false;
    }
    iGLDisplayListToCall = iGLDisplayListIndexRemote;
    display(gl);
    checkForHits(gl);
}
/**
 * Function called any time a update is triggered external. Rebuilds the local
 * selection caches (AlSelection, AlExpMouseOver, AlExpSelected) from the
 * selection managers.
 *
 * @param scrollToSelection if true and level 1 is rendered, switch the current
 *            texture so that a selected element becomes visible
 */
@Override
protected void reactOnExternalSelection(boolean scrollToSelection) {
    AlSelection.clear();
    // The (texture, position) mapping was formerly triplicated verbatim; it is
    // now factored into addHeatMapSelection().
    for (Integer iSelectedID : contentSelectionManager.getElements(ESelectionType.MOUSE_OVER)) {
        addHeatMapSelection(iSelectedID.intValue(), ESelectionType.MOUSE_OVER);
    }
    for (Integer iSelectedID : contentSelectionManager.getElements(ESelectionType.SELECTION)) {
        addHeatMapSelection(iSelectedID.intValue(), ESelectionType.SELECTION);
    }
    for (Integer iSelectedID : contentSelectionManager.getElements(ESelectionType.DESELECTED)) {
        addHeatMapSelection(iSelectedID.intValue(), ESelectionType.DESELECTED);
    }
    // Cache experiment (storage) selections. The former "size() >= 0" guards
    // were always true and have been dropped.
    AlExpMouseOver.clear();
    AlExpMouseOver.addAll(storageSelectionManager.getElements(ESelectionType.MOUSE_OVER));
    AlExpSelected.clear();
    AlExpSelected.addAll(storageSelectionManager.getElements(ESelectionType.SELECTION));
    if (!bSkipLevel1 && scrollToSelection) {
        setTexture();
    }
}

/**
 * Maps a content element to its (texture, position) coordinates across the
 * level-1 textures and records it in AlSelection.
 * BUGFIX: texture/position are now computed fresh per element; the previous
 * inline code reused method-scoped variables, which could carry stale values
 * between elements when the search loop found no matching texture.
 *
 * @param iSelectedID content element ID
 * @param type selection type to record the element with
 */
private void addHeatMapSelection(int iSelectedID, ESelectionType type) {
    // Position is 1-based within its texture.
    int iIndex = set.getVA(iContentVAID).indexOf(iSelectedID) + 1;
    int iTexture = 0;
    int iPos = iIndex;
    if (iIndex - iAlNumberSamples.get(0) > 0) {
        // Walk the textures, subtracting each texture's sample count until the
        // remainder falls inside one of them.
        int iTemp = iIndex;
        for (int i = 0; i < iNrSelBar; i++) {
            if (iTemp - iAlNumberSamples.get(i) <= 0) {
                iTexture = i;
                iPos = iTemp;
                break;
            }
            iTemp -= iAlNumberSamples.get(i);
        }
    }
    AlSelection.add(new HeatMapSelection(iTexture, iPos, iSelectedID, type));
}
/**
 * Reacts on changes of the virtual array: forwards the delta to the embedded
 * heat map, flags the level textures for re-creation and refreshes the cached
 * experiment selections.
 *
 * @param delta the virtual array delta to apply
 */
@Override
protected void reactOnVAChanges(IVirtualArrayDelta delta) {
    glHeatMapView.handleVirtualArrayUpdate(delta, getShortInfo());
    // Textures render cached data, so they must be rebuilt after a VA change.
    bRedrawTextures = true;
    // Re-cache experiment selections. The former "size() >= 0" guards were
    // always true and have been removed; addAll on an empty set is a no-op.
    AlExpMouseOver.clear();
    AlExpMouseOver.addAll(storageSelectionManager.getElements(ESelectionType.MOUSE_OVER));
    AlExpSelected.clear();
    AlExpSelected.addAll(storageSelectionManager.getElements(ESelectionType.SELECTION));
    setDisplayListDirty();
}
/**
 * Render caption, simplified version used in (original) heatmap.
 *
 * @param gl GL context
 * @param sLabel text to render
 * @param fXOrigin x position of the caption
 * @param fYOrigin y position of the caption
 * @param fFontScaling scale factor applied to the font
 */
private void renderCaption(GL gl, String sLabel, float fXOrigin, float fYOrigin, float fFontScaling) {
    textRenderer.setColor(1, 1, 1, 1);
    gl.glPushAttrib(GL.GL_CURRENT_BIT | GL.GL_LINE_BIT);
    // Translate to the origin, draw, then translate back (no matrix push/pop used).
    gl.glTranslatef(fXOrigin, fYOrigin, 0);
    textRenderer.begin3DRendering();
    textRenderer.draw3D(sLabel, 0, 0, 0, fFontScaling);
    textRenderer.end3DRendering();
    gl.glTranslatef(-fXOrigin, -fYOrigin, 0);
    gl.glPopAttrib();
}
/**
 * Render a curved (nice looking) grey area between two views.
 * Three grey quads bridge the gap; curved corner masks (positive and negative)
 * are textured on top to round off the transitions.
 *
 * @param gl GL context
 * @param startpoint1 top-left anchor of the connection
 * @param endpoint1 top-right anchor of the connection
 * @param startpoint2 bottom-left anchor of the connection
 * @param endpoint2 bottom-right anchor of the connection
 */
private void renderSelectedDomain(GL gl, Vec3f startpoint1, Vec3f endpoint1, Vec3f startpoint2,
    Vec3f endpoint2) {
    float fthickness = (endpoint1.x() - startpoint1.x()) / 4;
    float fScalFactor1, fScalFactor2;
    // Shrink the curve height when the connected edges are close together.
    if (endpoint1.y() - startpoint1.y() < 0.2f) {
        fScalFactor1 = (endpoint1.y() - startpoint1.y()) * 5f;
    }
    else {
        fScalFactor1 = 1;
    }
    if (startpoint2.y() - endpoint2.y() < 0.2f) {
        fScalFactor2 = (startpoint2.y() - endpoint2.y()) * 5f;
    }
    else {
        fScalFactor2 = 1;
    }
    gl.glColor4f(0.5f, 0.5f, 0.5f, 1f);
    // Left band of the connection area.
    gl.glBegin(GL.GL_QUADS);
    gl.glVertex3f(startpoint1.x(), startpoint1.y(), startpoint1.z());
    gl.glVertex3f(startpoint1.x() + 2 * fthickness, startpoint1.y(), startpoint1.z());
    gl.glVertex3f(startpoint2.x() + 2 * fthickness, startpoint2.y(), startpoint2.z());
    gl.glVertex3f(startpoint2.x(), startpoint2.y(), startpoint2.z());
    gl.glEnd();
    // Right band of the connection area.
    gl.glBegin(GL.GL_QUADS);
    gl.glVertex3f(endpoint1.x(), endpoint1.y(), endpoint1.z());
    gl.glVertex3f(endpoint1.x() - 1 * fthickness, endpoint1.y(), endpoint1.z());
    gl.glVertex3f(endpoint2.x() - 1 * fthickness, endpoint2.y(), endpoint2.z());
    gl.glVertex3f(endpoint2.x(), endpoint2.y(), endpoint2.z());
    gl.glEnd();
    // fill gap
    gl.glBegin(GL.GL_QUADS);
    gl.glVertex3f(endpoint1.x() - 1 * fthickness, endpoint1.y() - 0.1f * fScalFactor1, endpoint1.z());
    gl.glVertex3f(endpoint1.x() - 2 * fthickness, endpoint1.y() - 0.1f * fScalFactor1, endpoint1.z());
    gl.glVertex3f(endpoint2.x() - 2 * fthickness, endpoint2.y() + 0.1f * fScalFactor2, endpoint2.z());
    gl.glVertex3f(endpoint2.x() - 1 * fthickness, endpoint2.y() + 0.1f * fScalFactor2, endpoint2.z());
    gl.glEnd();
    gl.glPushAttrib(GL.GL_CURRENT_BIT | GL.GL_LINE_BIT);
    gl.glColor4f(1, 1, 1, 1);
    // Rounded corners at the start edges, using the positive curve mask texture.
    Texture TextureMask = iconTextureManager.getIconTexture(gl, EIconTextures.NAVIGATION_MASK_CURVE);
    TextureMask.enable();
    TextureMask.bind();
    TextureCoords texCoordsMask = TextureMask.getImageTexCoords();
    gl.glBegin(GL.GL_POLYGON);
    gl.glTexCoord2f(texCoordsMask.right(), texCoordsMask.top());
    gl.glVertex3f(startpoint1.x() + 2 * fthickness, startpoint1.y(), startpoint1.z());
    gl.glTexCoord2f(texCoordsMask.left(), texCoordsMask.top());
    gl.glVertex3f(startpoint1.x() + 1 * fthickness, startpoint1.y(), startpoint1.z());
    gl.glTexCoord2f(texCoordsMask.left(), texCoordsMask.bottom());
    gl.glVertex3f(startpoint1.x() + 1 * fthickness, startpoint1.y() + 0.1f * fScalFactor1, startpoint1
        .z());
    gl.glTexCoord2f(texCoordsMask.right(), texCoordsMask.bottom());
    gl.glVertex3f(startpoint1.x() + 2 * fthickness, startpoint1.y() + 0.1f * fScalFactor1, startpoint1
        .z());
    gl.glEnd();
    gl.glBegin(GL.GL_POLYGON);
    gl.glTexCoord2f(texCoordsMask.right(), texCoordsMask.top());
    gl.glVertex3f(startpoint2.x() + 2 * fthickness, startpoint2.y(), startpoint2.z());
    gl.glTexCoord2f(texCoordsMask.left(), texCoordsMask.top());
    gl.glVertex3f(startpoint2.x() + 1 * fthickness, startpoint2.y(), startpoint2.z());
    gl.glTexCoord2f(texCoordsMask.left(), texCoordsMask.bottom());
    gl.glVertex3f(startpoint2.x() + 1 * fthickness, startpoint2.y() - 0.1f * fScalFactor2, startpoint2
        .z());
    gl.glTexCoord2f(texCoordsMask.right(), texCoordsMask.bottom());
    gl.glVertex3f(startpoint2.x() + 2 * fthickness, startpoint2.y() - 0.1f * fScalFactor2, startpoint2
        .z());
    gl.glEnd();
    TextureMask.disable();
    // Rounded corners at the end edges, using the negative curve mask texture.
    Texture TextureMaskNeg =
        iconTextureManager.getIconTexture(gl, EIconTextures.NAVIGATION_MASK_CURVE_NEG);
    TextureMaskNeg.enable();
    TextureMaskNeg.bind();
    TextureCoords texCoordsMaskNeg = TextureMaskNeg.getImageTexCoords();
    gl.glBegin(GL.GL_POLYGON);
    gl.glTexCoord2f(texCoordsMaskNeg.left(), texCoordsMaskNeg.bottom());
    gl.glVertex3f(endpoint1.x() - 2 * fthickness, endpoint1.y() - 0.1f * fScalFactor1, endpoint1.z());
    gl.glTexCoord2f(texCoordsMaskNeg.right(), texCoordsMaskNeg.bottom());
    gl.glVertex3f(endpoint1.x() - 1 * fthickness, endpoint1.y() - 0.1f * fScalFactor1, endpoint1.z());
    gl.glTexCoord2f(texCoordsMaskNeg.right(), texCoordsMaskNeg.top());
    gl.glVertex3f(endpoint1.x() - 1 * fthickness, endpoint1.y(), endpoint1.z());
    gl.glTexCoord2f(texCoordsMaskNeg.left(), texCoordsMaskNeg.top());
    gl.glVertex3f(endpoint1.x() - 2 * fthickness, endpoint1.y(), endpoint1.z());
    gl.glEnd();
    gl.glBegin(GL.GL_POLYGON);
    gl.glTexCoord2f(texCoordsMaskNeg.left(), texCoordsMaskNeg.bottom());
    gl.glVertex3f(endpoint2.x() - 2 * fthickness, endpoint2.y() + 0.1f * fScalFactor2, endpoint2.z());
    gl.glTexCoord2f(texCoordsMaskNeg.right(), texCoordsMaskNeg.bottom());
    gl.glVertex3f(endpoint2.x() - 1 * fthickness, endpoint2.y() + 0.1f * fScalFactor2, endpoint2.z());
    gl.glTexCoord2f(texCoordsMaskNeg.right(), texCoordsMaskNeg.top());
    gl.glVertex3f(endpoint2.x() - 1 * fthickness, endpoint2.y(), endpoint2.z());
    gl.glTexCoord2f(texCoordsMaskNeg.left(), texCoordsMaskNeg.top());
    gl.glVertex3f(endpoint2.x() - 2 * fthickness, endpoint2.y(), endpoint2.z());
    gl.glEnd();
    TextureMaskNeg.disable();
    gl.glPopAttrib();
}
/**
 * Render the first stage of the hierarchy (OverviewBar).
 * Textures are stacked bottom-up in reverse order; each one is wrapped in a
 * picking name so clicks can select the corresponding texture.
 *
 * @param gl GL context
 */
private void renderOverviewBar(GL gl) {
    float fHeight;
    float fWidth;
    float fyOffset = 0.0f;
    fHeight = viewFrustum.getHeight();
    fWidth = 0.1f;
    // Height of a single element so all textures together fill the frustum.
    float fHeightElem = fHeight / set.getVA(iContentVAID).size();
    float fStep = 0;
    gl.glColor4f(1f, 1f, 0f, 1f);
    for (int i = 0; i < iNrSelBar; i++) {
        fStep = fHeightElem * iAlNumberSamples.get(iNrSelBar - i - 1);
        AlTextures.get(iNrSelBar - i - 1).enable();
        AlTextures.get(iNrSelBar - i - 1).bind();
        gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP);
        gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP);
        gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
        gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
        TextureCoords texCoords = AlTextures.get(iNrSelBar - i - 1).getImageTexCoords();
        // Picking ID is 1-based (iNrSelBar - i), matching iSelectorBar.
        gl.glPushName(pickingManager.getPickingID(iUniqueID,
            EPickingType.HIER_HEAT_MAP_TEXTURE_SELECTION, iNrSelBar - i));
        gl.glBegin(GL.GL_QUADS);
        gl.glTexCoord2d(texCoords.left(), texCoords.top());
        gl.glVertex3f(0, fyOffset, 0);
        gl.glTexCoord2d(texCoords.left(), texCoords.bottom());
        gl.glVertex3f(0, fyOffset + fStep, 0);
        gl.glTexCoord2d(texCoords.right(), texCoords.bottom());
        gl.glVertex3f(fWidth, fyOffset + fStep, 0);
        gl.glTexCoord2d(texCoords.right(), texCoords.top());
        gl.glVertex3f(fWidth, fyOffset, 0);
        gl.glEnd();
        gl.glPopName();
        fyOffset += fStep;
        AlTextures.get(iNrSelBar - i - 1).disable();
    }
}
/**
 * Renders the experiment class-assignment bar above the embedded heat map
 * (level 3): one colored quad per group plus black separator lines.
 * Picking is currently disabled (commented out) for this level.
 *
 * @param gl GL context
 */
private void renderClassAssignmentsExperimentsLevel3(final GL gl) {
    float fWidth = viewFrustum.getWidth() / 4.0f * fAnimationScale;
    int iNrElements = set.getVA(iStorageVAID).size();
    float fWidthSamples = fWidthEHM / iNrElements;
    float fxpos = fWidth + GAP_LEVEL2_3;
    float fHeight = viewFrustum.getHeight() + 0.1f;
    IGroupList groupList = set.getVA(iStorageVAID).getGroupList();
    int iNrClasses = groupList.size();
    gl.glLineWidth(1f);
    for (int i = 0; i < iNrClasses; i++) {
        // gl.glPushName(pickingManager.getPickingID(iUniqueID,
        // EPickingType.HIER_HEAT_MAP_EXPERIMENTS_GROUP, i));
        float classWidth = groupList.get(i).getNrElements() * fWidthSamples;
        // NOTE(review): only NORMAL and SELECTION set a color; other selection
        // types reuse whatever color is current - confirm this is intended.
        if (groupList.get(i).getSelectionType() == ESelectionType.NORMAL)
            gl.glColor4f(0f, 0f, 1f, 0.5f);
        if (groupList.get(i).getSelectionType() == ESelectionType.SELECTION)
            gl.glColor4f(0f, 1f, 0f, 0.5f);
        gl.glBegin(GL.GL_QUADS);
        gl.glVertex3f(fxpos, fHeight, 0);
        gl.glVertex3f(fxpos, fHeight + 0.1f, 0);
        gl.glVertex3f(fxpos + classWidth, fHeight + 0.1f, 0);
        gl.glVertex3f(fxpos + classWidth, fHeight, 0);
        gl.glEnd();
        gl.glColor4f(0f, 0f, 0f, 1);
        gl.glBegin(GL.GL_LINES);
        gl.glVertex3f(fxpos, fHeight, 0);
        gl.glVertex3f(fxpos, fHeight + 0.1f, 0);
        gl.glVertex3f(fxpos + classWidth, fHeight + 0.1f, 0);
        gl.glVertex3f(fxpos + classWidth, fHeight, 0);
        gl.glEnd();
        // gl.glPopName();
        fxpos = fxpos + classWidth;
    }
}
/**
 * Renders the experiment class-assignment bar above the level-2 texture:
 * one colored, pickable quad per group plus black separator lines.
 *
 * @param gl GL context
 */
private void renderClassAssignmentsExperimentsLevel2(final GL gl) {
    float fWidth = viewFrustum.getWidth() / 4.0f * fAnimationScale;
    int iNrElements = set.getVA(iStorageVAID).size();
    float fWidthSamples = fWidth / iNrElements;
    float fxpos = 0;
    float fHeight = viewFrustum.getHeight() + 0.1f;
    IGroupList groupList = set.getVA(iStorageVAID).getGroupList();
    int iNrClasses = groupList.size();
    gl.glLineWidth(1f);
    for (int i = 0; i < iNrClasses; i++) {
        gl.glPushName(pickingManager.getPickingID(iUniqueID,
            EPickingType.HIER_HEAT_MAP_EXPERIMENTS_GROUP, i));
        float classWidth = groupList.get(i).getNrElements() * fWidthSamples;
        // NOTE(review): only NORMAL and SELECTION set a color; other selection
        // types reuse whatever color is current - confirm this is intended.
        if (groupList.get(i).getSelectionType() == ESelectionType.NORMAL)
            gl.glColor4f(0f, 0f, 1f, 0.5f);
        if (groupList.get(i).getSelectionType() == ESelectionType.SELECTION)
            gl.glColor4f(0f, 1f, 0f, 0.5f);
        gl.glBegin(GL.GL_QUADS);
        gl.glVertex3f(fxpos, fHeight, 0);
        gl.glVertex3f(fxpos, fHeight + 0.1f, 0);
        gl.glVertex3f(fxpos + classWidth, fHeight + 0.1f, 0);
        gl.glVertex3f(fxpos + classWidth, fHeight, 0);
        gl.glEnd();
        gl.glColor4f(0f, 0f, 0f, 1);
        gl.glBegin(GL.GL_LINES);
        gl.glVertex3f(fxpos, fHeight, 0);
        gl.glVertex3f(fxpos, fHeight + 0.1f, 0);
        gl.glVertex3f(fxpos + classWidth, fHeight + 0.1f, 0);
        gl.glVertex3f(fxpos + classWidth, fHeight, 0);
        gl.glEnd();
        gl.glPopName();
        fxpos = fxpos + classWidth;
    }
}
/**
 * Renders gene-class (content group) assignment markers as a vertical strip at the
 * left edge of the view (x = 0 .. 0.1), running from the top of the frustum
 * downwards. For each group a translucent quad and a black outline are drawn;
 * the quad color encodes the group's selection state (blue = NORMAL,
 * green = SELECTION; other states keep the previously set GL color). Each marker
 * is pickable via HIER_HEAT_MAP_GENES_GROUP with the group index as pick id.
 *
 * @param gl the GL context to render into
 */
private void renderClassAssignmentsGenes(final GL gl) {
float fHeight = viewFrustum.getHeight();
// NOTE(review): uses the iNumberOfElements field rather than
// set.getVA(iContentVAID).size() as the experiment variants do — presumably
// these agree; verify against where iNumberOfElements is maintained
int iNrElements = iNumberOfElements;
float fHeightSamples = fHeight / iNrElements;
// start at the top and grow downwards
float fyPos = fHeight;
IGroupList groupList = set.getVA(iContentVAID).getGroupList();
int iNrClasses = groupList.size();
gl.glLineWidth(1f);
for (int i = 0; i < iNrClasses; i++) {
gl.glPushName(pickingManager.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_GENES_GROUP, i));
// height of this class marker is proportional to its number of elements
float classHeight = groupList.get(i).getNrElements() * fHeightSamples;
if (groupList.get(i).getSelectionType() == ESelectionType.NORMAL)
gl.glColor4f(0f, 0f, 1f, 0.5f);
if (groupList.get(i).getSelectionType() == ESelectionType.SELECTION)
gl.glColor4f(0f, 1f, 0f, 0.5f);
// filled, translucent class quad
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(0, fyPos, 0);
gl.glVertex3f(0.1f, fyPos, 0);
gl.glVertex3f(0.1f, fyPos - classHeight, 0);
gl.glVertex3f(0, fyPos - classHeight, 0);
gl.glEnd();
// black outline (top and bottom edge segments)
gl.glColor4f(0f, 0f, 0f, 1);
gl.glBegin(GL.GL_LINES);
gl.glVertex3f(0, fyPos, 0);
gl.glVertex3f(0.1f, fyPos, 0);
gl.glVertex3f(0.1f, fyPos - classHeight, 0);
gl.glVertex3f(0, fyPos - classHeight, 0);
gl.glEnd();
gl.glPopName();
// move down to the start of the next class
fyPos = fyPos - classHeight;
}
}
/**
 * Renders markers in the overview bar (level 1): an alternating black/white outline
 * per texture group, and — for the group currently shown in level 2 — a connecting
 * "selected domain" band between the overview bar and the level-2 texture.
 *
 * @param gl the GL context to render into
 */
private void renderMarkerOverviewBar(final GL gl) {
    float fHeight = viewFrustum.getHeight();
    float fFieldWith = 0.1f;
    Vec3f startpoint1, endpoint1, startpoint2, endpoint2;
    // height of a single element (row) in the overview bar
    float fHeightElem = fHeight / set.getVA(iContentVAID).size();
    int iStartElem = 0;
    int iLastElem = 0;
    boolean colorToggle = true;
    gl.glLineWidth(2f);
    for (int currentGroup = 0; currentGroup < iNrSelBar; currentGroup++) {
        // [iStartElem, iLastElem) is the element range covered by this group
        iStartElem = iLastElem;
        iLastElem += iAlNumberSamples.get(currentGroup);
        // alternate outline colors so adjacent groups are distinguishable
        if (colorToggle)
            gl.glColor4f(0f, 0f, 0f, 1f);
        else
            gl.glColor4f(1f, 1f, 1f, 1f);
        // FIX: idiomatic boolean toggle (was `(colorToggle == true) ? false : true`)
        colorToggle = !colorToggle;
        if (currentGroup == iSelectorBar - 1) {
            // this group is the one rendered in level 2: draw the connecting band
            startpoint1 = new Vec3f(fFieldWith, fHeight - fHeightElem * iStartElem, 0);
            endpoint1 = new Vec3f(GAP_LEVEL1_2, fHeight, 0);
            startpoint2 = new Vec3f(fFieldWith, fHeight - fHeightElem * iLastElem, 0);
            endpoint2 = new Vec3f(GAP_LEVEL1_2, 0, 0);
            renderSelectedDomain(gl, startpoint1, endpoint1, startpoint2, endpoint2);
            gl.glColor4fv(MOUSE_OVER_COLOR, 0);
        }
        // TODO: find a better way to render cluster assignments (--> +0.01f)
        gl.glBegin(GL.GL_LINE_LOOP);
        gl.glVertex3f(0, fHeight - fHeightElem * iStartElem, 0);
        gl.glVertex3f(fFieldWith, fHeight - fHeightElem * iStartElem, 0);
        gl.glVertex3f(fFieldWith, (fHeight - fHeightElem * iLastElem) + 0.01f, 0);
        gl.glVertex3f(0, (fHeight - fHeightElem * iLastElem) + 0.01f, 0);
        gl.glEnd();
    }
    // reset draw color to opaque white for subsequent rendering
    gl.glColor4f(1f, 1f, 1f, 1f);
}
/**
 * Render marker next to OverviewBar for visualization of selected elements in the data set.
 * For each entry in AlSelection with state MOUSE_OVER or SELECTION, a small quad is
 * drawn beside the overview bar at the vertical position of the element's texture
 * group; all other selection states are skipped.
 *
 * @param gl
 */
private void renderSelectedElementsOverviewBar(GL gl) {
float fHeight = viewFrustum.getHeight();
float fBarWidth = 0.1f;
// height of a single element (row) in the overview bar
float fHeightElem = fHeight / set.getVA(iContentVAID).size();
for (HeatMapSelection selection : AlSelection) {
if (selection.getSelectionType() == ESelectionType.MOUSE_OVER) {
gl.glColor4fv(MOUSE_OVER_COLOR, 0);
}
else if (selection.getSelectionType() == ESelectionType.SELECTION) {
gl.glColor4fv(SELECTED_COLOR, 0);
}
// else if (selection.getSelectionType() == ESelectionType.DESELECTED) {
// gl.glColor4f(1, 1, 1, 0.5f);
else
continue;
// number of elements above this selection's texture group
float fStartElem = 0;
for (int i = 0; i < selection.getTexture(); i++)
fStartElem += iAlNumberSamples.get(i);
// elements in overview bar
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(fBarWidth, fHeight - fHeightElem * fStartElem, 0.001f);
gl.glVertex3f(fBarWidth + 0.1f, fHeight - fHeightElem * fStartElem, 0.001f);
gl.glVertex3f(fBarWidth + 0.1f, fHeight - fHeightElem
* (fStartElem + iAlNumberSamples.get(selection.getTexture())), 0.001f);
gl.glVertex3f(fBarWidth, fHeight - fHeightElem
* (fStartElem + iAlNumberSamples.get(selection.getTexture())), 0.001f);
gl.glEnd();
}
// reset draw color (opaque yellow) for subsequent rendering
gl.glColor4f(1f, 1f, 0f, 1f);
}
/**
 * Render the second stage of the hierarchy (Texture): binds the texture of the
 * currently selected group (iSelectorBar - 1) and draws it as a single quad filling
 * the level-2 area (width = frustum width / 4, scaled by fAnimationScale). The quad
 * is pickable via HIER_HEAT_MAP_FIELD_SELECTION. Texture coordinates are clamped and
 * sampled with GL_NEAREST so individual heat map cells stay sharp.
 *
 * @param gl
 */
private void renderTextureHeatMap(GL gl) {
float fHeight;
float fWidth;
Texture TexTemp1 = AlTextures.get(iSelectorBar - 1);
TexTemp1.enable();
TexTemp1.bind();
// clamp at the edges and use nearest-neighbor filtering (no interpolation
// between heat map cells)
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST);
gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST);
TextureCoords texCoords1 = TexTemp1.getImageTexCoords();
// save current color / line state; restored by glPopAttrib below
gl.glPushAttrib(GL.GL_CURRENT_BIT | GL.GL_LINE_BIT);
fHeight = viewFrustum.getHeight();
fWidth = viewFrustum.getWidth() / 4.0f * fAnimationScale;
gl.glPushName(pickingManager.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_FIELD_SELECTION, 1));
// textured quad; note top/bottom texcoords are swapped relative to the y axis,
// flipping the image vertically
gl.glBegin(GL.GL_QUADS);
gl.glTexCoord2d(texCoords1.left(), texCoords1.top());
// gl.glVertex3f(0, fHeight / 2, 0);
gl.glVertex3f(0, 0, 0);
gl.glTexCoord2d(texCoords1.left(), texCoords1.bottom());
gl.glVertex3f(0, fHeight, 0);
gl.glTexCoord2d(texCoords1.right(), texCoords1.bottom());
gl.glVertex3f(fWidth, fHeight, 0);
gl.glTexCoord2d(texCoords1.right(), texCoords1.top());
// gl.glVertex3f(fWidth, fHeight / 2, 0);
gl.glVertex3f(fWidth, 0, 0);
gl.glEnd();
// Texture TexTemp2 = AlTextures.get(iSelectorBar);
// TexTemp2.enable();
// TexTemp2.bind();
// TextureCoords texCoords2 = TexTemp2.getImageTexCoords();
// gl.glBegin(GL.GL_QUADS);
// gl.glTexCoord2d(texCoords2.left(), texCoords2.top());
// gl.glVertex3f(0, 0, 0);
// gl.glTexCoord2d(texCoords2.left(), texCoords2.bottom());
// gl.glVertex3f(0, fHeight / 2, 0);
// gl.glTexCoord2d(texCoords2.right(), texCoords2.bottom());
// gl.glVertex3f(fWidth, fHeight / 2, 0);
// gl.glTexCoord2d(texCoords2.right(), texCoords2.top());
// gl.glVertex3f(fWidth, 0, 0);
// gl.glEnd();
gl.glPopName();
gl.glPopAttrib();
TexTemp1.disable();
// TexTemp2.disable();
}
/**
 * Renders the marker in the level-2 texture showing which part is currently displayed
 * in level 3: a yellow outline around the [iFirstSample, iLastSample] range, a
 * connecting "selected domain" band towards level 3, and a pickable arrow icon that
 * toggles whether the embedded heat map is in focus. Also updates the cursor
 * positions (fPosCursorFirstElement/fPosCursorLastElement) while no drag is active,
 * and renders a one-shot caption with the current sample count when requested.
 *
 * @param gl the GL context to render into
 */
private void renderMarkerTexture(final GL gl) {
    float fFieldWith = viewFrustum.getWidth() / 4.0f * fAnimationScale;
    // height of one sample row within the currently selected texture
    float fHeightSample = viewFrustum.getHeight() / (iAlNumberSamples.get(iSelectorBar - 1));
    Vec3f startpoint1, endpoint1, startpoint2, endpoint2;
    // yellow outline around the currently visible sample range
    gl.glColor4f(1, 1, 0, 1);
    gl.glLineWidth(2f);
    gl.glBegin(GL.GL_LINE_LOOP);
    gl.glVertex3f(0, viewFrustum.getHeight() - iFirstSample * fHeightSample, 0);
    gl.glVertex3f(fFieldWith, viewFrustum.getHeight() - iFirstSample * fHeightSample, 0);
    gl.glVertex3f(fFieldWith, viewFrustum.getHeight() - (iLastSample + 1) * fHeightSample, 0);
    gl.glVertex3f(0, viewFrustum.getHeight() - (iLastSample + 1) * fHeightSample, 0);
    gl.glEnd();
    // while not dragging, keep the cursor positions in sync with the sample range
    // (idiomatic `!flag` instead of `flag == false`)
    if (!bIsDraggingActive) {
        fPosCursorFirstElement = viewFrustum.getHeight() - iFirstSample * fHeightSample;
        fPosCursorLastElement = viewFrustum.getHeight() - (iLastSample + 1) * fHeightSample;
    }
    // band connecting the marked range with the level-3 view
    startpoint1 = new Vec3f(fFieldWith, viewFrustum.getHeight() - iFirstSample * fHeightSample, 0);
    endpoint1 = new Vec3f(fFieldWith + GAP_LEVEL2_3, viewFrustum.getHeight(), 0);
    startpoint2 = new Vec3f(fFieldWith, viewFrustum.getHeight() - (iLastSample + 1) * fHeightSample, 0);
    endpoint2 = new Vec3f(fFieldWith + GAP_LEVEL2_3, 0.0f, 0);
    renderSelectedDomain(gl, startpoint1, endpoint1, startpoint2, endpoint2);
    // pickable arrow icon toggling heat-map focus; texcoords are flipped vertically
    // depending on the current focus state so the arrow points the right way
    Texture tempTexture = iconTextureManager.getIconTexture(gl, EIconTextures.NAVIGATION_NEXT_BIG_MIDDLE);
    tempTexture.enable();
    tempTexture.bind();
    float fYCoord = viewFrustum.getHeight() / 2;
    TextureCoords texCoords = tempTexture.getImageTexCoords();
    gl.glPushName(pickingManager.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_INFOCUS_SELECTION,
        1));
    if (bIsHeatmapInFocus) {
        gl.glBegin(GL.GL_POLYGON);
        gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
        gl.glVertex3f(fFieldWith + 0.2f, fYCoord - 0.3f, 0.1f);
        gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
        gl.glVertex3f(fFieldWith + 0.2f, fYCoord + 0.3f, 0.1f);
        gl.glTexCoord2f(texCoords.right(), texCoords.top());
        gl.glVertex3f(fFieldWith + 0.3f, fYCoord + 0.3f, 0.1f);
        gl.glTexCoord2f(texCoords.left(), texCoords.top());
        gl.glVertex3f(fFieldWith + 0.3f, fYCoord - 0.3f, 0.1f);
        gl.glEnd();
    }
    else {
        gl.glBegin(GL.GL_POLYGON);
        gl.glTexCoord2f(texCoords.left(), texCoords.top());
        gl.glVertex3f(fFieldWith + 0.2f, fYCoord - 0.3f, 0.1f);
        gl.glTexCoord2f(texCoords.right(), texCoords.top());
        gl.glVertex3f(fFieldWith + 0.2f, fYCoord + 0.3f, 0.1f);
        gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
        gl.glVertex3f(fFieldWith + 0.3f, fYCoord + 0.3f, 0.1f);
        gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
        gl.glVertex3f(fFieldWith + 0.3f, fYCoord - 0.3f, 0.1f);
        gl.glEnd();
    }
    gl.glPopName();
    tempTexture.disable();
    // one-shot caption showing the current number of samples near the picked row
    // (idiomatic `flag` instead of `flag == true`)
    if (bRenderCaption) {
        renderCaption(gl, "Number Samples:" + iSamplesPerHeatmap, 0.0f, viewFrustum.getHeight()
            - iPickedSample * fHeightSample, 0.01f);
        bRenderCaption = false;
    }
}
/**
 * Renders markers in the level-2 texture for selected elements: stippled vertical
 * outlines for experiment columns that are mouse-over (MOUSE_OVER_COLOR) or selected
 * (SELECTED_COLOR), and horizontal outlines for gene rows from AlSelection that fall
 * into the current texture (iSelectorBar) or the following one.
 *
 * @param gl the GL context to render into
 */
private void renderSelectedElementsTexture(GL gl) {
    float fFieldWith = viewFrustum.getWidth() / 4.0f * fAnimationScale;
    // height of one sample row within the currently selected texture
    float fHeightSample = viewFrustum.getHeight() / iAlNumberSamples.get(iSelectorBar - 1);
    // float fHeightSample = viewFrustum.getHeight() / (iNrSamplesPerTexture);// * 2);
    // width of one experiment column
    float fExpWidth = fFieldWith / set.getVA(iStorageVAID).size();
    gl.glEnable(GL.GL_LINE_STIPPLE);
    gl.glLineStipple(2, (short) 0xAAAA);
    // mouse-over experiment columns
    gl.glColor4fv(MOUSE_OVER_COLOR, 0);
    Set<Integer> selectedSet = storageSelectionManager.getElements(ESelectionType.MOUSE_OVER);
    int iColumnIndex = 0;
    for (int iTempLine : set.getVA(iStorageVAID)) {
        // PERF: Set.contains replaces the former linear scan over selectedSet;
        // behavior is identical (both compare by unboxed integer value)
        if (selectedSet.contains(iTempLine)) {
            gl.glBegin(GL.GL_LINE_LOOP);
            gl.glVertex3f(iColumnIndex * fExpWidth, 0, SELECTION_Z);
            gl.glVertex3f((iColumnIndex + 1) * fExpWidth, 0, SELECTION_Z);
            gl.glVertex3f((iColumnIndex + 1) * fExpWidth, viewFrustum.getHeight(), SELECTION_Z);
            gl.glVertex3f(iColumnIndex * fExpWidth, viewFrustum.getHeight(), SELECTION_Z);
            gl.glEnd();
        }
        iColumnIndex++;
    }
    // selected experiment columns
    gl.glColor4fv(SELECTED_COLOR, 0);
    selectedSet = storageSelectionManager.getElements(ESelectionType.SELECTION);
    int iLineIndex = 0;
    for (int iTempLine : set.getVA(iStorageVAID)) {
        if (selectedSet.contains(iTempLine)) {
            gl.glBegin(GL.GL_LINE_LOOP);
            gl.glVertex3f(iLineIndex * fExpWidth, 0, SELECTION_Z);
            gl.glVertex3f((iLineIndex + 1) * fExpWidth, 0, SELECTION_Z);
            gl.glVertex3f((iLineIndex + 1) * fExpWidth, viewFrustum.getHeight(), SELECTION_Z);
            gl.glVertex3f(iLineIndex * fExpWidth, viewFrustum.getHeight(), SELECTION_Z);
            gl.glEnd();
        }
        iLineIndex++;
    }
    gl.glDisable(GL.GL_LINE_STIPPLE);
    // gene-row markers from the hierarchical selection list
    for (HeatMapSelection selection : AlSelection) {
        if (selection.getSelectionType() == ESelectionType.MOUSE_OVER) {
            gl.glColor4fv(MOUSE_OVER_COLOR, 0);
        }
        else if (selection.getSelectionType() == ESelectionType.SELECTION) {
            gl.glColor4fv(SELECTED_COLOR, 0);
        }
        // else if (selection.getSelectionType() == ESelectionType.DESELECTED) {
        // gl.glColor4f(1, 1, 1, 0.5f);
        else
            continue;
        // elements in texture
        if (iSelectorBar == selection.getTexture() + 1) {
            // selection lies in the currently displayed texture
            gl.glLineWidth(2f);
            gl.glBegin(GL.GL_LINE_LOOP);
            gl.glVertex3f(-0.1f, viewFrustum.getHeight() - (selection.getPos() - 1) * fHeightSample,
                SELECTION_Z);
            gl.glVertex3f(fFieldWith + 0.1f, viewFrustum.getHeight() - (selection.getPos() - 1)
                * fHeightSample, SELECTION_Z);
            gl.glVertex3f(fFieldWith + 0.1f,
                viewFrustum.getHeight() - selection.getPos() * fHeightSample, SELECTION_Z);
            gl.glVertex3f(-0.1f, viewFrustum.getHeight() - selection.getPos() * fHeightSample,
                SELECTION_Z);
            gl.glEnd();
        }
        else if (iSelectorBar + 1 == selection.getTexture() + 1) {
            // selection lies in the next texture; offset by the current texture's sample count
            gl.glLineWidth(2f);
            gl.glBegin(GL.GL_LINE_LOOP);
            gl.glVertex3f(-0.1f, viewFrustum.getHeight()
                - (selection.getPos() - 1 + iAlNumberSamples.get(iSelectorBar - 1)) * fHeightSample,
                SELECTION_Z);
            gl.glVertex3f(fFieldWith + 0.1f, viewFrustum.getHeight()
                - (selection.getPos() - 1 + iAlNumberSamples.get(iSelectorBar - 1)) * fHeightSample,
                SELECTION_Z);
            gl.glVertex3f(fFieldWith + 0.1f, viewFrustum.getHeight()
                - (selection.getPos() + iAlNumberSamples.get(iSelectorBar - 1)) * fHeightSample,
                SELECTION_Z);
            gl.glVertex3f(-0.1f, viewFrustum.getHeight()
                - (selection.getPos() + iAlNumberSamples.get(iSelectorBar - 1)) * fHeightSample,
                SELECTION_Z);
            gl.glEnd();
        }
    }
}
/**
 * Render cursor used for controlling hierarchical heatmap (e.g. next Texture, previous Texture, set
 * heatmap in focus).
 *
 * Draws, in this order: (1) a pickable "previous texture" arrow strip along the top
 * edge (only when a previous texture exists), (2) a pickable "next texture" arrow
 * strip along the bottom edge (only when a next texture exists), (3) the pickable
 * first/last-element cursors on the left, and (4) a translucent block between the
 * two cursors for dragging the whole range. The arrow strips use narrower geometry
 * (fWidth / 5, fWidth / 10) when the heat map is in focus.
 *
 * @param gl
 */
private void renderCursor(final GL gl) {
float fHeight = viewFrustum.getHeight();
float fWidth = viewFrustum.getWidth() / 4.0f;
Texture tempTexture = iconTextureManager.getIconTexture(gl, EIconTextures.NAVIGATION_NEXT_BIG_SIDE);
tempTexture.enable();
tempTexture.bind();
TextureCoords texCoords = tempTexture.getImageTexCoords();
// save current color / line state; restored by glPopAttrib at the end
gl.glPushAttrib(GL.GL_CURRENT_BIT | GL.GL_LINE_BIT);
gl.glColor4f(1f, 1, 1, 1f);
// --- top strip: "previous texture" control (only if not on the first texture) ---
if (iSelectorBar != 1) {
// Polygon for selecting previous texture
gl.glPushName(pickingManager
.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_TEXTURE_CURSOR, 1));
// left
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(0.0f, fHeight, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(0.1f, fHeight, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(0.1f, fHeight + 0.1f, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(0.0f, fHeight + 0.1f, 0);
gl.glEnd();
// right
if (bIsHeatmapInFocus) {
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(fWidth / 5 - 0.1f, fHeight, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(fWidth / 5, fHeight, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(fWidth / 5, fHeight + 0.1f, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(fWidth / 5 - 0.1f, fHeight + 0.1f, 0);
gl.glEnd();
}
else {
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(fWidth - 0.1f, fHeight, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(fWidth, fHeight, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(fWidth, fHeight + 0.1f, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(fWidth - 0.1f, fHeight + 0.1f, 0);
gl.glEnd();
}
// switch to the "middle" arrow icon for the center piece
tempTexture = iconTextureManager.getIconTexture(gl, EIconTextures.NAVIGATION_NEXT_BIG_MIDDLE);
tempTexture.enable();
tempTexture.bind();
texCoords = tempTexture.getImageTexCoords();
// middle
if (bIsHeatmapInFocus) {
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(fWidth / 10 - 0.15f, fHeight, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(fWidth / 10 + 0.15f, fHeight, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(fWidth / 10 + 0.15f, fHeight + 0.1f, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(fWidth / 10 - 0.15f, fHeight + 0.1f, 0);
gl.glEnd();
}
else {
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(fWidth / 2 - 0.15f, fHeight, 0.001f);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(fWidth / 2 + 0.15f, fHeight, 0.001f);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(fWidth / 2 + 0.15f, fHeight + 0.1f, 0.001f);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(fWidth / 2 - 0.15f, fHeight + 0.1f, 0.001f);
gl.glEnd();
// fill gap between middle and side
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(0.1f, fHeight, 0);
gl.glVertex3f(fWidth / 2 - 0.15f, fHeight, 0);
gl.glVertex3f(fWidth / 2 - 0.15f, fHeight + 0.1f, 0);
gl.glVertex3f(0.1f, fHeight + 0.1f, 0);
gl.glEnd();
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(fWidth - 0.1f, fHeight, 0);
gl.glVertex3f(fWidth / 2 + 0.15f, fHeight, 0);
gl.glVertex3f(fWidth / 2 + 0.15f, fHeight + 0.1f, 0);
gl.glVertex3f(fWidth - 0.1f, fHeight + 0.1f, 0);
gl.glEnd();
}
gl.glPopName();
}
// --- bottom strip: "next texture" control (only if not on the last texture) ---
tempTexture = iconTextureManager.getIconTexture(gl, EIconTextures.NAVIGATION_NEXT_BIG_SIDE);
tempTexture.enable();
tempTexture.bind();
texCoords = tempTexture.getImageTexCoords();
// if (iSelectorBar != iNrSelBar - 1) {
if (iSelectorBar != iNrSelBar) {
// Polygon for selecting next texture
gl.glPushName(pickingManager
.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_TEXTURE_CURSOR, 2));
// left
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(0.0f, 0.0f, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(0.1f, 0.0f, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(0.1f, -0.1f, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(0.0f, -0.1f, 0);
gl.glEnd();
// right
if (bIsHeatmapInFocus) {
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(fWidth / 5 - 0.1f, 0, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(fWidth / 5, 0, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(fWidth / 5, -0.1f, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(fWidth / 5 - 0.1f, -0.1f, 0);
gl.glEnd();
}
else {
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(fWidth - 0.1f, 0, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(fWidth, 0, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(fWidth, -0.1f, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(fWidth - 0.1f, -0.1f, 0);
gl.glEnd();
}
// switch to the "middle" arrow icon for the center piece
tempTexture = iconTextureManager.getIconTexture(gl, EIconTextures.NAVIGATION_NEXT_BIG_MIDDLE);
tempTexture.enable();
tempTexture.bind();
texCoords = tempTexture.getImageTexCoords();
// middle
if (bIsHeatmapInFocus) {
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(fWidth / 10 - 0.15f, 0, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(fWidth / 10 + 0.15f, 0, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(fWidth / 10 + 0.15f, -0.1f, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(fWidth / 10 - 0.15f, -0.1f, 0);
gl.glEnd();
}
else {
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(fWidth / 2 - 0.15f, 0, 0.001f);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(fWidth / 2 + 0.15f, 0, 0.001f);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(fWidth / 2 + 0.15f, -0.1f, 0.001f);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(fWidth / 2 - 0.15f, -0.1f, 0.001f);
gl.glEnd();
// fill gap between middle and side
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(0.1f, 0, 0);
gl.glVertex3f(fWidth / 2 - 0.15f, 0, 0);
gl.glVertex3f(fWidth / 2 - 0.15f, -0.1f, 0);
gl.glVertex3f(0.1f, -0.1f, 0);
gl.glEnd();
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(fWidth - 0.1f, 0, 0);
gl.glVertex3f(fWidth / 2 + 0.15f, 0, 0);
gl.glVertex3f(fWidth / 2 + 0.15f, -0.1f, 0);
gl.glVertex3f(fWidth - 0.1f, -0.1f, 0);
gl.glEnd();
}
gl.glPopName();
}
// --- left side: first/last element cursors and the draggable block between them ---
tempTexture = iconTextureManager.getIconTexture(gl, EIconTextures.NAVIGATION_NEXT_SMALL);
tempTexture.enable();
tempTexture.bind();
texCoords = tempTexture.getImageTexCoords();
// Polygon for iFirstElement-Cursor
gl.glPushName(pickingManager.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_CURSOR, 1));
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(0.0f, fPosCursorFirstElement, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(-GAP_LEVEL1_2 / 4, fPosCursorFirstElement, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(-GAP_LEVEL1_2 / 4, fPosCursorFirstElement + 0.1f, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(0.0f, fPosCursorFirstElement + 0.1f, 0);
gl.glEnd();
gl.glPopName();
// Polygon for iLastElement-Cursor
gl.glPushName(pickingManager.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_CURSOR, 2));
gl.glBegin(GL.GL_POLYGON);
gl.glTexCoord2f(texCoords.right(), texCoords.top());
gl.glVertex3f(0.0f, fPosCursorLastElement, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.top());
gl.glVertex3f(-GAP_LEVEL1_2 / 4, fPosCursorLastElement, 0);
gl.glTexCoord2f(texCoords.left(), texCoords.bottom());
gl.glVertex3f(-GAP_LEVEL1_2 / 4, fPosCursorLastElement - 0.1f, 0);
gl.glTexCoord2f(texCoords.right(), texCoords.bottom());
gl.glVertex3f(0.0f, fPosCursorLastElement - 0.1f, 0);
gl.glEnd();
gl.glPopName();
// fill gap between cursor
gl.glColor4f(0f, 0f, 0f, 0.45f);
gl.glPushName(pickingManager.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_BLOCK_CURSOR, 1));
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(-GAP_LEVEL1_2 / 4, fPosCursorLastElement, 0);
gl.glVertex3f(0.0f, fPosCursorLastElement, 0);
gl.glVertex3f(0.0f, fPosCursorFirstElement, 0);
gl.glVertex3f(-GAP_LEVEL1_2 / 4, fPosCursorFirstElement, 0);
gl.glEnd();
gl.glPopName();
gl.glPopAttrib();
tempTexture.disable();
}
/**
 * Main render entry point: processes queued events, drives the currently active
 * interaction (cursor drag, block drag, experiment/gene group split), replays the
 * pre-built display list, positions and renders the embedded heat map view, and
 * finally renders the context menu.
 *
 * @param gl the GL context to render into
 */
@Override
public void display(GL gl) {
    processEvents();
    if (generalManager.isWiiModeActive()) {
        handleWiiInput();
    }
    // each active interaction mode is advanced once per frame and terminated
    // when the mouse button is released
    if (bIsDraggingActive) {
        handleCursorDragging(gl);
        if (glMouseListener.wasMouseReleased()) {
            bIsDraggingActive = false;
        }
    }
    if (bIsDraggingWholeBlock) {
        handleBlockDragging(gl);
        if (glMouseListener.wasMouseReleased()) {
            bIsDraggingWholeBlock = false;
        }
    }
    if (bSplitGroupExp) {
        handleGroupSplitExperiments(gl);
        if (glMouseListener.wasMouseReleased()) {
            bSplitGroupExp = false;
        }
    }
    if (bSplitGroupGene) {
        handleGroupSplitGenes(gl);
        if (glMouseListener.wasMouseReleased()) {
            // BUGFIX: this previously reset bSplitGroupExp (copy-paste error),
            // leaving the gene-split mode stuck active after mouse release
            bSplitGroupGene = false;
        }
    }
    gl.glCallList(iGLDisplayListToCall);
    float fright = 0.0f;
    float ftop = viewFrustum.getTop();
    float fleftOffset = 0;
    if (bSkipLevel1 == false) {
        gl.glTranslatef(GAP_LEVEL2_3, 0, 0);
    }
    // render embedded heat map; offsets depend on whether it is in focus
    if (bIsHeatmapInFocus) {
        fright = viewFrustum.getWidth() - 1.2f;
        fleftOffset = 0.095f + // width level 1 + boarder
            GAP_LEVEL1_2 + // width gap between level 1 and 2
            viewFrustum.getWidth() / 4f * 0.2f;
    }
    else {
        fright = viewFrustum.getWidth() - 2.75f;
        fleftOffset = 0.075f + // width level 1
            GAP_LEVEL1_2 + // width gap between level 1 and 2
            viewFrustum.getWidth() / 4f;
    }
    if (glHeatMapView.isInDefaultOrientation()) {
        gl.glTranslatef(fleftOffset, +0.4f, 0);
    }
    else {
        gl.glTranslatef(fleftOffset, -0.2f, 0);
    }
    glHeatMapView.getViewFrustum().setTop(ftop);
    glHeatMapView.getViewFrustum().setRight(fright);
    gl.glPushName(pickingManager.getPickingID(iUniqueID, EPickingType.HIER_HEAT_MAP_VIEW_SELECTION,
        glHeatMapView.getID()));
    glHeatMapView.displayRemote(gl);
    gl.glPopName();
    fWidthEHM = glHeatMapView.getViewFrustum().getWidth() - 0.95f;
    // undo the translations applied above
    if (glHeatMapView.isInDefaultOrientation()) {
        gl.glTranslatef(-fleftOffset, -0.4f, 0);
    }
    else {
        gl.glTranslatef(-fleftOffset, +0.2f, 0);
    }
    if (bSkipLevel1 == false) {
        gl.glTranslatef(-GAP_LEVEL2_3, 0, 0);
    }
    // // render embedded dendrogram
    // if (bIsHeatmapInFocus) {
    // fright = viewFrustum.getWidth() - 1.2f;
    // fleftOffset = 0.095f + // width level 1 + boarder
    // GAP_LEVEL1_2 + // width gap between level 1 and 2
    // viewFrustum.getWidth() / 4f * 0.2f;
    // else {
    // fright = viewFrustum.getWidth() - 2.75f;
    // fleftOffset = 0.075f + // width level 1
    // GAP_LEVEL1_2 + // width gap between level 1 and 2
    // viewFrustum.getWidth() / 4f;
    // gl.glTranslatef(fleftOffset, 0, 1);
    // glDendrogram.getViewFrustum().setTop(ftop);
    // glDendrogram.getViewFrustum().setRight(fright);
    // glDendrogram.setDisplayListDirty();
    // glDendrogram.displayRemote(gl);
    // gl.glTranslatef(-fleftOffset, -0, -1);
    contextMenu.render(gl, this);
}
/**
 * Compiles the static part of the view into a GL display list: background quad,
 * gene class markers, level 1 (overview bar, unless skipped) and level 2 (texture,
 * range marker, selection markers, cursor, experiment class markers). Textures are
 * re-created first if bRedrawTextures is set, and the embedded views are marked
 * dirty if the frustum changed. Frustum padding applied at the top is undone before
 * the list is closed.
 *
 * @param gl                  the GL context to compile into
 * @param iGLDisplayListIndex index of the display list to (re)build
 */
private void buildDisplayList(final GL gl, int iGLDisplayListIndex) {
if (bRedrawTextures) {
initTextures(gl);
bRedrawTextures = false;
}
if (bHasFrustumChanged) {
glHeatMapView.setDisplayListDirty();
glDendrogram.setDisplayListDirty();
bHasFrustumChanged = false;
}
gl.glNewList(iGLDisplayListIndex, GL.GL_COMPILE);
gl.glMatrixMode(GL.GL_MODELVIEW);
gl.glLoadIdentity();
// background color
gl.glColor4f(0, 0, 0, 0.15f);
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(0, 0, -0.1f);
gl.glVertex3f(viewFrustum.getRight(), 0, -0.1f);
gl.glVertex3f(viewFrustum.getRight(), viewFrustum.getHeight(), -0.1f);
gl.glVertex3f(0, viewFrustum.getHeight(), -0.1f);
gl.glEnd();
// padding along borders (undone again before glEndList)
viewFrustum.setTop(viewFrustum.getTop() - 0.6f);
viewFrustum.setLeft(viewFrustum.getLeft() + 0.1f);
gl.glTranslatef(0.0f, 0.4f, 0);
// gene class markers only when group info is available
if (set.getVA(iContentVAID).getGroupList() != null)
renderClassAssignmentsGenes(gl);
gl.glTranslatef(0.1f, 0.0f, 0);
handleTexturePicking(gl);
// all stuff for rendering level 1 (overview bar)
if (bSkipLevel1 == false) {
renderOverviewBar(gl);
renderMarkerOverviewBar(gl);
renderSelectedElementsOverviewBar(gl);
gl.glTranslatef(GAP_LEVEL1_2, 0, 0);
}
else {
gl.glColor4f(1f, 1f, 0f, 1f);
// width of dragging cursor
gl.glTranslatef(0.2f, 0.0f, 0);
}
// level 2 is rendered narrower while the embedded heat map is in focus
if (bIsHeatmapInFocus) {
fAnimationScale = 0.2f;
}
else {
fAnimationScale = 1.0f;
}
// all stuff for rendering level 2 (textures)
gl.glColor4f(1f, 1f, 1f, 1f);
renderTextureHeatMap(gl);
renderMarkerTexture(gl);
renderSelectedElementsTexture(gl);
renderCursor(gl);
// experiment class markers only when group info is available
if (set.getVA(iStorageVAID).getGroupList() != null) {
renderClassAssignmentsExperimentsLevel2(gl);
renderClassAssignmentsExperimentsLevel3(gl);
}
// undo padding and translations applied above
viewFrustum.setTop(viewFrustum.getTop() + 0.6f);
viewFrustum.setLeft(viewFrustum.getLeft() - 0.1f);
gl.glTranslatef(-0.1f, -0.4f, 0);
if (bSkipLevel1 == false) {
gl.glTranslatef(-GAP_LEVEL1_2, 0, 0);
}
else {
// width of dragging cursor
gl.glTranslatef(-0.2f, 0.0f, 0);
}
// gl.glDisable(GL.GL_STENCIL_TEST);
gl.glEndList();
}
/**
 * Function responsible for handling SelectionDelta for embedded heatmap.
 *
 * Builds a virtual-array delta containing the iSamplesPerHeatmap content elements
 * starting at the current texture/sample offset and pushes it (plus any matching
 * selection states from AlSelection) into the embedded heat map. Then does the same
 * for the experiment dimension: all experiments of the storage VA, with MOUSE_OVER /
 * SELECTION states taken from AlExpMouseOver / AlExpSelected.
 */
private void triggerSelectionBlock() {
// absolute index of the first sample to show: first sample within the current
// texture plus the sizes of all preceding textures
int iCount = iFirstSample;
for (int i = 0; i < iSelectorBar - 1; i++)
iCount += iAlNumberSamples.get(i);
List<SelectionCommand> commands = new ArrayList<SelectionCommand>();
// SelectionCommand command = new SelectionCommand(ESelectionCommandType.RESET);
// commands.add(command);
// NOTE(review): the content command list is sent empty (RESET is commented
// out), while the experiment list below does send a RESET — confirm intended
glHeatMapView.handleContentTriggerSelectionCommand(eFieldDataType, commands);
glHeatMapView.resetView();
IVirtualArrayDelta delta = new VirtualArrayDelta(eFieldDataType);
ISelectionDelta selectionDelta = new SelectionDelta(eFieldDataType);
IVirtualArray currentVirtualArray = set.getVA(iContentVAID);
int iIndex = 0;
int iContentIndex = 0;
for (int index = 0; index < iSamplesPerHeatmap; index++) {
iIndex = iCount + index;
// guard against running past the end of the VA; when out of range the
// previous iContentIndex is appended again
if (iIndex < currentVirtualArray.size()) {
iContentIndex = currentVirtualArray.get(iIndex);
}
delta.add(VADeltaItem.append(iContentIndex));
// set elements selected in embedded heatMap
for (HeatMapSelection selection : AlSelection) {
if (selection.getContentIndex() == iContentIndex) {
selectionDelta.addSelection(iContentIndex, selection.getSelectionType());
}
}
}
glHeatMapView.handleVirtualArrayUpdate(delta, getShortInfo());
if (selectionDelta.size() > 0) {
glHeatMapView.handleSelectionUpdate(selectionDelta, true, null);
}
// selected experiments
commands = new ArrayList<SelectionCommand>();
SelectionCommand command = new SelectionCommand(ESelectionCommandType.RESET);
commands.add(command);
glHeatMapView.handleStorageTriggerSelectionCommand(eExperimentDataType, commands);
IVirtualArrayDelta deltaExp = new VirtualArrayDelta(eExperimentDataType);
ISelectionDelta selectionDeltaEx = new SelectionDelta(eExperimentDataType);
IVirtualArray currentVirtualArrayEx = set.getVA(iStorageVAID);
for (int index = 0; index < currentVirtualArrayEx.size(); index++) {
iContentIndex = currentVirtualArrayEx.get(index);
deltaExp.add(VADeltaItem.append(iContentIndex));
// set elements selected in embedded heatMap
for (Integer selection : AlExpMouseOver) {
if (selection == iContentIndex) {
selectionDeltaEx.addSelection(iContentIndex, ESelectionType.MOUSE_OVER);
}
}
for (Integer selection : AlExpSelected) {
if (selection == iContentIndex) {
selectionDeltaEx.addSelection(iContentIndex, ESelectionType.SELECTION);
}
}
}
glHeatMapView.handleVirtualArrayUpdate(deltaExp, getShortInfo());
if (selectionDeltaEx.size() > 0) {
glHeatMapView.handleSelectionUpdate(selectionDeltaEx, true, null);
}
}
/**
 * Toggles the orientation of the embedded heat map view and marks this view's
 * display list dirty so it is rebuilt on the next frame.
 *
 * NOTE(review): the bRenderStorageHorizontally parameter is ignored — the
 * orientation is always flipped regardless of its value; confirm whether callers
 * rely on toggle semantics before honoring the flag.
 *
 * @param bRenderStorageHorizontally unused; orientation is toggled unconditionally
 */
public void renderHorizontally(boolean bRenderStorageHorizontally) {
    // flip the current orientation: default -> non-default and vice versa
    boolean wasDefaultOrientation = glHeatMapView.isInDefaultOrientation();
    glHeatMapView.changeOrientation(!wasDefaultOrientation);
    setDisplayListDirty();
}
/**
 * Initializes the content and storage virtual-array IDs, optionally runs
 * clustering, and (re)initializes the selection managers.
 *
 * Flow: pick the content VA (external selection when rendering only context,
 * otherwise the complete selection, creating it on demand) and the storage VA;
 * attach imported group info if present; when bUseClusteredVA is set, cluster
 * genes, experiments, or both (affinity depends on clusterstate's clusterer type)
 * and adopt the resulting VA ids, falling back to the un-clustered ids when
 * clustering returns -1; finally reset both selection managers and register every
 * row/column element.
 */
@Override
protected void initLists() {
if (bRenderOnlyContext) {
iContentVAID = mapVAIDs.get(EStorageBasedVAType.EXTERNAL_SELECTION);
}
else {
// lazily create the complete selection VA on first use
if (!mapVAIDs.containsKey(EStorageBasedVAType.COMPLETE_SELECTION)) {
initCompleteList();
}
iContentVAID = mapVAIDs.get(EStorageBasedVAType.COMPLETE_SELECTION);
}
iStorageVAID = mapVAIDs.get(EStorageBasedVAType.STORAGE_SELECTION);
// In case of importing group info
if (set.isClusterInfo())
set.getVA(iContentVAID).setGroupList(set.getGroupList());
// clustering triggered by StartClusteringAction
if (bUseClusteredVA) {
// resolve the input VAs for clustering the same way as above
int iContentVAIDtemp = 0, iStorageVAIDtemp = 0;
if (bRenderOnlyContext) {
iContentVAIDtemp = mapVAIDs.get(EStorageBasedVAType.EXTERNAL_SELECTION);
}
else {
if (!mapVAIDs.containsKey(EStorageBasedVAType.COMPLETE_SELECTION)) {
initCompleteList();
}
iContentVAIDtemp = mapVAIDs.get(EStorageBasedVAType.COMPLETE_SELECTION);
}
iStorageVAIDtemp = mapVAIDs.get(EStorageBasedVAType.STORAGE_SELECTION);
if (clusterstate.getClustererType() == EClustererType.GENE_CLUSTERING) {
// cluster genes only; -1 signals clustering failure -> keep input VA
int iVAid = set.cluster(iContentVAIDtemp, iStorageVAIDtemp, clusterstate);
if (iVAid == -1)
iContentVAID = iContentVAIDtemp;
else
iContentVAID = iVAid;
iStorageVAID = iStorageVAIDtemp;
}
else if (clusterstate.getClustererType() == EClustererType.EXPERIMENTS_CLUSTERING) {
// cluster experiments only
int iVAid = set.cluster(iContentVAIDtemp, iStorageVAIDtemp, clusterstate);
if (iVAid == -1)
iStorageVAID = iStorageVAIDtemp;
else
iStorageVAID = iVAid;
iContentVAID = iContentVAIDtemp;
}
else {
// cluster both: experiments first, then genes on the clustered storage VA
clusterstate.setClustererType(EClustererType.EXPERIMENTS_CLUSTERING);
int iVAid = set.cluster(iContentVAIDtemp, iStorageVAIDtemp, clusterstate);
if (iVAid == -1)
iStorageVAID = iStorageVAIDtemp;
else
iStorageVAID = iVAid;
clusterstate.setClustererType(EClustererType.GENE_CLUSTERING);
iVAid = set.cluster(iContentVAIDtemp, iStorageVAID, clusterstate);
if (iVAid == -1)
iContentVAID = iContentVAIDtemp;
else
iContentVAID = iVAid;
}
// clustering invalidates previously stored heat map selections
AlSelection.clear();
}
// // normal startup
// else {
// if (bRenderOnlyContext) {
// iContentVAID = mapVAIDs.get(EStorageBasedVAType.EXTERNAL_SELECTION);
// else {
// if (!mapVAIDs.containsKey(EStorageBasedVAType.COMPLETE_SELECTION)) {
// initCompleteList();
// iContentVAID = mapVAIDs.get(EStorageBasedVAType.COMPLETE_SELECTION);
// iStorageVAID = mapVAIDs.get(EStorageBasedVAType.STORAGE_SELECTION);
// // In case of importing group info
// if (set.isClusterInfo())
// set.getVA(iContentVAID).setGroupList(set.getGroupList());
contentSelectionManager.resetSelectionManager();
storageSelectionManager.resetSelectionManager();
contentSelectionManager.setVA(set.getVA(iContentVAID));
storageSelectionManager.setVA(set.getVA(iStorageVAID));
int iNumberOfColumns = set.getVA(iContentVAID).size();
int iNumberOfRows = set.getVA(iStorageVAID).size();
// register every storage (row) element with its selection manager
for (int iRowCount = 0; iRowCount < iNumberOfRows; iRowCount++) {
storageSelectionManager.initialAdd(set.getVA(iStorageVAID).get(iRowCount));
}
// this for loop executes one per axis
for (int iColumnCount = 0; iColumnCount < iNumberOfColumns; iColumnCount++) {
contentSelectionManager.initialAdd(set.getVA(iContentVAID).get(iColumnCount));
}
}
/**
 * Returns a one-line summary of the view: its name plus the current
 * gene and experiment counts.
 *
 * @return short, human-readable info string
 */
@Override
public String getShortInfo() {
    final int geneCount = set.getVA(iContentVAID).size();
    final int experimentCount = set.getVA(iStorageVAID).size();
    return "Hierarchical Heat Map (" + geneCount + " genes / " + experimentCount + " experiments)";
}
/**
 * Builds a multi-line description of the view: its type, the current
 * orientation (genes in rows vs. columns) and the active gene filter level.
 *
 * @return detailed, human-readable info text
 */
@Override
public String getDetailedInfo() {
    // StringBuilder: no synchronization needed for a method-local buffer.
    StringBuilder sInfoText = new StringBuilder();
    sInfoText.append("<b>Type:</b> Hierarchical Heat Map\n");
    if (bRenderStorageHorizontally) {
        // Fixed: missing space between the gene count and "Genes"
        // (previously rendered e.g. "12Genes in columns and ...").
        sInfoText.append(set.getVA(iContentVAID).size() + " Genes in columns and "
            + set.getVA(iStorageVAID).size() + " experiments in rows.\n");
    }
    else {
        sInfoText.append(set.getVA(iContentVAID).size() + " Genes in rows and "
            + set.getVA(iStorageVAID).size() + " experiments in columns.\n");
    }
    if (bRenderOnlyContext) {
        sInfoText.append("Showing only genes which occur in one of the other views in focus\n");
    }
    else {
        if (dataFilterLevel == EDataFilterLevel.COMPLETE) {
            sInfoText.append("Showing all genes in the dataset\n");
        }
        else if (dataFilterLevel == EDataFilterLevel.ONLY_MAPPING) {
            sInfoText.append("Showing all genes that have a known DAVID ID mapping\n");
        }
        else if (dataFilterLevel == EDataFilterLevel.ONLY_CONTEXT) {
            sInfoText
                .append("Showing all genes that are contained in any of the KEGG or Biocarta pathways\n");
        }
    }
    return sInfoText.toString();
}
/**
 * Determine selected element in stage 2 (texture): converts the stored
 * picking point to world coordinates, derives the sample row that was hit
 * and centers the selected block (iFirstSample..iLastSample) around it,
 * clamping the block at the top and bottom of the current texture.
 *
 * @param gl the GL context used for the coordinate conversion
 */
private void handleTexturePicking(GL gl) {
int iNumberSample = iAlNumberSamples.get(iSelectorBar - 1);
// int iNumberSample = iNrSamplesPerTexture;// * 2;
float fOffsety;
// height of one sample row in world coordinates
float fHeightSample = viewFrustum.getHeight() / iNumberSample;
float[] fArPickingCoords = new float[3];
if (PickingPoint != null) {
fArPickingCoords =
GLCoordinateUtils.convertWindowCoordinatesToWorldCoordinates(gl, PickingPoint.x,
PickingPoint.y);
// 0.4f: presumably the fixed bottom margin of the texture area — TODO confirm
fOffsety = viewFrustum.getHeight() - fArPickingCoords[1] + 0.4f;
iPickedSample = (int) Math.ceil(fOffsety / fHeightSample);
// consume the picking point so the pick is handled only once
PickingPoint = null;
// NOTE(review): iSamplesPerHeatmap / 2 is integer division, so the
// Math.floor/Math.ceil wrappers below operate on an already-truncated
// quotient and have no effect; both branches use the floored half —
// confirm this is intended.
if (iSamplesPerHeatmap % 2 == 0) {
iFirstSample = iPickedSample - (int) Math.floor(iSamplesPerHeatmap / 2) + 1;
iLastSample = iPickedSample + (int) Math.floor(iSamplesPerHeatmap / 2);
}
else {
iFirstSample = iPickedSample - (int) Math.ceil(iSamplesPerHeatmap / 2);
iLastSample = iPickedSample + (int) Math.floor(iSamplesPerHeatmap / 2);
}
// clamp the block when the pick is too close to the texture borders
if (iPickedSample < iSamplesPerHeatmap / 2) {
iPickedSample = (int) Math.floor(iSamplesPerHeatmap / 2);
iFirstSample = 0;
iLastSample = iSamplesPerHeatmap - 1;
}
else if (iPickedSample > iNumberSample - 1 - iSamplesPerHeatmap / 2) {
iPickedSample = (int) Math.ceil(iNumberSample - iSamplesPerHeatmap / 2);
iLastSample = iNumberSample - 1;
iFirstSample = iNumberSample - iSamplesPerHeatmap;
}
}
setDisplayListDirty();
triggerSelectionBlock();
}
/**
 * Handles the dragging cursor for gene groups: draws a small quad as visual
 * feedback at the mouse position and, once the mouse button is released,
 * splits the picked gene group at the sample rows spanned between the drag
 * start point and the release point.
 *
 * @param gl the GL context
 */
private void handleGroupSplitGenes(final GL gl) {
Point currentPoint = glMouseListener.getPickedPoint();
float[] fArTargetWorldCoordinates = new float[3];
float[] fArDraggedPoint = new float[3];
fArTargetWorldCoordinates =
GLCoordinateUtils.convertWindowCoordinatesToWorldCoordinates(gl, currentPoint.x, currentPoint.y);
// visual feedback: small quad following the mouse cursor
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(fArTargetWorldCoordinates[0], fArTargetWorldCoordinates[1], 0);
gl.glVertex3f(fArTargetWorldCoordinates[0], fArTargetWorldCoordinates[1] + 0.1f, 0);
gl.glVertex3f(fArTargetWorldCoordinates[0] + 0.1f, fArTargetWorldCoordinates[1] + 0.1f, 0);
gl.glVertex3f(fArTargetWorldCoordinates[0] + 0.1f, fArTargetWorldCoordinates[1], 0);
gl.glEnd();
if (glMouseListener.wasMouseReleased()) {
// drag gesture finished
bSplitGroupGene = false;
fArDraggedPoint =
GLCoordinateUtils.convertWindowCoordinatesToWorldCoordinates(gl, DraggingPoint.x,
DraggingPoint.y);
// 0.4f / 0.6f appear to be fixed view margins — TODO confirm
float fYPosDrag = fArDraggedPoint[1] - 0.4f;
float fYPosRelease = fArTargetWorldCoordinates[1] - 0.4f;
float fHeight = viewFrustum.getHeight() - 0.6f;
int iNrSamples = set.getVA(iContentVAID).size();
float fHeightSample = fHeight / iNrSamples;
// map the world y positions back to (top-down) sample indices
int iFirstSample = iNrSamples - (int) Math.floor(fYPosDrag / fHeightSample);
int iLastSample = iNrSamples - (int) Math.ceil(fYPosRelease / fHeightSample);
if (set.getVA(iContentVAID).getGroupList().split(iGroupToSplit, iFirstSample, iLastSample) == false)
System.out.println("Operation not allowed!!");
}
}
/**
 * Handles the dragging cursor for experiments groups: draws a small quad as
 * visual feedback at the mouse position and, once the mouse button is
 * released, splits the picked experiment group at the sample columns
 * spanned between the drag start point and the release point.
 *
 * @param gl the GL context
 */
private void handleGroupSplitExperiments(final GL gl) {
Point currentPoint = glMouseListener.getPickedPoint();
float[] fArTargetWorldCoordinates = new float[3];
float[] fArDraggedPoint = new float[3];
fArTargetWorldCoordinates =
GLCoordinateUtils.convertWindowCoordinatesToWorldCoordinates(gl, currentPoint.x, currentPoint.y);
// visual feedback: small quad following the mouse cursor
gl.glBegin(GL.GL_QUADS);
gl.glVertex3f(fArTargetWorldCoordinates[0], fArTargetWorldCoordinates[1], 0);
gl.glVertex3f(fArTargetWorldCoordinates[0], fArTargetWorldCoordinates[1] + 0.1f, 0);
gl.glVertex3f(fArTargetWorldCoordinates[0] + 0.1f, fArTargetWorldCoordinates[1] + 0.1f, 0);
gl.glVertex3f(fArTargetWorldCoordinates[0] + 0.1f, fArTargetWorldCoordinates[1], 0);
gl.glEnd();
if (glMouseListener.wasMouseReleased()) {
// drag gesture finished
bSplitGroupExp = false;
fArDraggedPoint =
GLCoordinateUtils.convertWindowCoordinatesToWorldCoordinates(gl, DraggingPoint.x,
DraggingPoint.y);
// 0.7f appears to be the fixed left margin of the experiment axis — TODO confirm
float fXPosDrag = fArDraggedPoint[0] - 0.7f;
float fXPosRelease = fArTargetWorldCoordinates[0] - 0.7f;
float fWidth = viewFrustum.getWidth() / 4.0f * fAnimationScale;
int iNrSamples = set.getVA(iStorageVAID).size();
float fWidthSample = fWidth / iNrSamples;
int iFirstSample = (int) Math.floor(fXPosDrag / fWidthSample);
int iLastSample = (int) Math.ceil(fXPosRelease / fWidthSample);
// NOTE(review): unlike handleGroupSplitGenes, the samples are passed in
// reversed order (iLastSample before iFirstSample) — presumably because
// the x axis indices grow in the opposite direction; confirm against
// IGroupList.split().
if (set.getVA(iStorageVAID).getGroupList().split(iGroupToSplit, iLastSample, iFirstSample) == false)
System.out.println("Operation not allowed!!");
}
}
/**
 * Function used for updating position of block (block of elements rendered in EHM) in case of dragging.
 * The selected block (iFirstSample..iLastSample) is recentered around the
 * sample row under the mouse, but only while the whole block stays inside
 * the current texture.
 *
 * @param gl the GL context
 */
private void handleBlockDragging(final GL gl) {
Point currentPoint = glMouseListener.getPickedPoint();
float[] fArTargetWorldCoordinates = new float[3];
int iselElement;
fArTargetWorldCoordinates =
GLCoordinateUtils.convertWindowCoordinatesToWorldCoordinates(gl, currentPoint.x, currentPoint.y);
// 0.6f / 0.4f appear to be fixed view margins — TODO confirm
float fTextureHeight = viewFrustum.getHeight() - 0.6f;
// height of one sample row in the current texture
float fStep = fTextureHeight / (iAlNumberSamples.get(iSelectorBar - 1));
float fYPosMouse = fArTargetWorldCoordinates[1] - 0.4f;
// sample index under the mouse (0 = topmost row)
iselElement = (int) Math.floor((fTextureHeight - fYPosMouse) / fStep);
// NOTE(review): iSamplesPerHeatmap / 2 is integer division, so the
// Math.floor/Math.ceil wrappers below have no effect — confirm intent.
if (iSamplesPerHeatmap % 2 == 0) {
if ((iselElement - (int) Math.floor(iSamplesPerHeatmap / 2) + 1) >= 0
&& (iselElement + (int) Math.floor(iSamplesPerHeatmap / 2)) < iAlNumberSamples
.get(iSelectorBar - 1)) {
iFirstSample = iselElement - (int) Math.floor(iSamplesPerHeatmap / 2) + 1;
fPosCursorFirstElement = fTextureHeight - (iFirstSample * fStep);
iLastSample = iselElement + (int) Math.floor(iSamplesPerHeatmap / 2);
fPosCursorLastElement = fTextureHeight - ((iLastSample + 1) * fStep);
}
}
else {
if ((iselElement - (int) Math.ceil(iSamplesPerHeatmap / 2)) >= 0
&& (iselElement + (int) Math.floor(iSamplesPerHeatmap / 2)) < iAlNumberSamples
.get(iSelectorBar - 1)) {
iFirstSample = iselElement - (int) Math.ceil(iSamplesPerHeatmap / 2);
fPosCursorFirstElement = fTextureHeight - (iFirstSample * fStep);
iLastSample = iselElement + (int) Math.floor(iSamplesPerHeatmap / 2);
fPosCursorLastElement = fTextureHeight - ((iLastSample + 1) * fStep);
}
}
setDisplayListDirty();
triggerSelectionBlock();
// stop the drag once the mouse button is released
if (glMouseListener.wasMouseReleased()) {
bIsDraggingWholeBlock = false;
bDisableCursorDragging = false;
}
}
/**
 * Function used for updating cursor position in case of dragging. Cursor 1
 * moves the first element of the selected block, cursor 2 the last one.
 * The resulting block size is constrained to at least
 * MIN_SAMPLES_PER_HEATMAP and less than a third of the current texture,
 * and the chosen size is persisted to the preference store.
 *
 * @param gl the GL context
 */
private void handleCursorDragging(final GL gl) {
Point currentPoint = glMouseListener.getPickedPoint();
float[] fArTargetWorldCoordinates = new float[3];
int iselElement;
int iNrSamples;
fArTargetWorldCoordinates =
GLCoordinateUtils.convertWindowCoordinatesToWorldCoordinates(gl, currentPoint.x, currentPoint.y);
// 0.6f / 0.4f appear to be fixed view margins — TODO confirm
float fTextureHeight = viewFrustum.getHeight() - 0.6f;
// height of one sample row in the current texture
float fStep = fTextureHeight / (iAlNumberSamples.get(iSelectorBar - 1));
float fYPosMouse = fArTargetWorldCoordinates[1] - 0.4f;
// cursor for iFirstElement
if (iDraggedCursor == 1) {
// only allow moving above the last-element cursor and inside the texture
if (fYPosMouse > fPosCursorLastElement && fYPosMouse <= viewFrustum.getHeight() - 0.6f) {
iselElement = (int) Math.floor((fTextureHeight - fYPosMouse) / fStep);
iNrSamples = iLastSample - iselElement + 1;
if (iNrSamples >= MIN_SAMPLES_PER_HEATMAP
&& iNrSamples < iAlNumberSamples.get(iSelectorBar - 1) / 3) {
fPosCursorFirstElement = fYPosMouse;
iFirstSample = iselElement;
iSamplesPerHeatmap = iLastSample - iFirstSample + 1;
// update Preference store
generalManager.getPreferenceStore().setValue(
PreferenceConstants.HM_NUM_SAMPLES_PER_HEATMAP, iSamplesPerHeatmap);
}
}
}
// cursor for iLastElement
if (iDraggedCursor == 2) {
// only allow moving below the first-element cursor and inside the texture
if (fYPosMouse < fPosCursorFirstElement && fYPosMouse >= 0.0f) {
iselElement = (int) Math.floor((fTextureHeight - fYPosMouse) / fStep);
iNrSamples = iselElement - iFirstSample + 1;
if (iNrSamples >= MIN_SAMPLES_PER_HEATMAP
&& iNrSamples < iAlNumberSamples.get(iSelectorBar - 1) / 3) {
fPosCursorLastElement = fYPosMouse;
iLastSample = iselElement;
iSamplesPerHeatmap = iLastSample - iFirstSample + 1;
// update Preference store
generalManager.getPreferenceStore().setValue(
PreferenceConstants.HM_NUM_SAMPLES_PER_HEATMAP, iSamplesPerHeatmap);
}
}
}
setDisplayListDirty();
triggerSelectionBlock();
// stop the drag once the mouse button is released
if (glMouseListener.wasMouseReleased()) {
bIsDraggingActive = false;
bDisableBlockDragging = false;
}
}
@Override
protected void handleEvents(EPickingType ePickingType, EPickingMode pickingMode, int iExternalID,
Pick pick) {
if (detailLevel == EDetailLevel.VERY_LOW) {
return;
}
switch (ePickingType) {
// handling the groups/clusters of genes
case HIER_HEAT_MAP_GENES_GROUP:
switch (pickingMode) {
case CLICKED:
set.getVA(iContentVAID).getGroupList().get(iExternalID).toggleSelectionType();
setDisplayListDirty();
break;
case DRAGGED:
if (bSplitGroupGene == false) {
bSplitGroupGene = true;
bSplitGroupExp = false;
iGroupToSplit = iExternalID;
DraggingPoint = pick.getPickedPoint();
}
setDisplayListDirty();
break;
case RIGHT_CLICKED:
GroupContextMenuItemContainer groupContextMenuItemContainer =
new GroupContextMenuItemContainer();
groupContextMenuItemContainer.setGeneExperimentFlag(true);
contextMenu.addItemContanier(groupContextMenuItemContainer);
// if (!isRenderedRemote()) {
contextMenu.setLocation(pick.getPickedPoint(), getParentGLCanvas().getWidth(),
getParentGLCanvas().getHeight());
contextMenu.setMasterGLView(this);
break;
case MOUSE_OVER:
// System.out.print("genes group " + iExternalID);
// System.out.print(" number elements in group: ");
// System.out.println(set.getVA(iContentVAID).getGroupList().get(iExternalID)
// .getNrElements());
// setDisplayListDirty();
break;
}
break;
// handling the groups/clusters of experiments
case HIER_HEAT_MAP_EXPERIMENTS_GROUP:
switch (pickingMode) {
case CLICKED:
set.getVA(iStorageVAID).getGroupList().get(iExternalID).toggleSelectionType();
setDisplayListDirty();
break;
case DRAGGED:
if (bSplitGroupExp == false) {
bSplitGroupExp = true;
bSplitGroupGene = false;
iGroupToSplit = iExternalID;
DraggingPoint = pick.getPickedPoint();
}
setDisplayListDirty();
break;
case RIGHT_CLICKED:
GroupContextMenuItemContainer groupContextMenuItemContainer =
new GroupContextMenuItemContainer();
groupContextMenuItemContainer.setGeneExperimentFlag(false);
contextMenu.addItemContanier(groupContextMenuItemContainer);
// if (!isRenderedRemote()) {
contextMenu.setLocation(pick.getPickedPoint(), getParentGLCanvas().getWidth(),
getParentGLCanvas().getHeight());
contextMenu.setMasterGLView(this);
break;
case MOUSE_OVER:
// System.out.print("patients group " + iExternalID);
// System.out.print(" number elements in group: ");
// System.out.println(set.getVA(iStorageVAID).getGroupList().get(iExternalID)
// .getNrElements());
// setDisplayListDirty();
break;
}
break;
// handle click on button for setting EHM in focus
case HIER_HEAT_MAP_INFOCUS_SELECTION:
switch (pickingMode) {
case CLICKED:
bIsHeatmapInFocus = bIsHeatmapInFocus == true ? false : true;
glHeatMapView.setDisplayListDirty();
setDisplayListDirty();
break;
case DRAGGED:
break;
case MOUSE_OVER:
break;
}
break;
// handle click on button for selecting next/previous texture in level 1 and 2
case HIER_HEAT_MAP_TEXTURE_CURSOR:
switch (pickingMode) {
case CLICKED:
if (bSkipLevel1 == false) {
if (iExternalID == 1) {
iSelectorBar
initPosCursor();
triggerSelectionBlock();
setDisplayListDirty();
}
if (iExternalID == 2) {
iSelectorBar++;
initPosCursor();
triggerSelectionBlock();
setDisplayListDirty();
}
setDisplayListDirty();
}
break;
case DRAGGED:
break;
case MOUSE_OVER:
break;
}
break;
// handle dragging cursor for first and last element of block
case HIER_HEAT_MAP_CURSOR:
switch (pickingMode) {
case CLICKED:
break;
case DRAGGED:
if (bDisableCursorDragging)
return;
bIsDraggingActive = true;
bDisableBlockDragging = true;
iDraggedCursor = iExternalID;
setDisplayListDirty();
break;
case MOUSE_OVER:
break;
}
break;
// handle dragging cursor for whole block
case HIER_HEAT_MAP_BLOCK_CURSOR:
switch (pickingMode) {
case CLICKED:
break;
case DRAGGED:
if (bDisableBlockDragging)
return;
bIsDraggingWholeBlock = true;
bDisableCursorDragging = true;
iDraggedCursor = iExternalID;
setDisplayListDirty();
break;
case MOUSE_OVER:
break;
}
break;
// handle click on level 1 (overview bar)
case HIER_HEAT_MAP_TEXTURE_SELECTION:
switch (pickingMode) {
case CLICKED:
if (bSkipLevel1 == false) {
iSelectorBar = iExternalID;
// if (iSelectorBar == iNrSelBar) {
// iSelectorBar--;
initPosCursor();
triggerSelectionBlock();
setDisplayListDirty();
}
break;
case DRAGGED:
break;
case MOUSE_OVER:
break;
}
break;
// handle click on level 2
case HIER_HEAT_MAP_FIELD_SELECTION:
switch (pickingMode) {
case CLICKED:
PickingPoint = pick.getPickedPoint();
triggerSelectionBlock();
setDisplayListDirty();
break;
case DRAGGED:
break;
case MOUSE_OVER:
break;
}
break;
// handle click on level 3 (EHM)
case HIER_HEAT_MAP_VIEW_SELECTION:
switch (pickingMode) {
case CLICKED:
break;
case DRAGGED:
break;
case MOUSE_OVER:
break;
case RIGHT_CLICKED:
contextMenu.setLocation(pick.getPickedPoint(), getParentGLCanvas().getWidth(),
getParentGLCanvas().getHeight());
contextMenu.setMasterGLView(this);
break;
}
break;
}
}
/**
 * This view provides no element representation, so no connection lines are
 * drawn for its selections.
 *
 * @param idType type of the selected id
 * @param iStorageIndex storage index of the selection
 * @return always null
 */
@Override
protected ArrayList<SelectedElementRep> createElementRep(EIDType idType, int iStorageIndex) {
return null;
}
/**
 * Context-only rendering is not supported by this view.
 *
 * @param bRenderOnlyContext ignored
 * @throws IllegalStateException always
 */
@Override
public void renderContext(boolean bRenderOnlyContext) {
throw new IllegalStateException("Rendering only context not supported for the hierachical heat map");
}
/**
 * Broadcasting is not implemented: it is undecided whether only the
 * embedded heat map's elements or all elements should be broadcast.
 *
 * @throws IllegalStateException always
 */
@Override
public void broadcastElements() {
throw new IllegalStateException("broadcast elements of the contained heat map or all?");
}
/**
 * Clears every selection state of the view: the content and storage
 * selection managers, the helper selection lists, the level-1 selector
 * position and all group/cluster selection markers.
 */
@Override
public void clearAllSelections() {
    contentSelectionManager.clearSelections();
    storageSelectionManager.clearSelections();
    AlSelection.clear();
    AlExpMouseOver.clear();
    AlExpSelected.clear();
    // Jump back to the first texture and rebuild the visible block.
    iSelectorBar = 1;
    initPosCursor();
    bRedrawTextures = true;
    setDisplayListDirty();
    triggerSelectionBlock();
    glHeatMapView.setDisplayListDirty();
    // Reset group/cluster selections on the experiment (storage) and gene
    // (content) dimensions, when clustering info is present.
    IGroupList experimentGroups = set.getVA(iStorageVAID).getGroupList();
    if (experimentGroups != null) {
        for (Group experimentGroup : experimentGroups) {
            experimentGroup.setSelectionType(ESelectionType.NORMAL);
        }
    }
    IGroupList geneGroups = set.getVA(iContentVAID).getGroupList();
    if (geneGroups != null) {
        for (Group geneGroup : geneGroups) {
            geneGroup.setSelectionType(ESelectionType.NORMAL);
        }
    }
}
/**
 * Switches between horizontal and vertical rendering of the storages.
 *
 * @param defaultOrientation true to render in the default orientation
 */
@Override
public void changeOrientation(boolean defaultOrientation) {
renderHorizontally(defaultOrientation);
}
/**
 * @return true if the storages are currently rendered horizontally
 */
@Override
public boolean isInDefaultOrientation() {
return bRenderStorageHorizontally;
}
/**
 * Toggles whether the embedded heat map (level 3) is in focus.
 *
 * <p>NOTE(review): the {@code bInFocus} parameter is ignored — the method
 * always toggles the current state rather than applying the requested one.
 * Behavior is kept as-is to preserve existing callers; confirm intent.
 *
 * @param bInFocus requested focus state (currently unused)
 */
public void changeFocus(boolean bInFocus) {
    // Simplified from "bIsHeatmapInFocus == true ? false : true".
    bIsHeatmapInFocus = !bIsHeatmapInFocus;
    setDisplayListDirty();
}
/**
 * Runs clustering with the given state: the data is re-initialized once
 * with clustered virtual arrays enabled, then the flag is reset.
 *
 * @param clusterState parameters for the clustering run
 */
public void startClustering(ClusterState clusterState) {
this.clusterstate = clusterState;
// initData() reads bUseClusteredVA; it must be true only for this call.
bUseClusteredVA = true;
initData();
bUseClusteredVA = false;
setDisplayListDirty();
}
/**
 * Lazily creates a default group list — a single group spanning all
 * elements — for both the gene (content) and the experiment (storage)
 * dimension, so that group operations become available.
 */
public void activateGroupHandling() {
    if (set.getVA(iContentVAID).getGroupList() == null) {
        final int iNrGenes = set.getVA(iContentVAID).size();
        IGroupList geneGroups = new GroupList(0);
        geneGroups.append(new Group(iNrGenes, false, 0, ESelectionType.NORMAL));
        set.getVA(iContentVAID).setGroupList(geneGroups);
    }
    if (set.getVA(iStorageVAID).getGroupList() == null) {
        final int iNrExperiments = set.getVA(iStorageVAID).size();
        IGroupList experimentGroups = new GroupList(0);
        experimentGroups.append(new Group(iNrExperiments, false, 0, ESelectionType.NORMAL));
        set.getVA(iStorageVAID).setGroupList(experimentGroups);
    }
    setDisplayListDirty();
}
/**
 * @return true if the embedded heat map is currently in focus
 */
public boolean isInFocus() {
return bIsHeatmapInFocus;
}
/**
 * Adjusts the focus of the embedded heat map from the Wii remote's smoothed
 * horizontal head position: looking left of the -1.2 threshold releases the
 * focus, otherwise the heat map receives focus.
 */
private void handleWiiInput() {
    float fHeadPositionX = generalManager.getWiiRemote().getCurrentSmoothHeadPosition()[0];
    // Collapsed the original if/else into one assignment; the negated
    // comparison preserves the original behavior for NaN input as well.
    bIsHeatmapInFocus = !(fHeadPositionX < -1.2f);
    setDisplayListDirty();
}
/**
 * (Re-)initializes the view's data: hierarchy, level textures, cursor
 * position and the embedded heat map. Order matters — the hierarchy must
 * exist before textures and cursor positions can be derived from it.
 */
@Override
public void initData() {
super.initData();
initHierarchy();
calculateTextures();
initPosCursor();
// propagate the (possibly re-clustered) set to the embedded heat map
glHeatMapView.setSet(set);
glHeatMapView.initData();
bRedrawTextures = true;
}
/**
 * @param clusterstate the clustering parameters to use for the next run
 */
public void setClusterstate(ClusterState clusterstate) {
this.clusterstate = clusterstate;
}
/**
 * @return the currently configured clustering parameters
 */
public ClusterState getClusterstate() {
return clusterstate;
}
/**
 * Creates a placeholder serialized form of this view.
 *
 * @return a {@link SerializedDummyView} carrying this view's id
 */
@Override
public ASerializedView getSerializableRepresentation() {
    final SerializedDummyView dummyForm = new SerializedDummyView();
    dummyForm.setViewID(getID());
    return dummyForm;
}
/**
 * Interchanges the positions of the two currently selected groups on the
 * gene or experiment dimension. Prints a message and does nothing if no
 * group assignment exists or if the number of selected groups is not
 * exactly two.
 *
 * @param bGeneGroup true to operate on gene groups, false for experiments
 */
@Override
public void handleInterchangeGroups(boolean bGeneGroup) {
    // Pick the virtual array for genes (content) or experiments (storage).
    int iVAId = bGeneGroup ? iContentVAID : iStorageVAID;
    IGroupList groupList = set.getVA(iVAId).getGroupList();
    if (groupList == null) {
        System.out.println("No group assignment available!");
        return;
    }
    // Collect indices of all selected groups. Tracking the index explicitly
    // avoids the O(n^2) indexOf() lookup of the previous version and is also
    // correct if two groups happen to compare equal.
    ArrayList<Integer> selGroups = new ArrayList<Integer>();
    int iGroupIndex = 0;
    for (Group iter : groupList) {
        if (iter.getSelectionType() == ESelectionType.SELECTION)
            selGroups.add(iGroupIndex);
        iGroupIndex++;
    }
    if (selGroups.size() != 2) {
        System.out.println("Number of selected elements has to be 2!!!");
        return;
    }
    // interchange
    if (groupList.interchange(set.getVA(iVAId), selGroups.get(0), selGroups.get(1)) == false) {
        System.out.println("Problem during interchange!!!");
        return;
    }
    bRedrawTextures = true;
    setDisplayListDirty();
}
/**
 * Merges the two currently selected groups on the gene or experiment
 * dimension into one. Prints a message and does nothing if no group
 * assignment exists or if the number of selected groups is not exactly two.
 *
 * @param bGeneGroup true to operate on gene groups, false for experiments
 */
@Override
public void handleMergeGroups(boolean bGeneGroup) {
    // Pick the virtual array for genes (content) or experiments (storage).
    int iVAId = bGeneGroup ? iContentVAID : iStorageVAID;
    IGroupList groupList = set.getVA(iVAId).getGroupList();
    if (groupList == null) {
        System.out.println("No group assignment available!");
        return;
    }
    // Collect indices of all selected groups. Tracking the index explicitly
    // avoids the O(n^2) indexOf() lookup of the previous version and is also
    // correct if two groups happen to compare equal.
    ArrayList<Integer> selGroups = new ArrayList<Integer>();
    int iGroupIndex = 0;
    for (Group iter : groupList) {
        if (iter.getSelectionType() == ESelectionType.SELECTION)
            selGroups.add(iGroupIndex);
        iGroupIndex++;
    }
    if (selGroups.size() != 2) {
        System.out.println("Number of selected elements has to be 2!!!");
        return;
    }
    // merge
    if (groupList.merge(set.getVA(iVAId), selGroups.get(0), selGroups.get(1)) == false) {
        System.out.println("Problem during merge!!!");
        return;
    }
    bRedrawTextures = true;
    setDisplayListDirty();
}
/**
 * Registers this view's listeners for group-merge, group-interchange and
 * view-update events with the event publisher.
 */
@Override
public void registerEventListeners() {
    super.registerEventListeners();

    final GroupMergingActionListener mergeListener = new GroupMergingActionListener();
    mergeListener.setHandler(this);
    groupMergingActionListener = mergeListener;
    eventPublisher.addListener(MergeGroupsEvent.class, mergeListener);

    final GroupInterChangingActionListener interchangeListener = new GroupInterChangingActionListener();
    interchangeListener.setHandler(this);
    groupInterChangingActionListener = interchangeListener;
    eventPublisher.addListener(InterchangeGroupsEvent.class, interchangeListener);

    final UpdateViewListener viewListener = new UpdateViewListener();
    viewListener.setHandler(this);
    updateViewListener = viewListener;
    eventPublisher.addListener(UpdateViewEvent.class, viewListener);
}
/**
 * Detaches and clears every listener registered in
 * {@code registerEventListeners()}. Each field is nulled so that a
 * repeated call becomes a no-op.
 */
@Override
public void unregisterEventListeners() {
    super.unregisterEventListeners();
    if (updateViewListener != null) {
        eventPublisher.removeListener(updateViewListener);
        updateViewListener = null;
    }
    if (groupInterChangingActionListener != null) {
        eventPublisher.removeListener(groupInterChangingActionListener);
        groupInterChangingActionListener = null;
    }
    if (groupMergingActionListener != null) {
        eventPublisher.removeListener(groupMergingActionListener);
        groupMergingActionListener = null;
    }
}
/**
 * Reacts to an external view-update event by forcing a texture rebuild and
 * a redraw.
 */
@Override
public void handleUpdateView() {
bRedrawTextures = true;
setDisplayListDirty();
}
} |
package org.carewebframework.shell.layout;
import org.zkoss.zul.Caption;
import org.zkoss.zul.Menupopup;
import org.zkoss.zul.Tab;
import org.zkoss.zul.Tabbox;
import org.zkoss.zul.Tabpanel;
/**
 * Wraps the ZK Tab and Tabpanel components.
 */
public class UIElementTabPane extends UIElementZKBase {
// Restrict where this element may appear in the layout hierarchy.
static {
registerAllowedParentClass(UIElementTabPane.class, UIElementTabView.class);
registerAllowedChildClass(UIElementTabPane.class, UIElementBase.class);
}
// The tab header shown in the parent tab box.
private final Tab tab = new Tab();
// Caption inside the tab; holds the displayed label text.
private final Caption caption = new Caption();
// The content panel paired with the tab.
private final Tabpanel tabPanel = new Tabpanel();
/**
 * Set up the tab and tab panel ZK components. Note that we use a custom widget override to
 * allow setting the color of the caption text.
 */
public UIElementTabPane() {
super();
setOuterComponent(tabPanel);
associateComponent(tab);
tabPanel.setSclass("cwf-tab-panel");
tabPanel.setHeight("100%");
tab.setSclass("cwf-tab");
tab.setWidgetOverride(CUSTOM_COLOR_OVERRIDE,
"function(value) {jq(this).find('.z-tab-text').css('color',value?value:'');}");
tab.appendChild(caption);
caption.setSclass("cwf-tab-caption");
}
/**
 * Make this tab pane active.
 */
@Override
public void bringToFront() {
super.bringToFront();
((UIElementTabView) getParent()).setActivePane(this);
}
/**
 * Requires moving both ZK components.
 *
 * @param index The new position among the parent's tabs/panels.
 */
@Override
protected void afterMoveTo(int index) {
moveChild(tab, index);
moveChild(tabPanel, index);
}
/**
 * The caption label is the instance name.
 *
 * @return The caption label.
 */
@Override
public String getInstanceName() {
return getLabel();
}
/**
 * Sets the visibility of the tab and tab panel.
 *
 * @param visible Whether the tab is visible.
 * @param activated Whether the tab is the selected one.
 */
@Override
protected void updateVisibility(boolean visible, boolean activated) {
tab.setSelected(activated);
tab.setVisible(visible);
}
/**
 * Apply/remove the design context menu both tab and tab panel.
 *
 * @param contextMenu The design menu if design mode is activated, or null if it is not.
 */
@Override
protected void setDesignContextMenu(Menupopup contextMenu) {
setDesignContextMenu(tabPanel, contextMenu);
setDesignContextMenu(tab, contextMenu);
}
/**
 * Apply the disable style when a tab is disabled.
 *
 * @param enabled The enabled state.
 */
@Override
public void setEnabled(boolean enabled) {
super.setEnabled(enabled);
tab.setSclass(enabled ? "cwf-tab" : "cwf-tab-disabled");
}
/**
 * Applies color to the tab caption text as well as the tab panel.
 */
@Override
protected void applyColor() {
super.applyColor();
applyColor(tab);
tabPanel.invalidate();
}
/**
 * Attaches the tab and tab panel to the parent tab box.
 */
@Override
protected void bind() {
Tabbox tabbox = (Tabbox) getParent().getOuterComponent();
tabbox.getTabs().appendChild(tab);
tabbox.getTabpanels().appendChild(tabPanel);
}
/**
 * Detaches the tab and tab panel from their parent.
 */
@Override
protected void unbind() {
tab.detach();
tabPanel.detach();
}
/**
 * Returns the caption component of the tab (package access for the designer).
 *
 * @return The caption component.
 */
/*package*/Caption getCaption() {
return caption;
}
/**
 * Returns the caption label.
 *
 * @return The caption label.
 */
public String getLabel() {
return caption.getLabel();
}
/**
 * Sets the caption label. The label is mirrored to both the tab and its
 * caption component.
 *
 * @param value The new label text.
 */
public void setLabel(String value) {
tab.setLabel(value);
caption.setLabel(value);
}
/**
 * Hint text should be applied to the tab.
 */
@Override
protected void applyHint() {
tab.setTooltiptext(getHint());
}
}
package org.csstudio.display.builder.editor.tree;
import static org.csstudio.display.builder.editor.Plugin.logger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import org.csstudio.display.builder.editor.DisplayEditor;
import org.csstudio.display.builder.editor.EditorUtil;
import org.csstudio.display.builder.editor.actions.ActionDescription;
import org.csstudio.display.builder.model.ArrayWidgetProperty;
import org.csstudio.display.builder.model.ChildrenProperty;
import org.csstudio.display.builder.model.DisplayModel;
import org.csstudio.display.builder.model.Widget;
import org.csstudio.display.builder.model.WidgetProperty;
import org.csstudio.display.builder.model.WidgetPropertyListener;
import org.csstudio.display.builder.model.widgets.TabsWidget;
import org.csstudio.display.builder.model.widgets.TabsWidget.TabItemProperty;
import org.csstudio.javafx.TreeHelper;
import javafx.application.Platform;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.collections.ObservableList;
import javafx.scene.control.Control;
import javafx.scene.control.MultipleSelectionModel;
import javafx.scene.control.SelectionMode;
import javafx.scene.control.TreeCell;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeView;
import javafx.scene.input.KeyEvent;
import javafx.util.Callback;
/** Tree view of widget hierarchy
* @author Kay Kasemir
* @author Claudio Rosati
*/
@SuppressWarnings("nls")
public class WidgetTree
{
/** Is this class updating the selection of tree or model? */
private final AtomicBoolean active = new AtomicBoolean();
/** Associated Editor */
private final DisplayEditor editor;
/** The tree view showing the widget hierarchy. */
private final TreeView<WidgetOrTab> tree_view = new TreeView<>();
/** Currently displayed model; null until {@code setModel} is called. */
private DisplayModel model = null;
/** Map model widgets to their tree items in <code>tree_view</code>
 *
 *  <p>When model notifies about changed Widget,
 *  this map provides the corresponding TreeItem.
 */
private final Map<Widget, TreeItem<WidgetOrTab>> widget2tree = new ConcurrentHashMap<>();
/** Map of tab's name property to TreeItem */
private final Map<WidgetProperty<String>, TreeItem<WidgetOrTab>> tab_name2tree = new ConcurrentHashMap<>();
/** Listener to changes in Widget's children */
private final WidgetPropertyListener<List<Widget>> children_listener;
/** Listener to changes in a Widget's name: refreshes the widget's tree item. */
private final WidgetPropertyListener<String> name_listener = (property, old, new_name) ->
{
final Widget widget = property.getWidget();
logger.log(Level.FINE, "{0} changed name", widget);
final TreeItem<WidgetOrTab> item = Objects.requireNonNull(widget2tree.get(widget));
Platform.runLater(() -> TreeHelper.triggerTreeItemRefresh(item));
};
/** Listener to changes in a TabWidget's tabs */
private final WidgetPropertyListener<List<TabItemProperty>> tabs_property_listener = (tabs, removed, added) ->
{
if (added != null)
addTabs(added);
if (removed != null)
removeTabs(removed);
};
/** Update the name of a tab item in the tree */
private final WidgetPropertyListener<String> tab_name_listener = (tab_name, old_name, new_name) ->
{
final TreeItem<WidgetOrTab> tab_item = Objects.requireNonNull(tab_name2tree.get(tab_name));
TreeHelper.triggerTreeItemRefresh(tab_item);
};
/** Cell factory that displays {@link WidgetOrTab} info in tree cell */
private final Callback<TreeView<WidgetOrTab>, TreeCell<WidgetOrTab>> cell_factory = cell -> new WidgetTreeCell();
/** Construct widget tree
 *  @param editor Editor whose widget selection handler this tree mirrors
 */
public WidgetTree(final DisplayEditor editor)
{
this.editor = editor;
// Keeps the tree in sync when widgets are added to or removed from the model.
children_listener = (p, removed, added) ->
{
// Update must be on UI thread.
// Even if already on UI thread, decouple.
if (removed != null)
Platform.runLater(() ->
{
// 'active' suppresses selection feedback loops while the tree mutates.
active.set(true);
try
{
for (Widget removed_widget : removed)
removeWidget(removed_widget);
}
finally
{
active.set(false);
}
});
if (added != null)
{   // Need to determine the index of added item in model _now_,
    // not in decoupled thread.
    // Assume model [ a, b, c, d ] that moves a, b, to the end: [ b, c, d, a ], then [ c, d, a, b ]
    // By the time decoupled thread moves a, it will already see the model as [ c, d, a, b ]
    // and thus determine that a needs to be at index 2 -> [ b, c, a, d ]
    // Then it moves b, determined that it needs to be at index 3 -> [ c, a, d, b ]
final int[] indices = new int[added.size()];
for (int i=0; i<indices.length; ++i)
indices[i] = determineWidgetIndex(added.get(i));
Platform.runLater(() ->
{
active.set(true);
try
{
for (int i=0; i<indices.length; ++i)
addWidget(added.get(i), indices[i]);
}
finally
{
active.set(false);
}
// Restore tree's selection to match model
// after removing/adding items may have changed it.
setSelectedWidgets(editor.getWidgetSelectionHandler().getSelection());
});
}
};
}
/** Create UI components
 *  @return Root {@link Control}
 */
public Control create()
{
tree_view.setShowRoot(false);
// Allow selecting several widgets at once, matching the editor.
tree_view.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
tree_view.setCellFactory(cell_factory);
// Wire tree selection <-> model selection before key handling.
bindSelections();
tree_view.setOnKeyPressed(this::handleKeyPress);
return tree_view;
}
/** Forwards key presses on the tree to the shared widget-order key handler. */
private void handleKeyPress(final KeyEvent event)
{
    handleWidgetOrderKeys(event, editor);
}
/** Handle Alt-[Up, Down, PgUp, PgDown] to move widgets in hierarchy
 *
 *  @param event {@link KeyEvent}
 *  @param editor {@link DisplayEditor}
 *  @return <code>true</code> if key was handled
 */
public static boolean handleWidgetOrderKeys(final KeyEvent event, final DisplayEditor editor)
{
    if (! event.isAltDown())
        return false;
    // Map the pressed key to the matching ordering action.
    final ActionDescription action;
    switch (event.getCode())
    {
    case PAGE_UP:
        action = ActionDescription.TO_BACK;
        break;
    case UP:
        action = ActionDescription.MOVE_UP;
        break;
    case DOWN:
        action = ActionDescription.MOVE_DOWN;
        break;
    case PAGE_DOWN:
        action = ActionDescription.TO_FRONT;
        break;
    default:
        return false;
    }
    // Consume first (as before), then run the action.
    event.consume();
    action.run(editor);
    return true;
}
/** Link selections in tree view and model (both directions). */
private void bindSelections()
{
    // Update selected widgets in model from selection in tree_view
    final ObservableList<TreeItem<WidgetOrTab>> tree_selection = tree_view.getSelectionModel().getSelectedItems();
    InvalidationListener listener = (Observable observable) ->
    {
        // 'active' guards against update loops between tree and model.
        if (! active.compareAndSet(false, true))
            return;
        try
        {
            // Collect the distinct widgets behind the selected tree items;
            // a selected tab item maps to its enclosing TabsWidget.
            final List<Widget> widgets = new ArrayList<>(tree_selection.size());
            for (TreeItem<WidgetOrTab> item : tree_selection)
            {
                final WidgetOrTab wot = item.getValue();
                final Widget widget = wot.isWidget()
                        ? wot.getWidget()
                        : wot.getTab().getWidget();
                if (! widgets.contains(widget))
                    widgets.add(widget);
            }   // Fixed: removed stray ';' (empty statement) after this loop.
            logger.log(Level.FINE, "Selected in tree: {0}", widgets);
            editor.getWidgetSelectionHandler().setSelection(widgets);
        }
        finally
        {
            active.set(false);
        }
    };
    tree_selection.addListener(listener);
    // Update selection in tree_view from selected widgets in model
    editor.getWidgetSelectionHandler().addListener(this::setSelectedWidgets);
}
/** @param model Model to display as widget tree (may be null to clear) */
public void setModel(final DisplayModel model)
{
// Could recursively remove all old model tree elements,
// on UI thread, one by one.
// Faster: Unlink listeners and then replace the whole
// tree model which was created in background.
final DisplayModel old_model = this.model;
if (old_model != null)
{
// Detach all listeners from the old model before dropping the maps.
old_model.runtimeChildren().removePropertyListener(children_listener);
for (Widget widget : old_model.runtimeChildren().getValue())
removeWidgetListeners(widget);
widget2tree.clear();
tab_name2tree.clear();
}
this.model = model;
// Might be called on UI thread, move off
EditorUtil.getExecutor().execute(() ->
{
// Build the complete tree off the UI thread, then swap it in at once.
final TreeItem<WidgetOrTab> root = new TreeItem<WidgetOrTab>(WidgetOrTab.of(model));
if (model != null)
{
widget2tree.put(model, root);
for (Widget widget : model.runtimeChildren().getValue())
addWidget(widget, -1);
root.setExpanded(true);
model.runtimeChildren().addPropertyListener(children_listener);
}
logger.log(Level.FINE, "Computed new tree on {0}, updating UI", Thread.currentThread().getName());
Platform.runLater(() ->
{
tree_view.setRoot(root);
setSelectedWidgets(editor.getWidgetSelectionHandler().getSelection());
});
});
}
/** Called by selection handler when selected widgets have changed, or on new model
 *  @param widgets Widgets to select in tree
 */
public void setSelectedWidgets(final List<Widget> widgets)
{
// 'active' guards against update loops between tree and model.
if (! active.compareAndSet(false, true))
return;
try
{
final MultipleSelectionModel<TreeItem<WidgetOrTab>> selection = tree_view.getSelectionModel();
selection.clearSelection();
// NOTE(review): widget2tree.get(widget) may be null for a widget not
// (yet) represented in the tree — presumably tolerated by select();
// confirm against the JavaFX MultipleSelectionModel behavior.
for (Widget widget : widgets)
selection.select(widget2tree.get(widget));
// If something's selected, show it.
// Otherwise leave tree at current position.
final int index = selection.getSelectedIndex();
if (index >= 0)
tree_view.scrollTo(index);
}
finally
{
active.set(false);
}
}
/** Determine location of widget within parent of model
 *  @param widget Widget
 *  @return Index of widget in model's parent, -1 when not found
 */
private int determineWidgetIndex(final Widget widget)
{
    final Widget parent = Objects.requireNonNull(widget.getParent().get());
    // Plain container: position is simply the index in the children list
    if (!(parent instanceof TabsWidget))
        return ChildrenProperty.getChildren(parent).getValue().indexOf(widget);
    // Tab container: search each tab's child list for the widget
    for (TabItemProperty tab : ((TabsWidget) parent).propTabs().getValue())
    {
        final int index = tab.children().getValue().indexOf(widget);
        if (index >= 0)
            return index;
    }
    return -1;
}
/** Add widget to existing model & tree
 *  @param added_widget Widget to add
 *  @param index Index of widget within parent. -1 to add at end
 */
private void addWidget(final Widget added_widget, final int index)
{
    // Have widget and its parent in model
    final Widget widget_parent = added_widget.getParent().get();
    // Determine parent tree item:
    // for a widget inside a tab it is the tab's item, otherwise the parent widget's item
    TreeItem<WidgetOrTab> item_parent = null;
    if (widget_parent instanceof TabsWidget)
    {
        for (TabItemProperty tab : ((TabsWidget)widget_parent).propTabs().getValue())
            if (tab.children().getValue().contains(added_widget))
            {
                item_parent = tab_name2tree.get(tab.name());
                break;
            }
    }
    else
        item_parent = widget2tree.get(widget_parent);
    Objects.requireNonNull(item_parent, "Cannot obtain parent item for " + added_widget);
    // Create Tree item
    final TreeItem<WidgetOrTab> item = new TreeItem<>(WidgetOrTab.of(added_widget));
    widget2tree.put(added_widget, item);
    item.setExpanded(true);
    if (index >= 0)
        // Add at same index into Tree
        item_parent.getChildren().add(index, item);
    else// Append to end
        item_parent.getChildren().add(item);
    // React to name changes so the tree cell text stays current
    added_widget.propName().addPropertyListener(name_listener);
    if (added_widget instanceof TabsWidget)
    {
        // Tab container: add existing tabs first, then listen for tab changes
        final ArrayWidgetProperty<TabItemProperty> tabs = ((TabsWidget)added_widget).propTabs();
        addTabs(tabs.getValue());
        tabs.addPropertyListener(tabs_property_listener);
    }
    else
    {
        // Plain container: recurse into existing children, then listen for changes
        final ChildrenProperty children = ChildrenProperty.getChildren(added_widget);
        if (children != null)
        {
            children.addPropertyListener(children_listener);
            for (Widget child : children.getValue())
                addWidget(child, -1);
        }
    }
}
/** Create tree items for tabs (and their child widgets) of a TabsWidget
 *  already represented in the tree.
 *  @param added Tabs that were added
 */
private void addTabs(final List<TabItemProperty> added)
{
    for (TabItemProperty tab : added)
    {
        // Tab item is attached below the tree item of its TabsWidget
        final TreeItem<WidgetOrTab> widget_item = widget2tree.get(tab.getWidget());
        final TreeItem<WidgetOrTab> tab_item = new TreeItem<>(WidgetOrTab.of(tab));
        widget_item.getChildren().add(tab_item);
        tab_name2tree.put(tab.name(), tab_item);
        tab.name().addPropertyListener(tab_name_listener);
        // Add existing children first, then subscribe to further changes
        for (Widget child : tab.children().getValue())
            addWidget(child, -1);
        tab.children().addPropertyListener(children_listener);
    }
}
/** Remove tree items for tabs that were deleted from a TabsWidget
 *  @param removed Tabs that were removed
 */
private void removeTabs(final List<TabItemProperty> removed)
{
    for (TabItemProperty tab : removed)
    {
        // Unhook listeners before dropping the tab's tree item
        tab.children().removePropertyListener(children_listener);
        tab.name().removePropertyListener(tab_name_listener);
        final TreeItem<WidgetOrTab> item = tab_name2tree.remove(tab.name());
        item.getParent().getChildren().remove(item);
    }
}
/** Remove widget from existing model & tree
 *  @param removed_widget Widget to remove, including all of its children/tabs
 */
private void removeWidget(final Widget removed_widget)
{
    if (removed_widget instanceof TabsWidget)
    {
        // Tab container: detach tab listeners and remove tab tree items first
        final ArrayWidgetProperty<TabItemProperty> tabs = ((TabsWidget)removed_widget).propTabs();
        tabs.removePropertyListener(tabs_property_listener);
        removeTabs(tabs.getValue());
    }
    removed_widget.propName().removePropertyListener(name_listener);
    // Recurse into plain child widgets, if any
    final ChildrenProperty children = ChildrenProperty.getChildren(removed_widget);
    if (children != null)
    {
        children.removePropertyListener(children_listener);
        for (Widget child : children.getValue())
            removeWidget(child);
    }
    // Finally drop this widget's own tree item and map entry
    final TreeItem<WidgetOrTab> item = widget2tree.remove(removed_widget);
    item.getParent().getChildren().remove(item);
}
/** Recursively remove model widget listeners
 *  @param widget Widget (and all its descendants) to unlink
 */
private void removeWidgetListeners(final Widget widget)
{
    if (widget instanceof TabsWidget)
    {
        // Tab container: unhook the tab list plus each tab's own listeners
        final ArrayWidgetProperty<TabItemProperty> tabs = ((TabsWidget) widget).propTabs();
        tabs.removePropertyListener(tabs_property_listener);
        tabs.getValue().forEach(tab ->
        {
            tab.children().removePropertyListener(children_listener);
            tab.name().removePropertyListener(tab_name_listener);
        });
    }
    widget.propName().removePropertyListener(name_listener);
    final ChildrenProperty children = ChildrenProperty.getChildren(widget);
    if (children == null)
        return;
    children.removePropertyListener(children_listener);
    children.getValue().forEach(this::removeWidgetListeners);
}
} |
package org.pentaho.di.version;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.pentaho.di.core.Const;
/**
* Singleton class to allow us to see on which date & time the kettle3.jar was built.
*
* @author Matt
* @since 2006-aug-12
*/
public class BuildVersion
{
/** name of the Kettle version file, updated in the ant script, contains date and time of build */
public static final String BUILD_VERSION_FILE = "build_version.txt";
public static final String SEPARATOR = "@";
public static final String BUILD_DATE_FORMAT = "yyyy/MM/dd'T'HH:mm:ss";
private static BuildVersion buildVersion;
/**
* @return the instance of the BuildVersion singleton
*/
public static final BuildVersion getInstance()
{
if (buildVersion!=null) return buildVersion;
buildVersion = new BuildVersion();
return buildVersion;
}
private int version;
private Date buildDate;
private String hostname;
private BuildVersion()
{
String filename = BUILD_VERSION_FILE;
StringBuffer buffer = new StringBuffer(30);
try
{
// The version file only contains a single lines of text
InputStream inputStream = getClass().getResourceAsStream( "/"+filename ); // try to find it in the jars...
if (inputStream==null) // not found
{
// System.out.println("Stream not found for filename [/"+filename+"], looking for it on the normal filesystem...");
try
{
inputStream = new FileInputStream(filename); // Retry from normal file system
}
catch(FileNotFoundException e)
{
inputStream = new FileInputStream("./"+filename);
}
}
else
{
}
// read the file into a String
int c=inputStream.read();
while ( c>0 && c!='\n' && c!='\r' )
{
if (c!=' ' && c!='\t') buffer.append((char)c); // no spaces or tabs please ;-)
c=inputStream.read();
}
// The 3 parts we expect are in here:
String parts[] = buffer.toString().split(SEPARATOR);
if (parts.length!=3)
{
throw new RuntimeException("Could not find 3 parts in versioning line : ["+buffer+"]");
}
// Get the revision
version = Integer.parseInt(parts[0]);
// Get the build date
SimpleDateFormat format = new SimpleDateFormat(BUILD_DATE_FORMAT);
buildDate = format.parse(parts[1]);
try {
File engineJar = new File("lib/kettle-engine.jar");
long lastModifiedJar = engineJar.lastModified();
if (lastModifiedJar!=0L) {
buildDate = new Date(lastModifiedJar);
} else {
System.out.println("Unable to find kettle engine jar file to set build date. (ingored)");
}
}
catch(Exception e) {
// Eat this exception, keep things the way the were before.
// Eats security exceptions, etc.
}
}
catch(Exception e)
{
System.out.println("Unable to load revision number from file : ["+filename+"]");
version = 1;
buildDate = new Date();
}
}
/**
* @return the buildDate
*/
public Date getBuildDate()
{
return buildDate;
}
/**
* @param buildDate the buildDate to set
*/
public void setBuildDate(Date buildDate)
{
this.buildDate = buildDate;
}
/**
* @return the revision
*/
public int getVersion()
{
return version;
}
/**
* @param revision the revision to set
*/
public void setVersion(int revision)
{
this.version = revision;
}
public void save()
{
FileWriter fileWriter = null;
String filename = BUILD_VERSION_FILE;
File file = new File( filename );
try
{
fileWriter = new FileWriter(file);
// First write the revision
fileWriter.write(Integer.toString(version)+" ");
// Then the separator
fileWriter.write(SEPARATOR);
// Finally the build date
SimpleDateFormat format = new SimpleDateFormat(BUILD_DATE_FORMAT);
fileWriter.write(" "+format.format(buildDate)+" ");
// Then the separator
fileWriter.write(SEPARATOR);
// Then the hostname
fileWriter.write(" "+Const.getHostname());
// Return
fileWriter.write(Const.CR);
System.out.println("Saved build version info to file ["+file.getAbsolutePath()+"]");
}
catch(Exception e)
{
throw new RuntimeException("Unable to save revision information to file ["+BUILD_VERSION_FILE+"]", e);
}
finally
{
try
{
if (fileWriter!=null)
{
fileWriter.close();
}
}
catch(Exception e)
{
throw new RuntimeException("Unable to close file ["+BUILD_VERSION_FILE+"] after writing", e);
}
}
}
/**
* @return the hostname
*/
public String getHostname()
{
return hostname;
}
/**
* @param hostname the hostname to set
*/
public void setHostname(String hostname)
{
this.hostname = hostname;
}
} |
package org.csstudio.display.builder.editor.tree;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.csstudio.display.builder.editor.EditorUtil;
import org.csstudio.display.builder.editor.WidgetSelectionHandler;
import org.csstudio.display.builder.editor.util.WidgetIcons;
import org.csstudio.display.builder.model.ChildrenProperty;
import org.csstudio.display.builder.model.DisplayModel;
import org.csstudio.display.builder.model.Widget;
import org.csstudio.display.builder.model.WidgetPropertyListener;
import javafx.application.Platform;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.collections.ObservableList;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.control.MultipleSelectionModel;
import javafx.scene.control.SelectionMode;
import javafx.scene.control.TreeCell;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeView;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.Priority;
import javafx.scene.layout.VBox;
import javafx.util.Callback;
/** Tree view of widget hierarchy
* @author Kay Kasemir
*/
@SuppressWarnings("nls")
public class WidgetTree
{
private final Logger logger = Logger.getLogger(getClass().getName());
/** Is this class updating the selection of tree or model? */
private final AtomicBoolean active = new AtomicBoolean();
/** Handler for setting and tracking the currently selected widgets */
private final WidgetSelectionHandler selection;
private final TreeView<Widget> tree_view = new TreeView<>();
private DisplayModel model = null;
/** Map model widgets to their tree items in <code>tree_view</code>
*
* <p>When model notifies about changed Widget,
* this map provides the corresponding TreeItem.
*/
private volatile Map<Widget, TreeItem<Widget>> widget_items = new ConcurrentHashMap<>();
/** Listener to changes in ContainerWidget's children */
private final WidgetPropertyListener<List<Widget>> children_listener;
/** Listener to changes in ContainerWidget's children */
private final WidgetPropertyListener<String> name_listener = (property, old, new_name) ->
{
final Widget widget = property.getWidget();
logger.log(Level.FINE, "{0} changed name", widget);
final TreeItem<Widget> item = Objects.requireNonNull(widget_items.get(widget));
// 'setValue' triggers a refresh of the item,
// but only if value is different..
Platform.runLater(() ->
{
item.setValue(null);
item.setValue(widget);
});
};
/** Tree cell that displays {@link Widget} (name, icon, ..) */
private static class WidgetTreeCell extends TreeCell<Widget>
{
@Override
public void updateItem(final Widget widget, final boolean empty)
{
super.updateItem(widget, empty);
if (empty || widget == null)
{
setText(null);
setGraphic(null);
}
else
{
final String type = widget.getType();
setText(type + " '" + widget.getName() + "'");
final Image icon = WidgetIcons.getIcon(type);
if (icon != null)
setGraphic(new ImageView(icon));
}
}
};
/** Cell factory that displays {@link Widget} info in tree cell */
private final Callback<TreeView<Widget>, TreeCell<Widget>> cell_factory = (final TreeView<Widget> param) -> new WidgetTreeCell();
/** Construct widget tree
* @param selection Handler of selected widgets
*/
public WidgetTree(final WidgetSelectionHandler selection)
{
this.selection = selection;
children_listener = (p, removed, added) ->
{
// Update must be on UI thread.
// Even if already on UI thread, decouple.
Platform.runLater(() ->
{
active.set(true);
try
{
if (removed != null)
for (Widget removed_widget : removed)
removeWidget(removed_widget);
if (added != null)
for (Widget added_widget : added)
addWidget(added_widget, widget_items);
}
finally
{
active.set(false);
}
// Restore tree's selection to match model
// after removing/adding items may have changed it.
setSelectedWidgets(selection.getSelection());
});
};
}
/** Create UI components
* @return Root {@link Node}
*/
public Node create()
{
final VBox box = new VBox();
final Label header = new Label("Widgets");
header.setMaxWidth(Double.MAX_VALUE);
header.getStyleClass().add("header");
tree_view.setShowRoot(false);
tree_view.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
tree_view.setCellFactory(cell_factory);
VBox.setVgrow(tree_view, Priority.ALWAYS);
box.getChildren().addAll(header, tree_view);
bindSelections();
return box;
}
/** Link selections in tree view and model */
private void bindSelections()
{
// Update selected widgets in model from selection in tree_view
final ObservableList<TreeItem<Widget>> tree_selection = tree_view.getSelectionModel().getSelectedItems();
InvalidationListener listener = (Observable observable) ->
{
if (! active.compareAndSet(false, true))
return;
try
{
final List<Widget> widgets = new ArrayList<>(tree_selection.size());
tree_selection.forEach(item -> widgets.add(item.getValue()));
logger.log(Level.FINE, "Selected in tree: {0}", widgets);
selection.setSelection(widgets);
}
finally
{
active.set(false);
}
};
tree_selection.addListener(listener);
// Update selection in tree_view from selected widgets in model
selection.addListener(this::setSelectedWidgets);
}
/** @param model Model to display as widget tree */
public void setModel(final DisplayModel model)
{
// Could recursively remove all old model tree elements,
// on UI thread, one by one.
// Faster: Unlink listeners and then replace the whole
// tree model which was created in background.
final DisplayModel old_model = this.model;
if (old_model != null)
{
old_model.runtimeChildren().removePropertyListener(children_listener);
for (Widget widget : old_model.runtimeChildren().getValue())
removeWidgetListeners(widget);
}
this.model = model;
// Might be called on UI thread, move off
EditorUtil.getExecutor().execute(() ->
{
final TreeItem<Widget> root = new TreeItem<Widget>(model);
final Map<Widget, TreeItem<Widget>> widget_items = new ConcurrentHashMap<>();
if (model != null)
{
widget_items.put(model, root);
for (Widget widget : model.runtimeChildren().getValue())
addWidget(widget, widget_items);
root.setExpanded(true);
model.runtimeChildren().addPropertyListener(children_listener);
this.widget_items = widget_items;
}
logger.log(Level.FINE, "Computed new tree on {0}, updating UI", Thread.currentThread().getName());
Platform.runLater(() ->
{
tree_view.setRoot(root);
setSelectedWidgets(selection.getSelection());
});
});
}
/** Called by selection handler when selected widgets have changed, or on new model
* @param widgets Widgets to select in tree
*/
public void setSelectedWidgets(final List<Widget> widgets)
{
if (! active.compareAndSet(false, true))
return;
try
{
final MultipleSelectionModel<TreeItem<Widget>> selection = tree_view.getSelectionModel();
selection.clearSelection();
for (Widget widget : widgets)
selection.select(widget_items.get(widget));
}
finally
{
active.set(false);
}
}
/** Add widget to existing model & tree
* @param added_widget Widget to add
* @param widget_items Map of widget to tree item
*/
private void addWidget(final Widget added_widget, final Map<Widget, TreeItem<Widget>> widget_items)
{ // Determine location of widget within parent of model
final Widget widget_parent = added_widget.getParent().get();
final int index = ChildrenProperty.getChildren(widget_parent).getValue().indexOf(added_widget);
// Create Tree item, add at same index into Tree
final TreeItem<Widget> item_parent = widget_items.get(widget_parent);
final TreeItem<Widget> item = new TreeItem<>(added_widget);
widget_items.put(added_widget, item);
item.setExpanded(true);
item_parent.getChildren().add(index, item);
added_widget.widgetName().addPropertyListener(name_listener);
final ChildrenProperty children = ChildrenProperty.getChildren(added_widget);
if (children != null)
{
children.addPropertyListener(children_listener);
for (Widget child : children.getValue())
addWidget(child, widget_items);
}
}
/** Remove widget from existing model & tree
* @param removed_widget
*/
private void removeWidget(final Widget removed_widget)
{
removed_widget.widgetName().removePropertyListener(name_listener);
final ChildrenProperty children = ChildrenProperty.getChildren(removed_widget);
if (children != null)
{
children.removePropertyListener(children_listener);
for (Widget child : children.getValue())
removeWidget(child);
}
final TreeItem<Widget> item = widget_items.get(removed_widget);
item.getParent().getChildren().remove(item);
widget_items.remove(removed_widget);
}
/** Recursively remove model widget listeners
* @param container Widgets to unlink
*/
private void removeWidgetListeners(final Widget widget)
{
widget.widgetName().removePropertyListener(name_listener);
final ChildrenProperty children = ChildrenProperty.getChildren(widget);
if (children != null)
{
children.removePropertyListener(children_listener);
for (Widget child : children.getValue())
removeWidgetListeners(child);
}
}
} |
package org.pentaho.di.core.database;
import java.io.StringReader;
import java.sql.BatchUpdateException;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Savepoint;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Date;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.sql.DataSource;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Counter;
import org.pentaho.di.core.DBCache;
import org.pentaho.di.core.DBCacheEntry;
import org.pentaho.di.core.ProgressMonitorListener;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.map.DatabaseConnectionMap;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleDatabaseBatchException;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.logging.DefaultLogLevel;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.logging.LogStatus;
import org.pentaho.di.core.logging.LogTableField;
import org.pentaho.di.core.logging.LogTableInterface;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.logging.LoggingObjectType;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.ObjectRevision;
import org.pentaho.di.repository.RepositoryDirectory;
/**
* Database handles the process of connecting to, reading from, writing to and updating databases.
* The database specific parameters are defined in DatabaseInfo.
*
* @author Matt
* @since 05-04-2003
*
*/
public class Database implements VariableSpace, LoggingObjectInterface
{
private static Class<?> PKG = Database.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

/** The connection metadata (driver, URL, credentials, options) */
private DatabaseMeta databaseMeta;

/** Maximum number of rows to retrieve from a query, 0 = no limit */
private int rowlimit;

/** Number of rows to insert/update before a commit is issued */
private int commitsize;

/** The live JDBC connection, null until connect() succeeds */
private Connection connection;

/** Statement used for select queries (cancellable via cancelQuery) */
private Statement sel_stmt;

/** General prepared statement (also cancellable via cancelQuery) */
private PreparedStatement pstmt;

/** Cached prepared statement for lookups */
private PreparedStatement prepStatementLookup;

/** Cached prepared statement for updates */
private PreparedStatement prepStatementUpdate;

/** Cached prepared statement for inserts */
private PreparedStatement prepStatementInsert;

/** Cached prepared statement for sequence access */
private PreparedStatement pstmt_seq;

/** Callable statement, presumably for stored-procedure calls — used further down in this class */
private CallableStatement cstmt;

// private ResultSetMetaData rsmd;

/** JDBC metadata of the open connection */
private DatabaseMetaData dbmd;

/** Row metadata describing the current statement's fields */
private RowMetaInterface rowMeta;

/** Number of rows written since the last commit */
private int written;

/** Log channel for this connection */
private LogChannelInterface log;

/** The object (step, job entry, ...) that initiated this connection, may be null */
private LoggingObjectInterface parentLoggingObject;

/**
 * Number of times a connection was opened using this object.
 * Only used in the context of a database connection map
 */
private int opened;

/**
 * The copy is equal to opened at the time of creation.
 */
private int copy;

/** Connection group (thread name of transformation/job) for connection sharing, see connect() */
private String connectionGroup;

/** Partition ID in the cluster this connection belongs to, may be null */
private String partitionId;

/** Variable space used for environment substitution in URLs, SQL, etc. */
private VariableSpace variables = new Variables();

/** Log level for this connection, taken from the log channel */
private LogLevel logLevel = DefaultLogLevel.getLogLevel();

/** Container object ID for logging/lineage purposes */
private String containerObjectId;
/**
 * Construct a new Database Connection
 * @param databaseMeta The Database Connection Info to construct the connection with.
 * @deprecated Please specify the parent object so that we can see which object is initiating a database connection
 */
public Database(DatabaseMeta databaseMeta)
{
    this.parentLoggingObject = null;
    this.databaseMeta = databaseMeta;
    shareVariablesWith(databaseMeta);

    // Without the parent object we cannot tell which object makes the connection,
    // nor which log level to attach, so we stick to the defaults.
    // That is why this constructor is @deprecated.
    log = new LogChannel(this);
    logLevel = log.getLogLevel();
    containerObjectId = log.getContainerObjectId();

    pstmt = null;
    rowMeta = null;
    dbmd = null;
    rowlimit = 0;
    written = 0;

    if (log.isDetailed())
    {
        log.logDetailed("New database connection defined");
    }
}
/**
 * Construct a new Database Connection
 * @param parentObject The object (step, job entry, ...) initiating this connection, used for logging
 * @param databaseMeta The Database Connection Info to construct the connection with.
 */
public Database(LoggingObjectInterface parentObject, DatabaseMeta databaseMeta)
{
    this.parentLoggingObject = parentObject;
    this.databaseMeta = databaseMeta;
    shareVariablesWith(databaseMeta);

    // Logging inherits level and container from the parent object
    log = new LogChannel(this, parentObject);
    this.containerObjectId = log.getContainerObjectId();
    this.logLevel = log.getLogLevel();

    pstmt = null;
    rowMeta = null;
    dbmd = null;
    rowlimit = 0;
    written = 0;

    if (log.isDetailed())
    {
        log.logDetailed("New database connection defined");
    }
}
/**
 * Two Database objects are equal when they are defined on the same
 * database connection metadata.
 *
 * <p>FIX: the previous implementation compared <code>other.databaseMeta</code>
 * with itself (always true), and threw NPE/ClassCastException for null or
 * non-Database arguments.
 * NOTE(review): hashCode() should be kept consistent with this — verify
 * whether the rest of the class overrides it.
 *
 * @param obj object to compare with
 * @return true when obj is a Database on the same connection metadata
 */
public boolean equals(Object obj)
{
    if (this == obj) return true;
    if (!(obj instanceof Database)) return false; // also covers obj == null
    Database other = (Database) obj;
    if (databaseMeta == null) return other.databaseMeta == null;
    return databaseMeta.equals(other.databaseMeta);
}
/**
 * Allows for the injection of a "life" connection, generated by a piece of software outside of Kettle.
 * @param connection the externally created JDBC connection to use
 */
public void setConnection(Connection connection)
{
    this.connection = connection;
}
/**
 * @return Returns the underlying JDBC connection, null when not connected.
 */
public Connection getConnection()
{
    return this.connection;
}
/**
 * Set the maximum number of records to retrieve from a query.
 * @param rows the row limit, 0 for no limit
 */
public void setQueryLimit(int rows)
{
    this.rowlimit = rows;
}
/**
 * @return Returns the cached prepared statement for inserts.
 */
public PreparedStatement getPrepStatementInsert()
{
    return this.prepStatementInsert;
}
/**
 * @return Returns the cached prepared statement for lookups.
 */
public PreparedStatement getPrepStatementLookup()
{
    return this.prepStatementLookup;
}
/**
 * @return Returns the cached prepared statement for updates.
 */
public PreparedStatement getPrepStatementUpdate()
{
    return this.prepStatementUpdate;
}
/**
 * Open the database connection.
 * Convenience overload: connects without a partition ID.
 * @throws KettleDatabaseException if something went wrong.
 */
public void connect() throws KettleDatabaseException
{
    connect(null);
}
/**
 * Open the database connection.
 * Convenience overload: connects without a connection group.
 * @param partitionId the partition ID in the cluster to connect to.
 * @throws KettleDatabaseException if something went wrong.
 */
public void connect(String partitionId) throws KettleDatabaseException
{
    connect(null, partitionId);
}
/**
 * Open the database connection, optionally sharing it within a connection group.
 * @param group connection group name (typically the transformation/job thread name), may be null/empty
 * @param partitionId the partition ID in the cluster to connect to, may be null
 * @throws KettleDatabaseException if something went wrong.
 */
public synchronized void connect(String group, String partitionId) throws KettleDatabaseException
{
    // Before anything else, let's see if we already have a connection defined for this group/partition!
    // The group is called after the thread-name of the transformation or job that is running
    // The name of that threadname is expected to be unique (it is in Kettle)
    // So the deal is that if there is another thread using that, we go for it.
    if (!Const.isEmpty(group))
    {
        this.connectionGroup = group;
        this.partitionId = partitionId;

        DatabaseConnectionMap map = DatabaseConnectionMap.getInstance();

        // Try to find the connection for the group
        Database lookup = map.getDatabase(group, partitionId, this);
        if (lookup==null) // No connection exists yet for this partition & database in this group
        {
            // Do a normal connect and then store this database object for later re-use.
            normalConnect(partitionId);
            opened++;
            copy = opened;

            map.storeDatabase(group, partitionId, this);
        }
        else
        {
            // Re-use the already-open connection and bump its usage count
            connection = lookup.getConnection();
            lookup.setOpened(lookup.getOpened()+1); // if this counter hits 0 again, close the connection.
            copy = lookup.getOpened();
        }
    }
    else
    {
        // Proceed with a normal connect
        normalConnect(partitionId);
    }
}
/**
 * Open the database connection: via the connection pool if configured,
 * otherwise by loading the JDBC driver directly.
 * Also runs the configured "connect SQL" statements after connecting.
 * @param partitionId the partition ID in the cluster to connect to, may be null
 * @throws KettleDatabaseException if something went wrong.
 */
public void normalConnect(String partitionId) throws KettleDatabaseException
{
    if (databaseMeta==null)
    {
        throw new KettleDatabaseException("No valid database connection defined!");
    }

    try
    {
        // First see if we use connection pooling...
        if ( databaseMeta.isUsingConnectionPool() &&  // default = false for backward compatibility
             databaseMeta.getAccessType()!=DatabaseMeta.TYPE_ACCESS_JNDI // JNDI does pooling on it's own.
            )
        {
            try
            {
                this.connection = ConnectionPoolUtil.getConnection(log, databaseMeta, partitionId);
            }
            catch (Exception e)
            {
                throw new KettleDatabaseException("Error occured while trying to connect to the database", e);
            }
        }
        else
        {
            connectUsingClass(databaseMeta.getDriverClass(), partitionId );
            if(log.isDetailed()) log.logDetailed("Connected to database.");

            // See if we need to execute extra SQL statemtent...
            String sql = environmentSubstitute( databaseMeta.getConnectSQL() );

            // only execute if the SQL is not empty, null and is not just a bunch of spaces, tabs, CR etc.
            if (!Const.isEmpty(sql) && !Const.onlySpaces(sql))
            {
                execStatements(sql);
                if(log.isDetailed()) log.logDetailed("Executed connect time SQL statements:"+Const.CR+sql);
            }
        }
    }
    catch(Exception e)
    {
        throw new KettleDatabaseException("Error occured while trying to connect to the database", e);
    }
}
/**
 * Initialize by getting the connection from a javax.sql.DataSource. This method uses the
 * DataSourceProviderFactory to get the provider of DataSource objects.
 * @param dataSourceName name of the JNDI data source to look up
 * @throws KettleDatabaseException when the data source cannot be found or no connection can be obtained
 */
private void initWithNamedDataSource(String dataSourceName) throws KettleDatabaseException {
    connection = null;
    DataSource dataSource = DataSourceProviderFactory.getDataSourceProviderInterface().getNamedDataSource(dataSourceName);
    if (dataSource == null) {
        throw new KettleDatabaseException( "Invalid JNDI connection "+ dataSourceName); //$NON-NLS-1$
    }
    try {
        connection = dataSource.getConnection();
    } catch (SQLException e) {
        // FIX: pass the SQLException as the cause instead of dropping it
        // and keeping only its message.
        throw new KettleDatabaseException( "Invalid JNDI connection "+ dataSourceName + " : " + e.getMessage(), e); //$NON-NLS-1$
    }
    if (connection == null) {
        throw new KettleDatabaseException( "Invalid JNDI connection "+ dataSourceName); //$NON-NLS-1$
    }
}
/**
 * Connect using the correct classname
 * @param classname for example "org.gjt.mm.mysql.Driver"
 * @param partitionId the partition ID to connect to, may be null (then the default URL/credentials are used)
 * @throws KettleDatabaseException when the driver class cannot be loaded or the connection attempt fails
 */
private void connectUsingClass(String classname, String partitionId) throws KettleDatabaseException
{
    // Install and load the jdbc Driver

    // first see if this is a JNDI connection
    if( databaseMeta.getAccessType() == DatabaseMeta.TYPE_ACCESS_JNDI ) {
        initWithNamedDataSource( environmentSubstitute(databaseMeta.getDatabaseName()) );
        return;
    }

    try
    {
        Class.forName(classname);
    }
    catch(NoClassDefFoundError e)
    {
        throw new KettleDatabaseException("Exception while loading class", e);
    }
    catch(ClassNotFoundException e)
    {
        throw new KettleDatabaseException("Exception while loading class", e);
    }
    catch(Exception e)
    {
        throw new KettleDatabaseException("Exception while loading class", e);
    }

    try
    {
        // Build the URL: partitioned connections may have a partition-specific URL
        String url;
        if (databaseMeta.isPartitioned() && !Const.isEmpty(partitionId))
        {
            url = environmentSubstitute(databaseMeta.getURL(partitionId));
        }
        else
        {
            url = environmentSubstitute(databaseMeta.getURL());
        }

        // Partitioned connections may also carry their own credentials
        String clusterUsername=null;
        String clusterPassword=null;
        if (databaseMeta.isPartitioned() && !Const.isEmpty(partitionId))
        {
            // Get the cluster information...
            PartitionDatabaseMeta partition = databaseMeta.getPartitionMeta(partitionId);
            if (partition!=null)
            {
                clusterUsername = partition.getUsername();
                clusterPassword = Encr.decryptPasswordOptionallyEncrypted(partition.getPassword());
            }
        }

        // Cluster credentials take precedence over the connection's own
        String username;
        String password;
        if (!Const.isEmpty(clusterUsername))
        {
            username = clusterUsername;
            password = clusterPassword;
        }
        else
        {
            username = environmentSubstitute(databaseMeta.getUsername());
            password = Encr.decryptPasswordOptionallyEncrypted(environmentSubstitute(databaseMeta.getPassword()));
        }

        if (databaseMeta.supportsOptionsInURL())
        {
            if (!Const.isEmpty(username) || !Const.isEmpty(password))
            {
                // also allow for empty username with given password, in this case username must be given with one space
                connection = DriverManager.getConnection(url, Const.NVL(username, " "), Const.NVL(password, ""));
            }
            else
            {
                // Perhaps the username is in the URL or no username is required...
                connection = DriverManager.getConnection(url);
            }
        }
        else
        {
            // Databases without URL options receive credentials via properties
            Properties properties = databaseMeta.getConnectionProperties();
            if (!Const.isEmpty(username)) properties.put("user", username);
            if (!Const.isEmpty(password)) properties.put("password", password);

            connection = DriverManager.getConnection(url, properties);
        }
    }
    catch(SQLException e)
    {
        throw new KettleDatabaseException("Error connecting to database: (using class "+classname+")", e);
    }
    catch(Throwable e)
    {
        throw new KettleDatabaseException("Error connecting to database: (using class "+classname+")", e);
    }
}
/**
 * Disconnect from the database and close all open prepared statements.
 * Best-effort: failures are logged, not thrown.
 * For shared connections (non-empty connection group) the actual commit and
 * close are deferred until the last user disconnects.
 */
public synchronized void disconnect()
{
    try
    {
        if (connection==null)
        {
            return ; // Nothing to do...
        }
        if (connection.isClosed())
        {
            return ; // Nothing to do...
        }

        // Close every cached (prepared) statement and drop its reference,
        // so a later connect() starts from a clean slate.
        if (pstmt !=null)
        {
            pstmt.close();
            pstmt=null;
        }
        if (prepStatementLookup!=null)
        {
            prepStatementLookup.close();
            prepStatementLookup=null;
        }
        if (prepStatementInsert!=null)
        {
            prepStatementInsert.close();
            prepStatementInsert=null;
        }
        if (prepStatementUpdate!=null)
        {
            prepStatementUpdate.close();
            prepStatementUpdate=null;
        }
        if (pstmt_seq!=null)
        {
            pstmt_seq.close();
            pstmt_seq=null;
        }

        // See if there are other steps using this connection in a connection group.
        // If so, we will hold commit & connection close until then.
        if (!Const.isEmpty(connectionGroup))
        {
            return;
        }
        else
        {
            if (!isAutoCommit()) // Do we really still need this commit??
            {
                commit();
            }
        }

        closeConnectionOnly();
    }
    catch(SQLException ex)
    {
        log.logError("Error disconnecting from database:"+Const.CR+ex.getMessage());
        log.logError(Const.getStackTracker(ex));
    }
    catch(KettleDatabaseException dbe)
    {
        log.logError("Error disconnecting from database:"+Const.CR+dbe.getMessage());
        log.logError(Const.getStackTracker(dbe));
    }
}
/**
 * Close the underlying JDBC connection without committing or touching the
 * cached prepared statements. Typically you call {@link #disconnect()} instead;
 * this variant exists for unique-connection usage.
 *
 * @throws KettleDatabaseException in case there is an error during connection close.
 */
public synchronized void closeConnectionOnly() throws KettleDatabaseException {
  try {
    if (connection != null) {
      connection.close();
      // When pooling is active, keep the reference: the pool wrapper manages reuse.
      if (!databaseMeta.isUsingConnectionPool()) {
        connection = null;
      }
    }
    if (log.isDetailed()) {
      log.logDetailed("Connection to database closed!");
    }
  } catch (SQLException e) {
    throw new KettleDatabaseException("Error disconnecting from database '"+toString()+"'", e);
  }
}
/**
 * Cancel the open/running queries on this database connection by cancelling
 * both the prepared-statement and plain-statement query handles.
 *
 * @throws KettleDatabaseException when cancelling a statement fails.
 */
public void cancelQuery() throws KettleDatabaseException {
  cancelStatement(pstmt);
  cancelStatement(sel_stmt);
}
/**
 * Cancel an open/running SQL statement. A null statement is tolerated (no-op
 * apart from the debug log line).
 *
 * @param statement the statement to cancel, may be null
 * @throws KettleDatabaseException when the JDBC cancel call fails.
 */
public void cancelStatement(Statement statement) throws KettleDatabaseException {
  try {
    if (statement != null) {
      statement.cancel();
    }
    if (log.isDebug()) {
      log.logDebug("Statement canceled!");
    }
  } catch (SQLException ex) {
    throw new KettleDatabaseException("Error cancelling statement", ex);
  }
}
/**
 * Specify after how many rows a commit needs to occur when inserting or updating values.
 * A commit size of zero or less switches the connection to auto-commit.
 * Failures to toggle auto-commit are logged at debug level and ignored.
 *
 * @param commsize The number of rows to wait before doing a commit on the connection.
 */
public void setCommit(int commsize) {
  commitsize = commsize;
  boolean autoCommit = commitsize <= 0;
  String onOff = autoCommit ? "on" : "off";
  try {
    connection.setAutoCommit(autoCommit);
    if (log.isDetailed()) {
      log.logDetailed("Auto commit "+onOff);
    }
  } catch (Exception e) {
    // Best effort: some drivers refuse the toggle; we keep going either way.
    if (log.isDebug()) {
      log.logDebug("Can't turn auto commit "+onOff);
    }
  }
}
/**
 * Turn JDBC auto-commit on or off for this connection.
 *
 * @param useAutoCommit true to enable auto-commit, false to disable it
 * @throws KettleDatabaseException when the driver rejects the change.
 */
public void setAutoCommit(boolean useAutoCommit) throws KettleDatabaseException {
  try {
    connection.setAutoCommit(useAutoCommit);
  } catch (SQLException e) {
    // Pick the message matching the direction of the failed toggle.
    String key = useAutoCommit
        ? "Database.Exception.UnableToEnableAutoCommit"
        : "Database.Exception.UnableToDisableAutoCommit";
    throw new KettleDatabaseException(BaseMessages.getString(PKG, key, toString()));
  }
}
/**
 * Perform a commit on the connection if this is supported by the database.
 * Equivalent to {@code commit(false)}: connection-group handling applies.
 *
 * @throws KettleDatabaseException when the commit fails.
 */
public void commit() throws KettleDatabaseException {
  commit(false);
}
/**
 * Commit the connection, unless it belongs to a connection group and
 * {@code force} is false (the transformation commits the group at the end).
 *
 * @param force true to commit even when part of a connection group
 * @throws KettleDatabaseException when the commit fails and the dialect
 *         reports supportsEmptyTransactions()
 */
public void commit(boolean force) throws KettleDatabaseException
{
try
{
// Don't do the commit, wait until the end of the transformation.
// When the last database copy (opened counter) is about to be closed, we do a commit
// There is one catch, we need to catch the rollback
// The transformation will stop everything and then we'll do the rollback.
// The flag is in "performRollback", private only
if (!Const.isEmpty(connectionGroup) && !force)
{
return;
}
if (getDatabaseMetaData().supportsTransactions())
{
if (log.isDebug()) log.logDebug("Commit on database connection ["+toString()+"]");
connection.commit();
}
else
{
if(log.isDetailed()) log.logDetailed("No commit possible on database connection ["+toString()+"]");
}
}
catch(Exception e)
{
// NOTE(review): commit errors are SWALLOWED unless supportsEmptyTransactions()
// is true — presumably to tolerate "nothing to commit" failures on some
// databases; confirm before tightening this.
if (databaseMeta.supportsEmptyTransactions())
throw new KettleDatabaseException("Error comitting connection", e);
}
}
/**
 * Roll back the connection if supported. Equivalent to {@code rollback(false)}:
 * connection-group handling applies.
 *
 * @throws KettleDatabaseException when the rollback fails.
 */
public void rollback() throws KettleDatabaseException {
  rollback(false);
}
/**
 * Roll back the connection, unless it belongs to a connection group and
 * {@code force} is false — in that case Trans handles it in endProcessing().
 *
 * @param force true to roll back even when part of a connection group
 * @throws KettleDatabaseException when the JDBC rollback fails.
 */
public void rollback(boolean force) throws KettleDatabaseException {
  try {
    if (!Const.isEmpty(connectionGroup) && !force) {
      return; // Will be handled by Trans --> endProcessing()
    }
    if (!getDatabaseMetaData().supportsTransactions()) {
      if (log.isDetailed()) {
        log.logDetailed("No rollback possible on database connection ["+toString()+"]");
      }
      return;
    }
    if (connection != null) {
      if (log.isDebug()) {
        log.logDebug("Rollback on database connection ["+toString()+"]");
      }
      connection.rollback();
    }
  } catch (SQLException e) {
    throw new KettleDatabaseException("Error performing rollback on connection", e);
  }
}
/**
 * Prepare inserting values into a table, using the fields &amp; values in a Row.
 * Delegates to the schema-aware overload with a null schema.
 *
 * @param rowMeta The row metadata to determine which values need to be inserted
 * @param tableName The name of the table in which we want to insert rows
 * @throws KettleDatabaseException if something went wrong.
 */
public void prepareInsert(RowMetaInterface rowMeta, String tableName) throws KettleDatabaseException {
  prepareInsert(rowMeta, null, tableName);
}
/**
 * Prepare inserting values into a table, using the fields &amp; values in a Row.
 * Builds the INSERT statement and stores the prepared handle in
 * {@code prepStatementInsert} for the subsequent setValuesInsert()/insertRow() calls.
 *
 * @param rowMeta The metadata row to determine which values need to be inserted
 * @param schemaName The name of the schema in which we want to insert rows
 * @param tableName The name of the table in which we want to insert rows
 * @throws KettleDatabaseException if the row is empty or preparation fails.
 */
public void prepareInsert(RowMetaInterface rowMeta, String schemaName, String tableName) throws KettleDatabaseException {
  if (rowMeta.size() == 0) {
    throw new KettleDatabaseException("No fields in row, can't insert!");
  }
  String ins = getInsertStatement(schemaName, tableName, rowMeta);
  if (log.isDetailed()) {
    log.logDetailed("Preparing statement: "+Const.CR+ins);
  }
  prepStatementInsert = prepareSQL(ins);
}
/**
 * Prepare a statement to be executed on the database. (does not return generated keys)
 *
 * @param sql The SQL to be prepared
 * @return The PreparedStatement object.
 * @throws KettleDatabaseException when preparation fails.
 */
public PreparedStatement prepareSQL(String sql) throws KettleDatabaseException {
  return prepareSQL(sql, false);
}
/**
 * Prepare a statement to be executed on the database.
 *
 * @param sql The SQL to be prepared
 * @param returnKeys set to true if you want to return generated keys from an insert statement
 * @return The PreparedStatement object.
 * @throws KettleDatabaseException when preparation fails.
 */
public PreparedStatement prepareSQL(String sql, boolean returnKeys) throws KettleDatabaseException {
  // Strip carriage returns the way the dialect expects before handing to the driver.
  String cleaned = databaseMeta.stripCR(sql);
  try {
    return returnKeys
        ? connection.prepareStatement(cleaned, Statement.RETURN_GENERATED_KEYS)
        : connection.prepareStatement(cleaned);
  } catch (SQLException ex) {
    throw new KettleDatabaseException("Couldn't prepare statement:"+Const.CR+sql, ex);
  }
}
/**
 * Close the cached lookup prepared statement and clear its reference.
 *
 * @throws KettleDatabaseException when closing the statement fails.
 */
public void closeLookup() throws KettleDatabaseException {
  closePreparedStatement(pstmt);
  pstmt = null;
}
/**
 * Close the given prepared statement; a null argument is a no-op.
 *
 * @param ps the prepared statement to close, may be null
 * @throws KettleDatabaseException when the JDBC close fails.
 */
public void closePreparedStatement(PreparedStatement ps) throws KettleDatabaseException {
  if (ps == null) {
    return;
  }
  try {
    ps.close();
  } catch (SQLException e) {
    throw new KettleDatabaseException("Error closing prepared statement", e);
  }
}
/**
 * Close the cached insert prepared statement (if any) and clear its reference.
 *
 * @throws KettleDatabaseException when the JDBC close fails.
 */
public void closeInsert() throws KettleDatabaseException {
  if (prepStatementInsert == null) {
    return;
  }
  try {
    prepStatementInsert.close();
    prepStatementInsert = null;
  } catch (SQLException e) {
    throw new KettleDatabaseException("Error closing insert prepared statement.", e);
  }
}
/**
 * Close the cached update prepared statement (if any) and clear its reference.
 *
 * @throws KettleDatabaseException when the JDBC close fails.
 */
public void closeUpdate() throws KettleDatabaseException {
  if (prepStatementUpdate == null) {
    return;
  }
  try {
    prepStatementUpdate.close();
    prepStatementUpdate = null;
  } catch (SQLException e) {
    throw new KettleDatabaseException("Error closing update prepared statement.", e);
  }
}
/**
 * Bind the given row values onto the shared lookup statement {@code pstmt}.
 *
 * @param rowMeta metadata describing each value's type
 * @param data the values to bind, positionally matching rowMeta
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValues(RowMetaInterface rowMeta, Object[] data) throws KettleDatabaseException {
  setValues(rowMeta, data, pstmt);
}
/**
 * Convenience overload: bind a RowMetaAndData onto the shared lookup statement.
 *
 * @param row metadata plus values to bind
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValues(RowMetaAndData row) throws KettleDatabaseException {
  setValues(row.getRowMeta(), row.getData());
}
/**
 * Bind the given row values onto the cached insert statement.
 *
 * @param rowMeta metadata describing each value's type
 * @param data the values to bind, positionally matching rowMeta
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValuesInsert(RowMetaInterface rowMeta, Object[] data) throws KettleDatabaseException {
  setValues(rowMeta, data, prepStatementInsert);
}
/**
 * Convenience overload: bind a RowMetaAndData onto the cached insert statement.
 *
 * @param row metadata plus values to bind
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValuesInsert(RowMetaAndData row) throws KettleDatabaseException {
  setValues(row.getRowMeta(), row.getData(), prepStatementInsert);
}
/**
 * Bind the given row values onto the cached update statement.
 *
 * @param rowMeta metadata describing each value's type
 * @param data the values to bind, positionally matching rowMeta
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValuesUpdate(RowMetaInterface rowMeta, Object[] data) throws KettleDatabaseException {
  setValues(rowMeta, data, prepStatementUpdate);
}
/**
 * Bind the given row values onto the cached lookup statement.
 *
 * @param rowMeta metadata describing each value's type
 * @param data the values to bind, positionally matching rowMeta
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValuesLookup(RowMetaInterface rowMeta, Object[] data) throws KettleDatabaseException {
  setValues(rowMeta, data, prepStatementLookup);
}
/**
 * Bind IN and INOUT procedure arguments onto the callable statement {@code cstmt}.
 * Parameter positions are 1-based; when {@code result} is true the return value
 * occupies position 1, so arguments start at position 2. OUT-only arguments are
 * skipped but still consume a position.
 *
 * @param rowMeta metadata for the argument values
 * @param data the argument values
 * @param argnrs indexes into rowMeta/data for each procedure argument
 * @param argdir direction of each argument: "IN", "OUT" or "INOUT"
 * @param result true when the procedure has a return value
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setProcValues(RowMetaInterface rowMeta, Object[] data, int argnrs[], String argdir[], boolean result) throws KettleDatabaseException {
  int pos = result ? 2 : 1;
  for (int i = 0; i < argnrs.length; i++) {
    boolean isInput = argdir[i].equalsIgnoreCase("IN") || argdir[i].equalsIgnoreCase("INOUT");
    if (isInput) {
      setValue(cstmt, rowMeta.getValueMeta(argnrs[i]), data[argnrs[i]], pos);
    }
    pos++; // every declared parameter consumes a slot, bound or not
  }
}
/**
 * Bind a single Kettle value onto a prepared statement parameter, converting
 * the Kettle type to the appropriate JDBC setter and mapping nulls to the
 * matching java.sql.Types constant.
 *
 * @param ps the prepared statement to bind on
 * @param v the value metadata (type, precision, length)
 * @param object the raw value; v decides how it is interpreted
 * @param pos the 1-based JDBC parameter position
 * @throws KettleDatabaseException when conversion or binding fails; the message
 *         includes the last "debug" checkpoint reached.
 */
public void setValue(PreparedStatement ps, ValueMetaInterface v, Object object, int pos) throws KettleDatabaseException
{
String debug = "";
try
{
switch(v.getType())
{
case ValueMetaInterface.TYPE_NUMBER :
if (!v.isNull(object))
{
debug="Number, not null, getting number from value";
double num = v.getNumber(object).doubleValue();
// Round to the declared precision when the dialect wants it.
if (databaseMeta.supportsFloatRoundingOnUpdate() && v.getPrecision()>=0)
{
debug="Number, rounding to precision ["+v.getPrecision()+"]";
num = Const.round(num, v.getPrecision());
}
debug="Number, setting ["+num+"] on position #"+pos+" of the prepared statement";
ps.setDouble(pos, num);
}
else
{
ps.setNull(pos, java.sql.Types.DOUBLE);
}
break;
case ValueMetaInterface.TYPE_INTEGER:
debug="Integer";
if (!v.isNull(object))
{
if (databaseMeta.supportsSetLong())
{
ps.setLong(pos, v.getInteger(object).longValue() );
}
else
{
// Dialect can't take a long: fall back to a double, rounding when
// supported — mirrors the TYPE_NUMBER branch above.
double d = v.getNumber(object).doubleValue();
if (databaseMeta.supportsFloatRoundingOnUpdate() && v.getPrecision()>=0)
{
// FIX: these two branches were inverted — the raw double was sent
// when rounding IS supported and Const.round() was called (with a
// possibly negative precision) when it is NOT.
ps.setDouble(pos, Const.round( d, v.getPrecision() ) );
}
else
{
ps.setDouble(pos, d );
}
}
}
else
{
ps.setNull(pos, java.sql.Types.INTEGER);
}
break;
case ValueMetaInterface.TYPE_STRING :
debug="String";
if (v.getLength()<DatabaseMeta.CLOB_LENGTH)
{
// Short strings go through the plain setString path.
if (!v.isNull(object))
{
ps.setString(pos, v.getString(object));
}
else
{
ps.setNull(pos, java.sql.Types.VARCHAR);
}
}
else
{
// CLOB-sized strings: clip to the database's maximum text field length
// (keeping the TAIL of the string) and stream when supported.
if (!v.isNull(object))
{
String string = v.getString(object);
int maxlen = databaseMeta.getMaxTextFieldLength();
int len = string.length();
// Take the last maxlen characters of the string...
int begin = len - maxlen;
if (begin<0) begin=0;
// Get the substring!
String logging = string.substring(begin);
if (databaseMeta.supportsSetCharacterStream())
{
StringReader sr = new StringReader(logging);
ps.setCharacterStream(pos, sr, logging.length());
}
else
{
ps.setString(pos, logging);
}
}
else
{
ps.setNull(pos, java.sql.Types.VARCHAR);
}
}
break;
case ValueMetaInterface.TYPE_DATE :
debug="Date";
if (!v.isNull(object))
{
long dat = v.getInteger(object).longValue(); // converts using Date.getTime()
// precision==1 means "date only"; otherwise use a full timestamp when the
// dialect supports the conversion.
if(v.getPrecision()==1 || !databaseMeta.supportsTimeStampToDateConversion())
{
// Convert to DATE!
java.sql.Date ddate = new java.sql.Date(dat);
ps.setDate(pos, ddate);
}
else
{
java.sql.Timestamp sdate = new java.sql.Timestamp(dat);
ps.setTimestamp(pos, sdate);
}
}
else
{
if(v.getPrecision()==1 || !databaseMeta.supportsTimeStampToDateConversion())
{
ps.setNull(pos, java.sql.Types.DATE);
}
else
{
ps.setNull(pos, java.sql.Types.TIMESTAMP);
}
}
break;
case ValueMetaInterface.TYPE_BOOLEAN:
debug="Boolean";
if (databaseMeta.supportsBooleanDataType())
{
if (!v.isNull(object))
{
ps.setBoolean(pos, v.getBoolean(object).booleanValue());
}
else
{
ps.setNull(pos, java.sql.Types.BOOLEAN);
}
}
else
{
// No native boolean: encode as single-character "Y"/"N".
if (!v.isNull(object))
{
ps.setString(pos, v.getBoolean(object).booleanValue()?"Y":"N");
}
else
{
ps.setNull(pos, java.sql.Types.CHAR);
}
}
break;
case ValueMetaInterface.TYPE_BIGNUMBER:
debug="BigNumber";
if (!v.isNull(object))
{
ps.setBigDecimal(pos, v.getBigNumber(object));
}
else
{
ps.setNull(pos, java.sql.Types.DECIMAL);
}
break;
case ValueMetaInterface.TYPE_BINARY:
debug="Binary";
if (!v.isNull(object))
{
ps.setBytes(pos, v.getBinary(object));
}
else
{
ps.setNull(pos, java.sql.Types.BINARY);
}
break;
default:
debug="default";
// Unknown Kettle type: bind SQL NULL as a VARCHAR placeholder.
ps.setNull(pos, java.sql.Types.VARCHAR);
break;
}
}
catch(SQLException ex)
{
throw new KettleDatabaseException("Error setting value #"+pos+" ["+v.toString()+"] on prepared statement ("+debug+")"+Const.CR+ex.toString(), ex);
}
catch(Exception e)
{
throw new KettleDatabaseException("Error setting value #"+pos+" ["+(v==null?"NULL":v.toString())+"] on prepared statement ("+debug+")"+Const.CR+e.toString(), e);
}
}
/**
 * Convenience overload: bind a RowMetaAndData onto the given prepared statement.
 *
 * @param row metadata plus values to bind
 * @param ps the target prepared statement
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValues(RowMetaAndData row, PreparedStatement ps) throws KettleDatabaseException {
  setValues(row.getRowMeta(), row.getData(), ps);
}
/**
 * Bind every value of a row onto the given prepared statement, positions 1..N.
 * A failure on any field is wrapped with the offending row's metadata.
 *
 * @param rowMeta metadata describing each value's type
 * @param data the values to bind, positionally matching rowMeta
 * @param ps the target prepared statement
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValues(RowMetaInterface rowMeta, Object[] data, PreparedStatement ps) throws KettleDatabaseException {
  int fieldCount = rowMeta.size();
  for (int i = 0; i < fieldCount; i++) {
    try {
      // JDBC parameters are 1-based.
      setValue(ps, rowMeta.getValueMeta(i), data[i], i + 1);
    } catch (KettleDatabaseException e) {
      throw new KettleDatabaseException("offending row : "+rowMeta, e);
    }
  }
}
/**
 * Bind every value of a row onto the given prepared statement, skipping one
 * field. Statement positions stay contiguous: the skipped field does NOT
 * consume a parameter slot.
 *
 * @param rowMeta metadata describing each value's type
 * @param data the values to bind, positionally matching rowMeta
 * @param ps the target prepared statement
 * @param ignoreThisValueIndex row index to skip (e.g. an auto-increment key)
 * @throws KettleDatabaseException when a value cannot be bound.
 */
public void setValues(RowMetaInterface rowMeta, Object[] data, PreparedStatement ps, int ignoreThisValueIndex) throws KettleDatabaseException {
  int psPos = 1; // next JDBC parameter position (1-based)
  for (int i = 0; i < rowMeta.size(); i++) {
    if (i == ignoreThisValueIndex) {
      continue;
    }
    try {
      setValue(ps, rowMeta.getValueMeta(i), data[i], psPos);
      psPos++;
    } catch (KettleDatabaseException e) {
      throw new KettleDatabaseException("offending row : "+rowMeta, e);
    }
  }
}
/**
 * Retrieve the auto-generated keys produced by an insert statement prepared
 * with Statement.RETURN_GENERATED_KEYS.
 *
 * @param ps The prepared insert statement to use
 * @return The generated keys in auto-increment fields (one row)
 * @throws KettleDatabaseException in case something goes wrong retrieving the keys.
 */
public RowMetaAndData getGeneratedKeys(PreparedStatement ps) throws KettleDatabaseException
{
ResultSet keys = null;
try
{
keys=ps.getGeneratedKeys(); // 1 row of keys
ResultSetMetaData resultSetMetaData = keys.getMetaData();
RowMetaInterface rowMeta = getRowInfo(resultSetMetaData, false, false);
return new RowMetaAndData(rowMeta, getRow(keys, resultSetMetaData, rowMeta));
}
catch(Exception ex)
{
throw new KettleDatabaseException("Unable to retrieve key(s) from auto-increment field(s)", ex);
}
finally
{
if (keys!=null)
{
try
{
keys.close();
}
catch(SQLException e)
{
// NOTE(review): throwing from finally masks any exception already in
// flight from the try/catch above — consider logging instead.
throw new KettleDatabaseException("Unable to close resultset of auto-generated keys", e);
}
}
}
}
/**
 * Get the next value from a database sequence in the default schema.
 *
 * @param sequenceName the sequence to advance
 * @param keyfield the key field name (passed through to the schema-aware overload)
 * @return the next sequence value, or null when the query returned no row
 * @throws KettleDatabaseException when the sequence query fails.
 */
public Long getNextSequenceValue(String sequenceName, String keyfield) throws KettleDatabaseException {
  return getNextSequenceValue(null, sequenceName, keyfield);
}
/**
 * Get the next value from a database sequence.
 * The nextval statement is prepared once and cached in {@code pstmt_seq}
 * (closed later by disconnect()), so repeated calls reuse it — note this means
 * the FIRST sequence queried on this connection wins; subsequent calls with a
 * different sequence reuse the cached statement.
 *
 * @param schemaName schema of the sequence, may be null
 * @param sequenceName the sequence to advance
 * @param keyfield unused here; kept for API compatibility
 * @return the next sequence value, or null when the query returned no row
 * @throws KettleDatabaseException when the sequence query fails.
 */
public Long getNextSequenceValue(String schemaName, String sequenceName, String keyfield) throws KettleDatabaseException
{
Long retval=null;
String schemaSequence = databaseMeta.getQuotedSchemaTableCombination(schemaName, sequenceName);
try
{
if (pstmt_seq==null)
{
pstmt_seq=connection.prepareStatement(databaseMeta.getSeqNextvalSQL(databaseMeta.stripCR(schemaSequence)));
}
ResultSet rs=null;
try
{
rs = pstmt_seq.executeQuery();
if (rs.next())
{
retval = Long.valueOf( rs.getLong(1) );
}
}
finally
{
// Always release the result set; the prepared statement itself stays cached.
if ( rs != null ) rs.close();
}
}
catch(SQLException ex)
{
throw new KettleDatabaseException("Unable to get next value for sequence : "+schemaSequence, ex);
}
return retval;
}
/**
 * Insert one row into a table in the default schema.
 *
 * @param tableName the target table
 * @param fields metadata of the values to insert
 * @param data the values to insert
 * @throws KettleDatabaseException when preparing, binding or executing fails.
 */
public void insertRow(String tableName, RowMetaInterface fields, Object[] data) throws KettleDatabaseException {
  insertRow(null, tableName, fields, data);
}
/**
 * Insert one row into a table: prepares the INSERT, binds the values, executes
 * it and closes the statement again in one go. For bulk loads, prepare once and
 * call insertRow()/setValuesInsert() in a loop instead.
 *
 * @param schemaName the target schema, may be null
 * @param tableName the target table
 * @param fields metadata of the values to insert
 * @param data the values to insert
 * @throws KettleDatabaseException when preparing, binding or executing fails.
 */
public void insertRow(String schemaName, String tableName, RowMetaInterface fields, Object[] data) throws KettleDatabaseException {
  prepareInsert(fields, schemaName, tableName);
  setValuesInsert(fields, data);
  insertRow();
  closeInsert();
}
/**
 * Build an INSERT statement with '?' placeholders for a table in the default schema.
 *
 * @param tableName the target table
 * @param fields metadata supplying the column names
 * @return the INSERT ... VALUES (?, ...) SQL text
 */
public String getInsertStatement(String tableName, RowMetaInterface fields) {
  return getInsertStatement(null, tableName, fields);
}
/**
 * Build an INSERT statement with one '?' placeholder per field, using the
 * dialect's identifier quoting for the schema/table combination and each
 * column name.
 *
 * @param schemaName the target schema, may be null
 * @param tableName the target table
 * @param fields metadata supplying the column names
 * @return the INSERT ... VALUES ( ?, ...) SQL text
 */
public String getInsertStatement(String schemaName, String tableName, RowMetaInterface fields)
{
  // StringBuilder instead of StringBuffer: built and consumed on one thread,
  // so the per-call synchronization of StringBuffer buys nothing.
  StringBuilder ins = new StringBuilder(128);
  String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
  ins.append("INSERT INTO ").append(schemaTable).append(" (");

  // Column list, quoted per dialect.
  for (int i = 0; i < fields.size(); i++)
  {
    if (i > 0) ins.append(", ");
    ins.append(databaseMeta.quoteField(fields.getValueMeta(i).getName()));
  }
  ins.append(") VALUES (");

  // One placeholder per column (leading space kept for byte-compatibility).
  for (int i = 0; i < fields.size(); i++)
  {
    if (i > 0) ins.append(", ");
    ins.append(" ?");
  }
  ins.append(')');
  return ins.toString();
}
/**
 * Execute the cached insert prepared statement with the values already bound.
 *
 * @throws KettleDatabaseException when execution fails.
 */
public void insertRow() throws KettleDatabaseException {
  insertRow(prepStatementInsert);
}
/**
 * Execute the cached insert prepared statement, optionally adding it to a batch
 * instead of executing immediately.
 *
 * @param batch true to use batch inserts (flushed at commit size)
 * @throws KettleDatabaseException when execution fails.
 */
public void insertRow(boolean batch) throws KettleDatabaseException {
  insertRow(prepStatementInsert, batch);
}
/**
 * Execute the cached UPDATE prepared statement with the values already bound.
 * Reuses the generic insertRow(ps) execution path — the "insert" name refers to
 * the execution mechanics, not the SQL verb.
 *
 * @throws KettleDatabaseException when execution fails.
 */
public void updateRow() throws KettleDatabaseException {
  insertRow(prepStatementUpdate);
}
/**
 * Execute the given prepared statement immediately (no batching).
 *
 * @param ps the statement with all values bound
 * @throws KettleDatabaseException when execution fails.
 */
public void insertRow(PreparedStatement ps) throws KettleDatabaseException {
  insertRow(ps, false);
}
/**
 * Insert a row into the database using a prepared statement that has all values set.
 *
 * @param ps The prepared statement
 * @param batch True if you want to use batch inserts (size = commit size)
 * @return true if the rows are safe: if batch of rows was sent to the database OR if a commit was done.
 * @throws KettleDatabaseException when the insert fails.
 */
public boolean insertRow(PreparedStatement ps, boolean batch) throws KettleDatabaseException
{
  // FIX: forward the caller's batch flag. This was hard-coded to false,
  // silently disabling batch inserts for every caller of this overload.
  return insertRow(ps, batch, true);
}
/**
 * Insert a row into the database using a prepared statement that has all values set.
 *
 * @param ps The prepared statement
 * @param batch True if you want to use batch inserts (size = commit size)
 * @param handleCommit True if you want to handle the commit here after the commit size (False e.g. in case the step handles this, see TableOutput)
 * @return true if the rows are safe: if batch of rows was sent to the database OR if a commit was done.
 * @throws KettleDatabaseException when the insert fails; batch failures are
 *         raised as KettleDatabaseBatchException carrying update counts and the
 *         chained driver exceptions.
 */
public boolean insertRow(PreparedStatement ps, boolean batch, boolean handleCommit) throws KettleDatabaseException
{
  String debug = "insertRow start";
  boolean rowsAreSafe = false;
  try
  {
    // Unique connections and Batch inserts don't mix when you want to roll back on certain databases.
    // That's why we disable the batch insert in that case.
    boolean useBatchInsert = batch && getDatabaseMetaData().supportsBatchUpdates() && databaseMeta.supportsBatchUpdates() && Const.isEmpty(connectionGroup);

    if (!isAutoCommit())
    {
      if (useBatchInsert)
      {
        debug = "insertRow add batch";
        ps.addBatch(); // Add the batch, but don't forget to run the batch
      }
      else
      {
        debug = "insertRow exec update";
        ps.executeUpdate();
      }
    }
    else
    {
      ps.executeUpdate();
    }

    written++;

    if (handleCommit) // some steps handle the commit themselves (see e.g. TableOutput step)
    {
      if (!isAutoCommit() && (written % commitsize) == 0)
      {
        if (useBatchInsert)
        {
          debug = "insertRow executeBatch commit";
          ps.executeBatch();
          commit();
          ps.clearBatch();
        }
        else
        {
          debug = "insertRow normal commit";
          commit();
        }
        rowsAreSafe = true;
      }
    }
    return rowsAreSafe;
  }
  catch (BatchUpdateException ex)
  {
    KettleDatabaseBatchException kdbe = new KettleDatabaseBatchException("Error updating batch", ex);
    kdbe.setUpdateCounts(ex.getUpdateCounts());
    List<Exception> exceptions = new ArrayList<Exception>();
    // FIX: walk the exception chain with the same "same object returned forever"
    // guard that emptyAndCommit()/insertFinished() use — some JDBC drivers keep
    // returning the identical exception from getNextException(), which made the
    // previous do/while loop spin (and grow the list) forever.
    SQLException nextException = ex;
    SQLException oldException = null;
    while (nextException != null && oldException != nextException)
    {
      exceptions.add(nextException);
      oldException = nextException;
      nextException = nextException.getNextException();
    }
    kdbe.setExceptionsList(exceptions);
    throw kdbe;
  }
  catch (SQLException ex)
  {
    throw new KettleDatabaseException("Error inserting/updating row", ex);
  }
  catch (Exception e)
  {
    throw new KettleDatabaseException("Unexpected error inserting/updating row in part ["+debug+"]", e);
  }
}
/**
 * Clears the batch of the cached insert prepared statement.
 *
 * @deprecated use {@link #clearBatch(PreparedStatement)} directly
 * @throws KettleDatabaseException when clearing the batch fails.
 */
public void clearInsertBatch() throws KettleDatabaseException {
  clearBatch(prepStatementInsert);
}
/**
 * Discard any statements queued on the given prepared statement's batch.
 *
 * @param preparedStatement the statement whose batch should be cleared
 * @throws KettleDatabaseException when the JDBC clearBatch call fails.
 */
public void clearBatch(PreparedStatement preparedStatement) throws KettleDatabaseException {
  try {
    preparedStatement.clearBatch();
  } catch (SQLException e) {
    throw new KettleDatabaseException("Unable to clear batch for prepared statement", e);
  }
}
/**
 * Flush/commit and close the cached insert prepared statement, then clear its
 * reference.
 *
 * @param batch true when batch processing was used
 * @throws KettleDatabaseException when flushing or closing fails.
 */
public void insertFinished(boolean batch) throws KettleDatabaseException {
  insertFinished(prepStatementInsert, batch);
  prepStatementInsert = null;
}
/**
 * Flush any pending batch, commit, and close the prepared statement of the
 * insert statement.
 *
 * @param ps The prepared statement to empty and close.
 * @param batch true if you are using batch processing
 * @param batchCounter The number of rows on the batch queue
 * @throws KettleDatabaseException when the flush/commit/close fails; batch
 *         failures are raised as KettleDatabaseBatchException.
 */
public void emptyAndCommit(PreparedStatement ps, boolean batch, int batchCounter) throws KettleDatabaseException {
try
{
if (ps!=null)
{
if (!isAutoCommit())
{
// Execute the batch or just perform a commit.
// batchCounter>0 guards against executing an EMPTY batch — some databases
// error on that and would then require a rollback before continuing.
if (batch && getDatabaseMetaData().supportsBatchUpdates() && batchCounter>0)
{
// The problem with the batch counters is that you can't just execute the current batch.
// Certain databases have a problem if you execute the batch and if there are no statements in it.
// You can't just catch the exception either because you would have to roll back on certain databases before you can then continue to do anything.
// That leaves the task of keeping track of the number of rows up to our responsibility.
ps.executeBatch();
commit();
}
else
{
commit();
}
}
// Let's not forget to close the prepared statement.
ps.close();
}
}
catch(BatchUpdateException ex)
{
KettleDatabaseBatchException kdbe = new KettleDatabaseBatchException("Error updating batch", ex);
kdbe.setUpdateCounts(ex.getUpdateCounts());
List<Exception> exceptions = new ArrayList<Exception>();
SQLException nextException = ex.getNextException();
SQLException oldException = null;
// This construction is specifically done for some JDBC drivers, these drivers
// always return the same exception on getNextException() (and thus go into an infinite loop).
// So it's not "equals" but != (comments from Sven Boden).
while ( (nextException != null) && (oldException != nextException) )
{
exceptions.add(nextException);
oldException = nextException;
nextException = nextException.getNextException();
}
kdbe.setExceptionsList(exceptions);
throw kdbe;
}
catch(SQLException ex)
{
throw new KettleDatabaseException("Unable to empty ps and commit connection.", ex);
}
}
/**
 * Flush any pending batch, commit, and close the prepared statement of the
 * insert statement.
 *
 * @param ps The prepared statement to empty and close.
 * @param batch true if you are using batch processing (typically true for this method)
 * @throws KettleDatabaseException when the flush/commit/close fails; batch
 *         failures are raised as KettleDatabaseBatchException.
 *
 * @deprecated use emptyAndCommit() instead (pass in the number of rows left in the batch):
 *             unlike emptyAndCommit(), this executes the batch even when it is
 *             EMPTY, which some databases reject.
 */
public void insertFinished(PreparedStatement ps, boolean batch) throws KettleDatabaseException
{
try
{
if (ps!=null)
{
if (!isAutoCommit())
{
// Execute the batch or just perform a commit.
if (batch && getDatabaseMetaData().supportsBatchUpdates())
{
// The problem with the batch counters is that you can't just execute the current batch.
// Certain databases have a problem if you execute the batch and if there are no statements in it.
// You can't just catch the exception either because you would have to roll back on certain databases before you can then continue to do anything.
// That leaves the task of keeping track of the number of rows up to our responsibility.
ps.executeBatch();
commit();
}
else
{
commit();
}
}
// Let's not forget to close the prepared statement.
ps.close();
}
}
catch(BatchUpdateException ex)
{
KettleDatabaseBatchException kdbe = new KettleDatabaseBatchException("Error updating batch", ex);
kdbe.setUpdateCounts(ex.getUpdateCounts());
List<Exception> exceptions = new ArrayList<Exception>();
SQLException nextException = ex.getNextException();
SQLException oldException = null;
// This construction is specifically done for some JDBC drivers, these drivers
// always return the same exception on getNextException() (and thus go into an infinite loop).
// So it's not "equals" but != (comments from Sven Boden).
while ( (nextException != null) && (oldException != nextException) )
{
exceptions.add(nextException);
oldException = nextException;
nextException = nextException.getNextException();
}
kdbe.setExceptionsList(exceptions);
throw kdbe;
}
catch(SQLException ex)
{
throw new KettleDatabaseException("Unable to commit connection after having inserted rows.", ex);
}
}
/**
 * Execute an SQL statement on the database connection (has to be open).
 *
 * @param sql The SQL to execute
 * @return a Result object indicating the number of lines read, deleted, inserted, updated, ...
 * @throws KettleDatabaseException in case anything goes wrong.
 */
public Result execStatement(String sql) throws KettleDatabaseException {
  return execStatement(sql, null, null);
}
/**
 * Execute an SQL statement, optionally with bound parameters, and tally the
 * affected row counts into a Result. A statement that produces a ResultSet is
 * executed but its rows are ignored — use openQuery() to read rows.
 * DDL (ALTER/DROP/CREATE TABLE) additionally clears the DBCache entry for this
 * database.
 *
 * @param sql the SQL to execute
 * @param params parameter metadata, or null for a plain statement
 * @param data parameter values matching params
 * @return a Result with nr of lines inserted/updated/deleted filled in
 * @throws KettleDatabaseException in case anything goes wrong.
 */
public Result execStatement(String sql, RowMetaInterface params, Object[] data) throws KettleDatabaseException
{
  Result result = new Result();
  try
  {
    boolean resultSet;
    int count;
    if (params != null)
    {
      PreparedStatement prep_stmt = connection.prepareStatement(databaseMeta.stripCR(sql));
      try
      {
        setValues(params, data, prep_stmt); // set the parameters!
        resultSet = prep_stmt.execute();
        count = prep_stmt.getUpdateCount();
      }
      finally
      {
        // FIX: close in finally — the statement leaked when setValues()/execute() threw.
        prep_stmt.close();
      }
    }
    else
    {
      String sqlStripped = databaseMeta.stripCR(sql);
      Statement stmt = connection.createStatement();
      try
      {
        resultSet = stmt.execute(sqlStripped);
        count = stmt.getUpdateCount();
      }
      finally
      {
        // FIX: close in finally — the statement leaked when execute() threw.
        stmt.close();
      }
    }

    // Hoisted: previously recomputed for every startsWith() check.
    String upperSql = sql.toUpperCase();

    if (resultSet)
    {
      // the result is a resultset, but we don't do anything with it!
      // You should have called something else!
    }
    else
    {
      if (count > 0)
      {
        if (upperSql.startsWith("INSERT")) result.setNrLinesOutput(count);
        if (upperSql.startsWith("UPDATE")) result.setNrLinesUpdated(count);
        if (upperSql.startsWith("DELETE")) result.setNrLinesDeleted(count);
      }
    }

    // See if a cache needs to be cleared...
    if (upperSql.startsWith("ALTER TABLE") ||
        upperSql.startsWith("DROP TABLE") ||
        upperSql.startsWith("CREATE TABLE"))
    {
      DBCache.getInstance().clear(databaseMeta.getName());
    }
  }
  catch (SQLException ex)
  {
    throw new KettleDatabaseException("Couldn't execute SQL: "+sql+Const.CR, ex);
  }
  catch (Exception e)
  {
    throw new KettleDatabaseException("Unexpected error executing SQL: "+Const.CR, e);
  }
  return result;
}
/**
 * Execute a series of SQL statements, separated by ;
 *
 * We are already connected...
 * Multiple statements have to be split into parts
 * We use the ";" to separate statements...
 *
 * We keep the results in Result object from Jobs
 *
 * The splitter skips over single-quoted and double-quoted literals and over
 * "--" line comments so semicolons inside them do not terminate a statement.
 * SELECT statements are executed via openQuery() and their rows counted;
 * everything else goes through execStatement().
 *
 * @param script The SQL script to be execute
 * @throws KettleDatabaseException In case an error occurs
 * @return A result with counts of the number or records updates, inserted, deleted or read.
 */
public Result execStatements(String script) throws KettleDatabaseException
{
Result result = new Result();
String all = script;
int from=0;
int to=0;
int length = all.length();
int nrstats = 0;
// Hand-rolled scanner: 'from' marks the start of the current statement,
// 'to' advances one character at a time.
while (to<length)
{
char c = all.charAt(to);
if (c=='"')
{
// Skip a double-quoted literal; c is reset so the loop below runs at least once.
c=' ';
while (to<length && c!='"') { to++; c=all.charAt(to); }
}
else
if (c=='\'') // skip until next '
{
c=' ';
while (to<length && c!='\'') { to++; c=all.charAt(to); }
}
else
if (all.substring(to).startsWith("--")) // -- means: ignore comment until end of line...
{
while (to<length && c!='\n' && c!='\r') { to++; c=all.charAt(to); }
}
if (c==';' || to>=length-1) // end of statement
{
if (to>=length-1) to++; // grab last char also!
String stat;
if (to<=length) stat = all.substring(from, to);
else stat = all.substring(from);
// If it ends with a ; remove that ;
// Oracle for example can't stand it when this happens...
if (stat.length()>0 && stat.charAt(stat.length()-1)==';')
{
stat = stat.substring(0,stat.length()-1);
}
if (!Const.onlySpaces(stat))
{
String sql=Const.trim(stat);
if (sql.toUpperCase().startsWith("SELECT"))
{
// A Query
if(log.isDetailed()) log.logDetailed("launch SELECT statement: "+Const.CR+sql);
nrstats++;
ResultSet rs = null;
try
{
rs = openQuery(sql);
if (rs!=null)
{
// Drain the result set just to count the lines read.
Object[] row = getRow(rs);
while (row!=null)
{
result.setNrLinesRead(result.getNrLinesRead()+1);
if (log.isDetailed()) log.logDetailed(rowMeta.getString(row));
row = getRow(rs);
}
}
else
{
if (log.isDebug()) log.logDebug("Error executing query: "+Const.CR+sql);
}
} catch (KettleValueException e) {
throw new KettleDatabaseException(e); // just pass the error upwards.
}
finally
{
try
{
if ( rs != null ) rs.close();
}
catch (SQLException ex )
{
// Best effort close; only logged.
if (log.isDebug()) log.logDebug("Error closing query: "+Const.CR+sql);
}
}
}
else // any kind of statement
{
if(log.isDetailed()) log.logDetailed("launch DDL statement: "+Const.CR+sql);
// A DDL statement
nrstats++;
Result res = execStatement(sql);
result.add(res);
}
}
to++;
from=to;
}
else
{
to++;
}
}
if(log.isDetailed()) log.logDetailed(nrstats+" statement"+(nrstats==1?"":"s")+" executed");
return result;
}
/**
 * Open a parameterless query on the database.
 *
 * @param sql the SELECT statement to execute
 * @return a JDBC ResultSet positioned before the first row
 * @throws KettleDatabaseException when something goes wrong with the query.
 */
public ResultSet openQuery(String sql) throws KettleDatabaseException {
  return openQuery(sql, null, null);
}
/**
 * Open a query on the database with a set of parameters stored in a Kettle Row.
 * Uses forward-only fetching.
 *
 * @param sql The SQL to launch with question marks (?) as placeholders for the parameters
 * @param params The parameters or null if no parameters are used.
 * @param data the parameter data to open the query with
 * @return A JDBC ResultSet
 * @throws KettleDatabaseException when something goes wrong with the query.
 */
public ResultSet openQuery(String sql, RowMetaInterface params, Object[] data) throws KettleDatabaseException {
  return openQuery(sql, params, data, ResultSet.FETCH_FORWARD);
}
/**
 * Open a parameterized query with an explicit fetch direction and eager
 * (non-lazy) value conversion.
 *
 * @param sql the SQL with '?' placeholders
 * @param params parameter metadata, or null
 * @param data parameter values matching params
 * @param fetch_mode a ResultSet.FETCH_* constant
 * @return a JDBC ResultSet
 * @throws KettleDatabaseException when something goes wrong with the query.
 */
public ResultSet openQuery(String sql, RowMetaInterface params, Object[] data, int fetch_mode) throws KettleDatabaseException {
  return openQuery(sql, params, data, fetch_mode, false);
}
/**
 * Open a query, caching the statement handle on this object ({@code pstmt} for
 * parameterized queries, {@code sel_stmt} otherwise) and populating the
 * {@code rowMeta} field from the result's metadata.
 *
 * @param sql the SQL with '?' placeholders when params is non-null
 * @param params parameter metadata, or null for a plain statement
 * @param data parameter values matching params
 * @param fetch_mode a ResultSet.FETCH_* constant
 * @param lazyConversion true to defer value conversion of result columns
 * @return a JDBC ResultSet
 * @throws KettleDatabaseException when preparation or execution fails; the
 *         message includes the last "debug" checkpoint reached.
 */
public ResultSet openQuery(String sql, RowMetaInterface params, Object[] data, int fetch_mode, boolean lazyConversion) throws KettleDatabaseException
{
ResultSet res;
String debug = "Start";
// Create a Statement
try
{
if (params!=null)
{
debug = "P create prepared statement (con==null? "+(connection==null)+")";
pstmt = connection.prepareStatement(databaseMeta.stripCR(sql), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
debug = "P Set values";
setValues(params, data); // set the dates etc!
if (canWeSetFetchSize(pstmt) )
{
debug = "P Set fetchsize";
int fs = Const.FETCH_SIZE<=pstmt.getMaxRows()?pstmt.getMaxRows():Const.FETCH_SIZE;
{
// MySQL streaming mode requires the magic Integer.MIN_VALUE fetch size.
if (databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta && databaseMeta.isStreamingResults())
{
pstmt.setFetchSize(Integer.MIN_VALUE);
}
else
pstmt.setFetchSize(fs);
}
debug = "P Set fetch direction";
pstmt.setFetchDirection(fetch_mode);
}
debug = "P Set max rows";
if (rowlimit>0 && databaseMeta.supportsSetMaxRows()) pstmt.setMaxRows(rowlimit);
debug = "exec query";
res = pstmt.executeQuery();
}
else
{
debug = "create statement";
sel_stmt = connection.createStatement();
if (canWeSetFetchSize(sel_stmt))
{
debug = "Set fetchsize";
int fs = Const.FETCH_SIZE<=sel_stmt.getMaxRows()?sel_stmt.getMaxRows():Const.FETCH_SIZE;
if (databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta && databaseMeta.isStreamingResults())
{
sel_stmt.setFetchSize(Integer.MIN_VALUE);
}
else
{
sel_stmt.setFetchSize(fs);
}
debug = "Set fetch direction";
sel_stmt.setFetchDirection(fetch_mode);
}
debug = "Set max rows";
if (rowlimit>0 && databaseMeta.supportsSetMaxRows()) sel_stmt.setMaxRows(rowlimit);
debug = "exec query";
res=sel_stmt.executeQuery(databaseMeta.stripCR(sql));
}
debug = "openQuery : get rowinfo";
// MySQL Hack only. It seems too much for the cursor type of operation on MySQL, to have another cursor opened
// to get the length of a String field. So, on MySQL, we ingore the length of Strings in result rows.
rowMeta = getRowInfo(res.getMetaData(), databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta, lazyConversion);
}
catch(SQLException ex)
{
throw new KettleDatabaseException("An error occurred executing SQL: "+Const.CR+sql, ex);
}
catch(Exception e)
{
log.logError("ERROR executing query: "+e.toString());
log.logError("ERROR in part: "+debug);
throw new KettleDatabaseException("An error occurred executing SQL in part ["+debug+"]:"+Const.CR+sql, e);
}
return res;
}
/**
 * Determines whether it is safe to call setFetchSize() on the given statement.
 * A fetch size may be set when the database supports it and either a max-rows
 * limit is active, the database is PostgreSQL, or it is MySQL running in
 * streaming-results mode.
 *
 * @param statement the statement to inspect
 * @return true when a fetch size may be applied to the statement
 * @throws SQLException when the statement's max-rows value cannot be read
 */
private boolean canWeSetFetchSize(Statement statement) throws SQLException
{
    if (!databaseMeta.isFetchSizeSupported()) {
        return false;
    }
    if (statement.getMaxRows() > 0) {
        return true;
    }
    if (databaseMeta.getDatabaseInterface() instanceof PostgreSQLDatabaseMeta) {
        return true;
    }
    return databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta
        && databaseMeta.isStreamingResults();
}
/**
 * Executes an already-prepared statement after binding the given parameter values,
 * applying fetch-size and max-rows settings where supported.
 * As a side effect the row metadata of the result is stored in the {@code rowMeta} field.
 *
 * @param ps the prepared statement to execute
 * @param params metadata describing the parameter values
 * @param data the parameter values to bind
 * @return the open ResultSet
 * @throws KettleDatabaseException when binding or executing the statement fails
 */
public ResultSet openQuery(PreparedStatement ps, RowMetaInterface params, Object[] data) throws KettleDatabaseException
{
ResultSet res;
// "debug" tracks the current phase so the catch blocks can report where the failure occurred.
String debug = "Start";
// Create a Statement
try
{
debug = "OQ Set values";
setValues(params, data, ps); // set the parameters!
if (canWeSetFetchSize(ps))
{
debug = "OQ Set fetchsize";
// Use the larger of FETCH_SIZE and the statement's current max-rows as the fetch size.
int fs = Const.FETCH_SIZE<=ps.getMaxRows()?ps.getMaxRows():Const.FETCH_SIZE;
// MySQL only streams results when the fetch size is Integer.MIN_VALUE.
if (databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta && databaseMeta.isStreamingResults())
{
ps.setFetchSize(Integer.MIN_VALUE);
}
else
{
ps.setFetchSize(fs);
}
debug = "OQ Set fetch direction";
ps.setFetchDirection(ResultSet.FETCH_FORWARD);
}
debug = "OQ Set max rows";
if (rowlimit>0 && databaseMeta.supportsSetMaxRows()) ps.setMaxRows(rowlimit);
debug = "OQ exec query";
res = ps.executeQuery();
debug = "OQ getRowInfo()";
// rowinfo = getRowInfo(res.getMetaData());
// MySQL Hack only. It seems too much for the cursor type of operation on MySQL, to have another cursor opened
// to get the length of a String field. So, on MySQL, we ignore the length of Strings in result rows.
rowMeta = getRowInfo(res.getMetaData(), databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta, false);
}
catch(SQLException ex)
{
throw new KettleDatabaseException("ERROR executing query in part["+debug+"]", ex);
}
catch(Exception e)
{
throw new KettleDatabaseException("ERROR executing query in part["+debug+"]", e);
}
return res;
}
/**
 * Retrieves the field metadata of the given table by probing it with a
 * database-generated "query fields" statement.
 *
 * @param tablename the name of the table to inspect
 * @return the row metadata describing the table's fields
 * @throws KettleDatabaseException when the probe query fails
 */
public RowMetaInterface getTableFields(String tablename) throws KettleDatabaseException
{
    String sql = databaseMeta.getSQLQueryFields(tablename);
    return getQueryFields(sql, false);
}
/**
 * Determines the field metadata returned by the given SQL query.
 * Convenience overload that passes no parameter metadata or data.
 *
 * @param sql the query to inspect
 * @param param true when the query contains parameter placeholders
 * @return the row metadata of the query result
 * @throws KettleDatabaseException when the field layout cannot be determined
 */
public RowMetaInterface getQueryFields(String sql, boolean param) throws KettleDatabaseException
{
    return getQueryFields(sql, param, null, null);
}
/**
 * Verifies that the specified table exists by attempting to read from it.
 *
 * @param tablename The name of the table to check.<br>
 * This is supposed to be the properly quoted name of the table or the complete
 * schema-table name combination.
 * @return true if the table exists, false if it doesn't.
 * @throws KettleDatabaseException when the existence check itself fails unexpectedly
 */
public boolean checkTableExists(String tablename) throws KettleDatabaseException
{
    try
    {
        if (log.isDebug()) {
            log.logDebug("Checking if table ["+tablename+"] exists!");
        }

        // Just try to read from the table: success means it exists, a database
        // error is interpreted as "not there".
        String sql = databaseMeta.getSQLTableExists(tablename);
        try
        {
            getOneRow(sql);
        }
        catch(KettleDatabaseException e)
        {
            return false;
        }
        return true;
    }
    catch(Exception e)
    {
        throw new KettleDatabaseException("Unable to check if table ["+tablename+"] exists on connection ["+databaseMeta.getName()+"]", e);
    }
}
/**
 * Verifies that the specified column exists by attempting to read it from the table.
 *
 * @param columnname The name of the column to check.
 * @param tablename The name of the table to check.<br>
 * This is supposed to be the properly quoted name of the table or the complete
 * schema-table name combination.
 * @return true if the column exists, false if it doesn't.
 * @throws KettleDatabaseException when the existence check itself fails unexpectedly
 */
public boolean checkColumnExists(String columnname, String tablename) throws KettleDatabaseException
{
    try
    {
        if (log.isDebug()) {
            log.logDebug("Checking if column [" + columnname + "] exists in table ["+tablename+"] !");
        }

        // Just try to read the column: success means it exists, a database
        // error is interpreted as "not there".
        String sql = databaseMeta.getSQLColumnExists(columnname,tablename);
        try
        {
            getOneRow(sql);
        }
        catch(KettleDatabaseException e)
        {
            return false;
        }
        return true;
    }
    catch(Exception e)
    {
        throw new KettleDatabaseException("Unable to check if column [" + columnname + "] exists in table ["+tablename+"] on connection ["+databaseMeta.getName()+"]", e);
    }
}
/**
 * Check whether the sequence exists (databases with sequence support only, e.g. Oracle).
 * Convenience overload that checks without a schema qualifier.
 *
 * @param sequenceName The name of the sequence
 * @return true if the sequence exists.
 * @throws KettleDatabaseException when the check fails unexpectedly
 */
public boolean checkSequenceExists(String sequenceName) throws KettleDatabaseException
{
    return checkSequenceExists(null, sequenceName);
}
/**
 * Check whether the sequence exists (databases with sequence support only, e.g. Oracle).
 *
 * @param schemaName the schema qualifying the sequence, or null
 * @param sequenceName The name of the sequence
 * @return true if the sequence exists (always false when the database has no sequence support).
 * @throws KettleDatabaseException when the data-dictionary lookup fails unexpectedly
 */
public boolean checkSequenceExists(String schemaName, String sequenceName) throws KettleDatabaseException
{
    boolean retval = false;

    if (!databaseMeta.supportsSequences()) return retval;

    String schemaSequence = databaseMeta.getQuotedSchemaTableCombination(schemaName, sequenceName);
    try
    {
        // Get the info from the data dictionary...
        String sql = databaseMeta.getSQLSequenceExists(schemaSequence);
        ResultSet res = openQuery(sql);
        if (res != null)
        {
            // Close the query even when reading the row fails, so the
            // result set and its statement are not leaked.
            try
            {
                Object[] row = getRow(res);
                if (row != null)
                {
                    retval = true;
                }
            }
            finally
            {
                closeQuery(res);
            }
        }
    }
    catch(Exception e)
    {
        throw new KettleDatabaseException("Unexpected error checking whether or not sequence ["+schemaSequence+"] exists", e);
    }

    return retval;
}
/**
 * Check if an index on certain fields in a table exists.
 * Convenience overload without a schema qualifier.
 *
 * @param tableName The table on which the index is checked
 * @param idx_fields The fields on which the index is checked
 * @return True if the index exists
 * @throws KettleDatabaseException when the check fails unexpectedly
 */
public boolean checkIndexExists(String tableName, String idx_fields[]) throws KettleDatabaseException
{
    return checkIndexExists(null, tableName, idx_fields);
}
/**
 * Check if an index on certain fields in a table exists.
 * @param schemaName The schema qualifying the table, or null
 * @param tableName The table on which the index is checked
 * @param idx_fields The fields on which the index is checked
 * @return True if the index exists
 */
public boolean checkIndexExists(String schemaName, String tableName, String idx_fields[]) throws KettleDatabaseException
{
String tablename = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
// The table itself must exist before we bother checking the index.
if (!checkTableExists(tablename)) return false;
if(log.isDebug()) log.logDebug("CheckIndexExists() tablename = "+tablename+" type = "+databaseMeta.getPluginId());
// Delegate the actual check to the database-specific implementation.
return databaseMeta.getDatabaseInterface().checkIndexExists(this, schemaName, tableName, idx_fields);
}
/**
 * Generates the SQL to create an index on a table.
 * Convenience overload without a schema qualifier.
 *
 * @param tablename the (already quoted) table name
 * @param indexname the name of the index to create
 * @param idx_fields the fields making up the index
 * @param tk true when the index covers a technical key
 * @param unique true to create a UNIQUE index
 * @param bitmap true to create a BITMAP index where supported
 * @param semi_colon true to terminate the statement with a semicolon
 * @return the CREATE INDEX statement
 */
public String getCreateIndexStatement(String tablename, String indexname, String idx_fields[], boolean tk, boolean unique, boolean bitmap, boolean semi_colon)
{
    return getCreateIndexStatement(null, tablename, indexname, idx_fields, tk, unique, bitmap, semi_colon);
}
/**
 * Generates the SQL to create an index on the given table.
 *
 * @param schemaname the schema name; currently unused because the table name is
 *                   assumed to be pre-quoted (and may already include the schema)
 * @param tablename the (already quoted) table name, possibly including the schema
 * @param indexname the name of the index to create
 * @param idx_fields the fields making up the index
 * @param tk true when the index covers a technical key (forces UNIQUE on Sybase)
 * @param unique true to create a UNIQUE index
 * @param bitmap true to create a BITMAP index (only when the database supports it)
 * @param semi_colon true to terminate the statement with a semicolon
 * @return the CREATE INDEX statement
 */
public String getCreateIndexStatement(String schemaname, String tablename, String indexname, String idx_fields[], boolean tk, boolean unique, boolean bitmap, boolean semi_colon)
{
    // Build the statement with a StringBuilder instead of repeated String concatenation.
    StringBuilder cr_index = new StringBuilder(128);
    cr_index.append("CREATE ");

    // Sybase requires technical-key indexes to be unique.
    if (unique || (tk && databaseMeta.getDatabaseInterface() instanceof SybaseDatabaseMeta)) {
        cr_index.append("UNIQUE ");
    }

    if (bitmap && databaseMeta.supportsBitmapIndex()) {
        cr_index.append("BITMAP ");
    }

    cr_index.append("INDEX ").append(databaseMeta.quoteField(indexname)).append(Const.CR).append(" ");
    cr_index.append("ON ");
    // assume table has already been quoted (and possibly includes schema)
    cr_index.append(tablename);
    cr_index.append(Const.CR).append("( ").append(Const.CR);
    for (int i = 0; i < idx_fields.length; i++) {
        cr_index.append(i > 0 ? ", " : " ");
        cr_index.append(databaseMeta.quoteField(idx_fields[i])).append(Const.CR);
    }
    cr_index.append(")").append(Const.CR);

    // Oracle optionally places indexes in a dedicated tablespace.
    if (databaseMeta.getDatabaseInterface() instanceof OracleDatabaseMeta
            && databaseMeta.getIndexTablespace() != null && databaseMeta.getIndexTablespace().length() > 0) {
        cr_index.append("TABLESPACE ").append(databaseMeta.quoteField(databaseMeta.getIndexTablespace()));
    }

    if (semi_colon) {
        cr_index.append(";").append(Const.CR);
    }

    return cr_index.toString();
}
/**
 * Generates the SQL to create a sequence, using numeric bounds and no schema qualifier.
 *
 * @param sequence the sequence name
 * @param start_at the first value the sequence produces
 * @param increment_by the step between consecutive values
 * @param max_value the maximum value of the sequence
 * @param semi_colon true to terminate the statement with a semicolon
 * @return the CREATE SEQUENCE statement
 */
public String getCreateSequenceStatement(String sequence, long start_at, long increment_by, long max_value, boolean semi_colon)
{
    return getCreateSequenceStatement(null, sequence, Long.toString(start_at), Long.toString(increment_by), Long.toString(max_value), semi_colon);
}
/**
 * Generates the SQL to create a sequence, using string bounds and no schema qualifier.
 *
 * @param sequence the sequence name
 * @param start_at the first value the sequence produces
 * @param increment_by the step between consecutive values
 * @param max_value the maximum value of the sequence (may be null)
 * @param semi_colon true to terminate the statement with a semicolon
 * @return the CREATE SEQUENCE statement
 */
public String getCreateSequenceStatement(String sequence, String start_at, String increment_by, String max_value, boolean semi_colon)
{
    return getCreateSequenceStatement(null, sequence, start_at, increment_by, max_value, semi_colon);
}
/**
 * Generates the SQL to create a sequence in a schema, using numeric bounds.
 *
 * @param schemaName the schema qualifying the sequence, or null
 * @param sequence the sequence name
 * @param start_at the first value the sequence produces
 * @param increment_by the step between consecutive values
 * @param max_value the maximum value of the sequence
 * @param semi_colon true to terminate the statement with a semicolon
 * @return the CREATE SEQUENCE statement
 */
public String getCreateSequenceStatement(String schemaName, String sequence, long start_at, long increment_by, long max_value, boolean semi_colon)
{
    return getCreateSequenceStatement(schemaName, sequence, Long.toString(start_at), Long.toString(increment_by), Long.toString(max_value), semi_colon);
}
/**
 * Generates the SQL to create a sequence.
 * Returns an empty string when the sequence name is empty or when the database
 * does not support sequences.
 *
 * @param schemaName the schema qualifying the sequence, or null
 * @param sequenceName the sequence name
 * @param start_at the first value the sequence produces
 * @param increment_by the step between consecutive values
 * @param max_value the maximum value; "-1" means "no maximum" where the database
 *                  supports a NOMAXVALUE clause; null omits the clause entirely
 * @param semi_colon true to terminate the statement with a semicolon
 * @return the CREATE SEQUENCE statement, or "" when not applicable
 */
public String getCreateSequenceStatement(String schemaName, String sequenceName, String start_at, String increment_by, String max_value, boolean semi_colon)
{
    if (Const.isEmpty(sequenceName) || !databaseMeta.supportsSequences()) {
        return "";
    }

    String schemaSequence = databaseMeta.getQuotedSchemaTableCombination(schemaName, sequenceName);

    StringBuilder cr_seq = new StringBuilder(128);
    cr_seq.append("CREATE SEQUENCE ").append(schemaSequence).append(" ").append(Const.CR); // Works for both Oracle and PostgreSQL :-)
    cr_seq.append("START WITH ").append(start_at).append(" ").append(Const.CR);
    cr_seq.append("INCREMENT BY ").append(increment_by).append(" ").append(Const.CR);
    if (max_value != null) {
        // "-1" means there is no maxvalue; must be handled differently by DB2 / AS400
        if (databaseMeta.supportsSequenceNoMaxValueOption() && max_value.trim().equals("-1")) {
            cr_seq.append("NOMAXVALUE").append(Const.CR);
        } else {
            // set the max value
            cr_seq.append("MAXVALUE ").append(max_value).append(Const.CR);
        }
    }

    if (semi_colon) {
        cr_seq.append(";").append(Const.CR);
    }

    return cr_seq.toString();
}
/**
 * Determines the field metadata returned by the given SQL query.
 * Results are looked up in (and stored into) the shared DBCache. When the database
 * supports metadata retrieval on prepared statements, the layout is read without
 * executing the query; otherwise a fallback that executes a limited query is used.
 *
 * @param sql the query to inspect
 * @param param true when the query contains parameter placeholders
 * @param inform optional metadata of the parameter values
 * @param data optional parameter values
 * @return the row metadata of the query result, or null when not connected and not cached
 * @throws KettleDatabaseException when the field layout cannot be determined
 */
public RowMetaInterface getQueryFields(String sql, boolean param, RowMetaInterface inform, Object[] data) throws KettleDatabaseException
{
RowMetaInterface fields;
DBCache dbcache = DBCache.getInstance();
DBCacheEntry entry=null;
// Check the cache first!
if (dbcache!=null)
{
entry = new DBCacheEntry(databaseMeta.getName(), sql);
fields = dbcache.get(entry);
if (fields!=null)
{
return fields;
}
}
if (connection==null) return null; // Cache test without connect.
// No cache entry found
// The new method of retrieving the query fields fails on Oracle because
// they failed to implement the getMetaData method on a prepared statement. (!!!)
// Even recent drivers like 10.2 fail because of it.
// There might be other databases that don't support it (we have no knowledge of this at the time of writing).
// If we discover other RDBMSs, we will create an interface for it.
// For now, we just try to get the field layout on the re-bound in the exception block below.
if (databaseMeta.supportsPreparedStatementMetadataRetrieval()) {
// On with the regular program.
PreparedStatement preparedStatement = null;
try
{
// Preparing (not executing) the statement is enough to obtain the metadata.
preparedStatement = connection.prepareStatement(databaseMeta.stripCR(sql), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
ResultSetMetaData rsmd = preparedStatement.getMetaData();
fields = getRowInfo(rsmd, false, false);
}
catch(Exception e)
{
// Any failure here (driver quirks etc.) falls back to executing a limited query.
fields = getQueryFieldsFallback(sql, param, inform, data);
}
finally
{
if (preparedStatement!=null)
{
try
{
preparedStatement.close();
}
catch (SQLException e)
{
throw new KettleDatabaseException("Unable to close prepared statement after determining SQL layout", e);
}
}
}
} else {
/*
databaseMeta.getDatabaseType()==DatabaseMeta.TYPE_DATABASE_SYBASEIQ
)
{
*/
fields=getQueryFieldsFallback(sql, param, inform, data);
}
// Store in cache!!
if (dbcache!=null && entry!=null)
{
if (fields!=null)
{
dbcache.put(entry, fields);
}
}
return fields;
}
/**
 * Fallback that determines the query's field layout by actually executing it,
 * limited to (at most) one row where the database supports max-rows.
 * Used when prepared-statement metadata retrieval is unsupported or failed.
 *
 * NOTE(review): when executeQuery or getRowInfo throws, the statement/result set
 * opened here are not closed before the exception is rethrown — possible resource
 * leak on the error path; confirm whether closeQuery() is relied upon to clean up.
 *
 * @param sql the query to inspect
 * @param param true when the query contains parameter placeholders
 * @param inform optional metadata of the parameter values
 * @param data optional parameter values
 * @return the row metadata of the query result
 * @throws KettleDatabaseException when the field layout cannot be determined
 */
private RowMetaInterface getQueryFieldsFallback(String sql, boolean param, RowMetaInterface inform, Object[] data) throws KettleDatabaseException
{
RowMetaInterface fields;
try
{
if (inform==null
// Hack for MSSQL jtds 1.2 when using xxx NOT IN yyy we have to use a prepared statement (see BugID 3214)
&& databaseMeta.getDatabaseInterface() instanceof MSSQLServerDatabaseMeta )
{
// Plain-statement path (stored in the sel_stmt field).
sel_stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
if (databaseMeta.isFetchSizeSupported() && sel_stmt.getMaxRows()>=1)
{
// MySQL only streams results when the fetch size is Integer.MIN_VALUE.
if (databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta) {
sel_stmt.setFetchSize(Integer.MIN_VALUE);
} else {
sel_stmt.setFetchSize(1);
}
}
if (databaseMeta.supportsSetMaxRows()) sel_stmt.setMaxRows(1);
ResultSet r=sel_stmt.executeQuery(databaseMeta.stripCR(sql));
fields = getRowInfo(r.getMetaData(), false, false);
r.close();
sel_stmt.close();
sel_stmt=null;
}
else
{
PreparedStatement ps = connection.prepareStatement(databaseMeta.stripCR(sql));
if (param)
{
// Resolve parameter metadata: caller-supplied first, then driver, then SQL analysis.
RowMetaInterface par = inform;
if (par==null || par.isEmpty()) par = getParameterMetaData(ps);
if (par==null || par.isEmpty()) par = getParameterMetaData(sql, inform, data);
setValues(par, data, ps);
}
ResultSet r = ps.executeQuery();
// Metadata is read from the statement (not the result set) on this path.
fields=getRowInfo(ps.getMetaData(), false, false);
r.close();
ps.close();
}
}
catch(Exception ex)
{
throw new KettleDatabaseException("Couldn't get field info from ["+sql+"]"+Const.CR, ex);
}
return fields;
}
/**
 * Closes the result set and any statements this class opened for the query
 * (the {@code sel_stmt} and {@code pstmt} fields).
 * Each resource is closed independently so a failure closing one does not
 * leak the others; the first error encountered is rethrown afterwards.
 *
 * @param res the result set to close, may be null
 * @throws KettleDatabaseException when any of the resources fails to close
 */
public void closeQuery(ResultSet res) throws KettleDatabaseException
{
    // close everything involved in the query!
    SQLException firstError = null;

    if (res != null) {
        try { res.close(); } catch (SQLException ex) { firstError = ex; }
    }
    if (sel_stmt != null) {
        try { sel_stmt.close(); } catch (SQLException ex) { if (firstError == null) firstError = ex; }
        sel_stmt = null;
    }
    if (pstmt != null) {
        try { pstmt.close(); } catch (SQLException ex) { if (firstError == null) firstError = ex; }
        pstmt = null;
    }

    if (firstError != null) {
        throw new KettleDatabaseException("Couldn't close query: resultset or prepared statements", firstError);
    }
}
/**
 * Build the row metadata using ResultSetMetaData.
 * The result is also stored in the {@code rowMeta} field as a side effect.
 *
 * @param rm The resultset metadata to inquire
 * @param ignoreLength true if you want to ignore the length (workaround for MySQL bug/problem)
 * @param lazyConversion true if lazy conversion needs to be enabled where possible
 * @return the row metadata describing the result columns
 * @throws KettleDatabaseException when the metadata is null or cannot be read
 */
private RowMetaInterface getRowInfo(ResultSetMetaData rm, boolean ignoreLength, boolean lazyConversion) throws KettleDatabaseException
{
    if (rm == null) {
        throw new KettleDatabaseException("No result set metadata available to retrieve row metadata!");
    }
    rowMeta = new RowMeta();
    try
    {
        // TODO If we do lazy conversion, we need to find out about the encoding
        int fieldNr = 1;
        int nrcols = rm.getColumnCount();
        for (int i = 1; i <= nrcols; i++)
        {
            // No need to copy the string here (the original wrapped it in "new String(...)").
            String name = rm.getColumnName(i);

            // Check the name, sometimes it's empty: invent "FieldN" names for those.
            if (Const.isEmpty(name) || Const.onlySpaces(name))
            {
                name = "Field" + fieldNr;
                fieldNr++;
            }

            ValueMetaInterface v = getValueFromSQLType(name, rm, i, ignoreLength, lazyConversion);
            rowMeta.addValueMeta(v);
        }
        return rowMeta;
    }
    catch(SQLException ex)
    {
        throw new KettleDatabaseException("Error getting row information from database: ", ex);
    }
}
/**
 * Converts the JDBC metadata of one result set column into Kettle value metadata,
 * applying database-specific corrections (MySQL, PostgreSQL/Greenplum, Oracle,
 * Teradata, Neoview quirks).
 *
 * @param name the (non-empty) field name to use
 * @param rm the result set metadata to inquire
 * @param index the 1-based column index
 * @param ignoreLength true to skip length/precision probing (workaround for MySQL)
 * @param lazyConversion true to enable lazy (binary string) storage for String fields
 * @return the value metadata describing the column
 * @throws SQLException when the column metadata cannot be read
 */
private ValueMetaInterface getValueFromSQLType(String name, ResultSetMetaData rm, int index, boolean ignoreLength, boolean lazyConversion) throws SQLException
{
    int length = -1;
    int precision = -1;
    int valtype = ValueMetaInterface.TYPE_NONE;
    boolean isClob = false;

    int type = rm.getColumnType(index);
    boolean signed = rm.isSigned(index);
    switch (type)
    {
        case java.sql.Types.CHAR:
        case java.sql.Types.VARCHAR:
        case java.sql.Types.LONGVARCHAR: // Character Large Object
            valtype = ValueMetaInterface.TYPE_STRING;
            if (!ignoreLength) length = rm.getColumnDisplaySize(index);
            break;

        case java.sql.Types.CLOB:
            valtype = ValueMetaInterface.TYPE_STRING;
            length = DatabaseMeta.CLOB_LENGTH;
            isClob = true;
            break;

        case java.sql.Types.BIGINT:
            // verify Unsigned BIGINT overflow!
            if (signed)
            {
                valtype = ValueMetaInterface.TYPE_INTEGER;
                precision = 0; // Max 9.223.372.036.854.775.807
                length = 15;
            }
            else
            {
                valtype = ValueMetaInterface.TYPE_BIGNUMBER;
                precision = 0; // Max 18.446.744.073.709.551.615
                length = 16;
            }
            break;

        case java.sql.Types.INTEGER:
            valtype = ValueMetaInterface.TYPE_INTEGER;
            precision = 0; // Max 2.147.483.647
            length = 9;
            break;

        case java.sql.Types.SMALLINT:
            valtype = ValueMetaInterface.TYPE_INTEGER;
            precision = 0; // Max 32.767
            length = 4;
            break;

        case java.sql.Types.TINYINT:
            valtype = ValueMetaInterface.TYPE_INTEGER;
            precision = 0; // Max 127
            length = 2;
            break;

        case java.sql.Types.DECIMAL:
        case java.sql.Types.DOUBLE:
        case java.sql.Types.FLOAT:
        case java.sql.Types.REAL:
        case java.sql.Types.NUMERIC:
            valtype = ValueMetaInterface.TYPE_NUMBER;
            length = rm.getPrecision(index);
            precision = rm.getScale(index);
            if (length >= 126) length = -1;
            if (precision >= 126) precision = -1;

            if (type == java.sql.Types.DOUBLE || type == java.sql.Types.FLOAT || type == java.sql.Types.REAL)
            {
                if (precision == 0)
                {
                    precision = -1; // precision is obviously incorrect if the type if Double/Float/Real
                }

                // If we're dealing with PostgreSQL and double precision types
                if (databaseMeta.getDatabaseInterface() instanceof PostgreSQLDatabaseMeta && type == java.sql.Types.DOUBLE && precision == 16 && length == 16)
                {
                    precision = -1;
                    length = -1;
                }

                // MySQL: max resolution is double precision floating point (double)
                // The (12,31) that is given back is not correct
                if (databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta)
                {
                    if (precision >= length) {
                        precision = -1;
                        length = -1;
                    }
                }

                // if the length or precision needs a BIGNUMBER
                if (length > 15 || precision > 15) valtype = ValueMetaInterface.TYPE_BIGNUMBER;
            }
            else
            {
                if (precision == 0) {
                    if (length <= 18 && length > 0) { // Among others Oracle is affected here.
                        valtype = ValueMetaInterface.TYPE_INTEGER; // Long can hold up to 18 significant digits
                    } else if (length > 18) {
                        valtype = ValueMetaInterface.TYPE_BIGNUMBER;
                    }
                } else { // we have a precision: keep NUMBER or change to BIGNUMBER?
                    if (length > 15 || precision > 15) valtype = ValueMetaInterface.TYPE_BIGNUMBER;
                }
            }

            if (databaseMeta.getDatabaseInterface() instanceof PostgreSQLDatabaseMeta || databaseMeta.getDatabaseInterface() instanceof GreenplumDatabaseMeta)
            {
                // undefined size => arbitrary precision
                if (type == java.sql.Types.NUMERIC && length == 0 && precision == 0)
                {
                    valtype = ValueMetaInterface.TYPE_BIGNUMBER;
                    length = -1;
                    precision = -1;
                }
            }

            if (databaseMeta.getDatabaseInterface() instanceof OracleDatabaseMeta)
            {
                if (precision == 0 && length == 38)
                {
                    valtype = ValueMetaInterface.TYPE_INTEGER;
                }
                if (precision <= 0 && length <= 0) // undefined size: BIGNUMBER, precision on Oracle can be 38, too big for a Number type
                {
                    valtype = ValueMetaInterface.TYPE_BIGNUMBER;
                    length = -1;
                    precision = -1;
                }
            }
            break;

        case java.sql.Types.DATE:
            if (databaseMeta.getDatabaseInterface() instanceof TeradataDatabaseMeta) {
                precision = 1;
            }
            // intentional fall-through into TIME/TIMESTAMP handling
        case java.sql.Types.TIME:
        case java.sql.Types.TIMESTAMP:
            valtype = ValueMetaInterface.TYPE_DATE;

            if (databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta) {
                // MySQL YEAR columns surface as integers when yearIsDateType=false.
                String property = databaseMeta.getConnectionProperties().getProperty("yearIsDateType");
                if (property != null && property.equalsIgnoreCase("false")
                        && rm.getColumnTypeName(index).equalsIgnoreCase("YEAR")) {
                    valtype = ValueMetaInterface.TYPE_INTEGER;
                    precision = 0;
                    length = 4;
                    break;
                }
            }
            break;

        case java.sql.Types.BOOLEAN:
        case java.sql.Types.BIT:
            valtype = ValueMetaInterface.TYPE_BOOLEAN;
            break;

        case java.sql.Types.BINARY:
        case java.sql.Types.BLOB:
        case java.sql.Types.VARBINARY:
        case java.sql.Types.LONGVARBINARY:
            valtype = ValueMetaInterface.TYPE_BINARY;

            if (databaseMeta.isDisplaySizeTwiceThePrecision() && (2 * rm.getPrecision(index)) == rm.getColumnDisplaySize(index))
            {
                // set the length for "CHAR(X) FOR BIT DATA"
                length = rm.getPrecision(index);
            }
            else if (databaseMeta.getDatabaseInterface() instanceof OracleDatabaseMeta &&
                    (type == java.sql.Types.VARBINARY || type == java.sql.Types.LONGVARBINARY))
            {
                // set the length for Oracle "RAW" or "LONGRAW" data types
                valtype = ValueMetaInterface.TYPE_STRING;
                length = rm.getColumnDisplaySize(index);
            }
            else
            {
                length = -1;
            }
            precision = -1;
            break;

        default:
            valtype = ValueMetaInterface.TYPE_STRING;
            precision = rm.getScale(index);
            break;
    }

    // Grab the comment as a description to the field as well.
    String comments = rm.getColumnLabel(index);

    // get & store more result set meta data for later use
    int originalColumnType = rm.getColumnType(index);
    String originalColumnTypeName = rm.getColumnTypeName(index);
    int originalPrecision = -1;
    if (!ignoreLength) {
        // BUGFIX: the original discarded the result of this call, leaving
        // originalPrecision permanently at -1. Store it. The call is still
        // skipped when ignoreLength is set: getPrecision() throws on MySQL.
        originalPrecision = rm.getPrecision(index);
    }
    int originalScale = rm.getScale(index);
    // boolean originalAutoIncrement=rm.isAutoIncrement(index); DISABLED FOR PERFORMANCE REASONS : PDI-1788
    // int originalNullable=rm.isNullable(index); DISABLED FOR PERFORMANCE REASONS : PDI-1788
    boolean originalSigned = rm.isSigned(index);

    ValueMetaInterface v = new ValueMeta(name, valtype);
    v.setLength(length);
    v.setPrecision(precision);
    v.setComments(comments);
    v.setLargeTextField(isClob);
    v.setOriginalColumnType(originalColumnType);
    v.setOriginalColumnTypeName(originalColumnTypeName);
    v.setOriginalPrecision(originalPrecision);
    v.setOriginalScale(originalScale);
    // v.setOriginalAutoIncrement(originalAutoIncrement); DISABLED FOR PERFORMANCE REASONS : PDI-1788
    // v.setOriginalNullable(originalNullable); DISABLED FOR PERFORMANCE REASONS : PDI-1788
    v.setOriginalSigned(originalSigned);

    // See if we need to enable lazy conversion...
    if (lazyConversion && valtype == ValueMetaInterface.TYPE_STRING) {
        v.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
        // TODO set some encoding to go with this.

        // Also set the storage metadata. a copy of the parent, set to String too.
        ValueMetaInterface storageMetaData = v.clone();
        storageMetaData.setType(ValueMetaInterface.TYPE_STRING);
        storageMetaData.setStorageType(ValueMetaInterface.STORAGE_TYPE_NORMAL);
        v.setStorageMetadata(storageMetaData);
    }

    return v;
}
/**
 * Moves the cursor of the result set to the given absolute position.
 *
 * @param rs the result set to reposition
 * @param position the absolute row position to move to
 * @return the value returned by ResultSet.absolute()
 * @throws KettleDatabaseException when the cursor cannot be moved
 */
public boolean absolute(ResultSet rs, int position) throws KettleDatabaseException
{
    try {
        return rs.absolute(position);
    } catch (SQLException e) {
        throw new KettleDatabaseException("Unable to move resultset to position "+position, e);
    }
}
/**
 * Moves the cursor of the result set by the given number of rows relative to its
 * current position.
 *
 * @param rs the result set to reposition
 * @param rows the number of rows to move (may be negative)
 * @return the value returned by ResultSet.relative()
 * @throws KettleDatabaseException when the cursor cannot be moved
 */
public boolean relative(ResultSet rs, int rows) throws KettleDatabaseException
{
    try {
        return rs.relative(rows);
    } catch (SQLException e) {
        throw new KettleDatabaseException("Unable to move the resultset forward "+rows+" rows", e);
    }
}
/**
 * Moves the cursor of the result set to after the last row.
 *
 * @param rs the result set to reposition
 * @throws KettleDatabaseException when the cursor cannot be moved
 */
public void afterLast(ResultSet rs) throws KettleDatabaseException
{
    try {
        rs.afterLast();
    } catch (SQLException e) {
        throw new KettleDatabaseException("Unable to move resultset to after the last position", e);
    }
}
/**
 * Moves the cursor of the result set to the first row.
 *
 * @param rs the result set to reposition
 * @throws KettleDatabaseException when the cursor cannot be moved
 */
public void first(ResultSet rs) throws KettleDatabaseException
{
    try {
        rs.first();
    } catch (SQLException e) {
        throw new KettleDatabaseException("Unable to move resultset to the first position", e);
    }
}
/**
 * Get a row from the resultset. Do not use lazy conversion.
 *
 * @param rs The resultset to get the row from
 * @return one row or null if no row was found on the resultset or if an error occurred.
 * @throws KettleDatabaseException when the row cannot be read
 */
public Object[] getRow(ResultSet rs) throws KettleDatabaseException
{
    return getRow(rs, false);
}
/**
 * Get a row from the resultset.
 * Lazily initializes the {@code rowMeta} field from the result set's metadata on
 * the first call.
 *
 * @param rs The resultset to get the row from
 * @param lazyConversion set to true if strings need to have lazy conversion enabled
 * @return one row or null if no row was found on the resultset or if an error occurred.
 * @throws KettleDatabaseException when the metadata or row cannot be read
 */
public Object[] getRow(ResultSet rs, boolean lazyConversion) throws KettleDatabaseException
{
    if (rowMeta == null)
    {
        // First call: derive the row metadata from the result set itself.
        ResultSetMetaData rsmd;
        try
        {
            rsmd = rs.getMetaData();
        }
        catch(SQLException e)
        {
            throw new KettleDatabaseException("Unable to retrieve metadata from resultset", e);
        }
        rowMeta = getRowInfo(rsmd, false, lazyConversion);
    }

    return getRow(rs, null, rowMeta);
}
/**
 * Get a row from the resultset, converting each column according to the supplied
 * row metadata.
 * @param rs The resultset to get the row from
 * @param dummy unused; kept for signature compatibility
 * @param rowInfo the metadata describing how to read each column
 * @return one row or null if no row was found on the resultset or if an error occurred.
 */
public Object[] getRow(ResultSet rs, ResultSetMetaData dummy, RowMetaInterface rowInfo) throws KettleDatabaseException
{
try
{
int nrcols=rowInfo.size();
Object[] data = RowDataUtil.allocateRowData(nrcols);
if (rs.next())
{
for (int i=0;i<nrcols;i++)
{
ValueMetaInterface val = rowInfo.getValueMeta(i);
// Read each column with the JDBC accessor matching its Kettle type.
switch(val.getType())
{
case ValueMetaInterface.TYPE_BOOLEAN : data[i] = Boolean.valueOf( rs.getBoolean(i+1) ); break;
case ValueMetaInterface.TYPE_NUMBER : data[i] = new Double( rs.getDouble(i+1) ); break;
case ValueMetaInterface.TYPE_BIGNUMBER : data[i] = rs.getBigDecimal(i+1); break;
case ValueMetaInterface.TYPE_INTEGER : data[i] = Long.valueOf( rs.getLong(i+1) ); break;
case ValueMetaInterface.TYPE_STRING :
{
// Lazy-conversion strings are kept as raw bytes.
if (val.isStorageBinaryString()) {
data[i] = rs.getBytes(i+1);
}
else {
data[i] = rs.getString(i+1);
}
}
break;
case ValueMetaInterface.TYPE_BINARY :
{
if (databaseMeta.supportsGetBlob())
{
Blob blob = rs.getBlob(i+1);
if (blob!=null)
{
data[i] = blob.getBytes(1L, (int)blob.length());
}
else
{
data[i] = null;
}
}
else
{
data[i] = rs.getBytes(i+1);
}
}
break;
case ValueMetaInterface.TYPE_DATE :
// Note: the break statements live inside each branch below.
if (databaseMeta.getDatabaseInterface() instanceof NeoviewDatabaseMeta && val.getOriginalColumnType()==java.sql.Types.TIME)
{
// Neoview can not handle getDate / getTimestamp for a Time column
data[i] = rs.getTime(i+1); break; // Time is a subclass of java.util.Date, the default date will be 1970-01-01
}
else if (val.getPrecision()!=1 && databaseMeta.supportsTimeStampToDateConversion())
{
data[i] = rs.getTimestamp(i+1); break; // Timestamp extends java.util.Date
}
else
{
data[i] = rs.getDate(i+1); break;
}
default: break;
}
if (rs.wasNull()) data[i] = null; // null value, it's the default but we want it just to make sure we handle this case too.
}
}
else
{
// No more rows: signal end-of-data with null.
data=null;
}
return data;
}
catch(SQLException ex)
{
throw new KettleDatabaseException("Couldn't get row from result set", ex);
}
}
/**
 * Logs an SQLException and every chained exception behind it
 * (message, SQL state and vendor error code for each).
 *
 * @param ex the exception chain to log
 */
public void printSQLException(SQLException ex)
{
    log.logError("==> SQLException: ");
    for (SQLException current = ex; current != null; current = current.getNextException())
    {
        log.logError("Message: " + current.getMessage());
        log.logError("SQLState: " + current.getSQLState());
        log.logError("ErrorCode: " + current.getErrorCode());
        log.logError("");
    }
}
/**
 * Prepares a lookup statement on the given table.
 * Convenience overload without schema qualifier or multiple-result checking.
 *
 * @param table the table to look up in
 * @param codes the key fields of the lookup
 * @param condition the comparison operator per key field
 * @param gets the fields to retrieve
 * @param rename optional aliases for the retrieved fields
 * @param orderby optional ORDER BY clause content
 * @throws KettleDatabaseException when the statement cannot be prepared
 */
public void setLookup(String table, String codes[], String condition[],
                      String gets[], String rename[], String orderby
                      ) throws KettleDatabaseException
{
    setLookup(table, codes, condition, gets, rename, orderby, false);
}
/**
 * Prepares a lookup statement on the given schema-qualified table.
 * Convenience overload without multiple-result checking.
 *
 * @param schema the schema qualifying the table
 * @param table the table to look up in
 * @param codes the key fields of the lookup
 * @param condition the comparison operator per key field
 * @param gets the fields to retrieve
 * @param rename optional aliases for the retrieved fields
 * @param orderby optional ORDER BY clause content
 * @throws KettleDatabaseException when the statement cannot be prepared
 */
public void setLookup(String schema, String table, String codes[], String condition[],
                      String gets[], String rename[], String orderby
                      ) throws KettleDatabaseException
{
    setLookup(schema, table, codes, condition, gets, rename, orderby, false);
}
/**
 * Prepares a lookup statement on the given table.
 * Convenience overload without a schema qualifier.
 *
 * @param tableName the table to look up in
 * @param codes the key fields of the lookup
 * @param condition the comparison operator per key field
 * @param gets the fields to retrieve
 * @param rename optional aliases for the retrieved fields
 * @param orderby optional ORDER BY clause content
 * @param checkForMultipleResults when false, the statement is limited to one row
 * @throws KettleDatabaseException when the statement cannot be prepared
 */
public void setLookup(String tableName, String codes[], String condition[],
                      String gets[], String rename[], String orderby,
                      boolean checkForMultipleResults) throws KettleDatabaseException
{
    setLookup(null, tableName, codes, condition, gets, rename, orderby, checkForMultipleResults);
}
// Lookup certain fields in a table
/**
 * Builds and prepares a parameterized SELECT used for looking up fields in a table.
 * The prepared statement is stored in the {@code prepStatementLookup} field.
 *
 * @param schemaName the schema qualifying the table, or null
 * @param tableName the table to look up in
 * @param codes the key fields of the WHERE clause
 * @param condition the comparison operator per key field ("=", "BETWEEN", "IS NULL", ...)
 * @param gets the fields to retrieve
 * @param rename optional aliases for the retrieved fields (parallel to {@code gets})
 * @param orderby optional ORDER BY clause content
 * @param checkForMultipleResults when false, the statement is limited to one row
 * @throws KettleDatabaseException when the statement cannot be prepared
 */
public void setLookup(String schemaName, String tableName, String codes[], String condition[],
                      String gets[], String rename[], String orderby,
                      boolean checkForMultipleResults) throws KettleDatabaseException
{
    String table = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);

    // Build the statement with a StringBuilder instead of repeated String concatenation.
    StringBuilder sql = new StringBuilder(128);
    sql.append("SELECT ");
    for (int i = 0; i < gets.length; i++)
    {
        if (i != 0) sql.append(", ");
        sql.append(databaseMeta.quoteField(gets[i]));
        // Optionally alias the returned field.
        if (rename != null && rename[i] != null && !gets[i].equalsIgnoreCase(rename[i]))
        {
            sql.append(" AS ").append(databaseMeta.quoteField(rename[i]));
        }
    }

    sql.append(" FROM ").append(table).append(" WHERE ");
    for (int i = 0; i < codes.length; i++)
    {
        if (i != 0) sql.append(" AND ");
        sql.append(databaseMeta.quoteField(codes[i]));

        if ("BETWEEN".equalsIgnoreCase(condition[i]))
        {
            sql.append(" BETWEEN ? AND ? ");
        }
        else if ("IS NULL".equalsIgnoreCase(condition[i]) || "IS NOT NULL".equalsIgnoreCase(condition[i]))
        {
            // Unary conditions take no parameter placeholder.
            sql.append(" ").append(condition[i]).append(" ");
        }
        else
        {
            sql.append(" ").append(condition[i]).append(" ? ");
        }
    }

    if (orderby != null && orderby.length() != 0)
    {
        sql.append(" ORDER BY ").append(orderby);
    }

    String sqlString = sql.toString();
    try
    {
        if (log.isDetailed()) log.logDetailed("Setting preparedStatement to ["+sqlString+"]");
        prepStatementLookup = connection.prepareStatement(databaseMeta.stripCR(sqlString));
        if (!checkForMultipleResults && databaseMeta.supportsSetMaxRows())
        {
            prepStatementLookup.setMaxRows(1); // always get only 1 line back!
        }
    }
    catch(SQLException ex)
    {
        throw new KettleDatabaseException("Unable to prepare statement for update ["+sqlString+"]", ex);
    }
}
/**
 * Prepares an UPDATE statement on the given table.
 * Convenience overload without a schema qualifier.
 *
 * @param table the table to update
 * @param codes the key fields of the WHERE clause
 * @param condition the comparison operator per key field
 * @param sets the fields to update
 * @return true when the statement was prepared successfully, false otherwise
 */
public boolean prepareUpdate(String table, String codes[], String condition[], String sets[])
{
    return prepareUpdate(null, table, codes, condition, sets);
}
/**
 * Builds and prepares a parameterized UPDATE statement.
 * The prepared statement is stored in the {@code prepStatementUpdate} field.
 *
 * @param schemaName the schema qualifying the table, or null
 * @param tableName the table to update
 * @param codes the key fields of the WHERE clause
 * @param condition the comparison operator per key field ("=", "BETWEEN", "IS NULL", ...)
 * @param sets the fields to update (each becomes "field = ?")
 * @return true when the statement was prepared successfully, false otherwise
 */
public boolean prepareUpdate(String schemaName, String tableName, String codes[], String condition[], String sets[])
{
    // StringBuilder: no synchronization needed for this local buffer (was StringBuffer).
    StringBuilder sql = new StringBuilder(128);

    String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
    sql.append("UPDATE ").append(schemaTable).append(Const.CR).append("SET ");

    for (int i = 0; i < sets.length; i++)
    {
        if (i != 0) sql.append(", ");
        sql.append(databaseMeta.quoteField(sets[i]));
        sql.append(" = ?").append(Const.CR);
    }

    sql.append("WHERE ");

    for (int i = 0; i < codes.length; i++)
    {
        if (i != 0) sql.append("AND ");
        sql.append(databaseMeta.quoteField(codes[i]));

        if ("BETWEEN".equalsIgnoreCase(condition[i]))
        {
            sql.append(" BETWEEN ? AND ? ");
        }
        else if ("IS NULL".equalsIgnoreCase(condition[i]) || "IS NOT NULL".equalsIgnoreCase(condition[i]))
        {
            // Unary conditions take no parameter placeholder.
            sql.append(' ').append(condition[i]).append(' ');
        }
        else
        {
            sql.append(' ').append(condition[i]).append(" ? ");
        }
    }

    try
    {
        String s = sql.toString();
        if (log.isDetailed()) log.logDetailed("Setting update preparedStatement to ["+s+"]");
        prepStatementUpdate = connection.prepareStatement(databaseMeta.stripCR(s));
    }
    catch(SQLException ex)
    {
        printSQLException(ex);
        return false;
    }

    return true;
}
/**
 * Prepare a delete statement by giving it the tablename, fields and conditions
 * to work with. Convenience overload without a schema qualifier.
 *
 * @param table The table-name to delete in
 * @param codes the key fields of the WHERE clause
 * @param condition the comparison operator per key field
 * @return true when everything went OK, false when something went wrong.
 */
public boolean prepareDelete(String table, String codes[], String condition[])
{
    return prepareDelete(null, table, codes, condition);
}
/**
 * Prepare a delete statement by giving it the tablename, fields and conditions to work with.
 * The prepared statement is stored in the {@code prepStatementUpdate} field.
 *
 * NOTE(review): unlike prepareUpdate()/setLookup(), the key fields in {@code codes}
 * are NOT passed through databaseMeta.quoteField() here. Left as-is for backward
 * compatibility (callers may already pass quoted names) — confirm before changing.
 *
 * @param schemaName the schema-name to delete in
 * @param tableName The table-name to delete in
 * @param codes the key fields of the WHERE clause
 * @param condition the comparison operator per key field ("=", "BETWEEN", "IS NULL", ...)
 * @return true when everything went OK, false when something went wrong.
 */
public boolean prepareDelete(String schemaName, String tableName, String codes[], String condition[])
{
    String table = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);

    // Build the statement with a StringBuilder instead of repeated String concatenation.
    StringBuilder sql = new StringBuilder(128);
    sql.append("DELETE FROM ").append(table).append(Const.CR);
    sql.append("WHERE ");

    for (int i = 0; i < codes.length; i++)
    {
        if (i != 0) sql.append("AND ");
        sql.append(codes[i]);

        if ("BETWEEN".equalsIgnoreCase(condition[i]))
        {
            sql.append(" BETWEEN ? AND ? ");
        }
        else if ("IS NULL".equalsIgnoreCase(condition[i]) || "IS NOT NULL".equalsIgnoreCase(condition[i]))
        {
            // Unary conditions take no parameter placeholder.
            sql.append(" ").append(condition[i]).append(" ");
        }
        else
        {
            sql.append(" ").append(condition[i]).append(" ? ");
        }
    }

    try
    {
        String s = sql.toString();
        if (log.isDetailed()) log.logDetailed("Setting update preparedStatement to ["+s+"]");
        prepStatementUpdate = connection.prepareStatement(databaseMeta.stripCR(s));
    }
    catch(SQLException ex)
    {
        printSQLException(ex);
        return false;
    }

    return true;
}
/**
 * Prepares a callable statement for a stored-procedure lookup of the form
 * "{ ?= call proc(?, ?, ...) }" and registers the OUT/INOUT parameters.
 * The callable statement is stored in the {@code cstmt} field.
 *
 * @param proc the procedure name
 * @param arg the argument names (only the count is used to build placeholders)
 * @param argdir the direction per argument: "IN", "OUT" or "INOUT"
 * @param argtype the Kettle value type per argument (ValueMetaInterface.TYPE_*)
 * @param returnvalue the name of the return value; empty/null when the procedure returns nothing
 * @param returntype the Kettle value type of the return value
 * @throws KettleDatabaseException when the call cannot be prepared
 */
public void setProcLookup(String proc, String arg[], String argdir[], int argtype[], String returnvalue, int returntype)
throws KettleDatabaseException
{
String sql;
// "pos" is the 1-based JDBC parameter index; it is bumped past the return value
// (when present) so the procedure arguments start at the right offset.
int pos=0;
sql = "{ ";
if (returnvalue!=null && returnvalue.length()!=0)
{
sql+="? = ";
}
sql+="call "+proc+" ";
if (arg.length>0) sql+="(";
for (int i=0;i<arg.length;i++)
{
if (i!=0) sql += ", ";
sql += " ?";
}
if (arg.length>0) sql+=")";
sql+="}";
try
{
if(log.isDetailed()) log.logDetailed("DBA setting callableStatement to ["+sql+"]");
cstmt=connection.prepareCall(sql);
pos=1;
if (!Const.isEmpty(returnvalue))
{
// Map the Kettle return type onto the corresponding JDBC SQL type.
switch(returntype)
{
case ValueMetaInterface.TYPE_NUMBER : cstmt.registerOutParameter(pos, java.sql.Types.DOUBLE); break;
case ValueMetaInterface.TYPE_BIGNUMBER : cstmt.registerOutParameter(pos, java.sql.Types.DECIMAL); break;
case ValueMetaInterface.TYPE_INTEGER : cstmt.registerOutParameter(pos, java.sql.Types.BIGINT); break;
case ValueMetaInterface.TYPE_STRING : cstmt.registerOutParameter(pos, java.sql.Types.VARCHAR); break;
case ValueMetaInterface.TYPE_DATE : cstmt.registerOutParameter(pos, java.sql.Types.TIMESTAMP); break;
case ValueMetaInterface.TYPE_BOOLEAN : cstmt.registerOutParameter(pos, java.sql.Types.BOOLEAN); break;
default: break;
}
pos++;
}
for (int i=0;i<arg.length;i++)
{
// Only OUT and INOUT arguments need to be registered.
if (argdir[i].equalsIgnoreCase("OUT") || argdir[i].equalsIgnoreCase("INOUT"))
{
switch(argtype[i])
{
case ValueMetaInterface.TYPE_NUMBER : cstmt.registerOutParameter(i+pos, java.sql.Types.DOUBLE); break;
case ValueMetaInterface.TYPE_BIGNUMBER : cstmt.registerOutParameter(i+pos, java.sql.Types.DECIMAL); break;
case ValueMetaInterface.TYPE_INTEGER : cstmt.registerOutParameter(i+pos, java.sql.Types.BIGINT); break;
case ValueMetaInterface.TYPE_STRING : cstmt.registerOutParameter(i+pos, java.sql.Types.VARCHAR); break;
case ValueMetaInterface.TYPE_DATE : cstmt.registerOutParameter(i+pos, java.sql.Types.TIMESTAMP); break;
case ValueMetaInterface.TYPE_BOOLEAN : cstmt.registerOutParameter(i+pos, java.sql.Types.BOOLEAN); break;
default: break;
}
}
}
}
catch(SQLException ex)
{
throw new KettleDatabaseException("Unable to prepare database procedure call", ex);
}
}
/**
 * Executes the previously prepared lookup statement (prepStatementLookup) and returns the
 * first result row. Does not fail when multiple rows are returned.
 *
 * @return the first row of the lookup result, or null when nothing was found
 * @throws KettleDatabaseException when the lookup query fails
 */
public Object[] getLookup() throws KettleDatabaseException
{
return getLookup(prepStatementLookup);
}
/**
 * Executes the previously prepared lookup statement (prepStatementLookup).
 *
 * @param failOnMultipleResults when true, throw if the lookup returns more than one row
 * @return the first row of the lookup result, or null when nothing was found
 * @throws KettleDatabaseException when the lookup fails, or when multiple rows are found
 *         while failOnMultipleResults is set
 */
public Object[] getLookup(boolean failOnMultipleResults) throws KettleDatabaseException
{
return getLookup(prepStatementLookup, failOnMultipleResults);
}
/**
 * Executes the given prepared lookup statement and returns the first result row.
 * Does not fail when multiple rows are returned.
 *
 * @param ps the prepared statement to execute
 * @return the first row of the lookup result, or null when nothing was found
 * @throws KettleDatabaseException when the lookup query fails
 */
public Object[] getLookup(PreparedStatement ps) throws KettleDatabaseException
{
return getLookup(ps, false);
}
/**
 * Executes the given prepared lookup statement and returns the first result row.
 * Also refreshes the cached rowMeta (see getReturnRowMeta()) from the result set metadata.
 *
 * @param ps the prepared statement to execute
 * @param failOnMultipleResults when true, throw if the query returns more than one row
 * @return the first row of data, or null when the query returned no rows
 * @throws KettleDatabaseException when the query fails, when a second row is found while
 *         failOnMultipleResults is set, or when the result set cannot be closed.
 *         NOTE(review): the close happens in a finally block and throws, so a close failure
 *         can mask an exception already propagating from the body above.
 */
public Object[] getLookup(PreparedStatement ps, boolean failOnMultipleResults) throws KettleDatabaseException
{
ResultSet res = null;
try
{
res = ps.executeQuery();
// Cache the lookup's metadata for callers of getReturnRowMeta().
rowMeta = getRowInfo(res.getMetaData(), false, false);
Object[] ret = getRow(res);
if (failOnMultipleResults)
{
if (ret != null && res.next())
{
// if the previous row was null, there's no reason to try res.next() again.
// on DB2 this will even cause an exception (because of the buggy DB2 JDBC driver).
throw new KettleDatabaseException("Only 1 row was expected as a result of a lookup, and at least 2 were found!");
}
}
return ret;
}
catch(SQLException ex)
{
throw new KettleDatabaseException("Error looking up row in database", ex);
}
finally
{
try
{
if (res!=null) res.close(); // close resultset!
}
catch(SQLException e)
{
throw new KettleDatabaseException("Unable to close resultset after looking up data", e);
}
}
}
/**
 * Returns the JDBC DatabaseMetaData for this connection, fetching and caching it on first use.
 *
 * @return the (cached) database metadata
 * @throws KettleDatabaseException when the metadata cannot be obtained from the connection
 */
public DatabaseMetaData getDatabaseMetaData() throws KettleDatabaseException
{
if (dbmd == null)
{
try
{
// Only fetch the metadata once; cache it for subsequent calls.
dbmd = connection.getMetaData();
}
catch(Exception e)
{
throw new KettleDatabaseException("Unable to get database metadata from this database connection", e);
}
}
return dbmd;
}
/**
 * Generates the DDL (CREATE or ALTER TABLE) to make the given table match the supplied fields,
 * without technical/primary key and with a terminating semicolon.
 *
 * @param tablename the table to generate DDL for
 * @param fields the desired field layout
 * @return the generated DDL
 * @throws KettleDatabaseException when the table existence check or field lookup fails
 */
public String getDDL(String tablename, RowMetaInterface fields) throws KettleDatabaseException
{
return getDDL(tablename, fields, null, false, null, true);
}
/**
 * Generates the DDL (CREATE or ALTER TABLE) to make the given table match the supplied fields,
 * with a terminating semicolon.
 *
 * @param tablename the table to generate DDL for
 * @param fields the desired field layout
 * @param tk the name of the technical key field, or null
 * @param use_autoinc true to use an auto-increment technical key
 * @param pk the name of the primary key field, or null
 * @return the generated DDL
 * @throws KettleDatabaseException when the table existence check or field lookup fails
 */
public String getDDL(String tablename, RowMetaInterface fields, String tk, boolean use_autoinc, String pk) throws KettleDatabaseException
{
return getDDL(tablename, fields, tk, use_autoinc, pk, true);
}
/**
 * Generates the DDL needed to make the given table match the supplied field layout:
 * CREATE TABLE when the table does not exist yet, otherwise ALTER TABLE for the differences.
 *
 * @param tableName the (possibly schema-qualified) table name
 * @param fields the desired field layout
 * @param tk the name of the technical key field (quoted before use), or null
 * @param use_autoinc true to use an auto-increment technical key
 * @param pk the name of the primary key field, or null
 * @param semicolon true to terminate the statement with a semicolon
 * @return the generated DDL
 * @throws KettleDatabaseException when the existence check or field lookup fails
 */
public String getDDL(String tableName, RowMetaInterface fields, String tk, boolean use_autoinc, String pk, boolean semicolon) throws KettleDatabaseException
{
// First, check for reserved SQL in the input row r...
databaseMeta.quoteReservedWords(fields);
String quotedTk = tk != null ? databaseMeta.quoteField(tk) : null;
// Alter an existing table, create it otherwise.
return checkTableExists(tableName)
? getAlterTableStatement(tableName, fields, quotedTk, use_autoinc, pk, semicolon)
: getCreateTableStatement(tableName, fields, quotedTk, use_autoinc, pk, semicolon);
}
/**
 * Generates the CREATE TABLE statement (plus database specific trailer clauses) for the
 * specified table and fields.
 *
 * @param tableName the table name or schema/table combination: this needs to be quoted properly in advance.
 * @param fields the fields
 * @param tk the name of the technical key field
 * @param use_autoinc true if we need to use auto-increment fields for a primary key
 * @param pk the name of the primary/technical key field
 * @param semicolon append semicolon to the statement
 * @return the SQL needed to create the specified table and fields.
 */
public String getCreateTableStatement(String tableName, RowMetaInterface fields, String tk, boolean use_autoinc, String pk, boolean semicolon)
{
StringBuilder retval = new StringBuilder("CREATE TABLE ");
retval.append(tableName).append(Const.CR);
retval.append("(").append(Const.CR);
for (int i=0;i<fields.size();i++)
{
if (i>0) retval.append(", "); else retval.append(" ");
ValueMetaInterface v=fields.getValueMeta(i);
retval.append(databaseMeta.getFieldDefinition(v, tk, pk, use_autoinc));
}
// At the end, before the closing of the statement, we might need to add some constraints...
// Technical keys
if (tk!=null)
{
if (databaseMeta.requiresCreateTablePrimaryKeyAppend())
{
retval.append(", PRIMARY KEY (").append(tk).append(")").append(Const.CR);
}
}
// Primary keys
if (pk!=null)
{
if (databaseMeta.requiresCreateTablePrimaryKeyAppend())
{
retval.append(", PRIMARY KEY (").append(pk).append(")").append(Const.CR);
}
}
retval.append(")").append(Const.CR);
// Oracle: place the table in the configured data tablespace.
// BUGFIX: the condition used to test the *index* tablespace while the *data* tablespace
// is what gets appended; test the value that is actually used.
if (databaseMeta.getDatabaseInterface() instanceof OracleDatabaseMeta &&
databaseMeta.getDataTablespace()!=null && databaseMeta.getDataTablespace().length()>0)
{
retval.append("TABLESPACE ").append(databaseMeta.getDataTablespace());
}
if (pk==null && tk==null && databaseMeta.getDatabaseInterface() instanceof NeoviewDatabaseMeta)
{
retval.append("NO PARTITION"); // use this as a default when no pk/tk is there, otherwise you get an error
}
if (semicolon) retval.append(";");
// TODO: All this custom database code shouldn't really be in Database.java. It should be in the DB implementations.
if (databaseMeta.getDatabaseInterface() instanceof VerticaDatabaseMeta)
{
// Vertica: create an explicit unsegmented superprojection alongside the table.
retval.append(Const.CR).append("CREATE PROJECTION ").append(tableName).append("_unseg_super").append(Const.CR);
retval.append("(").append(Const.CR);
for (int i=0;i<fields.size();i++)
{
if (i>0) retval.append(", "); else retval.append(" ");
retval.append(fields.getValueMeta(i).getName()).append(Const.CR);
}
retval.append(")").append(Const.CR);
retval.append("AS SELECT").append(Const.CR);
for (int i=0;i<fields.size();i++)
{
if (i>0) retval.append(", "); else retval.append(" ");
retval.append(fields.getValueMeta(i).getName()).append(Const.CR);
}
retval.append("FROM ").append(tableName).append(Const.CR);
retval.append("-- Replace UNSEGMENTED with a hash segmentation for optimum performance").append(Const.CR);
retval.append("--SEGMENTED BY HASH(X,Y,Z)").append(Const.CR);
retval.append("UNSEGMENTED ALL NODES").append(Const.CR);
retval.append(";");
}
return retval.toString();
}
/**
 * Generates the ALTER TABLE statements needed to make an existing table match the desired
 * field layout: ADD statements for missing columns, DROP statements for surplus columns and
 * MODIFY statements for columns whose generated definition (type, length, precision) differs.
 *
 * @param tableName the (possibly schema-qualified) table name
 * @param fields the desired field layout
 * @param tk the name of the technical key field
 * @param use_autoinc true if the technical key uses auto-increment
 * @param pk the name of the primary key field
 * @param semicolon append a semicolon to each generated statement
 * @return the concatenated ALTER TABLE statements (empty when the table already matches)
 * @throws KettleDatabaseException when the current table layout cannot be read
 */
public String getAlterTableStatement(String tableName, RowMetaInterface fields, String tk, boolean use_autoinc, String pk, boolean semicolon) throws KettleDatabaseException
{
// Build with a StringBuilder instead of repeated String concatenation.
StringBuilder retval = new StringBuilder();
// Get the fields that are in the table now:
RowMetaInterface tabFields = getTableFields(tableName);
// Don't forget to quote these as well...
databaseMeta.quoteReservedWords(tabFields);
// Find the desired fields that are missing from the table.
RowMetaInterface missing = new RowMeta();
for (int i=0;i<fields.size();i++)
{
ValueMetaInterface v = fields.getValueMeta(i);
if (tabFields.searchValueMeta( v.getName() )==null )
{
missing.addValueMeta(v); // nope --> Missing!
}
}
for (int i=0;i<missing.size();i++)
{
ValueMetaInterface v=missing.getValueMeta(i);
retval.append(databaseMeta.getAddColumnStatement(tableName, v, tk, use_autoinc, pk, true));
}
// Find the table fields that are not in the desired layout (surplus).
RowMetaInterface surplus = new RowMeta();
for (int i=0;i<tabFields.size();i++)
{
ValueMetaInterface v = tabFields.getValueMeta(i);
if (fields.searchValueMeta( v.getName() )==null )
{
surplus.addValueMeta(v); // yes --> surplus!
}
}
for (int i=0;i<surplus.size();i++)
{
ValueMetaInterface v=surplus.getValueMeta(i);
retval.append(databaseMeta.getDropColumnStatement(tableName, v, tk, use_autoinc, pk, true));
}
// See if there are fields whose type needs modifying (length, precision, ...):
// compare the generated field definitions case-insensitively.
RowMetaInterface modify = new RowMeta();
for (int i=0;i<fields.size();i++)
{
ValueMetaInterface desiredField = fields.getValueMeta(i);
ValueMetaInterface currentField = tabFields.searchValueMeta( desiredField.getName());
// desiredField comes straight from fields and is never null; only the lookup can miss.
if (currentField!=null)
{
String desiredDDL = databaseMeta.getFieldDefinition(desiredField, tk, pk, use_autoinc);
String currentDDL = databaseMeta.getFieldDefinition(currentField, tk, pk, use_autoinc);
if (!desiredDDL.equalsIgnoreCase(currentDDL))
{
modify.addValueMeta(desiredField);
}
}
}
for (int i=0;i<modify.size();i++)
{
ValueMetaInterface v=modify.getValueMeta(i);
retval.append(databaseMeta.getModifyColumnStatement(tableName, v, tk, use_autoinc, pk, true));
}
return retval.toString();
}
/**
 * Truncates the given table in the default schema. Outside a connection group the database
 * specific TRUNCATE statement is used; inside one we fall back to DELETE so the operation
 * happens on the shared connection.
 *
 * @param tablename the table to empty
 * @throws KettleDatabaseException when the statement fails
 */
public void truncateTable(String tablename) throws KettleDatabaseException
{
boolean sharedConnection = !Const.isEmpty(connectionGroup);
if (sharedConnection)
{
execStatement("DELETE FROM "+databaseMeta.quoteField(tablename));
}
else
{
execStatement(databaseMeta.getTruncateTableStatement(null, tablename));
}
}
/**
 * Truncates the given table in the given schema. Outside a connection group the database
 * specific TRUNCATE statement is used; inside one we fall back to DELETE so the operation
 * happens on the shared connection.
 *
 * @param schema the schema containing the table
 * @param tablename the table to empty
 * @throws KettleDatabaseException when the statement fails
 */
public void truncateTable(String schema, String tablename) throws KettleDatabaseException
{
boolean sharedConnection = !Const.isEmpty(connectionGroup);
if (sharedConnection)
{
execStatement("DELETE FROM "+databaseMeta.getQuotedSchemaTableCombination(schema, tablename));
}
else
{
execStatement(databaseMeta.getTruncateTableStatement(schema, tablename));
}
}
/**
 * Execute a query and return at most one row from the resultset.
 * Closes the result set and any open prepared/select statement afterwards.
 *
 * @param sql The SQL for the query
 * @return one Row with data or null if nothing was found.
 * @throws KettleDatabaseException when the query cannot be opened or resources fail to close
 */
public RowMetaAndData getOneRow(String sql) throws KettleDatabaseException
{
ResultSet rs = openQuery(sql);
if (rs==null)
{
throw new KettleDatabaseException("error opening resultset for query: "+sql);
}
Object[] row = getRow(rs); // One row only;
try { rs.close(); } catch(Exception e) { throw new KettleDatabaseException("Unable to close resultset", e); }
// Release whichever statement openQuery() left behind.
if (pstmt!=null)
{
try { pstmt.close(); } catch(Exception e) { throw new KettleDatabaseException("Unable to close prepared statement pstmt", e); }
pstmt=null;
}
if (sel_stmt!=null)
{
try { sel_stmt.close(); } catch(Exception e) { throw new KettleDatabaseException("Unable to close prepared statement sel_stmt", e); }
sel_stmt=null;
}
return new RowMetaAndData(rowMeta, row);
}
/**
 * Builds a RowMeta describing the columns of the given result set metadata.
 *
 * @param row unused; only the metadata is inspected (parameter kept for API compatibility)
 * @param md the result set metadata to convert
 * @return a RowMeta with one ValueMeta per column
 * @throws SQLException when the metadata cannot be read
 */
public RowMeta getMetaFromRow( Object[] row, ResultSetMetaData md ) throws SQLException {
RowMeta meta = new RowMeta();
for( int i=0; i<md.getColumnCount(); i++ ) {
String name = md.getColumnName(i+1);
// JDBC columns are 1-based; boolean flags passed to getValueFromSQLType -- semantics not visible here, TODO confirm
ValueMetaInterface valueMeta = getValueFromSQLType( name, md, i+1, true, false );
meta.addValueMeta( valueMeta );
}
return meta;
}
/**
 * Execute a parameterized query and return at most one row from the result set.
 * Closes the result set and any open prepared/select statement afterwards.
 *
 * @param sql the SQL for the query, with '?' placeholders
 * @param param the metadata of the parameters
 * @param data the parameter values
 * @return one row with its metadata, or null when the query could not be opened
 * @throws KettleDatabaseException when resources fail to close
 */
public RowMetaAndData getOneRow(String sql, RowMetaInterface param, Object[] data) throws KettleDatabaseException
{
ResultSet rs = openQuery(sql, param, data);
if (rs!=null)
{
Object[] row = getRow(rs); // One value: a number;
rowMeta=null;
RowMeta tmpMeta = null;
try {
ResultSetMetaData md = rs.getMetaData();
tmpMeta = getMetaFromRow( row, md );
} catch (Exception e) {
// Best effort: still return the row when its metadata can't be determined,
// but log the problem instead of dumping a stack trace to stderr.
log.logError("Unable to determine the metadata of the result row: "+e.toString());
} finally {
try { rs.close(); } catch(Exception e) { throw new KettleDatabaseException("Unable to close resultset", e); }
if (pstmt!=null)
{
try { pstmt.close(); } catch(Exception e) { throw new KettleDatabaseException("Unable to close prepared statement pstmt", e); }
pstmt=null;
}
if (sel_stmt!=null)
{
try { sel_stmt.close(); } catch(Exception e) { throw new KettleDatabaseException("Unable to close prepared statement sel_stmt", e); }
sel_stmt=null;
}
}
return new RowMetaAndData(tmpMeta, row);
}
else
{
return null;
}
}
/**
 * Derives a Kettle row describing the parameters of a prepared statement, using JDBC
 * ParameterMetaData. Parameters are named "par1".."parN", SQL types are mapped to Kettle
 * value types, and numeric parameters wider than 18 digits are promoted to big-numbers.
 *
 * @param ps the prepared statement to inspect
 * @return the parameter row, or null when the driver/database does not support parameter metadata
 */
public RowMetaInterface getParameterMetaData(PreparedStatement ps)
{
RowMetaInterface par = new RowMeta();
try
{
ParameterMetaData pmd = ps.getParameterMetaData();
for (int i=1;i<=pmd.getParameterCount();i++)
{
String name = "par"+i;
int sqltype = pmd.getParameterType(i);
int length = pmd.getPrecision(i);
int precision = pmd.getScale(i);
ValueMeta val;
// Map the JDBC SQL type onto the closest Kettle value type.
switch(sqltype)
{
case java.sql.Types.CHAR:
case java.sql.Types.VARCHAR:
val=new ValueMeta(name, ValueMetaInterface.TYPE_STRING);
break;
case java.sql.Types.BIGINT:
case java.sql.Types.INTEGER:
case java.sql.Types.NUMERIC:
case java.sql.Types.SMALLINT:
case java.sql.Types.TINYINT:
val=new ValueMeta(name, ValueMetaInterface.TYPE_INTEGER);
break;
case java.sql.Types.DECIMAL:
case java.sql.Types.DOUBLE:
case java.sql.Types.FLOAT:
case java.sql.Types.REAL:
val=new ValueMeta(name, ValueMetaInterface.TYPE_NUMBER);
break;
case java.sql.Types.DATE:
case java.sql.Types.TIME:
case java.sql.Types.TIMESTAMP:
val=new ValueMeta(name, ValueMetaInterface.TYPE_DATE);
break;
case java.sql.Types.BOOLEAN:
case java.sql.Types.BIT:
val=new ValueMeta(name, ValueMetaInterface.TYPE_BOOLEAN);
break;
default:
val=new ValueMeta(name, ValueMetaInterface.TYPE_NONE);
break;
}
// Very wide numerics don't fit a long/double: promote to BigNumber.
if (val.isNumeric() && ( length>18 || precision>18) )
{
val = new ValueMeta(name, ValueMetaInterface.TYPE_BIGNUMBER);
}
par.addValueMeta(val);
}
}
// Oops: probably the database or JDBC doesn't support it.
catch(AbstractMethodError e) { return null; }
catch(SQLException e) { return null; }
catch(Exception e) { return null; }
return par;
}
/**
 * Counts the number of bind placeholders ('?') in an SQL string, ignoring question marks
 * that appear inside single- or double-quoted literals.
 *
 * @param sql the SQL text to scan
 * @return the number of unquoted '?' placeholders
 */
public int countParameters(String sql)
{
int count = 0;
boolean inSingleQuote = false;
boolean inDoubleQuote = false;
for (char c : sql.toCharArray())
{
if (c == '\'')
{
inSingleQuote = !inSingleQuote;
}
else if (c == '"')
{
inDoubleQuote = !inDoubleQuote;
}
else if (c == '?' && !inSingleQuote && !inDoubleQuote)
{
count++;
}
}
return count;
}
/**
 * Derives parameter metadata for an SQL statement manually, by counting its '?' placeholders
 * (used when the database/driver can't provide ParameterMetaData). When the informational row
 * has exactly as many fields as there are placeholders, its value metas are cloned; otherwise
 * generic numeric parameters named "name0".."nameN-1" are generated.
 *
 * @param sql the SQL containing the placeholders
 * @param inform optional row describing the parameters
 * @param data unused (kept for API compatibility)
 * @return a row of parameter value metas
 */
public RowMetaInterface getParameterMetaData(String sql, RowMetaInterface inform, Object[] data)
{
// The database couldn't handle it: try manually!
int parameterCount = countParameters(sql);
RowMetaInterface parameters = new RowMeta();
boolean useInform = inform != null && parameterCount == inform.size();
for (int i = 0; i < parameterCount; i++)
{
ValueMetaInterface meta;
if (useInform)
{
meta = inform.getValueMeta(i).clone();
}
else
{
meta = new ValueMeta("name"+i, ValueMetaInterface.TYPE_NUMBER);
}
parameters.addValueMeta(meta);
}
return parameters;
}
/**
 * Writes a status record to the specified log table: an INSERT for the START status (or for
 * tables without a key field), an UPDATE of the existing record otherwise.
 *
 * @param logTable the log table definition
 * @param status the logging status being recorded
 * @param subject the object being logged (transformation, job, ...)
 * @param parent its parent object
 * @throws KettleException when the record cannot be written
 */
public void writeLogRecord(LogTableInterface logTable, LogStatus status, Object subject, Object parent) throws KettleException {
try {
RowMetaAndData logRecord = logTable.getLogRecord(status, subject, parent);
if (logRecord==null) return; // nothing to log for this status
boolean update = (logTable.getKeyField()!=null) && !status.equals(LogStatus.START);
String schemaTable = databaseMeta.getSchemaTableCombination(logTable.getSchemaName(), logTable.getTableName());
RowMetaInterface rowMeta = logRecord.getRowMeta();
Object[] rowData = logRecord.getData();
if (update) {
RowMetaInterface updateRowMeta = new RowMeta();
Object[] updateRowData = new Object[rowMeta.size()];
ValueMetaInterface keyValueMeta = rowMeta.getValueMeta(0);
// Build the UPDATE with a StringBuilder instead of repeated String concatenation.
StringBuilder sql = new StringBuilder("UPDATE ").append(schemaTable).append(" SET ");
for (int i = 1; i < rowMeta.size() ; i++) // Without ID_JOB or ID_BATCH
{
ValueMetaInterface valueMeta = rowMeta.getValueMeta(i);
if (i > 1) {
sql.append(", ");
}
sql.append(databaseMeta.quoteField(valueMeta.getName())).append("=? ");
updateRowMeta.addValueMeta(valueMeta);
updateRowData[i-1] = rowData[i];
}
sql.append("WHERE ");
sql.append(databaseMeta.quoteField(keyValueMeta.getName())).append("=? ");
// The key value (field 0) is bound last, as the WHERE parameter.
updateRowMeta.addValueMeta(keyValueMeta);
updateRowData[rowMeta.size()-1] = rowData[0];
execStatement(sql.toString(), updateRowMeta, updateRowData);
} else {
insertRow(logTable.getSchemaName(), logTable.getTableName(), logRecord.getRowMeta(), logRecord.getData());
}
} catch(Exception e) {
throw new KettleDatabaseException("Unable to write log record to log table " + logTable.getTableName(), e);
}
}
/**
 * Deletes log records older than the log table's configured timeout (in days). Does nothing
 * when no timeout (or an effectively zero timeout) is configured.
 *
 * @param logTable the log table definition; must have a log-date field when a timeout is set
 * @throws KettleException when the table has a timeout but no log-date field, or the delete fails
 */
public void cleanupLogRecords(LogTableInterface logTable) throws KettleException {
try {
// The timeout is variable-substituted and parsed leniently; 0.0 means "not set".
double timeout = Const.toDouble( Const.trim( environmentSubstitute( logTable.getTimeoutInDays())), 0.0 );
if (timeout>0.000001) {
// The timeout has to be at least a few seconds, otherwise we don't bother
String schemaTable = databaseMeta.getSchemaTableCombination(logTable.getSchemaName(), logTable.getTableName());
// The log date field
LogTableField logField = logTable.getLogDateField();
if (logField!=null) {
String sql = "DELETE FROM "+schemaTable+" WHERE "+databaseMeta.quoteField(logField.getFieldName())+" < ?"; // $NON-NLS$1
// Now calculate the date...
long now = System.currentTimeMillis();
long limit = now - Math.round(timeout*24*60*60*1000);
RowMetaAndData row = new RowMetaAndData();
row.addValue(logField.getFieldName(), ValueMetaInterface.TYPE_DATE, new Date(limit));
execStatement(sql, row.getRowMeta(), row.getData());
} else {
throw new KettleException(BaseMessages.getString(PKG, "Database.Exception.LogTimeoutDefinedOnTableWithoutLogField", logTable.getTableName()));
}
}
} catch(Exception e) {
throw new KettleDatabaseException(BaseMessages.getString(PKG, "Database.Exception.UnableToCleanUpOlderRecordsFromLogTable", logTable.getTableName()), e);
}
}
/**
 * Looks up the most recent successful log record (status 'end', zero errors) for the given
 * transformation or job and returns its ENDDATE, DEPDATE and STARTDATE columns.
 *
 * @param logtable the log table to query (already qualified/quoted by the caller)
 * @param name the transformation or job name to match
 * @param job true to match on the JOBNAME column, false to match on TRANSNAME
 * @param status unused here; the query hard-codes STATUS = 'end' -- TODO confirm intended
 * @return the first matching row, or null when there is none
 * @throws KettleDatabaseException when the query fails.
 *         NOTE(review): if executeQuery throws, pstmt is not closed on that path.
 */
public Object[] getLastLogDate( String logtable, String name, boolean job, LogStatus status ) throws KettleDatabaseException
{
Object[] row = null;
String jobtrans = job?databaseMeta.quoteField("JOBNAME"):databaseMeta.quoteField("TRANSNAME");
String sql = "";
sql+=" SELECT "+databaseMeta.quoteField("ENDDATE")+", "+databaseMeta.quoteField("DEPDATE")+", "+databaseMeta.quoteField("STARTDATE");
sql+=" FROM "+logtable;
sql+=" WHERE "+databaseMeta.quoteField("ERRORS")+" = 0";
sql+=" AND "+databaseMeta.quoteField("STATUS")+" = 'end'";
sql+=" AND "+jobtrans+" = ?";
sql+=" ORDER BY "+databaseMeta.quoteField("LOGDATE")+" DESC, "+databaseMeta.quoteField("ENDDATE")+" DESC";
try
{
pstmt = connection.prepareStatement(databaseMeta.stripCR(sql));
// Bind the transformation/job name as the single parameter.
RowMetaInterface r = new RowMeta();
r.addValueMeta( new ValueMeta("TRANSNAME", ValueMetaInterface.TYPE_STRING));
setValues(r, new Object[] { name });
ResultSet res = pstmt.executeQuery();
if (res!=null)
{
rowMeta = getRowInfo(res.getMetaData(), false, false);
row = getRow(res);
res.close();
}
pstmt.close(); pstmt=null;
}
catch(SQLException ex)
{
throw new KettleDatabaseException("Unable to obtain last logdate from table "+logtable, ex);
}
return row;
}
/**
 * Returns the next value of a simulated sequence on the given table column in the default
 * schema. See the schema-aware overload for details.
 *
 * @param counters cache of counters keyed by "table.column"; may be null
 * @param tableName the table holding the column
 * @param val_key the column to generate values for
 * @return the next value
 * @throws KettleDatabaseException when the current maximum cannot be determined
 */
public synchronized Long getNextValue(Hashtable<String,Counter> counters, String tableName, String val_key) throws KettleDatabaseException
{
return getNextValue(counters, null, tableName, val_key);
}
/**
 * Returns the next value of a simulated sequence on the given table column. The first call
 * per column seeds an in-memory Counter with MAX(column)+1 read from the table; subsequent
 * calls increment that counter without touching the database. When the counters map is null,
 * no counter is cached and every call re-reads the maximum.
 *
 * @param counters cache of counters keyed by "schema.table.column"; may be null
 * @param schemaName the schema, or null for the default
 * @param tableName the table holding the column
 * @param val_key the column to generate values for
 * @return the next value
 * @throws KettleDatabaseException when the current maximum cannot be determined
 */
public synchronized Long getNextValue(Hashtable<String,Counter> counters, String schemaName, String tableName, String val_key) throws KettleDatabaseException
{
Long nextValue = null;
String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
String lookup = schemaTable+"."+databaseMeta.quoteField(val_key);
// Try to find the previous sequence value...
Counter counter = null;
if (counters!=null) counter=counters.get(lookup);
if (counter==null)
{
RowMetaAndData rmad = getOneRow("SELECT MAX("+databaseMeta.quoteField(val_key)+") FROM "+schemaTable);
if (rmad!=null)
{
long previous;
try
{
Long tmp = rmad.getRowMeta().getInteger(rmad.getData(), 0);
// A "select max(x)" on a table with no matching rows will return null.
if ( tmp != null )
previous = tmp.longValue();
else
previous = 0L;
}
catch (KettleValueException e)
{
throw new KettleDatabaseException("Error getting the first long value from the max value returned from table : "+schemaTable);
}
counter = new Counter(previous+1, 1);
nextValue = Long.valueOf( counter.next() );
if (counters!=null) counters.put(lookup, counter);
}
else
{
throw new KettleDatabaseException("Couldn't find maximum key value from table "+schemaTable);
}
}
else
{
nextValue = Long.valueOf( counter.next() );
}
return nextValue;
}
/**
 * @return the name of the database connection, or "-" when no database metadata is available.
 */
@Override
public String toString()
{
return databaseMeta != null ? databaseMeta.getName() : "-";
}
/**
 * @param table_name the table name to check
 * @return true when the database dialect considers the given table a system table
 */
public boolean isSystemTable(String table_name)
{
return databaseMeta.isSystemTable(table_name);
}
/** Reads the result of an SQL query into an ArrayList
 *
 * @param sql The SQL to launch
 * @param limit <=0 means unlimited, otherwise this specifies the maximum number of rows read.
 * @return An ArrayList of rows.
 * @throws KettleDatabaseException if something goes wrong.
 */
public List<Object[]> getRows(String sql, int limit) throws KettleDatabaseException
{
// Delegate with no progress monitor.
return getRows(sql, limit, null);
}
/** Reads the result of an SQL query into an ArrayList
 *
 * @param sql The SQL to launch
 * @param limit <=0 means unlimited, otherwise this specifies the maximum number of rows read.
 * @param monitor The progress monitor to update while getting the rows.
 * @return An ArrayList of rows.
 * @throws KettleDatabaseException if something goes wrong.
 */
public List<Object[]> getRows(String sql, int limit, ProgressMonitorListener monitor) throws KettleDatabaseException
{
if (monitor!=null) monitor.setTaskName("Opening query...");
ResultSet rset = openQuery(sql);
// The result-set variant does the reading, limiting and monitor updates.
return getRows(rset, limit, monitor);
}
/** Reads the result of a ResultSet into an ArrayList
 *
 * @param rset the ResultSet to read out
 * @param limit <=0 means unlimited, otherwise this specifies the maximum number of rows read.
 * @param monitor The progress monitor to update while getting the rows.
 * @return An ArrayList of rows.
 * @throws KettleDatabaseException if something goes wrong.
 */
public List<Object[]> getRows(ResultSet rset, int limit, ProgressMonitorListener monitor) throws KettleDatabaseException
{
List<Object[]> result = new ArrayList<Object[]>();
if (rset==null)
{
// Nothing to read: return the empty list, as before.
return result;
}
try
{
if (monitor!=null && limit>0) monitor.beginTask("Reading rows...", limit);
int rowCount = 0;
while (limit<=0 || rowCount<limit)
{
Object[] row = getRow(rset);
if (row!=null)
{
result.add(row);
rowCount++;
}
// The monitor tick also fires on the terminating (null-row) iteration,
// matching the historic behavior.
if (monitor!=null && limit>0) monitor.worked(1);
if (row==null) break; // end of the result set
}
closeQuery(rset);
if (monitor!=null) monitor.done();
return result;
}
catch(Exception e)
{
throw new KettleDatabaseException("Unable to get list of rows from ResultSet : ", e);
}
}
/**
 * Get the first rows from a table (for preview) without progress reporting.
 * @param table_name The table name (or schema/table combination): this needs to be quoted properly
 * @param limit <=0 means unlimited, otherwise the maximum number of rows read
 * @return An ArrayList of rows.
 * @throws KettleDatabaseException in case something goes wrong
 */
public List<Object[]> getFirstRows(String table_name, int limit) throws KettleDatabaseException
{
return getFirstRows(table_name, limit, null);
}
/**
 * Get the first rows from a table (for preview)
 * @param table_name The table name (or schema/table combination): this needs to be quoted properly
 * @param limit limit <=0 means unlimited, otherwise this specifies the maximum number of rows read.
 * @param monitor The progress monitor to update while getting the rows.
 * @return An ArrayList of rows.
 * @throws KettleDatabaseException in case something goes wrong
 */
public List<Object[]> getFirstRows(String table_name, int limit, ProgressMonitorListener monitor) throws KettleDatabaseException
{
String sql = "SELECT";
if (databaseMeta.getDatabaseInterface() instanceof NeoviewDatabaseMeta)
{
// NOTE(review): this prefix is emitted even when limit<=0 (unlimited) -- confirm intended.
sql+=" [FIRST " + limit +"]";
}
else if (databaseMeta.getDatabaseInterface() instanceof SybaseIQDatabaseMeta) // improve support Sybase IQ
{
// NOTE(review): Sybase IQ gets TOP here and possibly a limit clause below too -- confirm no duplication.
sql+=" TOP " + limit +" ";
}
sql += " * FROM "+table_name;
if (limit>0)
{
sql+=databaseMeta.getLimitClause(limit);
}
return getRows(sql, limit, monitor);
}
/**
 * @return the row metadata cached by the most recent query/lookup (set via getRowInfo());
 *         may be null before any query has been executed
 */
public RowMetaInterface getReturnRowMeta()
{
return rowMeta;
}
/**
 * Returns the table types supported by the database (e.g. TABLE, VIEW, SYSTEM TABLE),
 * as reported by the JDBC driver.
 *
 * @return the supported table types
 * @throws KettleDatabaseException when the types cannot be read from the database metadata
 */
public String[] getTableTypes() throws KettleDatabaseException
{
ResultSet rstt = null;
try
{
ArrayList<String> types = new ArrayList<String>();
rstt = getDatabaseMetaData().getTableTypes();
while(rstt.next())
{
String ttype = rstt.getString("TABLE_TYPE");
types.add(ttype);
}
return types.toArray(new String[types.size()]);
}
catch(SQLException e)
{
throw new KettleDatabaseException("Unable to get table types from database!", e);
}
finally
{
// BUGFIX: the result set used to be left open, leaking a cursor on every call.
if (rstt!=null)
{
try { rstt.close(); } catch(SQLException ignored) { /* best-effort close */ }
}
}
}
/**
 * Returns the table names in the default schema, without schema prefix.
 * @return the table names
 * @throws KettleDatabaseException when the names cannot be read
 */
public String[] getTablenames() throws KettleDatabaseException
{
return getTablenames(false);
}
/**
 * Returns the table names in the default schema.
 * @param includeSchema when true the names are returned as quoted schema.table combinations
 * @return the table names
 * @throws KettleDatabaseException when the names cannot be read
 */
public String[] getTablenames(boolean includeSchema) throws KettleDatabaseException
{
return getTablenames(null, includeSchema);
}
/**
 * Returns the table names visible in the given schema. When no schema is given and the
 * database requires one for table listings, the (upper-cased) user name is used.
 *
 * @param schemanamein the schema to list, or null for the default
 * @param includeSchema when true the names are returned as quoted schema.table combinations
 * @return the table names (possibly partial on SQL errors, which are logged but not thrown)
 * @throws KettleDatabaseException when the result set cannot be closed
 */
public String[] getTablenames(String schemanamein, boolean includeSchema) throws KettleDatabaseException
{
String schemaname=schemanamein;
if(schemaname==null) {
if (databaseMeta.useSchemaNameForTableList()) schemaname = environmentSubstitute(databaseMeta.getUsername()).toUpperCase();
}
List<String> names = new ArrayList<String>();
ResultSet alltables=null;
try
{
alltables = getDatabaseMetaData().getTables(null, schemaname, null, databaseMeta.getTableTypes() );
while (alltables.next())
{
// due to PDI-743 with ODBC and MS SQL Server the order is changed and try/catch included for safety
String cat = "";
try {
cat = alltables.getString("TABLE_CAT");
} catch (Exception e) {
// ignore
if(log.isDebug()) log.logDebug("Error getting tables for field TABLE_CAT (ignored): "+e.toString());
}
String schema = "";
try {
schema = alltables.getString("TABLE_SCHEM");
} catch (Exception e) {
// ignore
if(log.isDebug()) log.logDebug("Error getting tables for field TABLE_SCHEM (ignored): "+e.toString());
}
if (Const.isEmpty(schema)) schema = cat;
String table = alltables.getString("TABLE_NAME");
String schemaTable;
if (includeSchema) schemaTable = databaseMeta.getQuotedSchemaTableCombination(schema, table);
else schemaTable = table;
if (log.isRowLevel()) log.logRowlevel(toString(), "got table from meta-data: "+schemaTable);
names.add(schemaTable);
}
}
catch(SQLException e)
{
// Keep the historic best-effort behavior (return what we have) but don't lose the cause:
// the stack trace used to be silently dropped.
log.logError("Error getting tablenames from schema ["+schemaname+"]");
log.logError(Const.getStackTracker(e));
}
finally
{
try
{
if (alltables!=null) alltables.close();
}
catch(SQLException e)
{
// Message corrected: this method lists tables, not views.
throw new KettleDatabaseException("Error closing resultset after getting tablenames from schema ["+schemaname+"]", e);
}
}
if(log.isDetailed()) log.logDetailed("read :"+names.size()+" table names from db meta-data.");
return names.toArray(new String[names.size()]);
}
/**
 * Returns the view names in the default schema, without schema prefix.
 * @return the view names (empty when the database does not support views)
 * @throws KettleDatabaseException when the views cannot be read
 */
public String[] getViews() throws KettleDatabaseException
{
return getViews(false);
}
/**
 * Returns the view names in the default schema.
 * @param includeSchema when true the names are returned as quoted schema.view combinations
 * @return the view names (empty when the database does not support views)
 * @throws KettleDatabaseException when the views cannot be read
 */
public String[] getViews(boolean includeSchema) throws KettleDatabaseException
{
return getViews(null, includeSchema);
}
/**
 * Returns the view names visible in the given schema. When no schema is given and the
 * database requires one for table listings, the (upper-cased) user name is used.
 *
 * @param schemanamein the schema to list, or null for the default
 * @param includeSchema when true the names are returned as quoted schema.view combinations
 * @return the view names (empty when the database does not support views)
 * @throws KettleDatabaseException when the views cannot be read or the result set fails to close
 */
public String[] getViews(String schemanamein, boolean includeSchema) throws KettleDatabaseException
{
if (!databaseMeta.supportsViews()) return new String[] {};
String schemaname = schemanamein;
if(schemaname==null) {
if (databaseMeta.useSchemaNameForTableList()) schemaname = environmentSubstitute(databaseMeta.getUsername()).toUpperCase();
}
ArrayList<String> names = new ArrayList<String>();
ResultSet alltables=null;
try
{
// BUGFIX: use getDatabaseMetaData() (lazy-initializing) instead of the raw dbmd field,
// which is null until the metadata has been fetched at least once.
alltables = getDatabaseMetaData().getTables(null, schemaname, null, databaseMeta.getViewTypes() );
while (alltables.next())
{
// due to PDI-743 with ODBC and MS SQL Server the order is changed and try/catch included for safety
String cat = "";
try {
cat = alltables.getString("TABLE_CAT");
} catch (Exception e) {
// ignore
if(log.isDebug()) log.logDebug("Error getting views for field TABLE_CAT (ignored): "+e.toString());
}
String schema = "";
try {
schema = alltables.getString("TABLE_SCHEM");
} catch (Exception e) {
// ignore
if(log.isDebug()) log.logDebug("Error getting views for field TABLE_SCHEM (ignored): "+e.toString());
}
if (Const.isEmpty(schema)) schema = cat;
String table = alltables.getString("TABLE_NAME");
String schemaTable;
if (includeSchema) schemaTable = databaseMeta.getQuotedSchemaTableCombination(schema, table);
else schemaTable = table;
if (log.isRowLevel()) log.logRowlevel(toString(), "got view from meta-data: "+schemaTable);
names.add(schemaTable);
}
}
catch(SQLException e)
{
throw new KettleDatabaseException("Error getting views from schema ["+schemaname+"]", e);
}
finally
{
try
{
if (alltables!=null) alltables.close();
}
catch(SQLException e)
{
throw new KettleDatabaseException("Error closing resultset after getting views from schema ["+schemaname+"]", e);
}
}
if(log.isDetailed()) log.logDetailed("read :"+names.size()+" views from db meta-data.");
return names.toArray(new String[names.size()]);
}
/**
 * Returns the synonym names in the default schema, without schema prefix.
 * @return the synonym names (empty when the database does not support synonyms)
 * @throws KettleDatabaseException when the synonyms cannot be read
 */
public String[] getSynonyms() throws KettleDatabaseException
{
return getSynonyms(false);
}
/**
 * Returns the synonym names in the default schema.
 * @param includeSchema when true the names are returned as quoted schema.synonym combinations
 * @return the synonym names (empty when the database does not support synonyms)
 * @throws KettleDatabaseException when the synonyms cannot be read
 */
public String[] getSynonyms(boolean includeSchema) throws KettleDatabaseException
{
return getSynonyms(null,includeSchema);
}
/**
 * Returns the synonym names visible in the given schema. When no schema is given and the
 * database requires one for table listings, the (upper-cased) user name is used.
 *
 * @param schemanamein the schema to list, or null for the default
 * @param includeSchema when true the names are returned as quoted schema.synonym combinations
 * @return the synonym names (empty when the database does not support synonyms)
 * @throws KettleDatabaseException when the synonyms cannot be read or the result set fails to close
 */
public String[] getSynonyms(String schemanamein, boolean includeSchema) throws KettleDatabaseException
{
if (!databaseMeta.supportsSynonyms()) return new String[] {};
String schemaname = schemanamein;
if(schemaname==null) {
if (databaseMeta.useSchemaNameForTableList()) schemaname = environmentSubstitute(databaseMeta.getUsername()).toUpperCase();
}
ArrayList<String> names = new ArrayList<String>();
ResultSet alltables=null;
try
{
// BUGFIX: use getDatabaseMetaData() (lazy-initializing) instead of the raw dbmd field,
// which is null until the metadata has been fetched at least once.
alltables = getDatabaseMetaData().getTables(null, schemaname, null, databaseMeta.getSynonymTypes() );
while (alltables.next())
{
// due to PDI-743 with ODBC and MS SQL Server the order is changed and try/catch included for safety
String cat = "";
try {
cat = alltables.getString("TABLE_CAT");
} catch (Exception e) {
// ignore
if(log.isDebug()) log.logDebug("Error getting synonyms for field TABLE_CAT (ignored): "+e.toString());
}
String schema = "";
try {
schema = alltables.getString("TABLE_SCHEM");
} catch (Exception e) {
// ignore
if(log.isDebug()) log.logDebug("Error getting synonyms for field TABLE_SCHEM (ignored): "+e.toString());
}
if (Const.isEmpty(schema)) schema = cat;
String table = alltables.getString("TABLE_NAME");
String schemaTable;
if (includeSchema) schemaTable = databaseMeta.getQuotedSchemaTableCombination(schema, table);
else schemaTable = table;
// Log message corrected: these are synonyms, not views.
if (log.isRowLevel()) log.logRowlevel(toString(), "got synonym from meta-data: "+schemaTable);
names.add(schemaTable);
}
}
catch(SQLException e)
{
throw new KettleDatabaseException("Error getting synonyms from schema ["+schemaname+"]", e);
}
finally
{
try
{
if (alltables!=null) alltables.close();
}
catch(SQLException e)
{
throw new KettleDatabaseException("Error closing resultset after getting synonyms from schema ["+schemaname+"]", e);
}
}
if(log.isDetailed()) log.logDetailed("read :"+names.size()+" synonyms from db meta-data.");
return names.toArray(new String[names.size()]);
}
/**
 * Returns the schema names known to the database, as reported by the JDBC driver.
 *
 * @return the schema names
 * @throws KettleDatabaseException when the schemas cannot be read or the result set fails to close
 */
public String[] getSchemas() throws KettleDatabaseException
{
ArrayList<String> schemaList = new ArrayList<String>();
ResultSet schemaResultSet = null;
try
{
schemaResultSet = getDatabaseMetaData().getSchemas();
while (schemaResultSet!=null && schemaResultSet.next())
{
// Column 1 of the getSchemas() result holds the schema name.
schemaList.add(schemaResultSet.getString(1));
}
}
catch(SQLException e)
{
throw new KettleDatabaseException("Error getting schemas!", e);
}
finally
{
if (schemaResultSet!=null)
{
try
{
schemaResultSet.close();
}
catch(SQLException e)
{
throw new KettleDatabaseException("Error closing resultset after getting schemas!", e);
}
}
}
if(log.isDetailed()) log.logDetailed("read :"+schemaList.size()+" schemas from db meta-data.");
return schemaList.toArray(new String[schemaList.size()]);
}
/**
 * Returns the catalog names known to the database, as reported by the JDBC driver.
 *
 * @return the catalog names
 * @throws KettleDatabaseException when the catalogs cannot be read or the result set fails to close
 */
public String[] getCatalogs() throws KettleDatabaseException
{
ArrayList<String> catalogList = new ArrayList<String>();
ResultSet catalogResultSet = null;
try
{
catalogResultSet = getDatabaseMetaData().getCatalogs();
while (catalogResultSet!=null && catalogResultSet.next())
{
// Column 1 of the getCatalogs() result holds the catalog name.
catalogList.add(catalogResultSet.getString(1));
}
}
catch(SQLException e)
{
throw new KettleDatabaseException("Error getting catalogs!", e);
}
finally
{
if (catalogResultSet!=null)
{
try
{
catalogResultSet.close();
}
catch(SQLException e)
{
throw new KettleDatabaseException("Error closing resultset after getting catalogs!", e);
}
}
}
if(log.isDetailed()) log.logDetailed(toString(), "read :"+catalogList.size()+" catalogs from db meta-data.");
return catalogList.toArray(new String[catalogList.size()]);
}
/**
 * Returns the names of the stored procedures available in this database.
 * When the database dialect supplies a dedicated query (getSQLListOfProcedures())
 * that query is used; otherwise the JDBC meta-data is consulted and each name is
 * prefixed with its catalog (or, failing that, its schema).
 *
 * @return an array of procedure names, never null
 * @throws KettleDatabaseException when the list could not be retrieved
 */
public String[] getProcedures() throws KettleDatabaseException
{
  String sql = databaseMeta.getSQLListOfProcedures();
  if (sql!=null)
  {
    List<Object[]> procs = getRows(sql, 1000);
    String[] str = new String[procs.size()];
    for (int i=0;i<procs.size();i++)
    {
      // The first column of the dialect-specific query holds the procedure name.
      str[i] = procs.get(i)[0].toString();
    }
    return str;
  }
  else
  {
    ResultSet rs = null;
    try
    {
      DatabaseMetaData dbmd = getDatabaseMetaData();
      rs = dbmd.getProcedures(null, null, null);
      List<Object[]> rows = getRows(rs, 0, null);
      String[] result = new String[rows.size()];
      for (int i=0;i<rows.size();i++)
      {
        Object[] row = rows.get(i);
        String procCatalog = rowMeta.getString(row, "PROCEDURE_CAT", null);
        // Fixed: the JDBC DatabaseMetaData.getProcedures() column is named
        // "PROCEDURE_SCHEM" (not "PROCEDURE_SCHEMA"), so the old lookup always
        // returned the null default and the schema prefix was never applied.
        String procSchema = rowMeta.getString(row, "PROCEDURE_SCHEM", null);
        String procName = rowMeta.getString(row, "PROCEDURE_NAME", "");

        String name = "";
        if (procCatalog!=null) name+=procCatalog+".";
        else if (procSchema!=null) name+=procSchema+".";

        name+=procName;

        result[i] = name;
      }
      return result;
    }
    catch(Exception e)
    {
      throw new KettleDatabaseException("Unable to get list of procedures from database meta-data: ", e);
    }
    finally
    {
      // Best effort close; a failure here should not mask the real result/exception.
      if (rs!=null) try { rs.close(); } catch(Exception ignored) {}
    }
  }
}
/**
 * @return true when no positive commit size was configured (commitsize<=0),
 *         meaning every statement is committed immediately (auto-commit mode).
 */
public boolean isAutoCommit()
{
return commitsize<=0;
}
/**
 * @return Returns the databaseMeta (the connection description this Database object was built from).
 */
public DatabaseMeta getDatabaseMeta()
{
return databaseMeta;
}
/**
 * Locks the given tables in the database for write operations.
 *
 * @param tableNames The tables to lock
 * @throws KettleDatabaseException in case the lock statement(s) could not be executed
 */
public void lockTables(String tableNames[]) throws KettleDatabaseException
{
  if (Const.isEmpty(tableNames)) {
    return;
  }

  // Quote every table name before asking the dialect for the lock statement...
  String[] quoted = new String[tableNames.length];
  int index = 0;
  for (String tableName : tableNames) {
    quoted[index++] = databaseMeta.getQuotedSchemaTableCombination(null, tableName);
  }

  // Get the SQL to lock the (quoted) tables and execute it when the dialect supports locking.
  String lockSql = databaseMeta.getSQLLockTables(quoted);
  if (lockSql != null) {
    execStatements(lockSql);
  }
}
/**
 * Unlocks the given tables in the database after write operations.
 *
 * @param tableNames The tables to unlock
 * @throws KettleDatabaseException in case the unlock statement could not be executed
 */
public void unlockTables(String tableNames[]) throws KettleDatabaseException
{
  if (Const.isEmpty(tableNames)) {
    return;
  }

  // Quote every table name before asking the dialect for the unlock statement...
  String[] quoted = new String[tableNames.length];
  int index = 0;
  for (String tableName : tableNames) {
    quoted[index++] = databaseMeta.getQuotedSchemaTableCombination(null, tableName);
  }

  // Get the SQL to unlock the (quoted) tables.
  // NOTE(review): this calls execStatement() (single) while lockTables() calls
  // execStatements() (script) -- presumably intentional, but worth confirming.
  String unlockSql = databaseMeta.getSQLUnlockTables(quoted);
  if (unlockSql != null) {
    execStatement(unlockSql);
  }
}
/**
 * @return the number of times this connection was opened (reference count for shared connections)
 */
public int getOpened()
{
return opened;
}
/**
 * @param opened the open-count to set
 */
public void setOpened(int opened)
{
this.opened = opened;
}
/**
 * @return the connectionGroup this database connection belongs to (null/empty when not shared)
 */
public String getConnectionGroup()
{
return connectionGroup;
}
/**
 * @param connectionGroup the connectionGroup to set
 */
public void setConnectionGroup(String connectionGroup)
{
this.connectionGroup = connectionGroup;
}
/**
 * @return the partitionId of the database partition this connection points to
 */
public String getPartitionId()
{
return partitionId;
}
/**
 * @param partitionId the partitionId to set
 */
public void setPartitionId(String partitionId)
{
this.partitionId = partitionId;
}
/**
 * @return the copy number of the step/entry that owns this connection
 */
public int getCopy()
{
return copy;
}
/**
 * @param copy the copy number to set
 */
public void setCopy(int copy)
{
this.copy = copy;
}
// The methods below implement the VariableSpace contract by delegating to the
// internal "variables" space.
/** Copies all variables from the given space into this one. */
public void copyVariablesFrom(VariableSpace space)
{
variables.copyVariablesFrom(space);
}
/** Substitutes ${VAR} style variables in the given string. */
public String environmentSubstitute(String aString)
{
return variables.environmentSubstitute(aString);
}
/** Substitutes ${VAR} style variables in every element of the given array. */
public String[] environmentSubstitute(String aString[])
{
return variables.environmentSubstitute(aString);
}
/** @return the parent variable space, or null when there is none. */
public VariableSpace getParentVariableSpace()
{
return variables.getParentVariableSpace();
}
/** Sets the parent variable space. */
public void setParentVariableSpace(VariableSpace parent)
{
variables.setParentVariableSpace(parent);
}
/** @return the value of the variable, or the default when it is not set. */
public String getVariable(String variableName, String defaultValue)
{
return variables.getVariable(variableName, defaultValue);
}
/** @return the value of the variable, or null when it is not set. */
public String getVariable(String variableName)
{
return variables.getVariable(variableName);
}
/**
 * Resolves the given variable expression and interprets the result as a boolean
 * (via ValueMeta.convertStringToBoolean); falls back to defaultValue when the
 * expression is empty or resolves to an empty string.
 */
public boolean getBooleanValueOfVariable(String variableName, boolean defaultValue) {
if (!Const.isEmpty(variableName))
{
String value = environmentSubstitute(variableName);
if (!Const.isEmpty(value))
{
return ValueMeta.convertStringToBoolean(value);
}
}
return defaultValue;
}
/** (Re-)initializes this variable space from the given parent space. */
public void initializeVariablesFrom(VariableSpace parent)
{
variables.initializeVariablesFrom(parent);
}
/** @return the names of all defined variables. */
public String[] listVariables()
{
return variables.listVariables();
}
/** Sets a variable to the given value. */
public void setVariable(String variableName, String variableValue)
{
variables.setVariable(variableName, variableValue);
}
/**
 * Replaces this object's variable space with the given one so both share state.
 */
public void shareVariablesWith(VariableSpace space)
{
variables = space;
// Also share the variables with the meta data object
// Make sure it's not the databaseMeta object itself. We would get an infinite loop in that case.
if (space!=databaseMeta) databaseMeta.shareVariablesWith(space);
}
/** Injects the given key/value pairs as variables into this space. */
public void injectVariables(Map<String,String> prop)
{
variables.injectVariables(prop);
}
/**
 * Executes the previously prepared callable statement (cstmt) and collects the
 * function result (if any) followed by all OUT/INOUT parameter values into a row.
 *
 * @param arg the names of the procedure arguments
 * @param argdir the direction of each argument: "IN", "OUT" or "INOUT"
 * @param argtype the Kettle value type (ValueMetaInterface.TYPE_*) of each argument
 * @param resultname the name of the return value; null or empty when the procedure
 *        has no return value
 * @param resulttype the Kettle value type of the return value
 * @return the row containing the return value (first, when present) and the
 *         OUT/INOUT parameter values in declaration order
 * @throws KettleDatabaseException when the call or the value retrieval fails
 */
public RowMetaAndData callProcedure(String arg[], String argdir[], int argtype[],
    String resultname, int resulttype) throws KettleDatabaseException {
  RowMetaAndData ret;
  try {
    cstmt.execute();

    ret = new RowMetaAndData();
    int pos = 1;
    if (resultname != null && resultname.length() != 0) {
      // The function return value is registered at parameter index 1.
      ValueMeta vMeta = new ValueMeta(resultname, resulttype);
      ret.addValue(vMeta, readCallableValue(pos, resulttype));
      pos++;
    }
    for (int i = 0; i < arg.length; i++) {
      if (argdir[i].equalsIgnoreCase("OUT") || argdir[i].equalsIgnoreCase("INOUT")) {
        // OUT/INOUT parameters follow the (optional) return value.
        ValueMeta vMeta = new ValueMeta(arg[i], argtype[i]);
        ret.addValue(vMeta, readCallableValue(pos + i, argtype[i]));
      }
    }

    return ret;
  } catch (SQLException ex) {
    throw new KettleDatabaseException("Unable to call procedure", ex);
  }
}

/**
 * Reads a single value from the callable statement at the given 1-based parameter
 * index, converted according to the Kettle value type. Extracted to remove the
 * duplicated switch that previously existed for the return value and for each
 * OUT/INOUT parameter.
 *
 * @return the converted value, or null for unknown types or SQL NULLs
 */
private Object readCallableValue(int index, int valueType) throws SQLException {
  switch (valueType) {
    case ValueMetaInterface.TYPE_BOOLEAN:
      return Boolean.valueOf(cstmt.getBoolean(index));
    case ValueMetaInterface.TYPE_NUMBER:
      return new Double(cstmt.getDouble(index));
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return cstmt.getBigDecimal(index);
    case ValueMetaInterface.TYPE_INTEGER:
      return Long.valueOf(cstmt.getLong(index));
    case ValueMetaInterface.TYPE_STRING:
      return cstmt.getString(index);
    case ValueMetaInterface.TYPE_BINARY:
      if (databaseMeta.supportsGetBlob()) {
        Blob blob = cstmt.getBlob(index);
        return blob == null ? null : blob.getBytes(1L, (int) blob.length());
      }
      return cstmt.getBytes(index);
    case ValueMetaInterface.TYPE_DATE:
      // Some databases only deliver dates reliably as timestamps.
      if (databaseMeta.supportsTimeStampToDateConversion()) {
        return cstmt.getTimestamp(index);
      }
      return cstmt.getDate(index);
    default:
      return null; // unknown type: same as the original code, which left v == null
  }
}
/**
 * Generates the DDL (CREATE TABLE ...) statement for the given table and field layout.
 *
 * @param tableName The table to create
 * @param fields the row meta-data describing the columns
 * @return the CREATE TABLE statement
 * @throws KettleDatabaseException when the statement could not be generated
 */
public String getDDLCreationTable(String tableName, RowMetaInterface fields) throws KettleDatabaseException
{
  // First, escape any field names that clash with reserved words of this dialect...
  databaseMeta.quoteReservedWords(fields);

  // No technical key is used here: quoteField(null) yields the dialect's "no field" value.
  String quotedTk = databaseMeta.quoteField(null);
  return getCreateTableStatement(tableName, fields, quotedTk, false, null, true);
}
/**
 * Return the SQL statement to empty a table.
 * @param schema The schema (may be null)
 * @param tablename The table to empty
 * @return a dialect-specific TRUNCATE statement, or a DELETE FROM statement when this
 *         connection is part of a connection group
 * @throws KettleDatabaseException
 */
public String getDDLTruncateTable(String schema, String tablename) throws KettleDatabaseException
{
if (Const.isEmpty(connectionGroup))
{
return(databaseMeta.getTruncateTableStatement(schema, tablename));
}
else
{
// NOTE(review): inside a connection group we fall back to DELETE FROM, presumably
// because TRUNCATE is not transactional on all databases -- confirm.
return("DELETE FROM "+databaseMeta.getQuotedSchemaTableCombination(schema, tablename));
}
}
/**
 * Builds a SQL INSERT statement (INSERT INTO table(col, ...) VALUES (...)) for one row of data.
 *
 * @param schemaName the schema (may be null)
 * @param tableName the table to insert into
 * @param fields the row meta-data describing the columns
 * @param r the row data, positionally matching {@code fields}
 * @param dateFormat the date format to render Date values with; when empty, Oracle
 *        connections get a TO_DATE(...) expression and other databases get the
 *        default string conversion
 * @return the generated INSERT statement
 * @throws KettleDatabaseException when the statement could not be generated
 */
public String getSQLOutput(String schemaName, String tableName, RowMetaInterface fields, Object[] r, String dateFormat) throws KettleDatabaseException
{
  // StringBuilder instead of StringBuffer: this is a local, single-threaded buffer.
  StringBuilder ins = new StringBuilder(128);

  try {
    String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName);
    ins.append("INSERT INTO ").append(schemaTable).append('(');

    // now add the names in the row:
    for (int i = 0; i < fields.size(); i++) {
      if (i > 0) ins.append(", ");
      String name = fields.getValueMeta(i).getName();
      ins.append(databaseMeta.quoteField(name));
    }
    ins.append(") VALUES (");

    // Lazily created per-field formatters; only used for the Oracle TO_DATE branch below.
    java.text.SimpleDateFormat[] fieldDateFormatters = new java.text.SimpleDateFormat[fields.size()];

    // now add the values...
    for (int i = 0; i < fields.size(); i++) {
      ValueMetaInterface valueMeta = fields.getValueMeta(i);
      Object valueData = r[i];

      if (i > 0) ins.append(",");

      // Check for null values...
      if (valueMeta.isNull(valueData)) {
        ins.append("null");
      } else {
        // Normal cases...
        switch (valueMeta.getType()) {
          case ValueMetaInterface.TYPE_BOOLEAN:
          case ValueMetaInterface.TYPE_STRING:
            String string = valueMeta.getString(valueData);
            // Have the database dialect do the quoting.
            // This also adds the single quotes around the string (thanks to PostgreSQL)
            string = databaseMeta.quoteSQLString(string);
            ins.append(string);
            break;
          case ValueMetaInterface.TYPE_DATE:
            Date date = fields.getDate(r, i);
            // Braces added around the outer if/else: the original unbraced
            // "if (...) if (...) {...} else {...} else {...}" relied on
            // dangling-else parsing and was very easy to misread/break.
            if (Const.isEmpty(dateFormat)) {
              if (databaseMeta.getDatabaseInterface() instanceof OracleDatabaseMeta) {
                // Oracle does not convert date strings implicitly: wrap in TO_DATE(...).
                if (fieldDateFormatters[i] == null) {
                  fieldDateFormatters[i] = new java.text.SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
                }
                ins.append("TO_DATE('").append(fieldDateFormatters[i].format(date)).append("', 'YYYY/MM/DD HH24:MI:SS')");
              } else {
                ins.append("'" + fields.getString(r, i) + "'");
              }
            } else {
              try {
                java.text.SimpleDateFormat formatter = new java.text.SimpleDateFormat(dateFormat);
                ins.append("'" + formatter.format(fields.getDate(r, i)) + "'");
              } catch (Exception e) {
                throw new KettleDatabaseException("Error : ", e);
              }
            }
            break;
          default:
            ins.append(fields.getString(r, i));
            break;
        }
      }
    }
    ins.append(')');
  } catch (Exception e) {
    throw new KettleDatabaseException(e);
  }
  return ins.toString();
}
/** Creates an unnamed savepoint on the underlying JDBC connection. */
public Savepoint setSavepoint() throws KettleDatabaseException {
try {
return connection.setSavepoint();
} catch (SQLException e) {
throw new KettleDatabaseException(BaseMessages.getString(PKG, "Database.Exception.UnableToSetSavepoint"), e);
}
}
/** Creates a named savepoint on the underlying JDBC connection. */
public Savepoint setSavepoint(String savePointName) throws KettleDatabaseException {
try {
return connection.setSavepoint(savePointName);
} catch (SQLException e) {
throw new KettleDatabaseException(BaseMessages.getString(PKG, "Database.Exception.UnableToSetSavepointName", savePointName), e);
}
}
/** Releases (discards) a previously created savepoint. */
public void releaseSavepoint(Savepoint savepoint) throws KettleDatabaseException {
try {
connection.releaseSavepoint(savepoint);
} catch (SQLException e) {
throw new KettleDatabaseException(BaseMessages.getString(PKG, "Database.Exception.UnableToReleaseSavepoint"), e);
}
}
/** Rolls the transaction back to a previously created savepoint. */
public void rollback(Savepoint savepoint) throws KettleDatabaseException {
try {
connection.rollback(savepoint);
} catch (SQLException e) {
throw new KettleDatabaseException(BaseMessages.getString(PKG, "Database.Exception.UnableToRollbackToSavepoint"), e);
}
}
/** @return the parent logging object of this database (LoggingObjectInterface contract). */
public Object getParentObject() {
return parentLoggingObject;
}
/**
 * Return primary key column names ...
 * @param tablename the table to inspect
 * @return the distinct primary key column names; empty when the table has no primary
 *         key or when the meta-data lookup failed
 * @throws KettleDatabaseException only when the result set cannot be closed afterwards
 */
public String[] getPrimaryKeyColumnNames(String tablename) throws KettleDatabaseException {
List<String> names = new ArrayList<String>();
ResultSet allkeys=null;
try {
allkeys=getDatabaseMetaData().getPrimaryKeys(null, null, tablename);
while (allkeys.next()) {
String keyname=allkeys.getString("PK_NAME");
String col_name=allkeys.getString("COLUMN_NAME");
if(!names.contains(col_name)) names.add(col_name);
if (log.isRowLevel()) log.logRowlevel(toString(), "getting key : "+keyname + " on column "+col_name);
}
}
catch(SQLException e) {
// NOTE(review): the exception is swallowed after logging (and the log line does not
// include the exception), so callers silently receive an empty/partial list -- confirm
// this best-effort behavior is intended.
log.logError(toString(), "Error getting primary keys columns from table ["+tablename+"]");
}
finally {
try {
if (allkeys!=null) allkeys.close();
} catch(SQLException e) {
throw new KettleDatabaseException("Error closing connection while searching primary keys in table ["+tablename+"]", e);
}
}
return names.toArray(new String[names.size()]);
}
/**
 * Return all sequence names from connection
 * @return The sequences name list, or null when the database supports sequences but the
 *         dialect provides no listing query.
 * @throws KettleDatabaseException when the database does not support sequences at all
 */
public String[] getSequences() throws KettleDatabaseException
{
if(databaseMeta.supportsSequences()) {
String sql = databaseMeta.getSQLListOfSequences();
if (sql!=null)
{
List<Object[]> seqs = getRows(sql, 0);
String[] str = new String[seqs.size()];
for (int i=0;i<seqs.size();i++)
{
// The first column of the dialect's listing query holds the sequence name.
str[i] = ((Object[])seqs.get(i))[0].toString();
}
return str;
}
}else {
// NOTE(review): this message is misleading -- supportsSequences() is not Oracle-specific.
throw new KettleDatabaseException("Sequences are only available for Oracle databases.");
}
// NOTE(review): returning null (rather than an empty array) when no listing SQL exists;
// callers must null-check.
return null;
}
// LoggingObjectInterface implementation: describes this database connection to the
// Kettle logging registry.
/** @return always null: a database connection is not backed by a file. */
public String getFilename() {
return null;
}
/** @return the id of the log channel this database logs to. */
public String getLogChannelId() {
return log.getLogChannelId();
}
/** @return the name of the database connection (from the meta-data). */
public String getObjectName() {
return databaseMeta.getName();
}
/** @return always null: connections have no copy number in the logging hierarchy. */
public String getObjectCopy() {
return null;
}
/** @return the repository object id of the connection meta-data. */
public ObjectId getObjectId() {
return databaseMeta.getObjectId();
}
/** @return the repository revision of the connection meta-data. */
public ObjectRevision getObjectRevision() {
return databaseMeta.getObjectRevision();
}
/** @return the logging object type, always DATABASE. */
public LoggingObjectType getObjectType() {
return LoggingObjectType.DATABASE;
}
/** @return the parent logging object (e.g. the step or job entry using this connection). */
public LoggingObjectInterface getParent() {
return parentLoggingObject;
}
/** @return always null: connections do not live in a repository directory. */
public RepositoryDirectory getRepositoryDirectory() {
return null;
}
/** @return the current log level of this connection's log channel. */
public LogLevel getLogLevel() {
return logLevel;
}
/** Sets the log level on this object and propagates it to the log channel. */
public void setLogLevel(LogLevel logLevel) {
this.logLevel = logLevel;
log.setLogLevel(logLevel);
}
/**
 * @return the carteObjectId
 */
public String getContainerObjectId() {
return containerObjectId;
}
/**
 * @param containerObjectId the execution container Object id to set
 */
public void setContainerObjectId(String containerObjectId) {
this.containerObjectId = containerObjectId;
}
} |
package org.eclipse.mylyn.internal.context.ui;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.StructuredViewer;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.mylyn.commons.core.StatusHandler;
import org.eclipse.mylyn.context.core.AbstractContextListener;
import org.eclipse.mylyn.context.core.AbstractContextStructureBridge;
import org.eclipse.mylyn.context.core.ContextChangeEvent;
import org.eclipse.mylyn.context.core.ContextCore;
import org.eclipse.mylyn.context.core.IInteractionElement;
import org.eclipse.mylyn.internal.context.core.ContextCorePlugin;
import org.eclipse.mylyn.internal.provisional.commons.ui.DelayedRefreshJob;
import org.eclipse.ui.ISelectionListener;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.PlatformUI;
/**
* Encapsulates the element refresh and expansion state policy for all viewers focused on context.
*
* @author Mik Kersten
* @author Shawn Minto
*/
public class FocusedViewerManager extends AbstractContextListener implements ISelectionListener {
private final CopyOnWriteArrayList<StructuredViewer> managedViewers = new CopyOnWriteArrayList<StructuredViewer>();
private final CopyOnWriteArrayList<StructuredViewer> filteredViewers = new CopyOnWriteArrayList<StructuredViewer>();
private final Map<StructuredViewer, BrowseFilteredListener> listenerMap = new HashMap<StructuredViewer, BrowseFilteredListener>();
private final Map<IWorkbenchPart, StructuredViewer> partToViewerMap = new HashMap<IWorkbenchPart, StructuredViewer>();
private final Map<StructuredViewer, FocusedViewerDelayedRefreshJob> fullRefreshJobs = new HashMap<StructuredViewer, FocusedViewerDelayedRefreshJob>();
// TODO: consider merging in order to discard minors when majors come in, see bug 209846
private final Map<StructuredViewer, FocusedViewerDelayedRefreshJob> minorRefreshJobs = new HashMap<StructuredViewer, FocusedViewerDelayedRefreshJob>();
private class FocusedViewerDelayedRefreshJob extends DelayedRefreshJob {
private boolean minor = false;
public FocusedViewerDelayedRefreshJob(StructuredViewer viewer, String name, boolean minor) {
super(viewer, name);
this.minor = minor;
}
@Override
protected void doRefresh(Object[] items) {
if (viewer == null) {
return;
} else if (viewer.getControl().isDisposed()) {
managedViewers.remove(viewer);
} else {
if (items == null || items.length == 0) {
if (!minor) {
viewer.refresh(false);
FocusedViewerManager.this.updateExpansionState(viewer, null);
} else {
try {
viewer.getControl().setRedraw(false);
viewer.refresh(true);
FocusedViewerManager.this.updateExpansionState(viewer, null);
} finally {
viewer.getControl().setRedraw(true);
}
}
} else {
if (filteredViewers.contains(viewer)) {
try {
viewer.getControl().setRedraw(false);
viewer.refresh(minor);
// prior to Mylyn 3.1 used: FocusedViewerManager.this.updateExpansionState(viewer, null);
for (Object item : items) {
Object objectToRefresh = getObjectToRefresh(item);
if (objectToRefresh != null) {
FocusedViewerManager.this.updateExpansionState(viewer, objectToRefresh);
}
}
} finally {
viewer.getControl().setRedraw(true);
}
} else { // don't need to worry about content changes
try {
viewer.getControl().setRedraw(false);
for (Object item : items) {
Object objectToRefresh = getObjectToRefresh(item);
if (objectToRefresh != null) {
viewer.update(objectToRefresh, null);
FocusedViewerManager.this.updateExpansionState(viewer, objectToRefresh);
}
}
} finally {
viewer.getControl().setRedraw(true);
}
}
}
}
}
private Object getObjectToRefresh(Object item) {
Object objectToRefresh = item;
if (item instanceof IInteractionElement) {
IInteractionElement node = (IInteractionElement) item;
AbstractContextStructureBridge structureBridge = ContextCorePlugin.getDefault().getStructureBridge(
node.getContentType());
objectToRefresh = structureBridge.getObjectForHandle(node.getHandleIdentifier());
}
return objectToRefresh;
}
}
/**
* For testing.
*/
private boolean syncRefreshMode = false;
private boolean internalExpandExceptionLogged;
public FocusedViewerManager() {
// NOTE: no longer using viewer part tracker due to bug 162346
// VIEWER_PART_TRACKER.install(PlatformUI.getWorkbench());
}
public void dispose() {
// VIEWER_PART_TRACKER.dispose(PlatformUI.getWorkbench());
}
public void selectionChanged(IWorkbenchPart part, ISelection selection) {
// ignore
}
public void addManagedViewer(StructuredViewer viewer, IWorkbenchPart viewPart) {
if (viewer != null && !managedViewers.contains(viewer)) {
managedViewers.add(viewer);
partToViewerMap.put(viewPart, viewer);
BrowseFilteredListener listener = new BrowseFilteredListener(viewer);
listenerMap.put(viewer, listener);
viewer.getControl().addMouseListener(listener);
viewer.getControl().addKeyListener(listener);
try {
// NOTE: this needs to be done because some views (e.g. Project Explorer) are not
// correctly initialized on startup and do not have the dummy selection event
// sent to them. See PartPluginAction and bug 213545.
// TODO consider a mechanism to identify only views that provide focus
UiUtil.initializeViewerSelection(viewPart);
Set<IInteractionElement> emptySet = Collections.emptySet();
refreshViewer(emptySet, true, viewer);
} catch (Exception e) {
StatusHandler.log(new Status(IStatus.ERROR, ContextUiPlugin.ID_PLUGIN,
"Could not initialize focused viewer", e)); //$NON-NLS-1$
}
}
}
public void removeManagedViewer(StructuredViewer viewer, IWorkbenchPart viewPart) {
managedViewers.remove(viewer);
partToViewerMap.remove(viewPart);
BrowseFilteredListener listener = listenerMap.get(viewer);
if (listener != null && viewer != null && !viewer.getControl().isDisposed()) {
viewer.getControl().removeMouseListener(listener);
viewer.getControl().removeKeyListener(listener);
}
}
public void addFilteredViewer(StructuredViewer viewer) {
if (viewer != null && !filteredViewers.contains(viewer)) {
filteredViewers.add(viewer);
}
}
public void removeFilteredViewer(StructuredViewer viewer) {
filteredViewers.remove(viewer);
}
@Override
public void contextChanged(ContextChangeEvent event) {
switch (event.getEventKind()) {
case ACTIVATED:
refreshViewers();
break;
case DEACTIVATED:
refreshViewers();
for (StructuredViewer structuredViewer : managedViewers) {
if (structuredViewer instanceof TreeViewer) {
((TreeViewer) structuredViewer).collapseAll();
}
}
break;
case CLEARED:
if (event.isActiveContext()) {
// ensure we dont refresh the viewers if a context other than the active one is deleted or cleared
// bug #265688
refreshViewers();
for (StructuredViewer structuredViewer : managedViewers) {
if (structuredViewer instanceof TreeViewer) {
((TreeViewer) structuredViewer).collapseAll();
}
}
}
break;
case INTEREST_CHANGED:
refreshViewers(event.getElements(), false);
break;
case LANDMARKS_ADDED:
refreshViewers(event.getElements(), true);
break;
case LANDMARKS_REMOVED:
refreshViewers(event.getElements(), true);
break;
case ELEMENTS_DELETED:
/*
* TODO: consider making this work per-element and parent
* Should we collect all parents before calling refresh?
*/
ArrayList<IInteractionElement> toRefresh = new ArrayList<IInteractionElement>();
for (IInteractionElement interactionElement : event.getElements()) {
AbstractContextStructureBridge structureBridge = ContextCore.getStructureBridge(interactionElement.getContentType());
IInteractionElement parent = ContextCore.getContextManager().getElement(
structureBridge.getParentHandle(interactionElement.getHandleIdentifier()));
if (parent != null) {
toRefresh.add(parent);
}
}
refreshViewers(toRefresh, false);
break;
}
}
protected void refreshViewers() {
List<IInteractionElement> toRefresh = Collections.emptyList();
refreshViewers(toRefresh, true);
}
protected void refreshViewers(IInteractionElement node, boolean updateLabels) {
List<IInteractionElement> toRefresh = new ArrayList<IInteractionElement>();
toRefresh.add(node);
refreshViewers(toRefresh, updateLabels);
}
protected void refreshViewers(final List<IInteractionElement> nodesToRefresh, final boolean updateLabels) {
// TODO replace by Assert.isNotNull(nodesToRefresh);
if (nodesToRefresh == null) {
return;
}
if (syncRefreshMode) {
internalRefresh(new HashSet<IInteractionElement>(nodesToRefresh), updateLabels);
} else {
PlatformUI.getWorkbench().getDisplay().asyncExec(new Runnable() {
public void run() {
internalRefresh(new HashSet<IInteractionElement>(nodesToRefresh), updateLabels);
}
});
}
}
private void internalRefresh(final Set<IInteractionElement> nodesToRefresh, final boolean updateLabels) {
try {
for (StructuredViewer viewer : managedViewers) {
refreshViewer(nodesToRefresh, updateLabels, viewer);
}
} catch (Throwable t) {
StatusHandler.log(new Status(IStatus.ERROR, ContextUiPlugin.ID_PLUGIN, "Could not refresh viewer", t)); //$NON-NLS-1$
}
}
public void refreshViewer(final Set<IInteractionElement> nodesToRefresh, final boolean updateLabels,
StructuredViewer viewer) {
Map<StructuredViewer, FocusedViewerDelayedRefreshJob> refreshJobs = null;
if (updateLabels) {
refreshJobs = minorRefreshJobs;
} else {
refreshJobs = fullRefreshJobs;
}
FocusedViewerDelayedRefreshJob job = refreshJobs.get(viewer);
if (job == null) {
job = new FocusedViewerDelayedRefreshJob(viewer, "refresh viewer", updateLabels); //$NON-NLS-1$
refreshJobs.put(viewer, job);
}
job.refreshElements(nodesToRefresh.toArray());
}
private void updateExpansionState(StructuredViewer viewer, Object objectToRefresh) {
if (viewer instanceof TreeViewer
&& filteredViewers.contains(viewer)
&& ContextUiPlugin.getDefault().getPreferenceStore().getBoolean(
IContextUiPreferenceContstants.AUTO_MANAGE_EXPANSION)) {
TreeViewer treeViewer = (TreeViewer) viewer;
// HACK to fix bug 278569: [context] errors with Markers view and active Mylyn task
if ("org.eclipse.ui.internal.views.markers.MarkersTreeViewer".equals(treeViewer.getClass() //$NON-NLS-1$
.getCanonicalName())) {
objectToRefresh = null;
}
if (objectToRefresh == null) {
treeViewer.expandAll();
} else {
// treeViewer.reveal(objectToRefresh);
// boolean failed = false;
// try {
// // reveal will fail if the content provider does not properly implement getParent();
// // check if node is now visible in view and fallback to expandAll() in
// // case of an error
// Method method = AbstractTreeViewer.class.getDeclaredMethod(
// "internalGetWidgetToSelect", Object.class); //$NON-NLS-1$
// method.setAccessible(true);
// if (method.invoke(treeViewer, objectToRefresh) == null) {
// failed = true;
// } catch (Exception e) {
// if (!internalExpandExceptionLogged) {
// internalExpandExceptionLogged = true;
// StatusHandler.log(new Status(IStatus.ERROR, ContextUiPlugin.ID_PLUGIN,
// "Failed to verify expansion state, falling back to expanding all nodes", e)); //$NON-NLS-1$
// failed = true;
// if (failed) {
treeViewer.expandAll();
}
}
}
/**
* Set to true for testing
*/
public void setSyncRefreshMode(boolean syncRefreshMode) {
this.syncRefreshMode = syncRefreshMode;
}
public void forceRefresh() {
refreshViewers();
}
} |
// -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.geom.Area;
import java.awt.geom.Path2D;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RoundRectangle2D;
import java.util.Objects;
import javax.accessibility.Accessible;
import javax.swing.*;
import javax.swing.border.AbstractBorder;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import javax.swing.plaf.basic.BasicComboBoxUI;
import javax.swing.plaf.basic.BasicComboPopup;
import javax.swing.plaf.basic.BasicScrollBarUI;
import javax.swing.plaf.basic.ComboPopup;
public final class MainPanel extends JPanel {
// Shared two-tone palette used by the flat combo box / scroll bar styling below.
public static final Color BACKGROUND = Color.WHITE;
public static final Color FOREGROUND = Color.BLACK;
public static final Color SELECTION_FOREGROUND = Color.BLUE;
public static final Color THUMB = new Color(0xCD_CD_CD);
// UIManager key under which the combo box border is (re)installed in updateUI().
public static final String KEY = "ComboBox.border";
private MainPanel() {
super(new BorderLayout(15, 15));
// Narrow, flat scroll bars without arrow-button gaps.
UIManager.put("ScrollBar.width", 10);
UIManager.put("ScrollBar.thumbHeight", 20); // GTKLookAndFeel, SynthLookAndFeel, NimbusLookAndFeel
UIManager.put("ScrollBar.minimumThumbSize", new Dimension(30, 30));
UIManager.put("ScrollBar.incrementButtonGap", 0);
UIManager.put("ScrollBar.decrementButtonGap", 0);
UIManager.put("ScrollBar.thumb", THUMB);
UIManager.put("ScrollBar.track", BACKGROUND);
// Flat combo box colors.
UIManager.put("ComboBox.foreground", FOREGROUND);
UIManager.put("ComboBox.background", BACKGROUND);
UIManager.put("ComboBox.selectionForeground", SELECTION_FOREGROUND);
UIManager.put("ComboBox.selectionBackground", BACKGROUND);
UIManager.put("ComboBox.buttonDarkShadow", BACKGROUND);
UIManager.put("ComboBox.buttonBackground", FOREGROUND);
UIManager.put("ComboBox.buttonHighlight", FOREGROUND);
UIManager.put("ComboBox.buttonShadow", FOREGROUND);
// Combo box with a rounded top border and a popup with a rounded bottom border.
JComboBox<String> combo = new JComboBox<String>(makeModel()) {
private transient MouseListener handler;
private transient PopupMenuListener listener;
@Override public void updateUI() {
// Remove listeners first so a look-and-feel switch does not install duplicates.
removeMouseListener(handler);
removePopupMenuListener(listener);
UIManager.put(KEY, new TopRoundedCornerBorder());
super.updateUI();
setUI(new BasicComboBoxUI() {
// Flat arrow button without the default bevel/border.
@Override protected JButton createArrowButton() {
JButton b = new JButton(new ArrowIcon(BACKGROUND, FOREGROUND));
b.setContentAreaFilled(false);
b.setFocusPainted(false);
b.setBorder(BorderFactory.createEmptyBorder());
return b;
}
// Popup whose list scroll pane uses the custom arrow-less scroll bars.
@Override protected ComboPopup createPopup() {
return new BasicComboPopup(comboBox) {
@Override protected JScrollPane createScroller() {
JScrollPane sp = new JScrollPane(list) {
@Override public void updateUI() {
super.updateUI();
getVerticalScrollBar().setUI(new WithoutArrowButtonScrollBarUI());
getHorizontalScrollBar().setUI(new WithoutArrowButtonScrollBarUI());
}
};
sp.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED);
sp.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
sp.setHorizontalScrollBar(null);
return sp;
}
};
}
});
handler = new ComboRolloverHandler();
addMouseListener(handler);
listener = new HeavyWeightContainerListener();
addPopupMenuListener(listener);
// The first accessible child of a combo box is its popup; give it the
// rounded bottom border so it visually continues the combo box.
Object o = getAccessibleContext().getAccessibleChild(0);
if (o instanceof JComponent) {
JComponent c = (JComponent) o;
c.setBorder(new BottomRoundedCornerBorder());
c.setForeground(FOREGROUND);
c.setBackground(BACKGROUND);
}
}
};
JPanel p = new JPanel(new GridLayout(0, 1, 15, 15));
p.setOpaque(true);
p.add(combo);
// Demo tree with all rows expanded.
JTree tree = new JTree();
int row = 0;
while (row < tree.getRowCount()) {
tree.expandRow(row);
row++;
}
JScrollPane scroll = new JScrollPane(tree) {
@Override public void updateUI() {
super.updateUI();
getVerticalScrollBar().setUI(new WithoutArrowButtonScrollBarUI());
getHorizontalScrollBar().setUI(new WithoutArrowButtonScrollBarUI());
}
};
scroll.setBackground(tree.getBackground());
scroll.setBorder(new RoundedCornerBorder());
setBorder(BorderFactory.createEmptyBorder(8, 8, 8, 8));
add(scroll);
add(p, BorderLayout.NORTH);
setOpaque(true);
setPreferredSize(new Dimension(320, 240));
}
// Builds the demo items shown in the combo box.
private static DefaultComboBoxModel<String> makeModel() {
DefaultComboBoxModel<String> model = new DefaultComboBoxModel<>();
model.addElement("333333");
model.addElement("aaa");
model.addElement("1234555");
model.addElement("555555555555");
model.addElement("666666");
model.addElement("bbb");
model.addElement("444444444");
model.addElement("1234");
model.addElement("000000000000000");
model.addElement("2222222222");
model.addElement("ccc");
model.addElement("111111111111111111");
return model;
}
public static void main(String[] args) {
EventQueue.invokeLater(MainPanel::createAndShowGui);
}
// Builds and shows the demo frame on the EDT.
private static void createAndShowGui() {
// try {
// UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
// ex.printStackTrace();
JFrame frame = new JFrame("@title@");
frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
frame.getContentPane().add(new MainPanel());
frame.pack();
frame.setLocationRelativeTo(null);
frame.setVisible(true);
}
}
/**
 * Makes the combo box popup's top-level window translucent when the popup is shown
 * as a heavyweight container (its own JWindow), so the rounded border shows through.
 */
class HeavyWeightContainerListener implements PopupMenuListener {
  @Override public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
    JComboBox<?> combo = (JComboBox<?>) e.getSource();
    // Defer until the popup hierarchy actually exists.
    EventQueue.invokeLater(() -> {
      Accessible child = combo.getUI().getAccessibleChild(combo, 0);
      if (!(child instanceof JPopupMenu)) {
        return;
      }
      Container ancestor = ((JPopupMenu) child).getTopLevelAncestor();
      if (ancestor instanceof JWindow) {
        System.out.println("HeavyWeightContainer");
        ancestor.setBackground(new Color(0x0, true));
      }
    });
  }

  @Override public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
    /* not needed */
  }

  @Override public void popupMenuCanceled(PopupMenuEvent e) {
    /* not needed */
  }
}
/**
 * Forwards mouse events on the combo box to the model of its arrow button (the
 * combo box's first child component), so the whole combo box acts as the button.
 */
class ComboRolloverHandler extends MouseAdapter {
  private static ButtonModel arrowButtonModelOf(MouseEvent e) {
    Component arrow = ((Container) e.getComponent()).getComponent(0);
    return ((JButton) arrow).getModel();
  }

  @Override public void mouseEntered(MouseEvent e) {
    arrowButtonModelOf(e).setRollover(true);
  }

  @Override public void mouseExited(MouseEvent e) {
    arrowButtonModelOf(e).setRollover(false);
  }

  @Override public void mousePressed(MouseEvent e) {
    arrowButtonModelOf(e).setPressed(true);
  }

  @Override public void mouseReleased(MouseEvent e) {
    arrowButtonModelOf(e).setPressed(false);
  }
}
/**
 * A 9x9 downward-pointing arrow icon drawn as three stacked lines.
 * Uses {@code rollover} colour when the host button is rolled over and
 * shifts down one pixel while the host button is pressed.
 */
class ArrowIcon implements Icon {
  private final Color color;
  private final Color rollover;

  protected ArrowIcon(Color color, Color rollover) {
    this.color = color;
    this.rollover = rollover;
  }

  @Override public void paintIcon(Component c, Graphics g, int x, int y) {
    Graphics2D g2 = (Graphics2D) g.create();
    g2.setPaint(color);
    int shift = 0;
    if (c instanceof AbstractButton) {
      ButtonModel model = ((AbstractButton) c).getModel();
      if (model.isPressed()) {
        // nudge the glyph down to suggest a pressed button
        shift = 1;
      } else if (model.isRollover()) {
        g2.setPaint(rollover);
      }
    }
    g2.translate(x, y + shift);
    // three progressively shorter lines form the triangle
    g2.drawLine(2, 3, 6, 3);
    g2.drawLine(3, 4, 5, 4);
    g2.drawLine(4, 5, 4, 5);
    g2.dispose();
  }

  @Override public int getIconWidth() {
    return 9;
  }

  @Override public int getIconHeight() {
    return 9;
  }
}
/**
 * A border that draws the component as a rounded rectangle. For popups it
 * fills the rounded shape with the popup's own background; for ordinary
 * components it paints the parent's background into the square corner
 * areas so only the rounded shape appears to remain.
 */
class RoundedCornerBorder extends AbstractBorder {
  protected static final int ARC = 12;

  @Override public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
    Graphics2D g2 = (Graphics2D) g.create();
    g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    Area round = new Area(new RoundRectangle2D.Double(x, y, width - 1, height - 1, ARC, ARC));
    if (c instanceof JPopupMenu) {
      g2.setPaint(c.getBackground());
      g2.fill(round);
    } else {
      Container parent = c.getParent();
      if (parent != null) {
        // mask the square corners with the parent's background colour
        g2.setPaint(parent.getBackground());
        Area corners = new Area(new Rectangle2D.Double(x, y, width, height));
        corners.subtract(round);
        g2.fill(corners);
      }
    }
    g2.setPaint(c.getForeground());
    g2.draw(round);
    g2.dispose();
  }

  @Override public Insets getBorderInsets(Component c) {
    return new Insets(4, 8, 4, 8);
  }

  @Override public Insets getBorderInsets(Component c, Insets insets) {
    insets.set(4, 8, 4, 8);
    return insets;
  }
}
/**
 * Variant of {@link RoundedCornerBorder} that rounds only the two top
 * corners; the bottom half is squared off by unioning the rounded shape
 * with its own bounds shifted down past the arc.
 */
class TopRoundedCornerBorder extends RoundedCornerBorder {
  @Override public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
    Graphics2D g2 = (Graphics2D) g.create();
    g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    if (c instanceof JPopupMenu) {
      // wipe any previous popup pixels before painting the shape
      g2.clearRect(x, y, width, height);
    }
    Area shape = new Area(new RoundRectangle2D.Double(x, y, width - 1d, height - 1d, ARC, ARC));
    Rectangle lower = shape.getBounds();
    lower.setBounds(lower.x, lower.y + ARC, lower.width, lower.height - ARC);
    shape.add(new Area(lower));
    Container parent = c.getParent();
    if (parent != null) {
      // mask the (top) square corners with the parent's background
      g2.setPaint(parent.getBackground());
      Area corners = new Area(new Rectangle2D.Double(x, y, width, height));
      corners.subtract(shape);
      g2.fill(corners);
    }
    g2.setPaint(c.getForeground());
    g2.draw(shape);
    g2.dispose();
  }
}
/**
 * Variant of {@link RoundedCornerBorder} that rounds only the two bottom
 * corners, and overdraws its top edge in the background colour so the
 * border joins seamlessly with the component above it.
 */
class BottomRoundedCornerBorder extends RoundedCornerBorder {
  @Override public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
    Graphics2D g2 = (Graphics2D) g.create();
    g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    double r = ARC;
    double w = width - 1d;
    double h = height - 1d;
    // trace a rectangle whose two bottom corners are quadratic curves
    Path2D path = new Path2D.Double();
    path.moveTo(x, y);
    path.lineTo(x, y + h - r);
    path.quadTo(x, y + h, x + r, y + h);
    path.lineTo(x + w - r, y + h);
    path.quadTo(x + w, y + h, x + w, y + h - r);
    path.lineTo(x + w, y);
    path.closePath();
    g2.setPaint(c.getBackground());
    g2.fill(path);
    g2.setPaint(c.getForeground());
    g2.draw(path);
    // hide the top edge so this border merges with the component above
    g2.setPaint(c.getBackground());
    g2.drawLine(x + 1, y, x + width - 2, y);
    g2.dispose();
  }
}
/** A button that reports an empty preferred size, so it occupies no space. */
class ZeroSizeButton extends JButton {
  @Override public Dimension getPreferredSize() {
    return new Dimension(0, 0);
  }
}
class WithoutArrowButtonScrollBarUI extends BasicScrollBarUI {
@Override protected JButton createDecreaseButton(int orientation) {
return new ZeroSizeButton();
}
@Override protected JButton createIncreaseButton(int orientation) {
return new ZeroSizeButton();
}
// @Override protected Dimension getMinimumThumbSize() {
// // return new Dimension(20, 20);
// return UIManager.getDimension("ScrollBar.minimumThumbSize");
@Override protected void paintTrack(Graphics g, JComponent c, Rectangle r) {
Graphics2D g2 = (Graphics2D) g.create();
g2.setPaint(trackColor);
g2.fill(r);
g2.dispose();
}
@Override protected void paintThumb(Graphics g, JComponent c, Rectangle r) {
JScrollBar sb = (JScrollBar) c;
if (!sb.isEnabled()) {
return;
}
BoundedRangeModel m = sb.getModel();
if (m.getMaximum() - m.getMinimum() - m.getExtent() > 0) {
Graphics2D g2 = (Graphics2D) g.create();
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
Color color;
if (isDragging) {
color = thumbDarkShadowColor;
} else if (isThumbRollover()) {
color = thumbLightShadowColor;
} else {
color = thumbColor;
}
g2.setPaint(color);
g2.fillRoundRect(r.x + 1, r.y + 1, r.width - 2, r.height - 2, 10, 10);
g2.dispose();
}
}
} |
package org.mwc.debrief.track_shift.zig_detector.target;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import junit.framework.TestCase;
import org.eclipse.core.runtime.ILog;
import org.eclipse.core.runtime.ILogListener;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.mwc.debrief.track_shift.zig_detector.moving_average.TimeRestrictedMovingAverage;
import org.mwc.debrief.track_shift.zig_detector.ownship.LegOfData;
import org.osgi.framework.Bundle;
import flanagan.math.Minimisation;
import flanagan.math.MinimisationFunction;
public class ZigDetector
{
/**
 * Cost function for the Nelder-Mead fit: mean squared (angle-wrapped)
 * error between observed bearings and the arctan forecast for candidate
 * parameters B (initial bearing), P and Q (rates).
 */
static class FlanaganArctan implements MinimisationFunction
{
  // forecast the bearing at this elapsed time for parameters B, P, Q
  private static double calcForecast(final double B, final double P,
      final double Q, final double elapsedSecs)
  {
    final double dX = Math.cos(Math.toRadians(B)) + Q * elapsedSecs;
    final double dY = Math.sin(Math.toRadians(B)) + P * elapsedSecs;
    return Math.toDegrees(Math.atan2(dY, dX));
  }

  final private List<Long> _times;
  final private List<Double> _bearings;

  public FlanaganArctan(final List<Long> beforeTimes,
      final List<Double> beforeBearings)
  {
    _times = beforeTimes;
    _bearings = beforeBearings;
  }

  /**
   * Evaluate the fit of {@code point} = {B, P, Q} against the data:
   * mean squared error, normalised by (cuts - 3) since three model
   * variables are being fitted.
   */
  @Override
  public double function(final double[] point)
  {
    final double B = point[0];
    final double P = point[1];
    final double Q = point[2];
    final Long firstTime = _times.get(0);
    double errorSum = 0;
    for (int i = 0; i < _times.size(); i++)
    {
      // abs() tolerates lists supplied in reverse time order
      final long elapsedMillis = Math.abs(_times.get(i) - firstTime);
      final double thisForecast = calcForecast(B, P, Q, elapsedMillis / 1000d);
      // wrap the error into the range [-180, 180]
      double thisError = thisForecast - _bearings.get(i);
      if (thisError > 180)
      {
        thisError -= 360;
      }
      else if (thisError < -180)
      {
        thisError += 360;
      }
      errorSum += Math.pow(thisError, 2);
    }
    // normalise by the number of cuts less the three fitted variables
    return errorSum / (_times.size() - 3);
  }
}
/**
 * Self-tests for the slicing helpers and an end-to-end run of the
 * detector against a recorded bearing sequence.
 */
public static class TestMe extends TestCase
{
  public void testStartTimes()
  {
    assertEquals(2, 5 / 2);
    List<Long> times =
        new ArrayList<Long>(Arrays.asList(new Long[]
        {1000L, 1200L, 1500L, 1800L, 2100L, 2400L, 2700L, 3000L, 3300L,
            3600L, 3900L}));
    // last index of the first leg, for each candidate slice point
    assertEquals("correct", -1, getEnd(0, times, 200, 0));
    assertEquals("correct", -1, getEnd(0, times, 200, 1));
    assertEquals("correct", -1, getEnd(0, times, 200, 2));
    assertEquals("correct", 2, getEnd(0, times, 200, 3));
    assertEquals("correct", 3, getEnd(0, times, 200, 4));
    assertEquals("correct", 4, getEnd(0, times, 200, 5));
    assertEquals("correct", 5, getEnd(0, times, 200, 6));
    assertEquals("correct", 6, getEnd(0, times, 200, 7));
    assertEquals("correct", 7, getEnd(0, times, 200, 8));
    assertEquals("correct", 8, getEnd(0, times, 200, 9));
    assertEquals("correct", 9, getEnd(0, times, 200, 10));
    // first index of the second leg, for each candidate slice point
    assertEquals("correct", 2, getStart(0, times, 400, 0));
    assertEquals("correct", 2, getStart(0, times, 400, 1));
    assertEquals("correct", 3, getStart(0, times, 400, 2));
    assertEquals("correct", 4, getStart(0, times, 400, 3));
    assertEquals("correct", 5, getStart(0, times, 400, 4));
    assertEquals("correct", 6, getStart(0, times, 400, 5));
    assertEquals("correct", 7, getStart(0, times, 400, 6));
    assertEquals("correct", 8, getStart(0, times, 400, 7));
    assertEquals("correct", -1, getStart(0, times, 400, 8));
    assertEquals("correct", -1, getStart(0, times, 400, 9));
    assertEquals("correct", -1, getStart(0, times, 400, 10));
  }

  public void testCalcStart()
  {
    List<Long> list = new ArrayList<Long>(Arrays.asList(new Long[]
    {0L, 40L, 100L, 140L, 180L, 220L, 260L, 280L}));
    assertEquals("correct time", 3, calculateNewStart(list, 1, 100));
    assertEquals("correct time", 5, calculateNewStart(list, 2, 100));
    assertEquals("correct time", 1, calculateNewStart(list, 0, 20));
    // and a reverse list
    Collections.reverse(list);
    assertEquals("correct time", 4, calculateNewStart(list, 1, 100));
    assertEquals("correct time", 5, calculateNewStart(list, 2, 100));
    assertEquals("correct time", 2, calculateNewStart(list, 0, 30));
  }

  public void testCleanValues()
  {
    // bearings that wrap through north should be unwrapped upwards...
    List<Double> list = new ArrayList<Double>();
    list.add(355d);
    list.add(4d);
    list.add(2d);
    list.add(349d);
    list.add(2d);
    list = prepareBearings(list);
    assertEquals("correct length", 5, list.size());
    assertEquals("fixed bearings", 364d, list.get(1), 0.001);
    assertEquals("fixed bearings", 362d, list.get(2), 0.001);
    assertEquals("fixed bearings", 349d, list.get(3), 0.001);
    assertEquals("fixed bearings", 362d, list.get(4), 0.001);
    // ...and the other way around, downwards
    list.clear();
    list.add(55d);
    list.add(354d);
    list.add(339d);
    list.add(9d);
    list.add(2d);
    list = prepareBearings(list);
    assertEquals("correct length", 5, list.size());
    assertEquals("fixed bearings", -6d, list.get(1), 0.001);
    assertEquals("fixed bearings", -21d, list.get(2), 0.001);
    assertEquals("fixed bearings", 9d, list.get(3), 0.001);
    assertEquals("fixed bearings", 2d, list.get(4), 0.001);
  }

  public void testMultiSlice() throws ParseException
  {
    // recorded bearing data: 30-second samples over roughly half an hour
    Long[] times =
        new Long[]
        {946699110000L, 946699140000L, 946699170000L, 946699200000L,
            946699230000L, 946699260000L, 946699290000L, 946699320000L,
            946699350000L, 946699380000L, 946699410000L, 946699440000L,
            946699470000L, 946699500000L, 946699530000L, 946699560000L,
            946699590000L, 946699620000L, 946699650000L, 946699680000L,
            946699710000L, 946699740000L, 946699770000L, 946699800000L,
            946699830000L, 946699860000L, 946699890000L, 946699920000L,
            946699950000L, 946699980000L, 946700010000L, 946700040000L,
            946700070000L, 946700100000L, 946700130000L, 946700160000L,
            946700190000L, 946700220000L, 946700250000L, 946700280000L,
            946700310000L, 946700340000L, 946700370000L, 946700400000L,
            946700430000L, 946700460000L, 946700490000L, 946700520000L,
            946700550000L, 946700580000L, 946700610000L, 946700640000L,
            946700670000L, 946700700000L, 946700730000L, 946700760000L,
            946700790000L, 946700820000L, 946700850000L, 946700880000L,
            946700910000L, 946700940000L, 946700970000L};
    Double[] bearings =
        new Double[]
        {207.017, 207.645, 208.439, 208.99, 210.35, 210.857, 211.789,
            212.789, 213.717, 214.45, 214.898, 215.881, 216.78, 216.815,
            218.296, 219.438, 220.025, 220.876, 221.654, 222.64, 222.724,
            224.254, 224.667, 225.662, 226.705, 227.423, 228.14, 228.857,
            229.722, 230.722, 231.656, 232.168, 232.893, 233.655, 234.592,
            235.5, 236.11700000000002, 236.805, 237.47199999999998, 237.824,
            238.79000000000002, 239.733, 239.88500000000002,
            240.68200000000002, 241.493, 242.005, 242.311, 242.29, 242.64,
            242.906, 242.762, 242.99, 243.36700000000002, 243.26,
            243.59699999999998, 243.704, 243.84199999999998, 244.146, 244.18,
            244.223, 244.44, 244.929, 245.28000000000003};
    // collate the data
    List<Long> tList1 = Arrays.asList(times);
    List<Double> tBearings1 = Arrays.asList(bearings);
    List<Long> tList = tList1;
    List<Double> tBearings = tBearings1;
    // run the detector over the whole sequence, with no-op storers
    final ZigDetector detector = new ZigDetector();
    double zigRatio = 1000d;
    double optimiseTolerance = 0.0000000004;
    ILog logger = getLogger();
    ILegStorer legStorer = getLegStorer();
    IZigStorer zigStorer = getZigStorer();
    detector.sliceThis(logger, "some name", "scenario", times[0],
        times[times.length - 1], legStorer, zigStorer, zigRatio,
        optimiseTolerance, tList, tBearings);
  }

  /** a leg storer that quietly discards what it is given */
  @SuppressWarnings("unused")
  private ILegStorer getLegStorer()
  {
    ILegStorer legStorer = new ILegStorer()
    {
      @Override
      public void storeLeg(String scenarioName, long tStart, long tEnd,
          double rms)
      {
        // no-op: this test only checks the detector runs cleanly
      }
    };
    return legStorer;
  }

  /** a zig storer that quietly discards what it is given */
  @SuppressWarnings("unused")
  private IZigStorer getZigStorer()
  {
    IZigStorer zigStorer = new IZigStorer()
    {
      @Override
      public void storeZig(String scenarioName, long tStart, long tEnd,
          double rms)
      {
        // no-op: this test only checks the detector runs cleanly
      }

      @Override
      public void finish()
      {
        // no-op
      }
    };
    return zigStorer;
  }

  /** a logger that swallows all output */
  @SuppressWarnings("unused")
  private ILog getLogger()
  {
    ILog logger = new ILog()
    {
      @Override
      public void addLogListener(ILogListener listener)
      {
        // no-op
      }

      @Override
      public Bundle getBundle()
      {
        // no bundle available in a unit-test context
        return null;
      }

      @Override
      public void log(IStatus status)
      {
        // no-op
      }

      @Override
      public void removeLogListener(ILogListener listener)
      {
        // no-op
      }
    };
    return logger;
  }
}
/**
 * Legacy cost function for the Nelder-Mead fit: RMS of the angle-wrapped
 * error between observed bearings and the arctan forecast. Unlike
 * {@link FlanaganArctan} it copies the data into arrays, does not take
 * the absolute elapsed time, and normalises by the full sample count.
 */
static class FlanaganArctan_Legacy implements MinimisationFunction
{
  // forecast the bearing at this elapsed time for parameters B, P, Q
  private static double calcForecast(final double B, final double P,
      final double Q, final double elapsedSecs)
  {
    final double dX = Math.cos(Math.toRadians(B)) + Q * elapsedSecs;
    final double dY = Math.sin(Math.toRadians(B)) + P * elapsedSecs;
    return Math.toDegrees(Math.atan2(dY, dX));
  }

  final private Long[] _times;
  final private Double[] _bearings;

  public FlanaganArctan_Legacy(final List<Long> beforeTimes,
      final List<Double> beforeBearings)
  {
    _times = beforeTimes.toArray(new Long[]
    {});
    _bearings = beforeBearings.toArray(new Double[]
    {});
  }

  /** Evaluate {@code point} = {B, P, Q}: RMS of the wrapped errors. */
  @Override
  public double function(final double[] point)
  {
    final double B = point[0];
    final double P = point[1];
    final double Q = point[2];
    final Long firstTime = _times[0];
    double errorSum = 0;
    for (int i = 0; i < _times.length; i++)
    {
      final long elapsedMillis = _times[i] - firstTime;
      final double thisForecast = calcForecast(B, P, Q, elapsedMillis / 1000d);
      // wrap the error into the range [-180, 180]
      double thisError = thisForecast - _bearings[i];
      if (thisError > 180)
      {
        thisError -= 360;
      }
      else if (thisError < -180)
      {
        thisError += 360;
      }
      errorSum += Math.pow(thisError, 2);
    }
    return Math.sqrt(errorSum / _times.length);
  }
}
// Formats slice times for debug/trace output.
// NOTE(review): SimpleDateFormat is not thread-safe - confirm this detector
// is only ever driven from a single thread before sharing instances.
final SimpleDateFormat dateF = new SimpleDateFormat("HH:mm:ss");
/**
 * If we slice these times in two at the entry at {@code index}, allowing a
 * turn buffer either side of the slice point, what is the index of the last
 * item in the first leg?
 *
 * @param start unused; retained for signature compatibility
 * @param thisLegTimes the time-stamps of this leg
 * @param buffer full width (millis) of the turn window around the slice point
 * @param index index of the candidate slice point
 * @return index of the last entry in the first leg, or -1 if the first leg
 *         would hold fewer than three points
 */
private static int getEnd(final int start, final List<Long> thisLegTimes,
    final long buffer, final int index)
{
  // the first leg must retain at least this many points
  final int MIN_SIZE = 3;
  // the first leg ends half a turn-buffer before the slice point
  final long endTime = thisLegTimes.get(index) - buffer / 2L;
  for (int ctr = 0; ctr < thisLegTimes.size(); ctr++)
  {
    if (thisLegTimes.get(ctr) > endTime)
    {
      // this entry is past the cut-off, so the previous one closes the
      // leg - provided enough points precede it
      return ctr >= MIN_SIZE ? ctr - 1 : -1;
    }
  }
  // never passed the cut-off time
  return -1;
}
/**
 * If we slice these times in two at the entry at {@code index}, allowing a
 * turn buffer either side of the slice point, what is the index of the first
 * item in the second leg?
 *
 * @param start unused; retained for signature compatibility
 * @param thisLegTimes the time-stamps of this leg
 * @param buffer full width (millis) of the turn window around the slice point
 * @param index index of the candidate slice point
 * @return index of the first entry in the second leg, or -1 if the second
 *         leg would hold fewer than three points
 */
private static int getStart(final int start, final List<Long> thisLegTimes,
    final long buffer, final int index)
{
  // the second leg must retain at least this many points
  final int MIN_SIZE = 3;
  // the second leg opens half a turn-buffer after the slice point
  final long startTime = thisLegTimes.get(index) + buffer / 2L;
  for (int ctr = 0; ctr < thisLegTimes.size(); ctr++)
  {
    if (thisLegTimes.get(ctr) > startTime)
    {
      // the first entry beyond the turn window opens the second leg -
      // provided enough points remain after it (indices only grow, so
      // once this check fails it can never succeed)
      return ctr <= thisLegTimes.size() - MIN_SIZE ? ctr : -1;
    }
  }
  // never cleared the turn window
  return -1;
}
/**
 * Fit the arctan bearing model to this data using a Nelder-Mead
 * minimisation.
 *
 * @param times bearing time-stamps
 * @param bearings bearing values
 * @param optimiserTolerance convergence tolerance for the optimiser
 * @return the completed minimisation (parameters and minimum score)
 */
final private Minimisation optimiseThis(final List<Long> times,
    final List<Double> bearings, final double optimiserTolerance)
{
  final Minimisation min = new Minimisation();
  // the cost function being minimised
  final FlanaganArctan funct = new FlanaganArctan(times, bearings);
  // seed the search at the first observed bearing, with zero rates
  final double[] start =
  {bearings.get(0), 0.0D, 0.0D};
  // initial step sizes for the three parameters
  final double[] step =
  {0.2D, 0.3D, 0.3D};
  // Nelder and Mead minimisation procedure
  min.nelderMead(funct, start, step, optimiserTolerance);
  return min;
}
/**
 * Score slicing this leg in two at the given index: fit each half
 * independently, then combine the two fit errors weighted by the number
 * of cuts each half contains.
 *
 * @param trialIndex index of the candidate slice point
 * @param bearings bearing values for the whole leg
 * @param times bearing times for the whole leg
 * @param legOneEnd index of the last entry in the first leg, or -1
 * @param legTwoStart index of the first entry in the second leg, or -1
 * @param optimiserTolerance when the ARC_TAN fit is good enough
 * @return the combined weighted error of the two fits, or
 *         Double.MAX_VALUE unless both legs are present
 */
private double sliceLeg(final int trialIndex, final List<Double> bearings,
    final List<Long> times, final int legOneEnd, final int legTwoStart,
    final double optimiserTolerance)
{
  final List<Long> theseTimes = times;
  final List<Double> theseBearings = bearings;
  final Date thisD = new Date(times.get(trialIndex));
  // if((legOneEnd == -1) || (legTwoStart == -1))
  // return Double.MAX_VALUE;
  // default both half-scores to "worst possible", for absent legs
  double beforeScore = Double.MAX_VALUE;
  double afterScore = Double.MAX_VALUE;
  @SuppressWarnings("unused")
  String msg = dateF.format(thisD);
  Minimisation beforeOptimiser = null;
  Minimisation afterOptimiser = null;
  if (legOneEnd != -1)
  {
    // fit the first leg: entries [0, legOneEnd)
    final List<Long> beforeTimes = theseTimes.subList(0, legOneEnd);
    final List<Double> beforeBearings = theseBearings.subList(0, legOneEnd);
    beforeOptimiser =
        optimiseThis_Legacy(beforeTimes, beforeBearings, beforeBearings
            .get(0), optimiserTolerance);
    beforeScore = beforeOptimiser.getMinimum();
    // System.out.println(" before:" + _outDates(beforeTimes));
  }
  if (legTwoStart != -1)
  {
    // fit the second leg.
    // NOTE(review): subList(legTwoStart, size() - 1) excludes the final
    // entry of the leg - confirm this off-by-one is intentional.
    final List<Long> afterTimes =
        theseTimes.subList(legTwoStart, theseTimes.size() - 1);
    final List<Double> afterBearings =
        theseBearings.subList(legTwoStart, theseTimes.size() - 1);
    afterOptimiser =
        optimiseThis_Legacy(afterTimes, afterBearings, afterBearings.get(0),
            optimiserTolerance);
    afterScore = afterOptimiser.getMinimum();
    // System.out.println(" after:" + _outDates(afterTimes));
  }
  // find the total error sum
  double sum = Double.MAX_VALUE;
  // do we have both legs?
  if ((legOneEnd != -1) && (legTwoStart != -1))
  {
    // weight each half-score by its share of the total cuts
    final int beforeLen = theseTimes.subList(0, legOneEnd).size();
    final int afterLen =
        theseTimes.subList(legTwoStart, theseTimes.size() - 1).size();
    final int totalCuts = beforeLen + afterLen;
    final double beforeNormal = beforeScore * beforeLen / totalCuts;
    final double afterNormal = afterScore * afterLen / totalCuts;
    sum = beforeNormal + afterNormal;
  }
  return sum;
}
/**
 * Slice this leg of data in two (with an intervening zig) if doing so fits
 * the bearing data sufficiently better than treating it as a single leg.
 *
 * @param log
 *          the logger
 * @param PLUGIN_ID
 *          the id of the plugin that is running this
 * @param scenario
 *          the name of this scenario
 * @param wholeStart
 *          overall start time
 * @param wholeEnd
 *          overall end time
 * @param legStorer
 *          someone interested in legs
 * @param zigStorer
 *          someone interested in zigs (may be null)
 * @param RMS_ZIG_RATIO
 *          how much better the slice has to be
 * @param optimiseTolerance
 *          when the ARC_TAN fit is good enough
 * @param thisLegTimes
 *          bearing times
 * @param thisLegBearings
 *          bearing values
 */
public void sliceThis_Original(final ILog log, final String PLUGIN_ID,
    final String scenario, final long wholeStart, final long wholeEnd,
    final ILegStorer legStorer, IZigStorer zigStorer,
    final double RMS_ZIG_RATIO, final double optimiseTolerance,
    final List<Long> thisLegTimes, final List<Double> thisLegBearings)
{
  // nothing to do for an empty leg
  if (thisLegBearings.size() == 0)
  {
    return;
  }
  // score the unsliced leg: this is the baseline any slice must beat
  final Minimisation wholeLeg =
      optimiseThis_Legacy(thisLegTimes, thisLegBearings, thisLegBearings
          .get(0), optimiseTolerance);
  final double wholeLegScore = wholeLeg.getMinimum();
  // ok, now have to slice it
  double bestScore = Double.MAX_VALUE;
  long sliceTime = -1;
  long bestLegOneEnd = -1;
  long bestLegTwoStart = -1;
  /**
   * how long we allow for a turn (millis)
   */
  final long BUFFER_SIZE = 300 * 1000;
  // try every sample as the candidate slice point, keeping the best
  for (int index = 0; index < thisLegTimes.size(); index++)
  {
    final int legOneEnd = getEnd(0, thisLegTimes, BUFFER_SIZE, index);
    final int legTwoStart = getStart(0, thisLegTimes, BUFFER_SIZE, index);
    // check we have two legitimate legs
    if (legOneEnd != -1 && legTwoStart != -1)
    {
      // what's the total score for slicing at this index?
      final double sum =
          sliceLeg(index, thisLegBearings, thisLegTimes, legOneEnd,
              legTwoStart, optimiseTolerance);
      // is this better?
      if ((sum != Double.MAX_VALUE) && (sum < bestScore))
      {
        // yes - store it.
        bestScore = sum;
        sliceTime = thisLegTimes.get(index);
        bestLegOneEnd = thisLegTimes.get(legOneEnd);
        bestLegTwoStart = thisLegTimes.get(legTwoStart);
      }
    }
  }
  // right, how did we get on?
  if (sliceTime != -1)
  {
    // is this slice acceptable?
    if (bestScore < wholeLegScore * RMS_ZIG_RATIO)
    {
      // yes: store the two legs, and the zig between them
      legStorer.storeLeg(scenario, wholeStart, bestLegOneEnd, bestScore
          / wholeLegScore * 100);
      legStorer.storeLeg(scenario, bestLegTwoStart, wholeEnd, bestScore
          / wholeLegScore * 100);
      if (zigStorer != null)
      {
        zigStorer.storeZig(scenario, bestLegOneEnd, bestLegTwoStart,
            bestScore / wholeLegScore * 100);
      }
    }
    else
    {
      // we couldn't get a good enough slice - just store the whole leg
      legStorer.storeLeg(scenario, wholeStart, wholeEnd, wholeLegScore
          / wholeLegScore * 100);
    }
  }
  else
  {
    log.log(new Status(Status.INFO, PLUGIN_ID,
        "slicing complete, can't slice", null));
  }
  // and tell the storer that we're done.
  // BUG FIX: this call was previously unconditional and threw an NPE when
  // the caller passed a null zigStorer - which the storeZig call above
  // explicitly guards against.
  if (zigStorer != null)
  {
    zigStorer.finish();
  }
}
private static interface EventHappened
{
public void eventAt(long time, double score, double threshold);
}
/**
 * slice the supplied bearing data into legs: run a zig-detection scan
 * forwards (detecting leg ends) then backwards (detecting leg starts)
 * through the data, then pair up the detected start/end times.
 *
 * @param log
 *          the logger
 * @param PLUGIN_ID
 *          the id of the plugin that is running this
 * @param scenario
 *          the name of this scenario
 * @param wholeStart
 *          overall start time
 * @param wholeEnd
 *          overall end time
 * @param legStorer
 *          someone interested in legs
 * @param zigStorer
 *          someone interested in zigs
 * @param RMS_ZIG_RATIO
 *          how much better the slice has to be
 * @param optimiseTolerance
 *          when the ARC_TAN fit is good enough
 * @param legTimes
 *          bearing times
 * @param rawLegBearings
 *          bearing values
 */
public void sliceThis(final ILog log, final String PLUGIN_ID,
    final String scenario, final long wholeStart, final long wholeEnd,
    final ILegStorer legStorer, IZigStorer zigStorer,
    final double RMS_ZIG_RATIO, final double optimiseTolerance,
    final List<Long> legTimes, final List<Double> rawLegBearings)
{
  // put the bearings into a continuous domain, so we don't jump across 360
  final List<Double> legBearings = prepareBearings(rawLegBearings);
  // nothing to do if we have no data
  if (legBearings.size() == 0)
  {
    return;
  }
  final Set<Long> legEnds = new TreeSet<Long>();
  final Set<Long> legStarts = new TreeSet<Long>();
  // include our start/end values
  legStarts.add(legTimes.get(0));
  legEnds.add(legTimes.get(legTimes.size() - 1));
  // forward pass: a detected event marks the end of a leg (start of a zig)
  EventHappened fwdListener = new EventHappened()
  {
    @Override
    public void eventAt(long time, double score, double threshold)
    {
      System.out
          .println("zig start at:" + new Date(time) + " score:" + score);
      legEnds.add(time);
    }
  };
  // moving-average window used by the scan (millis)
  long timeWindow = 120000;
  runThrough(optimiseTolerance, legTimes, legBearings, fwdListener,
      RMS_ZIG_RATIO, timeWindow);
  // backward pass: a detected event marks the start of a leg (end of a zig).
  // Note: the stale @SuppressWarnings("unused") annotation has been removed -
  // this listener is passed to runThrough() below.
  EventHappened backListener = new EventHappened()
  {
    @Override
    public void eventAt(long time, double score, double threshold)
    {
      System.out.println("zig end at:" + new Date(time) + " score:" + score);
      legStarts.add(time);
    }
  };
  // run through the data in reverse order. BUGFIX: reverse copies of the
  // lists - previously the caller's lists were reversed in place, leaving
  // them mutated as a side effect of this call.
  final List<Long> reverseTimes = new ArrayList<Long>(legTimes);
  final List<Double> reverseBearings = new ArrayList<Double>(legBearings);
  Collections.reverse(reverseTimes);
  Collections.reverse(reverseBearings);
  final double reverseZigRation = RMS_ZIG_RATIO;
  runThrough(optimiseTolerance, reverseTimes, reverseBearings, backListener,
      reverseZigRation, timeWindow);
  // ok, now tidy up the data: pair each leg-start with the next leg-end
  List<LegOfData> legs = new ArrayList<LegOfData>();
  Long lastZig = null;
  for (long legStart : legStarts)
  {
    if (lastZig == null || legStart > lastZig)
    {
      // ok, we have start time. find the next leg end time
      for (long legEnd : legEnds)
      {
        if (legEnd > legStart)
        {
          LegOfData newLeg =
              new LegOfData("Leg:" + (legs.size() + 1), legStart, legEnd);
          System.out.println("adding leg:" + newLeg);
          legs.add(newLeg);
          lastZig = legEnd;
          break;
        }
      }
    }
  }
  // ok, share the good news
  LegOfData lastLeg = null;
  for (LegOfData leg : legs)
  {
    if (legStorer != null)
    {
      legStorer.storeLeg(leg.getName(), leg.getStart(), leg.getEnd(), 2d);
    }
    if (zigStorer != null)
    {
      // do we know the last leg? If so, the gap between it and this leg
      // is a zig
      if (lastLeg != null)
      {
        zigStorer.storeZig(leg.getName(), lastLeg.getEnd(), leg.getStart(),
            2d);
      }
    }
    // BUGFIX: remember this leg for the next iteration. Previously lastLeg
    // was never assigned, so the zig-storing branch above could never fire.
    lastLeg = leg;
  }
}
/**
 * scan through the supplied bearing data, firing an event whenever the
 * arctan-fit score for the current window diverges from the moving average
 * by more than the supplied threshold (indicating a probable zig).
 *
 * @param optimiseTolerance
 *          convergence tolerance for the arctan fit
 * @param legTimes
 *          bearing times (millis)
 * @param legBearings
 *          bearing values
 * @param listener
 *          who to tell when we spot a zig
 * @param zigThreshold
 *          how large the (delta / variance^2) proportion has to be before
 *          we declare a zig
 * @param timeWindow
 *          length of the moving-average window (millis)
 */
private void runThrough(final double optimiseTolerance,
    final List<Long> legTimes, final List<Double> legBearings,
    EventHappened listener, final double zigThreshold, final long timeWindow)
{
  final int len = legTimes.size();
  TimeRestrictedMovingAverage avgScore =
      new TimeRestrictedMovingAverage(timeWindow);
  int start = 0;
  for (int end = 0; end < len; end++)
  {
    final long thisTime = legTimes.get(end);
    // we need at least 4 cuts before we try to fit a curve
    if (end >= start + 4)
    {
      final List<Long> times = legTimes.subList(start, end);
      final List<Double> bearings = legBearings.subList(start, end);
      Minimisation optimiser =
          optimiseThis(times, bearings, optimiseTolerance);
      double score = optimiser.getMinimum();
      // ok, see how things are going
      final double avg = avgScore.getAverage();
      // ok, is it increasing by more than double the variance?
      final double variance = avgScore.getVariance();
      // how far have we travelled from the average score?
      final double scoreDelta;
      if (avgScore.isEmpty())
      {
        // no history yet - NaN keeps the threshold comparison below false
        scoreDelta = Double.NaN;
      }
      else
      {
        scoreDelta = score - avg;
      }
      // contribute this score
      avgScore.add(thisTime, score);
      // normalise the divergence against the squared variance
      final double thisProportion = scoreDelta / (variance * variance);
      // do we have enough data?
      if (avgScore.isPopulated())
      {
        // has the score diverged by more than the threshold?
        if (thisProportion > zigThreshold)
        {
          listener.eventAt(thisTime, thisProportion, zigThreshold);
          // ok, move the start past the turn
          start = calculateNewStart(legTimes, end, 120000);
          // and clear the moving average
          avgScore.clear();
        }
      }
    }
  }
}
/**
 * find the first index at (or after) startPoint whose time is at least
 * 'interval' millis away from the time at startPoint.
 *
 * @param legTimes list of times (millis)
 * @param startPoint index to measure from
 * @param interval minimum time separation (millis)
 * @return the first qualifying index, or the last index if none qualifies
 */
private static int calculateNewStart(List<Long> legTimes, int startPoint,
    long interval)
{
  final long reference = legTimes.get(startPoint);
  int index = startPoint;
  while (index < legTimes.size())
  {
    if (Math.abs(legTimes.get(index) - reference) >= interval)
    {
      // far enough from the reference time - start here
      return index;
    }
    index++;
  }
  // never moved far enough away - fall back to the last entry
  return legTimes.size() - 1;
}
/**
 * put the bearings in the same domain, so we don't jump across 360
 *
 * @param raw
 *          set of raw bearings
 * @return processed bearings
 */
private static List<Double> prepareBearings(List<Double> raw)
{
  final List<Double> res = new ArrayList<Double>();
  for (int i = 0; i < raw.size(); i++)
  {
    final double thisCourse = raw.get(i);
    double cleanValue = thisCourse;
    if (i > 0)
    {
      // compare against the previously-cleaned value, so the adjustment
      // accumulates along the list
      final double lastCourse = res.get(i - 1);
      final double delta = thisCourse - lastCourse;
      if (delta > 180d)
      {
        // we've flipped downwards across the 0/360 boundary - deduct 360
        cleanValue = thisCourse - 360d;
      }
      else if (delta < -180d)
      {
        // we've flipped upwards across the 0/360 boundary - add 360
        cleanValue = thisCourse + 360d;
      }
    }
    res.add(cleanValue);
  }
  return res;
}
/**
 * run the Nelder-Mead optimiser over an arctan fit of the supplied bearing
 * data (legacy version).
 *
 * @param times bearing times
 * @param bearings bearing values
 * @param initialBearing nominally the initial bearing. NOTE(review): this
 *          parameter is currently ignored - the first entry in 'bearings' is
 *          used instead. Retained for signature compatibility; confirm intent
 *          before removing.
 * @param optimiserTolerance convergence tolerance for the optimiser
 * @return the completed minimisation
 */
final private Minimisation optimiseThis_Legacy(final List<Long> times,
    final List<Double> bearings, final double initialBearing,
    final double optimiserTolerance)
{
  // the function to be minimised - an arctan fit through the cuts
  final FlanaganArctan funct = new FlanaganArctan(times, bearings);
  // initial estimate: start from the first bearing, with flat P/Q terms
  final double[] firstGuess =
      {bearings.get(0), 0.0D, 0.0D};
  // initial step sizes for each parameter
  final double[] stepSizes =
      {0.2D, 0.3D, 0.3D};
  // run the Nelder and Mead minimisation procedure
  final Minimisation optimiser = new Minimisation();
  optimiser.nelderMead(funct, firstGuess, stepSizes, optimiserTolerance);
  return optimiser;
}
} |
package io.minecloud.daemon;
import io.minecloud.MineCloud;
import io.minecloud.MineCloudException;
import io.minecloud.db.Credentials;
import io.minecloud.models.bungee.Bungee;
import io.minecloud.models.bungee.BungeeRepository;
import io.minecloud.models.bungee.type.BungeeType;
import io.minecloud.models.network.Network;
import io.minecloud.models.nodes.Node;
import io.minecloud.models.server.Server;
import io.minecloud.models.server.ServerMetadata;
import io.minecloud.models.server.ServerRepository;
import io.minecloud.models.server.type.ServerType;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
public final class Deployer {
private static final AtomicInteger PORT_COUNTER = new AtomicInteger(32812);
private Deployer() {
}
public static void deployServer(Network network, ServerType type, List<ServerMetadata> metadata) {
Credentials mongoCreds = MineCloud.instance().mongo().credentials();
Credentials redisCreds = MineCloud.instance().redis().credentials();
ServerRepository repository = MineCloud.instance().mongo().repositoryBy(Server.class);
Server server = new Server();
server.setType(type);
server.setNumber(repository.nextNumberFor(type));
server.setNetwork(network);
server.setNode(MineCloudDaemon.instance().node());
server.setOnlinePlayers(new ArrayList<>());
server.setRamUsage(-1);
server.setId(server.type().name() + server.number());
server.setMetadata(metadata);
server.setPort(PORT_COUNTER.incrementAndGet());
Map<String, String> env = new HashMap<String, String>() {{
put("mongo_hosts", mongoCreds.formattedHosts());
put("mongo_username", mongoCreds.username());
put("mongo_password", new String(mongoCreds.password()));
put("mongo_database", mongoCreds.database());
put("redis_host", redisCreds.hosts()[0]);
put("redis_password", new String(redisCreds.password()));
put("SERVER_MOD", server.type().mod());
put("DEDICATED_RAM", String.valueOf(server.type().dedicatedRam()));
put("MAX_PLAYERS", String.valueOf(server.type().maxPlayers()));
put("server_id", server.entityId());
put("DEFAULT_WORLD", type.defaultWorld().name());
put("DEFAULT_WORLD_VERSION", type.defaultWorld().version());
put("PORT", String.valueOf(server.port()));
put("PRIVATE_IP", server.node().privateIp());
}};
int pid = startApplication(processScript("/mnt/minecloud/server/bukkit/" + server.type().mod() + "/init.sh", env), server.name());
server.setContainerId(String.valueOf(pid));
repository.save(server);
MineCloud.logger().info("Started server " + server.name() + " with container id " + server.containerId());
}
public static void deployBungee(Network network, BungeeType type) {
BungeeRepository repository = MineCloud.instance().mongo().repositoryBy(Bungee.class);
Node node = MineCloudDaemon.instance().node();
Bungee bungee = new Bungee();
if (repository.count("_id", node.publicIp()) > 0) {
MineCloud.logger().log(Level.WARNING, "Did not create bungee on this node; public ip is already in use");
return;
}
bungee.setId(node.publicIp());
bungee.setType(type);
Credentials mongoCreds = MineCloud.instance().mongo().credentials();
Credentials redisCreds = MineCloud.instance().redis().credentials();
Map<String, String> env = new HashMap<String, String>() {{
put("mongo_hosts", mongoCreds.formattedHosts());
put("mongo_username", mongoCreds.username());
put("mongo_password", new String(mongoCreds.password()));
put("mongo_database", mongoCreds.database());
put("redis_host", redisCreds.hosts()[0]);
put("redis_password", new String(redisCreds.password()));
put("DEDICATED_RAM", String.valueOf(type.dedicatedRam()));
put("bungee_id", node.publicIp());
}};
startApplication(processScript("/mnt/minecloud/scripts/bungee-init.sh", env), "bungee");
bungee.setNetwork(network);
bungee.setNode(node);
bungee.setPublicIp(node.publicIp());
repository.save(bungee);
MineCloud.logger().info("Started bungee " + bungee.name() + " with id " + bungee.containerId());
}
public static int pidOf(String app) throws IOException {
return Integer.parseInt(Files.readAllLines(Paths.get("/var/run/" + app + "/app.pid")).get(0));
}
public static boolean isRunning(String app) throws InterruptedException, IOException {
Process process = Runtime.getRuntime().exec("ps -p " + pidOf(app));
process.waitFor();
return process.exitValue() == 0;
}
private static List<String> processScript(String file, Map<String, String> env) {
List<String> script;
try {
script = Files.readAllLines(Paths.get(file));
} catch (IOException ex) {
throw new MineCloudException(ex);
}
script.replaceAll((s) -> {
Container<String> container = new Container<>(s);
env.forEach((find, replace) -> container.set(container.get().replace("]" + find, replace)));
return container.get();
});
return script;
}
private static int startApplication(List<String> startScript, String name) {
File runDir = new File("/var/run/" + name);
if (runDir.exists()) {
runDir.delete();
}
runDir.mkdirs();
try {
Files.write(Paths.get(runDir.getAbsolutePath(), "init.sh"), startScript);
Process process = new ProcessBuilder()
.directory(runDir)
.command("screen -S " + name + " sh init.sh")
.start();
return Integer.parseInt(Files.readAllLines(Paths.get(runDir.getAbsolutePath(), "app.pid")).get(0));
} catch (IOException ex) {
throw new MineCloudException(ex);
}
}
private static class Container<T> {
private T value;
public Container(T value) {
this.value = value;
}
public Container() {
this.value = null;
}
public T get() {
return value;
}
public void set(T value) {
this.value = value;
}
}
} |
package org.strategoxt.imp.runtime.stratego;
import org.eclipse.core.resources.IProject;
import org.eclipse.imp.language.Language;
import org.eclipse.imp.language.LanguageRegistry;
import org.spoofax.interpreter.core.IContext;
import org.spoofax.interpreter.core.InterpreterException;
import org.spoofax.interpreter.library.AbstractPrimitive;
import org.spoofax.interpreter.library.ssl.SSLLibrary;
import org.spoofax.interpreter.stratego.Strategy;
import org.spoofax.interpreter.terms.IStrategoString;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.strategoxt.imp.runtime.Environment;
import org.strategoxt.imp.runtime.dynamicloading.BadDescriptorException;
import org.strategoxt.imp.runtime.dynamicloading.Descriptor;
import org.strategoxt.imp.runtime.services.StrategoObserver;
/**
* Call a strategy by name in another language by name
*
* @author Vlad Vergu <v.a.vergu add tudelft.nl>
*
*/
public class ForeignLangCallPrimitive extends AbstractPrimitive {
public ForeignLangCallPrimitive() {
super("SSL_EXT_foreigncall", 0, 2);
}
/**
* Example usage:
*
* <code>
* foreign-call(|lang,strategy) = PRIM("SSL_EXT_buildercall");
*
* foobar:
* foreign-call(|"OtherLang", "strategy-name")
*
* </code>
*/
@Override
public boolean call(final IContext env, Strategy[] svars, IStrategoTerm[] tvars)
throws InterpreterException {
assert tvars.length == 2;
assert tvars[0] instanceof IStrategoString;
assert tvars[1] instanceof IStrategoString;
final String oLangName = ((IStrategoString) tvars[0]).stringValue();
final String strategyName = ((IStrategoString) tvars[1]).stringValue();
boolean result = false;
try {
final IStrategoTerm inputTerm = env.current();
final EditorIOAgent agent = (EditorIOAgent) SSLLibrary.instance(env).getIOAgent();
final IProject project = agent.getProject();
final String dir = ((EditorIOAgent) SSLLibrary.instance(env).getIOAgent())
.getProjectPath();
final Language oLang = LanguageRegistry.findLanguage(oLangName);
if (oLang == null)
return false;
final Descriptor oLangDescr = Environment.getDescriptor(oLang);
assert oLangDescr != null;
final StrategoObserver observer = oLangDescr
.createService(StrategoObserver.class, null);
observer.configureRuntime(project, dir);
observer.getRuntime().setCurrent(inputTerm);
result = observer.getRuntime().invoke(strategyName);
env.setCurrent(observer.getRuntime().current());
observer.uninitialize();
} catch (RuntimeException cex) {
Environment.logException(cex);
} catch (BadDescriptorException e) {
Environment.logException(e);
} catch (InterpreterException e) {
Environment.logException(e);
}
return result;
}
} |
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package org.capnproto;
import java.nio.ByteBuffer;
final class WireHelpers {
/**
 * Round a byte count up to the number of 8-byte words needed to hold it.
 */
static int roundBytesUpToWords(int byteCount) {
    return (byteCount + 7) / 8;
}
/**
 * Round a bit count up to the number of whole bytes needed to hold it.
 */
static int roundBitsUpToBytes(int bitCount) {
    return (bitCount + 7) / Constants.BITS_PER_BYTE;
}
/**
 * Round a bit count up to the number of words needed to hold it.
 */
static int roundBitsUpToWords(long bitCount) {
    //# This code assumes 64-bit words.
    return (int) ((bitCount + 63) / ((long) Constants.BITS_PER_WORD));
}
/**
 * Result of {@link #allocate}: the word offset of the allocated content, the
 * offset of the pointer that refers to it, and the segment both live in.
 */
static class AllocateResult {
    public final int ptr;                 // word offset of the allocated content
    public final int refOffset;           // word offset of the pointer to the content
    public final SegmentBuilder segment;  // segment holding both

    AllocateResult(int ptr, int refOffset, SegmentBuilder segment) {
        this.ptr = ptr;
        this.refOffset = refOffset;
        this.segment = segment;
    }
}
/**
 * Allocate `amount` words for an object of the given pointer kind, and make
 * the pointer at `refOffset` point at it. Any object the pointer previously
 * referenced is zeroed first. If the preferred segment is full, the object
 * is placed in another segment and the original pointer becomes a far
 * pointer to a landing pad there.
 *
 * @param refOffset word offset of the pointer to set
 * @param segment segment preferred for the allocation (and holding the pointer)
 * @param amount size to allocate, in words
 * @param kind pointer kind (WirePointer.STRUCT / LIST / ...)
 * @return where the object landed, and where its (possibly relocated) pointer lives
 */
static AllocateResult allocate(int refOffset,
                               SegmentBuilder segment,
                               int amount, // in words
                               byte kind) {
    long ref = segment.get(refOffset);
    if (!WirePointer.isNull(ref)) {
        // the pointer is about to be overwritten - reclaim its old target
        zeroObject(segment, refOffset);
    }
    if (amount == 0 && kind == WirePointer.STRUCT) {
        // zero-size structs are encoded specially (pointer targets itself)
        WirePointer.setKindAndTargetForEmptyStruct(segment.buffer, refOffset);
        return new AllocateResult(refOffset, refOffset, segment);
    }
    int ptr = segment.allocate(amount);
    if (ptr == SegmentBuilder.FAILED_ALLOCATION) {
        //# Need to allocate in a new segment. We'll need to
        //# allocate an extra pointer worth of space to act as
        //# the landing pad for a far pointer.
        int amountPlusRef = amount + Constants.POINTER_SIZE_IN_WORDS;
        BuilderArena.AllocateResult allocation = segment.getArena().allocate(amountPlusRef);
        //# Set up the original pointer to be a far pointer to
        //# the new segment.
        FarPointer.set(segment.buffer, refOffset, false, allocation.offset);
        FarPointer.setSegmentId(segment.buffer, refOffset, allocation.segment.id);
        //# Initialize the landing pad to indicate that the
        //# data immediately follows the pad.
        int resultRefOffset = allocation.offset;
        int ptr1 = allocation.offset + Constants.POINTER_SIZE_IN_WORDS;
        WirePointer.setKindAndTarget(allocation.segment.buffer, resultRefOffset, kind,
                                     ptr1);
        return new AllocateResult(ptr1, resultRefOffset, allocation.segment);
    } else {
        WirePointer.setKindAndTarget(segment.buffer, refOffset, kind, ptr);
        return new AllocateResult(ptr, refOffset, segment);
    }
}
/**
 * Result of {@link #followBuilderFars}: the content offset, the effective
 * (type-bearing) pointer word, and the segment actually holding the content.
 */
static class FollowBuilderFarsResult {
    public final int ptr;                 // word offset of the object content
    public final long ref;                // the type-bearing pointer word
    public final SegmentBuilder segment;  // segment holding the content

    FollowBuilderFarsResult(int ptr, long ref, SegmentBuilder segment) {
        this.ptr = ptr;
        this.ref = ref;
        this.segment = segment;
    }
}
/**
 * Follow a (possibly far) pointer in a builder, landing on the segment that
 * actually holds the object.
 *
 * @param ref the pointer word being followed
 * @param refTarget target offset computed from ref (returned as-is when ref is not far)
 * @param segment the segment containing ref
 * @return the content offset, the type-bearing pointer word, and the content's segment
 */
static FollowBuilderFarsResult followBuilderFars(long ref, int refTarget,
                                                 SegmentBuilder segment) {
    //# If `ref` is a far pointer, follow it. On return, `ref` will
    //# have been updated to point at a WirePointer that contains
    //# the type information about the target object, and a pointer
    //# to the object contents is returned. The caller must NOT use
    //# `ref->target()` as this may or may not actually return a
    //# valid pointer. `segment` is also updated to point at the
    //# segment which actually contains the object.
    //
    //# If `ref` is not a far pointer, this simply returns
    //# `refTarget`. Usually, `refTarget` should be the same as
    //# `ref->target()`, but may not be in cases where `ref` is
    //# only a tag.
    if (WirePointer.kind(ref) == WirePointer.FAR) {
        SegmentBuilder resultSegment = segment.getArena().getSegment(FarPointer.getSegmentId(ref));
        int padOffset = FarPointer.positionInSegment(ref);
        long pad = resultSegment.get(padOffset);
        if (! FarPointer.isDoubleFar(ref)) {
            // single far: the landing pad itself is the type-bearing pointer
            return new FollowBuilderFarsResult(WirePointer.target(padOffset, pad), pad, resultSegment);
        }
        //# Landing pad is another far pointer. It is followed by a
        //# tag describing the pointed-to object.
        int refOffset = padOffset + 1;
        ref = resultSegment.get(refOffset);
        resultSegment = resultSegment.getArena().getSegment(FarPointer.getSegmentId(pad));
        return new FollowBuilderFarsResult(FarPointer.positionInSegment(pad), ref, resultSegment);
    } else {
        return new FollowBuilderFarsResult(refTarget, ref, segment);
    }
}
/**
 * Result of {@link #followFars}: the content offset, the effective
 * (type-bearing) pointer word, and the segment actually holding the content.
 */
static class FollowFarsResult {
    public final int ptr;                // word offset of the object content
    public final long ref;               // the type-bearing pointer word
    public final SegmentReader segment;  // segment holding the content

    FollowFarsResult(int ptr, long ref, SegmentReader segment) {
        this.ptr = ptr;
        this.ref = ref;
        this.segment = segment;
    }
}
/**
 * Reader-side counterpart of followBuilderFars: follow a (possibly far)
 * pointer, landing on the segment that actually holds the object.
 *
 * @param ref the pointer word being followed
 * @param refTarget target offset computed from ref (returned as-is when ref is not far)
 * @param segment the segment containing ref; may be null for unchecked messages
 * @return the content offset, the type-bearing pointer word, and the content's segment
 */
static FollowFarsResult followFars(long ref, int refTarget, SegmentReader segment) {
    //# If the segment is null, this is an unchecked message,
    //# so there are no FAR pointers.
    if (segment != null && WirePointer.kind(ref) == WirePointer.FAR) {
        // NOTE(review): tryGetSegment may plausibly return null for an unknown
        // segment id - confirm callers guarantee a valid id before dereferencing.
        SegmentReader resultSegment = segment.arena.tryGetSegment(FarPointer.getSegmentId(ref));
        int padOffset = FarPointer.positionInSegment(ref);
        long pad = resultSegment.get(padOffset);
        // NOTE(review): padWords is currently unused - presumably intended for
        // the read-limiting TODO below; confirm before removing.
        int padWords = FarPointer.isDoubleFar(ref) ? 2 : 1;
        // TODO read limiting
        if (!FarPointer.isDoubleFar(ref)) {
            // single far: the landing pad itself is the type-bearing pointer
            return new FollowFarsResult(WirePointer.target(padOffset, pad),
                                        pad, resultSegment);
        } else {
            //# Landing pad is another far pointer. It is
            //# followed by a tag describing the pointed-to
            //# object.
            long tag = resultSegment.get(padOffset + 1);
            resultSegment = resultSegment.arena.tryGetSegment(FarPointer.getSegmentId(pad));
            return new FollowFarsResult(FarPointer.positionInSegment(pad), tag, resultSegment);
        }
    } else {
        return new FollowFarsResult(refTarget, ref, segment);
    }
}
/**
 * Zero out the pointed-to object. Use when the pointer is about to be
 * overwritten, making the target object no longer reachable.
 *
 * @param segment segment containing the pointer
 * @param refOffset word offset of the pointer within segment
 */
static void zeroObject(SegmentBuilder segment, int refOffset) {
    //# We shouldn't zero out external data linked into the message.
    if (!segment.isWritable()) return;
    long ref = segment.get(refOffset);
    switch (WirePointer.kind(ref)) {
        case WirePointer.STRUCT:
        case WirePointer.LIST:
            zeroObject(segment, ref, WirePointer.target(refOffset, ref));
            break;
        case WirePointer.FAR: {
            segment = segment.getArena().getSegment(FarPointer.getSegmentId(ref));
            if (segment.isWritable()) { //# Don't zero external data.
                int padOffset = FarPointer.positionInSegment(ref);
                long pad = segment.get(padOffset);
                if (FarPointer.isDoubleFar(ref)) {
                    //# A double-far pad is a far pointer to the content (in yet
                    //# another segment) followed by a tag word describing it.
                    //# BUGFIX: the content segment is identified by the *pad's*
                    //# segment id (the previous code re-used ref's id, which is
                    //# the pad segment we already hold), and the tag argument is
                    //# the word *stored at* padOffset + 1 (previously the raw
                    //# offset was passed where the tag value was expected), so
                    //# double-far content was never actually zeroed.
                    SegmentBuilder otherSegment = segment.getArena().getSegment(FarPointer.getSegmentId(pad));
                    if (otherSegment.isWritable()) {
                        zeroObject(otherSegment, segment.get(padOffset + 1), FarPointer.positionInSegment(pad));
                    }
                    //# zero both words of the landing pad itself
                    segment.buffer.putLong(padOffset * 8, 0L);
                    segment.buffer.putLong((padOffset + 1) * 8, 0L);
                } else {
                    zeroObject(segment, padOffset);
                    segment.buffer.putLong(padOffset * 8, 0L);
                }
            }
            break;
        }
        case WirePointer.OTHER: {
            // TODO: handle OTHER pointers (e.g. capabilities) when supported
        }
    }
}
/**
 * Zero out an object given its (possibly detached) tag word and content
 * location, recursively reclaiming any objects it points to.
 *
 * @param segment segment holding the object content
 * @param tag the struct/list pointer word describing the object
 * @param ptr word offset of the object content within segment
 */
static void zeroObject(SegmentBuilder segment, long tag, int ptr) {
    //# We shouldn't zero out external data linked into the message.
    if (!segment.isWritable()) return;
    switch (WirePointer.kind(tag)) {
        case WirePointer.STRUCT: {
            //# zero the pointer section first (reclaiming targets), then wipe
            //# the whole struct body
            int pointerSection = ptr + StructPointer.dataSize(tag);
            int count = StructPointer.ptrCount(tag);
            for (int ii = 0; ii < count; ++ii) {
                zeroObject(segment, pointerSection + ii);
            }
            memset(segment.buffer, ptr * Constants.BYTES_PER_WORD, (byte)0,
                   StructPointer.wordSize(tag) * Constants.BYTES_PER_WORD);
            break;
        }
        case WirePointer.LIST: {
            switch (ListPointer.elementSize(tag)) {
                case ElementSize.VOID: break;
                case ElementSize.BIT:
                case ElementSize.BYTE:
                case ElementSize.TWO_BYTES:
                case ElementSize.FOUR_BYTES:
                case ElementSize.EIGHT_BYTES: {
                    //# BUGFIX: widen to long *before* multiplying - the
                    //# (elementCount * bitsPerElement) product can overflow int
                    //# before being passed to roundBitsUpToWords(long).
                    memset(segment.buffer, ptr * Constants.BYTES_PER_WORD, (byte)0,
                           roundBitsUpToWords(
                               (long) ListPointer.elementCount(tag) *
                               ElementSize.dataBitsPerElement(ListPointer.elementSize(tag))) * Constants.BYTES_PER_WORD);
                    break;
                }
                case ElementSize.POINTER: {
                    //# reclaim each pointed-to object, then wipe the pointer list
                    int count = ListPointer.elementCount(tag);
                    for (int ii = 0; ii < count; ++ii) {
                        zeroObject(segment, ptr + ii);
                    }
                    memset(segment.buffer, ptr * Constants.BYTES_PER_WORD, (byte)0,
                           count * Constants.BYTES_PER_WORD);
                    break;
                }
                case ElementSize.INLINE_COMPOSITE: {
                    long elementTag = segment.get(ptr);
                    if (WirePointer.kind(elementTag) != WirePointer.STRUCT) {
                        throw new Error("Don't know how to handle non-STRUCT inline composite.");
                    }
                    int dataSize = StructPointer.dataSize(elementTag);
                    int pointerCount = StructPointer.ptrCount(elementTag);
                    //# walk each element, reclaiming its pointer section
                    int pos = ptr + Constants.POINTER_SIZE_IN_WORDS;
                    int count = WirePointer.inlineCompositeListElementCount(elementTag);
                    for (int ii = 0; ii < count; ++ii) {
                        pos += dataSize;
                        for (int jj = 0; jj < pointerCount; ++jj) {
                            zeroObject(segment, pos);
                            pos += Constants.POINTER_SIZE_IN_WORDS;
                        }
                    }
                    //# wipe the element data plus the leading tag word
                    memset(segment.buffer, ptr * Constants.BYTES_PER_WORD, (byte)0,
                           (StructPointer.wordSize(elementTag) * count + Constants.POINTER_SIZE_IN_WORDS) * Constants.BYTES_PER_WORD);
                    break;
                }
            }
            break;
        }
        case WirePointer.FAR:
            throw new Error("Unexpected FAR pointer.");
        case WirePointer.OTHER:
            throw new Error("Unexpected OTHER pointer.");
    }
}
/**
 * Zero out the pointer itself and, if it is a far pointer, zero the landing
 * pad as well, but do not zero the object body. Used when upgrading.
 *
 * @param segment segment containing the pointer
 * @param refOffset word offset of the pointer within segment
 */
static void zeroPointerAndFars(SegmentBuilder segment, int refOffset) {
    long ref = segment.get(refOffset);
    if (WirePointer.kind(ref) == WirePointer.FAR) {
        SegmentBuilder padSegment = segment.getArena().getSegment(FarPointer.getSegmentId(ref));
        if (padSegment.isWritable()) { //# Don't zero external data.
            int padOffset = FarPointer.positionInSegment(ref);
            padSegment.buffer.putLong(padOffset * Constants.BYTES_PER_WORD, 0L);
            if (FarPointer.isDoubleFar(ref)) {
                //# BUGFIX: zero the *second word* of the double-far pad. The
                //# previous code wrote at byte offset
                //# (padOffset * BYTES_PER_WORD) + 1, i.e. one byte into the
                //# first word, corrupting it instead of clearing the tag word.
                padSegment.buffer.putLong((padOffset + 1) * Constants.BYTES_PER_WORD, 0L);
            }
        }
    }
    segment.put(refOffset, 0L);
}
/**
 * Make *dst point to the same object as *src. Both must reside in the same
 * message, but can be in different segments.
 *
 * Caller MUST zero out the source pointer after calling this, to make sure
 * no later code mistakenly thinks the source location still owns the object.
 * transferPointer() doesn't do this zeroing itself because many callers
 * transfer several pointers in a loop then zero out the whole section.
 */
static void transferPointer(SegmentBuilder dstSegment, int dstOffset,
                            SegmentBuilder srcSegment, int srcOffset) {
    final long srcRef = srcSegment.get(srcOffset);
    if (WirePointer.isNull(srcRef)) {
        dstSegment.put(dstOffset, 0L);
        return;
    }
    if (WirePointer.kind(srcRef) == WirePointer.FAR) {
        //# Far pointers are position-independent, so a plain copy suffices.
        dstSegment.put(dstOffset, srcRef);
        return;
    }
    //# Intra-message pointer: delegate to the tag/target overload.
    transferPointer(dstSegment, dstOffset, srcSegment, srcOffset,
                    WirePointer.target(srcOffset, srcRef));
}
/**
 * Like the two-pointer overload, but splits src into a tag (at srcOffset) and
 * a target (at srcTargetOffset). Particularly useful for OrphanBuilder.
 * Creates a direct pointer when both offsets share a segment, otherwise a
 * far pointer (possibly double-far) via a landing pad.
 */
static void transferPointer(SegmentBuilder dstSegment, int dstOffset,
                            SegmentBuilder srcSegment, int srcOffset, int srcTargetOffset) {
    //# Like the other overload, but splits src into a tag and a target. Particularly useful for
    //# OrphanBuilder.
    long src = srcSegment.get(srcOffset);
    long srcTarget = srcSegment.get(srcTargetOffset);
    if (dstSegment == srcSegment) {
        //# Same segment, so create a direct pointer.
        if (WirePointer.kind(src) == WirePointer.STRUCT && StructPointer.wordSize(src) == 0) {
            // empty structs use the special self-targeting encoding
            WirePointer.setKindAndTargetForEmptyStruct(dstSegment.buffer, dstOffset);
        } else {
            WirePointer.setKindAndTarget(dstSegment.buffer, dstOffset,
                                         WirePointer.kind(src), srcTargetOffset);
            // We can just copy the upper 32 bits. (Use Wire format docs as a reference.)
            dstSegment.buffer.putInt(dstOffset * Constants.BYTES_PER_WORD + 4,
                                     srcSegment.buffer.getInt(srcOffset * Constants.BYTES_PER_WORD + 4));
        }
    } else {
        //# Need to create a far pointer. Try to allocate it in the same segment as the source,
        //# so that it doesn't need to be a double-far.
        int landingPadOffset = srcSegment.allocate(1);
        if (landingPadOffset == SegmentBuilder.FAILED_ALLOCATION) {
            //# Darn, need a double-far.
            BuilderArena.AllocateResult allocation = srcSegment.getArena().allocate(2);
            SegmentBuilder farSegment = allocation.segment;
            landingPadOffset = allocation.offset;
            // first pad word: far pointer back to the content in the source segment
            FarPointer.set(farSegment.buffer, landingPadOffset, false, srcTargetOffset);
            FarPointer.setSegmentId(farSegment.buffer, landingPadOffset, srcSegment.id);
            // second pad word: tag carrying the content's kind and size bits
            WirePointer.setKindWithZeroOffset(farSegment.buffer, landingPadOffset + 1,
                                              WirePointer.kind(srcTarget));
            farSegment.buffer.putInt((landingPadOffset + 1) * Constants.BYTES_PER_WORD + 4,
                                     srcSegment.buffer.getInt(srcOffset * Constants.BYTES_PER_WORD + 4));
            // destination becomes a double-far pointer to the pad
            FarPointer.set(dstSegment.buffer, dstOffset,
                           true, landingPadOffset);
            FarPointer.setSegmentId(dstSegment.buffer, dstOffset,
                                    farSegment.id);
        } else {
            //# Simple landing pad is just a pointer.
            WirePointer.setKindAndTarget(srcSegment.buffer, landingPadOffset,
                                         WirePointer.kind(srcTarget), srcTargetOffset);
            srcSegment.buffer.putInt(landingPadOffset * Constants.BYTES_PER_WORD + 4,
                                     srcSegment.buffer.getInt(srcOffset * Constants.BYTES_PER_WORD + 4));
            FarPointer.set(dstSegment.buffer, dstOffset,
                           false, landingPadOffset);
            FarPointer.setSegmentId(dstSegment.buffer, dstOffset,
                                    srcSegment.id);
        }
    }
}
/**
 * Allocate and initialize a struct of the given size, pointed to by the
 * pointer at refOffset, and return a builder for it.
 *
 * @param factory constructs the caller's builder type
 * @param refOffset word offset of the pointer to initialize
 * @param segment segment preferred for the allocation
 * @param size data/pointer section sizes for the new struct
 * @return a builder positioned over the freshly-allocated struct
 */
static <T> T initStructPointer(StructBuilder.Factory<T> factory,
                               int refOffset,
                               SegmentBuilder segment,
                               StructSize size) {
    AllocateResult allocation = allocate(refOffset, segment, size.total(), WirePointer.STRUCT);
    StructPointer.setFromStructSize(allocation.segment.buffer, allocation.refOffset, size);
    // the builder takes the data-section size in *bits*; use the named
    // constant instead of a bare 64, for consistency with
    // getWritableStructPointer (which uses Constants.BITS_PER_WORD)
    return factory.constructBuilder(allocation.segment, allocation.ptr * Constants.BYTES_PER_WORD,
                                    allocation.ptr + size.data,
                                    size.data * Constants.BITS_PER_WORD, size.pointers);
}
/**
 * Return a builder for an existing struct, initializing it if the pointer is
 * null, and upgrading it in place (reallocate + copy + zero the old copy) if
 * its existing data/pointer sections are smaller than `size` requires.
 *
 * @param factory constructs the caller's builder type
 * @param refOffset word offset of the struct pointer
 * @param segment segment containing the pointer
 * @param size required data/pointer section sizes
 * @param defaultSegment segment holding a default value, or null for none
 *        (non-null defaults are not yet implemented)
 * @param defaultOffset offset of the default value within defaultSegment
 */
static <T> T getWritableStructPointer(StructBuilder.Factory<T> factory,
                                      int refOffset,
                                      SegmentBuilder segment,
                                      StructSize size,
                                      SegmentReader defaultSegment,
                                      int defaultOffset) {
    long ref = segment.get(refOffset);
    int target = WirePointer.target(refOffset, ref);
    if (WirePointer.isNull(ref)) {
        if (defaultSegment == null) {
            // no default: just allocate a fresh struct
            return initStructPointer(factory, refOffset, segment, size);
        } else {
            throw new Error("unimplemented");
        }
    }
    // resolve any far pointers to find where the struct actually lives
    FollowBuilderFarsResult resolved = followBuilderFars(ref, target, segment);
    short oldDataSize = StructPointer.dataSize(resolved.ref);
    short oldPointerCount = StructPointer.ptrCount(resolved.ref);
    int oldPointerSection = resolved.ptr + oldDataSize;
    if (oldDataSize < size.data || oldPointerCount < size.pointers) {
        //# The space allocated for this struct is too small. Unlike with readers, we can't just
        //# run with it and do bounds checks at access time, because how would we handle writes?
        //# Instead, we have to copy the struct to a new space now.
        short newDataSize = (short)Math.max(oldDataSize, size.data);
        short newPointerCount = (short)Math.max(oldPointerCount, size.pointers);
        int totalSize = newDataSize + newPointerCount * Constants.WORDS_PER_POINTER;
        //# Don't let allocate() zero out the object just yet.
        zeroPointerAndFars(segment, refOffset);
        AllocateResult allocation = allocate(refOffset, segment,
                                             totalSize, WirePointer.STRUCT);
        StructPointer.set(allocation.segment.buffer, allocation.refOffset,
                          newDataSize, newPointerCount);
        //# Copy data section.
        memcpy(allocation.segment.buffer, allocation.ptr * Constants.BYTES_PER_WORD,
               resolved.segment.buffer, resolved.ptr * Constants.BYTES_PER_WORD,
               oldDataSize * Constants.BYTES_PER_WORD);
        //# Copy pointer section.
        int newPointerSection = allocation.ptr + newDataSize;
        for (int ii = 0; ii < oldPointerCount; ++ii) {
            transferPointer(allocation.segment, newPointerSection + ii,
                            resolved.segment, oldPointerSection + ii);
        }
        //# Zero out old location. This has two purposes:
        //# 1) We don't want to leak the original contents of the struct when the message is written
        //#    out as it may contain secrets that the caller intends to remove from the new copy.
        //# 2) Zeros will be deflated by packing, making this dead memory almost-free if it ever
        //#    hits the wire.
        memset(resolved.segment.buffer, resolved.ptr * Constants.BYTES_PER_WORD, (byte)0,
               (oldDataSize + oldPointerCount * Constants.WORDS_PER_POINTER) * Constants.BYTES_PER_WORD);
        return factory.constructBuilder(allocation.segment, allocation.ptr * Constants.BYTES_PER_WORD,
                                        newPointerSection, newDataSize * Constants.BITS_PER_WORD,
                                        newPointerCount);
    } else {
        // existing allocation is already large enough - build over it in place
        return factory.constructBuilder(resolved.segment, resolved.ptr * Constants.BYTES_PER_WORD,
                                        oldPointerSection, oldDataSize * Constants.BITS_PER_WORD,
                                        oldPointerCount);
    }
}
static <T> T initListPointer(ListBuilder.Factory<T> factory,
int refOffset,
SegmentBuilder segment,
int elementCount,
byte elementSize) {
assert elementSize != ElementSize.INLINE_COMPOSITE : "Should have called initStructListPointer instead";
int dataSize = ElementSize.dataBitsPerElement(elementSize);
int pointerCount = ElementSize.pointersPerElement(elementSize);
int step = dataSize + pointerCount * Constants.BITS_PER_POINTER;
int wordCount = roundBitsUpToWords((long)elementCount * (long)step);
AllocateResult allocation = allocate(refOffset, segment, wordCount, WirePointer.LIST);
ListPointer.set(allocation.segment.buffer, allocation.refOffset, elementSize, elementCount);
return factory.constructBuilder(allocation.segment,
allocation.ptr * Constants.BYTES_PER_WORD,
elementCount, step, dataSize, (short)pointerCount);
}
    // Allocates a fresh INLINE_COMPOSITE (struct) list of `elementCount` elements,
    // each with the given struct size, and writes a list pointer at `refOffset`.
    static <T> T initStructListPointer(ListBuilder.Factory<T> factory,
                                       int refOffset,
                                       SegmentBuilder segment,
                                       int elementCount,
                                       StructSize elementSize) {
        int wordsPerElement = elementSize.total();
        //# Allocate the list, prefixed by a single WirePointer.
        int wordCount = elementCount * wordsPerElement;
        AllocateResult allocation = allocate(refOffset, segment, Constants.POINTER_SIZE_IN_WORDS + wordCount,
                                             WirePointer.LIST);
        //# Initialize the pointer.
        // For inline-composite lists the pointer stores the total word count, and
        // the first word of the list is a "tag" that carries the element count
        // plus the per-element data/pointer sizes. Both writes below target that
        // same tag word at allocation.ptr.
        ListPointer.setInlineComposite(allocation.segment.buffer, allocation.refOffset, wordCount);
        WirePointer.setKindAndInlineCompositeListElementCount(allocation.segment.buffer, allocation.ptr,
                                                              WirePointer.STRUCT, elementCount);
        StructPointer.setFromStructSize(allocation.segment.buffer, allocation.ptr, elementSize);
        // The builder starts just past the tag word.
        return factory.constructBuilder(allocation.segment,
                                        (allocation.ptr + 1) * Constants.BYTES_PER_WORD,
                                        elementCount, wordsPerElement * Constants.BITS_PER_WORD,
                                        elementSize.data * Constants.BITS_PER_WORD, elementSize.pointers);
    }
    // Returns a builder for an existing non-struct list targeted by the pointer
    // at `origRefOffset`, validating that the stored element size is compatible
    // with `elementSize`.
    static <T> T getWritableListPointer(ListBuilder.Factory<T> factory,
                                        int origRefOffset,
                                        SegmentBuilder origSegment,
                                        byte elementSize,
                                        SegmentReader defaultSegment,
                                        int defaultOffset) {
        assert elementSize != ElementSize.INLINE_COMPOSITE : "Use getStructList{Element,Field} for structs";
        long origRef = origSegment.get(origRefOffset);
        int origRefTarget = WirePointer.target(origRefOffset, origRef);
        if (WirePointer.isNull(origRef)) {
            // Null pointer with a default value is not supported yet.
            throw new Error("unimplemented");
        }
        //# We must verify that the pointer has the right size. Unlike
        //# in getWritableStructListPointer(), we never need to
        //# "upgrade" the data, because this method is called only for
        //# non-struct lists, and there is no allowed upgrade path *to*
        //# a non-struct list, only *from* them.
        FollowBuilderFarsResult resolved = followBuilderFars(origRef, origRefTarget, origSegment);
        if (WirePointer.kind(resolved.ref) != WirePointer.LIST) {
            throw new DecodeException("Called getList{Field,Element}() but existing pointer is not a list");
        }
        byte oldSize = ListPointer.elementSize(resolved.ref);
        if (oldSize == ElementSize.INLINE_COMPOSITE) {
            //# The existing element size is InlineComposite, which
            //# means that it is at least two words, which makes it
            //# bigger than the expected element size. Since fields can
            //# only grow when upgraded, the existing data must have
            //# been written with a newer version of the protocol. We
            //# therefore never need to upgrade the data in this case,
            //# but we do need to validate that it is a valid upgrade
            //# from what we expected.
            throw new Error("unimplemented");
        } else {
            // The stored list must be at least as wide as the expected element
            // size, in both data bits and pointer slots.
            int dataSize = ElementSize.dataBitsPerElement(oldSize);
            int pointerCount = ElementSize.pointersPerElement(oldSize);
            if (dataSize < ElementSize.dataBitsPerElement(elementSize)) {
                throw new DecodeException("Existing list value is incompatible with expected type.");
            }
            if (pointerCount < ElementSize.pointersPerElement(elementSize)) {
                throw new DecodeException("Existing list value is incompatible with expected type.");
            }
            // Build over the list as stored (its actual step, not the expected one).
            int step = dataSize + pointerCount * Constants.BITS_PER_POINTER;
            return factory.constructBuilder(resolved.segment, resolved.ptr * Constants.BYTES_PER_WORD,
                                            ListPointer.elementCount(resolved.ref),
                                            step, dataSize, (short) pointerCount);
        }
    }
static <T> T getWritableStructListPointer(ListBuilder.Factory<T> factory,
int origRefOffset,
SegmentBuilder origSegment,
StructSize elementSize,
SegmentReader defaultSegment,
int defaultOffset) {
long origRef = origSegment.get(origRefOffset);
int origRefTarget = WirePointer.target(origRefOffset, origRef);
if (WirePointer.isNull(origRef)) {
throw new Error("unimplemented");
}
//# We must verify that the pointer has the right size and potentially upgrade it if not.
FollowBuilderFarsResult resolved = followBuilderFars(origRef, origRefTarget, origSegment);
if (WirePointer.kind(resolved.ref) != WirePointer.LIST) {
throw new DecodeException("Called getList{Field,Element}() but existing pointer is not a list");
}
byte oldSize = ListPointer.elementSize(resolved.ref);
if (oldSize == ElementSize.INLINE_COMPOSITE) {
//# Existing list is INLINE_COMPOSITE, but we need to verify that the sizes match.
long oldTag = resolved.segment.get(resolved.ptr);
int oldPtr = resolved.ptr + Constants.POINTER_SIZE_IN_WORDS;
if (WirePointer.kind(oldTag) != WirePointer.STRUCT) {
throw new DecodeException("INLINE_COMPOSITE list with non-STRUCT elements not supported.");
}
int oldDataSize = StructPointer.dataSize(oldTag);
short oldPointerCount = StructPointer.ptrCount(oldTag);
int oldStep = (oldDataSize + oldPointerCount * Constants.POINTER_SIZE_IN_WORDS);
int elementCount = WirePointer.inlineCompositeListElementCount(oldTag);
if (oldDataSize >= elementSize.data && oldPointerCount >= elementSize.pointers) {
//# Old size is at least as large as we need. Ship it.
return factory.constructBuilder(resolved.segment, oldPtr * Constants.BYTES_PER_WORD,
elementCount,
oldStep * Constants.BITS_PER_WORD,
oldDataSize * Constants.BITS_PER_WORD, oldPointerCount);
}
//# The structs in this list are smaller than expected, probably written using an older
//# version of the protocol. We need to make a copy and expand them.
short newDataSize = (short)Math.max(oldDataSize, elementSize.data);
short newPointerCount = (short)Math.max(oldPointerCount, elementSize.pointers);
int newStep = newDataSize + newPointerCount * Constants.WORDS_PER_POINTER;
int totalSize = newStep * elementCount;
//# Don't let allocate() zero out the object just yet.
zeroPointerAndFars(origSegment, origRefOffset);
AllocateResult allocation = allocate(origRefOffset, origSegment,
totalSize + Constants.POINTER_SIZE_IN_WORDS,
WirePointer.LIST);
ListPointer.setInlineComposite(allocation.segment.buffer, allocation.refOffset, totalSize);
long tag = allocation.segment.get(allocation.ptr);
WirePointer.setKindAndInlineCompositeListElementCount(
allocation.segment.buffer, allocation.ptr,
WirePointer.STRUCT, elementCount);
StructPointer.set(allocation.segment.buffer, allocation.ptr,
newDataSize, newPointerCount);
int newPtr = allocation.ptr + Constants.POINTER_SIZE_IN_WORDS;
int src = oldPtr;
int dst = newPtr;
for (int ii = 0; ii < elementCount; ++ii) {
//# Copy data section.
memcpy(allocation.segment.buffer, dst * Constants.BYTES_PER_WORD,
resolved.segment.buffer, src * Constants.BYTES_PER_WORD,
oldDataSize * Constants.BYTES_PER_WORD);
//# Copy pointer section.
int newPointerSection = dst + newDataSize;
int oldPointerSection = src + oldDataSize;
for (int jj = 0; jj < oldPointerCount; ++jj) {
transferPointer(allocation.segment, newPointerSection + jj,
resolved.segment, oldPointerSection + jj);
}
dst += newStep;
src += oldStep;
}
//# Zero out old location. See explanation in getWritableStructPointer().
memset(resolved.segment.buffer, resolved.ptr * Constants.BYTES_PER_WORD,
(byte)0, oldStep * elementCount * Constants.BYTES_PER_WORD);
return factory.constructBuilder(allocation.segment, newPtr * Constants.BYTES_PER_WORD,
elementCount,
newStep * Constants.BITS_PER_WORD,
newDataSize * Constants.BITS_PER_WORD,
newPointerCount);
} else {
//# We're upgrading from a non-struct list.
int oldDataSize = ElementSize.dataBitsPerElement(oldSize);
int oldPointerCount = ElementSize.pointersPerElement(oldSize);
int oldStep = oldDataSize + oldPointerCount * Constants.BITS_PER_POINTER;
int elementCount = ListPointer.elementCount(origRef);
if (oldSize == ElementSize.VOID) {
//# Nothing to copy, just allocate a new list.
return initStructListPointer(factory, origRefOffset, origSegment,
elementCount, elementSize);
} else {
//# Upgrading to an inline composite list.
if (oldSize == ElementSize.BIT) {
throw new Error("Found bit list where struct list was expected; " +
"upgrading boolean lists to struct is no longer supported.");
}
short newDataSize = elementSize.data;
short newPointerCount = elementSize.pointers;
if (oldSize == ElementSize.POINTER) {
newPointerCount = (short)Math.max(newPointerCount, 1);
} else {
//# Old list contains data elements, so we need at least 1 word of data.
newDataSize = (short)Math.max(newDataSize, 1);
}
int newStep = (newDataSize + newPointerCount * Constants.WORDS_PER_POINTER);
int totalWords = elementCount * newStep;
//# Don't let allocate() zero out the object just yet.
zeroPointerAndFars(origSegment, origRefOffset);
AllocateResult allocation = allocate(origRefOffset, origSegment,
totalWords + Constants.POINTER_SIZE_IN_WORDS,
WirePointer.LIST);
ListPointer.setInlineComposite(allocation.segment.buffer, allocation.refOffset, totalWords);
long tag = allocation.segment.get(allocation.ptr);
WirePointer.setKindAndInlineCompositeListElementCount(
allocation.segment.buffer, allocation.ptr,
WirePointer.STRUCT, elementCount);
StructPointer.set(allocation.segment.buffer, allocation.ptr,
newDataSize, newPointerCount);
int newPtr = allocation.ptr + Constants.POINTER_SIZE_IN_WORDS;
if (oldSize == ElementSize.POINTER) {
int dst = newPtr + newDataSize;
int src = resolved.ptr;
for (int ii = 0; ii < elementCount; ++ii) {
transferPointer(origSegment, dst, resolved.segment, src);
dst += newStep / Constants.WORDS_PER_POINTER;
src += 1;
}
} else {
int dst = newPtr;
int srcByteOffset = resolved.ptr * Constants.BYTES_PER_WORD;
int oldByteStep = oldDataSize / Constants.BITS_PER_BYTE;
for (int ii = 0; ii < elementCount; ++ii) {
memcpy(allocation.segment.buffer, dst * Constants.BYTES_PER_WORD,
resolved.segment.buffer, srcByteOffset, oldByteStep);
srcByteOffset += oldByteStep;
dst += newStep;
}
}
//# Zero out old location. See explanation in getWritableStructPointer().
memset(resolved.segment.buffer, resolved.ptr * Constants.BYTES_PER_WORD,
(byte)0, roundBitsUpToBytes(oldStep * elementCount));
return factory.constructBuilder(allocation.segment, newPtr * Constants.BYTES_PER_WORD,
elementCount,
newStep * Constants.BITS_PER_WORD,
newDataSize * Constants.BITS_PER_WORD,
newPointerCount);
}
}
}
// size is in bytes
static Text.Builder initTextPointer(int refOffset,
SegmentBuilder segment,
int size) {
//# The byte list must include a NUL terminator.
int byteSize = size + 1;
//# Allocate the space.
AllocateResult allocation = allocate(refOffset, segment, roundBytesUpToWords(byteSize),
WirePointer.LIST);
//# Initialize the pointer.
ListPointer.set(allocation.segment.buffer, allocation.refOffset, ElementSize.BYTE, byteSize);
return new Text.Builder(allocation.segment.buffer, allocation.ptr * Constants.BYTES_PER_WORD, size);
}
static Text.Builder setTextPointer(int refOffset,
SegmentBuilder segment,
Text.Reader value) {
Text.Builder builder = initTextPointer(refOffset, segment, value.size);
ByteBuffer slice = value.buffer.duplicate();
slice.position(value.offset);
slice.limit(value.offset + value.size);
builder.buffer.position(builder.offset);
builder.buffer.put(slice);
return builder;
}
    // Returns a builder over an existing text blob at `refOffset`, or
    // initializes one from the default value (or empty) if the pointer is null.
    // Validates that the existing pointer is a NUL-terminated byte list.
    static Text.Builder getWritableTextPointer(int refOffset,
                                               SegmentBuilder segment,
                                               ByteBuffer defaultBuffer,
                                               int defaultOffset,
                                               int defaultSize) {
        long ref = segment.get(refOffset);
        if (WirePointer.isNull(ref)) {
            if (defaultBuffer == null) {
                return new Text.Builder();
            } else {
                // Copy the default bytes into a fresh allocation.
                // NOTE(review): `defaultOffset * 8` treats defaultOffset as a
                // word offset into the default buffer — confirm against callers.
                Text.Builder builder = initTextPointer(refOffset, segment, defaultSize);
                // TODO is there a way to do this with bulk methods?
                for (int i = 0; i < builder.size; ++i) {
                    builder.buffer.put(builder.offset + i, defaultBuffer.get(defaultOffset * 8 + i));
                }
                return builder;
            }
        }
        // Resolve far pointers and validate the wire representation.
        int refTarget = WirePointer.target(refOffset, ref);
        FollowBuilderFarsResult resolved = followBuilderFars(ref, refTarget, segment);
        if (WirePointer.kind(resolved.ref) != WirePointer.LIST) {
            throw new DecodeException("Called getText{Field,Element} but existing pointer is not a list.");
        }
        if (ListPointer.elementSize(resolved.ref) != ElementSize.BYTE) {
            throw new DecodeException(
                "Called getText{Field,Element} but existing list pointer is not byte-sized.");
        }
        // `size` includes the NUL terminator; the last stored byte must be 0.
        int size = ListPointer.elementCount(resolved.ref);
        if (size == 0 ||
            resolved.segment.buffer.get(resolved.ptr * Constants.BYTES_PER_WORD + size - 1) != 0) {
            throw new DecodeException("Text blob missing NUL terminator.");
        }
        // Expose only the content bytes (exclude the NUL).
        return new Text.Builder(resolved.segment.buffer, resolved.ptr * Constants.BYTES_PER_WORD,
                                size - 1);
    }
// size is in bytes
static Data.Builder initDataPointer(int refOffset,
SegmentBuilder segment,
int size) {
//# Allocate the space.
AllocateResult allocation = allocate(refOffset, segment, roundBytesUpToWords(size),
WirePointer.LIST);
//# Initialize the pointer.
ListPointer.set(allocation.segment.buffer, allocation.refOffset, ElementSize.BYTE, size);
return new Data.Builder(allocation.segment.buffer, allocation.ptr * Constants.BYTES_PER_WORD, size);
}
static Data.Builder setDataPointer(int refOffset,
SegmentBuilder segment,
Data.Reader value) {
Data.Builder builder = initDataPointer(refOffset, segment, value.size);
// TODO is there a way to do this with bulk methods?
for (int i = 0; i < builder.size; ++i) {
builder.buffer.put(builder.offset + i, value.buffer.get(value.offset + i));
}
return builder;
}
    // Returns a builder over an existing data blob at `refOffset`, or
    // initializes one from the default value (or empty) if the pointer is null.
    // Validates that the existing pointer is a byte list.
    static Data.Builder getWritableDataPointer(int refOffset,
                                               SegmentBuilder segment,
                                               ByteBuffer defaultBuffer,
                                               int defaultOffset,
                                               int defaultSize) {
        long ref = segment.get(refOffset);
        if (WirePointer.isNull(ref)) {
            if (defaultBuffer == null) {
                return new Data.Builder();
            } else {
                // Copy the default bytes into a fresh allocation.
                // NOTE(review): `defaultOffset * 8` treats defaultOffset as a
                // word offset into the default buffer — confirm against callers.
                Data.Builder builder = initDataPointer(refOffset, segment, defaultSize);
                // TODO is there a way to do this with bulk methods?
                for (int i = 0; i < builder.size; ++i) {
                    builder.buffer.put(builder.offset + i, defaultBuffer.get(defaultOffset * 8 + i));
                }
                return builder;
            }
        }
        // Resolve far pointers and validate the wire representation.
        int refTarget = WirePointer.target(refOffset, ref);
        FollowBuilderFarsResult resolved = followBuilderFars(ref, refTarget, segment);
        if (WirePointer.kind(resolved.ref) != WirePointer.LIST) {
            throw new DecodeException("Called getData{Field,Element} but existing pointer is not a list.");
        }
        if (ListPointer.elementSize(resolved.ref) != ElementSize.BYTE) {
            throw new DecodeException(
                "Called getData{Field,Element} but existing list pointer is not byte-sized.");
        }
        return new Data.Builder(resolved.segment.buffer, resolved.ptr * Constants.BYTES_PER_WORD,
                                ListPointer.elementCount(resolved.ref));
    }
    // Reads the struct pointer at `refOffset` and returns a reader for the
    // target struct. Falls back to the default value if the pointer is null
    // (or to an empty reader when there is no default). Enforces the nesting
    // limit and the arena's read limit against amplification attacks.
    static <T> T readStructPointer(StructReader.Factory<T> factory,
                                   SegmentReader segment,
                                   int refOffset,
                                   SegmentReader defaultSegment,
                                   int defaultOffset,
                                   int nestingLimit) {
        long ref = segment.get(refOffset);
        if (WirePointer.isNull(ref)) {
            if (defaultSegment == null) {
                // No default: an all-zero reader with an effectively unlimited
                // nesting budget.
                return factory.constructReader(SegmentReader.EMPTY, 0, 0, 0, (short) 0, 0x7fffffff);
            } else {
                // Re-read through the default value's pointer instead.
                segment = defaultSegment;
                refOffset = defaultOffset;
                ref = segment.get(refOffset);
            }
        }
        if (nestingLimit <= 0) {
            throw new DecodeException("Message is too deeply nested or contains cycles.");
        }
        // Resolve far pointers, then validate the pointer kind.
        int refTarget = WirePointer.target(refOffset, ref);
        FollowFarsResult resolved = followFars(ref, refTarget, segment);
        int dataSizeWords = StructPointer.dataSize(resolved.ref);
        if (WirePointer.kind(resolved.ref) != WirePointer.STRUCT) {
            throw new DecodeException("Message contains non-struct pointer where struct pointer was expected.");
        }
        // Count the whole struct against the traversal (read) limit.
        resolved.segment.arena.checkReadLimit(StructPointer.wordSize(resolved.ref));
        return factory.constructReader(resolved.segment,
                                       resolved.ptr * Constants.BYTES_PER_WORD,
                                       (resolved.ptr + dataSizeWords),
                                       dataSizeWords * Constants.BITS_PER_WORD,
                                       StructPointer.ptrCount(resolved.ref),
                                       nestingLimit - 1);
    }
static SegmentBuilder setStructPointer(SegmentBuilder segment, int refOffset, StructReader value) {
short dataSize = (short)roundBitsUpToWords(value.dataSize);
int totalSize = dataSize + value.pointerCount * Constants.POINTER_SIZE_IN_WORDS;
AllocateResult allocation = allocate(refOffset, segment, totalSize, WirePointer.STRUCT);
StructPointer.set(allocation.segment.buffer, allocation.refOffset,
dataSize, value.pointerCount);
if (value.dataSize == 1) {
throw new Error("single bit case not handled");
} else {
memcpy(allocation.segment.buffer, allocation.ptr * Constants.BYTES_PER_WORD,
value.segment.buffer, value.data, value.dataSize / Constants.BITS_PER_BYTE);
}
int pointerSection = allocation.ptr + dataSize;
for (int i = 0; i < value.pointerCount; ++i) {
copyPointer(allocation.segment, pointerSection + i, value.segment, value.pointers + i,
value.nestingLimit);
}
return allocation.segment;
};
    // Deep-copies the list `value` into a new allocation and writes a list
    // pointer to it at `refOffset`. Returns the segment that actually received
    // the allocation.
    static SegmentBuilder setListPointer(SegmentBuilder segment, int refOffset, ListReader value) {
        int totalSize = roundBitsUpToWords(value.elementCount * value.step);
        if (value.step <= Constants.BITS_PER_WORD) {
            //# List of non-structs.
            AllocateResult allocation = allocate(refOffset, segment, totalSize, WirePointer.LIST);
            if (value.structPointerCount == 1) {
                //# List of pointers.
                // Each element is a single pointer; deep-copy one by one.
                ListPointer.set(allocation.segment.buffer, allocation.refOffset, ElementSize.POINTER, value.elementCount);
                for (int i = 0; i < value.elementCount; ++i) {
                    copyPointer(allocation.segment, allocation.ptr + i,
                                value.segment, value.ptr / Constants.BYTES_PER_WORD + i, value.nestingLimit);
                }
            } else {
                //# List of data.
                // Map the per-element bit width back to a wire element size tag.
                byte elementSize = ElementSize.VOID;
                switch (value.step) {
                case 0: elementSize = ElementSize.VOID; break;
                case 1: elementSize = ElementSize.BIT; break;
                case 8: elementSize = ElementSize.BYTE; break;
                case 16: elementSize = ElementSize.TWO_BYTES; break;
                case 32: elementSize = ElementSize.FOUR_BYTES; break;
                case 64: elementSize = ElementSize.EIGHT_BYTES; break;
                default:
                    throw new Error("invalid list step size: " + value.step);
                }
                ListPointer.set(allocation.segment.buffer, allocation.refOffset, elementSize, value.elementCount);
                // Raw data lists can be copied in one shot.
                memcpy(allocation.segment.buffer, allocation.ptr * Constants.BYTES_PER_WORD,
                       value.segment.buffer, value.ptr, totalSize * Constants.BYTES_PER_WORD);
            }
            return allocation.segment;
        } else {
            //# List of structs.
            // INLINE_COMPOSITE: one extra word for the tag preceding the elements.
            AllocateResult allocation = allocate(refOffset, segment, totalSize + Constants.POINTER_SIZE_IN_WORDS, WirePointer.LIST);
            ListPointer.setInlineComposite(allocation.segment.buffer, allocation.refOffset, totalSize);
            short dataSize = (short)roundBitsUpToWords(value.structDataSize);
            short pointerCount = value.structPointerCount;
            WirePointer.setKindAndInlineCompositeListElementCount(allocation.segment.buffer, allocation.ptr,
                                                                  WirePointer.STRUCT, value.elementCount);
            StructPointer.set(allocation.segment.buffer, allocation.ptr,
                              dataSize, pointerCount);
            int dstOffset = allocation.ptr + Constants.POINTER_SIZE_IN_WORDS;
            int srcOffset = value.ptr / Constants.BYTES_PER_WORD;
            // Per element: copy the data section, then deep-copy each pointer.
            for (int i = 0; i < value.elementCount; ++i) {
                memcpy(allocation.segment.buffer, dstOffset * Constants.BYTES_PER_WORD,
                       value.segment.buffer, srcOffset * Constants.BYTES_PER_WORD,
                       value.structDataSize / Constants.BITS_PER_BYTE);
                dstOffset += dataSize;
                srcOffset += dataSize;
                for (int j = 0; j < pointerCount; ++j) {
                    copyPointer(allocation.segment, dstOffset, value.segment, srcOffset, value.nestingLimit);
                    dstOffset += Constants.POINTER_SIZE_IN_WORDS;
                    srcOffset += Constants.POINTER_SIZE_IN_WORDS;
                }
            }
            return allocation.segment;
        }
    }
static void memset(ByteBuffer dstBuffer, int dstByteOffset, byte value, int length) {
// TODO we can probably do this faster
for (int ii = dstByteOffset; ii < dstByteOffset + length; ++ii) {
dstBuffer.put(ii, value);
}
}
static void memcpy(ByteBuffer dstBuffer, int dstByteOffset, ByteBuffer srcBuffer, int srcByteOffset, int length) {
ByteBuffer dstDup = dstBuffer.duplicate();
dstDup.position(dstByteOffset);
dstDup.limit(dstByteOffset + length);
ByteBuffer srcDup = srcBuffer.duplicate();
srcDup.position(srcByteOffset);
srcDup.limit(srcByteOffset + length);
dstDup.put(srcDup);
}
    // Deep-copies whatever object the pointer at (srcSegment, srcOffset) targets
    // into dstSegment at dstOffset, accepting any valid pointer kind. Returns
    // the segment the copy landed in.
    static SegmentBuilder copyPointer(SegmentBuilder dstSegment, int dstOffset,
                                      SegmentReader srcSegment, int srcOffset, int nestingLimit) {
        // Deep-copy the object pointed to by src into dst. It turns out we can't reuse
        // readStructPointer(), etc. because they do type checking whereas here we want to accept any
        // valid pointer.
        long srcRef = srcSegment.get(srcOffset);
        if (WirePointer.isNull(srcRef)) {
            // A null pointer copies as a null pointer.
            dstSegment.buffer.putLong(dstOffset * 8, 0L);
            return dstSegment;
        }
        // Resolve far pointers, then dispatch on the pointer kind.
        int srcTarget = WirePointer.target(srcOffset, srcRef);
        FollowFarsResult resolved = followFars(srcRef, srcTarget, srcSegment);
        switch (WirePointer.kind(resolved.ref)) {
        case WirePointer.STRUCT :
            if (nestingLimit <= 0) {
                throw new DecodeException("Message is too deeply nested or contains cycles. See org.capnproto.ReaderOptions.");
            }
            resolved.segment.arena.checkReadLimit(StructPointer.wordSize(resolved.ref));
            // Wrap the source struct in a reader and let setStructPointer do the copy.
            return setStructPointer(dstSegment, dstOffset,
                                    new StructReader(resolved.segment,
                                                     resolved.ptr * Constants.BYTES_PER_WORD,
                                                     resolved.ptr + StructPointer.dataSize(resolved.ref),
                                                     StructPointer.dataSize(resolved.ref) * Constants.BITS_PER_WORD,
                                                     StructPointer.ptrCount(resolved.ref),
                                                     nestingLimit - 1));
        case WirePointer.LIST :
            byte elementSize = ListPointer.elementSize(resolved.ref);
            if (nestingLimit <= 0) {
                throw new DecodeException("Message is too deeply nested or contains cycles. See org.capnproto.ReaderOptions.");
            }
            if (elementSize == ElementSize.INLINE_COMPOSITE) {
                // Struct list: validate the tag word before copying.
                int wordCount = ListPointer.inlineCompositeWordCount(resolved.ref);
                long tag = resolved.segment.get(resolved.ptr);
                int ptr = resolved.ptr + 1;
                // +1 accounts for the tag word itself.
                resolved.segment.arena.checkReadLimit(wordCount + 1);
                if (WirePointer.kind(tag) != WirePointer.STRUCT) {
                    throw new DecodeException("INLINE_COMPOSITE lists of non-STRUCT type are not supported.");
                }
                int elementCount = WirePointer.inlineCompositeListElementCount(tag);
                int wordsPerElement = StructPointer.wordSize(tag);
                // Claimed element count must fit within the claimed word count.
                if ((long)wordsPerElement * elementCount > wordCount) {
                    throw new DecodeException("INLINE_COMPOSITE list's elements overrun its word count.");
                }
                if (wordsPerElement == 0) {
                    // Watch out for lists of zero-sized structs, which can claim to be arbitrarily
                    // large without having sent actual data.
                    resolved.segment.arena.checkReadLimit(elementCount);
                }
                return setListPointer(dstSegment, dstOffset,
                                      new ListReader(resolved.segment,
                                                     ptr * Constants.BYTES_PER_WORD,
                                                     elementCount,
                                                     wordsPerElement * Constants.BITS_PER_WORD,
                                                     StructPointer.dataSize(tag) * Constants.BITS_PER_WORD,
                                                     StructPointer.ptrCount(tag),
                                                     nestingLimit - 1));
            } else {
                // Primitive or pointer list.
                int dataSize = ElementSize.dataBitsPerElement(elementSize);
                short pointerCount = ElementSize.pointersPerElement(elementSize);
                int step = dataSize + pointerCount * Constants.BITS_PER_POINTER;
                int elementCount = ListPointer.elementCount(resolved.ref);
                int wordCount = roundBitsUpToWords((long) elementCount * step);
                resolved.segment.arena.checkReadLimit(wordCount);
                if (elementSize == ElementSize.VOID) {
                    // Watch out for lists of void, which can claim to be arbitrarily large without
                    // having sent actual data.
                    resolved.segment.arena.checkReadLimit(elementCount);
                }
                return setListPointer(dstSegment, dstOffset,
                                      new ListReader(resolved.segment,
                                                     resolved.ptr * Constants.BYTES_PER_WORD,
                                                     elementCount,
                                                     step,
                                                     dataSize,
                                                     pointerCount,
                                                     nestingLimit - 1));
            }
        case WirePointer.FAR :
            // followFars() already chased any far pointer; seeing one here means
            // the message is malformed or there is a logic error.
            throw new Error("Far pointer should have been handled above.");
        case WirePointer.OTHER :
            throw new Error("copyPointer is unimplemented");
        }
        throw new Error("unreachable");
    }
    // Reads the list pointer at `refOffset` and returns a reader for the target
    // list, falling back to the default (or an empty reader) when null.
    // Validates element-size compatibility against `expectedElementSize` and
    // enforces nesting and read limits.
    static <T> T readListPointer(ListReader.Factory<T> factory,
                                 SegmentReader segment,
                                 int refOffset,
                                 SegmentReader defaultSegment,
                                 int defaultOffset,
                                 byte expectedElementSize,
                                 int nestingLimit) {
        long ref = segment.get(refOffset);
        if (WirePointer.isNull(ref)) {
            if (defaultSegment == null) {
                // No default: an all-zero, zero-length reader.
                return factory.constructReader(SegmentReader.EMPTY, 0, 0, 0, 0, (short) 0, 0x7fffffff);
            } else {
                // Re-read through the default value's pointer instead.
                segment = defaultSegment;
                refOffset = defaultOffset;
                ref = segment.get(refOffset);
            }
        }
        if (nestingLimit <= 0) {
            throw new Error("nesting limit exceeded");
        }
        // Resolve far pointers to the actual list location.
        int refTarget = WirePointer.target(refOffset, ref);
        FollowFarsResult resolved = followFars(ref, refTarget, segment);
        byte elementSize = ListPointer.elementSize(resolved.ref);
        switch (elementSize) {
        case ElementSize.INLINE_COMPOSITE : {
            // Struct list: the first word is a tag holding the element count and
            // per-element struct size.
            int wordCount = ListPointer.inlineCompositeWordCount(resolved.ref);
            long tag = resolved.segment.get(resolved.ptr);
            int ptr = resolved.ptr + 1;
            // +1 accounts for the tag word itself.
            resolved.segment.arena.checkReadLimit(wordCount + 1);
            int size = WirePointer.inlineCompositeListElementCount(tag);
            int wordsPerElement = StructPointer.wordSize(tag);
            if ((long)size * wordsPerElement > wordCount) {
                throw new DecodeException("INLINE_COMPOSITE list's elements overrun its word count.");
            }
            if (wordsPerElement == 0) {
                // Watch out for lists of zero-sized structs, which can claim to be arbitrarily
                // large without having sent actual data.
                resolved.segment.arena.checkReadLimit(size);
            }
            // TODO check whether the size is compatible
            return factory.constructReader(resolved.segment,
                                           ptr * Constants.BYTES_PER_WORD,
                                           size,
                                           wordsPerElement * Constants.BITS_PER_WORD,
                                           StructPointer.dataSize(tag) * Constants.BITS_PER_WORD,
                                           StructPointer.ptrCount(tag),
                                           nestingLimit - 1);
        }
        default : {
            //# This is a primitive or pointer list, but all such
            //# lists can also be interpreted as struct lists. We
            //# need to compute the data size and pointer count for
            //# such structs.
            int dataSize = ElementSize.dataBitsPerElement(ListPointer.elementSize(resolved.ref));
            int pointerCount = ElementSize.pointersPerElement(ListPointer.elementSize(resolved.ref));
            int elementCount = ListPointer.elementCount(resolved.ref);
            int step = dataSize + pointerCount * Constants.BITS_PER_POINTER;
            resolved.segment.arena.checkReadLimit(
                roundBitsUpToWords(elementCount * step));
            if (elementSize == ElementSize.VOID) {
                // Watch out for lists of void, which can claim to be arbitrarily large without
                // having sent actual data.
                resolved.segment.arena.checkReadLimit(elementCount);
            }
            //# Verify that the elements are at least as large as
            //# the expected type. Note that if we expected
            //# InlineComposite, the expected sizes here will be
            //# zero, because bounds checking will be performed at
            //# field access time. So this check here is for the
            //# case where we expected a list of some primitive or
            //# pointer type.
            int expectedDataBitsPerElement = ElementSize.dataBitsPerElement(expectedElementSize);
            int expectedPointersPerElement = ElementSize.pointersPerElement(expectedElementSize);
            if (expectedDataBitsPerElement > dataSize) {
                throw new DecodeException("Message contains list with incompatible element type.");
            }
            if (expectedPointersPerElement > pointerCount) {
                throw new DecodeException("Message contains list with incompatible element type.");
            }
            return factory.constructReader(resolved.segment,
                                           resolved.ptr * Constants.BYTES_PER_WORD,
                                           ListPointer.elementCount(resolved.ref),
                                           step,
                                           dataSize,
                                           (short)pointerCount,
                                           nestingLimit - 1);
        }
        }
    }
    // Reads the text pointer at `refOffset` and returns a reader over the
    // content bytes (excluding the NUL terminator). Falls back to the default
    // buffer (or an empty reader) when the pointer is null. Validates that the
    // target is a NUL-terminated byte list.
    static Text.Reader readTextPointer(SegmentReader segment,
                                       int refOffset,
                                       ByteBuffer defaultBuffer,
                                       int defaultOffset,
                                       int defaultSize) {
        long ref = segment.get(refOffset);
        if (WirePointer.isNull(ref)) {
            if (defaultBuffer == null) {
                return new Text.Reader();
            } else {
                return new Text.Reader(defaultBuffer, defaultOffset, defaultSize);
            }
        }
        // Resolve far pointers, then validate the wire representation.
        int refTarget = WirePointer.target(refOffset, ref);
        FollowFarsResult resolved = followFars(ref, refTarget, segment);
        // `size` includes the NUL terminator byte.
        int size = ListPointer.elementCount(resolved.ref);
        if (WirePointer.kind(resolved.ref) != WirePointer.LIST) {
            throw new DecodeException("Message contains non-list pointer where text was expected.");
        }
        if (ListPointer.elementSize(resolved.ref) != ElementSize.BYTE) {
            throw new DecodeException("Message contains list pointer of non-bytes where text was expected.");
        }
        resolved.segment.arena.checkReadLimit(roundBytesUpToWords(size));
        // The final stored byte must be the NUL terminator.
        if (size == 0 || resolved.segment.buffer.get(8 * resolved.ptr + size - 1) != 0) {
            throw new DecodeException("Message contains text that is not NUL-terminated.");
        }
        // Expose only the content bytes (exclude the NUL).
        return new Text.Reader(resolved.segment.buffer, resolved.ptr, size - 1);
    }
    // Reads the data pointer at `refOffset` and returns a reader over the raw
    // bytes. Falls back to the default buffer (or an empty reader) when the
    // pointer is null. Validates that the target is a byte list.
    static Data.Reader readDataPointer(SegmentReader segment,
                                       int refOffset,
                                       ByteBuffer defaultBuffer,
                                       int defaultOffset,
                                       int defaultSize) {
        long ref = segment.get(refOffset);
        if (WirePointer.isNull(ref)) {
            if (defaultBuffer == null) {
                return new Data.Reader();
            } else {
                return new Data.Reader(defaultBuffer, defaultOffset, defaultSize);
            }
        }
        // Resolve far pointers, then validate the wire representation.
        int refTarget = WirePointer.target(refOffset, ref);
        FollowFarsResult resolved = followFars(ref, refTarget, segment);
        int size = ListPointer.elementCount(resolved.ref);
        if (WirePointer.kind(resolved.ref) != WirePointer.LIST) {
            throw new DecodeException("Message contains non-list pointer where data was expected.");
        }
        if (ListPointer.elementSize(resolved.ref) != ElementSize.BYTE) {
            throw new DecodeException("Message contains list pointer of non-bytes where data was expected.");
        }
        // Count the whole blob against the traversal (read) limit.
        resolved.segment.arena.checkReadLimit(roundBytesUpToWords(size));
        return new Data.Reader(resolved.segment.buffer, resolved.ptr, size);
    }
} |
package org.sergiiz.rxkata;
// Immutable value object describing a country: its name, currency code, and
// population count.
class Country {
    final String name;
    final String currency;
    final long population;

    Country(String name, String currency, long population) {
        this.name = name;
        this.currency = currency;
        this.population = population;
    }

    public String getName() {
        return name;
    }

    public String getCurrency() {
        return currency;
    }

    public long getPopulation() {
        return population;
    }

    @Override
    public String toString() {
        // Built with a StringBuilder; output is identical to the original
        // concatenated form: Country{name='…', currency='…', population=…}
        StringBuilder sb = new StringBuilder("Country{");
        sb.append("name='").append(name).append('\'');
        sb.append(", currency='").append(currency).append('\'');
        sb.append(", population=").append(population);
        sb.append('}');
        return sb.toString();
    }
}
package test;
import static org.junit.Assert.*;
import org.junit.Test;
import elements.HTMLElement;
// Exercises the basic HTMLElement API: element naming, attribute setting,
// inner text, and nesting of child elements.
public class HTMLElementTest {
    // Two elements so nesting (appendInnerHTMLElement) can be exercised.
    HTMLElement element = new HTMLElement();
    HTMLElement element2 = new HTMLElement();
    // Expected serialization of `element` before any text or children are added.
    String test = "<Name id=\"test\" class=\"test\">\n</Name>";

    @Test
    public void test() {
        // Verify name + attribute serialization.
        // NOTE(review): "setAtrribute" is a typo in HTMLElement's public API;
        // renaming it would require changing that class and all callers.
        element.setName("Name");
        element.setAtrribute("id", "test");
        element.setAtrribute("class", "test");
        assertEquals(test, element.toString());
        // Verify inner text round-trips.
        element.appendText("test Text");
        assertEquals("test Text",element.getInnerText());
        // Nest a second element inside the first.
        element2.setName("name2");
        element2.setAtrribute("id", "test2");
        element2.setAtrribute("class", "test2");
        element.appendInnerHTMLElement(element2);
        // NOTE(review): the nested output is only printed, not asserted —
        // consider asserting the expected nested markup instead.
        System.out.println(element.toString());
    }
}
package org.jetel.graph;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.commons.io.filefilter.AbstractFileFilter;
import org.apache.commons.io.filefilter.IOFileFilter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jetel.graph.runtime.EngineInitializer;
import org.jetel.graph.runtime.GraphRuntimeContext;
import org.jetel.main.runGraph;
import org.jetel.test.CloverTestCase;
import org.jetel.util.file.FileUtils;
import org.jetel.util.string.StringUtils;
public class ResetTest extends CloverTestCase {
private final static String SCENARIOS_RELATIVE_PATH = "../cloveretl.test.scenarios/";
private final static String[] EXAMPLE_PATH = {
"../cloveretl.examples/SimpleExamples/",
"../cloveretl.examples/AdvancedExamples/",
"../cloveretl.examples/CTL1FunctionsTutorial/",
"../cloveretl.examples/CTL2FunctionsTutorial/",
"../cloveretl.examples/DataProfiling/",
"../cloveretl.examples/DataSampling/",
"../cloveretl.examples/ExtExamples/",
"../cloveretl.examples.community/RealWorldExamples/",
"../cloveretl.examples.community/WebSiteExamples/",
"../cloveretl.examples/BasicExamples/",
"../cloveretl.test.scenarios/",
"../cloveretl.examples.commercial/CommercialExamples/",
"../cloveretl.examples.commercial/DataQualityExamples/",
"../cloveretl.examples/CompanyTransactionsTutorial/"
};
private final static String[] NEEDS_SCENARIOS_CONNECTION = {
"graphRevenues.grf",
"graphDBExecuteMsSql.grf",
"graphDBExecuteMySql.grf",
"graphDBExecuteOracle.grf",
"graphDBExecutePostgre.grf",
"graphDBExecuteSybase.grf",
"graphInfobrightDataWriterRemote.grf",
"graphLdapReaderWriter.grf"
};
private final static String[] NEEDS_SCENARIOS_LIB = {
"graphDBExecuteOracle.grf",
"graphDBExecuteSybase.grf",
"graphLdapReaderWriter.grf"
};
private final static Map<String, List<String>> CLASSPATHS = new HashMap<String, List<String>>();
static {
CLASSPATHS.put("rpc-literal-service-test.grf", Collections.singletonList("lib/rpc-literal-test.jar"));
}
private final static String GRAPHS_DIR = "graph";
private final static String TRANS_DIR = "trans";
private final static String[] OUT_DIRS = {"data-out/", "data-tmp/", "seq/"};
private final String basePath;
private final File graphFile;
private final boolean batchMode;
private boolean cleanUp = true;
private static Log logger = LogFactory.getLog(ResetTest.class);
public static Test suite() {
final TestSuite suite = new TestSuite();
for (int i = 0; i < EXAMPLE_PATH.length; i++) {
logger.info("Testing graphs in " + EXAMPLE_PATH[i]);
final File graphsDir = new File(EXAMPLE_PATH[i], GRAPHS_DIR);
if(!graphsDir.exists()){
throw new IllegalStateException("Graphs directory " + graphsDir.getAbsolutePath() +" not found");
}
IOFileFilter fileFilter = new AbstractFileFilter() {
@Override
public boolean accept(File file) {
return file.getName().endsWith(".grf")
&& !file.getName().startsWith("TPCH")// ok, performance tests - last very long
&& !file.getName().contains("Performance")// ok, performance tests - last very long
&& !file.getName().equals("graphJoinData.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphJoinHash.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphOrdersReformat.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphDataGeneratorExt.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphApproximativeJoin.grf") // ok, uses class file that is not created
&& !file.getName().equals("graphDBJoin.grf") // ok, uses class file that is not created
&& !file.getName().equals("conversionNum2num.grf") // ok, should fail
&& !file.getName().equals("outPortWriting.grf") // ok, should fail
&& !file.getName().equals("graphDb2Load.grf") // ok, can only work with db2 client
&& !file.getName().equals("graphMsSqlDataWriter.grf") // ok, can only work with MsSql client
&& !file.getName().equals("graphMysqlDataWriter.grf") // ok, can only work with MySql client
&& !file.getName().equals("graphOracleDataWriter.grf") // ok, can only work with Oracle client
&& !file.getName().equals("graphPostgreSqlDataWriter.grf") // ok, can only work with postgre client
&& !file.getName().equals("graphInformixDataWriter.grf") // ok, can only work with informix server
&& !file.getName().equals("graphInfobrightDataWriter.grf") // ok, can only work with infobright server
&& !file.getName().equals("graphSystemExecuteWin.grf") // ok, graph for Windows
&& !file.getName().equals("graphLdapReader_Uninett.grf") // ok, invalid server
&& !file.getName().equals("graphSequenceChecker.grf") // ok, is to fail
&& !file.getName().equals("FixedData.grf") // ok, is to fail
&& !file.getName().equals("xpathReaderStates.grf") // ok, is to fail
&& !file.getName().equals("graphDataPolicy.grf") // ok, is to fail
&& !file.getName().equals("conversionDecimal2integer.grf") // ok, is to fail
&& !file.getName().equals("conversionDecimal2long.grf") // ok, is to fail
&& !file.getName().equals("conversionDouble2integer.grf") // ok, is to fail
&& !file.getName().equals("conversionDouble2long.grf") // ok, is to fail
&& !file.getName().equals("conversionLong2integer.grf") // ok, is to fail
&& !file.getName().equals("nativeSortTestGraph.grf") // ok, invalid paths
&& !file.getName().equals("mountainsInformix.grf") // see issue 2550
&& !file.getName().equals("SystemExecuteWin_EchoFromFile.grf") // graph for windows
&& !file.getName().equals("XLSEncryptedFail.grf") // ok, is to fail
&& !file.getName().equals("XLSXEncryptedFail.grf") // ok, is to fail
&& !file.getName().equals("XLSInvalidFile.grf") // ok, is to fail
&& !file.getName().equals("XLSReaderOrderMappingFail.grf") // ok, is to fail
&& !file.getName().equals("XLSXReaderOrderMappingFail.grf") // ok, is to fail
&& !file.getName().equals("XLSWildcardStrict.grf") // ok, is to fail
&& !file.getName().equals("XLSXWildcardStrict.grf") // ok, is to fail
&& !file.getName().equals("XLSWildcardControlled1.grf") // ok, is to fail
&& !file.getName().equals("XLSXWildcardControlled1.grf") // ok, is to fail
&& !file.getName().equals("XLSWildcardControlled7.grf") // ok, is to fail
&& !file.getName().equals("XLSXWildcardControlled7.grf") // ok, is to fail
&& !file.getName().equals("SSWRITER_MultilineInsertIntoTemplate.grf") // uses graph parameter definition from after-commit.ts
&& !file.getName().equals("SSWRITER_FormatInMetadata.grf") // uses graph parameter definition from after-commit.ts
&& !file.getName().equals("WSC_NamespaceBindingsDefined.grf") // ok, is to fail
&& !file.getName().equals("FailingGraph.grf") // ok, is to fail
&& !file.getName().equals("RunGraph_FailWhenUnderlyingGraphFails.grf") // probably should fail, recheck after added to after-commit.ts
&& !file.getName().equals("DataIntersection_order_check_A.grf") // ok, is to fail
&& !file.getName().equals("DataIntersection_order_check_B.grf") // ok, is to fail
&& !file.getName().equals("UDR_Logging_SFTP_CL1469.grf") // ok, is to fail
&& !file.getName().startsWith("AddressDoctor") //wrong path to db file, try to fix when AD installed on jenkins machines
&& !file.getName().equals("EmailReader_Local.grf") // remove after CL-2167 solved
&& !file.getName().equals("EmailReader_Server.grf") // remove after CLD-3437 solved (or mail.javlin.eu has valid certificate)
&& !file.getName().contains("firebird") // remove after CL-2170 solved
&& !file.getName().startsWith("ListOfRecords_Functions_02_") // remove after CL-2173 solved
&& !file.getName().equals("UDR_FileURL_OneZipMultipleFilesUnspecified.grf") // remove after CL-2174 solved
&& !file.getName().equals("UDR_FileURL_OneZipOneFileUnspecified.grf") // remove after CL-2174 solved
&& !file.getName().startsWith("MapOfRecords_Functions_01_Compiled_") // remove after CL-2175 solved
&& !file.getName().startsWith("MapOfRecords_Functions_01_Interpreted_") // remove after CL-2176 solved
&& !file.getName().equals("manyRecords.grf") // remove after CL-1292 implemented
&& !file.getName().equals("packedDecimal.grf") // remove after CL-1811 solved
&& !file.getName().equals("SimpleZipWrite.grf") // used by ArchiveFlushTest.java, doesn't make sense to run it separately
&& !file.getName().equals("XMLExtract_TKLK_003_Back.grf") // needs output from XMLWriter_LKTW_003.grf
&& !file.getName().equals("XMLWriter-CL-2404-CNO_OTF_ITSS.grf") // runs too long
&& !file.getName().equals("SQLDataParser_precision_CL2187.grf") // ok, is to fail
&& !file.getName().equals("incrementalReadingDB_explicitMapping.grf") // remove after CL-2239 solved
&& !file.getName().equals("HTTPConnector_get_bodyparams.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_error_unknownhost.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_error_unknownprotocol.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_inputfield.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_inputfileURL.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_get_requestcontent.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_post_error_unknownhost.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_post_error_unknownprotocol.grf") // ok, is to fail
&& !file.getName().equals("HTTPConnector_inputmapping_null_values.grf") // ok, is to fail
&& !file.getName().equals("HttpConnector_errHandlingNoRedir.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_fileURL_not_xml.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_charset_invalid.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_mappingURL_missing.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_fileURL_not_exists.grf") // ok, is to fail
&& !file.getName().equals("XMLExtract_charset_not_default_fail.grf") // ok, is to fail
&& !file.getName().equals("RunGraph_differentOutputMetadataFail.grf") // ok, is to fail
&& !file.getName().equals("LUTPersistent_wrong_metadata.grf") // ok, is to fail
&& !file.getName().equals("UDW_nonExistingDir_fail_CL-2478.grf") // ok, is to fail
&& !file.getName().equals("CTL_lookup_put_fail.grf") // ok, is to fail
&& !file.getName().equals("SystemExecute_printBatchFile.grf") // ok, is to fail
&& !file.getName().equals("JoinMergeIssue_FailWhenMasterUnsorted.grf") // ok, is to fail
&& !file.getName().equals("UDW_remoteZipPartitioning_fail_CL-2564.grf") // ok, is to fail
&& !file.getName().equals("checkConfigTest.grf") // ok, is to fail
&& !file.getName().equals("DebuggingGraph.grf") // ok, is to fail
&& !file.getName().equals("graphDebuggingGraph.grf") // ok, is to fail
&& !file.getName().equals("CLO-404-recordCountsInErrorMessage.grf") // ok, is to fail
&& !file.getName().equals("TreeReader_CLO-4699.grf") // ok, is to fail
&& !file.getName().matches("Locale_.*_default.grf") // server only
&& !file.getName().equals("CompanyChecks.grf") // an example that needs embedded derby
&& !file.getName().equals("DatabaseAccess.grf") // an example that needs embedded derby
&& !file.getName().equals("graphDatabaseAccess.grf") // an example that needs embedded derby
&& !file.getName().equals("Twitter.grf") // an example that needs credentials
&& !file.getName().equals("XMLReader_no_output_port.grf") // ok, is to fail
&& !file.getName().startsWith("Proxy_") // allowed to run only on virt-cyan as proxy tests
&& !file.getName().equals("SandboxOperationHandlerTest.grf") // runs only on server
&& !file.getName().equals("DenormalizerWithoutInputFile.grf") // probably subgraph not supposed to be executed separately
&& !file.getName().equals("SimpleSequence_longValue.grf") // needs the sequence to be reset on start
&& !file.getName().equals("BeanWriterReader_employees.grf") // remove after CL-2474 solved
&& !file.getName().equals("GraphParameters_secure.grf") // server test
&& !file.getName().equals("GraphParameters_secureOverriden.grf") // server test
&& !file.getName().equals("GraphParameters_secureOverriden_subgraph.grf") // subgraph of server test
&& !file.getName().equals("SSR_CloseOnError.grf") // subgraph of server test
&& !file.getName().equals("TypedProperties_CLO-1997.grf") // server test
&& !file.getName().equals("EmptySubGraph.grf") // server test
&& !file.getName().equals("ParallelReader_HDFS.grf") // cluster test
&& !file.getName().equals("SubgraphsReuse.grf") // contains subgraphs
&& !file.getName().startsWith("Issues") // contains subgraphs
&& !file.getName().equals("SubgraphsSimplifyingGraph.grf") // contains subgraphs
&& !file.getName().equals("GEOCoding.grf") // contains subgraphs
&& !file.getName().equals("RandomDataGenerator.grf") // contains subgraphs
&& !file.getName().equals("graphHTTPConnector.grf") // external service is unstable
&& !file.getName().equals("CLO-2214_pre_post_execute_race_condition.grf") // ok, is to fail
&& !file.getName().equals("EmptyGraph.grf") // ok, is to fail
&& !file.getName().equals("informix.grf") // remove after CLO-2793 solved
&& !file.getName().equals("EmailReader_BadDataFormatException.grf") // ok, is to fail
&& !file.getName().equals("PhaseOrderCheck.grf") // ok, is to fail
&& !file.getName().equals("JoinApproximative_invalid_join_key_CLO-4748.grf") // ok, is to fail
&& !file.getName().equals("ExtSort_missing_sort_key_CLO-4741.grf") // ok, is to fail
&& !file.getName().equals("Transformations_invalid_language.grf") // ok, is to fail
&& !file.getName().equals("graphCloverData.grf") // remove after CLO-4360 fixed
&& !file.getName().equals("MetadataWriting.grf"); // server test
}
};
IOFileFilter dirFilter = new AbstractFileFilter() {
@Override
public boolean accept(File file) {
return file.isDirectory() && file.getName().equals("hadoop");
}
};
Collection<File> filesCollection = org.apache.commons.io.FileUtils.listFiles(graphsDir, fileFilter, dirFilter);
File[] graphFiles = filesCollection.toArray(new File[0]);
Arrays.sort(graphFiles);
for(int j = 0; j < graphFiles.length; j++){
suite.addTest(new ResetTest(EXAMPLE_PATH[i], graphFiles[j], false, false));
suite.addTest(new ResetTest(EXAMPLE_PATH[i], graphFiles[j], true, j == graphFiles.length - 1 ? true : false));
}
}
return suite;
}
@Override
protected void setUp() throws Exception {
super.setUp();
initEngine();
}
protected static String getTestName(String basePath, File graphFile, boolean batchMode) {
final StringBuilder ret = new StringBuilder();
final String n = graphFile.getName();
int lastDot = n.lastIndexOf('.');
if (lastDot == -1) {
ret.append(n);
} else {
ret.append(n.substring(0, lastDot));
}
if (batchMode) {
ret.append("-batch");
} else {
ret.append("-nobatch");
}
return ret.toString();
}
protected ResetTest(String basePath, File graphFile, boolean batchMode, boolean cleanup) {
super(getTestName(basePath, graphFile, batchMode));
this.basePath = basePath;
this.graphFile = graphFile;
this.batchMode = batchMode;
this.cleanUp = cleanup;
}
@Override
protected void runTest() throws Throwable {
final String baseAbsolutePath = new File(basePath).getAbsolutePath().replace('\\', '/');
logger.info("Project dir: " + baseAbsolutePath);
logger.info("Analyzing graph " + graphFile.getPath());
logger.info("Batch mode: " + batchMode);
final GraphRuntimeContext runtimeContext = new GraphRuntimeContext();
runtimeContext.setUseJMX(false);
runtimeContext.setContextURL(FileUtils.getFileURL(FileUtils.appendSlash(baseAbsolutePath))); // context URL should be absolute
// absolute path in PROJECT parameter is required for graphs using Derby database
runtimeContext.addAdditionalProperty("PROJECT", baseAbsolutePath);
if (StringUtils.findString(graphFile.getName(), NEEDS_SCENARIOS_CONNECTION) != -1) {
final String connDir = new File(SCENARIOS_RELATIVE_PATH + "conn").getAbsolutePath();
runtimeContext.addAdditionalProperty("CONN_DIR", connDir);
logger.info("CONN_DIR set to " + connDir);
}
if (StringUtils.findString(graphFile.getName(), NEEDS_SCENARIOS_LIB) != -1) {// set LIB_DIR to jdbc drivers directory
final String libDir = new File(SCENARIOS_RELATIVE_PATH + "lib").getAbsolutePath();
runtimeContext.addAdditionalProperty("LIB_DIR", libDir);
logger.info("LIB_DIR set to " + libDir);
}
// for scenarios graphs, add the TRANS dir to the classpath
if (basePath.contains("cloveretl.test.scenarios")) {
List<URL> classpath = new ArrayList<URL>();
classpath.add(FileUtils.getFileURL(FileUtils.appendSlash(baseAbsolutePath) + TRANS_DIR + "/"));
if (CLASSPATHS.containsKey(graphFile.getName())) {
for (String path : CLASSPATHS.get(graphFile.getName())) {
classpath.add(FileUtils.getFileURL(runtimeContext.getContextURL(), path));
}
}
runtimeContext.setRuntimeClassPath(classpath.toArray(new URL[classpath.size()]));
runtimeContext.setCompileClassPath(runtimeContext.getRuntimeClassPath());
}
runtimeContext.setBatchMode(batchMode);
final TransformationGraph graph = TransformationGraphXMLReaderWriter.loadGraph(new FileInputStream(graphFile), runtimeContext);
try {
graph.setDebugMode(false);
EngineInitializer.initGraph(graph);
for (int i = 0; i < 3; i++) {
final Future<Result> futureResult = runGraph.executeGraph(graph, runtimeContext);
Result result = Result.N_A;
result = futureResult.get();
switch (result) {
case FINISHED_OK:
// everything O.K.
logger.info("Execution of graph successful !");
break;
case ABORTED:
// execution was ABORTED !!
logger.info("Execution of graph failed !");
fail("Execution of graph failed !");
break;
default:
logger.info("Execution of graph failed !");
fail("Execution of graph failed !");
}
}
} catch (Throwable e) {
throw new IllegalStateException("Error executing grap " + graphFile, e);
} finally {
if (cleanUp) {
cleanupData();
}
logger.info("Transformation graph is freeing.\n");
graph.free();
}
}
private void cleanupData() {
for (String outDir : OUT_DIRS) {
File outDirFile = new File(basePath, outDir);
File[] file = outDirFile.listFiles(new FileFilter() {
@Override
public boolean accept(File f) {
return f.isFile();
}
});
for (int i = 0; i < file.length; i++) {
final boolean drt = file[i].delete();
if (drt) {
logger.info("Cleanup: deleted file " + file[i].getAbsolutePath());
} else {
logger.info("Cleanup: error delete file " + file[i].getAbsolutePath());
}
}
}
}
} |
/**
* Test socket timeouts for accept and read for simple stream sockets
*
* @author Godmar Back <gback@cs.utah.edu>
*/
import java.net.*;
import java.io.*;
public class SoTimeout {
    /**
     * Exercises SO_TIMEOUT behaviour: an accept() with no client must time out
     * (Success 1), a real connection must then be accepted (Success 2), a read
     * before the client has written must time out (Success 3), and the data must
     * still arrive on a subsequent read (Success 4). A watchdog thread aborts the
     * whole test if it has not finished within 10 seconds.
     */
    public static void main(String av[]) throws Exception {
        final String foo = "foo";

        // Find a free port by probing upwards from 45054.
        int tryport = 45054;
        ServerSocket server;
        for (;; ++tryport) {
            try {
                server = new ServerSocket(tryport);
                break;
            } catch (IOException ignored) {
                // port already in use — try the next one
            }
        }
        final int port = tryport;

        // Hard-kill the test if it hangs; System.exit(0) at the end also stops this thread.
        Thread watchdog = new Thread() {
            public void run() {
                try {
                    Thread.sleep(10000);
                } catch (InterruptedException ignored) { }
                System.out.println("Failure: Time out.");
                System.exit(-1);
            }
        };
        watchdog.start();

        // Client thread: connect, wait 3 s (longer than the server's 2 s read timeout),
        // then send one line.
        Thread t = new Thread() {
            public void run() {
                try {
                    Socket s = new Socket(InetAddress.getLocalHost(), port);
                    try {
                        Thread.sleep(3000);
                    } catch (InterruptedException e) {
                        System.out.println("Failure " + e);
                    }
                    PrintWriter p = new PrintWriter(s.getOutputStream());
                    p.println(foo);
                    p.close();
                } catch (Exception e) {
                    System.out.println("Failure " + e);
                }
            }
        };

        // 1) accept() with nobody connecting must raise InterruptedIOException after 1 s.
        server.setSoTimeout(1000);
        Socket rsocket = null;
        try {
            rsocket = server.accept();
        } catch (InterruptedIOException e) {
            // System.out.println(e);
            System.out.println("Success 1.");
        }

        // 2) with the client started, accept() must succeed.
        t.start();
        rsocket = server.accept();
        System.out.println("Success 2.");

        // 3) first read times out because the client sleeps 3 s and we wait only 2 s...
        rsocket.setSoTimeout(2000); // NB: 2 * 2000 > 3000
        InputStream is = rsocket.getInputStream();
        LineNumberReader r = new LineNumberReader(new InputStreamReader(is));
        try {
            r.readLine();
        } catch (InterruptedIOException e) {
            // System.out.println(e);
            System.out.println("Success 3.");
        }

        // 4) ...but the second 2 s window is enough for the line to arrive.
        String s = r.readLine();
        if (s.equals(foo)) {
            System.out.println("Success 4.");
        }
        System.exit(0);
    }
}
/* Expected Output:
Success 1.
Success 2.
Success 3.
Success 4.
*/ |
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
/*
* This Java source file was auto generated by running 'gradle init --type java-library'
* by 'yelinaung' at '7/10/15 12:18 AM' with Gradle 2.4
*
* @author yelinaung, @date 7/10/15 12:18 AM
*/
public class RabbitTest {
private String ZG = "zg";
private String UNI = "uni";
private JSONObject readJsonFromFile() {
try {
return new JSONObject(new String(Files.readAllBytes(
Paths.get(System.getProperty("user.dir") + "/src/test/java/" + "samples.json"))));
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
@Test public void testUni2Zawgyi() {
JSONObject jsonObj = readJsonFromFile();
if (jsonObj != null) {
JSONArray zgArray = jsonObj.getJSONArray(ZG);
for (int i = 0; i < zgArray.length(); i++) {
assertEquals(zgArray.get(i), Rabbit.uni2zg((String) jsonObj.getJSONArray(UNI).get(i)));
}
}
}
@Test public void testZawgyi2Uni() {
JSONObject jsonObj = readJsonFromFile();
if (jsonObj != null) {
JSONArray uniArray = jsonObj.getJSONArray(UNI);
for (int i = 0; i < uniArray.length(); i++) {
assertEquals(uniArray.get(i), Rabbit.zg2uni((String) jsonObj.getJSONArray(ZG).get(i)));
}
}
}
} |
package VjConfig;
import java.util.*;
import java.io.StreamTokenizer;
import java.util.NoSuchElementException;
import java.io.IOException;
import VjConfig.PropertyDesc;
import VjConfig.ChunkFactory;
public class Property {
protected String token;
protected int num;
protected PropertyDesc desc;
protected ValType valtype;
protected List vals;
protected ChunkDesc embeddesc; // used only for t_embeddedchunk
/** Attempts to apply a new PropertyDesc while preserving values.
* THIS IS DANGEROUS and should only be called by
* ConfigChunk.applyNewDesc().
*/
public void applyNewDesc (PropertyDesc _desc) {
desc = _desc;
token = desc.getToken();
num = desc.getNumValues();
if (!valtype.equals(desc.getValType())) {
vals.clear();
valtype = desc.getValType();
}
if (valtype.equals (ValType.t_embeddedchunk)) {
ChunkDesc newembeddesc = ChunkFactory.getChunkDescByToken(desc.getEnumAtIndex(0).str);
if (newembeddesc == null) {
System.err.println ("Big Messup in Property Constructor!!!");
embeddesc = new ChunkDesc ();
}
if (newembeddesc != embeddesc) {
vals.clear();
embeddesc = newembeddesc;
}
}
else {
embeddesc = null;
}
if (num != -1) {
while (vals.size() > num)
vals.remove(vals.size()-1);
while (vals.size() < num)
vals.add (createVarValue(vals.size()));
}
}
public Property (Property p) {
token = p.token;
num = p.num;
desc = p.desc;
embeddesc = p.embeddesc;
valtype = new ValType (p.valtype);
vals = new ArrayList();
int i, n = p.vals.size();
for (i = 0; i < n; i++)
vals.add (new VarValue ((VarValue)p.vals.get(i)));
}
public Property (PropertyDesc d) {
desc = d;
token = desc.getToken();
valtype = desc.getValType();
num = desc.getNumValues();
vals = new ArrayList();
if (valtype.equals (ValType.t_embeddedchunk)) {
embeddesc = ChunkFactory.getChunkDescByToken(d.getEnumAtIndex(0).str);
if (embeddesc == null) {
System.err.println ("Big Messup in Property Constructor!!!");
embeddesc = new ChunkDesc ();
}
}
else {
embeddesc = null;
}
for (int i = 0; i < num; i++)
vals.add (createVarValue(i));
}
private VarValue createVarValue (int i) {
// if i == -1, we're just tacking onto the end
if (i == -1)
i = vals.size();
if (valtype.equals (ValType.t_embeddedchunk)) {
ConfigChunk ch = ChunkFactory.createChunk (embeddesc);
if (i < desc.getValueLabelsSize())
ch.setName (desc.getValueLabel(i));
else
ch.setName (desc.name + " " + i);
return new VarValue (ch);
}
else
return new VarValue (valtype);
}
public final String getName () {
return desc.getName();
}
public final String getToken () {
return token;
}
public final PropertyDesc getDesc () {
return desc;
}
public void clearValues () {
if (num == -1)
vals.clear();
}
public void setValue (String s, int ind) {
if (ind < 0)
return;
if (num == -1) {
while (ind >= vals.size())
vals.add (createVarValue(-1));
}
else if (ind >= num)
return;
((VarValue)vals.get(ind)).set(s);
}
public void setValue (boolean s, int ind) {
if (ind < 0)
return;
if (num == -1) {
while (ind >= vals.size())
vals.add (createVarValue(-1));
}
else if (ind >= num)
return;
((VarValue)vals.get(ind)).set(s);
}
public void setValue (VarValue s, int v) {
if (!valtype.equals (s.getValType())) {
System.out.println ("Property.setValue() - type mismatch");
return;
}
if (v < 0)
return;
if (num == -1) {
while (v >= vals.size())
vals.add (createVarValue(-1));
}
else if (v >= num)
return;
((VarValue)vals.get(v)).set(s);
}
public VarValue getValue (int i) {
if (i < 0 || i >= vals.size())
return null;
return (VarValue)vals.get(i);
}
public final int getNum () {
return vals.size();
}
public final boolean hasFixedNumberOfValues () {
return num != -1;
}
public final ChunkDesc getEmbeddedDesc () {
return embeddesc;
}
public boolean equals (Property p) {
/* note - this does a total value comparison, not just checking names
* The exception to this is that right now I only check the name of the
* chunkdesc
*/
VarValue v1,v2;
if (p == null)
return false;
if (num != p.num)
return false;
if (!desc.getToken().equals(p.desc.getToken()))
return false;
if (!valtype.equals(p.valtype))
return false;
if (vals.size() != p.vals.size())
return false;
int i, n = vals.size();
for (i = 0; i < n; i++) {
v1 = (VarValue)vals.get(i);
v2 = (VarValue)p.vals.get(i);
if (!v1.equals(v2))
return false;
}
return true;
}
public String toString(String pad) {
VarValue v;
String s = pad + desc.token + " { ";
for (int i = 0; i < vals.size(); i++) {
v = (VarValue)vals.get(i);
if (valtype.equals (ValType.t_embeddedchunk)) {
s += "\n" + v.getEmbeddedChunk().toString(pad + " ");
}
else {
s += "\"" + desc.getEnumString(v) + "\"";
}
s+= " ";
}
s += "}";
return s;
}
public String xmlRep (String pad) {
VarValue v;
StringBuffer s = new StringBuffer (128);
s.append(pad);
s.append('<');
s.append(XMLConfigIOHandler.escapeString(desc.token));
s.append('>');
int i, n = vals.size();
for (i = 0; i < n; i++) {
v = (VarValue)vals.get(i);
if (valtype.equals (ValType.t_embeddedchunk)) {
s.append('\n');
s.append(v.getEmbeddedChunk().xmlRep (pad + " "));
}
else {
s.append('"');
//s.append(XMLConfigIOHandler.escapeString(desc.getEnumString(v)));
s.append(XMLConfigIOHandler.escapeString(v.toString()));
s.append("\" ");
}
}
s.append("</");
s.append(XMLConfigIOHandler.escapeString(desc.token));
s.append(">\n");
return s.toString();
}
public boolean read (ConfigStreamTokenizer st) throws IOException {
/* the idea here is that st holds the string, less the property name
* which was already read by ConfigChunk.read()
*/
VarValue v;
int n = 0;
try {
int t = st.nextToken();
String token = st.sval;
if (st.ttype == '{') {
/* multiple entries */
vals.clear();
while ((v = buildVarValue (st)) != null)
vals.add(v);
if (st.ttype != '}') {
System.out.println ("ERROR: expected '}'");
}
}
else {
/* one entry */
v = buildVarValue(st);
vals.clear();
vals.add (v);
}
if (num != -1) {
/* are we a proper # of values? */
while (vals.size() < num)
vals.add (new VarValue(valtype));
}
return true;
}
catch (IOException e) {
/* Either a streamtokenizer error or unexpected eof */
System.err.println ("Error in Property.read()");
return false;
}
}
private VarValue buildVarValue (ConfigStreamTokenizer st) {
/* here we build a token into an appopriate VarValue; includes
* type resolution and enumeration lookup
*/
try {
st.nextToken();
}
catch (IOException e) {
return null;
}
if (st.ttype == '}')
return null;
if (valtype.equals (ValType.t_embeddedchunk)) {
if (embeddesc != null) {
ConfigChunk c = ChunkFactory.createChunk (embeddesc);
c.read(st);
return new VarValue (c);
}
return null;
}
else try {
return (desc.getEnumValue(st.sval));
}
catch (NoSuchElementException e) {
VarValue v = createVarValue(-1);
v.set(st.sval);
return v;
}
}
} |
package biz.bsoft;
import biz.bsoft.orders.dao.ItemRepository;
import biz.bsoft.orders.model.Item;
import biz.bsoft.users.dao.UserRepository;
import biz.bsoft.users.model.User;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.*;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.test.annotation.Commit;
import org.springframework.test.context.junit4.AbstractTransactionalJUnit4SpringContextTests;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.transaction.TestTransaction;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment= SpringBootTest.WebEnvironment.RANDOM_PORT)
@Transactional
public class OrdersApplicationTests extends AbstractTransactionalJUnit4SpringContextTests {
@PersistenceContext
private EntityManager entityManager;
@Autowired
private TestRestTemplate restTemplate;
@Autowired
private UserRepository userRepository;
@Autowired
private ItemRepository itemRepository;
@Autowired
private PasswordEncoder passwordEncoder;
private static final Logger logger =
LoggerFactory.getLogger(OrdersApplicationTests.class);
private String email="test@gmail.com";
private String userName="testUser";
private String password=email;
@Before
public void givenUserAndVerificationToken() {
User user = new User();
user.setUsername(userName);
user.setEmail(email);
user.setPassword(passwordEncoder.encode(password));
user.setEnabled(true);
entityManager.persist(user);
entityManager.flush();
entityManager.clear();
// TestTransaction.flagForCommit();
// TestTransaction.end();
}
@After
public void flushAfter() {
entityManager.flush();
entityManager.clear();
}
//@Test
public void testRestItemsGetCount() {
//get test user from properties file or create one in test
TestRestTemplate restTemplateWithAuth = restTemplate.withBasicAuth(userName,password);
HttpHeaders requestHeaders = new HttpHeaders();
HttpEntity<?> requestEntity = new HttpEntity<>(requestHeaders);
String url;
url= "/users/user";
ResponseEntity<String> userResponse =
restTemplateWithAuth.exchange(url, HttpMethod.GET, requestEntity, String.class);
String user = userResponse.getBody();
logger.info(user);
assertThat("Status code is ok",userResponse.getStatusCode(),equalTo(HttpStatus.OK));
url= "/orders/items";
ResponseEntity<List<Item>> itemResponse =
restTemplateWithAuth.exchange(url, HttpMethod.GET, requestEntity, new ParameterizedTypeReference<List<Item>>() {});
List<Item> items = itemResponse.getBody();
//logger.info(items.toString());
assertEquals(items.size(),itemRepository.count());
}
@Test
public void testUserRepositoryCanFindUser(){
User user=userRepository.findByEmail(email);
assertNotNull(user);
//logger.info(user.toString());
assertThat("Can decode password",passwordEncoder.matches(password,user.getPassword()));
assertEquals(userName, user.getUsername());
assertEquals(email, user.getEmail());
}
} |
package com.example;
import static org.hamcrest.core.StringEndsWith.endsWith;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.boot.test.WebIntegrationTest;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = DemoApplication.class)
@WebIntegrationTest(randomPort=true)
public class DemoApplicationTests {
@Value("${local.server.port}")
private int port;
@Autowired
private WebApplicationContext context;
private MockMvc mvc;
@Before
public void setup() {
mvc = MockMvcBuilders
.webAppContextSetup(context)
.build();
}
@Test
public void restTest() throws Exception {
mvc.perform(put("/mice/0f629be4-e6cc-11e5-bb3b-3bf5fa6b2828").content(
new ObjectMapper().writeValueAsString(ImmutableMap.of("name", "My Mouse"))
))
.andExpect(status().isCreated())
.andExpect(header().string("Location", endsWith("/mice/0f629be4-e6cc-11e5-bb3b-3bf5fa6b2828")))
.andDo(print())
.andDo((a) -> {
// follow the location header.
mvc.perform(get(a.getResponse().getHeader("Location")))
.andDo(print())
.andExpect(status().isOk());
});
}
} |
package com.lipisha.sdk;
import com.lipisha.sdk.response.AirtimeDisbursement;
import com.lipisha.sdk.response.Payout;
import com.lipisha.sdk.response.SMSReport;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Test money/sms/airtime disbursement
*/
public class DisbursementTest extends TestCase {
private LipishaClient lipishaClient;
public DisbursementTest(String name) {
super(name);
this.lipishaClient = lipishaClient;
}
/**
* @return the suite of tests being tested
*/
public static Test suite() {
return new TestSuite(DisbursementTest.class);
}
@Override
protected void setUp() throws Exception {
super.setUp();
lipishaClient = new LipishaClient(TestConfig.API_KEY, TestConfig.API_SIGNATURE, LipishaClient.SANDBOX_BASE_URL);
}
public void testSendMoney() {
Payout payout = lipishaClient.sendMoney(TestConfig.TEST_MOBILE_NUMBER, 100, TestConfig.PAYOUT_ACCOUNT_NUMBER);
assertEquals(true, payout.isSuccessful());
assertNotNull(payout.getAmount());
assertNotNull(payout.getCustomerName());
assertNotNull(payout.getMobileNumber());
assertNotNull(payout.getReference());
assertEquals(payout.getStatusResponse().getStatus(), "SUCCESS");
}
public void testSendAirtime() {
AirtimeDisbursement airtimeDisbursement = lipishaClient.sendAirtime(TestConfig.TEST_MOBILE_NUMBER, 100,
TestConfig.AIRTIME_ACCOUNT_NUMBER, "SAF");
assertEquals(true, airtimeDisbursement.isSuccessful());
assertNotNull(airtimeDisbursement.getMobileNumber());
assertNotNull(airtimeDisbursement.getReference());
assertNotNull(airtimeDisbursement.getAmount());
assertEquals(airtimeDisbursement.getStatusResponse().getStatus(), "SUCCESS");
}
public void testSendAirtimeInvalidAmount() {
AirtimeDisbursement airtimeDisbursement = lipishaClient.sendAirtime(TestConfig.TEST_MOBILE_NUMBER, 0,
TestConfig.AIRTIME_ACCOUNT_NUMBER, "SAF");
assertEquals(false, airtimeDisbursement.isSuccessful());
assertEquals(airtimeDisbursement.getStatusResponse().getStatus(), "FAIL");
}
public void testSendSMS(){
SMSReport smsReport = lipishaClient.sendSMS(TestConfig.TEST_MOBILE_NUMBER, TestConfig.AIRTIME_ACCOUNT_NUMBER,
"TEST MESSAGE");
assertEquals(true, smsReport.isSuccessful());
assertNotNull(smsReport.getMessage());
assertNotNull(smsReport.getRecipient());
assertNotNull(smsReport.getCost());
}
} |
package com.sixtyfour.test;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.sixtyfour.Assembler;
import com.sixtyfour.Basic;
import com.sixtyfour.Loader;
import com.sixtyfour.cbmnative.NativeCompiler;
import com.sixtyfour.cbmnative.PseudoCpu;
import com.sixtyfour.config.CompilerConfig;
import com.sixtyfour.config.LoopMode;
import com.sixtyfour.config.MemoryConfig;
import com.sixtyfour.parser.assembly.AssemblyParser;
import com.sixtyfour.system.Conversions;
import com.sixtyfour.system.Cpu;
import com.sixtyfour.system.CpuTracer;
import com.sixtyfour.system.FileWriter;
import com.sixtyfour.system.Machine;
import com.sixtyfour.system.Program;
import com.sixtyfour.system.ProgramPart;
/**
* @author EgonOlsen
*
*/
public class GamesCompiler {
public static void main(String[] args) throws Exception {
File src = new File("src/test/resources/games");
File dst = new File("compiled");
dst.mkdir();
File[] games = src.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".bas");
}
});
for (File game : games) {
System.out.println("Compiling " + game);
String[] vary = Loader.loadProgram(new FileInputStream(game));
String gameName = game.getName();
Assembler assy = initTestEnvironment(gameName, vary, false, 30000, -1);
String target = dst.getPath() + "/+" + gameName.replace(".bas", ".prg");
System.out.println("Code ends at: " + assy.getProgram().getParts().get(0).getEndAddress());
System.out.println("Binary ends at: " + assy.getProgram().getParts().get(assy.getProgram().getParts().size() - 1).getEndAddress());
FileWriter.writeAsPrg(assy.getProgram(), target, assy.getProgram().getCodeStart() < 2100);
// assy.getCpu().setCpuTracer(new MySimpleTracer(assy));
// executeTest(assy);
}
}
private static Machine executeTest(CompilerConfig conf, final Assembler assy) {
Program prg = assy.getProgram();
for (ProgramPart pp : prg.getParts()) {
System.out.println("Size: " + pp.size());
}
System.out.println("Running compiled program...");
Machine machine = assy.getMachine();
machine.addRoms();
// printZeropage(assy);
System.out.println(assy.toString());
try {
assy.run(conf);
} catch (Exception e) {
e.printStackTrace();
printMemory(assy, machine);
}
System.out.println("program end: " + prg.getParts().get(prg.getParts().size() - 1).getEndAddress());
System.out.println("...done!");
// printMemory(assy, machine);
return machine;
}
private static void printMemory(Assembler assy, Machine machine) {
Program prg = assy.getProgram();
String code = assy.toString();
String[] lines = code.split("\n");
System.out.println("Lines: " + lines.length);
Map<String, String> addr2line = new HashMap<String, String>();
for (String line : lines) {
if (line.startsWith(".")) {
int pos = line.indexOf("\t");
addr2line.put(line.substring(0, pos), line);
}
}
StringBuilder sb = new StringBuilder();
for (ProgramPart pp : prg.getParts()) {
for (int i = pp.getAddress(); i < pp.getEndAddress(); i++) {
String addr = Integer.toString(i, 16);
addr = "." + addr;
if (addr2line.containsKey(addr)) {
if (sb.length() > 0) {
sb.append("\n");
}
sb.append("> ").append(addr2line.get(addr)).append("\n");
sb.append(" " + addr).append("\t");
}
int val = AssemblyParser.getLowByte(machine.getRam()[i]);
String num = Integer.toString(val, 16);
if (num.length() == 1) {
num = "0" + num;
}
sb.append(num).append(" ");
}
}
System.out.println(sb.toString());
System.out.println("
lines = sb.toString().split("\n");
for (int i = 0; i < lines.length; i += 2) {
String l1 = lines[i];
String l2 = lines[i + 1];
if (l1.length() > 0 && l2.length() > 0 && l1.substring(1).equals(l2.substring(1))) {
continue;
}
System.out.println(l1);
System.out.println(l2);
}
}
private static Assembler initTestEnvironment(String name, String[] vary, boolean executePseudo, int variableStart, int stringMemoryEnd) {
CompilerConfig conf = new CompilerConfig();
boolean optis = true;
conf.setConstantFolding(optis);
conf.setConstantPropagation(optis);
conf.setDeadStoreElimination(optis);
conf.setDeadStoreEliminationOfStrings(optis);
conf.setIntermediateLanguageOptimizations(optis);
conf.setNativeLanguageOptimizations(optis);
conf.setOptimizedLinker(optis);
conf.setLoopMode(LoopMode.REMOVE);
conf.setCompactThreshold(4);
final Basic basic = new Basic(vary);
basic.compile(conf);
List<String> mCode = NativeCompiler.getCompiler().compileToPseudeCode(conf, basic);
System.out.println("
for (String line : mCode) {
System.out.println(line);
}
System.out.println("
if (executePseudo) {
System.out.println("Running pseudo code...");
PseudoCpu pc = new PseudoCpu();
pc.execute(conf, basic.getMachine(), mCode);
}
System.out.println("
int start = -1;
if (name.indexOf("_") != -1) {
name = name.substring(name.lastIndexOf("_") + 1, name.lastIndexOf("."));
try {
start = Integer.valueOf(name);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
System.out.println("Program '" + name + "' starts at " + start);
List<String> nCode = NativeCompiler.getCompiler().compile(conf, basic, new MemoryConfig(start, -1, variableStart, stringMemoryEnd));
for (String line : nCode) {
System.out.println(line);
}
final Assembler assy = new Assembler(nCode);
assy.compile(conf);
return assy;
}
@SuppressWarnings("unused")
private static class MySimpleTracer implements CpuTracer {
private final Assembler assy;
private MySimpleTracer(Assembler assy) {
this.assy = assy;
}
private int cnt;
@Override
public void commandExecuted(Cpu cpu, int opcode, int opcodePc, int newPc) {
String line = assy.getCodeLine(opcodePc);
if (line != null) {
cnt++;
System.out.println(Integer.toHexString(opcodePc) + " - " + Integer.toHexString(opcode) + " -> " + Integer.toHexString(newPc) + " / a=" + cpu.getAcc() + " / x="
+ cpu.getX() + " / y=" + cpu.getY() + "/ z=" + (cpu.getStatus() & 0b10) + " / TMP_ZP=" + printReg(105, assy) + " / TMP2_ZP=" + printReg(107, assy)
+ " / TMP3_ZP=" + printReg(34, assy) + "/" + line + " " + assy.getRam()[opcodePc + 1] + "/" + cnt + " - FAC1:"
+ Conversions.convertFloat(assy.getMachine(), 0x61) + " @ " + cpu.getClockTicks());
}
}
private String printReg(int i, Assembler assy) {
int addr = (assy.getRam()[i] + 256 * assy.getRam()[i + 1]);
return addr + " [" + (assy.getRam()[addr] + 256 * assy.getRam()[addr + 1]) + "] ";
}
private String print16Bit(int i, Assembler assy) {
return (assy.getRam()[i] + 256 * assy.getRam()[i + 1]) + "";
}
@Override
public void exception(Cpu cpu, int opcode, int opcodePc, int newPc) {
System.out.println("Exception in " + cnt);
}
}
} |
package de.dfki.lt.mdparser;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import de.bwaldvogel.liblinear.Linear;
import de.dfki.lt.mdparser.caller.MDPrunner;
import de.dfki.lt.mdparser.caller.MDPtrainer;
import de.dfki.lt.mdparser.config.ConfigKeys;
import de.dfki.lt.mdparser.config.GlobalConfig;
import de.dfki.lt.mdparser.eval.Eval;
/**
 * End-to-end tests for MDParser training and evaluation, covering the
 * "covington" and "stack" algorithms in both file-based and in-memory training
 * modes. Expected accuracies are pinned to exact double values, so training
 * must be deterministic (single thread, reset RNG).
 */
public class TestMDParser {
// Clears previous build output, deletes stale model zips under the test
// resources, and resets liblinear's RNG so results are reproducible.
@Before
public void setUp()
throws IOException {
Utils.deleteFolder(GlobalConfig.getModelBuildFolder());
List<Path> filesToDelete = Utils.getAllFilesFromFolder(Paths.get("src/test/resources"), "*.zip");
for (Path oneFileToDelete : filesToDelete) {
Files.delete(oneFileToDelete);
}
Linear.resetRandom();
}
// File-based training + evaluation for the covington algorithm.
@Test
public void testTrainEvalFilesCovington() throws IOException {
String algorithmId = "covington";
String modelName = "de-2009-" + algorithmId + ".zip";
double expectedParentAccuracy = 0.841186515716906;
double expectedLabelAccuracy = 0.8006767440389602;
GlobalConfig.getInstance().setProperty(ConfigKeys.ALGORITHM, algorithmId);
// parallel training is not deterministic, so restrict number of threads to 1
GlobalConfig.getInstance().setProperty(ConfigKeys.TRAINING_THREADS, 1);
testTrainFiles(modelName, algorithmId);
testEval(modelName, expectedParentAccuracy, expectedLabelAccuracy);
}
// File-based training + evaluation for the stack algorithm.
@Test
public void testTrainEvalFilesStack() throws IOException {
String algorithmId = "stack";
String modelName = "de-2009-" + algorithmId + ".zip";
double expectedParentAccuracy = 0.8100056922395801;
double expectedLabelAccuracy = 0.7698437796470812;
GlobalConfig.getInstance().setProperty(ConfigKeys.ALGORITHM, algorithmId);
// parallel training is not deterministic, so restrict number of threads to 1
GlobalConfig.getInstance().setProperty(ConfigKeys.TRAINING_THREADS, 1);
testTrainFiles(modelName, algorithmId);
testEval(modelName, expectedParentAccuracy, expectedLabelAccuracy);
}
// Trains from the CoNLL corpus via the file-based pipeline, then compares all
// intermediate folders and the alpha/split files against the expected snapshots.
private void testTrainFiles(String modelName, String algorithmId)
throws IOException {
MDPtrainer.train("src/test/resources/corpora/de-train-2009.conll",
GlobalConfig.getPath(ConfigKeys.MODEL_OUTPUT_FOLDER).resolve(modelName).toString());
assertThat(GlobalConfig.getPath(ConfigKeys.MODEL_OUTPUT_FOLDER).resolve(modelName)).exists();
compareFolders(
GlobalConfig.SPLIT_ALPHA_FOLDER,
Paths.get("src/test/resources/expected/file-" + algorithmId + "/split_alphas"));
compareFolders(
GlobalConfig.FEATURE_VECTORS_FOLDER,
Paths.get("src/test/resources/expected/file-" + algorithmId + "/1_initial_feature_vectors"));
compareFolders(
GlobalConfig.SPLIT_INITIAL_FOLDER,
Paths.get("src/test/resources/expected/file-" + algorithmId + "/2_initial_splits"));
compareFolders(
GlobalConfig.SPLIT_ADJUST_FOLDER,
Paths.get("src/test/resources/expected/file-" + algorithmId + "/3_adjusted_splits"));
compareFolders(
GlobalConfig.SPLIT_COMPACT_FOLDER,
Paths.get("src/test/resources/expected/file-" + algorithmId + "/4_compacted_splits"));
compareFolders(
GlobalConfig.SPLIT_MODELS_FOLDER,
Paths.get("src/test/resources/expected/file-" + algorithmId + "/split_models"));
assertThat(GlobalConfig.ALPHA_FILE).usingCharset(StandardCharsets.UTF_8)
.hasSameContentAs(Paths.get("src/test/resources/expected/file-" + algorithmId + "/alpha.txt"),
StandardCharsets.UTF_8);
assertThat(GlobalConfig.SPLIT_FILE).usingCharset(StandardCharsets.UTF_8)
.hasSameContentAs(Paths.get("src/test/resources/expected/file-" + algorithmId + "/split.txt"),
StandardCharsets.UTF_8);
}
// In-memory training + evaluation for the covington algorithm.
// NOTE(review): unlike the file-based tests, TRAINING_THREADS is not set here —
// presumably the in-memory path is deterministic regardless; confirm.
@Test
public void testTrainEvalMemoryCovington()
throws IOException {
String algorithmId = "covington";
String modelName = "de-2009-" + algorithmId + ".zip";
double expectedParentAccuracy = 0.8452343305293782;
double expectedLabelAccuracy = 0.8051672885965467;
GlobalConfig.getInstance().setProperty(ConfigKeys.ALGORITHM, algorithmId);
testTrainMemory(modelName, algorithmId);
testEval(modelName, expectedParentAccuracy, expectedLabelAccuracy);
}
// In-memory training + evaluation for the stack algorithm.
@Test
public void testTrainEvalMemoryStack()
throws IOException {
String algorithmId = "stack";
String modelName = "de-2009-" + algorithmId + ".zip";
double expectedParentAccuracy = 0.8104800455379166;
double expectedLabelAccuracy = 0.7700967680728606;
GlobalConfig.getInstance().setProperty(ConfigKeys.ALGORITHM, algorithmId);
testTrainMemory(modelName, algorithmId);
testEval(modelName, expectedParentAccuracy, expectedLabelAccuracy);
}
// Trains via the in-memory pipeline; only the alpha/model folders and the
// alpha/split files are compared (the in-memory path writes no intermediate splits).
private void testTrainMemory(String modelName, String algorithmId)
throws IOException {
MDPtrainer.trainMem("src/test/resources/corpora/de-train-2009.conll",
GlobalConfig.getPath(ConfigKeys.MODEL_OUTPUT_FOLDER).resolve(modelName).toString());
assertThat(GlobalConfig.getPath(ConfigKeys.MODEL_OUTPUT_FOLDER).resolve(modelName)).exists();
compareFolders(
GlobalConfig.SPLIT_ALPHA_FOLDER,
Paths.get("src/test/resources/expected/memory-" + algorithmId + "/split_alphas"));
compareFolders(
GlobalConfig.SPLIT_MODELS_FOLDER,
Paths.get("src/test/resources/expected/memory-" + algorithmId + "/split_models"));
assertThat(GlobalConfig.ALPHA_FILE).usingCharset(StandardCharsets.UTF_8)
.hasSameContentAs(Paths.get("src/test/resources/expected/memory-" + algorithmId + "/alpha.txt"),
StandardCharsets.UTF_8);
assertThat(GlobalConfig.SPLIT_FILE).usingCharset(StandardCharsets.UTF_8)
.hasSameContentAs(Paths.get("src/test/resources/expected/memory-" + algorithmId + "/split.txt"),
StandardCharsets.UTF_8);
}
// Parses the test corpus with the trained model and checks parent/label
// accuracies against the exact expected values.
private void testEval(String modelName, double expectedParentAccuracy, double expectedLabelAccuracy)
throws IOException {
Eval evaluator = MDPrunner.conllFileParsingAndEval(
"src/test/resources/corpora/de-test-2009.conll",
"src/test/resources/corpora/de-result-2009.conll",
GlobalConfig.getPath(ConfigKeys.MODEL_OUTPUT_FOLDER).resolve(modelName).toString());
assertThat(evaluator.getParentsAccuracy()).isEqualTo(expectedParentAccuracy);
assertThat(evaluator.getLabelsAccuracy()).isEqualTo(expectedLabelAccuracy);
}
// Asserts that two folders contain the same set of file names with identical
// UTF-8 content, comparing in sorted order.
private static void compareFolders(Path testPath, Path expectedPath)
throws IOException {
List<Path> filesCreatedByTest = Utils.getAllFilesFromFolder(testPath, "*");
List<Path> expectedFiles = Utils.getAllFilesFromFolder(expectedPath, "*");
assertThat(filesCreatedByTest).hasSameSizeAs(expectedFiles);
filesCreatedByTest.sort(null);
expectedFiles.sort(null);
for (int i = 0; i < filesCreatedByTest.size(); i++) {
assertThat(filesCreatedByTest.get(i).getFileName()).isEqualTo(expectedFiles.get(i).getFileName());
assertThat(filesCreatedByTest.get(i)).usingCharset(StandardCharsets.UTF_8)
.hasSameContentAs(expectedFiles.get(i), StandardCharsets.UTF_8);
}
}
}
package com.bookbase.app.model.entity;
import android.arch.persistence.room.Entity;
import android.arch.persistence.room.Ignore;
import android.arch.persistence.room.PrimaryKey;
import android.os.Parcel;
import android.os.Parcelable;
import com.bookbase.app.utils.Converters;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Locale;
@Entity(tableName = "Book")
public class Book implements Parcelable{
//, foreignKeys = @ForeignKey(entity = Author.class, parentColumns = "authorId", childColumns = "author", onDelete = CASCADE)
@PrimaryKey(autoGenerate = true)
private int bookId;
private String title;
private Author author;
private String description;
private Genre genre;
private String coverImage;
private String isbn;
private int rating;
private Review review;
private boolean isRead;
private Calendar purchaseDate;
private double purchasePrice;
public static final Parcelable.Creator<Book> CREATOR = new Parcelable.Creator<Book>() {
@Override
public Book createFromParcel(Parcel source) {
return new Book(source);
}
@Override
public Book[] newArray(int size) {
return new Book[size];
}
};
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(bookId);
dest.writeString(title);
dest.writeParcelable(author, flags);
dest.writeString(description);
dest.writeParcelable(genre, flags);
dest.writeString(coverImage);
dest.writeString(isbn);
dest.writeInt(rating);
dest.writeParcelable(review, flags);
dest.writeByte((byte) (isRead ? 1 : 0));
dest.writeString(Converters.calendarToString(purchaseDate));
dest.writeDouble(purchasePrice);
}
// Default constructor for Room database.
public Book(){
}
@Ignore
public Book(String title, Author author){
this.title = title;
this.author = author;
this.isRead = false;
this.rating = 0;
this.author = author;
this.description = "";
this.isbn = "";
this.genre = new Genre("");
this.review = new Review();
}
@Ignore
public Book(String title, Author author, String description, Genre genre){
this.title = title;
this.author = author;
this.isRead = false;
this.rating = 0;
this.author = author;
this.description = "";
this.isbn = "";
this.genre = new Genre("");
this.review = new Review();
}
@Ignore
private Book(Parcel in){
bookId = in.readInt();
title = in.readString();
author = in.readParcelable(Author.class.getClassLoader());
description = in.readString();
genre = in.readParcelable(Genre.class.getClassLoader());
coverImage = in.readString();
isbn = in.readString();
rating = in.readInt();
review = in.readParcelable(Review.class.getClassLoader());
isRead = in.readByte() == 1;
purchaseDate = Converters.toCalendar(in.readString());
purchasePrice = in.readDouble();
}
public Book(int bookId, boolean isRead, int rating, Author author, String description,
Genre genre, String isbn, String title, Review review, String coverImage,
Calendar purchaseDate, double purchasePrice, boolean isOwned) {
this.bookId = bookId;
this.title = title;
this.author = author;
this.description = description;
this.genre = genre;
this.coverImage = coverImage;
this.isbn = isbn;
this.rating = rating;
this.review = review;
this.isRead = isRead;
this.purchaseDate = purchaseDate;
this.purchasePrice = purchasePrice;
}
public int getBookId(){ return bookId; }
public boolean getIsRead(){ return isRead; }
public int getRating(){ return rating; }
public Author getAuthor(){ return author; }
public String getDescription(){ return description; }
public Genre getGenre() { return genre; }
public String getIsbn(){ return isbn; }
public String getTitle(){ return title; }
public Review getReview() { return review; }
public Calendar getPurchaseDate() { return purchaseDate; }
public double getPurchasePrice() { return purchasePrice; }
public String getCoverImage() { return coverImage; }
public String getPurchaseDateString(){
DateFormat df = new SimpleDateFormat("dd/MM/yyyy", Locale.ENGLISH);
String date = df.format(Calendar.getInstance().getTime());
if (purchaseDate != null) {
date = df.format(purchaseDate.getTime());
}
return date;
}
public void setBookId(int bookId){ this.bookId = bookId; }
public void setIsRead(boolean isRead){ this.isRead = isRead; }
public void setRating(int rating){ this.rating = rating; }
public void setAuthor(Author author){ this.author = author; }
public void setDescription(String description){ this.description = description; }
public void setGenre(Genre genre) { this.genre = genre; }
public void setIsbn(String isbn){ this.isbn = isbn; }
public void setTitle(String title){ this.title = title; }
public void setReview(Review review){ this.review = review; }
public void setPurchaseDate(Calendar date){ this.purchaseDate = date; }
public void setPurchasePrice(double price){ this.purchasePrice = price; }
public void setCoverImage(String imageDirectory) { this.coverImage = imageDirectory; }
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Book book = (Book) obj;
return bookId == book.getBookId() && (title != null ? title.equals(book.getTitle()) : book.getTitle() == null);
}
@Override
public int hashCode() {
int result = (bookId ^ (bookId >>> 8));
result = 31 * result + (title != null ? title.hashCode() : 0);
return result;
}
} |
package org.cojen.tupl.core;
import java.util.*;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
import org.junit.*;
import static org.junit.Assert.*;
import org.cojen.tupl.*;
import org.cojen.tupl.util.Latch;
import static org.cojen.tupl.TestUtils.*;
/**
*
*
* @author Brian S O'Neill
*/
public class DeadlockTest {
// Allows running this test class directly from the command line.
public static void main(String[] args) throws Exception {
org.junit.runner.JUnitCore.main(DeadlockTest.class.getName());
}
// Lock manager under test; volatile so worker threads spawned by the tests
// always observe the instance created in setup().
volatile LockManager mManager;
// Tasks registered by tests and run via startTasks()/joinTasks().
private List<Task> mTasks;
// Creates a fresh lock manager and empty task list before each test.
@Before
public void setup() {
mManager = new LockManager(null, null, -1);
mTasks = new ArrayList<Task>();
}
// Three "culprit" threads form a lock cycle (each holds keys[i] shared and
// wants keys[i+1] exclusive); a fourth "victim" thread blocks on keys[0] and
// must observe the deadlock without being guilty of it.
@Test
public void test_1() throws Throwable {
// Create a deadlock among three threads and a victim thread.
// 5 seconds, expressed in nanoseconds.
final long timeout = 5L * 1000 * 1000 * 1000;
final byte[][] keys = {"k0".getBytes(), "k1".getBytes(), "k2".getBytes()};
var tasks = new TestTask[3];
// Barrier ensures all shared locks are held before anyone requests an exclusive one.
var cb = new CyclicBarrier(tasks.length);
// Culprit threads.
for (int i=0; i<tasks.length; i++) {
final byte[] k1 = keys[i];
final byte[] k2 = keys[(i + 1) % keys.length];
tasks[i] = startTestTaskAndWaitUntilBlocked(() -> {
var locker = new Locker(mManager);
try {
locker.doLockShared(1, k1, timeout);
cb.await();
locker.doLockExclusive(1, k2, timeout);
fail();
} catch (DeadlockException e) {
// This thread helped create the deadlock.
assertTrue(e.isGuilty());
} catch (Exception e) {
Utils.rethrow(e);
} finally {
locker.scopeUnlockAll();
}
});
}
// Victim thread.
var victim = startTestTaskAndWaitUntilBlocked(() -> {
var locker = new Locker(mManager);
try {
// The first lock doesn't participate in deadlock.
locker.doLockExclusive(1, "xxx".getBytes(), timeout / 2);
locker.doLockExclusive(1, keys[0], timeout / 2);
fail();
} catch (DeadlockException e) {
// Deadlock observed, but this thread didn't create it.
assertFalse(e.isGuilty());
} catch (Exception e) {
Utils.rethrow(e);
} finally {
locker.scopeUnlockAll();
}
});
victim.join();
for (TestTask task : tasks) {
task.join();
}
}
// Runs doTest_2 once; if timing caused a spurious assertion failure, resets
// the fixture and retries a single time.
@Test
public void test_2() throws Throwable {
try {
doTest_2();
} catch (AssertionError e) {
// Time-sensitive test, so try again.
setup();
doTest_2();
}
}
// Builds a three-thread dependency cycle over keys k1..k3 with upgradable
// locks; only the task with the shortest timeout should see the deadlock.
private void doTest_2() throws Throwable {
// Deadlock caused by three threads and three keys.
// 5 seconds, expressed in nanoseconds.
final long timeout = 5L * 1000 * 1000 * 1000;
// Acquires its keys in order with a small sleep between acquisitions, and
// asserts whether a DeadlockException was expected.
class TheTask extends Task {
private final long mTimeout;
private final boolean mExpectDeadlock;
private final byte[][] mKeys;
TheTask(long timeout, boolean expectDeadlock, String... keys) {
mTimeout = timeout;
mExpectDeadlock = expectDeadlock;
mKeys = new byte[keys.length][];
for (int i=0; i<keys.length; i++) {
mKeys[i] = keys[i].getBytes();
}
}
void doRun() throws Throwable {
try {
var locker = new Locker(mManager);
try {
for (byte[] key : mKeys) {
locker.doLockUpgradable(1, key, mTimeout);
// Sleep so all three tasks hold their first lock before requesting the second.
sleep(100);
}
} finally {
locker.scopeUnlockAll();
}
assertFalse(mExpectDeadlock);
} catch (DeadlockException e) {
assertTrue(mExpectDeadlock);
}
}
}
// All three threads must be waiting for a deadlock to occur. As soon
// as one times out, the rest can proceed because the dependency cycle
// is broken.
mTasks.add(new TheTask(timeout, true, "k1", "k2"));
mTasks.add(new TheTask(timeout * 10, false, "k2", "k3"));
mTasks.add(new TheTask(timeout * 10, false, "k3", "k1"));
startTasks();
joinTasks();
}
// A shared owner upgrading to exclusive deadlocks against an upgradable owner
// (UNCHECKED upgrade rule); verifies the exception message, owner attachment
// and deadlock set, with and without a lock timeout.
@Test
public void deadlockInfo() throws Throwable {
Database db = Database.open(new DatabaseConfig()
.directPageAccess(false)
.lockUpgradeRule(LockUpgradeRule.UNCHECKED));
Index ix = db.openIndex("test");
Transaction txn1 = db.newTransaction();
txn1.attach("txn1");
Transaction txn2 = db.newTransaction();
txn2.attach("txn2");
byte[] key = "hello".getBytes();
txn1.lockUpgradable(ix.id(), key);
txn2.lockShared(ix.id(), key);
try {
txn2.lockExclusive(ix.id(), key);
fail();
} catch (DeadlockException e) {
assertTrue(e.getMessage().indexOf("indexName: test") > 0);
assertTrue(e.getMessage().indexOf("owner attachment: txn1") > 0);
assertEquals("txn1", e.ownerAttachment());
assertEquals("txn1", e.deadlockSet().iterator().next().ownerAttachment());
}
// Deadlock detection works with zero timeout, except with the tryLock variant.
txn2.lockTimeout(0, null);
try {
txn2.lockExclusive(ix.id(), key);
fail();
} catch (DeadlockException e) {
assertEquals(0, e.timeout());
assertTrue(e.getMessage().indexOf("indexName: test") > 0);
assertTrue(e.getMessage().indexOf("owner attachment: txn1") > 0);
assertEquals("txn1", e.ownerAttachment());
assertEquals("txn1", e.deadlockSet().iterator().next().ownerAttachment());
}
// No deadlock detected here.
assertEquals(LockResult.TIMED_OUT_LOCK, txn2.tryLockExclusive(ix.id(), key, 0));
}
// Three transactions form a k1->k2->k3->k1 lock cycle; an outside transaction
// that then blocks on k1 must report all three attachments in its deadlock set.
@Test
public void deadlockAttachments() throws Throwable {
Database db = Database.open(new DatabaseConfig().directPageAccess(false));
Index ix = db.openIndex("test");
// Create a deadlock among three threads.
var threads = new Thread[3];
// Barrier ensures each thread holds its exclusive lock before requesting the next key.
var cb = new CyclicBarrier(threads.length);
byte[] k1 = "k1".getBytes();
byte[] k2 = "k2".getBytes();
byte[] k3 = "k3".getBytes();
for (int i=0; i<threads.length; i++) {
final int fi = i;
threads[i] = new Thread(() -> {
try {
Transaction txn = db.newTransaction();
try {
txn.lockTimeout(10, TimeUnit.SECONDS);
if (fi == 0) {
txn.attach("txn1");
ix.lockExclusive(txn, k1);
cb.await();
ix.lockShared(txn, k2);
} else if (fi == 1) {
txn.attach("txn2");
ix.lockExclusive(txn, k2);
cb.await();
ix.lockUpgradable(txn, k3);
} else {
txn.attach("txn3");
ix.lockExclusive(txn, k3);
cb.await();
ix.lockUpgradable(txn, k1);
}
} finally {
txn.reset();
}
} catch (Exception e) {
// Ignore.
}
});
threads[i].start();
}
// Poll (up to ~10s) until all three threads are parked in timed waits,
// which indicates the lock cycle is established.
waitForDeadlock: {
check: for (int i=0; i<100; i++) {
for (int j=0; j<threads.length; j++) {
if (threads[j].getState() != Thread.State.TIMED_WAITING) {
Thread.sleep(100);
continue check;
}
}
break waitForDeadlock;
}
fail("no deadlock after waiting");
}
Transaction txn = db.newTransaction();
try {
ix.lockShared(txn, k1);
fail("no deadlock");
} catch (DeadlockException e) {
// This transaction only observed the cycle; it didn't create it.
assertFalse(e.isGuilty());
assertEquals("txn1", e.ownerAttachment());
Set<DeadlockInfo> set = e.deadlockSet();
assertEquals(3, set.size());
var expect = new HashSet<>(Set.of("txn1", "txn2", "txn3"));
for (DeadlockInfo info : set) {
Object att = info.ownerAttachment();
if (!expect.remove(att)) {
fail("Unknown attachments: " + att);
}
}
}
db.close();
for (Thread t : threads) {
t.join();
}
}
// Two transactions each hold one upgradable lock and request the other's;
// since neither request forms a true cycle detectable as deadlock here, both
// are expected to time out while reporting the owner's attachment.
@Test
public void selfDeadlock() throws Throwable {
Database db = Database.open(new DatabaseConfig().directPageAccess(false));
Index ix = db.openIndex("test");
Transaction txn1 = db.newTransaction();
txn1.attach("txn1");
Transaction txn2 = db.newTransaction();
txn2.attach("txn2");
byte[] key1 = "key1".getBytes();
byte[] key2 = "key2".getBytes();
txn1.lockUpgradable(ix.id(), key1);
txn2.lockUpgradable(ix.id(), key2);
try {
txn2.lockUpgradable(ix.id(), key1);
fail();
} catch (DeadlockException e) {
// Not expected to work.
throw e;
} catch (LockTimeoutException e) {
assertEquals("txn1", e.ownerAttachment());
}
try {
txn1.lockUpgradable(ix.id(), key2);
fail();
} catch (DeadlockException e) {
// Not expected to work.
throw e;
} catch (LockTimeoutException e) {
assertEquals("txn2", e.ownerAttachment());
}
// Verify owner attachment when not using an explicit transaction.
try {
ix.store(null, key1, key1);
fail();
} catch (LockTimeoutException e) {
assertEquals("txn1", e.ownerAttachment());
}
}
// Checks that when an exclusive request times out against shared lock holders,
// the reported owner attachment comes from one of those shared owners.
@Test
public void sharedOwner() throws Throwable {
// Not really a deadlock test. Checks for shared lock owner attachments.
Database db = Database.open(new DatabaseConfig().directPageAccess(false));
Index ix = db.openIndex("test");
Transaction txn1 = db.newTransaction();
txn1.attach("txn1");
Transaction txn2 = db.newTransaction();
txn2.attach("txn2");
byte[] key = "key".getBytes();
txn1.lockShared(ix.id(), key);
// No conflict.
txn2.lockUpgradable(ix.id(), key);
txn2.unlock();
try {
txn2.lockExclusive(ix.id(), key);
fail();
} catch (LockTimeoutException e) {
assertEquals("txn1", e.ownerAttachment());
}
txn2.lockShared(ix.id(), key);
Transaction txn3 = db.newTransaction();
try {
txn3.lockExclusive(ix.id(), key);
fail();
} catch (LockTimeoutException e) {
// With two shared owners, either attachment may be reported.
Object att = e.ownerAttachment();
assertTrue("txn1".equals(att) || "txn2".equals(att));
}
// Can still get attachment even when not waited.
txn3.lockTimeout(0, null);
try {
txn3.lockExclusive(ix.id(), key);
fail();
} catch (LockTimeoutException e) {
assertEquals(0, e.timeout());
Object att = e.ownerAttachment();
assertTrue("txn1".equals(att) || "txn2".equals(att));
}
// Verify owner attachment when not using an explicit transaction.
try {
ix.store(null, key, key);
fail();
} catch (LockTimeoutException e) {
Object att = e.ownerAttachment();
assertTrue("txn1".equals(att) || "txn2".equals(att));
}
}
// Regression test: a timeout against a transaction that deleted an entry must
// not throw ClassCastException in the attachment check, and must still report
// an attachment once one is set.
@Test
public void deleteTimeout() throws Throwable {
// Regression test. Deleting an entry within a transaction would cause the attachment
// check code to fail with a ClassCastException.
Database db = Database.open(new DatabaseConfig().directPageAccess(false));
Index ix = db.openIndex("test");
byte[] key = "key".getBytes();
ix.store(null, key, key);
Transaction txn = db.newTransaction();
ix.store(txn, key, null);
try {
ix.store(null, key, key);
fail();
} catch (LockTimeoutException e) {
// No attachment was set on the owning transaction yet.
assertNull(e.ownerAttachment());
}
// Also make sure that attachments can be retrieved.
txn.attach("foo");
try {
ix.store(null, key, key);
fail();
} catch (LockTimeoutException e) {
assertEquals("foo", e.ownerAttachment());
}
}
@Test
public void trivialShared() throws Throwable {
    // Trivial shared-lock deadlock, without an extra queued waiter.
    trivialShared(false);
}
@Test
public void trivialSharedWithQueue() throws Throwable {
    // Trivial shared-lock deadlock, with an extra waiter queued on the lock.
    trivialShared(true);
}
/**
 * Detect a trivial deadlock when acquiring a shared lock: this thread holds
 * key1 exclusively while a background locker holds key2 exclusively and is
 * blocked on key1, so requesting key2 here must fail fast with
 * DeadlockException.
 *
 * @param withQueue when true, a third locker is also queued on key2 as a
 *                  victim/bystander before the deadlock is provoked
 */
private void trivialShared(boolean withQueue) throws Throwable {
    // Detect a trivial deadlock when acquiring a shared lock.
    final byte[] key1 = "key1".getBytes();
    final byte[] key2 = "key2".getBytes();
    var locker1 = new Locker(mManager);
    locker1.doLockExclusive(1, key1, -1);
    // Background locker: takes key2 exclusively, then blocks on key1 (held
    // by locker1) — this forms one edge of the deadlock cycle.
    var task1 = startTestTaskAndWaitUntilBlocked(() -> {
        var locker = new Locker(mManager);
        try {
            locker.doLockExclusive(1, key2, -1);
            locker.doLockShared(1, key1, -1);
        } catch (Throwable e) {
            throw Utils.rethrow(e);
        } finally {
            locker.scopeUnlockAll();
        }
    });
    TestTask<?> task2 = null;
    if (withQueue) {
        // Another thread is stuck waiting, as a victim.
        task2 = startTestTaskAndWaitUntilBlocked(() -> {
            var locker = new Locker(mManager);
            try {
                locker.doLockShared(1, key2, -1);
            } catch (Throwable e) {
                throw Utils.rethrow(e);
            } finally {
                locker.scopeUnlockAll();
            }
        });
    }
    try {
        locker1.doLockShared(1, key2, -1);
        fail();
    } catch (DeadlockException e) {
        // Expected: cycle between locker1 and the background locker.
    }
    // Releasing locker1's locks lets the blocked tasks run to completion.
    locker1.scopeUnlockAll();
    task1.join();
    if (task2 != null) {
        task2.join();
    }
}
@Test
public void trivialUpgradable() throws Throwable {
    // Trivial upgradable-lock deadlock, without an extra queued waiter.
    trivialUpgradable(false);
}
@Test
public void trivialUpgradableWithQueue() throws Throwable {
    // Trivial upgradable-lock deadlock, with an extra waiter queued on the lock.
    trivialUpgradable(true);
}
/**
 * Detect a trivial deadlock when acquiring an upgradable lock: this thread
 * holds key1 exclusively while a background locker holds key2 exclusively and
 * is blocked on key1, so requesting key2 here must fail fast with
 * DeadlockException.
 *
 * @param withQueue when true, a third locker is also queued on key2 as a
 *                  victim/bystander before the deadlock is provoked
 */
private void trivialUpgradable(boolean withQueue) throws Throwable {
    // Detect a trivial deadlock when acquiring an upgradable lock.
    final byte[] key1 = "key1".getBytes();
    final byte[] key2 = "key2".getBytes();
    var locker1 = new Locker(mManager);
    locker1.doLockExclusive(1, key1, -1);
    // Background locker: takes key2 exclusively, then blocks on key1 (held
    // by locker1) — one edge of the deadlock cycle.
    var task1 = startTestTaskAndWaitUntilBlocked(() -> {
        var locker = new Locker(mManager);
        try {
            locker.doLockExclusive(1, key2, -1);
            locker.doLockUpgradable(1, key1, -1);
        } catch (Throwable e) {
            throw Utils.rethrow(e);
        } finally {
            locker.scopeUnlockAll();
        }
    });
    TestTask<?> task2 = null;
    if (withQueue) {
        // Another thread is stuck waiting, as a victim.
        task2 = startTestTaskAndWaitUntilBlocked(() -> {
            var locker = new Locker(mManager);
            try {
                locker.doLockUpgradable(1, key2, -1);
            } catch (Throwable e) {
                throw Utils.rethrow(e);
            } finally {
                locker.scopeUnlockAll();
            }
        });
    }
    try {
        locker1.doLockUpgradable(1, key2, -1);
        fail();
    } catch (DeadlockException e) {
        // Expected: cycle between locker1 and the background locker.
    }
    // Releasing locker1's locks lets the blocked tasks run to completion.
    locker1.scopeUnlockAll();
    task1.join();
    if (task2 != null) {
        task2.join();
    }
}
@Test
public void trivialExclusive() throws Throwable {
    // Detect a trivial deadlock when acquiring an exclusive lock: this thread
    // holds key1 shared while the background locker holds key2 shared and is
    // blocked upgrading key1 to exclusive; requesting key2 exclusively here
    // must fail fast with DeadlockException.
    final byte[] key1 = "key1".getBytes();
    final byte[] key2 = "key2".getBytes();
    var locker1 = new Locker(mManager);
    locker1.doLockShared(1, key1, -1);
    var task1 = startTestTaskAndWaitUntilBlocked(() -> {
        var locker = new Locker(mManager);
        try {
            locker.doLockShared(1, key2, -1);
            locker.doLockExclusive(1, key1, -1);
        } catch (Throwable e) {
            throw Utils.rethrow(e);
        } finally {
            locker.scopeUnlockAll();
        }
    });
    try {
        locker1.doLockExclusive(1, key2, -1);
        fail();
    } catch (DeadlockException e) {
        // Expected: cycle between locker1 and the background locker.
    }
    // Releasing locker1's lock lets the blocked task run to completion.
    locker1.scopeUnlockAll();
    task1.join();
}
// Launches every registered task thread.
private void startTasks() {
    for (Task t : mTasks) {
        t.start();
    }
}
// Waits for all tasks to finish, then rethrows the first captured failure of
// each. Joining everything first ensures one failing task doesn't leave the
// others running.
private void joinTasks() throws Throwable {
    for (Task t : mTasks) {
        t.join();
    }
    for (Task t : mTasks) {
        t.check();
    }
}
/**
 * Test worker thread that captures any throwable raised by its body so it can
 * be rethrown later on the main test thread via {@link #check()}.
 */
static abstract class Task extends Thread {
    // Failure thrown by doRun(), if any; volatile so check() (called from
    // another thread) observes it.
    private volatile Throwable mFailure;

    @Override
    public final void run() {
        try {
            doRun();
        } catch (Throwable t) {
            mFailure = t;
        }
    }

    // Rethrows the captured failure, if the task failed.
    void check() throws Throwable {
        Throwable t = mFailure;
        if (t != null) {
            throw t;
        }
    }

    // Task body; any throwable it raises is recorded rather than lost.
    abstract void doRun() throws Throwable;
}
} |
package seedu.taskitty.testutil;
import com.google.common.io.Files;
import guitests.guihandles.TaskCardHandle;
import javafx.geometry.Bounds;
import javafx.geometry.Point2D;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyCodeCombination;
import javafx.scene.input.KeyCombination;
import junit.framework.AssertionFailedError;
import org.loadui.testfx.GuiTest;
import org.testfx.api.FxToolkit;
import seedu.taskitty.TestApp;
import seedu.taskitty.commons.exceptions.IllegalValueException;
import seedu.taskitty.commons.util.FileUtil;
import seedu.taskitty.commons.util.XmlUtil;
import seedu.taskitty.model.TaskManager;
import seedu.taskitty.model.tag.Tag;
import seedu.taskitty.model.tag.UniqueTagList;
import seedu.taskitty.model.task.*;
import seedu.taskitty.storage.XmlSerializableTaskManager;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
/**
* A utility class for test cases.
*/
public class TestUtil {
public static String LS = System.lineSeparator();
public static void assertThrows(Class<? extends Throwable> expected, Runnable executable) {
try {
executable.run();
}
catch (Throwable actualException) {
if (!actualException.getClass().isAssignableFrom(expected)) {
String message = String.format("Expected thrown: %s, actual: %s", expected.getName(),
actualException.getClass().getName());
throw new AssertionFailedError(message);
} else return;
}
throw new AssertionFailedError(
String.format("Expected %s to be thrown, but nothing was thrown.", expected.getName()));
}
/**
* Folder used for temp files created during testing. Ignored by Git.
*/
public static String SANDBOX_FOLDER = FileUtil.getPath("./src/test/data/sandbox/");
public static final Task[] sampleTaskData = getSampleTaskData();
//@@author A0139930B
private static Task[] getSampleTaskData() {
try {
return new Task[]{
new Task(new Name("todo task"), new TaskPeriod(), new UniqueTagList()),
new Task(new Name("deadline task"),
new TaskPeriod(new TaskDate("23/12/2016"), new TaskTime("08:00")),
new UniqueTagList()),
new Task(new Name("event task"),
new TaskPeriod(new TaskDate("13/12/2016"), new TaskTime("13:00"),
new TaskDate("15/12/2016"), new TaskTime("10:00")),
new UniqueTagList()),
new Task(new Name("read clean code task"), new TaskPeriod(), new UniqueTagList()),
new Task(new Name("spring cleaning task"),
new TaskPeriod(new TaskDate("31/12/2016"), new TaskTime("15:00")),
new UniqueTagList()),
new Task(new Name("shop for xmas task"),
new TaskPeriod(new TaskDate("12/12/2016"), new TaskTime("10:00"),
new TaskDate("12/12/2016"), new TaskTime("19:00")),
new UniqueTagList()),
new Task(new Name("xmas dinner task"),
new TaskPeriod(new TaskDate("25/12/2016"), new TaskTime("18:30"),
new TaskDate("26/12/2016"), new TaskTime("02:00")),
new UniqueTagList())
};
} catch (IllegalValueException e) {
assert false;
//not possible
return null;
}
}
//@@author
public static final Tag[] sampleTagData = getSampleTagData();
private static Tag[] getSampleTagData() {
try {
return new Tag[]{
new Tag("relatives"),
new Tag("friends")
};
} catch (IllegalValueException e) {
assert false;
return null;
//not possible
}
}
public static List<Task> generateSampleTaskData() {
return Arrays.asList(sampleTaskData);
}
/**
* Appends the file name to the sandbox folder path.
* Creates the sandbox folder if it doesn't exist.
* @param fileName
* @return
*/
public static String getFilePathInSandboxFolder(String fileName) {
try {
FileUtil.createDirs(new File(SANDBOX_FOLDER));
} catch (IOException e) {
throw new RuntimeException(e);
}
return SANDBOX_FOLDER + fileName;
}
public static void createDataFileWithSampleData(String filePath) {
createDataFileWithData(generateSampleStorageAddressBook(), filePath);
}
public static <T> void createDataFileWithData(T data, String filePath) {
try {
File saveFileForTesting = new File(filePath);
FileUtil.createIfMissing(saveFileForTesting);
XmlUtil.saveDataToFile(saveFileForTesting, data);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static void main(String... s) {
createDataFileWithSampleData(TestApp.SAVE_LOCATION_FOR_TESTING);
}
public static TaskManager generateEmptyTaskManager() {
return new TaskManager(new UniqueTaskList(), new UniqueTagList());
}
public static XmlSerializableTaskManager generateSampleStorageAddressBook() {
return new XmlSerializableTaskManager(generateEmptyTaskManager());
}
/**
* Tweaks the {@code keyCodeCombination} to resolve the {@code KeyCode.SHORTCUT} to their
* respective platform-specific keycodes
*/
public static KeyCode[] scrub(KeyCodeCombination keyCodeCombination) {
List<KeyCode> keys = new ArrayList<>();
if (keyCodeCombination.getAlt() == KeyCombination.ModifierValue.DOWN) {
keys.add(KeyCode.ALT);
}
if (keyCodeCombination.getShift() == KeyCombination.ModifierValue.DOWN) {
keys.add(KeyCode.SHIFT);
}
if (keyCodeCombination.getMeta() == KeyCombination.ModifierValue.DOWN) {
keys.add(KeyCode.META);
}
if (keyCodeCombination.getControl() == KeyCombination.ModifierValue.DOWN) {
keys.add(KeyCode.CONTROL);
}
keys.add(keyCodeCombination.getCode());
return keys.toArray(new KeyCode[]{});
}
public static boolean isHeadlessEnvironment() {
String headlessProperty = System.getProperty("testfx.headless");
return headlessProperty != null && headlessProperty.equals("true");
}
public static void captureScreenShot(String fileName) {
File file = GuiTest.captureScreenshot();
try {
Files.copy(file, new File(fileName + ".png"));
} catch (IOException e) {
e.printStackTrace();
}
}
public static String descOnFail(Object... comparedObjects) {
return "Comparison failed \n"
+ Arrays.asList(comparedObjects).stream()
.map(Object::toString)
.collect(Collectors.joining("\n"));
}
public static void setFinalStatic(Field field, Object newValue) throws NoSuchFieldException, IllegalAccessException{
field.setAccessible(true);
// remove final modifier from field
Field modifiersField = Field.class.getDeclaredField("modifiers");
modifiersField.setAccessible(true);
// ~Modifier.FINAL is used to remove the final modifier from field so that its value is no longer
// final and can be changed
modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
field.set(null, newValue);
}
public static void initRuntime() throws TimeoutException {
FxToolkit.registerPrimaryStage();
FxToolkit.hideStage();
}
public static void tearDownRuntime() throws Exception {
FxToolkit.cleanupStages();
}
/**
* Gets private method of a class
* Invoke the method using method.invoke(objectInstance, params...)
*
* Caveat: only find method declared in the current Class, not inherited from supertypes
*/
public static Method getPrivateMethod(Class objectClass, String methodName) throws NoSuchMethodException {
Method method = objectClass.getDeclaredMethod(methodName);
method.setAccessible(true);
return method;
}
public static void renameFile(File file, String newFileName) {
try {
Files.copy(file, new File(newFileName));
} catch (IOException e1) {
e1.printStackTrace();
}
}
/**
* Gets mid point of a node relative to the screen.
* @param node
* @return
*/
public static Point2D getScreenMidPoint(Node node) {
double x = getScreenPos(node).getMinX() + node.getLayoutBounds().getWidth() / 2;
double y = getScreenPos(node).getMinY() + node.getLayoutBounds().getHeight() / 2;
return new Point2D(x,y);
}
/**
* Gets mid point of a node relative to its scene.
* @param node
* @return
*/
public static Point2D getSceneMidPoint(Node node) {
double x = getScenePos(node).getMinX() + node.getLayoutBounds().getWidth() / 2;
double y = getScenePos(node).getMinY() + node.getLayoutBounds().getHeight() / 2;
return new Point2D(x,y);
}
/**
* Gets the bound of the node relative to the parent scene.
* @param node
* @return
*/
public static Bounds getScenePos(Node node) {
return node.localToScene(node.getBoundsInLocal());
}
public static Bounds getScreenPos(Node node) {
return node.localToScreen(node.getBoundsInLocal());
}
public static double getSceneMaxX(Scene scene) {
return scene.getX() + scene.getWidth();
}
public static double getSceneMaxY(Scene scene) {
return scene.getX() + scene.getHeight();
}
public static Object getLastElement(List<?> list) {
return list.get(list.size() - 1);
}
/**
* Removes a subset from the list of persons.
* @param persons The list of persons
* @param personsToRemove The subset of persons.
* @return The modified persons after removal of the subset from persons.
*/
public static TestTask[] removePersonsFromList(final TestTask[] persons, TestTask... personsToRemove) {
List<TestTask> listOfPersons = asList(persons);
listOfPersons.removeAll(asList(personsToRemove));
return listOfPersons.toArray(new TestTask[listOfPersons.size()]);
}
/**
* Returns a copy of the list with the person at specified index removed.
* @param list original list to copy from
* @param targetIndexInOneIndexedFormat e.g. if the first element to be removed, 1 should be given as index.
*/
public static TestTask[] removePersonFromList(final TestTask[] list, int targetIndexInOneIndexedFormat) {
return removePersonsFromList(list, list[targetIndexInOneIndexedFormat-1]);
}
/**
* Replaces persons[i] with a person.
* @param persons The array of persons.
* @param person The replacement person
* @param index The index of the person to be replaced.
* @return
*/
public static TestTask[] replacePersonFromList(TestTask[] persons, TestTask person, int index) {
TestTask[] editedList = Arrays.copyOf(persons, persons.length);
editedList[index] = person;
return editedList;
}
/**
* Appends persons to the array of persons.
* @param persons A array of persons.
* @param personsToAdd The persons that are to be appended behind the original array.
* @return The modified array of persons.
*/
public static TestTask[] addPersonsToList(final TestTask[] persons, TestTask... personsToAdd) {
List<TestTask> listOfPersons = asList(persons);
listOfPersons.addAll(asList(personsToAdd));
return listOfPersons.toArray(new TestTask[listOfPersons.size()]);
}
private static <T> List<T> asList(T[] objs) {
List<T> list = new ArrayList<>();
for(T obj : objs) {
list.add(obj);
}
return list;
}
public static boolean compareCardAndPerson(TaskCardHandle card, ReadOnlyTask person) {
return card.isSamePerson(person);
}
public static Tag[] getTagList(String tags) {
if ("".equals(tags)) {
return new Tag[]{};
}
final String[] split = tags.split(", ");
final List<Tag> collect = Arrays.asList(split).stream().map(e -> {
try {
return new Tag(e.replaceFirst("Tag: ", ""));
} catch (IllegalValueException e1) {
//not possible
assert false;
return null;
}
}).collect(Collectors.toList());
return collect.toArray(new Tag[split.length]);
}
} |
package test.com.qiniu.qvs;
import com.qiniu.common.QiniuException;
import com.qiniu.http.Response;
import com.qiniu.qvs.NameSpaceManager;
import com.qiniu.qvs.model.NameSpace;
import com.qiniu.qvs.model.PatchOperation;
import com.qiniu.util.Auth;
import test.com.qiniu.TestConfig;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
public class NameSpaceTest {
Auth auth = TestConfig.testAuth;
private NameSpaceManager nameSpaceManager;
private Response res = null;
private Response res2 = null;
private final String namespaceId = "3nm4x1e0xw855";
private final String name = "" + System.currentTimeMillis();
@BeforeEach
public void setUp() throws Exception {
this.nameSpaceManager = new NameSpaceManager(auth);
}
@Test
@Tag("IntegrationTest")
public void testCreateNameSpace() {
NameSpace nameSpace = new NameSpace();
nameSpace.setName(name);
nameSpace.setAccessType("rtmp");
nameSpace.setRtmpUrlType(NameSpace.Static);
nameSpace.setDomains(new String[]{name + ".qnlinking.com"});
try {
res = nameSpaceManager.createNameSpace(nameSpace);
System.out.println(res.bodyString());
} catch (QiniuException e) {
e.printStackTrace();
} finally {
if (res != null) {
res.close();
}
}
}
@Test
@Tag("IntegrationTest")
public void testQueryNameSpace() {
try {
res = nameSpaceManager.queryNameSpace(namespaceId);
System.out.println(res.bodyString());
} catch (QiniuException e) {
e.printStackTrace();
} finally {
if (res != null) {
res.close();
}
}
}
@Test
@Tag("IntegrationTest")
public void testUpdateNameSpace() {
PatchOperation[] patchOperation = { new PatchOperation("replace", "recordTemplateApplyAll", true) };
try {
res = nameSpaceManager.updateNameSpace(namespaceId, patchOperation);
System.out.println(res.bodyString());
} catch (QiniuException e) {
e.printStackTrace();
} finally {
if (res != null) {
res.close();
}
}
}
@Test
@Tag("IntegrationTest")
public void testListNameSpace() {
int offset = 0;
int line = 1;
String sortBy = "asc:updatedAt";
try {
res = nameSpaceManager.listNameSpace(offset, line, sortBy);
System.out.println(res.bodyString());
} catch (QiniuException e) {
e.printStackTrace();
} finally {
if (res != null) {
res.close();
}
}
}
@Test
@Tag("IntegrationTest")
public void testDisableNameSpace() {
try {
res = nameSpaceManager.disableNameSpace(namespaceId);
res2 = nameSpaceManager.enableNameSpace(namespaceId);
System.out.println(res.bodyString());
} catch (QiniuException e) {
e.printStackTrace();
} finally {
if (res != null) {
res.close();
}
}
}
@Test
@Tag("IntegrationTest")
public void testEnableNameSpace() {
try {
res = nameSpaceManager.enableNameSpace(namespaceId);
System.out.println(res.bodyString());
} catch (QiniuException e) {
e.printStackTrace();
} finally {
if (res != null) {
res.close();
}
}
}
// @Test
// @Tag("IntegrationTest")
// public void testDeleteNameSpace() {
// try {
// res = nameSpaceManager.deleteNameSpace("3nm4x1e07mmvz");
// System.out.println(res.bodyString());
// } catch (QiniuException e) {
// e.printStackTrace();
// } finally {
// if (res != null) {
// res.close();
} |
package com.samourai.wallet.api;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.net.Uri;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import com.auth0.android.jwt.JWT;
import com.samourai.wallet.BuildConfig;
import com.samourai.wallet.R;
import com.samourai.wallet.SamouraiWallet;
import com.samourai.wallet.bip47.BIP47Meta;
import com.samourai.wallet.bip47.BIP47Util;
import com.samourai.wallet.bip47.rpc.NotSecp256k1Exception;
import com.samourai.wallet.bip47.rpc.PaymentCode;
import com.samourai.wallet.crypto.DecryptionException;
import com.samourai.wallet.hd.HD_Address;
import com.samourai.wallet.hd.HD_Wallet;
import com.samourai.wallet.hd.HD_WalletFactory;
import com.samourai.wallet.network.dojo.DojoUtil;
import com.samourai.wallet.payload.PayloadUtil;
import com.samourai.wallet.segwit.BIP49Util;
import com.samourai.wallet.segwit.BIP84Util;
import com.samourai.wallet.segwit.SegwitAddress;
import com.samourai.wallet.segwit.bech32.Bech32Util;
import com.samourai.wallet.send.BlockedUTXO;
import com.samourai.wallet.send.FeeUtil;
import com.samourai.wallet.send.MyTransactionOutPoint;
import com.samourai.wallet.send.RBFUtil;
import com.samourai.wallet.send.SuggestedFee;
import com.samourai.wallet.send.UTXO;
import com.samourai.wallet.send.UTXOFactory;
import com.samourai.wallet.tor.TorManager;
import com.samourai.wallet.util.AddressFactory;
import com.samourai.wallet.util.AppUtil;
import com.samourai.wallet.util.FormatsUtil;
import com.samourai.wallet.util.PrefsUtil;
import com.samourai.wallet.util.SentToFromBIP47Util;
import com.samourai.wallet.util.WebUtil;
import com.samourai.wallet.utxos.UTXOUtil;
import com.samourai.wallet.whirlpool.WhirlpoolMeta;
import com.samourai.whirlpool.client.wallet.AndroidWhirlpoolWalletService;
import com.samourai.whirlpool.client.wallet.WhirlpoolWallet;
import com.samourai.whirlpool.client.wallet.beans.WhirlpoolAccount;
import org.apache.commons.lang3.StringUtils;
import org.bitcoinj.core.Address;
import org.bitcoinj.core.AddressFormatException;
import org.bitcoinj.core.ECKey;
import org.bitcoinj.core.Sha256Hash;
import org.bitcoinj.core.TransactionOutPoint;
import org.bitcoinj.crypto.MnemonicException;
import org.bitcoinj.script.Script;
import org.bouncycastle.util.encoders.Base64;
import org.bouncycastle.util.encoders.Hex;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.math.BigInteger;
import java.net.URLEncoder;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.spec.InvalidKeySpecException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import io.reactivex.subjects.BehaviorSubject;
import java8.util.Optional;
import static com.samourai.wallet.util.LogUtil.debug;
import static com.samourai.wallet.util.LogUtil.info;
public class APIFactory {
private static String APP_TOKEN = null; // API app token
private static String ACCESS_TOKEN = null; // API access token
private static long ACCESS_TOKEN_REFRESH = 300L; // in seconds

// Cached balances per account type (whirlpool premix/postmix/badbank).
private static long xpub_balance = 0L;
private static long xpub_premix_balance = 0L;
private static long xpub_postmix_balance = 0L;
private static long xpub_badbank_balance = 0L;
// Balance per xpub string.
private static HashMap<String, Long> xpub_amounts = null;

// Transaction lists keyed by xpub, one map per account type.
private static HashMap<String,List<Tx>> xpub_txs = null;
private static HashMap<String,List<Tx>> premix_txs = null;
private static HashMap<String,List<Tx>> postmix_txs = null;
private static HashMap<String,List<Tx>> badbank_txs = null;

// Lookup tables for unspent outputs (account / purpose / derivation path);
// exact key format is set by the multiaddr/unspent parsers elsewhere in
// this class — confirm there before relying on it.
private static HashMap<String,Integer> unspentAccounts = null;
private static HashMap<String,Integer> unspentBIP49 = null;
private static HashMap<String,Integer> unspentBIP84 = null;
private static HashMap<String,Integer> unspentBIP84PreMix = null;
private static HashMap<String,Integer> unspentBIP84PostMix = null;
private static HashMap<String,Integer> unspentBIP84BadBank = null;
private static HashMap<String,String> unspentPaths = null;

// UTXO sets, one per account type.
private static HashMap<String,UTXO> utxos = null;
private static HashMap<String,UTXO> utxosPreMix = null;
private static HashMap<String,UTXO> utxosPostMix = null;
private static HashMap<String,UTXO> utxosBadBank = null;

// Raw unspent-output JSON payloads (usage not visible in this chunk).
private static JSONObject utxoObj0 = null;
private static JSONObject utxoObj1 = null;

// BIP47-related balances (key format not visible in this chunk).
private static HashMap<String, Long> bip47_amounts = null;

public boolean walletInit = false;
// Emits the wallet balance to subscribers; seeded with 0 in the constructor.
public BehaviorSubject<Long> walletBalanceObserver = BehaviorSubject.create();

// Chain tip as last reported by the backend.
private static long latest_block_height = -1L;
private static String latest_block_hash = null;

// Singleton state; context is refreshed on every getInstance() call.
private static APIFactory instance = null;
private static Context context = null;
private static AlertDialog alertDialog = null;
// Private: instances are obtained via getInstance(Context).
private APIFactory() {
    // Seed the balance observer so subscribers receive an initial value.
    walletBalanceObserver.onNext(0L);
}
/**
 * Returns the singleton, creating it and initializing all static caches on
 * first use. The supplied context is retained for subsequent API calls.
 *
 * Synchronized (consistent with reset()/getToken()/APITokenRequired()) to
 * close the check-then-act race where two threads could both observe
 * instance == null and initialize the static maps twice.
 */
public static synchronized APIFactory getInstance(Context ctx) {
    context = ctx;
    if(instance == null) {
        xpub_amounts = new HashMap<String, Long>();
        xpub_txs = new HashMap<String,List<Tx>>();
        premix_txs = new HashMap<String,List<Tx>>();
        postmix_txs = new HashMap<String,List<Tx>>();
        badbank_txs = new HashMap<String,List<Tx>>();
        xpub_balance = 0L;
        xpub_premix_balance = 0L;
        xpub_postmix_balance = 0L;
        xpub_badbank_balance = 0L;
        bip47_amounts = new HashMap<String, Long>();
        unspentPaths = new HashMap<String, String>();
        unspentAccounts = new HashMap<String, Integer>();
        unspentBIP49 = new HashMap<String, Integer>();
        unspentBIP84 = new HashMap<String, Integer>();
        unspentBIP84PostMix = new HashMap<String, Integer>();
        unspentBIP84PreMix = new HashMap<String, Integer>();
        unspentBIP84BadBank = new HashMap<String, Integer>();
        utxos = new HashMap<String, UTXO>();
        utxosPreMix = new HashMap<String, UTXO>();
        utxosPostMix = new HashMap<String, UTXO>();
        utxosBadBank = new HashMap<String, UTXO>();
        instance = new APIFactory();
    }
    return instance;
}
/**
 * Clears all cached balances, transactions, and UTXO state, and resets the
 * shared UTXOFactory. Note: maps created in getInstance() are cleared in
 * place, while the unspent/utxo maps are replaced with fresh instances
 * (dropping any aliases held elsewhere).
 */
public synchronized void reset() {
    xpub_balance = 0L;
    xpub_premix_balance = 0L;
    xpub_postmix_balance = 0L;
    xpub_badbank_balance = 0L;
    xpub_amounts.clear();
    bip47_amounts.clear();
    xpub_txs.clear();
    premix_txs.clear();
    postmix_txs.clear();
    badbank_txs.clear();
    unspentPaths = new HashMap<String, String>();
    unspentAccounts = new HashMap<String, Integer>();
    unspentBIP49 = new HashMap<String, Integer>();
    unspentBIP84 = new HashMap<String, Integer>();
    unspentBIP84PostMix = new HashMap<String, Integer>();
    unspentBIP84PreMix = new HashMap<String, Integer>();
    unspentBIP84BadBank = new HashMap<String, Integer>();
    utxos = new HashMap<String, UTXO>();
    utxosPostMix = new HashMap<String, UTXO>();
    utxosPreMix = new HashMap<String, UTXO>();
    utxosBadBank = new HashMap<String, UTXO>();
    UTXOFactory.getInstance().clear();
}
/**
 * Returns an access token that is not (about to be) expired, requesting a
 * fresh one from the backend when needed. May return null/empty when no
 * token could be obtained.
 */
public String getAccessTokenNotExpired() {
    boolean setupDojo = DojoUtil.getInstance(context).getDojoParams() != null;
    String currentAccessToken = getAccessToken();
    if(currentAccessToken == null) {
        // no current token => request new token
        Log.v("APIFactory", "getAccessTokenNotExpired => requesting new, setupDojo="+setupDojo);
        getToken(setupDojo);
        currentAccessToken = getAccessToken();
    }
    // still no token => not available
    if (StringUtils.isEmpty(currentAccessToken)) {
        Log.v("APIFactory", "getAccessTokenNotExpired => not available");
        return currentAccessToken;
    }
    // check current token not expired
    // (expiry is checked with a leeway of ACCESS_TOKEN_REFRESH seconds)
    JWT jwt = new JWT(currentAccessToken);
    if(jwt.isExpired(getAccessTokenRefresh())) {
        // expired => request new token
        Log.v("APIFactory", "getAccessTokenNotExpired => expired, request new");
        getToken(setupDojo);
        currentAccessToken = getAccessToken();
    }
    return currentAccessToken;
}
/**
 * Returns the cached access token, lazily fetching one when a Dojo is
 * configured. Returns "" (never the cached token) when no Dojo is set up.
 */
public String getAccessToken() {
    if(ACCESS_TOKEN == null && APIFactory.getInstance(context).APITokenRequired()) {
        getToken(true);
    }
    return DojoUtil.getInstance(context).getDojoParams() == null ? "" : ACCESS_TOKEN;
}
// Stores the API access token (shared static state).
public void setAccessToken(String accessToken) {
    ACCESS_TOKEN = accessToken;
}
// Stores the API app token (shared static state).
public void setAppToken(String token) {
    APP_TOKEN = token;
}
/**
 * Returns the app token: the explicitly configured one when set, otherwise
 * the token reconstructed from the XOR'd build-config segments.
 * NOTE(review): throws NPE if neither APP_TOKEN nor the build-config
 * segments are available (getXORKey() returns null) — same as before.
 */
public String getAppToken() {
    return APP_TOKEN != null ? APP_TOKEN : new String(getXORKey());
}
/**
 * Returns the API key bytes: the configured app token when set, otherwise
 * the XOR of the two Base64 build-config segments. Returns null when
 * neither source is available.
 */
public byte[] getXORKey() {
    if (APP_TOKEN != null) {
        return APP_TOKEN.getBytes();
    }
    // Both build-config segments are required to reconstruct the key.
    if (BuildConfig.XOR_1.length() == 0 || BuildConfig.XOR_2.length() == 0) {
        return null;
    }
    byte[] segment0 = Base64.decode(BuildConfig.XOR_1);
    byte[] segment1 = Base64.decode(BuildConfig.XOR_2);
    return xor(segment0, segment1);
}
// Byte-wise XOR of two arrays; the result length follows b0 (b1 must be at
// least as long as b0).
private byte[] xor(byte[] b0, byte[] b1) {
    byte[] out = b0.clone();
    for (int i = 0; i < out.length; i++) {
        out[i] ^= b1[i];
    }
    return out;
}
// Leeway (seconds) used when checking token expiry.
public long getAccessTokenRefresh() {
    return ACCESS_TOKEN_REFRESH;
}
/**
 * Keep-alive check for the API session. Returns true when no token is
 * needed (offline, or no Dojo configured) or when an expired token was
 * successfully refreshed; returns false otherwise — including when the
 * current token is present and still valid (preserved behavior).
 */
public boolean stayingAlive() {
    // No token needed while offline or without a Dojo backend.
    if (AppUtil.getInstance(context).isOfflineMode() || !APITokenRequired()) {
        return true;
    }
    APIFactory api = APIFactory.getInstance(context);
    if (api.getAccessToken() == null) {
        api.getToken(false);
    }
    String token = api.getAccessToken();
    if (token != null) {
        JWT jwt = new JWT(token);
        if (jwt.isExpired(api.getAccessTokenRefresh())) {
            // Expired: alive only if the refresh succeeds.
            return api.getToken(false);
        }
    }
    return false;
}
// An API token is only required when a Dojo backend is configured.
public synchronized boolean APITokenRequired() {
    return DojoUtil.getInstance(context).getDojoParams() != null;
}
/**
 * Requests a fresh API access token from the backend.
 *
 * @param setupDojo when true, force the Tor (.onion) endpoint even before
 *                  Dojo parameters are stored (i.e. during Dojo setup)
 * @return true when no token is required, when offline, or when a token was
 *         obtained and stored; false on any network/parse failure
 */
public synchronized boolean getToken(boolean setupDojo) {
    if(!APITokenRequired()) {
        return true;
    }
    String _url = SamouraiWallet.getInstance().isTestNet() ? WebUtil.SAMOURAI_API2_TESTNET : WebUtil.SAMOURAI_API2;
    // Use the Tor endpoint when a Dojo is configured or being set up.
    if(DojoUtil.getInstance(context).getDojoParams() != null || setupDojo) {
        _url = SamouraiWallet.getInstance().isTestNet() ? WebUtil.SAMOURAI_API2_TESTNET_TOR : WebUtil.SAMOURAI_API2_TOR;
    }
    debug("APIFactory", "getToken() url:" + _url);
    JSONObject jsonObject = null;
    try {
        String response = null;
        if(AppUtil.getInstance(context).isOfflineMode()) {
            // Offline: nothing to fetch; treated as success.
            return true;
        }
        else if(!TorManager.getInstance(context).isRequired()) {
            // use POST
            StringBuilder args = new StringBuilder();
            args.append("apikey=");
            args.append(new String(getXORKey()));
            response = WebUtil.getInstance(context).postURL(_url + "auth/login?", args.toString());
            info("APIFactory", "API token response:" + response);
        }
        else {
            // Route the request through Tor.
            HashMap<String,String> args = new HashMap<String,String>();
            args.put("apikey", new String(getXORKey()));
            info("APIFactory", "API key (XOR):" + new String(getXORKey()));
            info("APIFactory", "API key url:" + _url);
            response = WebUtil.getInstance(context).tor_postURL(_url + "auth/login", args);
            info("APIFactory", "API token response:" + response);
        }
        try {
            // Expected shape: { "authorizations": { "access_token": ... } }
            jsonObject = new JSONObject(response);
            if(jsonObject != null && jsonObject.has("authorizations")) {
                JSONObject authObj = jsonObject.getJSONObject("authorizations");
                if(authObj.has("access_token")) {
                    info("APIFactory", "setting access token:" + authObj.getString("access_token"));
                    setAccessToken(authObj.getString("access_token"));
                    return true;
                }
            }
        }
        catch(JSONException je) {
            je.printStackTrace();
            jsonObject = null;
            return false;
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
        return false;
    }
    // Response parsed but carried no access token; treated as success
    // (NOTE(review): possibly intentional for non-auth backends — confirm).
    return true;
}
/**
 * Fetches the multiaddr payload for the given xpubs (from cache when
 * offline, via clearnet POST or Tor POST otherwise).
 *
 * @param xpubs xpub strings to query; xpubs[0] is used as the key for the
 *              parsed transaction list
 * @param parse when true, the response is parsed and balance/tx caches plus
 *              the wallet balance observer are updated as a side effect
 * @return the raw JSON response, or null on failure
 */
private synchronized JSONObject getXPUB(String[] xpubs, boolean parse) {
    String _url = WebUtil.getAPIUrl(context);
    JSONObject jsonObject = null;
    try {
        String response = null;
        if(AppUtil.getInstance(context).isOfflineMode()) {
            // Offline: replay the last persisted multiaddr payload.
            response = PayloadUtil.getInstance(context).deserializeMultiAddr().toString();
        }
        else if(!TorManager.getInstance(context).isRequired()) {
            // use POST
            StringBuilder args = new StringBuilder();
            args.append("active=");
            args.append(StringUtils.join(xpubs, URLEncoder.encode("|", "UTF-8")));
            info("APIFactory", "XPUB:" + args.toString());
            args.append("&at=");
            args.append(getAccessToken());
            response = WebUtil.getInstance(context).postURL(_url + "multiaddr?", args.toString());
            info("APIFactory", "XPUB response:" + response);
        }
        else {
            // Tor: parameters are passed as a map, unencoded.
            HashMap<String,String> args = new HashMap<String,String>();
            args.put("active", StringUtils.join(xpubs, "|"));
            info("APIFactory", "XPUB:" + args.toString());
            args.put("at", getAccessToken());
            info("APIFactory", "XPUB access token:" + getAccessToken());
            response = WebUtil.getInstance(context).tor_postURL(_url + "multiaddr", args);
            info("APIFactory", "XPUB response:" + response);
        }
        try {
            jsonObject = new JSONObject(response);
            if(!parse) {
                return jsonObject;
            }
            xpub_txs.put(xpubs[0], new ArrayList<Tx>());
            parseXPUB(jsonObject);
            // Publish the account-0 balance net of blocked UTXOs.
            xpub_amounts.put(HD_WalletFactory.getInstance(context).get().getAccount(0).xpubstr(), xpub_balance - BlockedUTXO.getInstance().getTotalValueBlocked0());
            walletBalanceObserver.onNext( xpub_balance - BlockedUTXO.getInstance().getTotalValueBlocked0());
        }
        catch(JSONException je) {
            je.printStackTrace();
            jsonObject = null;
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    return jsonObject;
}
/**
 * Registers an xpub with the backend, marking it "restore" (server rescans
 * history) or "new", and tagging BIP49/BIP84 xpubs as segwit. On an "ok"
 * response the matching registration flag (or the supplied tag) is persisted.
 *
 * @param xpub    extended public key to register
 * @param purpose BIP purpose: 44, 49 or 84
 * @param tag     optional prefs key to set instead of the per-purpose flag
 * @return parsed server response, or null on error
 */
private synchronized JSONObject registerXPUB(String xpub, int purpose, String tag) {
    String _url = WebUtil.getAPIUrl(context);
    JSONObject jsonObject = null;
    try {
        String response = null;
        // read the restore marker once; both transports use the same value
        boolean isRestore = PrefsUtil.getInstance(context).getValue(PrefsUtil.IS_RESTORE, false);
        if(!TorManager.getInstance(context).isRequired()) {
            // clearnet: form-encoded POST
            StringBuilder args = new StringBuilder();
            args.append("xpub=");
            args.append(xpub);
            args.append("&type=");
            args.append(isRestore ? "restore" : "new");
            if(purpose == 49) {
                args.append("&segwit=");
                args.append("bip49");
            }
            else if(purpose == 84) {
                args.append("&segwit=");
                args.append("bip84");
            }
            info("APIFactory", "XPUB:" + args.toString());
            args.append("&at=");
            args.append(getAccessToken());
            response = WebUtil.getInstance(context).postURL(_url + "xpub?", args.toString());
            info("APIFactory", "XPUB response:" + response);
        }
        else {
            // Tor: key/value POST via the tor client
            HashMap<String,String> args = new HashMap<String,String>();
            args.put("xpub", xpub);
            args.put("type", isRestore ? "restore" : "new");
            if(purpose == 49) {
                args.put("segwit", "bip49");
            }
            else if(purpose == 84) {
                args.put("segwit", "bip84");
            }
            info("APIFactory", "XPUB:" + args.toString());
            args.put("at", getAccessToken());
            response = WebUtil.getInstance(context).tor_postURL(_url + "xpub", args);
            info("APIFactory", "XPUB response:" + response);
        }
        try {
            jsonObject = new JSONObject(response);
            info("APIFactory", "XPUB response:" + jsonObject.toString());
            if(jsonObject.has("status") && jsonObject.getString("status").equals("ok")) {
                if(tag != null) {
                    PrefsUtil.getInstance(context).setValue(tag, true);
                    // a successful post-restore registration clears the restore marker
                    if(tag.equals(PrefsUtil.XPUBPOSTREG)) {
                        PrefsUtil.getInstance(context).removeValue(PrefsUtil.IS_RESTORE);
                    }
                }
                else if(purpose == 44) {
                    PrefsUtil.getInstance(context).setValue(PrefsUtil.XPUB44REG, true);
                }
                else if(purpose == 49) {
                    PrefsUtil.getInstance(context).setValue(PrefsUtil.XPUB49REG, true);
                }
                else if(purpose == 84) {
                    PrefsUtil.getInstance(context).setValue(PrefsUtil.XPUB84REG, true);
                }
            }
        }
        catch(JSONException je) {
            je.printStackTrace();
            jsonObject = null;
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    return jsonObject;
}
/**
 * Parses a 'multiaddr' API payload into in-memory wallet state: the wallet
 * balance, latest block height/hash, per-xpub balances and chain indexes,
 * BIP47 per-address bookkeeping, and the transaction list. On success the
 * payload is also re-serialized to disk for offline use.
 *
 * NOTE(review): mutates many shared maps (xpub_amounts, xpub_txs,
 * bip47_amounts, AddressFactory/BIP47Meta state) — callers are expected to
 * hold the synchronized lock on this instance, which this method provides.
 *
 * @param jsonObject the multiaddr payload; null yields false
 * @return true if a non-null payload was processed, false otherwise
 * @throws JSONException on malformed payload fields
 */
private synchronized boolean parseXPUB(JSONObject jsonObject) throws JSONException {
    if(jsonObject != null) {
        // sighting count per BIP47 pubkey; used to retire unspent entries after
        // the full incoming lookahead window has been seen with zero balance
        HashMap<String,Integer> pubkeys = new HashMap<String,Integer>();
        // overall wallet balance
        if(jsonObject.has("wallet")) {
            JSONObject walletObj = (JSONObject)jsonObject.get("wallet");
            if(walletObj.has("final_balance")) {
                xpub_balance = walletObj.getLong("final_balance");
                debug("APIFactory", "xpub_balance:" + xpub_balance);
            }
        }
        // chain tip info, used below to compute per-tx confirmations
        if(jsonObject.has("info")) {
            JSONObject infoObj = (JSONObject)jsonObject.get("info");
            if(infoObj.has("latest_block")) {
                JSONObject blockObj = (JSONObject)infoObj.get("latest_block");
                if(blockObj.has("height")) {
                    latest_block_height = blockObj.getLong("height");
                }
                if(blockObj.has("hash")) {
                    latest_block_hash = blockObj.getString("hash");
                }
            }
        }
        // per-address entries: either one of our xpubs (balance + derivation
        // indexes) or a plain address (BIP47 payment-channel address)
        if(jsonObject.has("addresses")) {
            JSONArray addressesArray = (JSONArray)jsonObject.get("addresses");
            JSONObject addrObj = null;
            for(int i = 0; i < addressesArray.length(); i++) {
                addrObj = (JSONObject)addressesArray.get(i);
                if(addrObj != null && addrObj.has("final_balance") && addrObj.has("address")) {
                    if(FormatsUtil.getInstance().isValidXpub((String)addrObj.get("address"))) {
                        // xpub entry: record balance and sync receive/change indexes
                        // into the matching BIP84 / BIP49 / BIP44 account
                        xpub_amounts.put((String)addrObj.get("address"), addrObj.getLong("final_balance"));
                        if(addrObj.getString("address").equals(BIP84Util.getInstance(context).getWallet().getAccount(0).xpubstr()) ||
                        addrObj.getString("address").equals(BIP84Util.getInstance(context).getWallet().getAccount(0).zpubstr())) {
                            AddressFactory.getInstance().setHighestBIP84ReceiveIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            AddressFactory.getInstance().setHighestBIP84ChangeIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                            BIP84Util.getInstance(context).getWallet().getAccount(0).getChain(0).setAddrIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            BIP84Util.getInstance(context).getWallet().getAccount(0).getChain(1).setAddrIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                        }
                        else if(addrObj.getString("address").equals(BIP49Util.getInstance(context).getWallet().getAccount(0).xpubstr()) ||
                        addrObj.getString("address").equals(BIP49Util.getInstance(context).getWallet().getAccount(0).ypubstr())) {
                            AddressFactory.getInstance().setHighestBIP49ReceiveIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            AddressFactory.getInstance().setHighestBIP49ChangeIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                            BIP49Util.getInstance(context).getWallet().getAccount(0).getChain(0).setAddrIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            BIP49Util.getInstance(context).getWallet().getAccount(0).getChain(1).setAddrIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                        }
                        else if(AddressFactory.getInstance().xpub2account().get((String) addrObj.get("address")) != null) {
                            // BIP44 account known to the address factory
                            AddressFactory.getInstance().setHighestTxReceiveIdx(AddressFactory.getInstance().xpub2account().get((String) addrObj.get("address")), addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            AddressFactory.getInstance().setHighestTxChangeIdx(AddressFactory.getInstance().xpub2account().get((String)addrObj.get("address")), addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                            try {
                                HD_WalletFactory.getInstance(context).get().getAccount(0).getChain(0).setAddrIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                                HD_WalletFactory.getInstance(context).get().getAccount(0).getChain(1).setAddrIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                            }
                            catch(IOException | MnemonicException.MnemonicLengthException e) {
                                ; // best-effort index sync; wallet load failures are ignored here
                            }
                        }
                        else {
                            ; // xpub not recognized as one of ours: skip
                        }
                    }
                    else {
                        // plain address entry: BIP47 payment-channel bookkeeping
                        long amount = 0L;
                        String addr = null;
                        addr = (String)addrObj.get("address");
                        amount = addrObj.getLong("final_balance");
                        String pcode = BIP47Meta.getInstance().getPCode4Addr(addr);
                        if(addrObj.has("pubkey")) {
                            // extend the incoming lookahead window around this pubkey
                            bip47Lookahead(pcode, addrObj.getString("pubkey"));
                        }
                        if(addr != null && addr.length() > 0 && pcode != null && pcode.length() > 0 && BIP47Meta.getInstance().getIdx4Addr(addr) != null) {
                            int idx = BIP47Meta.getInstance().getIdx4Addr(addr);
                            if(amount > 0L) {
                                // funded channel address: track as unspent, advance incoming index
                                BIP47Meta.getInstance().addUnspent(pcode, idx);
                                if(idx > BIP47Meta.getInstance().getIncomingIdx(pcode)) {
                                    BIP47Meta.getInstance().setIncomingIdx(pcode, idx);
                                }
                            }
                            else {
                                if(addrObj.has("pubkey")) {
                                    // zero balance: only retire the entry after the whole
                                    // lookahead window has been observed for this pubkey
                                    String pubkey = addrObj.getString("pubkey");
                                    if(pubkeys.containsKey(pubkey)) {
                                        int count = pubkeys.get(pubkey);
                                        count++;
                                        if(count == BIP47Meta.INCOMING_LOOKAHEAD) {
                                            BIP47Meta.getInstance().removeUnspent(pcode, Integer.valueOf(idx));
                                        }
                                        else {
                                            pubkeys.put(pubkey, count + 1);
                                        }
                                    }
                                    else {
                                        pubkeys.put(pubkey, 1);
                                    }
                                }
                                else {
                                    BIP47Meta.getInstance().removeUnspent(pcode, Integer.valueOf(idx));
                                }
                            }
                            if(addr != null) {
                                bip47_amounts.put(addr, amount);
                            }
                        }
                    }
                }
            }
        }
        // transaction list: build Tx entries keyed by owning xpub
        if(jsonObject.has("txs")) {
            List<String> seenHashes = new ArrayList<String>();
            JSONArray txArray = (JSONArray)jsonObject.get("txs");
            JSONObject txObj = null;
            for(int i = 0; i < txArray.length(); i++) {
                txObj = (JSONObject)txArray.get(i);
                long height = 0L;
                long amount = 0L;
                long ts = 0L;
                String hash = null;
                String addr = null;
                String _addr = null;
                if(txObj.has("block_height")) {
                    height = txObj.getLong("block_height");
                }
                else {
                    height = -1L; // 0 confirmations
                }
                if(txObj.has("hash")) {
                    hash = (String)txObj.get("hash");
                }
                if(txObj.has("result")) {
                    amount = txObj.getLong("result");
                }
                if(txObj.has("time")) {
                    ts = txObj.getLong("time");
                }
                if(!seenHashes.contains(hash)) {
                    seenHashes.add(hash);
                }
                // prefer the xpub ("m") owner from inputs/outputs; fall back to
                // the plain address when no xpub is attached
                if(txObj.has("inputs")) {
                    JSONArray inputArray = (JSONArray)txObj.get("inputs");
                    JSONObject inputObj = null;
                    for(int j = 0; j < inputArray.length(); j++) {
                        inputObj = (JSONObject)inputArray.get(j);
                        if(inputObj.has("prev_out")) {
                            JSONObject prevOutObj = (JSONObject)inputObj.get("prev_out");
                            if(prevOutObj.has("xpub")) {
                                JSONObject xpubObj = (JSONObject)prevOutObj.get("xpub");
                                addr = (String)xpubObj.get("m");
                            }
                            else if(prevOutObj.has("addr") && BIP47Meta.getInstance().getPCode4Addr((String)prevOutObj.get("addr")) != null) {
                                _addr = (String)prevOutObj.get("addr");
                            }
                            else {
                                _addr = (String)prevOutObj.get("addr");
                            }
                        }
                    }
                }
                if(txObj.has("out")) {
                    JSONArray outArray = (JSONArray)txObj.get("out");
                    JSONObject outObj = null;
                    for(int j = 0; j < outArray.length(); j++) {
                        outObj = (JSONObject)outArray.get(j);
                        if(outObj.has("xpub")) {
                            JSONObject xpubObj = (JSONObject)outObj.get("xpub");
                            addr = (String)xpubObj.get("m");
                        }
                        else {
                            _addr = (String)outObj.get("addr");
                        }
                    }
                }
                if(addr != null || _addr != null) {
                    if(addr == null) {
                        addr = _addr;
                    }
                    // confirmations = (tip - height) + 1 when both are known
                    Tx tx = new Tx(hash, addr, amount, ts, (latest_block_height > 0L && height > 0L) ? (latest_block_height - height) + 1 : 0);
                    if(SentToFromBIP47Util.getInstance().getByHash(hash) != null) {
                        tx.setPaymentCode(SentToFromBIP47Util.getInstance().getByHash(hash));
                    }
                    if(BIP47Meta.getInstance().getPCode4Addr(addr) != null) {
                        tx.setPaymentCode(BIP47Meta.getInstance().getPCode4Addr(addr));
                    }
                    if(!xpub_txs.containsKey(addr)) {
                        xpub_txs.put(addr, new ArrayList<Tx>());
                    }
                    if(FormatsUtil.getInstance().isValidXpub(addr)) {
                        xpub_txs.get(addr).add(tx);
                    }
                    else {
                        // non-xpub owner: file the tx under account 0's xpub
                        xpub_txs.get(AddressFactory.getInstance().account2xpub().get(0)).add(tx);
                    }
                    if(height > 0L) {
                        // confirmed: no longer replaceable
                        RBFUtil.getInstance().remove(hash);
                    }
                }
            }
            // drop BIP47 sent-to records whose tx no longer appears in history
            List<String> hashesSentToViaBIP47 = SentToFromBIP47Util.getInstance().getAllHashes();
            if(hashesSentToViaBIP47.size() > 0) {
                for(String s : hashesSentToViaBIP47) {
                    if(!seenHashes.contains(s)) {
                        SentToFromBIP47Util.getInstance().removeHash(s);
                    }
                }
            }
        }
        // cache the payload for offline mode; failures are non-fatal
        try {
            PayloadUtil.getInstance(context).serializeMultiAddr(jsonObject);
        }
        catch(IOException | DecryptionException e) {
            ;
        }
        return true;
    }
    return false;
}
private synchronized void bip47Lookahead(String pcode, String addr) {
debug("APIFactory", "bip47Lookahead():" + addr);
debug("APIFactory", "bip47Lookahead():" + pcode);
debug("APIFactory", "bip47Lookahead():" + BIP47Meta.getInstance().getPCode4Addr(addr));
int idx = BIP47Meta.getInstance().getIdx4Addr(addr);
debug("APIFactory", "bip47Lookahead():" + idx);
try {
idx++;
for (int i = idx; i < (idx + BIP47Meta.INCOMING_LOOKAHEAD); i++) {
info("APIFactory", "receive from " + i + ":" + BIP47Util.getInstance(context).getReceivePubKey(new PaymentCode(pcode), i));
BIP47Meta.getInstance().getIdx4AddrLookup().put(BIP47Util.getInstance(context).getReceivePubKey(new PaymentCode(pcode), i), i);
BIP47Meta.getInstance().getPCode4AddrLookup().put(BIP47Util.getInstance(context).getReceivePubKey(new PaymentCode(pcode), i), pcode.toString());
// _addrs.add(BIP47Util.getInstance(RefreshService.this).getReceivePubKey(new PaymentCode(pcode), i));
}
idx
if (idx >= 2) {
for (int i = idx; i >= (idx - (BIP47Meta.INCOMING_LOOKAHEAD - 1)); i
info("APIFactory", "receive from " + i + ":" + BIP47Util.getInstance(context).getReceivePubKey(new PaymentCode(pcode), i));
BIP47Meta.getInstance().getIdx4AddrLookup().put(BIP47Util.getInstance(context).getReceivePubKey(new PaymentCode(pcode), i), i);
BIP47Meta.getInstance().getPCode4AddrLookup().put(BIP47Util.getInstance(context).getReceivePubKey(new PaymentCode(pcode), i), pcode.toString());
// _addrs.add(BIP47Util.getInstance(context).getReceivePubKey(new PaymentCode(pcode), i));
}
}
// addrs = _addrs.toArray(new String[_addrs.size()]);
} catch (NullPointerException | NotSecp256k1Exception | InvalidKeyException | NoSuchAlgorithmException | NoSuchProviderException | InvalidKeySpecException e) {
;
}
}
/*
public synchronized JSONObject deleteXPUB(String xpub, boolean bip49) {
String _url = SamouraiWallet.getInstance().isTestNet() ? WebUtil.SAMOURAI_API2_TESTNET : WebUtil.SAMOURAI_API2;
JSONObject jsonObject = null;
try {
String response = null;
ECKey ecKey = null;
if(AddressFactory.getInstance(context).xpub2account().get(xpub) != null || xpub.equals(BIP49Util.getInstance(context).getWallet().getAccount(0).ypubstr())) {
HD_Address addr = null;
if(bip49) {
addr = BIP49Util.getInstance(context).getWallet().getAccountAt(0).getChange().getAddressAt(0);
}
else {
addr = HD_WalletFactory.getInstance(context).get().getAccount(0).getChain(AddressFactory.CHANGE_CHAIN).getAddressAt(0);
}
ecKey = addr.getECKey();
if(ecKey != null && ecKey.hasPrivKey()) {
String sig = ecKey.signMessage(xpub);
String address = null;
if(bip49) {
SegwitAddress segwitAddress = new SegwitAddress(ecKey.getPubKey(), SamouraiWallet.getInstance().getCurrentNetworkParams());
address = segwitAddress.getAddressAsString();
}
else {
address = ecKey.toAddress(SamouraiWallet.getInstance().getCurrentNetworkParams()).toString();
}
if(!TorUtil.getInstance(context).statusFromBroadcast()) {
StringBuilder args = new StringBuilder();
args.append("message=");
args.append(xpub);
args.append("address=");
args.append(address);
args.append("&signature=");
args.append(Uri.encode(sig));
info("APIFactory", "delete XPUB:" + args.toString());
response = WebUtil.getInstance(context).deleteURL(_url + "delete/" + xpub, args.toString());
info("APIFactory", "delete XPUB response:" + response);
}
else {
HashMap<String,String> args = new HashMap<String,String>();
args.put("message", xpub);
args.put("address", address);
args.put("signature", Uri.encode(sig));
info("APIFactory", "delete XPUB:" + args.toString());
response = WebUtil.getInstance(context).tor_deleteURL(_url + "delete", args);
info("APIFactory", "delete XPUB response:" + response);
}
try {
jsonObject = new JSONObject(response);
if(jsonObject.has("status") && jsonObject.getString("status").equals("ok")) {
;
}
}
catch(JSONException je) {
je.printStackTrace();
jsonObject = null;
}
}
}
}
catch(Exception e) {
jsonObject = null;
e.printStackTrace();
}
return jsonObject;
}
*/
/**
 * Locks an xpub on the backend so its registration can no longer be changed.
 * Proves ownership by signing the literal message "lock" with the private key
 * of change-chain address 0 for the account implied by purpose/tag, then POSTs
 * address + signature to xpub/&lt;xpub&gt;/lock/. On an "ok" response the matching
 * lock preference (or the supplied tag) is persisted.
 *
 * @param xpub    xpub to lock; must be one of this wallet's accounts
 *                (BIP44 account, BIP49 ypub, BIP84 zpub, or a Whirlpool pre/post-mix zpub)
 * @param purpose BIP purpose: 44 (default), 49 or 84
 * @param tag     optional prefs key (e.g. pre-/post-mix lock) selecting the
 *                Whirlpool account and the flag to persist
 * @return parsed server response, or null on error
 */
public synchronized JSONObject lockXPUB(String xpub, int purpose, String tag) {
    String _url = WebUtil.getAPIUrl(context);
    JSONObject jsonObject = null;
    try {
        String response = null;
        ECKey ecKey = null;
        // only proceed for xpubs we actually own
        if(AddressFactory.getInstance(context).xpub2account().get(xpub) != null ||
        xpub.equals(BIP49Util.getInstance(context).getWallet().getAccount(0).ypubstr()) ||
        xpub.equals(BIP84Util.getInstance(context).getWallet().getAccount(0).zpubstr()) ||
        xpub.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).zpubstr()) ||
        xpub.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).zpubstr())
        ) {
            // pick the signing key: change-chain address 0 of the relevant account
            HD_Address addr = null;
            switch(purpose) {
                case 49:
                    addr = BIP49Util.getInstance(context).getWallet().getAccountAt(0).getChange().getAddressAt(0);
                    break;
                case 84:
                    // tag selects pre-mix / post-mix Whirlpool accounts; default is account 0
                    if(tag != null && tag.equals(PrefsUtil.XPUBPRELOCK)) {
                        addr = BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).getChange().getAddressAt(0);
                    }
                    else if(tag != null && tag.equals(PrefsUtil.XPUBPOSTLOCK)) {
                        addr = BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).getChange().getAddressAt(0);
                    }
                    else {
                        addr = BIP84Util.getInstance(context).getWallet().getAccountAt(0).getChange().getAddressAt(0);
                    }
                    break;
                default:
                    addr = HD_WalletFactory.getInstance(context).get().getAccount(0).getChain(AddressFactory.CHANGE_CHAIN).getAddressAt(0);
                    break;
            }
            ecKey = addr.getECKey();
            if(ecKey != null && ecKey.hasPrivKey()) {
                String sig = ecKey.signMessage("lock");
                // derive the address string in the encoding matching the purpose
                String address = null;
                switch(purpose) {
                    case 49:
                        SegwitAddress p2shp2wpkh = new SegwitAddress(ecKey.getPubKey(), SamouraiWallet.getInstance().getCurrentNetworkParams());
                        address = p2shp2wpkh.getAddressAsString();
                        break;
                    case 84:
                        SegwitAddress segwitAddress = new SegwitAddress(ecKey.getPubKey(), SamouraiWallet.getInstance().getCurrentNetworkParams());
                        address = segwitAddress.getBech32AsString();
                        break;
                    default:
                        address = ecKey.toAddress(SamouraiWallet.getInstance().getCurrentNetworkParams()).toString();
                        break;
                }
                if(!TorManager.getInstance(context).isRequired()) {
                    // clearnet: form-encoded POST; signature is URL-encoded
                    StringBuilder args = new StringBuilder();
                    args.append("address=");
                    args.append(address);
                    args.append("&signature=");
                    args.append(Uri.encode(sig));
                    args.append("&message=");
                    args.append("lock");
                    // info("APIFactory", "lock XPUB:" + args.toString());
                    args.append("&at=");
                    args.append(getAccessToken());
                    response = WebUtil.getInstance(context).postURL(_url + "xpub/" + xpub + "/lock/", args.toString());
                    // info("APIFactory", "lock XPUB response:" + response);
                }
                else {
                    // Tor: key/value POST; signature sent raw (tor client does its own encoding)
                    HashMap<String,String> args = new HashMap<String,String>();
                    args.put("address", address);
                    // args.put("signature", Uri.encode(sig));
                    args.put("signature", sig);
                    args.put("message", "lock");
                    args.put("at", getAccessToken());
                    info("APIFactory", "lock XPUB:" + _url);
                    info("APIFactory", "lock XPUB:" + args.toString());
                    response = WebUtil.getInstance(context).tor_postURL(_url + "xpub/" + xpub + "/lock/", args);
                    info("APIFactory", "lock XPUB response:" + response);
                }
                try {
                    jsonObject = new JSONObject(response);
                    if(jsonObject.has("status") && jsonObject.getString("status").equals("ok")) {
                        // persist the lock flag: explicit tag wins, else per-purpose default
                        if(tag != null) {
                            PrefsUtil.getInstance(context).setValue(tag, true);
                        }
                        else {
                            switch(purpose) {
                                case 49:
                                    PrefsUtil.getInstance(context).setValue(PrefsUtil.XPUB49LOCK, true);
                                    break;
                                case 84:
                                    PrefsUtil.getInstance(context).setValue(PrefsUtil.XPUB84LOCK, true);
                                    break;
                                default:
                                    PrefsUtil.getInstance(context).setValue(PrefsUtil.XPUB44LOCK, true);
                                    break;
                            }
                        }
                    }
                }
                catch(JSONException je) {
                    je.printStackTrace();
                    jsonObject = null;
                }
            }
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    return jsonObject;
}
/** Returns the most recently parsed chain-tip height (0 if none seen yet). */
public long getLatestBlockHeight() {
    return this.latest_block_height;
}
/** Returns the most recently parsed chain-tip block hash (null if none seen yet). */
public String getLatestBlockHash() {
    return this.latest_block_hash;
}
/**
 * Fetches a BIP47 notification transaction by hash and parses it against the
 * given notification address (extracting the blinded payment code).
 *
 * @param hash tx hash to fetch
 * @param addr this wallet's notification address
 * @return parsed server response, or null on error
 */
public JSONObject getNotifTx(String hash, String addr) {
    String _url = WebUtil.getAPIUrl(context);
    JSONObject jsonObject = null;
    try {
        StringBuilder url = new StringBuilder(_url);
        url.append("tx/");
        url.append(hash);
        url.append("?fees=1");
        url.append("&at=");
        url.append(getAccessToken());
        // use the context-bound WebUtil instance, consistent with getTxInfo()
        // (previously passed null here)
        String response = WebUtil.getInstance(context).getURL(url.toString());
        try {
            jsonObject = new JSONObject(response);
            parseNotifTx(jsonObject, addr, hash);
        }
        catch(JSONException je) {
            je.printStackTrace();
            jsonObject = null;
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    return jsonObject;
}
/**
 * Fetches the multiaddr payload for a BIP47 notification address and scans it
 * for notification transactions.
 *
 * @param addr notification address to query
 * @return parsed server response, or null on error
 */
public JSONObject getNotifAddress(String addr) {
    // use the centralized API url helper, consistent with every sibling method
    // (previously selected SAMOURAI_API2 / _TESTNET by hand)
    String _url = WebUtil.getAPIUrl(context);
    JSONObject jsonObject = null;
    try {
        StringBuilder url = new StringBuilder(_url);
        url.append("multiaddr?active=");
        url.append(addr);
        url.append("&at=");
        url.append(getAccessToken());
        // context-bound WebUtil instance (previously passed null here)
        String response = WebUtil.getInstance(context).getURL(url.toString());
        try {
            jsonObject = new JSONObject(response);
            parseNotifAddress(jsonObject, addr);
        }
        catch(JSONException je) {
            je.printStackTrace();
            jsonObject = null;
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    return jsonObject;
}
/**
 * Scans a notification address' tx list and fetches any confirmed notification
 * tx whose incoming status has not yet been recorded. Returns early at the
 * first unconfirmed tx (no height, or height &lt; 1), preserving the original
 * short-circuit behavior — presumably the list is ordered so the remainder is
 * also unconfirmed; TODO confirm server-side ordering.
 *
 * @param jsonObject multiaddr payload for the notification address
 * @param addr       the notification address queried
 * @throws JSONException on malformed payload fields
 */
public void parseNotifAddress(JSONObject jsonObject, String addr) throws JSONException {
    if(jsonObject == null || !jsonObject.has("txs")) {
        return;
    }
    JSONArray txArray = jsonObject.getJSONArray("txs");
    for(int i = 0; i < txArray.length(); i++) {
        JSONObject txObj = (JSONObject)txArray.get(i);
        // simplified from `!has || (has && < 1)` — logically equivalent
        if(!txObj.has("block_height") || txObj.getLong("block_height") < 1L) {
            return;
        }
        if(txObj.has("hash")) {
            String hash = (String)txObj.get("hash");
            // only fetch txs we have not already classified
            if(BIP47Meta.getInstance().getIncomingStatus(hash) == null) {
                getNotifTx(hash, addr);
            }
        }
    }
}
/**
 * Parses a BIP47 notification transaction: recovers the sender's payment code
 * and, if new, records it and seeds the incoming receive-address lookahead.
 *
 * Steps: (1) take the designated pubkey and outpoint from input 0 and derive
 * the blinding mask; (2) find the 80-byte OP_RETURN payload (script prefix
 * "6a4c50") on an output paying our notification address; (3) unblind the
 * payload into a PaymentCode; (4) register lookup entries for its first
 * INCOMING_LOOKAHEAD receive addresses.
 *
 * @param jsonObject tx payload from the backend
 * @param addr       this wallet's notification address
 * @param hash       tx hash (recorded as "incoming" on success)
 * @throws JSONException on malformed payload fields
 */
public void parseNotifTx(JSONObject jsonObject, String addr, String hash) throws JSONException {
    info("APIFactory", "notif address:" + addr);
    info("APIFactory", "hash:" + hash);
    if(jsonObject != null) {
        byte[] mask = null;
        byte[] payload = null;
        PaymentCode pcode = null;
        if(jsonObject.has("inputs")) {
            JSONArray inArray = (JSONArray)jsonObject.get("inputs");
            if(inArray.length() > 0) {
                JSONObject objInput = (JSONObject)inArray.get(0);
                byte[] pubkey = null;
                String strScript = objInput.getString("sig");
                info("APIFactory", "scriptsig:" + strScript);
                // segwit spend (empty scriptsig or P2SH-P2WPKH redeem prefix "160014"):
                // the pubkey is the second witness item; otherwise extract it from the scriptsig
                if((strScript == null || strScript.length() == 0 || strScript.startsWith("160014")) && objInput.has("witness")) {
                    JSONArray witnessArray = (JSONArray)objInput.get("witness");
                    if(witnessArray.length() == 2) {
                        pubkey = Hex.decode((String)witnessArray.get(1));
                    }
                }
                else {
                    Script script = new Script(Hex.decode(strScript));
                    info("APIFactory", "pubkey from script:" + Hex.toHexString(script.getPubKey()));
                    pubkey = script.getPubKey();
                }
                ECKey pKey = new ECKey(null, pubkey, true);
                info("APIFactory", "address from script:" + pKey.toAddress(SamouraiWallet.getInstance().getCurrentNetworkParams()).toString());
                // info("APIFactory", "uncompressed public key from script:" + Hex.toHexString(pKey.decompress().getPubKey()));
                if(((JSONObject)inArray.get(0)).has("outpoint")) {
                    // serialize the spent outpoint (txid:vout) — it is the shared
                    // secret input for the BIP47 blinding mask
                    JSONObject received_from = ((JSONObject) inArray.get(0)).getJSONObject("outpoint");
                    String strHash = received_from.getString("txid");
                    int idx = received_from.getInt("vout");
                    byte[] hashBytes = Hex.decode(strHash);
                    Sha256Hash txHash = new Sha256Hash(hashBytes);
                    TransactionOutPoint outPoint = new TransactionOutPoint(SamouraiWallet.getInstance().getCurrentNetworkParams(), idx, txHash);
                    byte[] outpoint = outPoint.bitcoinSerialize();
                    info("APIFactory", "outpoint:" + Hex.toHexString(outpoint));
                    try {
                        mask = BIP47Util.getInstance(context).getIncomingMask(pubkey, outpoint);
                        info("APIFactory", "mask:" + Hex.toHexString(mask));
                    }
                    catch(Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        if(jsonObject.has("outputs")) {
            JSONArray outArray = (JSONArray)jsonObject.get("outputs");
            JSONObject outObj = null;
            boolean isIncoming = false;
            String _addr = null;
            String script = null;
            String op_return = null;
            for(int j = 0; j < outArray.length(); j++) {
                outObj = (JSONObject)outArray.get(j);
                if(outObj.has("address")) {
                    _addr = outObj.getString("address");
                    if(addr.equals(_addr)) {
                        // an output pays our notification address -> incoming notification
                        isIncoming = true;
                    }
                }
                if(outObj.has("scriptpubkey")) {
                    script = outObj.getString("scriptpubkey");
                    // OP_RETURN + OP_PUSHDATA1 + 0x50 (80-byte BIP47 payload)
                    if(script.startsWith("6a4c50")) {
                        op_return = script;
                    }
                }
            }
            if(isIncoming && op_return != null && op_return.startsWith("6a4c50")) {
                // strip the 3-byte script prefix, keep the blinded payload
                payload = Hex.decode(op_return.substring(6));
            }
        }
        if(mask != null && payload != null) {
            try {
                // unblind the payload and record the sender's payment code if new
                byte[] xlat_payload = PaymentCode.blind(payload, mask);
                info("APIFactory", "xlat_payload:" + Hex.toHexString(xlat_payload));
                pcode = new PaymentCode(xlat_payload);
                info("APIFactory", "incoming payment code:" + pcode.toString());
                if(!pcode.toString().equals(BIP47Util.getInstance(context).getPaymentCode().toString()) && pcode.isValid() && !BIP47Meta.getInstance().incomingExists(pcode.toString())) {
                    BIP47Meta.getInstance().setLabel(pcode.toString(), "");
                    BIP47Meta.getInstance().setIncomingStatus(hash);
                }
            }
            catch(AddressFormatException afe) {
                afe.printStackTrace();
            }
        }
        // get receiving addresses for spends from decoded payment code
        if(pcode != null) {
            try {
                // initial lookup
                for(int i = 0; i < BIP47Meta.INCOMING_LOOKAHEAD; i++) {
                    info("APIFactory", "receive from " + i + ":" + BIP47Util.getInstance(context).getReceivePubKey(pcode, i));
                    BIP47Meta.getInstance().getIdx4AddrLookup().put(BIP47Util.getInstance(context).getReceivePubKey(pcode, i), i);
                    BIP47Meta.getInstance().getPCode4AddrLookup().put(BIP47Util.getInstance(context).getReceivePubKey(pcode, i), pcode.toString());
                }
            }
            catch(Exception e) {
                ; // lookahead seeding is best-effort
            }
        }
    }
}
/**
 * Returns the confirmation count of a notification transaction, or 0 when it
 * is unconfirmed or any error occurs.
 *
 * @param hash tx hash to query
 * @return confirmations (never negative), 0 on error
 */
public synchronized int getNotifTxConfirmations(String hash) {
    String _url = WebUtil.getAPIUrl(context);
    try {
        StringBuilder url = new StringBuilder(_url);
        url.append("tx/");
        url.append(hash);
        url.append("?fees=1");
        url.append("&at=");
        url.append(getAccessToken());
        // context-bound WebUtil instance, consistent with the other fetches
        // (previously passed null here); dead jsonObject=null store removed
        String response = WebUtil.getInstance(context).getURL(url.toString());
        JSONObject jsonObject = new JSONObject(response);
        return parseNotifTx(jsonObject);
    }
    catch(Exception e) {
        e.printStackTrace();
    }
    return 0;
}
/**
 * Computes confirmations for a tx payload as (tip height - tx height) + 1,
 * clamped to zero; returns 0 when the payload has no block/height info.
 *
 * @param jsonObject tx payload from the backend
 * @return confirmation count, never negative
 * @throws JSONException on malformed payload fields
 */
public synchronized int parseNotifTx(JSONObject jsonObject) throws JSONException {
    if(jsonObject == null || !jsonObject.has("block") || !jsonObject.getJSONObject("block").has("height")) {
        return 0;
    }
    long tip = getLatestBlockHeight();
    long height = jsonObject.getJSONObject("block").getLong("height");
    int confirmations = (int)((tip - height) + 1);
    return confirmations < 0 ? 0 : confirmations;
}
/**
 * Fetches the unspent outputs for the given xpubs (offline cache, clearnet
 * POST, or Tor POST depending on mode) and feeds the raw response through
 * parseUnspentOutputs(). In online mode the raw response is also returned as
 * JSON; in offline mode null is returned.
 *
 * @param xpubs xpubs/addresses to query
 * @return parsed server response (online only), or null
 */
public synchronized JSONObject getUnspentOutputs(String[] xpubs) {
    String apiUrl = WebUtil.getAPIUrl(context);
    JSONObject result = null;
    String response = null;
    try {
        if(AppUtil.getInstance(context).isOfflineMode()) {
            // offline: replay the last serialized UTXO payload from disk
            response = PayloadUtil.getInstance(context).deserializeUTXO().toString();
        }
        else if(TorManager.getInstance(context).isRequired()) {
            // Tor: key/value POST via the tor client
            HashMap<String,String> params = new HashMap<String,String>();
            params.put("active", StringUtils.join(xpubs, "|"));
            params.put("at", getAccessToken());
            response = WebUtil.getInstance(context).tor_postURL(apiUrl + "unspent", params);
        }
        else {
            // clearnet: form-encoded POST
            StringBuilder body = new StringBuilder();
            body.append("active=");
            body.append(StringUtils.join(xpubs, URLEncoder.encode("|", "UTF-8")));
            debug("APIFactory", "UTXO args:" + body.toString());
            body.append("&at=");
            body.append(getAccessToken());
            response = WebUtil.getInstance(context).postURL(apiUrl + "unspent?", body.toString());
            debug("APIFactory", "UTXO:" + response);
        }
        parseUnspentOutputs(response);
    }
    catch(Exception e) {
        result = null;
        e.printStackTrace();
    }
    if(!AppUtil.getInstance(context).isOfflineMode()) {
        try {
            result = new JSONObject(response);
        }
        catch(JSONException ignored) {
            ; // malformed/absent response: return null
        }
    }
    return result;
}
/**
 * Parses an 'unspent' API payload into the in-memory UTXO set: builds a
 * MyTransactionOutPoint per output, records derivation paths / BIP47 lookups
 * per address, groups outpoints by scriptpubkey in {@code utxos}, and files
 * each output with UTXOFactory by script type (P2WPKH / P2SH-P2WPKH / P2PKH).
 *
 * @param unspents raw JSON response body; null yields false
 * @return true if at least one unspent output was present and parsed
 */
private synchronized boolean parseUnspentOutputs(String unspents) {
    if(unspents != null) {
        try {
            JSONObject jsonObj = new JSONObject(unspents);
            if(jsonObj == null || !jsonObj.has("unspent_outputs")) {
                return false;
            }
            JSONArray utxoArray = jsonObj.getJSONArray("unspent_outputs");
            if(utxoArray == null || utxoArray.length() == 0) {
                return false;
            }
            for (int i = 0; i < utxoArray.length(); i++) {
                JSONObject outDict = utxoArray.getJSONObject(i);
                byte[] hashBytes = Hex.decode((String)outDict.get("tx_hash"));
                Sha256Hash txHash = Sha256Hash.wrap(hashBytes);
                int txOutputN = ((Number)outDict.get("tx_output_n")).intValue();
                BigInteger value = BigInteger.valueOf(((Number)outDict.get("value")).longValue());
                String script = (String)outDict.get("script");
                byte[] scriptBytes = Hex.decode(script);
                int confirmations = ((Number)outDict.get("confirmations")).intValue();
                String path = null;
                try {
                    // recover the address from the script (bech32 or base58)
                    String address = null;
                    if(Bech32Util.getInstance().isBech32Script(script)) {
                        address = Bech32Util.getInstance().getAddressFromScript(script);
                    }
                    else {
                        address = new Script(scriptBytes).getToAddress(SamouraiWallet.getInstance().getCurrentNetworkParams()).toString();
                    }
                    if(outDict.has("xpub")) {
                        // HD output: remember its derivation path and owning account type
                        JSONObject xpubObj = (JSONObject)outDict.get("xpub");
                        path = (String)xpubObj.get("path");
                        String m = (String)xpubObj.get("m");
                        unspentPaths.put(address, path);
                        if(m.equals(BIP49Util.getInstance(context).getWallet().getAccount(0).xpubstr())) {
                            unspentBIP49.put(address, 0); // assume account 0
                        }
                        else if(m.equals(BIP84Util.getInstance(context).getWallet().getAccount(0).xpubstr())) {
                            unspentBIP84.put(address, 0); // assume account 0
                        }
                        else {
                            unspentAccounts.put(address, AddressFactory.getInstance(context).xpub2account().get(m));
                        }
                    }
                    else if(outDict.has("pubkey")) {
                        // BIP47 output: propagate the pubkey's index/pcode lookups to the address
                        int idx = BIP47Meta.getInstance().getIdx4AddrLookup().get(outDict.getString("pubkey"));
                        BIP47Meta.getInstance().getIdx4AddrLookup().put(address, idx);
                        String pcode = BIP47Meta.getInstance().getPCode4AddrLookup().get(outDict.getString("pubkey"));
                        BIP47Meta.getInstance().getPCode4AddrLookup().put(address, pcode);
                        debug("APIFactory", outDict.getString("pubkey") + "," + pcode);
                        debug("APIFactory", outDict.getString("pubkey") + "," + idx);
                    }
                    else {
                        ; // neither HD nor BIP47 metadata: track the outpoint only
                    }
                    // Construct the output
                    MyTransactionOutPoint outPoint = new MyTransactionOutPoint(txHash, txOutputN, value, scriptBytes, address);
                    outPoint.setConfirmations(confirmations);
                    // group outpoints that share the same scriptpubkey
                    if(utxos.containsKey(script)) {
                        utxos.get(script).getOutpoints().add(outPoint);
                    }
                    else {
                        UTXO utxo = new UTXO();
                        utxo.getOutpoints().add(outPoint);
                        utxo.setPath(path);
                        utxos.put(script, utxo);
                    }
                    // classify by script type for the UTXO factory
                    if(Bech32Util.getInstance().isBech32Script(script)) {
                        UTXOFactory.getInstance().addP2WPKH(txHash.toString(), txOutputN, script, utxos.get(script));
                    }
                    else if(Address.fromBase58(SamouraiWallet.getInstance().getCurrentNetworkParams(), address).isP2SHAddress()) {
                        UTXOFactory.getInstance().addP2SH_P2WPKH(txHash.toString(), txOutputN, script, utxos.get(script));
                    }
                    else {
                        UTXOFactory.getInstance().addP2PKH(txHash.toString(), txOutputN, script, utxos.get(script));
                    }
                }
                catch(Exception e) {
                    // a single malformed output must not abort the whole parse
                    e.printStackTrace();
                }
            }
            // debug tally of the parsed UTXO set
            long amount = 0L;
            for(String key : utxos.keySet()) {
                for(MyTransactionOutPoint out : utxos.get(key).getOutpoints()) {
                    debug("APIFactory", "utxo:" + out.getAddress() + "," + out.getValue());
                    debug("APIFactory", "utxo:" + utxos.get(key).getPath());
                    amount += out.getValue().longValue();
                }
            }
            debug("APIFactory", "utxos by value (post-parse):" + amount);
            return true;
        }
        catch(JSONException je) {
            ; // unparseable payload: fall through to false
        }
    }
    return false;
}
/**
 * Fetches the raw multiaddr payload for a single address (no parsing into
 * wallet state).
 *
 * @param addr address to query
 * @return raw server response, or null on error
 */
public synchronized JSONObject getAddressInfo(String addr) {
    String[] single = { addr };
    return getXPUB(single, false);
}
/**
 * Fetches full transaction details (including fee info) for the given hash.
 *
 * @param hash tx hash to query
 * @return parsed server response, or null on error
 */
public synchronized JSONObject getTxInfo(String hash) {
    JSONObject result = null;
    try {
        // same URL as before, built by concatenation instead of StringBuilder
        String url = WebUtil.getAPIUrl(context) + "tx/" + hash + "?fees=true" + "&at=" + getAccessToken();
        String response = WebUtil.getInstance(context).getURL(url);
        result = new JSONObject(response);
    }
    catch(Exception e) {
        result = null;
        e.printStackTrace();
    }
    return result;
}
/**
 * Fetches a block header by block hash.
 *
 * @param hash block hash to query
 * @return parsed server response, or null on error
 */
public synchronized JSONObject getBlockHeader(String hash) {
    JSONObject result = null;
    try {
        // same URL as before, built by concatenation instead of StringBuilder
        String url = WebUtil.getAPIUrl(context) + "header/" + hash + "?at=" + getAccessToken();
        String response = WebUtil.getInstance(context).getURL(url);
        result = new JSONObject(response);
    }
    catch(Exception e) {
        result = null;
        e.printStackTrace();
    }
    return result;
}
/**
 * Fetches current fee estimates (offline: replays the serialized fee payload)
 * and feeds them into FeeUtil via parseDynamicFees_bitcoind().
 *
 * @return parsed server response, or null on error
 */
public synchronized JSONObject getDynamicFees() {
    JSONObject jsonObject = null;
    try {
        String _url = WebUtil.getAPIUrl(context);
        String response = null;
        if(!AppUtil.getInstance(context).isOfflineMode()) {
            // context-bound WebUtil instance, consistent with the other fetches
            // (previously passed null here)
            response = WebUtil.getInstance(context).getURL(_url + "fees" + "?at=" + getAccessToken());
        }
        else {
            // offline: replay the last serialized fee payload from disk
            response = PayloadUtil.getInstance(context).deserializeFees().toString();
        }
        try {
            jsonObject = new JSONObject(response);
            parseDynamicFees_bitcoind(jsonObject);
        }
        catch(JSONException je) {
            je.printStackTrace();
            jsonObject = null;
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    return jsonObject;
}
/**
 * Parses a bitcoind-style fee payload keyed by confirmation target ("2", "6",
 * "24" blocks), installs the estimates in FeeUtil, and caches the payload for
 * offline use. The three identical per-key parses are factored into a helper.
 *
 * @param jsonObject fee payload; null yields false
 * @return true if a non-null payload was processed
 * @throws JSONException on malformed payload fields
 */
private synchronized boolean parseDynamicFees_bitcoind(JSONObject jsonObject) throws JSONException {
    if(jsonObject == null) {
        return false;
    }
    // bitcoind targets, fastest first
    List<SuggestedFee> suggestedFees = new ArrayList<SuggestedFee>();
    addFeeForTarget(jsonObject, "2", suggestedFees);
    addFeeForTarget(jsonObject, "6", suggestedFees);
    addFeeForTarget(jsonObject, "24", suggestedFees);
    if(suggestedFees.size() > 0) {
        FeeUtil.getInstance().setEstimatedFees(suggestedFees);
    }
    // cache the payload for offline mode; failures are non-fatal
    try {
        PayloadUtil.getInstance(context).serializeFees(jsonObject);
    }
    catch(IOException | DecryptionException e) {
        ;
    }
    return true;
}

// Appends a SuggestedFee for the given confirmation-target key, if present.
// Fee value is read as sat/b and stored as sat per kB (x1000).
private void addFeeForTarget(JSONObject jsonObject, String key, List<SuggestedFee> fees) throws JSONException {
    if(jsonObject.has(key)) {
        long fee = jsonObject.getInt(key);
        SuggestedFee suggestedFee = new SuggestedFee();
        suggestedFee.setDefaultPerKB(BigInteger.valueOf(fee * 1000L));
        suggestedFee.setStressed(false);
        suggestedFee.setOK(true);
        fees.add(suggestedFee);
    }
}
/**
 * Asynchronously checks that the Samourai API endpoint is reachable and
 * healthy; shows an alert dialog (with a retry action) on failure, or an
 * offline notice when the app is in offline mode.
 */
public synchronized void validateAPIThread() {
    final Handler handler = new Handler();
    new Thread(new Runnable() {
        @Override
        public void run() {
            // a Looper is required because showAlertDialog() builds UI from this thread
            Looper.prepare();
            if(!AppUtil.getInstance(context).isOfflineMode()) {
                try {
                    String response = WebUtil.getInstance(context).getURL(WebUtil.SAMOURAI_API_CHECK);
                    JSONObject jsonObject = new JSONObject(response);
                    // a healthy endpoint is expected to report a "process" field
                    if(!jsonObject.has("process")) {
                        showAlertDialog(context.getString(R.string.api_error), false);
                    }
                }
                catch(Exception e) {
                    // network error or unparseable response
                    showAlertDialog(context.getString(R.string.cannot_reach_api), false);
                }
            } else {
                showAlertDialog(context.getString(R.string.no_internet), false);
            }
            // intentionally empty post back to the caller's thread
            handler.post(new Runnable() {
                @Override
                public void run() {
                    ;
                }
            });
            Looper.loop();
        }
    }).start();
}
/**
 * Shows a modal, non-cancelable alert. Unless forceExit is set, a "retry"
 * button re-runs validateAPIThread(); "exit" finishes the Activity. Any
 * previously shown dialog is dismissed first. No-op if the Activity is
 * already finishing.
 *
 * @param message   text to display
 * @param forceExit when true, only the exit action is offered
 */
private void showAlertDialog(final String message, final boolean forceExit){
    if (!((Activity) context).isFinishing()) {
        if(alertDialog != null)alertDialog.dismiss();
        final AlertDialog.Builder builder = new AlertDialog.Builder(context);
        builder.setMessage(message);
        builder.setCancelable(false);
        if(!forceExit) {
            builder.setPositiveButton(R.string.retry,
                new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface d, int id) {
                        d.dismiss();
                        //Retry
                        validateAPIThread();
                    }
                });
        }
        builder.setNegativeButton(R.string.exit,
            new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface d, int id) {
                    d.dismiss();
                    ((Activity) context).finish();
                }
            });
        alertDialog = builder.create();
        alertDialog.show();
    }
}
/**
 * Entry point for a full wallet sync; delegates to initWalletAmounts().
 */
public synchronized void initWallet() {
    info("APIFactory", "initWallet()");
    initWalletAmounts();
}
/**
 * Full wallet (re)synchronization: registers xpubs server-side (once each,
 * tracked via prefs flags), fetches multiaddr/unspent data for the main
 * accounts, BIP47 addresses and the three Whirlpool accounts, then prunes
 * stale blocked/tagged UTXO bookkeeping. Sets walletInit when done.
 */
private synchronized void initWalletAmounts() {
    APIFactory.getInstance(context).reset();
    List<String> addressStrings = new ArrayList<String>();
    String[] s = null;
    try {
        // --- one-time server-side xpub registration ---
        if(PrefsUtil.getInstance(context).getValue(PrefsUtil.XPUB44REG, false) == false) {
            registerXPUB(HD_WalletFactory.getInstance(context).get().getAccount(0).xpubstr(), 44, null);
        }
        if(PrefsUtil.getInstance(context).getValue(PrefsUtil.XPUB49REG, false) == false) {
            registerXPUB(BIP49Util.getInstance(context).getWallet().getAccount(0).xpubstr(), 49, null);
        }
        if(PrefsUtil.getInstance(context).getValue(PrefsUtil.XPUB84REG, false) == false) {
            registerXPUB(BIP84Util.getInstance(context).getWallet().getAccount(0).xpubstr(), 84, null);
        }
        if(PrefsUtil.getInstance(context).getValue(PrefsUtil.XPUBPREREG, false) == false) {
            registerXPUB(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).xpubstr(), 84, PrefsUtil.XPUBPREREG);
        }
        if(PrefsUtil.getInstance(context).getValue(PrefsUtil.XPUBPOSTREG, false) == false) {
            registerXPUB(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).xpubstr(), 84, PrefsUtil.XPUBPOSTREG);
        }
        if(PrefsUtil.getInstance(context).getValue(PrefsUtil.XPUBBADBANKREG, false) == false) {
            // FIX: was PrefsUtil.XPUBPOSTREG (copy/paste error) — the bad-bank
            // registration must be flagged under its own pref key, otherwise
            // the bad-bank xpub is re-registered on every sync.
            registerXPUB(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).xpubstr(), 84, PrefsUtil.XPUBBADBANKREG);
        }
        xpub_txs.put(HD_WalletFactory.getInstance(context).get().getAccount(0).xpubstr(), new ArrayList<Tx>());
        // --- collect BIP47 addresses (incoming, look-ahead, unspent providers) ---
        addressStrings.addAll(Arrays.asList(BIP47Meta.getInstance().getIncomingAddresses(false)));
        for(String _s : Arrays.asList(BIP47Meta.getInstance().getIncomingLookAhead(context))) {
            if(!addressStrings.contains(_s)) {
                addressStrings.add(_s);
            }
        }
        for(String pcode : BIP47Meta.getInstance().getUnspentProviders()) {
            for(String addr : BIP47Meta.getInstance().getUnspentAddresses(context, pcode)) {
                if(!addressStrings.contains(addr)) {
                    addressStrings.add(addr);
                }
            }
            List<Integer> idxs = BIP47Meta.getInstance().getUnspent(pcode);
            for(Integer idx : idxs) {
                String receivePubKey = BIP47Util.getInstance(context).getReceivePubKey(new PaymentCode(pcode), idx);
                BIP47Meta.getInstance().getIdx4AddrLookup().put(receivePubKey, idx);
                BIP47Meta.getInstance().getPCode4AddrLookup().put(receivePubKey, pcode.toString());
                if(!addressStrings.contains(receivePubKey)) {
                    addressStrings.add(receivePubKey);
                }
            }
        }
        if(addressStrings.size() > 0) {
            s = addressStrings.toArray(new String[0]);
            utxoObj0 = getUnspentOutputs(s);
        }
        debug("APIFactory", "addresses:" + addressStrings.toString());
        // --- main account multiaddr + unspent fetch ---
        HD_Wallet hdw = HD_WalletFactory.getInstance(context).get();
        if(hdw != null && hdw.getXPUBs() != null) {
            String[] all = null;
            if(s != null && s.length > 0) {
                all = new String[hdw.getXPUBs().length + 2 + s.length];
                all[0] = BIP49Util.getInstance(context).getWallet().getAccount(0).xpubstr();
                all[1] = BIP84Util.getInstance(context).getWallet().getAccount(0).xpubstr();
                System.arraycopy(hdw.getXPUBs(), 0, all, 2, hdw.getXPUBs().length);
                System.arraycopy(s, 0, all, hdw.getXPUBs().length + 2, s.length);
            }
            else {
                all = new String[hdw.getXPUBs().length + 2];
                all[0] = BIP49Util.getInstance(context).getWallet().getAccount(0).xpubstr();
                all[1] = BIP84Util.getInstance(context).getWallet().getAccount(0).xpubstr();
                System.arraycopy(hdw.getXPUBs(), 0, all, 2, hdw.getXPUBs().length);
            }
            APIFactory.getInstance(context).getXPUB(all, true);
            String[] xs = new String[3];
            xs[0] = HD_WalletFactory.getInstance(context).get().getAccount(0).xpubstr();
            xs[1] = BIP49Util.getInstance(context).getWallet().getAccount(0).xpubstr();
            xs[2] = BIP84Util.getInstance(context).getWallet().getAccount(0).xpubstr();
            utxoObj1 = getUnspentOutputs(xs);
            getDynamicFees();
        }
        // --- cache UTXO payloads to disk (best effort) ---
        try {
            List<JSONObject> utxoObjs = new ArrayList<JSONObject>();
            if(utxoObj0 != null) {
                utxoObjs.add(utxoObj0);
            }
            if(utxoObj1 != null) {
                utxoObjs.add(utxoObj1);
            }
            PayloadUtil.getInstance(context).serializeUTXO(utxoObjs);
        }
        catch(IOException | DecryptionException e) {
            // best-effort cache write; non-fatal
        }
        // --- prune blocked/not-dusted entries no longer backed by a live outpoint ---
        List<String> seenOutputs = new ArrayList<String>();
        List<UTXO> _utxos = getUtxos(false);
        for(UTXO _u : _utxos) {
            for(MyTransactionOutPoint _o : _u.getOutpoints()) {
                seenOutputs.add(_o.getTxHash().toString() + "-" + _o.getTxOutputN());
            }
        }
        // iterate over snapshots: the loop bodies remove from the underlying
        // collections, which would risk ConcurrentModificationException if the
        // getters return live views
        for(String _s : new ArrayList<String>(BlockedUTXO.getInstance().getNotDustedUTXO())) {
            if(!seenOutputs.contains(_s)) {
                BlockedUTXO.getInstance().removeNotDusted(_s);
            }
        }
        for(String _s : new ArrayList<String>(BlockedUTXO.getInstance().getBlockedUTXO().keySet())) {
            if(!seenOutputs.contains(_s)) {
                BlockedUTXO.getInstance().remove(_s);
            }
        }
        // --- Whirlpool accounts: fetch + parse multiaddr and unspent data ---
        String strPreMix = BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).xpubstr();
        String strPostMix = BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).xpubstr();
        String strBadBank = BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).xpubstr();
        JSONObject preMultiAddrObj = getRawXPUB(new String[] { strPreMix });
        JSONObject preUnspentObj = getRawUnspentOutputs(new String[] { strPreMix });
        debug("APIFactory", "pre-mix multi:" + preMultiAddrObj.toString(2));
        debug("APIFactory", "pre-mix unspent:" + preUnspentObj.toString());
        boolean parsedPreMultiAddr = parseMixXPUB(preMultiAddrObj);
        boolean parsedPreUnspent = parseMixUnspentOutputs(preUnspentObj.toString());
        JSONObject postMultiAddrObj = getRawXPUB(new String[] { strPostMix });
        JSONObject postUnspentObj = getRawUnspentOutputs(new String[] { strPostMix });
        debug("APIFactory", "post-mix multi:" + postMultiAddrObj.toString());
        debug("APIFactory", "post-mix unspent:" + postUnspentObj.toString());
        boolean parsedPostMultiAddr = parseMixXPUB(postMultiAddrObj);
        boolean parsedPostUnspent = parseMixUnspentOutputs(postUnspentObj.toString());
        JSONObject badbankMultiAddrObj = getRawXPUB(new String[] { strBadBank });
        JSONObject badbankUnspentObj = getRawUnspentOutputs(new String[] { strBadBank });
        debug("APIFactory", "bad bank multi:" + badbankMultiAddrObj.toString());
        debug("APIFactory", "bad bank unspent:" + badbankUnspentObj.toString());
        boolean parsedBadBankMultiAddr = parseMixXPUB(badbankMultiAddrObj);
        boolean parsedBadBanktUnspent = parseMixUnspentOutputs(badbankUnspentObj.toString());
        // --- prune stale tags/notes against post-mix and bad-bank outpoints ---
        List<String> seenOutputsPostMix = new ArrayList<String>();
        List<UTXO> _utxosPostMix = getUtxosPostMix(false);
        for(UTXO _u : _utxosPostMix) {
            for(MyTransactionOutPoint _o : _u.getOutpoints()) {
                seenOutputsPostMix.add(_o.getTxHash().toString() + "-" + _o.getTxOutputN());
            }
        }
        for(String _s : new ArrayList<String>(UTXOUtil.getInstance().getTags().keySet())) {
            if(!seenOutputsPostMix.contains(_s) && !seenOutputs.contains(_s)) {
                UTXOUtil.getInstance().remove(_s);
                UTXOUtil.getInstance().removeNote(_s);
            }
        }
        List<String> seenOutputsBadBank = new ArrayList<String>();
        List<UTXO> _utxosBadBank = getUtxosBadBank(false);
        for(UTXO _u : _utxosBadBank) {
            for(MyTransactionOutPoint _o : _u.getOutpoints()) {
                seenOutputsBadBank.add(_o.getTxHash().toString() + "-" + _o.getTxOutputN());
            }
        }
        for(String _s : new ArrayList<String>(UTXOUtil.getInstance().getTags().keySet())) {
            if(!seenOutputsBadBank.contains(_s) && !seenOutputs.contains(_s)) {
                UTXOUtil.getInstance().remove(_s);
            }
        }
        for(String _s : new ArrayList<String>(BlockedUTXO.getInstance().getBlockedUTXOPostMix().keySet())) {
            debug("APIFactory", "blocked post-mix:" + _s);
            if(!seenOutputsPostMix.contains(_s)) {
                BlockedUTXO.getInstance().removePostMix(_s);
                debug("APIFactory", "blocked removed:" + _s);
            }
        }
        // refresh Whirlpool utxos
        WhirlpoolWallet whirlpoolWallet = AndroidWhirlpoolWalletService.getInstance(context).getWhirlpoolWalletOrNull();
        if (whirlpoolWallet != null) {
            whirlpoolWallet.getUtxoSupplier().expire();
        }
    }
    catch (IndexOutOfBoundsException ioobe) {
        ioobe.printStackTrace();
    }
    catch (Exception e) {
        e.printStackTrace();
    }
    walletInit = true;
}
/**
 * Syncs incoming BIP47 (payment-code) addresses: updates unspent membership
 * and the highest incoming index per payment code from a multiaddr lookup.
 *
 * @param addresses BIP47-derived addresses/pubkeys to query
 * @return number of queried addresses that have at least one transaction
 */
public synchronized int syncBIP47Incoming(String[] addresses) {
    JSONObject jsonObject = getXPUB(addresses, false);
    // FIX: concatenate instead of calling toString() — getXPUB() may return
    // null, and the explicit toString() threw NPE before the null check below.
    debug("APIFactory", "sync BIP47 incoming:" + jsonObject);
    int ret = 0;
    try {
        if(jsonObject != null && jsonObject.has("addresses")) {
            // counts zero-balance sightings per pubkey; on the third sighting
            // the address is dropped from the unspent set
            HashMap<String,Integer> pubkeys = new HashMap<String,Integer>();
            JSONArray addressArray = (JSONArray)jsonObject.get("addresses");
            JSONObject addrObj = null;
            for(int i = 0; i < addressArray.length(); i++) {
                addrObj = (JSONObject)addressArray.get(i);
                long amount = 0L;
                int nbTx = 0;
                String addr = null;
                String pcode = null;
                int idx = -1;
                if(addrObj.has("address")) {
                    if(addrObj.has("pubkey")) {
                        // pubkey entries: resolve metadata via the pubkey, and
                        // teach the lookup maps the derived address as well
                        addr = (String)addrObj.get("pubkey");
                        pcode = BIP47Meta.getInstance().getPCode4Addr(addr);
                        idx = BIP47Meta.getInstance().getIdx4Addr(addr);
                        BIP47Meta.getInstance().getIdx4AddrLookup().put(addrObj.getString("address"), idx);
                        BIP47Meta.getInstance().getPCode4AddrLookup().put(addrObj.getString("address"), pcode);
                    }
                    else {
                        addr = (String)addrObj.get("address");
                        pcode = BIP47Meta.getInstance().getPCode4Addr(addr);
                        idx = BIP47Meta.getInstance().getIdx4Addr(addr);
                    }
                    if(addrObj.has("final_balance")) {
                        amount = addrObj.getLong("final_balance");
                        if(amount > 0L) {
                            BIP47Meta.getInstance().addUnspent(pcode, idx);
                            info("APIFactory", "BIP47 incoming amount:" + idx + ", " + addr + ", " + amount);
                        }
                        else {
                            if(addrObj.has("pubkey")) {
                                String pubkey = addrObj.getString("pubkey");
                                if(pubkeys.containsKey(pubkey)) {
                                    // FIX: the original incremented twice (count++
                                    // then put(count + 1)), so the counter went
                                    // 1 -> 3 -> 4 and the count == 3 removal
                                    // below could never fire.
                                    int count = pubkeys.get(pubkey) + 1;
                                    if(count == 3) {
                                        BIP47Meta.getInstance().removeUnspent(pcode, Integer.valueOf(idx));
                                        info("APIFactory", "BIP47 remove unspent:" + pcode + ":" + idx);
                                    }
                                    else {
                                        pubkeys.put(pubkey, count);
                                    }
                                }
                                else {
                                    pubkeys.put(pubkey, 1);
                                }
                            }
                            else {
                                BIP47Meta.getInstance().removeUnspent(pcode, Integer.valueOf(idx));
                            }
                        }
                    }
                    if(addrObj.has("n_tx")) {
                        nbTx = addrObj.getInt("n_tx");
                        if(nbTx > 0) {
                            // advance the incoming index watermark for this pcode
                            if(idx > BIP47Meta.getInstance().getIncomingIdx(pcode)) {
                                BIP47Meta.getInstance().setIncomingIdx(pcode, idx);
                            }
                            info("APIFactory", "sync receive idx:" + idx + ", " + addr);
                            ret++;
                        }
                    }
                }
            }
        }
    }
    catch(Exception e) {
        e.printStackTrace();
    }
    return ret;
}
/**
 * Syncs outgoing BIP47 (payment-code) addresses: advances the outgoing
 * index per payment code past any address that has transaction history.
 *
 * @param addresses BIP47-derived addresses/pubkeys to query
 * @return number of queried addresses that have at least one transaction
 */
public synchronized int syncBIP47Outgoing(String[] addresses) {
    JSONObject jsonObject = getXPUB(addresses, false);
    int ret = 0;
    try {
        if(jsonObject != null && jsonObject.has("addresses")) {
            JSONArray addressArray = (JSONArray)jsonObject.get("addresses");
            JSONObject addrObj = null;
            for(int i = 0; i < addressArray.length(); i++) {
                addrObj = (JSONObject)addressArray.get(i);
                int nbTx = 0;
                String addr = null;
                String pcode = null;
                int idx = -1;
                info("APIFactory", "address object:" + addrObj.toString());
                if(addrObj.has("pubkey")) {
                    // pubkey entries: resolve metadata via the pubkey, and
                    // register the derived address in the lookup maps
                    addr = (String)addrObj.get("pubkey");
                    pcode = BIP47Meta.getInstance().getPCode4Addr(addr);
                    idx = BIP47Meta.getInstance().getIdx4Addr(addr);
                    BIP47Meta.getInstance().getIdx4AddrLookup().put(addrObj.getString("address"), idx);
                    BIP47Meta.getInstance().getPCode4AddrLookup().put(addrObj.getString("address"), pcode);
                }
                else {
                    addr = (String)addrObj.get("address");
                    pcode = BIP47Meta.getInstance().getPCode4Addr(addr);
                    idx = BIP47Meta.getInstance().getIdx4Addr(addr);
                }
                if(addrObj.has("n_tx")) {
                    nbTx = addrObj.getInt("n_tx");
                    if(nbTx > 0) {
                        // used address: next outgoing index must be past it
                        if(idx >= BIP47Meta.getInstance().getOutgoingIdx(pcode)) {
                            info("APIFactory", "sync send idx:" + idx + ", " + addr);
                            BIP47Meta.getInstance().setOutgoingIdx(pcode, idx + 1);
                        }
                        ret++;
                    }
                }
            }
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    return ret;
}
/**
 * Spendable deposit-account balance: total balance minus the value of
 * blocked account-0 UTXOs, clamped at zero.
 */
public long getXpubBalance() {
    long spendable = xpub_balance - BlockedUTXO.getInstance().getTotalValueBlocked0();
    return Math.max(spendable, 0L);
}
/**
 * Updates the deposit-account balance and notifies the balance observer.
 */
public void setXpubBalance(long value) {
    xpub_balance = value;
    walletBalanceObserver.onNext(value);
}
/** Pre-mix account balance (no blocked-value deduction, unlike the other accounts). */
public long getXpubPreMixBalance() {
    return xpub_premix_balance;
}
/**
 * Spendable post-mix balance: total minus the value of blocked post-mix
 * UTXOs, clamped at zero.
 */
public long getXpubPostMixBalance() {
    long spendable = xpub_postmix_balance - BlockedUTXO.getInstance().getTotalValueBlockedPostMix();
    return Math.max(spendable, 0L);
}
/**
 * Spendable bad-bank balance: total minus the value of blocked bad-bank
 * UTXOs, clamped at zero.
 */
public long getXpubBadBankBalance() {
    long spendable = xpub_badbank_balance - BlockedUTXO.getInstance().getTotalValueBlockedBadBank();
    return Math.max(spendable, 0L);
}
/** Updates the post-mix account balance. */
public void setXpubPostMixBalance(long value) {
    xpub_postmix_balance = value;
}
/** Updates the bad-bank account balance. */
public void setXpubBadBankBalance(long value) {
    xpub_badbank_balance = value;
}
/** Live (mutable) map of per-xpub balances — presumably keyed by xpub string; callers share state. */
public HashMap<String,Long> getXpubAmounts() {
    return xpub_amounts;
}
/** Live (mutable) map of transactions per xpub for the main accounts. */
public HashMap<String,List<Tx>> getXpubTxs() {
    return xpub_txs;
}
/** Live (mutable) map of pre-mix account transactions per xpub. */
public HashMap<String,List<Tx>> getPremixXpubTxs() {
    return premix_txs;
}
/** Live (mutable) map of address -> derivation path for known unspent outputs. */
public HashMap<String, String> getUnspentPaths() {
    return unspentPaths;
}
/** Live (mutable) map of address -> account index for known unspent outputs. */
public HashMap<String, Integer> getUnspentAccounts() {
    return unspentAccounts;
}
/** Live (mutable) map of BIP49 (P2SH-P2WPKH) unspent addresses. */
public HashMap<String, Integer> getUnspentBIP49() {
    return unspentBIP49;
}
/** Live (mutable) map of BIP84 (native segwit) unspent addresses. */
public HashMap<String, Integer> getUnspentBIP84() {
    return unspentBIP84;
}
/**
 * Returns the deposit-account UTXO set. When filter is true, outpoints
 * present in the blocked-UTXO list are excluded (UTXOs left with no
 * outpoints are dropped entirely).
 *
 * @param filter true to exclude blocked outpoints
 * @return a new list; when filter is false it shares the cached UTXO objects
 */
public List<UTXO> getUtxos(boolean filter) {
    // FIX: removed a dead loop that "summed" outpoint values with its entire
    // body commented out — amount was always 0, and the subsequent debug line
    // unconditionally logged that constant.
    List<UTXO> unspents = new ArrayList<UTXO>();
    if(filter) {
        for(String key : utxos.keySet()) {
            UTXO item = utxos.get(key);
            UTXO u = new UTXO();
            // path copied once up front (the original redundantly re-set it
            // on every retained outpoint)
            u.setPath(item.getPath());
            for(MyTransactionOutPoint out : item.getOutpoints()) {
                if(!BlockedUTXO.getInstance().contains(out.getTxHash().toString(), out.getTxOutputN())) {
                    u.getOutpoints().add(out);
                }
            }
            if(u.getOutpoints().size() > 0) {
                unspents.add(u);
            }
        }
    }
    else {
        unspents.addAll(utxos.values());
    }
    return unspents;
}
/**
 * Returns the deposit-account UTXO set, optionally hydrating the in-memory
 * cache from the serialized local payload when it is empty.
 *
 * @param filter        true to exclude blocked outpoints
 * @param useLocalCache true to deserialize the on-disk UTXO cache when the
 *                      in-memory set is empty
 * @return a new list; when filter is false it shares the cached UTXO objects
 */
public List<UTXO> getUtxosWithLocalCache(boolean filter,boolean useLocalCache) {
    List<UTXO> unspents = new ArrayList<UTXO>();
    if(utxos.isEmpty() && useLocalCache){
        try {
            String response = PayloadUtil.getInstance(context).deserializeUTXO().toString();
            parseUnspentOutputs(response);
        }
        catch(IOException | JSONException e) {
            // idiom: multi-catch replaces two identical catch blocks
            e.printStackTrace();
        }
    }
    if(filter) {
        for(String key : utxos.keySet()) {
            UTXO item = utxos.get(key);
            UTXO u = new UTXO();
            u.setPath(item.getPath());
            for(MyTransactionOutPoint out : item.getOutpoints()) {
                if(!BlockedUTXO.getInstance().contains(out.getTxHash().toString(), out.getTxOutputN())) {
                    u.getOutpoints().add(out);
                }
            }
            if(u.getOutpoints().size() > 0) {
                unspents.add(u);
            }
        }
    }
    else {
        unspents.addAll(utxos.values());
    }
    return unspents;
}
/**
 * Returns the post-mix UTXO set. When filter is true, outpoints present in
 * the post-mix blocked list are excluded (empty UTXOs are dropped).
 */
public List<UTXO> getUtxosPostMix(boolean filter) {
    List<UTXO> result = new ArrayList<UTXO>();
    if(!filter) {
        result.addAll(utxosPostMix.values());
        return result;
    }
    for(UTXO source : utxosPostMix.values()) {
        UTXO copy = new UTXO();
        copy.setPath(source.getPath());
        for(MyTransactionOutPoint outpoint : source.getOutpoints()) {
            boolean blocked = BlockedUTXO.getInstance().containsPostMix(outpoint.getTxHash().toString(), outpoint.getTxOutputN());
            if(!blocked) {
                copy.getOutpoints().add(outpoint);
            }
        }
        if(!copy.getOutpoints().isEmpty()) {
            result.add(copy);
        }
    }
    return result;
}
/**
 * Returns the pre-mix UTXO set. When filter is true, blocked outpoints are
 * excluded (empty UTXOs are dropped).
 */
public List<UTXO> getUtxosPreMix(boolean filter) {
    List<UTXO> unspents = new ArrayList<UTXO>();
    if(filter) {
        for(String key : utxosPreMix.keySet()) {
            UTXO item = utxosPreMix.get(key);
            UTXO u = new UTXO();
            u.setPath(item.getPath());
            for(MyTransactionOutPoint out : item.getOutpoints()) {
                // NOTE(review): this checks containsPostMix() even though these
                // are pre-mix outpoints — possibly a copy/paste from
                // getUtxosPostMix(); confirm whether BlockedUTXO intentionally
                // tracks pre-mix blocks in the post-mix set.
                if(!BlockedUTXO.getInstance().containsPostMix(out.getTxHash().toString(), out.getTxOutputN())) {
                    u.getOutpoints().add(out);
                }
            }
            if(u.getOutpoints().size() > 0) {
                unspents.add(u);
            }
        }
    }
    else {
        unspents.addAll(utxosPreMix.values());
    }
    return unspents;
}
/**
 * Returns the bad-bank UTXO set. When filter is true, outpoints present in
 * the bad-bank blocked list are excluded (empty UTXOs are dropped).
 */
public List<UTXO> getUtxosBadBank(boolean filter) {
    List<UTXO> result = new ArrayList<UTXO>();
    if(!filter) {
        result.addAll(utxosBadBank.values());
        return result;
    }
    for(UTXO source : utxosBadBank.values()) {
        UTXO copy = new UTXO();
        copy.setPath(source.getPath());
        for(MyTransactionOutPoint outpoint : source.getOutpoints()) {
            boolean blocked = BlockedUTXO.getInstance().containsBadBank(outpoint.getTxHash().toString(), outpoint.getTxOutputN());
            if(!blocked) {
                copy.getOutpoints().add(outpoint);
            }
        }
        if(!copy.getOutpoints().isEmpty()) {
            result.add(copy);
        }
    }
    return result;
}
/**
 * Flattens all per-xpub transaction lists into one list, sorted
 * most-recent-first.
 */
public synchronized List<Tx> getAllXpubTxs() {
    List<Tx> all = new ArrayList<Tx>();
    for(List<Tx> perXpub : xpub_txs.values()) {
        all.addAll(perXpub);
    }
    Collections.sort(all, new TxMostRecentDateComparator());
    return all;
}
/**
 * Flattens all post-mix per-xpub transaction lists into one list, sorted
 * most-recent-first.
 */
public synchronized List<Tx> getAllPostMixTxs() {
    List<Tx> all = new ArrayList<Tx>();
    for(List<Tx> perXpub : postmix_txs.values()) {
        all.addAll(perXpub);
    }
    Collections.sort(all, new TxMostRecentDateComparator());
    return all;
}
/**
 * Fetches the unspent outputs of a single (usually external) address for a
 * sweep operation, over clearnet POST or Tor POST depending on settings.
 *
 * @param address address to query
 * @return a UTXO aggregating all outpoints found, or null on failure/none
 */
public synchronized UTXO getUnspentOutputsForSweep(String address) {
    String _url = WebUtil.getAPIUrl(context);
    try {
        String response = null;
        if(!TorManager.getInstance(context).isRequired()) {
            StringBuilder args = new StringBuilder();
            args.append("active=");
            args.append(address);
            args.append("&at=");
            args.append(getAccessToken());
            response = WebUtil.getInstance(context).postURL(_url + "unspent?", args.toString());
        }
        else {
            // Tor path takes the parameters as a map rather than a query string
            HashMap<String,String> args = new HashMap<String,String>();
            args.put("active", address);
            args.put("at", getAccessToken());
            response = WebUtil.getInstance(context).tor_postURL(_url + "unspent", args);
        }
        return parseUnspentOutputsForSweep(response);
    }
    catch(Exception e) {
        e.printStackTrace();
    }
    return null;
}
/**
 * Parses an "unspent_outputs" JSON response into a single aggregate UTXO
 * for sweeping.
 *
 * @param unspents raw JSON response; may be null
 * @return UTXO holding every parseable outpoint, or null if none
 */
private synchronized UTXO parseUnspentOutputsForSweep(String unspents) {
    UTXO utxo = null;
    if(unspents != null) {
        try {
            JSONObject jsonObj = new JSONObject(unspents);
            if(jsonObj == null || !jsonObj.has("unspent_outputs")) {
                return null;
            }
            JSONArray utxoArray = jsonObj.getJSONArray("unspent_outputs");
            if(utxoArray == null || utxoArray.length() == 0) {
                return null;
            }
            for (int i = 0; i < utxoArray.length(); i++) {
                JSONObject outDict = utxoArray.getJSONObject(i);
                // tx_hash is hex-encoded; value is in satoshis
                byte[] hashBytes = Hex.decode((String)outDict.get("tx_hash"));
                Sha256Hash txHash = Sha256Hash.wrap(hashBytes);
                int txOutputN = ((Number)outDict.get("tx_output_n")).intValue();
                BigInteger value = BigInteger.valueOf(((Number)outDict.get("value")).longValue());
                String script = (String)outDict.get("script");
                byte[] scriptBytes = Hex.decode(script);
                int confirmations = ((Number)outDict.get("confirmations")).intValue();
                try {
                    String address = null;
                    // derive the address from the output script: bech32 directly,
                    // otherwise via bitcoinj Script on the current network
                    if(Bech32Util.getInstance().isBech32Script(script)) {
                        address = Bech32Util.getInstance().getAddressFromScript(script);
                        debug("address parsed:", address);
                    }
                    else {
                        address = new Script(scriptBytes).getToAddress(SamouraiWallet.getInstance().getCurrentNetworkParams()).toString();
                    }
                    // Construct the output
                    MyTransactionOutPoint outPoint = new MyTransactionOutPoint(txHash, txOutputN, value, scriptBytes, address);
                    outPoint.setConfirmations(confirmations);
                    if(utxo == null) {
                        utxo = new UTXO();
                    }
                    utxo.getOutpoints().add(outPoint);
                }
                catch(Exception e) {
                    // deliberate best-effort: a single malformed output is
                    // skipped rather than failing the whole sweep
                    ;
                }
            }
        }
        catch(JSONException je) {
            // malformed response; fall through and return whatever was parsed (null)
            ;
        }
    }
    return utxo;
}
/**
 * Orders transactions most-recent-first (descending timestamp).
 */
public static class TxMostRecentDateComparator implements Comparator<Tx> {
    @Override
    public int compare(Tx t1, Tx t2) {
        // Long.compare with swapped operands yields the same descending
        // ordering as the original BEFORE/EQUAL/AFTER branching, without the
        // boilerplate (and without overflow risk a subtraction would carry).
        return Long.compare(t2.getTS(), t1.getTS());
    }
}
// used for the Whirlpool accounts (pre-mix / post-mix / bad bank)
/**
 * Fetches the raw multiaddr response for the given xpubs, choosing the
 * source by mode: offline cache, clearnet POST, or Tor POST.
 *
 * @param xpubs xpub strings, joined with "|" in the request
 * @return parsed response, or null on any fetch/parse failure
 */
private synchronized JSONObject getRawXPUB(String[] xpubs) {
    String _url = WebUtil.getAPIUrl(context);
    JSONObject jsonObject = null;
    try {
        String response = null;
        if(AppUtil.getInstance(context).isOfflineMode()) {
            // offline: serve the locally cached post multiaddr payload
            response = PayloadUtil.getInstance(context).deserializeMultiAddrPost().toString();
        }
        else if(!TorManager.getInstance(context).isRequired()) {
            // use POST
            StringBuilder args = new StringBuilder();
            args.append("active=");
            // "|" must be URL-encoded on the clearnet query-string path
            args.append(StringUtils.join(xpubs, URLEncoder.encode("|", "UTF-8")));
            info("APIFactory", "XPUB:" + args.toString());
            args.append("&at=");
            args.append(getAccessToken());
            response = WebUtil.getInstance(context).postURL(_url + "multiaddr?", args.toString());
            info("APIFactory", "XPUB response:" + response);
        }
        else {
            // Tor path takes the parameters as a map (no manual encoding)
            HashMap<String,String> args = new HashMap<String,String>();
            args.put("active", StringUtils.join(xpubs, "|"));
            info("APIFactory", "XPUB:" + args.toString());
            args.put("at", getAccessToken());
            response = WebUtil.getInstance(context).tor_postURL(_url + "multiaddr", args);
            info("APIFactory", "XPUB response:" + response);
        }
        try {
            jsonObject = new JSONObject(response);
            return jsonObject;
        }
        catch(JSONException je) {
            je.printStackTrace();
            jsonObject = null;
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    return jsonObject;
}
/**
 * Fetches the raw unspent-outputs response for the given xpubs, choosing
 * the source by mode: offline cache, clearnet POST, or Tor POST.
 *
 * @param xpubs xpub strings, joined with "|" in the request
 * @return parsed response, or null on any fetch/parse failure
 */
public synchronized JSONObject getRawUnspentOutputs(String[] xpubs) {
    String _url = WebUtil.getAPIUrl(context);
    JSONObject jsonObject = null;
    String response = null;
    try {
        if(AppUtil.getInstance(context).isOfflineMode()) {
            response = PayloadUtil.getInstance(context).deserializeUTXOPost().toString();
        }
        else if(!TorManager.getInstance(context).isRequired()) {
            StringBuilder args = new StringBuilder();
            args.append("active=");
            args.append(StringUtils.join(xpubs, URLEncoder.encode("|", "UTF-8")));
            debug("APIFactory", "UTXO args:" + args.toString());
            args.append("&at=");
            args.append(getAccessToken());
            response = WebUtil.getInstance(context).postURL(_url + "unspent?", args.toString());
            debug("APIFactory", "UTXO:" + response);
        }
        else {
            HashMap<String,String> args = new HashMap<String,String>();
            args.put("active", StringUtils.join(xpubs, "|"));
            args.put("at", getAccessToken());
            response = WebUtil.getInstance(context).tor_postURL(_url + "unspent", args);
        }
    }
    catch(Exception e) {
        jsonObject = null;
        e.printStackTrace();
    }
    // FIX: parse whatever response was obtained. Previously parsing was gated
    // on !isOfflineMode(), so the offline branch fetched the cached payload
    // above and then discarded it, always returning null (getRawXPUB parses
    // its response unconditionally — this makes the two consistent).
    if(response != null) {
        try {
            jsonObject = new JSONObject(response);
        }
        catch(JSONException je) {
            // unparseable response — return null
        }
    }
    return jsonObject;
}
/**
 * Parses a multiaddr response for one of the Whirlpool accounts (pre-mix,
 * post-mix or bad bank): applies the wallet-level balance to whichever
 * account's xpub/zpub appears in "addresses", updates that account's
 * receive/change indexes, records the chain tip, collects the account's
 * transactions, then caches the payload to disk under the matching slot.
 *
 * @param jsonObject multiaddr response; may be null
 * @return true if a non-null object was processed, false otherwise
 * @throws JSONException on malformed fields
 */
private synchronized boolean parseMixXPUB(JSONObject jsonObject) throws JSONException {
    final int PRE_MIX = 0;
    final int POST_MIX = 1;
    final int BAD_BANK = 2;
    // defaults to PRE_MIX (0) when no address matches; drives the
    // serialization branch at the bottom
    int account_type = 0;
    if(jsonObject != null) {
        // wallet-level final_balance, applied to the matching account below
        long xpub_mix_balance = 0;
        if(jsonObject.has("wallet")) {
            JSONObject walletObj = (JSONObject)jsonObject.get("wallet");
            if(walletObj.has("final_balance")) {
                xpub_mix_balance = walletObj.getLong("final_balance");
            }
        }
        // record the chain tip for confirmation counting
        if(jsonObject.has("info")) {
            JSONObject infoObj = (JSONObject)jsonObject.get("info");
            if(infoObj.has("latest_block")) {
                JSONObject blockObj = (JSONObject)infoObj.get("latest_block");
                if(blockObj.has("height")) {
                    latest_block_height = blockObj.getLong("height");
                }
                if(blockObj.has("hash")) {
                    latest_block_hash = blockObj.getString("hash");
                }
            }
        }
        // match the reported xpub/zpub against the three Whirlpool accounts
        if(jsonObject.has("addresses")) {
            JSONArray addressesArray = (JSONArray)jsonObject.get("addresses");
            JSONObject addrObj = null;
            for(int i = 0; i < addressesArray.length(); i++) {
                addrObj = (JSONObject)addressesArray.get(i);
                if(addrObj != null && addrObj.has("final_balance") && addrObj.has("address")) {
                    if(FormatsUtil.getInstance().isValidXpub((String)addrObj.get("address"))) {
                        if(addrObj.getString("address").equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).xpubstr()) ||
                            addrObj.getString("address").equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).zpubstr())) {
                            xpub_postmix_balance = xpub_mix_balance;
                            account_type = POST_MIX;
                            // sync receive (chain 0) and change (chain 1) indexes
                            AddressFactory.getInstance().setHighestPostReceiveIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            AddressFactory.getInstance().setHighestPostChangeIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                            BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).getChain(0).setAddrIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).getChain(1).setAddrIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                        }
                        else if(addrObj.getString("address").equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).xpubstr()) ||
                            addrObj.getString("address").equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).zpubstr())) {
                            xpub_premix_balance = xpub_mix_balance;
                            account_type = PRE_MIX;
                            AddressFactory.getInstance().setHighestPreReceiveIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            AddressFactory.getInstance().setHighestPreChangeIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                            BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).getChain(0).setAddrIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).getChain(1).setAddrIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                        }
                        else if(addrObj.getString("address").equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).xpubstr()) ||
                            addrObj.getString("address").equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).zpubstr())) {
                            xpub_badbank_balance = xpub_mix_balance;
                            account_type = BAD_BANK;
                            AddressFactory.getInstance().setHighestBadBankReceiveIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            AddressFactory.getInstance().setHighestBadBankChangeIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                            BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).getChain(0).setAddrIdx(addrObj.has("account_index") ? addrObj.getInt("account_index") : 0);
                            BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).getChain(1).setAddrIdx(addrObj.has("change_index") ? addrObj.getInt("change_index") : 0);
                        }
                        else {
                            // xpub not one of ours — ignore
                            ;
                        }
                    }
                }
            }
        }
        // collect this account's transactions, routed by owning xpub
        if(jsonObject.has("txs")) {
            JSONArray txArray = (JSONArray)jsonObject.get("txs");
            JSONObject txObj = null;
            for(int i = 0; i < txArray.length(); i++) {
                txObj = (JSONObject)txArray.get(i);
                long height = 0L;
                long amount = 0L;
                long ts = 0L;
                String hash = null;
                String addr = null;
                String _addr = null;
                if(txObj.has("block_height")) {
                    height = txObj.getLong("block_height");
                }
                else {
                    height = -1L; // 0 confirmations
                }
                if(txObj.has("hash")) {
                    hash = (String)txObj.get("hash");
                }
                if(txObj.has("result")) {
                    amount = txObj.getLong("result");
                }
                if(txObj.has("time")) {
                    ts = txObj.getLong("time");
                }
                // determine the owning xpub ("m") from inputs, else fall back
                // to a plain address (_addr)
                if(txObj.has("inputs")) {
                    JSONArray inputArray = (JSONArray)txObj.get("inputs");
                    JSONObject inputObj = null;
                    for(int j = 0; j < inputArray.length(); j++) {
                        inputObj = (JSONObject)inputArray.get(j);
                        if(inputObj.has("prev_out")) {
                            JSONObject prevOutObj = (JSONObject)inputObj.get("prev_out");
                            if(prevOutObj.has("xpub")) {
                                JSONObject xpubObj = (JSONObject)prevOutObj.get("xpub");
                                addr = (String)xpubObj.get("m");
                            }
                            else if(prevOutObj.has("addr") && BIP47Meta.getInstance().getPCode4Addr((String)prevOutObj.get("addr")) != null) {
                                _addr = (String)prevOutObj.get("addr");
                            }
                            else {
                                _addr = (String)prevOutObj.get("addr");
                            }
                        }
                    }
                }
                if(txObj.has("out")) {
                    JSONArray outArray = (JSONArray)txObj.get("out");
                    JSONObject outObj = null;
                    for(int j = 0; j < outArray.length(); j++) {
                        outObj = (JSONObject)outArray.get(j);
                        if(outObj.has("xpub")) {
                            JSONObject xpubObj = (JSONObject)outObj.get("xpub");
                            addr = (String)xpubObj.get("m");
                        }
                        else {
                            _addr = (String)outObj.get("addr");
                        }
                    }
                }
                if (addr != null || _addr != null) {
                    // confirmations computed from the chain tip captured above
                    Tx tx = new Tx(hash, addr, amount, ts, (latest_block_height > 0L && height > 0L) ? (latest_block_height - height) + 1 : 0);
                    if (addr == null) {
                        addr = _addr;
                    }
                    if (addr.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).xpubstr()) ||
                        addr.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).zpubstr())) {
                        if (!postmix_txs.containsKey(addr)) {
                            postmix_txs.put(addr, new ArrayList<Tx>());
                        }
                        if (FormatsUtil.getInstance().isValidXpub(addr)) {
                            postmix_txs.get(addr).add(tx);
                        } else {
                            // non-xpub key: file under the primary account's xpub
                            postmix_txs.get(AddressFactory.getInstance().account2xpub().get(0)).add(tx);
                        }
                    } else if (addr.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).xpubstr()) ||
                        addr.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).zpubstr())) {
                        if (!premix_txs.containsKey(addr)) {
                            premix_txs.put(addr, new ArrayList<Tx>());
                        }
                        if (FormatsUtil.getInstance().isValidXpub(addr)) {
                            premix_txs.get(addr).add(tx);
                        } else {
                            premix_txs.get(AddressFactory.getInstance().account2xpub().get(0)).add(tx);
                        }
                    } else if (addr.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).xpubstr()) ||
                        addr.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).zpubstr())) {
                        if (!badbank_txs.containsKey(addr)) {
                            badbank_txs.put(addr, new ArrayList<Tx>());
                        }
                        if (FormatsUtil.getInstance().isValidXpub(addr)) {
                            badbank_txs.get(addr).add(tx);
                        } else {
                            badbank_txs.get(AddressFactory.getInstance().account2xpub().get(0)).add(tx);
                        }
                    }
                }
            }
        }
        // cache the payload under the slot matching the detected account
        try {
            if(account_type == POST_MIX) {
                PayloadUtil.getInstance(context).serializeMultiAddrPost(jsonObject);
            }
            else if(account_type == PRE_MIX) {
                PayloadUtil.getInstance(context).serializeMultiAddrPre(jsonObject);
            }
            else {
                PayloadUtil.getInstance(context).serializeMultiAddrBadBank(jsonObject);
            }
        }
        catch(IOException | DecryptionException e) {
            // best-effort cache write; non-fatal
            ;
        }
        return true;
    }
    return false;
}
/**
 * Parses a JSON payload of unspent outputs belonging to the Whirlpool mix
 * accounts (premix / postmix / bad bank), registers each output with the
 * matching in-memory UTXO map and the UTXOFactory, then serializes the raw
 * payload to disk for the account that was matched.
 *
 * NOTE(review): account_type is recomputed for every output in the loop, but
 * only the value left by the LAST output decides which payload file is
 * serialized after the loop. Confirm a payload never mixes accounts.
 * NOTE(review): account_type defaults to PRE_MIX, so a payload whose outputs
 * match no known xpub is still serialized as premix — confirm intended.
 *
 * @param unspents JSON string expected to contain an "unspent_outputs" array
 * @return true if the payload was parsed and serialized; false on null input,
 *         a missing/empty "unspent_outputs" array, or a parse failure
 */
private synchronized boolean parseMixUnspentOutputs(String unspents) {

    final int PRE_MIX = 0;
    final int POST_MIX = 1;
    final int BAD_BANK = 2;
    int account_type = PRE_MIX;

    if (unspents == null) {
        return false;
    }

    try {
        // JSONObject's constructor throws on malformed input rather than
        // returning null, so no null check is needed here.
        JSONObject jsonObj = new JSONObject(unspents);
        if (!jsonObj.has("unspent_outputs")) {
            return false;
        }
        JSONArray utxoArray = jsonObj.getJSONArray("unspent_outputs");
        if (utxoArray == null || utxoArray.length() == 0) {
            return false;
        }

        for (int i = 0; i < utxoArray.length(); i++) {
            JSONObject outDict = utxoArray.getJSONObject(i);

            byte[] hashBytes = Hex.decode((String) outDict.get("tx_hash"));
            Sha256Hash txHash = Sha256Hash.wrap(hashBytes);
            int txOutputN = ((Number) outDict.get("tx_output_n")).intValue();
            BigInteger value = BigInteger.valueOf(((Number) outDict.get("value")).longValue());
            String script = (String) outDict.get("script");
            byte[] scriptBytes = Hex.decode(script);
            int confirmations = ((Number) outDict.get("confirmations")).intValue();
            String path = null;

            try {
                String address = Bech32Util.getInstance().getAddressFromScript(script);

                // Map the output's parent xpub ("m") to one of the Whirlpool
                // accounts; outputs without an "xpub" entry keep the previous
                // account_type (same as the original behavior).
                if (outDict.has("xpub")) {
                    JSONObject xpubObj = (JSONObject) outDict.get("xpub");
                    path = (String) xpubObj.get("path");
                    String m = (String) xpubObj.get("m");
                    unspentPaths.put(address, path);
                    if (m.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix()).xpubstr())) {
                        unspentBIP84PostMix.put(address, WhirlpoolMeta.getInstance(context).getWhirlpoolPostmix());
                        account_type = POST_MIX;
                    }
                    else if (m.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount()).xpubstr())) {
                        unspentBIP84PreMix.put(address, WhirlpoolMeta.getInstance(context).getWhirlpoolPremixAccount());
                        account_type = PRE_MIX;
                    }
                    else if (m.equals(BIP84Util.getInstance(context).getWallet().getAccountAt(WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank()).xpubstr())) {
                        unspentBIP84BadBank.put(address, WhirlpoolMeta.getInstance(context).getWhirlpoolBadBank());
                        account_type = BAD_BANK;
                    }
                }

                // Construct the output
                MyTransactionOutPoint outPoint = new MyTransactionOutPoint(txHash, txOutputN, value, scriptBytes, address);
                outPoint.setConfirmations(confirmations);

                if (account_type == POST_MIX) {
                    if (utxosPostMix.containsKey(script)) {
                        utxosPostMix.get(script).getOutpoints().add(outPoint);
                    }
                    else {
                        UTXO utxo = new UTXO();
                        utxo.getOutpoints().add(outPoint);
                        utxo.setPath(path);
                        utxosPostMix.put(script, utxo);
                    }
                    UTXOFactory.getInstance().addPostMix(txHash.toString(), txOutputN, script, utxosPostMix.get(script));
                }
                else if (account_type == PRE_MIX) {
                    if (utxosPreMix.containsKey(script)) {
                        utxosPreMix.get(script).getOutpoints().add(outPoint);
                    }
                    else {
                        UTXO utxo = new UTXO();
                        utxo.getOutpoints().add(outPoint);
                        utxo.setPath(path);
                        utxosPreMix.put(script, utxo);
                    }
                    UTXOFactory.getInstance().addPreMix(txHash.toString(), txOutputN, script, utxosPreMix.get(script));
                }
                // BUGFIX: the original read "} if(account_type == BAD_BANK)" —
                // the missing 'else' made this an independent check. It was
                // functionally equivalent (account_type holds one value), but
                // 'else if' restores the intended chain.
                else if (account_type == BAD_BANK) {
                    if (utxosBadBank.containsKey(script)) {
                        utxosBadBank.get(script).getOutpoints().add(outPoint);
                    }
                    else {
                        UTXO utxo = new UTXO();
                        utxo.getOutpoints().add(outPoint);
                        utxo.setPath(path);
                        utxosBadBank.put(script, utxo);
                    }
                    UTXOFactory.getInstance().addBadBank(txHash.toString(), txOutputN, script, utxosBadBank.get(script));
                }
            }
            catch (Exception e) {
                // Best-effort: skip outputs that cannot be parsed and
                // continue with the rest of the array.
                e.printStackTrace();
            }
        }

        // Persist the payload for whichever account was matched last.
        if (account_type == POST_MIX) {
            PayloadUtil.getInstance(context).serializeUTXOPost(jsonObj);
        }
        else if (account_type == PRE_MIX) {
            PayloadUtil.getInstance(context).serializeUTXOPre(jsonObj);
        }
        else {
            PayloadUtil.getInstance(context).serializeUTXOBadBank(jsonObj);
        }
        return true;
    }
    catch (Exception e) {
        e.printStackTrace();
    }
    return false;
}
} |
package ljdp.minechem.common.utils;
import net.minecraft.util.ResourceLocation;
/**
 * Central registry of resource-path constants for the minechem mod:
 * texture/GUI/model/sound locations and the icon names used by machines.
 * All members are implicitly {@code public static final} in an interface.
 */
public interface ConstantValue {

    // --- base directories ---
    String TEXTURE_DIR = "assets/minechem/textures/";
    String TEXTURE_GUI_DIR = "textures/gui/";
    String TEXTURE_MODEL_DIR = "textures/model/";
    String SOUNDS_DIR = "sounds/minechem/";
    String LANG_DIR = "/assets/minechem/languages/";
    String ICON_BASE = "textures/icons/";

    // --- ResourceLocation prefix (1.6 style "modid:name") ---
    String MOD_ID = "minechem";
    String TEXTURE_MOD_ID = MOD_ID + ":";

    // --- item / block textures ---
    String ATOMIC_MANIPULATOR_TEX = TEXTURE_MOD_ID + "AtomicManipulator";
    String BLUEPRINT_TEX = TEXTURE_MOD_ID + "Blueprint";
    String CHEMIST_JOURNAL_TEX = TEXTURE_MOD_ID + "ChemistJournal";
    String CHEMISTRY_UPGRADE_TEX = TEXTURE_MOD_ID + "ChemistryUpgrade";
    String FILLED_TESTTUBE_TEX = TEXTURE_MOD_ID + "filledTestTube";
    String MOLECULE_PASS1_TEX = TEXTURE_MOD_ID + "Molecule_Pass1";
    String MOLECULE_PASS2_TEX = TEXTURE_MOD_ID + "Molecule_Pass2";
    String FILLED_MOLECULE_TEX = TEXTURE_MOD_ID + "filledMolecule";

    // --- element state textures (gas / liquid / solid) ---
    String ELEMENT_GAS1_TEX = TEXTURE_MOD_ID + "gas1";
    String ELEMENT_GAS2_TEX = TEXTURE_MOD_ID + "gas2";
    String ELEMENT_GAS3_TEX = TEXTURE_MOD_ID + "gas3";
    String ELEMENT_GAS4_TEX = TEXTURE_MOD_ID + "gas4";
    String ELEMENT_GAS5_TEX = TEXTURE_MOD_ID + "gas5";
    String ELEMENT_GAS6_TEX = TEXTURE_MOD_ID + "gas6";
    String ELEMENT_GAS7_TEX = TEXTURE_MOD_ID + "gas7";
    String ELEMENT_LIQUID1_TEX = TEXTURE_MOD_ID + "liquid1";
    String ELEMENT_LIQUID2_TEX = TEXTURE_MOD_ID + "liquid2";
    String ELEMENT_LIQUID3_TEX = TEXTURE_MOD_ID + "liquid3";
    String ELEMENT_LIQUID4_TEX = TEXTURE_MOD_ID + "liquid4";
    String ELEMENT_LIQUID5_TEX = TEXTURE_MOD_ID + "liquid5";
    String ELEMENT_LIQUID6_TEX = TEXTURE_MOD_ID + "liquid6";
    String ELEMENT_LIQUID7_TEX = TEXTURE_MOD_ID + "liquid7";
    String ELEMENT_SOLID_TEX = TEXTURE_MOD_ID + "solid";

    // --- misc items ---
    String TESTTUBE_TEX = TEXTURE_MOD_ID + "TestTube";
    String PILL_TEX = TEXTURE_MOD_ID + "pill";
    String PHOTONIC_INDUCTION_TEX = TEXTURE_MOD_ID + "PhotonicInduction";
    String URANIUM_TEX = TEXTURE_MOD_ID + "uraniumOre";

    // --- hazmat armor pieces ---
    String HAZMAT_FEET_TEX = TEXTURE_MOD_ID + "hazmatFeet";
    String HAZMAT_HEAD_TEX = TEXTURE_MOD_ID + "hazmatHead";
    String HAZMAT_LEGS_TEX = TEXTURE_MOD_ID + "hazmatLegs";
    String HAZMAT_TORSO_TEX = TEXTURE_MOD_ID + "hazmatTorso";

    // --- lenses ---
    String LENS1_TEX = TEXTURE_MOD_ID + "lens1";
    String LENS2_TEX = TEXTURE_MOD_ID + "lens2";
    String LENS3_TEX = TEXTURE_MOD_ID + "lens3";
    String LENS4_TEX = TEXTURE_MOD_ID + "lens4";

    // --- machine status icons ---
    String JAMMED_ICON = TEXTURE_MOD_ID + "i_jammed";
    String NO_BOTTLES_ICON = TEXTURE_MOD_ID + "i_noBottles";
    String UNPOWERED_ICON = TEXTURE_MOD_ID + "i_unpowered";
    String NO_RECIPE_ICON = TEXTURE_MOD_ID + "i_noRecipe";
    String POWER_ICON = TEXTURE_MOD_ID + "i_power";
    String HELP_ICON = TEXTURE_MOD_ID + "i_help";
    String FULL_ENERGY_ICON = TEXTURE_MOD_ID + "i_fullEnergy";

    // --- machine block textures ---
    String DECOMPOSER_FRONT_TEX = TEXTURE_MOD_ID + "decomposerFront";
    String DECOMPOSER_TEX = TEXTURE_MOD_ID + "decomposer";
    String MICROSCOPE_FRONT_TEX = TEXTURE_MOD_ID + "microscopeFront";
    String MICROSCOPE_TEX = TEXTURE_MOD_ID + "microscope";
    String FUSION1_TEX = TEXTURE_MOD_ID + "fusion1";
    String FUSION2_TEX = TEXTURE_MOD_ID + "fusion2";
    String DEFAULT_TEX = TEXTURE_MOD_ID + "default";

    // --- GUI backgrounds ---
    String table_HEX = TEXTURE_GUI_DIR + "TableGUI.png";
    String TAB_LEFT = TEXTURE_GUI_DIR + "tab_left.png";
    String TAB_RIGHT = TEXTURE_GUI_DIR + "tab_right.png";
    String DECOMPOSER_GUI = TEXTURE_GUI_DIR + "ChemicalDecomposerGUI.png";
    String MICROSCOPE_GUI = TEXTURE_GUI_DIR + "MicroscopeGUI.png";
    String SYNTHESIS_GUI = TEXTURE_GUI_DIR + "SynthesisGUI.png";
    String FUSION_GUI = TEXTURE_GUI_DIR + "FusionChamberGUI.png";
    String FISSION_GUI = TEXTURE_GUI_DIR + "FissionGUI.png";
    String PROJECTOR_GUI = TEXTURE_GUI_DIR + "ProjectorGUI.png";
    String JOURNAL_GUI = TEXTURE_GUI_DIR + "ChemistsJournalGUI.png";
    String VAT_GUI = TEXTURE_GUI_DIR + "ChemicalVatGUI.png";
    String PRINT_GUI = TEXTURE_GUI_DIR + "PrintGUI.png";

    // --- TESR / entity model textures ---
    String MICROSCOPE_MODEL = TEXTURE_MODEL_DIR + "MicroscopeModel.png";
    String DECOMPOSER_MODEL_ON = TEXTURE_MODEL_DIR + "DecomposerModelOn.png";
    String DECOMPOSER_MODEL_OFF = TEXTURE_MODEL_DIR + "DecomposerModelOff.png";
    String SYNTHESIS_MODEL = TEXTURE_MODEL_DIR + "SynthesiserModel.png";
    String PRINTER_MODEL = TEXTURE_MODEL_DIR + "RushmeadPrinter.png";
    String PROJECTOR_MODEL_ON = TEXTURE_MODEL_DIR + "ProjectorModelOn.png";
    String PROJECTOR_MODEL_OFF = TEXTURE_MODEL_DIR + "ProjectorModelOff.png";
    String HAZMAT_TEX = TEXTURE_MODEL_DIR + "hazmatArmor.png";
    String CHEMICAL_STORAGE_MODEL = TEXTURE_MODEL_DIR + "ChemicalStorageModel.png";

    // --- sounds ---
    String PROJECTOR_SOUND = SOUNDS_DIR + "projector.ogg";
}
package com.torodb.packaging.guice;
import com.eightkdata.mongowp.annotations.MongoWP;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.inject.AbstractModule;
import com.torodb.common.util.ThreadFactoryIdleService;
import com.torodb.core.annotations.ToroDbIdleService;
import com.torodb.core.annotations.ToroDbRunnableService;
import com.torodb.mongodb.repl.guice.MongoDbRepl;
import com.torodb.packaging.ExecutorsService;
import com.torodb.torod.guice.TorodLayer;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class ExecutorServicesModule extends AbstractModule {
@Override
protected void configure() {
ThreadFactory threadFactory = Executors.defaultThreadFactory();
ExecutorService torodbDefaultThreadPool = Executors.newCachedThreadPool(
new ThreadFactoryBuilder()
.setNameFormat("torodb-%d")
.build()
);
bind(ThreadFactory.class)
.annotatedWith(ToroDbIdleService.class)
.toInstance(threadFactory);
bind(ThreadFactory.class)
.annotatedWith(ToroDbRunnableService.class)
.toInstance(threadFactory);
bind(ThreadFactory.class)
.annotatedWith(MongoWP.class)
.toInstance(threadFactory);
bind(ExecutorService.class)
.annotatedWith(TorodLayer.class)
.toInstance(torodbDefaultThreadPool);
bind(ExecutorService.class)
.annotatedWith(MongoDbRepl.class)
.toInstance(torodbDefaultThreadPool);
bind(ExecutorsService.class)
.toInstance(new DefaultExecutorsService(
threadFactory,
Collections.singletonList(torodbDefaultThreadPool))
);
}
private static class DefaultExecutorsService extends ThreadFactoryIdleService implements ExecutorsService {
private static final Logger LOGGER = LogManager.getLogger(DefaultExecutorsService.class);
private final Collection<ExecutorService> executorServices;
public DefaultExecutorsService(@ToroDbIdleService ThreadFactory threadFactory,
Collection<ExecutorService> executorServices) {
super(threadFactory);
this.executorServices = executorServices;
}
@Override
protected void startUp() throws Exception {
//Nothing to do
}
@Override
protected void shutDown() throws Exception {
executorServices.stream().forEach((executorService) -> {
executorService.shutdown();
});
for (ExecutorService executorService : executorServices) {
if (!executorService.awaitTermination(100, TimeUnit.SECONDS)) {
LOGGER.warn("The executor service " + executorService + " did not terminate "
+ "on the expected time");
}
}
}
}
} |
package org.jdesktop.swingx;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Vector;
import java.util.logging.Logger;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingUtilities;
import javax.swing.Timer;
import javax.swing.border.Border;
import javax.swing.event.TableModelEvent;
import javax.swing.table.TableCellRenderer;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreePath;
import org.jdesktop.swingx.action.LinkAction;
import org.jdesktop.swingx.decorator.HighlighterFactory;
import org.jdesktop.swingx.renderer.ButtonProvider;
import org.jdesktop.swingx.renderer.CellContext;
import org.jdesktop.swingx.renderer.ComponentProvider;
import org.jdesktop.swingx.renderer.DefaultTableRenderer;
import org.jdesktop.swingx.renderer.DefaultTreeRenderer;
import org.jdesktop.swingx.renderer.HyperlinkProvider;
import org.jdesktop.swingx.renderer.LabelProvider;
import org.jdesktop.swingx.renderer.StringValue;
import org.jdesktop.swingx.renderer.WrappingIconPanel;
import org.jdesktop.swingx.renderer.WrappingProvider;
import org.jdesktop.swingx.renderer.RendererVisualCheck.TextAreaProvider;
import org.jdesktop.swingx.test.ActionMapTreeTableModel;
import org.jdesktop.swingx.test.ComponentTreeTableModel;
import org.jdesktop.swingx.test.TreeTableUtils;
import org.jdesktop.swingx.treetable.AbstractMutableTreeTableNode;
import org.jdesktop.swingx.treetable.DefaultMutableTreeTableNode;
import org.jdesktop.swingx.treetable.DefaultTreeTableModel;
import org.jdesktop.swingx.treetable.FileSystemModel;
import org.jdesktop.swingx.treetable.MutableTreeTableNode;
import org.jdesktop.swingx.treetable.TreeTableModel;
import org.jdesktop.swingx.treetable.TreeTableNode;
import org.jdesktop.test.TableModelReport;
/**
 * Test to expose known issues of <code>JXTreeTable</code>. <p>
*
* Ideally, there would be at least one failing test method per open
* issue in the issue tracker. Plus additional failing test methods for
* not fully specified or not yet decided upon features/behaviour.<p>
*
* Once the issues are fixed and the corresponding methods are passing, they
* should be moved over to the XXTest.
*
* @author Jeanette Winzenburg
*/
public class JXTreeTableIssues extends InteractiveTestCase {
private static final Logger LOG = Logger.getLogger(JXTreeTableIssues.class
.getName());
public static void main(String[] args) {
    // setSystemLF(true);
    JXTreeTableIssues issues = new JXTreeTableIssues();
    try {
        // Runs only the tree-expand comparison; widen the regexp (or call
        // runInteractiveTests() without one) to run more checks.
        issues.runInteractiveTests(".*TreeExpand.*");
    } catch (Exception e) {
        System.err.println("exception when executing interactive tests:");
        e.printStackTrace();
    }
}
/**
* Issue #493-swingx: JXTreeTable.TreeTableModelAdapter: Inconsistency
* firing update.
*
* Test update events after updating table.
*
* from tiberiu@dev.java.net
*
* NOTE: the failing assert is wrapped in invokeLater ..., so
* appears to pass in the testrunner.
*/
public void testTableEventUpdateOnTreeTableSetValueForRoot() {
    // Mutable DefaultTreeModel-backed model; root is visible at row 0.
    TreeTableModel model = createCustomTreeTableModelFromDefault();
    final JXTreeTable table = new JXTreeTable(model);
    table.setRootVisible(true);
    table.expandAll();
    final int row = 0;
    // sanity
    assertEquals("JTree", table.getValueAt(row, 0).toString());
    assertTrue("root must be editable", table.getModel().isCellEditable(0, 0));
    final TableModelReport report = new TableModelReport();
    table.getModel().addTableModelListener(report);
    // doesn't fire or isn't detectable?
    // Problem was: model was not-editable.
    table.setValueAt("games", row, 0);
    // NOTE(review): the assertions run inside invokeLater, so a failure here
    // does not propagate to the test runner (see the method's javadoc).
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            LOG.info("sanity - did testTableEventUpdateOnTreeTableSetValueForRoot run?");
            assertEquals("tableModel must have fired", 1, report.getEventCount());
            assertEquals("the event type must be update", 1, report.getUpdateEventCount());
            TableModelEvent event = report.getLastUpdateEvent();
            assertEquals("the updated row ", row, event.getFirstRow());
        }
    });
}
/**
* Issue #493-swingx: incorrect table events fired.
*
* Here: must fire structureChanged on setRoot(null).
* fails - because the treeStructureChanged is mapped to a
* tableDataChanged.
*
* NOTE: the failing assert is wrapped in invokeLater ..., so
* appears to pass in the testrunner.
*/
public void testTableEventOnSetNullRoot() {
    // Expect a single structureChanged table event when the root is nulled;
    // currently treeStructureChanged is mapped to tableDataChanged instead.
    TreeTableModel model = createCustomTreeTableModelFromDefault();
    final JXTreeTable table = new JXTreeTable(model);
    table.setRootVisible(true);
    table.expandAll();
    final TableModelReport report = new TableModelReport();
    table.getModel().addTableModelListener(report);
    ((DefaultTreeTableModel) model).setRoot(null);
    // NOTE(review): the assertions run inside invokeLater, so a failure here
    // does not propagate to the test runner (see the method's javadoc).
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            LOG.info("sanity - did testTableEventOnSetNullRoot run?");
            assertEquals("tableModel must have fired", 1, report.getEventCount());
            assertTrue("event type must be structureChanged " + TableModelReport.printEvent(report.getLastEvent()),
                    report.isStructureChanged(report.getLastEvent()));
        }
    });
}
/**
* Issue #493-swingx: incorrect table events fired.
*
* Here: must fire structureChanged on setRoot(otherroot).
* fails - because the treeStructureChanged is mapped to a
* tableDataChanged.
*
* NOTE: the failing assert is wrapped in invokeLater ..., so
* appears to pass in the testrunner.
*/
public void testTableEventOnSetRoot() {
    // Expect a single structureChanged table event when the root is replaced;
    // currently treeStructureChanged is mapped to tableDataChanged instead.
    TreeTableModel model = createCustomTreeTableModelFromDefault();
    final JXTreeTable table = new JXTreeTable(model);
    table.setRootVisible(true);
    table.expandAll();
    final TableModelReport report = new TableModelReport();
    table.getModel().addTableModelListener(report);
    ((DefaultTreeTableModel) model).setRoot(new DefaultMutableTreeTableNode("other"));
    // NOTE(review): the assertions run inside invokeLater, so a failure here
    // does not propagate to the test runner (see the method's javadoc).
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            LOG.info("sanity - did testTableEventOnSetRoot run?");
            assertEquals("tableModel must have fired", 1, report.getEventCount());
            assertTrue("event type must be structureChanged " + TableModelReport.printEvent(report.getLastEvent()),
                    report.isStructureChanged(report.getLastEvent()));
        }
    });
}
/**
* Issue #493-swingx: incorrect table events fired.
*
* Here: must fire structureChanged on setModel.
*
*/
public void testTableEventOnSetModel() {
    // Swapping the tree table model must surface as one structureChanged
    // event on the adapted TableModel.
    final JXTreeTable table = new JXTreeTable(createCustomTreeTableModelFromDefault());
    table.setRootVisible(true);
    table.expandAll();
    final TableModelReport report = new TableModelReport();
    table.getModel().addTableModelListener(report);
    table.setTreeTableModel(createCustomTreeTableModelFromDefault());
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            LOG.info("sanity - did testTableEventOnSetModel run?");
            assertEquals("tableModel must have fired", 1, report.getEventCount());
            assertTrue("event type must be structureChanged " + TableModelReport.printEvent(report.getLastEvent()),
                    report.isStructureChanged(report.getLastEvent()));
        }
    });
}
/**
* Issue ??-swingx: JXTreeTable - scrollsOnExpand has no effect.
*
* Compare tree/table:
* - tree expands if property is true and
* expand triggered by mouse (not programmatically?).
* - treeTable never
*
*/
public void interactiveTestTreeExpand() {
    // Side-by-side JXTree / JXTreeTable over the same model, to compare the
    // effect of the scrollsOnExpand property.
    final JXTreeTable treeTable = new JXTreeTable(new FileSystemModel());
    final JXTree tree = new JXTree(treeTable.getTreeTableModel());
    treeTable.setScrollsOnExpand(tree.getScrollsOnExpand());
    tree.setRowHeight(treeTable.getRowHeight());
    Action toggleScrolls = new AbstractAction("Toggle Scroll") {
        public void actionPerformed(ActionEvent e) {
            tree.setScrollsOnExpand(!tree.getScrollsOnExpand());
            treeTable.setScrollsOnExpand(tree.getScrollsOnExpand());
        }
    };
    Action expand = new AbstractAction("Expand") {
        public void actionPerformed(ActionEvent e) {
            // BUGFIX: JTree.getSelectionRows() returns null when nothing is
            // selected — guard against the NPE the original code had.
            int[] selectedRows = tree.getSelectionRows();
            if (selectedRows != null && selectedRows.length > 0) {
                tree.expandRow(selectedRows[0]);
            }
            int selected = treeTable.getSelectedRow();
            if (selected >= 0) {
                treeTable.expandRow(selected);
            }
        }
    };
    JXFrame frame = wrapWithScrollingInFrame(tree, treeTable,
            "Compare Tree/Table expand properties ");
    addAction(frame, toggleScrolls);
    addAction(frame, expand);
    frame.setVisible(true);
}
/**
* Issue #493-swingx: JXTreeTable.TreeTableModelAdapter: Inconsistency
* firing update on a recursive delete on a parent node.
*
* By recursive delete on a parent node it is understood that first we
* remove its children and then the parent node. After each child removed
* we are making an update over the parent. During this update the problem
* occurs: the index row for the parent is -1 and hence it is made an update
* over the row -1 (the header) and as it can be seen the preffered widths
* of column header are not respected anymore and are restored to the default
* preferences (all equal).
*
* from tiberiu@dev.java.net
*/
public void interactiveTreeTableModelAdapterDeleteUpdate() {
    final DefaultTreeTableModel customTreeTableModel = (DefaultTreeTableModel)
        createCustomTreeTableModelFromDefault();
    final JXTreeTable table = new JXTreeTable(customTreeTableModel);
    table.setRootVisible(true);
    table.expandAll();
    // Pin column "A" to 100px so a spurious header update (the row -1 bug)
    // becomes visible as a reset of the column widths.
    table.getColumn("A").setPreferredWidth(100);
    table.getColumn("A").setMinWidth(100);
    table.getColumn("A").setMaxWidth(100);
    JXTree xtree = new JXTree(customTreeTableModel);
    xtree.setRootVisible(true);
    xtree.expandAll();
    final JXFrame frame = wrapWithScrollingInFrame(table, xtree,
        "JXTreeTable.TreeTableModelAdapter: Inconsistency firing update on recursive delete");
    // Node at row 6 plus its four children at rows 7..10.
    // NOTE(review): these indices assume the JXTree default model layout
    // ("sports" node) — confirm if the default model ever changes.
    final MutableTreeTableNode deletedNode = (MutableTreeTableNode) table.getPathForRow(6).getLastPathComponent();
    MutableTreeTableNode child1 = (MutableTreeTableNode) table.getPathForRow(6+1).getLastPathComponent();
    MutableTreeTableNode child2 = (MutableTreeTableNode) table.getPathForRow(6+2).getLastPathComponent();
    MutableTreeTableNode child3 = (MutableTreeTableNode) table.getPathForRow(6+3).getLastPathComponent();
    MutableTreeTableNode child4 = (MutableTreeTableNode) table.getPathForRow(6+4).getLastPathComponent();
    final MutableTreeTableNode[] children = {child1, child2, child3, child4 };
    final String[] values = {"v1", "v2", "v3", "v4"};
    // Timer-driven state machine: each tick removes one child and updates the
    // parent's value; once all children are gone, the parent itself is removed.
    final ActionListener l = new ActionListener() {
        int count = 0; // number of ticks processed so far
        public void actionPerformed(ActionEvent e) {
            if (count > values.length) return; // all work done; keep ignoring ticks
            if (count == values.length) {
                // children are gone: remove the (now empty) parent node
                customTreeTableModel.removeNodeFromParent(deletedNode);
                count++;
            } else {
                // one in each run
                removeChild(customTreeTableModel, deletedNode, children, values);
                count++;
                // all in one
                // for (int i = 0; i < values.length; i++) {
                // removeChild(customTreeTableModel, deletedNode, children, values);
                // count++;
            }
        }
        /**
         * Removes the next child and then updates the parent's value — the
         * update is what triggers the problematic row -1 table event.
         *
         * @param customTreeTableModel model to mutate
         * @param deletedNode parent node whose value is updated
         * @param children children removed one per tick (indexed by count)
         * @param values replacement values for the parent (indexed by count)
         */
        private void removeChild(final DefaultTreeTableModel customTreeTableModel, final MutableTreeTableNode deletedNode, final MutableTreeTableNode[] children, final String[] values) {
            customTreeTableModel.removeNodeFromParent(children[count]);
            customTreeTableModel.setValueAt(values[count], deletedNode, 0);
        }
    };
    // First click starts the 10ms timer; second click stops it and disables
    // the action.
    Action changeValue = new AbstractAction("delete node sports recursively") {
        Timer timer;
        public void actionPerformed(ActionEvent e) {
            if (timer == null) {
                timer = new Timer(10, l);
                timer.start();
            } else {
                timer.stop();
                setEnabled(false);
            }
        }
    };
    addAction(frame, changeValue);
    frame.setVisible(true);
}
/**
* Issue #493-swingx: JXTreeTable.TreeTableModelAdapter: Inconsistency
* firing update. Use the second child of root - first is accidentally okay.
*
* from tiberiu@dev.java.net
*
* TODO DefaultMutableTreeTableNodes do not allow value changes, so this
* test will never work
*/
public void interactiveTreeTableModelAdapterUpdate() {
    // Tree table plus a plain JXTree over the same model for comparison.
    TreeTableModel model = createCustomTreeTableModelFromDefault();
    final JXTreeTable table = new JXTreeTable(model);
    table.setRootVisible(true);
    table.expandAll();
    table.setLargeModel(true);
    JXTree comparisonTree = new JXTree(model);
    comparisonTree.setRootVisible(true);
    comparisonTree.expandAll();
    final JXFrame frame = wrapWithScrollingInFrame(table, comparisonTree,
            "JXTreeTable.TreeTableModelAdapter: Inconsistency firing update");
    // Mutate the value of the node at row 6.
    addAction(frame, new AbstractAction("change sports to games") {
        public void actionPerformed(ActionEvent e) {
            String newValue = "games";
            table.getTreeTableModel().setValueAt(newValue,
                    table.getPathForRow(6).getLastPathComponent(), 0);
        }
    });
    // Replace the entire root node.
    addAction(frame, new AbstractAction("change root") {
        public void actionPerformed(ActionEvent e) {
            DefaultMutableTreeTableNode newRoot = new DefaultMutableTreeTableNode("new Root");
            ((DefaultTreeTableModel) table.getTreeTableModel()).setRoot(newRoot);
        }
    });
    frame.pack();
    frame.setVisible(true);
}
/**
* Issue #493-swingx: JXTreeTable.TreeTableModelAdapter: Inconsistency
* firing delete.
*
* from tiberiu@dev.java.net
*/
public void interactiveTreeTableModelAdapterDelete() {
    // Tree table plus a plain JXTree over the same model for comparison.
    final TreeTableModel model = createCustomTreeTableModelFromDefault();
    final JXTreeTable table = new JXTreeTable(model);
    table.setRootVisible(true);
    table.expandAll();
    JXTree comparisonTree = new JXTree(model);
    comparisonTree.setRootVisible(true);
    comparisonTree.expandAll();
    final JXFrame frame = wrapWithScrollingInFrame(table, comparisonTree,
            "JXTreeTable.TreeTableModelAdapter: Inconsistency firing update");
    // Removes the node at row 7 (first child of the node at row 6).
    addAction(frame, new AbstractAction("delete first child of sports") {
        public void actionPerformed(ActionEvent e) {
            MutableTreeTableNode firstChild = (MutableTreeTableNode) table.getPathForRow(6 + 1).getLastPathComponent();
            ((DefaultTreeTableModel) model).removeNodeFromParent(firstChild);
        }
    });
    frame.setVisible(true);
}
/**
* Issue #493-swingx: JXTreeTable.TreeTableModelAdapter: Inconsistency
* firing delete.
*
* from tiberiu@dev.java.net
*/
public void interactiveTreeTableModelAdapterMutateSelected() {
    // Tree table plus a plain JXTree over the same model for comparison.
    final TreeTableModel customTreeTableModel = createCustomTreeTableModelFromDefault();
    final JXTreeTable table = new JXTreeTable(customTreeTableModel);
    table.setRootVisible(true);
    table.expandAll();
    JXTree xtree = new JXTree(customTreeTableModel);
    xtree.setRootVisible(true);
    xtree.expandAll();
    final JXFrame frame = wrapWithScrollingInFrame(table, xtree,
        "JXTreeTable.TreeTableModelAdapter: Inconsistency firing delete expanded folder");
    // Deletes whichever node is currently selected in the table.
    Action changeValue = new AbstractAction("delete selected node") {
        public void actionPerformed(ActionEvent e) {
            int row = table.getSelectedRow();
            if (row < 0) return; // nothing selected
            MutableTreeTableNode firstChild = (MutableTreeTableNode) table.getPathForRow(row).getLastPathComponent();
            ((DefaultTreeTableModel) customTreeTableModel).removeNodeFromParent(firstChild);
        }
    };
    addAction(frame, changeValue);
    // Inserts a new node as first child of the currently selected node.
    Action changeValue1 = new AbstractAction("insert as first child of selected node") {
        public void actionPerformed(ActionEvent e) {
            int row = table.getSelectedRow();
            if (row < 0) return; // nothing selected
            MutableTreeTableNode firstChild = (MutableTreeTableNode) table.getPathForRow(row).getLastPathComponent();
            MutableTreeTableNode newChild = new DefaultMutableTreeTableNode("inserted");
            ((DefaultTreeTableModel) customTreeTableModel)
                .insertNodeInto(newChild, firstChild, 0);
        }
    };
    addAction(frame, changeValue1);
    frame.pack();
    frame.setVisible(true);
}
/**
* Issue #493-swingx: JXTreeTable.TreeTableModelAdapter: Inconsistency
* firing delete.
*
* from tiberiu@dev.java.net
*/
public void interactiveTreeTableModelAdapterMutateSelectedDiscontinous() {
    // Tree table plus a plain JXTree over the same model for comparison.
    final TreeTableModel customTreeTableModel = createCustomTreeTableModelFromDefault();
    final JXTreeTable table = new JXTreeTable(customTreeTableModel);
    table.setRootVisible(true);
    table.expandAll();
    JXTree xtree = new JXTree(customTreeTableModel);
    xtree.setRootVisible(true);
    xtree.expandAll();
    final JXFrame frame = wrapWithScrollingInFrame(table, xtree,
        "JXTreeTable.TreeTableModelAdapter: Inconsistency firing delete expanded folder");
    // Deletes the selected node plus its second-next sibling, producing a
    // discontinuous removal. The sibling is removed first so the selected
    // node's own index is still valid at that point.
    Action changeValue = new AbstractAction("delete selected node + sibling") {
        public void actionPerformed(ActionEvent e) {
            int row = table.getSelectedRow();
            if (row < 0) return; // nothing selected
            MutableTreeTableNode firstChild = (MutableTreeTableNode) table.getPathForRow(row).getLastPathComponent();
            MutableTreeTableNode parent = (MutableTreeTableNode) firstChild.getParent();
            MutableTreeTableNode secondNextSibling = null;
            int firstIndex = parent.getIndex(firstChild);
            // only present if there are at least two following siblings
            if (firstIndex + 2 < parent.getChildCount()) {
                secondNextSibling = (MutableTreeTableNode) parent.getChildAt(firstIndex + 2);
            }
            if (secondNextSibling != null) {
                ((DefaultTreeTableModel) customTreeTableModel).removeNodeFromParent(secondNextSibling);
            }
            ((DefaultTreeTableModel) customTreeTableModel).removeNodeFromParent(firstChild);
        }
    };
    addAction(frame, changeValue);
    // Inserts a new node as first child of the currently selected node.
    Action changeValue1 = new AbstractAction("insert as first child of selected node") {
        public void actionPerformed(ActionEvent e) {
            int row = table.getSelectedRow();
            if (row < 0) return; // nothing selected
            MutableTreeTableNode firstChild = (MutableTreeTableNode) table.getPathForRow(row).getLastPathComponent();
            MutableTreeTableNode newChild = new DefaultMutableTreeTableNode("inserted");
            ((DefaultTreeTableModel) customTreeTableModel)
                .insertNodeInto(newChild, firstChild, 0);
        }
    };
    addAction(frame, changeValue1);
    frame.pack();
    frame.setVisible(true);
}
/**
* Creates and returns a custom model from JXTree default model. The model
* is of type DefaultTreeModel, allowing for easy insert/remove.
*
* @return
*/
private TreeTableModel createCustomTreeTableModelFromDefault() {
    // Grab JXTree's built-in sample model and convert it.
    DefaultTreeModel defaultModel = (DefaultTreeModel) new JXTree().getModel();
    return TreeTableUtils.convertDefaultTreeModel(defaultModel);
}
/**
* A TreeTableModel inheriting from DefaultTreeModel (to ease
* insert/delete).
*/
public static class CustomTreeTableModel extends DefaultTreeTableModel {
/**
* @param root
*/
public CustomTreeTableModel(TreeTableNode root) {
super(root);
}
public int getColumnCount() {
return 1;
}
public String getColumnName(int column) {
return "User Object";
}
public Object getValueAt(Object node, int column) {
return ((DefaultMutableTreeNode) node).getUserObject();
}
public boolean isCellEditable(Object node, int column) {
return true;
}
public void setValueAt(Object value, Object node, int column) {
((MutableTreeTableNode) node).setUserObject(value);
modelSupport.firePathChanged(new TreePath(getPathToRoot((TreeTableNode) node)));
}
}
/**
* Issue #??-swingx: hyperlink in JXTreeTable hierarchical column not
* active.
*
*/
public void interactiveTreeTableLinkRendererSimpleText() {
    // Parameterized type (the original used a raw LinkAction on the left).
    LinkAction<Object> simpleAction = new LinkAction<Object>(null) {
        public void actionPerformed(ActionEvent e) {
            LOG.info("hit: " + getTarget());
        }
    };
    JXTreeTable tree = new JXTreeTable(new FileSystemModel());
    HyperlinkProvider provider = new HyperlinkProvider(simpleAction);
    tree.getColumn(2).setCellRenderer(new DefaultTableRenderer(provider));
    // Wrap the provider so the hyperlink renders inside the hierarchical
    // column as well. (Dead commented-out variants removed.)
    tree.setTreeCellRenderer(new DefaultTreeRenderer(new WrappingProvider(provider)));
    tree.setHighlighters(HighlighterFactory.createSimpleStriping());
    JFrame frame = wrapWithScrollingInFrame(tree, "table and simple links");
    frame.setVisible(true);
}
/**
* Issue ??-swingx: hyperlink/rollover in hierarchical column.
*
*/
public void testTreeRendererInitialRollover() {
JXTreeTable tree = new JXTreeTable(new FileSystemModel());
assertEquals(tree.isRolloverEnabled(), ((JXTree) tree.getCellRenderer(0, 0)).isRolloverEnabled());
}
/**
* Issue ??-swingx: hyperlink/rollover in hierarchical column.
*
*/
public void testTreeRendererModifiedRollover() {
JXTreeTable tree = new JXTreeTable(new FileSystemModel());
tree.setRolloverEnabled(!tree.isRolloverEnabled());
assertEquals(tree.isRolloverEnabled(), ((JXTree) tree.getCellRenderer(0, 0)).isRolloverEnabled());
}
/**
* example how to use a custom component as
* renderer in tree column of TreeTable.
*
*/
public void interactiveTreeTableCustomRenderer() {
JXTreeTable tree = new JXTreeTable(new FileSystemModel());
ComponentProvider provider = new ButtonProvider() {
/**
* show a unselected checkbox and text.
*/
@Override
protected void format(CellContext context) {
super.format(context);
rendererComponent.setText(" ... " + getStringValue(context));
}
/**
* custom tooltip: show row. Note: the context is that
* of the rendering tree. No way to get at table state?
*/
@Override
protected void configureState(CellContext context) {
super.configureState(context);
rendererComponent.setToolTipText("Row: " + context.getRow());
}
};
provider.setHorizontalAlignment(JLabel.LEADING);
tree.setTreeCellRenderer(new DefaultTreeRenderer(provider));
tree.setHighlighters(HighlighterFactory.createSimpleStriping());
JFrame frame = wrapWithScrollingInFrame(tree, "treetable and custom renderer");
frame.setVisible(true);
}
/**
* Quick example to use a TextArea in the hierarchical column
* of a treeTable. Not really working .. the wrap is not reliable?.
*
*/
public void interactiveTextAreaTreeTable() {
TreeTableModel model = createTreeTableModelWithLongNode();
JXTreeTable treeTable = new JXTreeTable(model);
treeTable.setVisibleRowCount(5);
treeTable.setRowHeight(50);
treeTable.getColumnExt(0).setPreferredWidth(200);
TreeCellRenderer renderer = new DefaultTreeRenderer(
new WrappingProvider(new TextAreaProvider()));
treeTable.setTreeCellRenderer(renderer);
showWithScrollingInFrame(treeTable, "TreeTable with text wrapping");
}
/**
* @return
*/
private TreeTableModel createTreeTableModelWithLongNode() {
MutableTreeTableNode root = createLongNode("some really, maybe really really long text - "
+ "wrappit .... where needed ");
root.insert(createLongNode("another really, maybe really really long text - "
+ "with nothing but junk. wrappit .... where needed"), 0);
root.insert(createLongNode("another really, maybe really really long text - "
+ "with nothing but junk. wrappit .... where needed"), 0);
MutableTreeTableNode node = createLongNode("some really, maybe really really long text - "
+ "wrappit .... where needed ");
node.insert(createLongNode("another really, maybe really really long text - "
+ "with nothing but junk. wrappit .... where needed"), 0);
root.insert(node, 0);
root.insert(createLongNode("another really, maybe really really long text - "
+ "with nothing but junk. wrappit .... where needed"), 0);
Vector ids = new Vector();
ids.add("long text");
ids.add("dummy");
return new DefaultTreeTableModel(root, ids);
}
/**
* @param string
* @return
*/
private MutableTreeTableNode createLongNode(final String string) {
AbstractMutableTreeTableNode node = new AbstractMutableTreeTableNode() {
Object rnd = Math.random();
public int getColumnCount() {
return 2;
}
public Object getValueAt(int column) {
if (column == 0) {
return string;
}
return rnd;
}
};
node.setUserObject(string);
return node;
}
/**
* example how to use a custom component as
* renderer in tree column of TreeTable.
*
*/
    public void interactiveTreeTableWrappingProvider() {
        final JXTreeTable treeTable = new JXTreeTable(createActionTreeModel());
        treeTable.setHorizontalScrollEnabled(true);
        // pack the hierarchical column to its preferred width
        treeTable.packColumn(0, -1);
        // string converter: append a marker to Action names, default otherwise
        StringValue format = new StringValue() {
            public String getString(Object value) {
                if (value instanceof Action) {
                    return ((Action) value).getValue(Action.NAME) + "xx";
                }
                return StringValue.TO_STRING.getString(value);
            }
        };
        ComponentProvider tableProvider = new LabelProvider(format);
        // NOTE(review): tableRenderer is created but never installed - leftover?
        TableCellRenderer tableRenderer = new DefaultTableRenderer(tableProvider);
        WrappingProvider wrappingProvider = new WrappingProvider(tableProvider) {
            Border redBorder = BorderFactory.createLineBorder(Color.RED);
            @Override
            public WrappingIconPanel getRendererComponent(CellContext context) {
                // NOTE(review): 'old' is captured but never used
                Dimension old = rendererComponent.getPreferredSize();
                // reset so the superclass computes a fresh preferred size
                rendererComponent.setPreferredSize(null);
                super.getRendererComponent(context);
                // stretch to at least the first column's current width and
                // mark the component visibly with a red border
                Dimension dim = rendererComponent.getPreferredSize();
                dim.width = Math.max(dim.width, treeTable.getColumn(0).getWidth());
                rendererComponent.setPreferredSize(dim);
                rendererComponent.setBorder(redBorder);
                return rendererComponent;
            }
        };
        DefaultTreeRenderer treeCellRenderer = new DefaultTreeRenderer(wrappingProvider);
        treeTable.setTreeCellRenderer(treeCellRenderer);
        treeTable.setHighlighters(HighlighterFactory.createSimpleStriping());
        // show the same model in a plain JXTree side by side for comparison
        JXTree tree = new JXTree(treeTable.getTreeTableModel());
        tree.setCellRenderer(treeCellRenderer);
        tree.setLargeModel(true);
        tree.setScrollsOnExpand(false);
        JFrame frame = wrapWithScrollingInFrame(treeTable, tree, "treetable and default wrapping provider");
        frame.setVisible(true);
    }
/**
* Dirty example how to configure a custom renderer
* to use treeTableModel.getValueAt(...) for showing.
*
*/
    public void interactiveTreeTableGetValueRenderer() {
        JXTreeTable tree = new JXTreeTable(new ComponentTreeTableModel(new JXFrame()));
        ComponentProvider provider = new ButtonProvider() {
            /**
             * show a unselected checkbox and text.
             * Deliberately does NOT call super.format: the display value is
             * looked up from the tree table model instead of the node itself.
             */
            @Override
            protected void format(CellContext context) {
                // this is dirty because the design idea was to keep the renderer
                // unaware of the context type
                TreeTableModel model = (TreeTableModel) ((JXTree) context.getComponent()).getModel();
                // beware: currently works only if the node is not a DefaultMutableTreeNode
                // otherwise the WrappingProvider tries to be smart and replaces the node
                // by the userObject before passing on to the wrappee!
                Object nodeValue = model.getValueAt(context.getValue(), 0);
                rendererComponent.setText(" ... " + formatter.getString(nodeValue));
            }
            /**
             * custom tooltip: show row. Note: the context is that
             * of the rendering tree. No way to get at table state?
             */
            @Override
            protected void configureState(CellContext context) {
                super.configureState(context);
                rendererComponent.setToolTipText("Row: " + context.getRow());
            }
        };
        provider.setHorizontalAlignment(JLabel.LEADING);
        tree.setTreeCellRenderer(new DefaultTreeRenderer(provider));
        tree.expandAll();
        tree.setHighlighters(HighlighterFactory.createSimpleStriping());
        JFrame frame = wrapWithScrollingInFrame(tree, "treeTable and getValueAt renderer");
        frame.setVisible(true);
    }
/**
* Issue #399-swingx: editing terminated by selecting editing row.
*
*/
public void testSelectionKeepsEditingWithExpandsTrue() {
JXTreeTable treeTable = new JXTreeTable(new FileSystemModel()) {
@Override
public boolean isCellEditable(int row, int column) {
return true;
}
};
// sanity: default value of expandsSelectedPath
assertTrue(treeTable.getExpandsSelectedPaths());
boolean canEdit = treeTable.editCellAt(1, 2);
// sanity: editing started
assertTrue(canEdit);
// sanity: nothing selected
assertTrue(treeTable.getSelectionModel().isSelectionEmpty());
int editingRow = treeTable.getEditingRow();
treeTable.setRowSelectionInterval(editingRow, editingRow);
assertEquals("after selection treeTable editing state must be unchanged", canEdit, treeTable.isEditing());
}
/**
* Issue #212-jdnc: reuse editor, install only once.
*
*/
    public void testReuseEditor() {
        //TODO rework this test, since we no longer use TreeTableModel.class
        // as the editor lookup key; the original assertions are preserved
        // below for reference until the overhaul settles.
        // JXTreeTable treeTable = new JXTreeTable(treeTableModel);
        // CellEditor editor = treeTable.getDefaultEditor(TreeTableModel.class);
        // assertTrue(editor instanceof TreeTableCellEditor);
        // treeTable.setTreeTableModel(simpleTreeTableModel);
        // assertSame("hierarchical editor must be unchanged", editor,
        // treeTable.getDefaultEditor(TreeTableModel.class));
        // deliberate failure keeps the issue visible in the test report
        fail("#212-jdnc - must be revisited after treeTableModel overhaul");
    }
/**
* sanity: toggling select/unselect via mouse the lead is
* always painted, doing unselect via model (clear/remove path)
* seems to clear the lead?
*
*/
public void testBasicTreeLeadSelection() {
JXTree tree = new JXTree();
TreePath path = tree.getPathForRow(0);
tree.setSelectionPath(path);
assertEquals(0, tree.getSelectionModel().getLeadSelectionRow());
assertEquals(path, tree.getLeadSelectionPath());
tree.removeSelectionPath(path);
assertNotNull(tree.getLeadSelectionPath());
assertEquals(0, tree.getSelectionModel().getLeadSelectionRow());
}
/**
* Issue #341-swingx: missing synch of lead.
* test lead after setting selection via table.
*
* PENDING: this passes locally, fails on server
*/
public void testLeadSelectionFromTable() {
JXTreeTable treeTable = prepareTreeTable(false);
assertEquals(-1, treeTable.getSelectionModel().getLeadSelectionIndex());
assertEquals(-1, treeTable.getTreeSelectionModel().getLeadSelectionRow());
treeTable.setRowSelectionInterval(0, 0);
assertEquals(treeTable.getSelectionModel().getLeadSelectionIndex(),
treeTable.getTreeSelectionModel().getLeadSelectionRow());
}
/**
* Issue #341-swingx: missing synch of lead.
* test lead after setting selection via treeSelection.
* PENDING: this passes locally, fails on server
*
*/
public void testLeadSelectionFromTree() {
JXTreeTable treeTable = prepareTreeTable(false);
assertEquals(-1, treeTable.getSelectionModel().getLeadSelectionIndex());
assertEquals(-1, treeTable.getTreeSelectionModel().getLeadSelectionRow());
treeTable.getTreeSelectionModel().setSelectionPath(treeTable.getPathForRow(0));
assertEquals(treeTable.getSelectionModel().getLeadSelectionIndex(),
treeTable.getTreeSelectionModel().getLeadSelectionRow());
assertEquals(0, treeTable.getTreeSelectionModel().getLeadSelectionRow());
}
/**
* Issue #341-swingx: missing synch of lead.
* test lead after remove selection via tree.
*
*/
public void testLeadAfterRemoveSelectionFromTree() {
JXTreeTable treeTable = prepareTreeTable(true);
treeTable.getTreeSelectionModel().removeSelectionPath(
treeTable.getTreeSelectionModel().getLeadSelectionPath());
assertEquals(treeTable.getSelectionModel().getLeadSelectionIndex(),
treeTable.getTreeSelectionModel().getLeadSelectionRow());
}
/**
* Issue #341-swingx: missing synch of lead.
* test lead after clear selection via table.
*
*/
public void testLeadAfterClearSelectionFromTable() {
JXTreeTable treeTable = prepareTreeTable(true);
treeTable.clearSelection();
assertEquals(treeTable.getSelectionModel().getLeadSelectionIndex(),
treeTable.getTreeSelectionModel().getLeadSelectionRow());
}
/**
* Issue #341-swingx: missing synch of lead.
* test lead after clear selection via table.
*
*/
public void testLeadAfterClearSelectionFromTree() {
JXTreeTable treeTable = prepareTreeTable(true);
treeTable.getTreeSelectionModel().clearSelection();
assertEquals(treeTable.getSelectionModel().getLeadSelectionIndex(),
treeTable.getTreeSelectionModel().getLeadSelectionRow());
}
/**
* creates and configures a treetable for usage in selection tests.
*
* @param selectFirstRow boolean to indicate if the first row should
* be selected.
* @return
*/
protected JXTreeTable prepareTreeTable(boolean selectFirstRow) {
JXTreeTable treeTable = new JXTreeTable(new ComponentTreeTableModel(new JXFrame()));
treeTable.setRootVisible(true);
// sanity: assert that we have at least two rows to change selection
assertTrue(treeTable.getRowCount() > 1);
if (selectFirstRow) {
treeTable.setRowSelectionInterval(0, 0);
}
return treeTable;
}
    public void testDummy() {
        // intentionally empty: keeps the runner happy when every other
        // method in this class is interactive-only or disabled
    }
/**
* @return
*/
private TreeTableModel createActionTreeModel() {
JXTable table = new JXTable(10, 10);
table.setHorizontalScrollEnabled(true);
return new ActionMapTreeTableModel(table);
}
} |
package com.x1unix.avi.rest;
import com.x1unix.avi.model.KPMovieDetailViewResponse;
import com.x1unix.avi.model.KPSearchResponse;
import retrofit2.Call;
import retrofit2.http.GET;
import retrofit2.http.Path;
import retrofit2.http.Query;
/**
 * Retrofit definition of the KinoPoisk (KP) REST endpoints used by the app.
 */
public interface KPApiInterface {
    // Full-text film search by keyword.
    @GET("getKPSearchInFilms")
    Call<KPSearchResponse> findMovies(@Query("keyword") String keyword);
    // Detail view for a single film, looked up by its KP film id.
    @GET("getKPFilmDetailView")
    Call<KPMovieDetailViewResponse> getMovieById(@Query("filmID") String filmId);
}
import edu.iu.ise.svm.util.Util;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.mllib.classification.SVMModel;
import org.apache.spark.mllib.classification.SVMWithSGD;
import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics;
import org.apache.spark.mllib.linalg.Vector;
import org.apache.spark.mllib.linalg.Vectors;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.mllib.util.MLUtils;
import scala.Tuple2;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.io.File;
import java.io.IOException;
import java.util.*;
public class ExpSVM {
private static final Logger log = Logger.getLogger(ExpSVM.class.getName());
private String[] args = null;
private static Options options = new Options();
private static String LOG_PATH = "logs";
private static String LOG_FILE = "log.txt";
private static String LOG_DEST = "";
public static void main(String [] args) throws IOException {
long start_time = System.currentTimeMillis();
System.out.println("Hello Spark");
SparkConf conf = new SparkConf().setAppName("Simple Application");
SparkContext sc = new SparkContext(conf);
init(args);
CommandLine cmd = parse(args);
String trainingDataSet = cmd.getOptionValue("train");
String testingDataSet = cmd.getOptionValue("test");
int numIterations = Integer.parseInt(cmd.getOptionValue("iterations"));
double stepSize = Double.parseDouble(cmd.getOptionValue("stepSize"));
double regParam = Double.parseDouble(cmd.getOptionValue("regParam"));
if((cmd.getOptionValue("log"))!=null){
LOG_DEST = cmd.getOptionValue("log");
}else{
Util.mkdir(LOG_PATH);
LOG_DEST = LOG_PATH+"/"+LOG_FILE;
}
Util.appendLogs(LOG_DEST,"===============================================================================");
Util.appendLogs(LOG_DEST,"Experiment Started :"+ new Date().toString());
Util.appendLogs(LOG_DEST,"Training File: " + cmd.getOptionValue("train") );
Util.appendLogs(LOG_DEST,"Iterations: " + cmd.getOptionValue("iterations") );
Util.appendLogs(LOG_DEST,"Step Size: " + cmd.getOptionValue("stepSize") );
Util.appendLogs(LOG_DEST,"Regularization Parameter: " + cmd.getOptionValue("regParam") );
if((cmd.getOptionValue("split"))!=null){
Util.appendLogs(LOG_DEST,"Splitting Ratio: " + cmd.getOptionValue("split") );
double splitRatio = Double.parseDouble(cmd.getOptionValue("split"));
System.out.println("Split Ratio: " + splitRatio);
ArrayList<JavaRDD<LabeledPoint>> dataList = dataSplit(trainingDataSet, sc, splitRatio);
JavaRDD<LabeledPoint> training = dataList.get(0);
JavaRDD<LabeledPoint> testing = dataList.get(1);
task(sc, training, testing, numIterations, stepSize, regParam);
}else{
Util.appendLogs(LOG_DEST,"Testing File: " + cmd.getOptionValue("test") );
task(sc, trainingDataSet, testingDataSet, numIterations, stepSize, regParam);
}
Util.appendLogs(LOG_DEST,"===============================================================================");
}
public static void task(SparkContext sc, String trainingDataSet, String testingDataSet) throws IOException {
String datasource = "ijcnn1";
String path = "file:"+trainingDataSet; //"file:/home/vibhatha/data/sparksvm/ijcnn1/ijcnn1_train_spark.txt";
String test_path = "file:"+testingDataSet;
JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc, path).toJavaRDD();
JavaRDD<LabeledPoint> testdata = MLUtils.loadLibSVMFile(sc, test_path).toJavaRDD();
ArrayList<LabeledPoint> newrdd = new ArrayList<>();
LabeledPoint pos = new LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0));
Double label = pos.label();
Vector features = pos.features();
System.out.println(label);
System.out.println(features);
JavaRDD<LabeledPoint> parsedData = data.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
JavaRDD<LabeledPoint> parsedTestData = testdata.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
// Split initial RDD into two... [60% training data, 40% testing data].
JavaRDD<LabeledPoint> training = parsedData;
training.cache();
JavaRDD<LabeledPoint> test = parsedTestData;
//printRDD(training);
//printRDD(test);
train(sc,training, test);
}
public static void task(SparkContext sc, String trainingDataSet, String testingDataSet, int numIterations, double stepSize, double regParam) throws IOException {
String datasource = "ijcnn1";
String path = "file:"+trainingDataSet; //"file:/home/vibhatha/data/sparksvm/ijcnn1/ijcnn1_train_spark.txt";
String test_path = "file:"+testingDataSet;
JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc, path).toJavaRDD();
JavaRDD<LabeledPoint> testdata = MLUtils.loadLibSVMFile(sc, test_path).toJavaRDD();
ArrayList<LabeledPoint> newrdd = new ArrayList<>();
LabeledPoint pos = new LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0));
Double label = pos.label();
Vector features = pos.features();
System.out.println(label);
System.out.println(features);
JavaRDD<LabeledPoint> parsedData = data.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
JavaRDD<LabeledPoint> parsedTestData = testdata.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
// Split initial RDD into two... [60% training data, 40% testing data].
JavaRDD<LabeledPoint> training = parsedData;
training.cache();
JavaRDD<LabeledPoint> test = parsedTestData;
//printRDD(training);
//printRDD(test);
train(sc,training, test, numIterations, stepSize, regParam);
}
public static void task(SparkContext sc, JavaRDD<LabeledPoint> trainingDataSet, JavaRDD<LabeledPoint> testingDataSet, int numIterations, double stepSize, double regParam) throws IOException {
String datasource = "ijcnn1";
String path = "file:"+trainingDataSet; //"file:/home/vibhatha/data/sparksvm/ijcnn1/ijcnn1_train_spark.txt";
String test_path = "file:"+testingDataSet;
JavaRDD<LabeledPoint> data = trainingDataSet;
JavaRDD<LabeledPoint> testdata = testingDataSet;
ArrayList<LabeledPoint> newrdd = new ArrayList<>();
LabeledPoint pos = new LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0));
Double label = pos.label();
Vector features = pos.features();
System.out.println(label);
System.out.println(features);
JavaRDD<LabeledPoint> parsedData = data.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0 || label2==2.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
JavaRDD<LabeledPoint> parsedTestData = testdata.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0 || label2==2.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
// Split initial RDD into two... [60% training data, 40% testing data].
JavaRDD<LabeledPoint> training = parsedData;
training.cache();
JavaRDD<LabeledPoint> test = parsedTestData;
//printRDD(training);
//printRDD(test);
train(sc,training, test, numIterations, stepSize, regParam);
}
public static void train(SparkContext sc,JavaRDD<LabeledPoint> training, JavaRDD<LabeledPoint> test ) throws IOException {
// Run training algorithm to build the model.
int numIterations = 100;
long start_time = System.currentTimeMillis();
final SVMModel model = SVMWithSGD.train(training.rdd(), numIterations, 0.01, 0.01);
//model.clearThreshold();
long end_time = System.currentTimeMillis();
long elapsed_time = end_time - start_time;
String svmModelPath= "model/svm/exp1";
// Save and load model
File file = new File(svmModelPath);
if(file.exists()){
FileUtils.deleteDirectory(file);
}
model.save(sc, svmModelPath);
SVMModel sameModel = SVMModel.load(sc, svmModelPath);
JavaRDD<Vector> testFeatures = test.map(line -> {
Vector feature = line.features();
return feature;
});
JavaRDD<Double> testLabels = test.map(line -> {
Double label = line.label();
return label;
});
JavaRDD<Double> predictions = sameModel.predict(testFeatures);
// double prediction = sameModel.predict(testFeatures.first());
List<Double> predictionVals = predictions.collect();
List<Double> expectedVals = testLabels.collect();
double accuracy = predictionAccuracy(predictionVals, expectedVals);
String record = "Accuracy : "+accuracy+", Training Time : "+elapsed_time/1000.0;
System.out.println(record);
Util.appendLogs(LOG_DEST,record);
}
public static void train(SparkContext sc,JavaRDD<LabeledPoint> training, JavaRDD<LabeledPoint> test, int numIterations, double stepSize, double regParam) throws IOException {
// Run training algorithm to build the model.
long start_time = System.currentTimeMillis();
final SVMModel model = SVMWithSGD.train(training.rdd(), numIterations, stepSize, regParam);
long end_time = System.currentTimeMillis();
long elapsed_time = end_time - start_time;
String svmModelPath= "model/svm/exp1";
// Save and load model
File file = new File(svmModelPath);
if(file.exists()){
FileUtils.deleteDirectory(file);
}
model.save(sc, svmModelPath);
SVMModel sameModel = SVMModel.load(sc, svmModelPath);
JavaRDD<Vector> testFeatures = test.map(line -> {
Vector feature = line.features();
return feature;
});
JavaRDD<Double> testLabels = test.map(line -> {
Double label = line.label();
return label;
});
JavaRDD<Double> predictions = sameModel.predict(testFeatures);
// double prediction = sameModel.predict(testFeatures.first());
List<Double> predictionVals = predictions.collect();
List<Double> expectedVals = testLabels.collect();
double accuracy = predictionAccuracy(predictionVals, expectedVals);
String record = "Accuracy : "+accuracy+", Training Time : "+elapsed_time/1000.0;
System.out.println(record);
Util.appendLogs(LOG_DEST,record);
}
public static double predictionAccuracy(List<Double> predictions, List<Double> tests){
double acc = 0.0;
int count = 0;
int matches = 0;
for (Double d: predictions){
//System.out.println(d+","+tests.get(count));
if(d.intValue() == tests.get(count).intValue()){
matches++;
}
count++;
}
acc = (double)matches / (double)(predictions.size())*100.0;
return acc;
}
public static void printRDD(JavaRDD<LabeledPoint> parsedData){
parsedData.foreach(x->{
Double label1 = x.label();
Vector feature = x.features();
LabeledPoint newLabelPoint = new LabeledPoint(label1, feature);
System.out.println(newLabelPoint.label());
});
}
public static void init(String[] args) {
options.addOption("h", "help", false, "show help.");
options.addOption("train", "training data set path", true, "Set training data set . ex: -train train_data");
options.addOption("test", "testing data set path", true, "Set testing data set . ex: -test test_data");
options.addOption("iterations", "iteration number", true, "Set number of iterations . ex: -iterations 100");
options.addOption("stepSize", "step size", true, "Set step size . ex: -stepSize 0.01");
options.addOption("regParam", "regularization parameter", true, "Set testing data set. ex: -regParam 0.02");
options.addOption("split", "Data splitting ratio", true, "Training and Testing data splitting. ex: -split 0.8 (80% of training and 20% of testing)");
options.addOption("log", "Logging functionality", true, "Log file path addition. ex: logs/log1.txt");
options.getOption("test").setOptionalArg(true);
options.getOption("split").setOptionalArg(true);
options.getOption("log").setOptionalArg(true);
}
public static CommandLine parse(String [] args) {
CommandLineParser parser = new BasicParser();
CommandLine cmd = null;
try {
cmd = parser.parse(options, args);
if (cmd.hasOption("h"))
help();
if (cmd.hasOption("train")) {
log.log(Level.INFO, "Training data set -train=" + cmd.getOptionValue("train"));
// Whatever you want to do with the setting goes here
} else {
log.log(Level.SEVERE, "Missing -train option");
help();
}
if (cmd.hasOption("test")) {
log.log(Level.INFO, "Testing data set -test=" + cmd.getOptionValue("test"));
// Whatever you want to do with the setting goes here
}
if (cmd.hasOption("iterations")) {
log.log(Level.INFO, "Iterations -iterations=" + cmd.getOptionValue("iterations"));
// Whatever you want to do with the setting goes here
} else {
log.log(Level.SEVERE, "Missing -iterations option");
help();
}
if (cmd.hasOption("stepSize")) {
log.log(Level.INFO, "Step Size -stepSize=" + cmd.getOptionValue("stepSize"));
// Whatever you want to do with the setting goes here
} else {
log.log(Level.SEVERE, "Missing -stepSize option");
help();
}
if (cmd.hasOption("regParam")) {
log.log(Level.INFO, "Regularization Parameter -regParam=" + cmd.getOptionValue("regParam"));
// Whatever you want to do with the setting goes here
} else {
log.log(Level.SEVERE, "Missing -regParam option");
help();
}
if (cmd.hasOption("split")) {
log.log(Level.INFO, "Split Parameter -split=" + cmd.getOptionValue("split"));
// Whatever you want to do with the setting goes here
}
if (cmd.hasOption("log")) {
log.log(Level.INFO, "Log Parameter -log=" + cmd.getOptionValue("log"));
// Whatever you want to do with the setting goes here
}
} catch (ParseException e) {
log.log(Level.SEVERE, "Failed to parse comand line properties", e);
help();
}
return cmd;
}
private static void help() {
// This prints out some help
HelpFormatter formater = new HelpFormatter();
formater.printHelp("ExpSVM", options);
System.exit(0);
}
public static ArrayList<JavaRDD<LabeledPoint>> dataSplit(String path, SparkContext sc, double ratio){
JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc, path).toJavaRDD();
ArrayList<JavaRDD<LabeledPoint>> list = new ArrayList<>();
JavaRDD<LabeledPoint> training = data.sample(false, ratio, 11L);
training.cache();
JavaRDD<LabeledPoint> test = data.subtract(training);
list.add(training);
list.add(test);
return list;
}
} |
package com.redhat.ceylon.eclipse.code.editor;
import static com.redhat.ceylon.eclipse.code.editor.EditorActionIds.ADD_BLOCK_COMMENT;
import static com.redhat.ceylon.eclipse.code.editor.EditorActionIds.CORRECT_INDENTATION;
import static com.redhat.ceylon.eclipse.code.editor.EditorActionIds.GOTO_MATCHING_FENCE;
import static com.redhat.ceylon.eclipse.code.editor.EditorActionIds.REMOVE_BLOCK_COMMENT;
import static com.redhat.ceylon.eclipse.code.editor.EditorActionIds.RESTORE_PREVIOUS;
import static com.redhat.ceylon.eclipse.code.editor.EditorActionIds.SELECT_ENCLOSING;
import static com.redhat.ceylon.eclipse.code.editor.EditorActionIds.SHOW_OUTLINE;
import static com.redhat.ceylon.eclipse.code.editor.EditorActionIds.TOGGLE_COMMENT;
import static com.redhat.ceylon.eclipse.code.editor.EditorInputUtils.getFile;
import static com.redhat.ceylon.eclipse.code.editor.EditorInputUtils.getPath;
import static com.redhat.ceylon.eclipse.code.editor.SourceArchiveDocumentProvider.isSrcArchive;
import static com.redhat.ceylon.eclipse.code.outline.CeylonLabelProvider.getImageForFile;
import static com.redhat.ceylon.eclipse.ui.CeylonPlugin.PLUGIN_ID;
import static java.util.ResourceBundle.getBundle;
import static org.eclipse.core.resources.IncrementalProjectBuilder.CLEAN_BUILD;
import static org.eclipse.core.resources.ResourcesPlugin.getWorkspace;
import static org.eclipse.jdt.ui.PreferenceConstants.EDITOR_FOLDING_ENABLED;
import static org.eclipse.ui.texteditor.ITextEditorActionConstants.GROUP_RULERS;
import static org.eclipse.ui.texteditor.ITextEditorActionDefinitionIds.CONTENT_ASSIST_PROPOSALS;
import static org.eclipse.ui.texteditor.ITextEditorActionDefinitionIds.DELETE_NEXT_WORD;
import static org.eclipse.ui.texteditor.ITextEditorActionDefinitionIds.DELETE_PREVIOUS_WORD;
import static org.eclipse.ui.texteditor.ITextEditorActionDefinitionIds.SELECT_WORD_NEXT;
import static org.eclipse.ui.texteditor.ITextEditorActionDefinitionIds.SELECT_WORD_PREVIOUS;
import static org.eclipse.ui.texteditor.ITextEditorActionDefinitionIds.WORD_NEXT;
import static org.eclipse.ui.texteditor.ITextEditorActionDefinitionIds.WORD_PREVIOUS;
import java.lang.reflect.Method;
import java.text.BreakIterator;
import java.text.CharacterIterator;
import java.util.Iterator;
import java.util.List;
import java.util.ResourceBundle;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.eclipse.core.resources.IResourceDelta;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.debug.ui.actions.IToggleBreakpointsTarget;
import org.eclipse.debug.ui.actions.ToggleBreakpointAction;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.commands.ActionHandler;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.DocumentEvent;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IDocumentListener;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.ITextSelection;
import org.eclipse.jface.text.Region;
import org.eclipse.jface.text.link.LinkedModeModel;
import org.eclipse.jface.text.link.LinkedPosition;
import org.eclipse.jface.text.source.CompositeRuler;
import org.eclipse.jface.text.source.DefaultCharacterPairMatcher;
import org.eclipse.jface.text.source.ICharacterPairMatcher;
import org.eclipse.jface.text.source.ISourceViewer;
import org.eclipse.jface.text.source.IVerticalRuler;
import org.eclipse.jface.text.source.IVerticalRulerInfo;
import org.eclipse.jface.text.source.SourceViewer;
import org.eclipse.jface.text.source.projection.ProjectionSupport;
import org.eclipse.jface.text.source.projection.ProjectionViewer;
import org.eclipse.jface.util.IPropertyChangeListener;
import org.eclipse.jface.util.PropertyChangeEvent;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CaretEvent;
import org.eclipse.swt.custom.CaretListener;
import org.eclipse.swt.custom.ST;
import org.eclipse.swt.custom.StyledText;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IFileEditorInput;
import org.eclipse.ui.IPropertyListener;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.editors.text.EditorsUI;
import org.eclipse.ui.editors.text.TextEditor;
import org.eclipse.ui.handlers.IHandlerActivation;
import org.eclipse.ui.handlers.IHandlerService;
import org.eclipse.ui.texteditor.AbstractTextEditor;
import org.eclipse.ui.texteditor.AnnotationPreference;
import org.eclipse.ui.texteditor.ContentAssistAction;
import org.eclipse.ui.texteditor.IDocumentProvider;
import org.eclipse.ui.texteditor.IEditorStatusLine;
import org.eclipse.ui.texteditor.ITextEditorActionConstants;
import org.eclipse.ui.texteditor.IUpdate;
import org.eclipse.ui.texteditor.MarkerAnnotationPreferences;
import org.eclipse.ui.texteditor.SourceViewerDecorationSupport;
import org.eclipse.ui.texteditor.TextNavigationAction;
import org.eclipse.ui.texteditor.TextOperationAction;
import org.eclipse.ui.themes.ITheme;
import org.eclipse.ui.views.contentoutline.IContentOutlinePage;
import ceylon.language.StringBuilder;
import com.redhat.ceylon.eclipse.code.outline.CeylonOutlineBuilder;
import com.redhat.ceylon.eclipse.code.outline.CeylonOutlinePage;
import com.redhat.ceylon.eclipse.code.parse.CeylonParseController;
import com.redhat.ceylon.eclipse.code.parse.CeylonParserScheduler;
import com.redhat.ceylon.eclipse.code.parse.TreeLifecycleListener;
import com.redhat.ceylon.eclipse.code.preferences.CeylonEditorPreferencesPage;
import com.redhat.ceylon.eclipse.code.refactor.RefactorMenuItems;
import com.redhat.ceylon.eclipse.code.search.FindMenuItems;
import com.redhat.ceylon.eclipse.ui.CeylonPlugin;
import com.redhat.ceylon.eclipse.ui.CeylonResources;
/**
* An editor for Ceylon source code.
*
* @author Gavin King
* @author Chris Laffra
* @author Robert M. Fuhrer
*/
public class CeylonEditor extends TextEditor {
//name of the resource bundle holding the editor action labels/descriptions
public static final String MESSAGE_BUNDLE= "com.redhat.ceylon.eclipse.code.editor.EditorActionMessages";
//delay, in ms, between a document change and the background reparse it triggers
private static final int REPARSE_SCHEDULE_DELAY= 100;
//preference keys
public final static String MATCHING_BRACKET= "matchingBrackets";
public final static String MATCHING_BRACKETS_COLOR= "matchingBracketsColor";
public final static String SELECTED_BRACKET= "highlightBracketAtCaretLocation";
public final static String ENCLOSING_BRACKETS= "enclosingBrackets";
public final static String SUB_WORD_NAVIGATION= "subWordNavigation";
public final static String AUTO_FOLD_IMPORTS= "autoFoldImports";
public final static String AUTO_FOLD_COMMENTS= "autoFoldComments";
//background job that reparses the document and notifies model listeners
private CeylonParserScheduler parserScheduler;
//tracks problem markers; listeners update annotations and the title image
private ProblemMarkerManager problemMarkerManager;
//matches bracket pairs -- presumably for highlighting/fence navigation (set up outside this chunk)
private ICharacterPairMatcher bracketMatcher;
//debug actions contributed to the ruler context menu
private ToggleBreakpointAction toggleBreakpointAction;
private IAction enableDisableBreakpointAction;
//folding actions added to the ruler context menu
private FoldingActionGroup foldingActionGroup;
//lazily-created document provider for files inside source archives
private SourceArchiveDocumentProvider sourceArchiveDocumentProvider;
//lazily-created adapter for toggling breakpoints
private ToggleBreakpointAdapter toggleBreakpointTarget;
//lazily-created outline page
private CeylonOutlinePage outlinePage;
//while true, document changes do not schedule background reparses
private boolean backgroundParsingPaused;
//parses this editor's document; caches path/project/model
private CeylonParseController parseController;
//folding (projection) support for the source viewer
private ProjectionSupport projectionSupport;
//non-null while a linked-mode editing session is active in this editor
private LinkedModeModel linkedMode;
private MarkerAnnotationUpdater markerAnnotationUpdater = new MarkerAnnotationUpdater(this);
private ProjectionAnnotationManager projectionAnnotationManager = new ProjectionAnnotationManager(this);
private AnnotationCreator annotationCreator = new AnnotationCreator(this);
ToggleFoldingRunner fFoldingRunner;
/**
 * Creates the editor: installs the Ceylon source viewer configuration
 * and range indicator, enables smart insert mode, and creates the
 * problem marker manager.
 */
public CeylonEditor() {
    setSourceViewerConfiguration(createSourceViewerConfiguration());
    setRangeIndicator(new CeylonRangeIndicator());
    configureInsertMode(SMART_INSERT, true);
    setInsertMode(SMART_INSERT);
    problemMarkerManager= new ProblemMarkerManager();
}
/**
 * Returns the bracket pairs treated as fences by this editor:
 * parentheses, square brackets and braces, each as an
 * {opener, closer} pair.
 */
static String[][] getFences() {
    String[] openers = { "(", "[", "{" };
    String[] closers = { ")", "]", "}" };
    String[][] fences = new String[openers.length][];
    for (int i = 0; i < openers.length; i++) {
        fences[i] = new String[] { openers[i], closers[i] };
    }
    return fences;
}
/**
 * Suspends scheduling of background reparses until
 * {@link #unpauseBackgroundParsing()} is called.
 */
public synchronized void pauseBackgroundParsing() {
    backgroundParsingPaused = true;
}
/**
 * Re-enables scheduling of background reparses.
 */
public synchronized void unpauseBackgroundParsing() {
    backgroundParsingPaused = false;
}
/**
 * @return whether scheduling of background reparses is currently suspended
 */
public synchronized boolean isBackgroundParsingPaused() {
    return backgroundParsingPaused;
}
/**
 * @return whether a linked-mode editing session is active in this editor
 */
public boolean isInLinkedMode() {
    return linkedMode!=null;
}
/**
 * Records the active linked-mode model; pass {@code null} to mark
 * the session as ended.
 */
public void setLinkedMode(LinkedModeModel linkedMode) {
    this.linkedMode = linkedMode;
}
/**
 * @return the active linked-mode model, or {@code null} if none
 */
public LinkedModeModel getLinkedMode() {
    return linkedMode;
}
/**
 * Sub-classes may override this method to extend the behavior provided
 * by the standard {@link CeylonSourceViewerConfiguration}.
 *
 * @return the source viewer configuration to use with this editor
 */
protected CeylonSourceViewerConfiguration createSourceViewerConfiguration() {
    return new CeylonSourceViewerConfiguration(this);
}
/**
 * Publicly exposes the editor's preference store (the inherited
 * accessor is protected).
 */
public IPreferenceStore getPrefStore() {
    return super.getPreferenceStore();
}
/**
 * Adapts this editor to the outline page and the breakpoint-toggling
 * target; all other requests are delegated to the superclass.
 */
public Object getAdapter(@SuppressWarnings("rawtypes") Class required) {
    if (IContentOutlinePage.class.equals(required)) {
        return getOutlinePage();
    }
    else if (IToggleBreakpointsTarget.class.equals(required)) {
        return getToggleBreakpointAdapter();
    }
    else {
        return super.getAdapter(required);
    }
}
/**
 * Returns the breakpoint-toggling adapter, creating it lazily on
 * first request.
 */
public Object getToggleBreakpointAdapter() {
    if (toggleBreakpointTarget != null) {
        return toggleBreakpointTarget;
    }
    toggleBreakpointTarget = new ToggleBreakpointAdapter();
    return toggleBreakpointTarget;
}
/**
 * Lazily creates the outline page, registering it for model updates
 * from the parser scheduler and caret moves from the text widget.
 */
public CeylonOutlinePage getOutlinePage() {
    if (outlinePage == null) {
        outlinePage = new CeylonOutlinePage(getParseController(),
                new CeylonOutlineBuilder(), getCeylonSourceViewer());
        //keep the outline in sync with reparses and with the caret
        parserScheduler.addModelListener(outlinePage);
        getSourceViewer().getTextWidget().addCaretListener(outlinePage);
        //myOutlinePage.update(parseController);
    }
    return outlinePage;
}
/**
 * Creates and registers this editor's actions: content assist,
 * breakpoint toggling from the ruler, block comment handling, the
 * outline/hierarchy/code popups, fence navigation, selection
 * expansion/restoration, statement termination, folding, and icons
 * for the inherited shift and quick fix/assist actions.
 */
protected void createActions() {
    super.createActions();
    final ResourceBundle bundle= getBundle(MESSAGE_BUNDLE);
    Action action= new ContentAssistAction(bundle, "ContentAssistProposal.", this);
    action.setActionDefinitionId(CONTENT_ASSIST_PROPOSALS);
    setAction("ContentAssistProposal", action);
    markAsStateDependentAction("ContentAssistProposal", true);
    IVerticalRuler verticalRuler = getVerticalRuler();
    if (verticalRuler!=null) {
        toggleBreakpointAction= new ToggleBreakpointAction(this,
                getDocumentProvider().getDocument(getEditorInput()),
                verticalRuler);
        //bugfix: register the breakpoint actions themselves -- previously
        //the content assist action ("action") was registered here twice,
        //under a single shared key, so neither breakpoint action was
        //ever reachable
        setAction("ToggleBreakpoint", toggleBreakpointAction);
        enableDisableBreakpointAction= new RulerEnableDisableBreakpointAction(this,
                verticalRuler);
        setAction("RulerEnableDisableBreakpoint", enableDisableBreakpointAction);
    }
    action= new TextOperationAction(bundle, "AddBlockComment.", this,
            CeylonSourceViewer.ADD_BLOCK_COMMENT);
    action.setActionDefinitionId(ADD_BLOCK_COMMENT);
    setAction(ADD_BLOCK_COMMENT, action);
    markAsStateDependentAction(ADD_BLOCK_COMMENT, true);
    markAsSelectionDependentAction(ADD_BLOCK_COMMENT, true);
    action= new TextOperationAction(bundle, "RemoveBlockComment.", this,
            CeylonSourceViewer.REMOVE_BLOCK_COMMENT);
    action.setActionDefinitionId(REMOVE_BLOCK_COMMENT);
    setAction(REMOVE_BLOCK_COMMENT, action);
    markAsStateDependentAction(REMOVE_BLOCK_COMMENT, true);
    markAsSelectionDependentAction(REMOVE_BLOCK_COMMENT, true);
    //read-only operation: available even on non-editable input
    action= new TextOperationAction(bundle, "ShowOutline.", this,
            CeylonSourceViewer.SHOW_OUTLINE, true /* runsOnReadOnly */);
    action.setActionDefinitionId(SHOW_OUTLINE);
    setAction(SHOW_OUTLINE, action);
    action= new TextOperationAction(bundle, "ToggleComment.", this,
            CeylonSourceViewer.TOGGLE_COMMENT);
    action.setActionDefinitionId(TOGGLE_COMMENT);
    setAction(TOGGLE_COMMENT, action);
    action= new TextOperationAction(bundle, "CorrectIndentation.", this,
            CeylonSourceViewer.CORRECT_INDENTATION);
    action.setActionDefinitionId(CORRECT_INDENTATION);
    setAction(CORRECT_INDENTATION, action);
    action= new GotoMatchingFenceAction(this);
    action.setActionDefinitionId(GOTO_MATCHING_FENCE);
    setAction(GOTO_MATCHING_FENCE, action);
    action= new SelectEnclosingAction(this);
    action.setActionDefinitionId(SELECT_ENCLOSING);
    setAction(SELECT_ENCLOSING, action);
    action= new RestorePreviousSelectionAction(this);
    action.setActionDefinitionId(RESTORE_PREVIOUS);
    setAction(RESTORE_PREVIOUS, action);
    action= new TextOperationAction(bundle, "ShowHierarchy.", this,
            CeylonSourceViewer.SHOW_HIERARCHY, true);
    action.setActionDefinitionId(EditorActionIds.SHOW_CEYLON_HIERARCHY);
    setAction(EditorActionIds.SHOW_CEYLON_HIERARCHY, action);
    action= new TextOperationAction(bundle, "ShowCode.", this,
            CeylonSourceViewer.SHOW_CODE, true);
    action.setActionDefinitionId(EditorActionIds.SHOW_CEYLON_CODE);
    setAction(EditorActionIds.SHOW_CEYLON_CODE, action);
    action= new TerminateStatementAction(this);
    action.setActionDefinitionId(EditorActionIds.TERMINATE_STATEMENT);
    setAction(EditorActionIds.TERMINATE_STATEMENT, action);
    foldingActionGroup= new FoldingActionGroup(this, this.getSourceViewer());
    //decorate the inherited shift and quick assist actions with icons
    getAction(ITextEditorActionConstants.SHIFT_LEFT)
            .setImageDescriptor(CeylonPlugin.getInstance().getImageRegistry()
                    .getDescriptor(CeylonResources.SHIFT_LEFT));
    getAction(ITextEditorActionConstants.SHIFT_RIGHT)
            .setImageDescriptor(CeylonPlugin.getInstance().getImageRegistry()
                    .getDescriptor(CeylonResources.SHIFT_RIGHT));
    IAction qaa=getAction(ITextEditorActionConstants.QUICK_ASSIST);
    qaa.setImageDescriptor(CeylonPlugin.getInstance().getImageRegistry()
            .getDescriptor(CeylonResources.QUICK_ASSIST));
    qaa.setText("Quick Fix/Assist");
    installQuickAccessAction();
}
/**
 * Prepends the Ceylon editor preference page to the pages offered by
 * the superclass in the editor context menu.
 */
@Override
protected String[] collectContextMenuPreferencePages() {
    String[] inherited = super.collectContextMenuPreferencePages();
    String[] pages = new String[inherited.length + 1];
    pages[0] = CeylonEditorPreferencesPage.ID;
    for (int i = 0; i < inherited.length; i++) {
        pages[i + 1] = inherited[i];
    }
    return pages;
}
/**
 * Replaces the standard word navigation/selection/deletion actions
 * with sub-word (camel-case) aware variants, and clears the text
 * widget's built-in key bindings so the command framework dispatches
 * the keystrokes to these actions instead.
 */
@Override
protected void createNavigationActions() {
    super.createNavigationActions();
    final StyledText textWidget= getSourceViewer().getTextWidget();
    /*IAction action= new SmartLineStartAction(textWidget, false);
    action.setActionDefinitionId(ITextEditorActionDefinitionIds.LINE_START);
    editor.setAction(ITextEditorActionDefinitionIds.LINE_START, action);
    action= new SmartLineStartAction(textWidget, true);
    action.setActionDefinitionId(ITextEditorActionDefinitionIds.SELECT_LINE_START);
    editor.setAction(ITextEditorActionDefinitionIds.SELECT_LINE_START, action);*/
    //sub-word navigation is enabled by default
    getPreferenceStore().setDefault(SUB_WORD_NAVIGATION, true);
    IAction action = new NavigatePreviousSubWordAction();
    action.setActionDefinitionId(WORD_PREVIOUS);
    setAction(WORD_PREVIOUS, action);
    textWidget.setKeyBinding(SWT.CTRL | SWT.ARROW_LEFT, SWT.NULL);
    action = new NavigateNextSubWordAction();
    action.setActionDefinitionId(WORD_NEXT);
    setAction(WORD_NEXT, action);
    textWidget.setKeyBinding(SWT.CTRL | SWT.ARROW_RIGHT, SWT.NULL);
    action = new SelectPreviousSubWordAction();
    action.setActionDefinitionId(SELECT_WORD_PREVIOUS);
    setAction(SELECT_WORD_PREVIOUS, action);
    textWidget.setKeyBinding(SWT.CTRL | SWT.SHIFT | SWT.ARROW_LEFT, SWT.NULL);
    action = new SelectNextSubWordAction();
    action.setActionDefinitionId(SELECT_WORD_NEXT);
    setAction(SELECT_WORD_NEXT, action);
    textWidget.setKeyBinding(SWT.CTRL | SWT.SHIFT | SWT.ARROW_RIGHT, SWT.NULL);
    action = new DeletePreviousSubWordAction();
    action.setActionDefinitionId(DELETE_PREVIOUS_WORD);
    setAction(DELETE_PREVIOUS_WORD, action);
    textWidget.setKeyBinding(SWT.CTRL | SWT.BS, SWT.NULL);
    //deletion modifies the document, so it must track editability
    markAsStateDependentAction(DELETE_PREVIOUS_WORD, true);
    action = new DeleteNextSubWordAction();
    action.setActionDefinitionId(DELETE_NEXT_WORD);
    setAction(DELETE_NEXT_WORD, action);
    textWidget.setKeyBinding(SWT.CTRL | SWT.DEL, SWT.NULL);
    markAsStateDependentAction(DELETE_NEXT_WORD, true);
}
/**
 * Text navigation action to navigate to the next sub-word boundary.
 * When the sub-word navigation preference is disabled, behaves like
 * the standard word navigation it replaces.
 *
 * @since 3.0
 */
protected abstract class NextSubWordAction extends TextNavigationAction {
    //locates sub-word boundaries in the document text
    protected CeylonWordIterator fIterator= new CeylonWordIterator();
    /**
     * Creates a new next sub-word action.
     *
     * @param code Action code for the default operation. Must be an action code from @see org.eclipse.swt.custom.ST.
     */
    protected NextSubWordAction(int code) {
        super(getSourceViewer().getTextWidget(), code);
    }
    @Override
    public void run() {
        // Fall back to the default (whole-word) behavior when the
        // sub-word navigation preference is disabled
        final IPreferenceStore store= getPreferenceStore();
        if (!store.getBoolean(SUB_WORD_NAVIGATION)) {
            super.run();
            return;
        }
        final ISourceViewer viewer= getSourceViewer();
        final IDocument document= viewer.getDocument();
        try {
            fIterator.setText((CharacterIterator)new DocumentCharacterIterator(document));
            //map the widget caret offset back to a document offset
            int position= widgetOffset2ModelOffset(viewer, viewer.getTextWidget().getCaretOffset());
            if (position == -1)
                return;
            int next= findNextPosition(position);
            //in block selection mode, crossing a line boundary is left
            //to the default action (may navigate into virtual space)
            if (isBlockSelectionModeEnabled() && document.getLineOfOffset(next) != document.getLineOfOffset(position)) {
                super.run(); // may navigate into virtual white space
            } else if (next != BreakIterator.DONE) {
                setCaretPosition(next);
                getTextWidget().showSelection();
                fireSelectionChanged();
            }
        } catch (BadLocationException x) {
            // ignore
        }
    }
    /**
     * Finds the next position after the given position.
     *
     * @param position the current position
     * @return the next position
     */
    protected int findNextPosition(int position) {
        ISourceViewer viewer= getSourceViewer();
        int widget= -1;
        int next= position;
        //skip boundaries with no widget counterpart (presumably offsets
        //hidden by folding -- TODO confirm)
        while (next != BreakIterator.DONE && widget == -1) { // XXX: optimize
            next= fIterator.following(next);
            if (next != BreakIterator.DONE)
                widget= modelOffset2WidgetOffset(viewer, next);
        }
        IDocument document= viewer.getDocument();
        LinkedModeModel model= LinkedModeModel.getModel(document, position);
        //inside a linked-mode session, clamp the jump so the caret
        //stops at linked-position boundaries instead of skipping them
        if (model != null && next != BreakIterator.DONE) {
            LinkedPosition linkedPosition= model.findPosition(new LinkedPosition(document, position, 0));
            if (linkedPosition != null) {
                int linkedPositionEnd= linkedPosition.getOffset() + linkedPosition.getLength();
                if (position != linkedPositionEnd && linkedPositionEnd < next)
                    next= linkedPositionEnd;
            } else {
                LinkedPosition nextLinkedPosition= model.findPosition(new LinkedPosition(document, next, 0));
                if (nextLinkedPosition != null) {
                    int nextLinkedPositionOffset= nextLinkedPosition.getOffset();
                    if (position != nextLinkedPositionOffset && nextLinkedPositionOffset < next)
                        next= nextLinkedPositionOffset;
                }
            }
        }
        return next;
    }
    /**
     * Sets the caret position to the sub-word boundary given with <code>position</code>.
     *
     * @param position Position where the action should move the caret
     */
    protected abstract void setCaretPosition(int position);
}
/**
 * Text navigation action to navigate to the next sub-word.
 *
 * @since 3.0
 */
protected class NavigateNextSubWordAction extends NextSubWordAction {
    /**
     * Creates a new navigate next sub-word action.
     */
    public NavigateNextSubWordAction() {
        super(ST.WORD_NEXT);
    }
    @Override
    protected void setCaretPosition(final int position) {
        //plain navigation: just move the caret, no selection change
        getTextWidget().setCaretOffset(modelOffset2WidgetOffset(getSourceViewer(), position));
    }
}
/**
 * Text operation action to delete the next sub-word.
 *
 * @since 3.0
 */
protected class DeleteNextSubWordAction extends NextSubWordAction implements IUpdate {
    /**
     * Creates a new delete next sub-word action.
     */
    public DeleteNextSubWordAction() {
        super(ST.DELETE_WORD_NEXT);
    }
    @Override
    protected void setCaretPosition(final int position) {
        if (!validateEditorInputState())
            return;
        final ISourceViewer viewer= getSourceViewer();
        StyledText text= viewer.getTextWidget();
        Point widgetSelection= text.getSelection();
        if (isBlockSelectionModeEnabled() && widgetSelection.y != widgetSelection.x) {
            //block selection: extend the widget selection up to the
            //sub-word boundary, then let the widget do the deletion
            final int caret= text.getCaretOffset();
            final int offset= modelOffset2WidgetOffset(viewer, position);
            if (caret == widgetSelection.x)
                text.setSelectionRange(widgetSelection.y, offset - widgetSelection.y);
            else
                text.setSelectionRange(widgetSelection.x, offset - widgetSelection.x);
            text.invokeAction(ST.DELETE_NEXT);
        } else {
            //normal mode: delete the existing selection if there is one,
            //otherwise delete from the caret to the sub-word boundary
            Point selection= viewer.getSelectedRange();
            final int caret, length;
            if (selection.y != 0) {
                caret= selection.x;
                length= selection.y;
            } else {
                caret= widgetOffset2ModelOffset(viewer, text.getCaretOffset());
                length= position - caret;
            }
            try {
                viewer.getDocument().replace(caret, length, ""); //$NON-NLS-1$
            } catch (BadLocationException exception) {
                // Should not happen
            }
        }
    }
    public void update() {
        //deletion is only available when the input is modifiable
        setEnabled(isEditorInputModifiable());
    }
}
/**
 * Text operation action to select the next sub-word.
 *
 * @since 3.0
 */
protected class SelectNextSubWordAction extends NextSubWordAction {
    /**
     * Creates a new select next sub-word action.
     */
    public SelectNextSubWordAction() {
        super(ST.SELECT_WORD_NEXT);
    }
    @Override
    protected void setCaretPosition(final int position) {
        final ISourceViewer viewer= getSourceViewer();
        final StyledText text= viewer.getTextWidget();
        if (text == null || text.isDisposed()) {
            return;
        }
        final Point selection= text.getSelection();
        //anchor the selection at whichever end the caret is NOT on,
        //and extend it to the sub-word boundary
        final int anchor= text.getCaretOffset() == selection.x ?
                selection.y : selection.x;
        final int offset= modelOffset2WidgetOffset(viewer, position);
        text.setSelectionRange(anchor, offset - anchor);
    }
}
/**
 * Text navigation action to navigate to the previous sub-word boundary.
 * When the sub-word navigation preference is disabled, behaves like
 * the standard word navigation it replaces.
 *
 * @since 3.0
 */
protected abstract class PreviousSubWordAction extends TextNavigationAction {
    //locates sub-word boundaries in the document text
    protected CeylonWordIterator fIterator= new CeylonWordIterator();
    /**
     * Creates a new previous sub-word action.
     *
     * @param code Action code for the default operation. Must be an action code from @see org.eclipse.swt.custom.ST.
     */
    protected PreviousSubWordAction(final int code) {
        super(getSourceViewer().getTextWidget(), code);
    }
    @Override
    public void run() {
        // Fall back to the default (whole-word) behavior when the
        // sub-word navigation preference is disabled
        final IPreferenceStore store= getPreferenceStore();
        if (!store.getBoolean(SUB_WORD_NAVIGATION)) {
            super.run();
            return;
        }
        final ISourceViewer viewer= getSourceViewer();
        final IDocument document= viewer.getDocument();
        try {
            fIterator.setText((CharacterIterator)new DocumentCharacterIterator(document));
            //map the widget caret offset back to a document offset
            int position= widgetOffset2ModelOffset(viewer, viewer.getTextWidget().getCaretOffset());
            if (position == -1)
                return;
            int previous= findPreviousPosition(position);
            //in block selection mode, crossing a line boundary is left
            //to the default action (may navigate into virtual space)
            if (isBlockSelectionModeEnabled() &&
                    document.getLineOfOffset(previous)!=document.getLineOfOffset(position)) {
                super.run(); // may navigate into virtual white space
            } else if (previous != BreakIterator.DONE) {
                setCaretPosition(previous);
                getTextWidget().showSelection();
                fireSelectionChanged();
            }
        } catch (BadLocationException x) {
            // ignore - getLineOfOffset failed
        }
    }
    /**
     * Finds the previous position before the given position.
     *
     * @param position the current position
     * @return the previous position
     */
    protected int findPreviousPosition(int position) {
        ISourceViewer viewer= getSourceViewer();
        int widget= -1;
        int previous= position;
        //skip boundaries with no widget counterpart (presumably offsets
        //hidden by folding -- TODO confirm)
        while (previous != BreakIterator.DONE && widget == -1) { // XXX: optimize
            previous= fIterator.preceding(previous);
            if (previous != BreakIterator.DONE)
                widget= modelOffset2WidgetOffset(viewer, previous);
        }
        IDocument document= viewer.getDocument();
        LinkedModeModel model= LinkedModeModel.getModel(document, position);
        //inside a linked-mode session, clamp the jump so the caret
        //stops at linked-position boundaries instead of skipping them
        if (model != null && previous != BreakIterator.DONE) {
            LinkedPosition linkedPosition= model.findPosition(new LinkedPosition(document, position, 0));
            if (linkedPosition != null) {
                int linkedPositionOffset= linkedPosition.getOffset();
                if (position != linkedPositionOffset && previous < linkedPositionOffset)
                    previous= linkedPositionOffset;
            } else {
                LinkedPosition previousLinkedPosition= model.findPosition(new LinkedPosition(document, previous, 0));
                if (previousLinkedPosition != null) {
                    int previousLinkedPositionEnd= previousLinkedPosition.getOffset() + previousLinkedPosition.getLength();
                    if (position != previousLinkedPositionEnd && previous < previousLinkedPositionEnd)
                        previous= previousLinkedPositionEnd;
                }
            }
        }
        return previous;
    }
    /**
     * Sets the caret position to the sub-word boundary given with <code>position</code>.
     *
     * @param position Position where the action should move the caret
     */
    protected abstract void setCaretPosition(int position);
}
/**
 * Text navigation action to navigate to the previous sub-word.
 *
 * @since 3.0
 */
protected class NavigatePreviousSubWordAction extends PreviousSubWordAction {
    /**
     * Creates a new navigate previous sub-word action.
     */
    public NavigatePreviousSubWordAction() {
        super(ST.WORD_PREVIOUS);
    }
    @Override
    protected void setCaretPosition(final int position) {
        //plain navigation: just move the caret, no selection change
        getTextWidget().setCaretOffset(modelOffset2WidgetOffset(getSourceViewer(), position));
    }
}
/**
 * Text operation action to delete the previous sub-word.
 *
 * @since 3.0
 */
protected class DeletePreviousSubWordAction extends PreviousSubWordAction implements IUpdate {
    /**
     * Creates a new delete previous sub-word action.
     */
    public DeletePreviousSubWordAction() {
        super(ST.DELETE_WORD_PREVIOUS);
    }
    @Override
    protected void setCaretPosition(int position) {
        if (!validateEditorInputState())
            return;
        final int length;
        final ISourceViewer viewer= getSourceViewer();
        StyledText text= viewer.getTextWidget();
        Point widgetSelection= text.getSelection();
        if (isBlockSelectionModeEnabled() && widgetSelection.y != widgetSelection.x) {
            //block selection: extend the widget selection back to the
            //sub-word boundary, then let the widget do the deletion
            final int caret= text.getCaretOffset();
            final int offset= modelOffset2WidgetOffset(viewer, position);
            if (caret == widgetSelection.x)
                text.setSelectionRange(widgetSelection.y, offset - widgetSelection.y);
            else
                text.setSelectionRange(widgetSelection.x, offset - widgetSelection.x);
            text.invokeAction(ST.DELETE_PREVIOUS);
        } else {
            //normal mode: delete the existing selection if there is one,
            //otherwise delete from the sub-word boundary to the caret
            Point selection= viewer.getSelectedRange();
            if (selection.y != 0) {
                position= selection.x;
                length= selection.y;
            } else {
                length= widgetOffset2ModelOffset(viewer, text.getCaretOffset()) - position;
            }
            try {
                viewer.getDocument().replace(position, length, ""); //$NON-NLS-1$
            } catch (BadLocationException exception) {
                // Should not happen
            }
        }
    }
    public void update() {
        //deletion is only available when the input is modifiable
        setEnabled(isEditorInputModifiable());
    }
}
/**
 * Text operation action to select the previous sub-word.
 *
 * @since 3.0
 */
protected class SelectPreviousSubWordAction extends PreviousSubWordAction {
    /**
     * Creates a new select previous sub-word action.
     */
    public SelectPreviousSubWordAction() {
        super(ST.SELECT_WORD_PREVIOUS);
    }
    @Override
    protected void setCaretPosition(final int position) {
        final ISourceViewer viewer= getSourceViewer();
        final StyledText text= viewer.getTextWidget();
        if (text == null || text.isDisposed()) {
            return;
        }
        final Point selection= text.getSelection();
        //anchor the selection at whichever end the caret is NOT on,
        //and extend it back to the sub-word boundary
        final int anchor= text.getCaretOffset() == selection.x ?
                selection.y : selection.x;
        final int offset= modelOffset2WidgetOffset(viewer, position);
        text.setSelectionRange(anchor, offset - anchor);
    }
}
/**
 * Activates the plugin's key binding context for this editor.
 */
protected void initializeKeyBindingScopes() {
    setKeyBindingScopes(new String[] { PLUGIN_ID + ".context" });
}
//handler activations for the quick-access popup menus, remembered so
//they can be deactivated in uninstallQuickAccessAction()
private IHandlerActivation fFindQuickAccessHandlerActivation;
private IHandlerActivation fRefactorQuickAccessHandlerActivation;
private IHandlerService fHandlerService;
//menu ids of the refactor and find quick-access popup menus
public static final String REFACTOR_MENU_ID = "com.redhat.ceylon.eclipse.ui.menu.refactorQuickMenu";
public static final String FIND_MENU_ID = "com.redhat.ceylon.eclipse.ui.menu.findQuickMenu";
/**
 * Quick-access popup menu listing the refactoring commands.
 */
private class RefactorQuickAccessAction extends QuickMenuAction {
    public RefactorQuickAccessAction() {
        super(REFACTOR_MENU_ID);
    }
    //populate the popup with the standard refactor menu entries
    protected void fillMenu(IMenuManager menu) {
        for (IContributionItem item:
                new RefactorMenuItems().getContributionItems()) {
            menu.add(item);
        }
    }
}
/**
 * Quick-access popup menu listing the find commands.
 */
private class FindQuickAccessAction extends QuickMenuAction {
    public FindQuickAccessAction() {
        super(FIND_MENU_ID);
    }
    //populate the popup with the standard find menu entries
    protected void fillMenu(IMenuManager menu) {
        for (IContributionItem item:
                new FindMenuItems().getContributionItems()) {
            menu.add(item);
        }
    }
}
/**
 * Activates handlers for the refactor and find quick-access popup
 * menus, remembering each activation so it can be undone in
 * {@link #uninstallQuickAccessAction()}.
 */
private void installQuickAccessAction() {
    fHandlerService= (IHandlerService) getSite().getService(IHandlerService.class);
    if (fHandlerService != null) {
        QuickMenuAction refactorQuickAccessAction= new RefactorQuickAccessAction();
        fRefactorQuickAccessHandlerActivation= fHandlerService.activateHandler(refactorQuickAccessAction.getActionDefinitionId(),
                new ActionHandler(refactorQuickAccessAction));
        QuickMenuAction findQuickAccessAction= new FindQuickAccessAction();
        //bugfix: this was stored into fRefactorQuickAccessHandlerActivation,
        //overwriting the refactor activation (leaking it) and leaving
        //fFindQuickAccessHandlerActivation null at uninstall time
        fFindQuickAccessHandlerActivation= fHandlerService.activateHandler(findQuickAccessAction.getActionDefinitionId(),
                new ActionHandler(findQuickAccessAction));
    }
}
/**
 * Deactivates the quick-access menu handlers installed by
 * {@link #installQuickAccessAction()}. Null-guards each activation:
 * install may not have run (no handler service), or an activation
 * may never have been recorded.
 */
protected void uninstallQuickAccessAction() {
    if (fHandlerService != null) {
        if (fRefactorQuickAccessHandlerActivation != null) {
            fHandlerService.deactivateHandler(fRefactorQuickAccessHandlerActivation);
        }
        if (fFindQuickAccessHandlerActivation != null) {
            fHandlerService.deactivateHandler(fFindQuickAccessHandlerActivation);
        }
    }
}
/**
 * The overview ruler is always shown for Ceylon editors.
 */
protected boolean isOverviewRulerVisible() {
    return true;
}
/**
 * Builds the ruler context menu: debug (breakpoint) actions first,
 * then the inherited entries, then a separator followed by the
 * folding actions.
 */
protected void rulerContextMenuAboutToShow(IMenuManager menu) {
    addDebugActions(menu);
    super.rulerContextMenuAboutToShow(menu);
    menu.appendToGroup(GROUP_RULERS, new Separator());
    String[] foldingActionIds = {
            "FoldingToggle",
            "FoldingExpandAll",
            "FoldingCollapseAll",
            "FoldingCollapseImports",
            "FoldingCollapseComments"
    };
    for (String id: foldingActionIds) {
        menu.appendToGroup(GROUP_RULERS, getAction(id));
    }
}
/**
 * Adds the breakpoint toggle/enable-disable actions (created in
 * createActions when a vertical ruler exists) to the given menu.
 */
private void addDebugActions(IMenuManager menu) {
    menu.add(toggleBreakpointAction);
    menu.add(enableDisableBreakpointAction);
}
/**
 * Sets the given message as error message to this editor's status line.
 *
 * @param msg message to be set
 */
protected void setStatusLineErrorMessage(String msg) {
    IEditorStatusLine statusLine=
            (IEditorStatusLine) getAdapter(IEditorStatusLine.class);
    if (statusLine == null) {
        return;
    }
    statusLine.setMessage(true, msg, null);
}
/**
 * Sets the given message as message to this editor's status line.
 *
 * @param msg message to be set
 * @since 3.0
 */
protected void setStatusLineMessage(String msg) {
    IEditorStatusLine statusLine=
            (IEditorStatusLine) getAdapter(IEditorStatusLine.class);
    if (statusLine == null) {
        return;
    }
    statusLine.setMessage(false, msg, null);
}
/**
 * @return the manager tracking problem markers for this editor
 */
public ProblemMarkerManager getProblemMarkerManager() {
    return problemMarkerManager;
}
//straight delegation; presumably overridden only to make the title
//image updatable from collaborators in this package -- TODO confirm
@Override
protected void setTitleImage(Image titleImage) {
    super.setTitleImage(titleImage);
}
/**
 * Returns the inherited document provider for ordinary inputs, or a
 * lazily-created {@link SourceArchiveDocumentProvider} when the input
 * lives inside a source archive.
 */
public IDocumentProvider getDocumentProvider() {
    if (!isSrcArchive(getEditorInput())) {
        return super.getDocumentProvider();
    }
    //Note: I would prefer to register the
    //document provider in plugin.xml but
    //I don't know how to uniquely identity
    //that a IURIEditorInput is a source
    //archive there
    if (sourceArchiveDocumentProvider == null) {
        sourceArchiveDocumentProvider = new SourceArchiveDocumentProvider();
    }
    return sourceArchiveDocumentProvider;
}
/**
 * @return the source viewer, typed as the CeylonSourceViewer that
 *         this editor's configuration creates
 */
public CeylonSourceViewer getCeylonSourceViewer() {
    return (CeylonSourceViewer) super.getSourceViewer();
}
/**
 * Creates the editor UI, then wires up the background services and
 * theme listeners. The parse controller must exist before the widgets,
 * since parts of their initialization depend on it.
 */
public void createPartControl(Composite parent) {
    // Initialize the parse controller first, since the
    // initialization of other things (like the context
    // help support) might depend on it.
    initializeParseController();
    super.createPartControl(parent);
    initiateServiceControllers();
    updateTitleImage();
    //setSourceFontFromPreference();
    /*((IContextService) getSite().getService(IContextService.class))
        .activateContext(PLUGIN_ID + ".context");*/
    //track theme changes so colors/fonts stay current
    ITheme currentTheme = PlatformUI.getWorkbench().getThemeManager().getCurrentTheme();
    currentTheme.getColorRegistry().addListener(colorChangeListener);
    updateFontAndCaret();
    currentTheme.getFontRegistry().addListener(fontChangeListener);
}
/**
 * Schedules a background reparse after a short delay, unless parsing
 * is paused or the scheduler does not exist yet. Cancelling before
 * rescheduling coalesces rapid successive requests into one parse.
 */
public synchronized void scheduleParsing() {
    if (parserScheduler == null || backgroundParsingPaused) {
        return;
    }
    parserScheduler.cancel();
    parserScheduler.schedule(REPARSE_SCHEDULE_DELAY);
}
/**
 * Creates the parse controller for the current editor input and
 * initializes it with the input's path and project (the project is
 * null for inputs outside the workspace).
 */
private void initializeParseController() {
    IEditorInput input = getEditorInput();
    IFile file = getFile(input);
    IPath path = getPath(input);
    parseController = new CeylonParseController();
    IProject project = null;
    if (file != null && file.exists()) {
        project = file.getProject();
    }
    parseController.initialize(path, project, annotationCreator);
}
//refreshes the editor's title image (on the UI thread) whenever
//problem markers change on the file this editor is showing
private IProblemChangedListener editorIconUpdater = new IProblemChangedListener() {
    @Override
    public void problemsChanged(IResource[] changedResources, boolean isMarkerChange) {
        if (isMarkerChange) {
            IEditorInput input= getEditorInput();
            if (input instanceof IFileEditorInput) { // The editor might be looking at something outside the workspace (e.g. system include files).
                IFileEditorInput fileInput = (IFileEditorInput) input;
                IFile file = fileInput.getFile();
                if (file != null) {
                    for (int i= 0; i<changedResources.length; i++) {
                        if (changedResources[i].equals(file)) {
                            Shell shell= getEditorSite().getShell();
                            if (shell!=null && !shell.isDisposed()) {
                                //marker changes arrive on arbitrary threads;
                                //the title image must be updated on the UI thread
                                shell.getDisplay().syncExec(new Runnable() {
                                    @Override
                                    public void run() {
                                        updateTitleImage();
                                    }
                                });
                            }
                        }
                    }
                }
            }
        }
    }
};
//schedules a delayed background reparse after every document change;
//the cancel+schedule pair coalesces rapid edits into a single parse
private IDocumentListener documentListener = new IDocumentListener() {
    public void documentAboutToBeChanged(DocumentEvent event) {}
    public void documentChanged(DocumentEvent event) {
        //synchronized on the editor, like the other parsing controls
        synchronized (CeylonEditor.this) {
            if (parserScheduler!=null && !backgroundParsingPaused) {
                parserScheduler.cancel();
                parserScheduler.schedule(REPARSE_SCHEDULE_DELAY);
            }
        }
    }
};
//reschedules a background parse after workspace builds (except clean
//builds) -- presumably because a build can change dependencies the
//current model was computed against; TODO confirm
private IResourceChangeListener buildListener = new IResourceChangeListener() {
    public void resourceChanged(IResourceChangeEvent event) {
        if (event.getBuildKind()!=CLEAN_BUILD) {
            scheduleParsing();
        }
    }
};
/**
 * The following listener is intended to detect when the document associated
 * with this editor changes its identity, which happens when, e.g., the
 * underlying resource gets moved or renamed. We need to see when the editor
 * input changes, so we can watch the new document.
 */
private IPropertyListener editorInputPropertyListener = new IPropertyListener() {
    public void propertyChanged(Object source, int propId) {
        if (source == CeylonEditor.this && propId == IEditorPart.PROP_INPUT) {
            IDocument oldDoc= getParseController().getDocument();
            IDocument curDoc= getDocumentProvider().getDocument(getEditorInput());
            if (curDoc!=oldDoc) {
                // Need to unwatch the old document and watch the new document
                oldDoc.removeDocumentListener(documentListener);
                curDoc.addDocumentListener(documentListener);
            }
        }
    }
};
//keeps the parse controller's cached path/project up to date when the
//underlying resource is moved or renamed in the workspace
private IResourceChangeListener moveListener = new IResourceChangeListener() {
    public void resourceChanged(IResourceChangeEvent event) {
        IProject project = parseController.getProject();
        if (project!=null) { //things external to the workspace don't move
            IPath oldWSRelPath = project.getFullPath().append(parseController.getPath());
            IResourceDelta rd = event.getDelta().findMember(oldWSRelPath);
            if (rd != null) {
                if ((rd.getFlags() & IResourceDelta.MOVED_TO) != 0) {
                    // The net effect of the following is to re-initialize() the parse controller with the new path
                    IPath newPath = rd.getMovedToPath();
                    IPath newProjRelPath = newPath.removeFirstSegments(1);
                    String newProjName = newPath.segment(0);
                    //the move may have crossed into a different project
                    IProject proj = project.getName().equals(newProjName) ?
                            project : project.getWorkspace().getRoot()
                                    .getProject(newProjName);
                    // Tell the parse controller about the move - it caches the path
                    // parserScheduler.cancel(); // avoid a race condition if ParserScheduler was starting/in the middle of a run
                    parseController.initialize(newProjRelPath, proj, annotationCreator);
                }
            }
        }
    }
};
//triggers a reparse when problem markers change, so annotations that
//were resolved by changes to other resources disappear
private IProblemChangedListener annotationUpdater = new IProblemChangedListener() {
    public void problemsChanged(IResource[] changedResources,
            boolean isMarkerChange) {
        // Remove annotations that were resolved by changes to
        // other resources.
        // TODO: It would be better to match the markers to the
        // annotations, and decide which annotations to remove.
        scheduleParsing();
    }
};
/**
 * Wires up the listeners and background services that keep the model,
 * annotations and title image in sync with the document, then kicks
 * off the initial background parse.
 */
private void initiateServiceControllers() {
    problemMarkerManager.addListener(annotationUpdater);
    problemMarkerManager.addListener(editorIconUpdater);
    parserScheduler = new CeylonParserScheduler(parseController, this,
            annotationCreator);
    addModelListener(new AdditionalAnnotationCreator(this));
    installProjectionSupport();
    updateProjectionAnnotationManager();
    //only update marker annotations for editable input
    if (isEditable()) {
        addModelListener(markerAnnotationUpdater);
    }
    getSourceViewer().getDocument().addDocumentListener(documentListener);
    addPropertyListener(editorInputPropertyListener);
    getWorkspace().addResourceChangeListener(moveListener, IResourceChangeEvent.POST_CHANGE);
    getWorkspace().addResourceChangeListener(buildListener, IResourceChangeEvent.POST_BUILD);
    //initial parse
    parserScheduler.schedule();
}
/**
 * Installs folding (projection) support on the source viewer, registers
 * every marker-annotation type as summarizable, and enables folding when
 * the {@code EDITOR_FOLDING_ENABLED} preference is set.
 */
private void installProjectionSupport() {
    final CeylonSourceViewer sourceViewer = getCeylonSourceViewer();
    projectionSupport = new ProjectionSupport(sourceViewer, getAnnotationAccess(), getSharedColors());
    MarkerAnnotationPreferences markerAnnotationPreferences = (MarkerAnnotationPreferences) getAdapter(MarkerAnnotationPreferences.class);
    if (markerAnnotationPreferences != null) {
        @SuppressWarnings("unchecked")
        List<AnnotationPreference> annPrefs = markerAnnotationPreferences.getAnnotationPreferences();
        // Enhanced for instead of a manual Iterator: identical traversal,
        // clearer intent.
        for (AnnotationPreference pref : annPrefs) {
            Object annotationType = pref.getAnnotationType();
            if (annotationType instanceof String) {
                projectionSupport.addSummarizableAnnotationType((String) annotationType);
            }
        }
    }
    projectionSupport.install();
    IPreferenceStore store = EditorsUI.getPreferenceStore();
    store.setDefault(EDITOR_FOLDING_ENABLED, true);
    if (store.getBoolean(EDITOR_FOLDING_ENABLED)) {
        sourceViewer.doOperation(ProjectionViewer.TOGGLE);
    }
    sourceViewer.addProjectionListener(projectionAnnotationManager);
}
/**
 * Keeps the projection annotation manager registered as a model listener
 * only while the viewer is actually in projection (folding) mode.
 */
private void updateProjectionAnnotationManager() {
    CeylonSourceViewer viewer = getCeylonSourceViewer();
    if (viewer == null) {
        return;
    }
    if (viewer.isProjectionMode()) {
        addModelListener(projectionAnnotationManager);
    }
    else if (projectionAnnotationManager != null) {
        removeModelListener(projectionAnnotationManager);
    }
}
/**
 * Reacts to preference changes: when the folding preference flips,
 * re-syncs the projection listener and re-runs folding the next time the
 * editor becomes visible.
 */
@Override
protected void handlePreferenceStoreChanged(PropertyChangeEvent event) {
    super.handlePreferenceStoreChanged(event);
    String changedProperty = event.getProperty();
    if (!EDITOR_FOLDING_ENABLED.equals(changedProperty)) {
        return;
    }
    updateProjectionAnnotationManager();
    new ToggleFoldingRunner(this).runWhenNextVisible();
}
/**
 * Refreshes the editor tab icon from the underlying file, when the current
 * editor input resolves to a workspace file.
 */
public void updateTitleImage() {
    IFile inputFile = getFile(getEditorInput());
    if (inputFile == null) {
        return;
    }
    setTitleImage(getImageForFile(inputFile));
}
/**
 * Tears down everything {@code initiateServiceControllers()} set up:
 * marker/resource listeners, the parser scheduler, projection support and
 * theme-registry listeners. Listener fields are nulled so a late callback
 * cannot re-register them after disposal.
 */
public void dispose() {
    if (editorIconUpdater!=null) {
        problemMarkerManager.removeListener(editorIconUpdater);
        editorIconUpdater = null;
    }
    if (annotationUpdater!=null) {
        problemMarkerManager.removeListener(annotationUpdater);
        annotationUpdater = null;
    }
    /*if (fActionBars!=null) {
        fActionBars.dispose();
        fActionBars = null;
    }*/
    //document is null here
    /*if (documentListener!=null) {
        getSourceViewer().getDocument()
        .removeDocumentListener(documentListener);
    }*/
    if (buildListener!=null) {
        getWorkspace().removeResourceChangeListener(buildListener);
        buildListener = null;
    }
    if (moveListener!=null) {
        getWorkspace().removeResourceChangeListener(moveListener);
        moveListener = null;
    }
    if (toggleBreakpointAction!=null) {
        toggleBreakpointAction.dispose(); // this holds onto the IDocument
    }
    if (foldingActionGroup!=null) {
        foldingActionGroup.dispose();
    }
    if (projectionSupport!=null) {
        projectionSupport.dispose();
        projectionSupport = null;
    }
    if (parserScheduler!=null) {
        parserScheduler.cancel(); // avoid unnecessary work after the editor is asked to close down
    }
    parserScheduler= null;
    parseController = null;
    uninstallQuickAccessAction();
    super.dispose();
    /*if (fResourceListener != null) {
        ResourcesPlugin.getWorkspace().removeResourceChangeListener(fResourceListener);
    }*/
    // Theme listeners were added elsewhere in this class; remove them last,
    // after the superclass has finished its own teardown.
    ITheme currentTheme = PlatformUI.getWorkbench().getThemeManager().getCurrentTheme();
    currentTheme.getColorRegistry().removeListener(colorChangeListener);
    currentTheme.getFontRegistry().removeListener(fontChangeListener);
}
// Repaints the editor text whenever one of this plugin's theme colors changes.
private IPropertyChangeListener colorChangeListener = new IPropertyChangeListener() {
    @Override
    public void propertyChange(PropertyChangeEvent event) {
        if (event.getProperty().startsWith(PLUGIN_ID + ".theme.color.")) {
            getSourceViewer().invalidateTextPresentation();
        }
    }
};
// Re-applies the themed editor font when the font preference changes.
// NOTE(review): unlike colorChangeListener this field is package-private —
// probably an accidental omission of 'private'; confirm no other class uses it.
IPropertyChangeListener fontChangeListener = new IPropertyChangeListener() {
    @Override
    public void propertyChange(PropertyChangeEvent event) {
        if (event.getProperty().equals(TEXT_FONT_PREFERENCE)) {
            updateFontAndCaret();
        }
    }
};
// Theme-registry key under which the editor font is registered.
private static final String TEXT_FONT_PREFERENCE = PLUGIN_ID + ".editorFont";

/** Returns the editor font from the current workbench theme's font registry. */
public static Font getFont() {
    ITheme currentTheme = PlatformUI.getWorkbench().getThemeManager().getCurrentTheme();
    return currentTheme.getFontRegistry().get(TEXT_FONT_PREFERENCE);
}
/**
 * Applies the themed font to the text widget, then reflectively invokes
 * {@code AbstractTextEditor.updateCaret()} (not accessible from here) —
 * presumably to rebuild the caret for the new font metrics; failures are
 * non-fatal and only logged.
 */
private void updateFontAndCaret() {
    getSourceViewer().getTextWidget().setFont(getFont());
    try {
        Method updateCaretMethod = AbstractTextEditor.class.getDeclaredMethod("updateCaret");
        updateCaretMethod.setAccessible(true);
        updateCaretMethod.invoke(this);
    }
    catch (Exception e) {
        // Best-effort: a failure here only leaves a stale-looking caret.
        e.printStackTrace();
    }
}
/**
 * Creates the CeylonSourceViewer with an overview ruler and decoration
 * support, and schedules a vertical-ruler redraw on every caret move so
 * position-dependent ruler annotations stay in sync with the cursor.
 */
protected final SourceViewer createSourceViewer(Composite parent, IVerticalRuler ruler, int styles) {
    fAnnotationAccess = getAnnotationAccess();
    fOverviewRuler = createOverviewRuler(getSharedColors());
    SourceViewer viewer= new CeylonSourceViewer(this, parent, ruler,
            getOverviewRuler(), isOverviewRulerVisible(), styles);
    // ensure decoration support has been created and configured.
    getSourceViewerDecorationSupport(viewer);
    viewer.getTextWidget().addCaretListener(new CaretListener() {
        @Override
        public void caretMoved(CaretEvent event) {
            Object adapter = getAdapter(IVerticalRulerInfo.class);
            if (adapter instanceof CompositeRuler) {
                // redraw initializer annotations according to cursor position
                ((CompositeRuler) adapter).update();
            }
        }
    });
    return viewer;
}
/**
 * Adds bracket-matching before the superclass configures the standard
 * decoration support.
 */
protected void configureSourceViewerDecorationSupport(SourceViewerDecorationSupport support) {
    installBracketMatcher(support);
    super.configureSourceViewerDecorationSupport(support);
}
/**
 * Configures matching-bracket highlighting: seeds the preference defaults
 * (including the theme-derived highlight color) and installs a character
 * pair matcher built from this editor's fence pairs.
 */
private void installBracketMatcher(SourceViewerDecorationSupport support) {
    IPreferenceStore store = getPreferenceStore();
    store.setDefault(MATCHING_BRACKET, true);
    ITheme currentTheme = PlatformUI.getWorkbench().getThemeManager().getCurrentTheme();
    Color color = currentTheme.getColorRegistry()
            .get(PLUGIN_ID + ".theme.matchingBracketsColor");
    // Preference stores colors as "r,g,b".
    store.setDefault(MATCHING_BRACKETS_COLOR,
            color.getRed() + "," + color.getGreen() + "," + color.getBlue());
    // (a duplicate setDefault(MATCHING_BRACKET, true) was removed — it is
    // already set above)
    store.setDefault(ENCLOSING_BRACKETS, false);
    store.setDefault(SELECTED_BRACKET, false);
    String[][] fences = getFences();
    if (fences != null) {
        // Flatten the open/close fence pairs into one character sequence.
        StringBuilder pairs = new StringBuilder();
        for (String[] fence : fences) {
            pairs.append(fence[0]);
            pairs.append(fence[1]);
        }
        bracketMatcher = new DefaultCharacterPairMatcher(pairs.toString().toCharArray());
        support.setCharacterPairMatcher(bracketMatcher);
        support.setMatchingCharacterPainterPreferenceKeys(
                MATCHING_BRACKET, MATCHING_BRACKETS_COLOR,
                SELECTED_BRACKET, ENCLOSING_BRACKETS);
    }
}
/**
 * Returns the bracket matcher created in installBracketMatcher(); may be
 * null before decoration support is configured or if there are no fences.
 */
public ICharacterPairMatcher getBracketMatcher() {
    return bracketMatcher;
}
// protected void doSetInput(IEditorInput input) throws CoreException {
// // Catch CoreExceptions here, since it's possible that things like IOExceptions occur
// // while retrieving the input's contents, e.g., if the given input doesn't exist.
// try {
// super.doSetInput(input);
// catch (CoreException e) {
// if (e.getCause() instanceof IOException) {
// throw new CoreException(new Status(IStatus.ERROR, CeylonPlugin.PLUGIN_ID,
// 0, "Unable to read source text", e.getStatus().getException()));
// setInsertMode(SMART_INSERT);
/**
 * Switches the editor input. The unconfigure/reconfigure cycle around the
 * super call is what makes syntax highlighting work inside the structured
 * compare viewer (see inline comments).
 */
@Override
protected void doSetInput(IEditorInput input) throws CoreException {
    //the following crazy stuff seems to be needed in
    //order to get syntax highlighting in structured
    //compare viewer
    CeylonSourceViewer sourceViewer = getCeylonSourceViewer();
    if (sourceViewer!=null) {
        // uninstall & unregister preference store listener
        getSourceViewerDecorationSupport(sourceViewer).uninstall();
        sourceViewer.unconfigure();
        //setPreferenceStore(createCombinedPreferenceStore(input));
        // install & register preference store listener
        sourceViewer.configure(getSourceViewerConfiguration());
        getSourceViewerDecorationSupport(sourceViewer).install(getPreferenceStore());
    }
    super.doSetInput(input);
    //have to do this or we get a funny-looking caret
    setInsertMode(SMART_INSERT);
}
/**
 * Add a Model listener to this editor. Any time the underlying AST is recomputed, the listener is notified.
 * Delegates to the parser scheduler created in initiateServiceControllers().
 *
 * @param listener the listener to notify of Model changes
 */
public void addModelListener(TreeLifecycleListener listener) {
    parserScheduler.addModelListener(listener);
}
/**
 * Remove a Model listener from this editor.
 * Delegates to the parser scheduler created in initiateServiceControllers().
 *
 * @param listener the listener to remove
 */
public void removeModelListener(TreeLifecycleListener listener) {
    parserScheduler.removeModelListener(listener);
}
/**
 * Returns the currently selected text, or the empty string if the
 * selection range cannot be resolved against the document.
 */
public String getSelectionText() {
    IRegion selection = getSelection();
    IDocument doc = getDocumentProvider().getDocument(getEditorInput());
    try {
        return doc.get(selection.getOffset(), selection.getLength());
    }
    catch (BadLocationException e) {
        e.printStackTrace();
        return "";
    }
}
/** Returns the current text selection as an (offset, length) region. */
public IRegion getSelection() {
    ITextSelection ts= (ITextSelection) getSelectionProvider().getSelection();
    return new Region(ts.getOffset(), ts.getLength());
}
/**
 * Returns the signed current selection.
 * The length will be negative if the resulting selection
 * is right-to-left (RtoL).
 * The selection offset is model based.
 */
public IRegion getSignedSelection() {
    ISourceViewer sourceViewer = getSourceViewer();
    StyledText text= sourceViewer.getTextWidget();
    Point selection= text.getSelectionRange();
    // Caret at the start of the range means the user selected right-to-left:
    // flip the range so the returned length comes out negative.
    if (text.getCaretOffset() == selection.x) {
        selection.x= selection.x + selection.y;
        selection.y= -selection.y;
    }
    // Map the widget-relative offset to a model offset (they differ when
    // projection/folding collapses regions).
    selection.x= widgetOffset2ModelOffset(sourceViewer, selection.x);
    return new Region(selection.x, selection.y);
}
/** This editor always supports the find/replace target. */
public boolean canPerformFind() {
    return true;
}
/** Returns the parse controller for this editor; null after dispose(). */
public CeylonParseController getParseController() {
    return parseController;
}
/** Identifies this editor by its input name, for logging and debugging. */
@Override
public String toString() {
    StringBuilder description = new StringBuilder("Ceylon Editor for ");
    description.append(getEditorInput().getName());
    return description.toString();
}
// Reads the global editors preference that controls code folding.
boolean isFoldingEnabled() {
    return EditorsUI.getPreferenceStore().getBoolean(EDITOR_FOLDING_ENABLED);
}
}
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.mllib.classification.SVMModel;
import org.apache.spark.mllib.classification.SVMWithSGD;
import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics;
import org.apache.spark.mllib.linalg.Vector;
import org.apache.spark.mllib.linalg.Vectors;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.mllib.util.MLUtils;
import scala.Tuple2;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.io.File;
import java.io.IOException;
import java.util.*;
public class ExpSVM {
private static final Logger log = Logger.getLogger(ExpSVM.class.getName());
private String[] args = null;
private static Options options = new Options();
public static void main(String [] args) throws IOException {
long start_time = System.currentTimeMillis();
System.out.println("Hello Spark");
SparkConf conf = new SparkConf().setAppName("Simple Application");
SparkContext sc = new SparkContext(conf);
init(args);
CommandLine cmd = parse(args);
String trainingDataSet = cmd.getOptionValue("train");
String testingDataSet = cmd.getOptionValue("test");
int numIterations = Integer.parseInt(cmd.getOptionValue("iterations"));
double stepSize = Double.parseDouble(cmd.getOptionValue("stepSize"));
double regParam = Double.parseDouble(cmd.getOptionValue("regParam"));
if((cmd.getOptionValue("split"))!=null){
double splitRatio = Double.parseDouble(cmd.getOptionValue("split"));
System.out.println("Split Ratio: " + splitRatio);
ArrayList<JavaRDD<LabeledPoint>> dataList = dataSplit(trainingDataSet, sc, splitRatio);
JavaRDD<LabeledPoint> training = dataList.get(0);
JavaRDD<LabeledPoint> testing = dataList.get(1);
task(sc, training, testing, numIterations, stepSize, regParam);
}else{
task(sc, trainingDataSet, testingDataSet, numIterations, stepSize, regParam);
}
}
public static void task(SparkContext sc, String trainingDataSet, String testingDataSet) throws IOException {
String datasource = "ijcnn1";
String path = "file:"+trainingDataSet; //"file:/home/vibhatha/data/sparksvm/ijcnn1/ijcnn1_train_spark.txt";
String test_path = "file:"+testingDataSet;
JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc, path).toJavaRDD();
JavaRDD<LabeledPoint> testdata = MLUtils.loadLibSVMFile(sc, test_path).toJavaRDD();
ArrayList<LabeledPoint> newrdd = new ArrayList<>();
LabeledPoint pos = new LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0));
Double label = pos.label();
Vector features = pos.features();
System.out.println(label);
System.out.println(features);
JavaRDD<LabeledPoint> parsedData = data.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
JavaRDD<LabeledPoint> parsedTestData = testdata.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
// Split initial RDD into two... [60% training data, 40% testing data].
JavaRDD<LabeledPoint> training = parsedData;
training.cache();
JavaRDD<LabeledPoint> test = parsedTestData;
//printRDD(training);
//printRDD(test);
train(sc,training, test);
}
public static void task(SparkContext sc, String trainingDataSet, String testingDataSet, int numIterations, double stepSize, double regParam) throws IOException {
String datasource = "ijcnn1";
String path = "file:"+trainingDataSet; //"file:/home/vibhatha/data/sparksvm/ijcnn1/ijcnn1_train_spark.txt";
String test_path = "file:"+testingDataSet;
JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc, path).toJavaRDD();
JavaRDD<LabeledPoint> testdata = MLUtils.loadLibSVMFile(sc, test_path).toJavaRDD();
ArrayList<LabeledPoint> newrdd = new ArrayList<>();
LabeledPoint pos = new LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0));
Double label = pos.label();
Vector features = pos.features();
System.out.println(label);
System.out.println(features);
JavaRDD<LabeledPoint> parsedData = data.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
JavaRDD<LabeledPoint> parsedTestData = testdata.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
// Split initial RDD into two... [60% training data, 40% testing data].
JavaRDD<LabeledPoint> training = parsedData;
training.cache();
JavaRDD<LabeledPoint> test = parsedTestData;
//printRDD(training);
//printRDD(test);
train(sc,training, test, numIterations, stepSize, regParam);
}
public static void task(SparkContext sc, JavaRDD<LabeledPoint> trainingDataSet, JavaRDD<LabeledPoint> testingDataSet, int numIterations, double stepSize, double regParam) throws IOException {
String datasource = "ijcnn1";
String path = "file:"+trainingDataSet; //"file:/home/vibhatha/data/sparksvm/ijcnn1/ijcnn1_train_spark.txt";
String test_path = "file:"+testingDataSet;
JavaRDD<LabeledPoint> data = trainingDataSet;
JavaRDD<LabeledPoint> testdata = testingDataSet;
ArrayList<LabeledPoint> newrdd = new ArrayList<>();
LabeledPoint pos = new LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0));
Double label = pos.label();
Vector features = pos.features();
System.out.println(label);
System.out.println(features);
JavaRDD<LabeledPoint> parsedData = data.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
JavaRDD<LabeledPoint> parsedTestData = testdata.map(line -> {
Double label2 = line.label();
Vector feature = line.features();
if(label2==-1.0){
label2=0.0;
}
return new LabeledPoint(label2, feature);
});
// Split initial RDD into two... [60% training data, 40% testing data].
JavaRDD<LabeledPoint> training = parsedData;
training.cache();
JavaRDD<LabeledPoint> test = parsedTestData;
//printRDD(training);
//printRDD(test);
train(sc,training, test, numIterations, stepSize, regParam);
}
public static void train(SparkContext sc,JavaRDD<LabeledPoint> training, JavaRDD<LabeledPoint> test ) throws IOException {
// Run training algorithm to build the model.
int numIterations = 100;
long start_time = System.currentTimeMillis();
final SVMModel model = SVMWithSGD.train(training.rdd(), numIterations, 0.01, 0.01);
//model.clearThreshold();
long end_time = System.currentTimeMillis();
long elapsed_time = end_time - start_time;
String svmModelPath= "model/svm/exp1";
// Save and load model
File file = new File(svmModelPath);
if(file.exists()){
FileUtils.deleteDirectory(file);
}
model.save(sc, svmModelPath);
SVMModel sameModel = SVMModel.load(sc, svmModelPath);
JavaRDD<Vector> testFeatures = test.map(line -> {
Vector feature = line.features();
return feature;
});
JavaRDD<Double> testLabels = test.map(line -> {
Double label = line.label();
return label;
});
JavaRDD<Double> predictions = sameModel.predict(testFeatures);
// double prediction = sameModel.predict(testFeatures.first());
List<Double> predictionVals = predictions.collect();
List<Double> expectedVals = testLabels.collect();
double accuracy = predictionAccuracy(predictionVals, expectedVals);
System.out.println("Accuracy : "+accuracy+", Training Time : "+elapsed_time/1000.0 );
}
public static void train(SparkContext sc,JavaRDD<LabeledPoint> training, JavaRDD<LabeledPoint> test, int numIterations, double stepSize, double regParam) throws IOException {
// Run training algorithm to build the model.
numIterations = 100;
long start_time = System.currentTimeMillis();
final SVMModel model = SVMWithSGD.train(training.rdd(), numIterations, stepSize, regParam);
//model.clearThreshold();
long end_time = System.currentTimeMillis();
long elapsed_time = end_time - start_time;
String svmModelPath= "model/svm/exp1";
// Save and load model
File file = new File(svmModelPath);
if(file.exists()){
FileUtils.deleteDirectory(file);
}
model.save(sc, svmModelPath);
SVMModel sameModel = SVMModel.load(sc, svmModelPath);
JavaRDD<Vector> testFeatures = test.map(line -> {
Vector feature = line.features();
return feature;
});
JavaRDD<Double> testLabels = test.map(line -> {
Double label = line.label();
return label;
});
JavaRDD<Double> predictions = sameModel.predict(testFeatures);
// double prediction = sameModel.predict(testFeatures.first());
List<Double> predictionVals = predictions.collect();
List<Double> expectedVals = testLabels.collect();
double accuracy = predictionAccuracy(predictionVals, expectedVals);
System.out.println("Accuracy : "+accuracy+", Training Time : "+elapsed_time/1000.0 );
}
public static double predictionAccuracy(List<Double> predictions, List<Double> tests){
double acc = 0.0;
int count = 0;
int matches = 0;
for (Double d: predictions){
//System.out.println(d+","+tests.get(count));
if(d.intValue() == tests.get(count).intValue()){
matches++;
}
count++;
}
acc = (double)matches / (double)(predictions.size())*100.0;
return acc;
}
public static void printRDD(JavaRDD<LabeledPoint> parsedData){
parsedData.foreach(x->{
Double label1 = x.label();
Vector feature = x.features();
LabeledPoint newLabelPoint = new LabeledPoint(label1, feature);
System.out.println(newLabelPoint.label());
});
}
public static void init(String[] args) {
options.addOption("h", "help", false, "show help.");
options.addOption("train", "training data set path", true, "Set training data set . ex: -train train_data");
options.addOption("test", "testing data set path", true, "Set testing data set . ex: -test test_data");
options.addOption("iterations", "iteration number", true, "Set number of iterations . ex: -iterations 100");
options.addOption("stepSize", "step size", true, "Set step size . ex: -stepSize 0.01");
options.addOption("regParam", "regularization parameter", true, "Set testing data set. ex: -regParam 0.02");
options.addOption("split", "Data splitting ratio", true, "Training and Testing data splitting. ex: -split 0.8 (80% of training and 20% of testing)");
options.getOption("test").setOptionalArg(true);
options.getOption("split").setOptionalArg(true);
}
public static CommandLine parse(String [] args) {
CommandLineParser parser = new BasicParser();
CommandLine cmd = null;
try {
cmd = parser.parse(options, args);
if (cmd.hasOption("h"))
help();
if (cmd.hasOption("train")) {
log.log(Level.INFO, "Training data set -train=" + cmd.getOptionValue("train"));
// Whatever you want to do with the setting goes here
} else {
log.log(Level.SEVERE, "Missing -train option");
help();
}
if (cmd.hasOption("test")) {
log.log(Level.INFO, "Testing data set -test=" + cmd.getOptionValue("test"));
// Whatever you want to do with the setting goes here
}
if (cmd.hasOption("iterations")) {
log.log(Level.INFO, "Iterations -iterations=" + cmd.getOptionValue("iterations"));
// Whatever you want to do with the setting goes here
} else {
log.log(Level.SEVERE, "Missing -iterations option");
help();
}
if (cmd.hasOption("stepSize")) {
log.log(Level.INFO, "Step Size -stepSize=" + cmd.getOptionValue("stepSize"));
// Whatever you want to do with the setting goes here
} else {
log.log(Level.SEVERE, "Missing -stepSize option");
help();
}
if (cmd.hasOption("regParam")) {
log.log(Level.INFO, "Regularization Parameter -regParam=" + cmd.getOptionValue("regParam"));
// Whatever you want to do with the setting goes here
} else {
log.log(Level.SEVERE, "Missing -regParam option");
help();
}
if (cmd.hasOption("split")) {
log.log(Level.INFO, "Split Parameter -split=" + cmd.getOptionValue("split"));
// Whatever you want to do with the setting goes here
}
} catch (ParseException e) {
log.log(Level.SEVERE, "Failed to parse comand line properties", e);
help();
}
return cmd;
}
private static void help() {
// This prints out some help
HelpFormatter formater = new HelpFormatter();
formater.printHelp("ExpSVM", options);
System.exit(0);
}
public static ArrayList<JavaRDD<LabeledPoint>> dataSplit(String path, SparkContext sc, double ratio){
JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc, path).toJavaRDD();
ArrayList<JavaRDD<LabeledPoint>> list = new ArrayList<>();
JavaRDD<LabeledPoint> training = data.sample(false, ratio, 11L);
training.cache();
JavaRDD<LabeledPoint> test = data.subtract(training);
list.add(training);
list.add(test);
return list;
}
} |
package io.car.server.mongo.dao;
import java.util.Collections;
import java.util.Set;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.jmkgreen.morphia.Key;
import com.github.jmkgreen.morphia.query.Query;
import com.github.jmkgreen.morphia.query.UpdateResults;
import com.google.inject.Inject;
import io.car.server.core.dao.UserDao;
import io.car.server.core.entities.User;
import io.car.server.core.entities.Users;
import io.car.server.core.util.Pagination;
import io.car.server.mongo.MongoDB;
import io.car.server.mongo.entity.MongoUser;
/**
* @author Christian Autermann <autermann@uni-muenster.de>
* @author Arne de Wall
*/
public class MongoUserDao extends AbstractMongoDao<String, MongoUser, Users>
        implements UserDao {
    private static final Logger log = LoggerFactory
            .getLogger(MongoUserDao.class);
    // Sibling DAOs used to cascade user deletion. Injected via setters —
    // NOTE(review): presumably to break a circular Guice dependency; confirm
    // against the module configuration.
    private MongoTrackDao trackDao;
    private MongoMeasurementDao measurementDao;
    private MongoGroupDao groupDao;

    @Inject
    public MongoUserDao(MongoDB mongoDB) {
        super(MongoUser.class, mongoDB);
    }

    @Inject
    public void setTrackDao(MongoTrackDao trackDao) {
        this.trackDao = trackDao;
    }

    @Inject
    public void setMeasurementDao(MongoMeasurementDao measurementDao) {
        this.measurementDao = measurementDao;
    }

    @Inject
    public void setGroupDao(MongoGroupDao groupDao) {
        this.groupDao = groupDao;
    }

    /** Looks up a user by exact name. */
    @Override
    public MongoUser getByName(final String name) {
        return q().field(MongoUser.NAME).equal(name).get();
    }

    /** Looks up a user by exact mail address. */
    @Override
    public MongoUser getByMail(String mail) {
        return q().field(MongoUser.MAIL).equal(mail).get();
    }

    /** Pages through all users, ordered by creation date. */
    @Override
    public Users get(Pagination p) {
        return fetch(q().order(MongoUser.CREATION_DATE), p);
    }

    @Override
    public MongoUser create(User user) {
        return save(user);
    }

    @Override
    public MongoUser save(User user) {
        MongoUser mu = (MongoUser) user;
        save(mu);
        return mu;
    }

    /**
     * Deletes a user with cascade: detaches the user from tracks,
     * measurements and groups, pulls the user out of every other user's
     * friend list, then deletes the user document itself.
     */
    @Override
    public void delete(User u) {
        MongoUser user = (MongoUser) u;
        trackDao.removeUser(user);
        measurementDao.removeUser(user);
        groupDao.removeUser(user);
        // Remove this user's reference from all FRIENDS arrays in one update.
        Key<MongoUser> userRef = reference(user);
        UpdateResults<MongoUser> result = update(
                q().field(MongoUser.FRIENDS).hasThisElement(userRef),
                up().removeAll(MongoUser.FRIENDS, userRef));
        if (result.getHadError()) {
            log.error("Error removing user {} as friend: {}",
                    u, result.getError());
        } else {
            log.debug("Removed user {} from {} friend lists",
                    u, result.getUpdatedCount());
        }
        delete(user.getName());
    }

    @Override
    protected Users createPaginatedIterable(Iterable<MongoUser> i, Pagination p,
            long count) {
        return Users.from(i).withPagination(p).withElements(count).build();
    }

    /** Resolves the user's friend references into full user entities. */
    @Override
    public Users getFriends(User user) {
        Iterable<MongoUser> friends;
        Set<Key<MongoUser>> friendRefs = getFriendRefs(user);
        if (friendRefs != null) {
            friends = dereference(MongoUser.class, friendRefs);
        } else {
            friends = Collections.emptyList();
        }
        return Users.from(friends).build();
    }

    /**
     * Returns the named friend of {@code user}, or null if no such
     * friendship exists.
     */
    @Override
    public User getFriend(User user, String friendName) {
        Set<Key<MongoUser>> friendRefs = getFriendRefs(user);
        if (friendRefs != null) {
            Key<MongoUser> friendRef = reference(new MongoUser(friendName));
            // NOTE(review): updateKind() presumably normalizes the key's
            // collection kind so contains()/dereference work — confirm.
            getMapper().updateKind(friendRef);
            if (friendRefs.contains(friendRef)) {
                return dereference(MongoUser.class, friendRef);
            }
        }
        return null;
    }

    /** Adds {@code friend} to the user's friend list and bumps lastModified. */
    @Override
    public void addFriend(User user, User friend) {
        MongoUser g = (MongoUser) user;
        update(g.getName(), up()
                .add(MongoUser.FRIENDS, reference(friend))
                .set(MongoUser.LAST_MODIFIED, new DateTime()));
    }

    /** Removes {@code friend} from the user's friend list and bumps lastModified. */
    @Override
    public void removeFriend(User user, User friend) {
        MongoUser g = (MongoUser) user;
        update(g.getName(), up()
                .removeAll(MongoUser.FRIENDS, reference(friend))
                .set(MongoUser.LAST_MODIFIED, new DateTime()));
    }

    // Exclude the (potentially large) FRIENDS array when listing users.
    @Override
    protected Users fetch(Query<MongoUser> q, Pagination p) {
        return super.fetch(q.retrievedFields(false, MongoUser.FRIENDS), p);
    }

    /**
     * Returns the user's friend references, re-querying the database when
     * the in-memory entity does not carry them (list queries strip the
     * FRIENDS field — see fetch()). Never returns null.
     */
    public Set<Key<MongoUser>> getFriendRefs(User user) {
        MongoUser u = (MongoUser) user;
        Set<Key<MongoUser>> friendRefs = u.getFriends();
        if (friendRefs == null) {
            MongoUser userWithFriends = q()
                    .field(MongoUser.NAME).equal(u.getName())
                    .retrievedFields(true, MongoUser.FRIENDS).get();
            if (userWithFriends != null) {
                friendRefs = userWithFriends.getFriends();
            }
        }
        if (friendRefs == null) {
            friendRefs = Collections.emptySet();
        }
        return friendRefs;
    }
}
package org.telegram.android;
import android.app.ActivityOptions;
import android.app.FragmentManager;
import android.content.*;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.os.*;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.util.TypedValue;
import android.view.*;
import android.widget.FrameLayout;
import android.widget.Toast;
import com.actionbarsherlock.view.MenuItem;
import com.google.analytics.tracking.android.EasyTracker;
import org.telegram.android.activity.ViewImageActivity;
import org.telegram.android.activity.ViewImagesActivity;
import org.telegram.android.base.SmileyActivity;
import org.telegram.android.core.model.media.TLLocalFileLocation;
import org.telegram.android.fragments.*;
import org.telegram.android.fragments.interfaces.FragmentResultController;
import org.telegram.android.fragments.interfaces.RootController;
import org.telegram.android.kernel.KernelsLoader;
import org.telegram.android.log.Logger;
import org.telegram.android.screens.FragmentScreenController;
import org.telegram.android.screens.RootControllerHolder;
import org.telegram.integration.TestIntegration;
import java.util.ArrayList;
public class StartActivity extends SmileyActivity implements FragmentResultController, RootControllerHolder {
// Set after the intro tour has been shown once, so it is not shown again
// during this process's lifetime.
public static boolean isGuideShown = false;
private static final String TAG = "StartActivity";
private static final int REQUEST_OPEN_IMAGE = 300;
// Intent actions handled by this activity.
public static final String ACTION_OPEN_SETTINGS = "org.telegram.android.OPEN_SETTINGS";
public static final String ACTION_OPEN_CHAT = "org.telegram.android.OPEN";
// Whether the action bar is currently shown; restored from saved state in onCreate().
private boolean barVisible;
private BroadcastReceiver logoutReceiver;
// Result values passed between fragments — presumably via the
// FragmentResultController contract this class implements (verify usage).
private int lastResultCode = -1;
private Object lastResultData;
// Drives fragment-based screen navigation for the whole app.
private FragmentScreenController controller;
private boolean isStarted = false;
/** Exposes the screen controller to fragments via RootControllerHolder. */
@Override
public RootController getRootController() {
    return controller;
}
/**
 * Activity entry point: configures the window and action bar, restores the
 * screen-controller state when the activity is being recreated, and
 * otherwise runs the first-time app initialization (doInitApp).
 */
public void onCreate(Bundle savedInstanceState) {
    long start = SystemClock.uptimeMillis();
    super.onCreate(savedInstanceState);
    getWindow().setBackgroundDrawableResource(R.drawable.transparent);
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
        // NOTE(review): RGB_565 on pre-ICS devices — presumably a
        // memory/performance tradeoff on old hardware; confirm before changing.
        getWindow().setFormat(PixelFormat.RGB_565);
    }
    setBarBg(true);
    getSupportActionBar().setLogo(R.drawable.st_bar_logo);
    getSupportActionBar().setIcon(R.drawable.st_bar_logo);
    getSupportActionBar().setDisplayUseLogoEnabled(true);
    setWindowContentOverlayCompat();
    setContentView(R.layout.activity_main);
    // Restore navigation state if the activity is being recreated.
    Bundle savedState = null;
    if (savedInstanceState != null && savedInstanceState.containsKey("screen_controller")) {
        savedState = savedInstanceState.getBundle("screen_controller");
    }
    controller = new FragmentScreenController(this, savedState);
    isStarted = false;
    if (savedInstanceState != null) {
        // Recreation: only restore bar visibility; the framework restores
        // the fragments themselves.
        barVisible = savedInstanceState.getBoolean("barVisible");
        if (barVisible) {
            showBar();
        } else {
            hideBar();
        }
    } else {
        // Fresh start: choose the initial screen.
        doInitApp(true);
    }
    Logger.d(TAG, "Kernel: Activity loaded in " + (SystemClock.uptimeMillis() - start) + " ms");
}
/**
 * Re-applies the theme's windowContentOverlay as the content view's
 * foreground drawable. Only runs on API level 18, matching the original
 * platform-specific workaround.
 */
private void setWindowContentOverlayCompat() {
    if (Build.VERSION.SDK_INT != 18) {
        return;
    }
    View content = findViewById(android.R.id.content);
    if (!(content instanceof FrameLayout)) {
        return;
    }
    // Resolve the windowContentOverlay attribute from the current theme.
    TypedValue overlay = new TypedValue();
    boolean resolved = getTheme().resolveAttribute(
            android.R.attr.windowContentOverlay, overlay, true);
    if (resolved && overlay.resourceId != 0) {
        // Valid resource: use it as the content view's foreground.
        ((FrameLayout) content).setForeground(
                getResources().getDrawable(overlay.resourceId));
    }
}
/**
 * Chooses and shows the initial screen based on application state, checked
 * in priority order: kernel upgrade, account recovery, what's-new pages,
 * tour/login, and finally the dialog list. Shows or hides the action bar
 * to match the chosen screen.
 *
 * @param firstAttempt true on the very first call (fragment is added);
 *        false on later re-entry (fragment is replaced)
 */
public void doInitApp(boolean firstAttempt) {
    // 1. Kernels not loaded yet: show the upgrade/migration screen.
    if (!application.getKernelsLoader().isLoaded()) {
        FragmentTransaction transaction = getSupportFragmentManager().beginTransaction();
        if (firstAttempt) {
            transaction.add(R.id.fragmentContainer, new UpgradeFragment(), "recoverFragment");
        } else {
            transaction.replace(R.id.fragmentContainer, new UpgradeFragment(), "recoverFragment");
        }
        transaction.commit();
        hideBar();
        return;
    }
    // 2. Logged in but our own user record is missing: recovery screen.
    if (application.getKernel().getAuthKernel().isLoggedIn()) {
        if (application.getEngine().getUser(application.getCurrentUid()) == null) {
            FragmentTransaction transaction = getSupportFragmentManager().beginTransaction();
            if (firstAttempt) {
                transaction.add(R.id.fragmentContainer, new RecoverFragment(), "recoverFragment");
            } else {
                transaction.replace(R.id.fragmentContainer, new RecoverFragment(), "recoverFragment");
            }
            transaction.commit();
            hideBar();
            return;
        }
    }
    // 3. Upgraded from an older build: show the what's-new pages.
    WhatsNewFragment.Definition[] definitions = prepareWhatsNew();
    if (definitions.length != 0) {
        application.getKernel().sendEvent("show_whats_new");
        getSupportFragmentManager().beginTransaction()
                .add(R.id.fragmentContainer, new WhatsNewFragment(definitions), "whatsNewFragment")
                .commit();
        hideBar();
        return;
    }
    // 4. Not logged in: show the tour once per process, then the login screen.
    if (!application.isLoggedIn()) {
        if (!isGuideShown) {
            isGuideShown = true;
            FragmentTransaction transaction = getSupportFragmentManager().beginTransaction();
            if (firstAttempt) {
                transaction.add(R.id.fragmentContainer, new TourFragment(), "tourFragment");
            } else {
                transaction.replace(R.id.fragmentContainer, new TourFragment(), "tourFragment");
            }
            transaction.commit();
            hideBar();
            return;
        }
        FragmentTransaction transaction = getSupportFragmentManager().beginTransaction();
        if (firstAttempt) {
            transaction.add(R.id.fragmentContainer, new AuthFragment(), "loginFragment");
        } else {
            transaction.replace(R.id.fragmentContainer, new AuthFragment(), "loginFragment");
        }
        transaction.commit();
        showBar();
        return;
    }
    // 5. Normal path: open the dialog list and process any launch intent.
    controller.openDialogs(true);
    onNewIntent(getIntent());
    showBar();
}
/**
 * Builds the "what's new" pages to show after an app upgrade by comparing the
 * previously installed version code against hard-coded release thresholds.
 *
 * @return pages to display, newest release first; empty when nothing applies
 *         (fresh install or already up to date)
 */
private WhatsNewFragment.Definition[] prepareWhatsNew() {
    int prevVersionCode = application.getVersionHolder().getPrevVersionInstalled();
    if (prevVersionCode == 0) {
        // Fresh install: nothing is "new" to this user.
        return new WhatsNewFragment.Definition[0];
    }
    ArrayList<WhatsNewFragment.Definition> pages = new ArrayList<WhatsNewFragment.Definition>();
    // Current version
    if (prevVersionCode < 997) {
        String[] gifItems = new String[]{
                getString(R.string.whats_new_gif_0),
                getString(R.string.whats_new_gif_1),
                getString(R.string.whats_new_gif_2),
                getString(R.string.whats_new_gif_3),
                getString(R.string.whats_new_gif_4),
                getString(R.string.whats_new_gif_5),
        };
        pages.add(new WhatsNewFragment.Definition(getString(R.string.whats_new_gif_title),
                gifItems, getString(R.string.whats_new_gif_hint)));
    }
    if (prevVersionCode < 732) {
        String[] contactItems = new String[]{
                getString(R.string.whats_contacts_0),
                getString(R.string.whats_contacts_1),
                getString(R.string.whats_contacts_2),
        };
        pages.add(new WhatsNewFragment.Definition(getString(R.string.whats_contacts_title),
                contactItems, null));
    }
    if (prevVersionCode < 672) {
        String[] designItems = new String[]{
                getString(R.string.whats_new_design_0),
                getString(R.string.whats_new_design_1),
                getString(R.string.whats_new_design_2),
                getString(R.string.whats_new_design_3),
        };
        pages.add(new WhatsNewFragment.Definition(getString(R.string.whats_new_design_title),
                designItems, null));
    }
    if (prevVersionCode < 517) {
        String[] arabicItems = new String[]{
                getString(R.string.whats_new_arabic_0),
                getString(R.string.whats_new_arabic_1),
                getString(R.string.whats_new_arabic_2),
        };
        pages.add(new WhatsNewFragment.Definition(getString(R.string.whats_new_arabic_title),
                arabicItems, getString(R.string.whats_new_arabic_hint)));
        String[] secretItems = new String[]{
                getString(R.string.whats_new_secret_0),
                getString(R.string.whats_new_secret_1),
                getString(R.string.whats_new_secret_2),
                getString(R.string.whats_new_secret_3)
        };
        pages.add(new WhatsNewFragment.Definition(getString(R.string.whats_new_secret_title),
                secretItems, getString(R.string.whats_new_secret_hint)));
    }
    return pages.toArray(new WhatsNewFragment.Definition[pages.size()]);
}
/** Forwards configuration changes (rotation, locale, ...) to the UI kernel once kernels are loaded. */
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (application.getKernelsLoader().isLoaded()) {
application.getUiKernel().onConfigurationChanged();
}
}
/** Intents delivered to the already-running activity are routed through the same handler used at startup. */
@Override
protected void onNewIntent(Intent intent) {
super.onNewIntent(intent);
onIntent(intent);
}
/**
 * Dispatches an incoming Intent: internal open-chat / open-settings actions,
 * and external ACTION_SEND / ACTION_SEND_MULTIPLE share targets.
 *
 * Fix over the previous version: a single image/video share arriving without
 * an EXTRA_STREAM no longer NPEs on getParcelableExtra(...).toString() — it
 * shows the same "Unsupported content" toast the document path already used.
 */
private void onIntent(Intent intent) {
    String action = intent.getAction();
    if (ACTION_OPEN_CHAT.equals(action)) {
        int peerId = intent.getIntExtra("peerId", 0);
        int peerType = intent.getIntExtra("peerType", 0);
        getRootController().openDialog(peerType, peerId);
    }
    if (ACTION_OPEN_SETTINGS.equals(action)) {
        getRootController().openSettings();
    }
    if (Intent.ACTION_SEND.equals(action)) {
        handleSend(intent);
    }
    if (Intent.ACTION_SEND_MULTIPLE.equals(action)) {
        handleSendMultiple(intent);
    }
}

/** Handles a single-item share (ACTION_SEND), dispatching on the MIME type. */
private void handleSend(Intent intent) {
    String type = intent.getType();
    if (type != null && type.equals("text/plain")) {
        // Plain text carries its payload in EXTRA_TEXT, not EXTRA_STREAM.
        getRootController().sendText(intent.getStringExtra(Intent.EXTRA_TEXT));
        return;
    }
    if (!intent.hasExtra(Intent.EXTRA_STREAM)) {
        Toast.makeText(this, "Unsupported content", Toast.LENGTH_SHORT).show();
        return;
    }
    String uri = intent.getParcelableExtra(Intent.EXTRA_STREAM).toString();
    if (type != null && type.startsWith("image/")) {
        getRootController().sendImage(uri);
    } else if (type != null && type.startsWith("video/")) {
        getRootController().sendVideo(uri);
    } else {
        // Unknown or missing MIME type: fall back to a document upload.
        getRootController().sendDoc(uri);
    }
}

/** Handles a multi-item share (ACTION_SEND_MULTIPLE): images in bulk, everything else as documents. */
private void handleSendMultiple(Intent intent) {
    if (!intent.hasExtra(Intent.EXTRA_STREAM)) {
        Toast.makeText(this, "Unsupported content", Toast.LENGTH_SHORT).show();
        return;
    }
    ArrayList<Parcelable> uris = intent.getParcelableArrayListExtra(Intent.EXTRA_STREAM);
    String[] uriStrings = new String[uris.size()];
    for (int i = 0; i < uriStrings.length; i++) {
        uriStrings[i] = uris.get(i).toString();
    }
    String type = intent.getType();
    if (type != null && type.startsWith("image/")) {
        getRootController().sendImages(uriStrings);
    } else {
        getRootController().sendDocs(uriStrings);
    }
}
/** Persists action-bar visibility and the screen controller's navigation state across restarts. */
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putBoolean("barVisible", barVisible);
outState.putBundle("screen_controller", controller.saveState());
}
/** Opens the message-image gallery; the result may carry a forward request (see onActivityResult). */
public void openImage(int mid, int peerType, int peerId) {
startActivityForResult(ViewImagesActivity.createIntent(mid, peerType, peerId, this), REQUEST_OPEN_IMAGE);
}
/** Opens a single image by its local file location; no result is expected. */
public void openImage(TLLocalFileLocation location) {
startActivity(ViewImageActivity.createIntent(location, this));
}
/**
 * Opens the message-image gallery with a thumbnail scale-up animation on
 * API 16+ (where ActivityOptions supports it); plain start otherwise.
 */
public void openImageAnimated(int mid, int peerType, int peerId, View view, Bitmap preview, int x, int y) {
    Intent viewerIntent = ViewImagesActivity.createIntent(mid, peerType, peerId, this);
    if (Build.VERSION.SDK_INT >= 16) {
        Bundle options = ActivityOptions.makeThumbnailScaleUpAnimation(view, preview, x, y).toBundle();
        startActivityForResult(viewerIntent, REQUEST_OPEN_IMAGE, options);
    } else {
        startActivityForResult(viewerIntent, REQUEST_OPEN_IMAGE);
    }
}
/**
 * Handles results from the image viewer: a non-zero "forward_mid" extra
 * means the user asked to forward that message.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode != REQUEST_OPEN_IMAGE || resultCode != RESULT_OK || data == null) {
        return;
    }
    int forwardMid = data.getIntExtra("forward_mid", 0);
    if (forwardMid != 0) {
        getRootController().forwardMessage(forwardMid);
    }
}
/**
 * Reacts to a logout detected while the app UI is up: clears the whole back
 * stack and shows the tour screen. No-op before kernels are loaded or while
 * still logged in, and when an entry screen (auth/tour) is already showing.
 */
private void checkLogout() {
    if (!application.getKernelsLoader().isLoaded() || application.isLoggedIn()) {
        return;
    }
    FragmentManager fm = getSupportFragmentManager();
    Fragment current = fm.findFragmentById(R.id.fragmentContainer);
    boolean onEntryScreen = current instanceof AuthFragment || current instanceof TourFragment;
    if (onEntryScreen) {
        return;
    }
    fm.popBackStackImmediate(null, FragmentManager.POP_BACK_STACK_INCLUSIVE);
    fm.beginTransaction()
            .replace(R.id.fragmentContainer, new TourFragment())
            .commit();
    fm.executePendingTransactions();
    hideBar();
}
/** Invoked after a successful login: show the dialog list (non-initial mode) and the action bar. */
public void onSuccessAuth() {
controller.openDialogs(false);
showBar();
}
/** Replaces the current root fragment with the login screen and shows the action bar. */
public void openApp() {
    FragmentTransaction transaction = getSupportFragmentManager().beginTransaction();
    transaction.replace(R.id.fragmentContainer, new AuthFragment(), "loginFragment");
    transaction.commit();
    showBar();
}
/**
 * Records the back press as an analytics event, then lets the screen
 * controller consume it; the activity finishes when the controller does
 * not handle it (or no controller exists yet).
 */
@Override
public void onBackPressed() {
    if (application.getKernelsLoader().isLoaded()) {
        application.getKernel().sendEvent("app_back");
    }
    if (controller == null || !controller.doSystemBack()) {
        finish();
    }
}
/** Makes the action bar visible and records the state for onSaveInstanceState. */
public void showBar() {
    barVisible = true;
    getSupportActionBar().show();
}
/** Hides the action bar and records the state for onSaveInstanceState. */
public void hideBar() {
    barVisible = false;
    getSupportActionBar().hide();
}
/** Routes the action-bar home/up button to the screen controller; everything else goes to super. */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() != android.R.id.home) {
        return super.onOptionsItemSelected(item);
    }
    controller.doUp();
    return true;
}
/** Google Analytics (EasyTracker) session start; paired with activityStop in onStop. */
@Override
protected void onStart() {
super.onStart();
EasyTracker.getInstance(this).activityStart(this);
}
/**
 * Foreground setup: notifies the UI kernel, registers a receiver for logout
 * broadcasts (unregistered again in onPause), re-checks the logout state,
 * and refreshes the action-bar background on re-entry.
 */
@Override
protected void onResume() {
    super.onResume();
    if (application.getKernelsLoader().isLoaded()) {
        application.getUiKernel().onAppResume(this);
    }
    logoutReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            checkLogout();
        }
    };
    // IntentFilter(String) is equivalent to new IntentFilter() + addAction().
    registerReceiver(logoutReceiver, new IntentFilter("org.telegram.android.ACTION_LOGOUT"));
    checkLogout();
    setBarBg(!isStarted);
    isStarted = true;
    TestIntegration.initActivity(this);
}
/** Mirrors onResume: notifies the UI kernel and stops listening for logout broadcasts. */
@Override
protected void onPause() {
super.onPause();
if (application.getKernelsLoader().isLoaded()) {
application.getUiKernel().onAppPause();
}
unregisterReceiver(logoutReceiver);
}
/** Google Analytics (EasyTracker) session stop; paired with activityStart in onStart. */
@Override
protected void onStop() {
super.onStop();
EasyTracker.getInstance(this).activityStop(this);
}
/**
 * Stores an ad-hoc (code, data) result for later retrieval via
 * getResultCode()/getResultData().
 * NOTE(review): this is a custom result channel separate from the standard
 * Activity result mechanism — presumably consumed by hosted fragments; confirm.
 */
@Override
public void setResult(int resultCode, Object data) {
this.lastResultCode = resultCode;
this.lastResultData = data;
}
/** @return the code last stored via {@link #setResult(int, Object)} */
@Override
public int getResultCode() {
return lastResultCode;
}
/** @return the payload last stored via {@link #setResult(int, Object)} */
@Override
public Object getResultData() {
return lastResultData;
}
} |
package org.eclipse.xtext.validation.impl;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.xtext.Assignment;
import org.eclipse.xtext.GrammarUtil;
import org.eclipse.xtext.RuleCall;
import org.eclipse.xtext.parsetree.reconstr.ITokenSerializer.IValueSerializer;
import org.eclipse.xtext.parsetree.reconstr.ITransientValueService;
import org.eclipse.xtext.util.Pair;
import org.eclipse.xtext.validation.IAssignmentQuantityAllocator;
import org.eclipse.xtext.validation.IAssignmentQuantityIntervalProvider;
import org.eclipse.xtext.validation.IConcreteSyntaxConstraintProvider.ConstraintType;
import org.eclipse.xtext.validation.IConcreteSyntaxConstraintProvider.ISyntaxConstraint;
import org.eclipse.xtext.validation.IConcreteSyntaxDiagnosticProvider;
import org.eclipse.xtext.validation.IConcreteSyntaxDiagnosticProvider.IConcreteSyntaxDiagnostic;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
/**
* @author Moritz Eysholdt - Initial contribution and API
*/
public class AssignmentQuantityAllocator implements IAssignmentQuantityAllocator {
/**
 * Mutable value holder pairing each syntax constraint (assignment) and each
 * EMF feature of one EObject with a usage count.
 */
protected class Quantities implements IQuantities {
// quantity per assignment constraint; absent/negative means "undecided"
protected Map<ISyntaxConstraint, Integer> assignmentQuants = Maps.newHashMap();
// the semantic object these quantities describe
protected EObject delegate;
// quantity per structural feature
protected Map<EStructuralFeature, Integer> featureQuants = Maps.newHashMap();
public Quantities(EObject delegate) {
super();
this.delegate = delegate;
}
public Map<ISyntaxConstraint, Integer> getAssignmentQuantities() {
return assignmentQuants;
}
// Normalizes unset/negative entries to -1 ("unknown").
public Integer getAssignmentQuantity(ISyntaxConstraint assignement) {
Integer i = assignmentQuants.get(assignement);
return i == null || i < 0 ? -1 : i;
}
public EObject getEObject() {
return delegate;
}
public Map<EStructuralFeature, Integer> getFeatureQuantities() {
return featureQuants;
}
// Normalizes unset/negative entries to -1 ("unknown").
public Integer getFeatureQuantity(EStructuralFeature feat) {
Integer i = featureQuants.get(feat);
return i == null || i < 0 ? -1 : i;
}
// Groups the known assignment constraints by the EMF feature they assign to.
public Map<EStructuralFeature, Collection<ISyntaxConstraint>> groupByFeature() {
Multimap<EStructuralFeature, ISyntaxConstraint> map = Multimaps.newHashMultimap();
for (ISyntaxConstraint e : assignmentQuants.keySet())
map.put(e.getAssignmentFeature(delegate.eClass()), e);
return map.asMap();
}
public void setAssignmentQuantity(ISyntaxConstraint assignement, int quantity) {
assignmentQuants.put(assignement, quantity);
}
public void setFeatureQuantity(EStructuralFeature feature, int quantity) {
featureQuants.put(feature, quantity);
}
@Override
public String toString() {
return toString(null);
}
// Debug rendering: each assignment gets a ":quantity" postfix and, when
// 'minmax' is given, a "<min,max>" interval ("*" = unbounded maximum);
// printed on the smallest constraint subtree containing all assignments.
public String toString(Map<ISyntaxConstraint, Pair<Integer, Integer>> minmax) {
Map<ISyntaxConstraint, String> postfix = Maps.newHashMap();
for (Map.Entry<ISyntaxConstraint, Integer> e : assignmentQuants.entrySet()) {
String s = ":" + e.getValue();
if (minmax != null && minmax.containsKey(e.getKey())) {
Pair<Integer, Integer> p = minmax.get(e.getKey());
s += "<" + p.getFirst() + "," + (p.getSecond() == Integer.MAX_VALUE ? "*" : p.getSecond()) + ">";
}
postfix.put(e.getKey(), s);
}
Iterator<ISyntaxConstraint> i = assignmentQuants.keySet().iterator();
if (!i.hasNext())
return "";
ISyntaxConstraint root = i.next();
while (i.hasNext())
root = root.findCommonContainer(i.next());
return root.toString(postfix);
}
}
@Inject
protected IConcreteSyntaxDiagnosticProvider diagnosticProvider;
@Inject
protected IAssignmentQuantityIntervalProvider intervalProvider;
@Inject
protected ITransientValueService transSrvc;
@Inject
protected IValueSerializer valueSerializer;
// True when the feature's current value may be treated as transient:
// enum-typed attributes always may; otherwise the value must be serializable
// via a contained rule call. NOTE(review): only the first rule call of the
// first constraint is consulted — presumably representative; confirm.
protected boolean allowTransient(EObject obj, EStructuralFeature feature, Collection<ISyntaxConstraint> constraint) {
if (feature.getEType() instanceof EEnum)
return true;
Object value = obj.eGet(feature);
List<RuleCall> ruleCalls = GrammarUtil.containedRuleCalls(constraint.iterator().next().getGrammarElement());
if (ruleCalls.isEmpty())
return false;
return valueSerializer.isValid(obj, ruleCalls.get(0), value, null);
}
// Recursively collects all ASSIGNMENT constraints under 'ele' that apply to
// obj's EClass, keyed by the feature they assign to; reports a diagnostic
// when the assigned feature does not exist on the EClass.
protected void collectAssignments(ISyntaxConstraint rule, EObject obj, ISyntaxConstraint ele,
Multimap<EStructuralFeature, ISyntaxConstraint> assignments, List<IConcreteSyntaxDiagnostic> acceptor) {
if (ele.getSemanticTypesToCheck() != null && !ele.getSemanticTypesToCheck().contains(obj.eClass()))
return;
if (ele.getType() == ConstraintType.ASSIGNMENT) {
EStructuralFeature f = obj.eClass().getEStructuralFeature(
((Assignment) ele.getGrammarElement()).getFeature());
if (f == null)
acceptor.add(diagnosticProvider.createFeatureMissingDiagnostic(rule, obj, ele, Collections
.<ISyntaxConstraint> emptySet()));
else
assignments.put(f, ele);
}
for (ISyntaxConstraint e : ele.getContents())
collectAssignments(rule, obj, e, assignments, acceptor);
}
// Factory hook so subclasses can supply a specialized Quantities.
protected Quantities createQuantities(EObject obj) {
return new Quantities(obj);
}
/**
 * Allocates, for 'obj' under the given rule constraint, how often each
 * assignment must be used. Diagnostics for missing features/assignments are
 * added to 'acceptor'. Returns null when a feature is served by several
 * assignments and no unique allocation could be decided here.
 */
public IQuantities getAssignmentQuantities(EObject obj, ISyntaxConstraint rule,
List<IConcreteSyntaxDiagnostic> acceptor) {
Multimap<EStructuralFeature, ISyntaxConstraint> assignments = Multimaps.newHashMultimap();
collectAssignments(rule, obj, rule, assignments, acceptor);
// Map<EStructuralFeature, Integer> quantities = Maps.newHashMap();
Quantities quants = createQuantities(obj);
// Phase 1: record per-feature quantities; complain about values that have
// no assignment to be serialized through.
for (EStructuralFeature f : obj.eClass().getEAllStructuralFeatures()) {
int quantity = getFeatureQuantity(obj, f);
if (quantity > 0 && !assignments.containsKey(f))
acceptor.add(diagnosticProvider.createAssignmentMissingDiagnostic(rule, obj, f, Collections
.<ISyntaxConstraint> emptySet()));
else
quants.setFeatureQuantity(f, quantity);
}
// Phase 2: directly allocate unambiguous features; defer features that are
// transient-capable or served by more than one assignment.
Multimap<EStructuralFeature, ISyntaxConstraint> multipleAssignments = Multimaps.newHashMultimap();
Multimap<EStructuralFeature, ISyntaxConstraint> allowTransients = Multimaps.newHashMultimap();
for (Map.Entry<EStructuralFeature, Integer> f : quants.getFeatureQuantities().entrySet()) {
Collection<ISyntaxConstraint> ass = assignments.get(f.getKey());
if (ass.isEmpty())
continue;
boolean allowTransient = f.getKey() instanceof EAttribute && !f.getKey().isMany() && f.getValue() == 0
&& allowTransient(obj, f.getKey(), ass);
boolean multiNeeded = ass.size() > 1 && f.getValue() != 0;
if (allowTransient)
allowTransients.putAll(f.getKey(), ass);
if (multiNeeded)
multipleAssignments.putAll(f.getKey(), ass);
if (!allowTransient && !multiNeeded)
for (ISyntaxConstraint a : ass)
quants.setAssignmentQuantity(a, f.getValue());
}
if (multipleAssignments.isEmpty() && allowTransients.isEmpty())
return quants;
// Phase 3: transient-capable features get quantity 1 when the constraints'
// combined minimum demands a value, else 0.
for (Map.Entry<EStructuralFeature, Collection<ISyntaxConstraint>> e : allowTransients.asMap().entrySet()) {
int min = 0;
for (ISyntaxConstraint x : e.getValue())
min += intervalProvider.getMin(quants, x, Sets.<ISyntaxConstraint> newHashSet());
int val = min > 0 ? 1 : 0;
quants.setFeatureQuantity(e.getKey(), val);
if (e.getValue().size() == 1)
quants.setAssignmentQuantity(e.getValue().iterator().next(), val);
}
// System.out.println("AllowTransientsQuantities: " + quants.toString());
if (multipleAssignments.isEmpty())
return quants;
return null;
}
// Number of values to serialize for 'feat': many-valued features count
// their non-transient elements (element-wise when the service asks for it,
// otherwise all-or-nothing based on element 0); single features yield 0 or 1.
public int getFeatureQuantity(EObject obj, EStructuralFeature feat) {
if (feat.isMany()) {
int count = 0, max = ((List<?>) obj.eGet(feat)).size();
if (transSrvc.isCheckElementsIndividually(obj, feat)) {
for (int i = 0; i < max; i++)
if (!transSrvc.isTransient(obj, feat, i))
count++;
return count;
}
return transSrvc.isTransient(obj, feat, 0) ? 0 : max;
}
return transSrvc.isTransient(obj, feat, 0) ? 0 : 1;
}
}
package org.jkiss.dbeaver.model.impl.jdbc.dbc;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.swt.graphics.Image;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.ext.ui.IObjectImageProvider;
import org.jkiss.dbeaver.model.DBPDataSource;
import org.jkiss.dbeaver.model.DBPDataSourceInfo;
import org.jkiss.dbeaver.model.DBPObject;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.exec.DBCAttributeMetaData;
import org.jkiss.dbeaver.model.exec.DBCStatement;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.model.struct.rdb.DBSCatalog;
import org.jkiss.dbeaver.model.struct.rdb.DBSSchema;
import org.jkiss.dbeaver.model.struct.rdb.DBSTable;
import org.jkiss.dbeaver.runtime.VoidProgressMonitor;
import org.jkiss.utils.CommonUtils;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* JDBCColumnMetaData
*/
public class JDBCColumnMetaData implements DBCAttributeMetaData, IObjectImageProvider
{
static final Log log = LogFactory.getLog(JDBCColumnMetaData.class);
private JDBCResultSetMetaData resultSetMeta;
private int index;
private boolean notNull;
private long displaySize;
private String label;
private String name;
private int precision;
private int scale;
private String catalogName;
private String schemaName;
private String tableName;
private int type;
private String typeName;
private boolean readOnly;
private boolean writable;
private JDBCTableMetaData tableMetaData;
private DBSEntityAttribute tableColumn;
JDBCColumnMetaData(JDBCResultSetMetaData resultSetMeta, int index)
throws SQLException
{
this.resultSetMeta = resultSetMeta;
DBPObject rsSource = this.resultSetMeta.getResultSet().getSource();
DBSObject dataContainer = rsSource instanceof DBCStatement ? ((DBCStatement)rsSource).getDataContainer() : null;
DBSTable ownerTable = null;
if (dataContainer instanceof DBSTable) {
ownerTable = (DBSTable)dataContainer;
}
this.index = index;
ResultSetMetaData metaData = resultSetMeta.getJdbcMetaData();
this.label = metaData.getColumnLabel(index);
this.name = metaData.getColumnName(index);
boolean hasData = false;
String fetchedTableName = null;
try {
fetchedTableName = metaData.getTableName(index);
} catch (SQLException e) {
log.debug(e);
}
String fetchedCatalogName = null;
try {
fetchedCatalogName = metaData.getCatalogName(index);
} catch (SQLException e) {
log.debug(e);
}
String fetchedSchemaName = null;
try {
fetchedSchemaName = metaData.getSchemaName(index);
} catch (SQLException e) {
log.debug(e);
}
// Check for tables name
// Sometimes [DBSPEC: Informix] it contains schema/catalog name inside
if (!CommonUtils.isEmpty(fetchedTableName) && CommonUtils.isEmpty(fetchedCatalogName) && CommonUtils.isEmpty(fetchedSchemaName)) {
final DBPDataSource dataSource = resultSetMeta.getResultSet().getContext().getDataSource();
final DBPDataSourceInfo dsInfo = dataSource.getInfo();
if (!DBUtils.isQuotedIdentifier(dataSource, fetchedTableName)) {
final String catalogSeparator = dsInfo.getCatalogSeparator();
final int catDivPos = fetchedTableName.indexOf(catalogSeparator);
if (catDivPos != -1 && (dsInfo.getCatalogUsage() & DBPDataSourceInfo.USAGE_DML) != 0) {
// Catalog in table name - extract it
fetchedCatalogName = fetchedTableName.substring(0, catDivPos);
fetchedTableName = fetchedTableName.substring(catDivPos + catalogSeparator.length());
}
final String structSeparator = dsInfo.getStructSeparator();
final int schemaDivPos = fetchedTableName.indexOf(structSeparator);
if (schemaDivPos != -1 && (dsInfo.getSchemaUsage() & DBPDataSourceInfo.USAGE_DML) != 0) {
// Schema in table name - extract it
fetchedSchemaName = fetchedTableName.substring(0, schemaDivPos);
fetchedTableName = fetchedTableName.substring(schemaDivPos + structSeparator.length());
}
}
}
if (ownerTable != null) {
// Get column using void monitor because all columns MUST be already read
try {
this.tableColumn = ownerTable.getAttribute(VoidProgressMonitor.INSTANCE, name);
}
catch (DBException e) {
log.warn(e);
}
if (this.tableColumn != null) {
this.notNull = this.tableColumn.isRequired();
this.displaySize = this.tableColumn.getMaxLength();
DBSObject tableParent = ownerTable.getParentObject();
DBSObject tableGrandParent = tableParent == null ? null : tableParent.getParentObject();
this.catalogName = tableParent instanceof DBSCatalog ? tableParent.getName() : tableGrandParent instanceof DBSCatalog ? tableGrandParent.getName() : null;
this.schemaName = tableParent instanceof DBSSchema ? tableParent.getName() : null;
this.tableName = fetchedTableName;
this.type = this.tableColumn.getTypeID();
this.typeName = this.tableColumn.getTypeName();
this.readOnly = false;
this.writable = true;
this.precision = this.tableColumn.getPrecision();
this.scale = this.tableColumn.getScale();
try {
this.tableMetaData = resultSetMeta.getTableMetaData(ownerTable);
if (this.tableMetaData != null) {
this.tableMetaData.addColumn(this);
}
}
catch (DBException e) {
log.warn(e);
}
hasData = true;
}
}
if (!hasData) {
this.notNull = metaData.isNullable(index) == ResultSetMetaData.columnNoNulls;
try {
this.displaySize = metaData.getColumnDisplaySize(index);
} catch (SQLException e) {
this.displaySize = 0;
}
this.catalogName = fetchedCatalogName;
this.schemaName = fetchedSchemaName;
this.tableName = fetchedTableName;
this.type = metaData.getColumnType(index);
this.typeName = metaData.getColumnTypeName(index);
this.readOnly = metaData.isReadOnly(index);
this.writable = metaData.isWritable(index);
try {
this.precision = metaData.getPrecision(index);
} catch (Exception e) {
// NumberFormatException occurred in Oracle on BLOB columns
this.precision = 0;
}
try {
this.scale = metaData.getScale(index);
} catch (Exception e) {
this.scale = 0;
}
try {
if (!CommonUtils.isEmpty(this.tableName)) {
this.tableMetaData = resultSetMeta.getTableMetaData(catalogName, schemaName, tableName);
if (this.tableMetaData != null) {
this.tableMetaData.addColumn(this);
}
}
}
catch (DBException e) {
log.warn(e);
}
}
}
JDBCResultSetMetaData getResultSetMeta()
{
return resultSetMeta;
}
@Override
public int getIndex()
{
return index;
}
@Override
public boolean isRequired()
{
return notNull;
}
@Override
public long getMaxLength()
{
return displaySize;
}
@Override
public String getLabel()
{
return label;
}
@Override
public String getName()
{
return name;
}
@Override
public int getPrecision()
{
return precision;
}
@Override
public int getScale()
{
return scale;
}
@Override
public String getTableName()
{
return tableMetaData != null ? tableMetaData.getEntityName() : tableName;
}
@Override
public String getCatalogName()
{
return catalogName;
}
@Override
public String getSchemaName()
{
return schemaName;
}
@Override
public int getTypeID()
{
return type;
}
@Override
public String getTypeName()
{
return typeName;
}
@Override
public boolean isReadOnly()
{
return readOnly;
}
@Override
public JDBCTableMetaData getEntity()
{
return tableMetaData;
}
@Override
public DBSEntityAttribute getAttribute(DBRProgressMonitor monitor)
throws DBException
{
if (tableColumn != null) {
return tableColumn;
}
if (tableMetaData == null) {
return null;
}
tableColumn = tableMetaData.getEntity(monitor).getAttribute(monitor, name);
return tableColumn;
}
@Override
public boolean isReference(DBRProgressMonitor monitor)
throws DBException
{
DBSEntityAttribute tableColumn = getAttribute(monitor);
if (tableColumn == null) {
return false;
}
DBSTable table = tableMetaData.getEntity(monitor);
if (table == null) {
return false;
}
Collection<? extends DBSEntityAssociation> foreignKeys = table.getAssociations(monitor);
if (foreignKeys != null) {
for (DBSEntityAssociation fk : foreignKeys) {
if (fk instanceof DBSEntityReferrer && DBUtils.getConstraintColumn(monitor, (DBSEntityReferrer)fk, tableColumn) != null) {
return true;
}
}
}
return false;
}
@Override
public List<DBSEntityReferrer> getReferrers(DBRProgressMonitor monitor)
throws DBException
{
List<DBSEntityReferrer> refs = new ArrayList<DBSEntityReferrer>();
DBSEntityAttribute tableColumn = getAttribute(monitor);
if (tableColumn == null) {
return refs;
}
DBSEntity table = tableMetaData.getEntity(monitor);
if (table == null) {
return refs;
}
Collection<? extends DBSEntityAssociation> foreignKeys = table.getAssociations(monitor);
if (foreignKeys != null) {
for (DBSEntityAssociation fk : foreignKeys) {
if (fk instanceof DBSEntityReferrer && DBUtils.getConstraintColumn(monitor, (DBSEntityReferrer) fk, tableColumn) != null) {
refs.add((DBSEntityReferrer)fk);
}
}
}
return refs;
}
@Override
public Image getObjectImage()
{
if (tableColumn instanceof IObjectImageProvider) {
return ((IObjectImageProvider) tableColumn).getObjectImage();
}
return DBUtils.getDataIcon(this).getImage();
}
@Override
public String toString()
{
StringBuilder db = new StringBuilder();
if (!CommonUtils.isEmpty(catalogName)) {
db.append(catalogName).append('.');
}
if (!CommonUtils.isEmpty(schemaName)) {
db.append(schemaName).append('.');
}
if (!CommonUtils.isEmpty(tableName)) {
db.append(tableName).append('.');
}
if (!CommonUtils.isEmpty(name)) {
db.append(name);
}
if (!CommonUtils.isEmpty(label)) {
db.append(" as ").append(label);
}
return db.toString();
}
@Override
public boolean equals(Object obj)
{
if (!(obj instanceof JDBCColumnMetaData)) {
return false;
}
JDBCColumnMetaData col = (JDBCColumnMetaData)obj;
return
index == col.index &&
notNull == col.notNull &&
displaySize == col.displaySize &&
CommonUtils.equalObjects(label, col.label) &&
CommonUtils.equalObjects(name, col.name) &&
precision == col.precision &&
scale == col.scale &&
CommonUtils.equalObjects(catalogName, col.catalogName) &&
CommonUtils.equalObjects(schemaName, col.schemaName) &&
CommonUtils.equalObjects(tableName, col.tableName) &&
type == col.type &&
CommonUtils.equalObjects(typeName, col.typeName) &&
readOnly == col.readOnly &&
writable == col.writable;
}
} |
package com.opengamma.financial.security;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import org.fudgemsg.FudgeMsgEnvelope;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.core.id.ExternalSchemes;
import com.opengamma.core.security.Security;
import com.opengamma.core.security.SecuritySource;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.target.ComputationTargetTypeMap;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.financial.currency.CurrencyPair;
import com.opengamma.financial.currency.CurrencyPairs;
import com.opengamma.financial.security.bond.CorporateBondSecurity;
import com.opengamma.financial.security.bond.GovernmentBondSecurity;
import com.opengamma.financial.security.bond.InflationBondSecurity;
import com.opengamma.financial.security.bond.MunicipalBondSecurity;
import com.opengamma.financial.security.capfloor.CapFloorCMSSpreadSecurity;
import com.opengamma.financial.security.capfloor.CapFloorSecurity;
import com.opengamma.financial.security.cash.CashSecurity;
import com.opengamma.financial.security.cashflow.CashFlowSecurity;
import com.opengamma.financial.security.cds.CDSSecurity;
import com.opengamma.financial.security.cds.CreditDefaultSwapIndexDefinitionSecurity;
import com.opengamma.financial.security.cds.CreditDefaultSwapIndexSecurity;
import com.opengamma.financial.security.cds.LegacyFixedRecoveryCDSSecurity;
import com.opengamma.financial.security.cds.LegacyRecoveryLockCDSSecurity;
import com.opengamma.financial.security.cds.LegacyVanillaCDSSecurity;
import com.opengamma.financial.security.cds.StandardFixedRecoveryCDSSecurity;
import com.opengamma.financial.security.cds.StandardRecoveryLockCDSSecurity;
import com.opengamma.financial.security.cds.StandardVanillaCDSSecurity;
import com.opengamma.financial.security.deposit.ContinuousZeroDepositSecurity;
import com.opengamma.financial.security.deposit.PeriodicZeroDepositSecurity;
import com.opengamma.financial.security.deposit.SimpleZeroDepositSecurity;
import com.opengamma.financial.security.equity.EquitySecurity;
import com.opengamma.financial.security.equity.EquityVarianceSwapSecurity;
import com.opengamma.financial.security.forward.AgricultureForwardSecurity;
import com.opengamma.financial.security.forward.EnergyForwardSecurity;
import com.opengamma.financial.security.forward.MetalForwardSecurity;
import com.opengamma.financial.security.fra.FRASecurity;
import com.opengamma.financial.security.future.AgricultureFutureSecurity;
import com.opengamma.financial.security.future.BondFutureSecurity;
import com.opengamma.financial.security.future.DeliverableSwapFutureSecurity;
import com.opengamma.financial.security.future.EnergyFutureSecurity;
import com.opengamma.financial.security.future.EquityFutureSecurity;
import com.opengamma.financial.security.future.EquityIndexDividendFutureSecurity;
import com.opengamma.financial.security.future.FXFutureSecurity;
import com.opengamma.financial.security.future.FederalFundsFutureSecurity;
import com.opengamma.financial.security.future.IndexFutureSecurity;
import com.opengamma.financial.security.future.InterestRateFutureSecurity;
import com.opengamma.financial.security.future.MetalFutureSecurity;
import com.opengamma.financial.security.future.StockFutureSecurity;
import com.opengamma.financial.security.fx.FXForwardSecurity;
import com.opengamma.financial.security.fx.NonDeliverableFXForwardSecurity;
import com.opengamma.financial.security.option.BondFutureOptionSecurity;
import com.opengamma.financial.security.option.CommodityFutureOptionSecurity;
import com.opengamma.financial.security.option.CreditDefaultSwapOptionSecurity;
import com.opengamma.financial.security.option.EquityBarrierOptionSecurity;
import com.opengamma.financial.security.option.EquityIndexDividendFutureOptionSecurity;
import com.opengamma.financial.security.option.EquityIndexFutureOptionSecurity;
import com.opengamma.financial.security.option.EquityIndexOptionSecurity;
import com.opengamma.financial.security.option.EquityOptionSecurity;
import com.opengamma.financial.security.option.FXBarrierOptionSecurity;
import com.opengamma.financial.security.option.FXDigitalOptionSecurity;
import com.opengamma.financial.security.option.FXOptionSecurity;
import com.opengamma.financial.security.option.FxFutureOptionSecurity;
import com.opengamma.financial.security.option.IRFutureOptionSecurity;
import com.opengamma.financial.security.option.NonDeliverableFXDigitalOptionSecurity;
import com.opengamma.financial.security.option.NonDeliverableFXOptionSecurity;
import com.opengamma.financial.security.option.SwaptionSecurity;
import com.opengamma.financial.security.swap.ForwardSwapSecurity;
import com.opengamma.financial.security.swap.InterestRateNotional;
import com.opengamma.financial.security.swap.SwapLeg;
import com.opengamma.financial.security.swap.SwapSecurity;
import com.opengamma.financial.security.swap.YearOnYearInflationSwapSecurity;
import com.opengamma.financial.security.swap.ZeroCouponInflationSwapSecurity;
import com.opengamma.financial.sensitivities.SecurityEntryData;
import com.opengamma.id.ExternalId;
import com.opengamma.lambdava.functions.Function1;
import com.opengamma.master.security.RawSecurity;
import com.opengamma.util.fudgemsg.OpenGammaFudgeContext;
import com.opengamma.util.money.Currency;
import com.opengamma.util.money.CurrencyAmount;
/**
* General utility method applying to Financial Securities
*/
public class FinancialSecurityUtils {
private static ComputationTargetTypeMap<Function1<ComputationTarget, ValueProperties>> s_getCurrencyConstraint = getCurrencyConstraint();

/**
 * Builds the dispatch table mapping a computation target type to a function
 * deriving a currency constraint from that target. Position, security and
 * trade targets all delegate to the security's currency; a currency target
 * uses its own unique-id value directly.
 */
private static ComputationTargetTypeMap<Function1<ComputationTarget, ValueProperties>> getCurrencyConstraint() {
    final ComputationTargetTypeMap<Function1<ComputationTarget, ValueProperties>> map = new ComputationTargetTypeMap<>();
    map.put(ComputationTargetType.POSITION, new Function1<ComputationTarget, ValueProperties>() {
        @Override
        public ValueProperties execute(final ComputationTarget target) {
            return currencyConstraintOf(target.getPosition().getSecurity());
        }
    });
    map.put(ComputationTargetType.SECURITY, new Function1<ComputationTarget, ValueProperties>() {
        @Override
        public ValueProperties execute(final ComputationTarget target) {
            return currencyConstraintOf(target.getSecurity());
        }
    });
    map.put(ComputationTargetType.TRADE, new Function1<ComputationTarget, ValueProperties>() {
        @Override
        public ValueProperties execute(final ComputationTarget target) {
            return currencyConstraintOf(target.getTrade().getSecurity());
        }
    });
    map.put(ComputationTargetType.CURRENCY, new Function1<ComputationTarget, ValueProperties>() {
        @Override
        public ValueProperties execute(final ComputationTarget target) {
            return ValueProperties.with(ValuePropertyNames.CURRENCY, target.getUniqueId().getValue()).get();
        }
    });
    return map;
}

/** Currency constraint for a security, or an empty property set when no single currency applies. */
private static ValueProperties currencyConstraintOf(final Security security) {
    final Currency ccy = getCurrency(security);
    if (ccy == null) {
        return ValueProperties.none();
    }
    return ValueProperties.with(ValuePropertyNames.CURRENCY, ccy.getCode()).get();
}
/**
 * Derives a currency constraint for the supplied computation target by dispatching
 * on the target's type.
 *
 * @param target the computation target being examined.
 * @return ValueProperties containing a constraint of the CurrencyUnit or empty if not possible
 */
public static ValueProperties getCurrencyConstraint(final ComputationTarget target) {
  final Function1<ComputationTarget, ValueProperties> handler = s_getCurrencyConstraint.get(target.getType());
  // Unknown target types have no registered handler and yield empty properties.
  return handler == null ? ValueProperties.none() : handler.execute(target);
}
/**
 * @param security the security to be examined.
 * @return an ExternalId for a Region, where it is possible to determine, null otherwise.
 */
public static ExternalId getRegion(final Security security) {
  // Only FinancialSecurity instances carry region information; anything else maps to null.
  if (!(security instanceof FinancialSecurity)) {
    return null;
  }
  final FinancialSecurity financialSecurity = (FinancialSecurity) security;
  // The adapter defaults every security type not listed below to null.
  return financialSecurity.accept(new FinancialSecurityVisitorSameValueAdapter<ExternalId>(null) {
    @Override
    public ExternalId visitGovernmentBondSecurity(final GovernmentBondSecurity bond) {
      // Bonds map their issuer domicile onto an ISO country code.
      return ExternalId.of(ExternalSchemes.ISO_COUNTRY_ALPHA2, bond.getIssuerDomicile());
    }

    @Override
    public ExternalId visitMunicipalBondSecurity(final MunicipalBondSecurity bond) {
      return ExternalId.of(ExternalSchemes.ISO_COUNTRY_ALPHA2, bond.getIssuerDomicile());
    }

    @Override
    public ExternalId visitCorporateBondSecurity(final CorporateBondSecurity bond) {
      return ExternalId.of(ExternalSchemes.ISO_COUNTRY_ALPHA2, bond.getIssuerDomicile());
    }

    @Override
    public ExternalId visitCashSecurity(final CashSecurity cash) {
      return cash.getRegionId();
    }

    @Override
    public ExternalId visitFRASecurity(final FRASecurity fra) {
      return fra.getRegionId();
    }

    @Override
    public ExternalId visitFXForwardSecurity(final FXForwardSecurity fxForward) {
      return fxForward.getRegionId();
    }

    @Override
    public ExternalId visitNonDeliverableFXForwardSecurity(final NonDeliverableFXForwardSecurity ndf) {
      return ndf.getRegionId();
    }

    @Override
    public ExternalId visitEquityVarianceSwapSecurity(final EquityVarianceSwapSecurity varianceSwap) {
      return varianceSwap.getRegionId();
    }

    @Override
    public ExternalId visitSimpleZeroDepositSecurity(final SimpleZeroDepositSecurity deposit) {
      return deposit.getRegion();
    }

    @Override
    public ExternalId visitPeriodicZeroDepositSecurity(final PeriodicZeroDepositSecurity deposit) {
      return deposit.getRegion();
    }

    @Override
    public ExternalId visitContinuousZeroDepositSecurity(final ContinuousZeroDepositSecurity deposit) {
      return deposit.getRegion();
    }

    @Override
    public ExternalId visitStandardVanillaCDSSecurity(final StandardVanillaCDSSecurity cds) {
      return cds.getRegionId();
    }

    @Override
    public ExternalId visitLegacyVanillaCDSSecurity(final LegacyVanillaCDSSecurity cds) {
      return cds.getRegionId();
    }
  });
}
/**
 * @param security the security to be examined.
 * @return an ExternalId for an Exchange, where it is possible to determine, null otherwise.
 */
public static ExternalId getExchange(final Security security) {
  // Only FinancialSecurity instances carry exchange information; anything else maps to null.
  if (!(security instanceof FinancialSecurity)) {
    return null;
  }
  final FinancialSecurity financialSecurity = (FinancialSecurity) security;
  // Exchange codes are expressed as ISO MIC identifiers; unhandled types default to null.
  return financialSecurity.accept(new FinancialSecurityVisitorSameValueAdapter<ExternalId>(null) {
    @Override
    public ExternalId visitEquityBarrierOptionSecurity(final EquityBarrierOptionSecurity option) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, option.getExchange());
    }

    @Override
    public ExternalId visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity option) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, option.getExchange());
    }

    @Override
    public ExternalId visitEquityOptionSecurity(final EquityOptionSecurity option) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, option.getExchange());
    }

    @Override
    public ExternalId visitEquitySecurity(final EquitySecurity equity) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, equity.getExchangeCode());
    }

    @Override
    public ExternalId visitAgricultureFutureSecurity(final AgricultureFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitBondFutureSecurity(final BondFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitEquityFutureSecurity(final EquityFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitEquityIndexDividendFutureSecurity(final EquityIndexDividendFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitFXFutureSecurity(final FXFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitIndexFutureSecurity(final IndexFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitInterestRateFutureSecurity(final InterestRateFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitFederalFundsFutureSecurity(final FederalFundsFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitMetalFutureSecurity(final MetalFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitStockFutureSecurity(final StockFutureSecurity future) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, future.getTradingExchange());
    }

    @Override
    public ExternalId visitEquityIndexFutureOptionSecurity(final EquityIndexFutureOptionSecurity option) {
      return ExternalId.of(ExternalSchemes.ISO_MIC, option.getExchange());
    }
  });
}
/**
 * Determines the single currency associated with a security, where one exists.
 * <p>
 * Swap-like securities yield a currency only when both legs carry an
 * {@code InterestRateNotional} in the same currency (null otherwise). Two-currency FX
 * instruments (FX options, barriers, digitals, forwards) throw
 * {@code UnsupportedOperationException} instead of choosing a side — NOTE(review):
 * this conflicts with the "null otherwise" contract below; callers must be prepared
 * for the throw.
 *
 * @param security the security to be examined.
 * @return a Currency, where it is possible to determine a single Currency association, null otherwise.
 */
public static Currency getCurrency(final Security security) { // CSIGNORE
if (security instanceof FinancialSecurity) {
final FinancialSecurity finSec = (FinancialSecurity) security;
// Exhaustive visitor (no default adapter): every supported type is handled explicitly.
final Currency ccy = finSec.accept(new FinancialSecurityVisitor<Currency>() {
@Override
public Currency visitGovernmentBondSecurity(final GovernmentBondSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitMunicipalBondSecurity(final MunicipalBondSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitInflationBondSecurity(final InflationBondSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCorporateBondSecurity(final CorporateBondSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCashSecurity(final CashSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCashFlowSecurity(final CashFlowSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEquitySecurity(final EquitySecurity security) {
return security.getCurrency();
}
@Override
public Currency visitFRASecurity(final FRASecurity security) {
return security.getCurrency();
}
// A swap has a single currency only when both legs carry InterestRateNotional
// in the same currency; cross-currency or non-IR-notional swaps yield null.
@Override
public Currency visitSwapSecurity(final SwapSecurity security) {
if (security.getPayLeg().getNotional() instanceof InterestRateNotional && security.getReceiveLeg().getNotional() instanceof InterestRateNotional) {
final InterestRateNotional payLeg = (InterestRateNotional) security.getPayLeg().getNotional();
final InterestRateNotional receiveLeg = (InterestRateNotional) security.getReceiveLeg().getNotional();
if (payLeg.getCurrency().equals(receiveLeg.getCurrency())) {
return payLeg.getCurrency();
}
}
return null;
}
@Override
public Currency visitForwardSwapSecurity(final ForwardSwapSecurity security) {
if (security.getPayLeg().getNotional() instanceof InterestRateNotional && security.getReceiveLeg().getNotional() instanceof InterestRateNotional) {
final InterestRateNotional payLeg = (InterestRateNotional) security.getPayLeg().getNotional();
final InterestRateNotional receiveLeg = (InterestRateNotional) security.getReceiveLeg().getNotional();
if (payLeg.getCurrency().equals(receiveLeg.getCurrency())) {
return payLeg.getCurrency();
}
}
return null;
}
@Override
public Currency visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEquityOptionSecurity(final EquityOptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEquityBarrierOptionSecurity(final EquityBarrierOptionSecurity security) {
return security.getCurrency();
}
// Two-currency FX instruments deliberately throw rather than pick one side;
// use getCurrencies(...) for these security types instead.
@Override
public Currency visitFXOptionSecurity(final FXOptionSecurity security) {
throw new UnsupportedOperationException("FX securities do not have a currency");
}
@Override
public Currency visitNonDeliverableFXOptionSecurity(final NonDeliverableFXOptionSecurity security) {
throw new UnsupportedOperationException("FX securities do not have a currency");
}
@Override
public Currency visitSwaptionSecurity(final SwaptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitIRFutureOptionSecurity(final IRFutureOptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCommodityFutureOptionSecurity(final CommodityFutureOptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitFxFutureOptionSecurity(final FxFutureOptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitBondFutureOptionSecurity(final BondFutureOptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEquityIndexDividendFutureOptionSecurity(final EquityIndexDividendFutureOptionSecurity equityIndexDividendFutureOptionSecurity) {
return equityIndexDividendFutureOptionSecurity.getCurrency();
}
@Override
public Currency visitEquityIndexFutureOptionSecurity(final EquityIndexFutureOptionSecurity equityIndexFutureOptionSecurity) {
return equityIndexFutureOptionSecurity.getCurrency();
}
@Override
public Currency visitFXBarrierOptionSecurity(final FXBarrierOptionSecurity security) {
throw new UnsupportedOperationException("FX Barrier Options do not have a currency");
}
@Override
public Currency visitFXForwardSecurity(final FXForwardSecurity security) {
throw new UnsupportedOperationException("FX forward securities do not have a currency");
}
@Override
public Currency visitNonDeliverableFXForwardSecurity(final NonDeliverableFXForwardSecurity security) {
throw new UnsupportedOperationException("Non-deliverable FX forward securities do not have a currency");
}
@Override
public Currency visitCapFloorSecurity(final CapFloorSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCapFloorCMSSpreadSecurity(final CapFloorCMSSpreadSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEquityVarianceSwapSecurity(final EquityVarianceSwapSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitFXDigitalOptionSecurity(final FXDigitalOptionSecurity security) {
throw new UnsupportedOperationException("FX digital option securities do not have a currency");
}
@Override
public Currency visitNonDeliverableFXDigitalOptionSecurity(final NonDeliverableFXDigitalOptionSecurity security) {
throw new UnsupportedOperationException("NDF FX digital option securities do not have a currency");
}
@Override
public Currency visitSimpleZeroDepositSecurity(final SimpleZeroDepositSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitPeriodicZeroDepositSecurity(final PeriodicZeroDepositSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitContinuousZeroDepositSecurity(final ContinuousZeroDepositSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitAgricultureFutureSecurity(final AgricultureFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitBondFutureSecurity(final BondFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEnergyFutureSecurity(final EnergyFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEquityFutureSecurity(final EquityFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEquityIndexDividendFutureSecurity(final EquityIndexDividendFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitFXFutureSecurity(final FXFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitIndexFutureSecurity(final IndexFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitInterestRateFutureSecurity(final InterestRateFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitFederalFundsFutureSecurity(final FederalFundsFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitMetalFutureSecurity(final MetalFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitStockFutureSecurity(final StockFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitAgricultureForwardSecurity(final AgricultureForwardSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitEnergyForwardSecurity(final EnergyForwardSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitMetalForwardSecurity(final MetalForwardSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCDSSecurity(final CDSSecurity security) {
return security.getCurrency();
}
// CDS securities report the currency of their notional rather than a direct field.
@Override
public Currency visitStandardVanillaCDSSecurity(final StandardVanillaCDSSecurity security) {
return security.getNotional().getCurrency();
}
@Override
public Currency visitStandardRecoveryLockCDSSecurity(final StandardRecoveryLockCDSSecurity security) {
return security.getNotional().getCurrency();
}
@Override
public Currency visitStandardFixedRecoveryCDSSecurity(final StandardFixedRecoveryCDSSecurity security) {
return security.getNotional().getCurrency();
}
@Override
public Currency visitLegacyVanillaCDSSecurity(final LegacyVanillaCDSSecurity security) {
return security.getNotional().getCurrency();
}
@Override
public Currency visitLegacyRecoveryLockCDSSecurity(final LegacyRecoveryLockCDSSecurity security) {
return security.getNotional().getCurrency();
}
@Override
public Currency visitLegacyFixedRecoveryCDSSecurity(final LegacyFixedRecoveryCDSSecurity security) {
return security.getNotional().getCurrency();
}
@Override
public Currency visitDeliverableSwapFutureSecurity(final DeliverableSwapFutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCreditDefaultSwapIndexDefinitionSecurity(final CreditDefaultSwapIndexDefinitionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCreditDefaultSwapIndexSecurity(final CreditDefaultSwapIndexSecurity security) {
return security.getNotional().getCurrency();
}
@Override
public Currency visitCreditDefaultSwapOptionSecurity(final CreditDefaultSwapOptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitZeroCouponInflationSwapSecurity(final ZeroCouponInflationSwapSecurity security) {
if (security.getPayLeg().getNotional() instanceof InterestRateNotional && security.getReceiveLeg().getNotional() instanceof InterestRateNotional) {
final InterestRateNotional payLeg = (InterestRateNotional) security.getPayLeg().getNotional();
final InterestRateNotional receiveLeg = (InterestRateNotional) security.getReceiveLeg().getNotional();
if (payLeg.getCurrency().equals(receiveLeg.getCurrency())) {
return payLeg.getCurrency();
}
}
return null;
}
@Override
public Currency visitYearOnYearInflationSwapSecurity(final YearOnYearInflationSwapSecurity security) {
if (security.getPayLeg().getNotional() instanceof InterestRateNotional && security.getReceiveLeg().getNotional() instanceof InterestRateNotional) {
final InterestRateNotional payLeg = (InterestRateNotional) security.getPayLeg().getNotional();
final InterestRateNotional receiveLeg = (InterestRateNotional) security.getReceiveLeg().getNotional();
if (payLeg.getCurrency().equals(receiveLeg.getCurrency())) {
return payLeg.getCurrency();
}
}
return null;
}
});
return ccy;
} else if (security instanceof RawSecurity) {
final RawSecurity rawSecurity = (RawSecurity) security;
// External sensitivities securities carry a Fudge-encoded SecurityEntryData
// payload in the raw data, which holds the currency.
if (security.getSecurityType().equals(SecurityEntryData.EXTERNAL_SENSITIVITIES_SECURITY_TYPE)) {
final FudgeMsgEnvelope msg = OpenGammaFudgeContext.getInstance().deserialize(rawSecurity.getRawData());
final SecurityEntryData securityEntryData = OpenGammaFudgeContext.getInstance().fromFudgeMsg(SecurityEntryData.class, msg.getMessage());
return securityEntryData.getCurrency();
}
}
return null;
}
/**
* @param security the security to be examined.
* @param securitySource a security source
 * @return a Collection of the Currencies associated with the security, where it is possible to determine them, null otherwise.
*/
public static Collection<Currency> getCurrencies(final Security security, final SecuritySource securitySource) { // CSIGNORE
if (security instanceof FinancialSecurity) {
final FinancialSecurity finSec = (FinancialSecurity) security;
final Collection<Currency> ccy = finSec.accept(new FinancialSecurityVisitor<Collection<Currency>>() {
@Override
public Collection<Currency> visitCorporateBondSecurity(final CorporateBondSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitGovernmentBondSecurity(final GovernmentBondSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitMunicipalBondSecurity(final MunicipalBondSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitInflationBondSecurity(final InflationBondSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitCashSecurity(final CashSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitCashFlowSecurity(final CashFlowSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEquitySecurity(final EquitySecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitFRASecurity(final FRASecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitSwapSecurity(final SwapSecurity security) {
if (security.getPayLeg().getNotional() instanceof InterestRateNotional && security.getReceiveLeg().getNotional() instanceof InterestRateNotional) {
final InterestRateNotional payLeg = (InterestRateNotional) security.getPayLeg().getNotional();
final InterestRateNotional receiveLeg = (InterestRateNotional) security.getReceiveLeg().getNotional();
if (payLeg.getCurrency().equals(receiveLeg.getCurrency())) {
return Collections.singletonList(payLeg.getCurrency());
} else {
final Collection<Currency> collection = new ArrayList<Currency>();
collection.add(payLeg.getCurrency());
collection.add(receiveLeg.getCurrency());
return collection;
}
}
return null;
}
@Override
public Collection<Currency> visitForwardSwapSecurity(final ForwardSwapSecurity security) {
if (security.getPayLeg().getNotional() instanceof InterestRateNotional && security.getReceiveLeg().getNotional() instanceof InterestRateNotional) {
final InterestRateNotional payLeg = (InterestRateNotional) security.getPayLeg().getNotional();
final InterestRateNotional receiveLeg = (InterestRateNotional) security.getReceiveLeg().getNotional();
if (payLeg.getCurrency().equals(receiveLeg.getCurrency())) {
return Collections.singletonList(payLeg.getCurrency());
} else {
final Collection<Currency> collection = new ArrayList<Currency>();
collection.add(payLeg.getCurrency());
collection.add(receiveLeg.getCurrency());
return collection;
}
}
return null;
}
@Override
public Collection<Currency> visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEquityOptionSecurity(final EquityOptionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEquityBarrierOptionSecurity(final EquityBarrierOptionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitFXOptionSecurity(final FXOptionSecurity security) {
final Collection<Currency> currencies = new ArrayList<Currency>();
currencies.add(security.getCallCurrency());
currencies.add(security.getPutCurrency());
return currencies;
}
@Override
public Collection<Currency> visitNonDeliverableFXOptionSecurity(final NonDeliverableFXOptionSecurity security) {
final Collection<Currency> currencies = new ArrayList<Currency>();
currencies.add(security.getCallCurrency());
currencies.add(security.getPutCurrency());
//deliveryCurrency is always already covered
return currencies;
}
@Override
public Collection<Currency> visitSwaptionSecurity(final SwaptionSecurity security) {
// REVIEW: jim 1-Aug-2011 -- should we include the currencies of the underlying?
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitIRFutureOptionSecurity(final IRFutureOptionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitCommodityFutureOptionSecurity(final CommodityFutureOptionSecurity commodityFutureOptionSecurity) {
return Collections.singleton(commodityFutureOptionSecurity.getCurrency());
}
@Override
public Collection<Currency> visitFxFutureOptionSecurity(final FxFutureOptionSecurity security) {
return Collections.singleton(security.getCurrency());
}
@Override
public Collection<Currency> visitBondFutureOptionSecurity(final BondFutureOptionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEquityIndexDividendFutureOptionSecurity(final EquityIndexDividendFutureOptionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEquityIndexFutureOptionSecurity(final EquityIndexFutureOptionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitFXBarrierOptionSecurity(final FXBarrierOptionSecurity security) {
final Collection<Currency> currencies = new ArrayList<Currency>();
currencies.add(security.getCallCurrency());
currencies.add(security.getPutCurrency());
return currencies;
}
@Override
public Collection<Currency> visitFXForwardSecurity(final FXForwardSecurity security) {
final Collection<Currency> currencies = new ArrayList<Currency>();
currencies.add(security.getPayCurrency());
currencies.add(security.getReceiveCurrency());
return currencies;
}
@Override
public Collection<Currency> visitNonDeliverableFXForwardSecurity(final NonDeliverableFXForwardSecurity security) {
final Collection<Currency> currencies = new ArrayList<Currency>();
currencies.add(security.getPayCurrency());
currencies.add(security.getReceiveCurrency());
return currencies;
}
@Override
public Collection<Currency> visitCapFloorSecurity(final CapFloorSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitCapFloorCMSSpreadSecurity(final CapFloorCMSSpreadSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEquityVarianceSwapSecurity(final EquityVarianceSwapSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitFXDigitalOptionSecurity(final FXDigitalOptionSecurity security) {
final Collection<Currency> currencies = new ArrayList<Currency>();
currencies.add(security.getCallCurrency());
currencies.add(security.getPutCurrency());
return currencies;
}
@Override
public Collection<Currency> visitNonDeliverableFXDigitalOptionSecurity(final NonDeliverableFXDigitalOptionSecurity security) {
final Collection<Currency> currencies = new ArrayList<Currency>();
currencies.add(security.getCallCurrency());
currencies.add(security.getPutCurrency());
return currencies;
}
@Override
public Collection<Currency> visitSimpleZeroDepositSecurity(final SimpleZeroDepositSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitPeriodicZeroDepositSecurity(final PeriodicZeroDepositSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitContinuousZeroDepositSecurity(final ContinuousZeroDepositSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitAgricultureFutureSecurity(final AgricultureFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitBondFutureSecurity(final BondFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEnergyFutureSecurity(final EnergyFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEquityFutureSecurity(final EquityFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEquityIndexDividendFutureSecurity(final EquityIndexDividendFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitFXFutureSecurity(final FXFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitIndexFutureSecurity(final IndexFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitInterestRateFutureSecurity(final InterestRateFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitFederalFundsFutureSecurity(final FederalFundsFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitMetalFutureSecurity(final MetalFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitStockFutureSecurity(final StockFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitAgricultureForwardSecurity(final AgricultureForwardSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitEnergyForwardSecurity(final EnergyForwardSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitMetalForwardSecurity(final MetalForwardSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitCDSSecurity(final CDSSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitStandardVanillaCDSSecurity(final StandardVanillaCDSSecurity security) {
return Collections.singletonList(security.getNotional().getCurrency());
}
@Override
public Collection<Currency> visitStandardFixedRecoveryCDSSecurity(final StandardFixedRecoveryCDSSecurity security) {
return Collections.singletonList(security.getNotional().getCurrency());
}
@Override
public Collection<Currency> visitStandardRecoveryLockCDSSecurity(final StandardRecoveryLockCDSSecurity security) {
return Collections.singletonList(security.getNotional().getCurrency());
}
@Override
public Collection<Currency> visitLegacyVanillaCDSSecurity(final LegacyVanillaCDSSecurity security) {
return Collections.singletonList(security.getNotional().getCurrency());
}
@Override
public Collection<Currency> visitLegacyFixedRecoveryCDSSecurity(final LegacyFixedRecoveryCDSSecurity security) {
return Collections.singletonList(security.getNotional().getCurrency());
}
@Override
public Collection<Currency> visitLegacyRecoveryLockCDSSecurity(final LegacyRecoveryLockCDSSecurity security) {
return Collections.singletonList(security.getNotional().getCurrency());
}
@Override
public Collection<Currency> visitDeliverableSwapFutureSecurity(final DeliverableSwapFutureSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitCreditDefaultSwapIndexDefinitionSecurity(final CreditDefaultSwapIndexDefinitionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitCreditDefaultSwapIndexSecurity(final CreditDefaultSwapIndexSecurity security) {
return Collections.singletonList(security.getNotional().getCurrency());
}
@Override
public Collection<Currency> visitCreditDefaultSwapOptionSecurity(final CreditDefaultSwapOptionSecurity security) {
return Collections.singletonList(security.getCurrency());
}
@Override
public Collection<Currency> visitZeroCouponInflationSwapSecurity(final ZeroCouponInflationSwapSecurity security) {
if (security.getPayLeg().getNotional() instanceof InterestRateNotional && security.getReceiveLeg().getNotional() instanceof InterestRateNotional) {
final InterestRateNotional payLeg = (InterestRateNotional) security.getPayLeg().getNotional();
final InterestRateNotional receiveLeg = (InterestRateNotional) security.getReceiveLeg().getNotional();
if (payLeg.getCurrency().equals(receiveLeg.getCurrency())) {
return Collections.singletonList(payLeg.getCurrency());
}
final Collection<Currency> collection = new ArrayList<Currency>();
collection.add(payLeg.getCurrency());
collection.add(receiveLeg.getCurrency());
return collection;
}
return null;
}
@Override
public Collection<Currency> visitYearOnYearInflationSwapSecurity(final YearOnYearInflationSwapSecurity security) {
if (security.getPayLeg().getNotional() instanceof InterestRateNotional && security.getReceiveLeg().getNotional() instanceof InterestRateNotional) {
final InterestRateNotional payLeg = (InterestRateNotional) security.getPayLeg().getNotional();
final InterestRateNotional receiveLeg = (InterestRateNotional) security.getReceiveLeg().getNotional();
if (payLeg.getCurrency().equals(receiveLeg.getCurrency())) {
return Collections.singletonList(payLeg.getCurrency());
}
final Collection<Currency> collection = new ArrayList<Currency>();
collection.add(payLeg.getCurrency());
collection.add(receiveLeg.getCurrency());
return collection;
}
return null;
}
});
return ccy;
} else if (security instanceof RawSecurity) {
final RawSecurity rawSecurity = (RawSecurity) security;
if (security.getSecurityType().equals(SecurityEntryData.EXTERNAL_SENSITIVITIES_SECURITY_TYPE)) {
final FudgeMsgEnvelope msg = OpenGammaFudgeContext.getInstance().deserialize(rawSecurity.getRawData());
final SecurityEntryData securityEntryData = OpenGammaFudgeContext.getInstance().fromFudgeMsg(SecurityEntryData.class, msg.getMessage());
return Collections.singleton(securityEntryData.getCurrency());
}
}
return null;
}
/**
 * Check if a security is exchange traded.
 *
 * @param security the security to be examined.
 * @return true if exchange traded or false otherwise.
 */
public static boolean isExchangeTraded(final Security security) {
  // Only financial securities can be classified; anything else is not exchange traded.
  if (!(security instanceof FinancialSecurity)) {
    return false;
  }
  final FinancialSecurity finSec = (FinancialSecurity) security;
  // The visitor answers true only for the explicitly listed exchange-traded types;
  // every other security type falls through to the sameValue default of false.
  final Boolean isExchangeTraded = finSec.accept(
      FinancialSecurityVisitorAdapter.<Boolean>builder().
      sameValueForSecurityVisitor(false).
      equitySecurityVisitor(true).
      futureSecurityVisitor(true).
      equityIndexOptionVisitor(true).
      equityOptionVisitor(true).
      equityBarrierOptionVisitor(true).
      bondFutureOptionSecurityVisitor(true).
      equityIndexFutureOptionVisitor(true).
      irfutureOptionVisitor(true).
      interestRateFutureSecurityVisitor(true).
      federalFundsFutureSecurityVisitor(true).
      create());
  // Boolean.TRUE.equals(...) collapses the null and FALSE cases to false in one
  // step, replacing the manual "== null ? false : unbox" dance.
  return Boolean.TRUE.equals(isExchangeTraded);
}
/**
 * Returns the underlying id of a security (e.g. the id of the equity underlying an equity future).
 *
 * @param security The security, not null
 * @return The id of the underlying of a security, where it is possible to identify this, or null
 */
public static ExternalId getUnderlyingId(final Security security) {
  if (security instanceof FinancialSecurity) {
    final FinancialSecurity finSec = (FinancialSecurity) security;
    // Each override surfaces the security's own underlying identifier. Security
    // types without an override fall back to the adapter's default behaviour
    // (presumably null -- confirm against FinancialSecurityVisitorAdapter).
    final ExternalId id = finSec.accept(new FinancialSecurityVisitorAdapter<ExternalId>() {
      @Override
      public ExternalId visitFxFutureOptionSecurity(final FxFutureOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEnergyForwardSecurity(final EnergyForwardSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitAgricultureForwardSecurity(final AgricultureForwardSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitMetalForwardSecurity(final MetalForwardSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEquityIndexDividendFutureSecurity(final EquityIndexDividendFutureSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitStockFutureSecurity(final StockFutureSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEquityFutureSecurity(final EquityFutureSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEnergyFutureSecurity(final EnergyFutureSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitIndexFutureSecurity(final IndexFutureSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitInterestRateFutureSecurity(final InterestRateFutureSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitFederalFundsFutureSecurity(final FederalFundsFutureSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitMetalFutureSecurity(final MetalFutureSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitCommodityFutureOptionSecurity(final CommodityFutureOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitBondFutureOptionSecurity(final BondFutureOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEquityBarrierOptionSecurity(final EquityBarrierOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEquityIndexDividendFutureOptionSecurity(final EquityIndexDividendFutureOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEquityIndexFutureOptionSecurity(final EquityIndexFutureOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEquityOptionSecurity(final EquityOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitEquityVarianceSwapSecurity(final EquityVarianceSwapSecurity security) {
        // Variance swaps expose their underlying through the spot underlying id.
        return security.getSpotUnderlyingId();
      }
      @Override
      public ExternalId visitIRFutureOptionSecurity(final IRFutureOptionSecurity security) {
        return security.getUnderlyingId();
      }
      @Override
      public ExternalId visitCreditDefaultSwapIndexSecurity(final CreditDefaultSwapIndexSecurity security) {
        // For a CDS index the underlying is its reference entity.
        return security.getReferenceEntity();
      }
      @Override
      public ExternalId visitCreditDefaultSwapOptionSecurity(final CreditDefaultSwapOptionSecurity security) {
        return security.getUnderlyingId();
      }
    });
    return id;
  }
  // Not a FinancialSecurity: no underlying can be identified.
  return null;
}
/**
 * Returns the notional of a security as a (currency, amount) pair, for the
 * security types handled by the visitor below.
 *
 * @param security the security to examine
 * @param currencyPairs the market-convention currency pairs, used to decide
 *        which side of a two-currency trade (FX option/forward/digital) is reported
 * @return the notional, or null if the security is not a FinancialSecurity or
 *         has no visitor override below
 */
public static CurrencyAmount getNotional(final Security security, final CurrencyPairs currencyPairs) {
  if (security instanceof FinancialSecurity) {
    final FinancialSecurity finSec = (FinancialSecurity) security;
    final CurrencyAmount notional = finSec.accept(new FinancialSecurityVisitorAdapter<CurrencyAmount>() {
      @Override
      public CurrencyAmount visitSwapSecurity(final SwapSecurity security) {
        final SwapLeg payNotional = security.getPayLeg();
        final SwapLeg receiveNotional = security.getReceiveLeg();
        // A swap only has a single well-defined notional when both legs carry
        // InterestRateNotionals of equal amount; the pay leg's currency is reported.
        if (payNotional.getNotional() instanceof InterestRateNotional && receiveNotional.getNotional() instanceof InterestRateNotional) {
          final InterestRateNotional pay = (InterestRateNotional) payNotional.getNotional();
          final InterestRateNotional receive = (InterestRateNotional) receiveNotional.getNotional();
          if (Double.compare(pay.getAmount(), receive.getAmount()) == 0) {
            return CurrencyAmount.of(pay.getCurrency(), pay.getAmount());
          }
        }
        // Unequal amounts or non-interest-rate legs are deliberately an error, not null.
        throw new OpenGammaRuntimeException("Can only handle interest rate notionals with the same amounts");
      }
      @Override
      public CurrencyAmount visitFXOptionSecurity(final FXOptionSecurity security) {
        final Currency currency1 = security.getPutCurrency();
        final double amount1 = security.getPutAmount();
        final Currency currency2 = security.getCallCurrency();
        final double amount2 = security.getCallAmount();
        // Report the side whose currency is the base of the market-convention pair.
        final CurrencyPair currencyPair = currencyPairs.getCurrencyPair(currency1, currency2);
        if (currencyPair.getBase().equals(currency1)) {
          return CurrencyAmount.of(currency1, amount1);
        }
        return CurrencyAmount.of(currency2, amount2);
      }
      @Override
      public CurrencyAmount visitNonDeliverableFXOptionSecurity(final NonDeliverableFXOptionSecurity security) {
        // Non-deliverable: report in the delivery currency, using whichever leg matches it.
        final Currency currency = security.getDeliveryCurrency();
        final double amount = security.getCallCurrency().equals(currency) ? security.getCallAmount() : security.getPutAmount();
        return CurrencyAmount.of(currency, amount);
      }
      @Override
      public CurrencyAmount visitFXDigitalOptionSecurity(final FXDigitalOptionSecurity security) {
        final Currency currency1 = security.getPutCurrency();
        final double amount1 = security.getPutAmount();
        final Currency currency2 = security.getCallCurrency();
        final double amount2 = security.getCallAmount();
        // Same base-currency convention as visitFXOptionSecurity.
        final CurrencyPair currencyPair = currencyPairs.getCurrencyPair(currency1, currency2);
        if (currencyPair.getBase().equals(currency1)) {
          return CurrencyAmount.of(currency1, amount1);
        }
        return CurrencyAmount.of(currency2, amount2);
      }
      @Override
      public CurrencyAmount visitNonDeliverableFXDigitalOptionSecurity(final NonDeliverableFXDigitalOptionSecurity security) {
        // Report in the payment currency, using whichever leg matches it.
        final Currency currency = security.getPaymentCurrency();
        final double amount = security.getCallCurrency().equals(currency) ? security.getCallAmount() : security.getPutAmount();
        return CurrencyAmount.of(currency, amount);
      }
      @Override
      public CurrencyAmount visitFXForwardSecurity(final FXForwardSecurity security) {
        final Currency currency1 = security.getPayCurrency();
        final double amount1 = security.getPayAmount();
        final Currency currency2 = security.getReceiveCurrency();
        final double amount2 = security.getReceiveAmount();
        // Report the side whose currency is the base of the market-convention pair.
        final CurrencyPair currencyPair = currencyPairs.getCurrencyPair(currency1, currency2);
        if (currencyPair.getBase().equals(currency1)) {
          return CurrencyAmount.of(currency1, amount1);
        }
        return CurrencyAmount.of(currency2, amount2);
      }
      @Override
      public CurrencyAmount visitStandardVanillaCDSSecurity(final StandardVanillaCDSSecurity security) {
        final InterestRateNotional notional = security.getNotional();
        // Sign encodes direction: positive when buying, negative when selling.
        final int sign = security.isBuy() ? 1 : -1;
        return CurrencyAmount.of(notional.getCurrency(), sign * notional.getAmount());
      }
      @Override
      public CurrencyAmount visitLegacyVanillaCDSSecurity(final LegacyVanillaCDSSecurity security) {
        final InterestRateNotional notional = security.getNotional();
        // Sign encodes direction: positive when buying, negative when selling.
        final int sign = security.isBuy() ? 1 : -1;
        return CurrencyAmount.of(notional.getCurrency(), sign * notional.getAmount());
      }
      @Override
      public CurrencyAmount visitGovernmentBondSecurity(final GovernmentBondSecurity security) {
        // Bonds report their minimum amount as the notional.
        final Currency currency = security.getCurrency();
        final double notional = security.getMinimumAmount();
        return CurrencyAmount.of(currency, notional);
      }
      @Override
      public CurrencyAmount visitCorporateBondSecurity(final CorporateBondSecurity security) {
        final Currency currency = security.getCurrency();
        final double notional = security.getMinimumAmount();
        return CurrencyAmount.of(currency, notional);
      }
      @Override
      public CurrencyAmount visitMunicipalBondSecurity(final MunicipalBondSecurity security) {
        final Currency currency = security.getCurrency();
        final double notional = security.getMinimumAmount();
        return CurrencyAmount.of(currency, notional);
      }
      @Override
      public CurrencyAmount visitSwaptionSecurity(final SwaptionSecurity security) {
        final Currency currency = security.getCurrency();
        final double notional = security.getNotional();
        return CurrencyAmount.of(currency, notional);
      }
      @Override
      public CurrencyAmount visitEquityIndexOptionSecurity(final EquityIndexOptionSecurity security) {
        // Point value stands in for the notional of an index option.
        final Currency currency = security.getCurrency();
        final double notional = security.getPointValue();
        return CurrencyAmount.of(currency, notional);
      }
      @Override
      public CurrencyAmount visitInterestRateFutureSecurity(final InterestRateFutureSecurity security) {
        // Unit amount stands in for the notional of a future.
        final Currency currency = security.getCurrency();
        final double notional = security.getUnitAmount();
        return CurrencyAmount.of(currency, notional);
      }
      @Override
      public CurrencyAmount visitFederalFundsFutureSecurity(final FederalFundsFutureSecurity security) {
        final Currency currency = security.getCurrency();
        final double notional = security.getUnitAmount();
        return CurrencyAmount.of(currency, notional);
      }
      @Override
      public CurrencyAmount visitCreditDefaultSwapIndexSecurity(final CreditDefaultSwapIndexSecurity security) {
        // No buy/sell sign is applied here, unlike the vanilla CDS visitors above.
        final InterestRateNotional notional = security.getNotional();
        return CurrencyAmount.of(notional.getCurrency(), notional.getAmount());
      }
      @Override
      public CurrencyAmount visitCreditDefaultSwapOptionSecurity(final CreditDefaultSwapOptionSecurity security) {
        final Currency currency = security.getCurrency();
        final double notional = security.getNotional();
        return CurrencyAmount.of(currency, notional);
      }
    });
    return notional;
  }
  // Not a FinancialSecurity: no notional can be determined.
  return null;
}
} |
package com.opengamma.web.valuerequirementname;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import com.opengamma.util.ArgumentChecker;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONStringer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.web.AbstractWebResource;
/**
* RESTful resource that returns the value requirement names for the Web GUI.
*/
@Path("/valuerequirementnames/metaData")
public class WebValueRequirementNamesResource extends AbstractWebResource {
/** Logger. */
private static final Logger s_logger = LoggerFactory.getLogger(WebValueRequirementNamesResource.class);
private static final String VALUE_REQUIREMENT_NAME_CLASSES = "valueRequirementNameClasses";
/**
* The value requirement names.
*/
private final Set<String> _valueRequirementNames;
/**
* Creates the resource.
*/
public WebValueRequirementNamesResource() {
final List<String> list = new ArrayList<String>();
for (Field field : ValueRequirementNames.class.getDeclaredFields()) {
try {
list.add((String) field.get(null));
} catch (Exception e) {
// Ignore
}
}
Collections.sort(list, String.CASE_INSENSITIVE_ORDER);
_valueRequirementNames = new LinkedHashSet<String>(list);
}
public WebValueRequirementNamesResource(String[] valueRequirementNameClasses) {
ArgumentChecker.notEmpty(valueRequirementNameClasses, VALUE_REQUIREMENT_NAME_CLASSES);
final List<String> list = new ArrayList<String>();
for (String className : valueRequirementNameClasses) {
try {
for (Field field : Class.forName(className).getDeclaredFields()) {
list.add((String) field.get(null));
}
} catch (Exception e) {
// Ignore
}
}
Collections.sort(list, String.CASE_INSENSITIVE_ORDER);
_valueRequirementNames = new LinkedHashSet<String>(list);
}
@GET
@Produces(MediaType.APPLICATION_JSON)
public String getJSON() {
String result = null;
try {
result = new JSONStringer()
.object()
.key("types")
.value(new JSONArray(_valueRequirementNames))
.endObject()
.toString();
} catch (JSONException ex) {
s_logger.warn("error creating json document for valueRequirementNames");
}
return result;
}
} |
package gov.nih.nci.cabig.caaers.web.ae;
import gov.nih.nci.cabig.caaers.domain.StudyParticipantAssignment;
import gov.nih.nci.cabig.caaers.domain.Participant;
import gov.nih.nci.cabig.caaers.domain.Study;
import gov.nih.nci.cabig.caaers.domain.expeditedfields.ExpeditedReportTree;
import gov.nih.nci.cabig.caaers.domain.report.ReportDefinition;
import gov.nih.nci.cabig.caaers.dao.ExpeditedAdverseEventReportDao;
import gov.nih.nci.cabig.caaers.dao.StudyParticipantAssignmentDao;
import gov.nih.nci.cabig.caaers.dao.report.ReportDefinitionDao;
/**
* @author Rhett Sutphin
*/
public class EditExpeditedAdverseEventCommand extends AbstractExpeditedAdverseEventInputCommand {
private StudyParticipantAssignmentDao assignmentDao;
////// LOGIC
public EditExpeditedAdverseEventCommand(
ExpeditedAdverseEventReportDao expeditedAeReportDao,
ReportDefinitionDao reportDefinitionDao,
StudyParticipantAssignmentDao assignmentDao,
ExpeditedReportTree expeditedReportTree
) {
super(expeditedAeReportDao, reportDefinitionDao, expeditedReportTree);
this.assignmentDao = assignmentDao;
}
@Override
public StudyParticipantAssignment getAssignment() {
return getAeReport().getAssignment();
}
@Override
public Participant getParticipant() {
return getAssignment().getParticipant();
}
@Override
public Study getStudy() {
return getAssignment().getStudySite().getStudy();
}
@Override
public void save() {
reportDao.save(getAeReport());
}
@Override
public void reassociate() {
super.reassociate();
assignmentDao.reassociate(getAssignment());
}
} |
package com.jetbrains.python.codeInsight.intentions;
import com.intellij.codeInsight.CodeInsightUtilBase;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInsight.template.*;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.types.PyReturnTypeReference;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
/**
 * User: ktisha
 *
 * Helps to specify type in annotations in python3.
 *
 * Inserts a placeholder {@code object} annotation on a parameter or a function
 * return type and immediately opens a live template over it so the user can
 * type the real type. Only offered when the inferred type is unknown.
 */
public class SpecifyTypeInPy3AnnotationsIntention implements IntentionAction {
  public SpecifyTypeInPy3AnnotationsIntention() {
  }

  @NotNull
  public String getText() {
    return PyBundle.message("INTN.specify.type.in.annotation");
  }

  @NotNull
  public String getFamilyName() {
    return PyBundle.message("INTN.specify.type.in.annotation");
  }

  /**
   * Available only in Python 3 files, on an unannotated, non-defaulted parameter
   * or on an expression that resolves to one, or on a call whose callee function
   * has no return annotation yet.
   */
  public boolean isAvailable(@NotNull Project project, Editor editor, PsiFile file) {
    // Function annotations are Python 3 syntax only.
    if (!LanguageLevel.forElement(file).isPy3K()) return false;
    // Prefer a named parameter just left of the caret...
    PyExpression problemElement = PsiTreeUtil.getParentOfType(file.findElementAt(editor.getCaretModel().getOffset() - 1),
                                                              PyNamedParameter.class);
    if (problemElement == null)
      // ...otherwise take the outermost qualified expression there.
      problemElement = PsiTreeUtil.getTopmostParentOfType(file.findElementAt(editor.getCaretModel().getOffset() - 1),
                                                          PyQualifiedExpression.class);
    if (problemElement == null) return false;
    if (problemElement instanceof PyQualifiedExpression) {
      // For a qualified name (a.b) work on the qualifier, unless it is `self`.
      final PyExpression qualifier = ((PyQualifiedExpression)problemElement).getQualifier();
      if (qualifier != null && !qualifier.getText().equals(PyNames.CANONICAL_SELF)) {
        problemElement = qualifier;
      }
    }
    // Call targets and lambda parameters cannot carry annotations here.
    if (problemElement.getParent() instanceof PyCallExpression
        || PsiTreeUtil.getParentOfType(problemElement, PyLambdaExpression.class) != null) {
      return false;
    }
    final PyType type = problemElement.getType(TypeEvalContext.slow());
    // Offer the intention only when the type is unknown (null or a deferred
    // return-type reference); a known type needs no annotation.
    if (type == null || type instanceof PyReturnTypeReference) {
      PyFunction pyFunction = PsiTreeUtil.getParentOfType(problemElement, PyFunction.class);
      PsiReference reference = problemElement.getReference();
      if (problemElement instanceof PyQualifiedExpression) {
        final PyExpression qualifier = ((PyQualifiedExpression)problemElement).getQualifier();
        if (qualifier != null && !qualifier.getText().equals(PyNames.CANONICAL_SELF)) reference = qualifier.getReference();
      }
      if (pyFunction != null) {
        PyParameter parameter = null;
        final PsiElement resolvedReference = reference != null ? reference.resolve() : null;
        // The element is either itself a parameter or resolves to one.
        if (problemElement instanceof PyParameter)
          parameter = (PyParameter)problemElement;
        else if (resolvedReference instanceof PyParameter)
          parameter = (PyParameter)resolvedReference;
        // Already annotated or defaulted parameters are excluded.
        if (parameter instanceof PyNamedParameter && (((PyNamedParameter)parameter).getAnnotation() != null ||
                                                      parameter.getDefaultValue() != null)) return false;
        if (parameter != null)
          return true;
        else {
          // Not a parameter: check for the return-type case -- a target assigned
          // from a call to a resolvable, not-yet-annotated function.
          if (resolvedReference instanceof PyTargetExpression) {
            final PyExpression assignedValue = ((PyTargetExpression)resolvedReference).findAssignedValue();
            if (assignedValue instanceof PyCallExpression) {
              final PyExpression callee = ((PyCallExpression)assignedValue).getCallee();
              if (callee != null) {
                final PsiReference psiReference = callee.getReference();
                // An unresolvable callee means we cannot annotate its return type.
                if (psiReference != null && psiReference.resolve() == null) return false;
              }
              final Callable callable = ((PyCallExpression)assignedValue).resolveCalleeFunction(PyResolveContext.defaultContext());
              if (callable instanceof PyFunction && ((PyFunction)callable).getAnnotation() == null) return true;
            }
          }
        }
      }
    }
    return false;
  }

  /**
   * Inserts the {@code object} placeholder annotation (on the parameter, or on
   * the callee function's return type) and starts a live template over it.
   */
  public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws IncorrectOperationException {
    // Re-locate the element at the caret, mirroring isAvailable().
    PyExpression problemElement = PsiTreeUtil.getParentOfType(file.findElementAt(editor.getCaretModel().getOffset() - 1), PyNamedParameter.class);
    if (problemElement == null)
      problemElement = PsiTreeUtil.getParentOfType(file.findElementAt(editor.getCaretModel().getOffset() - 1), PyExpression.class);
    if (problemElement != null) {
      String name = problemElement.getName();
      PsiReference reference = problemElement.getReference();
      if (problemElement instanceof PyQualifiedExpression) {
        final PyExpression qualifier = ((PyQualifiedExpression)problemElement).getQualifier();
        if (qualifier != null) {
          reference = qualifier.getReference();
          name = qualifier.getText();
        }
      }
      PyElementGenerator elementGenerator = PyElementGenerator.getInstance(project);
      PyParameter parameter = null;
      final PsiElement resolvedReference = reference != null ? reference.resolve() : null;
      if (problemElement instanceof PyParameter)
        parameter = (PyParameter)problemElement;
      else {
        if (resolvedReference instanceof PyParameter) {
          parameter = (PyParameter)resolvedReference;
        }
      }
      if (parameter != null && name != null) {
        // Parameter case: generate "name: object" via a throwaway function and
        // swap it in for the existing parameter.
        final PyFunction function =
          elementGenerator.createFromText(LanguageLevel.forElement(problemElement), PyFunction.class,
                                          "def foo(" + name + ": object):\n\tpass");
        final PyNamedParameter namedParameter = function.getParameterList().findParameterByName(name);
        assert namedParameter != null;
        parameter = (PyParameter)parameter.replace(namedParameter);
        parameter = CodeInsightUtilBase.forcePsiPostprocessAndRestoreElement(parameter);
        editor.getCaretModel().moveToOffset(parameter.getTextOffset());
        // Select the trailing "object" placeholder in a live template.
        final TemplateBuilder builder = TemplateBuilderFactory.getInstance().createTemplateBuilder(parameter);
        builder.replaceRange(TextRange.create(parameter.getTextLength() - PyNames.OBJECT.length(), parameter.getTextLength()), PyNames.OBJECT);
        Template template = ((TemplateBuilderImpl)builder).buildInlineTemplate();
        TemplateManager.getInstance(project).startTemplate(editor, template);
      }
      else { //return type
        if (resolvedReference instanceof PyTargetExpression) {
          final PyExpression assignedValue = ((PyTargetExpression)resolvedReference).findAssignedValue();
          if (assignedValue instanceof PyCallExpression) {
            Callable callable = ((PyCallExpression)assignedValue).resolveCalleeFunction(PyResolveContext.defaultContext());
            if (callable instanceof PyFunction && ((PyFunction)callable).getAnnotation() == null) {
              // Rebuild the callee with "-> object" spliced into its signature,
              // copying the statements of the original body.
              final String functionSignature = "def " + callable.getName() + callable.getParameterList().getText();
              String functionText = functionSignature +
                                    " -> object:";
              for (PyStatement st : ((PyFunction)callable).getStatementList().getStatements()) {
                functionText = functionText + "\n\t" + st.getText();
              }
              final PyFunction function = elementGenerator.createFromText(LanguageLevel.forElement(problemElement), PyFunction.class,
                                                                          functionText);
              callable = (PyFunction)callable.replace(function);
              callable = CodeInsightUtilBase.forcePsiPostprocessAndRestoreElement(callable);
              // NOTE(review): assumes the freshly inserted "-> object" annotation
              // is present, so getAnnotation() is non-null here -- confirm.
              final PyExpression value = ((PyFunction)callable).getAnnotation().getValue();
              final int offset = value.getTextOffset();
              final TemplateBuilder builder = TemplateBuilderFactory.getInstance().
                createTemplateBuilder(value);
              builder.replaceRange(TextRange.create(0, PyNames.OBJECT.length()), PyNames.OBJECT);
              Template template = ((TemplateBuilderImpl)builder).buildInlineTemplate();
              // The callee may live in another file: open it and run the template there.
              OpenFileDescriptor descriptor = new OpenFileDescriptor(
                project,
                value.getContainingFile().getVirtualFile(),
                offset
              );
              Editor targetEditor = FileEditorManager.getInstance(project).openTextEditor(descriptor, true);
              if (targetEditor != null) {
                targetEditor.getCaretModel().moveToOffset(offset);
                TemplateManager.getInstance(project).startTemplate(targetEditor, template);
              }
            }
          }
        }
      }
    }
  }

  public boolean startInWriteAction() {
    return true;
  }
}
package org.sagebionetworks.bridge.researchstack;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.sagebionetworks.bridge.researchstack.ApiUtils.SUCCESS_DATA_RESPONSE;
import android.annotation.SuppressLint;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimaps;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.researchstack.backbone.AppPrefs;
import org.researchstack.backbone.DataProvider;
import org.researchstack.backbone.DataResponse;
import org.researchstack.backbone.ResourceManager;
import org.researchstack.backbone.StorageAccess;
import org.researchstack.backbone.model.ConsentSignatureBody;
import org.researchstack.backbone.model.SchedulesAndTasksModel;
import org.researchstack.backbone.model.TaskModel;
import org.researchstack.backbone.model.User;
import org.researchstack.backbone.result.TaskResult;
import org.researchstack.backbone.storage.NotificationHelper;
import org.researchstack.backbone.task.Task;
import org.researchstack.backbone.ui.ActiveTaskActivity;
import org.researchstack.backbone.utils.ObservableUtils;
import org.sagebionetworks.bridge.android.BridgeConfig;
import org.sagebionetworks.bridge.android.manager.AuthenticationManager;
import org.sagebionetworks.bridge.android.manager.BridgeManagerProvider;
import org.sagebionetworks.bridge.android.manager.ParticipantRecordManager;
import org.sagebionetworks.bridge.android.manager.upload.ArchiveUtil;
import org.sagebionetworks.bridge.android.manager.upload.SchemaKey;
import org.sagebionetworks.bridge.data.JsonArchiveFile;
import org.sagebionetworks.bridge.researchstack.survey.SurveyTaskScheduleModel;
import org.sagebionetworks.bridge.researchstack.wrapper.StorageAccessWrapper;
import org.sagebionetworks.bridge.rest.RestUtils;
import org.sagebionetworks.bridge.rest.model.Activity;
import org.sagebionetworks.bridge.rest.model.AppConfig;
import org.sagebionetworks.bridge.rest.model.ConsentSignature;
import org.sagebionetworks.bridge.rest.model.Message;
import org.sagebionetworks.bridge.rest.model.Phone;
import org.sagebionetworks.bridge.rest.model.ScheduledActivity;
import org.sagebionetworks.bridge.rest.model.ScheduledActivityList;
import org.sagebionetworks.bridge.rest.model.ScheduledActivityListV4;
import org.sagebionetworks.bridge.rest.model.SharingScope;
import org.sagebionetworks.bridge.rest.model.SignUp;
import org.sagebionetworks.bridge.rest.model.StudyParticipant;
import org.sagebionetworks.bridge.rest.model.UserSessionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import rx.Completable;
import rx.Observable;
import rx.Single;
import rx.functions.Action0;
/**
* DataProvider implementation backed by a Bridge study.
*/
public abstract class BridgeDataProvider extends DataProvider {
private static final Logger logger = LoggerFactory.getLogger(BridgeDataProvider.class);
/**
 * Returns the registered DataProvider, downcast to BridgeDataProvider.
 *
 * @throws IllegalStateException if the registered provider is of another type
 */
public static BridgeDataProvider getInstance() {
    DataProvider provider = DataProvider.getInstance();
    if (provider instanceof BridgeDataProvider) {
        return (BridgeDataProvider) provider;
    }
    throw new IllegalStateException("This app only works with BridgeDataProvider");
}
// set in initialize
protected final TaskHelper taskHelper;
@NonNull
protected final StorageAccessWrapper storageAccessWrapper;
@NonNull
protected final ResearchStackDAO researchStackDAO;
@NonNull
protected final BridgeManagerProvider bridgeManagerProvider;
@NonNull
protected final BridgeConfig bridgeConfig;
@NonNull
private final AuthenticationManager authenticationManager;
@NonNull
private final ParticipantRecordManager participantRecordManager;
/**
* The GUID of the last task that was loaded (used in completion)
*/
protected String lastLoadedTaskGuid = null;
/**
 * Package-private constructor used by tests to inject mock collaborators directly.
 */
BridgeDataProvider(BridgeManagerProvider bridgeManagerProvider,
                   ResearchStackDAO researchStackDAO,
                   StorageAccessWrapper storageAccessWrapper,
                   TaskHelper taskHelper) {
    this.bridgeManagerProvider = bridgeManagerProvider;
    this.researchStackDAO = researchStackDAO;
    this.storageAccessWrapper = storageAccessWrapper;
    this.taskHelper = taskHelper;
    // Convenience accessors pulled off the manager provider.
    this.bridgeConfig = bridgeManagerProvider.getBridgeConfig();
    this.authenticationManager = bridgeManagerProvider.getAuthenticationManager();
    this.participantRecordManager = bridgeManagerProvider.getParticipantManager();
}
/**
 * Creates the provider backed by the given Bridge manager provider, constructing
 * the DAO, storage access wrapper, and task helper collaborators itself.
 *
 * @param bridgeManagerProvider source of the Bridge manager singletons, not null
 */
public BridgeDataProvider(@NonNull BridgeManagerProvider bridgeManagerProvider) {
    this.researchStackDAO = new ResearchStackDAO(bridgeManagerProvider.getApplicationContext());
    this.bridgeManagerProvider = bridgeManagerProvider;
    // convenience accessors
    this.bridgeConfig = bridgeManagerProvider.getBridgeConfig();
    this.authenticationManager = bridgeManagerProvider.getAuthenticationManager();
    this.participantRecordManager = bridgeManagerProvider.getParticipantManager();
    // storageAccessWrapper must be assigned before createTaskHelper uses it below.
    this.storageAccessWrapper = new StorageAccessWrapper();
    NotificationHelper notificationHelper = NotificationHelper.
            getInstance(bridgeManagerProvider.getApplicationContext());
    // NOTE(review): createTaskHelper is overridable and is invoked from this
    // constructor, so subclass overrides run before the subclass is fully
    // initialized -- confirm this is intentional.
    this.taskHelper = createTaskHelper(notificationHelper, storageAccessWrapper,
            bridgeManagerProvider);
}
/**
 * Factory for the TaskHelper; overridable so subclasses can supply a custom helper.
 */
public TaskHelper createTaskHelper(NotificationHelper notif, StorageAccessWrapper wrapper,
                                   BridgeManagerProvider provider) {
    final ResourceManager resourceManager = ResourceManager.getInstance();
    final AppPrefs appPrefs = AppPrefs.getInstance();
    return new TaskHelper(wrapper, resourceManager, appPrefs, notif, provider);
}
/**
 * No Bridge-specific setup is required; reports immediate success.
 */
@Override
public Observable<DataResponse> initialize(Context context) {
    logger.debug("Called initialize");
    return SUCCESS_DATA_RESPONSE;
}
/**
 * Returns the study identifier from the app's static Bridge configuration.
 */
@NonNull
@Override
public String getStudyId() {
    return bridgeConfig.getStudyId();
}
//region AppConfig
/**
 * Get app config from the cache, or fall back to server if there is no value in the cache.
 *
 * @return single emitting the app config
 */
@NonNull
public Single<AppConfig> getAppConfig() {
    return bridgeManagerProvider.getAppConfigManager().getAppConfig();
}
//endregion
//region Consent
/**
 * Withdraws the participant from all consents and reports success when done.
 */
@NonNull
@Override
public Observable<DataResponse> withdrawConsent(Context context, String reason) {
    logger.debug("Called withdrawConsent");
    final Completable withdrawal = withdrawAllConsents(reason);
    return withdrawal.andThen(SUCCESS_DATA_RESPONSE);
}
/**
 * Withdraws consent for a single subpopulation.
 *
 * @param subpopulationGuid GUID of the subpopulation to withdraw from, not null
 * @param reason optional reason for withdrawing, may be null
 * @return completable that completes when the withdrawal has been processed
 */
@NonNull
public Completable withdrawConsent(@NonNull String subpopulationGuid, @Nullable String reason) {
    // Parameterized logging skips the message build when debug is disabled.
    logger.debug("Called withdrawConsent for subpopulation: {}", subpopulationGuid);
    return authenticationManager.withdrawConsent(subpopulationGuid, reason);
}
/**
 * Withdraws the participant from every consent.
 *
 * @param reason optional reason for withdrawing, may be null
 * @return completable that completes when the withdrawal has been processed
 */
@NonNull
public Completable withdrawAllConsents(@Nullable String reason) {
    return authenticationManager.withdrawAll(reason);
}
/**
 * @return true if participant has consented to all required consents
 */
@Override
public boolean isConsented() {
    logger.debug("Called isConsented");
    // Consent state is owned by the AuthenticationManager.
    return authenticationManager.isConsented();
}
/**
 * Convenience overload: unpacks the ConsentSignature and delegates to the
 * field-by-field giveConsent overload.
 */
@NonNull
public Single<UserSessionInfo> giveConsent(@NonNull String subpopulationGuid, @NonNull
        ConsentSignature consentSignature) {
    final String name = consentSignature.getName();
    final LocalDate birthdate = consentSignature.getBirthdate();
    final String imageData = consentSignature.getImageData();
    final String imageMimeType = consentSignature.getImageMimeType();
    final SharingScope scope = consentSignature.getScope();
    return giveConsent(subpopulationGuid, name, birthdate, imageData, imageMimeType, scope);
}
@NonNull
public Single<UserSessionInfo> giveConsent(@NonNull String subpopulationGuid,
@NonNull String name,
@NonNull LocalDate birthdate,
@Nullable String base64Image,
@Nullable String imageMimeType,
@NonNull SharingScope sharingScope) {
logger.debug("Called giveConsent");
return authenticationManager.giveConsent(subpopulationGuid, name, birthdate, base64Image,
imageMimeType, sharingScope);
}
@NonNull
public Single<ConsentSignature> getConsent(@NonNull String subpopulation) {
checkNotNull(subpopulation);
logger.debug("Called getConsent");
return authenticationManager.getConsent(subpopulation);
}
// TODO: get rid of the Consent methods below on the interface. Let ConsentManager handle the
// implementation details and only expose giveConsent, getConsent, withdrawConsent, and
// isConsented.
// Reads the locally stored consent for the default subpopulation and converts it
// to the ResearchStack ConsentSignatureBody form; null when nothing is stored.
@Nullable
@Override
public ConsentSignatureBody loadLocalConsent(Context context) {
ConsentSignatureBody consent = createConsentSignatureBody(
authenticationManager.retrieveLocalConsent(getDefaultConsentSubpopulationGuid()));
logger.debug("loadLocalConsent called, got: " + consent);
return consent;
}
// Deprecated ResearchStack entry point; intentionally unsupported here.
@Override
@Deprecated
public void saveConsent(Context context, @NonNull TaskResult consentResult) {
throw new UnsupportedOperationException();
}
// Converts the ResearchStack body to a ConsentSignature and stores it locally.
// NOTE(review): createConsentSignature returns null for a null signatureBody, but
// the private saveLocalConsent below takes @NonNull — a null body would NPE; confirm
// callers never pass null.
@Override
public void saveLocalConsent(Context context, ConsentSignatureBody signatureBody) {
logger.debug("Called saveLocalConsent with: " + signatureBody);
ConsentSignature consentSignature = createConsentSignature(signatureBody);
saveLocalConsent(consentSignature);
}
/**
 * Maps the serialized sharing-scope string back to its enum constant.
 * Falls back to {@link SharingScope#NO_SHARING} when the value is null or
 * does not match any constant's toString() form.
 */
@VisibleForTesting
SharingScope toSharingScope(String sharingScope) {
    for (SharingScope scope : SharingScope.values()) {
        if (scope.toString().equals(sharingScope)) {
            // Return on first match instead of scanning the remaining constants.
            return scope;
        }
    }
    return SharingScope.NO_SHARING;
}
/**
 * Converts a ResearchStack {@code ConsentSignatureBody} into the Bridge
 * {@code ConsentSignature} form. A null body maps to a null signature.
 */
@Nullable
@VisibleForTesting
ConsentSignature createConsentSignature(@Nullable ConsentSignatureBody
consentSignatureBody) {
    if (consentSignatureBody == null) {
        return null;
    }
    ConsentSignature result = new ConsentSignature();
    result.setName(consentSignatureBody.name);
    // scope string -> enum (defaults to NO_SHARING for unknown values)
    result.setScope(toSharingScope(consentSignatureBody.scope));
    result.setImageData(consentSignatureBody.imageData);
    result.setImageMimeType(consentSignatureBody.imageMimeType);
    // birthdate is optional; convert the java.util.Date only when present
    if (consentSignatureBody.birthdate != null) {
        result.setBirthdate(LocalDate.fromDateFields(consentSignatureBody.birthdate));
    }
    return result;
}
/**
 * Converts a Bridge {@code ConsentSignature} into the ResearchStack
 * {@code ConsentSignatureBody} form. A null signature maps to a null body.
 */
@Nullable
@VisibleForTesting
ConsentSignatureBody createConsentSignatureBody(@Nullable ConsentSignature
consentSignature) {
    if (consentSignature == null) {
        return null;
    }
    // Both birthdate and scope are optional; unwrap them into the raw types the
    // body carries (java.util.Date and String respectively).
    LocalDate birthdate = consentSignature.getBirthdate();
    SharingScope scope = consentSignature.getScope();
    return new ConsentSignatureBody(
            getStudyId(),
            consentSignature.getName(),
            birthdate == null ? null : birthdate.toDate(),
            consentSignature.getImageData(),
            consentSignature.getImageMimeType(),
            scope == null ? null : scope.toString());
}
// Deprecated ResearchStack entry point; intentionally unsupported here.
@Override
@Deprecated
public void uploadConsent(Context context, @NonNull TaskResult consentResult) {
throw new UnsupportedOperationException();
}
/**
* @return the default consent subpopulation guid. The first one made on bridge will
* be the study id, but any other ones created after that will have a unique id.
*/
protected String getDefaultConsentSubpopulationGuid() {
return bridgeConfig.getStudyId();
}
// Persists the consent signature locally for the default subpopulation.
private void saveLocalConsent(@NonNull ConsentSignature consentSignature) {
authenticationManager.storeLocalConsent(
getDefaultConsentSubpopulationGuid(),
consentSignature.getName(),
consentSignature.getBirthdate(),
consentSignature.getImageData(),
consentSignature.getImageMimeType(),
consentSignature.getScope());
}
// Uploads a ResearchStack consent body to Bridge for the study's default subpopulation.
@Override
public Observable<DataResponse> uploadConsent(Context context, ConsentSignatureBody signature) {
logger.debug("Called uploadConsent");
return uploadConsent(bridgeConfig.getStudyId(), createConsentSignature(signature));
}
// Uploads the consent by re-submitting it via giveConsent, then maps the
// resulting session to the shared success response on the default schedulers.
protected Observable<DataResponse> uploadConsent(@NonNull String subpopulationGuid,
@NonNull ConsentSignature consent) {
return giveConsent(
subpopulationGuid,
consent.getName(),
consent.getBirthdate(),
consent.getImageData(),
consent.getImageMimeType(),
consent.getScope())
.flatMapObservable(session -> SUCCESS_DATA_RESPONSE)
.compose(ObservableUtils.applyDefault());
}
//endregion
//region Account
// ResearchStack entry point. NOTE(review): the username parameter is ignored —
// only study, email and password are sent to Bridge; confirm that is intended.
@NonNull
@Override
public Observable<DataResponse> signUp(@Nullable Context context, @NonNull String email,
@Nullable String username, @Nullable String password) {
logger.debug("Called signUp");
// we should pass in data groups, removeConsent roles
SignUp signUp = new SignUp().study(getStudyId()).email(email).password(password);
return signUp(signUp);
}
// NOTE(review): only the email and password from the SignUp object are used;
// any other fields on it (data groups, roles) are dropped here.
@NonNull
public Observable<DataResponse> signUp(@NonNull SignUp signUp) {
// saving email to user object should exist elsewhere.
// Save email to user object.
return signUp(signUp.getEmail(), signUp.getPassword());
}
// Registers a new account by email with Bridge.
@NonNull
public Observable<DataResponse> signUp(@NonNull String email, @Nullable String password) {
checkNotNull(email);
logger.debug("Called signUp");
return authenticationManager
.signUp(email, password)
.andThen(SUCCESS_DATA_RESPONSE);
}
// Registers a new account by phone number with Bridge.
@NonNull
public Observable<DataResponse> signUp(@NonNull Phone phone) {
checkNotNull(phone);
logger.debug("Called signUp using phone");
return authenticationManager
.signUp(phone)
.andThen(SUCCESS_DATA_RESPONSE);
}
@Override
public boolean isSignedUp(@Nullable Context context) {
logger.debug("Called isSignedUp");
return isSignedUp();
}
// "Signed up" is defined as having a cached participant record locally,
// not as holding an authenticated session.
public boolean isSignedUp() {
logger.debug("Called isSignedUp");
return participantRecordManager.getCachedParticipantRecord() != null;
}
}
@Override
@NonNull
public Observable<DataResponse> signIn(@Nullable Context context, @NonNull String username,
@NonNull String password) {
logger.debug("Called signIn");
return signIn(username, password)
.andThen(SUCCESS_DATA_RESPONSE);
}
@NonNull
@Override
public Observable<DataResponse> signInWithExternalId(
@Nullable Context context, @NonNull String externalId) {
logger.debug("Called signInWithExternalId");
String email = bridgeConfig.getEmailForExternalId(externalId);
String password = bridgeConfig.getPasswordForExternalId(externalId);
return signIn(email, password).andThen(SUCCESS_DATA_RESPONSE);
}
@Override
public Observable<DataResponse> requestSignInLink(String email) {
logger.debug("Called requestSignInLink");
return authenticationManager.requestEmailSignIn(email)
.andThen(SUCCESS_DATA_RESPONSE);
}
@Override
public Observable<DataResponse> requestPhoneSignIn(String regionCode, String phoneNumber) {
logger.debug("Called requestSMS");
return authenticationManager.requestPhoneSignIn(regionCode, phoneNumber).andThen(SUCCESS_DATA_RESPONSE);
}
@Override
public Observable<DataResponse> signInWithEmailAndToken(String email, String token) {
logger.debug("Called signInWithEmailAndToken");
return authenticationManager.signInViaEmailLink(email, token)
.doOnSuccess(session -> bridgeManagerProvider.getAccountDao()
.setDataGroups(session.getDataGroups()))
.toCompletable()
.andThen(SUCCESS_DATA_RESPONSE);
}
@NonNull
public Single<UserSessionInfo> signInWithPhoneAndToken(@NonNull String regionCode, @NonNull String phoneNumber,
@NonNull String token) {
return authenticationManager.signInViaPhoneLink(regionCode, phoneNumber, token);
}
/**
 * @param email the participant's email
 * @param password participant's password
 * @return completion
 * @see DataProvider#signIn(Context, String, String)
 * <p>
 * May fail with ConsentRequiredException, to indicate
 * consent is required.
 * NotAuthenticatedException could indicate the user has not verified their email
 */
@NonNull
public Completable signIn(@NonNull String email, @NonNull String password) {
checkNotNull(email);
checkNotNull(password);
logger.debug("Called signIn");
return authenticationManager
.signIn(email, password)
// cache the session's data groups locally on successful sign-in
.doOnSuccess(session -> bridgeManagerProvider.getAccountDao()
.setDataGroups(session.getDataGroups()))
.toCompletable().doOnCompleted((Action0) () -> {
// TODO: upload pending files
});
}
// "Signed in" means an active Bridge session is held locally.
public boolean isSignedIn() {
logger.debug("Called isSignedIn");
return authenticationManager.getUserSessionInfo() != null;
}
@Deprecated
@Override
public boolean isSignedIn(Context context) {
return isSignedIn();
}
// Signs the participant out of Bridge and wipes local state.
// NOTE(review): the local clearing below runs immediately when this method is
// called — before (and regardless of whether) the returned Observable's sign-out
// call succeeds, since the chain only executes on subscription.
@Override
public Observable<DataResponse> signOut(Context context) {
logger.debug("Called signOut");
Observable<DataResponse> dataResponse = authenticationManager.signOut()
.andThen(SUCCESS_DATA_RESPONSE);
// Clear all the parts of the user data whether call is successful or not
AppPrefs.getInstance().clear();
StorageAccess.getInstance().removePinCode(context);
bridgeManagerProvider.getActivityManager().clearDAO();
return dataResponse;
}
// ResearchStack entry point; delegates to the Completable overload below.
@NonNull
@Override
public Observable<DataResponse> resendEmailVerification(Context context, @NonNull String
email) {
return resendEmailVerification(email).andThen(SUCCESS_DATA_RESPONSE);
}
// Asks Bridge to resend the verification email for this address.
@NonNull
public Completable resendEmailVerification(@NonNull String email) {
checkNotNull(email);
logger.debug("Called resendEmailVerification");
return authenticationManager.resendEmailVerification(email);
}
/**
 * Called to verify the user's email address
 * Behind the scenes this calls signIn with securely stored username and password
 *
 * @param context android context
 * @return Observable of the result of the method, with {@link DataResponse#isSuccess()}
 * returning true if verifyEmail was successful
 */
@NonNull
public Observable<DataResponse> verifyEmail(Context context, @NonNull String password) {
return verifyEmail(checkNotNull(getUserEmail(context)), password).andThen(SUCCESS_DATA_RESPONSE);
}
// "Verifies" the email by attempting a sign-in with the given credentials; a
// successful sign-in implies the address is verified.
@NonNull
public Completable verifyEmail(@NonNull String email, @NonNull String password) {
logger.debug("Called verifyEmail");
return authenticationManager.signIn(checkNotNull(email), checkNotNull(password)).toCompletable();
}
// ResearchStack entry point; delegates to the Completable overload below.
@NonNull
@Override
public Observable<DataResponse> forgotPassword(Context context, @NonNull String email) {
return forgotPassword(email).andThen(SUCCESS_DATA_RESPONSE);
}
// Requests a password-reset email from Bridge.
@NonNull
public Completable forgotPassword(@NonNull String email) {
checkNotNull(email);
logger.debug("Called forgotPassword");
return authenticationManager
.requestPasswordReset(email);
}
//endregion
// region Data Groups
/**
* Add data groups to this account locally. Note: this does not call the server to update the
* participant.
*/
public void addLocalDataGroup(@NonNull String dataGroup) {
logger.debug("Called addLocalDataGroup for: " + dataGroup);
bridgeManagerProvider.getAccountDao().addDataGroup(dataGroup);
}
/**
* Returns a list of data groups associated with this account. If there are no data groups,
* this method returns an empty list. The returned list is an immutable snapshot.
*/
@NonNull
public List<String> getLocalDataGroups() {
logger.debug("Called getLocalDataGroups");
return ImmutableList.copyOf(bridgeManagerProvider.getAccountDao().getDataGroups());
}
// endregion Data Groups
//region User
// Reads the locally persisted ResearchStack user; null when none is stored.
@Override
@Nullable
public User getUser(@Nullable Context context) {
return researchStackDAO.getUser();
}
/**
 * Persists the given ResearchStack user locally.
 * (The spurious {@code @Nullable} annotation is removed — it is meaningless on
 * a {@code void} method, which has no return value to annotate.)
 *
 * @param context android context (unused)
 * @param user    the user to store
 */
@Override
public void setUser(Context context, User user) {
    researchStackDAO.setUser(user);
}
// Returns the signed-in participant's email, or null when not signed in.
@Nullable
@Override
public String getUserEmail(Context context) {
return authenticationManager.getEmail();
}
//endregion
//region SharingScope
// ResearchStack entry point: returns the scope's string form, or null.
@Override
@Nullable
public String getUserSharingScope(Context context) {
SharingScope scope = getUserSharingScope();
return scope == null ? null : scope.toString();
}
// Reads the sharing scope off the current session; null when not signed in.
@Nullable
public SharingScope getUserSharingScope() {
logger.debug("Called getUserSharingScope");
UserSessionInfo session = authenticationManager.getUserSessionInfo();
if (session == null) {
return null;
}
return session.getSharingScope();
}
/**
 * Parses the JSON-serialized sharing scope and synchronously updates the
 * participant record on Bridge.
 * <p>
 * NOTE(review): this blocks the calling thread via {@code toBlocking()} —
 * confirm it is never invoked on the UI thread.
 */
@Override
public void setUserSharingScope(Context context, String scope) {
    SharingScope sharingScope = RestUtils.GSON.fromJson(scope, SharingScope.class);
    // (Removed an unused local StudyParticipant that was built and then discarded.)
    setUserSharingScope(sharingScope).toBlocking().value();
}
// Updates the participant record on Bridge with the new sharing scope and
// refreshes the locally cached data groups from the returned session.
@NonNull
public Single<UserSessionInfo> setUserSharingScope(@Nullable SharingScope scope) {
logger.debug("Called setUserSharingScope with: " + scope);
return bridgeManagerProvider.getParticipantManager()
.updateParticipantRecord((StudyParticipant) new StudyParticipant()
.email(authenticationManager.getEmail())
.sharingScope(scope))
.doOnSuccess(session -> bridgeManagerProvider.getAccountDao()
.setDataGroups(session.getDataGroups()));
}
// Fetches the participant record from Bridge; on success the cached data
// groups are refreshed, and errors are logged before propagating.
@NonNull
public Observable<StudyParticipant> getStudyParticipant() {
logger.debug("Called getStudyParticipant");
return bridgeManagerProvider.getParticipantManager().getParticipantRecord()
.doOnSuccess(participant -> bridgeManagerProvider.getAccountDao()
.setDataGroups(participant.getDataGroups()))
.doOnError(throwable -> {
logger.error(throwable.getMessage());
})
.toObservable();
}
// Pushes an updated participant record to Bridge; refreshes cached data groups.
@NonNull
public Observable<UserSessionInfo> updateStudyParticipant(StudyParticipant studyParticipant) {
logger.debug("Called updateStudyParticipant");
return bridgeManagerProvider.getParticipantManager().updateParticipantRecord(studyParticipant)
.doOnSuccess(session -> bridgeManagerProvider.getAccountDao()
.setDataGroups(session.getDataGroups()))
.toObservable();
}
/**
 * Make participant data available for download.
 * <p>
 * Request the uploaded data for this user, in a given time range (inclusive). Bridge will
 * asynchronously gather the user's data for the given time range and email a secure link to the
 * participant's registered email address.
 *
 * @param startDate The first day to include in reports that are returned (required)
 * @param endDate The last day to include in reports that are returned (required)
 * @return completable
 */
@NonNull
public Observable<DataResponse> downloadData(LocalDate startDate,
LocalDate endDate) {
logger.debug("Called downloadData");
return bridgeManagerProvider.getParticipantManager()
.emailDataToParticipant(startDate, endDate).andThen(SUCCESS_DATA_RESPONSE);
}
//endregion
//region TasksAndSchedules
// Pushes an updated scheduled activity (e.g. started/finished timestamps) to Bridge.
public Observable<Message> updateActivity(ScheduledActivity activity) {
logger.debug("Called updateActivity");
return bridgeManagerProvider.getActivityManager().updateActivity(activity);
}
// Fetches scheduled activities for the given window, with success/error logging.
public Observable<ScheduledActivityListV4> getActivities(DateTime start, DateTime end) {
logger.debug("Called getActivities");
return bridgeManagerProvider.getActivityManager().getActivities(start, end)
.doOnSuccess(scheduleActivityList -> logger.debug("Got scheduled activity list"))
.doOnError(throwable -> logger.error(throwable.getMessage()))
.toObservable();
}
// Loads the next 14 days of activities and translates them into the
// ResearchStack SchedulesAndTasksModel.
@NonNull
@Override
public Single<SchedulesAndTasksModel> loadTasksAndSchedules(Context context) {
logger.info("loadTasksAndSchedules()");
DateTime now = DateTime.now();
return bridgeManagerProvider.getActivityManager()
.getActivities(now, now.plusDays(14))
.map(ScheduledActivityListV4::getItems)
.map(this::translateActivities);
}
// Delegates task-model loading to the TaskHelper.
private TaskModel loadTaskModel(Context context, SchedulesAndTasksModel.TaskScheduleModel
task) {
logger.debug("Called loadTaskModels");
// cache guid and createdOnDate
return taskHelper.loadTaskModel(context, task);
}
// Loads the runnable Task; also remembers the task GUID so the matching
// scheduled activity can be updated when the result is uploaded.
@NonNull
@Override
public Single<Task> loadTask(Context context, SchedulesAndTasksModel.TaskScheduleModel task) {
logger.debug("Called loadTask for: " + task);
lastLoadedTaskGuid = task.taskGUID;
// currently we only support task json files, override this method to taskClassName
return taskHelper.loadTask(context, task);
}
// Convenience overload that forwards with a null Context.
public void uploadTaskResult(@NonNull TaskResult taskResult) {
// TODO: Update/Create TaskNotificationService
logger.debug("Called uploadTaskResult ");
// Context currently isn't need, we just need to fit the base interface implementation
uploadTaskResult(null, taskResult);
}
// Uploads a finished task's result to Bridge. Four stages: (1) decide whether
// this was an active task, (2) update the matching scheduled activity with
// start/finish times, (3) build the metadata.json archive entry, (4) route the
// upload to the activity- or survey-specific helper.
@SuppressLint("RxLeakedSubscription") // upload should run as long as it needs to, no early unsubscribe
@Override
public void uploadTaskResult(Context context, @NonNull TaskResult taskResult) {
// TODO: Update/Create TaskNotificationService
logger.debug("Called uploadTaskResult");
// Stage 1: active-task flag is carried in the task details map.
boolean isActivity = false;
if (taskResult.getTaskDetails().containsKey(ActiveTaskActivity.ACTIVITY_TASK_RESULT_KEY)) {
Object isActivityObject = taskResult.getTaskDetails().get(ActiveTaskActivity
.ACTIVITY_TASK_RESULT_KEY);
if (isActivityObject instanceof Boolean) {
isActivity = (Boolean) isActivityObject;
}
}
// Stage 2: update the scheduled activity recorded by loadTask(); without a
// GUID neither the activity nor metadata.json can be updated on Bridge.
ScheduledActivity lastLoadedActivity = null;
if (lastLoadedTaskGuid == null) {
logger.error("lastLoadedTaskGuid must be set for this task to complete");
logger.error("The activity or metadata.json will NOT be updated on bridge");
} else {
lastLoadedActivity = bridgeManagerProvider
.getActivityManager().getLocalActivity(lastLoadedTaskGuid);
if (lastLoadedActivity == null) {
lastLoadedActivity = new ScheduledActivity();
}
lastLoadedActivity.setGuid(lastLoadedTaskGuid);
if (taskResult.getStartDate() != null) {
lastLoadedActivity.setStartedOn(new DateTime(taskResult.getStartDate()));
}
if (taskResult.getEndDate() != null) {
lastLoadedActivity.setFinishedOn(new DateTime(taskResult.getEndDate()));
}
// Fire-and-forget update; the suppressed lint above covers this subscription.
bridgeManagerProvider.getActivityManager().updateActivity(lastLoadedActivity).subscribe(message -> {
logger.info("Update activity success " + message);
}, throwable -> logger.error(throwable.getLocalizedMessage()));
}
// Stage 3: metadata.json is only produced when an activity could be resolved.
JsonArchiveFile metadataFile = null;
if (lastLoadedActivity != null) {
metadataFile = ArchiveUtil.createMetaDataFile(lastLoadedActivity, ImmutableList.copyOf(getLocalDataGroups()));
logger.debug("metadata.json has been successfully created " + metadataFile.toString());
}
// Stage 4: active tasks upload against a schema (falling back to the task ID
// when no schema mapping exists); everything else is treated as a survey.
if (isActivity) {
String taskId = taskResult.getIdentifier();
SchemaKey schemaKey = bridgeConfig.getTaskToSchemaMap().get(taskId);
if (schemaKey != null) {
taskHelper.uploadActivityResult(
schemaKey.getId(), schemaKey.getRevision(),
metadataFile, taskResult);
} else {
logger.error("No schema key found for task " + taskId +
", falling back to task ID as schema ID");
taskHelper.uploadActivityResult(taskId, metadataFile, taskResult);
}
} else {
taskHelper.uploadSurveyResult(metadataFile, taskResult);
}
}
// Subclasses decide how to process the initial (on-boarding) task result.
@Override
public abstract void processInitialTaskResult(Context context, TaskResult taskResult);
//endregion
// Convenience overload: unwraps the list wrapper and delegates.
@NonNull
protected SchedulesAndTasksModel translateActivities(@NonNull ScheduledActivityList activityList) {
return translateActivities(activityList.getItems());
}
/**
 * Converts groups of scheduled activities into the ResearchStack
 * {@code SchedulesAndTasksModel}. Each inner collection becomes one
 * ScheduleModel; its first activity supplies the schedule-level type and
 * scheduled-on date, and every activity in the group becomes a task entry.
 */
SchedulesAndTasksModel translateSchedules(@NonNull Collection<Collection<ScheduledActivity>>
        activitiesBySchedule) {
    SchedulesAndTasksModel model = new SchedulesAndTasksModel();
    model.schedules = new ArrayList<>();
    for (Collection<ScheduledActivity> activities : activitiesBySchedule) {
        List<ScheduledActivity> aList = Lists.newArrayList(activities);
        // Assumes each group is non-empty; the first entry is representative.
        ScheduledActivity temp = aList.get(0);
        SchedulesAndTasksModel.ScheduleModel sm = new SchedulesAndTasksModel.ScheduleModel();
        sm.scheduleType = temp.isPersistent() ? "persistent" : "once";
        // Fetch the scheduled-on timestamp once and reuse it (it was previously
        // read into an unused local while getScheduledOn() was called a second time).
        DateTime scheduledOn = temp.getScheduledOn();
        sm.scheduledOn = scheduledOn.toDate();
        sm.tasks = new ArrayList<>();
        model.schedules.add(sm);
        for (ScheduledActivity sa : aList) {
            Activity activity = sa.getActivity();
            SchedulesAndTasksModel.TaskScheduleModel tsm;
            if (activity.getSurvey() != null) {
                // This is a survey. Use the subclass that carries survey keys.
                SurveyTaskScheduleModel surveyTaskScheduleModel = new SurveyTaskScheduleModel();
                surveyTaskScheduleModel.surveyGuid = activity.getSurvey().getGuid();
                surveyTaskScheduleModel.surveyCreatedOn = activity.getSurvey().getCreatedOn();
                tsm = surveyTaskScheduleModel;
            } else {
                // This is a non-survey. Use the base TaskScheduleModel.
                tsm = new SchedulesAndTasksModel.TaskScheduleModel();
            }
            tsm.taskTitle = activity.getLabel();
            tsm.taskCompletionTime = activity.getLabelDetail();
            if (activity.getTask() != null) {
                tsm.taskID = activity.getTask().getIdentifier();
            }
            tsm.taskIsOptional = sa.isPersistent();
            tsm.taskType = activity.getActivityType().toString();
            if (sa.getFinishedOn() != null) {
                tsm.taskFinishedOn = sa.getFinishedOn().toDate();
            }
            tsm.taskGUID = sa.getGuid();
            sm.tasks.add(tsm);
        }
    }
    return model;
}
// NOTE: this is a crude translation and needs to be updated to properly
// handle schedules and filters
// Groups the flat activity list by local calendar day, then translates each
// day's group into a schedule entry.
@NonNull
protected SchedulesAndTasksModel translateActivities(@NonNull List<ScheduledActivity>
activityList) {
logger.info("called translateActivities");
// group activities by day
return translateSchedules(
Multimaps.index(
activityList,
sa -> sa.getScheduledOn().toLocalDate()
).asMap().values());
}
/**
 * @return the local tz date the participant created their account
 * null is returned if the user has not signed in yet
 */
@Nullable
public DateTime getParticipantCreatedOn() {
return participantRecordManager.getParticipantCreatedOn();
}
}
package org.opensingular.resources.filter;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.annotation.WebFilter;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.security.SecureRandom;
@WebFilter(urlPatterns = "*")
public class CachingFilter implements Filter {
public static final String CACHE_CONTROL = "Cache-Control";
public static final String MAX_AGE_PATTERN = "max-age=%d";
public static final long THIRTY_DAYS = 86400L * 30; // 30 days in seconds
public static final long TWELVE_HOURS = 86400L / 2; // 12 hours in seconds
public static final SecureRandom RANDOM = new SecureRandom(SecureRandom.getSeed(4));
@Override
public void init(FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
HttpServletResponse httpServletResponse = (HttpServletResponse) response;
httpServletResponse.setHeader(CACHE_CONTROL, String.format(MAX_AGE_PATTERN, THIRTY_DAYS + RANDOM.longs(0, TWELVE_HOURS).findFirst().orElse(0L)));
chain.doFilter(request, httpServletResponse);
}
@Override
public void destroy() {
}
} |
package org.openlca.core.model;
/**
 * Qualitative risk/opportunity levels, listed from strongest opportunity to
 * strongest risk, followed by the "no data" style markers.
 * NOTE(review): ordinal order may be relied on for sorting or persistence
 * elsewhere — do not reorder constants without checking callers.
 */
public enum RiskLevel {
HIGH_OPPORTUNITY,
MEDIUM_OPPORTUNITY,
LOW_OPPORTUNITY,
NO_RISK,
VERY_LOW_RISK,
LOW_RISK,
MEDIUM_RISK,
HIGH_RISK,
VERY_HIGH_RISK,
NO_DATA,
NOT_APPLICABLE,
NO_OPPORTUNITY;
}
package org.openlca.validation;
import java.util.Arrays;
import java.util.HashSet;
import gnu.trove.set.hash.TLongHashSet;
import org.openlca.core.database.NativeSql;
import org.openlca.core.model.ModelType;
import org.openlca.util.Strings;
/**
 * Validation worker that checks units and unit groups directly against the
 * database tables: every unit must belong to a known group, have a reference
 * ID, a name, a positive conversion factor, and a name/synonym that is unique
 * across ALL units (the duplicate check is global, not per group — presumably
 * intentional; confirm if per-group uniqueness was meant).
 */
class UnitCheck implements Runnable {
private final Validation v;
// set when any error/warning was reported, to suppress the final "ok" message
private boolean foundErrors = false;
UnitCheck(Validation v) {
this.v = v;
}
@Override
public void run() {
try {
// First pass collects unit IDs; second pass validates group references.
var unitIDs = checkUnits();
checkGroups(unitIDs);
if (!foundErrors && !v.wasCanceled()) {
v.ok("checked units and unit groups");
}
} catch (Exception e) {
v.error("error in unit validation", e);
} finally {
// always signal completion so the validation run can finish
v.workerFinished();
}
}
// Validates every row of tbl_units; returns the set of all unit IDs seen.
private TLongHashSet checkUnits() {
if (v.wasCanceled())
return new TLongHashSet(0);
var unitIDs = new TLongHashSet();
// all unit names and synonyms seen so far, for duplicate detection
var names = new HashSet<String>();
var sql = "select " +
"id, " +
"ref_id, " +
"name, " +
"conversion_factor, " +
"f_unit_group, " +
"synonyms from tbl_units";
NativeSql.on(v.db).query(sql, r -> {
long id = r.getLong(1);
unitIDs.add(id);
var groupID = r.getLong(5);
// A unit without a valid group cannot be reported against its group, so
// the remaining checks for this row are skipped.
if (!v.ids.contains(ModelType.UNIT_GROUP, groupID)) {
v.error(id, ModelType.UNIT_GROUP, "no unit group for unit @" + id);
foundErrors = true;
return !v.wasCanceled();
}
var refID = r.getString(2);
if (Strings.nullOrEmpty(refID)) {
v.error(groupID, ModelType.UNIT_GROUP,
"unit has no reference ID @" + id);
foundErrors = true;
}
var name = r.getString(3);
if (Strings.nullOrEmpty(name)) {
v.error(groupID, ModelType.UNIT_GROUP, "unit without name");
foundErrors = true;
return !v.wasCanceled();
}
var factor = r.getDouble(4);
if (factor <= 0) {
v.error(groupID, ModelType.UNIT_GROUP,
"unit " + name + " has invalid conversion factor: " + factor);
foundErrors = true;
}
// check for duplicate names & synonyms
// NOTE: on a duplicate name this row's synonyms are not processed.
if (names.contains(name)) {
v.warning(groupID, ModelType.UNIT_GROUP,
"duplicate unit name or synonym: " + name);
foundErrors = true;
return !v.wasCanceled();
}
names.add(name);
// synonyms are stored semicolon-separated; each is checked and recorded
var synonyms = r.getString(6);
if (!Strings.nullOrEmpty(synonyms)) {
Arrays.stream(synonyms.split(";"))
.forEach(synonym -> {
var syn = synonym.trim();
if (Strings.notEmpty(syn)) {
if (names.contains(syn)) {
v.warning(groupID, ModelType.UNIT_GROUP,
"duplicate unit name or synonym: " + syn);
foundErrors = true;
}
names.add(syn);
}
});
}
// returning false aborts the query loop when the run was canceled
return !v.wasCanceled();
});
return unitIDs;
}
// Validates that each unit group references an existing unit and, when set,
// an existing default flow property.
private void checkGroups(TLongHashSet unitIDs) {
if (v.wasCanceled())
return;
var sql = "select " +
"id, " +
"f_reference_unit, " +
"f_default_flow_property from tbl_unit_groups";
NativeSql.on(v.db).query(sql, r -> {
var id = r.getLong(1);
var unitID = r.getLong(2);
if (!unitIDs.contains(unitID)) {
v.error(id, ModelType.UNIT_GROUP,
"invalid reference unit @" + unitID);
foundErrors = true;
}
// 0 means "no default flow property set" and is allowed
var propID = r.getLong(3);
if (propID != 0 && !v.ids.contains(ModelType.FLOW_PROPERTY, propID)) {
v.warning(id, ModelType.UNIT_GROUP,
"invalid link to default property @" + propID);
foundErrors = true;
}
return !v.wasCanceled();
});
}
}
package com.philliphsu.bottomsheetpickers;
import android.provider.Settings;
import android.support.test.espresso.Espresso;
import android.support.test.espresso.ViewAssertion;
import android.support.test.espresso.ViewInteraction;
import android.support.test.espresso.action.ViewActions;
import android.support.test.espresso.assertion.ViewAssertions;
import android.support.test.espresso.matcher.ViewMatchers;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.text.format.DateFormat;
import android.view.View;
import android.view.ViewGroup;
import com.example.bottomsheetpickers.R;
import com.example.bottomsheetpickers.TextSwitcherActivity;
import com.philliphsu.bottomsheetpickers.view.LocaleModel;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.hamcrest.TypeSafeMatcher;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.List;
@RunWith(AndroidJUnit4.class)
public class NumberPadTimePickerDialogTest {
// Expected view-state test cases, keyed by the digit sequence typed so far.
// Populated once by the static initializer below. Initial capacities match
// the number of cases each builder adds.
private static final List<TestCase> MODE_12HR_TESTS_1_TO_9 = new ArrayList<>(9);
private static final List<TestCase> MODE_24HR_TESTS_0_TO_9 = new ArrayList<>(10);
private static final List<TestCase> MODE_12HR_TESTS_10_TO_95 = new ArrayList<>(54);
private static final List<TestCase> MODE_24HR_TESTS_00_TO_95 = new ArrayList<>(65);
private static final List<TestCase> MODE_12HR_TESTS_100_TO_959 = new ArrayList<>();
private static final List<TestCase> MODE_24HR_TESTS_000_TO_959 = new ArrayList<>();
// Diamond added: this was a raw ArrayList(), which compiled with an unchecked warning.
private static final List<TestCase> MODE_12HR_TESTS_1000_TO_1259 = new ArrayList<>();
// // TODO
// private static final List<TestCase> MODE_24HR_TESTS_0000_TO_2359 = new ArrayList<>();
static {
    build_Mode12Hr_Tests_1_to_9();
    build_Mode24Hr_Tests_0_to_9();
    build_Mode12Hr_Tests_10_to_95();
    build_Mode24Hr_Tests_00_to_95();
    build_Mode12Hr_Tests_100_to_959();
    build_Mode24Hr_Tests_000_to_959();
    build_Mode12Hr_Tests_1000_to_1259();
}
// 12-hour mode, single digit 1-9 typed: next key may start "1[0-2]:" or
// "i:[0-5]", so digits 0-5 stay enabled; AM/PM keys are available; OK is not.
private static void build_Mode12Hr_Tests_1_to_9() {
for (int i = 1; i <= 9; i++) {
MODE_12HR_TESTS_1_TO_9.add(new TestCase.Builder(array(i), false)
.numberKeysEnabled(0, 6 /* 1[0-2]:... or i:[0-5]... */)
.backspaceEnabled(true)
.headerDisplayFocused(true)
.altKeysEnabled(true)
.okButtonEnabled(false)
.timeDisplay(text(i))
.build());
}
}
// 24-hour mode, single digit 0-9: after 0 or 1 any digit may follow (hours
// 00-19); after 2-9 only 0-5 (either "2[0-3]:" or minutes "i:[0-5]").
private static void build_Mode24Hr_Tests_0_to_9() {
for (int i = 0; i <= 9; i++) {
TestCase.Builder builder = new TestCase.Builder(array(i), true)
.backspaceEnabled(true)
.headerDisplayFocused(true)
.altKeysEnabled(true)
.okButtonEnabled(false)
.timeDisplay(text(i));
if (i <= 1) {
builder.numberKeysEnabled(0, 10 /* i[0-9]:... or i:[0-5]... */);
} else {
builder.numberKeysEnabled(0, 6 /* 2[0-3]:... or i:[0-5]... */);
}
MODE_24HR_TESTS_0_TO_9.add(builder.build());
}
}
// 12-hour mode, two digits 10-95 (second digit must be a valid minutes tens
// digit, hence the `i % 10 > 5` skip). AM/PM only stays enabled while the
// pair could still be an hour (10-12).
private static void build_Mode12Hr_Tests_10_to_95() {
for (int i = 10; i <= 95; i++) {
if (i % 10 > 5) continue;
TestCase test = new TestCase.Builder(array(i / 10, i % 10), false)
.numberKeysEnabled(0, 10)
.backspaceEnabled(true)
.headerDisplayFocused(true)
.altKeysEnabled(i >= 10 && i <= 12)
.okButtonEnabled(false)
.timeDisplay(String.format("%d", i) /* TODO: Pull formatting logic from
Presenter impl. into its own class. Then format the current sequence of
digits. */)
.build();
MODE_12HR_TESTS_10_TO_95.add(test);
}
}
// 24-hour mode, two digits 00-95. Pairs ending in 6-9 are only reachable
// below 26 (hours 06-09, 16-19); AM/PM alt keys stay enabled only while the
// pair is a valid 24h hour (00-23).
private static void build_Mode24Hr_Tests_00_to_95() {
for (int i = 0; i <= 95; i++) {
if (i % 10 > 5 && i > 25) continue;
TestCase test = new TestCase.Builder(array(i / 10, i % 10), true)
.numberKeysEnabled(0, (i % 10 > 5) ? 6 : 10 /* (0-1)(6-9):[0-5] or (i_1):(i_2)[0-9]*/)
.backspaceEnabled(true)
.headerDisplayFocused(true)
.altKeysEnabled(i >= 0 && i <= 23)
.okButtonEnabled(false)
.timeDisplay(String.format("%02d", i) /* TODO: Pull formatting logic from
Presenter impl. into its own class. Then format the current sequence of
digits. */)
.build();
MODE_24HR_TESTS_00_TO_95.add(test);
}
}
// 12-hour mode, three digits 100-959 (last two digits must be valid minutes).
// A fourth digit is only possible while the sequence could extend to
// 10:00-12:59, i.e. up to 125 with a minutes-compatible last digit.
private static void build_Mode12Hr_Tests_100_to_959() {
for (int i = 100; i <= 959; i++) {
if (i % 100 > 59) continue;
TestCase test = new TestCase.Builder(
array(i / 100, (i % 100) / 10, i % 10), false)
.numberKeysEnabled(0, (i > 125 || i % 10 > 5) ? 0 : 10)
.backspaceEnabled(true)
.headerDisplayFocused(true)
.altKeysEnabled(true)
.build();
MODE_12HR_TESTS_100_TO_959.add(test);
}
}
// 24-hour mode, three digits 000-959. The skip conditions drop sequences that
// can never occur; okButtonEnabled reflects whether the digits already form a
// valid H:MM time, and the fourth-digit cap reflects possible HH:MM extensions.
private static void build_Mode24Hr_Tests_000_to_959() {
for (int i = 0; i <= 959; i++) {
boolean skipEndingIn6Through9From60To100 = i % 10 > 5 && i > 60 && i < 100;
boolean skipEndingIn6Through9From160To200 = i % 10 > 5 && i > 160 && i < 200;
if (skipEndingIn6Through9From60To100
|| skipEndingIn6Through9From160To200
|| i > 259 && i % 100 > 59) {
continue;
}
boolean canBeValidTimeNow = i < 60 || (i >= 100 && i < 160) || i >= 200;
int cap;
if ((i % 10 > 5 && (i < 160 || i > 200)) || i >= 236) {
cap = 0;
} else {
cap = 10;
}
TestCase test = new TestCase.Builder(
array(i / 100, (i % 100) / 10, i % 10), true)
.numberKeysEnabled(0, cap)
.backspaceEnabled(true)
.okButtonEnabled(canBeValidTimeNow)
.headerDisplayFocused(true)
.altKeysEnabled(false)
.build();
MODE_24HR_TESTS_000_TO_959.add(test);
}
}
// 12-hour mode, four digits 1000-1259: the time is complete, so no further
// number keys are enabled; only backspace and AM/PM remain.
private static void build_Mode12Hr_Tests_1000_to_1259() {
for (int i = 1000; i <= 1259; i++) {
if (i % 100 > 59) continue;
TestCase test = new TestCase.Builder(
array(i / 1000, (i % 1000) / 100, (i % 100) / 10, i % 10), false)
.numberKeysEnabled(0, 0)
.backspaceEnabled(true)
.headerDisplayFocused(true)
.altKeysEnabled(true)
.build();
MODE_12HR_TESTS_1000_TO_1259.add(test);
}
}
/** Wraps varargs ints as an array, mapping an explicit null argument to an empty array. */
private static int[] array(int... a) {
    if (a == null) {
        return new int[0];
    }
    return a;
}
/**
 * {@link ActivityTestRule} is a JUnit {@link Rule @Rule} to launch your activity under test.
 *
 * <p>
 * Rules are interceptors which are executed for each test method and are important building
 * blocks of Junit tests.
 *
 * <p>
 * The annotated Activity will be launched before each annotated @Test and before any annotated
 * {@link Before @Before} methods. The Activity is automatically terminated after the test is
 * completed and all {@link After @After} methods are finished.
 */
@Rule
public ActivityTestRule<TextSwitcherActivity> mActivityTestRule =
new ActivityTestRule<>(TextSwitcherActivity.class);
private LocaleModel mLocaleModel;
// Records whether the device was in 24-hour format when the test started, so
// the original time format can be restored at the end of testing.
private boolean mInitiallyIn24HourMode;
@Before
public void setup() {
mLocaleModel = new LocaleModel(mActivityTestRule.getActivity());
mInitiallyIn24HourMode = DateFormat.is24HourFormat(mActivityTestRule.getActivity());
}
// Opens the picker with no input and checks the initial enabled/visible state
// of every view: empty time display, AM/PM visibility per device time format,
// backspace and alt keys disabled, digit keys per mode, OK disabled.
@Test
public void verifyInitialViewEnabledStates() {
openTimePicker();
Espresso.onView(ViewMatchers.withId(R.id.bsp_input_time)).check(
ViewAssertions.matches(ViewMatchers.withText("")));
// Check that the am/pm view is set to the correct visibility.
// Rather than use the isDisplayed() matcher, which, on top of matching the view to a
// View.VISIBLE state, matches the view to being drawn with visible bounds, we use
// the withEffectiveVisibility() matcher to match only the former criterion.
Espresso.onView(ViewMatchers.withId(R.id.bsp_input_ampm)).check(
ViewAssertions.matches(ViewMatchers.withEffectiveVisibility(mInitiallyIn24HourMode ?
ViewMatchers.Visibility.GONE : ViewMatchers.Visibility.VISIBLE)));
// In 12-hour mode the AM/PM label position depends on the locale's convention.
if (!mInitiallyIn24HourMode) {
Espresso.onView(ViewMatchers.withId(R.id.bsp_input_ampm)).check(
ViewAssertions.matches(isNthChildOf(
ViewMatchers.withId(R.id.bsp_input_time_container),
mLocaleModel.isAmPmWrittenBeforeTime() ? 0 : 1)));
}
Espresso.onView(ViewMatchers.withId(R.id.bsp_backspace)).check(
matchesIsEnabled(false));
// We can easily manually verify whether the divider is focused, so it's not worth the
// trouble of writing a test.
// Digit 0 is only a valid first key in 24-hour mode (hours 00-09).
for (int i = 0; i < 10; i++) {
Espresso.onView(withDigit(i)).check(matchesIsEnabled(mInitiallyIn24HourMode || i > 0));
}
Espresso.onView(ViewMatchers.withId(R.id.bsp_text9)).check(matchesIsEnabled(false));
Espresso.onView(ViewMatchers.withId(R.id.bsp_text11)).check(matchesIsEnabled(false));
Espresso.onView(ViewMatchers.withText(android.R.string.ok)).check(matchesIsEnabled(false));
}
// Drives the 12-hour single-digit inputs (1-9) and checks the resulting key/button states.
@Test
public void mode12Hr_verifyViewEnabledStates_Input_1_to_9() {
    initializeTimePicker(false);
    verifyViewEnabledStates(MODE_12HR_TESTS_1_TO_9);
}
// Drives the 24-hour single-digit inputs (0-9) and checks the resulting key/button states.
@Test
public void mode24Hr_verifyViewEnabledStates_Input_0_to_9() {
    initializeTimePicker(true);
    verifyViewEnabledStates(MODE_24HR_TESTS_0_TO_9);
}
// Drives the 12-hour two-digit inputs (10-95) and checks the resulting key/button states.
@Test
public void mode12Hr_verifyViewEnabledStates_Input_10_to_95() {
    initializeTimePicker(false);
    verifyViewEnabledStates(MODE_12HR_TESTS_10_TO_95);
}
// Drives the 24-hour two-digit inputs (00-95) and checks the resulting key/button states.
@Test
public void mode24Hr_verifyViewEnabledStates_Input_00_to_95() {
    initializeTimePicker(true);
    verifyViewEnabledStates(MODE_24HR_TESTS_00_TO_95);
}
// Drives the 12-hour three-digit inputs (100-959) and checks the resulting key/button states.
@Test
public void mode12Hr_verifyViewEnabledStates_Input_100_to_959() {
    initializeTimePicker(false);
    verifyViewEnabledStates(MODE_12HR_TESTS_100_TO_959);
}
// Drives the 24-hour three-digit inputs (000-959) and checks the resulting key/button states.
@Test
public void mode24Hr_verifyViewEnabledStates_Input_000_to_959() {
    initializeTimePicker(true);
    verifyViewEnabledStates(MODE_24HR_TESTS_000_TO_959);
}
// Drives the 12-hour four-digit inputs (1000-1259) and checks the resulting key/button states.
@Test
public void mode12Hr_verifyViewEnabledStates_Input_1000_to_1259() {
    initializeTimePicker(false);
    verifyViewEnabledStates(MODE_12HR_TESTS_1000_TO_1259);
}
@After
public void resetDeviceTimeFormat() {
    // Restore whatever 12/24-hour setting the device had before the test ran (see setup()).
    setDeviceTo24HourMode(mInitiallyIn24HourMode);
}
// Writes the system-wide 12/24-hour time-format preference.
// NOTE(review): writing Settings.System normally requires the WRITE_SETTINGS permission -
// confirm the test manifest grants it.
private void setDeviceTo24HourMode(boolean use24HourMode) {
    Settings.System.putString(mActivityTestRule.getActivity().getContentResolver(),
            Settings.System.TIME_12_24, use24HourMode ? "24" : "12");
}
// Puts the device in the requested time format, opens the picker, and in 12-hour mode
// sanity-checks that the '0' key is disabled (a 12-hour time cannot start with 0).
private void initializeTimePicker(boolean use24HourMode) {
    setDeviceTo24HourMode(use24HourMode);
    openTimePicker();
    if (!use24HourMode) {
        // Check that '0' button is disabled.
        Espresso.onView(ViewMatchers.withId(R.id.bsp_text10)).check(matchesIsEnabled(false));
    }
}
// Launches the time picker from the host activity's third button.
private static void openTimePicker() {
    Espresso.onView(ViewMatchers.withId(R.id.button3)).perform(ViewActions.click());
}
/**
 * Helper method that wraps {@link ViewMatchers#withText(String) withText(String)}.
 *
 * Note: after a number key is clicked, the time display also contains digit text, so this
 * matcher may match more than one view; see getButtonInteractions() for the id-based lookup.
 *
 * @return A Matcher that matches a number key button by its text representation
 *         of {@code digit}.
 */
private static Matcher<View> withDigit(int digit) {
    // TODO: When we're comfortable with the APIs, we can statically import them and
    // make direct calls to these methods and cut down on the verbosity, instead of
    // writing helper methods that wrap these APIs.
    return ViewMatchers.withText(text(digit));
}
// TODO: See if we can use ButtonTextModel#text() instead. Currently, it is package private.
// Formats a digit as a string using the default locale.
// NOTE(review): String.format without an explicit Locale is locale-sensitive; this is
// presumably intentional so the matcher agrees with localized button labels - confirm.
private static String text(int digit) {
    return String.format("%d", digit);
}
/**
 * Builds a {@link ViewAssertion} checking a view's enabled state.
 *
 * @param enabled Whether the view should be matched to be enabled or not.
 * @return A {@link ViewAssertion} that asserts the view is enabled (or disabled).
 */
private static ViewAssertion matchesIsEnabled(boolean enabled) {
    // TODO: When we're comfortable with the APIs, we can statically import them and
    // make direct calls to these methods and cut down on the verbosity, instead of
    // writing helper methods that wrap these APIs.
    Matcher<View> enabledMatcher;
    if (enabled) {
        enabledMatcher = ViewMatchers.isEnabled();
    } else {
        enabledMatcher = Matchers.not(ViewMatchers.isEnabled());
    }
    return ViewAssertions.matches(enabledMatcher);
}
/**
 * Returns a matcher that matches a {@link View} that is a child of the described parent
 * at the specified index.
 *
 * @param parentMatcher A matcher that describes the view's parent.
 * @param childIndex The index of the view at which it is a child of the described parent.
 */
private static Matcher<View> isNthChildOf(final Matcher<View> parentMatcher, final int childIndex) {
    return new TypeSafeMatcher<View>() {
        @Override
        public void describeTo(Description description) {
            description.appendText("is child at index "+childIndex+" of view matched by parentMatcher: ");
            parentMatcher.describeTo(description);
        }
        @Override
        public boolean matchesSafely(View view) {
            // Guard against root views (null parent) and parents that are not ViewGroups
            // (e.g. ViewRootImpl): the original unconditional cast would throw a
            // NullPointerException/ClassCastException here instead of simply not matching.
            Object rawParent = view.getParent();
            if (!(rawParent instanceof ViewGroup)) {
                return false;
            }
            ViewGroup parent = (ViewGroup) rawParent;
            return parentMatcher.matches(parent) && view.equals(parent.getChildAt(childIndex));
        }
    };
}
/**
 * Collects a ViewInteraction for each number key, indexed by the digit it types.
 *
 * We cannot rely on the withDigit() matcher to retrieve these because, after performing
 * a click on a button, the time display will update to take on that button's digit text,
 * and so withDigit() would match multiple views (the button itself and the time display).
 * That would prevent us from performing validation on the same ViewInteractions later,
 * so the buttons are looked up by their resource ids instead.
 */
private static ViewInteraction[] getButtonInteractions() {
    // Button ids in digit order: bsp_text10 is the '0' key, bsp_text0..bsp_text8 map to 1-9.
    final int[] buttonIds = {
            R.id.bsp_text10, R.id.bsp_text0, R.id.bsp_text1, R.id.bsp_text2,
            R.id.bsp_text3, R.id.bsp_text4, R.id.bsp_text5, R.id.bsp_text6,
            R.id.bsp_text7, R.id.bsp_text8
    };
    ViewInteraction[] interactions = new ViewInteraction[buttonIds.length];
    for (int digit = 0; digit < buttonIds.length; digit++) {
        interactions[digit] = Espresso.onView(ViewMatchers.withId(buttonIds[digit]));
    }
    return interactions;
}
/** Returns interactions for the two alt keys: index 0 = left (bsp_text9), 1 = right (bsp_text11). */
private static ViewInteraction[] getAltButtonInteractions() {
    return new ViewInteraction[] {
            Espresso.onView(ViewMatchers.withId(R.id.bsp_text9)),
            Espresso.onView(ViewMatchers.withId(R.id.bsp_text11))
    };
}
/** Runs every test case in the given suite, in order. */
private static void verifyViewEnabledStates(List<TestCase> testSuite) {
    for (int i = 0; i < testSuite.size(); i++) {
        verifyViewEnabledStates(testSuite.get(i));
    }
}
/**
 * Types the test case's digit sequence into the picker, then verifies the enabled state
 * of the number keys, alt keys, and OK button against the expectations recorded in the
 * test case. Finishes by long-pressing backspace to clear the input so the picker is
 * reset for the next test case.
 */
private static void verifyViewEnabledStates(TestCase test) {
    ViewInteraction[] buttonsInteractions = getButtonInteractions();
    ViewInteraction[] altButtonsInteractions = getAltButtonInteractions();
    for (int digit : test.sequence) {
        buttonsInteractions[digit]
                .check(ViewAssertions.matches(ViewMatchers.isEnabled()))
                .perform(ViewActions.click());
    }
    for (int i = 0; i < 10; i++) {
        // Keys in [numberKeysEnabledStart, numberKeysEnabledEnd) should be enabled.
        buttonsInteractions[i].check(matchesIsEnabled(
                i >= test.numberKeysEnabledStart && i < test.numberKeysEnabledEnd));
    }
    // The alt-key expectations do not depend on the loop index; the original code
    // redundantly re-checked them on every iteration of the 0-9 loop. Once suffices.
    altButtonsInteractions[0].check(matchesIsEnabled(test.leftAltKeyEnabled));
    altButtonsInteractions[1].check(matchesIsEnabled(test.rightAltKeyEnabled));
    Espresso.onView(ViewMatchers.withText(android.R.string.ok))
            .check(matchesIsEnabled(test.okButtonEnabled));
    ViewInteraction backspaceInteraction = Espresso.onView(
            ViewMatchers.withId(R.id.bsp_backspace));
    // Reset after each iteration by backspacing on the button just clicked.
    backspaceInteraction.check(matchesIsEnabled(true))
            .perform(ViewActions.longClick())
            .check(matchesIsEnabled(false));
}
/**
 * Immutable description of one number-pad input scenario: the digit sequence to type and
 * the view states expected afterwards. Instances are created via the nested Builder.
 */
private static final class TestCase {
    // Digits to click, in order.
    final int[] sequence;
    // Second Builder argument; the call sites pass true in the 24-hour suites and false in
    // the 12-hour suites. NOTE(review): despite the name this appears to flag the time
    // format rather than an AM/PM selection - confirm against the builder call sites.
    final boolean ampmState;
    // Number keys with index in [numberKeysEnabledStart, numberKeysEnabledEnd) are
    // expected to be enabled after the sequence has been typed.
    final int numberKeysEnabledStart;
    final int numberKeysEnabledEnd;
    // Expected enabled state of the backspace key.
    final boolean backspaceEnabled;
    // Whether the header time display is expected to be focused.
    final boolean headerDisplayFocused;
    // Expected enabled state of the left (bsp_text9) and right (bsp_text11) alt keys.
    final boolean leftAltKeyEnabled;
    final boolean rightAltKeyEnabled;
    // Expected enabled state of the OK button.
    final boolean okButtonEnabled;
    // Expected header texts; not asserted by the verification code visible in this file.
    final CharSequence timeDisplay;
    final CharSequence ampmDisplay;
    TestCase(int[] sequence, boolean ampmState, int numberKeysEnabledStart, int numberKeysEnabledEnd, boolean backspaceEnabled, boolean headerDisplayFocused, boolean leftAltKeyEnabled, boolean rightAltKeyEnabled, boolean okButtonEnabled, CharSequence timeDisplay, CharSequence ampmDisplay) {
        this.sequence = sequence;
        this.ampmState = ampmState;
        this.numberKeysEnabledStart = numberKeysEnabledStart;
        this.numberKeysEnabledEnd = numberKeysEnabledEnd;
        this.backspaceEnabled = backspaceEnabled;
        this.headerDisplayFocused = headerDisplayFocused;
        this.leftAltKeyEnabled = leftAltKeyEnabled;
        this.rightAltKeyEnabled = rightAltKeyEnabled;
        this.okButtonEnabled = okButtonEnabled;
        this.timeDisplay = timeDisplay;
        this.ampmDisplay = ampmDisplay;
    }
    /** Fluent builder for TestCase; expectations left unset default to false/0/null. */
    static class Builder {
        private final int[] sequence;
        private final boolean ampmState;
        private int numberKeysEnabledStart;
        private int numberKeysEnabledEnd;
        private boolean backspaceEnabled;
        private boolean headerDisplayFocused;
        private boolean leftAltKeyEnabled;
        private boolean rightAltKeyEnabled;
        private boolean okButtonEnabled;
        private CharSequence timeDisplay;
        private CharSequence ampmDisplay;
        public Builder(int[] sequence, boolean ampmState) {
            this.sequence = sequence;
            this.ampmState = ampmState;
        }
        // Half-open range [start, end) of number keys expected to be enabled.
        public Builder numberKeysEnabled(int numberKeysEnabledStart, int numberKeysEnabledEnd) {
            this.numberKeysEnabledStart = numberKeysEnabledStart;
            this.numberKeysEnabledEnd = numberKeysEnabledEnd;
            return this;
        }
        public Builder backspaceEnabled(boolean backspaceEnabled) {
            this.backspaceEnabled = backspaceEnabled;
            return this;
        }
        // Sets the expectation for both alt keys at once.
        public Builder altKeysEnabled(boolean enabled) {
            leftAltKeyEnabled = rightAltKeyEnabled = enabled;
            return this;
        }
        public Builder headerDisplayFocused(boolean headerDisplayFocused) {
            this.headerDisplayFocused = headerDisplayFocused;
            return this;
        }
        public Builder timeDisplay(CharSequence timeDisplay) {
            this.timeDisplay = timeDisplay;
            return this;
        }
        public Builder ampmDisplay(CharSequence ampmDisplay) {
            this.ampmDisplay = ampmDisplay;
            return this;
        }
        public Builder okButtonEnabled(boolean okButtonEnabled) {
            this.okButtonEnabled = okButtonEnabled;
            return this;
        }
        public TestCase build() {
            return new TestCase(sequence, ampmState, numberKeysEnabledStart, numberKeysEnabledEnd,
                    backspaceEnabled, headerDisplayFocused, leftAltKeyEnabled,
                    rightAltKeyEnabled, okButtonEnabled, timeDisplay, ampmDisplay);
        }
    }
}
} |
package org.sagebionetworks.repo.web.controller;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.json.JSONException;
import org.json.JSONObject;
import org.sagebionetworks.repo.ServiceConstants;
import org.sagebionetworks.repo.ServiceConstants.AttachmentType;
import org.sagebionetworks.repo.manager.TestUserDAO;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.model.ACLInheritanceException;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.Annotations;
import org.sagebionetworks.repo.model.AuthorizationConstants;
import org.sagebionetworks.repo.model.BooleanResult;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.Entity;
import org.sagebionetworks.repo.model.EntityHeader;
import org.sagebionetworks.repo.model.EntityPath;
import org.sagebionetworks.repo.model.EntityType;
import org.sagebionetworks.repo.model.MigrationType;
import org.sagebionetworks.repo.model.PaginatedResults;
import org.sagebionetworks.repo.model.QueryResults;
import org.sagebionetworks.repo.model.UserGroup;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.repo.model.Versionable;
import org.sagebionetworks.repo.model.attachment.PresignedUrl;
import org.sagebionetworks.repo.model.attachment.S3AttachmentToken;
import org.sagebionetworks.repo.model.auth.UserEntityPermissions;
import org.sagebionetworks.repo.model.daemon.BackupRestoreStatus;
import org.sagebionetworks.repo.model.daemon.BackupSubmission;
import org.sagebionetworks.repo.model.daemon.RestoreSubmission;
import org.sagebionetworks.repo.model.ontology.Concept;
import org.sagebionetworks.repo.model.ontology.ConceptResponsePage;
import org.sagebionetworks.repo.model.search.SearchResults;
import org.sagebionetworks.repo.model.status.StackStatus;
import org.sagebionetworks.repo.web.GenericEntityController;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.repo.web.UrlHelpers;
import org.sagebionetworks.schema.adapter.JSONEntity;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.EntityFactory;
import org.sagebionetworks.schema.adapter.org.json.JSONObjectAdapterImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletConfig;
import org.springframework.web.servlet.DispatcherServlet;
/**
 * Helper class to make HttpServlet requests.
 *
 * Users can use all the static methods if they like.
 *
 * Alternatively, the instance methods add a level of convenience by managing the
 * deletion of entities created during testing and also the user account(s) to
 * be used during testing.
 *
 * @author jmhill
 *
 */
public class ServletTestHelper {
private static final Log log = LogFactory.getLog(ServletTestHelper.class);
// Shared JSON (de)serializer for request/response bodies.
private static final EntityObjectMapper objectMapper = new EntityObjectMapper();
// User the instance methods act as until setTestUser() is called.
private static final String DEFAULT_USERNAME = TestUserDAO.TEST_USER_NAME;
@Autowired
// Used for cleanup
private GenericEntityController entityController;
@Autowired
private UserManager userManager;
// Lazily initialized once in setUp() and shared by every helper (static on purpose).
private static HttpServlet dispatchServlet = null;
private UserInfo testUser = null;
// Ids of entities created via the instance createEntity(); deleted in tearDown().
private List<String> toDelete = null;
private String username = null;
/**
 * Setup the servlet, default test user, and entity list for test cleanup.
 *
 * Create a Spring MVC DispatcherServlet so that we can test our URL
 * mapping, request format, response format, and response status code.
 *
 * @throws Exception
 */
public void setUp() throws Exception {
    // Create the shared DispatcherServlet only once for the whole test run.
    // NOTE(review): this lazy init is not thread-safe; assumes single-threaded tests.
    if(null == dispatchServlet) {
        MockServletConfig servletConfig = new MockServletConfig("repository");
        servletConfig.addInitParameter("contextConfigLocation",
                "classpath:test-context.xml");
        dispatchServlet = new DispatcherServlet();
        dispatchServlet.init(servletConfig);
    }
    assertNotNull(entityController);
    toDelete = new ArrayList<String>();
    // Default to the standard test user until setTestUser() is called.
    this.setTestUser(DEFAULT_USERNAME);
}
/**
 * Change the test user used by all subsequent instance-method requests.
 *
 * @param username the name of the user to act as
 * @throws Exception if the user cannot be resolved or fails validation
 */
public void setTestUser(String username) throws Exception {
    // Make sure we have a valid user.
    this.username = username;
    testUser = userManager.getUserInfo(this.username);
    UserInfo.validateUserInfo(testUser);
}
/** @return the UserInfo of the currently configured test user (set in setTestUser()). */
public UserInfo getTestUser() throws Exception{
    return testUser;
}
/**
 * Cleanup the created entities and destroy the servlet
 *
 * @throws Exception
 */
public void tearDown() throws Exception {
    if (entityController != null && toDelete != null) {
        for (String idToDelete : toDelete) {
            try {
                // Delete as admin so cleanup succeeds regardless of the test user's permissions.
                entityController.deleteEntity(TestUserDAO.ADMIN_USER_NAME, idToDelete);
            } catch (NotFoundException e) {
                // Already gone - nothing to do here
            } catch (DatastoreException e) {
                // Best-effort cleanup - nothing to do here.
            }
        }
    }
}
/**
 * Create an entity as the current test user and register it for deletion in tearDown().
 *
 * @param <T> the entity type
 * @param entity the entity to create
 * @param extraParams optional extra request parameters (may be null)
 * @return the entity as returned by the server
 * @throws Exception
 */
public <T extends Entity> T createEntity(T entity,
        Map<String, String> extraParams) throws Exception {
    T returnedEntity = ServletTestHelper.createEntity(dispatchServlet,
            entity, username, extraParams);
    // Track the new id so tearDown() deletes it.
    toDelete.add(returnedEntity.getId());
    return returnedEntity;
}
/**
 * POST an arbitrary JSON-serializable object to the given URI as the current test user.
 * Expects an HTTP 201 (CREATED) response and deserializes the response body back into
 * the object's own runtime class.
 *
 * @param uri the request URI to POST to
 * @param object the object to serialize as the request body
 * @return the created object as returned by the server
 * @throws Exception if the request fails or the response status is not 201
 */
public <T extends Object> T createObject(String uri, T object) throws Exception {
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("POST");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(uri);
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, username);
    request.addHeader("Content-Type", "application/json; charset=UTF-8");
    StringWriter out = new StringWriter();
    objectMapper.writeValue(out, object);
    String body = out.toString();
    // Strip the "jsonschema" property the mapper adds before sending the body.
    // TODO why is this adding the jsonschema property?
    JSONObject obj = new JSONObject(body);
    obj.remove("jsonschema");
    body = obj.toString();
    request.setContent(body.getBytes("UTF-8"));
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.CREATED.value()) {
        throw new ServletTestHelperException(response);
    }
    // Unchecked: the response is deserialized into the runtime class of 'object'.
    return (T) objectMapper.readValue(response.getContentAsString(),
            object.getClass());
}
/**
 * Fetch the latest version of the given entity as the current test user.
 *
 * @param <T> the entity type
 * @param entity an entity whose id and class identify what to fetch
 * @param extraParams optional extra request parameters (may be null)
 * @return the entity
 * @throws Exception
 */
public <T extends Entity> T getEntity(T entity,
        Map<String, String> extraParams) throws Exception {
    return (T) getEntityById(entity.getClass(), entity.getId(), extraParams);
}
/**
 * Fetch an entity by id as the current test user.
 *
 * @param <T> the entity type
 * @param clazz the class to deserialize the response into
 * @param id the entity id
 * @param extraParams optional extra request parameters (may be null)
 * @return the entity
 * @throws Exception
 */
public <T extends Entity> T getEntityById(Class<? extends T> clazz, String id,
        Map<String, String> extraParams) throws Exception {
    return ServletTestHelper.getEntity(dispatchServlet, clazz, id,
            username, extraParams);
}
/**
 * Update the given entity as the current test user.
 *
 * @param <T> the entity type
 * @param entity the entity to update (its etag must be current)
 * @param extraParams optional extra request parameters (may be null)
 * @return the updated entity as returned by the server
 * @throws Exception
 */
public <T extends Entity> T updateEntity(T entity,
        Map<String, String> extraParams) throws Exception {
    return ServletTestHelper.updateEntity(dispatchServlet, entity,
            username, extraParams);
}
/**
 * Delete an entity by id as the current test user.
 *
 * @param <T> the entity type
 * @param clazz the entity class (used to build the request)
 * @param id the id of the entity to delete
 * @param extraParams optional extra request parameters (may be null)
 * @throws Exception
 */
public <T extends Entity> void deleteEntity(Class<? extends T> clazz,
        String id, Map<String, String> extraParams) throws Exception {
    ServletTestHelper.deleteEntity(dispatchServlet, clazz, id, username,
            extraParams);
}
/**
 * Run a query as the current test user.
 *
 * @param query the query string to execute
 * @return the query results
 * @throws Exception
 */
public QueryResults query(String query) throws Exception {
    return ServletTestHelper.query(dispatchServlet, query, username);
}
/**
 * Fetch the ACL of the given entity as the current test user.
 *
 * @param <T> the entity type
 * @param entity the entity whose ACL to fetch
 * @return the entity's access control list
 * @throws ServletException
 * @throws IOException
 * @throws ACLInheritanceException if the entity inherits its ACL from a benefactor
 */
public <T extends Entity> AccessControlList getEntityACL(T entity) throws ServletException,
        IOException, ACLInheritanceException {
    return ServletTestHelper.getEntityACL(dispatchServlet, entity.getId(),
            username);
}
/**
 * Update the ACL of the given entity as the current test user.
 *
 * @param <T> the entity type
 * @param entity the entity whose ACL is being updated
 * @param entityACL the new access control list
 * @return the updated ACL as returned by the server
 * @throws ServletException
 * @throws IOException
 */
public <T extends Entity> AccessControlList updateEntityAcl(
        T entity, AccessControlList entityACL)
        throws ServletException, IOException {
    return ServletTestHelper.updateEntityAcl(dispatchServlet, entity.getId(),
            entityACL, username);
}
/** Run a search as the current test user with the given request parameters. */
public SearchResults getSearchResults(Map<String, String> params) throws Exception {
    return ServletTestHelper.getSearchResults(dispatchServlet, username, params);
}
/**
 * Create the passed entity by making a POST request to the passed servlet,
 * with no extra request parameters.
 *
 * @param dispatchServlet the servlet to dispatch the request to
 * @param entity the entity to create
 * @param userId the id of the user making the request
 * @param <T> the entity type
 * @return the created entity
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Entity> T createEntity(
        HttpServlet dispatchServlet, T entity, String userId)
        throws ServletException, IOException {
    // Delegate to the full overload with no extra parameters.
    return createEntity(dispatchServlet, entity, userId, null);
}
/**
 * Create the passed entity by making a POST request to the passed servlet.
 * Expects an HTTP 201 (CREATED) response.
 *
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param entity the entity to create
 * @param userId the id of the user making the request
 * @param extraParams optional extra request parameters (may be null)
 * @param <T> the entity type
 * @return the created entity, deserialized from the response body
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Entity> T createEntity(
        HttpServlet dispatchServlet, T entity, String userId,
        Map<String, String> extraParams) throws ServletException,
        IOException {
    if (dispatchServlet == null)
        throw new IllegalArgumentException("Servlet cannot be null");
    // The server needs the concrete type name to know what to instantiate.
    entity.setEntityType(entity.getClass().getName());
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("POST");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.ENTITY);
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    if (null != extraParams) {
        for (Map.Entry<String, String> param : extraParams.entrySet()) {
            request.setParameter(param.getKey(), param.getValue());
        }
    }
    request.addHeader("Content-Type", "application/json; charset=UTF-8");
    StringWriter out = new StringWriter();
    objectMapper.writeValue(out, entity);
    String body = out.toString();
    request.setContent(body.getBytes("UTF-8"));
    log.debug("About to send: " + body);
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.CREATED.value()) {
        throw new ServletTestHelperException(response);
    }
    // Unchecked: response is deserialized into the runtime class of 'entity'.
    @SuppressWarnings("unchecked")
    T returnedEntity = (T) objectMapper.readValue(
            response.getContentAsString(), entity.getClass());
    return returnedEntity;
}
/**
 * Get an entity using an id, with no extra request parameters.
 *
 * @param dispatchServlet the servlet to dispatch the request to
 * @param clazz the class to deserialize the response into
 * @param id the entity id
 * @param userId the id of the user making the request
 * @param <T> the entity type
 * @return the entity
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Entity> T getEntity(HttpServlet dispatchServlet,
        Class<? extends T> clazz, String id, String userId)
        throws ServletException, IOException {
    // Delegate to the full overload with no extra parameters.
    return getEntity(dispatchServlet, clazz, id, userId, null);
}
/**
 * Get an entity using an id: issues a GET to the entity URI as the given user and
 * deserializes the JSON response. Expects an HTTP 200 (OK) response.
 *
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param clazz the class to deserialize the response into
 * @param id the entity id
 * @param userId the id of the user making the request
 * @param extraParams optional extra request parameters (may be null)
 * @param <T> the entity type
 * @return the entity
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Entity> T getEntity(HttpServlet dispatchServlet,
        Class<? extends T> clazz, String id, String userId,
        Map<String, String> extraParams) throws ServletException,
        IOException {
    if (dispatchServlet == null) {
        throw new IllegalArgumentException("Servlet cannot be null");
    }
    MockHttpServletRequest req = new MockHttpServletRequest();
    req.setMethod("GET");
    req.addHeader("Accept", "application/json");
    req.setRequestURI(UrlHelpers.ENTITY + "/" + id);
    req.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    if (extraParams != null) {
        for (Map.Entry<String, String> extra : extraParams.entrySet()) {
            req.setParameter(extra.getKey(), extra.getValue());
        }
    }
    MockHttpServletResponse resp = new MockHttpServletResponse();
    dispatchServlet.service(req, resp);
    log.debug("Results: " + resp.getContentAsString());
    if (resp.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(resp);
    }
    return (T) objectMapper.readValue(resp.getContentAsString(), clazz);
}
/**
 * Get a specific version of a versionable entity. Expects an HTTP 200 (OK) response.
 *
 * @param <T> the entity type
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param clazz the class to deserialize the response into
 * @param id the entity id
 * @param versionNumber the version of the entity to fetch
 * @param userId the id of the user making the request
 * @return the requested version of the entity
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Versionable> T getEntityForVersion(
        HttpServlet dispatchServlet, Class<? extends T> clazz, String id,
        Long versionNumber, String userId) throws ServletException,
        IOException {
    if (dispatchServlet == null)
        throw new IllegalArgumentException("Servlet cannot be null");
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("GET");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.ENTITY + "/" + id
            + UrlHelpers.VERSION + "/" + versionNumber);
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(response);
    }
    return (T) objectMapper.readValue(response.getContentAsString(), clazz);
}
/**
 * Get the annotations for an entity. Expects an HTTP 200 (OK) response.
 *
 * @param <T> the entity type
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param clazz the entity class (unused in the request; kept for call-site symmetry)
 * @param id the entity id
 * @param userId the id of the user making the request
 * @return the entity's annotations
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Entity> Annotations getEntityAnnotations(
        HttpServlet dispatchServlet, Class<? extends T> clazz, String id,
        String userId) throws ServletException, IOException {
    if (dispatchServlet == null)
        throw new IllegalArgumentException("Servlet cannot be null");
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("GET");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.ENTITY + "/" + id
            + UrlHelpers.ANNOTATIONS);
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(response);
    }
    return objectMapper.readValue(response.getContentAsString(),
            Annotations.class);
}
/**
 * Get the path (chain of ancestors) for an entity. Expects an HTTP 200 (OK) response.
 *
 * @param <T> the entity type
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param clazz the entity class (unused in the request; kept for signature compatibility)
 * @param id the entity id
 * @param userId the id of the user making the request
 * @return the entity's path
 * @throws ServletException
 * @throws IOException
 * @throws JSONException
 */
public static <T extends Entity> EntityPath getEntityPath(
        HttpServlet dispatchServlet, Class<? extends T> clazz, String id,
        String userId) throws ServletException, IOException, JSONException {
    if (dispatchServlet == null)
        throw new IllegalArgumentException("Servlet cannot be null");
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("GET");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.ENTITY + "/" + id + UrlHelpers.PATH);
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(response);
    }
    // BUG FIX: deserialize into EntityPath, not 'clazz'. The original
    // readValue(..., clazz) produced an entity instance and the cast to
    // EntityPath was guaranteed to fail at runtime with a ClassCastException.
    return objectMapper.readValue(response.getContentAsString(), EntityPath.class);
}
/**
 * Get the annotations of a specific version of an entity. Expects an HTTP 200 (OK) response.
 *
 * @param <T> the entity type
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param clazz the entity class (unused in the request; kept for call-site symmetry)
 * @param id the entity id
 * @param versionNumber the version whose annotations to fetch
 * @param userId the id of the user making the request
 * @return the annotations of the requested version
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Entity> Annotations getEntityAnnotationsForVersion(
        HttpServlet dispatchServlet, Class<? extends T> clazz, String id,
        Long versionNumber, String userId) throws ServletException,
        IOException {
    if (dispatchServlet == null)
        throw new IllegalArgumentException("Servlet cannot be null");
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("GET");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.ENTITY + "/" + id
            + UrlHelpers.VERSION + "/" + versionNumber
            + UrlHelpers.ANNOTATIONS);
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(response);
    }
    return objectMapper.readValue(response.getContentAsString(),
            Annotations.class);
}
/**
 * Update the annotations for an entity via PUT. The annotations' current etag is sent
 * for optimistic concurrency control. Expects an HTTP 200 (OK) response.
 *
 * @param <T> the entity type
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param clazz the entity class (unused in the request; kept for call-site symmetry)
 * @param updatedAnnos the annotations to store (id and etag must be set)
 * @param userId the id of the user making the request
 * @return the updated annotations as returned by the server
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Entity> Annotations updateEntityAnnotations(
        HttpServlet dispatchServlet, Class<? extends T> clazz,
        Annotations updatedAnnos, String userId) throws ServletException,
        IOException {
    if (dispatchServlet == null)
        throw new IllegalArgumentException("Servlet cannot be null");
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("PUT");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.ENTITY + "/" + updatedAnnos.getId()
            + UrlHelpers.ANNOTATIONS);
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    // Etag header enables the server's optimistic-locking conflict detection.
    request.addHeader(ServiceConstants.ETAG_HEADER, updatedAnnos.getEtag());
    request.addHeader("Content-Type", "application/json; charset=UTF-8");
    StringWriter out = new StringWriter();
    objectMapper.writeValue(out, updatedAnnos);
    String body = out.toString();
    request.setContent(body.getBytes("UTF-8"));
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(response);
    }
    return objectMapper.readValue(response.getContentAsString(),
            Annotations.class);
}
/**
 * Update an entity, with no extra request parameters.
 *
 * @param dispatchServlet the servlet to dispatch the request to
 * @param entity the entity to update (its etag must be current)
 * @param userId the id of the user making the request
 * @param <T> the entity type
 * @return the updated entity
 * @throws ServletException
 * @throws IOException
 */
public static <T extends Entity> T updateEntity(
        HttpServlet dispatchServlet, T entity, String userId)
        throws ServletException, IOException {
    // Delegate to the full overload with no extra parameters.
    return updateEntity(dispatchServlet, entity, userId, null);
}
/**
 * Update an entity via PUT. The entity's current etag is sent for optimistic
 * concurrency control. Expects an HTTP 200 (OK) response.
 *
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param entity the entity to update (id and etag must be set)
 * @param userId the id of the user making the request
 * @param extraParams optional extra request parameters (may be null)
 * @param <T> the entity type
 * @return the updated entity, deserialized from the response body
 * @throws ServletException
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static <T extends Entity> T updateEntity(
        HttpServlet dispatchServlet, T entity, String userId,
        Map<String, String> extraParams) throws ServletException,
        IOException {
    if (dispatchServlet == null)
        throw new IllegalArgumentException("Servlet cannot be null");
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("PUT");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.ENTITY + "/" + entity.getId());
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    if (null != extraParams) {
        for (Map.Entry<String, String> param : extraParams.entrySet()) {
            request.setParameter(param.getKey(), param.getValue());
        }
    }
    // Etag header enables the server's optimistic-locking conflict detection.
    request.addHeader(ServiceConstants.ETAG_HEADER, entity.getEtag());
    request.addHeader("Content-Type", "application/json; charset=UTF-8");
    StringWriter out = new StringWriter();
    objectMapper.writeValue(out, entity);
    String body = out.toString();
    request.setContent(body.getBytes("UTF-8"));
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(response);
    }
    return (T) objectMapper.readValue(response.getContentAsString(),
            entity.getClass());
}
/**
 * Create a new version of a versionable entity via PUT to the entity's version URI.
 * The entity's current etag is sent for optimistic concurrency control.
 * Expects an HTTP 200 (OK) response.
 *
 * @param <T> the entity type
 * @param dispatchServlet the servlet to dispatch the request to (required)
 * @param entity the versionable entity (id and etag must be set)
 * @param userId the id of the user making the request
 * @return the entity with its new version, deserialized from the response body
 * @throws ServletException
 * @throws IOException
 */
@SuppressWarnings("unchecked")
public static <T extends Versionable> T createNewVersion(
        HttpServlet dispatchServlet, T entity, String userId)
        throws ServletException, IOException {
    if (dispatchServlet == null)
        throw new IllegalArgumentException("Servlet cannot be null");
    // The server needs the concrete type name to know what to instantiate.
    entity.setEntityType(entity.getClass().getName());
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("PUT");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.ENTITY + "/" + entity.getId()
            + UrlHelpers.VERSION);
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    request.addHeader(ServiceConstants.ETAG_HEADER, entity.getEtag());
    request.addHeader("Content-Type", "application/json; charset=UTF-8");
    StringWriter out = new StringWriter();
    objectMapper.writeValue(out, entity);
    String body = out.toString();
    request.setContent(body.getBytes("UTF-8"));
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    if (response.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(response);
    }
    return (T) objectMapper.readValue(response.getContentAsString(),
            entity.getClass());
}
/**
* Get all objects of type.
*
* @param <T>
* @param requestUrl
* @param clazz
* @return
* @throws IOException
* @throws ServletException
* @throws JSONException
* @throws Exception
*/
@Deprecated
public static <T extends Entity> PaginatedResults<T> getAllEntites(
HttpServlet dispatchServlet, Class<? extends T> clazz,
Integer offset, Integer limit, String sort, Boolean ascending,
String userId) throws ServletException, IOException, JSONException {
if (dispatchServlet == null)
throw new IllegalArgumentException("Servlet cannot be null");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
if (offset != null) {
request.setParameter(ServiceConstants.PAGINATION_OFFSET_PARAM,
offset.toString());
}
if (limit != null) {
request.setParameter(ServiceConstants.PAGINATION_LIMIT_PARAM,
limit.toString());
}
if (sort != null) {
request.setParameter(ServiceConstants.SORT_BY_PARAM, sort);
}
if (ascending != null) {
request.setParameter(ServiceConstants.ASCENDING_PARAM,
ascending.toString());
}
request.setRequestURI(UrlHelpers.ENTITY);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return createPaginatedResultsFromJSON(response.getContentAsString(),
clazz);
}
/**
* Get all objects of type.
*
* @param <T>
* @param requestUrl
* @param clazz
* @return
* @throws IOException
* @throws ServletException
* @throws JSONException
* @throws Exception
*/
public static <T extends Versionable> PaginatedResults<T> getAllVersionsOfEntity(
HttpServlet dispatchServlet, Class<? extends T> clazz,
String entityId, Integer offset, Integer limit, String userId)
throws ServletException, IOException, JSONException {
if (dispatchServlet == null)
throw new IllegalArgumentException("Servlet cannot be null");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
if (offset != null) {
request.setParameter(ServiceConstants.PAGINATION_OFFSET_PARAM,
offset.toString());
}
if (limit != null) {
request.setParameter(ServiceConstants.PAGINATION_LIMIT_PARAM,
limit.toString());
}
request.setRequestURI(UrlHelpers.ENTITY + "/" + entityId
+ UrlHelpers.VERSION);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return createPaginatedResultsFromJSON(response.getContentAsString(),
clazz);
}
/**
* We need extra help to convert from JSON to a PaginatedResults
*
* @param <T>
* @param json
* @param clazz
* @return
* @throws JSONException
* @throws IOException
* @throws JsonMappingException
* @throws JsonParseException
*/
public static <T extends JSONEntity> PaginatedResults<T> createPaginatedResultsFromJSON(
String jsonString, Class<? extends T> clazz) throws JSONException,
JsonParseException, JsonMappingException, IOException {
PaginatedResults<T> pr = new PaginatedResults<T>(clazz);
try {
pr.initializeFromJSONObject(new JSONObjectAdapterImpl(jsonString));
return pr;
} catch (JSONObjectAdapterException e) {
throw new RuntimeException(e);
}
}
/**
* Delete an entity
*
* @param dispatchServlet
* @param clazz
* @param id
* @param userId
* @param <T>
* @throws ServletException
* @throws IOException
*/
public static <T extends Entity> void deleteEntity(
HttpServlet dispatchServlet, Class<? extends T> clazz, String id,
String userId) throws ServletException, IOException {
ServletTestHelper
.deleteEntity(dispatchServlet, clazz, id, userId, null);
}
/**
* Delete an entity
*
* @param dispatchServlet
* @param clazz
* @param id
* @param userId
* @param extraParams
* @param <T>
* @throws ServletException
* @throws IOException
*/
public static <T extends Entity> void deleteEntity(
HttpServlet dispatchServlet, Class<? extends T> clazz, String id,
String userId, Map<String, String> extraParams)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("DELETE");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
if (null != extraParams) {
for (Map.Entry<String, String> param : extraParams.entrySet()) {
request.setParameter(param.getKey(), param.getValue());
}
}
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.NO_CONTENT.value()) {
throw new ServletTestHelperException(response);
}
}
/**
* Delete a specfic version of an entity
*
* @param <T>
* @param requestUrl
* @param clazz
* @param id
* @return
* @throws IOException
* @throws ServletException
* @throws Exception
*/
public static <T extends Entity> void deleteEntityVersion(
HttpServlet dispatchServlet, Class<? extends T> clazz, String id,
Long versionNumber, String userId) throws ServletException,
IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("DELETE");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id
+ UrlHelpers.VERSION + "/" + versionNumber);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.NO_CONTENT.value()) {
throw new ServletTestHelperException(response);
}
}
/**
* @param <T>
* @param dispatchServlet
* @param query
* @param userId
* @return the query results
* @throws ServletException
* @throws IOException
*/
public static <T extends Entity> QueryResults<Map<String,Object>> query(
HttpServlet dispatchServlet, String query,
String userId) throws ServletException,
IOException {
if (dispatchServlet == null)
throw new IllegalArgumentException("Servlet cannot be null");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.QUERY);
request.setParameter(ServiceConstants.QUERY_PARAM, query);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return objectMapper.readValue(response.getContentAsString(),
QueryResults.class);
}
/**
* create the Access Control List (ACL) for an entity.
*
* @param <T>
* @param dispatchServlet
* @param clazz
* @param id
* @param userId
* @return
* @throws ServletException
* @throws IOException
*/
public static <T extends Entity> AccessControlList createEntityACL(
HttpServlet dispatchServlet, String id,
AccessControlList entityACL, String userId)
throws ServletException, IOException {
if (dispatchServlet == null)
throw new IllegalArgumentException("Servlet cannot be null");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("POST");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id + UrlHelpers.ACL);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
request.addHeader("Content-Type", "application/json; charset=UTF-8");
StringWriter out = new StringWriter();
objectMapper.writeValue(out, entityACL);
String body = out.toString();
request.setContent(body.getBytes("UTF-8"));
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.CREATED.value()) {
throw new IllegalArgumentException(response.getErrorMessage() + " "
+ response.getStatus() + " for\n" + body);
}
return objectMapper.readValue(response.getContentAsString(),
AccessControlList.class);
}
/**
* Get the Access Control List (ACL) for an entity.
*
* @param <T>
* @param dispatchServlet
* @param clazz
* @param id
* @param userId
* @return
* @throws ServletException
* @throws IOException
* @throws ACLInheritanceException
*/
public static <T extends Entity> AccessControlList getEntityACL(
HttpServlet dispatchServlet, String id,
String userId) throws ServletException, IOException,
ACLInheritanceException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id + UrlHelpers.ACL);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() == HttpStatus.NOT_FOUND.value()) {
// This occurs when we try to access an ACL from an entity that
throw new ACLInheritanceException(response.getErrorMessage());
}
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return objectMapper.readValue(response.getContentAsString(),
AccessControlList.class);
}
/**
* Update an entity ACL
*
* @param <T>
* @param dispatchServlet
* @param clazz
* @param entityACL
* @param userId
* @return
* @throws ServletException
* @throws IOException
*/
public static <T extends Entity> AccessControlList updateEntityAcl(
HttpServlet dispatchServlet, String id,
AccessControlList entityACL, String userId)
throws ServletException, IOException {
if (dispatchServlet == null)
throw new IllegalArgumentException("Servlet cannot be null");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("PUT");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id + UrlHelpers.ACL);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
request.addHeader(ServiceConstants.ETAG_HEADER, entityACL.getEtag());
request.addHeader("Content-Type", "application/json; charset=UTF-8");
StringWriter out = new StringWriter();
objectMapper.writeValue(out, entityACL);
String body = out.toString();
request.setContent(body.getBytes("UTF-8"));
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return objectMapper.readValue(response.getContentAsString(),
AccessControlList.class);
}
/**
* Delete an entity ACL
*
* @param <T>
* @param dispatchServlet
* @param clazz
* @param entityACL
* @param userId
* @return
* @throws ServletException
* @throws IOException
*/
public static <T extends Entity> void deleteEntityACL(
HttpServlet dispatchServlet,
String resourceId, String userId) throws ServletException,
IOException {
if (dispatchServlet == null)
throw new IllegalArgumentException("Servlet cannot be null");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("DELETE");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + resourceId
+ UrlHelpers.ACL);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.NO_CONTENT.value()) {
throw new ServletTestHelperException(response);
}
}
/**
* Get the principals
*
* @param dispatchServlet
* @param userId
* @return the principals
* @throws ServletException
* @throws IOException
*/
public static PaginatedResults<UserProfile> getUsers(
HttpServlet dispatchServlet, String userId)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.USER);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
PaginatedResults<UserProfile> us = deserializePaginatedResults(
response.getContentAsString(), UserProfile.class);
return us;
}
public static <T extends JSONEntity> PaginatedResults<T> deserializePaginatedResults(String json, Class<T> clazz) {
try {
PaginatedResults<T> prs = new PaginatedResults<T>(clazz);
prs.initializeFromJSONObject(new JSONObjectAdapterImpl(json));
return prs;
} catch (JSONObjectAdapterException e) {
throw new RuntimeException(e);
}
}
/**
* Get the principals
*
* @param dispatchServlet
* @param userId
* @return the principals
* @throws ServletException
* @throws IOException
*/
public static PaginatedResults<UserGroup> getGroups(
HttpServlet dispatchServlet, String userId)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.USERGROUP);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
PaginatedResults<UserGroup> us = deserializePaginatedResults(
response.getContentAsString(), UserGroup.class);
return us;
}
/**
* calls 'hasAccess'
*
* @param <T>
* @param dispatchServlet
* @param clazz
* @param id
* @param userId
* @return
* @throws ServletException
* @throws IOException
*/
public static <T extends Entity> BooleanResult hasAccess(
HttpServlet dispatchServlet, Class<? extends T> clazz, String id,
String userId, String accessType) throws ServletException,
IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id
+ UrlHelpers.ACCESS);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
request.setParameter(UrlHelpers.ACCESS_TYPE_PARAM, accessType);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return (BooleanResult) objectMapper.readValue(
response.getContentAsString(), BooleanResult.class);
}
/**
* Start the a system backup.
*
* @param dispatchServlet
* @param userId
* @return
* @throws ServletException
* @throws IOException
*/
public static BackupRestoreStatus startBackup(HttpServlet dispatchServlet,
String userId, BackupSubmission submission)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("POST");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY_BACKUP_DAMEON);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
request.setParameter(AuthorizationConstants.MIGRATION_TYPE_PARAM, MigrationType.ENTITY.name());
// Add a body if we were provided a list of entities.
if (submission != null) {
request.addHeader("Content-Type", "application/json; charset=UTF-8");
StringWriter out = new StringWriter();
objectMapper.writeValue(out, submission);
String body = out.toString();
request.setContent(body.getBytes("UTF-8"));
}
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.CREATED.value()) {
throw new ServletTestHelperException(response);
}
return (BackupRestoreStatus) objectMapper.readValue(
response.getContentAsString(), BackupRestoreStatus.class);
}
/**
* Get the status of a backup/restore daemon
*
* @param dispatchServlet
* @param userId
* @param id
* @return
* @throws ServletException
* @throws IOException
*/
public static BackupRestoreStatus getDaemonStatus(
HttpServlet dispatchServlet, String userId, String id)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.DAEMON + "/" + id);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return (BackupRestoreStatus) objectMapper.readValue(
response.getContentAsString(), BackupRestoreStatus.class);
}
/**
* Get the status of a backup/restore daemon
*
* @param dispatchServlet
* @param userId
* @param id
* @return
* @throws ServletException
* @throws IOException
*/
public static StackStatus getStackStatus(
HttpServlet dispatchServlet)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.STACK_STATUS);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return (StackStatus) objectMapper.readValue(response.getContentAsString(), StackStatus.class);
}
/**
* Get the status of a backup/restore daemon
*
* @param dispatchServlet
* @param userId
* @param id
* @return
* @throws ServletException
* @throws IOException
*/
public static StackStatus updateStackStatus(
HttpServlet dispatchServlet, String userId, StackStatus toUpdate)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("PUT");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.STACK_STATUS);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
if (toUpdate != null) {
request.addHeader("Content-Type", "application/json; charset=UTF-8");
StringWriter out = new StringWriter();
objectMapper.writeValue(out, toUpdate);
String body = out.toString();
request.setContent(body.getBytes("UTF-8"));
}
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return (StackStatus) objectMapper.readValue(response.getContentAsString(), StackStatus.class);
}
/**
* Start a system restore daemon
*
* @param dispatchServlet
* @param uesrId
* @param fileName
* @return
* @throws ServletException
* @throws IOException
*/
public static BackupRestoreStatus startRestore(HttpServlet dispatchServlet,
String uesrId, RestoreSubmission file) throws ServletException,
IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("POST");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY_RESTORE_DAMEON);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, uesrId);
request.setParameter(AuthorizationConstants.MIGRATION_TYPE_PARAM, MigrationType.ENTITY.name());
request.addHeader("Content-Type", "application/json; charset=UTF-8");
StringWriter out = new StringWriter();
objectMapper.writeValue(out, file);
String body = out.toString();
request.setContent(body.getBytes("UTF-8"));
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.CREATED.value()) {
throw new ServletTestHelperException(response);
}
return (BackupRestoreStatus) objectMapper.readValue(
response.getContentAsString(), BackupRestoreStatus.class);
}
public static void terminateDaemon(HttpServlet dispatchServlet,
String userId, String id) throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("DELETE");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.DAEMON + "/" + id);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.NO_CONTENT.value()) {
throw new ServletTestHelperException(response);
}
}
public static EntityHeader getEntityType(HttpServlet dispatchServlet,
String id, String userId) throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id + UrlHelpers.TYPE);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return (EntityHeader) objectMapper.readValue(
response.getContentAsString(), EntityHeader.class);
}
public static PaginatedResults<EntityHeader> getEntityReferences(HttpServlet dispatchServlet,
String id, String userId) throws ServletException, IOException, JSONException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id + UrlHelpers.REFERENCED_BY);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return createPaginatedResultsFromJSON(response.getContentAsString(),
EntityHeader.class);
}
public static PaginatedResults<EntityHeader> getEntityReferences(HttpServlet dispatchServlet,
String id, Long versionNumber, String userId) throws ServletException, IOException, JSONException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id + UrlHelpers.VERSION + "/" + versionNumber + UrlHelpers.REFERENCED_BY);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return createPaginatedResultsFromJSON(response.getContentAsString(),
EntityHeader.class);
}
public static <T extends Entity> EntityHeader getEntityBenefactor(
HttpServlet dispatchServlet, String id, Class<? extends T> clazz,
String userId) throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id
+ UrlHelpers.BENEFACTOR);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return (EntityHeader) objectMapper.readValue(
response.getContentAsString(), EntityHeader.class);
}
/**
* Get search results
*/
public static SearchResults getSearchResults(HttpServlet dispatchServlet,
String userId, Map<String, String> extraParams) throws ServletException,
IOException, JSONException {
if (dispatchServlet == null)
throw new IllegalArgumentException("Servlet cannot be null");
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI("/search");
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
if (null != extraParams) {
for (Map.Entry<String, String> param : extraParams.entrySet()) {
request.setParameter(param.getKey(), param.getValue());
}
}
dispatchServlet.service(request, response);
log.info("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return objectMapper.readValue(response.getContentAsString(),
SearchResults.class); }
/**
*
* @param dispatchServlet
* @param param
* @return
* @throws ServletException
* @throws IOException
*/
public static ConceptResponsePage getConceptsForParent(String parentId, String pefix, int limit, int offest)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.CONCEPT_ID_CHILDERN_TRANSITIVE);
StringBuilder urlBuilder = new StringBuilder();
urlBuilder.append(UrlHelpers.CONCEPT);
urlBuilder.append("/");
urlBuilder.append(parentId);
urlBuilder.append(UrlHelpers.CHILDERN_TRANSITIVE);
if(pefix != null){
request.setParameter(UrlHelpers.PREFIX_FILTER, pefix);
}
request.setParameter(ServiceConstants.PAGINATION_LIMIT_PARAM, ""+limit);
request.setParameter(ServiceConstants.PAGINATION_OFFSET_PARAM, ""+offest);
request.setRequestURI(urlBuilder.toString());
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
// System.out.println(response.getContentAsString());
return (ConceptResponsePage) objectMapper.readValue(response.getContentAsString(), ConceptResponsePage.class);
}
/**
* Get a single concept from its id.
* @param dispatchServlet
* @param param
* @return
* @throws ServletException
* @throws IOException
*/
public static Concept getConcept(String id)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.CONCEPT_ID_CHILDERN_TRANSITIVE);
StringBuilder urlBuilder = new StringBuilder();
urlBuilder.append(UrlHelpers.CONCEPT);
urlBuilder.append("/");
urlBuilder.append(id);
request.setRequestURI(urlBuilder.toString());
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
// System.out.println(response.getContentAsString());
return (Concept) objectMapper.readValue(response.getContentAsString(), Concept.class);
}
/**
* Get a single concept from its id.
* @param dispatchServlet
* @param param
* @return
* @throws ServletException
* @throws IOException
*/
public static String getConceptAsJSONP(String id, String callbackName)
throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.CONCEPT_ID_CHILDERN_TRANSITIVE);
StringBuilder urlBuilder = new StringBuilder();
urlBuilder.append(UrlHelpers.CONCEPT);
urlBuilder.append("/");
urlBuilder.append(id);
request.setRequestURI(urlBuilder.toString());
// Add the header that indicates we want JSONP
request.addParameter(UrlHelpers.REQUEST_CALLBACK_JSONP, callbackName);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return response.getContentAsString();
}
public static UserEntityPermissions getUserEntityPermissions(HttpServlet dispatchServlet, String id, String userId) throws ServletException, IOException {
MockHttpServletRequest request = new MockHttpServletRequest();
MockHttpServletResponse response = new MockHttpServletResponse();
request.setMethod("GET");
request.addHeader("Accept", "application/json");
request.setRequestURI(UrlHelpers.ENTITY + "/" + id + UrlHelpers.PERMISSIONS);
request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
dispatchServlet.service(request, response);
log.debug("Results: " + response.getContentAsString());
if (response.getStatus() != HttpStatus.OK.value()) {
throw new ServletTestHelperException(response);
}
return (UserEntityPermissions) objectMapper.readValue(
response.getContentAsString(), UserEntityPermissions.class);
}
/**
 * Create an attachment token via a POST to the S3-token endpoint.
 * @param userId the id of the user making the request
 * @param attachentType which kind of object the attachment belongs to
 * @param id the id of the object the attachment is for
 * @param token the token to create; serialized as the JSON request body
 * @return the created token, parsed from the response body
 * @throws JSONObjectAdapterException
 * @throws IOException
 * @throws ServletException
 */
public static S3AttachmentToken createS3AttachmentToken(String userId, ServiceConstants.AttachmentType attachentType, String id, S3AttachmentToken token) throws JSONObjectAdapterException, ServletException, IOException {
    if (dispatchServlet == null) {
        throw new IllegalArgumentException("Servlet cannot be null");
    }
    if (id == null) {
        throw new IllegalArgumentException("Entity ID cannot be null");
    }
    if (token == null) {
        throw new IllegalArgumentException("Token cannot be null");
    }
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("POST");
    request.addHeader("Accept", "application/json");
    String uri = UrlHelpers.getAttachmentTypeURL(attachentType) + "/" + id + UrlHelpers.ATTACHMENT_S3_TOKEN;
    request.setRequestURI(uri);
    // Log instead of printing to stdout so diagnostics stay on the logger.
    log.debug("Request URL: " + request.getRequestURL());
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    request.addHeader("Content-Type", "application/json; charset=UTF-8");
    // Serialize the token as the JSON request body.
    StringWriter out = new StringWriter();
    objectMapper.writeValue(out, token);
    String body = out.toString();
    request.setContent(body.getBytes("UTF-8"));
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    // Creation endpoints answer 201 CREATED on success.
    if (response.getStatus() != HttpStatus.CREATED.value()) {
        throw new ServletTestHelperException(response);
    }
    return EntityFactory.createEntityFromJSONString(response.getContentAsString(), S3AttachmentToken.class);
}
/**
 * Get a pre-signed URL for an entity attachment.
 * @param userId the id of the user making the request
 * @param entityId the id of the entity the attachment belongs to
 * @param tokenId the id of the attachment token
 * @return the pre-signed URL
 * @throws JSONObjectAdapterException
 * @throws ServletException
 * @throws IOException
 */
public PresignedUrl getAttachmentUrl(String userId, String entityId, String tokenId) throws JSONObjectAdapterException, ServletException, IOException {
    // Entity attachments are the ENTITY flavor of the generic lookup.
    final AttachmentType type = AttachmentType.ENTITY;
    return getAttachmentUrl(userId, type, entityId, tokenId);
}
/**
 * Get a pre-signed URL for a user profile attachment.
 * @param userId the id of the user making the request
 * @param targetProfileId the id of the profile the attachment belongs to
 * @param tokenId the id of the attachment token
 * @return the pre-signed URL
 * @throws JSONObjectAdapterException
 * @throws ServletException
 * @throws IOException
 */
public PresignedUrl getUserProfileAttachmentUrl(String userId, String targetProfileId, String tokenId) throws JSONObjectAdapterException, ServletException, IOException{
    // Delegates to the generic lookup with the USER_PROFILE attachment type.
    return getAttachmentUrl(userId, AttachmentType.USER_PROFILE, targetProfileId, tokenId);
}
/**
 * Get a pre-signed URL for an attachment of any supported type.
 * @param userId the id of the user making the request
 * @param type which kind of object the attachment belongs to
 * @param id the id of the object the attachment is for
 * @param tokenId the id of the attachment token to resolve
 * @return the pre-signed URL, parsed from the response body
 * @throws JSONObjectAdapterException
 * @throws ServletException
 * @throws IOException
 */
public PresignedUrl getAttachmentUrl(String userId, AttachmentType type, String id, String tokenId) throws JSONObjectAdapterException, ServletException, IOException {
    if (id == null) {
        throw new IllegalArgumentException("ID cannot be null");
    }
    if (tokenId == null) {
        throw new IllegalArgumentException("TokenId cannot be null");
    }
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("POST");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.getAttachmentTypeURL(type) + "/" + id + UrlHelpers.ATTACHMENT_URL);
    // Log instead of printing to stdout so diagnostics stay on the logger.
    log.debug("Request URL: " + request.getRequestURL());
    request.setParameter(AuthorizationConstants.USER_ID_PARAM, userId);
    request.addHeader("Content-Type", "application/json; charset=UTF-8");
    // The request body is a PresignedUrl carrying only the token id.
    PresignedUrl url = new PresignedUrl();
    url.setTokenID(tokenId);
    StringWriter out = new StringWriter();
    objectMapper.writeValue(out, url);
    String body = out.toString();
    request.setContent(body.getBytes("UTF-8"));
    dispatchServlet.service(request, response);
    log.debug("Results: " + response.getContentAsString());
    // This endpoint answers 201 CREATED on success.
    if (response.getStatus() != HttpStatus.CREATED.value()) {
        throw new ServletTestHelperException(response);
    }
    return EntityFactory.createEntityFromJSONString(response.getContentAsString(), PresignedUrl.class);
}
/**
 * Issue a HEAD request to the health-check endpoint and return the response body.
 * @return the response body of the health check (may be empty for a HEAD request)
 * @throws ServletException
 * @throws IOException if the request fails or the status is not 200 OK
 */
public String checkAmznHealth() throws ServletException, IOException {
    MockHttpServletRequest request = new MockHttpServletRequest();
    MockHttpServletResponse response = new MockHttpServletResponse();
    request.setMethod("HEAD");
    request.addHeader("Accept", "application/json");
    request.setRequestURI(UrlHelpers.HEALTHCHECK);
    // Log instead of printing to stdout so diagnostics stay on the logger.
    log.debug("Request URL: " + request.getRequestURL());
    dispatchServlet.service(request, response);
    if (response.getStatus() != HttpStatus.OK.value()) {
        throw new ServletTestHelperException(response);
    }
    // The dead "String s" accumulator was removed; return the content directly.
    return response.getContentAsString();
}
} |
package rosick.mckesson.IV.tut16;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL12.*;
import static org.lwjgl.opengl.GL13.*;
import static org.lwjgl.opengl.GL15.*;
import static org.lwjgl.opengl.GL20.*;
import static org.lwjgl.opengl.GL21.*;
import static org.lwjgl.opengl.GL30.*;
import static org.lwjgl.opengl.GL31.*;
import static org.lwjgl.opengl.GL32.*;
import static org.lwjgl.opengl.GL33.*;
import static org.lwjgl.opengl.EXTTextureFilterAnisotropic.*;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import org.lwjgl.BufferUtils;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.GL12;
import rosick.LWJGLWindow;
import rosick.PortingUtils.BufferableData;
import rosick.jglsdk.framework.Framework;
import rosick.jglsdk.framework.Mesh;
import rosick.jglsdk.framework.Timer;
import rosick.jglsdk.glimg.ImageSet;
import rosick.jglsdk.glimg.ImageSet.Dimensions;
import rosick.jglsdk.glimg.ImageSet.SingleImage;
import rosick.jglsdk.glimg.loaders.Dds;
import rosick.jglsdk.glm.Glm;
import rosick.jglsdk.glm.Mat4;
import rosick.jglsdk.glm.Vec3;
import rosick.jglsdk.glutil.MatrixStack;
public class GammaCheckers02 extends LWJGLWindow {
public static void main(String[] args) {
new GammaCheckers02().start();
}
private final static int FLOAT_SIZE = Float.SIZE / 8;
private final String TUTORIAL_DATAPATH = "/rosick/mckesson/IV/tut16/data/";
private class ProgramData {
int theProgram;
int modelToCameraMatrixUnif;
}
private final int g_projectionBlockIndex = 0;
private final int g_colorTexUnit = 0;
private ProgramData g_progNoGamma;
private ProgramData g_progGamma;
private int g_projectionUniformBuffer;
private int g_linearTexture;
private int g_gammaTexture;
private float g_fzNear = 1.0f;
private float g_fzFar = 1000.0f;
private MatrixStack modelMatrix = new MatrixStack();
private FloatBuffer tempFloatBuffer16 = BufferUtils.createFloatBuffer(16);
private ProgramData loadProgram(String strVertexShader, String strFragmentShader) {
ArrayList<Integer> shaderList = new ArrayList<>();
shaderList.add(Framework.loadShader(GL_VERTEX_SHADER, strVertexShader));
shaderList.add(Framework.loadShader(GL_FRAGMENT_SHADER, strFragmentShader));
ProgramData data = new ProgramData();
data.theProgram = Framework.createProgram(shaderList);
data.modelToCameraMatrixUnif = glGetUniformLocation(data.theProgram, "modelToCameraMatrix");
int projectionBlock = glGetUniformBlockIndex(data.theProgram, "Projection");
glUniformBlockBinding(data.theProgram, projectionBlock, g_projectionBlockIndex);
int colorTextureUnif = glGetUniformLocation(data.theProgram, "colorTexture");
glUseProgram(data.theProgram);
glUniform1i(colorTextureUnif, g_colorTexUnit);
glUseProgram(0);
return data;
}
private void initializePrograms() {
g_progNoGamma = loadProgram(TUTORIAL_DATAPATH + "PT.vert", TUTORIAL_DATAPATH + "textureNoGamma.frag");
g_progGamma = loadProgram(TUTORIAL_DATAPATH + "PT.vert", TUTORIAL_DATAPATH + "textureGamma.frag");
}
@Override
protected void init() {
initializePrograms();
try {
g_pCorridor = new Mesh(TUTORIAL_DATAPATH + "Corridor.xml");
g_pPlane = new Mesh(TUTORIAL_DATAPATH + "BigPlane.xml");
} catch (Exception exception) {
exception.printStackTrace();
System.exit(0);
}
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
final float depthZNear = 0.0f;
final float depthZFar = 1.0f;
glEnable(GL_DEPTH_TEST);
glDepthMask(true);
glDepthFunc(GL_LEQUAL);
glDepthRange(depthZNear, depthZFar);
glEnable(GL_DEPTH_CLAMP);
// Setup our Uniform Buffers
g_projectionUniformBuffer = glGenBuffers();
glBindBuffer(GL_UNIFORM_BUFFER, g_projectionUniformBuffer);
glBufferData(GL_UNIFORM_BUFFER, ProjectionBlock.SIZE, GL_DYNAMIC_DRAW);
glBindBufferRange(GL_UNIFORM_BUFFER, g_projectionBlockIndex, g_projectionUniformBuffer,
0, ProjectionBlock.SIZE);
glBindBuffer(GL_UNIFORM_BUFFER, 0);
loadCheckerTexture();
createSamplers();
}
@Override
protected void update() {
while (Keyboard.next()) {
boolean particularKeyPressed = false;
if (Keyboard.getEventKeyState()) {
switch (Keyboard.getEventKey()) {
case Keyboard.KEY_A:
g_drawGammaProgram = !g_drawGammaProgram;
particularKeyPressed = true;
break;
case Keyboard.KEY_G:
g_drawGammaTexture = !g_drawGammaTexture;
particularKeyPressed = true;
break;
case Keyboard.KEY_SPACE:
g_drawGammaProgram = !g_drawGammaProgram;
g_drawGammaTexture = !g_drawGammaTexture;
particularKeyPressed = true;
break;
case Keyboard.KEY_Y:
g_drawCorridor = !g_drawCorridor;
break;
case Keyboard.KEY_P:
g_camTimer.togglePause();
break;
case Keyboard.KEY_ESCAPE:
leaveMainLoop();
break;
}
if (Keyboard.KEY_1 <= Keyboard.getEventKey() && Keyboard.getEventKey() <= Keyboard.KEY_9) {
int number = Keyboard.getEventKey() - Keyboard.KEY_1;
if (number < NUM_SAMPLERS) {
g_currSampler = number;
}
}
}
if (particularKeyPressed) {
System.out.printf("
System.out.printf("Rendering:\t\t%s\n", g_drawGammaProgram ? "Gamma" : "Linear");
System.out.printf("Mipmap Generation:\t%s\n", g_drawGammaTexture ? "Gamma" : "Linear");
}
}
}
@Override
protected void display() {
glClearColor(0.75f, 0.75f, 1.0f, 1.0f);
glClearDepth(1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
g_camTimer.update((float) getElapsedTime());
float cyclicAngle = g_camTimer.getAlpha() * 6.28f;
float hOffset = (float) (Math.cos(cyclicAngle) * 0.25f);
float vOffset = (float) (Math.sin(cyclicAngle) * 0.25f);
modelMatrix.clear();
final Mat4 worldToCamMat = Glm.lookAt(
new Vec3(hOffset, 1.0f, -64.0f),
new Vec3(hOffset, -5.0f + vOffset, -44.0f),
new Vec3(0.0f, 1.0f, 0.0f));
modelMatrix.applyMatrix(worldToCamMat);
final ProgramData prog = g_drawGammaProgram ? g_progGamma : g_progNoGamma;
glUseProgram(prog.theProgram);
glUniformMatrix4(prog.modelToCameraMatrixUnif, false,
modelMatrix.top().fillAndFlipBuffer(tempFloatBuffer16));
glActiveTexture(GL_TEXTURE0 + g_colorTexUnit);
glBindTexture(GL_TEXTURE_2D, g_drawGammaTexture ? g_gammaTexture : g_linearTexture);
glBindSampler(g_colorTexUnit, g_samplers[g_currSampler]);
if (g_drawCorridor) {
g_pCorridor.render("tex");
} else {
g_pPlane.render("tex");
}
glBindSampler(g_colorTexUnit, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glUseProgram(0);
}
@Override
protected void reshape(int width, int height) {
MatrixStack persMatrix = new MatrixStack();
persMatrix.perspective(90.0f, (width / (float) height), g_fzNear, g_fzFar);
ProjectionBlock projData = new ProjectionBlock();
projData.cameraToClipMatrix = persMatrix.top();
glBindBuffer(GL_UNIFORM_BUFFER, g_projectionUniformBuffer);
glBufferSubData(GL_UNIFORM_BUFFER, 0, projData.fillAndFlipBuffer(tempFloatBuffer16));
glBindBuffer(GL_UNIFORM_BUFFER, 0);
glViewport(0, 0, width, height);
}
private class ProjectionBlock extends BufferableData<FloatBuffer> {
Mat4 cameraToClipMatrix;
static final int SIZE = 16 * FLOAT_SIZE;
@Override
public FloatBuffer fillBuffer(FloatBuffer buffer) {
return cameraToClipMatrix.fillBuffer(buffer);
}
}
private final int NUM_SAMPLERS = 2;
private Mesh g_pPlane;
private Mesh g_pCorridor;
private Timer g_camTimer = new Timer(Timer.Type.TT_LOOP, 5.0f);
private boolean g_drawCorridor;
private boolean g_drawGammaTexture;
private boolean g_drawGammaProgram;
private int g_samplers[] = new int[NUM_SAMPLERS];
private int g_currSampler;
private void createSamplers() {
for (int samplerIx = 0; samplerIx < NUM_SAMPLERS; samplerIx++) {
g_samplers[samplerIx] = glGenSamplers();
glSamplerParameteri(g_samplers[samplerIx], GL_TEXTURE_WRAP_S, GL_REPEAT);
glSamplerParameteri(g_samplers[samplerIx], GL_TEXTURE_WRAP_T, GL_REPEAT);
}
// Linear mipmap linear
glSamplerParameteri(g_samplers[0], GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glSamplerParameteri(g_samplers[0], GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
// Max anisotropic
float maxAniso = glGetFloat(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT);
glSamplerParameteri(g_samplers[1], GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glSamplerParameteri(g_samplers[1], GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glSamplerParameterf(g_samplers[1], GL_TEXTURE_MAX_ANISOTROPY_EXT, maxAniso);
}
private void loadCheckerTexture() {
try {
ImageSet pImageSet = Dds.loadFromFile(TUTORIAL_DATAPATH + "checker_linear.dds");
g_linearTexture = glGenTextures();
glBindTexture(GL_TEXTURE_2D, g_linearTexture);
for (int mipmapLevel = 0; mipmapLevel < pImageSet.getMipmapCount(); mipmapLevel++) {
SingleImage image = pImageSet.getImage(mipmapLevel, 0, 0);
Dimensions dims = image.getDimensions();
glTexImage2D(GL_TEXTURE_2D, mipmapLevel, GL_SRGB8, dims.width, dims.height, 0,
GL12.GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image.getImageData());
}
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, pImageSet.getMipmapCount() - 1);
glGenerateMipmap(GL_TEXTURE_2D);
pImageSet = Dds.loadFromFile(TUTORIAL_DATAPATH + "checker_gamma.dds");
g_gammaTexture = glGenTextures();
glBindTexture(GL_TEXTURE_2D, g_gammaTexture);
for (int mipmapLevel = 0; mipmapLevel < pImageSet.getMipmapCount(); mipmapLevel++) {
SingleImage image = pImageSet.getImage(mipmapLevel, 0, 0);
Dimensions dims = image.getDimensions();
glTexImage2D(GL_TEXTURE_2D, mipmapLevel, GL_SRGB8, dims.width, dims.height, 0,
GL12.GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, image.getImageData());
}
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, pImageSet.getMipmapCount() - 1);
glGenerateMipmap(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
} catch (Exception e) {
e.printStackTrace();
}
}
} |
package com.exedio.cope;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * Exercises data (BLOB) attributes: setting and reading values as byte[],
 * InputStream and File; null handling; and maximum-length enforcement.
 * NOTE(review): relies on AbstractLibTest helpers such as stream(), file(),
 * assertData(), assertEqualContent(), assertStreamClosed() and
 * deleteOnTearDown() defined outside this view.
 */
public class DataTest extends AbstractLibTest
{
    public DataTest()
    {
        super(Main.dataModel);
    }

    // The item under test; created fresh in setUp(), deleted on tear-down.
    private DataItem item;

    // Fixture byte arrays; the numeric suffix is the array length.
    // TODO rename by length
    private final byte[] data4 = new byte[]{-86,122,-8,23};
    private final byte[] data6 = new byte[]{-97,35,-126,86,19,-8};
    private final byte[] data8 = new byte[]{-54,104,-63,23,19,-45,71,-23};
    private final byte[] data10 = new byte[]{-97,19,-8,35,-126,-86,122,86,19,-8};
    private final byte[] data11 = new byte[]{22,-97,19,-8,35,-126,-86,122,86,19,-8};
    private final byte[] dataEmpty = new byte[]{};
    // Just over 1 MiB; filled in setUp() by repeating data8. Released in
    // tearDown() to keep memory usage down across the suite.
    private byte[] dataBig;

    public void setUp() throws Exception
    {
        super.setUp();

        // Build the big fixture by cycling through data8.
        final int data8Length = data8.length;
        final int dataBigLength = (1024*1024) + 77;
        dataBig = new byte[dataBigLength];
        for(int i = 0; i<dataBigLength; i++)
            dataBig[i] = data8[i % data8Length];

        deleteOnTearDown(item = new DataItem());
    }

    public void tearDown() throws Exception
    {
        // release memory
        dataBig = null;
        super.tearDown();
    }

    // Convenience overload: asserts against the default test item.
    private void assertIt(final byte[] expectedData) throws MandatoryViolationException, IOException
    {
        assertIt(expectedData, item);
    }

    // Convenience overload: uses the test's own oracle flag and model.
    private void assertIt(final byte[] expectedData, final DataItem item)
        throws MandatoryViolationException, IOException
    {
        assertIt(expectedData, item, oracle, model);
    }

    /**
     * Asserts that the item's data attribute holds exactly expectedData,
     * checking all three read paths: byte[], OutputStream and File.
     * When expectedData is null the attribute must read back as null/absent.
     */
    private static final void assertIt(final byte[] expectedData, final DataItem item, final boolean oracle, final Model model)
        throws MandatoryViolationException, IOException
    {
        // NOTE(review): 'oracle' presumably flags an Oracle database backend,
        // where an empty blob without a data directory reads back as null —
        // hence the extra condition; TODO confirm against AbstractLibTest.
        if(expectedData!=null && !(oracle && !model.getProperties().hasDatadirPath() && expectedData.length==0))
        {
            assertTrue(!item.isDataNull());
            assertEquals(expectedData.length, item.getDataLength());
            assertData(expectedData, item.getData());

            // Read path 2: stream the data into an OutputStream.
            final ByteArrayOutputStream tempStream = new ByteArrayOutputStream();
            item.getData(tempStream);
            assertData(expectedData, tempStream.toByteArray());

            // Read path 3: write the data to a (freshly deleted) temp file.
            final File tempFile = File.createTempFile("cope-DataTest.", ".tmp");
            assertTrue(tempFile.delete());
            assertFalse(tempFile.exists());
            item.getData(tempFile);
            assertTrue(tempFile.exists());
            assertEqualContent(expectedData, tempFile);
        }
        else
        {
            // Null data: length is -1, getData() returns null, the stream
            // receives nothing, and no file is created.
            assertTrue(item.isDataNull());
            assertEquals(-1, item.getDataLength());
            assertEquals(null, item.getData());

            final ByteArrayOutputStream tempStream = new ByteArrayOutputStream();
            item.getData(tempStream);
            assertEquals(0, tempStream.toByteArray().length);

            final File tempFile = File.createTempFile("cope-DataTest.", ".tmp");
            assertTrue(tempFile.delete());
            assertFalse(tempFile.exists());
            item.getData(tempFile);
            assertFalse(tempFile.exists());
        }
    }

    public void testData() throws MandatoryViolationException, IOException
    {
        // Sanity-check the fixtures used for the maximum-length tests.
        assertEquals(10, data10.length);
        assertEquals(11, data11.length);

        // test model
        assertEquals(item.TYPE, item.data.getType());
        assertEquals("data", item.data.getName());
        assertEquals(false, item.data.isMandatory());
        assertEqualsUnmodifiable(list(), item.data.getPatterns());
        assertEquals(item.data.DEFAULT_LENGTH, item.data.getMaximumLength());

        assertEquals(item.TYPE, item.data10.getType());
        assertEquals("data10", item.data10.getName());
        assertEquals(false, item.data10.isMandatory());
        assertEqualsUnmodifiable(list(), item.data10.getPatterns());
        assertEquals(10, item.data10.getMaximumLength());

        // lengthMax must reject non-positive maximum lengths.
        try
        {
            new DataAttribute(Item.OPTIONAL).lengthMax(0);
            fail();
        }
        catch(RuntimeException e)
        {
            assertEquals("maximum length must be greater zero, but was 0.", e.getMessage());
        }
        try
        {
            new DataAttribute(Item.OPTIONAL).lengthMax(-10);
            fail();
        }
        catch(RuntimeException e)
        {
            assertEquals("maximum length must be greater zero, but was -10.", e.getMessage());
        }

        // test data
        assertIt(null);

        // set byte[]
        item.setData(data4);
        assertIt(data4);

        item.setData(data6);
        assertIt(data6);

        item.setData(dataEmpty);
        assertIt(dataEmpty);

        item.setData(dataBig);
        assertIt(dataBig);

        item.setData((byte[])null);
        assertIt(null);

        // set InputStream
        item.setData(stream(data4));
        assertStreamClosed();
        assertIt(data4);

        item.setData(stream(data6));
        assertStreamClosed();
        assertIt(data6);

        item.setData(stream(dataEmpty));
        assertStreamClosed();
        assertIt(dataEmpty);

        item.setData(stream(dataBig));
        assertStreamClosed();
        assertIt(dataBig);

        item.setData((InputStream)null);
        assertIt(null);

        // set File
        item.setData(file(data8));
        assertIt(data8);

        item.setData(file(dataEmpty));
        assertIt(dataEmpty);

        item.setData(file(dataBig));
        assertIt(dataBig);

        item.setData((File)null);
        assertIt(null);

        // getData must reject null targets.
        try
        {
            item.getData((OutputStream)null);
            fail();
        }
        catch(NullPointerException e)
        {
            assertEquals(null, e.getMessage());
        }
        try
        {
            item.getData((File)null);
            fail();
        }
        catch(NullPointerException e)
        {
            assertEquals(null, e.getMessage());
        }

        // Inherited attributes behave the same on a subclass item.
        final DataSubItem subItem = new DataSubItem();
        deleteOnTearDown(subItem);

        subItem.setData(stream(data4));
        assertStreamClosed();
        assertIt(data4, subItem);
        assertEquals(data4.length, subItem.getDataLength());

        // test maximum length
        item.setData10(data10);
        assertData(data10, item.getData10());

        // byte[] path: the violation reports the offending length.
        try
        {
            item.setData10(data11);
            fail();
        }
        catch(DataLengthViolationException e)
        {
            assertEquals(item, e.getItem());
            assertEquals(item.data10, e.getDataAttribute());
            assertEquals(11, e.getLength());
            assertEquals("length violation on DataItem.0, 11 bytes is too long for DataItem#data10", e.getMessage());
        }
        assertData(data10, item.getData10());

        // stream path: the length is unknown up front, so it reports -1.
        try
        {
            item.setData10(stream(data11));
            fail();
        }
        catch(DataLengthViolationException e)
        {
            assertEquals(item, e.getItem());
            assertEquals(item.data10, e.getDataAttribute());
            assertEquals(-1, e.getLength());
            // NOTE(review): this passes e.getMessage() as both the failure
            // message and the actual value, so the "expected" literal is never
            // effectively compared — looks like a latent test bug; confirm.
            assertEquals(e.getMessage(), "length violation on DataItem.0, is too long for DataItem#data10", e.getMessage());
        }
        if(model.getProperties().hasDatadirPath()) // TODO should not be needed
            item.setData10(data10);
        assertData(data10, item.getData10());

        // file path: like byte[], the violation reports the offending length.
        try
        {
            item.setData10(file(data11));
            fail();
        }
        catch(DataLengthViolationException e)
        {
            assertEquals(item, e.getItem());
            assertEquals(item.data10, e.getDataAttribute());
            assertEquals(11, e.getLength());
            assertEquals("length violation on DataItem.0, 11 bytes is too long for DataItem#data10", e.getMessage());
        }
        assertData(data10, item.getData10());
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.